Lower.cpp 1.1 MB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211
54221542315424154251542615427154281542915430154311543215433154341543515436154371543815439154401544115442154431544415445154461544715448154491545015451154521545315454154551545615457154581545915460154611546215463154641546515466154671546815469154701547115472154731547415475154761547715478154791548015481154821548315484154851548615487154881548915490154911549215493154941549515496154971549815499155001550115502155031550415505155061550715508155091551015511155121551315514155151551615517155181551915520155211552215523155241552515526155271552815529155301553115532155331553415535155361553715538155391554015541155421554315544155451554615547155481554915550155511555215553155541555515556155571555815559155601556115562155631556415565155661556715568155691557015571155721557315574155751557615577155781557915580155811558215583155841558515586155871558815589155901559115592155931559415595155961559715598155991560015601156021560315604156051560615607156081560915610156111561215613156141561515616156171561815619156201562115622156231562415625156261562715628156291563015631156321563315634156351563615637156381563915640156411564215643156441564515646156471564815649156501565115652156531565415655156561565715658156591566015661156621566315664156651566615667156681566915670156711567215673156741567515676156771567815679156801568115682156831568415685156861568715688156891569015691156921569315694156951569615697156981569915700157011570215703157041570515706157071570815709157101571115712157131571415715157161571715718157191572015721157221572315724157251572615727157281572915730157311573215733157341573515736157371573815739157401574115742157431574415745157461574715748157491575015751157521575315754157551575615757157581575915760157611576215763157641576515766157671576815769157701577115772157731577415775157761577715778157791578015781157821578315784157851578615787157881578915790157911579215793157941579515796157971579815799158001580115802158031580415805158061580715808158091581015811158121581315814158151581615817158181581915820158211
58221582315824158251582615827158281582915830158311583215833158341583515836158371583815839158401584115842158431584415845158461584715848158491585015851158521585315854158551585615857158581585915860158611586215863158641586515866158671586815869158701587115872158731587415875158761587715878158791588015881158821588315884158851588615887158881588915890158911589215893158941589515896158971589815899159001590115902159031590415905159061590715908159091591015911159121591315914159151591615917159181591915920159211592215923159241592515926159271592815929159301593115932159331593415935159361593715938159391594015941159421594315944159451594615947159481594915950159511595215953159541595515956159571595815959159601596115962159631596415965159661596715968159691597015971159721597315974159751597615977159781597915980159811598215983159841598515986159871598815989159901599115992159931599415995159961599715998159991600016001160021600316004160051600616007160081600916010160111601216013160141601516016160171601816019160201602116022160231602416025160261602716028160291603016031160321603316034160351603616037160381603916040160411604216043160441604516046160471604816049160501605116052160531605416055160561605716058160591606016061160621606316064160651606616067160681606916070160711607216073160741607516076160771607816079160801608116082160831608416085160861608716088160891609016091160921609316094160951609616097160981609916100161011610216103161041610516106161071610816109161101611116112161131611416115161161611716118161191612016121161221612316124161251612616127161281612916130161311613216133161341613516136161371613816139161401614116142161431614416145161461614716148161491615016151161521615316154161551615616157161581615916160161611616216163161641616516166161671616816169161701617116172161731617416175161761617716178161791618016181161821618316184161851618616187161881618916190161911619216193161941619516196161971619816199162001620116202162031620416205162061620716208162091621016211162121621316214162151621616217162181621916220162211
62221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211
66221662316624166251662616627166281662916630166311663216633166341663516636166371663816639166401664116642166431664416645166461664716648166491665016651166521665316654166551665616657166581665916660166611666216663166641666516666166671666816669166701667116672166731667416675166761667716678166791668016681166821668316684166851668616687166881668916690166911669216693166941669516696166971669816699167001670116702167031670416705167061670716708167091671016711167121671316714167151671616717167181671916720167211672216723167241672516726167271672816729167301673116732167331673416735167361673716738167391674016741167421674316744167451674616747167481674916750167511675216753167541675516756167571675816759167601676116762167631676416765167661676716768167691677016771167721677316774167751677616777167781677916780167811678216783167841678516786167871678816789167901679116792167931679416795167961679716798167991680016801168021680316804168051680616807168081680916810168111681216813168141681516816168171681816819168201682116822168231682416825168261682716828168291683016831168321683316834168351683616837168381683916840168411684216843168441684516846168471684816849168501685116852168531685416855168561685716858168591686016861168621686316864168651686616867168681686916870168711687216873168741687516876168771687816879168801688116882168831688416885168861688716888168891689016891168921689316894168951689616897168981689916900169011690216903169041690516906169071690816909169101691116912169131691416915169161691716918169191692016921169221692316924169251692616927169281692916930169311693216933169341693516936169371693816939169401694116942169431694416945169461694716948169491695016951169521695316954169551695616957169581695916960169611696216963169641696516966169671696816969169701697116972169731697416975169761697716978169791698016981169821698316984169851698616987169881698916990169911699216993169941699516996169971699816999170001700117002170031700417005170061700717008170091701017011170121701317014170151701617017170181701917020170211
70221702317024170251702617027170281702917030170311703217033170341703517036170371703817039170401704117042170431704417045170461704717048170491705017051170521705317054170551705617057170581705917060170611706217063170641706517066170671706817069170701707117072170731707417075170761707717078170791708017081170821708317084170851708617087170881708917090170911709217093170941709517096170971709817099171001710117102171031710417105171061710717108171091711017111171121711317114171151711617117171181711917120171211712217123171241712517126171271712817129171301713117132171331713417135171361713717138171391714017141171421714317144171451714617147171481714917150171511715217153171541715517156171571715817159171601716117162171631716417165171661716717168171691717017171171721717317174171751717617177171781717917180171811718217183171841718517186171871718817189171901719117192171931719417195171961719717198171991720017201172021720317204172051720617207172081720917210172111721217213172141721517216172171721817219172201722117222172231722417225172261722717228172291723017231172321723317234172351723617237172381723917240172411724217243172441724517246172471724817249172501725117252172531725417255172561725717258172591726017261172621726317264172651726617267172681726917270172711727217273172741727517276172771727817279172801728117282172831728417285172861728717288172891729017291172921729317294172951729617297172981729917300173011730217303173041730517306173071730817309173101731117312173131731417315173161731717318173191732017321173221732317324173251732617327173281732917330173311733217333173341733517336173371733817339173401734117342173431734417345173461734717348173491735017351173521735317354173551735617357173581735917360173611736217363173641736517366173671736817369173701737117372173731737417375173761737717378173791738017381173821738317384173851738617387173881738917390173911739217393173941739517396173971739817399174001740117402174031740417405174061740717408174091741017411174121741317414174151741617417174181741917420174211
74221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211
78221782317824178251782617827178281782917830178311783217833178341783517836178371783817839178401784117842178431784417845178461784717848178491785017851178521785317854178551785617857178581785917860178611786217863178641786517866178671786817869178701787117872178731787417875178761787717878178791788017881178821788317884178851788617887178881788917890178911789217893178941789517896178971789817899179001790117902179031790417905179061790717908179091791017911179121791317914179151791617917179181791917920179211792217923179241792517926179271792817929179301793117932179331793417935179361793717938179391794017941179421794317944179451794617947179481794917950179511795217953179541795517956179571795817959179601796117962179631796417965179661796717968179691797017971179721797317974179751797617977179781797917980179811798217983179841798517986179871798817989179901799117992179931799417995179961799717998179991800018001180021800318004180051800618007180081800918010180111801218013180141801518016180171801818019180201802118022180231802418025180261802718028180291803018031180321803318034180351803618037180381803918040180411804218043180441804518046180471804818049180501805118052180531805418055180561805718058180591806018061180621806318064180651806618067180681806918070180711807218073180741807518076180771807818079180801808118082180831808418085180861808718088180891809018091180921809318094180951809618097180981809918100181011810218103181041810518106181071810818109181101811118112181131811418115181161811718118181191812018121181221812318124181251812618127181281812918130181311813218133181341813518136181371813818139181401814118142181431814418145181461814718148181491815018151181521815318154181551815618157181581815918160181611816218163181641816518166181671816818169181701817118172181731817418175181761817718178181791818018181181821818318184181851818618187181881818918190181911819218193181941819518196181971819818199182001820118202182031820418205182061820718208182091821018211182121821318214182151821618217182181821918220182211
82221822318224182251822618227182281822918230182311823218233182341823518236182371823818239182401824118242182431824418245182461824718248182491825018251182521825318254182551825618257182581825918260182611826218263182641826518266182671826818269182701827118272182731827418275182761827718278182791828018281182821828318284182851828618287182881828918290182911829218293182941829518296182971829818299183001830118302183031830418305183061830718308183091831018311183121831318314183151831618317183181831918320183211832218323183241832518326183271832818329183301833118332183331833418335183361833718338183391834018341183421834318344183451834618347183481834918350183511835218353183541835518356183571835818359183601836118362183631836418365183661836718368183691837018371183721837318374183751837618377183781837918380183811838218383183841838518386183871838818389183901839118392183931839418395183961839718398183991840018401184021840318404184051840618407184081840918410184111841218413184141841518416184171841818419184201842118422184231842418425184261842718428184291843018431184321843318434184351843618437184381843918440184411844218443184441844518446184471844818449184501845118452184531845418455184561845718458184591846018461184621846318464184651846618467184681846918470184711847218473184741847518476184771847818479184801848118482184831848418485184861848718488184891849018491184921849318494184951849618497184981849918500185011850218503185041850518506185071850818509185101851118512185131851418515185161851718518185191852018521185221852318524185251852618527185281852918530185311853218533185341853518536185371853818539185401854118542185431854418545185461854718548185491855018551185521855318554185551855618557185581855918560185611856218563185641856518566185671856818569185701857118572185731857418575185761857718578185791858018581185821858318584185851858618587185881858918590185911859218593185941859518596185971859818599186001860118602186031860418605186061860718608186091861018611186121861318614186151861618617186181861918620186211
86221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211
90221902319024190251902619027190281902919030190311903219033190341903519036190371903819039190401904119042190431904419045190461904719048190491905019051190521905319054190551905619057190581905919060190611906219063190641906519066190671906819069190701907119072190731907419075190761907719078190791908019081190821908319084190851908619087190881908919090190911909219093190941909519096190971909819099191001910119102191031910419105191061910719108191091911019111191121911319114191151911619117191181911919120191211912219123191241912519126191271912819129191301913119132191331913419135191361913719138191391914019141191421914319144191451914619147191481914919150191511915219153191541915519156191571915819159191601916119162191631916419165191661916719168191691917019171191721917319174191751917619177191781917919180191811918219183191841918519186191871918819189191901919119192191931919419195191961919719198191991920019201192021920319204192051920619207192081920919210192111921219213192141921519216192171921819219192201922119222192231922419225192261922719228192291923019231192321923319234192351923619237192381923919240192411924219243192441924519246192471924819249192501925119252192531925419255192561925719258192591926019261192621926319264192651926619267192681926919270192711927219273192741927519276192771927819279192801928119282192831928419285192861928719288192891929019291192921929319294192951929619297192981929919300193011930219303193041930519306193071930819309193101931119312193131931419315193161931719318193191932019321193221932319324193251932619327193281932919330193311933219333193341933519336193371933819339193401934119342193431934419345193461934719348193491935019351193521935319354193551935619357193581935919360193611936219363193641936519366193671936819369193701937119372193731937419375193761937719378193791938019381193821938319384193851938619387193881938919390193911939219393193941939519396193971939819399194001940119402194031940419405194061940719408194091941019411194121941319414194151941619417194181941919420194211
94221942319424194251942619427194281942919430194311943219433194341943519436194371943819439194401944119442194431944419445194461944719448194491945019451194521945319454194551945619457194581945919460194611946219463194641946519466194671946819469194701947119472194731947419475194761947719478194791948019481194821948319484194851948619487194881948919490194911949219493194941949519496194971949819499195001950119502195031950419505195061950719508195091951019511195121951319514195151951619517195181951919520195211952219523195241952519526195271952819529195301953119532195331953419535195361953719538195391954019541195421954319544195451954619547195481954919550195511955219553195541955519556195571955819559195601956119562195631956419565195661956719568195691957019571195721957319574195751957619577195781957919580195811958219583195841958519586195871958819589195901959119592195931959419595195961959719598195991960019601196021960319604196051960619607196081960919610196111961219613196141961519616196171961819619196201962119622196231962419625196261962719628196291963019631196321963319634196351963619637196381963919640196411964219643196441964519646196471964819649196501965119652196531965419655196561965719658196591966019661196621966319664196651966619667196681966919670196711967219673196741967519676196771967819679196801968119682196831968419685196861968719688196891969019691196921969319694196951969619697196981969919700197011970219703197041970519706197071970819709197101971119712197131971419715197161971719718197191972019721197221972319724197251972619727197281972919730197311973219733197341973519736197371973819739197401974119742197431974419745197461974719748197491975019751197521975319754197551975619757197581975919760197611976219763197641976519766197671976819769197701977119772197731977419775197761977719778197791978019781197821978319784197851978619787197881978919790197911979219793197941979519796197971979819799198001980119802198031980419805198061980719808198091981019811198121981319814198151981619817198181981919820198211
98221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212
02222022320224202252022620227202282022920230202312023220233202342023520236202372023820239202402024120242202432024420245202462024720248202492025020251202522025320254202552025620257202582025920260202612026220263202642026520266202672026820269202702027120272202732027420275202762027720278202792028020281202822028320284202852028620287202882028920290202912029220293202942029520296202972029820299203002030120302203032030420305203062030720308203092031020311203122031320314203152031620317203182031920320203212032220323203242032520326203272032820329203302033120332203332033420335203362033720338203392034020341203422034320344203452034620347203482034920350203512035220353203542035520356203572035820359203602036120362203632036420365203662036720368203692037020371203722037320374203752037620377203782037920380203812038220383203842038520386203872038820389203902039120392203932039420395203962039720398203992040020401204022040320404204052040620407204082040920410204112041220413204142041520416204172041820419204202042120422204232042420425204262042720428204292043020431204322043320434204352043620437204382043920440204412044220443204442044520446204472044820449204502045120452204532045420455204562045720458204592046020461204622046320464204652046620467204682046920470204712047220473204742047520476204772047820479204802048120482204832048420485204862048720488204892049020491204922049320494204952049620497204982049920500205012050220503205042050520506205072050820509205102051120512205132051420515205162051720518205192052020521205222052320524205252052620527205282052920530205312053220533205342053520536205372053820539205402054120542205432054420545205462054720548205492055020551205522055320554205552055620557205582055920560205612056220563205642056520566205672056820569205702057120572205732057420575205762057720578205792058020581205822058320584205852058620587205882058920590205912059220593205942059520596205972059820599206002060120602206032060420605206062060720608206092061020611206122061320614206152061620617206182061920620206212
06222062320624206252062620627206282062920630206312063220633206342063520636206372063820639206402064120642206432064420645206462064720648206492065020651206522065320654206552065620657206582065920660206612066220663206642066520666206672066820669206702067120672206732067420675206762067720678206792068020681206822068320684206852068620687206882068920690206912069220693206942069520696206972069820699207002070120702207032070420705207062070720708207092071020711207122071320714207152071620717207182071920720207212072220723207242072520726207272072820729207302073120732207332073420735207362073720738207392074020741207422074320744207452074620747207482074920750207512075220753207542075520756207572075820759207602076120762207632076420765207662076720768207692077020771207722077320774207752077620777207782077920780207812078220783207842078520786207872078820789207902079120792207932079420795207962079720798207992080020801208022080320804208052080620807208082080920810208112081220813208142081520816208172081820819208202082120822208232082420825208262082720828208292083020831208322083320834208352083620837208382083920840208412084220843208442084520846208472084820849208502085120852208532085420855208562085720858208592086020861208622086320864208652086620867208682086920870208712087220873208742087520876208772087820879208802088120882208832088420885208862088720888208892089020891208922089320894208952089620897208982089920900209012090220903209042090520906209072090820909209102091120912209132091420915209162091720918209192092020921209222092320924209252092620927209282092920930209312093220933209342093520936209372093820939209402094120942209432094420945209462094720948209492095020951209522095320954209552095620957209582095920960209612096220963209642096520966209672096820969209702097120972209732097420975209762097720978209792098020981209822098320984209852098620987209882098920990209912099220993209942099520996209972099820999210002100121002210032100421005210062100721008210092101021011210122101321014210152101621017210182101921020210212
10222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182141921420214212
14222142321424214252142621427214282142921430214312143221433214342143521436214372143821439214402144121442214432144421445214462144721448214492145021451214522145321454214552145621457214582145921460214612146221463214642146521466214672146821469214702147121472214732147421475214762147721478214792148021481214822148321484214852148621487214882148921490214912149221493214942149521496214972149821499215002150121502215032150421505215062150721508215092151021511215122151321514215152151621517215182151921520215212152221523215242152521526215272152821529215302153121532215332153421535215362153721538215392154021541215422154321544215452154621547215482154921550215512155221553215542155521556215572155821559215602156121562215632156421565215662156721568215692157021571215722157321574215752157621577215782157921580215812158221583215842158521586215872158821589215902159121592215932159421595215962159721598215992160021601216022160321604216052160621607216082160921610216112161221613216142161521616216172161821619216202162121622216232162421625216262162721628216292163021631216322163321634216352163621637216382163921640216412164221643216442164521646216472164821649216502165121652216532165421655216562165721658216592166021661216622166321664216652166621667216682166921670216712167221673216742167521676216772167821679216802168121682216832168421685216862168721688216892169021691216922169321694216952169621697216982169921700217012170221703217042170521706217072170821709217102171121712217132171421715217162171721718217192172021721217222172321724217252172621727217282172921730217312173221733217342173521736217372173821739217402174121742217432174421745217462174721748217492175021751217522175321754217552175621757217582175921760217612176221763217642176521766217672176821769217702177121772217732177421775217762177721778217792178021781217822178321784217852178621787217882178921790217912179221793217942179521796217972179821799218002180121802218032180421805218062180721808218092181021811218122181321814218152181621817218182181921820218212
18222182321824218252182621827218282182921830218312183221833218342183521836218372183821839218402184121842218432184421845218462184721848218492185021851218522185321854218552185621857218582185921860218612186221863218642186521866218672186821869218702187121872218732187421875218762187721878218792188021881218822188321884218852188621887218882188921890218912189221893218942189521896218972189821899219002190121902219032190421905219062190721908219092191021911219122191321914219152191621917219182191921920219212192221923219242192521926219272192821929219302193121932219332193421935219362193721938219392194021941219422194321944219452194621947219482194921950219512195221953219542195521956219572195821959219602196121962219632196421965219662196721968219692197021971219722197321974219752197621977219782197921980219812198221983219842198521986219872198821989219902199121992219932199421995219962199721998219992200022001220022200322004220052200622007220082200922010220112201222013220142201522016220172201822019220202202122022220232202422025220262202722028220292203022031220322203322034220352203622037220382203922040220412204222043220442204522046220472204822049220502205122052220532205422055220562205722058220592206022061220622206322064220652206622067220682206922070220712207222073220742207522076220772207822079220802208122082220832208422085220862208722088220892209022091220922209322094220952209622097220982209922100221012210222103221042210522106221072210822109221102211122112221132211422115221162211722118221192212022121221222212322124221252212622127221282212922130221312213222133221342213522136221372213822139221402214122142221432214422145221462214722148221492215022151221522215322154221552215622157221582215922160221612216222163221642216522166221672216822169221702217122172221732217422175221762217722178221792218022181221822218322184221852218622187221882218922190221912219222193221942219522196221972219822199222002220122202222032220422205222062220722208222092221022211222122221322214222152221622217222182221922220222212
22222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212
26222262322624226252262622627226282262922630226312263222633226342263522636226372263822639226402264122642226432264422645226462264722648226492265022651226522265322654226552265622657226582265922660226612266222663226642266522666226672266822669226702267122672226732267422675226762267722678226792268022681226822268322684226852268622687226882268922690226912269222693226942269522696226972269822699227002270122702227032270422705227062270722708227092271022711227122271322714227152271622717227182271922720227212272222723227242272522726227272272822729227302273122732227332273422735227362273722738227392274022741227422274322744227452274622747227482274922750227512275222753227542275522756227572275822759227602276122762227632276422765227662276722768227692277022771227722277322774227752277622777227782277922780227812278222783227842278522786227872278822789227902279122792227932279422795227962279722798227992280022801228022280322804228052280622807228082280922810228112281222813228142281522816228172281822819228202282122822228232282422825228262282722828228292283022831228322283322834228352283622837228382283922840228412284222843228442284522846228472284822849228502285122852228532285422855228562285722858228592286022861228622286322864228652286622867228682286922870228712287222873228742287522876228772287822879228802288122882228832288422885228862288722888228892289022891228922289322894228952289622897228982289922900229012290222903229042290522906229072290822909229102291122912229132291422915229162291722918229192292022921229222292322924229252292622927229282292922930229312293222933229342293522936229372293822939229402294122942229432294422945229462294722948229492295022951229522295322954229552295622957229582295922960229612296222963229642296522966229672296822969229702297122972229732297422975229762297722978229792298022981229822298322984229852298622987229882298922990229912299222993229942299522996229972299822999230002300123002230032300423005230062300723008230092301023011230122301323014230152301623017230182301923020230212
30222302323024230252302623027230282302923030230312303223033230342303523036230372303823039230402304123042230432304423045230462304723048230492305023051230522305323054230552305623057230582305923060230612306223063230642306523066230672306823069230702307123072230732307423075230762307723078230792308023081230822308323084230852308623087230882308923090230912309223093230942309523096230972309823099231002310123102231032310423105231062310723108231092311023111231122311323114231152311623117231182311923120231212312223123231242312523126231272312823129231302313123132231332313423135231362313723138231392314023141231422314323144231452314623147231482314923150231512315223153231542315523156231572315823159231602316123162231632316423165231662316723168231692317023171231722317323174231752317623177231782317923180231812318223183231842318523186231872318823189231902319123192231932319423195231962319723198231992320023201232022320323204232052320623207232082320923210232112321223213232142321523216232172321823219232202322123222232232322423225232262322723228232292323023231232322323323234232352323623237232382323923240232412324223243232442324523246232472324823249232502325123252232532325423255232562325723258232592326023261232622326323264232652326623267232682326923270232712327223273232742327523276232772327823279232802328123282232832328423285232862328723288232892329023291232922329323294232952329623297232982329923300233012330223303233042330523306233072330823309233102331123312233132331423315233162331723318233192332023321233222332323324233252332623327233282332923330233312333223333233342333523336233372333823339233402334123342233432334423345233462334723348233492335023351233522335323354233552335623357233582335923360233612336223363233642336523366233672336823369233702337123372233732337423375233762337723378233792338023381233822338323384233852338623387233882338923390233912339223393233942339523396233972339823399234002340123402234032340423405234062340723408234092341023411234122341323414234152341623417234182341923420234212
34222342323424234252342623427234282342923430234312343223433234342343523436234372343823439234402344123442234432344423445234462344723448234492345023451234522345323454234552345623457234582345923460234612346223463234642346523466234672346823469234702347123472234732347423475234762347723478234792348023481234822348323484234852348623487234882348923490234912349223493234942349523496234972349823499235002350123502235032350423505235062350723508235092351023511235122351323514235152351623517235182351923520235212352223523235242352523526235272352823529235302353123532235332353423535235362353723538235392354023541235422354323544235452354623547235482354923550235512355223553235542355523556235572355823559235602356123562235632356423565235662356723568235692357023571235722357323574235752357623577235782357923580235812358223583235842358523586235872358823589235902359123592235932359423595235962359723598235992360023601236022360323604236052360623607236082360923610236112361223613236142361523616236172361823619236202362123622236232362423625236262362723628236292363023631236322363323634236352363623637236382363923640236412364223643236442364523646236472364823649236502365123652236532365423655236562365723658236592366023661236622366323664236652366623667236682366923670236712367223673236742367523676236772367823679236802368123682236832368423685236862368723688236892369023691236922369323694236952369623697236982369923700237012370223703237042370523706237072370823709237102371123712237132371423715237162371723718237192372023721237222372323724237252372623727237282372923730237312373223733237342373523736237372373823739237402374123742237432374423745237462374723748237492375023751237522375323754237552375623757237582375923760237612376223763237642376523766237672376823769237702377123772237732377423775237762377723778237792378023781237822378323784237852378623787237882378923790237912379223793237942379523796237972379823799238002380123802238032380423805238062380723808238092381023811238122381323814238152381623817238182381923820238212
38222382323824238252382623827238282382923830238312383223833238342383523836238372383823839238402384123842238432384423845238462384723848238492385023851238522385323854238552385623857238582385923860238612386223863238642386523866238672386823869238702387123872238732387423875238762387723878238792388023881238822388323884238852388623887238882388923890238912389223893238942389523896238972389823899239002390123902239032390423905239062390723908239092391023911239122391323914239152391623917239182391923920239212392223923239242392523926239272392823929239302393123932239332393423935239362393723938239392394023941239422394323944239452394623947239482394923950239512395223953239542395523956239572395823959239602396123962239632396423965239662396723968239692397023971239722397323974239752397623977239782397923980239812398223983239842398523986239872398823989239902399123992239932399423995239962399723998239992400024001240022400324004240052400624007240082400924010240112401224013240142401524016240172401824019240202402124022240232402424025240262402724028240292403024031240322403324034240352403624037240382403924040240412404224043240442404524046240472404824049240502405124052240532405424055240562405724058240592406024061240622406324064240652406624067240682406924070240712407224073240742407524076240772407824079240802408124082240832408424085240862408724088240892409024091240922409324094240952409624097240982409924100241012410224103241042410524106241072410824109241102411124112241132411424115241162411724118241192412024121241222412324124241252412624127241282412924130241312413224133241342413524136241372413824139241402414124142241432414424145241462414724148241492415024151241522415324154241552415624157241582415924160241612416224163241642416524166241672416824169241702417124172241732417424175241762417724178241792418024181241822418324184241852418624187241882418924190241912419224193241942419524196241972419824199242002420124202242032420424205242062420724208242092421024211242122421324214242152421624217242182421924220242212
42222422324224242252422624227242282422924230242312423224233242342423524236242372423824239242402424124242242432424424245242462424724248242492425024251242522425324254242552425624257242582425924260242612426224263242642426524266242672426824269242702427124272242732427424275242762427724278242792428024281242822428324284242852428624287242882428924290242912429224293242942429524296242972429824299243002430124302243032430424305243062430724308243092431024311243122431324314243152431624317243182431924320243212432224323243242432524326243272432824329243302433124332243332433424335243362433724338243392434024341243422434324344243452434624347243482434924350243512435224353243542435524356243572435824359243602436124362243632436424365243662436724368243692437024371243722437324374243752437624377243782437924380243812438224383243842438524386243872438824389243902439124392243932439424395243962439724398243992440024401244022440324404244052440624407244082440924410244112441224413244142441524416244172441824419244202442124422244232442424425244262442724428244292443024431244322443324434244352443624437244382443924440244412444224443244442444524446244472444824449244502445124452244532445424455244562445724458244592446024461244622446324464244652446624467244682446924470244712447224473244742447524476244772447824479244802448124482244832448424485244862448724488244892449024491244922449324494244952449624497244982449924500245012450224503245042450524506245072450824509245102451124512245132451424515245162451724518245192452024521245222452324524245252452624527245282452924530245312453224533245342453524536245372453824539245402454124542245432454424545245462454724548245492455024551245522455324554245552455624557245582455924560245612456224563245642456524566245672456824569245702457124572245732457424575245762457724578245792458024581245822458324584245852458624587245882458924590245912459224593245942459524596245972459824599246002460124602246032460424605246062460724608246092461024611246122461324614246152461624617246182461924620246212
46222462324624246252462624627246282462924630246312463224633246342463524636246372463824639246402464124642246432464424645246462464724648246492465024651246522465324654246552465624657246582465924660246612466224663246642466524666246672466824669246702467124672246732467424675246762467724678246792468024681246822468324684246852468624687246882468924690246912469224693246942469524696246972469824699247002470124702247032470424705247062470724708247092471024711247122471324714247152471624717247182471924720247212472224723247242472524726247272472824729247302473124732247332473424735247362473724738247392474024741247422474324744247452474624747247482474924750247512475224753247542475524756247572475824759247602476124762247632476424765247662476724768247692477024771247722477324774247752477624777247782477924780247812478224783247842478524786247872478824789247902479124792247932479424795247962479724798247992480024801248022480324804248052480624807248082480924810248112481224813248142481524816248172481824819248202482124822248232482424825248262482724828248292483024831248322483324834248352483624837248382483924840248412484224843248442484524846248472484824849248502485124852248532485424855248562485724858248592486024861248622486324864248652486624867248682486924870248712487224873248742487524876248772487824879248802488124882248832488424885248862488724888248892489024891248922489324894248952489624897248982489924900249012490224903249042490524906249072490824909249102491124912249132491424915249162491724918249192492024921249222492324924249252492624927249282492924930249312493224933249342493524936249372493824939249402494124942249432494424945249462494724948249492495024951249522495324954249552495624957249582495924960249612496224963249642496524966249672496824969249702497124972249732497424975249762497724978249792498024981249822498324984249852498624987249882498924990249912499224993249942499524996249972499824999250002500125002250032500425005250062500725008250092501025011250122501325014250152501625017250182501925020250212
50222502325024250252502625027250282502925030250312503225033250342503525036250372503825039250402504125042250432504425045250462504725048250492505025051250522505325054250552505625057250582505925060250612506225063250642506525066250672506825069250702507125072250732507425075250762507725078250792508025081250822508325084250852508625087250882508925090250912509225093250942509525096250972509825099251002510125102251032510425105251062510725108251092511025111251122511325114251152511625117251182511925120251212512225123251242512525126251272512825129251302513125132251332513425135251362513725138251392514025141251422514325144251452514625147251482514925150251512515225153251542515525156251572515825159251602516125162251632516425165251662516725168251692517025171251722517325174251752517625177251782517925180251812518225183251842518525186251872518825189251902519125192251932519425195251962519725198251992520025201252022520325204252052520625207252082520925210252112521225213252142521525216252172521825219252202522125222252232522425225252262522725228252292523025231252322523325234252352523625237252382523925240252412524225243252442524525246252472524825249252502525125252252532525425255252562525725258252592526025261252622526325264252652526625267252682526925270252712527225273252742527525276252772527825279252802528125282252832528425285252862528725288252892529025291252922529325294252952529625297252982529925300253012530225303253042530525306253072530825309253102531125312253132531425315253162531725318253192532025321253222532325324253252532625327253282532925330253312533225333253342533525336253372533825339253402534125342253432534425345253462534725348253492535025351253522535325354253552535625357253582535925360253612536225363253642536525366253672536825369253702537125372253732537425375253762537725378253792538025381253822538325384253852538625387253882538925390253912539225393253942539525396253972539825399254002540125402254032540425405254062540725408254092541025411254122541325414254152541625417254182541925420254212
54222542325424254252542625427254282542925430254312543225433254342543525436254372543825439254402544125442254432544425445254462544725448254492545025451254522545325454254552545625457254582545925460254612546225463254642546525466254672546825469254702547125472254732547425475254762547725478254792548025481254822548325484254852548625487254882548925490254912549225493254942549525496254972549825499255002550125502255032550425505255062550725508255092551025511255122551325514255152551625517255182551925520255212552225523255242552525526255272552825529255302553125532255332553425535255362553725538255392554025541255422554325544255452554625547255482554925550255512555225553255542555525556255572555825559255602556125562255632556425565255662556725568255692557025571255722557325574255752557625577255782557925580255812558225583255842558525586255872558825589255902559125592255932559425595255962559725598255992560025601256022560325604256052560625607256082560925610256112561225613256142561525616256172561825619256202562125622256232562425625256262562725628256292563025631256322563325634256352563625637256382563925640256412564225643256442564525646256472564825649256502565125652256532565425655256562565725658256592566025661256622566325664256652566625667256682566925670256712567225673256742567525676256772567825679256802568125682256832568425685256862568725688256892569025691256922569325694256952569625697256982569925700257012570225703257042570525706257072570825709257102571125712257132571425715257162571725718257192572025721257222572325724257252572625727257282572925730257312573225733257342573525736257372573825739257402574125742257432574425745257462574725748257492575025751257522575325754257552575625757257582575925760257612576225763257642576525766257672576825769257702577125772257732577425775257762577725778257792578025781257822578325784257852578625787257882578925790257912579225793257942579525796257972579825799258002580125802258032580425805258062580725808258092581025811258122581325814258152581625817258182581925820258212
58222582325824258252582625827258282582925830258312583225833258342583525836258372583825839258402584125842258432584425845258462584725848258492585025851258522585325854258552585625857258582585925860258612586225863258642586525866258672586825869258702587125872258732587425875258762587725878258792588025881258822588325884258852588625887258882588925890258912589225893258942589525896258972589825899259002590125902259032590425905259062590725908259092591025911259122591325914259152591625917259182591925920259212592225923259242592525926259272592825929259302593125932259332593425935259362593725938259392594025941259422594325944259452594625947259482594925950259512595225953259542595525956259572595825959259602596125962259632596425965259662596725968259692597025971259722597325974259752597625977259782597925980259812598225983259842598525986259872598825989259902599125992259932599425995259962599725998259992600026001260022600326004260052600626007260082600926010260112601226013260142601526016260172601826019260202602126022260232602426025260262602726028260292603026031260322603326034260352603626037260382603926040260412604226043260442604526046260472604826049260502605126052260532605426055260562605726058260592606026061260622606326064260652606626067260682606926070260712607226073260742607526076260772607826079260802608126082260832608426085260862608726088260892609026091260922609326094260952609626097260982609926100261012610226103261042610526106261072610826109261102611126112261132611426115261162611726118261192612026121261222612326124261252612626127261282612926130261312613226133261342613526136261372613826139261402614126142261432614426145261462614726148261492615026151261522615326154261552615626157261582615926160261612616226163261642616526166261672616826169261702617126172261732617426175261762617726178261792618026181261822618326184261852618626187261882618926190261912619226193261942619526196261972619826199262002620126202262032620426205262062620726208262092621026211262122621326214262152621626217262182621926220262212
62222622326224262252622626227262282622926230262312623226233262342623526236262372623826239262402624126242262432624426245262462624726248262492625026251262522625326254262552625626257262582625926260262612626226263262642626526266262672626826269262702627126272262732627426275262762627726278262792628026281262822628326284262852628626287262882628926290262912629226293262942629526296262972629826299263002630126302263032630426305263062630726308263092631026311263122631326314263152631626317263182631926320263212632226323263242632526326263272632826329263302633126332263332633426335263362633726338263392634026341263422634326344263452634626347263482634926350263512635226353263542635526356263572635826359263602636126362263632636426365263662636726368263692637026371263722637326374263752637626377263782637926380263812638226383263842638526386263872638826389263902639126392263932639426395263962639726398263992640026401264022640326404264052640626407264082640926410264112641226413264142641526416264172641826419264202642126422264232642426425264262642726428264292643026431264322643326434264352643626437264382643926440264412644226443264442644526446264472644826449264502645126452264532645426455264562645726458264592646026461264622646326464264652646626467264682646926470264712647226473264742647526476264772647826479264802648126482264832648426485264862648726488264892649026491264922649326494264952649626497264982649926500265012650226503265042650526506265072650826509265102651126512265132651426515265162651726518265192652026521265222652326524265252652626527265282652926530265312653226533265342653526536265372653826539265402654126542265432654426545265462654726548265492655026551265522655326554265552655626557265582655926560265612656226563265642656526566265672656826569265702657126572265732657426575265762657726578265792658026581265822658326584265852658626587265882658926590265912659226593265942659526596265972659826599266002660126602266032660426605266062660726608266092661026611266122661326614266152661626617266182661926620266212
66222662326624266252662626627266282662926630266312663226633266342663526636266372663826639266402664126642266432664426645266462664726648266492665026651266522665326654266552665626657266582665926660266612666226663266642666526666266672666826669266702667126672266732667426675266762667726678266792668026681266822668326684266852668626687266882668926690266912669226693266942669526696266972669826699267002670126702267032670426705267062670726708267092671026711267122671326714267152671626717267182671926720267212672226723267242672526726267272672826729267302673126732267332673426735267362673726738267392674026741267422674326744267452674626747267482674926750267512675226753267542675526756267572675826759267602676126762267632676426765267662676726768267692677026771267722677326774267752677626777267782677926780267812678226783267842678526786267872678826789267902679126792267932679426795267962679726798267992680026801268022680326804268052680626807268082680926810268112681226813268142681526816268172681826819268202682126822268232682426825268262682726828268292683026831268322683326834268352683626837268382683926840268412684226843268442684526846268472684826849268502685126852268532685426855268562685726858268592686026861268622686326864268652686626867268682686926870268712687226873268742687526876268772687826879268802688126882268832688426885268862688726888268892689026891268922689326894268952689626897268982689926900269012690226903269042690526906269072690826909269102691126912269132691426915269162691726918269192692026921269222692326924269252692626927269282692926930269312693226933269342693526936269372693826939269402694126942269432694426945269462694726948269492695026951269522695326954269552695626957269582695926960269612696226963269642696526966269672696826969269702697126972269732697426975269762697726978269792698026981269822698326984269852698626987269882698926990269912699226993269942699526996269972699826999270002700127002270032700427005270062700727008270092701027011270122701327014270152701627017270182701927020270212
70222702327024270252702627027270282702927030270312703227033270342703527036270372703827039270402704127042270432704427045270462704727048270492705027051270522705327054270552705627057270582705927060270612706227063270642706527066270672706827069270702707127072270732707427075270762707727078270792708027081270822708327084270852708627087270882708927090270912709227093270942709527096270972709827099271002710127102271032710427105271062710727108271092711027111271122711327114271152711627117271182711927120271212712227123271242712527126271272712827129271302713127132271332713427135271362713727138271392714027141271422714327144271452714627147271482714927150271512715227153271542715527156271572715827159271602716127162271632716427165271662716727168271692717027171271722717327174271752717627177271782717927180271812718227183271842718527186271872718827189271902719127192271932719427195271962719727198271992720027201272022720327204272052720627207272082720927210272112721227213272142721527216272172721827219272202722127222272232722427225272262722727228272292723027231272322723327234272352723627237272382723927240272412724227243272442724527246272472724827249272502725127252272532725427255272562725727258272592726027261272622726327264272652726627267272682726927270272712727227273272742727527276272772727827279272802728127282272832728427285272862728727288272892729027291272922729327294272952729627297272982729927300273012730227303273042730527306273072730827309273102731127312273132731427315273162731727318273192732027321273222732327324273252732627327273282732927330273312733227333273342733527336273372733827339273402734127342273432734427345273462734727348273492735027351273522735327354273552735627357273582735927360273612736227363273642736527366273672736827369273702737127372273732737427375273762737727378273792738027381273822738327384273852738627387273882738927390273912739227393273942739527396273972739827399274002740127402274032740427405274062740727408274092741027411274122741327414274152741627417274182741927420274212
74222742327424274252742627427274282742927430274312743227433274342743527436274372743827439274402744127442274432744427445274462744727448274492745027451274522745327454274552745627457274582745927460274612746227463274642746527466274672746827469274702747127472274732747427475274762747727478274792748027481274822748327484274852748627487274882748927490274912749227493274942749527496274972749827499275002750127502275032750427505275062750727508275092751027511275122751327514275152751627517275182751927520275212752227523275242752527526275272752827529275302753127532275332753427535275362753727538275392754027541275422754327544275452754627547275482754927550275512755227553275542755527556275572755827559275602756127562275632756427565275662756727568275692757027571275722757327574275752757627577275782757927580275812758227583275842758527586275872758827589275902759127592275932759427595275962759727598275992760027601276022760327604276052760627607276082760927610276112761227613276142761527616276172761827619276202762127622276232762427625276262762727628276292763027631276322763327634276352763627637276382763927640276412764227643276442764527646276472764827649276502765127652276532765427655276562765727658276592766027661276622766327664276652766627667276682766927670276712767227673276742767527676276772767827679276802768127682276832768427685276862768727688276892769027691276922769327694276952769627697276982769927700277012770227703277042770527706277072770827709277102771127712277132771427715277162771727718277192772027721277222772327724277252772627727277282772927730277312773227733277342773527736277372773827739277402774127742277432774427745277462774727748277492775027751277522775327754277552775627757277582775927760277612776227763277642776527766277672776827769277702777127772277732777427775277762777727778277792778027781277822778327784277852778627787277882778927790277912779227793277942779527796277972779827799278002780127802278032780427805278062780727808278092781027811278122781327814278152781627817278182781927820278212
78222782327824278252782627827278282782927830278312783227833278342783527836278372783827839278402784127842278432784427845278462784727848278492785027851278522785327854278552785627857278582785927860278612786227863278642786527866278672786827869278702787127872278732787427875278762787727878278792788027881278822788327884278852788627887278882788927890278912789227893278942789527896278972789827899279002790127902279032790427905279062790727908279092791027911279122791327914279152791627917279182791927920279212792227923279242792527926279272792827929279302793127932279332793427935279362793727938279392794027941279422794327944279452794627947279482794927950279512795227953279542795527956279572795827959279602796127962279632796427965279662796727968279692797027971279722797327974279752797627977279782797927980279812798227983279842798527986279872798827989279902799127992279932799427995279962799727998279992800028001280022800328004280052800628007280082800928010280112801228013280142801528016280172801828019280202802128022280232802428025280262802728028280292803028031280322803328034280352803628037280382803928040280412804228043280442804528046280472804828049280502805128052280532805428055280562805728058280592806028061280622806328064280652806628067280682806928070280712807228073280742807528076280772807828079280802808128082280832808428085280862808728088280892809028091280922809328094280952809628097280982809928100281012810228103281042810528106281072810828109281102811128112281132811428115281162811728118281192812028121281222812328124281252812628127281282812928130281312813228133281342813528136281372813828139281402814128142281432814428145281462814728148281492815028151281522815328154281552815628157281582815928160281612816228163281642816528166281672816828169281702817128172281732817428175281762817728178281792818028181281822818328184281852818628187281882818928190281912819228193281942819528196281972819828199282002820128202282032820428205282062820728208282092821028211282122821328214282152821628217282182821928220282212
82222822328224282252822628227282282822928230282312823228233282342823528236282372823828239282402824128242282432824428245282462824728248282492825028251282522825328254282552825628257282582825928260282612826228263282642826528266282672826828269282702827128272282732827428275282762827728278282792828028281282822828328284282852828628287282882828928290282912829228293282942829528296282972829828299283002830128302283032830428305283062830728308283092831028311283122831328314283152831628317283182831928320283212832228323283242832528326283272832828329283302833128332283332833428335283362833728338283392834028341283422834328344283452834628347283482834928350283512835228353283542835528356283572835828359283602836128362283632836428365283662836728368283692837028371283722837328374283752837628377283782837928380283812838228383283842838528386283872838828389283902839128392283932839428395283962839728398283992840028401284022840328404284052840628407284082840928410284112841228413284142841528416284172841828419284202842128422284232842428425284262842728428284292843028431284322843328434284352843628437284382843928440284412844228443284442844528446284472844828449284502845128452284532845428455284562845728458284592846028461284622846328464284652846628467284682846928470284712847228473284742847528476284772847828479284802848128482284832848428485284862848728488284892849028491284922849328494284952849628497284982849928500285012850228503285042850528506285072850828509285102851128512285132851428515285162851728518285192852028521285222852328524285252852628527285282852928530285312853228533285342853528536285372853828539285402854128542285432854428545285462854728548285492855028551285522855328554285552855628557285582855928560285612856228563285642856528566285672856828569285702857128572285732857428575285762857728578285792858028581285822858328584285852858628587285882858928590285912859228593285942859528596285972859828599286002860128602286032860428605286062860728608286092861028611286122861328614286152861628617286182861928620286212
86222862328624286252862628627286282862928630286312863228633286342863528636286372863828639286402864128642286432864428645286462864728648286492865028651286522865328654286552865628657286582865928660286612866228663286642866528666286672866828669286702867128672286732867428675286762867728678286792868028681286822868328684286852868628687286882868928690286912869228693286942869528696286972869828699287002870128702287032870428705287062870728708287092871028711287122871328714287152871628717287182871928720287212872228723287242872528726287272872828729287302873128732287332873428735287362873728738287392874028741287422874328744287452874628747287482874928750287512875228753287542875528756287572875828759287602876128762287632876428765287662876728768287692877028771287722877328774287752877628777287782877928780287812878228783287842878528786287872878828789287902879128792287932879428795287962879728798287992880028801288022880328804288052880628807288082880928810288112881228813288142881528816288172881828819288202882128822288232882428825288262882728828288292883028831288322883328834288352883628837288382883928840288412884228843288442884528846288472884828849288502885128852288532885428855288562885728858288592886028861288622886328864288652886628867288682886928870288712887228873288742887528876288772887828879288802888128882288832888428885288862888728888288892889028891288922889328894288952889628897288982889928900289012890228903289042890528906289072890828909289102891128912289132891428915289162891728918289192892028921289222892328924289252892628927289282892928930289312893228933289342893528936289372893828939289402894128942289432894428945289462894728948289492895028951289522895328954289552895628957289582895928960289612896228963289642896528966289672896828969289702897128972289732897428975289762897728978289792898028981289822898328984289852898628987289882898928990289912899228993289942899528996289972899828999290002900129002290032900429005290062900729008290092901029011290122901329014290152901629017290182901929020290212
902229023290242902529026290272902829029290302903129032290332903429035290362903729038290392904029041290422904329044290452904629047290482904929050290512905229053290542905529056290572905829059290602906129062290632906429065290662906729068290692907029071290722907329074
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #ifdef ENABLE_SCRIPT_DEBUGGING
  7. #include "Debug/DebuggingFlags.h"
  8. #include "Debug/DiagProbe.h"
  9. #include "Debug/DebugManager.h"
  10. #endif
  11. // Parser includes
  12. #include "RegexCommon.h"
  13. #include "RegexPattern.h"
  14. #include "ExternalLowerer.h"
  15. #include "Types/DynamicObjectPropertyEnumerator.h"
  16. #include "Types/JavascriptStaticEnumerator.h"
  17. #include "Library/ForInObjectEnumerator.h"
  18. ///----------------------------------------------------------------------------
  19. ///
  20. /// Lowerer::Lower
  21. ///
  22. /// Lowerer's main entrypoint. Lowers this function.
  23. ///
  24. ///----------------------------------------------------------------------------
  25. void
  26. Lowerer::Lower()
  27. {
  28. this->m_func->StopMaintainByteCodeOffset();
  29. NoRecoverMemoryJitArenaAllocator localAlloc(_u("BE-Lower"), this->m_func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
  30. this->m_alloc = &localAlloc;
  31. BVSparse<JitArenaAllocator> localInitializedTempSym(&localAlloc);
  32. this->initializedTempSym = &localInitializedTempSym;
  33. BVSparse<JitArenaAllocator> localAddToLiveOnBackEdgeSyms(&localAlloc);
  34. this->addToLiveOnBackEdgeSyms = &localAddToLiveOnBackEdgeSyms;
  35. Assert(this->m_func->GetCloneMap() == nullptr);
  36. m_lowererMD.Init(this);
  37. bool defaultDoFastPath = this->m_func->DoFastPaths();
  38. bool loopFastPath = this->m_func->DoLoopFastPaths();
  39. if (m_func->HasAnyStackNestedFunc())
  40. {
  41. EnsureStackFunctionListStackSym();
  42. }
  43. if (m_func->DoStackFrameDisplay() && !m_func->IsLoopBody())
  44. {
  45. AllocStackClosure();
  46. }
  47. AllocStackForInObjectEnumeratorArray();
  48. if (m_func->IsJitInDebugMode())
  49. {
  50. // Initialize metadata of local var slots.
  51. // Too late to wait until Register Allocator, as we need the offset when lowerering bailout for debugger.
  52. int32 hasLocalVarChangedOffset = m_func->GetHasLocalVarChangedOffset();
  53. if (hasLocalVarChangedOffset != Js::Constants::InvalidOffset)
  54. {
  55. // MOV [EBP + m_func->GetHasLocalVarChangedOffset()], 0
  56. StackSym* sym = StackSym::New(TyInt8, m_func);
  57. sym->m_offset = hasLocalVarChangedOffset;
  58. sym->m_allocated = true;
  59. IR::Opnd* opnd1 = IR::SymOpnd::New(sym, TyInt8, m_func);
  60. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func);
  61. Lowerer::InsertMove(opnd1, opnd2, m_func->GetFunctionEntryInsertionPoint());
  62. #ifdef DBG
  63. // Pre-fill all local slots with a pattern. This will help identify non-initialized/garbage var values.
  64. // Note that in the beginning of the function in bytecode we should initialize all locals to undefined.
  65. uint32 localSlotCount = m_func->GetJITFunctionBody()->GetEndNonTempLocalIndex() - m_func->GetJITFunctionBody()->GetFirstNonTempLocalIndex();
  66. for (uint i = 0; i < localSlotCount; ++i)
  67. {
  68. int offset = m_func->GetLocalVarSlotOffset(i);
  69. IRType opnd1Type;
  70. #if defined(TARGET_32)
  71. opnd1Type = TyInt32;
  72. opnd2 = IR::IntConstOpnd::New(Func::c_debugFillPattern4, opnd1Type, m_func);
  73. #else
  74. opnd1Type = TyInt64;
  75. opnd2 = IR::IntConstOpnd::New(Func::c_debugFillPattern8, opnd1Type, m_func);
  76. #endif
  77. sym = StackSym::New(opnd1Type, m_func);
  78. sym->m_offset = offset;
  79. sym->m_allocated = true;
  80. opnd1 = IR::SymOpnd::New(sym, opnd1Type, m_func);
  81. Lowerer::InsertMove(opnd1, opnd2, m_func->GetFunctionEntryInsertionPoint());
  82. }
  83. #endif
  84. }
  85. Assert(!m_func->HasAnyStackNestedFunc());
  86. }
  87. this->LowerRange(m_func->m_headInstr, m_func->m_tailInstr, defaultDoFastPath, loopFastPath);
  88. #if DBG && GLOBAL_ENABLE_WRITE_BARRIER
  89. // TODO: (leish)(swb) implement for arm
  90. #if defined(_M_IX86) || defined(_M_AMD64)
  91. if (CONFIG_FLAG(ForceSoftwareWriteBarrier) && CONFIG_FLAG(VerifyBarrierBit))
  92. {
  93. // find out all write barrier setting instr, call Recycler::WBSetBit for verification purpose
  94. // should do this in LowererMD::GenerateWriteBarrier, however, can't insert call instruction there
  95. FOREACH_INSTR_EDITING(instr, instrNext, m_func->m_headInstr)
  96. if (instr->m_src1 && instr->m_src1->IsAddrOpnd())
  97. {
  98. IR::AddrOpnd* addrOpnd = instr->m_src1->AsAddrOpnd();
  99. if (addrOpnd->GetAddrOpndKind() == IR::AddrOpndKindWriteBarrierCardTable)
  100. {
  101. auto& leaInstr = instr->m_prev->m_prev->m_prev;
  102. auto& movInstr = instr->m_prev->m_prev;
  103. auto& shrInstr = instr->m_prev;
  104. Assert(leaInstr->m_opcode == Js::OpCode::LEA);
  105. Assert(movInstr->m_opcode == Js::OpCode::MOV);
  106. Assert(shrInstr->m_opcode == Js::OpCode::SHR);
  107. m_lowererMD.LoadHelperArgument(movInstr, leaInstr->m_dst);
  108. IR::Instr* instrCall = IR::Instr::New(Js::OpCode::Call, m_func);
  109. movInstr->InsertBefore(instrCall);
  110. m_lowererMD.ChangeToHelperCall(instrCall, IR::HelperWriteBarrierSetVerifyBit);
  111. }
  112. }
  113. NEXT_INSTR_EDITING
  114. }
  115. #endif
  116. #endif
  117. this->m_func->ClearCloneMap();
  118. if (m_func->HasAnyStackNestedFunc())
  119. {
  120. EnsureZeroLastStackFunctionNext();
  121. }
  122. if (!m_func->IsSimpleJit())
  123. {
  124. #if 0 // TODO michhol oop jit, reenable assert
  125. Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();
  126. Assert(entryPointInfo->GetJitTransferData() != nullptr && !entryPointInfo->GetJitTransferData()->GetIsReady());
  127. #endif
  128. }
  129. this->initializedTempSym = nullptr;
  130. this->m_alloc = nullptr;
  131. this->m_func->DisableConstandAddressLoadHoist();
  132. }
  133. void
  134. Lowerer::LowerRange(IR::Instr *instrStart, IR::Instr *instrEnd, bool defaultDoFastPath, bool defaultDoLoopFastPath)
  135. {
  136. bool noMathFastPath;
  137. bool noFieldFastPath;
  138. bool isStrictMode = this->m_func->GetJITFunctionBody()->IsStrictMode();
  139. noFieldFastPath = !defaultDoFastPath;
  140. noMathFastPath = !defaultDoFastPath;
  141. #if DBG_DUMP
  142. char16 * globOptInstrString = nullptr;
  143. #endif
  144. FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrEnd, instrStart)
  145. {
  146. // Try to peep this`
  147. instr = this->PreLowerPeepInstr(instr, &instrPrev);
  148. #if DBG
  149. IR::Instr * verifyLegalizeInstrNext = instr->m_next;
  150. m_currentInstrOpCode = instr->m_opcode;
  151. #endif
  152. // If we have debugger bailout as part of real instr (not separate BailForDebugger instr),
  153. // extract/split out BailOutForDebugger into separate instr, if needed.
  154. // The instr can have just debugger bailout, or debugger bailout + other shared bailout.
  155. // Note that by the time we get here, we should not have aux-only bailout (in globopt we promote it to normal bailout).
  156. if (m_func->IsJitInDebugMode() && instr->HasBailOutInfo() &&
  157. (((instr->GetBailOutKind() & IR::BailOutForDebuggerBits) && instr->m_opcode != Js::OpCode::BailForDebugger) ||
  158. instr->HasAuxBailOut()))
  159. {
  160. instr = this->SplitBailForDebugger(instr); // Change instr, as returned is the one we need to lower next.
  161. instrPrev = instr->m_prev; // Change just in case if instr got changed.
  162. }
  163. #if DBG_DUMP
  164. if (!instr->IsLowered() && !instr->IsLabelInstr()
  165. && (CONFIG_FLAG(ForcePostLowerGlobOptInstrString) ||
  166. PHASE_DUMP(Js::LowererPhase, m_func) ||
  167. PHASE_DUMP(Js::LinearScanPhase, m_func) ||
  168. PHASE_DUMP(Js::RegAllocPhase, m_func) ||
  169. PHASE_DUMP(Js::PeepsPhase, m_func) ||
  170. PHASE_DUMP(Js::LayoutPhase, m_func) ||
  171. PHASE_DUMP(Js::EmitterPhase, m_func) ||
  172. PHASE_DUMP(Js::EncoderPhase, m_func) ||
  173. PHASE_DUMP(Js::BackEndPhase, m_func)))
  174. {
  175. if(instr->m_next && instr->m_next->m_opcode != Js::OpCode::StatementBoundary && !instr->m_next->IsLabelInstr())
  176. {
  177. instr->m_next->globOptInstrString = globOptInstrString;
  178. }
  179. globOptInstrString = instr->DumpString();
  180. }
  181. #endif
  182. if (instr->IsBranchInstr() && !instr->AsBranchInstr()->IsMultiBranch() && instr->AsBranchInstr()->GetTarget()->m_isLoopTop)
  183. {
  184. Loop * loop = instr->AsBranchInstr()->GetTarget()->GetLoop();
  185. if (this->outerMostLoopLabel == nullptr && !loop->isProcessed)
  186. {
  187. while (loop && loop->GetLoopTopInstr()) // some loops are optimized away so that they are not loops anymore.
  188. // They do, however, stay in the loop graph but don't have loop top labels assigned to them
  189. {
  190. this->outerMostLoopLabel = loop->GetLoopTopInstr();
  191. Assert(this->outerMostLoopLabel->m_isLoopTop);
  192. // landing pad must fall through to the loop
  193. Assert(this->outerMostLoopLabel->m_prev->HasFallThrough());
  194. loop = loop->parent;
  195. }
  196. this->initializedTempSym->ClearAll();
  197. }
  198. noFieldFastPath = !defaultDoLoopFastPath;
  199. noMathFastPath = !defaultDoLoopFastPath;
  200. }
  201. #ifdef INLINE_CACHE_STATS
  202. if(PHASE_STATS1(Js::PolymorphicInlineCachePhase))
  203. {
  204. // Always use the slow path, so we can track property accesses
  205. noFieldFastPath = true;
  206. }
  207. #endif
  208. #if DBG
  209. if (instr->HasBailOutInfo())
  210. {
  211. IR::BailOutKind bailoutKind = instr->GetBailOutKind();
  212. if (BailOutInfo::IsBailOutOnImplicitCalls(bailoutKind))
  213. {
  214. this->helperCallCheckState = (HelperCallCheckState)(this->helperCallCheckState | HelperCallCheckState_ImplicitCallsBailout);
  215. }
  216. if ((bailoutKind & IR::BailOutOnArrayAccessHelperCall) != 0 &&
  217. instr->m_opcode != Js::OpCode::Memcopy &&
  218. instr->m_opcode != Js::OpCode::Memset)
  219. {
  220. this->helperCallCheckState = (HelperCallCheckState)(this->helperCallCheckState | HelperCallCheckState_NoHelperCalls);
  221. }
  222. }
  223. #endif
  224. switch (instr->m_opcode)
  225. {
  226. case Js::OpCode::LdHandlerScope:
  227. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdHandlerScope);
  228. break;
  229. case Js::OpCode::InitSetFld:
  230. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitSetter, IR::HelperOP_InitSetter, false);
  231. break;
  232. case Js::OpCode::InitGetFld:
  233. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitGetter, IR::HelperOP_InitGetter, false);
  234. break;
  235. case Js::OpCode::InitProto:
  236. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitProto, IR::HelperOP_InitProto, false);
  237. break;
  238. case Js::OpCode::LdArgCnt:
  239. this->LoadArgumentCount(instr);
  240. break;
  241. case Js::OpCode::LdStackArgPtr:
  242. this->LoadStackArgPtr(instr);
  243. break;
  244. case Js::OpCode::LdHeapArguments:
  245. case Js::OpCode::LdLetHeapArguments:
  246. instrPrev = m_lowererMD.LoadHeapArguments(instr);
  247. break;
  248. case Js::OpCode::LdHeapArgsCached:
  249. case Js::OpCode::LdLetHeapArgsCached:
  250. m_lowererMD.LoadHeapArgsCached(instr);
  251. break;
  252. case Js::OpCode::InvalCachedScope:
  253. this->LowerBinaryHelper(instr, IR::HelperOP_InvalidateCachedScope);
  254. break;
  255. case Js::OpCode::InitCachedScope:
  256. if (instr->m_func->GetJITFunctionBody()->GetDoScopeObjectCreation() || !instr->m_func->IsStackArgsEnabled())
  257. {
  258. instrPrev = this->LowerInitCachedScope(instr);
  259. }
  260. else
  261. {
  262. instr->ReplaceSrc1(IR::AddrOpnd::NewNull(instr->m_func));
  263. instr->m_opcode = Js::OpCode::Ld_A;
  264. instrPrev = instr;
  265. if (PHASE_TRACE1(Js::StackArgFormalsOptPhase))
  266. {
  267. Output::Print(_u("StackArgFormals : %s (%d) :Removing Scope object creation in Lowerer and replacing it with MOV NULL. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(), instr->m_func->GetFunctionNumber());
  268. Output::Flush();
  269. }
  270. }
  271. break;
  272. case Js::OpCode::NewScopeObject:
  273. {
  274. Func * currFunc = instr->m_func;
  275. if (currFunc->GetJITFunctionBody()->GetDoScopeObjectCreation() || !currFunc->IsStackArgsEnabled())
  276. {
  277. //Call Helper that creates scope object and does type transition for the formals
  278. if (currFunc->IsStackArgsEnabled() && currFunc->GetJITFunctionBody()->GetInParamsCount() != 1)
  279. {
  280. // s3 = formals are let decls
  281. this->m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(currFunc->GetHasNonSimpleParams() ? TRUE : FALSE, TyUint8, currFunc));
  282. // s2 = current function.
  283. IR::Opnd * paramOpnd = LoadFunctionBodyOpnd(instr);
  284. this->m_lowererMD.LoadHelperArgument(instr, paramOpnd);
  285. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewScopeObjectWithFormals);
  286. }
  287. else
  288. {
  289. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewScopeObject);
  290. }
  291. }
  292. else
  293. {
  294. instr->SetSrc1(IR::AddrOpnd::NewNull(instr->m_func));
  295. instr->m_opcode = Js::OpCode::Ld_A;
  296. instrPrev = instr;
  297. if (PHASE_TRACE1(Js::StackArgFormalsOptPhase))
  298. {
  299. Output::Print(_u("StackArgFormals : %s (%d) :Removing Scope object creation in Lowerer and replacing it with MOV NULL. \n"), currFunc->GetJITFunctionBody()->GetDisplayName(), currFunc->GetFunctionNumber());
  300. Output::Flush();
  301. }
  302. }
  303. break;
  304. }
  305. case Js::OpCode::NewStackScopeSlots:
  306. this->LowerNewScopeSlots(instr, m_func->DoStackScopeSlots());
  307. break;
  308. case Js::OpCode::NewScopeSlots:
  309. this->LowerNewScopeSlots(instr, false);
  310. break;
  311. case Js::OpCode::InitLocalClosure:
  312. // Real initialization of the stack pointers happens on entry to the function, so this instruction
  313. // (which exists to provide a def in the IR) can go away.
  314. instr->Remove();
  315. break;
  316. case Js::OpCode::NewScopeSlotsWithoutPropIds:
  317. this->LowerBinaryHelperMemWithFuncBody(instr, IR::HelperOP_NewScopeSlotsWithoutPropIds);
  318. break;
  319. case Js::OpCode::NewBlockScope:
  320. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewBlockScope);
  321. break;
  322. case Js::OpCode::NewPseudoScope:
  323. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewPseudoScope);
  324. break;
  325. case Js::OpCode::CloneInnerScopeSlots:
  326. this->LowerUnaryHelperMem(instr, IR::HelperOP_CloneInnerScopeSlots);
  327. break;
  328. case Js::OpCode::CloneBlockScope:
  329. this->LowerUnaryHelperMem(instr, IR::HelperOP_CloneBlockScope);
  330. break;
  331. case Js::OpCode::GetCachedFunc:
  332. this->LowerGetCachedFunc(instr);
  333. break;
  334. case Js::OpCode::BrFncCachedScopeEq:
  335. case Js::OpCode::BrFncCachedScopeNeq:
  336. this->LowerBrFncCachedScopeEq(instr);
  337. break;
  338. case Js::OpCode::CommitScope:
  339. this->LowerCommitScope(instr);
  340. break;
  341. case Js::OpCode::LdFldForTypeOf:
  342. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValueForTypeOf, IR::HelperOp_PatchGetValuePolymorphicForTypeOf,
  343. IR::HelperOp_PatchGetValueForTypeOf, IR::HelperOp_PatchGetValuePolymorphicForTypeOf);
  344. break;
  345. case Js::OpCode::LdFld:
  346. case Js::OpCode::LdFldForCallApplyTarget:
  347. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValue, IR::HelperOp_PatchGetValuePolymorphic,
  348. IR::HelperOp_PatchGetValue, IR::HelperOp_PatchGetValuePolymorphic);
  349. break;
  350. case Js::OpCode::LdSuperFld:
  351. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValueWithThisPtr, IR::HelperOp_PatchGetValuePolymorphicWithThisPtr,
  352. IR::HelperOp_PatchGetValueWithThisPtr, IR::HelperOp_PatchGetValuePolymorphicWithThisPtr);
  353. break;
  354. case Js::OpCode::LdRootFld:
  355. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootValue, IR::HelperOp_PatchGetRootValuePolymorphic,
  356. IR::HelperOp_PatchGetRootValue, IR::HelperOp_PatchGetRootValuePolymorphic);
  357. break;
  358. case Js::OpCode::LdRootFldForTypeOf:
  359. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootValueForTypeOf, IR::HelperOp_PatchGetRootValuePolymorphicForTypeOf,
  360. IR::HelperOp_PatchGetRootValueForTypeOf, IR::HelperOp_PatchGetRootValuePolymorphicForTypeOf);
  361. break;
  362. case Js::OpCode::LdMethodFldPolyInlineMiss:
  363. instrPrev = LowerLdFld(instr, IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic, true, nullptr, true);
  364. break;
  365. case Js::OpCode::LdMethodFld:
  366. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic,
  367. IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic);
  368. break;
  369. case Js::OpCode::LdRootMethodFld:
  370. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootMethod, IR::HelperOp_PatchGetRootMethodPolymorphic,
  371. IR::HelperOp_PatchGetRootMethod, IR::HelperOp_PatchGetRootMethodPolymorphic);
  372. break;
  373. case Js::OpCode::ScopedLdMethodFld:
  374. // "Scoped" in ScopedLdMethodFld is a bit of a misnomer because it doesn't look through a scope chain.
  375. // Instead the op is to allow for either a LdRootMethodFld or LdMethodFld depending on whether the
  376. // object is the root object or not.
  377. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_ScopedGetMethod, IR::HelperOp_ScopedGetMethodPolymorphic,
  378. IR::HelperOp_ScopedGetMethod, IR::HelperOp_ScopedGetMethodPolymorphic);
  379. break;
  380. case Js::OpCode::LdMethodFromFlags:
  381. {
  382. Assert(instr->HasBailOutInfo());
  383. bool success = GenerateFastLdMethodFromFlags(instr);
  384. AssertMsg(success, "Not expected to generate helper block here");
  385. break;
  386. }
  387. case Js::OpCode::CheckFixedFld:
  388. AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instr->m_func) || !PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func), "CheckFixedFld with fixed prop(Data|Method) phase disabled?");
  389. this->GenerateCheckFixedFld(instr);
  390. break;
  391. case Js::OpCode::CheckPropertyGuardAndLoadType:
  392. instrPrev = this->GeneratePropertyGuardCheckBailoutAndLoadType(instr);
  393. break;
  394. case Js::OpCode::CheckObjType:
  395. this->GenerateCheckObjType(instr);
  396. break;
  397. case Js::OpCode::AdjustObjType:
  398. case Js::OpCode::AdjustObjTypeReloadAuxSlotPtr:
  399. this->LowerAdjustObjType(instr);
  400. break;
  401. case Js::OpCode::DeleteFld:
  402. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteProperty, false, false);
  403. break;
  404. case Js::OpCode::DeleteRootFld:
  405. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteRootProperty, false, false);
  406. break;
  407. case Js::OpCode::DeleteFldStrict:
  408. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteProperty, false, true);
  409. break;
  410. case Js::OpCode::DeleteRootFldStrict:
  411. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteRootProperty, false, true);
  412. break;
  413. case Js::OpCode::ScopedLdFldForTypeOf:
  414. if (!noFieldFastPath)
  415. {
  416. m_lowererMD.GenerateFastScopedLdFld(instr);
  417. }
  418. instrPrev = this->LowerScopedLdFld(instr, IR::HelperOp_PatchGetPropertyForTypeOfScoped, true);
  419. break;
  420. case Js::OpCode::ScopedLdFld:
  421. if (!noFieldFastPath)
  422. {
  423. m_lowererMD.GenerateFastScopedLdFld(instr);
  424. }
  425. instrPrev = this->LowerScopedLdFld(instr, IR::HelperOp_PatchGetPropertyScoped, true);
  426. break;
  427. case Js::OpCode::ScopedLdInst:
  428. instrPrev = this->LowerScopedLdInst(instr, IR::HelperOp_GetInstanceScoped);
  429. break;
  430. case Js::OpCode::ScopedDeleteFld:
  431. instrPrev = this->LowerScopedDelFld(instr, IR::HelperOp_DeletePropertyScoped, false, false);
  432. break;
  433. case Js::OpCode::ScopedDeleteFldStrict:
  434. instrPrev = this->LowerScopedDelFld(instr, IR::HelperOp_DeletePropertyScoped, false, true);
  435. break;
  436. case Js::OpCode::NewScFunc:
  437. instrPrev = this->LowerNewScFunc(instr);
  438. break;
  439. case Js::OpCode::NewScFuncHomeObj:
  440. instrPrev = this->LowerNewScFuncHomeObj(instr);
  441. break;
  442. case Js::OpCode::NewScGenFunc:
  443. instrPrev = this->LowerNewScGenFunc(instr);
  444. break;
  445. case Js::OpCode::NewScGenFuncHomeObj:
  446. instrPrev = this->LowerNewScGenFuncHomeObj(instr);
  447. break;
  448. case Js::OpCode::StFld:
  449. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueNoLocalFastPath, IR::HelperOp_PatchPutValueNoLocalFastPathPolymorphic,
  450. IR::HelperOp_PatchPutValue, IR::HelperOp_PatchPutValuePolymorphic, true, Js::PropertyOperation_None);
  451. break;
  452. case Js::OpCode::StSuperFld:
  453. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueWithThisPtrNoLocalFastPath, IR::HelperOp_PatchPutValueWithThisPtrNoLocalFastPathPolymorphic,
  454. IR::HelperOp_PatchPutValueWithThisPtr, IR::HelperOp_PatchPutValueWithThisPtrPolymorphic, true, isStrictMode ? Js::PropertyOperation_StrictMode : Js::PropertyOperation_None);
  455. break;
  456. case Js::OpCode::StRootFld:
  457. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPathPolymorphic,
  458. IR::HelperOp_PatchPutRootValue, IR::HelperOp_PatchPutRootValuePolymorphic, true, Js::PropertyOperation_Root);
  459. break;
  460. case Js::OpCode::StFldStrict:
  461. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueNoLocalFastPath, IR::HelperOp_PatchPutValueNoLocalFastPathPolymorphic,
  462. IR::HelperOp_PatchPutValue, IR::HelperOp_PatchPutValuePolymorphic, true, Js::PropertyOperation_StrictMode);
  463. break;
  464. case Js::OpCode::StRootFldStrict:
  465. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPathPolymorphic,
  466. IR::HelperOp_PatchPutRootValue, IR::HelperOp_PatchPutRootValuePolymorphic, true, Js::PropertyOperation_StrictModeRoot);
  467. break;
  468. case Js::OpCode::InitFld:
  469. case Js::OpCode::InitRootFld:
  470. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchInitValue, IR::HelperOp_PatchInitValuePolymorphic,
  471. IR::HelperOp_PatchInitValue, IR::HelperOp_PatchInitValuePolymorphic, false, Js::PropertyOperation_None);
  472. break;
  473. case Js::OpCode::ScopedInitFunc:
  474. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_InitFuncScoped, false);
  475. break;
  476. case Js::OpCode::ScopedStFld:
  477. case Js::OpCode::ScopedStFldStrict:
  478. if (!noFieldFastPath)
  479. {
  480. m_lowererMD.GenerateFastScopedStFld(instr);
  481. }
  482. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_PatchSetPropertyScoped, true, true,
  483. instr->m_opcode == Js::OpCode::ScopedStFld ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode);
  484. break;
  485. case Js::OpCode::ConsoleScopedStFld:
  486. case Js::OpCode::ConsoleScopedStFldStrict:
  487. {
  488. if (!noFieldFastPath)
  489. {
  490. m_lowererMD.GenerateFastScopedStFld(instr);
  491. }
  492. Js::PropertyOperationFlags flags = static_cast<Js::PropertyOperationFlags>((instr->m_opcode == Js::OpCode::ConsoleScopedStFld ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode) | Js::PropertyOperation_AllowUndeclInConsoleScope);
  493. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_ConsolePatchSetPropertyScoped, true, true, flags);
  494. break;
  495. }
  496. case Js::OpCode::LdStr:
  497. m_lowererMD.ChangeToAssign(instr);
  498. break;
  499. case Js::OpCode::CloneStr:
  500. {
  501. GenerateGetImmutableOrScriptUnreferencedString(instr->GetSrc1()->AsRegOpnd(), instr, IR::HelperOp_CompoundStringCloneForAppending, false);
  502. instr->Remove();
  503. break;
  504. }
  505. case Js::OpCode::NewScObjArray:
  506. instrPrev = this->LowerNewScObjArray(instr);
  507. break;
  508. case Js::OpCode::NewScObject:
  509. case Js::OpCode::NewScObjectSpread:
  510. case Js::OpCode::NewScObjArraySpread:
  511. instrPrev = this->LowerNewScObject(instr, true, true);
  512. break;
  513. case Js::OpCode::NewScObjectNoCtor:
  514. instrPrev = this->LowerNewScObject(instr, false, true);
  515. break;
  516. case Js::OpCode::NewScObjectNoCtorFull:
  517. instrPrev = this->LowerNewScObject(instr, false, true, true);
  518. break;
  519. case Js::OpCode::GetNewScObject:
  520. instrPrev = this->LowerGetNewScObject(instr);
  521. break;
  522. case Js::OpCode::UpdateNewScObjectCache:
  523. instrPrev = instr->m_prev;
  524. this->LowerUpdateNewScObjectCache(instr, instr->GetSrc2(), instr->GetSrc1(), true /* isCtorFunction */);
  525. instr->Remove();
  526. break;
  527. case Js::OpCode::NewScObjectSimple:
  528. this->LowerNewScObjectSimple(instr);
  529. break;
  530. case Js::OpCode::NewScObjectLiteral:
  531. this->LowerNewScObjectLiteral(instr);
  532. break;
  533. case Js::OpCode::LdPropIds:
  534. m_lowererMD.ChangeToAssign(instr);
  535. break;
  536. case Js::OpCode::StArrSegItem_A:
  537. instrPrev = this->LowerArraySegmentVars(instr);
  538. break;
  539. case Js::OpCode::InlineMathAcos:
  540. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Acos);
  541. break;
  542. case Js::OpCode::InlineMathAsin:
  543. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Asin);
  544. break;
  545. case Js::OpCode::InlineMathAtan:
  546. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Atan);
  547. break;
  548. case Js::OpCode::InlineMathAtan2:
  549. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Atan2);
  550. break;
  551. case Js::OpCode::InlineMathCos:
  552. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Cos);
  553. break;
  554. case Js::OpCode::InlineMathExp:
  555. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Exp);
  556. break;
  557. case Js::OpCode::InlineMathLog:
  558. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Log);
  559. break;
  560. case Js::OpCode::InlineMathPow:
  561. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Pow);
  562. break;
  563. case Js::OpCode::InlineMathSin:
  564. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Sin);
  565. break;
  566. case Js::OpCode::InlineMathSqrt:
  567. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  568. break;
  569. case Js::OpCode::InlineMathTan:
  570. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Tan);
  571. break;
  572. case Js::OpCode::InlineMathFloor:
  573. #if defined(ASMJS_PLAT) && (defined(_M_X64) || defined(_M_IX86))
  574. if (!AutoSystemInfo::Data.SSE4_1Available() && instr->m_func->GetJITFunctionBody()->IsAsmJsMode())
  575. {
  576. m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_FloorFlt, IR::HelperDirectMath_FloorDb);
  577. break;
  578. }
  579. #endif
  580. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  581. break;
  582. case Js::OpCode::InlineMathCeil:
  583. #if defined(ASMJS_PLAT) && (defined(_M_X64) || defined(_M_IX86))
  584. if (!AutoSystemInfo::Data.SSE4_1Available() && instr->m_func->GetJITFunctionBody()->IsAsmJsMode())
  585. {
  586. m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_CeilFlt, IR::HelperDirectMath_CeilDb);
  587. break;
  588. }
  589. #endif
  590. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  591. break;
  592. case Js::OpCode::InlineMathRound:
  593. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  594. break;
  595. case Js::OpCode::InlineMathAbs:
  596. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  597. break;
  598. case Js::OpCode::InlineMathImul:
  599. GenerateFastInlineMathImul(instr);
  600. break;
  601. case Js::OpCode::Ctz:
  602. GenerateCtz(instr);
  603. break;
  604. case Js::OpCode::PopCnt:
  605. GeneratePopCnt(instr);
  606. break;
  607. case Js::OpCode::InlineMathClz:
  608. GenerateFastInlineMathClz(instr);
  609. break;
  610. case Js::OpCode::InlineMathFround:
  611. GenerateFastInlineMathFround(instr);
  612. break;
  613. case Js::OpCode::Reinterpret_Prim:
  614. LowerReinterpretPrimitive(instr);
  615. break;
  616. case Js::OpCode::InlineMathMin:
  617. case Js::OpCode::InlineMathMax:
  618. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  619. break;
  620. case Js::OpCode::InlineMathRandom:
  621. this->GenerateFastInlineBuiltInMathRandom(instr);
  622. break;
  623. #ifdef ENABLE_DOM_FAST_PATH
  624. case Js::OpCode::DOMFastPathGetter:
  625. this->LowerFastInlineDOMFastPathGetter(instr);
  626. break;
  627. #endif
  628. case Js::OpCode::InlineArrayPush:
  629. this->GenerateFastInlineArrayPush(instr);
  630. break;
  631. case Js::OpCode::InlineArrayPop:
  632. this->GenerateFastInlineArrayPop(instr);
  633. break;
634. // Retrieve the function object from the ArgOut_A_InlineSpecialized instruction and push it on the stack after all the other arguments have been pushed.
635. // The lowering of the direct call to a helper is handled by GenerateDirectCall (architecture specific).
  636. case Js::OpCode::CallDirect:
  637. {
  638. IR::Opnd * src1 = instr->GetSrc1();
  639. Assert(src1->IsHelperCallOpnd());
  640. switch (src1->AsHelperCallOpnd()->m_fnHelper)
  641. {
  642. case IR::JnHelperMethod::HelperString_Split:
  643. case IR::JnHelperMethod::HelperString_Match:
  644. GenerateFastInlineStringSplitMatch(instr);
  645. break;
  646. case IR::JnHelperMethod::HelperRegExp_Exec:
  647. GenerateFastInlineRegExpExec(instr);
  648. break;
  649. case IR::JnHelperMethod::HelperGlobalObject_ParseInt:
  650. GenerateFastInlineGlobalObjectParseInt(instr);
  651. break;
  652. case IR::JnHelperMethod::HelperString_FromCharCode:
  653. GenerateFastInlineStringFromCharCode(instr);
  654. break;
  655. case IR::JnHelperMethod::HelperString_FromCodePoint:
  656. GenerateFastInlineStringFromCodePoint(instr);
  657. break;
  658. case IR::JnHelperMethod::HelperString_CharAt:
  659. GenerateFastInlineStringCharCodeAt(instr, Js::BuiltinFunction::JavascriptString_CharAt);
  660. break;
  661. case IR::JnHelperMethod::HelperString_CharCodeAt:
  662. GenerateFastInlineStringCharCodeAt(instr, Js::BuiltinFunction::JavascriptString_CharCodeAt);
  663. break;
  664. case IR::JnHelperMethod::HelperString_Replace:
  665. GenerateFastInlineStringReplace(instr);
  666. break;
  667. case IR::JnHelperMethod::HelperObject_HasOwnProperty:
  668. this->GenerateFastInlineHasOwnProperty(instr);
  669. break;
  670. case IR::JnHelperMethod::HelperArray_IsArray:
  671. this->GenerateFastInlineIsArray(instr);
  672. break;
  673. }
  674. instrPrev = LowerCallDirect(instr);
  675. break;
  676. }
  677. case Js::OpCode::CallIDynamic:
  678. {
  679. Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
  680. instrPrev = this->LowerCallIDynamic(instr, (ushort)flags);
  681. break;
  682. }
  683. case Js::OpCode::CallIDynamicSpread:
  684. {
  685. Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
  686. instrPrev = this->LowerCallIDynamicSpread(instr, (ushort)flags);
  687. break;
  688. }
  689. case Js::OpCode::CallI:
  690. case Js::OpCode::CallINew:
  691. case Js::OpCode::CallIFixed:
  692. case Js::OpCode::CallINewTargetNew:
  693. {
  694. Js::CallFlags flags = Js::CallFlags_None;
  695. if (instr->isCtorCall)
  696. {
  697. flags = Js::CallFlags_New;
  698. }
  699. else
  700. {
  701. if (instr->m_opcode == Js::OpCode::CallINew)
  702. {
  703. flags = Js::CallFlags_New;
  704. }
  705. else if (instr->m_opcode == Js::OpCode::CallINewTargetNew)
  706. {
  707. flags = (Js::CallFlags) (Js::CallFlags_New | Js::CallFlags_ExtraArg | Js::CallFlags_NewTarget);
  708. }
  709. if (instr->GetDst())
  710. {
  711. flags = (Js::CallFlags) (flags | Js::CallFlags_Value);
  712. }
  713. else
  714. {
  715. flags = (Js::CallFlags) (flags | Js::CallFlags_NotUsed);
  716. }
  717. }
  718. if (!PHASE_OFF(Js::CallFastPathPhase, this->m_func) && !noMathFastPath)
  719. {
  720. // We shouldn't have turned this instruction into a fixed method call if we're calling one of the
  721. // built-ins we still inline in the lowerer.
  722. Assert(instr->m_opcode != Js::OpCode::CallIFixed || !Func::IsBuiltInInlinedInLowerer(instr->GetSrc1()));
723. // InlineBuiltInLibraryCall is disabled because it does not work well with second-chance register allocation
724. // and may invalidate live-on-back-edge data by introducing refs across loops. See Winblue Bug: 577641
  725. //// Callee may still be a library built-in; if so, generate it inline.
  726. //if (this->InlineBuiltInLibraryCall(instr))
  727. //{
  728. // m_lowererMD.LowerCallI(instr, (ushort)flags, true /*isHelper*/);
  729. //}
  730. //else
  731. //{
  732. m_lowererMD.LowerCallI(instr, (ushort)flags);
  733. //}
  734. }
  735. else
  736. {
  737. m_lowererMD.LowerCallI(instr, (ushort)flags);
  738. }
  739. break;
  740. }
  741. case Js::OpCode::AsmJsCallI:
  742. instrPrev = m_lowererMD.LowerAsmJsCallI(instr);
  743. break;
  744. case Js::OpCode::AsmJsCallE:
  745. instrPrev = m_lowererMD.LowerAsmJsCallE(instr);
  746. break;
  747. case Js::OpCode::CallIEval:
  748. {
  749. Js::CallFlags flags = (Js::CallFlags)(Js::CallFlags_ExtraArg | (instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed));
  750. if (IsSpreadCall(instr))
  751. {
  752. instrPrev = LowerSpreadCall(instr, flags);
  753. }
  754. else
  755. {
  756. m_lowererMD.LowerCallI(instr, (ushort)flags);
  757. }
  758. #ifdef PERF_HINT
  759. if (PHASE_TRACE1(Js::PerfHintPhase))
  760. {
  761. WritePerfHint(PerfHints::CallsEval, this->m_func, instr->GetByteCodeOffset());
  762. }
  763. #endif
  764. break;
  765. }
  766. case Js::OpCode::CallHelper:
  767. instrPrev = m_lowererMD.LowerCallHelper(instr);
  768. break;
  769. case Js::OpCode::Ret:
  770. if (instr->m_next->m_opcode != Js::OpCode::FunctionExit)
  771. {
  772. // If this RET isn't at the end of the function, insert a branch to
  773. // the epilog.
  774. IR::Instr *exitPrev = m_func->m_exitInstr->m_prev;
  775. if (!exitPrev->IsLabelInstr())
  776. {
  777. exitPrev = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  778. m_func->m_exitInstr->InsertBefore(exitPrev);
  779. }
  780. IR::BranchInstr *exitBr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode,
  781. exitPrev->AsLabelInstr(), m_func);
  782. instr->InsertAfter(exitBr);
  783. }
  784. m_lowererMD.LowerRet(instr);
  785. break;
  786. case Js::OpCode::LdArgumentsFromFrame:
  787. this->LoadArgumentsFromFrame(instr);
  788. break;
  789. case Js::OpCode::LdC_A_I4:
  790. {
  791. IR::Opnd *src1 = instr->UnlinkSrc1();
  792. AssertMsg(src1->IsIntConstOpnd(), "Source of LdC_A_I4 should be an IntConst...");
  793. instrPrev = this->LowerLoadVar(instr,
  794. IR::AddrOpnd::NewFromNumber(static_cast<int32>(src1->AsIntConstOpnd()->GetValue()), this->m_func));
  795. src1->Free(this->m_func);
  796. break;
  797. }
  798. case Js::OpCode::LdC_A_R8:
  799. {
  800. IR::Opnd *src1 = instr->UnlinkSrc1();
  801. AssertMsg(src1->IsFloatConstOpnd(), "Source of LdC_A_R8 should be a FloatConst...");
  802. instrPrev = this->LowerLoadVar(instr, src1->AsFloatConstOpnd()->GetAddrOpnd(this->m_func));
  803. src1->Free(this->m_func);
  804. break;
  805. }
  806. case Js::OpCode::LdC_F8_R8:
  807. {
  808. IR::Opnd *src1 = instr->UnlinkSrc1();
  809. AssertMsg(src1->IsFloatConstOpnd() || src1->IsFloat32ConstOpnd(), "Source of LdC_F8_R8 should be a FloatConst...");
  810. if (src1->IsFloatConstOpnd())
  811. {
  812. instrPrev = m_lowererMD.LoadFloatValue(instr->UnlinkDst()->AsRegOpnd(), src1->AsFloatConstOpnd()->m_value, instr);
  813. }
  814. else
  815. {
  816. instrPrev = m_lowererMD.LoadFloatValue(instr->UnlinkDst()->AsRegOpnd(), src1->AsFloat32ConstOpnd()->m_value, instr);
  817. }
  818. src1->Free(this->m_func);
  819. instr->Remove();
  820. break;
  821. }
  822. case Js::OpCode::NewRegEx:
  823. instrPrev = this->LowerNewRegEx(instr);
  824. break;
  825. case Js::OpCode::Conv_Obj:
  826. this->LowerUnaryHelperMem(instr, IR::HelperOp_ConvObject);
  827. break;
  828. case Js::OpCode::NewUnscopablesWrapperObject:
  829. this->LowerUnaryHelperMem(instr, IR::HelperOp_NewUnscopablesWrapperObject);
  830. break;
  831. case Js::OpCode::LdCustomSpreadIteratorList:
  832. this->LowerUnaryHelperMem(instr, IR::HelperOp_ToSpreadedFunctionArgument);
  833. break;
  834. case Js::OpCode::Conv_Numeric:
  835. case Js::OpCode::Conv_Num:
  836. this->LowerConvNum(instr, noMathFastPath);
  837. break;
  838. case Js::OpCode::Incr_Num_A:
  839. case Js::OpCode::Incr_A:
  840. if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  841. {
  842. this->LowerUnaryHelperMem(instr, IR::HelperOp_Increment);
  843. }
  844. else
  845. {
  846. instr->SetSrc2(IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(1), IR::AddrOpndKindConstantVar, this->m_func));
  847. m_lowererMD.GenerateFastAdd(instr);
  848. instr->FreeSrc2();
  849. this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Increment));
  850. }
  851. break;
  852. case Js::OpCode::Decr_Num_A:
  853. case Js::OpCode::Decr_A:
  854. if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  855. {
  856. this->LowerUnaryHelperMem(instr, IR::HelperOp_Decrement);
  857. }
  858. else
  859. {
  860. instr->SetSrc2(IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(1), IR::AddrOpndKindConstantVar, this->m_func));
  861. m_lowererMD.GenerateFastSub(instr);
  862. instr->FreeSrc2();
  863. this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Decrement));
  864. }
  865. break;
  866. case Js::OpCode::Neg_A:
  867. if (instr->GetDst()->IsFloat())
  868. {
  869. Assert(instr->GetSrc1()->IsFloat());
  870. m_lowererMD.LowerToFloat(instr);
  871. }
  872. else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  873. {
  874. this->LowerUnaryHelperMem(instr, IR::HelperOp_Negate);
  875. }
  876. else if (m_lowererMD.GenerateFastNeg(instr))
  877. {
  878. this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Negate));
  879. }
  880. break;
  881. case Js::OpCode::Not_A:
  882. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
  883. {
  884. this->LowerUnaryHelperMem(instr, IR::HelperOp_Not);
  885. }
  886. else if (m_lowererMD.GenerateFastNot(instr))
  887. {
  888. this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Not));
  889. }
  890. break;
  891. case Js::OpCode::BrEq_I4:
  892. case Js::OpCode::BrNeq_I4:
  893. case Js::OpCode::BrGt_I4:
  894. case Js::OpCode::BrGe_I4:
  895. case Js::OpCode::BrLt_I4:
  896. case Js::OpCode::BrLe_I4:
  897. case Js::OpCode::BrUnGt_I4:
  898. case Js::OpCode::BrUnGe_I4:
  899. case Js::OpCode::BrUnLt_I4:
  900. case Js::OpCode::BrUnLe_I4:
  901. {
902. // See the calls to MarkOneFltTmpSym under BrSrEq. This handles the case
903. // where a branch is type-specialized and uses the result of a float pref op,
904. // whose result must then be saved to a var at the def.
  905. StackSym *sym = instr->GetSrc1()->GetStackSym();
  906. if (sym)
  907. {
  908. sym = sym->GetVarEquivSym(nullptr);
  909. }
  910. sym = instr->GetSrc2()->GetStackSym();
  911. if (sym)
  912. {
  913. sym = sym->GetVarEquivSym(nullptr);
  914. }
  915. }
  916. // FALLTHROUGH
  917. case Js::OpCode::Neg_I4:
  918. case Js::OpCode::Not_I4:
  919. case Js::OpCode::Add_I4:
  920. case Js::OpCode::Sub_I4:
  921. case Js::OpCode::Mul_I4:
  922. case Js::OpCode::RemU_I4:
  923. case Js::OpCode::Rem_I4:
  924. case Js::OpCode::Or_I4:
  925. case Js::OpCode::Xor_I4:
  926. case Js::OpCode::And_I4:
  927. case Js::OpCode::Shl_I4:
  928. case Js::OpCode::Shr_I4:
  929. case Js::OpCode::ShrU_I4:
  930. case Js::OpCode::Rol_I4:
  931. case Js::OpCode::Ror_I4:
  932. case Js::OpCode::BrTrue_I4:
  933. case Js::OpCode::BrFalse_I4:
  934. #ifdef _M_IX86
  935. if (
  936. instr->GetDst() && instr->GetDst()->IsInt64() ||
  937. instr->GetSrc1() && instr->GetSrc1()->IsInt64() ||
  938. instr->GetSrc2() && instr->GetSrc2()->IsInt64()
  939. )
  940. {
  941. m_lowererMD.EmitInt64Instr(instr);
  942. break;
  943. }
  944. #endif
  945. if (instr->HasBailOutInfo())
  946. {
  947. const auto bailOutKind = instr->GetBailOutKind();
  948. if (bailOutKind & IR::BailOutOnResultConditions ||
  949. bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck)
  950. {
  951. const auto nonBailOutInstr = SplitBailOnResultCondition(instr);
  952. IR::LabelInstr *bailOutLabel, *skipBailOutLabel;
  953. LowerBailOnResultCondition(instr, &bailOutLabel, &skipBailOutLabel);
  954. LowerInstrWithBailOnResultCondition(nonBailOutInstr, bailOutKind, bailOutLabel, skipBailOutLabel);
  955. }
  956. else if (bailOutKind == IR::BailOnModByPowerOf2)
  957. {
  958. Assert(instr->m_opcode == Js::OpCode::Rem_I4);
  959. bool fastPath = GenerateSimplifiedInt4Rem(instr);
  960. Assert(fastPath);
  961. instr->FreeSrc1();
  962. instr->FreeSrc2();
  963. this->GenerateBailOut(instr);
  964. }
  965. }
  966. else
  967. {
  968. if (instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::RemU_I4)
  969. {
  970. // fast path
  971. this->GenerateSimplifiedInt4Rem(instr);
  972. // slow path
  973. this->LowerRemI4(instr);
  974. }
  975. #if defined(_M_IX86) || defined(_M_X64)
  976. else if (instr->m_opcode == Js::OpCode::Mul_I4)
  977. {
  978. if (!LowererMD::GenerateSimplifiedInt4Mul(instr))
  979. {
  980. m_lowererMD.EmitInt4Instr(instr);
  981. }
  982. }
  983. #endif
  984. else
  985. {
  986. m_lowererMD.EmitInt4Instr(instr);
  987. }
  988. }
  989. break;
  990. case Js::OpCode::TrapIfMinIntOverNegOne:
  991. LowerTrapIfMinIntOverNegOne(instr);
  992. break;
  993. case Js::OpCode::TrapIfTruncOverflow:
  994. LowererMD::ChangeToAssign(instr);
  995. break;
  996. case Js::OpCode::TrapIfZero:
  997. LowerTrapIfZero(instr);
  998. break;
  999. case Js::OpCode::TrapIfUnalignedAccess:
  1000. instrPrev = LowerTrapIfUnalignedAccess(instr);
  1001. break;
  1002. case Js::OpCode::DivU_I4:
  1003. case Js::OpCode::Div_I4:
  1004. this->LowerDivI4(instr);
  1005. break;
  1006. case Js::OpCode::Typeof:
  1007. m_lowererMD.LowerTypeof(instr);
  1008. break;
  1009. case Js::OpCode::TypeofElem:
  1010. this->LowerLdElemI(instr, IR::HelperOp_TypeofElem, false);
  1011. break;
  1012. case Js::OpCode::LdLen_A:
  1013. {
  1014. bool fastPath = !noMathFastPath;
  1015. if (!fastPath && instr->HasBailOutInfo())
  1016. {
  1017. // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
  1018. // bailouts require the fast path.
  1019. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  1020. if (bailOutKind & IR::BailOutKindBits)
  1021. {
  1022. fastPath = true;
  1023. }
  1024. else
  1025. {
  1026. const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
  1027. fastPath =
  1028. bailOutKindMinusBits &&
  1029. bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
  1030. bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
  1031. }
  1032. }
  1033. bool instrIsInHelperBlock = false;
  1034. if (!fastPath)
  1035. {
  1036. LowerLdLen(instr, false);
  1037. }
  1038. else if (GenerateFastLdLen(instr, &instrIsInHelperBlock))
  1039. {
  1040. Assert(
  1041. !instr->HasBailOutInfo() ||
  1042. (instr->GetBailOutKind() & ~IR::BailOutKindBits) != IR::BailOutOnIrregularLength);
  1043. LowerLdLen(instr, instrIsInHelperBlock);
  1044. }
  1045. break;
  1046. }
  1047. case Js::OpCode::LdThis:
  1048. {
  1049. if (noFieldFastPath || !GenerateLdThisCheck(instr))
  1050. {
  1051. IR::JnHelperMethod meth;
  1052. if (instr->IsJitProfilingInstr())
  1053. {
  1054. Assert(instr->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
  1055. m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
  1056. meth = IR::HelperSimpleProfiledLdThis;
  1057. this->LowerBinaryHelper(instr, meth);
  1058. }
  1059. else
  1060. {
  1061. meth = IR::HelperLdThisNoFastPath;
  1062. this->LowerBinaryHelperMem(instr, meth);
  1063. }
  1064. }
  1065. else
  1066. {
  1067. this->LowerBinaryHelperMem(instr, IR::HelperLdThis);
  1068. }
  1069. break;
  1070. }
  1071. case Js::OpCode::LdNativeCodeData:
  1072. Assert(m_func->IsOOPJIT());
  1073. instrPrev = LowerLdNativeCodeData(instr);
  1074. break;
  1075. case Js::OpCode::StrictLdThis:
  1076. if (noFieldFastPath)
  1077. {
  1078. IR::JnHelperMethod meth;
  1079. if (instr->IsJitProfilingInstr())
  1080. {
  1081. Assert(instr->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
  1082. m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
  1083. meth = IR::HelperSimpleProfiledStrictLdThis;
  1084. this->LowerUnaryHelper(instr, meth);
  1085. }
  1086. else
  1087. {
  1088. meth = IR::HelperStrictLdThis;
  1089. this->LowerUnaryHelperMem(instr, meth);
  1090. }
  1091. }
  1092. else
  1093. {
  1094. this->GenerateLdThisStrict(instr);
  1095. instr->Remove();
  1096. }
  1097. break;
  1098. case Js::OpCode::CheckThis:
  1099. GenerateLdThisCheck(instr);
  1100. instr->FreeSrc1();
  1101. this->GenerateBailOut(instr);
  1102. break;
  1103. case Js::OpCode::StrictCheckThis:
  1104. this->GenerateLdThisStrict(instr);
  1105. instr->FreeSrc1();
  1106. this->GenerateBailOut(instr);
  1107. break;
  1108. case Js::OpCode::NewScArray:
  1109. instrPrev = this->LowerNewScArray(instr);
  1110. break;
  1111. case Js::OpCode::NewScArrayWithMissingValues:
  1112. this->LowerUnaryHelperMem(instr, IR::HelperScrArr_OP_NewScArrayWithMissingValues);
  1113. break;
  1114. case Js::OpCode::NewScIntArray:
  1115. instrPrev = this->LowerNewScIntArray(instr);
  1116. break;
  1117. case Js::OpCode::NewScFltArray:
  1118. instrPrev = this->LowerNewScFltArray(instr);
  1119. break;
  1120. case Js::OpCode::InitForInEnumerator:
  1121. this->LowerInitForInEnumerator(instr);
  1122. break;
  1123. case Js::OpCode::Add_A:
  1124. if (instr->GetDst()->IsFloat())
  1125. {
  1126. Assert(instr->GetSrc1()->IsFloat());
  1127. Assert(instr->GetSrc2()->IsFloat());
  1128. // we don't want to mix float32 and float64
  1129. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  1130. Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
  1131. m_lowererMD.LowerToFloat(instr);
  1132. }
  1133. else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  1134. {
  1135. this->LowerBinaryHelperMem(instr, IR::HelperOp_Add);
  1136. }
  1137. else if (m_lowererMD.TryGenerateFastMulAdd(instr, &instrPrev))
  1138. {
  1139. }
  1140. else
  1141. {
  1142. m_lowererMD.GenerateFastAdd(instr);
  1143. this->LowerBinaryHelperMemWithTemp3(instr, IR_HELPER_OP_FULL_OR_INPLACE(Add), IR::HelperOp_AddLeftDead);
  1144. }
  1145. break;
  1146. case Js::OpCode::Div_A:
  1147. {
  1148. if (instr->IsJitProfilingInstr()) {
  1149. LowerProfiledBinaryOp(instr->AsJitProfilingInstr(), IR::HelperSimpleProfiledDivide);
  1150. }
  1151. else if (instr->GetDst()->IsFloat())
  1152. {
  1153. Assert(instr->GetSrc1()->IsFloat());
  1154. Assert(instr->GetSrc2()->IsFloat());
  1155. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  1156. Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
  1157. m_lowererMD.LowerToFloat(instr);
  1158. }
  1159. else
  1160. {
  1161. if (!PHASE_OFF(Js::MathFastPathPhase, this->m_func) && !noMathFastPath)
  1162. {
  1163. IR::AddrOpnd *src2 = instr->GetSrc2()->IsAddrOpnd() ? instr->GetSrc2()->AsAddrOpnd() : nullptr;
  1164. if (src2 && src2->IsVar() && Js::TaggedInt::Is(src2->m_address))
  1165. {
  1166. int32 value = Js::TaggedInt::ToInt32(src2->m_address);
  1167. if (Math::IsPow2(value))
  1168. {
  1169. m_lowererMD.GenerateFastDivByPow2(instr);
  1170. }
  1171. }
  1172. }
  1173. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Divide));
  1174. }
  1175. break;
  1176. }
  1177. case Js::OpCode::Expo_A:
  1178. {
  1179. if (instr->GetDst()->IsFloat())
  1180. {
  1181. Assert(instr->GetSrc1()->IsFloat());
  1182. Assert(instr->GetSrc2()->IsFloat());
  1183. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  1184. Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
  1185. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Pow);
  1186. }
  1187. else
  1188. {
  1189. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Exponentiation));
  1190. }
  1191. break;
  1192. }
  1193. case Js::OpCode::Mul_A:
  1194. if (instr->GetDst()->IsFloat())
  1195. {
  1196. Assert(instr->GetSrc1()->IsFloat());
  1197. Assert(instr->GetSrc2()->IsFloat());
  1198. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  1199. Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
  1200. m_lowererMD.LowerToFloat(instr);
  1201. }
  1202. else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  1203. {
  1204. this->LowerBinaryHelperMem(instr, IR::HelperOp_Multiply);
  1205. }
  1206. else if (m_lowererMD.GenerateFastMul(instr))
  1207. {
  1208. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Multiply));
  1209. }
  1210. break;
  1211. case Js::OpCode::Rem_A:
  1212. if (instr->GetDst()->IsFloat64())
  1213. {
  1214. this->LowerRemR8(instr);
  1215. }
  1216. else if (instr->IsJitProfilingInstr())
  1217. {
  1218. this->LowerProfiledBinaryOp(instr->AsJitProfilingInstr(), IR::HelperSimpleProfiledRemainder);
  1219. }
  1220. else
  1221. {
  1222. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Modulus));
  1223. }
  1224. break;
  1225. case Js::OpCode::Sub_A:
  1226. if (instr->GetDst()->IsFloat())
  1227. {
  1228. Assert(instr->GetSrc1()->IsFloat());
  1229. Assert(instr->GetSrc2()->IsFloat());
  1230. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  1231. Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
  1232. m_lowererMD.LowerToFloat(instr);
  1233. }
  1234. else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  1235. {
  1236. this->LowerBinaryHelperMem(instr, IR::HelperOp_Subtract);
  1237. }
  1238. else if (m_lowererMD.TryGenerateFastMulAdd(instr, &instrPrev))
  1239. {
  1240. }
  1241. else
  1242. {
  1243. m_lowererMD.GenerateFastSub(instr);
  1244. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Subtract));
  1245. }
  1246. break;
  1247. case Js::OpCode::And_A:
  1248. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
  1249. {
  1250. this->LowerBinaryHelperMem(instr, IR::HelperOp_And);
  1251. }
  1252. else if (m_lowererMD.GenerateFastAnd(instr))
  1253. {
  1254. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(And));
  1255. }
  1256. break;
  1257. case Js::OpCode::Or_A:
  1258. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
  1259. {
  1260. this->LowerBinaryHelperMem(instr, IR::HelperOp_Or);
  1261. }
  1262. else if (m_lowererMD.GenerateFastOr(instr))
  1263. {
  1264. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Or));
  1265. }
  1266. break;
  1267. case Js::OpCode::Xor_A:
  1268. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastXor(instr))
  1269. {
  1270. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Xor));
  1271. }
  1272. break;
  1273. case Js::OpCode::Shl_A:
  1274. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftLeft(instr))
  1275. {
  1276. this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftLeft);
  1277. }
  1278. break;
  1279. case Js::OpCode::Shr_A:
  1280. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftRight(instr))
  1281. {
  1282. this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftRight);
  1283. }
  1284. break;
  1285. case Js::OpCode::ShrU_A:
  1286. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftRight(instr))
  1287. {
  1288. this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftRightU);
  1289. }
  1290. break;
  1291. case Js::OpCode::CmEq_A:
  1292. {
  1293. instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmEq_A);
  1294. break;
  1295. }
  1296. case Js::OpCode::CmNeq_A:
  1297. {
  1298. instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmNeq_A);
  1299. break;
  1300. }
  1301. case Js::OpCode::CmSrEq_A:
  1302. instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmSrEq_A);
  1303. break;
  1304. case Js::OpCode::CmSrNeq_A:
  1305. instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmSrNeq_A);
  1306. break;
  1307. case Js::OpCode::CmGt_A:
  1308. if (instr->GetSrc1()->IsFloat())
  1309. {
  1310. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1311. this->m_lowererMD.GenerateFastCmXxR8(instr);
  1312. }
  1313. else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
  1314. {
  1315. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmGt_A);
  1316. }
  1317. break;
  1318. case Js::OpCode::CmGe_A:
  1319. if (instr->GetSrc1()->IsFloat())
  1320. {
  1321. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1322. this->m_lowererMD.GenerateFastCmXxR8(instr);
  1323. }
  1324. else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
  1325. {
  1326. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmGe_A);
  1327. }
  1328. break;
  1329. case Js::OpCode::CmLt_A:
  1330. if (instr->GetSrc1()->IsFloat())
  1331. {
  1332. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1333. this->m_lowererMD.GenerateFastCmXxR8(instr);
  1334. }
  1335. else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
  1336. {
  1337. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmLt_A);
  1338. }
  1339. break;
  1340. case Js::OpCode::CmLe_A:
  1341. if (instr->GetSrc1()->IsFloat())
  1342. {
  1343. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1344. this->m_lowererMD.GenerateFastCmXxR8(instr);
  1345. }
  1346. else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
  1347. {
  1348. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmLe_A);
  1349. }
  1350. break;
  1351. case Js::OpCode::CmEq_I4:
  1352. case Js::OpCode::CmNeq_I4:
  1353. case Js::OpCode::CmGe_I4:
  1354. case Js::OpCode::CmGt_I4:
  1355. case Js::OpCode::CmLe_I4:
  1356. case Js::OpCode::CmLt_I4:
  1357. case Js::OpCode::CmUnGe_I4:
  1358. case Js::OpCode::CmUnGt_I4:
  1359. case Js::OpCode::CmUnLe_I4:
  1360. case Js::OpCode::CmUnLt_I4:
  1361. this->m_lowererMD.GenerateFastCmXxI4(instr);
  1362. break;
  1363. case Js::OpCode::Conv_Bool:
  1364. instrPrev = this->m_lowererMD.GenerateConvBool(instr);
  1365. break;
  1366. case Js::OpCode::IsInst:
  1367. this->GenerateFastIsInst(instr);
  1368. instrPrev = this->LowerIsInst(instr, IR::HelperScrObj_OP_IsInst);
  1369. break;
  1370. case Js::OpCode::IsIn:
  1371. this->GenerateFastArrayIsIn(instr);
  1372. this->GenerateFastObjectIsIn(instr);
  1373. this->LowerBinaryHelperMem(instr, IR::HelperOp_IsIn);
  1374. break;
  1375. case Js::OpCode::LdArrViewElem:
  1376. instrPrev = LowerLdArrViewElem(instr);
  1377. break;
  1378. case Js::OpCode::StAtomicWasm:
  1379. instrPrev = LowerStAtomicsWasm(instr);
  1380. break;
  1381. case Js::OpCode::StArrViewElem:
  1382. instrPrev = LowerStArrViewElem(instr);
  1383. break;
  1384. case Js::OpCode::LdAtomicWasm:
  1385. instrPrev = LowerLdAtomicsWasm(instr);
  1386. break;
  1387. case Js::OpCode::LdArrViewElemWasm:
  1388. instrPrev = LowerLdArrViewElemWasm(instr);
  1389. break;
  1390. case Js::OpCode::Memset:
  1391. case Js::OpCode::Memcopy:
  1392. {
  1393. instrPrev = LowerMemOp(instr);
  1394. break;
  1395. }
  1396. case Js::OpCode::ArrayDetachedCheck:
  1397. instrPrev = LowerArrayDetachedCheck(instr);
  1398. break;
  1399. case Js::OpCode::StElemI_A:
  1400. case Js::OpCode::StElemI_A_Strict:
  1401. {
1402. // Note: under the debugger (Fast F12), don't use GenerateFastStElemI, because it calls into ToNumber_Helper,
1403. // which takes a double, and our helper wrapper doesn't currently support double.
  1404. bool fastPath = !noMathFastPath && !m_func->IsJitInDebugMode();
  1405. if (!fastPath && instr->HasBailOutInfo())
  1406. {
  1407. // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
  1408. // bailouts require the fast path.
  1409. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  1410. const IR::BailOutKind bailOutKindBits = bailOutKind & IR::BailOutKindBits;
  1411. if (bailOutKindBits & ~(IR::BailOutOnMissingValue | IR::BailOutConvertedNativeArray))
  1412. {
  1413. fastPath = true;
  1414. }
  1415. else
  1416. {
  1417. const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
  1418. fastPath =
  1419. bailOutKindMinusBits &&
  1420. bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
  1421. bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
  1422. }
  1423. }
  1424. IR::Opnd * opnd = instr->GetDst();
  1425. IR::Opnd * baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
  1426. ValueType profiledBaseValueType = baseOpnd->AsRegOpnd()->GetValueType();
  1427. if (profiledBaseValueType.IsUninitialized() && baseOpnd->AsRegOpnd()->m_sym->IsSingleDef())
  1428. {
  1429. baseOpnd->SetValueType(baseOpnd->FindProfiledValueType());
  1430. }
  1431. bool instrIsInHelperBlock = false;
  1432. if (!fastPath)
  1433. {
  1434. this->LowerStElemI(
  1435. instr,
  1436. instr->m_opcode == Js::OpCode::StElemI_A ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode,
  1437. false);
  1438. }
  1439. else if (GenerateFastStElemI(instr, &instrIsInHelperBlock))
  1440. {
  1441. #if DBG
  1442. if (instr->HasBailOutInfo())
  1443. {
  1444. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  1445. Assert(
  1446. (bailOutKind & ~IR::BailOutKindBits) != IR::BailOutConventionalTypedArrayAccessOnly &&
  1447. !(
  1448. bailOutKind &
  1449. (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)
  1450. ));
  1451. }
  1452. #endif
  1453. this->LowerStElemI(
  1454. instr,
  1455. instr->m_opcode == Js::OpCode::StElemI_A ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode,
  1456. instrIsInHelperBlock);
  1457. }
  1458. break;
  1459. }
  1460. case Js::OpCode::LdElemI_A:
  1461. case Js::OpCode::LdMethodElem:
  1462. {
  1463. bool fastPath =
  1464. !noMathFastPath &&
  1465. (
  1466. instr->m_opcode != Js::OpCode::LdMethodElem ||
  1467. instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyObject()
  1468. );
  1469. if (!fastPath && instr->HasBailOutInfo())
  1470. {
  1471. // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
  1472. // bailouts require the fast path.
  1473. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  1474. if (bailOutKind & IR::BailOutKindBits)
  1475. {
  1476. fastPath = true;
  1477. }
  1478. else
  1479. {
  1480. const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
  1481. fastPath =
  1482. bailOutKindMinusBits &&
  1483. bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
  1484. bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
  1485. }
  1486. }
  1487. IR::Opnd * opnd = instr->GetSrc1();
  1488. IR::Opnd * baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
  1489. ValueType profiledBaseValueType = baseOpnd->AsRegOpnd()->GetValueType();
  1490. if (profiledBaseValueType.IsUninitialized() && baseOpnd->AsRegOpnd()->m_sym->IsSingleDef())
  1491. {
  1492. baseOpnd->SetValueType(baseOpnd->FindProfiledValueType());
  1493. }
  1494. bool instrIsInHelperBlock = false;
  1495. if (!fastPath)
  1496. {
  1497. this->LowerLdElemI(
  1498. instr,
  1499. instr->m_opcode == Js::OpCode::LdElemI_A ? IR::HelperOp_GetElementI : IR::HelperOp_GetMethodElement,
  1500. false);
  1501. }
  1502. else if (GenerateFastLdElemI(instr, &instrIsInHelperBlock))
  1503. {
  1504. #if DBG
  1505. if (instr->HasBailOutInfo())
  1506. {
  1507. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  1508. Assert(
  1509. (bailOutKind & ~IR::BailOutKindBits) != IR::BailOutConventionalTypedArrayAccessOnly &&
  1510. !(
  1511. bailOutKind &
  1512. (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)
  1513. ));
  1514. }
  1515. #endif
  1516. this->LowerLdElemI(
  1517. instr,
  1518. instr->m_opcode == Js::OpCode::LdElemI_A ? IR::HelperOp_GetElementI : IR::HelperOp_GetMethodElement,
  1519. instrIsInHelperBlock);
  1520. }
  1521. break;
  1522. }
  1523. case Js::OpCode::InitSetElemI:
  1524. instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitElemSetter);
  1525. break;
  1526. case Js::OpCode::InitGetElemI:
  1527. instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitElemGetter);
  1528. break;
  1529. case Js::OpCode::InitComputedProperty:
  1530. instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitComputedProperty);
  1531. break;
  1532. case Js::OpCode::Delete_A:
  1533. this->LowerUnaryHelperMem(instr, IR::HelperOp_Delete);
  1534. break;
  1535. case Js::OpCode::DeleteElemI_A:
  1536. this->LowerDeleteElemI(instr, false);
  1537. break;
  1538. case Js::OpCode::DeleteElemIStrict_A:
  1539. this->LowerDeleteElemI(instr, true);
  1540. break;
  1541. case Js::OpCode::BytecodeArgOutCapture:
  1542. m_lowererMD.ChangeToAssign(instr);
  1543. break;
  1544. case Js::OpCode::UnwrapWithObj:
  1545. this->LowerUnaryHelper(instr, IR::HelperOp_UnwrapWithObj);
  1546. break;
  1547. #ifdef ENABLE_WASM
  1548. case Js::OpCode::CheckWasmSignature:
  1549. this->LowerCheckWasmSignature(instr);
  1550. break;
  1551. case Js::OpCode::LdWasmFunc:
  1552. instrPrev = this->LowerLdWasmFunc(instr);
  1553. break;
  1554. case Js::OpCode::GrowWasmMemory:
  1555. instrPrev = this->LowerGrowWasmMemory(instr);
  1556. break;
  1557. #endif
  1558. case Js::OpCode::Ld_I4:
  1559. LowererMD::ChangeToAssign(instr);
  1560. break;
  1561. case Js::OpCode::LdAsmJsFunc:
  1562. if (instr->GetSrc1()->IsIndirOpnd())
  1563. {
  1564. IR::IndirOpnd* indir = instr->GetSrc1()->AsIndirOpnd();
  1565. byte scale = m_lowererMD.GetDefaultIndirScale();
  1566. if (!indir->GetIndexOpnd())
  1567. {
  1568. // If we have a constant offset, we need to apply the scale now
  1569. int32 offset;
  1570. if (Int32Math::Shl(1, scale, &offset) || Int32Math::Mul(offset, indir->GetOffset(), &offset))
  1571. {
  1572. // The constant is too big to offset this array. Throw out of range.
  1573. // Todo:: throw a better error message for this scenario
  1574. GenerateRuntimeError(instr, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
  1575. }
  1576. indir->SetOffset(offset);
  1577. }
  1578. else
  1579. {
  1580. indir->SetScale(scale);
  1581. }
  1582. }
  1583. //fallthrough
  1584. case Js::OpCode::Ld_A:
  1585. case Js::OpCode::InitConst:
  1586. if (instr->IsJitProfilingInstr() && instr->AsJitProfilingInstr()->isBeginSwitch) {
  1587. LowerProfiledBeginSwitch(instr->AsJitProfilingInstr());
  1588. break;
  1589. }
  1590. m_lowererMD.ChangeToAssign(instr);
  1591. if (instr->HasBailOutInfo())
  1592. {
  1593. IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  1594. if (bailOutKind == IR::BailOutExpectingString)
  1595. {
  1596. this->LowerBailOnNotString(instr);
  1597. }
  1598. else
  1599. {
  1600. // Should not reach here as there are only 1 BailOutKind (BailOutExpectingString) currently associated with the Load Instr
  1601. Assert(false);
  1602. }
  1603. }
  1604. break;
  1605. case Js::OpCode::LdIndir:
  1606. Assert(instr->GetDst());
  1607. Assert(instr->GetDst()->IsRegOpnd());
  1608. Assert(instr->GetSrc1());
  1609. Assert(instr->GetSrc1()->IsIndirOpnd());
  1610. Assert(!instr->GetSrc2());
  1611. m_lowererMD.ChangeToAssign(instr);
  1612. break;
  1613. case Js::OpCode::FromVar:
  1614. Assert(instr->GetSrc1()->GetType() == TyVar);
  1615. if (instr->GetDst()->GetType() == TyInt32)
  1616. {
  1617. if (m_lowererMD.EmitLoadInt32(instr, !(instr->HasBailOutInfo() && (instr->GetBailOutKind() == IR::BailOutOnNotPrimitive))))
  1618. {
  1619. // Bail out instead of calling a helper
  1620. Assert(instr->GetBailOutKind() == IR::BailOutIntOnly || instr->GetBailOutKind() == IR::BailOutExpectingInteger);
  1621. Assert(!instr->GetSrc1()->GetValueType().IsInt()); // when we know it's an int, it should not have bailout info, to avoid generating a bailout path that will never be taken
  1622. instr->UnlinkSrc1();
  1623. instr->UnlinkDst();
  1624. GenerateBailOut(instr);
  1625. }
  1626. }
  1627. else if (instr->GetDst()->IsFloat())
  1628. {
  1629. if (m_func->GetJITFunctionBody()->IsAsmJsMode())
  1630. {
  1631. m_lowererMD.EmitLoadFloat(instr->GetDst(), instr->GetSrc1(), instr);
  1632. instr->Remove();
  1633. }
  1634. else
  1635. {
  1636. m_lowererMD.EmitLoadFloatFromNumber(instr->GetDst(), instr->GetSrc1(), instr);
  1637. }
  1638. }
  1639. else if (instr->GetDst()->IsInt64())
  1640. {
  1641. Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
  1642. GenerateRuntimeError(instr, WASMERR_InvalidTypeConversion);
  1643. instr->ReplaceSrc1(IR::Int64ConstOpnd::New(0, TyInt64, m_func));
  1644. LowererMD::ChangeToAssign(instr);
  1645. }
  1646. #ifdef ENABLE_WASM_SIMD
  1647. else if (instr->GetDst()->IsSimd128())
  1648. {
  1649. Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
  1650. GenerateRuntimeError(instr, WASMERR_InvalidTypeConversion);
  1651. instr->ReplaceSrc1(IR::Simd128ConstOpnd::New({ 0,0,0,0 }, instr->GetDst()->GetType(), m_func));
  1652. LowererMD::ChangeToAssign(instr);
  1653. }
  1654. #endif
  1655. else
  1656. {
  1657. Assert(UNREACHED);
  1658. }
  1659. break;
  1660. case Js::OpCode::ArgOut_A:
  1661. // I don't know if this can happen in asm.js mode, but if it can, we might want to handle differently
  1662. Assert(!m_func->GetJITFunctionBody()->IsAsmJsMode());
  1663. // fall-through
  1664. case Js::OpCode::ArgOut_A_Inline:
  1665. case Js::OpCode::ArgOut_A_Dynamic:
  1666. {
  1667. // ArgOut/StartCall are normally lowered by the lowering of the associated call instr.
  1668. // If the call becomes unreachable, we could end up with an orphan ArgOut or StartCall.
  1669. // Change the ArgOut into a store to the stack for bailouts
  1670. instr->FreeSrc2();
  1671. StackSym *argSym = instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
  1672. argSym->m_offset = this->m_func->StackAllocate(sizeof(Js::Var));
  1673. argSym->m_allocated = true;
  1674. argSym->m_isOrphanedArg = true;
  1675. this->m_lowererMD.ChangeToAssign(instr);
  1676. }
  1677. break;
  1678. case Js::OpCode::LoweredStartCall:
  1679. case Js::OpCode::StartCall:
  1680. // ArgOut/StartCall are normally lowered by the lowering of the associated call instr.
  1681. // If the call becomes unreachable, we could end up with an orphan ArgOut or StartCall.
  1682. // We'll just delete these StartCalls during peeps.
  1683. break;
  1684. case Js::OpCode::ToVar:
  1685. Assert(instr->GetDst()->GetType() == TyVar);
  1686. if (instr->GetSrc1()->GetType() == TyInt32)
  1687. {
  1688. m_lowererMD.EmitLoadVar(instr);
  1689. }
  1690. else if (instr->GetSrc1()->IsFloat())
  1691. {
  1692. Assert(instr->GetSrc1()->IsRegOpnd());
  1693. IR::RegOpnd* float64Opnd = instr->GetSrc1()->AsRegOpnd();
  1694. if (float64Opnd->IsFloat32())
  1695. {
  1696. IR::RegOpnd* float64ConvOpnd = IR::RegOpnd::New(TyFloat64, m_func);
  1697. m_lowererMD.EmitFloat32ToFloat64(float64ConvOpnd, float64Opnd, instr);
  1698. float64Opnd = float64ConvOpnd;
  1699. }
  1700. m_lowererMD.SaveDoubleToVar(
  1701. instr->GetDst()->AsRegOpnd(),
  1702. float64Opnd, instr, instr);
  1703. instr->Remove();
  1704. }
  1705. else if (instr->GetSrc1()->IsInt64() || instr->GetSrc1()->IsSimd128())
  1706. {
  1707. Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
  1708. GenerateRuntimeError(instr, WASMERR_InvalidTypeConversion);
  1709. instr->ReplaceSrc1(IR::IntConstOpnd::New(0, TyMachReg, m_func));
  1710. LowererMD::ChangeToAssign(instr);
  1711. }
  1712. else
  1713. {
  1714. Assert(UNREACHED);
  1715. }
  1716. break;
  1717. case Js::OpCode::Conv_Prim_Sat:
  1718. {
  1719. GenerateTruncWithCheck<true /* Saturate */>(instr);
  1720. break;
  1721. }
  1722. case Js::OpCode::Conv_Prim:
  1723. {
  1724. if (IR::Instr::FindSingleDefInstr(Js::OpCode::TrapIfTruncOverflow, instr->GetSrc1()))
  1725. {
  1726. GenerateTruncWithCheck<false /* Saturate */>(instr);
  1727. break;
  1728. }
  1729. if (instr->GetDst()->IsFloat())
  1730. {
  1731. if (instr->GetSrc1()->IsIntConstOpnd())
  1732. {
  1733. LoadFloatFromNonReg(instr->UnlinkSrc1(), instr->UnlinkDst(), instr);
  1734. }
  1735. else if (instr->GetSrc1()->IsInt32())
  1736. {
  1737. m_lowererMD.EmitIntToFloat(instr->GetDst(), instr->GetSrc1(), instr);
  1738. }
  1739. else if (instr->GetSrc1()->IsUInt32())
  1740. {
  1741. m_lowererMD.EmitUIntToFloat(instr->GetDst(), instr->GetSrc1(), instr);
  1742. }
  1743. else if (instr->GetSrc1()->IsInt64())
  1744. {
  1745. m_lowererMD.EmitInt64toFloat(instr->GetDst(), instr->GetSrc1(), instr);
  1746. }
  1747. else
  1748. {
  1749. Assert(instr->GetDst()->IsFloat64());
  1750. Assert(instr->GetSrc1()->IsFloat32());
  1751. m_lowererMD.EmitFloat32ToFloat64(instr->GetDst(), instr->GetSrc1(), instr);
  1752. }
  1753. }
  1754. else if (instr->GetDst()->IsInt64())
  1755. {
  1756. if (instr->GetSrc1()->IsInt32())
  1757. {
  1758. m_lowererMD.EmitIntToLong(instr->GetDst(), instr->GetSrc1(), instr);
  1759. }
  1760. else if (instr->GetSrc1()->IsUInt32())
  1761. {
  1762. m_lowererMD.EmitUIntToLong(instr->GetDst(), instr->GetSrc1(), instr);
  1763. }
  1764. else if (instr->GetSrc1()->IsInt64() && instr->GetSrc2())
  1765. {
  1766. m_lowererMD.EmitSignExtend(instr);
  1767. }
  1768. else
  1769. {
  1770. Assert(0);
  1771. }
  1772. }
  1773. else
  1774. {
  1775. Assert(instr->GetDst()->IsInt32());
  1776. if (instr->GetSrc1()->IsInt64())
  1777. {
  1778. m_lowererMD.EmitLongToInt(instr->GetDst(), instr->GetSrc1(), instr);
  1779. }
  1780. else if ((instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32()) && instr->GetSrc2())
  1781. {
  1782. m_lowererMD.EmitSignExtend(instr);
  1783. }
  1784. else
  1785. {
  1786. Assert(instr->GetSrc1()->IsFloat());
  1787. m_lowererMD.EmitFloatToInt(instr->GetDst(), instr->GetSrc1(), instr);
  1788. }
  1789. }
  1790. instr->Remove();
  1791. break;
  1792. }
  1793. case Js::OpCode::FunctionExit:
  1794. LowerFunctionExit(instr);
  1795. // The rest of Epilog generation happens after reg allocation
  1796. break;
  1797. case Js::OpCode::FunctionEntry:
  1798. LowerFunctionEntry(instr);
  1799. // The rest of Prolog generation happens after reg allocation
  1800. break;
  1801. case Js::OpCode::ArgIn_Rest:
  1802. case Js::OpCode::ArgIn_A:
  1803. if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody())
  1804. {
  1805. instrPrev = LowerArgInAsmJs(instr);
  1806. }
  1807. else
  1808. {
  1809. instrPrev = LowerArgIn(instr);
  1810. }
  1811. break;
  1812. case Js::OpCode::Label:
  1813. if (instr->AsLabelInstr()->m_isLoopTop)
  1814. {
  1815. if (this->outerMostLoopLabel == instr)
  1816. {
  1817. noFieldFastPath = !defaultDoFastPath;
  1818. noMathFastPath = !defaultDoFastPath;
  1819. this->outerMostLoopLabel = nullptr;
  1820. instr->AsLabelInstr()->GetLoop()->isProcessed = true;
  1821. }
  1822. this->m_func->MarkConstantAddressSyms(instr->AsLabelInstr()->GetLoop()->regAlloc.liveOnBackEdgeSyms);
  1823. instr->AsLabelInstr()->GetLoop()->regAlloc.liveOnBackEdgeSyms->Or(this->addToLiveOnBackEdgeSyms);
  1824. }
  1825. break;
  1826. case Js::OpCode::Br:
  1827. instr->m_opcode = LowererMD::MDUncondBranchOpcode;
  1828. break;
  1829. case Js::OpCode::BrFncEqApply:
  1830. LowerBrFncApply(instr, IR::HelperOp_OP_BrFncEqApply);
  1831. break;
  1832. case Js::OpCode::BrFncNeqApply:
  1833. LowerBrFncApply(instr, IR::HelperOp_OP_BrFncNeqApply);
  1834. break;
  1835. case Js::OpCode::BrHasSideEffects:
  1836. case Js::OpCode::BrNotHasSideEffects:
  1837. m_lowererMD.GenerateFastBrS(instr->AsBranchInstr());
  1838. break;
  1839. case Js::OpCode::BrFalse_A:
  1840. case Js::OpCode::BrTrue_A:
  1841. if (instr->GetSrc1()->IsFloat())
  1842. {
  1843. GenerateFastBrBool(instr->AsBranchInstr());
  1844. }
  1845. else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) ||
  1846. noMathFastPath ||
  1847. GenerateFastBrBool(instr->AsBranchInstr()))
  1848. {
  1849. this->LowerBrBMem(instr, IR::HelperConv_ToBoolean);
  1850. }
  1851. break;
  1852. case Js::OpCode::BrOnObject_A:
  1853. if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath)
  1854. {
  1855. this->LowerBrOnObject(instr, IR::HelperOp_IsObject);
  1856. }
  1857. else
  1858. {
  1859. GenerateFastBrOnObject(instr);
  1860. }
  1861. break;
  1862. case Js::OpCode::BrOnBaseConstructorKind:
  1863. this->LowerBrOnClassConstructor(instr, IR::HelperOp_IsBaseConstructorKind);
  1864. break;
  1865. case Js::OpCode::BrOnClassConstructor:
  1866. this->LowerBrOnClassConstructor(instr, IR::HelperOp_IsClassConstructor);
  1867. break;
  1868. case Js::OpCode::BrAddr_A:
  1869. case Js::OpCode::BrNotAddr_A:
  1870. case Js::OpCode::BrNotNull_A:
  1871. m_lowererMD.LowerCondBranch(instr);
  1872. break;
  1873. case Js::OpCode::BrEq_A:
  1874. case Js::OpCode::BrNotNeq_A:
  1875. instrPrev = LowerEqualityBranch(instr, IR::HelperOp_Equal);
  1876. break;
  1877. case Js::OpCode::BrGe_A:
  1878. case Js::OpCode::BrNotGe_A:
  1879. if (instr->GetSrc1()->IsFloat())
  1880. {
  1881. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1882. m_lowererMD.LowerToFloat(instr);
  1883. }
  1884. else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
  1885. {
  1886. this->LowerBrCMem(instr, IR::HelperOp_GreaterEqual, false, false /*isHelper*/);
  1887. }
  1888. else
  1889. {
  1890. this->LowerBrCMem(instr, IR::HelperOp_GreaterEqual, true, false /*isHelper*/);
  1891. }
  1892. break;
  1893. case Js::OpCode::BrGt_A:
  1894. case Js::OpCode::BrNotGt_A:
  1895. if (instr->GetSrc1()->IsFloat())
  1896. {
  1897. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1898. m_lowererMD.LowerToFloat(instr);
  1899. }
  1900. else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
  1901. {
  1902. this->LowerBrCMem(instr, IR::HelperOp_Greater, false, false /*isHelper*/);
  1903. }
  1904. else
  1905. {
  1906. this->LowerBrCMem(instr, IR::HelperOp_Greater, true, false /*isHelper*/);
  1907. }
  1908. break;
  1909. case Js::OpCode::BrLt_A:
  1910. case Js::OpCode::BrNotLt_A:
  1911. if (instr->GetSrc1()->IsFloat())
  1912. {
  1913. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1914. m_lowererMD.LowerToFloat(instr);
  1915. }
  1916. else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
  1917. {
  1918. this->LowerBrCMem(instr, IR::HelperOp_Less, false, false /*isHelper*/);
  1919. }
  1920. else
  1921. {
  1922. this->LowerBrCMem(instr, IR::HelperOp_Less, true, false /*isHelper*/);
  1923. }
  1924. break;
  1925. case Js::OpCode::BrLe_A:
  1926. case Js::OpCode::BrNotLe_A:
  1927. if (instr->GetSrc1()->IsFloat())
  1928. {
  1929. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1930. m_lowererMD.LowerToFloat(instr);
  1931. }
  1932. else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
  1933. {
  1934. this->LowerBrCMem(instr, IR::HelperOp_LessEqual, false, false /*isHelper*/);
  1935. }
  1936. else
  1937. {
  1938. this->LowerBrCMem(instr, IR::HelperOp_LessEqual, true, false /*isHelper*/);
  1939. }
  1940. break;
  1941. case Js::OpCode::BrNeq_A:
  1942. case Js::OpCode::BrNotEq_A:
  1943. instrPrev = LowerEqualityBranch(instr, IR::HelperOp_NotEqual);
  1944. break;
  1945. case Js::OpCode::MultiBr:
  1946. {
  1947. IR::MultiBranchInstr * multiBranchInstr = instr->AsBranchInstr()->AsMultiBrInstr();
  1948. switch (multiBranchInstr->m_kind)
  1949. {
  1950. case IR::MultiBranchInstr::StrDictionary:
  1951. this->GenerateSwitchStringLookup(instr);
  1952. break;
  1953. case IR::MultiBranchInstr::SingleCharStrJumpTable:
  1954. this->GenerateSingleCharStrJumpTableLookup(instr);
  1955. m_func->m_totalJumpTableSizeInBytesForSwitchStatements += (multiBranchInstr->GetBranchJumpTable()->tableSize * sizeof(void*));
  1956. break;
  1957. case IR::MultiBranchInstr::IntJumpTable:
  1958. this->LowerMultiBr(instr);
  1959. m_func->m_totalJumpTableSizeInBytesForSwitchStatements += (multiBranchInstr->GetBranchJumpTable()->tableSize * sizeof(void*));
  1960. break;
  1961. default:
  1962. Assert(false);
  1963. }
  1964. break;
  1965. }
  1966. case Js::OpCode::BrSrEq_A:
  1967. case Js::OpCode::BrSrNotNeq_A:
  1968. instrPrev = LowerEqualityBranch(instr, IR::HelperOp_StrictEqual);
  1969. break;
  1970. case Js::OpCode::BrSrNeq_A:
  1971. case Js::OpCode::BrSrNotEq_A:
  1972. instrPrev = LowerEqualityBranch(instr, IR::HelperOp_NotStrictEqual);
  1973. break;
  1974. case Js::OpCode::BrOnEmpty:
  1975. case Js::OpCode::BrOnNotEmpty:
  1976. if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func))
  1977. {
  1978. this->GenerateFastBrBReturn(instr);
  1979. this->LowerBrBReturn(instr, IR::HelperOp_OP_BrOnEmpty, true);
  1980. }
  1981. else
  1982. {
  1983. this->LowerBrBReturn(instr, IR::HelperOp_OP_BrOnEmpty, false);
  1984. }
  1985. break;
  1986. case Js::OpCode::BrOnHasProperty:
  1987. case Js::OpCode::BrOnNoProperty:
  1988. this->LowerBrProperty(instr, IR::HelperOp_HasProperty);
  1989. break;
  1990. case Js::OpCode::BrOnException:
  1991. Assert(!this->m_func->DoGlobOpt());
  1992. instr->Remove();
  1993. break;
  1994. case Js::OpCode::BrOnNoException:
  1995. instr->m_opcode = LowererMD::MDUncondBranchOpcode;
  1996. break;
  1997. case Js::OpCode::StSlot:
  1998. {
  1999. PropertySym *propertySym = instr->GetDst()->AsSymOpnd()->m_sym->AsPropertySym();
  2000. instrPrev = AddSlotArrayCheck(propertySym, instr);
  2001. this->LowerStSlot(instr);
  2002. break;
  2003. }
  2004. case Js::OpCode::StSlotChkUndecl:
  2005. {
  2006. PropertySym *propertySym = instr->GetDst()->AsSymOpnd()->m_sym->AsPropertySym();
  2007. instrPrev = AddSlotArrayCheck(propertySym, instr);
  2008. this->LowerStSlotChkUndecl(instr);
  2009. break;
  2010. }
  2011. case Js::OpCode::ProfiledLoopStart:
  2012. {
  2013. Assert(m_func->DoSimpleJitDynamicProfile());
  2014. Assert(instr->IsJitProfilingInstr());
  2015. // Check for the helper instr from IRBuilding (it won't be there if there are no LoopEnds due to an infinite loop)
  2016. auto prev = instr->m_prev;
  2017. if (prev->IsJitProfilingInstr() && prev->AsJitProfilingInstr()->isLoopHelper)
  2018. {
  2019. auto saveOpnd = prev->UnlinkDst();
  2020. instrPrev = prev->m_prev;
  2021. prev->Remove();
  2022. const auto starFlag = GetImplicitCallFlagsOpnd();
  2023. IR::AutoReuseOpnd a(starFlag, m_func);
  2024. this->InsertMove(saveOpnd, starFlag, instr);
  2025. this->InsertMove(starFlag, CreateClearImplicitCallFlagsOpnd(), instr);
  2026. }
  2027. else
  2028. {
  2029. #if DBG
  2030. // Double check that we indeed do not have a LoopEnd that is part of the same loop for the rest of the function
  2031. auto cur = instr;
  2032. auto loopNumber = instr->AsJitProfilingInstr()->loopNumber;
  2033. while (cur)
  2034. {
  2035. Assert(cur->m_opcode != Js::OpCode::ProfiledLoopEnd || cur->IsJitProfilingInstr() && cur->AsJitProfilingInstr()->loopNumber != loopNumber);
  2036. cur = cur->m_next;
  2037. }
  2038. #endif
  2039. }
  2040. // If we turned off fulljit, there's no reason to do this.
  2041. if (PHASE_OFF(Js::FullJitPhase, m_func))
  2042. {
  2043. instr->Remove();
  2044. }
  2045. else
  2046. {
  2047. Assert(instr->GetDst());
  2048. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleGetScheduledEntryPoint, m_func));
  2049. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateUint32Opnd(instr->AsJitProfilingInstr()->loopNumber, m_func));
  2050. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
  2051. this->m_lowererMD.LowerCall(instr, 0);
  2052. }
  2053. break;
  2054. }
  2055. case Js::OpCode::ProfiledLoopBodyStart:
  2056. {
  2057. Assert(m_func->DoSimpleJitDynamicProfile());
  2058. const auto loopNum = instr->AsJitProfilingInstr()->loopNumber;
  2059. Assert(loopNum < m_func->GetJITFunctionBody()->GetLoopCount());
  2060. auto entryPointOpnd = instr->UnlinkSrc1();
  2061. auto dobailout = instr->UnlinkDst();
  2062. const auto dobailoutType = TyUint8;
  2063. Assert(dobailout->GetType() == TyUint8 && sizeof(decltype(Js::SimpleJitHelpers::IsLoopCodeGenDone(nullptr))) == 1);
  2064. m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(0, TyUint32, m_func)); // zero indicates that we do not want to add flags back in
  2065. m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(loopNum, TyUint32, m_func));
  2066. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
  2067. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleRecordLoopImplicitCallFlags, m_func));
  2068. m_lowererMD.LowerCall(instr, 0);
  2069. // Outline of JITed code:
  2070. //
  2071. // LoopStart:
  2072. // entryPoint = GetScheduledEntryPoint(framePtr, loopNum)
  2073. // LoopBodyStart:
  2074. // uint8 dobailout;
  2075. // if (entryPoint) {
  2076. // dobailout = IsLoopCodeGenDone(entryPoint)
  2077. // } else {
  2078. // dobailout = ++interpretCount >= threshold
  2079. // }
  2080. // // already exists from IRBuilding:
  2081. // if (dobailout) {
  2082. // Bailout
  2083. // }
  2084. if (PHASE_OFF(Js::FullJitPhase, m_func) || !m_func->GetJITFunctionBody()->DoJITLoopBody())
  2085. {
  2086. // If we're not doing fulljit, we've turned off JitLoopBodies, or if we don't have loop headers allocated (the function has a Try, etc)
  2087. // just move false to dobailout
  2088. this->InsertMove(dobailout, IR::IntConstOpnd::New(0, dobailoutType, m_func, true), instr->m_next);
  2089. }
  2090. else if (m_func->GetWorkItem()->GetJITTimeInfo()->ForceJITLoopBody())
  2091. {
  2092. // If we're forcing jit loop bodies, move true to dobailout
  2093. this->InsertMove(dobailout, IR::IntConstOpnd::New(1, dobailoutType, m_func, true), instr->m_next);
  2094. }
  2095. else
  2096. {
  2097. // Put in the labels
  2098. auto entryPointIsNull = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  2099. auto checkDoBailout = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  2100. instr->InsertAfter(checkDoBailout);
  2101. instr->InsertAfter(entryPointIsNull);
  2102. this->InsertCompareBranch(entryPointOpnd, IR::AddrOpnd::New(nullptr, IR::AddrOpndKindDynamicMisc, m_func), Js::OpCode::BrEq_A, false, entryPointIsNull, instr->m_next);
  2103. // If the entry point is not null
  2104. auto isCodeGenDone = IR::Instr::New(Js::OpCode::Call, dobailout, IR::HelperCallOpnd::New(IR::HelperSimpleIsLoopCodeGenDone, m_func), m_func);
  2105. entryPointIsNull->InsertBefore(isCodeGenDone);
  2106. m_lowererMD.LoadHelperArgument(isCodeGenDone, entryPointOpnd);
  2107. m_lowererMD.LowerCall(isCodeGenDone, 0);
  2108. this->InsertBranch(LowererMD::MDUncondBranchOpcode, true, checkDoBailout, entryPointIsNull);
  2109. const auto type = TyUint32;
  2110. auto countReg = IR::RegOpnd::New(type, m_func);
  2111. auto countAddr = IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetLoopHeaderAddr(loopNum) + Js::LoopHeader::GetOffsetOfInterpretCount(), type, m_func);
  2112. IR::AutoReuseOpnd a(countReg, m_func), b(countAddr, m_func);
  2113. this->InsertAdd(false, countReg, countAddr, IR::IntConstOpnd::New(1, type, m_func, true), checkDoBailout);
  2114. this->InsertMove(countAddr, countReg, checkDoBailout);
  2115. this->InsertMove(dobailout, IR::IntConstOpnd::New(0, dobailoutType, m_func, true), checkDoBailout);
  2116. this->InsertCompareBranch(countReg, IR::IntConstOpnd::New(m_func->GetJITFunctionBody()->GetLoopHeaderData(loopNum)->interpretCount, type, m_func), Js::OpCode::BrLt_A, checkDoBailout, checkDoBailout);
  2117. this->InsertMove(dobailout, IR::IntConstOpnd::New(1, dobailoutType, m_func, true), checkDoBailout);
  2118. // fallthrough
  2119. // Label checkDoBailout (inserted above)
  2120. }
  2121. }
  2122. break;
  2123. case Js::OpCode::ProfiledLoopEnd:
  2124. {
  2125. Assert(m_func->DoSimpleJitDynamicProfile());
  2126. // This is set up in IRBuilding
  2127. Assert(instr->GetSrc1());
  2128. IR::Opnd* savedFlags = instr->UnlinkSrc1();
  2129. m_lowererMD.LoadHelperArgument(instr, savedFlags);
  2130. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateUint32Opnd(instr->AsJitProfilingInstr()->loopNumber, m_func));
  2131. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
  2132. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleRecordLoopImplicitCallFlags, m_func));
  2133. m_lowererMD.LowerCall(instr, 0);
  2134. }
  2135. break;
  2136. case Js::OpCode::InitLoopBodyCount:
  2137. Assert(this->m_func->IsLoopBody());
  2138. instr->SetSrc1(IR::IntConstOpnd::New(0, TyUint32, this->m_func));
  2139. this->m_lowererMD.ChangeToAssign(instr);
  2140. break;
  2141. case Js::OpCode::StLoopBodyCount:
  2142. Assert(this->m_func->IsLoopBody());
  2143. this->LowerStLoopBodyCount(instr);
  2144. break;
  2145. case Js::OpCode::IncrLoopBodyCount:
  2146. {
  2147. Assert(this->m_func->IsLoopBody());
  2148. instr->m_opcode = Js::OpCode::Add_I4;
  2149. instr->SetSrc2(IR::IntConstOpnd::New(1, TyUint32, this->m_func));
  2150. this->m_lowererMD.EmitInt4Instr(instr);
  2151. // Update the jittedLoopIterations field on the entryPointInfo
  2152. IR::MemRefOpnd *iterationsAddressOpnd = IR::MemRefOpnd::New(this->m_func->GetJittedLoopIterationsSinceLastBailoutAddress(), TyUint32, this->m_func);
  2153. InsertMove(iterationsAddressOpnd, instr->GetDst(), instr);
  2154. break;
  2155. }
  2156. #if !FLOATVAR
  2157. case Js::OpCode::StSlotBoxTemp:
  2158. this->LowerStSlotBoxTemp(instr);
  2159. break;
  2160. #endif
  2161. case Js::OpCode::LdSlot:
  2162. {
  2163. PropertySym *propertySym = instr->GetSrc1()->AsSymOpnd()->m_sym->AsPropertySym();
  2164. instrPrev = AddSlotArrayCheck(propertySym, instr);
  2165. }
  2166. case Js::OpCode::LdSlotArr:
  2167. {
  2168. Js::ProfileId profileId;
  2169. IR::Instr *profileBeforeInstr;
  2170. if (instr->IsJitProfilingInstr())
  2171. {
  2172. profileId = instr->AsJitProfilingInstr()->profileId;
  2173. Assert(profileId != Js::Constants::NoProfileId);
  2174. profileBeforeInstr = instr->m_next;
  2175. }
  2176. else
  2177. {
  2178. profileId = Js::Constants::NoProfileId;
  2179. profileBeforeInstr = nullptr;
  2180. }
  2181. this->LowerLdSlot(instr);
  2182. if (profileId != Js::Constants::NoProfileId)
  2183. {
  2184. LowerProfileLdSlot(instr->GetDst(), instr->m_func, profileId, profileBeforeInstr);
  2185. }
  2186. break;
  2187. }
  2188. case Js::OpCode::ChkUndecl:
  2189. instrPrev = this->LowerChkUndecl(instr);
  2190. break;
  2191. case Js::OpCode::LdArrHead:
  2192. this->LowerLdArrHead(instr);
  2193. break;
  2194. case Js::OpCode::StElemC:
  2195. case Js::OpCode::StArrSegElemC:
  2196. this->LowerStElemC(instr);
  2197. break;
  2198. case Js::OpCode::LdEnv:
  2199. instrPrev = this->LowerLdEnv(instr);
  2200. break;
  2201. case Js::OpCode::LdAsmJsEnv:
  2202. instrPrev = this->LowerLdAsmJsEnv(instr);
  2203. break;
  2204. case Js::OpCode::LdElemUndef:
  2205. this->LowerLdElemUndef(instr);
  2206. break;
  2207. case Js::OpCode::LdElemUndefScoped:
  2208. this->LowerElementUndefinedScopedMem(instr, IR::HelperOp_LdElemUndefScoped);
  2209. break;
  2210. case Js::OpCode::EnsureNoRootFld:
  2211. this->LowerElementUndefined(instr, IR::HelperOp_EnsureNoRootProperty);
  2212. break;
  2213. case Js::OpCode::EnsureNoRootRedeclFld:
  2214. this->LowerElementUndefined(instr, IR::HelperOp_EnsureNoRootRedeclProperty);
  2215. break;
  2216. case Js::OpCode::EnsureCanDeclGloFunc:
  2217. this->LowerElementUndefined(instr, IR::HelperOp_EnsureCanDeclGloFunc);
  2218. break;
  2219. case Js::OpCode::ScopedEnsureNoRedeclFld:
  2220. this->LowerElementUndefinedScoped(instr, IR::HelperOp_EnsureNoRedeclPropertyScoped);
  2221. break;
  2222. case Js::OpCode::LdFuncExpr:
  2223. // src = function Expression
  2224. LoadFuncExpression(instr);
  2225. this->GenerateGetCurrentFunctionObject(instr);
  2226. break;
  2227. case Js::OpCode::LdNewTarget:
  2228. this->GenerateLoadNewTarget(instr);
  2229. break;
  2230. case Js::OpCode::ChkNewCallFlag:
  2231. this->GenerateCheckForCallFlagNew(instr);
  2232. break;
  2233. case Js::OpCode::StFuncExpr:
  2234. // object.propid = src
  2235. LowerStFld(instr, IR::HelperOp_StFunctionExpression, IR::HelperOp_StFunctionExpression, false);
  2236. break;
  2237. case Js::OpCode::InitLetFld:
  2238. case Js::OpCode::InitRootLetFld:
  2239. LowerStFld(instr, IR::HelperOp_InitLetFld, IR::HelperOp_InitLetFld, false);
  2240. break;
  2241. case Js::OpCode::InitConstFld:
  2242. case Js::OpCode::InitRootConstFld:
  2243. LowerStFld(instr, IR::HelperOp_InitConstFld, IR::HelperOp_InitConstFld, false);
  2244. break;
  2245. case Js::OpCode::InitUndeclRootLetFld:
  2246. LowerElementUndefined(instr, IR::HelperOp_InitUndeclRootLetFld);
  2247. break;
  2248. case Js::OpCode::InitUndeclRootConstFld:
  2249. LowerElementUndefined(instr, IR::HelperOp_InitUndeclRootConstFld);
  2250. break;
  2251. case Js::OpCode::InitUndeclConsoleLetFld:
  2252. LowerElementUndefined(instr, IR::HelperOp_InitUndeclConsoleLetFld);
  2253. break;
  2254. case Js::OpCode::InitUndeclConsoleConstFld:
  2255. LowerElementUndefined(instr, IR::HelperOp_InitUndeclConsoleConstFld);
  2256. break;
  2257. case Js::OpCode::InitClassMember:
  2258. LowerStFld(instr, IR::HelperOp_InitClassMember, IR::HelperOp_InitClassMember, false);
  2259. break;
  2260. case Js::OpCode::InitClassMemberComputedName:
  2261. instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberComputedName);
  2262. break;
  2263. case Js::OpCode::InitClassMemberGetComputedName:
  2264. instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberGetComputedName);
  2265. break;
  2266. case Js::OpCode::InitClassMemberSetComputedName:
  2267. instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberSetComputedName);
  2268. break;
  2269. case Js::OpCode::InitClassMemberGet:
  2270. instrPrev = this->LowerStFld(instr, IR::HelperOp_InitClassMemberGet, IR::HelperOp_InitClassMemberGet, false);
  2271. break;
  2272. case Js::OpCode::InitClassMemberSet:
  2273. instrPrev = this->LowerStFld(instr, IR::HelperOp_InitClassMemberSet, IR::HelperOp_InitClassMemberSet, false);
  2274. break;
  2275. case Js::OpCode::NewStackFrameDisplay:
  2276. this->LowerLdFrameDisplay(instr, m_func->DoStackFrameDisplay());
  2277. break;
  2278. case Js::OpCode::LdFrameDisplay:
  2279. this->LowerLdFrameDisplay(instr, false);
  2280. break;
  2281. case Js::OpCode::LdInnerFrameDisplay:
  2282. this->LowerLdInnerFrameDisplay(instr);
  2283. break;
  2284. case Js::OpCode::Throw:
  2285. case Js::OpCode::InlineThrow:
  2286. case Js::OpCode::EHThrow:
  2287. this->LowerUnaryHelperMem(instr, IR::HelperOp_Throw);
  2288. break;
  2289. case Js::OpCode::TryCatch:
  2290. instrPrev = this->LowerTry(instr, true /*try-catch*/);
  2291. break;
  2292. case Js::OpCode::TryFinally:
  2293. instrPrev = this->LowerTry(instr, false /*try-finally*/);
  2294. break;
  2295. case Js::OpCode::Catch:
  2296. instrPrev = this->LowerCatch(instr);
  2297. break;
  2298. case Js::OpCode::Finally:
  2299. instr->Remove();
  2300. break;
  2301. case Js::OpCode::LeaveNull:
  2302. if (this->m_func->DoOptimizeTry() || (this->m_func->IsSimpleJit() && this->m_func->hasBailout))
  2303. {
  2304. instr->Remove();
  2305. }
  2306. else
  2307. {
  2308. instrPrev = m_lowererMD.LowerLeaveNull(instr);
  2309. }
  2310. break;
  2311. case Js::OpCode::Leave:
  2312. if (this->m_func->HasTry() && this->m_func->DoOptimizeTry())
  2313. {
  2314. // Required in Register Allocator to mark region boundaries
  2315. break;
  2316. }
  2317. instrPrev = this->LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), false /*fromFinalLower*/, instr->AsBranchInstr()->m_isOrphanedLeave);
  2318. break;
  2319. case Js::OpCode::BailOnException:
  2320. instrPrev = this->LowerBailOnException(instr);
  2321. break;
  2322. case Js::OpCode::BailOnEarlyExit:
  2323. instrPrev = this->LowerBailOnEarlyExit(instr);
  2324. break;
  2325. case Js::OpCode::RuntimeTypeError:
  2326. case Js::OpCode::InlineRuntimeTypeError:
  2327. this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeTypeError);
  2328. break;
  2329. case Js::OpCode::RuntimeReferenceError:
  2330. case Js::OpCode::InlineRuntimeReferenceError:
  2331. this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeReferenceError);
  2332. break;
  2333. case Js::OpCode::Break:
  2334. // Inline breakpoint: for now do nothing.
  2335. break;
  2336. case Js::OpCode::Nop:
  2337. // This may need support for debugging the JIT, but for now just remove the instruction.
  2338. instr->Remove();
  2339. break;
  2340. case Js::OpCode::Unused:
  2341. // Currently Unused is used with ScopedLdInst to keep the second dst alive, but we don't need to lower it.
  2342. instr->Remove();
  2343. break;
  2344. case Js::OpCode::StatementBoundary:
  2345. // This instruction is merely to help convey source info through the IR
  2346. // and eventually generate the nativeOffset maps.
  2347. #if DBG_DUMP && DBG
  2348. // If we have a JITStatementBreakpoint, then we should break on this statement
  2349. {
  2350. uint32 statementIndex = instr->AsPragmaInstr()->m_statementIndex;
  2351. if (Js::Configuration::Global.flags.StatementDebugBreak.Contains(instr->m_func->GetSourceContextId(), instr->m_func->GetLocalFunctionId(), statementIndex))
  2352. {
  2353. IR::Instr* tempinstr = instr;
  2354. Assert(tempinstr != nullptr);
  2355. // go past any labels, and then add a debug breakpoint
  2356. while (tempinstr->m_next != nullptr && tempinstr->m_next->m_opcode == Js::OpCode::Label)
  2357. {
  2358. tempinstr = tempinstr->m_next;
  2359. }
  2360. this->m_lowererMD.GenerateDebugBreak(tempinstr);
  2361. }
  2362. }
  2363. #endif
  2364. break;
  2365. case Js::OpCode::BailOnNotPolymorphicInlinee:
  2366. instrPrev = LowerBailOnNotPolymorphicInlinee(instr);
  2367. break;
  2368. case Js::OpCode::BailOnNoSimdTypeSpec:
  2369. case Js::OpCode::BailOnNoProfile:
  2370. this->GenerateBailOut(instr, nullptr, nullptr);
  2371. break;
  2372. case Js::OpCode::BailOnNotSpreadable:
  2373. instrPrev = this->LowerBailOnNotSpreadable(instr);
  2374. break;
  2375. case Js::OpCode::BailOnNotStackArgs:
  2376. instrPrev = this->LowerBailOnNotStackArgs(instr);
  2377. break;
  2378. case Js::OpCode::BailOnEqual:
  2379. case Js::OpCode::BailOnNotEqual:
  2380. instrPrev = this->LowerBailOnEqualOrNotEqual(instr);
  2381. break;
  2382. case Js::OpCode::BailOnNegative:
  2383. LowerBailOnNegative(instr);
  2384. break;
  2385. #ifdef ENABLE_SCRIPT_DEBUGGING
  2386. case Js::OpCode::BailForDebugger:
  2387. instrPrev = this->LowerBailForDebugger(instr);
  2388. break;
  2389. #endif
  2390. case Js::OpCode::BailOnNotObject:
  2391. instrPrev = this->LowerBailOnNotObject(instr);
  2392. break;
  2393. case Js::OpCode::CheckIsFuncObj:
  2394. instrPrev = this->LowerCheckIsFuncObj(instr);
  2395. break;
  2396. case Js::OpCode::CheckFuncInfo:
  2397. instrPrev = this->LowerCheckIsFuncObj(instr, true);
  2398. break;
  2399. case Js::OpCode::BailOnNotBuiltIn:
  2400. instrPrev = this->LowerBailOnNotBuiltIn(instr);
  2401. break;
  2402. case Js::OpCode::BailOnNotArray:
  2403. {
  2404. IR::Instr *bailOnNotArray = nullptr, *bailOnMissingValue = nullptr;
  2405. SplitBailOnNotArray(instr, &bailOnNotArray, &bailOnMissingValue);
  2406. IR::RegOpnd *const arrayOpnd = LowerBailOnNotArray(bailOnNotArray);
  2407. if (bailOnMissingValue)
  2408. {
  2409. LowerBailOnMissingValue(bailOnMissingValue, arrayOpnd);
  2410. }
  2411. break;
  2412. }
  2413. case Js::OpCode::BoundCheck:
  2414. case Js::OpCode::UnsignedBoundCheck:
  2415. LowerBoundCheck(instr);
  2416. break;
  2417. case Js::OpCode::BailTarget:
  2418. instrPrev = this->LowerBailTarget(instr);
  2419. break;
  2420. case Js::OpCode::InlineeStart:
  2421. this->LowerInlineeStart(instr);
  2422. break;
  2423. case Js::OpCode::EndCallForPolymorphicInlinee:
  2424. instr->Remove();
  2425. break;
  2426. case Js::OpCode::InlineeEnd:
  2427. this->LowerInlineeEnd(instr);
  2428. break;
  2429. case Js::OpCode::InlineBuiltInEnd:
  2430. case Js::OpCode::InlineNonTrackingBuiltInEnd:
  2431. this->LowerInlineBuiltIn(instr);
  2432. break;
  2433. case Js::OpCode::ExtendArg_A:
  2434. if (instr->GetSrc1()->IsRegOpnd())
  2435. {
  2436. IR::RegOpnd *src1 = instr->GetSrc1()->AsRegOpnd();
  2437. this->addToLiveOnBackEdgeSyms->Clear(src1->m_sym->m_id);
  2438. }
  2439. instr->Remove();
  2440. break;
  2441. case Js::OpCode::InlineBuiltInStart:
  2442. case Js::OpCode::BytecodeArgOutUse:
  2443. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  2444. instr->Remove();
  2445. break;
  2446. case Js::OpCode::DeadBrEqual:
  2447. this->LowerBinaryHelperMem(instr, IR::HelperOp_Equal);
  2448. break;
  2449. case Js::OpCode::DeadBrSrEqual:
  2450. this->LowerBinaryHelperMem(instr, IR::HelperOp_StrictEqual);
  2451. break;
  2452. case Js::OpCode::DeadBrRelational:
  2453. this->LowerBinaryHelperMem(instr, IR::HelperOp_Greater);
  2454. break;
  2455. case Js::OpCode::DeadBrOnHasProperty:
  2456. this->LowerUnaryHelperMem(instr, IR::HelperOp_HasProperty);
  2457. break;
  2458. case Js::OpCode::DeletedNonHelperBranch:
  2459. break;
  2460. case Js::OpCode::InitClass:
  2461. instrPrev = this->LowerInitClass(instr);
  2462. break;
  2463. case Js::OpCode::NewConcatStrMulti:
  2464. this->LowerNewConcatStrMulti(instr);
  2465. break;
  2466. case Js::OpCode::NewConcatStrMultiBE:
  2467. this->LowerNewConcatStrMultiBE(instr);
  2468. break;
  2469. case Js::OpCode::SetConcatStrMultiItem:
  2470. this->LowerSetConcatStrMultiItem(instr);
  2471. break;
  2472. case Js::OpCode::SetConcatStrMultiItemBE:
  2473. Assert(instr->GetSrc1()->IsRegOpnd());
  2474. this->addToLiveOnBackEdgeSyms->Clear(instr->GetSrc1()->GetStackSym()->m_id);
  2475. // code corresponding to it should already have been generated while lowering NewConcatStrMultiBE
  2476. instr->Remove();
  2477. break;
  2478. case Js::OpCode::Conv_Str:
  2479. this->LowerConvStr(instr);
  2480. break;
  2481. case Js::OpCode::Coerce_Str:
  2482. this->LowerCoerseStr(instr);
  2483. break;
  2484. case Js::OpCode::Coerce_StrOrRegex:
  2485. this->LowerCoerseStrOrRegex(instr);
  2486. break;
  2487. case Js::OpCode::Coerce_Regex:
  2488. this->LowerCoerseRegex(instr);
  2489. break;
  2490. case Js::OpCode::Conv_PrimStr:
  2491. this->LowerConvPrimStr(instr);
  2492. break;
  2493. case Js::OpCode::ClearAttributes:
  2494. this->LowerBinaryHelper(instr, IR::HelperOP_ClearAttributes);
  2495. break;
  2496. case Js::OpCode::SpreadArrayLiteral:
  2497. this->LowerSpreadArrayLiteral(instr);
  2498. break;
  2499. case Js::OpCode::CallIExtended:
  2500. {
  2501. // Currently, the only use for CallIExtended is a call that uses spread.
  2502. Assert(IsSpreadCall(instr));
  2503. instrPrev = this->LowerSpreadCall(instr, Js::CallFlags_None);
  2504. break;
  2505. }
  2506. case Js::OpCode::CallIExtendedNew:
  2507. {
  2508. // Currently, the only use for CallIExtended is a call that uses spread.
  2509. Assert(IsSpreadCall(instr));
  2510. instrPrev = this->LowerSpreadCall(instr, Js::CallFlags_New);
  2511. break;
  2512. }
  2513. case Js::OpCode::CallIExtendedNewTargetNew:
  2514. {
  2515. // Currently, the only use for CallIExtended is a call that uses spread.
  2516. Assert(IsSpreadCall(instr));
  2517. instrPrev = this->LowerSpreadCall(instr, (Js::CallFlags)(Js::CallFlags_New | Js::CallFlags_ExtraArg | Js::CallFlags_NewTarget));
  2518. break;
  2519. }
  2520. case Js::OpCode::LdSpreadIndices:
  2521. instr->Remove();
  2522. break;
  2523. case Js::OpCode::LdHomeObj:
  2524. this->GenerateLdHomeObj(instr);
  2525. break;
  2526. case Js::OpCode::LdHomeObjProto:
  2527. this->GenerateLdHomeObjProto(instr);
  2528. break;
  2529. case Js::OpCode::LdFuncObj:
  2530. this->GenerateLdFuncObj(instr);
  2531. break;
  2532. case Js::OpCode::LdFuncObjProto:
  2533. this->GenerateLdFuncObjProto(instr);
  2534. break;
  2535. case Js::OpCode::ImportCall:
  2536. {
  2537. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  2538. IR::Opnd *functionObjOpnd = nullptr;
  2539. m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  2540. LoadScriptContext(instr);
  2541. m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  2542. m_lowererMD.LoadHelperArgument(instr, functionObjOpnd);
  2543. m_lowererMD.ChangeToHelperCall(instr, IR::HelperImportCall);
  2544. break;
  2545. }
  2546. case Js::OpCode::SetComputedNameVar:
  2547. {
  2548. IR::Opnd *src2Opnd = instr->UnlinkSrc2();
  2549. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  2550. m_lowererMD.LoadHelperArgument(instr, src2Opnd);
  2551. m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  2552. m_lowererMD.ChangeToHelperCall(instr, IR::HelperSetComputedNameVar);
  2553. break;
  2554. }
  2555. case Js::OpCode::InlineeMetaArg:
  2556. {
  2557. m_lowererMD.ChangeToAssign(instr);
  2558. break;
  2559. }
  2560. case Js::OpCode::Yield:
  2561. {
  2562. instr->FreeSrc1(); // Source is not actually used by the backend other than to calculate lifetime
  2563. IR::Opnd* dstOpnd = instr->UnlinkDst();
  2564. // prm2 is the ResumeYieldData pointer per calling convention established in JavascriptGenerator::CallGenerator
  2565. // This is the value the bytecode expects to be in the dst register of the Yield opcode after resumption.
  2566. // Load it here after the bail-in.
  2567. StackSym *resumeYieldDataSym = StackSym::NewImplicitParamSym(4, m_func);
  2568. m_func->SetArgOffset(resumeYieldDataSym, (LowererMD::GetFormalParamOffset() + 1) * MachPtr);
  2569. IR::SymOpnd * resumeYieldDataOpnd = IR::SymOpnd::New(resumeYieldDataSym, TyMachPtr, m_func);
  2570. AssertMsg(instr->m_next->IsLabelInstr(), "Expect the resume label to immediately follow Yield instruction");
  2571. InsertMove(dstOpnd, resumeYieldDataOpnd, instr->m_next->m_next);
  2572. GenerateBailOut(instr);
  2573. break;
  2574. }
  2575. case Js::OpCode::ResumeYield:
  2576. case Js::OpCode::ResumeYieldStar:
  2577. {
  2578. IR::Opnd *srcOpnd1 = instr->UnlinkSrc1();
  2579. IR::Opnd *srcOpnd2 = instr->m_opcode == Js::OpCode::ResumeYieldStar ? instr->UnlinkSrc2() : IR::AddrOpnd::NewNull(m_func);
  2580. m_lowererMD.LoadHelperArgument(instr, srcOpnd2);
  2581. m_lowererMD.LoadHelperArgument(instr, srcOpnd1);
  2582. m_lowererMD.ChangeToHelperCall(instr, IR::HelperResumeYield);
  2583. break;
  2584. }
  2585. case Js::OpCode::GeneratorResumeJumpTable:
  2586. {
  2587. // Lowered in LowerPrologEpilog so that the jumps introduced are not considered to be part of the flow for the RegAlloc phase.
  2588. // Introduce a BailOutNoSave label if there were yield points that were elided due to optimizations. They could still be hit
  2589. // if an active generator object had been paused at such a yield point when the function body was JITed. So safe guard such a
  2590. // case by having the native code simply jump back to the interpreter for such yield points.
  2591. IR::LabelInstr *bailOutNoSaveLabel = nullptr;
  2592. m_func->MapUntilYieldOffsetResumeLabels([this, &bailOutNoSaveLabel](int, const YieldOffsetResumeLabel& yorl)
  2593. {
  2594. if (yorl.Second() == nullptr)
  2595. {
  2596. if (bailOutNoSaveLabel == nullptr)
  2597. {
  2598. bailOutNoSaveLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  2599. }
  2600. return true;
  2601. }
  2602. return false;
  2603. });
  2604. // Insert the bailoutnosave label somewhere along with a call to BailOutNoSave helper
  2605. if (bailOutNoSaveLabel != nullptr)
  2606. {
  2607. IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->m_prev;
  2608. IR::LabelInstr * exitTargetInstr;
  2609. if (exitPrevInstr->IsLabelInstr())
  2610. {
  2611. exitTargetInstr = exitPrevInstr->AsLabelInstr();
  2612. exitPrevInstr = exitPrevInstr->m_prev;
  2613. }
  2614. else
  2615. {
  2616. exitTargetInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  2617. exitPrevInstr->InsertAfter(exitTargetInstr);
  2618. }
  2619. bailOutNoSaveLabel->m_hasNonBranchRef = true;
  2620. bailOutNoSaveLabel->isOpHelper = true;
  2621. IR::Instr* bailOutCall = IR::Instr::New(Js::OpCode::Call, m_func);
  2622. exitPrevInstr->InsertAfter(bailOutCall);
  2623. exitPrevInstr->InsertAfter(bailOutNoSaveLabel);
  2624. exitPrevInstr->InsertAfter(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, exitTargetInstr, m_func));
  2625. IR::RegOpnd * frameRegOpnd = IR::RegOpnd::New(nullptr, LowererMD::GetRegFramePointer(), TyMachPtr, m_func);
  2626. m_lowererMD.LoadHelperArgument(bailOutCall, frameRegOpnd);
  2627. m_lowererMD.ChangeToHelperCall(bailOutCall, IR::HelperNoSaveRegistersBailOutForElidedYield);
  2628. m_func->m_bailOutNoSaveLabel = bailOutNoSaveLabel;
  2629. }
  2630. break;
  2631. }
  2632. case Js::OpCode::FrameDisplayCheck:
  2633. instrPrev = this->LowerFrameDisplayCheck(instr);
  2634. break;
  2635. case Js::OpCode::SlotArrayCheck:
  2636. instrPrev = this->LowerSlotArrayCheck(instr);
  2637. break;
  2638. #if DBG
  2639. case Js::OpCode::CheckLowerIntBound:
  2640. instrPrev = this->LowerCheckLowerIntBound(instr);
  2641. break;
  2642. case Js::OpCode::CheckUpperIntBound:
  2643. instrPrev = this->LowerCheckUpperIntBound(instr);
  2644. break;
  2645. #endif
  2646. #ifdef ENABLE_WASM
  2647. case Js::OpCode::Copysign_A:
  2648. m_lowererMD.GenerateCopysign(instr);
  2649. break;
  2650. case Js::OpCode::Trunc_A:
  2651. if (!AutoSystemInfo::Data.SSE4_1Available())
  2652. {
  2653. m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_TruncFlt, IR::HelperDirectMath_TruncDb);
  2654. break;
  2655. }
  2656. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  2657. break;
  2658. case Js::OpCode::Nearest_A:
  2659. if (!AutoSystemInfo::Data.SSE4_1Available())
  2660. {
  2661. m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_NearestFlt, IR::HelperDirectMath_NearestDb);
  2662. break;
  2663. }
  2664. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  2665. break;
  2666. case Js::OpCode::ThrowRuntimeError:
  2667. GenerateThrow(instr->UnlinkSrc1(), instr);
  2668. instr->Remove();
  2669. break;
  2670. #endif //ENABLE_WASM
  2671. case Js::OpCode::SpeculatedLoadFence:
  2672. {
  2673. AssertOrFailFast(instr->m_kind == IR::InstrKindByteCodeUses);
  2674. #ifdef _M_ARM
  2675. AssertOrFailFastMsg(false, "We shouldn't perform this hoisting on ARM");
  2676. #else
  2677. IR::ByteCodeUsesInstr* bcuInstr = static_cast<IR::ByteCodeUsesInstr*>(instr);
  2678. // Most of the time we're not going to be able to remove any masking in a loop, and
  2679. // this instruction can be removed.
  2680. if (bcuInstr->GetByteCodeUpwardExposedUsed() != nullptr && !bcuInstr->GetByteCodeUpwardExposedUsed()->IsEmpty())
  2681. {
  2682. // The generated code is:
  2683. //
  2684. // cmp rax, rax
  2685. // for each symbol to mask:
  2686. // reg(sym) = cmovne reg(sym), reg(sym)
  2687. IR::RegOpnd* temp = IR::RegOpnd::New(TyUint8, instr->m_func);
  2688. InsertMove(temp, IR::IntConstOpnd::New(0, TyUint8, instr->m_func), instr);
  2689. IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
  2690. cmp->SetSrc1(temp);
  2691. cmp->SetSrc2(temp);
  2692. instr->InsertBefore(cmp);
  2693. m_lowererMD.Legalize(cmp);
  2694. FOREACH_BITSET_IN_SPARSEBV(symid, bcuInstr->GetByteCodeUpwardExposedUsed())
  2695. {
  2696. StackSym* thisSym = instr->m_func->m_symTable->Find(symid)->AsStackSym();
  2697. IR::RegOpnd* thisSymReg = IR::RegOpnd::New(thisSym, thisSym->GetType(), instr->m_func);
  2698. Js::OpCode specBlockOp = thisSymReg->IsFloat() ? LowererMD::MDSpecBlockFNEOpcode : LowererMD::MDSpecBlockNEOpcode;
  2699. IR::Instr* cmov = IR::Instr::New(specBlockOp, thisSymReg, thisSymReg, thisSymReg, instr->m_func);
  2700. instr->InsertBefore(cmov);
  2701. m_lowererMD.Legalize(cmov);
  2702. } NEXT_BITSET_IN_SPARSEBV;
  2703. }
  2704. #endif
  2705. instr->Remove();
  2706. break;
  2707. }
  2708. case Js::OpCode::SpreadObjectLiteral:
  2709. this->LowerBinaryHelperMem(instr, IR::HelperSpreadObjectLiteral);
  2710. break;
  2711. case Js::OpCode::Restify:
  2712. instrPrev = this->LowerRestify(instr);
  2713. break;
  2714. case Js::OpCode::NewPropIdArrForCompProps:
  2715. this->LowerUnaryHelperMem(instr, IR::HelperNewPropIdArrForCompProps);
  2716. break;
  2717. case Js::OpCode::StPropIdArrFromVar:
  2718. instrPrev = this->LowerStPropIdArrFromVar(instr);
  2719. break;
  2720. default:
  2721. #ifdef ENABLE_WASM_SIMD
  2722. if (IsSimd128Opcode(instr->m_opcode))
  2723. {
  2724. instrPrev = m_lowererMD.Simd128Instruction(instr);
  2725. break;
  2726. }
  2727. #endif
  2728. AssertMsg(instr->IsLowered(), "Unknown opcode");
  2729. if(!instr->IsLowered())
  2730. {
  2731. Fatal();
  2732. }
  2733. break;
  2734. }
  2735. #if DBG
  2736. LegalizeVerifyRange(instrPrev ? instrPrev->m_next : instrStart,
  2737. verifyLegalizeInstrNext ? verifyLegalizeInstrNext->m_prev : nullptr);
  2738. this->helperCallCheckState = HelperCallCheckState_None;
  2739. #endif
  2740. } NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;
  2741. Assert(this->outerMostLoopLabel == nullptr);
  2742. }
  2743. IR::Opnd *
  2744. Lowerer::LoadFunctionInfoOpnd(IR::Instr * instr)
  2745. {
  2746. return IR::AddrOpnd::New(instr->m_func->GetWorkItem()->GetJITTimeInfo()->GetFunctionInfoAddr(), IR::AddrOpndKindDynamicFunctionInfo, instr->m_func);
  2747. }
  2748. IR::Instr *
  2749. Lowerer::LoadFunctionBody(IR::Instr * instr)
  2750. {
  2751. return m_lowererMD.LoadHelperArgument(instr, LoadFunctionBodyOpnd(instr));
  2752. }
  2753. IR::Instr *
  2754. Lowerer::LoadScriptContext(IR::Instr * instr)
  2755. {
  2756. return m_lowererMD.LoadHelperArgument(instr, LoadScriptContextOpnd(instr));
  2757. }
  2758. IR::Opnd *
  2759. Lowerer::LoadFunctionBodyOpnd(IR::Instr * instr)
  2760. {
  2761. return IR::AddrOpnd::New(instr->m_func->GetJITFunctionBody()->GetAddr(), IR::AddrOpndKindDynamicFunctionBody, instr->m_func);
  2762. }
  2763. IR::Opnd *
  2764. Lowerer::LoadScriptContextOpnd(IR::Instr * instr)
  2765. {
  2766. return IR::AddrOpnd::New(m_func->GetScriptContextInfo()->GetAddr(), IR::AddrOpndKindDynamicScriptContext, this->m_func);
  2767. }
  2768. IR::Opnd *
  2769. Lowerer::LoadScriptContextValueOpnd(IR::Instr * instr, ScriptContextValue valueType)
  2770. {
  2771. ScriptContextInfo *scriptContextInfo = instr->m_func->GetScriptContextInfo();
  2772. switch (valueType)
  2773. {
  2774. case ScriptContextValue::ScriptContextNumberAllocator:
  2775. return IR::AddrOpnd::New(scriptContextInfo->GetNumberAllocatorAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func);
  2776. case ScriptContextValue::ScriptContextRecycler:
  2777. return IR::AddrOpnd::New(scriptContextInfo->GetRecyclerAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func);
  2778. default:
  2779. Assert(false);
  2780. return nullptr;
  2781. }
  2782. }
IR::Opnd *
Lowerer::LoadLibraryValueOpnd(IR::Instr * instr, LibraryValue valueType)
{
    // Returns an operand addressing a well-known library value owned by the
    // script context (or, for a couple of cases, the thread context).
    // The operand kind and the trailing dontEncode flag differ per case
    // (compare LoadRuntimeInlineCacheOpnd's "/* dontEncode */ true" usage);
    // preserve them exactly when editing.
    ScriptContextInfo *scriptContextInfo = instr->m_func->GetScriptContextInfo();
    switch (valueType)
    {
    // Singleton Vars — created with the trailing flag set to true.
    case LibraryValue::ValueEmptyString:
        return IR::AddrOpnd::New(scriptContextInfo->GetEmptyStringAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueUndeclBlockVar:
        return IR::AddrOpnd::New(scriptContextInfo->GetUndeclBlockVarAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueUndefined:
        return IR::AddrOpnd::New(scriptContextInfo->GetUndefinedAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueNull:
        return IR::AddrOpnd::New(scriptContextInfo->GetNullAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueTrue:
        return IR::AddrOpnd::New(scriptContextInfo->GetTrueAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueFalse:
        return IR::AddrOpnd::New(scriptContextInfo->GetFalseAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    case LibraryValue::ValueNegativeZero:
        return IR::AddrOpnd::New(scriptContextInfo->GetNegativeZeroAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
    // Static types — note the first three also pass the trailing flag.
    case LibraryValue::ValueNumberTypeStatic:
        return IR::AddrOpnd::New(scriptContextInfo->GetNumberTypeStaticAddr(), IR::AddrOpndKindDynamicType, instr->m_func, true);
    case LibraryValue::ValueStringTypeStatic:
        return IR::AddrOpnd::New(scriptContextInfo->GetStringTypeStaticAddr(), IR::AddrOpndKindDynamicType, instr->m_func, true);
    case LibraryValue::ValueSymbolTypeStatic:
        return IR::AddrOpnd::New(scriptContextInfo->GetSymbolTypeStaticAddr(), IR::AddrOpndKindDynamicType, instr->m_func, true);
    // Object / array types — no trailing flag.
    case LibraryValue::ValueObjectType:
        return IR::AddrOpnd::New(scriptContextInfo->GetObjectTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueObjectHeaderInlinedType:
        return IR::AddrOpnd::New(scriptContextInfo->GetObjectHeaderInlinedTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueRegexType:
        return IR::AddrOpnd::New(scriptContextInfo->GetRegexTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueArrayConstructor:
        return IR::AddrOpnd::New(scriptContextInfo->GetArrayConstructorAddr(), IR::AddrOpndKindDynamicVar, instr->m_func);
    case LibraryValue::ValueJavascriptArrayType:
        return IR::AddrOpnd::New(scriptContextInfo->GetArrayTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueNativeIntArrayType:
        return IR::AddrOpnd::New(scriptContextInfo->GetNativeIntArrayTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    case LibraryValue::ValueNativeFloatArrayType:
        return IR::AddrOpnd::New(scriptContextInfo->GetNativeFloatArrayTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
    // Thread-context-owned values: these read m_func's thread context info,
    // not the script context captured above.
    case LibraryValue::ValueConstructorCacheDefaultInstance:
        return IR::AddrOpnd::New(m_func->GetThreadContextInfo()->GetConstructorCacheDefaultInstanceAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func);
    // Double-constant reference: a MemRefOpnd (memory operand), not an AddrOpnd.
    case LibraryValue::ValueAbsDoubleCst:
        return IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetAbsDoubleCstAddr(), TyMachDouble, instr->m_func, IR::AddrOpndKindDynamicDoubleRef);
    case LibraryValue::ValueCharStringCache:
        return IR::AddrOpnd::New(scriptContextInfo->GetCharStringCacheAddr(), IR::AddrOpndKindDynamicCharStringCache, instr->m_func);
    default:
        Assert(UNREACHED);
        return nullptr;
    }
}
  2834. IR::Opnd *
  2835. Lowerer::LoadVTableValueOpnd(IR::Instr * instr, VTableValue vtableType)
  2836. {
  2837. return IR::AddrOpnd::New((Js::Var)instr->m_func->GetScriptContextInfo()->GetVTableAddress(vtableType), IR::AddrOpndKindDynamicVtable, this->m_func);
  2838. }
  2839. IR::Opnd *
  2840. Lowerer::LoadOptimizationOverridesValueOpnd(IR::Instr *instr, OptimizationOverridesValue valueType)
  2841. {
  2842. switch (valueType)
  2843. {
  2844. case OptimizationOverridesValue::OptimizationOverridesSideEffects:
  2845. return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetSideEffectsAddr(), TyInt32, instr->m_func);
  2846. case OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable:
  2847. return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetArraySetElementFastPathVtableAddr(), TyMachPtr, instr->m_func);
  2848. case OptimizationOverridesValue::OptimizationOverridesIntArraySetElementFastPathVtable:
  2849. return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetIntArraySetElementFastPathVtableAddr(), TyMachPtr, instr->m_func);
  2850. case OptimizationOverridesValue::OptimizationOverridesFloatArraySetElementFastPathVtable:
  2851. return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetFloatArraySetElementFastPathVtableAddr(), TyMachPtr, instr->m_func);
  2852. default:
  2853. Assert(UNREACHED);
  2854. return nullptr;
  2855. }
  2856. }
  2857. IR::Opnd *
  2858. Lowerer::LoadNumberAllocatorValueOpnd(IR::Instr *instr, NumberAllocatorValue valueType)
  2859. {
  2860. ScriptContextInfo *scriptContext = instr->m_func->GetScriptContextInfo();
  2861. bool allowNativeCodeBumpAllocation = scriptContext->GetRecyclerAllowNativeCodeBumpAllocation();
  2862. switch (valueType)
  2863. {
  2864. case NumberAllocatorValue::NumberAllocatorEndAddress:
  2865. return IR::MemRefOpnd::New(((char *)scriptContext->GetNumberAllocatorAddr()) + Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset(), TyMachPtr, instr->m_func);
  2866. case NumberAllocatorValue::NumberAllocatorFreeObjectList:
  2867. return IR::MemRefOpnd::New(
  2868. ((char *)scriptContext->GetNumberAllocatorAddr()) +
  2869. (allowNativeCodeBumpAllocation ? Js::RecyclerJavascriptNumberAllocator::GetFreeObjectListOffset() : Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset()),
  2870. TyMachPtr, instr->m_func);
  2871. default:
  2872. Assert(false);
  2873. return nullptr;
  2874. }
  2875. }
  2876. IR::Opnd *
  2877. Lowerer::LoadIsInstInlineCacheOpnd(IR::Instr * instr, uint inlineCacheIndex)
  2878. {
  2879. intptr_t inlineCache = instr->m_func->GetJITFunctionBody()->GetIsInstInlineCache(inlineCacheIndex);
  2880. return IR::AddrOpnd::New(inlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func);
  2881. }
  2882. IR::Opnd *
  2883. Lowerer::LoadRuntimeInlineCacheOpnd(IR::Instr * instr, IR::PropertySymOpnd * propertySymOpnd, bool isHelper)
  2884. {
  2885. Assert(propertySymOpnd->m_runtimeInlineCache != 0);
  2886. IR::Opnd * inlineCacheOpnd = nullptr;
  2887. if (instr->m_func->GetJITFunctionBody()->HasInlineCachesOnFunctionObject() && !instr->m_func->IsInlinee())
  2888. {
  2889. inlineCacheOpnd = this->GetInlineCacheFromFuncObjectForRuntimeUse(instr, propertySymOpnd, isHelper);
  2890. }
  2891. else
  2892. {
  2893. intptr_t inlineCache = propertySymOpnd->m_runtimeInlineCache;
  2894. inlineCacheOpnd = IR::AddrOpnd::New(inlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func, /* dontEncode */ true);
  2895. }
  2896. return inlineCacheOpnd;
  2897. }
  2898. bool
  2899. Lowerer::TryGenerateFastCmSrXx(IR::Instr * instr)
  2900. {
  2901. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2902. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2903. if (srcReg2 && IsConstRegOpnd(srcReg2))
  2904. {
  2905. return m_lowererMD.GenerateFastCmSrXxConst(instr);
  2906. }
  2907. else if (srcReg1 && IsConstRegOpnd(srcReg1))
  2908. {
  2909. instr->SwapOpnds();
  2910. return m_lowererMD.GenerateFastCmSrXxConst(instr);
  2911. }
  2912. return false;
  2913. }
  2914. // Generate fast path for StrictEquals when one of the sources are undefined, null, boolean
  2915. bool
  2916. Lowerer::TryGenerateFastBrSrXx(IR::Instr * instr, IR::RegOpnd * srcReg1, IR::RegOpnd * srcReg2, IR::Instr ** pInstrPrev, bool noMathFastPath)
  2917. {
  2918. bool isEqual = !instr->IsNeq();
  2919. if (srcReg2 && IsConstRegOpnd(srcReg2))
  2920. {
  2921. this->GenerateFastBrConst(instr->AsBranchInstr(), GetConstRegOpnd(srcReg2, instr), isEqual);
  2922. instr->Remove();
  2923. return true;
  2924. }
  2925. else if (srcReg1 && IsConstRegOpnd(srcReg1))
  2926. {
  2927. instr->SwapOpnds();
  2928. this->GenerateFastBrConst(instr->AsBranchInstr(), GetConstRegOpnd(srcReg1, instr), isEqual);
  2929. instr->Remove();
  2930. return true;
  2931. }
  2932. return false;
  2933. }
  2934. ///----------------------------------------------------------------------------
  2935. ///
  2936. /// Lowerer::GenerateFastBrConst
  2937. ///
  2938. ///----------------------------------------------------------------------------
  2939. IR::BranchInstr *
  2940. Lowerer::GenerateFastBrConst(IR::BranchInstr *branchInstr, IR::Opnd * constOpnd, bool isEqual)
  2941. {
  2942. Assert(constOpnd->IsAddrOpnd() || constOpnd->IsIntConstOpnd());
  2943. //
  2944. // Given:
  2945. // BrSrXx_A $L1, s1, s2
  2946. // where s2 is either 'null', 'undefined', 'true' or 'false'
  2947. //
  2948. // Generate:
  2949. //
  2950. // CMP s1, s2
  2951. // JEQ/JNE $L1
  2952. //
  2953. Assert(IsConstRegOpnd(branchInstr->GetSrc2()->AsRegOpnd()));
  2954. IR::RegOpnd *opnd = GetRegOpnd(branchInstr->GetSrc1(), branchInstr, m_func, TyVar);
  2955. IR::BranchInstr *newBranch;
  2956. newBranch = InsertCompareBranch(opnd, constOpnd, isEqual ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A, branchInstr->GetTarget(), branchInstr);
  2957. return newBranch;
  2958. }
  2959. bool
  2960. Lowerer::TryGenerateFastBrEq(IR::Instr * instr)
  2961. {
  2962. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2963. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2964. bool isConst = false;
  2965. if (srcReg1 && this->IsNullOrUndefRegOpnd(srcReg1))
  2966. {
  2967. instr->SwapOpnds();
  2968. isConst = true;
  2969. }
  2970. // Fast path for == null or == undefined
  2971. // if (src == null || src == undefined)
  2972. if (isConst || (srcReg2 && this->IsNullOrUndefRegOpnd(srcReg2)))
  2973. {
  2974. IR::BranchInstr *newBranch;
  2975. newBranch = this->GenerateFastBrConst(instr->AsBranchInstr(),
  2976. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
  2977. true);
  2978. this->GenerateFastBrConst(instr->AsBranchInstr(),
  2979. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
  2980. true);
  2981. instr->Remove();
  2982. return true;
  2983. }
  2984. return false;
  2985. }
  2986. bool
  2987. Lowerer::TryGenerateFastBrNeq(IR::Instr * instr)
  2988. {
  2989. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2990. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2991. bool isConst = false;
  2992. if (srcReg1 && this->IsNullOrUndefRegOpnd(srcReg1))
  2993. {
  2994. instr->SwapOpnds();
  2995. isConst = true;
  2996. }
  2997. // Fast path for != null or != undefined
  2998. // if (src != null && src != undefined)
  2999. //
  3000. // That is:
  3001. // if (src == NULL) goto labelEq
  3002. // if (src != undef) goto target
  3003. // labelEq:
  3004. if (isConst || (srcReg2 && this->IsNullOrUndefRegOpnd(srcReg2)))
  3005. {
  3006. IR::LabelInstr *labelEq = instr->GetOrCreateContinueLabel();
  3007. IR::BranchInstr *newBranch;
  3008. newBranch = this->GenerateFastBrConst(instr->AsBranchInstr(),
  3009. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
  3010. true);
  3011. newBranch->AsBranchInstr()->SetTarget(labelEq);
  3012. this->GenerateFastBrConst(instr->AsBranchInstr(),
  3013. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
  3014. false);
  3015. instr->Remove();
  3016. return true;
  3017. }
  3018. return false;
  3019. }
  3020. void
  3021. Lowerer::GenerateDynamicObjectAlloc(IR::Instr * newObjInstr, uint inlineSlotCount, uint slotCount, IR::RegOpnd * newObjDst, IR::Opnd * typeSrc)
  3022. {
  3023. size_t headerAllocSize = sizeof(Js::DynamicObject) + inlineSlotCount * sizeof(Js::Var);
  3024. IR::SymOpnd * tempObjectSymOpnd;
  3025. bool isZeroed = GenerateRecyclerOrMarkTempAlloc(newObjInstr, newObjDst, IR::HelperAllocMemForScObject, headerAllocSize, &tempObjectSymOpnd);
  3026. if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
  3027. {
  3028. // Hoist the vtable init to the outer most loop top as it never changes
  3029. InsertMove(tempObjectSymOpnd,
  3030. LoadVTableValueOpnd(this->outerMostLoopLabel, VTableValue::VtableDynamicObject), this->outerMostLoopLabel, false);
  3031. }
  3032. else
  3033. {
  3034. // MOV [newObjDst + offset(vtable)], DynamicObject::vtable
  3035. GenerateMemInit(newObjDst, 0, LoadVTableValueOpnd(newObjInstr, VTableValue::VtableDynamicObject), newObjInstr, isZeroed);
  3036. }
  3037. // MOV [newObjDst + offset(type)], newObjectType
  3038. GenerateMemInit(newObjDst, Js::DynamicObject::GetOffsetOfType(), typeSrc, newObjInstr, isZeroed);
  3039. // CALL JavascriptOperators::AllocMemForVarArray((slotCount - inlineSlotCount) * sizeof(Js::Var))
  3040. if (slotCount > inlineSlotCount)
  3041. {
  3042. size_t auxSlotsAllocSize = (slotCount - inlineSlotCount) * sizeof(Js::Var);
  3043. IR::RegOpnd* auxSlots = IR::RegOpnd::New(TyMachPtr, m_func);
  3044. GenerateRecyclerAllocAligned(IR::HelperAllocMemForVarArray, auxSlotsAllocSize, auxSlots, newObjInstr);
  3045. GenerateMemInit(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), auxSlots, newObjInstr, isZeroed);
  3046. IR::IndirOpnd* newObjAuxSlots = IR::IndirOpnd::New(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachPtr, m_func);
  3047. this->InsertMove(newObjAuxSlots, auxSlots, newObjInstr);
  3048. }
  3049. else
  3050. {
  3051. GenerateMemInitNull(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), newObjInstr, isZeroed);
  3052. }
  3053. GenerateMemInitNull(newObjDst, Js::DynamicObject::GetOffsetOfObjectArray(), newObjInstr, isZeroed);
  3054. }
  3055. void
  3056. Lowerer::LowerNewScObjectSimple(IR::Instr * instr)
  3057. {
  3058. GenerateDynamicObjectAlloc(
  3059. instr,
  3060. 0,
  3061. 0,
  3062. instr->UnlinkDst()->AsRegOpnd(),
  3063. LoadLibraryValueOpnd(
  3064. instr,
  3065. Js::FunctionBody::DoObjectHeaderInliningForEmptyObjects()
  3066. ? LibraryValue::ValueObjectHeaderInlinedType
  3067. : LibraryValue::ValueObjectType));
  3068. instr->Remove();
  3069. }
void
Lowerer::LowerNewScObjectLiteral(IR::Instr *newObjInstr)
{
    // Lower NewScObjectLiteral: allocate an object literal inline. If the
    // cached literal type already exists and is shared, embed it directly;
    // otherwise emit run-time checks and a slow path that calls the
    // EnsureObjectLiteralType helper to obtain the type before allocating.
    Func * func = m_func;
    IR::IntConstOpnd * literalObjectIdOpnd = newObjInstr->UnlinkSrc2()->AsIntConstOpnd();
    intptr_t literalTypeRef = newObjInstr->m_func->GetJITFunctionBody()->GetObjectLiteralTypeRef(literalObjectIdOpnd->AsUint32());
    IR::LabelInstr * helperLabel = nullptr;
    IR::LabelInstr * allocLabel = nullptr;
    IR::Opnd * literalTypeRefOpnd;
    IR::Opnd * literalTypeOpnd;
    IR::Opnd * propertyArrayOpnd;
    // src1 identifies the property-id array describing the literal's shape.
    IR::IntConstOpnd * propertyArrayIdOpnd = newObjInstr->UnlinkSrc1()->AsIntConstOpnd();
    const Js::PropertyIdArray * propIds = newObjInstr->m_func->GetJITFunctionBody()->ReadPropertyIdArrayFromAuxData(propertyArrayIdOpnd->AsUint32());
    intptr_t propArrayAddr = newObjInstr->m_func->GetJITFunctionBody()->GetAuxDataAddr(propertyArrayIdOpnd->AsUint32());
    uint inlineSlotCapacity = Js::JavascriptOperators::GetLiteralInlineSlotCapacity(propIds);
    uint slotCapacity = Js::JavascriptOperators::GetLiteralSlotCapacity(propIds);
    IR::RegOpnd * dstOpnd;
    literalTypeRefOpnd = IR::AddrOpnd::New(literalTypeRef, IR::AddrOpndKindDynamicMisc, this->m_func);
    propertyArrayOpnd = IR::AddrOpnd::New(propArrayAddr, IR::AddrOpndKindDynamicMisc, this->m_func);
    //#if 0 TODO: OOP JIT, obj literal types
    // should pass in isShared bit through RPC, enable for in-proc jit to see perf impact
    // In OOP JIT (or when the missing-opts flag is off) we cannot dereference
    // the type slot in the script process, so treat the type as unknown.
    Js::DynamicType * literalType = func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts) ? nullptr : *(Js::DynamicType **)literalTypeRef;
    if (literalType == nullptr || !literalType->GetIsShared())
    {
        // Type not known to be shared at JIT time: load it at run time and
        // bail to the helper if it is null or not marked shared.
        helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        allocLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
        literalTypeOpnd = IR::RegOpnd::New(TyMachPtr, func);
        InsertMove(literalTypeOpnd, IR::MemRefOpnd::New(literalTypeRef, TyMachPtr, func), newObjInstr);
        InsertTestBranch(literalTypeOpnd, literalTypeOpnd,
            Js::OpCode::BrEq_A, helperLabel, newObjInstr);
        InsertTestBranch(IR::IndirOpnd::New(literalTypeOpnd->AsRegOpnd(), Js::DynamicType::GetOffsetOfIsShared(), TyInt8, func),
            IR::IntConstOpnd::New(1, TyInt8, func, true), Js::OpCode::BrEq_A, helperLabel, newObjInstr);
        dstOpnd = newObjInstr->GetDst()->AsRegOpnd();
    }
    else
    {
        // Shared type known at JIT time: embed its address directly; the
        // capacities read from the property array must match the type handler.
        literalTypeOpnd = IR::AddrOpnd::New(literalType, IR::AddrOpndKindDynamicType, func);
        dstOpnd = newObjInstr->UnlinkDst()->AsRegOpnd();
        Assert(inlineSlotCapacity == literalType->GetTypeHandler()->GetInlineSlotCapacity());
        Assert(slotCapacity == (uint)literalType->GetTypeHandler()->GetSlotCapacity());
    }
    if (helperLabel)
    {
        InsertBranch(Js::OpCode::Br, allocLabel, newObjInstr);
        // Slow path to ensure the type is there
        newObjInstr->InsertBefore(helperLabel);
        IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperEnsureObjectLiteralType, func);
        m_lowererMD.LoadHelperArgument(newObjInstr, literalTypeRefOpnd);
        m_lowererMD.LoadHelperArgument(newObjInstr, propertyArrayOpnd);
        LoadScriptContext(newObjInstr);
        // The helper returns the ensured type in literalTypeOpnd, which both
        // paths then feed to the allocation below.
        IR::Instr * ensureTypeInstr = IR::Instr::New(Js::OpCode::Call, literalTypeOpnd, opndHelper, func);
        newObjInstr->InsertBefore(ensureTypeInstr);
        m_lowererMD.LowerCall(ensureTypeInstr, 0);
        newObjInstr->InsertBefore(allocLabel);
    }
    else
    {
        Assert(allocLabel == nullptr);
    }
    // For the next call:
    //     inlineSlotCapacity == Number of slots to allocate beyond the DynamicObject header
    //     slotCapacity - inlineSlotCapacity == Number of aux slots to allocate
    if(Js::FunctionBody::DoObjectHeaderInliningForObjectLiteral(propIds))
    {
        // Object-header inlining: the first slots live inside the object
        // header itself, so exclude the header-inlinable capacity here.
        Assert(inlineSlotCapacity >= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity());
        Assert(inlineSlotCapacity == slotCapacity);
        slotCapacity = inlineSlotCapacity -= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity();
    }
    GenerateDynamicObjectAlloc(
        newObjInstr,
        inlineSlotCapacity,
        slotCapacity,
        dstOpnd,
        literalTypeOpnd);
    newObjInstr->Remove();
}
  3146. IR::Instr*
  3147. Lowerer::LowerProfiledNewScArray(IR::JitProfilingInstr* arrInstr)
  3148. {
  3149. IR::Instr *instrPrev = arrInstr->m_prev;
  3150. /*
  3151. JavascriptArray *ProfilingHelpers::ProfiledNewScArray(
  3152. const uint length,
  3153. FunctionBody *const functionBody,
  3154. const ProfileId profileId)
  3155. */
  3156. m_lowererMD.LoadHelperArgument(arrInstr, IR::Opnd::CreateProfileIdOpnd(arrInstr->profileId, m_func));
  3157. m_lowererMD.LoadHelperArgument(arrInstr, CreateFunctionBodyOpnd(arrInstr->m_func));
  3158. m_lowererMD.LoadHelperArgument(arrInstr, arrInstr->UnlinkSrc1());
  3159. arrInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledNewScArray, m_func));
  3160. m_lowererMD.LowerCall(arrInstr, 0);
  3161. return instrPrev;
  3162. }
IR::Instr *
Lowerer::LowerNewScArray(IR::Instr *arrInstr)
{
    // Lower NewScArray: under dynamic (JIT) profiling delegate to the
    // profiling helper; otherwise optionally emit a profiled inline fast
    // path, then lower the remaining slow path to a runtime helper call.
    if (arrInstr->IsJitProfilingInstr())
    {
        return LowerProfiledNewScArray(arrInstr->AsJitProfilingInstr());
    }
    IR::Instr *instrPrev = arrInstr->m_prev;
    IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScArray;
    if (arrInstr->IsProfiledInstr() && arrInstr->m_func->HasProfileInfo())
    {
        intptr_t weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
        Assert(weakFuncRef);
        Js::ProfileId profileId = static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
        Js::ArrayCallSiteInfo *arrayInfo = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
        intptr_t arrayInfoAddr = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
        Assert(arrInstr->GetSrc1()->IsConstOpnd());
        // Emit the inline fast path; the helper call emitted below remains
        // as the slow path the fast path bails to.
        GenerateProfiledNewScArrayFastPath(arrInstr, arrayInfo, arrayInfoAddr, weakFuncRef, arrInstr->GetSrc1()->AsIntConstOpnd()->AsUint32());
        if (arrInstr->GetDst() && arrInstr->GetDst()->GetValueType().IsLikelyNativeArray())
        {
            // Native arrays use the profiled helper, which takes the weak
            // function reference and the call-site info as extra arguments.
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
            helperMethod = IR::HelperScrArr_ProfiledNewScArray;
        }
    }
    LoadScriptContext(arrInstr);
    IR::Opnd *src1Opnd = arrInstr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(arrInstr, src1Opnd);
    m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
    return instrPrev;
}
  3194. template <typename ArrayType>
  3195. BOOL Lowerer::IsSmallObject(uint32 length)
  3196. {
  3197. if (ArrayType::HasInlineHeadSegment(length))
  3198. return true;
  3199. uint32 alignedHeadSegmentSize = Js::SparseArraySegment<typename ArrayType::TElement>::GetAlignedSize(length);
  3200. size_t allocSize = sizeof(Js::SparseArraySegment<typename ArrayType::TElement>) + alignedHeadSegmentSize * sizeof(typename ArrayType::TElement);
  3201. return HeapInfo::IsSmallObject(HeapInfo::GetAlignedSizeNoCheck(allocSize));
  3202. }
bool
Lowerer::GenerateProfiledNewScArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef, uint32 length)
{
    // Emit an inline allocation fast path for a profiled array literal of
    // known length, specialized on the destination's profiled element kind
    // (native int / native float / var). Returns false when no fast path was
    // emitted (phase disabled, ES5 arrays forced, or the allocation doesn't
    // fit a small-object heap bucket); the caller then relies solely on the
    // helper call it emits afterwards.
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
    {
        return false;
    }
    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    uint32 size = length;
    bool isZeroed;
    // NOTE(review): GetDst() is dereferenced here unconditionally while the
    // branches below re-check instr->GetDst() for null; presumably dst is
    // always present for this opcode — confirm against callers.
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    uint32 i = length;
    if (instr->GetDst() && instr->GetDst()->GetValueType().IsLikelyNativeIntArray())
    {
        if (!IsSmallObject<Js::JavascriptNativeIntArray>(length))
        {
            return false;
        }
        // Guard: bail to the helper if the call site is no longer native-int.
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        // Fill the segment tail [length, size) with the int missing-item value.
        for (; i < size; i++)
        {
            GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
                Js::JavascriptNativeIntArray::MissingItem, instr, isZeroed);
        }
    }
    else if (instr->GetDst() && instr->GetDst()->GetValueType().IsLikelyNativeFloatArray())
    {
        if (!IsSmallObject<Js::JavascriptNativeFloatArray>(length))
        {
            return false;
        }
        // Guard: bail to the helper unless the site is float-and-not-int.
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        // Fill the segment tail with the float missing-item value.
        for (; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(double),
                GetMissingItemOpndForAssignment(TyFloat64, m_func),
                instr, isZeroed);
        }
    }
    else
    {
        if (!IsSmallObject<Js::JavascriptArray>(length))
        {
            return false;
        }
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptArray>(instr, &size, arrayInfo, &isZeroed);
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
        // Fill the segment tail with the var missing-item value.
        for (; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::Var),
                GetMissingItemOpndForAssignment(TyVar, m_func),
                instr, isZeroed);
        }
    }
    // Skip pass the helper call
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
    return true;
}
  3279. void
  3280. Lowerer::GenerateArrayInfoIsNativeIntArrayTest(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, IR::LabelInstr * helperLabel)
  3281. {
  3282. Func * func = this->m_func;
  3283. InsertTestBranch(IR::MemRefOpnd::New(((char *)arrayInfoAddr) + Js::ArrayCallSiteInfo::GetOffsetOfBits(), TyUint8, func),
  3284. IR::IntConstOpnd::New(Js::ArrayCallSiteInfo::NotNativeIntBit, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
  3285. }
void
Lowerer::GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, IR::LabelInstr * helperLabel)
{
    // Guard that the call-site info reads "native float and not int": this is
    // a CMP (full-byte equality against exactly NotNativeIntBit), not a TEST
    // as in the native-int guard — any other bit set means the site is not a
    // pure native-float array, so jump to the helper.
    Func * func = this->m_func;
    InsertCompareBranch(IR::MemRefOpnd::New(((char *)arrayInfoAddr) + Js::ArrayCallSiteInfo::GetOffsetOfBits(), TyUint8, func),
        IR::IntConstOpnd::New(Js::ArrayCallSiteInfo::NotNativeIntBit, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
}
// Maps an array type to the JIT helper used to allocate its backing memory.
template <typename ArrayType>
static IR::JnHelperMethod GetArrayAllocMemHelper();
// Var-element arrays.
template <>
IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptArray>()
{
    return IR::HelperAllocMemForJavascriptArray;
}
// Native int-element arrays.
template <>
IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptNativeIntArray>()
{
    return IR::HelperAllocMemForJavascriptNativeIntArray;
}
// Native float-element arrays.
template <>
IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptNativeFloatArray>()
{
    return IR::HelperAllocMemForJavascriptNativeFloatArray;
}
// Allocation for array-literal sites (not the Array constructor): forwards to
// GenerateArrayAllocHelper with isArrayObjCtor = false, isNoArgs = false.
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayLiteralsAlloc(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed)
{
    return GenerateArrayAllocHelper<ArrayType>(instr, psize, arrayInfo, pIsHeadSegmentZeroed, false /* isArrayObjCtor */, false /* isNoArgs */);
}
// Allocation for Array-constructor sites: forwards to GenerateArrayAllocHelper
// with isArrayObjCtor = true and the caller-supplied isNoArgs flag.
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayObjectsAlloc(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed, bool isNoArgs)
{
    return GenerateArrayAllocHelper<ArrayType>(instr, psize, arrayInfo, pIsHeadSegmentZeroed, true /* isArrayObjCtor */, isNoArgs);
}
// Shared inline-allocation codegen for array literals and the Array
// constructor. Allocates the array object (with an inline head segment when
// it fits, otherwise a separately-allocated segment) and initializes the
// object header and segment header fields. On return, *psize holds the
// aligned head-segment size and *pIsHeadSegmentZeroed reports whether the
// head segment memory is known to be zero-initialized.
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayAllocHelper(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed, bool isArrayObjCtor, bool isNoArgs)
{
    Func * func = this->m_func;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
    // Generate code as in JavascriptArray::NewLiteral
    uint32 count = *psize;
    uint alignedHeadSegmentSize;
    size_t arrayAllocSize;
    IR::RegOpnd * headOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func, false);
    IR::Instr * leaHeadInstr = nullptr;
    bool isHeadSegmentZeroed = false;
    if (ArrayType::HasInlineHeadSegment(count))
    {
        // Head segment lives inline, immediately after the array object.
        if (isArrayObjCtor)
        {
            uint32 allocCount = isNoArgs ? Js::SparseArraySegmentBase::SMALL_CHUNK_SIZE : count;
            arrayAllocSize = Js::JavascriptArray::DetermineAllocationSizeForArrayObjects<ArrayType, 0>(allocCount, nullptr, &alignedHeadSegmentSize);
        }
        else
        {
            uint32 allocCount = count == 0 ? Js::SparseArraySegmentBase::SMALL_CHUNK_SIZE : count;
            arrayAllocSize = Js::JavascriptArray::DetermineAllocationSize<ArrayType, 0>(allocCount, nullptr, &alignedHeadSegmentSize);
        }
        // Note that it is possible for the returned alignedHeadSegmentSize to be greater than INLINE_CHUNK_SIZE because
        // of rounding the *entire* object, including the head segment, to the nearest aligned size. In that case, ensure
        // that this size is still not larger than INLINE_CHUNK_SIZE size because the head segment is still inlined. This
        // keeps consistency with the definition of HasInlineHeadSegment and maintained in the assert below.
        uint inlineChunkSize = Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE;
        alignedHeadSegmentSize = min(alignedHeadSegmentSize, inlineChunkSize);
        Assert(ArrayType::HasInlineHeadSegment(alignedHeadSegmentSize));
        // LEA head, [array + sizeof(ArrayType)] — created now, inserted after
        // the array object itself has been allocated below.
        leaHeadInstr = IR::Instr::New(Js::OpCode::LEA, headOpnd,
            IR::IndirOpnd::New(dstOpnd, sizeof(ArrayType), TyMachPtr, func), func);
        isHeadSegmentZeroed = true;
    }
    else
    {
        // Need to allocate the head segment first so that if it throws,
        // we doesn't have the memory assigned to dstOpnd yet
        // Even if the instruction is marked as dstIsTempObject, we still should not allocate
        // that big of a chunk on the stack.
        alignedHeadSegmentSize = Js::SparseArraySegment<typename ArrayType::TElement>::GetAlignedSize(count);
        GenerateRecyclerAlloc(
            IR::HelperAllocMemForSparseArraySegmentBase,
            sizeof(Js::SparseArraySegment<typename ArrayType::TElement>) +
            alignedHeadSegmentSize * sizeof(typename ArrayType::TElement),
            headOpnd,
            instr);
        arrayAllocSize = sizeof(ArrayType);
    }
    *psize = alignedHeadSegmentSize;
    IR::SymOpnd * tempObjectSymOpnd;
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(instr, dstOpnd,
        GetArrayAllocMemHelper<ArrayType>(), arrayAllocSize, &tempObjectSymOpnd);
    // An inline head segment is zeroed only if the object allocation itself is.
    isHeadSegmentZeroed = isHeadSegmentZeroed & isZeroed;
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            this->LoadVTableValueOpnd(this->outerMostLoopLabel, ArrayType::VtableHelper()),
            this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, ArrayType::VtableHelper()), instr, isZeroed);
    }
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfType(), this->LoadLibraryValueOpnd(instr, ArrayType::InitialTypeHelper()), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, ArrayType::GetOffsetOfAuxSlots(), instr, isZeroed);
    // Emit the flags and call site index together
    Js::ProfileId arrayCallSiteIndex = (Js::ProfileId)instr->AsProfiledInstr()->u.profileId;
#if DBG
    if (instr->AsProfiledInstr()->u.profileId < Js::Constants::NoProfileId)
    {
        Assert((uint32)(arrayInfo - instr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(0)) == arrayCallSiteIndex);
    }
    else
    {
        Assert(arrayInfo == nullptr);
    }
#endif
    // The same at this:
    //  GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), (uint16)Js::DynamicObjectFlags::InitialArrayValue, instr, isZeroed);
    //  GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayCallSiteIndex(), arrayCallSiteIndex, instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), (uint)Js::DynamicObjectFlags::InitialArrayValue | ((uint)arrayCallSiteIndex << 16), instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLength(), count, instr, isZeroed);
    if (leaHeadInstr != nullptr)
    {
        instr->InsertBefore(leaHeadInstr);
        ChangeToLea(leaHeadInstr);
    }
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfHead(), headOpnd, instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLastUsedSegmentOrSegmentMap(), headOpnd, instr, isZeroed);
    // Initialize segment head
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLeft(), 0, instr, isHeadSegmentZeroed);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), isArrayObjCtor ? 0 : count, instr, isHeadSegmentZeroed);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfSize(), alignedHeadSegmentSize, instr, isHeadSegmentZeroed);
    GenerateMemInitNull(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfNext(), instr, isHeadSegmentZeroed);
    *pIsHeadSegmentZeroed = isHeadSegmentZeroed;
    return headOpnd;
}
// Inline allocation for an array whose length is only known at run time:
// selects an allocation bucket by comparing the length operand against the
// precomputed allocation buckets, calls the allocation helper with the chosen
// size, then initializes the array object and its (inline) head segment.
// The head segment length is initialized to 0 (no elements written yet).
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayAlloc(IR::Instr *instr, IR::Opnd * arrayLenOpnd, Js::ArrayCallSiteInfo * arrayInfo)
{
    Func * func = this->m_func;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd * headOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func, false);
    IR::Instr * leaHeadInstr = nullptr;
    IR::Opnd * arraySizeOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::Opnd * alignedArrayAllocSizeOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::LabelInstr * doneCalculatingAllocSize = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * skipToNextBucket = nullptr;
    uint8 bucketsCount = ArrayType::AllocationBucketsCount;
    Js::JavascriptArray::EnsureCalculationOfAllocationBuckets<ArrayType>();
    // Emit a cascade of compares: the first bucket whose limit is >= the
    // run-time length supplies the element count and allocation size; the
    // last bucket is unconditional (no compare emitted for it).
    for (uint8 i = 0;i < bucketsCount;i++)
    {
        uint elementsCountToInitialize = ArrayType::allocationBuckets[i][Js::JavascriptArray::MissingElementsCountIndex];
        uint allocationSize = ArrayType::allocationBuckets[i][Js::JavascriptArray::AllocationSizeIndex];
        // Ensure we already have allocation size calculated and within range
        Assert(elementsCountToInitialize > 0 && elementsCountToInitialize <= ArrayType::allocationBuckets[bucketsCount - 1][Js::JavascriptArray::MissingElementsCountIndex]);
        Assert(allocationSize > 0 && allocationSize <= ArrayType::allocationBuckets[bucketsCount - 1][Js::JavascriptArray::AllocationSizeIndex]);
        // CMP arrayLen, currentBucket
        // JG $checkNextBucket
        if (i != (bucketsCount - 1))
        {
            Lowerer::InsertCompare(arrayLenOpnd, IR::IntConstOpnd::New((uint16)ArrayType::allocationBuckets[i][Js::JavascriptArray::AllocationBucketIndex], TyUint32, func), instr);
            skipToNextBucket = IR::LabelInstr::New(Js::OpCode::Label, func);
            Lowerer::InsertBranch(Js::OpCode::BrGt_A, skipToNextBucket, instr);
        }
        // MOV $arrayAlignedSize, <const1>
        // MOV $arrayAllocSize, <const2>
        Lowerer::InsertMove(arraySizeOpnd, IR::IntConstOpnd::New((uint16)elementsCountToInitialize, TyUint32, func), instr);
        Lowerer::InsertMove(alignedArrayAllocSizeOpnd, IR::IntConstOpnd::New((uint16)allocationSize, TyUint32, func), instr);
        // JMP $doneCalculatingAllocSize
        if (i != (bucketsCount - 1))
        {
            Lowerer::InsertBranch(Js::OpCode::Br, doneCalculatingAllocSize, instr);
            instr->InsertBefore(skipToNextBucket);
        }
    }
    instr->InsertBefore(doneCalculatingAllocSize);
    // ***** Call to allocation helper *****
    this->m_lowererMD.LoadHelperArgument(instr, this->LoadScriptContextValueOpnd(instr, ScriptContextValue::ScriptContextRecycler));
    this->m_lowererMD.LoadHelperArgument(instr, alignedArrayAllocSizeOpnd);
    IR::Instr *newObjCall = IR::Instr::New(Js::OpCode::Call, dstOpnd, IR::HelperCallOpnd::New(GetArrayAllocMemHelper<ArrayType>(), func), func);
    instr->InsertBefore(newObjCall);
    this->m_lowererMD.LowerCall(newObjCall, 0);
    // ***** Load headSeg/initialize it *****
    // Head segment lives inline, immediately after the array object.
    leaHeadInstr = IR::Instr::New(Js::OpCode::LEA, headOpnd,
        IR::IndirOpnd::New(dstOpnd, sizeof(ArrayType), TyMachPtr, func), func);
    GenerateMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, ArrayType::VtableHelper()), instr, true);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfType(), this->LoadLibraryValueOpnd(instr, ArrayType::InitialTypeHelper()), instr, true);
    GenerateMemInitNull(dstOpnd, ArrayType::GetOffsetOfAuxSlots(), instr, true);
    Js::ProfileId arrayCallSiteIndex = (Js::ProfileId)instr->AsProfiledInstr()->u.profileId;
#if DBG
    if (instr->AsProfiledInstr()->u.profileId < Js::Constants::NoProfileId)
    {
        Assert((uint32)(arrayInfo - instr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(0)) == arrayCallSiteIndex);
    }
    else
    {
        Assert(arrayInfo == nullptr);
    }
#endif
    // ***** Array object initialization *****
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), IR::IntConstOpnd::New((uint16)Js::DynamicObjectFlags::InitialArrayValue, TyUint16, func), instr, true);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLength(), arrayLenOpnd, instr, true);
    if (leaHeadInstr != nullptr)
    {
        instr->InsertBefore(leaHeadInstr);
        ChangeToLea(leaHeadInstr);
    }
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfHead(), headOpnd, instr, true);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLastUsedSegmentOrSegmentMap(), headOpnd, instr, true);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLeft(), 0, instr, true);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), 0, instr, true); // Set head segment length to 0
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfSize(), arraySizeOpnd, instr, true);
    GenerateMemInitNull(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfNext(), instr, true);
    return headOpnd;
}
// Emits the inline fast path for a profiled `new Array(...)` call site whose
// length is a compile-time constant. Allocates the array object and head
// segment inline (specialized to int/float/var based on the call-site profile)
// and fills the head segment with missing-value sentinels. Falls through to
// labelDone on success; control reaches the emitted helper label when the
// profile check fails at run time.
// Returns false (emitting nothing) when the ArrayCtor fast-path phase is off.
bool
Lowerer::GenerateProfiledNewScObjArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef, uint32 length, IR::LabelInstr* labelDone, bool isNoArgs)
{
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func))
    {
        return false;
    }

    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    // size may be rounded up to the allocation bucket by GenerateArrayObjectsAlloc below.
    uint32 size = length;
    bool isZeroed = false;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId);

    if (arrayInfo && arrayInfo->IsNativeIntArray())
    {
        // Bail to the helper if the call site is no longer producing native int arrays.
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        // The call-site index is stored right after the array flags; the alloc
        // code relies on that adjacency, so assert it here.
        Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayObjectsAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isZeroed, isNoArgs);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex(), IR::IntConstOpnd::New(profileId, TyUint16, func, true), instr, isZeroed);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        // Fill every allocated element slot with the int MissingItem sentinel.
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
                Js::JavascriptNativeIntArray::MissingItem, instr, isZeroed);
        }
    }
    else if (arrayInfo && arrayInfo->IsNativeFloatArray())
    {
        // Bail to the helper if the call site has been demoted from float array.
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayObjectsAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isZeroed, isNoArgs);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex(), IR::IntConstOpnd::New(profileId, TyUint16, func, true), instr, isZeroed);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(double),
                GetMissingItemOpndForAssignment(TyFloat64, m_func),
                instr, isZeroed);
        }
    }
    else
    {
        // No (usable) profile info: allocate a plain Var array.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        headOpnd = GenerateArrayObjectsAlloc<Js::JavascriptArray>(instr, &size, arrayInfo, &isZeroed, isNoArgs);
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::Var),
                GetMissingItemOpndForAssignment(TyVar, m_func),
                instr, isZeroed);
        }
    }
    // Skip past the helper call
    InsertBranch(Js::OpCode::Br, labelDone, instr);
    instr->InsertBefore(helperLabel);
    return true;
}
// Emits the inline fast path for a profiled `new Array(length)` call site whose
// length is a run-time (tagged-int) value. The length is untagged and bounds-
// checked, the array is allocated from the per-bucket allocation tables, and
// the head segment is filled with missing-value sentinels bucket by bucket,
// branching out early once enough sentinels have been written for the actual
// length. Control reaches helperLabel when any run-time check fails.
// Returns false (emitting nothing) when the ArrayCtor fast-path phase is off.
template <typename ArrayType>
bool
Lowerer::GenerateProfiledNewScObjArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef, IR::LabelInstr* helperLabel,
    IR::LabelInstr* labelDone, IR::Opnd* lengthOpnd, uint32 offsetOfCallSiteIndex, uint32 offsetOfWeakFuncRef)
{
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func))
    {
        return false;
    }
    Func * func = this->m_func;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId);
    uint sizeOfElement = 0;
    uint allocationBucketsCount = ArrayType::AllocationBucketsCount;
    // Each bucket row holds [bucket length, allocation size, missing-elements count].
    uint(*allocationBuckets)[Js::JavascriptArray::AllocationBucketsInfoSize];
    allocationBuckets = ArrayType::allocationBuckets;
    // Element type for the missing-value sentinel; defaults to Var when there
    // is no profile info or the profile says neither int nor float.
    IRType missingItemType = (arrayInfo ? arrayInfo->IsNativeIntArray() ? IRType::TyInt32 : arrayInfo->IsNativeFloatArray() ? IRType::TyFloat64 : IRType::TyVar : IRType::TyVar);
    IR::LabelInstr * arrayInitDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    bool isNativeArray = arrayInfo && (arrayInfo->IsNativeIntArray() || arrayInfo->IsNativeFloatArray());
    if (arrayInfo && arrayInfo->IsNativeIntArray())
    {
        sizeOfElement = sizeof(int32);
        // Bail to the helper if the call site is no longer producing native int arrays.
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
    }
    else if (arrayInfo && arrayInfo->IsNativeFloatArray())
    {
        sizeOfElement = sizeof(double);
        // Bail to the helper if the call site has been demoted from float array.
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
    }
    else
    {
        sizeOfElement = sizeof(Js::Var);
    }
    // Untag the length (goes to the helper if it is not a tagged int).
    lengthOpnd = GenerateUntagVar(lengthOpnd->AsRegOpnd(), helperLabel, instr);
    // Lengths above 8 take the helper path. NOTE(review): presumably 8 matches
    // the largest entry in ArrayType::allocationBuckets — confirm against the
    // bucket tables if those ever change.
    IR::Opnd* upperBound = IR::IntConstOpnd::New(8, TyUint8, func, true);
    InsertCompare(lengthOpnd, upperBound, instr);
    // Unsigned compare also rejects negative (sign-bit-set) lengths.
    InsertBranch(Js::OpCode::BrGt_A, true /* isUnsigned */, helperLabel, instr);
    headOpnd = GenerateArrayAlloc<ArrayType>(instr, lengthOpnd, arrayInfo);
    if (isNativeArray)
    {
        // The call-site index is stored right after the array flags.
        Assert(ArrayType::GetOffsetOfArrayFlags() + sizeof(uint16) == offsetOfCallSiteIndex);
        Assert(offsetOfWeakFuncRef > 0);
        GenerateMemInit(dstOpnd, offsetOfCallSiteIndex, IR::IntConstOpnd::New(profileId, TyUint16, func, true), instr, true /* isZeroed */);
        GenerateMemInit(dstOpnd, offsetOfWeakFuncRef, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, true /* isZeroed */);
    }
    uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
    uint missingItemCount = 0;
    uint missingItemInitializedSoFar = 0;
    uint missingItemIndex = 0;
    uint maxAllocationSize = allocationBuckets[allocationBucketsCount - 1][Js::JavascriptArray::AllocationSizeIndex];
    for (uint8 i = 0; i < allocationBucketsCount; i++)
    {
        missingItemCount = allocationBuckets[i][Js::JavascriptArray::MissingElementsCountIndex];
        if (i > 0)
        {
            // Reduce missingItemCount we have already set so far
            missingItemCount -= missingItemInitializedSoFar;
        }
        // Generate array initialization with MissingItem
        for (uint j = 0; j < missingItemCount; j++)
        {
            // Ensure we don't write missingItems past allocation size
            Assert(offsetStart + missingItemIndex * sizeOfElement <= maxAllocationSize);
            GenerateMemInit(headOpnd, offsetStart + missingItemIndex * sizeOfElement, GetMissingItemOpndForAssignment(missingItemType, func), instr, true /*isZeroed*/);
            missingItemIndex++;
        }
        // CMP arrayLen, currentBucket
        // JG $checkNextBucket
        // (Once the array fits the current bucket no more sentinels are needed;
        // the last bucket needs no check since everything has been written.)
        if (i != (allocationBucketsCount - 1))
        {
            Lowerer::InsertCompare(lengthOpnd, IR::IntConstOpnd::New(allocationBuckets[i][Js::JavascriptArray::AllocationBucketIndex], TyUint32, func), instr);
            Lowerer::InsertBranch(Js::OpCode::BrLe_A, arrayInitDone, instr);
        }
        missingItemInitializedSoFar += missingItemCount;
    }
    // Ensure no. of missingItems written are same
    Assert(missingItemIndex == missingItemInitializedSoFar);
    // Ensure no. of missingItems match what present in allocationBuckets
    Assert(missingItemIndex == allocationBuckets[allocationBucketsCount - 1][Js::JavascriptArray::MissingElementsCountIndex]);
    instr->InsertBefore(arrayInitDone);
    Lowerer::InsertBranch(Js::OpCode::Br, labelDone, instr);
    instr->InsertBefore(helperLabel);
    return true;
}
  3651. void
  3652. Lowerer::GenerateProfiledNewScIntArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef)
  3653. {
  3654. // Helper will deal with ForceES5ARray
  3655. if (PHASE_OFF(Js::ArrayLiteralFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
  3656. {
  3657. return;
  3658. }
  3659. if (!arrayInfo->IsNativeIntArray())
  3660. {
  3661. return;
  3662. }
  3663. if (instr->GetSrc1()->AsAddrOpnd()->GetAddrOpndKind() != IR::AddrOpndKindDynamicAuxBufferRef)
  3664. {
  3665. return;
  3666. }
  3667. Func * func = this->m_func;
  3668. IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  3669. GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
  3670. IR::AddrOpnd * elementsOpnd = instr->GetSrc1()->AsAddrOpnd();
  3671. Js::AuxArray<int32> * ints = (Js::AuxArray<int32> *)elementsOpnd->m_metadata;
  3672. uint32 size = ints->count;
  3673. // Generate code as in JavascriptArray::NewLiteral
  3674. bool isHeadSegmentZeroed;
  3675. IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
  3676. Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
  3677. IR::RegOpnd * headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isHeadSegmentZeroed);
  3678. const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
  3679. GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicMisc, m_func), instr, isHeadSegmentZeroed);
  3680. // Initialize the elements
  3681. uint i = 0;
  3682. if (ints->count > 16)
  3683. {
  3684. // Do memcpy if > 16
  3685. IR::RegOpnd * dstElementsOpnd = IR::RegOpnd::New(TyMachPtr, func);
  3686. const IR::AutoReuseOpnd autoReuseDstElementsOpnd(dstElementsOpnd, func);
  3687. IR::Opnd * srcOpnd = IR::AddrOpnd::New((intptr_t)elementsOpnd->m_address + Js::AuxArray<int32>::OffsetOfElements(), IR::AddrOpndKindDynamicMisc, func);
  3688. InsertLea(dstElementsOpnd, IR::IndirOpnd::New(headOpnd, sizeof(Js::SparseArraySegmentBase), TyMachPtr, func), instr);
  3689. GenerateMemCopy(dstElementsOpnd, srcOpnd, ints->count * sizeof(int32), instr);
  3690. i = ints->count;
  3691. }
  3692. else
  3693. {
  3694. for (; i < ints->count; i++)
  3695. {
  3696. GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
  3697. ints->elements[i], instr, isHeadSegmentZeroed);
  3698. }
  3699. }
  3700. Assert(i == ints->count);
  3701. for (; i < size; i++)
  3702. {
  3703. GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
  3704. Js::JavascriptNativeIntArray::MissingItem, instr, isHeadSegmentZeroed);
  3705. }
  3706. // Skip pass the helper call
  3707. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  3708. InsertBranch(Js::OpCode::Br, doneLabel, instr);
  3709. instr->InsertBefore(helperLabel);
  3710. instr->InsertAfter(doneLabel);
  3711. }
  3712. void
  3713. Lowerer::GenerateProfiledNewScFloatArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef)
  3714. {
  3715. if (PHASE_OFF(Js::ArrayLiteralFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
  3716. {
  3717. return;
  3718. }
  3719. if (!arrayInfo->IsNativeFloatArray())
  3720. {
  3721. return;
  3722. }
  3723. if (instr->GetSrc1()->AsAddrOpnd()->GetAddrOpndKind() != IR::AddrOpndKindDynamicAuxBufferRef)
  3724. {
  3725. return;
  3726. }
  3727. Func * func = this->m_func;
  3728. IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  3729. // If the array info hasn't mark as not int array yet, go to the helper and mark it.
  3730. // It really is just for assert purpose in JavascriptNativeFloatArray::ToVarArray
  3731. GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
  3732. IR::AddrOpnd * elementsOpnd = instr->GetSrc1()->AsAddrOpnd();
  3733. Js::AuxArray<double> * doubles = (Js::AuxArray<double> *)elementsOpnd->m_metadata;
  3734. uint32 size = doubles->count;
  3735. // Generate code as in JavascriptArray::NewLiteral
  3736. bool isHeadSegmentZeroed;
  3737. IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
  3738. Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
  3739. IR::RegOpnd * headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isHeadSegmentZeroed);
  3740. const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
  3741. GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isHeadSegmentZeroed);
  3742. // Initialize the elements
  3743. IR::RegOpnd * dstElementsOpnd = IR::RegOpnd::New(TyMachPtr, func);
  3744. const IR::AutoReuseOpnd autoReuseDstElementsOpnd(dstElementsOpnd, func);
  3745. IR::Opnd * srcOpnd = IR::AddrOpnd::New((intptr_t)elementsOpnd->m_address + Js::AuxArray<double>::OffsetOfElements(), IR::AddrOpndKindDynamicMisc, func);
  3746. InsertLea(dstElementsOpnd, IR::IndirOpnd::New(headOpnd, sizeof(Js::SparseArraySegmentBase), TyMachPtr, func), instr);
  3747. GenerateMemCopy(dstElementsOpnd, srcOpnd, doubles->count * sizeof(double), instr);
  3748. // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
  3749. uint const offsetStart = sizeof(Js::SparseArraySegmentBase) + doubles->count * sizeof(double);
  3750. uint const missingItem = (size - doubles->count);
  3751. for (uint i = 0; i < missingItem; i++)
  3752. {
  3753. GenerateMemInit(headOpnd, offsetStart + i * sizeof(double),
  3754. GetMissingItemOpndForAssignment(TyFloat64, m_func), instr, isHeadSegmentZeroed);
  3755. }
  3756. // Skip pass the helper call
  3757. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  3758. InsertBranch(Js::OpCode::Br, doneLabel, instr);
  3759. instr->InsertBefore(helperLabel);
  3760. instr->InsertAfter(doneLabel);
  3761. }
// Lowers NewScIntArray to a call to the int-array-literal helper. When dynamic
// profile info (and a weak function body ref) is available, first emits the
// inline fast path and switches to the profiled helper, which takes the
// call-site info and weak func ref as extra arguments.
// Returns the instruction preceding the lowered region.
IR::Instr *
Lowerer::LowerNewScIntArray(IR::Instr *arrInstr)
{
    IR::Instr *instrPrev = arrInstr->m_prev;
    IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScIntArray;
    if ((arrInstr->IsJitProfilingInstr() || arrInstr->IsProfiledInstr()) && arrInstr->m_func->HasProfileInfo())
    {
        intptr_t weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
        if (weakFuncRef)
        {
            // Technically a load of the same memory address either way.
            Js::ProfileId profileId =
                arrInstr->IsJitProfilingInstr()
                ? arrInstr->AsJitProfilingInstr()->profileId
                : static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
            Js::ArrayCallSiteInfo *arrayInfo = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
            intptr_t arrayInfoAddr = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
            // Only do fast-path if it isn't a JitProfiling instr and not copy-on-access array
            // NOTE(review): arrayInfo is dereferenced in the copy-on-access check
            // below (and by the fast-path generator) without a null check —
            // presumably GetArrayCallSiteInfo cannot return null here, unlike at
            // the NewScObjArray call sites which do guard for null; confirm.
            if (arrInstr->IsProfiledInstr()
#if ENABLE_COPYONACCESS_ARRAY
                && (PHASE_OFF1(Js::Phase::CopyOnAccessArrayPhase) || arrayInfo->isNotCopyOnAccessArray) && !PHASE_FORCE1(Js::Phase::CopyOnAccessArrayPhase)
#endif
                )
            {
                GenerateProfiledNewScIntArrayFastPath(arrInstr, arrayInfo, arrayInfoAddr, weakFuncRef);
            }
            // The profiled helper takes the weak func ref and call-site info as
            // extra trailing arguments.
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
            helperMethod = IR::HelperScrArr_ProfiledNewScIntArray;
        }
    }
    LoadScriptContext(arrInstr);
    IR::Opnd *elementsOpnd = arrInstr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(arrInstr, elementsOpnd);
    m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
    return instrPrev;
}
  3799. IR::Instr *
  3800. Lowerer::LowerNewScFltArray(IR::Instr *arrInstr)
  3801. {
  3802. IR::Instr *instrPrev = arrInstr->m_prev;
  3803. IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScFltArray;
  3804. if ((arrInstr->IsJitProfilingInstr() || arrInstr->IsProfiledInstr()) && arrInstr->m_func->HasProfileInfo())
  3805. {
  3806. intptr_t weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
  3807. if (weakFuncRef)
  3808. {
  3809. Js::ProfileId profileId =
  3810. arrInstr->IsJitProfilingInstr()
  3811. ? arrInstr->AsJitProfilingInstr()->profileId
  3812. : static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
  3813. Js::ArrayCallSiteInfo *arrayInfo = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
  3814. intptr_t arrayInfoAddr = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
  3815. // Only do fast-path if it isn't a JitProfiling instr
  3816. if (arrInstr->IsProfiledInstr()) {
  3817. GenerateProfiledNewScFloatArrayFastPath(arrInstr, arrayInfo, arrayInfoAddr, weakFuncRef);
  3818. }
  3819. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
  3820. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
  3821. helperMethod = IR::HelperScrArr_ProfiledNewScFltArray;
  3822. }
  3823. }
  3824. LoadScriptContext(arrInstr);
  3825. IR::Opnd *elementsOpnd = arrInstr->UnlinkSrc1();
  3826. m_lowererMD.LoadHelperArgument(arrInstr, elementsOpnd);
  3827. m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
  3828. return instrPrev;
  3829. }
  3830. IR::Instr *
  3831. Lowerer::LowerArraySegmentVars(IR::Instr *arrayInstr)
  3832. {
  3833. IR::Instr * instrPrev;
  3834. IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperArraySegmentVars, m_func);
  3835. instrPrev = m_lowererMD.LoadHelperArgument(arrayInstr, arrayInstr->UnlinkSrc2());
  3836. m_lowererMD.LoadHelperArgument(arrayInstr, arrayInstr->UnlinkSrc1());
  3837. arrayInstr->m_opcode = Js::OpCode::Call;
  3838. arrayInstr->SetSrc1(opndHelper);
  3839. m_lowererMD.LowerCall(arrayInstr, 0);
  3840. return instrPrev;
  3841. }
  3842. IR::Instr* Lowerer::LowerProfiledNewArray(IR::JitProfilingInstr* instr, bool hasArgs)
  3843. {
  3844. // Use the special helper which checks whether Array has been overwritten by the user and if
  3845. // it hasn't, possibly allocates a native array
  3846. // Insert a temporary label before the instruction we're about to lower, so that we can return
  3847. // the first instruction above that needs to be lowered after we're done - regardless of argument
  3848. // list, StartCall, etc.
  3849. IR::Instr* startMarkerInstr = InsertLoweredRegionStartMarker(instr);
  3850. Assert(instr->isNewArray);
  3851. Assert(instr->arrayProfileId != Js::Constants::NoProfileId);
  3852. Assert(instr->profileId != Js::Constants::NoProfileId);
  3853. bool isSpreadCall = instr->m_opcode == Js::OpCode::NewScObjectSpread || instr->m_opcode == Js::OpCode::NewScObjArraySpread;
  3854. m_lowererMD.LoadNewScObjFirstArg(instr, IR::AddrOpnd::New(nullptr, IR::AddrOpndKindConstantVar, m_func, true), isSpreadCall ? 1 : 0);
  3855. if (isSpreadCall)
  3856. {
  3857. this->LowerSpreadCall(instr, Js::CallFlags_New, true);
  3858. }
  3859. else
  3860. {
  3861. const int32 argCount = m_lowererMD.LowerCallArgs(instr, Js::CallFlags_New, 4);
  3862. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->arrayProfileId, m_func));
  3863. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
  3864. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
  3865. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  3866. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledNewScObjArray, m_func));
  3867. m_lowererMD.LowerCall(instr, static_cast<Js::ArgSlot>(argCount));
  3868. }
  3869. return RemoveLoweredRegionStartMarker(startMarkerInstr);
  3870. }
  3871. ///----------------------------------------------------------------------------
  3872. ///
  3873. /// Lowerer::LowerNewScObject
  3874. ///
/// Machine independent lowering of a NewScObject instr: default object allocation plus constructor call.
  3876. ///
  3877. ///----------------------------------------------------------------------------
// Lowers NewScObject (and its Spread/NoCtor variants) in the machine-
// independent phase:
//  - newObjInstr: the construction instr; src1 is the constructor operand.
//  - callCtor: whether this instr still includes the constructor call (vs. a
//    NoCtor form that only allocates the default object).
//  - hasArgs: whether an ArgOut chain is attached.
//  - isBaseClassConstructorNewScObject: selects the *Full no-ctor helpers.
// Tries the fixed-constructor-cache fast path first; otherwise emits a bailout
// or a NewScObject* helper call for the default object, then (optionally) the
// constructor call and the logic selecting the final result (default object
// vs. constructor return value).
// Returns the first instruction above the lowered region.
IR::Instr *
Lowerer::LowerNewScObject(IR::Instr *newObjInstr, bool callCtor, bool hasArgs, bool isBaseClassConstructorNewScObject)
{
    // Profiled new-array construction is lowered through its own helper path.
    if (newObjInstr->IsJitProfilingInstr() && newObjInstr->AsJitProfilingInstr()->isNewArray)
    {
        Assert(callCtor);
        return LowerProfiledNewArray(newObjInstr->AsJitProfilingInstr(), hasArgs);
    }

    bool isSpreadCall = newObjInstr->m_opcode == Js::OpCode::NewScObjectSpread ||
        newObjInstr->m_opcode == Js::OpCode::NewScObjArraySpread;

    Func* func = newObjInstr->m_func;

    // Insert a temporary label before the instruction we're about to lower, so that we can return
    // the first instruction above that needs to be lowered after we're done - regardless of argument
    // list, StartCall, etc.
    IR::Instr* startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

    IR::Opnd *ctorOpnd = newObjInstr->GetSrc1();
    IR::RegOpnd *newObjDst = newObjInstr->GetDst()->AsRegOpnd();

    Assert(!callCtor || !hasArgs || (newObjInstr->GetSrc2() != nullptr /*&& newObjInstr->GetSrc2()->IsSymOpnd()*/));

    // Outputs of the fixed-ctor-cache analysis below.
    bool skipNewScObj = false;
    bool returnNewScObj = false;
    bool emitBailOut = false;
    // If we haven't yet split NewScObject into NewScObjectNoCtor and CallI, we will need a temporary register
    // to hold the result of the object allocation.
    IR::RegOpnd* createObjDst = callCtor ? IR::RegOpnd::New(TyVar, func) : newObjDst;
    IR::LabelInstr* helperOrBailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, /* isOpHelper = */ true);
    IR::LabelInstr* callCtorLabel = IR::LabelInstr::New(Js::OpCode::Label, func, /* isOpHelper = */ false);

    // Try to emit the fast allocation and construction path.
    bool usedFixedCtorCache = TryLowerNewScObjectWithFixedCtorCache(newObjInstr, createObjDst, helperOrBailoutLabel, callCtorLabel, skipNewScObj, returnNewScObj, emitBailOut);

    // Sanity-check the mutually-exclusive combinations the fast path may produce.
    AssertMsg(!skipNewScObj || callCtor, "What will we return if we skip the default new object and don't call the ctor?");
    Assert(!skipNewScObj || !returnNewScObj);
    Assert(usedFixedCtorCache || !skipNewScObj);
    Assert(!usedFixedCtorCache || newObjInstr->HasFixedFunctionAddressTarget());
    Assert(!skipNewScObj || !emitBailOut);

#if DBG && 0 // TODO: OOP JIT, enable assert
    if (usedFixedCtorCache)
    {
        Js::JavascriptFunction* ctor = newObjInstr->GetFixedFunction();
        Js::FunctionInfo* ctorInfo = ctor->GetFunctionInfo();
        Assert((ctorInfo->GetAttributes() & Js::FunctionInfo::Attributes::ErrorOnNew) == 0);
        Assert(!!(ctorInfo->GetAttributes() & Js::FunctionInfo::Attributes::SkipDefaultNewObject) == skipNewScObj);
    }
#endif

    // An empty ArgOut chain counts as "no args" for helper selection below.
    IR::Instr* startCallInstr = nullptr;
    if (callCtor && hasArgs)
    {
        hasArgs = !newObjInstr->HasEmptyArgOutChain(&startCallInstr);
    }

    // If we're not skipping the default new object, let's emit bailout or a call to NewScObject* helper
    IR::JnHelperMethod newScHelper = IR::HelperInvalid;
    IR::Instr *newScObjCall = nullptr;
    if (!skipNewScObj)
    {
        // If we emitted the fast path, this block is a helper block.
        if (usedFixedCtorCache)
        {
            newObjInstr->InsertBefore(helperOrBailoutLabel);
        }

        if (emitBailOut)
        {
            // Replace the construction with a BailOut, moving the original
            // instr's payload onto a fresh instr that follows it.
            IR::Instr* bailOutInstr = newObjInstr;
            newObjInstr = IR::Instr::New(newObjInstr->m_opcode, func);
            bailOutInstr->TransferTo(newObjInstr);
            bailOutInstr->m_opcode = Js::OpCode::BailOut;
            bailOutInstr->InsertAfter(newObjInstr);
            GenerateBailOut(bailOutInstr);
        }
        else
        {
            Assert(!newObjDst->CanStoreTemp());
            // createObjDst = NewScObject...(ctorOpnd)
            newScHelper = !callCtor ?
                (isBaseClassConstructorNewScObject ?
                    (hasArgs ? IR::HelperNewScObjectNoCtorFull : IR::HelperNewScObjectNoArgNoCtorFull) :
                    (hasArgs ? IR::HelperNewScObjectNoCtor : IR::HelperNewScObjectNoArgNoCtor)) :
                (hasArgs || usedFixedCtorCache ? IR::HelperNewScObjectNoCtor : IR::HelperNewScObjectNoArg);

            LoadScriptContext(newObjInstr);
            m_lowererMD.LoadHelperArgument(newObjInstr, newObjInstr->GetSrc1());

            newScObjCall = IR::Instr::New(Js::OpCode::Call, createObjDst, IR::HelperCallOpnd::New(newScHelper, func), func);
            newObjInstr->InsertBefore(newScObjCall);
            m_lowererMD.LowerCall(newScObjCall, 0);
        }
    }

    // If we call HelperNewScObjectNoArg directly, we won't be calling the constructor from here, because the helper will do it.
    // We could probably avoid this complexity by converting NewScObjectNoArg to NewScObject in the IRBuilder, once we have dedicated
    // code paths for new Object() and new Array().
    callCtor &= hasArgs || usedFixedCtorCache;
    AssertMsg(!skipNewScObj || callCtor, "What will we return if we skip the default new object and don't call the ctor?");

    newObjInstr->InsertBefore(callCtorLabel);

    if (callCtor && usedFixedCtorCache)
    {
        IR::JnHelperMethod ctorHelper = IR::JnHelperMethodCount;

        // If we have no arguments (i.e. the argument chain is empty), we can recognize a couple of common special cases, such
        // as new Object() or new Array(), for which we have optimized helpers.
        FixedFieldInfo* ctor = newObjInstr->GetFixedFunction();
        intptr_t ctorInfo = ctor->GetFuncInfoAddr();
        if (!hasArgs && (ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptObjectNewInstanceAddr() || ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptArrayNewInstanceAddr()))
        {
            if (ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptObjectNewInstanceAddr())
            {
                Assert(skipNewScObj);
                ctorHelper = IR::HelperNewJavascriptObjectNoArg;
                callCtor = false;
            }
            else if (ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptArrayNewInstanceAddr())
            {
                Assert(skipNewScObj);
                ctorHelper = IR::HelperNewJavascriptArrayNoArg;
                callCtor = false;
            }

            // Replace the generic construction with the dedicated no-arg helper.
            if (!callCtor)
            {
                LoadScriptContext(newObjInstr);

                IR::Instr *ctorCall = IR::Instr::New(Js::OpCode::Call, newObjDst, IR::HelperCallOpnd::New(ctorHelper, func), func);
                newObjInstr->InsertBefore(ctorCall);
                m_lowererMD.LowerCall(ctorCall, 0);
            }
        }
    }

    IR::AutoReuseOpnd autoReuseSavedCtorOpnd;
    if (callCtor)
    {
        // Load the first argument, which is either the object just created or null. Spread has an extra argument.
        IR::Instr * argInstr = this->m_lowererMD.LoadNewScObjFirstArg(newObjInstr, createObjDst, isSpreadCall ? 1 : 0);

        IR::Instr * insertAfterCtorInstr = newObjInstr->m_next;

        if (skipNewScObj)
        {
            // Since we skipped the default new object, we must be returning whatever the constructor returns
            // (which better be an Object), so let's just use newObjDst directly.
            // newObjDst = newObjInstr->m_src1(createObjDst, ...)
            Assert(newObjInstr->GetDst() == newObjDst);
            if (isSpreadCall)
            {
                newObjInstr = this->LowerSpreadCall(newObjInstr, Js::CallFlags_New);
            }
            else
            {
                newObjInstr = this->m_lowererMD.LowerCallI(newObjInstr, Js::CallFlags_New, false, argInstr);
            }
        }
        else
        {
            // We may need to return the default new object or whatever the constructor returns. Let's stash
            // away the constructor's return in a temporary operand, and do the right check, if necessary.
            // ctorResultObjOpnd = newObjInstr->m_src1(createObjDst, ...)
            IR::RegOpnd *ctorResultObjOpnd = IR::RegOpnd::New(TyVar, func);
            newObjInstr->UnlinkDst();
            newObjInstr->SetDst(ctorResultObjOpnd);

            if (isSpreadCall)
            {
                newObjInstr = this->LowerSpreadCall(newObjInstr, Js::CallFlags_New);
            }
            else
            {
                newObjInstr = this->m_lowererMD.LowerCallI(newObjInstr, Js::CallFlags_New, false, argInstr);
            }

            if (returnNewScObj)
            {
                // The fast path proved the ctor's return can be ignored:
                // MOV newObjDst, createObjDst
                this->InsertMove(newObjDst, createObjDst, insertAfterCtorInstr);
            }
            else
            {
                // Pick the ctor's return if it is an Object, else the default new object.
                LowerGetNewScObjectCommon(ctorResultObjOpnd, ctorResultObjOpnd, createObjDst, insertAfterCtorInstr);
                this->InsertMove(newObjDst, ctorResultObjOpnd, insertAfterCtorInstr);
            }
        }

        // We don't ever need to update the constructor cache, if we hard coded it. Caches requiring update after constructor
        // don't get cloned, and those that don't require update will never need one anymore.
        if (!usedFixedCtorCache)
        {
            LowerUpdateNewScObjectCache(insertAfterCtorInstr, newObjDst, ctorOpnd, false /* isCtorFunction */);
        }
    }
    else
    {
        if (newObjInstr->IsJitProfilingInstr())
        {
            Assert(m_func->IsSimpleJit());
            Assert(!CONFIG_FLAG(NewSimpleJit));

            // This path skipped calling the Ctor, which skips calling LowerCallI with newObjInstr, meaning that the call will not be profiled.
            // So we insert it manually here.

            if(newScHelper == IR::HelperNewScObjectNoArg &&
                newObjDst &&
                ctorOpnd->IsRegOpnd() &&
                newObjDst->AsRegOpnd()->m_sym == ctorOpnd->AsRegOpnd()->m_sym)
            {
                Assert(newObjInstr->m_func->IsSimpleJit());
                Assert(createObjDst != newObjDst);

                // The function object sym is going to be overwritten, so save it in a temp for profiling
                IR::RegOpnd *const savedCtorOpnd = IR::RegOpnd::New(ctorOpnd->GetType(), newObjInstr->m_func);
                autoReuseSavedCtorOpnd.Initialize(savedCtorOpnd, newObjInstr->m_func);
                Lowerer::InsertMove(savedCtorOpnd, ctorOpnd, newObjInstr);
                ctorOpnd = savedCtorOpnd;
            }

            // It is a constructor (CallFlags_New) and therefore a single argument (this) would have been given.
            const auto info = Lowerer::MakeCallInfoConst(Js::CallFlags_New, 1, func);

            Assert(newScObjCall);
            IR::JitProfilingInstr *const newObjJitProfilingInstr = newObjInstr->AsJitProfilingInstr();
            GenerateCallProfiling(
                newObjJitProfilingInstr->profileId,
                newObjJitProfilingInstr->inlineCacheIndex,
                createObjDst,
                ctorOpnd,
                info,
                false,
                newScObjCall,
                newObjInstr);
        }

        // MOV newObjDst, createObjDst
        if (!skipNewScObj && createObjDst != newObjDst)
        {
            this->InsertMove(newObjDst, createObjDst, newObjInstr);
        }
        // The original construction instr has been fully replaced by the
        // emitted sequence; drop it.
        newObjInstr->Remove();
    }

    // Return the first instruction above the region we've just lowered.
    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
IR::Instr*
Lowerer::GenerateCallProfiling(Js::ProfileId profileId, Js::InlineCacheIndex inlineCacheIndex, IR::Opnd* retval, IR::Opnd*calleeFunctionObjOpnd, IR::Opnd* callInfo, bool returnTypeOnly, IR::Instr*callInstr,IR::Instr*insertAfter)
{
    // Emits a profiling helper call immediately after a call that was just lowered,
    // recording (depending on the flags) the return value, the callee function object,
    // and the call info for SimpleJit dynamic profiling.
    //
    // profileId            - profile slot for this call site.
    // inlineCacheIndex     - inline cache slot; NoInlineCacheIndex selects the default-index helper.
    // retval               - opnd holding the call's return value; may be null (see below).
    // calleeFunctionObjOpnd- the function object that was called.
    // callInfo             - the Js::CallInfo used for the call.
    // returnTypeOnly       - profile only the return type, not the full call.
    // callInstr            - the already-lowered machine call instruction (for validation).
    // insertAfter          - instruction after which the profiling sequence is inserted.
    // Returns the lowered profiling call instruction (or insertAfter if nothing was emitted).

    // This should only ever happen in profiling simplejit
    Assert(m_func->DoSimpleJitDynamicProfile());

    // Make sure they gave us the correct call instruction: it must already be the
    // machine-level call opcode for the target architecture.
#if defined(_M_IX86) || defined(_M_X64)
    Assert(callInstr->m_opcode == Js::OpCode::CALL);
#elif defined(_M_ARM)
    Assert(callInstr->m_opcode == Js::OpCode::BLX);
#elif defined(_M_ARM64)
    Assert(callInstr->m_opcode == Js::OpCode::BLR);
#endif
    Func*const func = insertAfter->m_func;

    {
        // First, we should save the implicit call flags
        const auto starFlag = GetImplicitCallFlagsOpnd();
        const auto saveOpnd = IR::RegOpnd::New(starFlag->GetType(), func);
        IR::AutoReuseOpnd a(starFlag, func), b(saveOpnd, func);

        // Save the flags (before call) and restore them (after the call), so the
        // profiling helper's own implicit calls don't pollute the recorded state.
        this->InsertMove(saveOpnd, starFlag, callInstr);

        // Note: On arm this is slightly inefficient because it forces a reload of the memory location to a reg (whereas x86 can load straight from hard-coded memory into a reg)
        //    But it works and making it not reload the memory location would force more refactoring.
        this->InsertMove(starFlag, saveOpnd, insertAfter->m_next);
    }

    // Profile a call that just happened: push some extra info on the stack and call the helper
    if (!retval)
    {
        if (returnTypeOnly)
        {
            // If we are only supposed to profile the return type but don't use the return value, we might
            //    as well do nothing!
            return insertAfter;
        }
        // Full-call profiling still proceeds with a null (Var) return value.
        retval = IR::AddrOpnd::NewNull(func);
    }

    IR::Instr* profileCall = IR::Instr::New(Js::OpCode::Call, func);

    // Select the helper variant; only the general profile-call helper takes an
    // explicit inline cache index argument.
    bool needInlineCacheIndex;
    IR::JnHelperMethod helperMethod;
    if (returnTypeOnly)
    {
        needInlineCacheIndex = false;
        helperMethod = IR::HelperSimpleProfileReturnTypeCall;
    }
    else if(inlineCacheIndex == Js::Constants::NoInlineCacheIndex)
    {
        needInlineCacheIndex = false;
        helperMethod = IR::HelperSimpleProfileCall_DefaultInlineCacheIndex;
    }
    else
    {
        needInlineCacheIndex = true;
        helperMethod = IR::HelperSimpleProfileCall;
    }
    profileCall->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));

    insertAfter->InsertAfter(profileCall);

    // Arguments are loaded in reverse of the helper's parameter order.
    m_lowererMD.LoadHelperArgument(profileCall, callInfo);
    m_lowererMD.LoadHelperArgument(profileCall, calleeFunctionObjOpnd);
    m_lowererMD.LoadHelperArgument(profileCall, retval);
    if(needInlineCacheIndex)
    {
        m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateInlineCacheIndexOpnd(inlineCacheIndex, func));
    }
    m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateProfileIdOpnd(profileId, func));

    // Push the frame pointer so that the profiling call can grab the stack layout
    m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateFramePointerOpnd(func));

    // No args: the helper is stdcall
    return m_lowererMD.LowerCall(profileCall, 0);
}
// Attempts to lower a NewScObject-style instruction using a fixed (hard-coded)
// constructor cache, emitting an inlined object allocation guarded by the cache's
// runtime guard value.
//
// newObjInstr          - the NewScObject instruction being lowered.
// newObjDst            - destination register for the new object.
// helperOrBailoutLabel - branch target when the cache guard has been invalidated.
// callCtorLabel        - branch target that proceeds to call the constructor.
// skipNewScObj  [out]  - true if a built-in ctor makes the default object unnecessary.
// returnNewScObj [out] - true if the created object can be returned directly
//                        (ctor has no explicit return value).
// emitBailOut   [out]  - set true when the instruction carries a ctor-guard bailout.
//
// Returns true if the fixed-cache path was emitted; false to fall back to the
// generic lowering.
bool Lowerer::TryLowerNewScObjectWithFixedCtorCache(IR::Instr* newObjInstr, IR::RegOpnd* newObjDst,
    IR::LabelInstr* helperOrBailoutLabel, IR::LabelInstr* callCtorLabel, bool& skipNewScObj, bool& returnNewScObj, bool& emitBailOut)
{
    skipNewScObj = false;
    returnNewScObj = false;

    AssertMsg(!PHASE_OFF(Js::ObjTypeSpecNewObjPhase, this->m_func) || !newObjInstr->HasBailOutInfo(),
        "Why do we have bailout on NewScObject when ObjTypeSpecNewObj is off?");

    // Both optimizations off => nothing to do here.
    if (PHASE_OFF(Js::FixedNewObjPhase, newObjInstr->m_func) && PHASE_OFF(Js::ObjTypeSpecNewObjPhase, this->m_func))
    {
        return false;
    }

    JITTimeConstructorCache * ctorCache;

    if (newObjInstr->HasBailOutInfo() && !newObjInstr->HasLazyBailOut())
    {
        // Instruction carries a ctor-guard-check bailout; the cache must exist and
        // have been validated upstream.
        Assert(newObjInstr->IsNewScObjectInstr());
        Assert(newObjInstr->IsProfiledInstr());
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutFailedCtorGuardCheck || newObjInstr->HasLazyBailOut());
        emitBailOut = true;

        ctorCache = newObjInstr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId));
        Assert(ctorCache != nullptr);
        Assert(!ctorCache->SkipNewScObject());
        Assert(!ctorCache->IsTypeFinal() || ctorCache->CtorHasNoExplicitReturnValue());

        // Register the cache as a guard for the property operations it protects.
        LinkCtorCacheToGuardedProperties(ctorCache);
    }
    else
    {
        if (newObjInstr->m_opcode == Js::OpCode::NewScObjArray || newObjInstr->m_opcode == Js::OpCode::NewScObjArraySpread)
        {
            // These instr's carry a profile that indexes the array call site info, not the ctor cache.
            return false;
        }

        ctorCache = newObjInstr->IsProfiledInstr() ? newObjInstr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId)) : nullptr;

        if (ctorCache == nullptr)
        {
            // No cloned ctor cache available => generic lowering (trace why, if enabled).
            if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(_u("FixedNewObj: function %s (%s): lowering non-fixed new script object for %s, because %s.\n"),
                    newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                    newObjInstr->IsProfiledInstr() ? _u("constructor cache hasn't been cloned") : _u("instruction is not profiled"));
                Output::Flush();
            }
            return false;
        }
    }

    Assert(ctorCache != nullptr);

    // We should only have cloned if the script contexts match.
    // TODO: oop jit, add ctorCache->scriptContext for tracing assert
    // Assert(newObjInstr->m_func->GetScriptContextInfo()->GetAddr() == ctorCache->scriptContext);

    // Built-in constructors don't need a default new object.  Since we know which constructor we're calling, we can skip creating a default
    // object and call a specialized helper (or even constructor, directly) avoiding the checks in generic NewScObjectCommon.
    if (ctorCache->SkipNewScObject())
    {
#if 0 // TODO: oop jit, add constructor info for tracing
        if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
        {
            const Js::JavascriptFunction* ctor = ctorCache->constructor;
            Js::FunctionBody* ctorBody = ctor->GetFunctionInfo()->HasBody() ? ctor->GetFunctionInfo()->GetFunctionBody() : nullptr;
            const char16* ctorName = ctorBody != nullptr ? ctorBody->GetDisplayName() : _u("<unknown>");

            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char16 debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            Output::Print(_u("FixedNewObj: function %s (%s): lowering skipped new script object for %s with %s ctor <unknown> (%s %s).\n"),
                newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? _u("inlined") : _u("called"),
                ctorName, ctorBody ? ctorBody->GetDebugNumberSet(debugStringBuffer) : _u("(null)"));
            Output::Flush();
        }
#endif
        // All built-in constructors share a special singleton cache that is never checked and never invalidated.  It cannot be used
        // as a guard to protect any property operations downstream from the constructor.  If this ever becomes a performance issue,
        // we could have a dedicated cache for each built-in constructor, populate it and invalidate it as any other constructor cache.
        AssertMsg(!emitBailOut, "Can't bail out on constructor cache guard for built-in constructors.");

        skipNewScObj = true;
        // Seed the destination with null; the constructor call itself produces the result.
        IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
        this->InsertMove(newObjDst, zeroOpnd, newObjInstr);
        return true;
    }

    AssertMsg(ctorCache->GetType() != nullptr, "Why did we hard-code a mismatched, invalidated or polymorphic constructor cache?");

#if 0 // TODO: oop jit, add constructor info for tracing
    if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
    {
        const Js::JavascriptFunction* constructor = ctorCache->constructor;
        Js::FunctionBody* constructorBody = constructor->GetFunctionInfo()->HasBody() ? constructor->GetFunctionInfo()->GetFunctionBody() : nullptr;
        const char16* constructorName = constructorBody != nullptr ? constructorBody->GetDisplayName() : _u("<unknown>");

        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        char16 debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
        {
            Output::Print(_u("FixedNewObj: function %s (%s): lowering fixed new script object for %s with %s ctor <unknown> (%s %s): type = %p, slots = %d, inlined slots = %d.\n"),
                newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? _u("inlined") : _u("called"),
                constructorName, constructorBody ? constructorBody->GetDebugNumberSet(debugStringBuffer) : _u("(null)"),
                ctorCache->type, ctorCache->slotCount, ctorCache->inlineSlotCount);
        }
        else
        {
            Output::Print(_u("FixedNewObj: function %s (%s): lowering fixed new script object for %s with %s ctor <unknown> (%s %s): slots = %d, inlined slots = %d.\n"),
                newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? _u("inlined") : _u("called"),
                constructorName, debugStringBuffer, ctorCache->slotCount, ctorCache->inlineSlotCount);
        }
        Output::Flush();
    }
#endif

    // If the constructor has no return statements, we can safely return the object that was created here.
    // No need to check what the constructor returned - it must be undefined.
    returnNewScObj = ctorCache->CtorHasNoExplicitReturnValue();

    // Guard check: a zeroed guard value means the cache was invalidated; go to the
    // helper/bailout path in that case.
    Assert(Js::ConstructorCache::GetSizeOfGuardValue() == static_cast<size_t>(TySize[TyMachPtr]));
    IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New(ctorCache->GetRuntimeCacheGuardAddr(), TyMachReg, this->m_func,
        IR::AddrOpndKindDynamicGuardValueRef);
    IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
    InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, helperOrBailoutLabel, newObjInstr);

    // If we are calling new on a class constructor, the contract is that we pass new.target as the 'this' argument.
    // function is the constructor on which we called new - which is new.target.
    FixedFieldInfo* ctor = newObjInstr->GetFixedFunction();
    if (ctor->IsClassCtor())
    {
        // MOV newObjDst, function
        this->InsertMove(newObjDst, newObjInstr->GetSrc1(), newObjInstr);
    }
    else
    {
        JITTypeHolder newObjectType(ctorCache->GetType());
        Assert(newObjectType->IsShared());

        IR::AddrOpnd* typeSrc = IR::AddrOpnd::New(newObjectType->GetAddr(), IR::AddrOpndKindDynamicType, m_func);

        // For the next call:
        //     inlineSlotSize == Number of slots to allocate beyond the DynamicObject header
        //     slotSize - inlineSlotSize == Number of aux slots to allocate
        int inlineSlotSize = ctorCache->GetInlineSlotCount();
        int slotSize = ctorCache->GetSlotCount();
        if (newObjectType->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler())
        {
            // Object-header-inlined: the first few slots live inside the header itself,
            // so they are excluded from the allocation size.
            Assert(inlineSlotSize >= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity());
            Assert(inlineSlotSize == slotSize);
            slotSize = inlineSlotSize -= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity();
        }
        GenerateDynamicObjectAlloc(newObjInstr, inlineSlotSize, slotSize, newObjDst, typeSrc);
    }

    // JMP $callCtor
    IR::BranchInstr *callCtorBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, callCtorLabel, m_func);
    newObjInstr->InsertBefore(callCtorBranch);
    return true;
}
// Emits a recycler allocation of allocSize bytes into newObjDst, inserted before
// insertionPointInstr.  For small objects (and when the JitAllocNewObj phase is on)
// a machine-dependent inline fast path is generated first, falling back to the
// allocHelper call; otherwise only the helper call is emitted.
// inOpHelper indicates whether the insertion point is already inside a helper
// block (controls the done-label's helper flag).
void
Lowerer::GenerateRecyclerAllocAligned(IR::JnHelperMethod allocHelper, size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, bool inOpHelper)
{
    IR::LabelInstr * allocDoneLabel = nullptr;

    if (!PHASE_OFF(Js::JitAllocNewObjPhase, insertionPointInstr->m_func) && HeapInfo::IsSmallObject(allocSize))
    {
        IR::LabelInstr * allocHelperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        allocDoneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, inOpHelper);

        // Inline bump allocation; jumps to allocHelperLabel when the fast path fails.
        this->m_lowererMD.GenerateFastRecyclerAlloc(allocSize, newObjDst, insertionPointInstr, allocHelperLabel, allocDoneLabel);

        // $allocHelper:
        insertionPointInstr->InsertBefore(allocHelperLabel);
    }

    // call JavascriptOperators::AllocMemForScObject(allocSize, scriptContext->GetRecycler())
    this->m_lowererMD.LoadHelperArgument(insertionPointInstr, this->LoadScriptContextValueOpnd(insertionPointInstr, ScriptContextValue::ScriptContextRecycler));
    this->m_lowererMD.LoadHelperArgument(insertionPointInstr, IR::IntConstOpnd::New((int32)allocSize, TyUint32, m_func, true));

    IR::Instr *newObjCall = IR::Instr::New(Js::OpCode::Call, newObjDst, IR::HelperCallOpnd::New(allocHelper, m_func), m_func);
    insertionPointInstr->InsertBefore(newObjCall);
    this->m_lowererMD.LowerCall(newObjCall, 0);

    if (allocDoneLabel != nullptr)
    {
        // $allocDone:
        insertionPointInstr->InsertBefore(allocDoneLabel);
    }
}
  4332. IR::Instr *
  4333. Lowerer::LowerGetNewScObject(IR::Instr *instr)
  4334. {
  4335. Assert(instr);
  4336. Assert(instr->m_opcode == Js::OpCode::GetNewScObject);
  4337. Assert(instr->GetDst());
  4338. Assert(instr->GetSrc1());
  4339. Assert(instr->GetSrc2());
  4340. const auto instrPrev = instr->m_prev;
  4341. Assert(instrPrev);
  4342. LowerGetNewScObjectCommon(
  4343. instr->GetDst()->AsRegOpnd(),
  4344. instr->GetSrc1()->AsRegOpnd(),
  4345. instr->GetSrc2()->AsRegOpnd(),
  4346. instr);
  4347. instr->Remove();
  4348. return instrPrev;
  4349. }
// Emits the code that decides which value a `new` expression produces: if the
// constructor returned a JS object, that object is the result; otherwise the
// pre-created 'this' object is the result.  Code is inserted before
// insertBeforeInstr; the result is written to resultObjOpnd.
void
Lowerer::LowerGetNewScObjectCommon(
    IR::RegOpnd *const resultObjOpnd,
    IR::RegOpnd *const constructorReturnOpnd,
    IR::RegOpnd *const newObjOpnd,
    IR::Instr *insertBeforeInstr)
{
    Assert(resultObjOpnd);
    Assert(constructorReturnOpnd);
    Assert(newObjOpnd);
    Assert(insertBeforeInstr);

    // (newObjOpnd == 'this' value passed to constructor)
    //
    // if (!IsJsObject(constructorReturnOpnd))
    //     goto notObjectLabel
    // newObjOpnd = constructorReturnOpnd
    // notObjectLabel:
    // resultObjOpnd = newObjOpnd
    if(!constructorReturnOpnd->IsEqual(newObjOpnd))
    {
        // Need to check whether the constructor returned an object
        IR::LabelInstr *notObjectLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        Assert(insertBeforeInstr->m_prev);
        IR::LabelInstr *const doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        insertBeforeInstr->InsertBefore(doneLabel);
        // Subsequent insertions go above the done label so the final move falls through to it.
        insertBeforeInstr = doneLabel;

#if defined(_M_ARM32_OR_ARM64)
        // ARM: no inline object test here; call the Op_IsObject helper and branch on its result.
        m_lowererMD.LoadHelperArgument(insertBeforeInstr, constructorReturnOpnd);
        IR::Opnd * targetOpnd = IR::RegOpnd::New(StackSym::New(TyInt32,m_func), TyInt32, m_func);
        IR::Instr * callIsObjectInstr = IR::Instr::New(Js::OpCode::Call, targetOpnd, m_func);
        insertBeforeInstr->InsertBefore(callIsObjectInstr);
        this->m_lowererMD.ChangeToHelperCall(callIsObjectInstr, IR::HelperOp_IsObject);
        InsertTestBranch( targetOpnd, targetOpnd, Js::OpCode::BrEq_A, notObjectLabel,insertBeforeInstr);
#else
        // x86/x64: inline type check that jumps to notObjectLabel for non-objects.
        m_lowererMD.GenerateIsJsObjectTest(constructorReturnOpnd, insertBeforeInstr, notObjectLabel);
#endif

        // Value returned by constructor is an object (use constructorReturnOpnd)
        if(!resultObjOpnd->IsEqual(constructorReturnOpnd))
        {
            this->InsertMove(resultObjOpnd, constructorReturnOpnd, insertBeforeInstr);
        }
        insertBeforeInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, doneLabel, m_func));

        // Value returned by constructor is not an object (use newObjOpnd)
        insertBeforeInstr->InsertBefore(notObjectLabel);
    }
    if(!resultObjOpnd->IsEqual(newObjOpnd))
    {
        this->InsertMove(resultObjOpnd, newObjOpnd, insertBeforeInstr);
    }

    // fall through to insertBeforeInstr or doneLabel
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerUpdateNewScObjectCache
///
/// Emits an inline-checked call to the UpdateNewScObjectCache helper after a
/// constructor call: if the callee (src1) is a JavascriptFunction whose
/// constructor cache has updateAfterCtor set, the helper is invoked with the
/// callee, the constructed object (dst), and the script context.
/// isCtorFunction lets the caller skip the TypeIds_Function check when the
/// callee is statically known to be a function.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerUpdateNewScObjectCache(IR::Instr * insertInstr, IR::Opnd *dst, IR::Opnd *src1, const bool isCtorFunction)
{
    // Emitted shape:
    //   if (!isCtorFunction)
    //   {
    //       MOV r1, [src1 + offset(type)]       -- check base TypeIds_Function
    //       CMP [r1 + offset(typeId)], TypeIds_Function
    //   }
    //   JNE $fallThru
    //   MOV r2, [src1 + offset(constructorCache)]
    //   MOV r3, [r2 + offset(updateAfterCtor)]
    //   TEST r3, r3                             -- check if updateAfterCtor is 0
    //   JEQ $fallThru
    //   CALL UpdateNewScObjectCache(src1, dst, scriptContext)
    //   $fallThru:
    IR::LabelInstr *labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    // Make sure the callee is in a register so it can be used as an indir base.
    src1 = GetRegOpnd(src1, insertInstr, m_func, TyMachReg);

    // Check if constructor is a function if we don't already know it.
    if (!isCtorFunction)
    {
        IR::RegOpnd* src1RegOpnd = src1->AsRegOpnd();
        // MOV r1, [src1 + offset(type)]       -- check base TypeIds_Function
        IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1RegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        Lowerer::InsertMove(r1, indirOpnd, insertInstr);

        // CMP [r1 + offset(typeId)], TypeIds_Function
        // JNE $fallThru
        indirOpnd = IR::IndirOpnd::New(r1, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
        IR::IntConstOpnd *intOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, this->m_func, true);
        IR::BranchInstr* branchInstr = InsertCompareBranch(indirOpnd, intOpnd, Js::OpCode::BrNeq_A, labelFallThru, insertInstr);
        // Mitigate speculative (Spectre-style) reads through src1 past the type check.
        InsertObjectPoison(src1RegOpnd, branchInstr, insertInstr, false);
    }

    // Every function has a constructor cache, even if only the default blank one.
    // r2 = MOV JavascriptFunction->constructorCache
    IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
    IR::IndirOpnd *opndIndir = IR::IndirOpnd::New(src1->AsRegOpnd(), Js::JavascriptFunction::GetOffsetOfConstructorCache(), TyMachReg, this->m_func);
    IR::Instr *instr = Lowerer::InsertMove(r2, opndIndir, insertInstr);

    // r3 = constructorCache->updateAfterCtor
    IR::RegOpnd *r3 = IR::RegOpnd::New(TyInt8, this->m_func);
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(r2, Js::ConstructorCache::GetOffsetOfUpdateAfterCtor(), TyUint8, this->m_func);
    instr = Lowerer::InsertMove(r3, indirOpnd, insertInstr);

    // TEST r3, r3                             -- check if updateAfterCtor is 0
    // JEQ $fallThru
    InsertTestBranch(r3, r3, Js::OpCode::BrEq_A, labelFallThru, insertInstr);

    // r2 = UpdateNewScObjectCache(src1, dst, scriptContext)
    insertInstr->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true)); // helper label for uncommon path
    IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperUpdateNewScObjectCache, m_func);
    // Helper args are pushed in reverse order: scriptContext, dst, src1.
    LoadScriptContext(insertInstr);
    m_lowererMD.LoadHelperArgument(insertInstr, dst);
    m_lowererMD.LoadHelperArgument(insertInstr, src1);
    instr = IR::Instr::New(Js::OpCode::Call, m_func);
    instr->SetSrc1(opndHelper);
    insertInstr->InsertBefore(instr);
    m_lowererMD.LowerCall(instr, 0);

    // $fallThru:
    insertInstr->InsertBefore(labelFallThru);
    return insertInstr;
}
// Lowers a NewScObjArray/NewScObjArraySpread instruction (new Array(...) with
// arguments).  For the single-likely-int-argument case a profiled fast path is
// emitted; otherwise (and as the fast path's fallback) the
// ScrArr_ProfiledNewInstance helper is called, followed by a post-call check that
// tags native arrays with their call-site index and weak function reference.
// Returns the first instruction above the lowered region.
IR::Instr *
Lowerer::LowerNewScObjArray(IR::Instr *newObjInstr)
{
    if (newObjInstr->HasEmptyArgOutChain())
    {
        // No actual arguments: delegate to the no-arg lowering.
        newObjInstr->FreeSrc2();
        return LowerNewScObjArrayNoArg(newObjInstr);
    }

    IR::Instr* startMarkerInstr = nullptr;

    IR::Opnd *targetOpnd = newObjInstr->GetSrc1();
    Func *func = newObjInstr->m_func;

    if (!targetOpnd->IsAddrOpnd())
    {
        // Call target is not a known (fixed) address.
        if (!newObjInstr->HasBailOutInfo() || newObjInstr->OnlyHasLazyBailOut())
        {
            // No native-array bailout attached: lower as a generic NewScObject.
            return this->LowerNewScObject(newObjInstr, true, true);
        }

        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

        // For whatever reason, we couldn't do a fixed function check on the call target.
        // Generate a runtime check on the target: bail out unless it is the Array constructor.
        Assert(
            newObjInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray ||
            newObjInstr->GetBailOutKind() == BailOutInfo::WithLazyBailOut(IR::BailOutOnNotNativeArray)
        );
        IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, func);
        InsertCompareBranch(
            targetOpnd,
            LoadLibraryValueOpnd(newObjInstr, LibraryValue::ValueArrayConstructor),
            Js::OpCode::BrEq_A,
            true,
            labelSkipBailOut,
            newObjInstr);

        // Clone the instruction (keeping its profile id) past the bailout, turn the
        // original into the BailOut, and continue lowering the clone.
        IR::ProfiledInstr *instrNew = IR::ProfiledInstr::New(newObjInstr->m_opcode, newObjInstr->UnlinkDst(), newObjInstr->UnlinkSrc1(), newObjInstr->UnlinkSrc2(), func);
        instrNew->u.profileId = newObjInstr->AsProfiledInstr()->u.profileId;
        newObjInstr->InsertAfter(instrNew);
        newObjInstr->m_opcode = Js::OpCode::BailOut;
        GenerateBailOut(newObjInstr);
        instrNew->InsertBefore(labelSkipBailOut);
        newObjInstr = instrNew;
    }
    else
    {
        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
    }

    intptr_t weakFuncRef = 0;
    Js::ArrayCallSiteInfo *arrayInfo = nullptr;
    intptr_t arrayInfoAddr = 0;
    Assert(newObjInstr->IsProfiledInstr());

    IR::RegOpnd *resultObjOpnd = newObjInstr->GetDst()->AsRegOpnd();
    IR::Instr * insertInstr = newObjInstr->m_next;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId);

    // We may not have profileId if we converted a NewScObject to NewScObjArray
    if (profileId != Js::Constants::NoProfileId)
    {
        arrayInfo = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
        arrayInfoAddr = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
        Assert(arrayInfo);
        weakFuncRef = func->GetWeakFuncRef();
        Assert(weakFuncRef);
    }

    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);

    // Walk the ArgOut chain (src2) to find the single explicit argument to Array().
    IR::Opnd *linkOpnd = newObjInstr->GetSrc2();
    Assert(linkOpnd->IsSymOpnd());
    StackSym *linkSym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
    Assert(linkSym->IsSingleDef());
    IR::Instr* argInstr = linkSym->GetInstrDef();
    IR::Opnd *opndOfArrayCtor = argInstr->GetSrc1();
    const uint16 upperBoundValue = 8;

    // Generate fast path only if it meets all the conditions:
    // 1. It is the only parameter and it is a likely int
    // 2a. If 1st parameter is a variable, emit fast path with checks
    // 2b. If 1st parameter is a constant, it is in range 0 and upperBoundValue (inclusive)
    if (opndOfArrayCtor->GetValueType().IsLikelyInt() && (opndOfArrayCtor->IsAddrOpnd() || opndOfArrayCtor->IsRegOpnd())) // #1
    {
        if ((linkSym->GetArgSlotNum() == 2)) // 1. It is the only parameter (slot 1 is 'this')
        {
            AssertMsg(linkSym->IsArgSlotSym(), "Not an argSlot symbol...");
            linkOpnd = argInstr->GetSrc2();
            bool emittedFastPath = false;

            // 2a. If 1st parameter is a variable, emit fast path with checks
            if (opndOfArrayCtor->IsRegOpnd())
            {
                if (!opndOfArrayCtor->AsRegOpnd()->IsNotInt())
                {
                    // 3. GenerateFastPath -- pick the array flavor the profile predicts.
                    if (arrayInfo && arrayInfo->IsNativeIntArray())
                    {
                        emittedFastPath = GenerateProfiledNewScObjArrayFastPath<Js::JavascriptNativeIntArray>(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, helperLabel, labelDone, opndOfArrayCtor,
                            Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex(),
                            Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef());
                    }
                    else if (arrayInfo && arrayInfo->IsNativeFloatArray())
                    {
                        emittedFastPath = GenerateProfiledNewScObjArrayFastPath<Js::JavascriptNativeFloatArray>(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, helperLabel, labelDone, opndOfArrayCtor,
                            Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex(),
                            Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef());
                    }
                    else
                    {
                        emittedFastPath = GenerateProfiledNewScObjArrayFastPath<Js::JavascriptArray>(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, helperLabel, labelDone, opndOfArrayCtor, 0, 0);
                    }
                }
            }
            // 2b. If 1st parameter is a constant, it is in range 0 and upperBoundValue (inclusive)
            else
            {
                int32 length = linkSym->GetIntConstValue();
                if (length >= 0 && length <= upperBoundValue)
                {
                    emittedFastPath = GenerateProfiledNewScObjArrayFastPath(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, (uint32)length, labelDone, false);
                }
            }

            // Since we emitted fast path above, move the startCall/argOut instruction right before helper
            if (emittedFastPath)
            {
                linkSym = linkOpnd->AsRegOpnd()->m_sym->AsStackSym();
                AssertMsg(!linkSym->IsArgSlotSym() && linkSym->m_isSingleDef, "Arg tree not single def...");
                IR::Instr* startCallInstr = linkSym->m_instrDef;

                AssertMsg(startCallInstr->GetArgOutCount(false) == 2, "Generating ArrayFastPath for more than 1 parameter not allowed.");

                // Since we emitted fast path above, move the startCall/argOut instruction right before helper
                startCallInstr->Move(newObjInstr);
                argInstr->Move(newObjInstr);
            }
        }
    }

    // Helper path: prepend the array-info address as the first argument and call
    // the profiled Array constructor helper.
    newObjInstr->UnlinkSrc1();
    IR::Opnd *profileOpnd = IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, func);
    this->m_lowererMD.LoadNewScObjFirstArg(newObjInstr, profileOpnd);

    IR::JnHelperMethod helperMethod = IR::HelperScrArr_ProfiledNewInstance;

    newObjInstr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
    newObjInstr = GenerateDirectCall(newObjInstr, targetOpnd, Js::CallFlags_New);

    // If the result is a plain JavascriptArray (vtable match), skip the native-array
    // bookkeeping below.
    IR::BranchInstr* branchInstr = InsertCompareBranch(
        IR::IndirOpnd::New(resultObjOpnd, 0, TyMachPtr, func),
        LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptArray),
        Js::OpCode::BrEq_A,
        true,
        labelDone,
        insertInstr);
    InsertObjectPoison(resultObjOpnd, branchInstr, insertInstr, true);

    // We know we have a native array, so store the weak ref and call site index.
    InsertMove(
        IR::IndirOpnd::New(resultObjOpnd, Js::JavascriptNativeArray::GetOffsetOfArrayCallSiteIndex(), TyUint16, func),
        IR::Opnd::CreateProfileIdOpnd(profileId, func),
        insertInstr);
    InsertMove(
        IR::IndirOpnd::New(resultObjOpnd, Js::JavascriptNativeArray::GetOffsetOfWeakFuncRef(), TyMachReg, func),
        IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, func),
        insertInstr);
    insertInstr->InsertBefore(labelDone);

    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
// Lowers a NewScObjArray with no explicit arguments (new Array()).  Emits a
// profiled zero-length fast path followed by a fallback call to the
// ScrArr_ProfiledNewInstanceNoArg helper.  If the target is not a fixed address
// and no native-array bailout is attached, falls back to generic NewScObject
// lowering; with a bailout attached, a runtime check against the Array
// constructor guards the bailout.  Returns the first instruction above the
// lowered region.
IR::Instr *
Lowerer::LowerNewScObjArrayNoArg(IR::Instr *newObjInstr)
{
    IR::Opnd *targetOpnd = newObjInstr->GetSrc1();
    Func *func = newObjInstr->m_func;

    IR::Instr* startMarkerInstr = nullptr;

    if (!targetOpnd->IsAddrOpnd())
    {
        if (!newObjInstr->HasBailOutInfo() || newObjInstr->OnlyHasLazyBailOut())
        {
            return this->LowerNewScObject(newObjInstr, true, false);
        }

        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

        // For whatever reason, we couldn't do a fixed function check on the call target.
        // Generate a runtime check on the target.
        Assert(
            newObjInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray ||
            newObjInstr->GetBailOutKind() == BailOutInfo::WithLazyBailOut(IR::BailOutOnNotNativeArray)
        );
        IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, func);
        InsertCompareBranch(
            targetOpnd,
            LoadLibraryValueOpnd(newObjInstr, LibraryValue::ValueArrayConstructor),
            Js::OpCode::BrEq_A,
            true,
            labelSkipBailOut,
            newObjInstr);

        // Clone the instruction (preserving its profile id) past the bailout and
        // convert the original into the BailOut instruction.
        IR::ProfiledInstr *instrNew = IR::ProfiledInstr::New(newObjInstr->m_opcode, newObjInstr->UnlinkDst(), newObjInstr->UnlinkSrc1(), func);
        instrNew->u.profileId = newObjInstr->AsProfiledInstr()->u.profileId;
        newObjInstr->InsertAfter(instrNew);
        newObjInstr->m_opcode = Js::OpCode::BailOut;
        GenerateBailOut(newObjInstr);
        instrNew->InsertBefore(labelSkipBailOut);
        newObjInstr = instrNew;
    }
    else
    {
        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
    }

    Assert(newObjInstr->IsProfiledInstr());

    intptr_t weakFuncRef = 0;
    intptr_t arrayInfoAddr = 0;
    Js::ArrayCallSiteInfo *arrayInfo = nullptr;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId);
    if (profileId != Js::Constants::NoProfileId)
    {
        arrayInfo = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
        arrayInfoAddr = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
        Assert(arrayInfo);
        weakFuncRef = func->GetWeakFuncRef();
        Assert(weakFuncRef);
    }

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    // Fast path: allocate a zero-length array inline; falls through to the helper on failure.
    GenerateProfiledNewScObjArrayFastPath(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, 0, labelDone, true);
    newObjInstr->InsertAfter(labelDone);

    // Helper args are pushed in reverse of the helper's parameter order.
    m_lowererMD.LoadHelperArgument(newObjInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, func));
    m_lowererMD.LoadHelperArgument(newObjInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, func));
    LoadScriptContext(newObjInstr);
    m_lowererMD.LoadHelperArgument(newObjInstr, targetOpnd);

    newObjInstr->UnlinkSrc1();
    newObjInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperScrArr_ProfiledNewInstanceNoArg, func));
    m_lowererMD.LowerCall(newObjInstr, 0);

    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
  4692. ///----------------------------------------------------------------------------
  4693. ///
  4694. /// Lowerer::LowerPrologEpilog
  4695. ///
  4696. ///----------------------------------------------------------------------------
  4697. void
  4698. Lowerer::LowerPrologEpilog()
  4699. {
  4700. if (m_func->GetJITFunctionBody()->IsCoroutine())
  4701. {
  4702. LowerGeneratorResumeJumpTable();
  4703. }
  4704. IR::Instr * instr;
  4705. instr = m_func->m_headInstr;
  4706. AssertMsg(instr->IsEntryInstr(), "First instr isn't an EntryInstr...");
  4707. m_lowererMD.LowerEntryInstr(instr->AsEntryInstr());
  4708. instr = m_func->m_exitInstr;
  4709. AssertMsg(instr->IsExitInstr(), "Last instr isn't an ExitInstr...");
  4710. m_lowererMD.LowerExitInstr(instr->AsExitInstr());
  4711. }
  4712. void
  4713. Lowerer::LowerPrologEpilogAsmJs()
  4714. {
  4715. IR::Instr * instr;
  4716. instr = m_func->m_headInstr;
  4717. AssertMsg(instr->IsEntryInstr(), "First instr isn't an EntryInstr...");
  4718. m_lowererMD.LowerEntryInstr(instr->AsEntryInstr());
  4719. instr = m_func->m_exitInstr;
  4720. AssertMsg(instr->IsExitInstr(), "Last instr isn't an ExitInstr...");
  4721. m_lowererMD.LowerExitInstrAsmJs(instr->AsExitInstr());
  4722. }
  4723. void
  4724. Lowerer::LowerGeneratorResumeJumpTable()
  4725. {
  4726. Assert(m_func->GetJITFunctionBody()->IsCoroutine());
  4727. IR::Instr * jumpTableInstr = m_func->m_headInstr;
  4728. AssertMsg(jumpTableInstr->IsEntryInstr(), "First instr isn't an EntryInstr...");
  4729. // Hope to do away with this linked list scan by moving this lowering to a post-prolog-epilog/pre-encoder phase that is common to all architectures (currently such phase is only available on amd64/arm)
  4730. while (jumpTableInstr->m_opcode != Js::OpCode::GeneratorResumeJumpTable)
  4731. {
  4732. jumpTableInstr = jumpTableInstr->m_next;
  4733. }
  4734. IR::Opnd * srcOpnd = jumpTableInstr->UnlinkSrc1();
  4735. m_func->MapYieldOffsetResumeLabels([&](int i, const YieldOffsetResumeLabel& yorl)
  4736. {
  4737. uint32 offset = yorl.First();
  4738. IR::LabelInstr * label = yorl.Second();
  4739. if (label != nullptr && label->m_hasNonBranchRef)
  4740. {
  4741. // Also fix up the bailout at the label with the jump to epilog that was not emitted in GenerateBailOut()
  4742. Assert(label->m_prev->HasBailOutInfo());
  4743. GenerateJumpToEpilogForBailOut(label->m_prev->GetBailOutInfo(), label->m_prev);
  4744. }
  4745. else if (label == nullptr)
  4746. {
  4747. label = m_func->m_bailOutNoSaveLabel;
  4748. }
  4749. // For each offset label pair, insert a compare of the offset and branch if equal to the label
  4750. InsertCompareBranch(srcOpnd, IR::IntConstOpnd::New(offset, TyUint32, m_func), Js::OpCode::BrSrEq_A, label, jumpTableInstr);
  4751. });
  4752. jumpTableInstr->Remove();
  4753. }
  4754. void
  4755. Lowerer::DoInterruptProbes()
  4756. {
  4757. this->m_func->SetHasInstrNumber(true);
  4758. uint instrCount = 1;
  4759. FOREACH_INSTR_IN_FUNC(instr, this->m_func)
  4760. {
  4761. instr->SetNumber(instrCount++);
  4762. if (instr->IsLabelInstr())
  4763. {
  4764. IR::LabelInstr *labelInstr = instr->AsLabelInstr();
  4765. if (labelInstr->m_isLoopTop)
  4766. {
  4767. // For every loop top label, insert the following:
  4768. // cmp sp, ThreadContext::stackLimitForCurrentThread
  4769. // bgt $continue
  4770. // $helper:
  4771. // call JavascriptOperators::ScriptAbort
  4772. // b $exit
  4773. // $continue:
  4774. IR::LabelInstr *newLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4775. labelInstr->InsertAfter(newLabel);
  4776. this->InsertOneLoopProbe(newLabel, newLabel);
  4777. }
  4778. }
  4779. }
  4780. NEXT_INSTR_IN_FUNC;
  4781. }
// Insert an interrupt probe at each loop back branch. (Currently uncalled, since we're inserting
// probes at loop tops instead of back edges, but kept around because it may prove useful.)
//
// Returns the next available instruction number after numbering everything
// inserted here.
uint
Lowerer::DoLoopProbeAndNumber(IR::BranchInstr *branchInstr)
{
    IR::LabelInstr *labelInstr = branchInstr->GetTarget();
    if (labelInstr == nullptr || labelInstr->GetNumber() == 0)
    {
        // Forward branch (possibly an indirect jump after try-catch-finally); nothing to do.
        // A target with number 0 has not been numbered yet, i.e. it lies ahead of us,
        // so this is not a loop back edge.
        return branchInstr->GetNumber() + 1;
    }
    Assert(labelInstr->m_isLoopTop);

    // Insert a stack probe at this branch. Number all the instructions we insert
    // and return the next instruction number.
    uint number = branchInstr->GetNumber();
    // Remember the neighbors of the branch so we can renumber exactly the
    // instructions inserted between them afterwards.
    IR::Instr *instrPrev = branchInstr->m_prev;
    IR::Instr *instrNext = branchInstr->m_next;
    if (branchInstr->IsUnconditional())
    {
        // B $loop ==>
        // cmp [], 0
        // beq $loop
        // $helper:
        // call abort
        // b $exit
        //
        // The probe's success branch becomes the back edge, so the original
        // unconditional branch is removed.
        this->InsertOneLoopProbe(branchInstr, labelInstr);
        branchInstr->Remove();
    }
    else
    {
        // Bcc $loop ==>
        // Binv $notloop
        // cmp [], 0
        // beq $loop
        // $helper:
        // call abort
        // b $exit
        // $notloop:
        //
        // Invert the conditional branch so it skips the probe on loop exit;
        // the probe then supplies the actual branch back to the loop top.
        IR::LabelInstr *loopExitLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        branchInstr->SetTarget(loopExitLabel);
        LowererMD::InvertBranch(branchInstr);
        branchInstr->InsertAfter(loopExitLabel);
        this->InsertOneLoopProbe(loopExitLabel, labelInstr);
    }

    // Number everything that was inserted between the branch's old neighbors.
    FOREACH_INSTR_IN_RANGE(instr, instrPrev->m_next, instrNext->m_prev)
    {
        instr->SetNumber(number++);
    }
    NEXT_INSTR_IN_RANGE;

    return number;
}
  4833. void
  4834. Lowerer::InsertOneLoopProbe(IR::Instr *insertInstr, IR::LabelInstr *loopLabel)
  4835. {
  4836. // Insert one interrupt probe at the given instruction. Probe the stack and call the abort helper
  4837. // directly if the probe fails.
  4838. IR::Opnd *memRefOpnd = IR::MemRefOpnd::New(
  4839. m_func->GetThreadContextInfo()->GetThreadStackLimitAddr(),
  4840. TyMachReg, this->m_func);
  4841. IR::RegOpnd *regStackPointer = IR::RegOpnd::New(
  4842. NULL, this->m_lowererMD.GetRegStackPointer(), TyMachReg, this->m_func);
  4843. InsertCompareBranch(regStackPointer, memRefOpnd, Js::OpCode::BrGt_A, loopLabel, insertInstr);
  4844. IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4845. insertInstr->InsertBefore(helperLabel);
  4846. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperScriptAbort, this->m_func);
  4847. IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
  4848. instr->SetSrc1(helperOpnd);
  4849. insertInstr->InsertBefore(instr);
  4850. this->m_lowererMD.LowerCall(instr, 0);
  4851. // Jump to the exit after the helper call. This instruction will never be reached, but the jump
  4852. // indicates that nothing is live after the call (to avoid useless spills in code that will
  4853. // be executed).
  4854. instr = this->m_func->m_exitInstr->GetPrevRealInstrOrLabel();
  4855. if (instr->IsLabelInstr())
  4856. {
  4857. helperLabel = instr->AsLabelInstr();
  4858. }
  4859. else
  4860. {
  4861. helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4862. this->m_func->m_exitInstr->InsertBefore(helperLabel);
  4863. }
  4864. instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, helperLabel, this->m_func);
  4865. insertInstr->InsertBefore(instr);
  4866. }
  4867. ///----------------------------------------------------------------------------
  4868. ///
  4869. /// Lowerer::LoadPropertySymAsArgument
  4870. ///
  4871. /// Generate code to pass a fieldSym as argument to a helper.
  4872. ///----------------------------------------------------------------------------
  4873. IR::Instr *
  4874. Lowerer::LoadPropertySymAsArgument(IR::Instr *instr, IR::Opnd *fieldSrc)
  4875. {
  4876. IR::Instr * instrPrev;
  4877. AssertMsg(fieldSrc->IsSymOpnd() && fieldSrc->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as src of LdFld");
  4878. IR::SymOpnd *symOpnd = fieldSrc->AsSymOpnd();
  4879. PropertySym * fieldSym = symOpnd->m_sym->AsPropertySym();
  4880. IR::IntConstOpnd * indexOpnd = IR::IntConstOpnd::New(fieldSym->m_propertyId, TyInt32, m_func, /*dontEncode*/true);
  4881. instrPrev = m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  4882. IR::RegOpnd * instanceOpnd = symOpnd->CreatePropertyOwnerOpnd(m_func);
  4883. m_lowererMD.LoadHelperArgument(instr, instanceOpnd);
  4884. return instrPrev;
  4885. }
  4886. ///----------------------------------------------------------------------------
  4887. ///
  4888. /// Lowerer::LoadFunctionBodyAsArgument
  4889. ///
  4890. /// Special case: the "property ID" is a key into the ScriptContext's FunctionBody map
  4891. ///----------------------------------------------------------------------------
  4892. IR::Instr *
  4893. Lowerer::LoadFunctionBodyAsArgument(IR::Instr *instr, IR::IntConstOpnd * functionBodySlotOpnd, IR::RegOpnd * envOpnd)
  4894. {
  4895. IR::Instr * instrPrev;
  4896. // We need to pass in the function reference, we can't embed the pointer to the function proxy here.
  4897. // The function proxy may be deferred parsed/serialize, and may 'progress' to a real function body after it is undeferred
  4898. // At which point the deferred function proxy may be collect.
  4899. // Just pass it the address where we will find the function proxy/body
  4900. Js::FunctionInfoPtrPtr infoRef = instr->m_func->GetJITFunctionBody()->GetNestedFuncRef((uint)functionBodySlotOpnd->GetValue());
  4901. AssertMsg(infoRef, "Expected FunctionProxy for index of NewScFunc or NewScGenFunc opnd");
  4902. IR::AddrOpnd * indexOpnd = IR::AddrOpnd::New((Js::Var)infoRef, IR::AddrOpndKindDynamicMisc, m_func);
  4903. instrPrev = m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  4904. m_lowererMD.LoadHelperArgument(instr, envOpnd);
  4905. return instrPrev;
  4906. }
// Lower a field load emitted in jit-profiling mode: route the load through the
// matching ProfilingHelpers::Profiled*_Jit helper so profile data is collected
// at runtime. Returns the instruction that preceded the lowered region.
IR::Instr *
Lowerer::LowerProfiledLdFld(IR::JitProfilingInstr *ldFldInstr)
{
    const auto instrPrev = ldFldInstr->m_prev;

    auto src = ldFldInstr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");

    IR::JnHelperMethod helper = IR::HelperInvalid;
    switch (ldFldInstr->m_opcode)
    {
        // All of these variants share one helper signature; they differ only in
        // which profiled helper gets called, so each case sets 'helper' and
        // jumps to the shared argument-loading code.
        case Js::OpCode::LdFld:
            helper = IR::HelperProfiledLdFld;
            goto ldFldCommon;
        case Js::OpCode::LdRootFld:
            helper = IR::HelperProfiledLdRootFld;
            goto ldFldCommon;
        case Js::OpCode::LdMethodFld:
            helper = IR::HelperProfiledLdMethodFld;
            goto ldFldCommon;
        case Js::OpCode::LdRootMethodFld:
            helper = IR::HelperProfiledLdRootMethodFld;
            goto ldFldCommon;
        case Js::OpCode::LdFldForCallApplyTarget:
            helper = IR::HelperProfiledLdFld_CallApplyTarget;
            goto ldFldCommon;
        case Js::OpCode::LdFldForTypeOf:
            helper = IR::HelperProfiledLdFldForTypeOf;
            goto ldFldCommon;
        case Js::OpCode::LdRootFldForTypeOf:
            helper = IR::HelperProfiledLdRootFldForTypeOf;
            goto ldFldCommon;

    ldFldCommon:
        {
            // These variants are identified by inline cache index, not profileId.
            Assert(ldFldInstr->profileId == Js::Constants::NoProfileId);

            /*
                Var ProfilingHelpers::ProfiledLdFld_Jit(
                    const Var instance,
                    const PropertyId propertyId,
                    const InlineCacheIndex inlineCacheIndex,
                    void *const framePointer)
            */
            // Helper arguments are loaded last-to-first.
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
            m_lowererMD.LoadHelperArgument(
                ldFldInstr,
                IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
            // Loads propertyId then instance (the helper's first two parameters).
            LoadPropertySymAsArgument(ldFldInstr, src);
            break;
        }

        case Js::OpCode::LdSuperFld:
        {
            Assert(ldFldInstr->profileId == Js::Constants::NoProfileId);
            IR::Opnd * src2 = nullptr;

            /*
                Var ProfilingHelpers::ProfiledLdSuperFld_Jit(
                    const Var instance,
                    const PropertyId propertyId,
                    const InlineCacheIndex inlineCacheIndex,
                    void *const framePointer,
                    const Var thisInstance)
            */
            // src2 carries the 'this' instance for the super property load.
            src2 = ldFldInstr->UnlinkSrc2();

            m_lowererMD.LoadHelperArgument(ldFldInstr, src2 );
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
            m_lowererMD.LoadHelperArgument(
                ldFldInstr,
                IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
            LoadPropertySymAsArgument(ldFldInstr, src);

            helper = IR::HelperProfiledLdSuperFld;
            break;
        }

        case Js::OpCode::LdLen_A:
            // LdLen is the one variant here that does carry a profileId.
            Assert(ldFldInstr->profileId != Js::Constants::NoProfileId);
            /*
                Var ProfilingHelpers::ProfiledLdLen_Jit(
                    const Var instance,
                    const PropertyId propertyId,
                    const InlineCacheIndex inlineCacheIndex,
                    const ProfileId profileId,
                    void *const framePointer)
            */
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateProfileIdOpnd(ldFldInstr->profileId, m_func));
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
            LoadPropertySymAsArgument(ldFldInstr, src);
            helper = IR::HelperProfiledLdLen;
            break;

        default:
            Assert(false);
    }

    // Replace the instruction's source with the helper and lower it to a call.
    ldFldInstr->SetSrc1(IR::HelperCallOpnd::New(helper, m_func));
    m_lowererMD.LowerCall(ldFldInstr, 0);

    return instrPrev;
}
// Emit the fast path for a "flags" (accessor) inline cache hit where the
// cached property lives on a prototype object recorded in the cache.
void
Lowerer::GenerateProtoLdFldFromFlagInlineCache(
    IR::Instr * insertBeforeInstr,
    IR::Opnd * opndDst,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelFallThru,
    bool isInlineSlot)
{
    // Generate:
    //
    // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
    // s1 = MOV [&s1->slots] -- load the slot array (aux-slot case only)
    // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
    // dst = MOV [s1 + s2 * scale] -- scale is the default machine indir scale
    // JMP $fallthru

    IR::Opnd* inlineCacheObjOpnd;
    IR::IndirOpnd * opndIndir;
    IR::RegOpnd * opndObjSlots = nullptr;

    inlineCacheObjOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.object), TyMachReg, this->m_func);

    // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
    IR::RegOpnd *opndObject = IR::RegOpnd::New(TyMachReg, this->m_func);
    InsertMove(opndObject, inlineCacheObjOpnd, insertBeforeInstr, false);

    if (!isInlineSlot)
    {
        // s1 = MOV [&s1->slots] -- load the slot array for out-of-line (aux) slots
        opndObjSlots = IR::RegOpnd::New(TyMachReg, this->m_func);
        opndIndir = IR::IndirOpnd::New(opndObject, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        InsertMove(opndObjSlots, opndIndir, insertBeforeInstr, false);
    }

    // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
    // (TyUint16 load; the move zero-extends into the full register)
    IR::RegOpnd *opndSlotIndex = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Opnd* slotIndexOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, this->m_func);
    InsertMove(opndSlotIndex, slotIndexOpnd, insertBeforeInstr, false);

    if (isInlineSlot)
    {
        // dst = MOV [object + slotIndex * scale] -- inline slots live directly on the object
        opndIndir = IR::IndirOpnd::New(opndObject, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
    }
    else
    {
        // dst = MOV [slotArray + slotIndex * scale]
        opndIndir = IR::IndirOpnd::New(opndObjSlots, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
    }
    InsertMove(opndDst, opndIndir, insertBeforeInstr, false);

    // JMP $fallthru -- skip the remaining (slower) paths
    InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
}
// Emit the fast path for a "flags" (accessor) inline cache hit where the
// cached property lives on the object itself (opndBase), not on a prototype.
// Note: unlike the proto variant, no object is loaded from the cache here.
void
Lowerer::GenerateLocalLdFldFromFlagInlineCache(
    IR::Instr * insertBeforeInstr,
    IR::RegOpnd * opndBase,
    IR::Opnd * opndDst,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelFallThru,
    bool isInlineSlot)
{
    // Generate:
    //
    // s1 = MOV [&base->slots] -- load the slot array (aux-slot case only)
    // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
    // dst = MOV [base-or-s1 + s2 * scale]
    // JMP $fallthru

    IR::IndirOpnd * opndIndir;
    IR::RegOpnd * opndObjSlots = nullptr;

    if (!isInlineSlot)
    {
        // s1 = MOV [&base->slots] -- load the aux slot array from the base object
        opndObjSlots = IR::RegOpnd::New(TyMachReg, this->m_func);
        opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        InsertMove(opndObjSlots, opndIndir, insertBeforeInstr, false);
    }

    // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
    // (TyUint16 load; the move zero-extends into the full register)
    IR::RegOpnd *opndSlotIndex = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Opnd* slotIndexOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, this->m_func);
    InsertMove(opndSlotIndex, slotIndexOpnd, insertBeforeInstr, false);

    if (isInlineSlot)
    {
        // dst = MOV [base + slotIndex * scale] -- inline slots live directly on the object
        opndIndir = IR::IndirOpnd::New(opndBase, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
    }
    else
    {
        // dst = MOV [slotArray + slotIndex * scale]
        opndIndir = IR::IndirOpnd::New(opndObjSlots, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
    }
    InsertMove(opndDst, opndIndir, insertBeforeInstr, false);

    // JMP $fallthru -- skip the remaining (slower) paths
    InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
}
  5089. void
  5090. Lowerer::GenerateFlagProtoCheck(
  5091. IR::Instr * insertBeforeInstr,
  5092. IR::RegOpnd * opndInlineCache,
  5093. IR::LabelInstr * labelNotOnProto)
  5094. {
  5095. // Generate:
  5096. //
  5097. // TEST [&(inlineCache->u.accessor.isOnProto)], Js::FlagIsOnProto
  5098. // JEQ $next
  5099. IR::Opnd* flagsOpnd;
  5100. flagsOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.rawUInt16), TyInt8, insertBeforeInstr->m_func);
  5101. uint isOnProtoFlagMask = Js::InlineCache::GetIsOnProtoFlagMask();
  5102. InsertTestBranch(flagsOpnd, IR::IntConstOpnd::New(isOnProtoFlagMask, TyInt8, this->m_func), Js::OpCode::BrEq_A, labelNotOnProto, insertBeforeInstr);
  5103. }
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateFastLdMethodFromFlags
///
/// Make use of the helper to cache the type and slot index used to do a LdFld
/// and do an inline load from the appropriate slot if the type hasn't changed
/// since the last time this LdFld was executed.
///
/// Four fast paths are emitted (untagged/tagged type x proto/local slot);
/// all of them jump to $fallthru on success, and any miss falls into the
/// bailout block. Always returns true.
///
///----------------------------------------------------------------------------
bool
Lowerer::GenerateFastLdMethodFromFlags(IR::Instr * instrLdFld)
{
    IR::LabelInstr * labelFallThru;
    IR::LabelInstr * bailOutLabel;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    IR::RegOpnd * opndBase;
    IR::RegOpnd * opndType;
    IR::RegOpnd * opndInlineCache;

    opndSrc = instrLdFld->GetSrc1();

    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
              "Expected property sym operand as src of LdFldFlags");

    IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();

    Assert(!instrLdFld->DoStackArgsOpt());

    // If the operand participates in a type-check sequence, reuse its object
    // type sym so the loaded type is available downstream; otherwise use a
    // throwaway register.
    if (propertySymOpnd->IsTypeCheckSeqCandidate())
    {
        AssertMsg(propertySymOpnd->HasObjectTypeSym(), "Type optimized property sym operand without a type sym?");
        StackSym *typeSym = propertySymOpnd->GetObjectTypeSym();
        opndType = IR::RegOpnd::New(typeSym, TyMachReg, this->m_func);
    }
    else
    {
        opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
    }

    opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    opndDst = instrLdFld->GetDst();

    opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    // Label to jump to (or fall through to) when bailing out
    bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instrLdFld->m_func, true /* isOpHelper */);

    InsertMove(opndInlineCache, LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd), instrLdFld);
    IR::LabelInstr * labelFlagAux = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Check the flag cache with the untagged type. A non-object src bails out
    // immediately; a cache miss on the untagged type falls through to the
    // tagged-type check at $labelFlagAux.
    GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, opndType, bailOutLabel);
    GenerateFlagInlineCacheCheck(instrLdFld, opndType, opndInlineCache, labelFlagAux);
    IR::LabelInstr * labelFlagInlineLocal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    // Proto path first; if the property is not on the proto, fall to the local path.
    GenerateFlagProtoCheck(instrLdFld, opndInlineCache, labelFlagInlineLocal);
    GenerateProtoLdFldFromFlagInlineCache(instrLdFld, opndDst, opndInlineCache, labelFallThru, true);
    instrLdFld->InsertBefore(labelFlagInlineLocal);
    GenerateLocalLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);

    // Check the flag cache with the tagged type; a miss here bails out.
    instrLdFld->InsertBefore(labelFlagAux);
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
    m_lowererMD.GenerateLoadTaggedType(instrLdFld, opndType, opndTaggedType);
    GenerateFlagInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, bailOutLabel);
    IR::LabelInstr * labelFlagAuxLocal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    GenerateFlagProtoCheck(instrLdFld, opndInlineCache, labelFlagAuxLocal);
    GenerateProtoLdFldFromFlagInlineCache(instrLdFld, opndDst, opndInlineCache, labelFallThru, false);
    instrLdFld->InsertBefore(labelFlagAuxLocal);
    GenerateLocalLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);

    instrLdFld->InsertBefore(bailOutLabel);
    instrLdFld->InsertAfter(labelFallThru);
    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    instrLdFld->UnlinkSrc1();
    GenerateBailOut(instrLdFld);

    return true;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerLdFld
///
/// Lower an instruction (LdFld, ScopedLdFld) that takes a property
/// reference as a source and puts a result in a register.
///
/// The instruction becomes a call to helperMethod (or to
/// polymorphicHelperMethod when a runtime polymorphic inline cache is
/// available). Returns the instruction preceding the lowered region.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerLdFld(
    IR::Instr * ldFldInstr,
    IR::JnHelperMethod helperMethod,
    IR::JnHelperMethod polymorphicHelperMethod,
    bool useInlineCache,
    IR::LabelInstr *labelBailOut,
    bool isHelper)
{
    if (ldFldInstr->IsJitProfilingInstr())
    {
        // If we want to profile then do something completely different
        return this->LowerProfiledLdFld(ldFldInstr->AsJitProfilingInstr());
    }

    IR::Opnd *src;
    IR::Instr *instrPrev = ldFldInstr->m_prev;

    src = ldFldInstr->UnlinkSrc1();

    // LdSuperFld passes an extra 'this' instance (src2) as the last helper argument.
    if (ldFldInstr->m_opcode == Js::OpCode::LdSuperFld)
    {
        IR::Opnd * src2 = nullptr;
        src2 = ldFldInstr->UnlinkSrc2();
        m_lowererMD.LoadHelperArgument(ldFldInstr, src2);
    }

    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");

    if (useInlineCache)
    {
        IR::Opnd * inlineCacheOpnd;
        AssertMsg(src->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");

        if (src->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache && polymorphicHelperMethod != helperMethod)
        {
            // A polymorphic cache exists: switch to the polymorphic helper and
            // pass that cache instead of the monomorphic one.
            JITTimePolymorphicInlineCache * polymorphicInlineCache = src->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache;
            helperMethod = polymorphicHelperMethod;
            inlineCacheOpnd = IR::AddrOpnd::New(polymorphicInlineCache->GetAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func);
        }
        else
        {
            // Need to load runtime inline cache opnd first before loading any helper argument
            // because LoadRuntimeInlineCacheOpnd may create labels marked as helper,
            // and cause op helper register push/pop save in x86, messing up with any helper arguments that is already pushed
            inlineCacheOpnd = this->LoadRuntimeInlineCacheOpnd(ldFldInstr, src->AsPropertySymOpnd(), isHelper);
        }

        // Helper arguments, pushed last-to-first:
        // (instance, propertyId) then inlineCacheIndex, inlineCache, functionBody.
        this->LoadPropertySymAsArgument(ldFldInstr, src);
        this->m_lowererMD.LoadHelperArgument(
            ldFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        this->m_lowererMD.LoadHelperArgument(ldFldInstr, inlineCacheOpnd);
        this->m_lowererMD.LoadHelperArgument(ldFldInstr, LoadFunctionBodyOpnd(ldFldInstr));
    }
    else
    {
        // No cache: the helper takes (instance, propertyId, scriptContext).
        LoadScriptContext(ldFldInstr);
        this->LoadPropertySymAsArgument(ldFldInstr, src);
    }

    // Do we need to reload the type and slot array after the helper returns?
    // (We do if there's a propertySymOpnd downstream that needs it, i.e., the type is not dead.)
    IR::RegOpnd *opndBase = src->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func);
    m_lowererMD.ChangeToHelperCall(ldFldInstr, helperMethod, labelBailOut, opndBase, src->AsSymOpnd()->IsPropertySymOpnd() ? src->AsSymOpnd()->AsPropertySymOpnd() : nullptr, isHelper);

    return instrPrev;
}
  5239. bool
  5240. Lowerer::GenerateLdFldWithCachedType(IR::Instr * instrLdFld, bool* continueAsHelperOut, IR::LabelInstr** labelHelperOut, IR::RegOpnd** typeOpndOut)
  5241. {
  5242. IR::Instr *instr;
  5243. IR::Opnd *opnd;
  5244. IR::LabelInstr *labelObjCheckFailed = nullptr;
  5245. IR::LabelInstr *labelTypeCheckFailed = nullptr;
  5246. IR::LabelInstr *labelDone = nullptr;
  5247. Assert(continueAsHelperOut != nullptr);
  5248. *continueAsHelperOut = false;
  5249. Assert(labelHelperOut != nullptr);
  5250. *labelHelperOut = nullptr;
  5251. Assert(typeOpndOut != nullptr);
  5252. *typeOpndOut = nullptr;
  5253. Assert(instrLdFld->GetSrc1()->IsSymOpnd());
  5254. if (!instrLdFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
  5255. {
  5256. return false;
  5257. }
  5258. IR::PropertySymOpnd *propertySymOpnd = instrLdFld->GetSrc1()->AsPropertySymOpnd();
  5259. if (!propertySymOpnd->IsTypeCheckSeqCandidate())
  5260. {
  5261. return false;
  5262. }
  5263. AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
  5264. if (!propertySymOpnd->IsTypeCheckSeqParticipant() && !propertySymOpnd->NeedsLocalTypeCheck())
  5265. {
  5266. return false;
  5267. }
  5268. Assert(!propertySymOpnd->NeedsTypeCheckAndBailOut() || (instrLdFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrLdFld->GetBailOutKind())));
  5269. // In the backwards pass we only add guarded property operations to instructions that are not already
  5270. // protected by an upstream type check.
  5271. Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);
  5272. PHASE_PRINT_TESTTRACE(
  5273. Js::ObjTypeSpecPhase,
  5274. this->m_func,
  5275. _u("Field load: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n"),
  5276. Js::OpCodeUtil::GetOpCodeName(instrLdFld->m_opcode),
  5277. propertySymOpnd->m_sym->AsPropertySym()->m_propertyId,
  5278. this->m_func->GetJITFunctionBody()->GetDisplayName(),
  5279. propertySymOpnd->m_inlineCacheIndex,
  5280. propertySymOpnd->GetCacheLayoutString(),
  5281. propertySymOpnd->IsTypeChecked() ? _u("true") : _u("false"));
  5282. if (propertySymOpnd->HasFinalType() && !propertySymOpnd->IsLoadedFromProto())
  5283. {
  5284. propertySymOpnd->UpdateSlotForFinalType();
  5285. }
  5286. // TODO (ObjTypeSpec): If ((PropertySym*)propertySymOpnd->m_sym)->m_stackSym->m_isIntConst consider emitting a direct
  5287. // jump to helper or bailout. If we have a type check bailout, we could even abort compilation.
  5288. bool hasTypeCheckBailout = instrLdFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrLdFld->GetBailOutKind());
  5289. // If the hard-coded type is not available here, do a type check, and branch to the helper if the check fails.
  5290. // In the prototype case, we have to check the type even if it was checked upstream, to cover the case where
  5291. // the property has been added locally. Note that this is not necessary if the proto chain has been checked,
  5292. // because then we know there's been no store of the property since the type was checked.
  5293. bool emitPrimaryTypeCheck = propertySymOpnd->NeedsPrimaryTypeCheck();
  5294. bool emitLocalTypeCheck = propertySymOpnd->NeedsLocalTypeCheck();
  5295. bool emitLoadFromProtoTypeCheck = propertySymOpnd->NeedsLoadFromProtoTypeCheck();
  5296. bool emitTypeCheck = emitPrimaryTypeCheck || emitLocalTypeCheck || emitLoadFromProtoTypeCheck;
  5297. if (emitTypeCheck)
  5298. {
  5299. if (emitLoadFromProtoTypeCheck)
  5300. {
  5301. propertySymOpnd->EnsureGuardedPropOps(this->m_func->m_alloc);
  5302. propertySymOpnd->SetGuardedPropOp(propertySymOpnd->GetObjTypeSpecFldId());
  5303. }
  5304. labelTypeCheckFailed = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  5305. labelObjCheckFailed = hasTypeCheckBailout ? labelTypeCheckFailed : IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  5306. *typeOpndOut = this->GenerateCachedTypeCheck(instrLdFld, propertySymOpnd, labelObjCheckFailed, labelTypeCheckFailed);
  5307. }
  5308. IR::Opnd *opndSlotArray;
  5309. if (propertySymOpnd->IsLoadedFromProto())
  5310. {
  5311. opndSlotArray = this->LoadSlotArrayWithCachedProtoType(instrLdFld, propertySymOpnd);
  5312. }
  5313. else
  5314. {
  5315. opndSlotArray = this->LoadSlotArrayWithCachedLocalType(instrLdFld, propertySymOpnd);
  5316. }
  5317. // Load the value from the slot, getting the slot ID from the cache.
  5318. uint16 index = propertySymOpnd->GetSlotIndex();
  5319. Assert(index != -1);
  5320. if (opndSlotArray->IsRegOpnd())
  5321. {
  5322. opnd = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, this->m_func);
  5323. }
  5324. else
  5325. {
  5326. Assert(opndSlotArray->IsMemRefOpnd());
  5327. opnd = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, this->m_func, IR::AddrOpndKindDynamicPropertySlotRef);
  5328. }
  5329. Lowerer::InsertMove(instrLdFld->GetDst(), opnd, instrLdFld);
  5330. // We eliminate the helper, or the type check succeeds, or we bail out before the operation.
  5331. // Either delete the original instruction or replace it with a bailout.
  5332. if (!emitPrimaryTypeCheck && !emitLocalTypeCheck && !emitLoadFromProtoTypeCheck)
  5333. {
  5334. Assert(labelTypeCheckFailed == nullptr);
  5335. AssertMsg(!instrLdFld->HasBailOutInfo() || instrLdFld->HasLazyBailOut(), "Why does a direct field load have bailout that is not lazy?");
  5336. instrLdFld->Remove();
  5337. return true;
  5338. }
  5339. // Otherwise, branch around the bailout or helper.
  5340. labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  5341. instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
  5342. instrLdFld->InsertBefore(instr);
  5343. // Insert the bailout or helper label here.
  5344. instrLdFld->InsertBefore(labelTypeCheckFailed);
  5345. instrLdFld->InsertAfter(labelDone);
  5346. if (hasTypeCheckBailout)
  5347. {
  5348. AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !propertySymOpnd->IsTypeDead(),
  5349. "Why does a field load have a type check bailout, if its type is dead?");
  5350. // Convert the original instruction to a bailout.
  5351. if (instrLdFld->GetBailOutInfo()->bailOutInstr != instrLdFld)
  5352. {
  5353. // Set the cache index in the bailout info so that the bailout code will write it into the
  5354. // bailout record at runtime.
  5355. instrLdFld->GetBailOutInfo()->polymorphicCacheIndex = propertySymOpnd->m_inlineCacheIndex;
  5356. }
  5357. instrLdFld->FreeDst();
  5358. instrLdFld->FreeSrc1();
  5359. instrLdFld->m_opcode = Js::OpCode::BailOut;
  5360. this->GenerateBailOut(instrLdFld);
  5361. return true;
  5362. }
  5363. else
  5364. {
  5365. *continueAsHelperOut = true;
  5366. Assert(labelObjCheckFailed != nullptr && labelObjCheckFailed != labelTypeCheckFailed);
  5367. *labelHelperOut = labelObjCheckFailed;
  5368. return false;
  5369. }
  5370. }
// Lower a LdFld (or LdRootFld, when isRoot) end-to-end: try the object-type-spec
// cached-type path first, then optionally the inline-cache fast path, and finally
// fall back to the appropriate helper call.
//
// Params:
//   instr                    - the load-field instruction being lowered.
//   emitFastPath             - whether the inline-cache fast path may be emitted.
//   monoHelperAfterFastPath  / polyHelperAfterFastPath  - helpers used when a fast path was emitted.
//   monoHelperWithoutFastPath/ polyHelperWithoutFastPath- helpers used when no fast path was emitted.
// Returns: the instruction preceding the lowered sequence (resume point for the caller's loop).
template<bool isRoot>
IR::Instr* Lowerer::GenerateCompleteLdFld(IR::Instr* instr, bool emitFastPath, IR::JnHelperMethod monoHelperAfterFastPath, IR::JnHelperMethod polyHelperAfterFastPath,
    IR::JnHelperMethod monoHelperWithoutFastPath, IR::JnHelperMethod polyHelperWithoutFastPath)
{
    if(instr->CallsAccessor() && instr->HasBailOutInfo())
    {
        // An accessor call can execute arbitrary script, so this load must not carry
        // a bailout-on-implicit-calls kind.
        Assert(!BailOutInfo::IsBailOutOnImplicitCalls(instr->GetBailOutKind()));
    }
    IR::Instr* prevInstr = instr->m_prev;
    IR::LabelInstr* labelHelper = nullptr;
    IR::LabelInstr* labelBailOut = nullptr;
    bool isHelper = false;
    IR::RegOpnd* typeOpnd = nullptr;
    if (isRoot)
    {
        // Don't do the fast path here if emitFastPath is false, even if we can.
        if (emitFastPath && (this->GenerateLdFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd) || this->GenerateNonConfigurableLdRootFld(instr)))
        {
            // The cached-type (or non-configurable root field) path fully consumed the
            // instruction; nothing further to lower.
            Assert(labelHelper == nullptr);
            return prevInstr;
        }
    }
    else
    {
        if (this->GenerateLdFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd))
        {
            Assert(labelHelper == nullptr);
            return prevInstr;
        }
    }
    if (emitFastPath)
    {
        // Try the inline-cache fast path; if it could not fully handle the load,
        // fall through to the helper call (with the "after fast path" helpers).
        if (!GenerateFastLdFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, &labelBailOut, typeOpnd, &isHelper, &labelHelper))
        {
            if (labelHelper != nullptr)
            {
                labelHelper->isOpHelper = isHelper;
                instr->InsertBefore(labelHelper);
            }
            prevInstr = LowerLdFld(instr, monoHelperAfterFastPath, polyHelperAfterFastPath, true, labelBailOut, isHelper);
        }
    }
    else
    {
        // No fast path requested: emit the helper call directly, placing any helper
        // label produced by the cached-type attempt first.
        if (labelHelper != nullptr)
        {
            labelHelper->isOpHelper = isHelper;
            instr->InsertBefore(labelHelper);
        }
        prevInstr = LowerLdFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, true, labelBailOut, isHelper);
    }
    return prevInstr;
}
// Lower a CheckFixedFld instruction: emit whatever type check and/or property-guard
// check is needed to validate a fixed field (fixed method or fixed data property),
// then convert the instruction itself into a bailout taken when a check fails.
//
// Params: instrChkFld - the CheckFixedFld instruction; src1 is a PropertySymOpnd.
// Returns: true (the instruction is always fully consumed here).
bool
Lowerer::GenerateCheckFixedFld(IR::Instr * instrChkFld)
{
    IR::Instr *instr;
    IR::LabelInstr *labelBailOut = nullptr;
    IR::LabelInstr *labelDone = nullptr;
    AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instrChkFld->m_func) ||
        !PHASE_OFF(Js::UseFixedDataPropsPhase, instrChkFld->m_func), "Lowering a check fixed field with fixed data/method phase disabled?");
    Assert(instrChkFld->GetSrc1()->IsSymOpnd() && instrChkFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
    IR::PropertySymOpnd *propertySymOpnd = instrChkFld->GetSrc1()->AsPropertySymOpnd();
    AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
    Assert(propertySymOpnd->MayNeedTypeCheckProtection());
    // In the backwards pass we only add guarded property operations to instructions that are not already
    // protected by an upstream type check.
    Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);
    // For the non-configurable properties on the global object we do not need a type check. Otherwise,
    // we need a type check and bailout here unless this operation is part of the type check sequence and
    // is protected by a type check upstream.
    bool emitPrimaryTypeCheck = propertySymOpnd->NeedsPrimaryTypeCheck();
    // In addition, we may also need a local type check in case the property comes from the prototype and
    // it may have been overwritten on the instance after the primary type check upstream. If the property
    // comes from the instance, we must still protect against its value changing after the type check, but
    // for this a cheaper guard check is sufficient (see below).
    bool emitFixedFieldTypeCheck = propertySymOpnd->NeedsCheckFixedFieldTypeCheck() &&
        (!propertySymOpnd->IsTypeChecked() || propertySymOpnd->IsLoadedFromProto());
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
    uint inlineCacheIndex = propertySymOpnd->m_inlineCacheIndex;
    // Track which (if any) checks were actually emitted; if none were, we must not
    // build the helper/bailout block at all (see below).
    bool checkFixedDataGenerated = false;
    bool checkFixedTypeGenerated = false;
    OUTPUT_TRACE_FUNC(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Fixed field check: %s, property ID: %d, cache ID: %u, cloned cache: true, layout: %s, redundant check: %s count of props: %u \n"),
        Js::OpCodeUtil::GetOpCodeName(instrChkFld->m_opcode),
        propertySym->m_propertyId,
        inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(), propertySymOpnd->IsTypeChecked() ? _u("true") : _u("false"),
        propertySymOpnd->GetGuardedPropOps() ? propertySymOpnd->GetGuardedPropOps()->Count() : 0);
    if (emitPrimaryTypeCheck || emitFixedFieldTypeCheck)
    {
        labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        if(emitFixedFieldTypeCheck && propertySymOpnd->IsRootObjectNonConfigurableFieldLoad())
        {
            AssertMsg(!propertySymOpnd->GetGuardedPropOps() || propertySymOpnd->GetGuardedPropOps()->IsEmpty(), "This property Guard is used only for one property");
            //We need only cheaper Guard check, if the property belongs to the GlobalObject.
            checkFixedDataGenerated = this->GenerateFixedFieldGuardCheck(instrChkFld, propertySymOpnd, labelBailOut);
        }
        else
        {
            if (emitFixedFieldTypeCheck)
            {
                // Record this operation as guarded by the type check we are about to emit,
                // so invalidation of the property invalidates the emitted type guard.
                propertySymOpnd->EnsureGuardedPropOps(this->m_func->m_alloc);
                propertySymOpnd->SetGuardedPropOp(propertySymOpnd->GetObjTypeSpecFldId());
            }
            this->GenerateCachedTypeCheck(instrChkFld, propertySymOpnd, labelBailOut, labelBailOut);
            checkFixedTypeGenerated = true;
        }
    }
    // We may still need this guard if we didn't emit the write protect type check above. This situation arises if we have
    // a fixed field from the instance (not proto) and a property of the same name has been written somewhere between the
    // primary type check and here. Note that we don't need a type check, because we know the fixed field exists on the
    // object even if it has been written since primary type check, but we need to verify the fixed value didn't get overwritten.
    if (!emitPrimaryTypeCheck && !emitFixedFieldTypeCheck && !propertySymOpnd->IsWriteGuardChecked())
    {
        if (!PHASE_OFF(Js::FixedFieldGuardCheckPhase, this->m_func))
        {
            Assert(labelBailOut == nullptr);
            labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            checkFixedDataGenerated = this->GenerateFixedFieldGuardCheck(instrChkFld, propertySymOpnd, labelBailOut);
        }
    }
    // Note that a type handler holds only a weak reference to the singleton instance it represents, so
    // it is possible that the instance gets collected before the type and handler do. Hence, the upstream
    // type check may succeed, even as the original instance no longer exists. However, this would happen
    // only if another instance reached the same type (otherwise we wouldn't ever pass the type check
    // upstream). In that case we would have invalidated all fixed fields on that type, and so the type
    // check (or property guard check, if necessary) above would fail. All in all, we would never attempt
    // to access a fixed field from an instance that has been collected.
    if (!emitPrimaryTypeCheck && !emitFixedFieldTypeCheck && propertySymOpnd->IsWriteGuardChecked())
    {
        // Fully protected upstream: no checks needed here, so the instruction can simply
        // be deleted (after materializing the aux slot pointer if downstream ops need it).
        Assert(labelBailOut == nullptr);
        AssertMsg(!instrChkFld->HasBailOutInfo(), "Why does a direct fixed field check have bailout?");
        if (propertySymOpnd->ProducesAuxSlotPtr())
        {
            this->GenerateAuxSlotPtrLoad(propertySymOpnd, instrChkFld);
        }
        instrChkFld->Remove();
        return true;
    }
    // With lazy bailout, no checks might be generated for CheckFixedFld, so the code in Lowerer is only an
    // unconditional jmp to get past the bailout helper block. This is a new case and is unexpected, so layout
    // phase will also move the statement boundary preceding CheckFixedFld together with the jmp to after
    // function exit. As a result, source mapping is incorrect. Make sure that this doesn't happen by not
    // generating helper blocks at all if we don't generate checks.
    if (!checkFixedDataGenerated && !checkFixedTypeGenerated)
    {
        instrChkFld->Remove();
        return true;
    }
    // Branch around the bailout block on the success path.
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrChkFld->InsertBefore(instr);
    // Insert the helper label here.
    instrChkFld->InsertBefore(labelBailOut);
    instrChkFld->InsertAfter(labelDone);
    if (propertySymOpnd->ProducesAuxSlotPtr())
    {
        // Load the aux slot pointer on the success path (after labelDone) for downstream users.
        this->GenerateAuxSlotPtrLoad(propertySymOpnd, labelDone->m_next);
    }
    // Convert the original instruction to a bailout.
    Assert(instrChkFld->HasBailOutInfo());
    if (instrChkFld->GetBailOutInfo()->bailOutInstr != instrChkFld)
    {
        // Set the cache index in the bailout info so that the bailout code will write it into the
        // bailout record at runtime.
        instrChkFld->GetBailOutInfo()->polymorphicCacheIndex = inlineCacheIndex;
    }
    instrChkFld->FreeSrc1();
    instrChkFld->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(instrChkFld);
    return true;
}
// Lower a CheckObjType instruction: emit an explicit cached-type check for the
// object, then convert the instruction into a bailout taken when the check fails.
//
// Params: instrChkObjType - the CheckObjType instruction; src1 is a PropertySymOpnd.
void
Lowerer::GenerateCheckObjType(IR::Instr * instrChkObjType)
{
    Assert(instrChkObjType->GetSrc1()->IsSymOpnd() && instrChkObjType->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
    IR::PropertySymOpnd *propertySymOpnd = instrChkObjType->GetSrc1()->AsPropertySymOpnd();
    // Why do we have an explicit type check if the cached type has been checked upstream? The dead store pass should have
    // removed this instruction.
    Assert(propertySymOpnd->IsTypeCheckSeqCandidate() && !propertySymOpnd->IsTypeChecked());
    // Why do we have an explicit type check on a non-configurable root field load?
    Assert(!propertySymOpnd->IsRootObjectNonConfigurableFieldLoad());
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
    uint inlineCacheIndex = propertySymOpnd->m_inlineCacheIndex;
    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Object type check: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n"),
        Js::OpCodeUtil::GetOpCodeName(instrChkObjType->m_opcode),
        propertySym->m_propertyId,
        this->m_func->GetJITFunctionBody()->GetDisplayName(),
        inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(), _u("false"));
    // Emit the type check; both the obj-check-failed and type-check-failed paths
    // go to the same bailout label here.
    IR::LabelInstr* labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    this->GenerateCachedTypeCheck(instrChkObjType, propertySymOpnd, labelBailOut, labelBailOut);
    // Success path branches around the bailout block.
    IR::LabelInstr* labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::Instr* instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrChkObjType->InsertBefore(instr);
    // Insert the bailout label here.
    instrChkObjType->InsertBefore(labelBailOut);
    instrChkObjType->InsertAfter(labelDone);
    if (propertySymOpnd->ProducesAuxSlotPtr())
    {
        // Materialize the aux slot pointer on the success path for downstream consumers.
        this->GenerateAuxSlotPtrLoad(propertySymOpnd, labelDone->m_next);
    }
    // Convert the original instruction to a bailout.
    Assert(instrChkObjType->HasBailOutInfo());
    if (instrChkObjType->GetBailOutInfo()->bailOutInstr != instrChkObjType)
    {
        // Set the cache index in the bailout info so that the bailout code will write it into the
        // bailout record at runtime.
        instrChkObjType->GetBailOutInfo()->polymorphicCacheIndex = inlineCacheIndex;
    }
    instrChkObjType->FreeSrc1();
    instrChkObjType->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(instrChkObjType);
}
// Lower AdjustObjType[ReloadAuxSlotPtr]: migrate an object's slot storage from its
// initial type's layout to the final type's layout, then store the final type
// pointer into the object. Operands: dst = final type, src1 = base object,
// src2 = initial type.
void
Lowerer::LowerAdjustObjType(IR::Instr * instrAdjustObjType)
{
    IR::AddrOpnd *finalTypeOpnd = instrAdjustObjType->UnlinkDst()->AsAddrOpnd();
    IR::AddrOpnd *initialTypeOpnd = instrAdjustObjType->UnlinkSrc2()->AsAddrOpnd();
    IR::RegOpnd *baseOpnd = instrAdjustObjType->UnlinkSrc1()->AsRegOpnd();
    // Emit code to grow/convert the object's slots as required by the type transition.
    bool adjusted = this->GenerateAdjustBaseSlots(
        instrAdjustObjType, baseOpnd, JITTypeHolder((JITType*)initialTypeOpnd->m_metadata), JITTypeHolder((JITType*)finalTypeOpnd->m_metadata));
    if (instrAdjustObjType->m_opcode == Js::OpCode::AdjustObjTypeReloadAuxSlotPtr)
    {
        Assert(adjusted);
        // We reallocated the aux slots, so reload them if necessary.
        StackSym * auxSlotPtrSym = baseOpnd->m_sym->GetAuxSlotPtrSym();
        Assert(auxSlotPtrSym);
        IR::Opnd *opndIndir = IR::IndirOpnd::New(baseOpnd, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        IR::RegOpnd *regOpnd = IR::RegOpnd::New(auxSlotPtrSym, TyMachReg, this->m_func);
        regOpnd->SetIsJITOptimizedReg(true);
        Lowerer::InsertMove(regOpnd, opndIndir, instrAdjustObjType);
    }
    // Keep the final type alive for the lifetime of the function's JIT'd code.
    this->m_func->PinTypeRef((JITType*)finalTypeOpnd->m_metadata);
    // Store the final type pointer into the object's type field.
    IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrAdjustObjType->m_func);
    this->InsertMove(opnd, finalTypeOpnd, instrAdjustObjType);
    initialTypeOpnd->Free(instrAdjustObjType->m_func);
    instrAdjustObjType->Remove();
}
// Try to lower a root-object field load as a direct slot read, which is valid when
// the property is non-configurable on the global object (it can never be deleted,
// so no cache or type check is needed).
//
// Params: instrLdFld - the LdRootFld instruction.
// Returns: true if the load was fully lowered to a plain assignment, false otherwise.
bool
Lowerer::GenerateNonConfigurableLdRootFld(IR::Instr * instrLdFld)
{
    if (!instrLdFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
    {
        return false;
    }
    IR::PropertySymOpnd *propertySymOpnd = instrLdFld->GetSrc1()->AsPropertySymOpnd();
    if (!propertySymOpnd->IsRootObjectNonConfigurableFieldLoad())
    {
        return false;
    }
    Assert(!PHASE_OFF(Js::RootObjectFldFastPathPhase, this->m_func));
    // This path cannot fail at runtime, so any bailout attached must be lazy; drop it.
    Assert(!instrLdFld->HasBailOutInfo() || instrLdFld->HasLazyBailOut());
    if (instrLdFld->HasLazyBailOut())
    {
        instrLdFld->ClearBailOutInfo();
    }
    IR::Opnd * srcOpnd;
    intptr_t rootObject = this->m_func->GetJITFunctionBody()->GetRootObject();
    if (propertySymOpnd->UsesAuxSlot())
    {
        // Load the aux slot array pointer from the root object, then read the slot from it.
        IR::RegOpnd * auxSlotOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
        this->InsertMove(auxSlotOpnd, IR::MemRefOpnd::New((byte *)rootObject + Js::DynamicObject::GetOffsetOfAuxSlots(),
            TyMachPtr, this->m_func), instrLdFld);
        srcOpnd = IR::IndirOpnd::New(auxSlotOpnd, propertySymOpnd->GetSlotIndex() * sizeof(Js::Var *),
            TyVar, this->m_func);
    }
    else
    {
        // Inline slot: address can be computed statically from the root object pointer.
        srcOpnd = IR::MemRefOpnd::New((Js::Var *)rootObject + propertySymOpnd->GetSlotIndex(),
            TyVar, this->m_func);
    }
    // Rewrite the field load as a simple register/memory assignment.
    instrLdFld->ReplaceSrc1(srcOpnd);
    instrLdFld->m_opcode = Js::OpCode::Ld_A;
    LowererMD::ChangeToAssign(instrLdFld);
    return true;
}
  5652. IR::Instr *
  5653. Lowerer::LowerDelFld(IR::Instr *delFldInstr, IR::JnHelperMethod helperMethod, bool useInlineCache, bool strictMode)
  5654. {
  5655. IR::Instr *instrPrev;
  5656. Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
  5657. if (strictMode)
  5658. {
  5659. propertyOperationFlag = Js::PropertyOperation_StrictMode;
  5660. }
  5661. instrPrev = m_lowererMD.LoadHelperArgument(delFldInstr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
  5662. LowerLdFld(delFldInstr, helperMethod, helperMethod, useInlineCache);
  5663. return instrPrev;
  5664. }
// Lower an IsInst (instanceof) instruction. The operands arrive as a chain of
// ArgOut-style instructions linked through src2; this walks the chain, unlinking
// each argument and loading the helper arguments in reverse order:
// inlineCache, scriptContext, function, instance.
//
// Params:
//   isInstInstr  - the IsInst instruction; src1 is the inline cache index.
//   helperMethod - the runtime helper implementing instanceof.
// Returns: the first instruction of the lowered sequence.
IR::Instr *
Lowerer::LowerIsInst(IR::Instr * isInstInstr, IR::JnHelperMethod helperMethod)
{
    IR::Instr * instrPrev;
    IR::Instr * instrArg;
    IR::RegOpnd * argOpnd;
    // inlineCache
    instrPrev = m_lowererMD.LoadHelperArgument(isInstInstr, LoadIsInstInlineCacheOpnd(isInstInstr, isInstInstr->GetSrc1()->AsIntConstOpnd()->AsUint32()));
    isInstInstr->FreeSrc1();
    // Follow the argument chain: src2 is defined by the next arg instruction.
    argOpnd = isInstInstr->UnlinkSrc2()->AsRegOpnd();
    Assert(argOpnd->m_sym->m_isSingleDef);
    instrArg = argOpnd->m_sym->m_instrDef;
    argOpnd->Free(m_func);
    // scriptContext
    LoadScriptContext(isInstInstr);
    // instance goes last, so remember it now
    IR::Opnd * instanceOpnd = instrArg->UnlinkSrc1();
    argOpnd = instrArg->UnlinkSrc2()->AsRegOpnd();
    Assert(argOpnd->m_sym->m_isSingleDef);
    instrArg->Remove();
    instrArg = argOpnd->m_sym->m_instrDef;
    argOpnd->Free(m_func);
    // function
    IR::Opnd *opnd = instrArg->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(isInstInstr, opnd);
    Assert(instrArg->GetSrc2() == NULL);
    instrArg->Remove();
    // instance
    m_lowererMD.LoadHelperArgument(isInstInstr, instanceOpnd);
    m_lowererMD.ChangeToHelperCall(isInstInstr, helperMethod);
    return instrPrev;
}
// Initialize a stack-allocated Js::StackScriptFunction at function entry:
// compute the address of the stack slot, initialize the function object there
// (with a null frame display for now), link it into the on-stack function list,
// and advance nextStackFunctionOpnd to this slot's trailing "next" pointer.
//
// Params:
//   stackSym   - stack symbol holding the StackScriptFunction storage (plus next pointer).
//   nestedInfo - address of the nested function's FunctionInfo pointer.
void
Lowerer::GenerateStackScriptFunctionInit(StackSym * stackSym, Js::FunctionInfoPtrPtr nestedInfo)
{
    Func * func = this->m_func;
    Assert(func->HasAnyStackNestedFunc());
    Assert(nextStackFunctionOpnd);
    IR::Instr * insertBeforeInstr = func->GetFunctionEntryInsertionPoint();
    IR::RegOpnd * addressOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseAddressOpnd(addressOpnd, func);
    // addressOpnd = &stackSym (address of the stack function object).
    InsertLea(addressOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertBeforeInstr);
    // Currently we don't initialize the environment until we actually allocate the function, we also
    // walk the list of stack function when we need to box them. so we should use initialize it to NullFrameDisplay
    GenerateStackScriptFunctionInit(addressOpnd, nestedInfo,
        IR::AddrOpnd::New(func->GetThreadContextInfo()->GetNullFrameDisplayAddr(), IR::AddrOpndKindDynamicMisc, func), insertBeforeInstr);
    // Establish the next link
    InsertMove(nextStackFunctionOpnd, addressOpnd, insertBeforeInstr);
    // Subsequent stack functions chain through the slot just past this object.
    this->nextStackFunctionOpnd = IR::SymOpnd::New(stackSym, sizeof(Js::StackScriptFunction), TyMachPtr, func);
}
// Emit inline initialization of a ScriptFunction object's fields at regOpnd:
// vtable, type (ensuring the deferred prototype type exists, via helper if it
// hasn't been created yet), aux slots, object array, constructor cache,
// function info, environment, cached scope object, and inline-cache flag.
//
// Params:
//   regOpnd           - register holding the (possibly zero-initialized) function object.
//   vtableAddressOpnd - vtable pointer to store at offset 0.
//   nestedInfo        - address of the FunctionInfo pointer for the nested function.
//   envOpnd           - environment (frame display) to store.
//   insertBeforeInstr - insertion point for all emitted instructions.
//   isZeroed          - if true, null-stores into already-zeroed memory can be elided.
void
Lowerer::GenerateScriptFunctionInit(IR::RegOpnd * regOpnd, IR::Opnd * vtableAddressOpnd,
    Js::FunctionInfoPtrPtr nestedInfo, IR::Opnd * envOpnd, IR::Instr * insertBeforeInstr, bool isZeroed)
{
    Func * func = this->m_func;
    // functionInfoOpnd = *nestedInfo; functionProxyOpnd = functionInfo->functionProxy.
    IR::Opnd * functionInfoOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(functionInfoOpnd, IR::MemRefOpnd::New(nestedInfo, TyMachPtr, func), insertBeforeInstr);
    IR::Opnd * functionProxyOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(functionProxyOpnd, IR::IndirOpnd::New(functionInfoOpnd->AsRegOpnd(), Js::FunctionInfo::GetOffsetOfFunctionProxy(), TyMachPtr, func), insertBeforeInstr);
    IR::Opnd * typeOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(typeOpnd, IR::IndirOpnd::New(functionProxyOpnd->AsRegOpnd(), Js::FunctionProxy::GetOffsetOfDeferredPrototypeType(),
        TyMachPtr, func), insertBeforeInstr);
    // If the deferred prototype type is null, call a helper to create it.
    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    InsertTestBranch(typeOpnd, typeOpnd, Js::OpCode::BrEq_A, labelHelper, insertBeforeInstr);
    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    InsertBranch(Js::OpCode::Br, labelDone, insertBeforeInstr);
    insertBeforeInstr->InsertBefore(labelHelper);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, functionProxyOpnd);
    IR::Instr * callHelperInstr = IR::Instr::New(Js::OpCode::Call, typeOpnd,
        IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperEnsureFunctionProxyDeferredPrototypeType, func), func);
    insertBeforeInstr->InsertBefore(callHelperInstr);
    m_lowererMD.LowerCall(callHelperInstr, 0);
    insertBeforeInstr->InsertBefore(labelDone);
    // Initialize the object's fields; isZeroed lets null-inits be skipped when the
    // allocation is known to be zero-filled.
    GenerateMemInit(regOpnd, 0, vtableAddressOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfType(), typeOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfAuxSlots(), insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfObjectArray(), insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfConstructorCache(),
        LoadLibraryValueOpnd(insertBeforeInstr, LibraryValue::ValueConstructorCacheDefaultInstance),
        insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfFunctionInfo(), functionInfoOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfEnvironment(), envOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfCachedScopeObj(), insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfHasInlineCaches(), insertBeforeInstr, isZeroed);
}
// Initialize a stack-allocated script function pointed to by regOpnd: perform the
// common ScriptFunction initialization with the StackScriptFunction vtable, then
// null out the boxed-function back-pointer.
//
// Params:
//   regOpnd           - register holding the address of the stack function object.
//   nestedInfo        - address of the nested function's FunctionInfo pointer.
//   envOpnd           - environment (frame display) to store.
//   insertBeforeInstr - insertion point for the emitted instructions.
void
Lowerer::GenerateStackScriptFunctionInit(IR::RegOpnd * regOpnd, Js::FunctionInfoPtrPtr nestedInfo, IR::Opnd * envOpnd, IR::Instr * insertBeforeInstr)
{
    Func * func = this->m_func;
    GenerateScriptFunctionInit(regOpnd,
        LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableStackScriptFunction),
        nestedInfo, envOpnd, insertBeforeInstr);
    // No boxed (heap) counterpart exists yet.
    InsertMove(IR::IndirOpnd::New(regOpnd, Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func),
        IR::AddrOpnd::NewNull(func), insertBeforeInstr);
}
// Create (or locate) the storage for the head of the on-stack script-function list
// and remember it in nextStackFunctionOpnd. On x86/x64 a dedicated stack slot is
// allocated; on other targets a fixed frame-relative slot is used.
void
Lowerer::EnsureStackFunctionListStackSym()
{
    Func * func = this->m_func;
    Assert(func->HasAnyStackNestedFunc());
#if defined(_M_IX86) || defined(_M_X64)
    // Must run before any other stack allocation so the slot lands at a known offset.
    Assert(func->m_localStackHeight == (func->HasArgumentSlot()? MachArgsSlotOffset : 0));
    StackSym * stackFunctionListStackSym = StackSym::New(TyMachPtr, func);
    func->StackAllocate(stackFunctionListStackSym, sizeof(Js::ScriptFunction *));
    nextStackFunctionOpnd = IR::SymOpnd::New(stackFunctionListStackSym, TyMachPtr, func);
#else
    Assert(func->m_localStackHeight == 0);
    // Frame-register-relative slot reserved for the stack nested function list.
    nextStackFunctionOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(NULL, FRAME_REG, TyMachReg, func),
        -(int32)(Js::Constants::StackNestedFuncList * sizeof(Js::Var)), TyMachPtr, func);
#endif
}
  5776. void
  5777. Lowerer::AllocStackClosure()
  5778. {
  5779. m_func->StackAllocate(m_func->GetLocalFrameDisplaySym(), sizeof(Js::Var));
  5780. m_func->StackAllocate(m_func->GetLocalClosureSym(), sizeof(Js::Var));
  5781. }
  5782. void
  5783. Lowerer::EnsureZeroLastStackFunctionNext()
  5784. {
  5785. Assert(nextStackFunctionOpnd != nullptr);
  5786. Func * func = this->m_func;
  5787. IR::Instr * insertBeforeInstr = func->GetFunctionEntryInsertionPoint();
  5788. InsertMove(nextStackFunctionOpnd, IR::AddrOpnd::NewNull(func), insertBeforeInstr);
  5789. }
// Emit the fast path for NewScFunc that reuses a stack-allocated function object
// instead of calling the helper. If the function body's Flags_StackNestedFunc bit
// is clear at runtime, control branches to the (slow) helper path that the caller
// emits after this.
//
// Params:
//   newScFuncInstr - the NewScFunc instruction; src1 is the nested-function index.
//   ppEnvOpnd      - in/out environment operand; for loop bodies it is replaced by
//                    the environment unpacked from NewScFuncData.
// Returns: the instruction that assigns the function object to the dst.
IR::Instr *
Lowerer::GenerateNewStackScFunc(IR::Instr * newScFuncInstr, IR::RegOpnd ** ppEnvOpnd)
{
    Assert(newScFuncInstr->m_func->DoStackNestedFunc());
    Func * func = newScFuncInstr->m_func;
    uint index = newScFuncInstr->GetSrc1()->AsIntConstOpnd()->AsUint32();
    Assert(index < func->GetJITFunctionBody()->GetNestedCount());
    IR::LabelInstr * labelNoStackFunc = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    // Runtime check: if stack nested functions are disabled for this body, take the helper path.
    InsertTestBranch(IR::MemRefOpnd::New(func->GetJITFunctionBody()->GetFlagsAddr(), TyInt8, func),
        IR::IntConstOpnd::New(Js::FunctionBody::Flags_StackNestedFunc, TyInt8, func, true),
        Js::OpCode::BrEq_A, labelNoStackFunc, newScFuncInstr);
    Js::FunctionInfoPtrPtr nestedInfo = func->GetJITFunctionBody()->GetNestedFuncRef(index);
    IR::Instr * instrAssignDst;
    IR::RegOpnd * envOpnd = *ppEnvOpnd;
    if (!func->IsLoopBody())
    {
        // the stackAllocate Call below for this sym is passing a size that is not represented by any IRType and hence passing TyMisc for the constructor
        StackSym * stackSym = StackSym::New(TyMisc, func);
        // ScriptFunction and it's next pointer
        this->m_func->StackAllocate(stackSym, sizeof(Js::StackScriptFunction) + sizeof(Js::StackScriptFunction *));
        GenerateStackScriptFunctionInit(stackSym, nestedInfo);
        // Store the environment and return the address of the stack object as the dst.
        InsertMove(IR::SymOpnd::New(stackSym, Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, func),
            envOpnd,
            newScFuncInstr);
        instrAssignDst =
            InsertLea(newScFuncInstr->GetDst()->AsRegOpnd(), IR::SymOpnd::New(stackSym, TyMachPtr, func), newScFuncInstr);
    }
    else
    {
        // Loop body: the stack function objects live in the enclosing function's frame;
        // unpack the environment and the function-object array base from NewScFuncData.
        Assert(func->IsTopFunc());
        Assert(func->m_loopParamSym);
        IR::Instr * envDefInstr = envOpnd->AsRegOpnd()->m_sym->m_instrDef;
        Assert(envDefInstr && envDefInstr->m_opcode == Js::OpCode::NewScFuncData);
        IR::RegOpnd * opndFuncPtr = envDefInstr->UnlinkSrc2()->AsRegOpnd();
        Assert(opndFuncPtr);
        envOpnd = envDefInstr->UnlinkSrc1()->AsRegOpnd();
        Assert(envOpnd);
        *ppEnvOpnd = envOpnd;
        envDefInstr->Remove();
        if (index != 0)
        {
            // Index into the array of StackScriptFunction objects.
            IR::RegOpnd * opnd = IR::RegOpnd::New(TyVar, func);
            InsertAdd(false, opnd, opndFuncPtr, IR::IntConstOpnd::New(index * sizeof(Js::StackScriptFunction), TyMachPtr, func), newScFuncInstr);
            opndFuncPtr = opnd;
        }
        InsertMove(IR::IndirOpnd::New(opndFuncPtr, Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, func),
            envOpnd, newScFuncInstr);
        instrAssignDst = InsertMove(newScFuncInstr->GetDst(), opndFuncPtr, newScFuncInstr);
    }
    // Skip the helper path; the caller inserts the helper call between these labels.
    InsertBranch(Js::OpCode::Br, labelDone, newScFuncInstr);
    newScFuncInstr->InsertBefore(labelNoStackFunc);
    newScFuncInstr->InsertAfter(labelDone);
    return instrAssignDst;
}
  5845. IR::Instr *
  5846. Lowerer::LowerNewScFunc(IR::Instr * newScFuncInstr)
  5847. {
  5848. IR::Instr *stackNewScFuncInstr = nullptr;
  5849. IR::RegOpnd * envOpnd = newScFuncInstr->UnlinkSrc2()->AsRegOpnd();
  5850. if (newScFuncInstr->m_func->DoStackNestedFunc())
  5851. {
  5852. stackNewScFuncInstr = GenerateNewStackScFunc(newScFuncInstr, &envOpnd);
  5853. }
  5854. IR::IntConstOpnd * functionBodySlotOpnd = newScFuncInstr->UnlinkSrc1()->AsIntConstOpnd();
  5855. IR::Instr * instrPrev = this->LoadFunctionBodyAsArgument(newScFuncInstr, functionBodySlotOpnd, envOpnd);
  5856. m_lowererMD.ChangeToHelperCall(newScFuncInstr, IR::HelperScrFunc_OP_NewScFunc );
  5857. return stackNewScFuncInstr == nullptr? instrPrev : stackNewScFuncInstr;
  5858. }
  5859. IR::Instr *
  5860. Lowerer::LowerNewScFuncHomeObj(IR::Instr * newScFuncInstr)
  5861. {
  5862. newScFuncInstr->m_opcode = Js::OpCode::CallHelper;
  5863. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperScrFunc_OP_NewScFuncHomeObj, this->m_func);
  5864. IR::Opnd * src1 = newScFuncInstr->UnlinkSrc1();
  5865. newScFuncInstr->SetSrc1(helperOpnd);
  5866. newScFuncInstr->SetSrc2(src1);
  5867. return newScFuncInstr;
  5868. }
  5869. IR::Instr *
  5870. Lowerer::LowerNewScGenFunc(IR::Instr * newScFuncInstr)
  5871. {
  5872. IR::IntConstOpnd * functionBodySlotOpnd = newScFuncInstr->UnlinkSrc1()->AsIntConstOpnd();
  5873. IR::RegOpnd * envOpnd = newScFuncInstr->UnlinkSrc2()->AsRegOpnd();
  5874. IR::Instr * instrPrev = this->LoadFunctionBodyAsArgument(newScFuncInstr, functionBodySlotOpnd, envOpnd);
  5875. m_lowererMD.ChangeToHelperCall(newScFuncInstr, IR::HelperScrFunc_OP_NewScGenFunc );
  5876. return instrPrev;
  5877. }
  5878. IR::Instr *
  5879. Lowerer::LowerNewScGenFuncHomeObj(IR::Instr * newScFuncInstr)
  5880. {
  5881. newScFuncInstr->m_opcode = Js::OpCode::CallHelper;
  5882. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperScrFunc_OP_NewScGenFuncHomeObj, this->m_func);
  5883. IR::Opnd * src1 = newScFuncInstr->UnlinkSrc1();
  5884. newScFuncInstr->SetSrc1(helperOpnd);
  5885. newScFuncInstr->SetSrc2(src1);
  5886. return newScFuncInstr;
  5887. }
  5888. IR::Instr *
  5889. Lowerer::LowerStPropIdArrFromVar(IR::Instr * stPropIdInstr)
  5890. {
  5891. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperStPropIdArrFromVar, this->m_func);
  5892. IR::Opnd * src1 = stPropIdInstr->UnlinkSrc1();
  5893. stPropIdInstr->SetSrc1(helperOpnd);
  5894. stPropIdInstr->SetSrc2(src1);
  5895. return m_lowererMD.LowerCallHelper(stPropIdInstr);
  5896. }
  5897. IR::Instr *
  5898. Lowerer::LowerRestify(IR::Instr * newRestInstr)
  5899. {
  5900. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperRestify, this->m_func);
  5901. IR::Opnd * src1 = newRestInstr->UnlinkSrc1();
  5902. newRestInstr->SetSrc1(helperOpnd);
  5903. newRestInstr->SetSrc2(src1);
  5904. return m_lowererMD.LowerCallHelper(newRestInstr);
  5905. }
  5906. ///----------------------------------------------------------------------------
  5907. ///
  5908. /// Lowerer::LowerScopedLdFld
  5909. ///
/// Lower a load instruction that takes an additional instance to use as
/// a default if the scope chain provided doesn't contain the property.
  5912. ///
  5913. ///----------------------------------------------------------------------------
// Lower a scoped field load into a helper call: pushes (in reverse order) the
// script context (only when no inline cache is used), the function's root
// object as the default instance, the property sym, and - when caching - the
// inline cache index, the runtime inline cache, and the function body.
// Returns the instruction from which the caller resumes lowering.
IR::Instr *
Lowerer::LowerScopedLdFld(IR::Instr * ldFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache)
{
    IR::Opnd *src;
    // NOTE(review): this initial capture of m_prev is unconditionally
    // overwritten by the LoadHelperArgument result below before being read.
    IR::Instr *instrPrev = ldFldInstr->m_prev;

    if(!withInlineCache)
    {
        // The non-caching helper takes the script context as its last argument.
        LoadScriptContext(ldFldInstr);
    }

    // The root object serves as the fallback instance when the scope chain
    // does not contain the property.
    intptr_t rootObject = m_func->GetJITFunctionBody()->GetRootObject();
    src = IR::AddrOpnd::New(rootObject, IR::AddrOpndKindDynamicVar, this->m_func, true);
    instrPrev = m_lowererMD.LoadHelperArgument(ldFldInstr, src);

    src = ldFldInstr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
    this->LoadPropertySymAsArgument(ldFldInstr, src);

    if (withInlineCache)
    {
        AssertMsg(src->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");

        m_lowererMD.LoadHelperArgument(
            ldFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        // Not using the polymorphic inline cache because the fast path only uses the monomorphic inline cache
        this->m_lowererMD.LoadHelperArgument(ldFldInstr, this->LoadRuntimeInlineCacheOpnd(ldFldInstr, src->AsPropertySymOpnd()));
        m_lowererMD.LoadHelperArgument(ldFldInstr, LoadFunctionBodyOpnd(ldFldInstr));
    }

    // Finally retarget the instruction to the requested helper.
    m_lowererMD.ChangeToHelperCall(ldFldInstr, helperMethod);
    return instrPrev;
}
  5942. ///----------------------------------------------------------------------------
  5943. ///
  5944. /// Lowerer::LowerScopedLdInst
  5945. ///
/// Lower a load instruction that takes an additional instance to use as
/// a default if the scope chain provided doesn't contain the property.
  5948. ///
  5949. ///----------------------------------------------------------------------------
// Lower a scoped load that writes its result through an out-pointer: passes
// the script context, the address of the stack slot used as the __out Var*,
// the root object, and the property sym to the helper, then copies the value
// the helper wrote into the stack slot back into the destination register.
IR::Instr *
Lowerer::LowerScopedLdInst(IR::Instr *instr, IR::JnHelperMethod helperMethod)
{
    IR::Opnd *src;
    IR::Instr *instrPrev;

    // last argument is the scriptContext
    instrPrev = LoadScriptContext(instr);

    src = instr->UnlinkSrc2();
    AssertMsg(src->IsRegOpnd(), "Expected Reg opnd as src2");

    // __out Var*. The StackSym is allocated in irbuilder, and here we need to insert a lea
    StackSym* dstSym = src->GetStackSym();
    IR::Instr *load = InsertLoadStackAddress(dstSym, instr);
    IR::Opnd* tempOpnd = load->GetDst();
    m_lowererMD.LoadHelperArgument(instr, tempOpnd);

    // now 3rd last argument is the rootObject of the function. Need to add addrOpnd to
    // pass in the address of the rootObject.
    IR::Opnd * srcOpnd;
    intptr_t rootObject = m_func->GetJITFunctionBody()->GetRootObject();
    srcOpnd = IR::AddrOpnd::New(rootObject, IR::AddrOpndKindDynamicVar, instr->m_func, true);
    instrPrev = m_lowererMD.LoadHelperArgument(instr, srcOpnd);

    // no change, the property field built from irbuilder.
    src = instr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
    this->LoadPropertySymAsArgument(instr, src);

    instrPrev = m_lowererMD.ChangeToHelperCall(instr, helperMethod);

    // After the call, move the value from dstSym's stack slot (filled in via
    // the out-pointer above) into dstSym's register form.
    IR::RegOpnd* regOpnd = IR::RegOpnd::New(dstSym, TyVar, m_func);
    IR::SymOpnd*symOpnd = IR::SymOpnd::New(dstSym, TyVar, m_func);
    this->InsertMove(regOpnd, symOpnd, instrPrev);

    return instrPrev;
}
  5980. IR::Instr *
  5981. Lowerer::LowerScopedDelFld(IR::Instr * delFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache, bool strictMode)
  5982. {
  5983. IR::Instr *instrPrev;
  5984. Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
  5985. if (strictMode)
  5986. {
  5987. propertyOperationFlag = Js::PropertyOperation_StrictMode;
  5988. }
  5989. instrPrev = m_lowererMD.LoadHelperArgument(delFldInstr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
  5990. LowerScopedLdFld(delFldInstr, helperMethod, withInlineCache);
  5991. return instrPrev;
  5992. }
// Lower a field store into a call to one of the profiling store helpers.
// Arguments are pushed in reverse of the helper signatures quoted below:
// frame pointer, then (StSuperFld only) the extra src2 operand, then the
// stored value, the inline cache index, and the property sym.
IR::Instr *
Lowerer::LowerProfiledStFld(IR::JitProfilingInstr *stFldInstr, Js::PropertyOperationFlags flags)
{
    Assert(stFldInstr->profileId == Js::Constants::NoProfileId);

    IR::Instr *const instrPrev = stFldInstr->m_prev;

    /*
        void ProfilingHelpers::ProfiledInitFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer)

        void ProfilingHelpers::ProfiledStFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer)

        void ProfilingHelpers::ProfiledStSuperFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer,
            const Var thisInstance)
        {
    */

    m_lowererMD.LoadHelperArgument(stFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
    if (stFldInstr->m_opcode == Js::OpCode::StSuperFld)
    {
        // StSuperFld carries an extra operand in src2 (thisInstance in the
        // helper signature above).
        m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc2());
    }
    m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc1());

    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");
    m_lowererMD.LoadHelperArgument(
        stFldInstr,
        IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
    LoadPropertySymAsArgument(stFldInstr, dst);

    // Select the profiling helper from the opcode and operation flags.
    IR::JnHelperMethod helper;
    switch (stFldInstr->m_opcode)
    {
        case Js::OpCode::InitFld:
        case Js::OpCode::InitRootFld:
            helper = IR::HelperProfiledInitFld;
            break;

        case Js::OpCode::StSuperFld:
            helper = IR::HelperProfiledStSuperFld;
            break;

        default:
            // Root vs. non-root and strict vs. sloppy mode each pick a
            // different helper variant.
            helper =
                flags & Js::PropertyOperation_Root
                    ? flags & Js::PropertyOperation_StrictMode ? IR::HelperProfiledStRootFld_Strict : IR::HelperProfiledStRootFld
                    : flags & Js::PropertyOperation_StrictMode ? IR::HelperProfiledStFld_Strict : IR::HelperProfiledStFld;
            break;
    }

    stFldInstr->SetSrc1(IR::HelperCallOpnd::New(helper, m_func));
    m_lowererMD.LowerCall(stFldInstr, 0);

    return instrPrev;
}
  6053. ///----------------------------------------------------------------------------
  6054. ///
  6055. /// Lowerer::LowerStFld
  6056. ///
  6057. ///----------------------------------------------------------------------------
// Lower a field store to a helper call. Selects between the monomorphic and
// polymorphic helpers based on the cache available on the dst property sym,
// pushes the helper arguments (flags, value, property sym, and - with an
// inline cache - the cache index, cache, and function body), and converts the
// instruction via ChangeToHelperCall. Returns the instruction preceding the
// original store, from which lowering resumes.
IR::Instr *
Lowerer::LowerStFld(
    IR::Instr * stFldInstr,
    IR::JnHelperMethod helperMethod,
    IR::JnHelperMethod polymorphicHelperMethod,
    bool withInlineCache,
    IR::LabelInstr *labelBailOut,
    bool isHelper,
    bool withPutFlags,
    Js::PropertyOperationFlags flags)
{
    if (stFldInstr->IsJitProfilingInstr())
    {
        // If we want to profile then do something completely different
        return this->LowerProfiledStFld(stFldInstr->AsJitProfilingInstr(), flags);
    }

    IR::Instr *instrPrev = stFldInstr->m_prev;
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");

    // Resolve which inline cache operand (and thus which helper) to use.
    IR::Opnd * inlineCacheOpnd = nullptr;
    if (withInlineCache)
    {
        AssertMsg(dst->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
        if (dst->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache && polymorphicHelperMethod != helperMethod)
        {
            // A runtime polymorphic cache exists and a distinct polymorphic
            // helper was supplied: use the polymorphic helper/cache pair.
            JITTimePolymorphicInlineCache * polymorphicInlineCache = dst->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache;
            helperMethod = polymorphicHelperMethod;
            inlineCacheOpnd = IR::AddrOpnd::New(polymorphicInlineCache->GetAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func);
        }
        else
        {
            // Need to load runtime inline cache opnd first before loading any helper argument
            // because LoadRuntimeInlineCacheOpnd may create labels marked as helper
            // and cause op helper register push/pop save in x86, messing up with any helper arguments that is already pushed
            inlineCacheOpnd = this->LoadRuntimeInlineCacheOpnd(stFldInstr, dst->AsPropertySymOpnd(), isHelper);
        }
    }
    if (withPutFlags)
    {
        m_lowererMD.LoadHelperArgument(stFldInstr,
            IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    }

    IR::Opnd *src = stFldInstr->UnlinkSrc1();
    if (stFldInstr->m_opcode == Js::OpCode::StSuperFld)
    {
        // StSuperFld carries an additional operand in src2 that is also
        // passed to the helper.
        m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc2());
    }

    m_lowererMD.LoadHelperArgument(stFldInstr, src);
    this->LoadPropertySymAsArgument(stFldInstr, dst);

    if (withInlineCache)
    {
        Assert(inlineCacheOpnd != nullptr);

        this->m_lowererMD.LoadHelperArgument(
            stFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        this->m_lowererMD.LoadHelperArgument(stFldInstr, inlineCacheOpnd);
        this->m_lowererMD.LoadHelperArgument(stFldInstr, LoadFunctionBodyOpnd(stFldInstr));
    }

    // Convert to the helper call, providing the property owner register and
    // (when available) the property sym opnd for bailout/fast-path plumbing.
    IR::RegOpnd *opndBase = dst->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func);
    m_lowererMD.ChangeToHelperCall(stFldInstr, helperMethod, labelBailOut, opndBase, dst->AsSymOpnd()->IsPropertySymOpnd() ? dst->AsSymOpnd()->AsPropertySymOpnd() : nullptr, isHelper);

    return instrPrev;
}
// Drives the complete lowering of a field store: tries the custom-property
// fast path, then the object-type-spec (cached type) path, then the generic
// fast path, and finally falls back to a helper call via LowerStFld.
// Returns the instruction from which the caller resumes lowering.
IR::Instr* Lowerer::GenerateCompleteStFld(IR::Instr* instr, bool emitFastPath, IR::JnHelperMethod monoHelperAfterFastPath, IR::JnHelperMethod polyHelperAfterFastPath,
    IR::JnHelperMethod monoHelperWithoutFastPath, IR::JnHelperMethod polyHelperWithoutFastPath, bool withPutFlags, Js::PropertyOperationFlags flags)
{
    if(instr->CallsAccessor() && instr->HasBailOutInfo())
    {
        // A store that calls an accessor must not carry an implicit-call
        // bailout kind (the assert below enforces this).
        IR::BailOutKind kindMinusBits = instr->GetBailOutKind() & ~IR::BailOutKindBits;
        Assert(kindMinusBits != IR::BailOutOnImplicitCalls && kindMinusBits != IR::BailOutOnImplicitCallsPreOp);
    }

    IR::Instr* prevInstr = instr->m_prev;

    IR::LabelInstr* labelBailOut = nullptr;
    IR::LabelInstr* labelHelper = nullptr;
    bool isHelper = false;
    IR::RegOpnd* typeOpnd = nullptr;

    if(emitFastPath && GenerateFastStFldForCustomProperty(instr, &labelHelper))
    {
        if(labelHelper)
        {
            // A fast path was emitted but still needs a helper fallback.
            Assert(labelHelper->isOpHelper);
            instr->InsertBefore(labelHelper);
            prevInstr = this->LowerStFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
        }
        else
        {
            // The fast path fully handled the store; remove the original.
            instr->Remove();
            return prevInstr;
        }
    }
    else if (this->GenerateStFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd))
    {
        // Object-type-spec lowering consumed the instruction entirely.
        Assert(labelHelper == nullptr);
        return prevInstr;
    }
    else if (emitFastPath)
    {
        if (!GenerateFastStFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, &labelBailOut, typeOpnd, &isHelper, &labelHelper, withPutFlags, flags))
        {
            // Fast path couldn't be generated; fall back to the helper call,
            // inserting the helper label (if any) before it.
            if (labelHelper != nullptr)
            {
                labelHelper->isOpHelper = isHelper;
                instr->InsertBefore(labelHelper);
            }
            prevInstr = this->LowerStFld(instr, monoHelperAfterFastPath, polyHelperAfterFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
        }
    }
    else
    {
        // No fast path requested: go straight to the (monomorphic) helper.
        if (labelHelper != nullptr)
        {
            labelHelper->isOpHelper = isHelper;
            instr->InsertBefore(labelHelper);
        }
        prevInstr = this->LowerStFld(instr, monoHelperWithoutFastPath, monoHelperWithoutFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
    }

    return prevInstr;
}
  6175. void
  6176. Lowerer::GenerateDirectFieldStore(IR::Instr* instrStFld, IR::PropertySymOpnd* propertySymOpnd)
  6177. {
  6178. Func* func = instrStFld->m_func;
  6179. IR::Opnd *opndSlotArray = this->LoadSlotArrayWithCachedLocalType(instrStFld, propertySymOpnd);
  6180. // Store the value to the slot, getting the slot index from the cache.
  6181. uint16 index = propertySymOpnd->GetSlotIndex();
  6182. Assert(index != -1);
  6183. #if defined(RECYCLER_WRITE_BARRIER_JIT) && (defined(_M_IX86) || defined(_M_AMD64))
  6184. if (opndSlotArray->IsRegOpnd())
  6185. {
  6186. IR::IndirOpnd * opndDst = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, func);
  6187. this->GetLowererMD()->GenerateWriteBarrierAssign(opndDst, instrStFld->GetSrc1(), instrStFld);
  6188. }
  6189. else
  6190. {
  6191. Assert(opndSlotArray->IsMemRefOpnd());
  6192. IR::MemRefOpnd * opndDst = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, func);
  6193. this->GetLowererMD()->GenerateWriteBarrierAssign(opndDst, instrStFld->GetSrc1(), instrStFld);
  6194. }
  6195. #else
  6196. IR::Opnd *opnd;
  6197. if (opndSlotArray->IsRegOpnd())
  6198. {
  6199. opnd = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, func);
  6200. }
  6201. else
  6202. {
  6203. opnd = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, func);
  6204. }
  6205. this->InsertMove(opnd, instrStFld->GetSrc1(), instrStFld);
  6206. #endif
  6207. }
// Attempt to lower a field store using object-type-specialization info on the
// dst property sym operand. Returns true when the store was fully lowered
// here (direct store, final-type transition, or converted to a bailout) and
// the caller must not lower it again; returns false when the caller should
// continue with normal lowering. On the false path, *continueAsHelperOut /
// *labelHelperOut may direct the caller to resume in a helper block, and
// *typeOpndOut receives the loaded type register if a type check was emitted.
bool
Lowerer::GenerateStFldWithCachedType(IR::Instr *instrStFld, bool* continueAsHelperOut, IR::LabelInstr** labelHelperOut, IR::RegOpnd** typeOpndOut)
{
    IR::Instr *instr;
    IR::RegOpnd *typeOpnd = nullptr;
    IR::LabelInstr* labelObjCheckFailed = nullptr;
    IR::LabelInstr *labelTypeCheckFailed = nullptr;
    IR::LabelInstr *labelBothTypeChecksFailed = nullptr;
    IR::LabelInstr *labelDone = nullptr;

    // Default the out params: no helper continuation, no label, no type reg.
    Assert(continueAsHelperOut != nullptr);
    *continueAsHelperOut = false;

    Assert(labelHelperOut != nullptr);
    *labelHelperOut = nullptr;

    Assert(typeOpndOut != nullptr);
    *typeOpndOut = nullptr;

    Assert(instrStFld->GetDst()->IsSymOpnd());
    if (!instrStFld->GetDst()->AsSymOpnd()->IsPropertySymOpnd() || !instrStFld->GetDst()->AsPropertySymOpnd()->IsTypeCheckSeqCandidate())
    {
        return false;
    }

    IR::PropertySymOpnd *propertySymOpnd = instrStFld->GetDst()->AsPropertySymOpnd();

    // If we have any object type spec info, we better not believe this is a load from prototype, since this is a store
    // and we never share inline caches between loads and stores.
    Assert(!propertySymOpnd->HasObjTypeSpecFldInfo() || !propertySymOpnd->IsLoadedFromProto());

    AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
    if (!propertySymOpnd->IsTypeCheckSeqCandidate())
    {
        return false;
    }

    if (!propertySymOpnd->IsTypeCheckSeqParticipant() && !propertySymOpnd->NeedsLocalTypeCheck())
    {
        return false;
    }

    Assert(!propertySymOpnd->NeedsTypeCheckAndBailOut() || (instrStFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrStFld->GetBailOutKind())));

    // In the backwards pass we only add guarded property operations to instructions that are not already
    // protected by an upstream type check.
    Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);

    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Field store: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n"),
        Js::OpCodeUtil::GetOpCodeName(instrStFld->m_opcode),
        propertySymOpnd->m_sym->AsPropertySym()->m_propertyId,
        this->m_func->GetJITFunctionBody()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(),
        propertySymOpnd->IsTypeChecked() ? _u("true") : _u("false"));

    if (propertySymOpnd->HasFinalType() && !propertySymOpnd->IsLoadedFromProto())
    {
        propertySymOpnd->UpdateSlotForFinalType();
    }

    Func* func = instrStFld->m_func;

    // TODO (ObjTypeSpec): If ((PropertySym*)propertySymOpnd->m_sym)->m_stackSym->m_isIntConst consider emitting a direct
    // jump to helper or bailout. If we have a type check bailout, we could even abort compilation.

    bool hasTypeCheckBailout = instrStFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrStFld->GetBailOutKind());

    // If the type hasn't been checked upstream, see if it makes sense to check it here.
    bool isTypeChecked = propertySymOpnd->IsTypeChecked();
    if (!isTypeChecked)
    {
        // If the initial type has been checked, we can do a hard coded type transition without any type checks
        // (see GenerateStFldWithCachedFinalType), which is always worth doing, even if the type is not needed
        // downstream. We're not introducing any additional bailouts.
        if (propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType() && !propertySymOpnd->IsTypeDead())
        {
            // We have a final type in hand, so we can JIT (most of) the type transition work.
            return this->GenerateStFldWithCachedFinalType(instrStFld, propertySymOpnd);
        }

        if (propertySymOpnd->HasTypeMismatch())
        {
            // So we have a type mismatch, which happens when the type (and the type without property if ObjTypeSpecStore
            // is on) on this instruction didn't match the live type value according to the flow. We must have hit some
            // stale inline cache (perhaps inlined from a different function, or on a code path not taken for a while).
            // Either way, we know exactly what type the object must have at this point (fully determined by flow), but
            // we don't know whether that type already has the property we're storing here. All in all, we know exactly
            // what shape the object will have after this operation, but we're not sure what label (type) to give this
            // shape. Thus we can simply let the fast path do its thing based on the live inline cache. The downstream
            // instructions relying only on this shape (loads and stores) are safe, and those that need the next type
            // (i.e. adds) will do the same thing as this instruction.
            return false;
        }

        // If we're still here then we must need a primary type check on this instruction to protect
        // a sequence of field operations downstream, or a local type check for an isolated field store.
        Assert(propertySymOpnd->NeedsPrimaryTypeCheck() || propertySymOpnd->NeedsLocalTypeCheck());

        // With a type-check bailout, a failed object check falls through to the
        // same (bailout) label as a failed type check; otherwise it gets its
        // own label so the caller can continue lowering from it as a helper.
        labelTypeCheckFailed = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        labelBothTypeChecksFailed = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        labelObjCheckFailed = hasTypeCheckBailout ? labelBothTypeChecksFailed : IR::LabelInstr::New(Js::OpCode::Label, func, true);
        typeOpnd = this->GenerateCachedTypeCheck(instrStFld, propertySymOpnd, labelObjCheckFailed, labelBothTypeChecksFailed, labelTypeCheckFailed);
        *typeOpndOut = typeOpnd;
    }

    // Either we are protected by a type check upstream or we just emitted a type check above,
    // now it's time to store the field value.
    GenerateDirectFieldStore(instrStFld, propertySymOpnd);

    // If we are protected by a type check upstream, we don't need a bailout or helper here, delete the instruction
    // and return "true" to indicate that we succeeded in eliminating it.
    if (isTypeChecked)
    {
        Assert(labelTypeCheckFailed == nullptr && labelBothTypeChecksFailed == nullptr);
        AssertMsg(
            !instrStFld->HasBailOutInfo() || instrStFld->OnlyHasLazyBailOut(),
            "Why does a direct field store have bailout that is not lazy?"
        );
        instrStFld->Remove();
        return true;
    }

    // Otherwise, branch around the helper on successful type check.
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, func);
    instrStFld->InsertBefore(instr);

    // On failed type check, try the type without property if we've got one.
    instrStFld->InsertBefore(labelTypeCheckFailed);

    // Caution, this is one of the dusty corners of the JIT. We only get here if this is an isolated StFld which adds a property, or
    // ObjTypeSpecStore is off. In the former case no downstream operations depend on the final type produced here, and we can fall
    // back on live cache and helper if the type doesn't match. In the latter we may have a cache with type transition, which must
    // produce a value for the type after transition, because that type is consumed downstream. Thus, if the object's type doesn't
    // match either the type with or the type without the property we're storing, we must bail out here.
    bool emitAddProperty = propertySymOpnd->IsMono() && propertySymOpnd->HasInitialType();

    if (emitAddProperty)
    {
        GenerateCachedTypeWithoutPropertyCheck(instrStFld, propertySymOpnd, typeOpnd, labelBothTypeChecksFailed);
        GenerateFieldStoreWithTypeChange(instrStFld, propertySymOpnd, propertySymOpnd->GetInitialType(), propertySymOpnd->GetType());
        instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, func);
        instrStFld->InsertBefore(instr);
    }

    instrStFld->InsertBefore(labelBothTypeChecksFailed);
    instrStFld->InsertAfter(labelDone);

    if (hasTypeCheckBailout)
    {
        AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !PHASE_ON(Js::DeadStoreTypeChecksOnStoresPhase, this->m_func) || !propertySymOpnd->IsTypeDead() || propertySymOpnd->TypeCheckRequired(),
            "Why does a field store have a type check bailout, if its type is dead?");

        if (instrStFld->GetBailOutInfo()->bailOutInstr != instrStFld)
        {
            // Set the cache index in the bailout info so that the generated code will write it into the
            // bailout record at runtime.
            instrStFld->GetBailOutInfo()->polymorphicCacheIndex = propertySymOpnd->m_inlineCacheIndex;
        }
        else
        {
            Assert(instrStFld->GetBailOutInfo()->polymorphicCacheIndex == propertySymOpnd->m_inlineCacheIndex);
        }

        // Convert the store into a bailout; its operands are no longer needed.
        instrStFld->m_opcode = Js::OpCode::BailOut;
        instrStFld->FreeSrc1();
        instrStFld->FreeDst();
        this->GenerateBailOut(instrStFld);
        return true;
    }
    else
    {
        // No bailout available: hand the failed-object-check label back to the
        // caller so the remaining lowering continues in a helper block.
        *continueAsHelperOut = true;
        Assert(labelObjCheckFailed != nullptr && labelObjCheckFailed != labelBothTypeChecksFailed);
        *labelHelperOut = labelObjCheckFailed;

        return false;
    }
}
  6360. IR::RegOpnd *
  6361. Lowerer::GenerateCachedTypeCheck(IR::Instr *instrChk, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr* labelObjCheckFailed, IR::LabelInstr *labelTypeCheckFailed, IR::LabelInstr *labelSecondChance)
  6362. {
  6363. Assert(propertySymOpnd->MayNeedTypeCheckProtection());
  6364. Func* func = instrChk->m_func;
  6365. IR::RegOpnd *regOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(func);
  6366. regOpnd->SetValueType(propertySymOpnd->GetPropertyOwnerValueType());
  6367. if (!regOpnd->IsNotTaggedValue())
  6368. {
  6369. m_lowererMD.GenerateObjectTest(regOpnd, instrChk, labelObjCheckFailed);
  6370. }
  6371. // Load the current object type into typeOpnd
  6372. IR::RegOpnd* typeOpnd = IR::RegOpnd::New(TyMachReg, func);
  6373. IR::Opnd *sourceType;
  6374. if (regOpnd->m_sym->IsConst() && !regOpnd->m_sym->IsIntConst() && !regOpnd->m_sym->IsFloatConst())
  6375. {
  6376. sourceType = IR::MemRefOpnd::New((BYTE*)regOpnd->m_sym->GetConstAddress() +
  6377. Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func, IR::AddrOpndKindDynamicObjectTypeRef);
  6378. }
  6379. else
  6380. {
  6381. sourceType = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func);
  6382. }
  6383. InsertMove(typeOpnd, sourceType, instrChk);
  6384. // Note: don't attempt equivalent type check if we're doing a final type optimization or if we have a monomorphic
  6385. // cache and no type check bailout. In the latter case, we can wind up doing expensive failed equivalence checks
  6386. // repeatedly and never rejit.
  6387. bool doEquivTypeCheck =
  6388. instrChk->HasEquivalentTypeCheckBailOut() ||
  6389. (propertySymOpnd->HasEquivalentTypeSet() &&
  6390. !(propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType()) &&
  6391. !propertySymOpnd->MustDoMonoCheck() &&
  6392. (propertySymOpnd->IsPoly() || instrChk->HasTypeCheckBailOut()));
  6393. Assert(doEquivTypeCheck || !instrChk->HasEquivalentTypeCheckBailOut());
  6394. // Create and initialize the property guard if required. Note that for non-shared monomorphic checks we can refer
  6395. // directly to the (pinned) type and not use a guard.
  6396. Js::PropertyGuard * typeCheckGuard;
  6397. IR::RegOpnd * polyIndexOpnd = nullptr;
  6398. JITTypeHolder monoType = nullptr;
  6399. if (doEquivTypeCheck)
  6400. {
  6401. typeCheckGuard = CreateEquivalentTypeGuardAndLinkToGuardedProperties(propertySymOpnd);
  6402. if (typeCheckGuard->IsPoly())
  6403. {
  6404. Assert(propertySymOpnd->ShouldUsePolyEquivTypeGuard(this->m_func));
  6405. polyIndexOpnd = this->GeneratePolymorphicTypeIndex(typeOpnd, typeCheckGuard, instrChk);
  6406. }
  6407. }
  6408. else
  6409. {
  6410. monoType = propertySymOpnd->MustDoMonoCheck() ? propertySymOpnd->GetMonoGuardType() : propertySymOpnd->GetType();
  6411. typeCheckGuard = this->CreateTypePropertyGuardForGuardedProperties(monoType, propertySymOpnd);
  6412. }
  6413. // Create the opnd we will check against the current type.
  6414. IR::Opnd *expectedTypeOpnd;
  6415. JITTypeHolder directCheckType = nullptr;
  6416. if (typeCheckGuard == nullptr)
  6417. {
  6418. Assert(monoType != nullptr);
  6419. expectedTypeOpnd = IR::AddrOpnd::New(monoType->GetAddr(), IR::AddrOpndKindDynamicType, func, true);
  6420. directCheckType = monoType;
  6421. }
  6422. else
  6423. {
  6424. Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
  6425. if (this->m_func->IsOOPJIT())
  6426. {
  6427. if (polyIndexOpnd != nullptr)
  6428. {
  6429. IR::RegOpnd * baseOpnd = IR::RegOpnd::New(TyMachPtr, func);
  6430. this->GenerateLeaOfOOPData(baseOpnd, typeCheckGuard, Js::JitPolyEquivalentTypeGuard::GetOffsetOfPolyValues(), instrChk);
  6431. expectedTypeOpnd = IR::IndirOpnd::New(baseOpnd, polyIndexOpnd, m_lowererMD.GetDefaultIndirScale(), TyMachPtr, func);
  6432. }
  6433. else
  6434. {
  6435. expectedTypeOpnd = this->GenerateIndirOfOOPData(typeCheckGuard, 0, instrChk);
  6436. }
  6437. this->addToLiveOnBackEdgeSyms->Set(func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
  6438. }
  6439. else
  6440. {
  6441. if (polyIndexOpnd != nullptr)
  6442. {
  6443. IR::RegOpnd * baseOpnd = IR::RegOpnd::New(TyMachPtr, func);
  6444. InsertMove(baseOpnd, IR::AddrOpnd::New((Js::Var)typeCheckGuard->AsPolyTypeCheckGuard()->GetAddressOfPolyValues(), IR::AddrOpndKindDynamicTypeCheckGuard, func, true), instrChk);
  6445. expectedTypeOpnd = IR::IndirOpnd::New(baseOpnd, polyIndexOpnd, m_lowererMD.GetDefaultIndirScale(), TyMachPtr, func);
  6446. }
  6447. else
  6448. {
  6449. expectedTypeOpnd = IR::MemRefOpnd::New((void*)(typeCheckGuard->GetAddressOfValue()), TyMachPtr, func, IR::AddrOpndKindDynamicGuardValueRef);
  6450. }
  6451. }
  6452. }
  6453. if (PHASE_VERBOSE_TRACE(Js::ObjTypeSpecPhase, this->m_func))
  6454. {
  6455. OUTPUT_VERBOSE_TRACE_FUNC(Js::ObjTypeSpecPhase, this->m_func, _u("Emitted %s type check "),
  6456. directCheckType != nullptr ? _u("direct") : propertySymOpnd->IsPoly() ? _u("equivalent") : _u("indirect"));
  6457. #if DBG
  6458. if (propertySymOpnd->GetGuardedPropOps() != nullptr)
  6459. {
  6460. Output::Print(_u(" guarding operations:\n "));
  6461. propertySymOpnd->GetGuardedPropOps()->Dump();
  6462. }
  6463. else
  6464. {
  6465. Output::Print(_u("\n"));
  6466. }
  6467. #else
  6468. Output::Print(_u("\n"));
  6469. #endif
  6470. Output::Flush();
  6471. }
  6472. if (doEquivTypeCheck)
  6473. {
  6474. // TODO (ObjTypeSpec): For isolated equivalent type checks it would be good to emit a check if the cache is still valid, and
  6475. // if not go straight to live polymorphic cache. This way we wouldn't have to bail out and re-JIT, and also wouldn't continue
  6476. // to try the equivalent type cache, miss it and do the slow comparison. This may be as easy as sticking a null on the main
  6477. // type in the equivalent type cache.
  6478. IR::LabelInstr* labelCheckEquivalentType = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  6479. IR::BranchInstr* branchInstr = InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelCheckEquivalentType, instrChk);
  6480. InsertObjectPoison(regOpnd, branchInstr, instrChk, false);
  6481. IR::LabelInstr *labelTypeCheckSucceeded = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  6482. InsertBranch(Js::OpCode::Br, labelTypeCheckSucceeded, instrChk);
  6483. instrChk->InsertBefore(labelCheckEquivalentType);
  6484. IR::Opnd* typeCheckGuardOpnd = nullptr;
  6485. if (this->m_func->IsOOPJIT())
  6486. {
  6487. typeCheckGuardOpnd = IR::RegOpnd::New(TyMachPtr, func);
  6488. this->GenerateLeaOfOOPData(typeCheckGuardOpnd->AsRegOpnd(), typeCheckGuard, 0, instrChk);
  6489. this->addToLiveOnBackEdgeSyms->Set(func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
  6490. }
  6491. else
  6492. {
  6493. typeCheckGuardOpnd = IR::AddrOpnd::New((Js::Var)typeCheckGuard, IR::AddrOpndKindDynamicTypeCheckGuard, func, true);
  6494. }
  6495. IR::JnHelperMethod helperMethod;
  6496. if (polyIndexOpnd != nullptr)
  6497. {
  6498. helperMethod = propertySymOpnd->HasFixedValue() ? IR::HelperCheckIfPolyTypeIsEquivalentForFixedField : IR::HelperCheckIfPolyTypeIsEquivalent;
  6499. this->m_lowererMD.LoadHelperArgument(instrChk, polyIndexOpnd);
  6500. }
  6501. else
  6502. {
  6503. helperMethod = propertySymOpnd->HasFixedValue() ? IR::HelperCheckIfTypeIsEquivalentForFixedField : IR::HelperCheckIfTypeIsEquivalent;
  6504. }
  6505. this->m_lowererMD.LoadHelperArgument(instrChk, typeCheckGuardOpnd);
  6506. this->m_lowererMD.LoadHelperArgument(instrChk, typeOpnd);
  6507. IR::RegOpnd* equivalentTypeCheckResultOpnd = IR::RegOpnd::New(TyUint8, func);
  6508. IR::HelperCallOpnd* equivalentTypeCheckHelperCallOpnd = IR::HelperCallOpnd::New(helperMethod, func);
  6509. IR::Instr* equivalentTypeCheckCallInstr = IR::Instr::New(Js::OpCode::Call, equivalentTypeCheckResultOpnd, equivalentTypeCheckHelperCallOpnd, func);
  6510. instrChk->InsertBefore(equivalentTypeCheckCallInstr);
  6511. this->m_lowererMD.LowerCall(equivalentTypeCheckCallInstr, 0);
  6512. InsertTestBranch(equivalentTypeCheckResultOpnd, equivalentTypeCheckResultOpnd, Js::OpCode::BrEq_A, labelTypeCheckFailed, instrChk);
  6513. // TODO (ObjTypeSpec): Consider emitting a shared bailout to which a specific bailout kind is written at runtime. This would allow us to distinguish
  6514. // between non-equivalent type and other cases, such as invalidated guard (due to fixed field overwrite, perhaps) or too much thrashing on the
  6515. // equivalent type cache. We could determine bailout kind based on the value returned by the helper. In the case of cache thrashing we could just
  6516. // turn off the whole optimization for a given function.
  6517. instrChk->InsertBefore(labelTypeCheckSucceeded);
  6518. }
  6519. else
  6520. {
  6521. IR::BranchInstr* branchInstr = InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelSecondChance != nullptr ? labelSecondChance : labelTypeCheckFailed, instrChk);
  6522. InsertObjectPoison(regOpnd, branchInstr, instrChk, false);
  6523. }
  6524. // Don't pin the type for polymorphic operations. The code can successfully execute even if this type is no longer referenced by any objects,
  6525. // as long as there are other objects with types equivalent on the properties referenced by this code. The type is kept alive until entry point
  6526. // installation by the JIT transfer data, and after that by the equivalent type cache, so it will stay alive unless or until it gets evicted
  6527. // from the cache.
  6528. if (!doEquivTypeCheck)
  6529. {
  6530. Assert(monoType != nullptr);
  6531. PinTypeRef(monoType, monoType.t, instrChk, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
  6532. }
  6533. return typeOpnd;
  6534. }
  6535. IR::RegOpnd *
  6536. Lowerer::GeneratePolymorphicTypeIndex(IR::RegOpnd * typeOpnd, Js::PropertyGuard * typeCheckGuard, IR::Instr * instrInsert)
  6537. {
  6538. IR::RegOpnd * resultOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  6539. InsertMove(resultOpnd, typeOpnd, instrInsert);
  6540. InsertShift(Js::OpCode::ShrU_A, false, resultOpnd, resultOpnd, IR::IntConstOpnd::New(PolymorphicInlineCacheShift, TyInt8, this->m_func, true), instrInsert);
  6541. InsertAnd(resultOpnd, resultOpnd, IR::IntConstOpnd::New(typeCheckGuard->AsPolyTypeCheckGuard()->GetSize() - 1, TyMachReg, this->m_func, true), instrInsert);
  6542. return resultOpnd;
  6543. }
// Emits a LEA that computes the runtime address of a piece of OOP-JIT native code data
// into regOpnd. The data block is addressed relative to the NativeCodeDataSym of the
// top function; 'address' is translated to its offset within that block, with 'offset'
// added on top.
// NOTE(review): Int32Math::Add's overflow indication is discarded here — presumably
// the combined offset is known to fit in int32; confirm.
void
Lowerer::GenerateLeaOfOOPData(IR::RegOpnd * regOpnd, void * address, int32 offset, IR::Instr * instrInsert)
{
    Func * func = instrInsert->m_func;
    int32 dataOffset;
    Int32Math::Add(NativeCodeData::GetDataTotalOffset(address), offset, &dataOffset);
    // LEA regOpnd, [nativeCodeDataSym + dataOffset]
    InsertLea(regOpnd,
        IR::IndirOpnd::New(IR::RegOpnd::New(func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), dataOffset, TyMachPtr,
#if DBG
            // Debug builds attach a human-readable description of the referenced data.
            NativeCodeData::GetDataDescription(address, func->m_alloc),
#endif
            func, true),
        instrInsert);
}
// Builds (without inserting any instruction) an indirect operand that dereferences a
// piece of OOP-JIT native code data: [nativeCodeDataSym + offsetOf(address) + offset].
// Companion to GenerateLeaOfOOPData, which computes the address instead of loading
// through it.
// NOTE(review): as in GenerateLeaOfOOPData, the Int32Math::Add overflow result is
// ignored — confirm the offsets are guaranteed to fit.
IR::Opnd *
Lowerer::GenerateIndirOfOOPData(void * address, int32 offset, IR::Instr * instrInsert)
{
    Func * func = instrInsert->m_func;
    int32 dataOffset;
    Int32Math::Add(NativeCodeData::GetDataTotalOffset(address), offset, &dataOffset);
    IR::Opnd * opnd = IR::IndirOpnd::New(IR::RegOpnd::New(func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), dataOffset, TyMachPtr,
#if DBG
        // Debug builds attach a human-readable description of the referenced data.
        NativeCodeData::GetDataDescription(address, func->m_alloc),
#endif
        func, true);
    return opnd;
}
// Emits speculation-hardening poisoning of an object pointer operand relative to the
// given type/guard-check branch. Delegates to the machine-dependent lowerer on all
// targets except 32-bit ARM, where the #ifndef below compiles the call away and no
// poisoning is emitted.
void
Lowerer::InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchInstr, IR::Instr* insertInstr, bool isForStore)
{
#ifndef _M_ARM
    LowererMD::InsertObjectPoison(poisonedOpnd, branchInstr, insertInstr, isForStore);
#endif
}
  6578. void
  6579. Lowerer::PinTypeRef(JITTypeHolder type, void* typeRef, IR::Instr* instr, Js::PropertyId propertyId)
  6580. {
  6581. this->m_func->PinTypeRef(typeRef);
  6582. if (PHASE_TRACE(Js::TracePinnedTypesPhase, this->m_func))
  6583. {
  6584. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  6585. Output::Print(_u("PinnedTypes: function %s(%s) instr %s property ID %u pinned %s reference 0x%p to type 0x%p.\n"),
  6586. this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
  6587. Js::OpCodeUtil::GetOpCodeName(instr->m_opcode), propertyId,
  6588. typeRef == type.t ? _u("strong") : _u("weak"), typeRef, type.t);
  6589. Output::Flush();
  6590. }
  6591. }
  6592. void
  6593. Lowerer::GenerateCachedTypeWithoutPropertyCheck(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, IR::Opnd *typeOpnd, IR::LabelInstr *labelTypeCheckFailed)
  6594. {
  6595. Assert(propertySymOpnd->IsMonoObjTypeSpecCandidate());
  6596. Assert(propertySymOpnd->HasInitialType());
  6597. JITTypeHolder typeWithoutProperty = propertySymOpnd->GetInitialType();
  6598. // We should never add properties to objects of static types.
  6599. Assert(Js::DynamicType::Is(typeWithoutProperty->GetTypeId()));
  6600. if (typeOpnd == nullptr)
  6601. {
  6602. // No opnd holding the type was passed in, so we have to load the type here.
  6603. IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  6604. if (!baseOpnd->IsNotTaggedValue())
  6605. {
  6606. m_lowererMD.GenerateObjectTest(baseOpnd, instrInsert, labelTypeCheckFailed);
  6607. }
  6608. IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  6609. typeOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  6610. InsertMove(typeOpnd, opnd, instrInsert);
  6611. }
  6612. Js::JitTypePropertyGuard* typePropertyGuard = CreateTypePropertyGuardForGuardedProperties(typeWithoutProperty, propertySymOpnd);
  6613. IR::Opnd *expectedTypeOpnd;
  6614. if (typePropertyGuard)
  6615. {
  6616. bool emitDirectCheck = true;
  6617. Assert(typePropertyGuard != nullptr);
  6618. Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
  6619. if (this->m_func->IsOOPJIT())
  6620. {
  6621. int typeCheckGuardOffset = NativeCodeData::GetDataTotalOffset(typePropertyGuard);
  6622. expectedTypeOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), typeCheckGuardOffset, TyMachPtr,
  6623. #if DBG
  6624. NativeCodeData::GetDataDescription(typePropertyGuard, this->m_func->m_alloc),
  6625. #endif
  6626. this->m_func, true);
  6627. this->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
  6628. }
  6629. else
  6630. {
  6631. expectedTypeOpnd = IR::MemRefOpnd::New((void*)(typePropertyGuard->GetAddressOfValue()), TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
  6632. }
  6633. emitDirectCheck = false;
  6634. OUTPUT_VERBOSE_TRACE_FUNC(Js::ObjTypeSpecPhase, this->m_func, _u("Emitted %s type check for type 0x%p.\n"),
  6635. emitDirectCheck ? _u("direct") : _u("indirect"), typeWithoutProperty->GetAddr());
  6636. }
  6637. else
  6638. {
  6639. expectedTypeOpnd = IR::AddrOpnd::New(typeWithoutProperty->GetAddr(), IR::AddrOpndKindDynamicType, m_func, true);
  6640. }
  6641. InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelTypeCheckFailed, instrInsert);
  6642. // Technically, it should be enough to pin the final type, because it should keep all of its predecessors alive, but
  6643. // just to be extra cautious, let's pin the initial type as well.
  6644. PinTypeRef(typeWithoutProperty, typeWithoutProperty.t, instrInsert, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
  6645. }
// Emits the invalidation check for a fixed-field use. A fixed field is protected by
// the same property-guard mechanism as other guarded properties, so this simply
// delegates to GeneratePropertyGuardCheck. Returns whatever that returns (false when
// lazy fixed-data bailout is used and no inline check is emitted).
bool
Lowerer::GenerateFixedFieldGuardCheck(IR::Instr *insertPointInstr, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
{
    return this->GeneratePropertyGuardCheck(insertPointInstr, propertySymOpnd, labelBailOut);
}
// Creates (at most one) type property guard for the given type and links it to every
// guarded property of the operand that has a shared property guard. Under lazy fixed
// type bailout, no guard is created; the property IDs are recorded for lazy bailout
// instead, and nullptr is returned. Also returns nullptr if the function has no shared
// property guards at all.
Js::JitTypePropertyGuard*
Lowerer::CreateTypePropertyGuardForGuardedProperties(JITTypeHolder type, IR::PropertySymOpnd* propertySymOpnd)
{
    // We should always have a list of guarded properties.
    Assert(propertySymOpnd->GetGuardedPropOps() != nullptr);

    Js::JitTypePropertyGuard* guard = nullptr;

    if (m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards())
    {
        // Consider (ObjTypeSpec): Because we allocate these guards from the JIT thread we can't share guards for the same type across multiple functions.
        // This leads to proliferation of property guards on the thread context. The alternative would be to pre-allocate shared (by value) guards
        // from the thread context during work item creation. We would create too many of them (because some types aren't actually used as guards),
        // but we could share a guard for a given type between functions. This may ultimately be better.
        LinkGuardToGuardedProperties(propertySymOpnd->GetGuardedPropOps(), [this, type, &guard](Js::PropertyId propertyId)
        {
            if (ShouldDoLazyFixedTypeBailout(this->m_func))
            {
                // Lazy bailout: just record the property; no runtime guard is emitted.
                this->m_func->lazyBailoutProperties.Item(propertyId);
            }
            else
            {
                // Create the guard lazily on the first linked property; all linked
                // properties share the same single type guard.
                if (guard == nullptr)
                {
                    guard = this->m_func->GetOrCreateSingleTypeGuard(type->GetAddr());
                }

                if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
                {
                    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Output::Print(_u("ObjTypeSpec: function %s(%s) registered guard 0x%p with value 0x%p for property ID %u.\n"),
                        m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
                        guard, guard->GetValue(), propertyId);
                    Output::Flush();
                }

                // Register the guard so a change to this property can invalidate it.
                this->m_func->EnsurePropertyGuardsByPropertyId();
                this->m_func->LinkGuardToPropertyId(propertyId, guard);
            }
        });
    }

    return guard;
}
// Creates an equivalent type guard (polymorphic or monomorphic, depending on the
// operand) for an equivalent-type-specialized property access, links it to the shared
// property guards of the guarded properties, and populates the guard's equivalent type
// cache with the operand's type set plus the per-property entries (property ID, slot
// index, aux-slot flag, writability requirement) that the runtime equivalence check
// will verify.
Js::JitEquivalentTypeGuard*
Lowerer::CreateEquivalentTypeGuardAndLinkToGuardedProperties(IR::PropertySymOpnd* propertySymOpnd)
{
    // We should always have a list of guarded properties.
    Assert(propertySymOpnd->HasObjTypeSpecFldInfo() && propertySymOpnd->HasEquivalentTypeSet() && propertySymOpnd->GetGuardedPropOps());

    Js::JitEquivalentTypeGuard* guard;
    if (propertySymOpnd->ShouldUsePolyEquivTypeGuard(this->m_func))
    {
        Js::JitPolyEquivalentTypeGuard *polyGuard = this->m_func->CreatePolyEquivalentTypeGuard(propertySymOpnd->GetObjTypeSpecFldId());

        // Copy types from the type set to the guard's value locations
        Js::EquivalentTypeSet* typeSet = propertySymOpnd->GetEquivalentTypeSet();
        for (uint16 ti = 0; ti < typeSet->GetCount(); ti++)
        {
            // Each type is hashed to its own slot in the poly guard's value array.
            intptr_t typeToCache = typeSet->GetType(ti)->GetAddr();
            polyGuard->SetPolyValue(typeToCache, polyGuard->GetIndexForValue(typeToCache));
        }
        guard = polyGuard;
    }
    else
    {
        // Monomorphic-style guard seeded with the first equivalent type.
        guard = this->m_func->CreateEquivalentTypeGuard(propertySymOpnd->GetFirstEquivalentType(), propertySymOpnd->GetObjTypeSpecFldId());
    }

    if (m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards())
    {
        // Link this guard to every guarded property that has a shared guard, so
        // property changes can invalidate it.
        LinkGuardToGuardedProperties(propertySymOpnd->GetGuardedPropOps(), [=](Js::PropertyId propertyId)
        {
            if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(_u("ObjTypeSpec: function %s(%s) registered equivalent type spec guard 0x%p with value 0x%p for property ID %u.\n"),
                    this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
                    guard, guard->GetValue(), propertyId);
                Output::Flush();
            }

            this->m_func->EnsurePropertyGuardsByPropertyId();
            this->m_func->LinkGuardToPropertyId(propertyId, guard);
        });
    }

    Assert(guard->GetCache() != nullptr);
    Js::EquivalentTypeCache* cache = guard->GetCache();

    // TODO (ObjTypeSpec): If we delayed populating the types until encoder, we could bulk allocate all equivalent type caches
    // in one block from the heap. This would allow us to not allocate them from the native code data allocator and free them
    // when no longer needed. However, we would need to store the global property operation ID in the guard, so we can look up
    // the info in the encoder. Perhaps we could overload the cache pointer to be the ID until encoder.

    // Copy types from the type set to the guard's cache
    Js::EquivalentTypeSet* typeSet = propertySymOpnd->GetEquivalentTypeSet();
    uint16 cachedTypeCount = typeSet->GetCount() < EQUIVALENT_TYPE_CACHE_SIZE ? typeSet->GetCount() : EQUIVALENT_TYPE_CACHE_SIZE;
    for (uint16 ti = 0; ti < cachedTypeCount; ti++)
    {
        cache->types[ti] = (Js::Type*)typeSet->GetType(ti)->GetAddr();
    }

#ifdef DEBUG
    // Debug-only invariant: the populated prefix of the cache must be contiguous —
    // once a null slot appears, no non-null slot may follow it.
    bool there_was_a_null_type = false;
    for (uint16 ti = 0; ti < cachedTypeCount; ti++)
    {
        if (cache->types[ti] == nullptr)
        {
            there_was_a_null_type = true;
        }
        else if (there_was_a_null_type)
        {
            AssertMsg(false, "there_was_a_null_type ? something is wrong here.");
        }
    }
#endif

    // Populate property ID and slot index arrays on the guard's cache. We iterate over the
    // bit vector of property operations protected by this guard, but some property operations
    // may be referring to the same property ID (but not share the same cache). We skip
    // redundant entries by maintaining a hash set of property IDs we've already encountered.
    auto propOps = propertySymOpnd->GetGuardedPropOps();
    uint propOpCount = propOps->Count();

    bool isTypeStatic = Js::StaticType::Is(propertySymOpnd->GetFirstEquivalentType()->GetTypeId());
    JsUtil::BaseDictionary<Js::PropertyId, Js::EquivalentPropertyEntry*, JitArenaAllocator> propIds(this->m_alloc, propOpCount);
    Js::EquivalentPropertyEntry* properties = AnewArray(this->m_alloc, Js::EquivalentPropertyEntry, propOpCount);
    uint propIdCount = 0;

    FOREACH_BITSET_IN_SPARSEBV(propOpId, propOps)
    {
        ObjTypeSpecFldInfo* propOpInfo = this->m_func->GetGlobalObjTypeSpecFldInfo(propOpId);
        Js::PropertyId propertyId = propOpInfo->GetPropertyId();
        Js::PropertyIndex propOpIndex = Js::Constants::NoSlot;
        bool hasFixedValue = propOpInfo->HasFixedValue();
        if (hasFixedValue)
        {
            cache->SetHasFixedValue();
        }
        bool isLoadedFromProto = propOpInfo->IsLoadedFromProto();
        if (isLoadedFromProto)
        {
            cache->SetIsLoadedFromProto();
        }
        else
        {
            // Only own-property accesses contribute a slot index to the entry.
            propOpIndex = propOpInfo->GetSlotIndex();
        }
        bool propOpUsesAuxSlot = propOpInfo->UsesAuxSlot();

        AssertMsg(!isTypeStatic || !propOpInfo->IsBeingStored(), "Why are we storing a field to an object of static type?");

        Js::EquivalentPropertyEntry* entry = nullptr;
        if (propIds.TryGetValue(propertyId, &entry))
        {
            // Already saw this property ID. If the slot info matches, just merge the
            // writability requirement into the existing entry.
            if (propOpIndex == entry->slotIndex && propOpUsesAuxSlot == entry->isAuxSlot)
            {
                entry->mustBeWritable |= propOpInfo->IsBeingStored();
            }
            else
            {
                // Due to inline cache sharing we have the same property accessed using different caches
                // with inconsistent info. This means a guaranteed bailout on the equivalent type check.
                // We'll just let it happen and turn off the whole optimization for this function. We could avoid
                // this problem by tracking property information on the value type in glob opt.
                if (PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->m_func))
                {
                    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Output::Print(_u("EquivObjTypeSpec: top function %s (%s): duplicate property clash on %d \n"),
                        m_func->GetJITFunctionBody()->GetDisplayName(), m_func->GetDebugNumberSet(debugStringBuffer), propertyId);
                    Output::Flush();
                }
                Assert(propIdCount < propOpCount);
                __analysis_assume(propIdCount < propOpCount);
                entry = &properties[propIdCount++];
                entry->propertyId = propertyId;
                entry->slotIndex = propOpIndex;
                entry->isAuxSlot = propOpUsesAuxSlot;
                entry->mustBeWritable = propOpInfo->IsBeingStored();
            }
        }
        else
        {
            // First occurrence of this property ID: create a new entry and remember it.
            Assert(propIdCount < propOpCount);
            __analysis_assume(propIdCount < propOpCount);
            entry = &properties[propIdCount++];
            entry->propertyId = propertyId;
            entry->slotIndex = propOpIndex;
            entry->isAuxSlot = propOpUsesAuxSlot;
            entry->mustBeWritable = propOpInfo->IsBeingStored();
            propIds.AddNew(propertyId, entry);
        }
    }
    NEXT_BITSET_IN_SPARSEBV;

    // Transfer the deduplicated entries into the cache's permanent record.
    cache->record.propertyCount = propIdCount;
    // Js::EquivalentPropertyEntry does not contain pointer, no need to fixup
    cache->record.properties = NativeCodeDataNewArrayNoFixup(this->m_func->GetNativeCodeDataAllocator(), Js::EquivalentPropertyEntry, propIdCount);
    memcpy(cache->record.properties, properties, propIdCount * sizeof(Js::EquivalentPropertyEntry));

    return guard;
}
  6834. bool
  6835. Lowerer::LinkCtorCacheToGuardedProperties(JITTimeConstructorCache* ctorCache)
  6836. {
  6837. // We do not always have guarded properties. If the constructor is empty and the subsequent code doesn't load or store any of
  6838. // the constructed object's properties, or if all inline caches are empty then this ctor cache doesn't guard any properties.
  6839. if (ctorCache->GetGuardedPropOps() == nullptr)
  6840. {
  6841. return false;
  6842. }
  6843. bool linked = false;
  6844. if (this->m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards())
  6845. {
  6846. linked = LinkGuardToGuardedProperties(ctorCache->GetGuardedPropOps(), [=](Js::PropertyId propertyId)
  6847. {
  6848. if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
  6849. {
  6850. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  6851. Output::Print(_u("ObjTypeSpec: function %s(%s) registered ctor cache 0x%p with value 0x%p for property %u.\n"),
  6852. this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
  6853. ctorCache->GetRuntimeCacheAddr(), ctorCache->GetType()->GetAddr(), propertyId);
  6854. Output::Flush();
  6855. }
  6856. this->m_func->EnsureCtorCachesByPropertyId();
  6857. this->m_func->LinkCtorCacheToPropertyId(propertyId, ctorCache);
  6858. });
  6859. }
  6860. return linked;
  6861. }
  6862. template<typename LinkFunc>
  6863. bool
  6864. Lowerer::LinkGuardToGuardedProperties(const BVSparse<JitArenaAllocator>* guardedPropOps, LinkFunc link)
  6865. {
  6866. Assert(this->m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards());
  6867. Assert(guardedPropOps != nullptr);
  6868. bool linked = false;
  6869. // For every entry in the bit vector, register the guard for the corresponding property ID.
  6870. FOREACH_BITSET_IN_SPARSEBV(propertyOpId, guardedPropOps)
  6871. {
  6872. ObjTypeSpecFldInfo* propertyOpInfo = this->m_func->GetGlobalObjTypeSpecFldInfo(propertyOpId);
  6873. Js::PropertyId propertyId = propertyOpInfo->GetPropertyId();
  6874. // It's okay for an equivalent type check to be registered as a guard against a property becoming read-only. This transpires if, there is
  6875. // a different monomorphic type check upstream, which guarantees the actual type of the object needed for the hard-coded type transition,
  6876. // but it is later followed by a sequence of polymorphic inline caches, which do not have that type in the type set. At the beginning of
  6877. // that sequence we'll emit an equivalent type check to verify that the actual type has relevant properties on appropriate slots. Then in
  6878. // the dead store pass we'll walk upwards and encounter this check first, thus we'll drop the guarded properties accumulated thus far
  6879. // (including the one being added) on that check.
  6880. // AssertMsg(!propertyOpInfo->IsBeingAdded() || !isEquivalentTypeGuard, "Why do we have an equivalent type check protecting a property add?");
  6881. if (propertyOpInfo->IsBeingAdded() || propertyOpInfo->IsLoadedFromProto() || propertyOpInfo->HasFixedValue())
  6882. {
  6883. // Equivalent object type spec only supports fixed fields on prototypes. This is to simplify the slow type equivalence check.
  6884. // See JavascriptOperators::CheckIfTypeIsEquivalent.
  6885. Assert(!propertyOpInfo->IsPoly() || (!propertyOpInfo->HasFixedValue() || propertyOpInfo->IsLoadedFromProto() || propertyOpInfo->UsesAccessor()));
  6886. if (this->m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuard(propertyId))
  6887. {
  6888. link(propertyId);
  6889. linked = true;
  6890. }
  6891. else
  6892. {
  6893. AssertMsg(false, "Did we fail to create a shared property guard for a guarded property?");
  6894. }
  6895. }
  6896. }
  6897. NEXT_BITSET_IN_SPARSEBV;
  6898. return linked;
  6899. }
  6900. bool
  6901. Lowerer::GeneratePropertyGuardCheck(IR::Instr *insertPointInstr, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
  6902. {
  6903. intptr_t guard = propertySymOpnd->GetPropertyGuardValueAddr();
  6904. Assert(guard != 0);
  6905. if (ShouldDoLazyFixedDataBailout(this->m_func))
  6906. {
  6907. this->m_func->lazyBailoutProperties.Item(propertySymOpnd->GetPropertyId());
  6908. return false;
  6909. }
  6910. else
  6911. {
  6912. Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
  6913. IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
  6914. IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New(guard, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
  6915. IR::BranchInstr *branchInstr = InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, labelBailOut, insertPointInstr);
  6916. IR::RegOpnd *objPtrReg = IR::RegOpnd::New(propertySymOpnd->GetObjectSym(), TyMachPtr, m_func);
  6917. InsertObjectPoison(objPtrReg, branchInstr, insertPointInstr, false);
  6918. return true;
  6919. }
  6920. }
// Lowers an instruction that combines a property guard check with a type load:
//   - emits the guard check, branching to a bailout label on invalidation;
//   - for object values, loads the object's type into the instruction's dst;
//   - for tagged number values, loads the static number type into dst instead;
//   - converts the original instruction itself into the BailOut on the failure path.
// Returns the instruction preceding the original, for the caller's lowering loop.
IR::Instr*
Lowerer::GeneratePropertyGuardCheckBailoutAndLoadType(IR::Instr *insertInstr)
{
    IR::Instr* instrPrev = insertInstr->m_prev;

    // Static number type used when the value turns out to be a tagged number.
    IR::Opnd* numberTypeOpnd = IR::AddrOpnd::New(insertInstr->m_func->GetScriptContextInfo()->GetNumberTypeStaticAddr(), IR::AddrOpndKindDynamicType, insertInstr->m_func);
    IR::PropertySymOpnd* propertySymOpnd = insertInstr->GetSrc1()->AsPropertySymOpnd();

    IR::LabelInstr* labelBailout = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func, true);
    IR::LabelInstr* labelContinue = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func);
    IR::LabelInstr* loadNumberTypeLabel = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func, true);

    // Guard check first; invalidated guard jumps to the bailout label below.
    GeneratePropertyGuardCheck(insertInstr, propertySymOpnd, labelBailout);

    // Object path: test for a real object and load its type into dst;
    // tagged numbers branch to loadNumberTypeLabel.
    IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    GenerateObjectTestAndTypeLoad(insertInstr, baseOpnd, insertInstr->GetDst()->AsRegOpnd(), loadNumberTypeLabel);

    insertInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelContinue, this->m_func));

    // Number path: dst gets the static number type.
    insertInstr->InsertBefore(loadNumberTypeLabel);
    this->InsertMove(insertInstr->GetDst(), numberTypeOpnd, insertInstr);
    insertInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelContinue, this->m_func));

    // Failure path: the original instruction becomes the bailout.
    insertInstr->InsertBefore(labelBailout);
    insertInstr->InsertAfter(labelContinue);

    insertInstr->FreeSrc1();
    insertInstr->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(insertInstr);

    return instrPrev;
}
  6944. void
  6945. Lowerer::GenerateAdjustSlots(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, JITTypeHolder initialType, JITTypeHolder finalType)
  6946. {
  6947. IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  6948. bool adjusted = this->GenerateAdjustBaseSlots(instrInsert, baseOpnd, initialType, finalType);
  6949. if (!adjusted)
  6950. {
  6951. baseOpnd->Free(m_func);
  6952. }
  6953. }
// Emits a call to HelperAdjustSlots when the initial->final type transition requires
// growing the object's slot capacity. Returns true iff the call was emitted (i.e. the
// baseOpnd was consumed as a helper argument). Helper arguments are loaded last-first,
// as the numbered comments below indicate.
bool
Lowerer::GenerateAdjustBaseSlots(IR::Instr *instrInsert, IR::RegOpnd *baseOpnd, JITTypeHolder initialType, JITTypeHolder finalType)
{
    // Possibly allocate new slot capacity to accommodate a type transition.
    AssertMsg(JITTypeHandler::IsTypeHandlerCompatibleForObjectHeaderInlining(initialType->GetTypeHandler(), finalType->GetTypeHandler()),
        "Incompatible typeHandler transition?");

    int oldCount = 0;
    int newCount = 0;
    Js::PropertyIndex inlineSlotCapacity = 0;
    Js::PropertyIndex newInlineSlotCapacity = 0;
    bool needSlotAdjustment =
        JITTypeHandler::NeedSlotAdjustment(initialType->GetTypeHandler(), finalType->GetTypeHandler(), &oldCount, &newCount, &inlineSlotCapacity, &newInlineSlotCapacity);
    if (!needSlotAdjustment)
    {
        return false;
    }

    // Call AdjustSlots using the new counts. Because AdjustSlots uses the "no dispose" flavor of alloc,
    // no implicit calls are possible, and we don't need an implicit call check and bailout.
    // CALL AdjustSlots, instance, newInlineSlotCapacity, newAuxSlotCapacity

    //3rd Param
    Assert(newCount > newInlineSlotCapacity);
    const int newAuxSlotCapacity = newCount - newInlineSlotCapacity;
    m_lowererMD.LoadHelperArgument(instrInsert, IR::IntConstOpnd::New(newAuxSlotCapacity, TyInt32, this->m_func));

    //2nd Param
    m_lowererMD.LoadHelperArgument(instrInsert, IR::IntConstOpnd::New(newInlineSlotCapacity, TyUint16, this->m_func));

    //1st Param (instance)
    m_lowererMD.LoadHelperArgument(instrInsert, baseOpnd);

    //CALL HelperAdjustSlots
    IR::Opnd *opnd = IR::HelperCallOpnd::New(IR::HelperAdjustSlots, this->m_func);
    IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    instr->SetSrc1(opnd);
    instrInsert->InsertBefore(instr);
    m_lowererMD.LowerCall(instr, 0);
    return true;
}
  6989. void
  6990. Lowerer::GenerateFieldStoreWithTypeChange(IR::Instr * instrStFld, IR::PropertySymOpnd *propertySymOpnd, JITTypeHolder initialType, JITTypeHolder finalType)
  6991. {
  6992. // Adjust instance slots, if necessary.
  6993. this->GenerateAdjustSlots(instrStFld, propertySymOpnd, initialType, finalType);
  6994. // We should never add properties to objects of static types.
  6995. Assert(Js::DynamicType::Is(finalType->GetTypeId()));
  6996. // Let's pin the final type to be sure its alive when we try to do the type transition.
  6997. PinTypeRef(finalType, finalType.t, instrStFld, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
  6998. IR::Opnd *finalTypeOpnd = IR::AddrOpnd::New(finalType->GetAddr(), IR::AddrOpndKindDynamicType, instrStFld->m_func, true);
  6999. // Set the new type.
  7000. IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(instrStFld->m_func);
  7001. IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrStFld->m_func);
  7002. this->InsertMove(opnd, finalTypeOpnd, instrStFld);
  7003. // Now do the store.
  7004. GenerateDirectFieldStore(instrStFld, propertySymOpnd);
  7005. }
// Lowers a StFld whose operand carries both an initial and a final cached type:
// checks the initial type, performs the whole type transition plus the store on the
// fast path, and routes type-check failure to a bailout. Always returns true.
bool
Lowerer::GenerateStFldWithCachedFinalType(IR::Instr * instrStFld, IR::PropertySymOpnd *propertySymOpnd)
{
    // This function tries to treat a sequence of add-property stores as a single type transition.
    Assert(propertySymOpnd == instrStFld->GetDst()->AsPropertySymOpnd());
    Assert(propertySymOpnd->IsMonoObjTypeSpecCandidate());
    Assert(propertySymOpnd->HasFinalType());
    Assert(propertySymOpnd->HasInitialType());

    IR::Instr *instr;
    IR::LabelInstr *labelBailOut = nullptr;

    AssertMsg(!propertySymOpnd->IsTypeChecked(), "Why are we doing a type transition when we have the type we want?");

    // If the initial type must be checked here, do it.
    Assert(instrStFld->HasBailOutInfo());
    labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    GenerateCachedTypeWithoutPropertyCheck(instrStFld, propertySymOpnd, nullptr/*typeOpnd*/, labelBailOut);

    // Do the type transition.
    GenerateFieldStoreWithTypeChange(instrStFld, propertySymOpnd, propertySymOpnd->GetInitialType(), propertySymOpnd->GetFinalType());

    // The fast path is fully emitted; the original instruction's operands are done.
    instrStFld->FreeSrc1();
    instrStFld->FreeDst();

    // Insert the bailout and let the main path branch around it.
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrStFld->InsertBefore(instr);

    if (instrStFld->HasBailOutInfo())
    {
        // Failure path: the original instruction becomes the bailout.
        Assert(labelBailOut != nullptr);
        instrStFld->InsertBefore(labelBailOut);
        instrStFld->InsertAfter(labelDone);

        instrStFld->m_opcode = Js::OpCode::BailOut;
        this->GenerateBailOut(instrStFld);
    }
    else
    {
        // No bailout info (only reachable in builds where the Assert above compiles
        // away): drop the original instruction entirely.
        instrStFld->InsertAfter(labelDone);
        instrStFld->Remove();
    }

    return true;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerScopedStFld
///
///     Lowers a scoped field store into a helper call. Helper arguments are
///     loaded in reverse order (last argument first; see the numbered comments
///     in GenerateAdjustBaseSlots): optional property-operation flags, script
///     context (when no inline cache is used), default instance, value, the
///     property sym, and (when an inline cache is used) the cache index, the
///     runtime inline cache, and the function body. Returns the instruction
///     preceding the lowered one.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerScopedStFld(IR::Instr * stFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache,
    bool withPropertyOperationFlags, Js::PropertyOperationFlags flags)
{
    IR::Instr *instrPrev = stFldInstr->m_prev;

    if (withPropertyOperationFlags)
    {
        // Flags are the last helper argument, so they are loaded first.
        m_lowererMD.LoadHelperArgument(stFldInstr,
            IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    }

    if(!withInlineCache)
    {
        LoadScriptContext(stFldInstr);
    }

    // Pass the default instance
    IR::Opnd *src = stFldInstr->UnlinkSrc2();
    m_lowererMD.LoadHelperArgument(stFldInstr, src);

    // Pass the value to store
    src = stFldInstr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(stFldInstr, src);

    // Pass the property sym to store to
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");

    this->LoadPropertySymAsArgument(stFldInstr, dst);

    if (withInlineCache)
    {
        AssertMsg(dst->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");

        m_lowererMD.LoadHelperArgument(
            stFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        // Not using the polymorphic inline cache because the fast path only uses the monomorphic inline cache
        this->m_lowererMD.LoadHelperArgument(stFldInstr, this->LoadRuntimeInlineCacheOpnd(stFldInstr, dst->AsPropertySymOpnd()));

        m_lowererMD.LoadHelperArgument(stFldInstr, LoadFunctionBodyOpnd(stFldInstr));
    }

    // The original instruction becomes the helper call itself.
    m_lowererMD.ChangeToHelperCall(stFldInstr, helperMethod);

    return instrPrev;
}
  7086. ///----------------------------------------------------------------------------
  7087. ///
  7088. /// Lowerer::LowerLoadVar
  7089. ///
  7090. ///----------------------------------------------------------------------------
// Lowers a load of a var value: the operand becomes src1 and the instruction
// is converted into a machine-level assignment (move) to the existing dst.
IR::Instr *
Lowerer::LowerLoadVar(IR::Instr *instr, IR::Opnd *opnd)
{
    instr->SetSrc1(opnd);
    return m_lowererMD.ChangeToAssign(instr);
}
// Pushes the address of a stack slot (sized for a Js::JavascriptNumber, see
// GetTempNumberSym) as a helper argument, so the helper can build the result
// number on the stack instead of allocating. Returns the LEA instruction that
// produced the address.
IR::Instr *
Lowerer::LoadHelperTemp(IR::Instr * instr, IR::Instr * instrInsert)
{
    IR::Opnd *tempOpnd;
    IR::Opnd *dst = instr->GetDst();
    AssertMsg(dst != nullptr, "Always expect a dst for these.");
    AssertMsg(instr->dstIsTempNumber, "Should only be loading temps here");
    Assert(dst->IsRegOpnd());
    // Get (or lazily create) the stack slot backing this dst's temp number.
    StackSym * tempNumberSym = this->GetTempNumberSym(dst, instr->dstIsTempNumberTransferred);
    IR::Instr *load = InsertLoadStackAddress(tempNumberSym, instrInsert);
    tempOpnd = load->GetDst();
    m_lowererMD.LoadHelperArgument(instrInsert, tempOpnd);
    return load;
}
// Lowers a load of the actual argument count.
// - Inlinee: the count (including 'this') is a JIT-time constant on the func.
// - Coroutine (generator/async): the count is read from the call info.
// - Otherwise: the machine-dependent lowerer reads it from the frame.
void
Lowerer::LoadArgumentCount(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(!instr->GetSrc1());
    Assert(!instr->GetSrc2());

    if(instr->m_func->IsInlinee())
    {
        // Argument count including 'this'
        instr->SetSrc1(IR::IntConstOpnd::New(instr->m_func->actualCount, TyUint32, instr->m_func, true));
        LowererMD::ChangeToAssign(instr);
    }
    else if (instr->m_func->GetJITFunctionBody()->IsCoroutine())
    {
        IR::SymOpnd* symOpnd = LoadCallInfo(instr);
        instr->SetSrc1(symOpnd);
        LowererMD::ChangeToAssign(instr);
    }
    else
    {
        m_lowererMD.LoadArgumentCount(instr);
    }
}
// Lowers a load of a pointer to the in-memory arguments. For an inlinee the
// address of the first argument slot past 'this' is materialized with a LEA;
// otherwise the machine-dependent lowerer handles it.
void
Lowerer::LoadStackArgPtr(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(!instr->GetSrc1());
    Assert(!instr->GetSrc2());

    if(instr->m_func->IsInlinee())
    {
        // Address of argument after 'this'
        const auto firstRealArgStackSym = instr->m_func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
        // Skip the 'this' slot by advancing one machine pointer.
        this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr);
        instr->SetSrc1(IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, instr->m_func));
        ChangeToLea(instr);
    }
    else
    {
        m_lowererMD.LoadStackArgPtr(instr);
    }
}
  7155. IR::Instr *
  7156. Lowerer::InsertLoadStackAddress(StackSym *sym, IR::Instr * instrInsert, IR::RegOpnd *optionalDstOpnd /* = nullptr */)
  7157. {
  7158. IR::RegOpnd * regDst = optionalDstOpnd != nullptr ? optionalDstOpnd : IR::RegOpnd::New(TyMachReg, this->m_func);
  7159. IR::SymOpnd * symSrc = IR::SymOpnd::New(sym, TyMachPtr, this->m_func);
  7160. return InsertLea(regDst, symSrc, instrInsert);
  7161. }
  7162. void
  7163. Lowerer::LoadArgumentsFromFrame(IR::Instr *const instr)
  7164. {
  7165. Assert(instr);
  7166. Assert(instr->GetDst());
  7167. Assert(!instr->GetSrc1());
  7168. Assert(!instr->GetSrc2());
  7169. if(instr->m_func->IsInlinee())
  7170. {
  7171. // Use the inline object meta arg slot for the arguments object
  7172. instr->SetSrc1(instr->m_func->GetInlineeArgumentsObjectSlotOpnd());
  7173. LowererMD::ChangeToAssign(instr);
  7174. }
  7175. else
  7176. {
  7177. m_lowererMD.LoadArgumentsFromFrame(instr);
  7178. }
  7179. }
  7180. #ifdef ENABLE_WASM
// Lowers CheckWasmSignature (indirect-call signature check).
// src1 is the target WasmScriptFunction, src2 the JIT-time signature id.
// If the expected signature has no valid "short" (inlined) form, the check is
// delegated to the Op_CheckWasmSignature helper; otherwise the target's short
// signature is compared inline and a mismatch throws WASMERR_SignatureMismatch.
IR::Instr *
Lowerer::LowerCheckWasmSignature(IR::Instr * instr)
{
    Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2()->IsIntConstOpnd());

    int sigId = instr->UnlinkSrc2()->AsIntConstOpnd()->AsInt32();

    IR::Instr *instrPrev = instr->m_prev;

    // Actual signature pointer, loaded from the target function object.
    IR::IndirOpnd * actualSig = IR::IndirOpnd::New(instr->UnlinkSrc1()->AsRegOpnd(), Js::WasmScriptFunction::GetOffsetOfSignature(), TyMachReg, m_func);

    Wasm::WasmSignature * expectedSig = m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetWasmSignature(sigId);
    if (expectedSig->GetShortSig() == Js::Constants::InvalidSignature)
    {
        // Slow form: pass the expected signature's address and the actual
        // signature to the helper, which performs the comparison.
        intptr_t sigAddr = m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetWasmSignatureAddr(sigId);
        IR::AddrOpnd * expectedOpnd = IR::AddrOpnd::New(sigAddr, IR::AddrOpndKindConstantAddress, m_func);
        m_lowererMD.LoadHelperArgument(instr, expectedOpnd);
        m_lowererMD.LoadHelperArgument(instr, actualSig);
        LoadScriptContext(instr);
        m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_CheckWasmSignature);
    }
    else
    {
        // Inline form: layout is [compare/branch][trapLabel: throw][labelFallThrough].
        IR::LabelInstr * trapLabel = InsertLabel(true, instr);
        IR::LabelInstr * labelFallThrough = InsertLabel(false, instr->m_next);

        IR::RegOpnd * actualRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
        InsertMove(actualRegOpnd, actualSig, trapLabel);

        // Compare the target's short signature with the JIT-time constant;
        // jump to the trap on inequality, otherwise fall through past it.
        IR::IndirOpnd * shortSigIndir = IR::IndirOpnd::New(actualRegOpnd, Wasm::WasmSignature::GetOffsetOfShortSig(), TyMachReg, m_func);
        InsertCompareBranch(shortSigIndir, IR::IntConstOpnd::New(expectedSig->GetShortSig(), TyMachReg, m_func), Js::OpCode::BrNeq_A, trapLabel, trapLabel);

        InsertBranch(Js::OpCode::Br, labelFallThrough, trapLabel);

        GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_SignatureMismatch), TyInt32, m_func), instr);

        instr->Remove();
    }
    return instrPrev;
}
// Lowers LdWasmFunc: loads an entry from a WebAssembly table.
// Emitted layout: [checks][trapLabel][trapOutOfBoundsLabel][doneLabel].
// An index >= currentLength (unsigned compare) branches to
// trapOutOfBoundsLabel and throws WASMERR_TableIndexOutOfRange; a null entry
// branches to trapLabel and throws WASMERR_NeedWebAssemblyFunc.
IR::Instr *
Lowerer::LowerLdWasmFunc(IR::Instr* instr)
{
    IR::Instr * prev = instr->m_prev;

    IR::RegOpnd * tableReg = instr->UnlinkSrc1()->AsRegOpnd();
    IR::Opnd * indexOpnd = instr->UnlinkSrc2();
    IR::Opnd * dst = instr->UnlinkDst();

    IR::IndirOpnd * lengthOpnd = IR::IndirOpnd::New(tableReg, Js::WebAssemblyTable::GetOffsetOfCurrentLength(), TyUint32, m_func);
    IR::IndirOpnd * valuesIndirOpnd = IR::IndirOpnd::New(tableReg, Js::WebAssemblyTable::GetOffsetOfValues(), TyMachPtr, m_func);

    IR::RegOpnd * valuesRegOpnd = IR::RegOpnd::New(TyMachPtr, m_func);

    // Address of the table entry: values + (index << pointer-size scale).
    byte scale = m_lowererMD.GetDefaultIndirScale();
    IR::IndirOpnd * funcIndirOpnd;
    if (indexOpnd->IsIntConstOpnd())
    {
        funcIndirOpnd = IR::IndirOpnd::New(valuesRegOpnd, indexOpnd->AsIntConstOpnd()->AsInt32() << scale, TyMachPtr, m_func);
    }
    else
    {
        Assert(indexOpnd->IsRegOpnd());
        funcIndirOpnd = IR::IndirOpnd::New(valuesRegOpnd, indexOpnd->AsRegOpnd(), TyMachPtr, m_func);
        funcIndirOpnd->SetScale(scale);
    }

    IR::LabelInstr * trapOutOfBoundsLabel = InsertLabel(true, instr);
    IR::LabelInstr * trapLabel = InsertLabel(true, trapOutOfBoundsLabel);
    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);

    // Bounds check (unsigned): index >= currentLength => out-of-range trap.
    InsertCompareBranch(indexOpnd, lengthOpnd, Js::OpCode::BrGe_A, true, trapOutOfBoundsLabel, trapLabel);

    InsertMove(valuesRegOpnd, valuesIndirOpnd, trapLabel);
    InsertMove(dst, funcIndirOpnd, trapLabel);

    // Null entry => "need WebAssembly function" trap; otherwise skip the traps.
    InsertCompareBranch(dst, IR::IntConstOpnd::New(0, TyMachPtr, m_func), Js::OpCode::BrEq_A, trapLabel, trapLabel);

    InsertBranch(Js::OpCode::Br, doneLabel, trapLabel);

    // Throw bodies for the two trap labels, in label order.
    GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_NeedWebAssemblyFunc), TyInt32, m_func), trapOutOfBoundsLabel);
    GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_TableIndexOutOfRange), TyInt32, m_func), instr);
    instr->Remove();
    return prev;
}
  7249. IR::Instr *
  7250. Lowerer::LowerGrowWasmMemory(IR::Instr* instr)
  7251. {
  7252. IR::Instr * instrPrev = m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
  7253. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  7254. m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_GrowWasmMemory);
  7255. return instrPrev;
  7256. }
  7257. #endif
  7258. IR::Instr *
  7259. Lowerer::LowerUnaryHelper(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::Opnd* opndBailoutArg)
  7260. {
  7261. IR::Instr *instrPrev;
  7262. IR::Opnd *src1 = instr->UnlinkSrc1();
  7263. instrPrev = m_lowererMD.LoadHelperArgument(instr, src1);
  7264. m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, opndBailoutArg);
  7265. return instrPrev;
  7266. }
  7267. // helper takes memory context as second argument
  7268. IR::Instr *
  7269. Lowerer::LowerUnaryHelperMem(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::Opnd* opndBailoutArg)
  7270. {
  7271. IR::Instr *instrPrev;
  7272. instrPrev = LoadScriptContext(instr);
  7273. return this->LowerUnaryHelper(instr, helperMethod, opndBailoutArg);
  7274. }
  7275. IR::Instr *
  7276. Lowerer::LowerUnaryHelperMemWithFunctionInfo(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  7277. {
  7278. m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionInfoOpnd(instr));
  7279. return this->LowerUnaryHelperMem(instr, helperMethod);
  7280. }
  7281. IR::Instr *
  7282. Lowerer::LowerUnaryHelperMemWithFuncBody(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  7283. {
  7284. m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionBodyOpnd(instr));
  7285. return this->LowerUnaryHelperMem(instr, helperMethod);
  7286. }
  7287. IR::Instr *
  7288. Lowerer::LowerBinaryHelperMemWithFuncBody(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  7289. {
  7290. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
  7291. m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionBodyOpnd(instr));
  7292. return this->LowerBinaryHelperMem(instr, helperMethod);
  7293. }
  7294. IR::Instr *
  7295. Lowerer::LowerUnaryHelperMemWithTemp(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  7296. {
  7297. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2, "Expected a unary instruction...");
  7298. IR::Instr * instrFirst;
  7299. IR::Opnd * tempOpnd;
  7300. if (instr->dstIsTempNumber)
  7301. {
  7302. instrFirst = this->LoadHelperTemp(instr, instr);
  7303. }
  7304. else
  7305. {
  7306. tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  7307. instrFirst = m_lowererMD.LoadHelperArgument(instr, tempOpnd);
  7308. }
  7309. this->LowerUnaryHelperMem(instr, helperMethod);
  7310. return instrFirst;
  7311. }
  7312. IR::Instr *
  7313. Lowerer::LowerUnaryHelperMemWithTemp2(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::JnHelperMethod helperMethodWithTemp)
  7314. {
  7315. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2, "Expected a unary instruction...");
  7316. if (instr->dstIsTempNumber)
  7317. {
  7318. IR::Instr * instrFirst = this->LoadHelperTemp(instr, instr);
  7319. this->LowerUnaryHelperMem(instr, helperMethodWithTemp);
  7320. return instrFirst;
  7321. }
  7322. return this->LowerUnaryHelperMem(instr, helperMethod);
  7323. }
  7324. IR::Instr *
  7325. Lowerer::LowerUnaryHelperMemWithBoolReference(IR::Instr *instr, IR::JnHelperMethod helperMethod, bool useBoolForBailout)
  7326. {
  7327. if (!this->m_func->tempSymBool)
  7328. {
  7329. this->m_func->tempSymBool = StackSym::New(TyUint8, this->m_func);
  7330. this->m_func->StackAllocate(this->m_func->tempSymBool, TySize[TyUint8]);
  7331. }
  7332. IR::SymOpnd * boolOpnd = IR::SymOpnd::New(this->m_func->tempSymBool, TyUint8, this->m_func);
  7333. IR::RegOpnd * boolRefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  7334. InsertLea(boolRefOpnd, boolOpnd, instr);
  7335. m_lowererMD.LoadHelperArgument(instr, boolRefOpnd);
  7336. return this->LowerUnaryHelperMem(instr, helperMethod, useBoolForBailout ? boolOpnd : nullptr);
  7337. }
  7338. IR::Instr *
  7339. Lowerer::LowerInitCachedScope(IR::Instr* instr)
  7340. {
  7341. instr->m_opcode = Js::OpCode::CallHelper;
  7342. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperOP_InitCachedScope, this->m_func);
  7343. IR::Opnd * src1 = instr->UnlinkSrc1();
  7344. instr->SetSrc1(helperOpnd);
  7345. instr->SetSrc2(src1);
  7346. return instr;
  7347. }
  7348. ///----------------------------------------------------------------------------
  7349. ///
  7350. /// Lowerer::LowerBinaryHelper
  7351. ///
  7352. ///----------------------------------------------------------------------------
// Lowers a binary opcode into a call to "helperMethod" with (src1, src2)
// as explicit arguments (src2 pushed first).
IR::Instr *
Lowerer::LowerBinaryHelper(IR::Instr *instr, IR::JnHelperMethod helperMethod)
{
    // The only case where this would still be null when we return is when
    // helperMethod == HelperOP_CmSrEq_EmptyString; in which case we ignore
    // instrPrev.
    IR::Instr *instrPrev = nullptr;

    AssertMsg((Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Unsigned1) ||
              Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3 ||
              Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2 ||
              Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2Int1 ||
              Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::ElementU ||
              instr->m_opcode == Js::OpCode::InvalCachedScope, "Expected a binary instruction...");

    IR::Opnd *src2 = instr->UnlinkSrc2();
    // For CmSrEq_EmptyString, src2 is unlinked but not passed to the helper —
    // presumably the empty-string operand is implied by the helper itself.
    if (helperMethod != IR::HelperOP_CmSrEq_EmptyString)
        instrPrev = m_lowererMD.LoadHelperArgument(instr, src2);

    IR::Opnd *src1 = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, src1);

    m_lowererMD.ChangeToHelperCall(instr, helperMethod);

    return instrPrev;
}
  7374. // helper takes memory context as third argument
  7375. IR::Instr *
  7376. Lowerer::LowerBinaryHelperMem(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  7377. {
  7378. IR::Instr *instrPrev;
  7379. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3 ||
  7380. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2 ||
  7381. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2Int1 ||
  7382. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Unsigned1, "Expected a binary instruction...");
  7383. instrPrev = LoadScriptContext(instr);
  7384. return this->LowerBinaryHelper(instr, helperMethod);
  7385. }
  7386. IR::Instr *
  7387. Lowerer::LowerBinaryHelperMemWithTemp(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  7388. {
  7389. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
  7390. IR::Instr * instrFirst;
  7391. IR::Opnd * tempOpnd;
  7392. if (instr->dstIsTempNumber)
  7393. {
  7394. instrFirst = this->LoadHelperTemp(instr, instr);
  7395. }
  7396. else
  7397. {
  7398. tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  7399. instrFirst = m_lowererMD.LoadHelperArgument(instr, tempOpnd);
  7400. }
  7401. this->LowerBinaryHelperMem(instr, helperMethod);
  7402. return instrFirst;
  7403. }
  7404. IR::Instr *
  7405. Lowerer::LowerBinaryHelperMemWithTemp2(
  7406. IR::Instr *instr,
  7407. IR::JnHelperMethod helperMethod,
  7408. IR::JnHelperMethod helperMethodWithTemp
  7409. )
  7410. {
  7411. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
  7412. if (instr->dstIsTempNumber && instr->GetDst() && instr->GetDst()->GetValueType().HasBeenNumber())
  7413. {
  7414. IR::Instr * instrFirst = this->LoadHelperTemp(instr, instr);
  7415. this->LowerBinaryHelperMem(instr, helperMethodWithTemp);
  7416. return instrFirst;
  7417. }
  7418. return this->LowerBinaryHelperMem(instr, helperMethod);
  7419. }
// Fast path for string concatenation where the left operand is a dead temp
// and is also the dst (in-place "a = a + b"). When left is a CompoundString
// that owns its last block and right is a finalized single-character string,
// the character is appended directly into left's last block buffer; every
// other case falls through to the Op_AddLeftDead helper.
IR::Instr *
Lowerer::LowerAddLeftDeadForString(IR::Instr *instr)
{
    IR::Opnd * opndLeft;
    IR::Opnd * opndRight;

    opndLeft = instr->GetSrc1();
    opndRight = instr->GetSrc2();

    Assert(opndLeft && opndRight);

    bool generateFastPath = this->m_func->DoFastPaths();

    // Fast path requires reg operands, likely-string value types, dst == left
    // (so the append can be done in place), and left != right.
    if (!generateFastPath
        || !opndLeft->IsRegOpnd()
        || !opndRight->IsRegOpnd()
        || !instr->GetDst()->IsRegOpnd()
        || !opndLeft->GetValueType().IsLikelyString()
        || !opndRight->GetValueType().IsLikelyString()
        || !opndLeft->IsEqual(instr->GetDst()->AsRegOpnd())
        || opndLeft->IsEqual(opndRight))
    {
        return this->LowerBinaryHelperMemWithTemp(instr, IR::HelperOp_AddLeftDead);
    }

    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr * labelFallThrough = instr->GetOrCreateContinueLabel(false);
    IR::LabelInstr *insertBeforeInstr = labelHelper;
    instr->InsertBefore(labelHelper);

    // Guard: left must be an object (not a tagged value)...
    if (!opndLeft->IsNotTaggedValue())
    {
        this->m_lowererMD.GenerateObjectTest(opndLeft->AsRegOpnd(), insertBeforeInstr, labelHelper);
    }

    // ...and specifically a CompoundString (checked via its vtable).
    IR::BranchInstr* branchInstr = InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), 0, TyMachPtr, m_func),
        this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertBeforeInstr);
    InsertObjectPoison(opndLeft->AsRegOpnd(), branchInstr, insertBeforeInstr, false);

    // Right must be a string as well.
    GenerateStringTest(opndRight->AsRegOpnd(), insertBeforeInstr, labelHelper);

    // Fall back to the helper unless left->m_charLength < JavascriptString::MaxCharLength
    IR::IndirOpnd *indirLeftCharLengthOpnd = IR::IndirOpnd::New(opndLeft->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func);
    IR::RegOpnd *regLeftCharLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(regLeftCharLengthOpnd, indirLeftCharLengthOpnd, insertBeforeInstr);
    InsertCompareBranch(
        regLeftCharLengthOpnd,
        IR::IntConstOpnd::New(Js::JavascriptString::MaxCharLength, TyUint32, m_func),
        Js::OpCode::BrGe_A,
        labelHelper,
        insertBeforeInstr);

    // left->m_pszValue == NULL (!left->IsFinalized())
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func),
        IR::AddrOpnd::NewNull(m_func),
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertBeforeInstr);

    // right->m_pszValue != NULL (right->IsFinalized())
    InsertCompareBranch(
        IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func),
        IR::AddrOpnd::NewNull(m_func),
        Js::OpCode::BrEq_A,
        labelHelper,
        insertBeforeInstr);

    // if ownsLastBlock != 0
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfOwnsLastBlock(), TyUint8, m_func),
        IR::IntConstOpnd::New(0, TyUint8, m_func),
        Js::OpCode::BrEq_A,
        labelHelper,
        insertBeforeInstr);

    // if right->m_charLength == 1
    InsertCompareBranch(IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_charLength), TyUint32, m_func),
        IR::IntConstOpnd::New(1, TyUint32, m_func),
        Js::OpCode::BrNeq_A, labelHelper, insertBeforeInstr);

    // if left->m_directCharLength == -1
    InsertCompareBranch(IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfDirectCharLength(), TyUint32, m_func),
        IR::IntConstOpnd::New(UINT32_MAX, TyUint32, m_func),
        Js::OpCode::BrNeq_A, labelHelper, insertBeforeInstr);

    // if lastBlockInfo.charLength < lastBlockInfo.charCapacity
    IR::IndirOpnd *indirCharLength = IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoCharLength(), TyUint32, m_func);
    IR::RegOpnd *charLengthOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
    InsertMove(charLengthOpnd, indirCharLength, insertBeforeInstr);
    InsertCompareBranch(charLengthOpnd, IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoCharCapacity(), TyUint32, m_func), Js::OpCode::BrGe_A, labelHelper, insertBeforeInstr);

    // load c = right->m_pszValue[0]
    IR::RegOpnd *pszValue0Opnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::IndirOpnd *indirRightPszOpnd = IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func);
    InsertMove(pszValue0Opnd, indirRightPszOpnd, insertBeforeInstr);
    IR::RegOpnd *charResultOpnd = IR::RegOpnd::New(TyUint16, this->m_func);
    InsertMove(charResultOpnd, IR::IndirOpnd::New(pszValue0Opnd, 0, TyUint16, this->m_func), insertBeforeInstr);

    // lastBlockInfo.buffer[blockCharLength] = c;
    IR::RegOpnd *baseOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(baseOpnd, IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoBuffer(), TyMachPtr, m_func), insertBeforeInstr);
    IR::IndirOpnd *indirBufferToStore = IR::IndirOpnd::New(baseOpnd, charLengthOpnd, (byte)Math::Log2(sizeof(char16)), TyUint16, m_func);
    InsertMove(indirBufferToStore, charResultOpnd, insertBeforeInstr);

    // left->m_charLength++
    InsertAdd(false, indirLeftCharLengthOpnd, regLeftCharLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);

    // lastBlockInfo.charLength++
    InsertAdd(false, indirCharLength, indirCharLength, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);

    InsertBranch(Js::OpCode::Br, labelFallThrough, insertBeforeInstr);

    // Slow path: the original instruction (now under labelHelper) becomes the helper call.
    return this->LowerBinaryHelperMemWithTemp(instr, IR::HelperOp_AddLeftDead);
}
  7518. IR::Instr *
  7519. Lowerer::LowerBinaryHelperMemWithTemp3(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::JnHelperMethod helperMethodWithTemp, IR::JnHelperMethod helperMethodLeftDead)
  7520. {
  7521. IR::Opnd *src1 = instr->GetSrc1();
  7522. if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_isTempLastUse && !src1->GetValueType().IsNotString())
  7523. {
  7524. Assert(helperMethodLeftDead == IR::HelperOp_AddLeftDead);
  7525. return LowerAddLeftDeadForString(instr);
  7526. }
  7527. else
  7528. {
  7529. return this->LowerBinaryHelperMemWithTemp2(instr, helperMethod, helperMethodWithTemp);
  7530. }
  7531. }
  7532. StackSym *
  7533. Lowerer::GetTempNumberSym(IR::Opnd * opnd, bool isTempTransferred)
  7534. {
  7535. AssertMsg(opnd->IsRegOpnd(), "Expected regOpnd");
  7536. if (isTempTransferred)
  7537. {
  7538. StackSym * tempNumberSym = StackSym::New(TyMisc, m_func);
  7539. this->m_func->StackAllocate(tempNumberSym, sizeof(Js::JavascriptNumber));
  7540. return tempNumberSym;
  7541. }
  7542. StackSym * stackSym = opnd->AsRegOpnd()->m_sym;
  7543. StackSym * tempNumberSym = stackSym->m_tempNumberSym;
  7544. if (tempNumberSym == nullptr)
  7545. {
  7546. tempNumberSym = StackSym::New(TyMisc, m_func);
  7547. this->m_func->StackAllocate(tempNumberSym, sizeof(Js::JavascriptNumber));
  7548. stackSym->m_tempNumberSym = tempNumberSym;
  7549. }
  7550. return tempNumberSym;
  7551. }
// Lowers a profiled LdElemI into a call to ProfilingHelpers::ProfiledLdElem.
// Arguments are pushed in reverse of the signature below, so the last
// parameters are pushed first.
void Lowerer::LowerProfiledLdElemI(IR::JitProfilingInstr *const instr)
{
    Assert(instr);

    /*
        Var ProfilingHelpers::ProfiledLdElem(
            const Var base,
            const Var varIndex,
            FunctionBody *const functionBody,
            const ProfileId profileId,
            bool didArrayAccessHelperCall,
            bool bailedOutOnArraySpecialization)
    */

    Func *const func = instr->m_func;

    m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(false, TyInt8, func));  // bailedOutOnArraySpecialization
    m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(false, TyInt8, func));  // didArrayAccessHelperCall
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(func));

    IR::IndirOpnd *const indir = instr->UnlinkSrc1()->AsIndirOpnd();
    IR::Opnd *const indexOpnd = indir->UnlinkIndexOpnd();
    Assert(indexOpnd || indir->GetOffset() >= 0 && !Js::TaggedInt::IsOverflow(indir->GetOffset()));
    // varIndex: either the indir's index operand, or its constant offset
    // converted to a tagged-int Var.
    m_lowererMD.LoadHelperArgument(
        instr,
        indexOpnd
            ? static_cast<IR::Opnd *>(indexOpnd)
            : IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(indir->GetOffset()), IR::AddrOpndKindDynamicVar, func));
    m_lowererMD.LoadHelperArgument(instr, indir->UnlinkBaseOpnd());
    indir->Free(func);

    instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledLdElem, func));
    m_lowererMD.LowerCall(instr, 0);
}
// Lowers a profiled StElemI into a call to ProfilingHelpers::ProfiledStElem.
// When the flags are the defaults, a specialized helper that omits the flag
// and bool arguments is used. Arguments are pushed in reverse of the
// signature below.
void Lowerer::LowerProfiledStElemI(IR::JitProfilingInstr *const instr, const Js::PropertyOperationFlags flags)
{
    Assert(instr);

    /*
        void ProfilingHelpers::ProfiledStElem(
            const Var base,
            const Var varIndex,
            const Var value,
            FunctionBody *const functionBody,
            const ProfileId profileId,
            const PropertyOperationFlags flags,
            bool didArrayAccessHelperCall,
            bool bailedOutOnArraySpecialization)
    */

    Func *const func = instr->m_func;

    IR::JnHelperMethod helper;
    if(flags == Js::PropertyOperation_None)
    {
        // Default flags: the specialized helper supplies flags/bools itself.
        helper = IR::HelperProfiledStElem_DefaultFlags;
    }
    else
    {
        helper = IR::HelperProfiledStElem;
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(false, TyInt8, func));  // bailedOutOnArraySpecialization
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(false, TyInt8, func));  // didArrayAccessHelperCall
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(flags, TyInt32, func, true));
    }
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(func));
    m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());

    IR::IndirOpnd *const indir = instr->UnlinkDst()->AsIndirOpnd();
    IR::Opnd *const indexOpnd = indir->UnlinkIndexOpnd();
    Assert(indexOpnd || indir->GetOffset() >= 0 && !Js::TaggedInt::IsOverflow(indir->GetOffset()));
    // varIndex: either the indir's index operand, or its constant offset
    // converted to a tagged-int Var.
    m_lowererMD.LoadHelperArgument(
        instr,
        indexOpnd
            ? static_cast<IR::Opnd *>(indexOpnd)
            : IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(indir->GetOffset()), IR::AddrOpndKindDynamicVar, func));
    m_lowererMD.LoadHelperArgument(instr, indir->UnlinkBaseOpnd());
    indir->Free(func);

    instr->SetSrc1(IR::HelperCallOpnd::New(helper, func));
    m_lowererMD.LowerCall(instr, 0);
}
  7625. ///----------------------------------------------------------------------------
  7626. ///
  7627. /// Lowerer::LowerStElemI
  7628. ///
  7629. ///----------------------------------------------------------------------------
// Lowers StElemI (and the element-init opcodes listed in the assert below)
// into the appropriate element-store helper call, after peeling off any
// array-related bailout checks attached to the instruction. The helper
// variant is selected from the index operand's type and the source's type.
IR::Instr *
Lowerer::LowerStElemI(IR::Instr * instr, Js::PropertyOperationFlags flags, bool isHelper, IR::JnHelperMethod helperMethod)
{
    IR::Instr *instrPrev = instr->m_prev;

    // Profiling build: emit the profiled store helper instead.
    if (instr->IsJitProfilingInstr())
    {
        Assert(!isHelper);
        LowerProfiledStElemI(instr->AsJitProfilingInstr(), flags);
        return instrPrev;
    }

    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *newDst = nullptr;
    IRType srcType = src1->GetType();

    AssertMsg(dst->IsIndirOpnd(), "Expected indirOpnd on StElementI");

#if !FLOATVAR
    if (dst->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyOptimizedTypedArray() && src1->IsRegOpnd())
    {
        // We allow the source of typedArray StElem to be marked as temp, since we just need the value,
        // however if the array turns out to be a non-typed array, or the index isn't valid (the value is then stored as a property)
        // the temp needs to be boxed if it is a float. The BoxStackNumber helper will box JavascriptNumbers
        // which are on the stack.

        // regVar = BoxStackNumber(src1, scriptContext)
        IR::Instr *newInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
        IR::RegOpnd *regVar = IR::RegOpnd::New(TyVar, this->m_func);
        newInstr->SetDst(regVar);
        newInstr->SetSrc1(src1);
        instr->InsertBefore(newInstr);
        LowerUnaryHelperMem(newInstr, IR::HelperBoxStackNumber);

        // MOV src1, regVar
        newInstr = IR::Instr::New(Js::OpCode::Ld_A, src1, regVar, this->m_func);
        instr->InsertBefore(m_lowererMD.ChangeToAssign(newInstr));
    }
#endif

    if(instr->HasBailOutInfo())
    {
        // Peel the array bailout kinds off one at a time; each Lower* call
        // emits its check and clears its bit from the instruction's kind.
        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
        {
            Assert(!(bailOutKind & IR::BailOutOnMissingValue));
            LowerBailOnInvalidatedArrayHeadSegment(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayHeadSegment;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        else if(bailOutKind & IR::BailOutOnMissingValue)
        {
            LowerBailOnCreatedMissingValue(instr, isHelper);
            bailOutKind ^= IR::BailOutOnMissingValue;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if(bailOutKind & IR::BailOutOnInvalidatedArrayLength)
        {
            LowerBailOnInvalidatedArrayLength(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayLength;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if(bailOutKind & IR::BailOutConvertedNativeArray)
        {
            // The helper's return value (stored to newDst below) signals a
            // native-array conversion; a zero result skips the bailout.
            IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
            instr->InsertAfter(labelSkipBailOut);

            LowerOneBailOutKind(instr, IR::BailOutConvertedNativeArray, isHelper);
            newDst = IR::RegOpnd::New(TyMachReg, m_func);
            InsertTestBranch(newDst, newDst, Js::OpCode::BrEq_A, labelSkipBailOut, instr->m_next);
        }
    }

    instr->UnlinkDst();
    instr->UnlinkSrc1();
    Assert(
        helperMethod == IR::HelperOP_InitElemGetter ||
        helperMethod == IR::HelperOP_InitElemSetter ||
        helperMethod == IR::HelperOP_InitComputedProperty ||
        helperMethod == IR::HelperOp_SetElementI ||
        helperMethod == IR::HelperOp_InitClassMemberComputedName ||
        helperMethod == IR::HelperOp_InitClassMemberGetComputedName ||
        helperMethod == IR::HelperOp_InitClassMemberSetComputedName
        );

    IR::IndirOpnd* dstIndirOpnd = dst->AsIndirOpnd();
    IR::Opnd *indexOpnd = dstIndirOpnd->UnlinkIndexOpnd();
    if (indexOpnd && indexOpnd->GetType() != TyVar)
    {
        // Typed (int32/uint32) index: pick the helper variant specialized on
        // both the index type and the source element type.
        Assert(
            helperMethod != IR::HelperOP_InitElemGetter &&
            helperMethod != IR::HelperOP_InitElemSetter &&
            helperMethod != IR::HelperOp_InitClassMemberGetComputedName &&
            helperMethod != IR::HelperOp_InitClassMemberSetComputedName
            );

        if (indexOpnd->GetType() == TyInt32)
        {
            helperMethod =
                srcType == TyVar ? IR::HelperOp_SetElementI_Int32 :
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI_Int32 :
                IR::HelperOp_SetNativeFloatElementI_Int32;
        }
        else if (indexOpnd->GetType() == TyUint32)
        {
            helperMethod =
                srcType == TyVar ? IR::HelperOp_SetElementI_UInt32 :
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI_UInt32 :
                IR::HelperOp_SetNativeFloatElementI_UInt32;
        }
        else
        {
            Assert(FALSE);
        }
    }
    else
    {
        if (indexOpnd == nullptr)
        {
            // No index; the offset identifies the element.
            IntConstType offset = (IntConstType)dst->AsIndirOpnd()->GetOffset();
            indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
        }

        // Var index with a native-typed source: use the native-element helper.
        if (srcType != TyVar)
        {
            helperMethod =
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI : IR::HelperOp_SetNativeFloatElementI;
        }
    }

    // Doubles are passed in a float register/slot via the dedicated path.
    if (srcType == TyFloat64)
    {
        m_lowererMD.LoadDoubleHelperArgument(instr, src1);
    }
    m_lowererMD.LoadHelperArgument(instr,
        IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    LoadScriptContext(instr);
    if (srcType != TyFloat64)
    {
        m_lowererMD.LoadHelperArgument(instr, src1);
    }
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);

    IR::Opnd *baseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);

    dst->Free(this->m_func);

    if (newDst)
    {
        instr->SetDst(newDst);
    }

    m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, nullptr, nullptr, isHelper);

    return instrPrev;
}
  7771. ///----------------------------------------------------------------------------
  7772. ///
  7773. /// Lowerer::LowerLdElemI
  7774. ///
  7775. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerLdElemI(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool isHelper)
{
    // Lowers an element load (LdElemI and friends) into a call to the runtime
    // helper 'helperMethod', after specializing the helper to the index type
    // (int32 / uint32 / boxed var) and the destination type (var / native int /
    // native float). Returns the instruction preceding 'instr' so the caller's
    // lowering loop can resume from there.
    IR::Instr *instrPrev = instr->m_prev;

    // Profiled loads are lowered through a dedicated path that records profile data.
    if(instr->IsJitProfilingInstr())
    {
        Assert(helperMethod == IR::HelperOp_GetElementI);
        Assert(!isHelper);
        LowerProfiledLdElemI(instr->AsJitProfilingInstr());
        return instrPrev;
    }

    // Stack-args optimization: emit the fast arguments-object load inline and
    // replace the slow path with a bailout instead of a generic helper call.
    if (!isHelper && instr->DoStackArgsOpt())
    {
        IR::LabelInstr * labelLdElem = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
        // Pass in null for labelFallThru to only generate the LdHeapArgument call
        GenerateFastArgumentsLdElemI(instr, nullptr);
        instr->InsertBefore(labelLdElem);
        instr->UnlinkSrc1();
        instr->UnlinkDst();
        Assert(instr->HasBailOutInfo() && instr->GetBailOutKind() == IR::BailOutKind::BailOnStackArgsOutOfActualsRange);
        instr = GenerateBailOut(instr, nullptr, nullptr);
        return instrPrev;
    }

    IR::Opnd *src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd");
    IR::IndirOpnd *indirOpnd = src1->AsIndirOpnd();
    bool loadScriptContext = true;
    IRType dstType = instr->GetDst()->GetType();

    // Select the helper variant matching the index operand's type.
    IR::Opnd *indexOpnd = indirOpnd->UnlinkIndexOpnd();
    if (indexOpnd && indexOpnd->GetType() != TyVar)
    {
        Assert(indexOpnd->GetType() == TyUint32 || indexOpnd->GetType() == TyInt32);
        switch (helperMethod)
        {
        case IR::HelperOp_GetElementI:
            if (indexOpnd->GetType() == TyUint32)
            {
                helperMethod =
                    dstType == TyVar ? IR::HelperOp_GetElementI_UInt32 :
                    dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI_UInt32 :
                    IR::HelperOp_GetNativeFloatElementI_UInt32;
            }
            else
            {
                helperMethod =
                    dstType == TyVar ? IR::HelperOp_GetElementI_Int32 :
                    dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI_Int32 :
                    IR::HelperOp_GetNativeFloatElementI_Int32;
            }
            break;

        case IR::HelperOp_GetMethodElement:
            // Method/typeof element loads always produce a boxed var.
            Assert(dstType == TyVar);
            helperMethod = indexOpnd->GetType() == TyUint32?
                IR::HelperOp_GetMethodElement_UInt32 : IR::HelperOp_GetMethodElement_Int32;
            break;

        case IR::HelperOp_TypeofElem:
            Assert(dstType == TyVar);
            helperMethod = indexOpnd->GetType() == TyUint32?
                IR::HelperOp_TypeofElem_UInt32 : IR::HelperOp_TypeofElem_Int32;
            break;

        default:
            Assert(false);
        }
    }
    else
    {
        if (indexOpnd == nullptr)
        {
            // No index; the offset identifies the element.
            IntConstType offset = (IntConstType)src1->AsIndirOpnd()->GetOffset();
            indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
        }
        if (dstType != TyVar)
        {
            // The native-element helpers used here are called without a script
            // context argument.
            loadScriptContext = false;
            helperMethod =
                dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI : IR::HelperOp_GetNativeFloatElementI;
        }
    }

    // Jitted loop bodies have volatile information about values created outside the loop, so don't update array creation site
    // profile data from jitted loop bodies
    if(!m_func->IsLoopBody())
    {
        const ValueType baseValueType(indirOpnd->GetBaseOpnd()->GetValueType());
        if( baseValueType.IsLikelyObject() &&
            baseValueType.GetObjectType() == ObjectType::Array &&
            !baseValueType.HasIntElements())
        {
            // Swap in the "Expecting..." helper variants for likely-array bases
            // with float or var elements.
            switch(helperMethod)
            {
            case IR::HelperOp_GetElementI:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_ExpectingVarArray;
                break;

            case IR::HelperOp_GetElementI_UInt32:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_UInt32_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_UInt32_ExpectingVarArray;
                break;

            case IR::HelperOp_GetElementI_Int32:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_Int32_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_Int32_ExpectingVarArray;
                break;
            }
        }
    }

    // Materialize the helper arguments ([scriptContext], index, base), free the
    // consumed indir operand, and turn 'instr' into the helper call itself.
    if (loadScriptContext)
    {
        LoadScriptContext(instr);
    }
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);

    IR::Opnd *baseOpnd = indirOpnd->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);

    src1->Free(this->m_func);

    m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, nullptr, nullptr, isHelper);

    return instrPrev;
}
  7898. void Lowerer::LowerLdLen(IR::Instr *const instr, const bool isHelper)
  7899. {
  7900. Assert(instr);
  7901. Assert(instr->m_opcode == Js::OpCode::LdLen_A);
  7902. // LdLen has persisted to this point for the sake of pre-lower opts.
  7903. // Turn it into a LdFld of the "length" property.
  7904. // This is normally a load of the internal "length" of an Array, so it probably doesn't benefit
  7905. // from inline caching.
  7906. if (instr->GetSrc1()->IsRegOpnd())
  7907. {
  7908. IR::RegOpnd * baseOpnd = instr->GetSrc1()->AsRegOpnd();
  7909. PropertySym* fieldSym = PropertySym::FindOrCreate(baseOpnd->m_sym->m_id, Js::PropertyIds::length, (uint32)-1, (uint)-1, PropertyKindData, m_func);
  7910. instr->ReplaceSrc1(IR::SymOpnd::New(fieldSym, TyVar, m_func));
  7911. }
  7912. LowerLdFld(instr, IR::HelperOp_GetProperty, IR::HelperOp_GetProperty, false, nullptr, isHelper);
  7913. }
// Emits the actual array-element move for an asm.js/wasm load or store and, when
// poisoning is enabled, surrounds it with bounds-derived masking to defeat
// speculative out-of-bounds access. Returns the inserted move instruction.
// 'src2' is the array-size operand used by the preceding bounds check; 'indexOpnd'
// is the (possibly null) index component of the memory operand.
IR::Instr* InsertMaskableMove(bool isStore, bool generateWriteBarrier, IR::Opnd* dst, IR::Opnd* src1, IR::Opnd* src2, IR::Opnd* indexOpnd, IR::Instr* insertBeforeInstr, Lowerer* lowerer)
{
    Assert(insertBeforeInstr->m_func->GetJITFunctionBody()->IsAsmJsMode());
    // Mask with the bounds check operand to avoid speculation issues
    const bool usesFastArray = insertBeforeInstr->m_func->GetJITFunctionBody()->UsesWAsmJsFastVirtualBuffer();
    IR::RegOpnd* mask = nullptr;
    bool shouldMaskResult = false;
    if (!usesFastArray)
    {
        // Loads and stores are poisoned under separate configuration flags.
        bool shouldMask = isStore ? CONFIG_FLAG_RELEASE(PoisonTypedArrayStore) : CONFIG_FLAG_RELEASE(PoisonTypedArrayLoad);
        if (shouldMask && indexOpnd != nullptr)
        {
            // indices in asmjs fit in 32 bits, but we need a mask
            // temp = index + (accessSize - 1), i.e. the offset of the last byte touched.
            IR::RegOpnd* temp = IR::RegOpnd::New(indexOpnd->GetType(), insertBeforeInstr->m_func);
            lowerer->InsertMove(temp, indexOpnd, insertBeforeInstr, false);
            lowerer->InsertAdd(false, temp, temp, IR::IntConstOpnd::New((uint32)src1->GetSize() - 1, temp->GetType(), insertBeforeInstr->m_func, true), insertBeforeInstr);
            // For native ints and vars, we do the masking after the load; we don't do this for
            // floats and doubles because the conversion to and from fp regs is slow.
            shouldMaskResult = (!isStore) && IRType_IsNativeIntOrVar(src1->GetType()) && TySize[dst->GetType()] <= TySize[TyMachReg];
            // When we do post-load masking, we AND the mask with dst, so they need to have the
            // same type, as otherwise we'll hit asserts later on. When we do pre-load masking,
            // we AND the mask with the index component of the indir opnd for the move from the
            // array, so we need to align with that type instead.
            mask = IR::RegOpnd::New((shouldMaskResult ? dst : indexOpnd)->GetType(), insertBeforeInstr->m_func);
            if (temp->GetSize() != mask->GetSize())
            {
                // Widen the 32-bit index/size to pointer width so the subtract
                // below operates on same-sized operands.
                Assert(mask->GetSize() == MachPtr);
                Assert(src2->GetType() == TyUint32);
                temp = temp->UseWithNewType(TyMachPtr, insertBeforeInstr->m_func)->AsRegOpnd();
                src2 = src2->UseWithNewType(TyMachPtr, insertBeforeInstr->m_func)->AsRegOpnd();
            }
            // mask = (temp - arraySize) >> (width - 1). With Shr_A as an arithmetic
            // shift this replicates the sign bit: all-ones when the access end is
            // below the array size (in bounds), zero otherwise — TODO confirm Shr_A
            // is the arithmetic (sign-propagating) shift on all targets.
            lowerer->InsertSub(false, mask, temp, src2, insertBeforeInstr);
            lowerer->InsertShift(Js::OpCode::Shr_A, false, mask, mask, IR::IntConstOpnd::New(TySize[mask->GetType()] * 8 - 1, TyInt8, insertBeforeInstr->m_func), insertBeforeInstr);
            // If we're not masking the result, we're masking the index
            if (!shouldMaskResult)
            {
                lowerer->InsertAnd(indexOpnd, indexOpnd, mask, insertBeforeInstr);
            }
        }
    }

    IR::Instr* ret = lowerer->InsertMove(dst, src1, insertBeforeInstr, generateWriteBarrier);

    if(!usesFastArray && shouldMaskResult)
    {
        // Mask the result if we didn't use the mask earlier to mask the index
        lowerer->InsertAnd(dst, dst, mask, insertBeforeInstr);
    }
    return ret;
}
IR::Instr *
Lowerer::LowerLdArrViewElem(IR::Instr * instr)
{
#ifdef ASMJS_PLAT
    // Lowers an asm.js typed-array-view load. Out-of-bounds constant offsets
    // yield NaN (float types) or 0 (int types) instead of trapping; dynamic
    // accesses get a bounds-checked helper path.
    Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdArrViewElem);

    IR::Instr * instrPrev = instr->m_prev;

    IR::RegOpnd * indexOpnd = instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd();
    int32 offset = instr->GetSrc1()->AsIndirOpnd()->GetOffset();
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();

    IR::Instr * done;

    if (offset < 0)
    {
        // Statically out of bounds: replace the load with the type-appropriate
        // out-of-bounds value and reduce the instruction to a plain assign.
        IR::Opnd * oobValue = nullptr;
        if(dst->IsFloat32())
        {
            oobValue = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNaNAddr(), TyFloat32, m_func);
        }
        else if(dst->IsFloat64())
        {
            oobValue = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNaNAddr(), TyFloat64, m_func);
        }
        else
        {
            oobValue = IR::IntConstOpnd::New(0, dst->GetType(), m_func);
        }
        instr->ReplaceSrc1(oobValue);
        if (src2)
        {
            instr->FreeSrc2();
        }
        return m_lowererMD.ChangeToAssign(instr);
    }

    if (indexOpnd || m_func->GetJITFunctionBody()->GetAsmJsInfo()->AccessNeedsBoundCheck((uint32)offset))
    {
        // CMP indexOpnd, src2(arrSize)
        // JA $helper
        // JMP $load
        // $helper:
        // MOV dst, 0
        // JMP $done
        // $load:
        // MOV dst, src1([arrayBuffer + indexOpnd])
        // $done:
        Assert(!dst->IsFloat32() || src1->IsFloat32());
        Assert(!dst->IsFloat64() || src1->IsFloat64());
        done = m_lowererMD.LowerAsmJsLdElemHelper(instr);
    }
    else
    {
        // any access below 0x10000 is safe
        instr->UnlinkDst();
        instr->UnlinkSrc1();
        if (src2)
        {
            instr->FreeSrc2();
        }
        done = instr;
    }

    // Emit the (possibly poison-masked) element move before the 'done' point.
    InsertMaskableMove(false, true, dst, src1, src2, indexOpnd, done, this);

    instr->Remove();
    return instrPrev;
#else
    Assert(UNREACHED);
    return instr;
#endif
}
  8032. IR::Instr *
  8033. Lowerer::LowerWasmArrayBoundsCheck(IR::Instr * instr, IR::Opnd *addrOpnd)
  8034. {
  8035. uint32 offset = addrOpnd->AsIndirOpnd()->GetOffset();
  8036. // don't encode offset for wasm memory reads/writes
  8037. addrOpnd->AsIndirOpnd()->m_dontEncode = true;
  8038. // if offset/size overflow the max length, throw (this also saves us from having to do int64 math)
  8039. int64 constOffset = (int64)addrOpnd->GetSize() + (int64)offset;
  8040. if (constOffset >= Js::ArrayBuffer::MaxArrayBufferLength)
  8041. {
  8042. GenerateRuntimeError(instr, WASMERR_ArrayIndexOutOfRange, IR::HelperOp_WebAssemblyRuntimeError);
  8043. return instr;
  8044. }
  8045. else
  8046. {
  8047. return m_lowererMD.LowerWasmArrayBoundsCheck(instr, addrOpnd);
  8048. }
  8049. }
IR::Instr *
Lowerer::LowerLdArrViewElemWasm(IR::Instr * instr)
{
#ifdef ENABLE_WASM
    // Lowers a WebAssembly linear-memory load: bounds-check the address, then
    // emit the (possibly poison-masked) move from memory.
    Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdArrViewElemWasm);

    IR::Instr * instrPrev = instr->m_prev;

    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    Assert(!dst->IsFloat32() || src1->IsFloat32());
    Assert(!dst->IsFloat64() || src1->IsFloat64());

    IR::Instr * done = LowerWasmArrayBoundsCheck(instr, src1);

    IR::Instr* newMove = InsertMaskableMove(false, true, dst, src1, instr->GetSrc2(), src1->AsIndirOpnd()->GetIndexOpnd(), done, this);

    if (m_func->GetJITFunctionBody()->UsesWAsmJsFastVirtualBuffer())
    {
        // We need to have an AV when accessing out of bounds memory even if the dst is not used
        // Make sure LinearScan doesn't dead store this instruction
        newMove->hasSideEffects = true;
    }

    instr->Remove();
    return instrPrev;
#else
    Assert(UNREACHED);
    return instr;
#endif
}
IR::Instr *
Lowerer::LowerMemset(IR::Instr * instr, IR::RegOpnd * helperRet)
{
    // Lowers a Memset opcode into a call to HelperOp_Memset. 'helperRet'
    // receives the helper's success flag, which LowerMemOp tests for bailout.
    // Returns the new "previous" instruction when one was inserted, else null.
    IR::Opnd * dst = instr->UnlinkDst();
    IR::Opnd * src1 = instr->UnlinkSrc1();

    Assert(dst->IsIndirOpnd());
    IR::Opnd *baseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *indexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();

    IR::Opnd *sizeOpnd = instr->UnlinkSrc2();

    Assert(baseOpnd);
    Assert(sizeOpnd);
    Assert(indexOpnd);

    IR::JnHelperMethod helperMethod = IR::HelperOp_Memset;
    IR::Instr *instrPrev = nullptr;
    if (src1->IsRegOpnd() && !src1->IsVar())
    {
        // The helper takes a boxed value: convert a native fill value to a var first.
        IR::RegOpnd* varOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
        instrPrev = IR::Instr::New(Js::OpCode::ToVar, varOpnd, src1, instr->m_func);
        instr->InsertBefore(instrPrev);
        src1 = varOpnd;
    }

    instr->SetDst(helperRet);
    // Load helper arguments; this sequence (scriptContext, size, value, index,
    // base) must stay in sync with the HelperOp_Memset signature.
    LoadScriptContext(instr);
    m_lowererMD.LoadHelperArgument(instr, sizeOpnd);
    m_lowererMD.LoadHelperArgument(instr, src1);
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod);

    dst->Free(m_func);

    return instrPrev;
}
IR::Instr *
Lowerer::LowerMemcopy(IR::Instr * instr, IR::RegOpnd * helperRet)
{
    // Lowers a Memcopy opcode into a call to HelperOp_Memcopy. 'helperRet'
    // receives the helper's success flag, which LowerMemOp tests for bailout.
    // Always returns null: no instruction is inserted before 'instr'.
    IR::Opnd * dst = instr->UnlinkDst();
    IR::Opnd * src = instr->UnlinkSrc1();

    Assert(dst->IsIndirOpnd());
    Assert(src->IsIndirOpnd());

    IR::Opnd *dstBaseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *dstIndexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();

    IR::Opnd *srcBaseOpnd = src->AsIndirOpnd()->UnlinkBaseOpnd();
    IR::Opnd *srcIndexOpnd = src->AsIndirOpnd()->UnlinkIndexOpnd();

    IR::Opnd *sizeOpnd = instr->UnlinkSrc2();

    Assert(sizeOpnd);
    Assert(dstBaseOpnd);
    Assert(dstIndexOpnd);
    Assert(srcBaseOpnd);
    Assert(srcIndexOpnd);

    IR::JnHelperMethod helperMethod = IR::HelperOp_Memcopy;

    instr->SetDst(helperRet);
    // Load helper arguments; this sequence (scriptContext, size, srcIndex,
    // srcBase, dstIndex, dstBase) must stay in sync with HelperOp_Memcopy.
    LoadScriptContext(instr);
    m_lowererMD.LoadHelperArgument(instr, sizeOpnd);
    m_lowererMD.LoadHelperArgument(instr, srcIndexOpnd);
    m_lowererMD.LoadHelperArgument(instr, srcBaseOpnd);
    m_lowererMD.LoadHelperArgument(instr, dstIndexOpnd);
    m_lowererMD.LoadHelperArgument(instr, dstBaseOpnd);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod);

    dst->Free(m_func);
    src->Free(m_func);

    return nullptr;
}
IR::Instr *
Lowerer::LowerMemOp(IR::Instr * instr)
{
    // Lowers a Memset/Memcopy instruction: peels each attached bailout kind off
    // one at a time (emitting its check), wires the helper's boolean return into
    // a BailOutOnMemOpError check, then dispatches to LowerMemset/LowerMemcopy.
    Assert(instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    IR::Instr *instrPrev = instr->m_prev;

    // Holds the helper's success flag; tested below for BailOutOnMemOpError.
    IR::RegOpnd* helperRet = IR::RegOpnd::New(TyInt8, instr->m_func);
    const bool isHelper = false;
    AssertMsg(instr->HasBailOutInfo(), "Expected bailOut on MemOp instruction");
    if (instr->HasBailOutInfo())
    {
        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        // Each branch lowers one bailout kind, removes it from the mask, and
        // asserts the instruction's remaining kinds agree with the local mask.
        if (bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
        {
            // Head-segment invalidation and missing-value bailouts are mutually exclusive here.
            Assert(!(bailOutKind & IR::BailOutOnMissingValue));
            LowerBailOnInvalidatedArrayHeadSegment(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayHeadSegment;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        else if (bailOutKind & IR::BailOutOnMissingValue)
        {
            LowerBailOnCreatedMissingValue(instr, isHelper);
            bailOutKind ^= IR::BailOutOnMissingValue;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }

        if (bailOutKind & IR::BailOutOnInvalidatedArrayLength)
        {
            LowerBailOnInvalidatedArrayLength(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayLength;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }

        AssertMsg(bailOutKind & IR::BailOutOnMemOpError, "Expected BailOutOnMemOpError on MemOp instruction");
        if (bailOutKind & IR::BailOutOnMemOpError)
        {
            // Insert or get continue label
            IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isHelper);
            Func *const func = instr->m_func;
            LowerOneBailOutKind(instr, IR::BailOutOnMemOpError, isHelper);
            IR::Instr *const insertBeforeInstr = instr->m_next;

            // A nonzero helper return means success: skip the bailout.
            // test helperRet, helperRet
            // jz $skipBailOut
            InsertCompareBranch(
                helperRet,
                IR::IntConstOpnd::New(0, TyInt8, func),
                Js::OpCode::BrNeq_A,
                skipBailOutLabel,
                insertBeforeInstr);

            // (Bail out with IR::BailOutOnMemOpError)

            // $skipBailOut:
            bailOutKind ^= IR::BailOutOnMemOpError;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }

        instr->ClearBailOutInfo();
    }

    IR::Instr* newInstrPrev = nullptr;
    if (instr->m_opcode == Js::OpCode::Memset)
    {
        newInstrPrev = LowerMemset(instr, helperRet);
    }
    else if (instr->m_opcode == Js::OpCode::Memcopy)
    {
        newInstrPrev = LowerMemcopy(instr, helperRet);
    }

    // LowerMemset may insert an instruction before 'instr'; prefer that as the
    // resume point when it exists.
    if (newInstrPrev != nullptr)
    {
        instrPrev = newInstrPrev;
    }
    return instrPrev;
}
  8206. IR::Instr*
  8207. Lowerer::LowerStAtomicsWasm(IR::Instr* instr)
  8208. {
  8209. #ifdef ENABLE_WASM
  8210. Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
  8211. Assert(instr);
  8212. Assert(instr->m_opcode == Js::OpCode::StAtomicWasm);
  8213. IR::Instr * instrPrev = instr->m_prev;
  8214. IR::Opnd * dst = instr->GetDst();
  8215. IR::Opnd * src1 = instr->GetSrc1();
  8216. Assert(IRType_IsNativeInt(dst->GetType()));
  8217. IR::Instr * done = LowerWasmArrayBoundsCheck(instr, dst);
  8218. m_lowererMD.LowerAtomicStore(dst, src1, done);
  8219. instr->Remove();
  8220. return instrPrev;
  8221. #else
  8222. Assert(UNREACHED);
  8223. return instr;
  8224. #endif
  8225. }
  8226. IR::Instr * Lowerer::LowerLdAtomicsWasm(IR::Instr * instr)
  8227. {
  8228. #ifdef ENABLE_WASM
  8229. Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
  8230. Assert(instr);
  8231. Assert(instr->m_opcode == Js::OpCode::LdAtomicWasm);
  8232. IR::Instr * instrPrev = instr->m_prev;
  8233. IR::Opnd * dst = instr->GetDst();
  8234. IR::Opnd * src1 = instr->GetSrc1();
  8235. Assert(IRType_IsNativeInt(dst->GetType()));
  8236. IR::Instr * done = LowerWasmArrayBoundsCheck(instr, src1);
  8237. m_lowererMD.LowerAtomicLoad(dst, src1, done);
  8238. instr->Remove();
  8239. return instrPrev;
  8240. #else
  8241. Assert(UNREACHED);
  8242. return instr;
  8243. #endif
  8244. }
IR::Instr *
Lowerer::LowerStArrViewElem(IR::Instr * instr)
{
#ifdef ASMJS_PLAT
    // Lowers an asm.js/wasm typed-array-view store. Wasm goes through the wasm
    // bounds check; asm.js drops statically out-of-bounds stores, bounds-checks
    // dynamic accesses, and stores small constant offsets directly.
    Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StArrViewElem);

    IR::Instr * instrPrev = instr->m_prev;

    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();

    // type of dst is the type of array
    IR::RegOpnd * indexOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
    int32 offset = dst->AsIndirOpnd()->GetOffset();

    Assert(!dst->IsFloat32() || src1->IsFloat32());
    Assert(!dst->IsFloat64() || src1->IsFloat64());
    Assert(!dst->IsInt64() || src1->IsInt64());

    IR::Instr * done;
    if (m_func->GetJITFunctionBody()->IsWasmFunction())
    {
        done = LowerWasmArrayBoundsCheck(instr, dst);
    }
    else if (offset < 0)
    {
        // Statically out-of-bounds asm.js store: the store is simply dropped.
        instr->Remove();
        return instrPrev;
    }
    else if (indexOpnd || m_func->GetJITFunctionBody()->GetAsmJsInfo()->AccessNeedsBoundCheck((uint32)offset))
    {
        // CMP indexOpnd, src2(arrSize)
        // JA $helper
        // JMP $store
        // $helper:
        // JMP $done
        // $store:
        // MOV dst([arrayBuffer + indexOpnd]), src1
        // $done:
        done = m_lowererMD.LowerAsmJsStElemHelper(instr);
    }
    else
    {
        // any constant access below 0x10000 is safe, as that is the min heap size
        instr->UnlinkDst();
        instr->UnlinkSrc1();
        done = instr;
        if (src2)
        {
            instr->FreeSrc2();
        }
    }
    // wasm memory buffer is not recycler allocated, so we shouldn't generate write barrier
    InsertMaskableMove(true, false, dst, src1, src2, indexOpnd, done, this);

    instr->Remove();
    return instrPrev;
#else
    Assert(UNREACHED);
    return instr;
#endif
}
  8304. IR::Instr *
  8305. Lowerer::LowerArrayDetachedCheck(IR::Instr * instr)
  8306. {
  8307. // TEST isDetached, isDetached
  8308. // JE Done
  8309. // Helper:
  8310. // CALL Js::Throw::OutOfMemory
  8311. // Done:
  8312. Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
  8313. IR::Instr * instrPrev = instr->m_prev;
  8314. IR::Opnd * isDetachedOpnd = instr->UnlinkSrc1();
  8315. Assert(isDetachedOpnd->IsIndirOpnd() || isDetachedOpnd->IsMemRefOpnd());
  8316. IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
  8317. IR::LabelInstr * helperLabel = InsertLabel(true, instr);
  8318. InsertTestBranch(isDetachedOpnd, isDetachedOpnd, Js::OpCode::BrNotNeq_A, doneLabel, helperLabel);
  8319. m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_OutOfMemoryError);
  8320. return instrPrev;
  8321. }
  8322. ///----------------------------------------------------------------------------
  8323. ///
  8324. /// Lowerer::LowerDeleteElemI
  8325. ///
  8326. ///----------------------------------------------------------------------------
  8327. IR::Instr *
  8328. Lowerer::LowerDeleteElemI(IR::Instr * instr, bool strictMode)
  8329. {
  8330. IR::Instr *instrPrev;
  8331. IR::Opnd *src1 = instr->UnlinkSrc1();
  8332. AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd on DeleteElementI");
  8333. Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
  8334. if (strictMode)
  8335. {
  8336. propertyOperationFlag = Js::PropertyOperation_StrictMode;
  8337. }
  8338. instrPrev = instr->m_prev;
  8339. IR::JnHelperMethod helperMethod = IR::HelperOp_DeleteElementI;
  8340. IR::Opnd *indexOpnd = src1->AsIndirOpnd()->UnlinkIndexOpnd();
  8341. if (indexOpnd)
  8342. {
  8343. if (indexOpnd->GetType() == TyInt32)
  8344. {
  8345. helperMethod = IR::HelperOp_DeleteElementI_Int32;
  8346. }
  8347. else if (indexOpnd->GetType() == TyUint32)
  8348. {
  8349. helperMethod = IR::HelperOp_DeleteElementI_UInt32;
  8350. }
  8351. else
  8352. {
  8353. Assert(indexOpnd->GetType() == TyVar);
  8354. }
  8355. }
  8356. else
  8357. {
  8358. // No index; the offset identifies the element.
  8359. IntConstType offset = (IntConstType)src1->AsIndirOpnd()->GetOffset();
  8360. indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
  8361. }
  8362. m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
  8363. LoadScriptContext(instr);
  8364. m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  8365. IR::Opnd *baseOpnd = src1->AsIndirOpnd()->UnlinkBaseOpnd();
  8366. m_lowererMD.LoadHelperArgument(instr, baseOpnd);
  8367. src1->Free(this->m_func);
  8368. m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  8369. return instrPrev;
  8370. }
  8371. IR::Opnd *
  8372. Lowerer::GetForInEnumeratorFieldOpnd(IR::Opnd * forInEnumeratorOpnd, uint fieldOffset, IRType type)
  8373. {
  8374. if (forInEnumeratorOpnd->IsSymOpnd())
  8375. {
  8376. IR::SymOpnd * symOpnd = forInEnumeratorOpnd->AsSymOpnd();
  8377. return IR::SymOpnd::New(symOpnd->GetStackSym(), symOpnd->m_offset + fieldOffset, type, this->m_func);
  8378. }
  8379. Assert(forInEnumeratorOpnd->IsIndirOpnd());
  8380. IR::IndirOpnd * indirOpnd = forInEnumeratorOpnd->AsIndirOpnd();
  8381. return IR::IndirOpnd::New(indirOpnd->GetBaseOpnd(), indirOpnd->GetOffset() + fieldOffset, type, this->m_func);
  8382. }
void
Lowerer::GenerateFastBrBReturn(IR::Instr * instr)
{
    // Emits the fast path for BrOnEmpty/BrOnNotEmpty (for-in iteration): when
    // the enumerator's cached data is still valid, fetch the next property
    // string and object index directly from the cache and branch, falling back
    // to $helper otherwise. The helper label is left at the end for the caller's
    // slow-path code.
    Assert(instr->m_opcode == Js::OpCode::BrOnEmpty || instr->m_opcode == Js::OpCode::BrOnNotEmpty);
    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
    IR::Opnd * forInEnumeratorOpnd = instr->GetSrc1();

    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr * loopBody = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // CMP forInEnumerator->canUseJitFastPath, 0
    // JEQ $helper
    IR::Opnd * canUseJitFastPathOpnd = GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfCanUseJitFastPath(), TyInt8);
    InsertCompareBranch(canUseJitFastPathOpnd, IR::IntConstOpnd::New(0, TyInt8, this->m_func), Js::OpCode::BrEq_A, labelHelper, instr);

    // The cache is only valid while the object's type is unchanged since the
    // enumerator was initialized.
    // MOV objectOpnd, forInEnumerator->enumerator.object
    // MOV cachedDataTypeOpnd, forInEnumerator->enumerator.cachedDataType
    // CMP cachedDataTypeOpnd, objectOpnd->type
    // JNE $helper
    IR::RegOpnd * objectOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(objectOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObject(), TyMachPtr), instr);
    IR::RegOpnd * cachedDataTypeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(cachedDataTypeOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialType(), TyMachPtr), instr);
    InsertCompareBranch(cachedDataTypeOpnd, IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, this->m_func),
        Js::OpCode::BrNeq_A, labelHelper, instr);

    // More cached properties remain? If so, jump to the fetch below.
    // MOV cachedDataOpnd, forInEnumeratorOpnd->enumerator.cachedData
    // MOV enumeratedCountOpnd, forInEnumeratorOpnd->enumerator.enumeratedCount
    // CMP enumeratedCountOpnd, cachedDataOpnd->cachedCount
    // JLT $loopBody
    IR::RegOpnd * cachedDataOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(cachedDataOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorCachedData(), TyMachPtr), instr);
    IR::RegOpnd * enumeratedCountOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(enumeratedCountOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorEnumeratedCount(), TyUint32), instr);
    InsertCompareBranch(enumeratedCountOpnd,
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataCachedCount(), TyUint32, this->m_func),
        Js::OpCode::BrLt_A, loopBody, instr);

    // Cache exhausted: if enumeration completed, take the "empty" edge
    // (fallthrough for BrOnNotEmpty, branch target for BrOnEmpty); otherwise
    // fall back to the helper.
    // CMP cacheData.completed, 0
    // JNE $loopEnd
    // JMP $helper
    IR::LabelInstr * labelAfter = instr->GetOrCreateContinueLabel();
    InsertCompareBranch(
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataCompleted(), TyInt8, this->m_func),
        IR::IntConstOpnd::New(0, TyInt8, this->m_func),
        Js::OpCode::BrNeq_A, instr->m_opcode == Js::OpCode::BrOnNotEmpty ? labelAfter : instr->AsBranchInstr()->GetTarget(), instr);
    InsertBranch(Js::OpCode::Br, labelHelper, instr);

    // $loopBody:
    instr->InsertBefore(loopBody);

    IR::Opnd * opndDst = instr->GetDst(); // ForIn result propertyString
    Assert(opndDst->IsRegOpnd());

    // Fetch the cached property string for the current enumeration index.
    // MOV stringsOpnd, cachedData->strings
    // MOV opndDst, stringsOpnd[enumeratedCount]
    IR::RegOpnd * stringsOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(stringsOpnd,
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataStrings(), TyMachPtr, this->m_func), instr);
    InsertMove(opndDst,
        IR::IndirOpnd::New(stringsOpnd, enumeratedCountOpnd, m_lowererMD.GetDefaultIndirScale(), TyVar, this->m_func), instr);

    // Fetch the cached object index and store it back into the enumerator.
    // MOV indexesOpnd, cachedData->indexes
    // MOV objectIndexOpnd, indexesOpnd[enumeratedCount]
    // MOV forInEnumeratorOpnd->enumerator.objectIndex, objectIndexOpnd
    IR::RegOpnd * indexesOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(indexesOpnd,
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataIndexes(), TyMachPtr, this->m_func), instr);
    IR::RegOpnd * objectIndexOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(objectIndexOpnd,
        IR::IndirOpnd::New(indexesOpnd, enumeratedCountOpnd, IndirScale4, TyUint32, this->m_func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObjectIndex(), TyUint32),
        objectIndexOpnd, instr);

    // Advance the enumeration count.
    // INC enumeratedCountOpnd
    // MOV forInEnumeratorOpnd->enumerator.enumeratedCount, enumeratedCountOpnd
    InsertAdd(false, enumeratedCountOpnd, enumeratedCountOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorEnumeratedCount(), TyUint32),
        enumeratedCountOpnd, instr);

    // We know result propertyString (opndDst) != NULL
    InsertBranch(Js::OpCode::Br, instr->m_opcode == Js::OpCode::BrOnNotEmpty ? instr->AsBranchInstr()->GetTarget() : labelAfter, instr);

    // $helper
    instr->InsertBefore(labelHelper);
    // $after
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerBrBReturn - lower a 1-operand (boolean) conditional branch
/// (BrOnEmpty / BrOnNotEmpty over a for-in enumerator) into a helper call
/// followed by a conditional branch on the helper's result.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerBrBReturn(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool isHelper)
{
    IR::Instr * instrPrev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndDst;
    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
    Assert(instr->m_opcode == Js::OpCode::BrOnEmpty || instr->m_opcode == Js::OpCode::BrOnNotEmpty);
    // Materialize the ForInEnumerator address and pass it as the sole helper argument.
    IR::RegOpnd * forInEnumeratorRegOpnd = GenerateForInEnumeratorLoad(instr->UnlinkSrc1(), instr);
    instrPrev = m_lowererMD.LoadHelperArgument(instr, forInEnumeratorRegOpnd);
    // Generate helper call to convert the unknown operand to boolean
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    // The branch's dst receives the helper's return value (the enumerated property).
    opndDst = instr->UnlinkDst();
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);
    // Branch on the result of the call: BrOnNotEmpty takes the branch when the
    // helper produced a value (true), BrOnEmpty when it did not (false).
    instr->m_opcode = (instr->m_opcode == Js::OpCode::BrOnNotEmpty? Js::OpCode::BrTrue_A : Js::OpCode::BrFalse_A);
    instr->SetSrc1(opndDst);
    IR::Instr *loweredInstr;
    loweredInstr = this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, isHelper);
#if DBG
    // When lowering inside a helper block, the resulting branch leaves the helper
    // region; tag it so debug-only helper/non-helper flow verification accepts it.
    if (isHelper)
    {
        if (!loweredInstr->IsBranchInstr())
        {
            loweredInstr = loweredInstr->GetNextBranchOrLabel();
        }
        if (loweredInstr->IsBranchInstr())
        {
            loweredInstr->AsBranchInstr()->m_isHelperToNonHelperBranch = true;
        }
    }
#endif
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerMultiBr
/// - Lowers the instruction for dictionary look up(string case arms)
///
///----------------------------------------------------------------------------
IR::Instr* Lowerer::LowerMultiBr(IR::Instr * instr, IR::JnHelperMethod helperMethod)
{
    IR::Instr * instrPrev = instr->m_prev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;
    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnd on BrB");
    // Push the args in reverse order.
    // The end and start labels for the function are used to guarantee
    // that the dictionary jump destinations haven't been tampered with, so we
    // will always jump to some location within this function
    IR::LabelOpnd * endFuncOpnd = IR::LabelOpnd::New(m_func->EnsureFuncEndLabel(), m_func);
    m_lowererMD.LoadHelperArgument(instr, endFuncOpnd);
    IR::LabelOpnd * startFuncOpnd = IR::LabelOpnd::New(m_func->EnsureFuncStartLabel(), m_func);
    m_lowererMD.LoadHelperArgument(instr, startFuncOpnd);
    //Load the address of the dictionary pair- Js::StringDictionaryWrapper
    auto dictionary = instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary();
    if (this->m_func->IsOOPJIT())
    {
        // Out-of-process JIT: the dictionary lives in the NativeCodeData block, so
        // compute its address as an offset from the NativeCodeData base register.
        auto dictionaryOffset = NativeCodeData::GetDataTotalOffset(dictionary);
        auto addressRegOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
        Lowerer::InsertLea(addressRegOpnd,
            IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), dictionaryOffset, TyMachPtr,
#if DBG
            NativeCodeData::GetDataDescription(dictionary, this->m_func->m_alloc),
#endif
            this->m_func, true), instr);
        // Keep the NativeCodeData base sym alive across loop back-edges.
        this->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        m_lowererMD.LoadHelperArgument(instr, addressRegOpnd);
    }
    else
    {
        // In-process JIT: the dictionary address can be embedded directly.
        IR::AddrOpnd* nativestringDictionaryOpnd = IR::AddrOpnd::New(dictionary, IR::AddrOpndKindDynamicMisc, this->m_func);
        m_lowererMD.LoadHelperArgument(instr, nativestringDictionaryOpnd);
    }
    //Load the String passed in the Switch expression for look up - JavascriptString
    opndSrc = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, opndSrc);
    // Generate helper call for dictionary lookup.
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyMachPtr,this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyMachPtr, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);
    // The helper returns the target code address; branch indirectly through it.
    instr->SetSrc1(instrCall->GetDst());
    instr->m_opcode = LowererMD::MDMultiBranchOpcode;
    return instrPrev;
}
  8561. void
  8562. Lowerer::LowerJumpTableMultiBranch(IR::MultiBranchInstr * multiBrInstr, IR::RegOpnd * indexOpnd)
  8563. {
  8564. Func * func = this->m_func;
  8565. IR::Opnd * opndDst = IR::RegOpnd::New(TyMachPtr, func);
  8566. //Move the native address of the jump table to a register
  8567. IR::LabelInstr * nativeJumpTableLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  8568. nativeJumpTableLabel->m_isDataLabel = true;
  8569. IR::LabelOpnd * nativeJumpTable = IR::LabelOpnd::New(nativeJumpTableLabel, m_func);
  8570. IR::RegOpnd * nativeJumpTableReg = IR::RegOpnd::New(TyMachPtr, func);
  8571. InsertMove(nativeJumpTableReg, nativeJumpTable, multiBrInstr);
  8572. BranchJumpTableWrapper * branchJumpTable = multiBrInstr->GetBranchJumpTable();
  8573. AssertMsg(branchJumpTable->labelInstr == nullptr, "Should not be already assigned");
  8574. branchJumpTable->labelInstr = nativeJumpTableLabel;
  8575. //Indirect addressing @ target location in the jump table.
  8576. //MOV eax, [nativeJumpTableReg + (offset * indirScale)]
  8577. BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
  8578. IR::Opnd * opndSrc = IR::IndirOpnd::New(nativeJumpTableReg, indexOpnd, indirScale, TyMachReg, this->m_func);
  8579. IR::Instr * indirInstr = InsertMove(opndDst, opndSrc, multiBrInstr);
  8580. //MultiBr eax
  8581. multiBrInstr->SetSrc1(indirInstr->GetDst());
  8582. //Jump to the address at the target location in the jump table
  8583. multiBrInstr->m_opcode = LowererMD::MDMultiBranchOpcode;
  8584. }
  8585. ///----------------------------------------------------------------------------
  8586. ///
  8587. /// Lowerer::LowerMultiBr
  8588. /// - Lowers the instruction for jump table(consecutive integer case arms)
  8589. ///
  8590. ///----------------------------------------------------------------------------
  8591. IR::Instr* Lowerer::LowerMultiBr(IR::Instr * instr)
  8592. {
  8593. IR::Instr * instrPrev = instr->m_prev;
  8594. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnd on BrB");
  8595. AssertMsg(instr->IsBranchInstr() && instr->AsBranchInstr()->IsMultiBranch(), "Bad Instruction Lowering Call to LowerMultiBr()");
  8596. IR::MultiBranchInstr * multiBrInstr = instr->AsBranchInstr()->AsMultiBrInstr();
  8597. IR::RegOpnd * offset = instr->UnlinkSrc1()->AsRegOpnd();
  8598. LowerJumpTableMultiBranch(multiBrInstr, offset);
  8599. return instrPrev;
  8600. }
  8601. IR::Instr* Lowerer::LowerBrBMem(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  8602. {
  8603. IR::Instr * instrPrev;
  8604. IR::Instr * instrCall;
  8605. IR::HelperCallOpnd * opndHelper;
  8606. IR::Opnd * opndSrc;
  8607. IR::Opnd * opndDst;
  8608. StackSym * symDst;
  8609. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
  8610. instrPrev = LoadScriptContext(instr);
  8611. opndSrc = instr->UnlinkSrc1();
  8612. m_lowererMD.LoadHelperArgument(instr, opndSrc);
  8613. // Generate helper call to convert the unknown operand to boolean
  8614. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  8615. symDst = StackSym::New(TyVar, this->m_func);
  8616. opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
  8617. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  8618. instr->InsertBefore(instrCall);
  8619. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  8620. // Branch on the result of the call
  8621. instr->SetSrc1(opndDst);
  8622. m_lowererMD.LowerCondBranch(instr);
  8623. return instrPrev;
  8624. }
  8625. IR::Instr* Lowerer::LowerBrOnObject(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  8626. {
  8627. IR::Instr * instrPrev;
  8628. IR::Instr * instrCall;
  8629. IR::HelperCallOpnd * opndHelper;
  8630. IR::Opnd * opndSrc;
  8631. IR::Opnd * opndDst;
  8632. StackSym * symDst;
  8633. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
  8634. opndSrc = instr->UnlinkSrc1();
  8635. instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);
  8636. // Generate helper call to check if the operand's type is object
  8637. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  8638. symDst = StackSym::New(TyVar, this->m_func);
  8639. opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
  8640. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  8641. instr->InsertBefore(instrCall);
  8642. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  8643. // Branch on the result of the call
  8644. instr->SetSrc1(opndDst);
  8645. m_lowererMD.LowerCondBranch(instr);
  8646. return instrPrev;
  8647. }
  8648. IR::Instr * Lowerer::LowerBrOnClassConstructor(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  8649. {
  8650. IR::Instr * instrPrev;
  8651. IR::Instr * instrCall;
  8652. IR::HelperCallOpnd * opndHelper;
  8653. IR::Opnd * opndSrc;
  8654. IR::Opnd * opndDst;
  8655. StackSym * symDst;
  8656. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
  8657. opndSrc = instr->UnlinkSrc1();
  8658. instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);
  8659. // Generate helper call to check if the operand's type is object
  8660. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  8661. symDst = StackSym::New(TyVar, this->m_func);
  8662. opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
  8663. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  8664. instr->InsertBefore(instrCall);
  8665. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  8666. // Branch on the result of the call
  8667. instr->SetSrc1(opndDst);
  8668. m_lowererMD.LowerCondBranch(instr);
  8669. return instrPrev;
  8670. }
// Lower CmEq/CmNeq/CmSrEq/CmSrNeq: try a ladder of fast paths (float compare,
// typeof pattern, strict-equality shortcuts, string compare, definite-object
// compare, likely-equal, bool/int, tagged int) before falling back to the
// generic helper. Some fast paths fully resolve the compare and clear
// needHelper, in which case the original instruction is removed.
IR::Instr *
Lowerer::LowerEqualityCompare(IR::Instr* instr, IR::JnHelperMethod helper)
{
    IR::Instr * instrPrev = instr->m_prev;
    bool needHelper = true;
    bool fNoLower = false;
    bool isStrictCompare = instr->m_opcode == Js::OpCode::CmSrEq_A || instr->m_opcode == Js::OpCode::CmSrNeq_A;
    if (instr->GetSrc1()->IsFloat())
    {
        // Both operands are machine floats: a direct float compare suffices.
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        this->m_lowererMD.GenerateFastCmXxR8(instr);
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, m_func) || !m_func->DoFastPaths())
    {
        // Fast paths disabled: go straight to the helper.
        LowerBinaryHelperMem(instr, helper);
    }
    else if (TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, instr->IsNeq(), &fNoLower))
    {
        // typeof(x) == "..." pattern; fNoLower means the fast path fully
        // replaced the compare and no helper call is needed.
        if (!fNoLower)
        {
            LowerBinaryHelperMem(instr, helper);
        }
    }
    else if (isStrictCompare && TryGenerateFastCmSrXx(instr))
    {
        // Strict compare fully handled by the fast path; nothing more to emit.
    }
    else
    {
        if (GenerateFastBrOrCmString(instr))
        {
            // String fast path emitted; helper remains as the slow path.
            LowerBinaryHelperMem(instr, helper);
        }
        else if (isStrictCompare && GenerateFastBrOrCmEqDefinite(instr, helper, &needHelper, false, false))
        {
            if (needHelper)
            {
                LowerBinaryHelperMem(instr, helper);
            }
        }
        else if(GenerateFastCmEqLikely(instr, &needHelper, false) || GenerateFastEqBoolInt(instr, &needHelper, false))
        {
            if (needHelper)
            {
                // Strict compares get the pointer-comparison slow path;
                // loose compares go through the generic helper.
                if (isStrictCompare)
                {
                    LowerStrictBrOrCm(instr, helper, false, false /* isBranch */, true);
                }
                else
                {
                    LowerBinaryHelperMem(instr, helper);
                }
            }
        }
        else if (!m_lowererMD.GenerateFastCmXxTaggedInt(instr, false))
        {
            if (isStrictCompare)
            {
                LowerStrictBrOrCm(instr, helper, false, false /* isBranch */, false);
            }
            else
            {
                LowerBinaryHelperMem(instr, helper);
            }
        }
    }
    if (!needHelper)
    {
        // A fast path proved the helper unnecessary; drop the original compare.
        instr->Remove();
    }
    return instrPrev;
}
// Lower BrEq/BrNeq/BrSrEq/BrSrNeq (and their Not* aliases): float compares are
// handled directly; otherwise a ladder of fast paths is tried before the
// generic BrCMem helper lowering. If a fast path clears needHelper the branch
// is removed — unless it targets a loop top, where the helper call is kept so
// loop structure is preserved.
IR::Instr *
Lowerer::LowerEqualityBranch(IR::Instr* instr, IR::JnHelperMethod helper)
{
    IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
    IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
    IR::Instr * instrPrev = instr->m_prev;
    bool fNoLower = false;
    const bool noFastPath = PHASE_OFF(Js::BranchFastPathPhase, m_func) || !m_func->DoFastPaths();
    if (instr->GetSrc1()->IsFloat())
    {
        // Machine float compare: lower directly to a float branch.
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        m_lowererMD.LowerToFloat(instr);
        return instrPrev;
    }
    if (instr->GetSrc2()->IsFloat())
    {
        // Put the float operand in src1 before lowering to a float branch.
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        instr->SwapOpnds();
        m_lowererMD.LowerToFloat(instr);
        return instrPrev;
    }
    if (noFastPath)
    {
        LowerBrCMem(instr, helper, true, false /*isHelper*/);
        return instrPrev;
    }
    if (TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, instr->IsNeq(), &fNoLower))
    {
        // typeof(x) == "..." branch pattern; fNoLower means fully handled.
        if (!fNoLower)
        {
            LowerBrCMem(instr, helper, false, false /*isHelper*/);
        }
        return instrPrev;
    }
    bool done = false;
    bool isStrictCompare = false;
    // Opcode-specific shortcuts (e.g. compare against known constants).
    switch(instr->m_opcode)
    {
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrNotEq_A:
        done = TryGenerateFastBrNeq(instr);
        break;
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
        done = TryGenerateFastBrEq(instr);
        break;
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
        isStrictCompare = true;
        done = TryGenerateFastBrSrXx(instr, srcReg1, srcReg2, &instrPrev, noFastPath);
        break;
    default:
        Assume(UNREACHED);
    }
    if (done)
    {
        return instrPrev;
    }
    bool needHelper = true;
    bool hasStrFastPath = false;
    if (GenerateFastBrOrCmString(instr))
    {
        // String fast path emitted; the helper call below is its slow path.
        hasStrFastPath = true;
        LowerBrCMem(instr, helper, false, true);
    }
    else if (isStrictCompare && GenerateFastBrOrCmEqDefinite(instr, helper, &needHelper, true, hasStrFastPath))
    {
        if (needHelper)
        {
            LowerBrCMem(instr, helper, true /*noMathFastPath*/, hasStrFastPath);
        }
    }
    else if (GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper, hasStrFastPath) || GenerateFastEqBoolInt(instr, &needHelper, hasStrFastPath))
    {
        if (needHelper)
        {
            if (isStrictCompare)
            {
                LowerStrictBrOrCm(instr, helper, false, true /* isBranch */, true);
            }
            else
            {
                LowerBrCMem(instr, helper, false, hasStrFastPath);
            }
        }
    }
    else if (needHelper)
    {
        if (isStrictCompare)
        {
            LowerStrictBrOrCm(instr, helper, false, true /* isBranch */, false);
        }
        else
        {
            LowerBrCMem(instr, helper, false, hasStrFastPath);
        }
    }
    if (!needHelper)
    {
        if (instr->AsBranchInstr()->GetTarget()->m_isLoopTop)
        {
            // Keep the lowered branch when it targets a loop top so the loop's
            // back-edge structure stays intact.
            LowerBrCMem(instr, helper, false, hasStrFastPath);
        }
        else
        {
            instr->Remove();
        }
    }
    return instrPrev;
}
// Generate fast path for StrictEquals for objects that are not GlobalObject, HostDispatch or External to be pointer comparison
//
// Lowers a strict-equality compare (isBranch == false: produces true/false into
// dst) or branch (isBranch == true: jumps to the branch target). When the math
// fast path is allowed, emits inline checks that both operands are plain engine
// objects (not numbers, strings, host dispatch, global object, or
// engine-external types) so a raw pointer comparison decides equality; all
// other cases fall through to $helper, which calls the generic helper.
IR::Instr *
Lowerer::LowerStrictBrOrCm(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool noMathFastPath, bool isBranch, bool isHelper)
{
    IR::Instr * instrPrev = instr->m_prev;
    IR::LabelInstr * labelHelper = nullptr;
    IR::LabelInstr * labelFallThrough = nullptr;
    IR::LabelInstr * labelBranchSuccess = nullptr;
    IR::LabelInstr * labelBranchFailure = nullptr;
    LibraryValue successValueType = ValueInvalid;
    LibraryValue failureValueType = ValueInvalid;
    bool isEqual = !instr->IsNeq();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();
    AssertMsg(src1 != nullptr && src2 != nullptr, "Expected 2 src opnds on BrC");
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if (!noMathFastPath)
    {
        labelFallThrough = instr->GetOrCreateContinueLabel(isHelper);
        if (!isBranch)
        {
            // Compare form: success/failure labels load the library true/false
            // values into dst (swapped for Neq).
            labelBranchSuccess = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelBranchFailure = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            successValueType = isEqual ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
            failureValueType = isEqual ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;
        }
        else
        {
            // Branch form: success/failure map directly onto the branch target
            // and the fall-through label (swapped for Neq).
            labelBranchSuccess = isEqual ? instr->AsBranchInstr()->GetTarget() : labelFallThrough;
            labelBranchFailure = isEqual ? labelFallThrough : instr->AsBranchInstr()->GetTarget();
        }
        if (src1->IsEqual(src2))
        {
            // Identical operands: strictly equal unless they might be floats
            // (NaN !== NaN), in which case the fast path is skipped.
            if (instr->GetSrc1()->GetValueType().IsNotFloat())
            {
                if (!isBranch)
                {
                    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, successValueType), instr);
                    InsertBranch(Js::OpCode::Br, labelFallThrough, instr);
                }
                else
                {
                    IR::BranchInstr * branch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelBranchSuccess, this->m_func);
                    instr->InsertBefore(branch);
                }
                instr->Remove();
                return instrPrev;
            }
#if !FLOATVAR
            // Boxed-float build: a tagged object that is a boxed number must go
            // to the helper (same-reference NaN comparison would be wrong).
            m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instr, labelHelper);
            IR::RegOpnd *src1TypeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
            Lowerer::InsertMove(src1TypeReg, IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func), instr);
            // MOV src1TypeIdReg, [src1TypeReg + offset(typeId)]
            IR::RegOpnd *src1TypeIdReg = IR::RegOpnd::New(TyInt32, this->m_func);
            Lowerer::InsertMove(src1TypeIdReg, IR::IndirOpnd::New(src1TypeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func), instr);
            // CMP src1TypeIdReg, TypeIds_Number
            // JEQ $helper
            IR::IntConstOpnd *numberTypeId = IR::IntConstOpnd::New(Js::TypeIds_Number, TyInt32, this->m_func, true);
            InsertCompareBranch(src1TypeIdReg, numberTypeId, Js::OpCode::BrEq_A, labelHelper, instr);
#else
            // NaN-boxed build: any non-object var is a number; object test alone suffices.
            m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instr, labelHelper);
#endif
            IR::BranchInstr * branch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelBranchSuccess, this->m_func);
            instr->InsertBefore(branch);
        }
        else
        {
            m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instr, labelHelper);
#if !FLOATVAR
            // Boxed-float build: load the type id up front to rule out boxed numbers
            // before the pointer comparison.
            IR::RegOpnd *src1TypeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
            Lowerer::InsertMove(src1TypeReg, IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func), instr);
            // MOV src1TypeIdReg, [src1TypeReg + offset(typeId)]
            IR::RegOpnd *src1TypeIdReg = IR::RegOpnd::New(TyInt32, this->m_func);
            Lowerer::InsertMove(src1TypeIdReg, IR::IndirOpnd::New(src1TypeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func), instr);
            // CMP src1TypeIdReg, TypeIds_Number
            // JEQ $helper
            IR::IntConstOpnd *numberTypeId = IR::IntConstOpnd::New(Js::TypeIds_Number, TyInt32, this->m_func, true);
            InsertCompareBranch(src1TypeIdReg, numberTypeId, Js::OpCode::BrEq_A, labelHelper, instr);
#endif
            // CMP src1, src2 - Ptr comparison
            // JEQ $branchSuccess
            InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, labelBranchSuccess, instr);
#if FLOATVAR
            // NaN-boxed build: the type id is only needed after the pointer compare fails.
            IR::RegOpnd *src1TypeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
            Lowerer::InsertMove(src1TypeReg, IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func), instr);
            // MOV src1TypeIdReg, [src1TypeReg + offset(typeId)]
            IR::RegOpnd *src1TypeIdReg = IR::RegOpnd::New(TyInt32, this->m_func);
            Lowerer::InsertMove(src1TypeIdReg, IR::IndirOpnd::New(src1TypeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func), instr);
#endif
            // CMP src1TypeIdReg, TypeIds_HostDispatch
            // JLE $helper (le condition covers string, int64, uint64, hostdispatch, as well as undefined, null, boolean)
            IR::IntConstOpnd *hostDispatchTypeId = IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, this->m_func, true);
            InsertCompareBranch(src1TypeIdReg, hostDispatchTypeId, Js::OpCode::BrLe_A, labelHelper, instr);
            // CMP src1TypeIdReg, TypeIds_GlobalObject
            // JE $helper
            IR::IntConstOpnd *globalObjectTypeId = IR::IntConstOpnd::New(Js::TypeIds_GlobalObject, TyInt32, this->m_func, true);
            InsertCompareBranch(src1TypeIdReg, globalObjectTypeId, Js::OpCode::BrEq_A, labelHelper, instr);
            // TEST src1TypeReg->flags, TypeFlagMask_EngineExternal
            // JE $helper
            IR::Opnd *flags = IR::IndirOpnd::New(src1TypeReg, Js::Type::GetOffsetOfFlags(), TyInt8, this->m_func);
            InsertTestBranch(flags, IR::IntConstOpnd::New(TypeFlagMask_EngineExternal, TyInt8, this->m_func), Js::OpCode::BrNeq_A, labelHelper, instr);
            if (src2->IsRegOpnd())
            {
                // src2 must also be a non-external engine object for the pointer
                // inequality to be conclusive.
                m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper);
                // MOV src2TypeReg, [src2 + offset(type)]
                // TEST [src2TypeReg + offset(flags)], TypeFlagMask_EngineExternal
                // JE $helper
                IR::RegOpnd *src2TypeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
                IR::IndirOpnd *src2Type = IR::IndirOpnd::New(src2->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
                Lowerer::InsertMove(src2TypeReg, src2Type, instr);
                IR::Opnd *src2Flags = IR::IndirOpnd::New(src2TypeReg, Js::Type::GetOffsetOfFlags(), TyInt8, this->m_func);
                InsertTestBranch(src2Flags, IR::IntConstOpnd::New(TypeFlagMask_EngineExternal, TyInt8, this->m_func), Js::OpCode::BrNeq_A, labelHelper, instr);
            }
            // JMP $done
            IR::BranchInstr * branch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelBranchFailure, this->m_func);
            instr->InsertBefore(branch);
        }
        if (!isBranch)
        {
            // Compare form: materialize true/false into dst at the success and
            // failure labels, then jump past the helper.
            instr->InsertBefore(labelBranchSuccess);
            InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, successValueType), instr);
            InsertBranch(Js::OpCode::Br, labelFallThrough, instr);
            instr->InsertBefore(labelBranchFailure);
            InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, failureValueType), instr);
            InsertBranch(Js::OpCode::Br, labelFallThrough, instr);
        }
    }
    // $helper: fall back to the generic strict-equality helper.
    instr->InsertBefore(labelHelper);
    if (isBranch)
    {
        LowerBrCMem(instr, helperMethod, true, true);
    }
    else
    {
        LowerBinaryHelperMem(instr, helperMethod);
    }
    return instrPrev;
}
// Lower a 2-src conditional branch into a helper call plus BrTrue_A/BrFalse_A.
// Op_NotEqual / Op_NotStrictEqual are rewritten to call the positive helper
// with the branch inverted (faster than the negating helper). The opcode
// switch afterwards records inversion for bailout bookkeeping and for the
// BrNot{Ge,Gt,Le,Lt} forms, which branch on a false helper result.
IR::Instr *
Lowerer::LowerBrCMem(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool noMathFastPath, bool isHelper)
{
    IR::Instr * instrPrev = instr->m_prev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;
    bool inverted = false;
    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() != nullptr, "Expected 2 src opnds on BrC");
    if (!noMathFastPath && !this->GenerateFastCondBranch(instr->AsBranchInstr(), &isHelper))
    {
        // The fast path fully lowered the branch; no helper call needed.
        return instrPrev;
    }
    // Push the args in reverse order.
    // The string-equality helpers don't take a script context; the
    // empty-string helper additionally takes only one operand.
    const bool loadScriptContext = !(helperMethod == IR::HelperOp_StrictEqualString || helperMethod == IR::HelperOp_StrictEqualEmptyString);
    const bool loadArg2 = !(helperMethod == IR::HelperOp_StrictEqualEmptyString);
    if (helperMethod == IR::HelperOp_NotEqual)
    {
        // Op_NotEqual() returns !Op_Equal(). It is faster to call Op_Equal() directly.
        helperMethod = IR::HelperOp_Equal;
        instr->AsBranchInstr()->Invert();
        inverted = true;
    }
    else if(helperMethod == IR::HelperOp_NotStrictEqual)
    {
        // Op_NotStrictEqual() returns !Op_StrictEqual(). It is faster to call Op_StrictEqual() directly.
        helperMethod = IR::HelperOp_StrictEqual;
        instr->AsBranchInstr()->Invert();
        inverted = true;
    }
    if (loadScriptContext)
        LoadScriptContext(instr);
    opndSrc = instr->UnlinkSrc2();
    if (loadArg2)
        m_lowererMD.LoadHelperArgument(instr, opndSrc);
    opndSrc = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, opndSrc);
    // Generate helper call to compare the source operands.
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyMachReg, this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyMachReg, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrSrNotNeq_A:
        // Inverted-branch bailouts must know the branch sense was flipped.
        if (instr->HasBailOutInfo())
        {
            instr->GetBailOutInfo()->isInvertedBranch = true;
        }
        break;
    case Js::OpCode::BrNotGe_A:
    case Js::OpCode::BrNotGt_A:
    case Js::OpCode::BrNotLe_A:
    case Js::OpCode::BrNotLt_A:
        // These helpers compute the positive relation; branch when it is false.
        inverted = true;
        break;
    }
    // Branch if the result is "true".
    instr->SetSrc1(opndDst);
    instr->m_opcode = (inverted ? Js::OpCode::BrFalse_A : Js::OpCode::BrTrue_A);
    this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, !noMathFastPath && isHelper);
    return instrPrev;
}
  9062. IR::Instr *
  9063. Lowerer::LowerBrFncApply(IR::Instr * instr, IR::JnHelperMethod helperMethod) {
  9064. IR::Instr * instrPrev = instr->m_prev;
  9065. IR::Instr * instrCall;
  9066. IR::HelperCallOpnd * opndHelper;
  9067. IR::Opnd * opndSrc;
  9068. IR::Opnd * opndDst;
  9069. StackSym * symDst;
  9070. AssertMsg(instr->GetSrc1() != nullptr, "Expected 1 src opnd on BrFncApply");
  9071. LoadScriptContext(instr);
  9072. opndSrc = instr->UnlinkSrc1();
  9073. m_lowererMD.LoadHelperArgument(instr, opndSrc);
  9074. // Generate helper call to compare the source operands.
  9075. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  9076. symDst = StackSym::New(TyMachReg, this->m_func);
  9077. opndDst = IR::RegOpnd::New(symDst, TyMachReg, this->m_func);
  9078. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  9079. instr->InsertBefore(instrCall);
  9080. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  9081. // Branch if the result is "true".
  9082. instr->SetSrc1(opndDst);
  9083. instr->m_opcode = Js::OpCode::BrTrue_A;
  9084. m_lowererMD.LowerCondBranch(instr);
  9085. return instrPrev;
  9086. }
  9087. ///----------------------------------------------------------------------------
  9088. ///
  9089. /// Lowerer::LowerBrProperty - lower branch-on-has/no-property
  9090. ///
  9091. ///----------------------------------------------------------------------------
  9092. IR::Instr *
  9093. Lowerer::LowerBrProperty(IR::Instr * instr, IR::JnHelperMethod helper)
  9094. {
  9095. IR::Instr * instrPrev;
  9096. IR::Instr * instrCall;
  9097. IR::HelperCallOpnd * opndHelper;
  9098. IR::Opnd * opndSrc;
  9099. IR::Opnd * opndDst;
  9100. opndSrc = instr->UnlinkSrc1();
  9101. AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
  9102. "Expected propertySym as src of BrProperty");
  9103. instrPrev = LoadScriptContext(instr);
  9104. this->LoadPropertySymAsArgument(instr, opndSrc);
  9105. opndHelper = IR::HelperCallOpnd::New(helper, this->m_func);
  9106. opndDst = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), TyMachReg, this->m_func);
  9107. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  9108. instr->InsertBefore(instrCall);
  9109. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  9110. // Branch on the result of the call
  9111. instr->SetSrc1(opndDst);
  9112. switch (instr->m_opcode)
  9113. {
  9114. case Js::OpCode::BrOnHasProperty:
  9115. instr->m_opcode = Js::OpCode::BrTrue_A;
  9116. break;
  9117. case Js::OpCode::BrOnNoProperty:
  9118. instr->m_opcode = Js::OpCode::BrFalse_A;
  9119. break;
  9120. default:
  9121. AssertMsg(0, "Unknown opcode on BrProperty branch");
  9122. break;
  9123. }
  9124. this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, false);
  9125. return instrPrev;
  9126. }
  9127. ///----------------------------------------------------------------------------
  9128. ///
  9129. /// Lowerer::LowerElementUndefined
  9130. ///
  9131. ///----------------------------------------------------------------------------
  9132. IR::Instr *
  9133. Lowerer::LowerElementUndefined(IR::Instr * instr, IR::JnHelperMethod helper)
  9134. {
  9135. IR::Opnd *dst = instr->UnlinkDst();
  9136. AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as dst of Ld Undefined");
  9137. // Pass the property sym to store to
  9138. this->LoadPropertySymAsArgument(instr, dst);
  9139. m_lowererMD.ChangeToHelperCall(instr, helper);
  9140. return instr;
  9141. }
  9142. IR::Instr *
  9143. Lowerer::LowerElementUndefinedMem(IR::Instr * instr, IR::JnHelperMethod helper)
  9144. {
  9145. // Pass script context
  9146. IR::Instr * instrPrev = LoadScriptContext(instr);
  9147. this->LowerElementUndefined(instr, helper);
  9148. return instrPrev;
  9149. }
  9150. IR::Instr *
  9151. Lowerer::LowerLdElemUndef(IR::Instr * instr)
  9152. {
  9153. if (this->m_func->GetJITFunctionBody()->IsEval())
  9154. {
  9155. return LowerElementUndefinedMem(instr, IR::HelperOp_LdElemUndefDynamic);
  9156. }
  9157. else
  9158. {
  9159. return LowerElementUndefined(instr, IR::HelperOp_LdElemUndef);
  9160. }
  9161. }
  9162. ///----------------------------------------------------------------------------
  9163. ///
  9164. /// Lowerer::LowerElementUndefinedScoped
  9165. ///
  9166. ///----------------------------------------------------------------------------
  9167. IR::Instr *
  9168. Lowerer::LowerElementUndefinedScoped(IR::Instr * instr, IR::JnHelperMethod helper)
  9169. {
  9170. IR::Instr * instrPrev = instr->m_prev;
  9171. // Pass the default instance
  9172. IR::Opnd *src = instr->UnlinkSrc1();
  9173. m_lowererMD.LoadHelperArgument(instr, src);
  9174. // Pass the property sym to store to
  9175. IR::Opnd * dst = instr->UnlinkDst();
  9176. AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as dst of Ld Undefined Scoped");
  9177. this->LoadPropertySymAsArgument(instr, dst);
  9178. m_lowererMD.ChangeToHelperCall(instr, helper);
  9179. return instrPrev;
  9180. }
  9181. IR::Instr *
  9182. Lowerer::LowerElementUndefinedScopedMem(IR::Instr * instr, IR::JnHelperMethod helper)
  9183. {
  9184. // Pass script context
  9185. IR::Instr * instrPrev = LoadScriptContext(instr);
  9186. this->LowerElementUndefinedScoped(instr, helper);
  9187. return instrPrev;
  9188. }
  9189. void
  9190. Lowerer::LowerStLoopBodyCount(IR::Instr* instr)
  9191. {
  9192. intptr_t header = m_func->m_workItem->GetLoopHeaderAddr();
  9193. IR::MemRefOpnd *loopBodyCounterOpnd = IR::MemRefOpnd::New((BYTE*)(header) + Js::LoopHeader::GetOffsetOfProfiledLoopCounter(), TyUint32, this->m_func);
  9194. instr->SetDst(loopBodyCounterOpnd);
  9195. instr->ReplaceSrc1(instr->GetSrc1()->AsRegOpnd()->UseWithNewType(TyUint32, this->m_func));
  9196. IR::AutoReuseOpnd autoReuse(loopBodyCounterOpnd, this->m_func);
  9197. m_lowererMD.ChangeToAssign(instr);
  9198. return;
  9199. }
  9200. #if !FLOATVAR
  9201. IR::Instr *
  9202. Lowerer::LowerStSlotBoxTemp(IR::Instr *stSlot)
  9203. {
  9204. // regVar = BoxStackNumber(src, scriptContext)
  9205. IR::RegOpnd * regSrc = stSlot->UnlinkSrc1()->AsRegOpnd();
  9206. IR::Instr * instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
  9207. IR::RegOpnd *regVar = IR::RegOpnd::New(TyVar, this->m_func);
  9208. instr->SetDst(regVar);
  9209. instr->SetSrc1(regSrc);
  9210. stSlot->InsertBefore(instr);
  9211. this->LowerUnaryHelperMem(instr, IR::HelperBoxStackNumber);
  9212. stSlot->SetSrc1(regVar);
  9213. return this->LowerStSlot(stSlot);
  9214. }
  9215. #endif
  9216. IR::Opnd *
  9217. Lowerer::CreateOpndForSlotAccess(IR::Opnd * opnd)
  9218. {
  9219. IR::SymOpnd * symOpnd = opnd->AsSymOpnd();
  9220. PropertySym * dstSym = symOpnd->m_sym->AsPropertySym();
  9221. if (!m_func->IsLoopBody() &&
  9222. m_func->DoStackFrameDisplay() &&
  9223. (dstSym->m_stackSym == m_func->GetLocalClosureSym() || dstSym->m_stackSym == m_func->GetLocalFrameDisplaySym()))
  9224. {
  9225. // Stack closure syms are made to look like slot accesses for the benefit of GlobOpt, so that it can do proper
  9226. // copy prop and implicit call bailout. But what we really want is local stack load/store.
  9227. // Don't do this for loop body, though, since we don't have the value saved on the stack.
  9228. IR::SymOpnd * closureSym = IR::SymOpnd::New(dstSym->m_stackSym, 0, TyMachReg, this->m_func);
  9229. closureSym->GetStackSym()->m_isClosureSym = true;
  9230. return closureSym;
  9231. }
  9232. int32 offset = dstSym->m_propertyId;
  9233. if (!m_func->GetJITFunctionBody()->IsAsmJsMode())
  9234. {
  9235. offset = offset * TySize[opnd->GetType()];
  9236. }
  9237. #ifdef ASMJS_PLAT
  9238. if (m_func->IsTJLoopBody())
  9239. {
  9240. offset = offset - m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetTotalSizeInBytes();
  9241. }
  9242. #endif
  9243. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(symOpnd->CreatePropertyOwnerOpnd(m_func),
  9244. offset , opnd->GetType(), this->m_func);
  9245. return indirOpnd;
  9246. }
  9247. IR::Instr* Lowerer::AddSlotArrayCheck(PropertySym *propertySym, IR::Instr* instr)
  9248. {
  9249. if (propertySym->m_stackSym != m_func->GetLocalClosureSym() || PHASE_OFF(Js::ClosureRangeCheckPhase, m_func))
  9250. {
  9251. return instr->m_prev;
  9252. }
  9253. IR::Instr *instrDef = propertySym->m_stackSym->m_instrDef;
  9254. bool doDynamicCheck = this->m_func->IsLoopBody();
  9255. bool insertSlotArrayCheck = false;
  9256. uint32 slotId = (uint32)propertySym->m_propertyId;
  9257. if (instrDef)
  9258. {
  9259. switch (instrDef->m_opcode)
  9260. {
  9261. case Js::OpCode::NewScopeSlots:
  9262. case Js::OpCode::NewStackScopeSlots:
  9263. case Js::OpCode::NewScopeSlotsWithoutPropIds:
  9264. {
  9265. IR::Opnd *allocOpnd = allocOpnd = instrDef->GetSrc1();
  9266. uint32 allocCount = allocOpnd->AsIntConstOpnd()->AsUint32();
  9267. if (slotId >= allocCount)
  9268. {
  9269. Js::Throw::FatalInternalError();
  9270. }
  9271. break;
  9272. }
  9273. case Js::OpCode::ArgIn_A:
  9274. break;
  9275. case Js::OpCode::LdSlot:
  9276. case Js::OpCode::LdSlotArr:
  9277. {
  9278. if (doDynamicCheck && slotId > Js::ScopeSlots::FirstSlotIndex)
  9279. {
  9280. insertSlotArrayCheck = true;
  9281. }
  9282. break;
  9283. }
  9284. case Js::OpCode::SlotArrayCheck:
  9285. {
  9286. uint32 currentSlotId = instrDef->GetSrc2()->AsIntConstOpnd()->AsInt32();
  9287. if (slotId > currentSlotId)
  9288. {
  9289. instrDef->ReplaceSrc2(IR::IntConstOpnd::New(slotId, TyUint32, m_func));
  9290. }
  9291. break;
  9292. }
  9293. default:
  9294. Js::Throw::FatalInternalError();
  9295. }
  9296. }
  9297. if (insertSlotArrayCheck)
  9298. {
  9299. IR::Instr *insertInstr = instrDef->m_next;
  9300. IR::RegOpnd *dstOpnd = instrDef->UnlinkDst()->AsRegOpnd();
  9301. IR::Instr *checkInstr = IR::Instr::New(Js::OpCode::SlotArrayCheck, dstOpnd, m_func);
  9302. dstOpnd = IR::RegOpnd::New(TyVar, m_func);
  9303. instrDef->SetDst(dstOpnd);
  9304. checkInstr->SetSrc1(dstOpnd);
  9305. // Attach the slot ID to the check instruction.
  9306. IR::IntConstOpnd *slotIdOpnd = IR::IntConstOpnd::New(slotId, TyUint32, m_func);
  9307. checkInstr->SetSrc2(slotIdOpnd);
  9308. insertInstr->InsertBefore(checkInstr);
  9309. }
  9310. return instr->m_prev;
  9311. }
  9312. IR::Instr *
  9313. Lowerer::LowerStSlot(IR::Instr *instr)
  9314. {
  9315. // StSlot stores the nth Var in the buffer pointed to by the property sym's stack sym.
  9316. IR::Opnd * dstOpnd = instr->UnlinkDst();
  9317. AssertMsg(dstOpnd, "Expected dst opnd on StSlot");
  9318. IR::Opnd * dstNew = this->CreateOpndForSlotAccess(dstOpnd);
  9319. dstOpnd->Free(this->m_func);
  9320. instr->SetDst(dstNew);
  9321. instr = m_lowererMD.ChangeToWriteBarrierAssign(instr, this->m_func);
  9322. return instr;
  9323. }
  9324. IR::Instr *
  9325. Lowerer::LowerStSlotChkUndecl(IR::Instr *instrStSlot)
  9326. {
  9327. Assert(instrStSlot->GetSrc2() != nullptr);
  9328. // Src2 is required only to avoid dead store false positives during GlobOpt.
  9329. instrStSlot->FreeSrc2();
  9330. IR::Opnd *dstOpnd = this->CreateOpndForSlotAccess(instrStSlot->GetDst());
  9331. IR::Instr *instr = this->LowerStSlot(instrStSlot);
  9332. this->GenUndeclChk(instr, dstOpnd);
  9333. return instr;
  9334. }
  9335. void Lowerer::LowerProfileLdSlot(IR::Opnd *const valueOpnd, Func *const ldSlotFunc, const Js::ProfileId profileId, IR::Instr *const insertBeforeInstr)
  9336. {
  9337. Assert(valueOpnd);
  9338. Assert(profileId != Js::Constants::NoProfileId);
  9339. Assert(insertBeforeInstr);
  9340. Func *const irFunc = insertBeforeInstr->m_func;
  9341. m_lowererMD.LoadHelperArgument(insertBeforeInstr, IR::Opnd::CreateProfileIdOpnd(profileId, irFunc));
  9342. m_lowererMD.LoadHelperArgument(insertBeforeInstr, CreateFunctionBodyOpnd(ldSlotFunc));
  9343. m_lowererMD.LoadHelperArgument(insertBeforeInstr, valueOpnd);
  9344. IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, irFunc);
  9345. callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfileLdSlot, irFunc));
  9346. insertBeforeInstr->InsertBefore(callInstr);
  9347. m_lowererMD.LowerCall(callInstr, 0);
  9348. }
  9349. void
  9350. Lowerer::LowerLdSlot(IR::Instr *instr)
  9351. {
  9352. IR::Opnd * srcOpnd = instr->UnlinkSrc1();
  9353. AssertMsg(srcOpnd, "Expected src opnd on LdSlot");
  9354. IR::Opnd * srcNew = this->CreateOpndForSlotAccess(srcOpnd);
  9355. srcOpnd->Free(this->m_func);
  9356. instr->SetSrc1(srcNew);
  9357. m_lowererMD.ChangeToAssign(instr);
  9358. }
  9359. IR::Instr *
  9360. Lowerer::LowerChkUndecl(IR::Instr *instr)
  9361. {
  9362. IR::Instr *instrPrev = instr->m_prev;
  9363. this->GenUndeclChk(instr, instr->GetSrc1());
  9364. instr->Remove();
  9365. return instrPrev;
  9366. }
  9367. void
  9368. Lowerer::GenUndeclChk(IR::Instr *instrInsert, IR::Opnd *opnd)
  9369. {
  9370. IR::LabelInstr *labelContinue = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  9371. InsertCompareBranch(
  9372. opnd,
  9373. LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndeclBlockVar),
  9374. Js::OpCode::BrNeq_A, labelContinue, instrInsert);
  9375. IR::LabelInstr *labelThrow = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  9376. instrInsert->InsertBefore(labelThrow);
  9377. IR::Instr *instr = IR::Instr::New(
  9378. Js::OpCode::RuntimeReferenceError,
  9379. IR::RegOpnd::New(TyMachReg, m_func),
  9380. IR::IntConstOpnd::New(SCODE_CODE(JSERR_UseBeforeDeclaration), TyInt32, m_func),
  9381. m_func);
  9382. instrInsert->InsertBefore(instr);
  9383. this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeReferenceError);
  9384. instrInsert->InsertBefore(labelContinue);
  9385. }
  9386. ///----------------------------------------------------------------------------
  9387. ///
  9388. /// Lowerer::LowerStElemC
  9389. ///
  9390. ///----------------------------------------------------------------------------
// Lowers StElemC / StArrSegElemC: a store to a constant index of an array (or
// array segment). The constant index is folded into the indir's byte offset;
// likely-native arrays additionally get type/conversion guards with bailout.
// Returns the instruction at which lowering should resume (stElem->m_prev).
IR::Instr *
Lowerer::LowerStElemC(IR::Instr * stElem)
{
    IR::Instr *instrPrev = stElem->m_prev;
    IR::IndirOpnd * indirOpnd = stElem->GetDst()->AsIndirOpnd();
    IR::RegOpnd *indexOpnd = indirOpnd->UnlinkIndexOpnd();
    Assert(!indexOpnd || indexOpnd->m_sym->IsIntConst());

    // Recover the constant element index: either from the (constant) index sym,
    // or from the indir's offset when no index opnd is present.
    IntConstType value;
    if (indexOpnd)
    {
        value = indexOpnd->AsRegOpnd()->m_sym->GetIntConstValue();
        indexOpnd->Free(this->m_func);
    }
    else
    {
        value = (IntConstType)indirOpnd->GetOffset();
    }

    if (stElem->IsJitProfilingInstr())
    {
        // Profiling build: replace the store with a call to the simple store
        // helper (array vs. array-segment variant), passing base and index.
        Assert(stElem->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
        m_lowererMD.LoadHelperArgument(stElem, stElem->UnlinkSrc1());
        const auto meth = stElem->m_opcode == Js::OpCode::StElemC ? IR::HelperSimpleStoreArrayHelper : IR::HelperSimpleStoreArraySegHelper;
        stElem->SetSrc1(IR::HelperCallOpnd::New(meth, m_func));
        m_lowererMD.LoadHelperArgument(stElem, IR::IntConstOpnd::New(value, TyUint32, m_func));
        m_lowererMD.LoadHelperArgument(stElem, indirOpnd->UnlinkBaseOpnd());
        stElem->UnlinkDst()->Free(m_func);
        m_lowererMD.LowerCall(stElem, 0);
        return instrPrev;
    }

    // base = byte offset of the inlined head segment's elements within the
    // array object (or within the segment for StArrSegElemC).
    IntConstType base;
    IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    if(baseValueType.IsLikelyNativeArray())
    {
        Assert(stElem->m_opcode == Js::OpCode::StElemC);
        IR::LabelInstr *labelBailOut = nullptr;
        IR::Instr *instrBailOut = nullptr;

        if (stElem->HasBailOutInfo())
        {
            // Split the bailout off into a separate BailOut instruction that the
            // guards below can branch to; the store itself continues inline.
            labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            instrBailOut = stElem;
            stElem = IR::Instr::New(instrBailOut->m_opcode, m_func);
            instrBailOut->TransferTo(stElem);
            instrBailOut->InsertBefore(stElem);
            IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            InsertBranch(Js::OpCode::Br, labelDone, instrBailOut);
            instrBailOut->InsertBefore(labelBailOut);
            instrBailOut->InsertAfter(labelDone);
            instrBailOut->m_opcode = Js::OpCode::BailOut;
            GenerateBailOut(instrBailOut);
        }

        if (!baseValueType.IsObject())
        {
            // Likely native array: do a vtable check and bail if it fails.
            Assert(labelBailOut);
            GenerateArrayTest(baseOpnd, labelBailOut, labelBailOut, stElem, true);
        }

        if (stElem->GetSrc1()->GetType() == TyVar)
        {
            // Storing a non-specialized value. This may cause array conversion, which invalidates all the code
            // that depends on the array check we've already done.
            // Call a helper that returns the type ID of the resulting array, check it here against the one we
            // expect, and bail if it fails.
            Assert(labelBailOut);
            // Call a helper to (try and) unbox the var and store it.
            // If we had to convert the array to do the store, we'll bail.
            LoadScriptContext(stElem);
            m_lowererMD.LoadHelperArgument(stElem, stElem->UnlinkSrc1());

            IR::IntConstOpnd * intConstIndexOpnd = IR::IntConstOpnd::New(value, TyUint32, m_func);
            m_lowererMD.LoadHelperArgument(stElem, intConstIndexOpnd);
            m_lowererMD.LoadHelperArgument(stElem, indirOpnd->UnlinkBaseOpnd());
            IR::JnHelperMethod helperMethod;
            if (baseValueType.HasIntElements())
            {
                helperMethod = IR::HelperScrArr_SetNativeIntElementC;
            }
            else
            {
                helperMethod = IR::HelperScrArr_SetNativeFloatElementC;
            }
            // The helper's return value is the array's type id after the store;
            // compare it against the expected native type id and bail on mismatch.
            IR::Instr *instrInsertBranch = stElem->m_next;
            IR::RegOpnd *typeIdOpnd = IR::RegOpnd::New(TyUint32, m_func);
            stElem->ReplaceDst(typeIdOpnd);
            m_lowererMD.ChangeToHelperCall(stElem, helperMethod);
            InsertCompareBranch(
                typeIdOpnd,
                IR::IntConstOpnd::New(
                    baseValueType.HasIntElements() ?
                        Js::TypeIds_NativeIntArray : Js::TypeIds_NativeFloatArray, TyUint32, m_func),
                Js::OpCode::BrNeq_A,
                labelBailOut,
                instrInsertBranch);
            return instrPrev;
        }
        else if (baseValueType.HasIntElements() && labelBailOut)
        {
            // Int arrays use a sentinel for missing items; storing that value
            // would corrupt the array, so bail out instead.
            Assert(stElem->GetSrc1()->GetType() == GetArrayIndirType(baseValueType));
            IR::Opnd* missingElementOpnd = GetMissingItemOpnd(stElem->GetSrc1()->GetType(), m_func);
            if (!stElem->GetSrc1()->IsEqual(missingElementOpnd))
            {
                InsertMissingItemCompareBranch(stElem->GetSrc1(), Js::OpCode::BrEq_A, labelBailOut, stElem);
            }
            else
            {
                //Its a missing value store and data flow proves that src1 is always missing value. Array cannot be an int array at the first place
                //if this code was ever hit. Just bailout, this code path would be updated with the profile information next time around.
                InsertBranch(Js::OpCode::Br, labelBailOut, stElem);
#if DBG
                labelBailOut->m_noHelperAssert = true;
#endif
                stElem->Remove();
                return instrPrev;
            }
        }
        else
        {
            Assert(stElem->GetSrc1()->GetType() == GetArrayIndirType(baseValueType));
        }

        stElem->GetDst()->SetType(stElem->GetSrc1()->GetType());
        Assert(value <= Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE);

        if(baseValueType.HasIntElements())
        {
            base = sizeof(Js::JavascriptNativeIntArray) + offsetof(Js::SparseArraySegment<int32>, elements);
        }
        else
        {
            base = sizeof(Js::JavascriptNativeFloatArray) + offsetof(Js::SparseArraySegment<double>, elements);
        }
    }
    else if(baseValueType.IsLikelyObject() && baseValueType.GetObjectType() == ObjectType::Array)
    {
        // Var array: elements live in the inlined head segment right after the array object.
        Assert(stElem->m_opcode == Js::OpCode::StElemC);
        Assert(value <= Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE);
        base = sizeof(Js::JavascriptArray) + offsetof(Js::SparseArraySegment<Js::Var>, elements);
    }
    else
    {
        // Base is a segment pointer (StArrSegElemC) or unknown: offset from the
        // segment's elements only.
        Assert(stElem->m_opcode == Js::OpCode::StElemC || stElem->m_opcode == Js::OpCode::StArrSegElemC);
        Assert(indirOpnd->GetBaseOpnd()->GetType() == TyVar);
        base = offsetof(Js::SparseArraySegment<Js::Var>, elements);
    }

    Assert(value >= 0);

    // MOV [r3 + offset(element) + index], src
    const BYTE indirScale =
        baseValueType.IsLikelyAnyOptimizedArray() ? GetArrayIndirScale(baseValueType) : m_lowererMD.GetDefaultIndirScale();
    IntConstType offset = base + (value << indirScale);
    Assert(Math::FitsInDWord(offset));
    indirOpnd->SetOffset((int32)offset);

    m_lowererMD.ChangeToWriteBarrierAssign(stElem, this->m_func);

    return instrPrev;
}
  9542. void Lowerer::LowerLdArrHead(IR::Instr *const instr)
  9543. {
  9544. IR::RegOpnd *array = instr->UnlinkSrc1()->AsRegOpnd();
  9545. const ValueType arrayValueType(array->GetValueType());
  9546. Assert(arrayValueType.IsAnyOptimizedArray());
  9547. if(arrayValueType.GetObjectType() == ObjectType::ObjectWithArray)
  9548. {
  9549. array = LoadObjectArray(array, instr);
  9550. }
  9551. // mov arrayHeadSegment, [array + offset(headSegment)]
  9552. instr->GetDst()->SetType(TyMachPtr);
  9553. instr->SetSrc1(
  9554. IR::IndirOpnd::New(
  9555. array,
  9556. GetArrayOffsetOfHeadSegment(arrayValueType),
  9557. TyMachPtr,
  9558. instr->m_func));
  9559. LowererMD::ChangeToAssign(instr);
  9560. }
  9561. // Creates the rest parameter array.
  9562. // Var JavascriptArray::OP_NewScArrayWithElements(
  9563. // uint32 elementCount,
  9564. // Var *elements,
  9565. // ScriptContext* scriptContext)
  9566. IR::Instr *Lowerer::LowerRestParameter(IR::Opnd *formalsOpnd, IR::Opnd *dstOpnd, IR::Opnd *excessOpnd, IR::Instr *instr, IR::RegOpnd *generatorArgsPtrOpnd)
  9567. {
  9568. IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, dstOpnd, instr->m_func);
  9569. instr->InsertAfter(helperCallInstr);
  9570. // Var JavascriptArray::OP_NewScArrayWithElements(
  9571. // int32 elementCount,
  9572. // Var *elements,
  9573. // ScriptContext* scriptContext)
  9574. IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScArrayWithElements;
  9575. LoadScriptContext(helperCallInstr);
  9576. BOOL isGenerator = this->m_func->GetJITFunctionBody()->IsCoroutine();
  9577. // Elements pointer = ebp + (formals count + formals offset + 1)*sizeof(Var)
  9578. IR::RegOpnd *srcOpnd = isGenerator ? generatorArgsPtrOpnd : IR::Opnd::CreateFramePointerOpnd(this->m_func);
  9579. uint16 actualOffset = isGenerator ? 0 : GetFormalParamOffset(); //4
  9580. IR::RegOpnd *argPtrOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  9581. InsertAdd(false, argPtrOpnd, srcOpnd, IR::IntConstOpnd::New((formalsOpnd->AsIntConstOpnd()->GetValue() + actualOffset) * MachPtr, TyMachPtr, this->m_func), helperCallInstr);
  9582. m_lowererMD.LoadHelperArgument(helperCallInstr, argPtrOpnd);
  9583. m_lowererMD.LoadHelperArgument(helperCallInstr, excessOpnd);
  9584. m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);
  9585. return helperCallInstr;
  9586. }
  9587. ///----------------------------------------------------------------------------
  9588. ///
  9589. /// Lowerer::LowerArgIn
  9590. ///
  9591. /// This function checks the passed-in argument count against the index of this
  9592. /// argument and uses null for a param value if the caller didn't explicitly
  9593. /// pass anything.
  9594. ///
  9595. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerArgIn(IR::Instr *instrArgIn)
{
    IR::LabelInstr *   labelDone;
    IR::LabelInstr *   labelUndef;
    IR::LabelInstr *   labelNormal;
    IR::LabelInstr *   labelInit;
    IR::LabelInstr *   labelInitNext;
    IR::BranchInstr *  instrBranch;
    IR::Instr *        instrArgInNext;
    IR::Instr *        instrInsert;
    IR::Instr *        instrPrev;
    IR::Instr *        instrResume = nullptr;
    IR::Opnd *         dstOpnd;
    IR::Opnd *         srcOpnd;
    IR::Opnd *         opndUndef;
    Js::ArgSlot        argIndex;
    StackSym *         symParam;
    BOOLEAN            isDuplicate;
    IR::RegOpnd *      generatorArgsPtrOpnd = nullptr;

    // We start with:
    // s1 = ArgIn_A param1
    // s2 = ArgIn_A param2
    // ...
    // sn = ArgIn_A paramn
    //
    // We want to end up with:
    //
    // s1 = ArgIn_A param1            -- Note that this is unconditional
    // count = (load from param area)
    // BrLt_A $start, count, n        -- Forward cbranch to the uncommon case
    // Br $Ln
    // $start:
    // sn = assign undef
    // BrGe_A $Ln-1, count, n-1
    // sn-1 = assign undef
    // ...
    // s2 = assign undef
    // Br $done
    // $Ln:
    // sn = assign paramn
    // $Ln-1:
    // sn-1 = assign paramn-1
    // ...
    // s2 = assign param2
    // $done:

    AnalysisAssert(instrArgIn);

    IR::Opnd *restDst = nullptr;
    bool hasRest = instrArgIn->m_opcode == Js::OpCode::ArgIn_Rest;
    if (hasRest)
    {
        IR::Instr *restInstr = instrArgIn;
        restDst = restInstr->UnlinkDst();
        if (m_func->GetJITFunctionBody()->HasImplicitArgIns() && m_func->argInsCount > 0)
        {
            // There are regular ArgIn's before the rest ArgIn; walk back to the
            // last one so the shared prologue below handles everything, and let
            // it lower the rest parameter at the end (see the hasRest block below).
            while (instrArgIn->m_opcode != Js::OpCode::ArgIn_A)
            {
                instrArgIn = instrArgIn->m_prev;
                if (instrResume == nullptr)
                {
                    instrResume = instrArgIn;
                }
            }
            restInstr->Remove();
        }
        else
        {
            // Rest is the only ArgIn: compute the excess-arg count and create
            // the rest array directly.
            Assert(instrArgIn->m_func == this->m_func);
            IR::Instr * instrCount = m_lowererMD.LoadInputParamCount(instrArgIn, -this->m_func->GetInParamsCount());
            IR::Opnd * excessOpnd = instrCount->GetDst();

            IR::LabelInstr *createRestArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

            // BrGe $createRestArray, excess, 0
            InsertCompareBranch(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), Js::OpCode::BrGe_A, createRestArrayLabel, instrArgIn);

            // MOV excess, 0  (clamp a negative excess so the array is empty)
            InsertMove(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), instrArgIn);

            // $createRestArray
            instrArgIn->InsertBefore(createRestArrayLabel);

            if (m_func->GetJITFunctionBody()->IsCoroutine())
            {
                generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrArgIn);
            }

            IR::IntConstOpnd * formalsOpnd = IR::IntConstOpnd::New(this->m_func->GetInParamsCount(), TyUint32, this->m_func);
            IR::Instr *prev = LowerRestParameter(formalsOpnd, restDst, excessOpnd, instrArgIn, generatorArgsPtrOpnd);
            instrArgIn->Remove();
            return prev;
        }
    }

    srcOpnd = instrArgIn->GetSrc1();
    symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
    argIndex = symParam->GetParamSlotNum();

    if (argIndex == 1)
    {
        // The "this" argument is not source-dependent and doesn't need to be checked.
        if (m_func->GetJITFunctionBody()->IsCoroutine())
        {
            generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrArgIn);
            ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
        }
        m_lowererMD.ChangeToAssign(instrArgIn);
        return instrResume == nullptr ? instrArgIn->m_prev : instrResume;
    }

    Js::ArgSlot formalsCount = this->m_func->GetInParamsCount();

    AssertMsg(argIndex <= formalsCount, "Expect to see the ArgIn's within the range of the formals");

    // Because there may be instructions between the ArgIn's, such as saves to the frame object,
    // we find the top of the sequence of ArgIn's and insert everything there. This assumes that
    // ArgIn's use param symbols as src's and not the results of previous instructions.
    instrPrev = instrArgIn;
    Js::ArgSlot currArgInCount = 0;
    Assert(this->m_func->argInsCount > 0);

    while (currArgInCount < this->m_func->argInsCount - 1)
    {
        instrPrev = instrPrev->m_prev;
        if (instrPrev->m_opcode == Js::OpCode::ArgIn_A)
        {
            srcOpnd = instrPrev->GetSrc1();
            symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
            AssertMsg(symParam->GetParamSlotNum() < argIndex, "ArgIn's not in numerical order");
            argIndex = symParam->GetParamSlotNum();
            currArgInCount++;
        }
        else
        {
            // Make sure that this instruction gets lowered.
            if (instrResume == nullptr)
            {
                instrResume = instrPrev;
            }
        }
    }

    // The loading of parameters will be inserted above this instruction.
    instrInsert = instrPrev;
    AnalysisAssert(instrInsert);
    if (instrResume == nullptr)
    {
        // We found no intervening non-ArgIn's, so lowering can resume at the previous instruction.
        instrResume = instrInsert->m_prev;
    }

    // Now insert all the checks and undef-assigns.
    if (m_func->GetJITFunctionBody()->IsCoroutine())
    {
        generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrInsert);
    }

    // excessOpnd = (load from param area) - formalCounts
    IR::Instr * instrCount = this->m_lowererMD.LoadInputParamCount(instrInsert, -formalsCount, true);
    IR::Opnd * excessOpnd = instrCount->GetDst();

    // BrLt $undef: the caller passed fewer actuals than formals.
    labelUndef = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/ true);
    Lowerer::InsertBranch(Js::OpCode::BrLt_A, labelUndef, instrInsert);

    // Br $Ln
    labelNormal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    labelInit = labelNormal;
    instrBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelNormal, this->m_func);
    instrInsert->InsertBefore(instrBranch);

    // Insert the labels
    instrInsert->InsertBefore(labelUndef);
    instrInsert->InsertBefore(labelNormal);

    //Adjustment for deadstore of ArgIn_A
    // (trailing ArgIn_A's may have been dead-stored away; account for the
    // missing slots by bumping the excess count once per missing slot)
    Js::ArgSlot highestSlotNum = instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();
    Js::ArgSlot missingSlotNums = this->m_func->GetInParamsCount() - highestSlotNum;
    Assert(missingSlotNums >= 0);
    while (missingSlotNums > 0)
    {
        InsertAdd(true, excessOpnd, excessOpnd, IR::IntConstOpnd::New(1, TyMachReg, this->m_func), labelNormal);
        Lowerer::InsertBranch(Js::OpCode::BrEq_A, labelNormal, labelNormal);
        missingSlotNums--;
    }

    // MOV undefReg, undefAddress
    IR::Opnd* opndUndefAddress = this->LoadLibraryValueOpnd(labelNormal, LibraryValue::ValueUndefined);
    opndUndef =  IR::RegOpnd::New(TyMachPtr, this->m_func);
    Lowerer::InsertMove(opndUndef, opndUndefAddress, labelNormal);

    // Track formal dst syms so duplicated formals (e.g. function foo(x, x))
    // are initialized only once.
    BVSparse<JitArenaAllocator> *formalsBv = JitAnew(this->m_alloc, BVSparse<JitArenaAllocator>, this->m_alloc);

    while (currArgInCount > 0)
    {
        dstOpnd = instrArgIn->GetDst();

        Assert(dstOpnd->IsRegOpnd());
        isDuplicate = formalsBv->TestAndSet(dstOpnd->AsRegOpnd()->m_sym->AsStackSym()->m_id);

        // Now insert the undef initialization before the "normal" label
        // sn = assign undef
        Lowerer::InsertMove(dstOpnd, opndUndef, labelNormal);

        // INC excessOpnd
        // BrEq_A $Ln-1
        currArgInCount--;

        labelInitNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

        // And insert the "normal" initialization before the "done" label
        // sn = assign paramn
        // $Ln-1:
        labelInit->InsertAfter(labelInitNext);
        labelInit = labelInitNext;

        instrArgInNext = instrArgIn->m_prev;
        instrArgIn->Unlink();

        Js::ArgSlot prevParamSlotNum = instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();

        // function foo(x, x) { use(x); }
        // This should refer to the second 'x'. Since we reverse the order here however, we need to skip
        // the initialization of the first 'x' to not override the one for the second. WOOB:1105504
        if (isDuplicate)
        {
            instrArgIn->Free();
        }
        else
        {
            ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
            labelInit->InsertBefore(instrArgIn);
            this->m_lowererMD.ChangeToAssign(instrArgIn);
        }

        // Walk back to the next ArgIn_A above this one.
        instrArgIn = instrArgInNext;
        while (instrArgIn->m_opcode != Js::OpCode::ArgIn_A)
        {
            instrArgIn = instrArgIn->m_prev;
            AssertMsg(instrArgIn, "???");
        }

        //Adjustment for deadstore of ArgIn_A
        // (non-consecutive slot numbers mean intervening ArgIn's were
        // dead-stored; emit one count-check per skipped slot)
        Js::ArgSlot currParamSlotNum = instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();
        Js::ArgSlot diffSlotsNum = prevParamSlotNum - currParamSlotNum;
        AssertMsg(diffSlotsNum > 0, "Argins are not in order?");

        while (diffSlotsNum > 0)
        {
            InsertAdd(true, excessOpnd, excessOpnd, IR::IntConstOpnd::New(1, TyMachReg, this->m_func), labelNormal);
            InsertBranch(Js::OpCode::BrEq_A, labelInitNext, labelNormal);
            diffSlotsNum--;
        }

        AssertMsg(instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum() <= formalsCount,
            "Expect all ArgIn's to be in numerical order by param slot");
    }

    // Insert final undef and normal initializations, jumping unconditionally to the end
    // rather than checking against the decremented formals count as we did inside the loop above.

    // s2 = assign undef
    dstOpnd = instrArgIn->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    isDuplicate = formalsBv->TestAndSet(dstOpnd->AsRegOpnd()->m_sym->AsStackSym()->m_id);
    Lowerer::InsertMove(dstOpnd, opndUndef, labelNormal);

    if (hasRest)
    {
        // All actuals were consumed as formals; the rest array is empty.
        InsertMove(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), labelNormal);
    }

    // Br $done
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instrBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    labelNormal->InsertBefore(instrBranch);

    // s2 = assign param2
    // $done:
    labelInit->InsertAfter(labelDone);

    if (hasRest)
    {
        // The formals count has been tainted, so restore it before lowering rest
        IR::IntConstOpnd * formalsOpnd = IR::IntConstOpnd::New(this->m_func->GetInParamsCount(), TyUint32, this->m_func);
        LowerRestParameter(formalsOpnd, restDst, excessOpnd, labelDone, generatorArgsPtrOpnd);
    }

    instrArgIn->Unlink();
    if (isDuplicate)
    {
        instrArgIn->Free();
    }
    else
    {
        ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
        labelDone->InsertBefore(instrArgIn);
        this->m_lowererMD.ChangeToAssign(instrArgIn);
    }

    JitAdelete(this->m_alloc, formalsBv);

    return instrResume;
}
  9856. void
  9857. Lowerer::ConvertArgOpndIfGeneratorFunction(IR::Instr *instrArgIn, IR::RegOpnd *generatorArgsPtrOpnd)
  9858. {
  9859. if (this->m_func->GetJITFunctionBody()->IsCoroutine())
  9860. {
  9861. // Replace stack param operand with offset into arguments array held by
  9862. // the generator object.
  9863. IR::Opnd * srcOpnd = instrArgIn->UnlinkSrc1();
  9864. StackSym * symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
  9865. Js::ArgSlot argIndex = symParam->GetParamSlotNum();
  9866. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorArgsPtrOpnd, (argIndex - 1) * MachPtr, TyMachPtr, this->m_func);
  9867. srcOpnd->Free(this->m_func);
  9868. instrArgIn->SetSrc1(indirOpnd);
  9869. }
  9870. }
  9871. IR::RegOpnd *
  9872. Lowerer::LoadGeneratorArgsPtr(IR::Instr *instrInsert)
  9873. {
  9874. IR::Instr * instr = LoadGeneratorObject(instrInsert);
  9875. IR::RegOpnd * generatorRegOpnd = instr->GetDst()->AsRegOpnd();
  9876. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorRegOpnd, Js::JavascriptGenerator::GetArgsPtrOffset(), TyMachPtr, instrInsert->m_func);
  9877. IR::RegOpnd * argsPtrOpnd = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
  9878. Lowerer::InsertMove(argsPtrOpnd, indirOpnd, instrInsert);
  9879. return argsPtrOpnd;
  9880. }
  9881. IR::Instr *
  9882. Lowerer::LoadGeneratorObject(IR::Instr * instrInsert)
  9883. {
  9884. StackSym * generatorSym = StackSym::NewImplicitParamSym(3, instrInsert->m_func);
  9885. instrInsert->m_func->SetArgOffset(generatorSym, LowererMD::GetFormalParamOffset() * MachPtr);
  9886. IR::SymOpnd * generatorSymOpnd = IR::SymOpnd::New(generatorSym, TyMachPtr, instrInsert->m_func);
  9887. IR::RegOpnd * generatorRegOpnd = IR::RegOpnd::New(TyMachPtr, instrInsert->m_func);
  9888. instrInsert->m_func->SetHasImplicitParamLoad();
  9889. return Lowerer::InsertMove(generatorRegOpnd, generatorSymOpnd, instrInsert);
  9890. }
  9891. IR::Instr *
  9892. Lowerer::LowerArgInAsmJs(IR::Instr * instr)
  9893. {
  9894. Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
  9895. Assert(instr && instr->m_opcode == Js::OpCode::ArgIn_A);
  9896. IR::Instr* instrPrev = instr->m_prev;
  9897. m_lowererMD.ChangeToAssign(instr);
  9898. return instrPrev;
  9899. }
// Attempts to generate an inline fast path in the lowerer for a call whose
// target is a known built-in library function (Char[Code]At, Math.abs,
// Array.prototype.push, String.prototype.replace). The fast path is guarded by
// an identity check of the call target against the expected built-in function
// object; on mismatch control falls to the original call at labelHelper.
// Returns false when the call shape disqualifies the fast path; otherwise
// returns the per-built-in generator's success value.
bool
Lowerer::InlineBuiltInLibraryCall(IR::Instr *callInstr)
{
    IR::Opnd *src1 = callInstr->GetSrc1();
    IR::Opnd *src2 = callInstr->GetSrc2();
    // Get the arg count by looking at the slot number of the last arg symbol.
    if (!src2->IsSymOpnd())
    {
        // No args? Not sure this is possible, but handle it.
        return false;
    }
    StackSym *argLinkSym = src2->AsSymOpnd()->m_sym->AsStackSym();
    // Subtract "this" from the arg count.
    IntConstType argCount = argLinkSym->GetArgSlotNum() - 1;
    // Find the callee's built-in index (if any).
    Js::BuiltinFunction index = Func::GetBuiltInIndex(src1);
    // Warning!
    // Don't add new built-in to following switch. Built-ins needs to be inlined in call direct way.
    // Following is only for prejit scenarios where we don't get inlining always and generate fast path in lowerer.
    // Generating fastpath here misses fixed functions and globopt optimizations.
    switch(index)
    {
    case Js::BuiltinFunction::JavascriptString_CharAt:
    case Js::BuiltinFunction::JavascriptString_CharCodeAt:
        if (argCount != 1)
        {
            return false;
        }
        if (!callInstr->GetDst())
        {
            // Optimization of Char[Code]At assumes result is used.
            return false;
        }
        break;
    case Js::BuiltinFunction::Math_Abs:
#ifdef _M_IX86
        // The x86 fast path for Abs requires SSE2.
        if (!AutoSystemInfo::Data.SSE2Available())
        {
            return false;
        }
#endif
        if (argCount != 1)
        {
            return false;
        }
        if (!callInstr->GetDst())
        {
            // Optimization of Abs assumes result is used.
            return false;
        }
        break;
    case Js::BuiltinFunction::JavascriptArray_Push:
    {
        if (argCount != 1)
        {
            return false;
        }
        if (callInstr->GetDst())
        {
            // Optimization of push assumes result is unused.
            return false;
        }
        // Walk two links up the argout chain to reach the "this" (array) operand.
        StackSym *linkSym = callInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
        Assert(linkSym->IsSingleDef());
        linkSym = linkSym->m_instrDef->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
        Assert(linkSym->IsSingleDef());
        IR::Opnd *const arrayOpnd = linkSym->m_instrDef->GetSrc1();
        if(!arrayOpnd->IsRegOpnd())
        {
            // This should be rare, but needs to be handled.
            // By now, we've already started some of the inlining. Simply jmp to the helper.
            // The branch will get peeped later.
            return false;
        }
        if(!ShouldGenerateArrayFastPath(arrayOpnd, false, false, false) ||
            arrayOpnd->GetValueType().IsLikelyNativeArray())
        {
            // Rejecting native array for now, since we have to do a FromVar at the call site and bail out.
            return false;
        }
        break;
    }
    case Js::BuiltinFunction::JavascriptString_Replace:
    {
        if(argCount != 2)
        {
            return false;
        }
        if(!ShouldGenerateStringReplaceFastPath(callInstr, argCount))
        {
            return false;
        }
        break;
    }
    default:
        return false;
    }
    Assert(Func::IsBuiltInInlinedInLowerer(callInstr->GetSrc1()));
    // Runtime guard: compare the call target against the expected built-in
    // function object; on mismatch, branch to the helper-path label.
    IR::Opnd *callTargetOpnd = callInstr->GetSrc1();
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Opnd *objRefOpnd = IR::MemRefOpnd::New((void*)this->GetObjRefForBuiltInTarget(callTargetOpnd->AsRegOpnd()), TyMachReg, this->m_func);
    InsertCompareBranch(callTargetOpnd, objRefOpnd, Js::OpCode::BrNeq_A, labelHelper, callInstr);
    callInstr->InsertBefore(labelHelper);
    Assert(argCount <= 2);
    // Collect the actual argument operands ("this" ends up at index 0) while
    // moving the argout instructions onto the helper path after labelHelper.
    IR::Opnd *argsOpnd[3];
    IR::Opnd *linkOpnd = callInstr->GetSrc2();
    while(linkOpnd->IsSymOpnd())
    {
        IR::SymOpnd * symOpnd = linkOpnd->AsSymOpnd();
        StackSym *sym = symOpnd->m_sym->AsStackSym();
        Assert(sym->m_isSingleDef);
        IR::Instr *argInstr = sym->m_instrDef;
        Assert(argCount >= 0);
        argsOpnd[argCount] = argInstr->GetSrc1();
        argCount--;
        argInstr->Unlink();
        labelHelper->InsertAfter(argInstr);
        linkOpnd = argInstr->GetSrc2();
    }
    // The loop must have consumed "this" plus all args.
    AnalysisAssert(argCount == -1);
    // Move startcall
    Assert(linkOpnd->IsRegOpnd());
    StackSym *sym = linkOpnd->AsRegOpnd()->m_sym;
    Assert(sym->m_isSingleDef);
    IR::Instr *startCall = sym->m_instrDef;
    Assert(startCall->m_opcode == Js::OpCode::StartCall);
    startCall->Unlink();
    labelHelper->InsertAfter(startCall);
    // $doneLabel:
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    callInstr->InsertAfter(doneLabel);
    // Emit the per-built-in fast path; each generator is handed labelHelper as
    // its bailout target and doneLabel as its join point.
    bool success = true;
    switch(index)
    {
    case Js::BuiltinFunction::Math_Abs:
        this->m_lowererMD.GenerateFastAbs(callInstr->GetDst(), argsOpnd[1], callInstr, labelHelper, labelHelper, doneLabel);
        break;
    case Js::BuiltinFunction::JavascriptString_CharCodeAt:
    case Js::BuiltinFunction::JavascriptString_CharAt:
        success = GenerateFastCharAt(index, callInstr->GetDst(), argsOpnd[0], argsOpnd[1],
            callInstr, labelHelper, labelHelper, doneLabel);
        break;
    case Js::BuiltinFunction::JavascriptArray_Push:
        success = GenerateFastPush(argsOpnd[0], argsOpnd[1], callInstr, labelHelper, labelHelper, nullptr, doneLabel);
        break;
    case Js::BuiltinFunction::JavascriptString_Replace:
        success = GenerateFastReplace(argsOpnd[0], argsOpnd[1], argsOpnd[2], callInstr, labelHelper, labelHelper, doneLabel);
        break;
    default:
        Assert(UNREACHED);
    }
    // Fast path complete: skip over the helper-path call to doneLabel.
    IR::Instr *instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, doneLabel, this->m_func);
    labelHelper->InsertBefore(instr);
    return success;
}
  10055. // Perform lowerer part of inlining built-in function.
  10056. // For details, see inline.cpp.
  10057. //
  10058. // Description of changes here (note that taking care of Argouts are similar to InlineeStart):
  10059. // - Move ArgOut_A_InlineBuiltIn next to the call instr -- used by bailout processing in register allocator.
  10060. // - Remove StartCall and InlineBuiltInStart for this call.
  10061. // Before:
  10062. // StartCall fn
  10063. // d1 = BIA s1, link1
  10064. // ...
  10065. // InlineBuiltInStart fn, link0
  10066. // After:
  10067. // ...
  10068. // d1 = BIA s1, NULL
  10069. void Lowerer::LowerInlineBuiltIn(IR::Instr* builtInEndInstr)
  10070. {
  10071. Assert(builtInEndInstr->m_opcode == Js::OpCode::InlineBuiltInEnd || builtInEndInstr->m_opcode == Js::OpCode::InlineNonTrackingBuiltInEnd);
  10072. IR::Instr* startCallInstr = nullptr;
  10073. builtInEndInstr->IterateArgInstrs([&](IR::Instr* argInstr) {
  10074. startCallInstr = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
  10075. return false;
  10076. });
  10077. // Keep the startCall around as bailout refers to it. Just unlink it for now - do not delete it.
  10078. startCallInstr->Unlink();
  10079. builtInEndInstr->Remove();
  10080. }
  10081. intptr_t
  10082. Lowerer::GetObjRefForBuiltInTarget(IR::RegOpnd * regOpnd)
  10083. {
  10084. intptr_t mathFns = m_func->GetScriptContextInfo()->GetBuiltinFunctionsBaseAddr();
  10085. Js::BuiltinFunction index = regOpnd->m_sym->m_builtInIndex;
  10086. AssertMsg(index < Js::BuiltinFunction::Count, "Invalid built-in index on a call target marked as built-in");
  10087. return mathFns + index;
  10088. }
// Lowers NewRegEx: allocates a Js::JavascriptRegExp (recycler or mark-temp)
// and initializes its fields inline. When the object is a mark-temp allocation
// inside a loop, the loop-invariant vtable and pattern stores are hoisted to
// the outermost loop top. src1 is the address-constant regex pattern.
IR::Instr *
Lowerer::LowerNewRegEx(IR::Instr * instr)
{
    IR::Opnd *src1 = instr->UnlinkSrc1();
    Assert(src1->IsAddrOpnd());
#if ENABLE_REGEX_CONFIG_OPTIONS
    if (REGEX_CONFIG_FLAG(RegexTracing))
    {
        // Regex tracing wants the helper so its hooks run; no inline allocation.
        Assert(!instr->GetDst()->CanStoreTemp());
        IR::Instr * instrPrev = LoadScriptContext(instr);
        instrPrev = m_lowererMD.LoadHelperArgument(instr, src1);
        m_lowererMD.ChangeToHelperCall(instr, IR::HelperScrRegEx_OP_NewRegEx);
        return instrPrev;
    }
#endif
    IR::Instr * instrPrev = instr->m_prev;
    IR::RegOpnd * dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    // tempObjectSymOpnd is non-null when the allocation is a mark-temp on the
    // stack; isZeroed reports whether the memory came back zero-filled so the
    // init helpers can skip redundant zero/null stores.
    IR::SymOpnd * tempObjectSymOpnd;
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(instr, dstOpnd, IR::HelperAllocMemForJavascriptRegExp, sizeof(Js::JavascriptRegExp), &tempObjectSymOpnd);
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable and pattern init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            LoadVTableValueOpnd(this->outerMostLoopLabel, VTableValue::VtableJavascriptRegExp),
            this->outerMostLoopLabel, false);
    }
    else
    {
        // vtable goes at offset 0 of the object.
        GenerateMemInit(dstOpnd, 0, LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp), instr, isZeroed);
    }
    // Type, auxSlots, and objectArray fields.
    GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfType(),
        this->LoadLibraryValueOpnd(instr, LibraryValue::ValueRegexType), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfAuxSlots(), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfObjectArray(), instr, isZeroed);
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // The pattern is a constant too, so its store is hoisted alongside the vtable store.
        InsertMove(IR::SymOpnd::New(tempObjectSymOpnd->m_sym,
            tempObjectSymOpnd->m_offset + Js::JavascriptRegExp::GetOffsetOfPattern(), TyMachPtr, this->m_func),
            src1, this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfPattern(), src1, instr, isZeroed);
    }
    // Remaining fields: splitPattern, lastIndexVar, lastIndexOrFlag.
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfSplitPattern(), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), instr, isZeroed);
    GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), 0, instr, isZeroed);
    instr->Remove();
    return instrPrev;
}
  10139. IR::Instr *
  10140. Lowerer::GenerateRuntimeError(IR::Instr * insertBeforeInstr, Js::MessageId errorCode, IR::JnHelperMethod helper /*= IR::JnHelperMethod::HelperOp_RuntimeTypeError*/)
  10141. {
  10142. IR::Instr * runtimeErrorInstr = IR::Instr::New(Js::OpCode::RuntimeTypeError, this->m_func);
  10143. runtimeErrorInstr->SetSrc1(IR::IntConstOpnd::New(errorCode, TyInt32, this->m_func, true));
  10144. insertBeforeInstr->InsertBefore(runtimeErrorInstr);
  10145. return this->LowerUnaryHelperMem(runtimeErrorInstr, helper);
  10146. }
  10147. bool Lowerer::IsNullOrUndefRegOpnd(IR::RegOpnd *opnd) const
  10148. {
  10149. StackSym *sym = opnd->m_sym;
  10150. if (sym->IsIntConst() || sym->IsFloatConst())
  10151. {
  10152. return false;
  10153. }
  10154. return opnd->GetValueType().IsUndefined() || opnd->GetValueType().IsNull();
  10155. }
  10156. bool Lowerer::IsConstRegOpnd(IR::RegOpnd *opnd) const
  10157. {
  10158. StackSym *sym = opnd->m_sym;
  10159. if (sym->IsIntConst() || sym->IsFloatConst())
  10160. {
  10161. return false;
  10162. }
  10163. const auto& vt = opnd->GetValueType();
  10164. return vt.IsUndefined() || vt.IsNull() || (sym->m_isConst && vt.IsBoolean());
  10165. }
  10166. IR::Opnd * Lowerer::GetConstRegOpnd(IR::RegOpnd *opnd, IR::Instr * instr)
  10167. {
  10168. if (opnd->GetValueType().IsUndefined())
  10169. {
  10170. return this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
  10171. }
  10172. if (opnd->GetValueType().IsNull())
  10173. {
  10174. return this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull);
  10175. }
  10176. Assert(opnd->GetValueType().IsBoolean());
  10177. return opnd->GetStackSym()->GetInstrDef()->GetSrc1()->AsAddrOpnd();
  10178. }
  10179. bool
  10180. Lowerer::HasSideEffects(IR::Instr *instr)
  10181. {
  10182. if (LowererMD::IsCall(instr))
  10183. {
  10184. #ifdef _M_IX86
  10185. IR::Opnd *src1 = instr->GetSrc1();
  10186. if (src1->IsHelperCallOpnd())
  10187. {
  10188. IR::HelperCallOpnd * helper = src1->AsHelperCallOpnd();
  10189. switch(helper->m_fnHelper)
  10190. {
  10191. case IR::HelperOp_Int32ToAtomInPlace:
  10192. case IR::HelperOp_Int32ToAtom:
  10193. case IR::HelperOp_UInt32ToAtom:
  10194. return false;
  10195. }
  10196. }
  10197. #endif
  10198. return true;
  10199. }
  10200. return instr->HasAnySideEffects();
  10201. }
  10202. bool Lowerer::IsArgSaveRequired(Func *func) {
  10203. return (!func->IsTrueLeaf() || func->IsJitInDebugMode() ||
  10204. // GetHasImplicitParamLoad covers generators, asmjs,
  10205. // and other javascript functions that implicitly read from the arg stack slots
  10206. func->GetHasThrow() || func->GetHasImplicitParamLoad() || func->HasThis() || func->argInsCount > 0);
  10207. }
// Inline fast path for Math.random(). On x64 with a pre-seeded PRNG the
// xorshift128+ step (shift constants 23 / 17 / 26) is emitted inline, updating
// the library's two seed words, then the sum's low 52 bits are packed into a
// double in [1, 2) by OR-ing the exponent bits of 1.0 (0x3FF0...) and 1.0 is
// subtracted to land in [0, 1). On other targets (or unseeded) it falls back
// to the HelperDirectMath_Random helper call.
IR::Instr*
Lowerer::GenerateFastInlineBuiltInMathRandom(IR::Instr* instr)
{
    AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
    IR::Instr* retInstr = instr->m_prev;
    IR::Opnd* dst = instr->GetDst();
#if defined(_M_X64)
    if (m_func->GetScriptContextInfo()->IsPRNGSeeded())
    {
        const uint64 mExp = 0x3FF0000000000000;   // IEEE-754 exponent bits of 1.0
        const uint64 mMant = 0x000FFFFFFFFFFFFF;  // 52-bit mantissa mask
        IR::RegOpnd* r0 = IR::RegOpnd::New(TyUint64, m_func); // s0
        IR::RegOpnd* r1 = IR::RegOpnd::New(TyUint64, m_func); // s1
        IR::RegOpnd* r3 = IR::RegOpnd::New(TyUint64, m_func); // helper uint64 reg
        IR::RegOpnd* r4 = IR::RegOpnd::New(TyFloat64, m_func); // helper float64 reg
        // ===========================================================
        // s0 = scriptContext->GetLibrary()->GetRandSeed1();
        // s1 = scriptContext->GetLibrary()->GetRandSeed0();
        // ===========================================================
        this->InsertMove(r0,
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed1Offset(), TyUint64, instr->m_func), instr);
        this->InsertMove(r1,
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed0Offset(), TyUint64, instr->m_func), instr);
        // ===========================================================
        // s1 ^= s1 << 23;
        // ===========================================================
        this->InsertMove(r3, r1, instr);
        this->InsertShift(Js::OpCode::Shl_A, false, r3, r3, IR::IntConstOpnd::New(23, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);
        // ===========================================================
        // s1 ^= s1 >> 17;
        // ===========================================================
        this->InsertMove(r3, r1, instr);
        this->InsertShift(Js::OpCode::ShrU_A, false, r3, r3, IR::IntConstOpnd::New(17, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);
        // ===========================================================
        // s1 ^= s0;
        // ===========================================================
        this->InsertXor(r1, r1, r0, instr);
        // ===========================================================
        // s1 ^= s0 >> 26;
        // ===========================================================
        this->InsertMove(r3, r0, instr);
        this->InsertShift(Js::OpCode::ShrU_A, false, r3, r3, IR::IntConstOpnd::New(26, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);
        // ===========================================================
        // scriptContext->GetLibrary()->SetRandSeed0(s0);
        // scriptContext->GetLibrary()->SetRandSeed1(s1);
        // ===========================================================
        this->InsertMove(
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed0Offset(), TyUint64, m_func), r0, instr);
        this->InsertMove(
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed1Offset(), TyUint64, m_func), r1, instr);
        // ===========================================================
        // dst = bit_cast<float64>(((s0 + s1) & mMant) | mExp);
        // ===========================================================
        this->InsertAdd(false, r1, r1, r0, instr);
        this->InsertMove(r3, IR::IntConstOpnd::New(mMant, TyInt64, m_func, true), instr);
        this->InsertAnd(r1, r1, r3, instr);
        this->InsertMove(r3, IR::IntConstOpnd::New(mExp, TyInt64, m_func, true), instr);
        this->InsertOr(r1, r1, r3, instr);
        this->InsertMoveBitCast(dst, r1, instr);
        // ===================================================================
        // dst -= 1.0;
        // ===================================================================
        this->InsertMove(r4, IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleOnePointZeroAddr(), TyFloat64, m_func, IR::AddrOpndKindDynamicDoubleRef), instr);
        this->InsertSub(false, dst, dst, r4, instr);
    }
    else
#endif
    {
        // Helper fallback: the helper call needs a register destination, so
        // route through a temp when dst is not already a register.
        IR::Opnd* tmpdst = dst;
        if (!dst->IsRegOpnd())
        {
            tmpdst = IR::RegOpnd::New(dst->GetType(), instr->m_func);
        }
        LoadScriptContext(instr);
        IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, tmpdst, instr->m_func);
        instr->InsertBefore(helperCallInstr);
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperDirectMath_Random);
        if (tmpdst != dst)
        {
            InsertMove(dst, tmpdst, instr);
        }
    }
    instr->Remove();
    return retInstr;
}
  10296. IR::Instr *
  10297. Lowerer::LowerCallDirect(IR::Instr * instr)
  10298. {
  10299. IR::Opnd* linkOpnd = instr->UnlinkSrc2();
  10300. StackSym *linkSym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
  10301. IR::Instr* argInstr = linkSym->m_instrDef;
  10302. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_InlineSpecialized);
  10303. IR::Opnd* funcObj = argInstr->UnlinkSrc1();
  10304. instr->SetSrc2(argInstr->UnlinkSrc2());
  10305. argInstr->Remove();
  10306. if (instr->HasBailOutInfo() && !instr->HasLazyBailOut())
  10307. {
  10308. IR::Instr * bailOutInstr = this->SplitBailOnImplicitCall(instr, instr->m_next, instr->m_next);
  10309. this->LowerBailOnEqualOrNotEqual(bailOutInstr);
  10310. }
  10311. Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
  10312. return this->GenerateDirectCall(instr, funcObj, (ushort)flags);
  10313. }
  10314. IR::Instr *
  10315. Lowerer::GenerateDirectCall(IR::Instr* inlineInstr, IR::Opnd* funcObj, ushort callflags)
  10316. {
  10317. int32 argCount = m_lowererMD.LowerCallArgs(inlineInstr, callflags);
  10318. m_lowererMD.LoadHelperArgument(inlineInstr, funcObj);
  10319. m_lowererMD.LowerCall(inlineInstr, (Js::ArgSlot)argCount); //to account for function object and callinfo
  10320. return inlineInstr->m_prev;
  10321. }
  10322. /*
  10323. * GenerateHelperToArrayPushFastPath
  10324. * Generates Helper Call and pushes arguments to the Push HelperCall
  10325. */
  10326. IR::Instr *
  10327. Lowerer::GenerateHelperToArrayPushFastPath(IR::Instr * instr, IR::LabelInstr * bailOutLabelHelper)
  10328. {
  10329. IR::Opnd * arrayHelperOpnd = instr->UnlinkSrc1();
  10330. IR::Opnd * elementHelperOpnd = instr->UnlinkSrc2();
  10331. IR::JnHelperMethod helperMethod;
  10332. if(elementHelperOpnd->IsInt32())
  10333. {
  10334. Assert(arrayHelperOpnd->GetValueType().IsLikelyNativeIntArray());
  10335. helperMethod = IR::HelperArray_NativeIntPush;
  10336. m_lowererMD.LoadHelperArgument(instr, elementHelperOpnd);
  10337. }
  10338. else if(elementHelperOpnd->IsFloat())
  10339. {
  10340. Assert(arrayHelperOpnd->GetValueType().IsLikelyNativeFloatArray());
  10341. helperMethod = IR::HelperArray_NativeFloatPush;
  10342. m_lowererMD.LoadDoubleHelperArgument(instr, elementHelperOpnd);
  10343. }
  10344. else
  10345. {
  10346. helperMethod = IR::HelperArray_VarPush;
  10347. m_lowererMD.LoadHelperArgument(instr, elementHelperOpnd);
  10348. }
  10349. m_lowererMD.LoadHelperArgument(instr, arrayHelperOpnd);
  10350. LoadScriptContext(instr);
  10351. return m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  10352. }
  10353. /*
  10354. * GenerateHelperToArrayPopFastPath
  10355. * Generates Helper Call and pushes arguments to the Pop HelperCall
  10356. */
  10357. IR::Instr *
  10358. Lowerer::GenerateHelperToArrayPopFastPath(IR::Instr * instr, IR::LabelInstr * doneLabel, IR::LabelInstr * bailOutLabelHelper)
  10359. {
  10360. IR::Opnd * arrayHelperOpnd = instr->UnlinkSrc1();
  10361. ValueType arrayValueType = arrayHelperOpnd->GetValueType();
  10362. IR::JnHelperMethod helperMethod;
  10363. //Decide the helperMethod based on dst availability and nativity of the array.
  10364. if(arrayValueType.IsLikelyNativeArray() && !instr->GetDst())
  10365. {
  10366. helperMethod = IR::HelperArray_NativePopWithNoDst;
  10367. }
  10368. else if(arrayValueType.IsLikelyNativeIntArray())
  10369. {
  10370. helperMethod = IR::HelperArray_NativeIntPop;
  10371. }
  10372. else if(arrayValueType.IsLikelyNativeFloatArray())
  10373. {
  10374. helperMethod = IR::HelperArray_NativeFloatPop;
  10375. }
  10376. else
  10377. {
  10378. helperMethod = IR::HelperArray_VarPop;
  10379. }
  10380. m_lowererMD.LoadHelperArgument(instr, arrayHelperOpnd);
  10381. //We do not need scriptContext for HelperArray_NativePopWithNoDst call.
  10382. if(helperMethod != IR::HelperArray_NativePopWithNoDst)
  10383. {
  10384. LoadScriptContext(instr);
  10385. }
  10386. IR::Instr * retInstr = m_lowererMD.ChangeToHelperCall(instr, helperMethod, bailOutLabelHelper);
  10387. //We don't need missing item check for var arrays, as there it is taken care by the helper.
  10388. if(arrayValueType.IsLikelyNativeArray())
  10389. {
  10390. if(retInstr->GetDst())
  10391. {
  10392. //Do this check only for native arrays with Dst. For Var arrays, this is taken care in the Runtime helper itself.
  10393. InsertMissingItemCompareBranch(retInstr->GetDst(), Js::OpCode::BrNeq_A, doneLabel, bailOutLabelHelper);
  10394. }
  10395. else
  10396. {
  10397. //We need unconditional jump to doneLabel, if there is no dst in Pop instr.
  10398. InsertBranch(Js::OpCode::Br, true, doneLabel, bailOutLabelHelper);
  10399. }
  10400. }
  10401. return retInstr;
  10402. }
// Lowers a BrTrue_A/BrFalse_A that may carry bailout info. The implicit-call
// bailout is split off the branch and lowered first; if the branch also holds
// an aux debugger bailout (shared bailout), a BailForDebugger instruction
// reusing the same BailOutInfo is inserted between the helper call and the
// implicit-call check. Finally the conditional branch itself is lowered by
// the MD layer.
IR::Instr *
Lowerer::LowerCondBranchCheckBailOut(IR::BranchInstr * branchInstr, IR::Instr * helperCall, bool isHelper)
{
    Assert(branchInstr->m_opcode == Js::OpCode::BrTrue_A || branchInstr->m_opcode == Js::OpCode::BrFalse_A);
    if (branchInstr->HasBailOutInfo())
    {
#ifdef ENABLE_SCRIPT_DEBUGGING
        IR::BailOutKind debuggerBailOutKind = IR::BailOutInvalid;
        if (branchInstr->HasAuxBailOut())
        {
            // We have shared debugger bailout. For branches we lower it here, not in SplitBailForDebugger.
            // See SplitBailForDebugger for details.
            AssertMsg(!(branchInstr->GetBailOutKind() & IR::BailOutForDebuggerBits), "There should be no debugger bits in main bailout kind.");
            debuggerBailOutKind = branchInstr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits;
            AssertMsg((debuggerBailOutKind & ~(IR::BailOutIgnoreException | IR::BailOutForceByFlag)) == 0, "Only IR::BailOutIgnoreException|ForceByFlag supported here.");
        }
#endif
        // Split the implicit-call bailout off the branch and lower it.
        IR::Instr * bailOutInstr = this->SplitBailOnImplicitCall(branchInstr, helperCall, branchInstr);
        IR::Instr* prevInstr = this->LowerBailOnEqualOrNotEqual(bailOutInstr, branchInstr, nullptr, nullptr, isHelper);
#ifdef ENABLE_SCRIPT_DEBUGGING
        if (debuggerBailOutKind != IR::BailOutInvalid)
        {
            // Note that by this time implicit calls bailout is already lowered.
            // What we do here is use same bailout info and lower debugger bailout which would be shared bailout.
            BailOutInfo* bailOutInfo = bailOutInstr->GetBailOutInfo();
            IR::BailOutInstr* debuggerBailoutInstr = IR::BailOutInstr::New(
                Js::OpCode::BailForDebugger, debuggerBailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
            prevInstr->InsertAfter(debuggerBailoutInstr);
            // The result of that is:
            // original helper op_* instr, then debugger bailout, then implicit calls bailout/etc with the branch instr.
            // Example:
            // s35(eax).i32 = CALL Op_GreaterEqual.u32 # -- original op_* helper
            // s34.i32 = MOV s35(eax).i32 #
            // BailForDebugger # Bailout: #0042 (BailOutIgnoreException) -- the debugger bailout
            // CMP [0x0003BDE0].i8, 1 (0x1).i8 # -- implicit calls check
            // JEQ $L10 #
            //$L11: [helper] #
            // CALL SaveAllRegistersAndBranchBailOut.u32 # Bailout: #0042 (BailOutOnImplicitCalls)
            // JMP $L5 #
            //$L10: [helper] #
            // BrFalse_A $L3, s34.i32 #0034 -- The BrTrue/BrFalse branch (branch instr)
            //$L6: [helper] #0042
            this->LowerBailForDebugger(debuggerBailoutInstr, isHelper);
            // After lowering this we will have a check which on bailout condition will JMP to $L11.
        }
#else
        // Reference prevInstr to avoid an unused-variable warning in non-debugger builds.
        (prevInstr);
#endif
    }
    return m_lowererMD.LowerCondBranch(branchInstr);
}
  10454. IR::SymOpnd *
  10455. Lowerer::LoadCallInfo(IR::Instr * instrInsert)
  10456. {
  10457. IR::SymOpnd * srcOpnd;
  10458. Func * func = instrInsert->m_func;
  10459. if (func->GetJITFunctionBody()->IsCoroutine())
  10460. {
  10461. // Generator function arguments and ArgumentsInfo are not on the stack. Instead they
  10462. // are accessed off the generator object (which is prm1).
  10463. IR::Instr *genLoadInstr = LoadGeneratorObject(instrInsert);
  10464. IR::RegOpnd * generatorRegOpnd = genLoadInstr->GetDst()->AsRegOpnd();
  10465. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorRegOpnd, Js::JavascriptGenerator::GetCallInfoOffset(), TyMachPtr, func);
  10466. IR::Instr * instr = Lowerer::InsertMove(IR::RegOpnd::New(TyMachPtr, func), indirOpnd, instrInsert);
  10467. StackSym * callInfoSym = StackSym::New(TyMachReg, func);
  10468. IR::SymOpnd * callInfoSymOpnd = IR::SymOpnd::New(callInfoSym, TyMachReg, func);
  10469. Lowerer::InsertMove(callInfoSymOpnd, instr->GetDst(), instrInsert);
  10470. srcOpnd = IR::SymOpnd::New(callInfoSym, TyMachReg, func);
  10471. }
  10472. else
  10473. {
  10474. // Otherwise callInfo is always the "second" argument.
  10475. // The stack looks like this:
  10476. //
  10477. // script param N
  10478. // ...
  10479. // script param 1
  10480. // callinfo
  10481. // function object
  10482. // return addr
  10483. // FP -> FP chain
  10484. StackSym * srcSym = LowererMD::GetImplicitParamSlotSym(1, func);
  10485. srcOpnd = IR::SymOpnd::New(srcSym, TyMachReg, func);
  10486. }
  10487. return srcOpnd;
  10488. }
// Lowers BailOnNotStackArgs (stack-args optimization for inlined apply/call).
// For a non-inlinee, emits a runtime check of the actual argument count and
// bails out when it is not below Js::InlineeCallInfo::MaxInlineeArgoutCount;
// an inlinee can never exceed that limit, so the instruction is just removed.
// Throws a rejit exception when the function has no stack args at all.
IR::Instr *
Lowerer::LowerBailOnNotStackArgs(IR::Instr * instr)
{
    if (!this->m_func->GetHasStackArgs())
    {
        // No stack args to check against; give up on the apply inlining.
        throw Js::RejitException(RejitReason::InlineApplyDisabled);
    }
    IR::Instr * prevInstr = instr->m_prev;
    // Bail out test
    // Label to skip Bailout and continue
    IR::LabelInstr * continueLabelInstr;
    IR::Instr *instrNext = instr->m_next;
    if (instrNext->IsLabelInstr())
    {
        // Reuse the label that already follows the instruction.
        continueLabelInstr = instrNext->AsLabelInstr();
    }
    else
    {
        continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
        instr->InsertAfter(continueLabelInstr);
    }
    if (!instr->m_func->IsInlinee())
    {
        //BailOut if the number of actuals (except "this" argument) is greater than or equal to 15.
        IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyUint32, instr->m_func);
        const IR::AutoReuseOpnd autoReuseldLenDstOpnd(ldLenDstOpnd, instr->m_func);
        IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd, instr->m_func);
        ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); //LdLen_A works only on stack arguments
        instr->InsertBefore(ldLen);
        this->GenerateFastRealStackArgumentsLdLen(ldLen);
        // If count < MaxInlineeArgoutCount, skip the bailout; otherwise fall
        // into the GenerateBailOut below.
        this->InsertCompareBranch(ldLenDstOpnd, IR::IntConstOpnd::New(Js::InlineeCallInfo::MaxInlineeArgoutCount, TyUint32, m_func, true), Js::OpCode::BrLt_A, true, continueLabelInstr, instr);
        this->GenerateBailOut(instr, nullptr, nullptr);
    }
    else
    {
        //For Inlined functions, we are sure actuals can't exceed Js::InlineeCallInfo::MaxInlineeArgoutCount (15).
        //No need to bail out.
        instr->Remove();
    }
    return prevInstr;
}
// Lowers BailOnNotSpreadable (inlined spread). When the value type alone shows
// the fast path cannot apply - not a likely simple array, an optimized or
// object-with-array type, a value GenerateArrayTest cannot handle, or we are
// an inlinee - a rejit exception (InlineSpreadDisabled) is thrown instead of
// emitting a bailout. Otherwise runtime checks are emitted, each branching to
// a shared bailout label: an array test, the has-no-missing-values flag, and
// a length limit of Js::InlineeCallInfo::MaxInlineeArgoutCount - 1.
IR::Instr *
Lowerer::LowerBailOnNotSpreadable(IR::Instr *instr)
{
    // We only avoid bailing out / throwing a rejit exception when the array operand is a simple, non-optimized, non-object array.
    IR::Instr * prevInstr = instr->m_prev;
    Func *func = instr->m_func;
    IR::Opnd *arraySrcOpnd = instr->UnlinkSrc1();
    IR::RegOpnd *arrayOpnd = GetRegOpnd(arraySrcOpnd, instr, func, TyMachPtr);
    const ValueType baseValueType(arrayOpnd->GetValueType());
    // Check if we can just throw a rejit exception based on valuetype alone instead of bailing out.
    if (!baseValueType.IsLikelyArray()
        || baseValueType.IsLikelyAnyOptimizedArray()
        || (baseValueType.IsLikelyObject() && (baseValueType.GetObjectType() == ObjectType::ObjectWithArray))
        // Validate that GenerateArrayTest will not fail.
        || !(baseValueType.IsUninitialized() || baseValueType.HasBeenObject())
        || m_func->IsInlinee())
    {
        throw Js::RejitException(RejitReason::InlineSpreadDisabled);
    }
    // Past this point, we will need to use a bailout.
    IR::LabelInstr *bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true /* isOpHelper */);
    // See if we can skip various array checks on value type alone
    if (!baseValueType.IsArray())
    {
        GenerateArrayTest(arrayOpnd, bailOutLabel, bailOutLabel, instr, false);
    }
    if (!(baseValueType.IsArray() && baseValueType.HasNoMissingValues()))
    {
        // Bail out unless the array's HasNoMissingValues flag is set.
        InsertTestBranch(
            IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, func),
            IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, func, true),
            Js::OpCode::BrEq_A,
            bailOutLabel,
            instr);
    }
    // Bail out when the array length exceeds the inlinee argout limit.
    IR::IndirOpnd *arrayLenPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, func);
    InsertCompareBranch(arrayLenPtrOpnd, IR::IntConstOpnd::New(Js::InlineeCallInfo::MaxInlineeArgoutCount - 1, TyUint8, func), Js::OpCode::BrGt_A, true, bailOutLabel, instr);
    // Lay out: checks fall through to skipBailOutLabel; failed checks reach
    // bailOutLabel, which precedes the bailout emitted at 'instr'.
    IR::LabelInstr *skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, skipBailOutLabel, instr);
    instr->InsertBefore(bailOutLabel);
    instr->InsertAfter(skipBailOutLabel);
    GenerateBailOut(instr);
    return prevInstr;
}
  10574. IR::Instr *
  10575. Lowerer::LowerBailOnNotPolymorphicInlinee(IR::Instr * instr)
  10576. {
  10577. Assert(instr->HasBailOutInfo() && (instr->GetBailOutKind() == IR::BailOutOnFailedPolymorphicInlineTypeCheck || instr->GetBailOutKind() == IR::BailOutOnPolymorphicInlineFunction));
  10578. IR::Instr* instrPrev = instr->m_prev;
  10579. this->GenerateBailOut(instr, nullptr, nullptr);
  10580. return instrPrev;
  10581. }
// Emits the conditional branch that jumps past the bailout when the check
// passes. 'onEqual' selects whether equality of src1/src2 means "bail out";
// 'isHelper' says whether this code is already inside a helper block (if not,
// a helper label is inserted before the bailout for block-layout consistency).
void
Lowerer::LowerBailoutCheckAndLabel(IR::Instr *instr, bool onEqual, bool isHelper)
{
    // Label to skip Bailout and continue
    IR::LabelInstr * continueLabelInstr;
    IR::Instr *instrNext = instr->m_next;
    if (instrNext->IsLabelInstr())
    {
        // Reuse a label that already follows the instruction.
        continueLabelInstr = instrNext->AsLabelInstr();
    }
    else
    {
        continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
        instr->InsertAfter(continueLabelInstr);
    }

    if(instr->GetBailOutKind() == IR::BailOutInjected)
    {
        // BailOnEqual 0, 0
        Assert(onEqual);
        Assert(instr->GetSrc1()->IsEqual(instr->GetSrc2()));
        Assert(instr->GetSrc1()->AsIntConstOpnd()->GetValue() == 0);

        // The operands cannot be equal when generating a compare (assert) but since this is for testing purposes, hoist a src.
        // Ideally, we would just create a BailOut instruction that generates a guaranteed bailout, but there seem to be issues
        // with doing this in a non-helper path. So finally, it would generate:
        //     xor s0, s0
        //     test s0, s0
        //     jnz $continue
        //   $bailout:
        //     // bailout
        //   $continue:
        instr->HoistSrc1(LowererMD::GetLoadOp(instr->GetSrc1()->GetType()));
    }

    // Branch to the continue label on the non-bailing condition; fall through into the bailout.
    InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(),
        onEqual ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, continueLabelInstr, instr);

    if (!isHelper)
    {
        // The bailout that follows lives in a helper block; mark its start with a helper label.
        IR::LabelInstr * helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        instr->InsertBefore(helperLabelInstr);
#if DBG
        helperLabelInstr->m_noLazyHelperAssert = true;
#endif
    }
}
// Lowers BailOnEqual / BailOnNotEqual: emits the compare-and-skip sequence and
// the bailout call. For BailOutOnImplicitCalls with a profiled field access,
// also updates the field's profile info on the bailout path when the implicit
// call was an accessor, so the next JIT attempt sees the accessor flag.
IR::Instr *
Lowerer::LowerBailOnEqualOrNotEqual(IR::Instr * instr,
    IR::BranchInstr *branchInstr,          // = nullptr
    IR::LabelInstr *labelBailOut,          // = nullptr
    IR::PropertySymOpnd * propSymOpnd,     // = nullptr
    bool isHelper)                         // = false
{
    IR::Instr * prevInstr = instr->m_prev;

    // Bail out test
    bool onEqual = instr->m_opcode == Js::OpCode::BailOnEqual;
    LowerBailoutCheckAndLabel(instr, onEqual, isHelper);

    // BailOutOnImplicitCalls is a post-op bailout. Since we look at the profile info for LdFld/StFld to decide whether the instruction may or may not call an accessor,
    // we need to update this profile information on the bailout path for BailOutOnImplicitCalls if the implicit call was an accessor call.
    if(propSymOpnd && ((instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCalls) && (propSymOpnd->m_inlineCacheIndex != -1) &&
        instr->m_func->HasProfileInfo())
    {
        //      result = AND implCallFlags, ~ImplicitCall_None
        //      TST result, ImplicitCall_Accessor
        //      JEQ $bail
        //      OR profiledFlags, ( FldInfo_FromAccessor | FldInfo_Polymorphic )
        //      $bail
        IR::Opnd * implicitCallFlags = GetImplicitCallFlagsOpnd();
        IR::Opnd * accessorImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_Accessor & ~Js::ImplicitCall_None, GetImplicitCallFlagsType(), instr->m_func, true);
        IR::Opnd * maskNoImplicitCall = IR::IntConstOpnd::New((Js::ImplicitCallFlags)~Js::ImplicitCall_None, GetImplicitCallFlagsType(), instr->m_func, true);
        IR::Opnd * fldInfoAccessor = IR::IntConstOpnd::New(Js::FldInfo_FromAccessor | Js::FldInfo_Polymorphic, GetFldInfoFlagsType(), instr->m_func, true);
        IR::LabelInstr * label = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);

        // Strip the ImplicitCall_None bit, then test for the accessor bit; skip the
        // profile update when no accessor call happened.
        IR::Instr * andInstr = InsertAnd(IR::RegOpnd::New(GetImplicitCallFlagsType(), instr->m_func), implicitCallFlags, maskNoImplicitCall, instr);
        InsertTestBranch(andInstr->GetDst(), accessorImplicitCall, Js::OpCode::BrEq_A, label, instr);

        // OR the accessor/polymorphic flags into the FldInfo entry for this inline cache.
        intptr_t infoAddr = instr->m_func->GetReadOnlyProfileInfo()->GetFldInfoAddr(propSymOpnd->m_inlineCacheIndex);
        IR::Opnd * profiledFlags = IR::MemRefOpnd::New(infoAddr + Js::FldInfo::GetOffsetOfFlags(), TyInt8, instr->m_func);
        InsertOr(profiledFlags, profiledFlags, fldInfoAccessor, instr);
        instr->InsertBefore(label);
    }

    this->GenerateBailOut(instr, branchInstr, labelBailOut);

    return prevInstr;
}
// Lowers BailOnNegative: bails out when the int32/uint32 src1 is negative.
// The bailout itself is emitted by LowerOneBailOutKind; this function then
// emits the sign check that jumps past it when src1 >= 0.
void Lowerer::LowerBailOnNegative(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::BailOnNegative);
    Assert(instr->HasBailOutInfo());
    Assert(!instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->GetType() == TyInt32 || instr->GetSrc1()->GetType() == TyUint32);
    Assert(!instr->GetSrc2());

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(false);

    // Emits the bailout instruction right after 'instr' and strips the bailout info from 'instr'.
    LowerOneBailOutKind(instr, instr->GetBailOutKind(), false);
    Assert(!instr->HasBailOutInfo());

    IR::Instr *insertBeforeInstr = instr->m_next;
    Func *const func = instr->m_func;

    //     test src, src
    //     jns  $skipBailOut
    InsertCompareBranch(
        instr->UnlinkSrc1(),
        IR::IntConstOpnd::New(0, TyInt32, func, true),
        Js::OpCode::BrGe_A,
        skipBailOutLabel,
        insertBeforeInstr);

    instr->Remove();
}
  10685. IR::Instr *
  10686. Lowerer::LowerBailOnNotObject(IR::Instr *instr,
  10687. IR::BranchInstr *branchInstr /* = nullptr */,
  10688. IR::LabelInstr *labelBailOut /* = nullptr */)
  10689. {
  10690. IR::Instr *prevInstr = instr->m_prev;
  10691. IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label,
  10692. m_func);
  10693. instr->InsertAfter(continueLabelInstr);
  10694. this->m_lowererMD.GenerateObjectTest(instr->UnlinkSrc1(),
  10695. instr,
  10696. continueLabelInstr,
  10697. /* fContinueLabel = */ true);
  10698. this->GenerateBailOut(instr, branchInstr, labelBailOut);
  10699. return prevInstr;
  10700. }
// Lowers CheckIsFuncObj (and CheckFuncInfo when checkFuncInfo is true):
// expands the instruction into object-check, function-type-id-check, and
// optionally function-info-check bailout instructions, then removes the
// placeholder instruction while keeping its BailOutInfo alive.
IR::Instr*
Lowerer::LowerCheckIsFuncObj(IR::Instr *instr, bool checkFuncInfo)
{
    // The CheckIsFuncObj instr and CheckFuncInfo instr (checkFuncInfo = true) are used to
    // generate bailout instrs that type check a function (and can also check the func info).
    // Rather than creating these bailout instrs in Inline, they are created in Lower because
    // CheckIsFuncObj and CheckFuncInfo instrs can be hoisted outside of loops and thus the
    // bailout instrs created can exist outside of loops.
    IR::RegOpnd *funcOpnd = instr->GetSrc1()->AsRegOpnd();
    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    BailOutInfo *bailOutInfo = instr->GetBailOutInfo();

    // Check that the property is an object.
    InsertObjectCheck(funcOpnd, instr, bailOutKind, bailOutInfo);

    // Check that the object is a function with the correct type ID.
    IR::Instr *lastInstr = InsertFunctionTypeIdCheck(funcOpnd, instr, bailOutKind, bailOutInfo);

    if (checkFuncInfo)
    {
        // Check that the function body matches the func info (src2 holds the expected address).
        lastInstr = InsertFunctionInfoCheck(
            funcOpnd, instr, instr->GetSrc2()->AsAddrOpnd(), bailOutKind, bailOutInfo);
        lastInstr->SetByteCodeOffset(instr);
    }

    if (bailOutInfo->bailOutInstr == instr)
    {
        // bailOutInstr is currently instr. By changing bailOutInstr to point to lastInstr, the next
        // instruction to be lowered (lastInstr) will create the bailout target. This is necessary in
        // cases where instr does not have a shared bailout (ex: instr was not hoisted outside of a loop).
        bailOutInfo->bailOutInstr = lastInstr;
    }

    // the CheckFunctionEntryPoint instr exists in order to create the instrs above. It does not have
    // any other purpose and thus it is removed. The instr's BailOutInfo continues to be used and thus
    // must not be deleted. Flags are turned off to stop Remove() from deleting instr's BailOutInfo.
    instr->hasBailOutInfo = false;
    instr->hasAuxBailOut = false;
    instr->Remove();

    return lastInstr;
}
  10738. IR::Instr*
  10739. Lowerer::LowerBailOnTrue(IR::Instr* instr, IR::LabelInstr* labelBailOut /*nullptr*/)
  10740. {
  10741. IR::Instr* instrPrev = instr->m_prev;
  10742. IR::LabelInstr* continueLabel = instr->GetOrCreateContinueLabel();
  10743. IR::RegOpnd * regSrc1 = IR::RegOpnd::New(instr->GetSrc1()->GetType(), this->m_func);
  10744. InsertMove(regSrc1, instr->UnlinkSrc1(), instr);
  10745. InsertTestBranch(regSrc1, regSrc1, Js::OpCode::BrEq_A, continueLabel, instr);
  10746. GenerateBailOut(instr, nullptr, labelBailOut);
  10747. return instrPrev;
  10748. }
  10749. IR::Instr *
  10750. Lowerer::LowerBailOnNotBuiltIn(IR::Instr *instr,
  10751. IR::BranchInstr *branchInstr /* = nullptr */,
  10752. IR::LabelInstr *labelBailOut /* = nullptr */)
  10753. {
  10754. Assert(instr->GetSrc2()->IsIntConstOpnd());
  10755. IR::Instr *prevInstr = instr->m_prev;
  10756. intptr_t builtInFuncs = m_func->GetScriptContextInfo()->GetBuiltinFunctionsBaseAddr();
  10757. Js::BuiltinFunction builtInIndex = instr->UnlinkSrc2()->AsIntConstOpnd()->AsInt32();
  10758. IR::Opnd *builtIn = IR::MemRefOpnd::New((void*)(builtInFuncs + builtInIndex * MachPtr), TyMachReg, instr->m_func);
  10759. #if TESTBUILTINFORNULL
  10760. IR::LabelInstr * continueAfterTestLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
  10761. InsertTestBranch(builtIn, builtIn, Js::OpCode::BrNeq_A, continueAfterTestLabel, instr);
  10762. this->m_lowererMD.GenerateDebugBreak(instr);
  10763. instr->InsertBefore(continueAfterTestLabel);
  10764. #endif
  10765. IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
  10766. instr->InsertAfter(continueLabel);
  10767. InsertCompareBranch(instr->UnlinkSrc1(), builtIn, Js::OpCode::BrEq_A, continueLabel, instr);
  10768. GenerateBailOut(instr, branchInstr, labelBailOut);
  10769. return prevInstr;
  10770. }
#ifdef ENABLE_SCRIPT_DEBUGGING
// Lowers BailForDebugger: emits the run-time checks for each debugger bailout
// condition encoded in the instruction's BailOutKind (force-interpreter flag,
// ignore-exception, breakpoints, step in/out, local value changed), each
// branching to a shared bailout label. BailOutExplicit bails unconditionally.
IR::Instr *
Lowerer::LowerBailForDebugger(IR::Instr* instr, bool isInsideHelper /* = false */)
{
    IR::Instr * prevInstr = instr->m_prev;

    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    AssertMsg(bailOutKind, "bailOutKind should not be zero at this time.");
    AssertMsg(!(bailOutKind & IR::BailOutExplicit) || bailOutKind == IR::BailOutExplicit,
        "BailOutExplicit cannot be combined with any other bailout flags.");

    IR::LabelInstr* explicitBailOutLabel = nullptr;

    if (!(bailOutKind & IR::BailOutExplicit))
    {
        intptr_t flags = m_func->GetScriptContextInfo()->GetDebuggingFlagsAddr();

        // Code layout produced below:
        //     Check 1 (do we need to bail out?)
        //     JXX bailoutLabel
        //     Check 2 (do we need to bail out?)
        //     JXX bailoutLabel
        //     ...
        //     JMP continueLabel
        // bailoutDocumentLabel:
        //     (determine if document boundary reached - if not, JMP to continueLabel)
        //     NOTE: THIS BLOCK IS CONDITIONALLY GENERATED BASED ON doGenerateBailOutDocumentBlock
        // bailoutLabel:
        //     bail out
        // continueLabel:
        //     ...
        IR::LabelInstr* bailOutDocumentLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ true);
        instr->InsertBefore(bailOutDocumentLabel);
        IR::LabelInstr* bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ true);
        instr->InsertBefore(bailOutLabel);
        IR::LabelInstr* continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ isInsideHelper);
        instr->InsertAfter(continueLabel);
        // All checks are inserted before this branch, so the fall-through path jumps over the bailout blocks.
        IR::BranchInstr* continueBranchInstr = this->InsertBranch(Js::OpCode::Br, continueLabel, bailOutDocumentLabel);    // JMP continueLabel.

        bool doGenerateBailOutDocumentBlock = false;

        const IR::BailOutKind c_forceAndIgnoreEx = IR::BailOutForceByFlag | IR::BailOutIgnoreException;
        if ((bailOutKind & c_forceAndIgnoreEx) == c_forceAndIgnoreEx)
        {
            // It's faster to check these together in 1 check rather than 2 separate checks at run time.
            // CMP [&(flags->m_forceInterpreter, flags->m_isIgnoreException)], 0
            // BNE bailout
            IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetForceInterpreterOffset(), TyInt16, m_func);
            IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt16, m_func, /*dontEncode*/ true);
            InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
            bailOutKind ^= c_forceAndIgnoreEx;
        }
        else
        {
            if (bailOutKind & IR::BailOutForceByFlag)
            {
                // CMP [&flags->m_forceInterpreter], 0
                // BNE bailout
                IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetForceInterpreterOffset(), TyInt8, m_func);
                IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func, /*dontEncode*/ true);
                InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
                bailOutKind ^= IR::BailOutForceByFlag;
            }
            if (bailOutKind & IR::BailOutIgnoreException)
            {
                // CMP [&flags->m_byteCodeOffsetAfterIgnoreException], DebuggingFlags::InvalidByteCodeOffset
                // BNE bailout
                IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetByteCodeOffsetAfterIgnoreExceptionOffset(), TyInt32, m_func);
                IR::Opnd* opnd2 = IR::IntConstOpnd::New(DebuggingFlags::InvalidByteCodeOffset, TyInt32, m_func, /*dontEncode*/ true);
                InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
                bailOutKind ^= IR::BailOutIgnoreException;
            }
        }

        if (bailOutKind & IR::BailOutBreakPointInFunction)
        {
            // CMP [&functionBody->m_sourceInfo.m_probeCount], 0
            // BNE bailout
            IR::Opnd* opnd1 = IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetProbeCountAddr(), TyInt32, m_func);
            IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt32, m_func, /*dontEncode*/ true);
            InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
            bailOutKind ^= IR::BailOutBreakPointInFunction;
        }

        // on method entry
        if(bailOutKind & IR::BailOutStep)
        {
            // TEST STEP_BAILOUT, [&stepController->StepType]
            // BNE BailoutLabel
            IR::Opnd* opnd1 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugStepTypeAddr(), TyInt8, m_func);
            IR::Opnd* opnd2 = IR::IntConstOpnd::New(Js::STEP_BAILOUT, TyInt8, this->m_func, /*dontEncode*/ true);
            InsertTestBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);

            // CMP STEP_DOCUMENT, [&stepController->StepType]
            // BEQ BailoutDocumentLabel
            opnd1 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugStepTypeAddr(), TyInt8, m_func);
            opnd2 = IR::IntConstOpnd::New(Js::STEP_DOCUMENT, TyInt8, this->m_func, /*dontEncode*/ true);
            InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrEq_A, /*isUnsigned*/ true, bailOutDocumentLabel, continueBranchInstr);

            doGenerateBailOutDocumentBlock = true;

            bailOutKind ^= IR::BailOutStep;
        }

        // on method exit
        if (bailOutKind & IR::BailOutStackFrameBase)
        {
            // CMP EffectiveFrameBase, [&stepController->frameAddrWhenSet]
            // BA bailoutLabel
            RegNum effectiveFrameBaseReg;
#ifdef _M_X64
            effectiveFrameBaseReg = m_lowererMD.GetRegStackPointer();
#else
            effectiveFrameBaseReg = m_lowererMD.GetRegFramePointer();
#endif
            IR::Opnd* opnd1 = IR::RegOpnd::New(nullptr, effectiveFrameBaseReg, TyMachReg, m_func);
            IR::Opnd* opnd2 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugFrameAddressAddr(), TyMachReg, m_func);
            this->InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrGt_A, /*isUnsigned*/ true, bailOutLabel, continueBranchInstr);

            // CMP STEP_DOCUMENT, [&stepController->StepType]
            // BEQ BailoutDocumentLabel
            opnd1 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugStepTypeAddr(), TyInt8, m_func);
            opnd2 = IR::IntConstOpnd::New(Js::STEP_DOCUMENT, TyInt8, this->m_func, /*dontEncode*/ true);
            InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrEq_A, /*isUnsigned*/ true, bailOutDocumentLabel, continueBranchInstr);

            doGenerateBailOutDocumentBlock = true;

            bailOutKind ^= IR::BailOutStackFrameBase;
        }

        if (bailOutKind & IR::BailOutLocalValueChanged)
        {
            int32 hasLocalVarChangedOffset = m_func->GetHasLocalVarChangedOffset();
            if (hasLocalVarChangedOffset != Js::Constants::InvalidOffset)
            {
                // CMP [EBP + hasLocalVarChangedStackOffset], 0
                // BNE bailout
                StackSym* sym = StackSym::New(TyInt8, m_func);
                sym->m_offset = hasLocalVarChangedOffset;
                sym->m_allocated = true;
                IR::Opnd* opnd1 = IR::SymOpnd::New(sym, TyInt8, m_func);
                IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func);
                InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
            }
            bailOutKind ^= IR::BailOutLocalValueChanged;
        }

        if (doGenerateBailOutDocumentBlock)
        {
            // GENERATE the BailoutDocumentLabel
            // bailOutDocumentLabel:
            //   CMP CurrentScriptId, [&stepController->ScriptIdWhenSet]
            //   BEQ ContinueLabel
            // bailOutLabel:                // (fallthrough bailOutLabel)
            IR::Opnd* opnd1 = IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetScriptIdAddr(), TyInt32, m_func);
            IR::Opnd* opnd2 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugScriptIdWhenSetAddr(), TyInt32, m_func);
            IR::RegOpnd* reg1 = IR::RegOpnd::New(TyInt32, m_func);
            InsertMove(reg1, opnd2, bailOutLabel);
            InsertCompareBranch(opnd1, reg1, Js::OpCode::BrEq_A, /*isUnsigned*/ true, continueLabel, bailOutLabel);
        }

        // Every handled kind was XOR'ed out above; anything left over is a kind this
        // function does not know how to lower.
        AssertMsg(bailOutKind == (IR::BailOutKind)0, "Some of the bits in BailOutKind were not processed!");

        // Note: at this time the 'instr' is in between bailoutLabel and continueLabel.
    }
    else
    {
        // For explicit/unconditional bailout use label which is not a helper, otherwise we would get a helper in main code path
        // which breaks helper label consistency (you can only get to helper from a conditional branch in main code), see DbCheckPostLower.
        explicitBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
    }

    this->GenerateBailOut(instr, nullptr, explicitBailOutLabel);

    return prevInstr;
}
#endif
  10926. IR::Instr*
  10927. Lowerer::LowerBailOnException(IR::Instr * instr)
  10928. {
  10929. Assert(instr->HasBailOutInfo());
  10930. IR::Instr * instrPrev = instr->m_prev;
  10931. this->GenerateBailOut(instr, nullptr, nullptr);
  10932. return instrPrev;
  10933. }
  10934. IR::Instr*
  10935. Lowerer::LowerBailOnEarlyExit(IR::Instr * instr)
  10936. {
  10937. Assert(instr->HasBailOutInfo());
  10938. IR::Instr * instrPrev = instr->m_prev;
  10939. this->GenerateBailOut(instr, nullptr, nullptr);
  10940. return instrPrev;
  10941. }
// Generate BailOut Lowerer Instruction if the value is INT_MIN.
// If it's not INT_MIN, we continue without bailout.
IR::Instr *
Lowerer::LowerBailOnIntMin(IR::Instr *instr, IR::BranchInstr *branchInstr /* = nullptr */, IR::LabelInstr *labelBailOut /* = nullptr */)
{
    Assert(instr);
    Assert(instr->GetSrc1());
    IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    instr->InsertAfter(continueLabelInstr);

    if(!instr->HasBailOutInfo())
    {
        // No bailout info: the check is a no-op; just drop the instruction.
        instr->Remove();
    }
    else
    {
        Assert(instr->GetBailOutKind() == IR::BailOnIntMin);
        // Note: src1 must be int32 at this point.
        if (instr->GetSrc1()->IsIntConstOpnd())
        {
            // For consts we can check the value at JIT time. Note: without this check we'll have to legalize the CMP instr.
            IR::IntConstOpnd* intConst = instr->UnlinkSrc1()->AsIntConstOpnd();
            if (intConst->GetValue() == INT_MIN)
            {
                // Constant is INT_MIN: this always bails out.
                this->GenerateBailOut(instr, branchInstr, labelBailOut);
                intConst->Free(instr->m_func);
            }
            else
            {
                // Constant can never be INT_MIN: no bailout needed.
                instr->Remove();
            }
        }
        else
        {
            // Runtime check: skip the bailout when src1 != INT_MIN.
            InsertCompareBranch(instr->UnlinkSrc1(), IR::IntConstOpnd::New(INT_MIN, TyInt32, this->m_func), Js::OpCode::BrNeq_A, continueLabelInstr, instr);
            this->GenerateBailOut(instr, branchInstr, labelBailOut);
        }
    }

    return continueLabelInstr;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerBailOnNotString
///     Generate BailOut Lowerer Instruction if not a String
///
///----------------------------------------------------------------------------
void Lowerer::LowerBailOnNotString(IR::Instr *instr)
{
    if (!instr->GetSrc1()->GetValueType().IsString())
    {
        /*Creating a MOV instruction*/
        // Split off the value move so the original instruction carries only the bailout.
        IR::Instr * movInstr = IR::Instr::New(instr->m_opcode, instr->UnlinkDst(), instr->UnlinkSrc1(), instr->m_func);
        instr->InsertBefore(movInstr);

        IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::LabelInstr *helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        instr->InsertAfter(continueLabelInstr);

        // String test jumps to the helper label (bailout) on failure, continue label on success.
        IR::RegOpnd *srcReg = movInstr->GetSrc1()->IsRegOpnd() ? movInstr->GetSrc1()->AsRegOpnd() : nullptr;
        this->GenerateStringTest(srcReg, instr, helperLabelInstr, continueLabelInstr);
        this->GenerateBailOut(instr, nullptr, helperLabelInstr);
    }
    else
    {
        // Value type is already known to be String: no check needed, drop the bailout info.
        instr->ClearBailOutInfo();
    }
}
// Splits a single bailout kind out of 'instr' and emits an unconditional
// bailout instruction for it right after 'instr'. Any remaining kinds stay on
// 'instr' (sharing the bailout point); the caller emits the branch that
// decides whether the new bailout executes.
void Lowerer::LowerOneBailOutKind(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKindToLower,
    const bool isInHelperBlock,
    const bool preserveBailOutKindInInstr)
{
    Assert(instr);
    Assert(bailOutKindToLower);
    // bailOutKindToLower must be either a single bit (power of two) or a non-bit kind value.
    Assert(!(bailOutKindToLower & IR::BailOutKindBits) || !(bailOutKindToLower & bailOutKindToLower - 1u));

    Func *const func = instr->m_func;

    // Split bailouts other than the one being lowered here
    BailOutInfo *const bailOutInfo = instr->GetBailOutInfo();
    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    Assert(
        bailOutKindToLower & IR::BailOutKindBits
            ? bailOutKind & bailOutKindToLower
            : (bailOutKind & ~IR::BailOutKindBits) == bailOutKindToLower);
    if(!preserveBailOutKindInInstr)
    {
        bailOutKind -= bailOutKindToLower;
    }
    if(bailOutKind)
    {
        // Other kinds remain on 'instr'; make sure they share a common bailout target.
        if(bailOutInfo->bailOutInstr == instr)
        {
            // Create a shared bailout point for the split bailout checks
            IR::Instr *const sharedBail = instr->ShareBailOut();
            Assert(sharedBail->GetBailOutInfo() == bailOutInfo);
            GenerateBailOut(sharedBail);
        }
        instr->SetBailOutKind(bailOutKind);
    }
    else
    {
        // Nothing left on 'instr': detach the bailout info without freeing it.
        instr->UnlinkBailOutInfo();
        if(bailOutInfo->bailOutInstr == instr)
        {
            bailOutInfo->bailOutInstr = nullptr;
        }
    }

    IR::Instr *const insertBeforeInstr = instr->m_next;

    // (Bail out with the requested bail out kind)
    IR::BailOutInstr *const bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOut, bailOutKindToLower, bailOutInfo, func);
    bailOutInstr->SetByteCodeOffset(instr);
    insertBeforeInstr->InsertBefore(bailOutInstr);
    GenerateBailOut(bailOutInstr);

    // The caller is expected to generate code to decide whether to bail out
}
// Splits a combined BailOnNotArray instruction that also carries
// BailOutOnMissingValue into two bailout instructions: the array check stays
// on 'instr' (*bailOnNotArrayRef) and a separate missing-value bailout is
// emitted after it (*bailOnMissingValueRef, nullptr when no split is needed).
void Lowerer::SplitBailOnNotArray(
    IR::Instr *const instr,
    IR::Instr * *const bailOnNotArrayRef,
    IR::Instr * *const bailOnMissingValueRef)
{
    Assert(instr);
    Assert(!instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsRegOpnd());
    Assert(!instr->GetSrc2());
    Assert(bailOnNotArrayRef);
    Assert(bailOnMissingValueRef);

    IR::Instr *&bailOnNotArray = *bailOnNotArrayRef;
    IR::Instr *&bailOnMissingValue = *bailOnMissingValueRef;

    bailOnNotArray = instr;
    bailOnMissingValue = nullptr;

    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    if(bailOutKind == IR::BailOutOnNotArray ||
       bailOutKind == IR::BailOutOnNotNativeArray)
    {
        // Only the array check is requested: nothing to split.
        return;
    }

    // Split array checks
    BailOutInfo *const bailOutInfo = instr->GetBailOutInfo();
    if(bailOutInfo->bailOutInstr == instr)
    {
        // Create a shared bailout point for the split bailout checks
        IR::Instr *const sharedBail = instr->ShareBailOut();
        Assert(sharedBail->GetBailOutInfo() == bailOutInfo);
        LowerBailTarget(sharedBail);
    }
    // Remove the missing-value bit; the array-check kind remains on 'instr'.
    bailOutKind -= IR::BailOutOnMissingValue;
    Assert(bailOutKind == IR::BailOutOnNotArray ||
           bailOutKind == IR::BailOutOnNotNativeArray);
    instr->SetBailOutKind(bailOutKind);

    Func *const func = bailOutInfo->bailOutFunc;
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // Split missing value checks
    bailOnMissingValue = IR::BailOutInstr::New(Js::OpCode::BailOnNotArray, IR::BailOutOnMissingValue, bailOutInfo, func);
    bailOnMissingValue->SetByteCodeOffset(instr);
    insertBeforeInstr->InsertBefore(bailOnMissingValue);
}
// Lowers BailOnNotArray: emits the array test on src1, bailing out when the
// value is not an array. Returns the array-typed register operand produced by
// the test so subsequent lowering (e.g. missing-value checks) can reuse it.
IR::RegOpnd *Lowerer::LowerBailOnNotArray(IR::Instr *const instr)
{
    Assert(instr);
    Assert(!instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsRegOpnd());
    Assert(!instr->GetSrc2());

    Func *const func = instr->m_func;

    // Label to jump to (or fall through to) when bailing out
    const auto bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true /* isOpHelper */);
    instr->InsertBefore(bailOutLabel);

    // Label to jump to when not bailing out
    const auto skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    instr->InsertAfter(skipBailOutLabel);

    // Do the array tests and jump to bailOutLabel if it's not an array. Fall through if it is an array.
    IR::RegOpnd *const arrayOpnd =
        GenerateArrayTest(instr->UnlinkSrc1()->AsRegOpnd(), bailOutLabel, bailOutLabel, bailOutLabel, true);

    // Skip bail-out when it is an array
    InsertBranch(Js::OpCode::Br, skipBailOutLabel, bailOutLabel);

    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    GenerateBailOut(instr);

    return arrayOpnd;
}
// Lowers the missing-value bailout: bails out unless the array's
// HasNoMissingValues flag is set. 'arrayOpnd' is the array register produced
// by a preceding array test (see LowerBailOnNotArray).
void Lowerer::LowerBailOnMissingValue(IR::Instr *const instr, IR::RegOpnd *const arrayOpnd)
{
    Assert(instr);
    Assert(!instr->GetDst());
    Assert(!instr->GetSrc1());
    Assert(!instr->GetSrc2());
    Assert(arrayOpnd);
    Assert(arrayOpnd->GetValueType().IsArrayOrObjectWithArray());

    Func *const func = instr->m_func;

    // Label to jump to when not bailing out
    const auto skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    instr->InsertAfter(skipBailOutLabel);

    // Skip bail-out when the array has no missing values
    //
    //     test [array + offsetOf(objectArrayOrFlags)], Js::DynamicObjectFlags::HasNoMissingValues
    //     jnz $skipBailOut
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, func);
    // The flag must fit in the single byte tested below.
    CompileAssert(
        static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
        Js::DynamicObjectFlags::HasNoMissingValues);
    InsertTestBranch(
        IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, func),
        IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, func, true),
        Js::OpCode::BrNeq_A,
        skipBailOutLabel,
        instr);

    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    GenerateBailOut(instr);
}
// Lowers the BailOutOnInvalidatedArrayHeadSegment part of a store element
// (StElemI_A[_Strict]) or Memset/Memcopy that goes through a helper call:
// records the array's head segment and head segment length before the helper
// call, and bails out after the helper call if either one changed.
void Lowerer::LowerBailOnInvalidatedArrayHeadSegment(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
    // Generate checks for whether the head segment or the head segment length changed during the helper call

    if(!(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentSym()))
    {
        // Record the array head segment before the helper call
        headSegmentBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayHeadSegmentForArrayOrObjectWithArray(base)
    }
    if(!(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentLengthSym()))
    {
        // Record the array head segment length before the helper call
        if(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentSym())
        {
            mov headSegmentLengthBeforeHelperCall, [headSegmentBeforeHelperCall + offsetOf(length)]
        }
        else
        {
            headSegmentLengthBeforeHelperCall =
                Js::JavascriptArray::Jit_GetArrayHeadSegmentLength(headSegmentBeforeHelperCall)
        }
    }

    helperCall:
        (Helper call and other bailout checks)

    // If the array has a different head segment or head segment length after the helper call, then this store needs to bail
    // out
    invalidatedHeadSegment =
        JavascriptArray::Jit_OperationInvalidatedArrayHeadSegment(
            headSegmentBeforeHelperCall,
            headSegmentLengthBeforeHelperCall,
            base)
    test invalidatedHeadSegment, invalidatedHeadSegment
    jz $skipBailOut
    (Bail out with IR::BailOutOnInvalidatedArrayHeadSegment)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;
    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArrayOrObjectWithArray());
    const bool isArrayOrObjectWithArray = baseValueType.IsArrayOrObjectWithArray();
    IR::ArrayRegOpnd *const arrayOpnd = baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr;

    // Obtain the head segment as of before the helper call: reuse the tracked
    // head-segment sym when available, otherwise call a JIT helper to load it.
    IR::RegOpnd *headSegmentBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentBeforeHelperCallOpnd;
    if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentSym())
    {
        headSegmentBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->HeadSegmentSym(), TyMachPtr, func);
        autoReuseHeadSegmentBeforeHelperCallOpnd.Initialize(headSegmentBeforeHelperCallOpnd, func);
    }
    else
    {
        // Record the array head segment before the helper call
        // headSegmentBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayHeadSegmentForArrayOrObjectWithArray(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        headSegmentBeforeHelperCallOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, func), TyMachPtr, func);
        autoReuseHeadSegmentBeforeHelperCallOpnd.Initialize(headSegmentBeforeHelperCallOpnd, func);
        callInstr->SetDst(headSegmentBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayHeadSegmentForArrayOrObjectWithArray);
    }

    // Likewise obtain the head segment length as of before the helper call.
    IR::RegOpnd *headSegmentLengthBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentLengthBeforeHelperCallOpnd;
    if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentLengthSym())
    {
        headSegmentLengthBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->HeadSegmentLengthSym(), TyUint32, func);
        autoReuseHeadSegmentLengthBeforeHelperCallOpnd.Initialize(headSegmentLengthBeforeHelperCallOpnd, func);
    }
    else
    {
        headSegmentLengthBeforeHelperCallOpnd = IR::RegOpnd::New(StackSym::New(TyUint32, func), TyUint32, func);
        autoReuseHeadSegmentLengthBeforeHelperCallOpnd.Initialize(headSegmentLengthBeforeHelperCallOpnd, func);
        if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentSym())
        {
            // Record the array head segment length before the helper call
            // mov headSegmentLengthBeforeHelperCall, [headSegmentBeforeHelperCall + offsetOf(length)]
            InsertMove(
                headSegmentLengthBeforeHelperCallOpnd,
                IR::IndirOpnd::New(
                    headSegmentBeforeHelperCallOpnd,
                    Js::SparseArraySegmentBase::GetOffsetOfLength(),
                    TyUint32,
                    func),
                instr);
        }
        else
        {
            // Record the array head segment length before the helper call
            // headSegmentLengthBeforeHelperCall =
            //     Js::JavascriptArray::Jit_GetArrayHeadSegmentLength(headSegmentBeforeHelperCall)
            m_lowererMD.LoadHelperArgument(instr, headSegmentBeforeHelperCallOpnd);
            IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
            callInstr->SetDst(headSegmentLengthBeforeHelperCallOpnd);
            instr->InsertBefore(callInstr);
            m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayHeadSegmentLength);
        }
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    LowerOneBailOutKind(instr, IR::BailOutOnInvalidatedArrayHeadSegment, isInHelperBlock);
    // The post-call checks are inserted after 'instr', i.e. after the helper call
    // and the other bailout checks generated by LowerOneBailOutKind.
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array has a different head segment or head segment length after the helper call, then this store needs to bail out
    // invalidatedHeadSegment =
    //     JavascriptArray::Jit_OperationInvalidatedArrayHeadSegment(
    //         headSegmentBeforeHelperCall,
    //         headSegmentLengthBeforeHelperCall,
    //         base)
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, headSegmentLengthBeforeHelperCallOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, headSegmentBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const invalidatedHeadSegmentOpnd = IR::RegOpnd::New(TyUint8, func);
    const IR::AutoReuseOpnd autoReuseInvalidatedHeadSegmentOpnd(invalidatedHeadSegmentOpnd, func);
    callInstr->SetDst(invalidatedHeadSegmentOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationInvalidatedArrayHeadSegment);

    // test invalidatedHeadSegment, invalidatedHeadSegment
    // jz $skipBailOut
    InsertTestBranch(
        invalidatedHeadSegmentOpnd,
        invalidatedHeadSegmentOpnd,
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);
    // (Bail out with IR::BailOutOnInvalidatedArrayHeadSegment)
    // $skipBailOut:
}
// Lowers the BailOutOnInvalidatedArrayLength part of a store element
// (StElemI_A[_Strict]) or Memset/Memcopy that goes through a helper call:
// records the array length before the helper call and bails out after the
// helper call if the length changed.
void Lowerer::LowerBailOnInvalidatedArrayLength(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
    // Generate checks for whether the length changed during the helper call

    if(!(arrayOpnd && arrayOpnd.LengthSym() && arrayOpnd.LengthSym() != arrayOpnd.HeadSegmentLengthSym()))
    {
        // Record the array length before the helper call
        lengthBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayLength(base)
    }

    helperCall:
        (Helper call and other bailout checks)

    // If the array has a different length after the helper call, then this store needs to bail out
    invalidatedLength = JavascriptArray::Jit_OperationInvalidatedArrayLength(lengthBeforeHelperCall, base)
    test invalidatedLength, invalidatedLength
    jz $skipBailOut
    (Bail out with IR::BailOutOnInvalidatedArrayLength)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;
    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArray());
    IR::ArrayRegOpnd *const arrayOpnd = baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr;

    // Obtain the length as of before the helper call: reuse the tracked length
    // sym when it is distinct from the head segment length sym, otherwise call a
    // JIT helper to load it.
    IR::RegOpnd *lengthBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseLengthBeforeHelperCallOpnd;
    if(arrayOpnd && arrayOpnd->LengthSym() && arrayOpnd->LengthSym() != arrayOpnd->HeadSegmentLengthSym())
    {
        lengthBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->LengthSym(), arrayOpnd->LengthSym()->GetType(), func);
        autoReuseLengthBeforeHelperCallOpnd.Initialize(lengthBeforeHelperCallOpnd, func);
    }
    else
    {
        // Record the array length before the helper call
        // lengthBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayLength(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        lengthBeforeHelperCallOpnd = IR::RegOpnd::New(TyUint32, func);
        autoReuseLengthBeforeHelperCallOpnd.Initialize(lengthBeforeHelperCallOpnd, func);
        callInstr->SetDst(lengthBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayLength);
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    LowerOneBailOutKind(instr, IR::BailOutOnInvalidatedArrayLength, isInHelperBlock);
    // The post-call check is inserted after 'instr', i.e. after the helper call
    // and the other bailout checks generated by LowerOneBailOutKind.
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array has a different length after the helper call, then this store needs to bail out
    // invalidatedLength = JavascriptArray::Jit_OperationInvalidatedArrayLength(lengthBeforeHelperCall, base)
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, lengthBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const invalidatedLengthOpnd = IR::RegOpnd::New(TyUint8, func);
    const IR::AutoReuseOpnd autoReuseInvalidatedLengthOpnd(invalidatedLengthOpnd, func);
    callInstr->SetDst(invalidatedLengthOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationInvalidatedArrayLength);

    // test invalidatedLength, invalidatedLength
    // jz $skipBailOut
    InsertTestBranch(
        invalidatedLengthOpnd,
        invalidatedLengthOpnd,
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);
    // (Bail out with IR::BailOutOnInvalidatedArrayLength)
    // $skipBailOut:
}
// Lowers the BailOutOnMissingValue part of a store element
// (StElemI_A[_Strict]) or Memset/Memcopy that goes through a helper call:
// captures the array flags before the helper call, then bails out after the
// helper call if the store created the array's first missing value.
void Lowerer::LowerBailOnCreatedMissingValue(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
    // Generate checks for whether the first missing value was created during the helper call

    if(!(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
    {
        // Record whether the array has missing values before the helper call
        arrayFlagsBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayFlagsForArrayOrObjectWithArray(base)
    }

    helperCall:
        (Helper call and other bailout checks)

    // If the array had no missing values before the helper call, and the array has missing values after the helper
    // call, then this store created the first missing value in the array and needs to bail out
    if(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())
        (arrayFlagsBeforeHelperCall = Js::DynamicObjectFlags::HasNoMissingValues)
    createdFirstMissingValue = JavascriptArray::Jit_OperationCreatedFirstMissingValue(arrayFlagsBeforeHelperCall, base)
    test createdFirstMissingValue, createdFirstMissingValue
    jz $skipBailOut
    (Bail out with IR::BailOutOnMissingValue)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;
    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArrayOrObjectWithArray());

    IR::Opnd *arrayFlagsBeforeHelperCallOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayFlagsBeforeHelperCallOpnd;
    // Array flags are pointer-sized; pick the matching IR type for this target.
    const IRType arrayFlagsType = sizeof(uintptr_t) == sizeof(uint32) ? TyUint32 : TyUint64;
    if(!(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
    {
        // Record whether the array has missing values before the helper call
        // arrayFlagsBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayFlagsForArrayOrObjectWithArray(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        arrayFlagsBeforeHelperCallOpnd = IR::RegOpnd::New(arrayFlagsType, func);
        autoReuseArrayFlagsBeforeHelperCallOpnd.Initialize(arrayFlagsBeforeHelperCallOpnd, func);
        callInstr->SetDst(arrayFlagsBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayFlagsForArrayOrObjectWithArray);
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    LowerOneBailOutKind(instr, IR::BailOutOnMissingValue, isInHelperBlock);
    // The post-call check is inserted after 'instr', i.e. after the helper call
    // and the other bailout checks generated by LowerOneBailOutKind.
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array had no missing values before the helper call, and the array has missing values after the helper
    // call, then this store created the first missing value in the array and needs to bail out
    if(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())
    {
        // The value type proves the flags before the helper call: materialize
        // HasNoMissingValues as an immediate instead of loading it.
        // (arrayFlagsBeforeHelperCall = Js::DynamicObjectFlags::HasNoMissingValues)
        Assert(!arrayFlagsBeforeHelperCallOpnd);
        arrayFlagsBeforeHelperCallOpnd =
            arrayFlagsType == TyUint32
                ? static_cast<IR::Opnd *>(
                    IR::IntConstOpnd::New(
                        static_cast<uintptr_t>(Js::DynamicObjectFlags::HasNoMissingValues),
                        arrayFlagsType,
                        func,
                        true))
                : IR::AddrOpnd::New(
                    reinterpret_cast<void *>(Js::DynamicObjectFlags::HasNoMissingValues),
                    IR::AddrOpndKindConstantVar,
                    func,
                    true);
        autoReuseArrayFlagsBeforeHelperCallOpnd.Initialize(arrayFlagsBeforeHelperCallOpnd, func);
    }
    else
    {
        Assert(arrayFlagsBeforeHelperCallOpnd);
    }

    // createdFirstMissingValue = JavascriptArray::Jit_OperationCreatedFirstMissingValue(arrayFlagsBeforeHelperCall, base)
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayFlagsBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const createdFirstMissingValueOpnd = IR::RegOpnd::New(TyUint8, func);
    IR::AutoReuseOpnd autoReuseCreatedFirstMissingValueOpnd(createdFirstMissingValueOpnd, func);
    callInstr->SetDst(createdFirstMissingValueOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationCreatedFirstMissingValue);

    // test createdFirstMissingValue, createdFirstMissingValue
    // jz $skipBailOut
    InsertCompareBranch(
        createdFirstMissingValueOpnd,
        IR::IntConstOpnd::New(0, createdFirstMissingValueOpnd->GetType(), func, true),
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);
    // (Bail out with IR::BailOutOnMissingValue)
    // $skipBailOut:
}
  11442. IR::Opnd*
  11443. Lowerer::GetFuncObjectOpnd(IR::Instr* insertBeforeInstr)
  11444. {
  11445. Func * func = insertBeforeInstr->m_func;
  11446. IR::Opnd *paramOpnd = nullptr;
  11447. if (func->IsInlinee())
  11448. {
  11449. paramOpnd = func->GetInlineeFunctionObjectSlotOpnd();
  11450. }
  11451. else
  11452. {
  11453. #if defined(_M_ARM32_OR_ARM64)
  11454. StackSym * paramSym = this->m_lowererMD.GetImplicitParamSlotSym(0);
  11455. #else
  11456. StackSym *paramSym = StackSym::New(TyMachReg, this->m_func);
  11457. this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
  11458. this->m_func->SetHasImplicitParamLoad();
  11459. #endif
  11460. paramOpnd = IR::SymOpnd::New(paramSym, TyMachReg, this->m_func);
  11461. }
  11462. if (func->GetJITFunctionBody()->IsCoroutine())
  11463. {
  11464. // the function object for generator calls is a GeneratorVirtualScriptFunction object
  11465. // and we need to return the real JavascriptGeneratorFunction object so grab it before
  11466. // assigning to the dst
  11467. Assert(!func->IsInlinee());
  11468. IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, func);
  11469. Lowerer::InsertMove(tmpOpnd, paramOpnd, insertBeforeInstr);
  11470. paramOpnd = IR::IndirOpnd::New(tmpOpnd, Js::GeneratorVirtualScriptFunction::GetRealFunctionOffset(), TyMachPtr, func);
  11471. }
  11472. return paramOpnd;
  11473. }
  11474. ///----------------------------------------------------------------------------
  11475. ///
  11476. /// Lowerer::LoadFuncExpression
  11477. ///
  11478. /// Load the function expression to src1 from [ebp + 8]
  11479. ///
  11480. ///----------------------------------------------------------------------------
  11481. IR::Instr *
  11482. Lowerer::LoadFuncExpression(IR::Instr *instrFuncExpr)
  11483. {
  11484. ASSERT_INLINEE_FUNC(instrFuncExpr);
  11485. IR::Opnd *paramOpnd = GetFuncObjectOpnd(instrFuncExpr);
  11486. // mov dst, param
  11487. instrFuncExpr->SetSrc1(paramOpnd);
  11488. LowererMD::ChangeToAssign(instrFuncExpr);
  11489. return instrFuncExpr;
  11490. }
// Lowers a BoundCheck/UnsignedBoundCheck instruction, which verifies
// left <= right + offset (src1 <= src2 + dst) and bails out when the check
// fails. Constant operands are folded where possible; when (right + offset)
// must be computed at run time, overflow of the add also triggers the bailout.
void Lowerer::LowerBoundCheck(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::BoundCheck || instr->m_opcode == Js::OpCode::UnsignedBoundCheck);

#if DBG
    if(instr->m_opcode == Js::OpCode::UnsignedBoundCheck)
    {
        // UnsignedBoundCheck is currently only supported for the pattern:
        //     UnsignedBoundCheck s1 <= s2 + c, where c == 0 || c == -1
        Assert(instr->GetSrc1()->IsRegOpnd());
        Assert(instr->GetSrc1()->IsInt32());
        Assert(instr->GetSrc2());
        Assert(!instr->GetSrc2()->IsIntConstOpnd());
        if(instr->GetDst())
        {
            const int32 c = instr->GetDst()->AsIntConstOpnd()->AsInt32();
            Assert(c == 0 || c == -1);
        }
    }
#endif

    const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    Assert(
        bailOutKind == IR::BailOutOnArrayAccessHelperCall ||
        bailOutKind == IR::BailOutOnInvalidatedArrayHeadSegment ||
        bailOutKind == IR::BailOutOnFailedHoistedBoundCheck ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(false);
    LowerOneBailOutKind(instr, bailOutKind, false);
    Assert(!instr->HasBailOutInfo());
    IR::Instr *insertBeforeInstr = instr->m_next;

#if DBG
    // Debug-only sanity check: operands must be int32/uint32 registers or
    // nonzero constants (right may be absent).
    const auto VerifyLeftOrRightOpnd = [&](IR::Opnd *const opnd, const bool isRightOpnd)
    {
        if(!opnd)
        {
            Assert(isRightOpnd);
            return;
        }
        if(opnd->IsIntConstOpnd())
        {
            Assert(!isRightOpnd || opnd->AsIntConstOpnd()->GetValue() != 0);
            return;
        }
        Assert(opnd->GetType() == TyInt32 || opnd->GetType() == TyUint32);
    };
#endif

    // left <= right + offset (src1 <= src2 + dst)
    IR::Opnd *leftOpnd = instr->UnlinkSrc1();
    DebugOnly(VerifyLeftOrRightOpnd(leftOpnd, false));
    IR::Opnd *rightOpnd = instr->UnlinkSrc2();
    DebugOnly(VerifyLeftOrRightOpnd(rightOpnd, true));
    Assert(!leftOpnd->IsIntConstOpnd() || rightOpnd && !rightOpnd->IsIntConstOpnd());
    IR::IntConstOpnd *offsetOpnd = instr->GetDst() ? instr->UnlinkDst()->AsIntConstOpnd() : nullptr;
    Assert(!offsetOpnd || offsetOpnd->GetValue() != 0);
    const bool doUnsignedCompare = instr->m_opcode == Js::OpCode::UnsignedBoundCheck;
    // The original instruction is replaced by the compare/branch sequence below.
    instr->Remove();

    Func *const func = insertBeforeInstr->m_func;
    IntConstType offset = offsetOpnd ? offsetOpnd->GetValue() : 0;
    Js::OpCode compareOpCode = Js::OpCode::BrLe_A;
    if(leftOpnd->IsIntConstOpnd() && rightOpnd->IsRegOpnd() && offset != IntConstMin)
    {
        // Put the constants together: swap the operands, negate the offset, and invert the branch
        // (IntConstMin is excluded because -IntConstMin overflows)
        IR::Opnd *const tempOpnd = leftOpnd;
        leftOpnd = rightOpnd;
        rightOpnd = tempOpnd;
        offset = -offset;
        compareOpCode = Js::OpCode::BrGe_A;
    }

    if(rightOpnd->IsIntConstOpnd())
    {
        // Try to aggregate right + offset into a constant offset
        // NOTE(review): this assumes IntConstMath::Add reports failure (overflow)
        // by returning true, so the fold only happens when the sum is valid —
        // confirm against IntConstMath's contract.
        IntConstType newOffset;
        if(!IntConstMath::Add(offset, rightOpnd->AsIntConstOpnd()->GetValue(), TyInt32, &newOffset))
        {
            offset = newOffset;
            rightOpnd = nullptr;
            offsetOpnd = nullptr;
        }
    }

    // Determine if the Add for (right + offset) is necessary, and the op code that will be used for the comparison
    IR::AutoReuseOpnd autoReuseAddResultOpnd;
    if(offset == -1 && compareOpCode == Js::OpCode::BrLe_A)
    {
        // left <= right - 1  is the same as  left < right
        offset = 0;
        compareOpCode = Js::OpCode::BrLt_A;
    }
    else if(offset == 1 && compareOpCode == Js::OpCode::BrGe_A)
    {
        // left >= right + 1  is the same as  left > right
        offset = 0;
        compareOpCode = Js::OpCode::BrGt_A;
    }
    else if(offset != 0 && rightOpnd)
    {
        // Need to Add (right + offset). If it overflows, bail out.
        IR::LabelInstr *const bailOutLabel = insertBeforeInstr->m_prev->GetOrCreateContinueLabel(true);
        insertBeforeInstr = bailOutLabel;

        //     mov  temp, right
        //     add  temp, offset
        //     jo   $bailOut
        // $bailOut: (insertBeforeInstr)
        Assert(!offsetOpnd || offsetOpnd->GetValue() == offset);
        IR::RegOpnd *const addResultOpnd = IR::RegOpnd::New(TyInt32, func);
        autoReuseAddResultOpnd.Initialize(addResultOpnd, func);
        InsertAdd(
            true,
            addResultOpnd,
            rightOpnd,
            offsetOpnd ? offsetOpnd->UseWithNewType(TyInt32, func) : IR::IntConstOpnd::New(offset, TyInt32, func),
            insertBeforeInstr);
        InsertBranch(LowererMD::MDOverflowBranchOpcode, bailOutLabel, insertBeforeInstr);
        rightOpnd = addResultOpnd;
    }

    //     cmp  left, right
    //     jl[e] $skipBailOut
    // $bailOut:
    if(!rightOpnd)
    {
        // Everything on the right side folded into a constant
        rightOpnd = IR::IntConstOpnd::New(offset, TyInt32, func);
    }
    InsertCompareBranch(leftOpnd, rightOpnd, compareOpCode, doUnsignedCompare, skipBailOutLabel, insertBeforeInstr);
}
  11612. IR::Instr *
  11613. Lowerer::LowerBailTarget(IR::Instr * instr)
  11614. {
  11615. // this is just a bailout target, just skip over it and generate a label before so other bailout can jump here.
  11616. IR::Instr * prevInstr = instr->m_prev;
  11617. IR::LabelInstr * continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  11618. instr->InsertAfter(continueLabelInstr);
  11619. IR::BranchInstr * skipInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueLabelInstr, this->m_func);
  11620. instr->InsertBefore(skipInstr);
  11621. this->GenerateBailOut(instr);
  11622. return prevInstr;
  11623. }
// Splits an implicit-call bailout off of 'instr': the original instruction is
// turned into a BailOnNotEqual on the implicit-call flags, and a fresh copy of
// the operation (without bailout) is inserted before it. On return, 'instr' is
// updated to point at the new non-bailout copy, and the BailOnNotEqual
// instruction is returned. For pre-op bailouts, implicit calls are also
// disabled across the operation and re-enabled before the check.
IR::Instr *
Lowerer::SplitBailOnImplicitCall(IR::Instr *& instr)
{
    Assert(instr->IsPlainInstr() || instr->IsProfiledInstr());
    const auto bailOutKind = instr->GetBailOutKind();
    Assert(BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind));

    IR::Opnd * implicitCallFlags = this->GetImplicitCallFlagsOpnd();
    const IR::AutoReuseOpnd autoReuseImplicitCallFlags(implicitCallFlags, instr->m_func);
    IR::IntConstOpnd * noImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_None, TyInt8, this->m_func, true);
    const IR::AutoReuseOpnd autoReuseNoImplicitCall(noImplicitCall, instr->m_func);

    // Reset the implicit call flag on every helper call
    Lowerer::InsertMove(implicitCallFlags, noImplicitCall, instr);

    IR::Instr *disableImplicitCallsInstr = nullptr, *enableImplicitCallsInstr = nullptr;
    if(BailOutInfo::WithoutLazyBailOut(bailOutKind) == IR::BailOutOnImplicitCallsPreOp)
    {
        const auto disableImplicitCallAddress =
            m_lowererMD.GenerateMemRef(
                instr->m_func->GetThreadContextInfo()->GetDisableImplicitFlagsAddr(),
                TyInt8,
                instr);

        // Disable implicit calls since they will be called after bailing out
        disableImplicitCallsInstr =
            IR::Instr::New(
                Js::OpCode::Ld_A,
                disableImplicitCallAddress,
                IR::IntConstOpnd::New(DisableImplicitCallFlag, TyInt8, instr->m_func, true),
                instr->m_func);
        instr->InsertBefore(disableImplicitCallsInstr);

        // Create instruction for re-enabling implicit calls; it is inserted after
        // the operation, below.
        enableImplicitCallsInstr =
            IR::Instr::New(
                Js::OpCode::Ld_A,
                disableImplicitCallAddress,
                IR::IntConstOpnd::New(DisableImplicitNoFlag, TyInt8, instr->m_func, true),
                instr->m_func);
#if DBG
        enableImplicitCallsInstr->m_noLazyHelperAssert = true;
#endif
    }

    // Move the operation into a fresh, bailout-free instruction placed before the
    // original; the original will carry only the bailout check.
    IR::Instr * bailOutInstr = instr;
    instr = IR::Instr::New(instr->m_opcode, instr->m_func);
    bailOutInstr->TransferTo(instr);
    bailOutInstr->InsertBefore(instr);

    if(disableImplicitCallsInstr)
    {
        // Re-enable implicit calls
        Assert(enableImplicitCallsInstr);
        bailOutInstr->InsertBefore(enableImplicitCallsInstr);

        // Lower both instructions. Lowering an instruction may free the instruction's original operands, so do that last.
        LowererMD::ChangeToAssign(disableImplicitCallsInstr);
        LowererMD::ChangeToAssign(enableImplicitCallsInstr);
    }

    // The original instruction now checks that the flags are still 'none'.
    bailOutInstr->m_opcode = Js::OpCode::BailOnNotEqual;
    bailOutInstr->SetSrc1(implicitCallFlags);
    bailOutInstr->SetSrc2(noImplicitCall);
    return bailOutInstr;
}
  11681. IR::Instr *
  11682. Lowerer::SplitBailOnImplicitCall(IR::Instr * instr, IR::Instr * helperCall, IR::Instr * insertBeforeInstr)
  11683. {
  11684. IR::Opnd * implicitCallFlags = this->GetImplicitCallFlagsOpnd();
  11685. const IR::AutoReuseOpnd autoReuseImplicitCallFlags(implicitCallFlags, instr->m_func);
  11686. IR::IntConstOpnd * noImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_None, TyInt8, this->m_func, true);
  11687. const IR::AutoReuseOpnd autoReuseNoImplicitCall(noImplicitCall, instr->m_func);
  11688. // Reset the implicit call flag on every helper call
  11689. Lowerer::InsertMove(implicitCallFlags, noImplicitCall, helperCall->m_prev);
  11690. BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
  11691. if (bailOutInfo->bailOutInstr == instr)
  11692. {
  11693. bailOutInfo->bailOutInstr = nullptr;
  11694. }
  11695. IR::Instr * bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOnNotEqual, IR::BailOutOnImplicitCalls, bailOutInfo, bailOutInfo->bailOutFunc);
  11696. bailOutInstr->SetSrc1(implicitCallFlags);
  11697. bailOutInstr->SetSrc2(noImplicitCall);
  11698. insertBeforeInstr->InsertBefore(bailOutInstr);
  11699. instr->ClearBailOutInfo();
  11700. return bailOutInstr;
  11701. }
// Splits the debugger bailout out of a real instr that carries it, into a separate bailout instr.
// Returns the instr that needs to be lowered next, which would normally be the last of the split instrs.
  11704. // IR on input:
  11705. // - Real instr with BailOutInfo but it's opcode is not BailForDebugger.
  11706. // - debugger bailout is not shared. In this case we'll have debugger bailout in instr->GetBailOutKind().
  11707. // - debugger bailout is shared. In this case we'll have debugger bailout in instr->GetAuxBailOutKind().
  11708. // IR on output:
  11709. // - Either of:
  11710. // - real instr, then debuggerBailout -- in case we only had debugger bailout.
  11711. // - real instr with BailOutInfo w/o debugger bailout, then debuggerBailout, then sharedBailout -- in case bailout for debugger was shared w/some other b.o.
// Splits the debugger bailout bits off of 'instr' into a separate BailForDebugger
// instruction inserted after it. Handles both the unshared case (all of the
// instr's bailout bits are debugger bits) and the shared case (debugger bits
// live in the aux bailout kind). Returns the instruction to lower next.
IR::Instr* Lowerer::SplitBailForDebugger(IR::Instr* instr)
{
    Assert(m_func->IsJitInDebugMode() && instr->m_opcode != Js::OpCode::BailForDebugger);

    IR::BailOutKind debuggerBailOutKind; // Used for the split-out instr.
    BailOutInfo* bailOutInfo = instr->GetBailOutInfo();
    IR::Instr* sharedBailoutInstr = nullptr;

    if (instr->GetBailOutKind() & IR::BailOutForDebuggerBits)
    {
        // Debugger bailout is not shared.
        Assert(!instr->HasAuxBailOut());
        AssertMsg(!(instr->GetBailOutKind() & ~IR::BailOutForDebuggerBits), "There should only be debugger bailout bits in the instr.");
        debuggerBailOutKind = instr->GetBailOutKind() & IR::BailOutForDebuggerBits;

        // There is no non-debugger bailout in the instr. We still can't deallocate
        // the bailout info, as the split-out instr uses it, but the bailout must
        // be marked as not yet generated.
        if (bailOutInfo->bailOutInstr == instr)
        {
            // null will be picked up by the following BailOutInstr::New, which will
            // change it to the new bailout instr.
            bailOutInfo->bailOutInstr = nullptr;
        }

        // Remove bailout info from the original instr, which from now on becomes a
        // regular instr, without deallocating the bailout info.
        instr->ClearBailOutInfo();
    }
    else if (instr->IsBranchInstr() && instr->HasBailOutInfo() && instr->HasAuxBailOut())
    {
        // Branches with shared bailout are lowered in LowerCondBranchCheckBailOut;
        // it can't be done here because we need to use BranchBailOutRecord but don't
        // know which BrTrue/BrFalse to use for it.
        debuggerBailOutKind = IR::BailOutInvalid;
    }
    else if (instr->HasAuxBailOut() && instr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits)
    {
        // Debugger bailout is shared with some other bailout kind.
        AssertMsg(!(instr->GetBailOutKind() & IR::BailOutForDebuggerBits), "There should be no debugger bits in main bailout kind.");
        debuggerBailOutKind = instr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits;

        // This will insert a SharedBail instr after the current instr and set
        // bailOutInfo->bailOutInstr to the shared one.
        sharedBailoutInstr = instr->ShareBailOut();

        // As we extracted the aux bailout, invalidate all traces of it in the instr.
        instr->ResetAuxBailOut();
    }
    else
    {
        AssertMsg(FALSE, "shouldn't get here");
        debuggerBailOutKind = IR::BailOutInvalid;
    }

    if (debuggerBailOutKind != IR::BailOutInvalid)
    {
        IR::BailOutInstr* debuggerBailoutInstr = IR::BailOutInstr::New(
            Js::OpCode::BailForDebugger, debuggerBailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
        instr->InsertAfter(debuggerBailoutInstr);

        // Since lowering proceeds backwards, the extracted debugger bailout (or the
        // shared bailout, when present) must be processed first.
        instr = sharedBailoutInstr ? sharedBailoutInstr : debuggerBailoutInstr;
    }

    return instr;
}
  11765. IR::Instr *
  11766. Lowerer::SplitBailOnResultCondition(IR::Instr *const instr) const
  11767. {
  11768. Assert(instr);
  11769. Assert(!instr->IsLowered());
  11770. Assert(
  11771. instr->GetBailOutKind() & IR::BailOutOnResultConditions ||
  11772. instr->GetBailOutKind() == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  11773. const auto nonBailOutInstr = IR::Instr::New(instr->m_opcode, instr->m_func);
  11774. instr->TransferTo(nonBailOutInstr);
  11775. instr->InsertBefore(nonBailOutInstr);
  11776. return nonBailOutInstr;
  11777. }
// Lowers a result-condition bailout (overflow/negative-zero/bound-check) by
// bracketing 'instr' with two labels and emitting the bailout helper call.
// Out params: *bailOutLabel is the helper-path label placed before the bailout
// call; *skipBailOutLabel is the normal-path label placed after it.
void
Lowerer::LowerBailOnResultCondition(
    IR::Instr *const instr,
    IR::LabelInstr * *const bailOutLabel,
    IR::LabelInstr * *const skipBailOutLabel)
{
    Assert(instr);
    Assert(
        instr->GetBailOutKind() & IR::BailOutOnResultConditions ||
        instr->GetBailOutKind() == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(skipBailOutLabel);

    // Label to jump to (or fall through to) when bailing out. The actual bailout label
    // (bailOutInfo->bailOutInstr->AsLabelInstr()) may be shared, and code may be added to restore values before the jump to the
    // actual bailout label in the cloned bailout case, so always create a new bailout label for this particular path.
    *bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true /* isOpHelper */);
    instr->InsertBefore(*bailOutLabel);

    // Label to jump to when not bailing out
    *skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    instr->InsertAfter(*skipBailOutLabel);

    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    GenerateBailOut(instr);
}
  11802. void
  11803. Lowerer::PreserveSourcesForBailOnResultCondition(IR::Instr *const instr, IR::LabelInstr *const skipBailOutLabel) const
  11804. {
  11805. Assert(instr);
  11806. Assert(!instr->IsLowered());
  11807. Assert(!instr->HasBailOutInfo());
  11808. // Since this instruction may bail out, writing to the destination cannot overwrite one of the sources, or we may lose one
  11809. // of the sources needed to redo the equivalent byte code instruction. Determine if the sources need to be preserved.
  11810. const auto dst = instr->GetDst();
  11811. Assert(dst);
  11812. const auto dstStackSym = dst->GetStackSym();
  11813. if(!dstStackSym || !dstStackSym->HasByteCodeRegSlot())
  11814. {
  11815. // We only need to ensure that a byte-code source is not being overwritten
  11816. return;
  11817. }
  11818. switch(instr->m_opcode)
  11819. {
  11820. // The sources of these instructions don't need restoring, or will be restored in the bailout path
  11821. case Js::OpCode::Neg_I4:
  11822. // In case of overflow or zero, the result is the same as the operand
  11823. case Js::OpCode::Add_I4:
  11824. case Js::OpCode::Sub_I4:
  11825. // In case of overflow, there is always enough information to restore the operands
  11826. return;
  11827. }
  11828. Assert(instr->GetSrc1());
  11829. if(!dst->IsEqual(instr->GetSrc1()) && !(instr->GetSrc2() && dst->IsEqual(instr->GetSrc2())))
  11830. {
  11831. // The destination is different from the sources
  11832. return;
  11833. }
  11834. // The destination is the same as one of the sources and the original sources cannot be restored after the instruction, so
  11835. // use a temporary destination for the result and move it back to the original destination after deciding not to bail out
  11836. LowererMD::ChangeToAssign(instr->SinkDst(Js::OpCode::Ld_I4, RegNOREG, skipBailOutLabel));
  11837. }
  11838. void
  11839. Lowerer::LowerInstrWithBailOnResultCondition(
  11840. IR::Instr *const instr,
  11841. const IR::BailOutKind bailOutKind,
  11842. IR::LabelInstr *const bailOutLabel,
  11843. IR::LabelInstr *const skipBailOutLabel) const
  11844. {
  11845. Assert(instr);
  11846. Assert(!instr->IsLowered());
  11847. Assert(!instr->HasBailOutInfo());
  11848. Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  11849. Assert(bailOutLabel);
  11850. Assert(instr->m_next == bailOutLabel);
  11851. Assert(skipBailOutLabel);
  11852. // Preserve sources that are overwritten by the instruction if needed
  11853. PreserveSourcesForBailOnResultCondition(instr, skipBailOutLabel);
  11854. // Lower the instruction
  11855. switch(instr->m_opcode)
  11856. {
  11857. case Js::OpCode::Neg_I4:
  11858. LowererMD::LowerInt4NegWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  11859. break;
  11860. case Js::OpCode::Add_I4:
  11861. LowererMD::LowerInt4AddWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  11862. break;
  11863. case Js::OpCode::Sub_I4:
  11864. LowererMD::LowerInt4SubWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  11865. break;
  11866. case Js::OpCode::Mul_I4:
  11867. LowererMD::LowerInt4MulWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  11868. break;
  11869. case Js::OpCode::Rem_I4:
  11870. m_lowererMD.LowerInt4RemWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  11871. break;
  11872. default:
  11873. Assert(false); // not implemented
  11874. __assume(false);
  11875. }
  11876. }
  11877. void
  11878. Lowerer::GenerateObjectTestAndTypeLoad(IR::Instr *instrLdSt, IR::RegOpnd *opndBase, IR::RegOpnd *opndType, IR::LabelInstr *labelHelper)
  11879. {
  11880. IR::IndirOpnd *opndIndir;
  11881. if (!opndBase->IsNotTaggedValue())
  11882. {
  11883. m_lowererMD.GenerateObjectTest(opndBase, instrLdSt, labelHelper);
  11884. }
  11885. opndIndir = IR::IndirOpnd::New(opndBase, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  11886. InsertMove(opndType, opndIndir, instrLdSt);
  11887. }
// Lowers a bailout-carrying instruction into the actual bailout sequence.
// Three cases:
//   1. cloned instr            -> just jump to the already-generated bailout label
//   2. shared bailout info     -> patch the shared BailOutRecord's fields, then jump
//                                 to the shared bailout label
//   3. first/primary bailout   -> allocate the BailOutRecord, turn 'instr' into the
//                                 CALL to the save-registers-and-bail-out helper, and
//                                 jump to the epilog
// branchInstr (optional) marks a branch bailout (BranchBailOutRecord is used).
// Returns the label that begins the generated bailout path.
IR::LabelInstr *
Lowerer::GenerateBailOut(IR::Instr * instr, IR::BranchInstr * branchInstr, IR::LabelInstr *bailOutLabel, IR::LabelInstr * collectRuntimeStatsLabel)
{
    BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
    IR::Instr * bailOutInstr = bailOutInfo->bailOutInstr;
    if (instr->IsCloned())
    {
        Assert(bailOutInstr != instr);
        // Jump to the cloned bail out label
        IR::LabelInstr * bailOutLabelInstr = bailOutInstr->AsLabelInstr();
        IR::BranchInstr * bailOutBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, bailOutLabelInstr, this->m_func);
        instr->InsertBefore(bailOutBranch);
        instr->Remove();
        return bailOutLabel;
    }

    // Add helper label to trigger layout.
    if (!collectRuntimeStatsLabel)
    {
        collectRuntimeStatsLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    }
    Assert(!collectRuntimeStatsLabel->IsLinked());
    instr->InsertBefore(collectRuntimeStatsLabel);

    if (bailOutInstr != instr)
    {
        // this bailOutInfo is shared, just jump to the bailout target.
        // Before jumping, store this instruction's bailout kind (and, for type
        // checks, the polymorphic cache index) into the shared record, because
        // the record serves multiple bailout points.
        IR::Opnd * indexOpndForBailOutKind = nullptr;
        int bailOutRecordOffset = 0;
        if (this->m_func->IsOOPJIT())
        {
            // OOP JIT: the record lives in NativeCodeData, addressed relative to the data sym
            bailOutRecordOffset = NativeCodeData::GetDataTotalOffset(bailOutInfo->bailOutRecord);
            indexOpndForBailOutKind = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), (int)(bailOutRecordOffset + BailOutRecord::GetOffsetOfBailOutKind()), TyUint32,
#if DBG
                NativeCodeData::GetDataDescription(bailOutInfo->bailOutRecord, this->m_func->m_alloc),
#endif
                m_func, true);
            this->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
        else
        {
            // In-proc JIT: the record can be addressed directly
            indexOpndForBailOutKind =
                IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + BailOutRecord::GetOffsetOfBailOutKind(), TyUint32, this->m_func, IR::AddrOpndKindDynamicBailOutKindRef);
        }
        InsertMove(
            indexOpndForBailOutKind, IR::IntConstOpnd::New(instr->GetBailOutKind(), indexOpndForBailOutKind->GetType(), this->m_func), instr, false);

        // No point in doing this for BailOutFailedEquivalentTypeCheck or BailOutFailedEquivalentFixedFieldTypeCheck,
        // because the respective inline cache is already polymorphic, anyway.
        if (instr->GetBailOutKind() == IR::BailOutFailedTypeCheck || instr->GetBailOutKind() == IR::BailOutFailedFixedFieldTypeCheck)
        {
            // We have a type check bailout that shares a bailout record with other instructions.
            // Generate code to write the cache index into the bailout record before we jump to the call site.
            Assert(bailOutInfo->polymorphicCacheIndex != (uint)-1);
            Assert(bailOutInfo->bailOutRecord);
            IR::Opnd * indexOpnd = nullptr;
            if (this->m_func->IsOOPJIT())
            {
                indexOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), (int)(bailOutRecordOffset + BailOutRecord::GetOffsetOfPolymorphicCacheIndex()), TyUint32, m_func);
            }
            else
            {
                indexOpnd = IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + BailOutRecord::GetOffsetOfPolymorphicCacheIndex(), TyUint32, this->m_func);
            }
            InsertMove(
                indexOpnd, IR::IntConstOpnd::New(bailOutInfo->polymorphicCacheIndex, TyUint32, this->m_func), instr, false);
        }

        if (bailOutInfo->bailOutRecord->IsShared())
        {
            // A shared record also needs this bailout point's function body stored
            IR::Opnd *functionBodyOpnd;
            if (this->m_func->IsOOPJIT())
            {
                functionBodyOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), (int)(bailOutRecordOffset + SharedBailOutRecord::GetOffsetOfFunctionBody()), TyMachPtr, m_func);
            }
            else
            {
                functionBodyOpnd = IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + SharedBailOutRecord::GetOffsetOfFunctionBody(), TyMachPtr, this->m_func);
            }
            InsertMove(
                functionBodyOpnd, CreateFunctionBodyOpnd(instr->m_func), instr, false);
        }

        // GenerateBailOut should have replaced this as a label as we should have already lowered
        // the main bailOutInstr.
        IR::LabelInstr * bailOutTargetLabel = bailOutInstr->AsLabelInstr();
#if DBG
        if (bailOutTargetLabel->m_noHelperAssert)
        {
            collectRuntimeStatsLabel->m_noHelperAssert = true;
        }
#endif
        Assert(bailOutLabel == nullptr || bailOutLabel == bailOutTargetLabel);

        IR::BranchInstr * newBranchInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, bailOutTargetLabel, this->m_func);
        instr->InsertAfter(newBranchInstr);
        instr->Remove();
        return collectRuntimeStatsLabel ? collectRuntimeStatsLabel : bailOutLabel;
    }

    // The bailout hasn't been generated yet.
    Assert(!bailOutInstr->IsLabelInstr());

    // Capture the condition for this bailout
    if (bailOutLabel == nullptr)
    {
        // Create a label and place it in the bailout info so that shared bailout point can jump to this one
        if (instr->m_prev->IsLabelInstr())
        {
            bailOutLabel = instr->m_prev->AsLabelInstr();
            Assert(bailOutLabel->isOpHelper);
        }
        else
        {
            bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            instr->InsertBefore(bailOutLabel);
        }
    }
    else
    {
        instr->InsertBefore(bailOutLabel);
    }

#if DBG
    bailOutLabel->m_noLazyHelperAssert = true;
#endif

#if DBG
    // These bailouts are (or can be) reached unconditionally, so suppress the
    // "helper label must be reached from a helper path" assert for them.
    const IR::BailOutKind bailOutKind = bailOutInstr->GetBailOutKind();
    if (bailOutInstr->m_opcode == Js::OpCode::BailOnNoSimdTypeSpec ||
        bailOutInstr->m_opcode == Js::OpCode::BailOnNoProfile ||
        bailOutInstr->m_opcode == Js::OpCode::BailOnException ||
        bailOutInstr->m_opcode == Js::OpCode::Yield ||
        bailOutKind & (IR::BailOutConventionalTypedArrayAccessOnly |
                       IR::BailOutConventionalNativeArrayAccessOnly |
                       IR::BailOutOnArrayAccessHelperCall))
    {
        bailOutLabel->m_noHelperAssert = true;
    }
#endif

    // Publish the label so later (shared) bailout points can jump to it
    bailOutInfo->bailOutInstr = bailOutLabel;
    bailOutLabel->m_hasNonBranchRef = true;

    // Create the bail out record
    Assert(bailOutInfo->bailOutRecord == nullptr);
    BailOutRecord * bailOutRecord;
    IR::JnHelperMethod helperMethod;
    if (branchInstr != nullptr)
    {
        // Branch bailout: record both byte-code continuations (taken/not-taken)
        Assert(branchInstr->GetSrc2() == nullptr);
        Assert(branchInstr->GetDst() == nullptr);

        IR::LabelInstr * targetLabel = branchInstr->GetTarget();
        Assert(targetLabel->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset);

        uint32 trueOffset;
        uint32 falseOffset;
        IR::Opnd *condOpnd = branchInstr->GetSrc1();
        bool invertTarget = (branchInstr->m_opcode == Js::OpCode::BrFalse_A);

        if (bailOutInfo->isInvertedBranch)
        {
            // Flip the condition
            IR::Instr *subInstr = IR::Instr::New(Js::OpCode::Sub_I4, condOpnd, condOpnd, IR::IntConstOpnd::New(1, TyMachReg, instr->m_func), instr->m_func);
            instr->InsertBefore(subInstr);
            this->m_lowererMD.EmitInt4Instr(subInstr);

            // We should really do a DEC/NEG for a full 2's complement flip from 0/1 to 1/0,
            // but DEC is sufficient to flip from 0/1 to -1/0, which is false/true to true/false...
            // instr->InsertBefore(IR::Instr::New(Js::OpCode::Neg_I4, condOpnd, condOpnd, instr->m_func));
            invertTarget = invertTarget ? false : true;
        }

        if (!invertTarget)
        {
            trueOffset = targetLabel->GetByteCodeOffset();
            falseOffset = bailOutInfo->bailOutOffset;
        }
        else
        {
            falseOffset = targetLabel->GetByteCodeOffset();
            trueOffset = bailOutInfo->bailOutOffset;
        }

        bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
            BranchBailOutRecord, trueOffset, falseOffset, branchInstr->GetByteCodeReg(), instr->GetBailOutKind(), bailOutInfo->bailOutFunc);

        helperMethod = IR::HelperSaveAllRegistersAndBranchBailOut;
#ifdef _M_IX86
        if(!AutoSystemInfo::Data.SSE2Available())
        {
            helperMethod = IR::HelperSaveAllRegistersNoSse2AndBranchBailOut;
        }
#endif

        // Save the condition. The register allocator will generate arguments.
        bailOutInfo->branchConditionOpnd = branchInstr->GetSrc1()->Copy(branchInstr->m_func);
    }
    else
    {
        if (bailOutInstr->GetBailOutKind() == IR::BailOutShared)
        {
            bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
                SharedBailOutRecord, bailOutInfo->bailOutOffset, bailOutInfo->polymorphicCacheIndex, instr->GetBailOutKind(), bailOutInfo->bailOutFunc);
            if (bailOutInfo->isLoopTopBailOutInfo)
            {
                bailOutRecord->SetType(BailOutRecord::BailoutRecordType::SharedForLoopTop);
            }
        }
        else
        {
            bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
                BailOutRecord, bailOutInfo->bailOutOffset, bailOutInfo->polymorphicCacheIndex, instr->GetBailOutKind(), bailOutInfo->bailOutFunc);
        }

        helperMethod = IR::HelperSaveAllRegistersAndBailOut;
#ifdef _M_IX86
        if(!AutoSystemInfo::Data.SSE2Available())
        {
            helperMethod = IR::HelperSaveAllRegistersNoSse2AndBailOut;
        }
#endif
    }

    // Save the bailout record. The register allocator will generate arguments.
    bailOutInfo->bailOutRecord = bailOutRecord;
#if ENABLE_DEBUG_CONFIG_OPTIONS
    bailOutRecord->bailOutOpcode = bailOutInfo->bailOutOpcode;
#endif

    if (instr->m_opcode == Js::OpCode::BailOnNotStackArgs && instr->GetSrc1())
    {
        // src1 on BailOnNotStackArgs is helping CSE
        instr->FreeSrc1();
    }
    if (instr->GetSrc2() != nullptr)
    {
        // Ideally we should never be in this situation but incase we reached a
        // condition where we didn't free src2, free it here.
        instr->FreeSrc2();
    }

    // We do not need lazybailout bit on SaveAllRegistersAndBailOut
    if (instr->HasLazyBailOut())
    {
        instr->ClearLazyBailOut();
        Assert(instr->HasBailOutInfo());
    }

    // Call the bail out wrapper
    instr->m_opcode = Js::OpCode::Call;
    if(instr->GetDst())
    {
        // To facilitate register allocation, don't assign a destination. The result will anyway go into the return register,
        // but the register allocator does not need to kill that register for the call.
        instr->FreeDst();
    }
    instr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, this->m_func));
    m_lowererMD.LowerCall(instr, 0);

    if (bailOutInstr->GetBailOutKind() != IR::BailOutForGeneratorYield)
    {
        // Defer introducing the JMP to epilog until LowerPrologEpilog phase for Yield bailouts so
        // that Yield does not appear to have flow out of its containing block for the RegAlloc phase.
        // Yield is an unconditional bailout but we want to simulate the flow as if the Yield were
        // just like a call.
        GenerateJumpToEpilogForBailOut(bailOutInfo, instr);
    }

    return collectRuntimeStatsLabel ? collectRuntimeStatsLabel : bailOutLabel;
}
  12133. void
  12134. Lowerer::GenerateJumpToEpilogForBailOut(BailOutInfo * bailOutInfo, IR::Instr *instr)
  12135. {
  12136. IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->m_prev;
  12137. // JMP to the epilog
  12138. IR::LabelInstr * exitTargetInstr;
  12139. if (exitPrevInstr->IsLabelInstr())
  12140. {
  12141. exitTargetInstr = exitPrevInstr->AsLabelInstr();
  12142. }
  12143. else
  12144. {
  12145. exitTargetInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  12146. exitPrevInstr->InsertAfter(exitTargetInstr);
  12147. }
  12148. exitTargetInstr = m_lowererMD.GetBailOutStackRestoreLabel(bailOutInfo, exitTargetInstr);
  12149. IR::Instr * instrAfter = instr->m_next;
  12150. IR::BranchInstr * exitInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, exitTargetInstr, this->m_func);
  12151. instrAfter->InsertBefore(exitInstr);
  12152. }
  12153. ///----------------------------------------------------------------------------
  12154. ///
  12155. /// Lowerer::GenerateFastCondBranch
  12156. ///
  12157. ///----------------------------------------------------------------------------
  12158. bool
  12159. Lowerer::GenerateFastCondBranch(IR::BranchInstr * instrBranch, bool *pIsHelper)
  12160. {
  12161. // The idea is to do an inline compare if we can prove that both sources
  12162. // are tagged ints
  12163. //
  12164. // Given:
  12165. //
  12166. // Brxx_A $L, src1, src2
  12167. //
  12168. // Generate:
  12169. //
  12170. // (If not Int31's, goto $helper)
  12171. // Jxx $L, src1, src2
  12172. // JMP $fallthru
  12173. // $helper:
  12174. // (caller will generate normal helper call sequence)
  12175. // $fallthru:
  12176. IR::LabelInstr * labelHelper = nullptr;
  12177. IR::LabelInstr * labelFallThru;
  12178. IR::BranchInstr * instr;
  12179. IR::Opnd * opndSrc1;
  12180. IR::Opnd * opndSrc2;
  12181. opndSrc1 = instrBranch->GetSrc1();
  12182. opndSrc2 = instrBranch->GetSrc2();
  12183. AssertMsg(opndSrc1 && opndSrc2, "BrC expects 2 src operands");
  12184. // Not tagged ints?
  12185. if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
  12186. {
  12187. return true;
  12188. }
  12189. if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
  12190. {
  12191. return true;
  12192. }
  12193. // Tagged ints?
  12194. bool isTaggedInts = false;
  12195. if (opndSrc1->IsTaggedInt())
  12196. {
  12197. if (opndSrc2->IsTaggedInt())
  12198. {
  12199. isTaggedInts = true;
  12200. }
  12201. }
  12202. if (!isTaggedInts)
  12203. {
  12204. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  12205. this->m_lowererMD.GenerateSmIntPairTest(instrBranch, opndSrc1, opndSrc2, labelHelper);
  12206. }
  12207. // Jxx $L, src1, src2
  12208. opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
  12209. opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
  12210. instr = IR::BranchInstr::New(instrBranch->m_opcode, instrBranch->GetTarget(), opndSrc1, opndSrc2, this->m_func);
  12211. instrBranch->InsertBefore(instr);
  12212. this->m_lowererMD.LowerCondBranch(instr);
  12213. if (isTaggedInts)
  12214. {
  12215. instrBranch->Remove();
  12216. // Skip lowering call to helper
  12217. return false;
  12218. }
  12219. // JMP $fallthru
  12220. IR::Instr *instrNext = instrBranch->GetNextRealInstrOrLabel();
  12221. if (instrNext->IsLabelInstr())
  12222. {
  12223. labelFallThru = instrNext->AsLabelInstr();
  12224. }
  12225. else
  12226. {
  12227. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /**pIsHelper*/FALSE);
  12228. instrBranch->InsertAfter(labelFallThru);
  12229. }
  12230. instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallThru, this->m_func);
  12231. instrBranch->InsertBefore(instr);
  12232. // $helper:
  12233. // (caller will generate normal helper call sequence)
  12234. // $fallthru:
  12235. AssertMsg(labelHelper, "Should not be NULL");
  12236. instrBranch->InsertBefore(labelHelper);
  12237. *pIsHelper = true;
  12238. return true;
  12239. }
// Lowers InlineeStart: turns the linked ArgOut chain into plain MOVs into the
// inlinee's stack slots and materializes the meta-args (function object, argc,
// etc.) of the inlinee frame. Returns the instruction preceding the (possibly
// removed) InlineeStart, so the caller's backward iteration can continue there.
IR::Instr *
Lowerer::LowerInlineeStart(IR::Instr * inlineeStartInstr)
{
    IR::Opnd *linkOpnd = inlineeStartInstr->GetSrc2();
    if (!linkOpnd)
    {
        // No argument chain: only legal when the inline-args optimization kept
        // this InlineeStart around without operands.
        Assert(inlineeStartInstr->m_func->m_hasInlineArgsOpt);
        return inlineeStartInstr->m_prev;
    }

    AssertMsg(inlineeStartInstr->m_func->firstActualStackOffset != -1, "This should have been already done in backward pass");

    IR::Instr *startCall;

    // Free the argOut links and lower them to MOVs
    inlineeStartInstr->IterateArgInstrs([&](IR::Instr* argInstr){
        Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A || argInstr->m_opcode == Js::OpCode::ArgOut_A_Inline);
        startCall = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
        argInstr->FreeSrc2();
#pragma prefast(suppress:6235, "Non-Zero Constant in Condition")
        if (!PHASE_ON(Js::EliminateArgoutForInlineePhase, this->m_func) || inlineeStartInstr->m_func->GetJITFunctionBody()->HasOrParentHasArguments())
        {
            m_lowererMD.ChangeToAssign(argInstr);
        }
        else
        {
            // Argout elimination: keep the instruction as a marker opcode instead
            argInstr->m_opcode = Js::OpCode::ArgOut_A_InlineBuiltIn;
        }
        return false;
    });

    // Lay out the meta-args of the inlinee frame, in order, before the InlineeStart
    IR::Instr *argInsertInstr = inlineeStartInstr;
    uint i = 0;
    inlineeStartInstr->IterateMetaArgs( [&] (IR::Instr* metaArg)
    {
        if(i == 0)
        {
            // Null out the next (nested) inlinee frame's argc slot first
            Lowerer::InsertMove(metaArg->m_func->GetNextInlineeFrameArgCountSlotOpnd(),
                IR::AddrOpnd::NewNull(metaArg->m_func),
                argInsertInstr);
        }

        if (i == Js::Constants::InlineeMetaArgIndex_FunctionObject)
        {
            // The function-object meta-arg gets InlineeStart's src1
            metaArg->SetSrc1(inlineeStartInstr->GetSrc1());
        }

        metaArg->Unlink();
        argInsertInstr->InsertBefore(metaArg);
        IR::Instr* prev = metaArg->m_prev;
        m_lowererMD.ChangeToAssign(metaArg);

        if (i == Js::Constants::InlineeMetaArgIndex_Argc)
        {
            // ChangeToAssign may have expanded the move (e.g. into an LDIMM on ARM);
            // find the instruction that actually carries the constant so it can be
            // flagged as the inlinee entry point.
#if defined(_M_IX86) || defined(_M_X64)
            Assert(metaArg == prev->m_next);
#else //defined(_M_ARM)
            Assert(prev->m_next->m_opcode == Js::OpCode::LDIMM);
#endif
            metaArg = prev->m_next;
            Assert(metaArg->GetSrc1()->AsIntConstOpnd()->m_dontEncode == true);
            metaArg->isInlineeEntryInstr = true;
            LowererMD::Legalize(metaArg);
        }
        argInsertInstr = metaArg;
        i++;
        return false;
    });

    IR::Instr* prev = inlineeStartInstr->m_prev;
    if (inlineeStartInstr->m_func->m_hasInlineArgsOpt)
    {
        // Keep the InlineeStart marker (inline-args optimization relies on it),
        // but drop all of its operands.
        inlineeStartInstr->FreeSrc1();
        inlineeStartInstr->FreeSrc2();
        inlineeStartInstr->FreeDst();
    }
    else
    {
        inlineeStartInstr->Remove();
    }
    return prev;
}
  12314. void
  12315. Lowerer::LowerInlineeEnd(IR::Instr *instr)
  12316. {
  12317. Assert(instr->m_func->IsInlinee());
  12318. Assert(m_func->IsTopFunc());
  12319. // No need to emit code if the function wasn't marked as having implicit calls or bailout. Dead-Store should have removed inline overhead.
  12320. if (instr->m_func->GetHasImplicitCalls() || PHASE_OFF(Js::DeadStorePhase, this->m_func))
  12321. {
  12322. Lowerer::InsertMove(instr->m_func->GetInlineeArgCountSlotOpnd(),
  12323. IR::IntConstOpnd::New(0, TyMachReg, instr->m_func),
  12324. instr);
  12325. }
  12326. // Keep InlineeEnd around as it is used by register allocator, if we have optimized the arguments stack
  12327. if (instr->m_func->m_hasInlineArgsOpt)
  12328. {
  12329. instr->FreeSrc1();
  12330. }
  12331. else
  12332. {
  12333. instr->Remove();
  12334. }
  12335. }
  12336. IR::Instr *
  12337. Lowerer::LoadFloatFromNonReg(IR::Opnd * opndSrc, IR::Opnd * opndDst, IR::Instr * instrInsert)
  12338. {
  12339. double value;
  12340. if (opndSrc->IsAddrOpnd())
  12341. {
  12342. Js::Var var = opndSrc->AsAddrOpnd()->m_address;
  12343. if (Js::TaggedInt::Is(var))
  12344. {
  12345. value = Js::TaggedInt::ToDouble(var);
  12346. }
  12347. else
  12348. {
  12349. value = Js::JavascriptNumber::GetValue(var);
  12350. }
  12351. }
  12352. else if (opndSrc->IsIntConstOpnd())
  12353. {
  12354. if (opndSrc->IsUInt32())
  12355. {
  12356. value = (double)(uint32)opndSrc->AsIntConstOpnd()->GetValue();
  12357. }
  12358. else
  12359. {
  12360. value = (double)opndSrc->AsIntConstOpnd()->GetValue();
  12361. }
  12362. }
  12363. else if (opndSrc->IsFloatConstOpnd())
  12364. {
  12365. value = (double)opndSrc->AsFloatConstOpnd()->m_value;
  12366. }
  12367. else if (opndSrc->IsFloat32ConstOpnd())
  12368. {
  12369. float floatValue = opndSrc->AsFloat32ConstOpnd()->m_value;
  12370. return LowererMD::LoadFloatValue(opndDst, floatValue, instrInsert);
  12371. }
  12372. else
  12373. {
  12374. AssertMsg(0, "Unexpected opnd type");
  12375. value = 0;
  12376. }
  12377. return LowererMD::LoadFloatValue(opndDst, value, instrInsert);
  12378. }
  12379. void
  12380. Lowerer::LoadInt32FromUntaggedVar(IR::Instr *const instrLoad)
  12381. {
  12382. Assert(instrLoad);
  12383. Assert(instrLoad->GetDst());
  12384. Assert(instrLoad->GetDst()->IsRegOpnd());
  12385. Assert(instrLoad->GetDst()->IsInt32());
  12386. Assert(instrLoad->GetSrc1());
  12387. Assert(instrLoad->GetSrc1()->IsRegOpnd());
  12388. Assert(instrLoad->GetSrc1()->IsVar());
  12389. Assert(!instrLoad->GetSrc2());
  12390. // push src
  12391. // int32Value = call JavascriptNumber::GetNonzeroInt32Value_NoChecks
  12392. // test int32Value, int32Value
  12393. // jne $done
  12394. // (fall through to 'instrLoad'; caller will generate code here)
  12395. // $done:
  12396. // (rest of program)
  12397. Func *const func = instrLoad->m_func;
  12398. IR::LabelInstr *const doneLabel = instrLoad->GetOrCreateContinueLabel();
  12399. // push src
  12400. // int32Value = call JavascriptNumber::GetNonzeroInt32Value_NoChecks
  12401. StackSym *const int32ValueSym = instrLoad->GetDst()->AsRegOpnd()->m_sym;
  12402. IR::Instr *const instr =
  12403. IR::Instr::New(
  12404. Js::OpCode::Call,
  12405. IR::RegOpnd::New(int32ValueSym, TyInt32, func),
  12406. instrLoad->GetSrc1()->AsRegOpnd(),
  12407. func);
  12408. instrLoad->InsertBefore(instr);
  12409. LowerUnaryHelper(instr, IR::HelperGetNonzeroInt32Value_NoTaggedIntCheck);
  12410. // test int32Value, int32Value
  12411. // jne $done
  12412. InsertCompareBranch(
  12413. IR::RegOpnd::New(int32ValueSym, TyInt32, func),
  12414. IR::IntConstOpnd::New(0, TyInt32, func, true),
  12415. Js::OpCode::BrNeq_A,
  12416. doneLabel,
  12417. instrLoad);
  12418. }
  12419. bool
  12420. Lowerer::GetValueFromIndirOpnd(IR::IndirOpnd *indirOpnd, IR::Opnd **pValueOpnd, IntConstType *pValue)
  12421. {
  12422. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  12423. IR::Opnd* valueOpnd = nullptr;
  12424. IntConstType value = 0;
  12425. if (!indexOpnd)
  12426. {
  12427. value = (IntConstType)indirOpnd->GetOffset();
  12428. if (value < 0)
  12429. {
  12430. // Can't do fast path for negative index
  12431. return false;
  12432. }
  12433. valueOpnd = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
  12434. }
  12435. else if (indexOpnd->m_sym->IsIntConst())
  12436. {
  12437. value = indexOpnd->AsRegOpnd()->m_sym->GetIntConstValue();
  12438. if (value < 0)
  12439. {
  12440. // Can't do fast path for negative index
  12441. return false;
  12442. }
  12443. valueOpnd = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
  12444. }
  12445. *pValueOpnd = valueOpnd;
  12446. *pValue = value;
  12447. return true;
  12448. }
  12449. void
  12450. Lowerer::GenerateFastBrOnObject(IR::Instr *instr)
  12451. {
  12452. Assert(instr->m_opcode == Js::OpCode::BrOnObject_A);
  12453. IR::RegOpnd *object = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  12454. IR::LabelInstr *done = instr->GetOrCreateContinueLabel();
  12455. IR::LabelInstr *target = instr->AsBranchInstr()->GetTarget();
  12456. IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
  12457. IR::IntConstOpnd *typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, instr->m_func);
  12458. if (!object)
  12459. {
  12460. object = IR::RegOpnd::New(TyVar, m_func);
  12461. Lowerer::InsertMove(object, instr->GetSrc1(), instr);
  12462. }
  12463. // TEST object, 1
  12464. // JNE $done
  12465. // MOV typeRegOpnd, [object + offset(Type)]
  12466. // CMP [typeRegOpnd + offset(TypeId)], TypeIds_LastJavascriptPrimitiveType
  12467. // JGT $target
  12468. // $done:
  12469. m_lowererMD.GenerateObjectTest(object, instr, done);
  12470. InsertMove(typeRegOpnd,
  12471. IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
  12472. instr);
  12473. InsertCompareBranch(
  12474. IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
  12475. typeIdOpnd, Js::OpCode::BrGt_A, target, instr);
  12476. instr->Remove();
  12477. }
  12478. void Lowerer::GenerateObjectHeaderInliningTest(IR::RegOpnd *baseOpnd, IR::LabelInstr * target,IR::Instr *insertBeforeInstr)
  12479. {
  12480. Assert(baseOpnd);
  12481. Assert(target);
  12482. AssertMsg(
  12483. baseOpnd->GetValueType().IsLikelyObject() &&
  12484. baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray,
  12485. "Why are we here, when the object is already known not to have an ObjArray");
  12486. Assert(insertBeforeInstr);
  12487. Func *const func = insertBeforeInstr->m_func;
  12488. // mov type, [base + offsetOf(type)]
  12489. IR::RegOpnd *const opnd = IR::RegOpnd::New(TyMachPtr, func);
  12490. InsertMove(
  12491. opnd,
  12492. IR::IndirOpnd::New(
  12493. baseOpnd,
  12494. Js::DynamicObject::GetOffsetOfType(),
  12495. opnd->GetType(),
  12496. func),
  12497. insertBeforeInstr);
  12498. // mov typeHandler, [type + offsetOf(typeHandler)]
  12499. InsertMove(
  12500. opnd,
  12501. IR::IndirOpnd::New(
  12502. opnd,
  12503. Js::DynamicType::GetOffsetOfTypeHandler(),
  12504. opnd->GetType(),
  12505. func),
  12506. insertBeforeInstr);
  12507. IR::IndirOpnd * offsetOfInlineSlotOpnd = IR::IndirOpnd::New(opnd,Js::DynamicTypeHandler::GetOffsetOfOffsetOfInlineSlots(), TyInt16, func);
  12508. IR::IntConstOpnd * objHeaderInlinedSlotOffset = IR::IntConstOpnd::New(Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots(), TyInt16, func);
  12509. // CMP [typeHandler + offsetOf(offsetOfInlineSlots)], objHeaderInlinedSlotOffset
  12510. InsertCompareBranch(
  12511. offsetOfInlineSlotOpnd,
  12512. objHeaderInlinedSlotOffset,
  12513. Js::OpCode::BrEq_A,
  12514. target,
  12515. insertBeforeInstr);
  12516. }
  12517. void Lowerer::GenerateObjectTypeTest(IR::RegOpnd *srcReg, IR::Instr *instrInsert, IR::LabelInstr *labelHelper)
  12518. {
  12519. Assert(srcReg);
  12520. if (!srcReg->IsNotTaggedValue())
  12521. {
  12522. m_lowererMD.GenerateObjectTest(srcReg, instrInsert, labelHelper);
  12523. }
  12524. // CMP [srcReg], Js::DynamicObject::`vtable'
  12525. // JNE $helper
  12526. IR::BranchInstr *branchInstr = InsertCompareBranch(
  12527. IR::IndirOpnd::New(srcReg, 0, TyMachPtr, m_func),
  12528. LoadVTableValueOpnd(instrInsert, VTableValue::VtableDynamicObject),
  12529. Js::OpCode::BrNeq_A,
  12530. labelHelper,
  12531. instrInsert);
  12532. InsertObjectPoison(srcReg, branchInstr, instrInsert, false);
  12533. }
// Per-ObjectType table of the vtable used by array fast-path type checks
// (indexed by ObjectType, see GetArrayVtableAddress). Non-array object types
// map to VtableInvalid. Mixed typed-array types check against the non-virtual
// vtable here; their virtual counterpart is selected separately.
const VTableValue Lowerer::VtableAddresses[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */       VTableValue::VtableInvalid,
    /* ObjectType::Object */                    VTableValue::VtableInvalid,
    /* ObjectType::RegExp */                    VTableValue::VtableInvalid,
    /* ObjectType::ObjectWithArray */           VTableValue::VtableJavascriptArray,
    /* ObjectType::Array */                     VTableValue::VtableJavascriptArray,
    /* ObjectType::Int8Array */                 VTableValue::VtableInt8Array,
    /* ObjectType::Uint8Array */                VTableValue::VtableUint8Array,
    /* ObjectType::Uint8ClampedArray */         VTableValue::VtableUint8ClampedArray,
    /* ObjectType::Int16Array */                VTableValue::VtableInt16Array,
    /* ObjectType::Uint16Array */               VTableValue::VtableUint16Array,
    /* ObjectType::Int32Array */                VTableValue::VtableInt32Array,
    /* ObjectType::Uint32Array */               VTableValue::VtableUint32Array,
    /* ObjectType::Float32Array */              VTableValue::VtableFloat32Array,
    /* ObjectType::Float64Array */              VTableValue::VtableFloat64Array,
    /* ObjectType::Int8VirtualArray */          VTableValue::VtableInt8VirtualArray,
    /* ObjectType::Uint8VirtualArray */         VTableValue::VtableUint8VirtualArray,
    /* ObjectType::Uint8ClampedVirtualArray */  VTableValue::VtableUint8ClampedVirtualArray,
    /* ObjectType::Int16VirtualArray */         VTableValue::VtableInt16VirtualArray,
    /* ObjectType::Uint16VirtualArray */        VTableValue::VtableUint16VirtualArray,
    /* ObjectType::Int32VirtualArray */         VTableValue::VtableInt32VirtualArray,
    /* ObjectType::Uint32VirtualArray */        VTableValue::VtableUint32VirtualArray,
    /* ObjectType::Float32VirtualArray */       VTableValue::VtableFloat32VirtualArray,
    /* ObjectType::Float64VirtualArray */       VTableValue::VtableFloat64VirtualArray,
    /* ObjectType::Int8MixedArray */            VTableValue::VtableInt8Array,
    /* ObjectType::Uint8MixedArray */           VTableValue::VtableUint8Array,
    /* ObjectType::Uint8ClampedMixedArray */    VTableValue::VtableUint8ClampedArray,
    /* ObjectType::Int16MixedArray */           VTableValue::VtableInt16Array,
    /* ObjectType::Uint16MixedArray */          VTableValue::VtableUint16Array,
    /* ObjectType::Int32MixedArray */           VTableValue::VtableInt32Array,
    /* ObjectType::Uint32MixedArray */          VTableValue::VtableUint32Array,
    /* ObjectType::Float32MixedArray */         VTableValue::VtableFloat32Array,
    /* ObjectType::Float64MixedArray */         VTableValue::VtableFloat64Array,
    /* ObjectType::Int64Array */                VTableValue::VtableInt64Array,
    /* ObjectType::Uint64Array */               VTableValue::VtableUint64Array,
    /* ObjectType::BoolArray */                 VTableValue::VtableBoolArray,
    /* ObjectType::CharArray */                 VTableValue::VtableCharArray
};
// Per-ObjectType offset of the array storage pointer: the head segment for
// JavascriptArray-backed types, the buffer for typed arrays (indexed by
// ObjectType, see GetArrayOffsetOfHeadSegment). Entries of -1 mark object
// types with no array storage.
const uint32 Lowerer::OffsetsOfHeadSegment[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */       static_cast<uint32>(-1),
    /* ObjectType::Object */                    static_cast<uint32>(-1),
    /* ObjectType::RegExp */                    static_cast<uint32>(-1),
    /* ObjectType::ObjectWithArray */           Js::JavascriptArray::GetOffsetOfHead(),
    /* ObjectType::Array */                     Js::JavascriptArray::GetOffsetOfHead(),
    /* ObjectType::Int8Array */                 Js::Int8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8Array */                Js::Uint8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedArray */         Js::Uint8ClampedArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16Array */                Js::Int16Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint16Array */               Js::Uint16Array::GetOffsetOfBuffer(),
    /* ObjectType::Int32Array */                Js::Int32Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint32Array */               Js::Uint32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float32Array */              Js::Float32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float64Array */              Js::Float64Array::GetOffsetOfBuffer(),
    /* ObjectType::Int8VirtualArray */          Js::Int8VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint8VirtualArray */         Js::Uint8VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedVirtualArray */  Js::Uint8ClampedVirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16VirtualArray */         Js::Int16VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint16VirtualArray */        Js::Uint16VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int32VirtualArray */         Js::Int32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint32VirtualArray */        Js::Uint32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Float32VirtualArray */       Js::Float32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Float64VirtualArray */       Js::Float64VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int8MixedArray */            Js::Int8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8MixedArray */           Js::Uint8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedMixedArray */    Js::Uint8ClampedArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16MixedArray */           Js::Int16Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint16MixedArray */          Js::Uint16Array::GetOffsetOfBuffer(),
    /* ObjectType::Int32MixedArray */           Js::Int32Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint32MixedArray */          Js::Uint32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float32MixedArray */         Js::Float32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float64MixedArray */         Js::Float64Array::GetOffsetOfBuffer(),
    /* ObjectType::Int64Array */                Js::Int64Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint64Array */               Js::Uint64Array::GetOffsetOfBuffer(),
    /* ObjectType::BoolArray */                 Js::BoolArray::GetOffsetOfBuffer(),
    /* ObjectType::CharArray */                 Js::CharArray::GetOffsetOfBuffer()
};
// Per-ObjectType offset of the length field (indexed by ObjectType, see
// GetArrayOffsetOfLength). Entries of -1 mark object types with no array
// length field.
const uint32 Lowerer::OffsetsOfLength[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */       static_cast<uint32>(-1),
    /* ObjectType::Object */                    static_cast<uint32>(-1),
    /* ObjectType::RegExp */                    static_cast<uint32>(-1),
    /* ObjectType::ObjectWithArray */           Js::JavascriptArray::GetOffsetOfLength(),
    /* ObjectType::Array */                     Js::JavascriptArray::GetOffsetOfLength(),
    /* ObjectType::Int8Array */                 Js::Int8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8Array */                Js::Uint8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedArray */         Js::Uint8ClampedArray::GetOffsetOfLength(),
    /* ObjectType::Int16Array */                Js::Int16Array::GetOffsetOfLength(),
    /* ObjectType::Uint16Array */               Js::Uint16Array::GetOffsetOfLength(),
    /* ObjectType::Int32Array */                Js::Int32Array::GetOffsetOfLength(),
    /* ObjectType::Uint32Array */               Js::Uint32Array::GetOffsetOfLength(),
    /* ObjectType::Float32Array */              Js::Float32Array::GetOffsetOfLength(),
    /* ObjectType::Float64Array */              Js::Float64Array::GetOffsetOfLength(),
    /* ObjectType::Int8VirtualArray */          Js::Int8VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint8VirtualArray */         Js::Uint8VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedVirtualArray */  Js::Uint8ClampedVirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int16VirtualArray */         Js::Int16VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint16VirtualArray */        Js::Uint16VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int32VirtualArray */         Js::Int32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint32VirtualArray */        Js::Uint32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Float32VirtualArray */       Js::Float32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Float64VirtualArray */       Js::Float64VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int8MixedArray */            Js::Int8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8MixedArray */           Js::Uint8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedMixedArray */    Js::Uint8ClampedArray::GetOffsetOfLength(),
    /* ObjectType::Int16MixedArray */           Js::Int16Array::GetOffsetOfLength(),
    /* ObjectType::Uint16MixedArray */          Js::Uint16Array::GetOffsetOfLength(),
    /* ObjectType::Int32MixedArray */           Js::Int32Array::GetOffsetOfLength(),
    /* ObjectType::Uint32MixedArray */          Js::Uint32Array::GetOffsetOfLength(),
    /* ObjectType::Float32MixedArray */         Js::Float32Array::GetOffsetOfLength(),
    /* ObjectType::Float64MixedArray */         Js::Float64Array::GetOffsetOfLength(),
    /* ObjectType::Int64Array */                Js::Int64Array::GetOffsetOfLength(),
    /* ObjectType::Uint64Array */               Js::Uint64Array::GetOffsetOfLength(),
    /* ObjectType::BoolArray */                 Js::BoolArray::GetOffsetOfLength(),
    /* ObjectType::CharArray */                 Js::CharArray::GetOffsetOfLength()
};
// Per-ObjectType IR element type used for indirect array element accesses
// (indexed by ObjectType, see GetArrayIndirType). TyIllegal marks object
// types with no element storage.
// NOTE: fixed comment typo below — entry 18 is ObjectType::Uint16VirtualArray
// (was mislabeled "Uint16vArray"); the value was already correct.
const IRType Lowerer::IndirTypes[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */       TyIllegal,
    /* ObjectType::Object */                    TyIllegal,
    /* ObjectType::RegExp */                    TyIllegal,
    /* ObjectType::ObjectWithArray */           TyVar,
    /* ObjectType::Array */                     TyVar,
    /* ObjectType::Int8Array */                 TyInt8,
    /* ObjectType::Uint8Array */                TyUint8,
    /* ObjectType::Uint8ClampedArray */         TyUint8,
    /* ObjectType::Int16Array */                TyInt16,
    /* ObjectType::Uint16Array */               TyUint16,
    /* ObjectType::Int32Array */                TyInt32,
    /* ObjectType::Uint32Array */               TyUint32,
    /* ObjectType::Float32Array */              TyFloat32,
    /* ObjectType::Float64Array */              TyFloat64,
    /* ObjectType::Int8VirtualArray */          TyInt8,
    /* ObjectType::Uint8VirtualArray */         TyUint8,
    /* ObjectType::Uint8ClampedVirtualArray */  TyUint8,
    /* ObjectType::Int16VirtualArray */         TyInt16,
    /* ObjectType::Uint16VirtualArray */        TyUint16,
    /* ObjectType::Int32VirtualArray */         TyInt32,
    /* ObjectType::Uint32VirtualArray */        TyUint32,
    /* ObjectType::Float32VirtualArray */       TyFloat32,
    /* ObjectType::Float64VirtualArray */       TyFloat64,
    /* ObjectType::Int8MixedArray */            TyInt8,
    /* ObjectType::Uint8MixedArray */           TyUint8,
    /* ObjectType::Uint8ClampedMixedArray */    TyUint8,
    /* ObjectType::Int16MixedArray */           TyInt16,
    /* ObjectType::Uint16MixedArray */          TyUint16,
    /* ObjectType::Int32MixedArray */           TyInt32,
    /* ObjectType::Uint32MixedArray */          TyUint32,
    /* ObjectType::Float32MixedArray */         TyFloat32,
    /* ObjectType::Float64MixedArray */         TyFloat64,
    /* ObjectType::Int64Array */                TyInt64,
    /* ObjectType::Uint64Array */               TyUint64,
    /* ObjectType::BoolArray */                 TyUint8,
    /* ObjectType::CharArray */                 TyUint16
};
// Per-ObjectType indir scale, i.e. log2 of the element size in bytes, used to
// scale the index operand of array element accesses (indexed by ObjectType,
// see GetArrayIndirScale). Entries of -1 mark object types with no element
// storage.
const BYTE Lowerer::IndirScales[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */       static_cast<BYTE>(-1),
    /* ObjectType::Object */                    static_cast<BYTE>(-1),
    /* ObjectType::RegExp */                    static_cast<BYTE>(-1),
    /* ObjectType::ObjectWithArray */           LowererMD::GetDefaultIndirScale(),
    /* ObjectType::Array */                     LowererMD::GetDefaultIndirScale(),
    /* ObjectType::Int8Array */                 0, // log2(sizeof(int8))
    /* ObjectType::Uint8Array */                0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedArray */         0, // log2(sizeof(uint8))
    /* ObjectType::Int16Array */                1, // log2(sizeof(int16))
    /* ObjectType::Uint16Array */               1, // log2(sizeof(uint16))
    /* ObjectType::Int32Array */                2, // log2(sizeof(int32))
    /* ObjectType::Uint32Array */               2, // log2(sizeof(uint32))
    /* ObjectType::Float32Array */              2, // log2(sizeof(float))
    /* ObjectType::Float64Array */              3, // log2(sizeof(double))
    /* ObjectType::Int8VirtualArray */          0, // log2(sizeof(int8))
    /* ObjectType::Uint8VirtualArray */         0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedVirtualArray */  0, // log2(sizeof(uint8))
    /* ObjectType::Int16VirtualArray */         1, // log2(sizeof(int16))
    /* ObjectType::Uint16VirtualArray */        1, // log2(sizeof(uint16))
    /* ObjectType::Int32VirtualArray */         2, // log2(sizeof(int32))
    /* ObjectType::Uint32VirtualArray */        2, // log2(sizeof(uint32))
    /* ObjectType::Float32VirtualArray */       2, // log2(sizeof(float))
    /* ObjectType::Float64VirtualArray */       3, // log2(sizeof(double))
    /* ObjectType::Int8MixedArray */            0, // log2(sizeof(int8))
    /* ObjectType::Uint8MixedArray */           0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedMixedArray */    0, // log2(sizeof(uint8))
    /* ObjectType::Int16MixedArray */           1, // log2(sizeof(int16))
    /* ObjectType::Uint16MixedArray */          1, // log2(sizeof(uint16))
    /* ObjectType::Int32MixedArray */           2, // log2(sizeof(int32))
    /* ObjectType::Uint32MixedArray */          2, // log2(sizeof(uint32))
    /* ObjectType::Float32MixedArray */         2, // log2(sizeof(float))
    /* ObjectType::Float64MixedArray */         3, // log2(sizeof(double))
    /* ObjectType::Int64Array */                3, // log2(sizeof(int64))
    /* ObjectType::Uint64Array */               3, // log2(sizeof(uint64))
    /* ObjectType::BoolArray */                 0, // log2(sizeof(bool))
    /* ObjectType::CharArray */                 1  // log2(sizeof(char16))
};
  12729. VTableValue Lowerer::GetArrayVtableAddress(const ValueType valueType, bool getVirtual)
  12730. {
  12731. Assert(valueType.IsLikelyAnyOptimizedArray());
  12732. if(valueType.IsLikelyArrayOrObjectWithArray())
  12733. {
  12734. if(valueType.HasIntElements())
  12735. {
  12736. return VTableValue::VtableNativeIntArray;
  12737. }
  12738. else if(valueType.HasFloatElements())
  12739. {
  12740. return VTableValue::VtableNativeFloatArray;
  12741. }
  12742. }
  12743. if (getVirtual && valueType.IsLikelyMixedTypedArrayType())
  12744. {
  12745. return VtableAddresses[static_cast<ValueType::TSize>(valueType.GetMixedToVirtualTypedArrayObjectType())];
  12746. }
  12747. return VtableAddresses[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  12748. }
  12749. uint32 Lowerer::GetArrayOffsetOfHeadSegment(const ValueType valueType)
  12750. {
  12751. Assert(valueType.IsLikelyAnyOptimizedArray());
  12752. return OffsetsOfHeadSegment[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  12753. }
  12754. uint32 Lowerer::GetArrayOffsetOfLength(const ValueType valueType)
  12755. {
  12756. Assert(valueType.IsLikelyAnyOptimizedArray());
  12757. return OffsetsOfLength[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  12758. }
  12759. IRType Lowerer::GetArrayIndirType(const ValueType valueType)
  12760. {
  12761. Assert(valueType.IsLikelyAnyOptimizedArray());
  12762. if(valueType.IsLikelyArrayOrObjectWithArray())
  12763. {
  12764. if(valueType.HasIntElements())
  12765. {
  12766. return TyInt32;
  12767. }
  12768. else if(valueType.HasFloatElements())
  12769. {
  12770. return TyFloat64;
  12771. }
  12772. }
  12773. return IndirTypes[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  12774. }
  12775. BYTE Lowerer::GetArrayIndirScale(const ValueType valueType)
  12776. {
  12777. Assert(valueType.IsLikelyAnyOptimizedArray());
  12778. if(valueType.IsLikelyArrayOrObjectWithArray())
  12779. {
  12780. if(valueType.HasIntElements())
  12781. {
  12782. return 2; // log2(sizeof(int32))
  12783. }
  12784. else if(valueType.HasFloatElements())
  12785. {
  12786. return 3; // log2(sizeof(double))
  12787. }
  12788. }
  12789. return IndirScales[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  12790. }
  12791. int Lowerer::SimdGetElementCountFromBytes(ValueType arrValueType, uint8 dataWidth)
  12792. {
  12793. Assert(dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);
  12794. Assert(arrValueType.IsTypedArray());
  12795. BYTE bpe = 1 << Lowerer::GetArrayIndirScale(arrValueType);
  12796. // round up
  12797. return (int)::ceil(((float)dataWidth) / bpe);
  12798. }
bool Lowerer::ShouldGenerateArrayFastPath(
    const IR::Opnd *const arrayOpnd,
    const bool supportsObjectsWithArrays,
    const bool supportsTypedArrays,
    const bool requiresSse2ForFloatArrays) const
{
    // Decides from the operand's profiled/propagated ValueType whether emitting
    // an inline array fast path is likely to pay off for this operation.
    // The caller describes the fast path's capabilities via the three bool
    // parameters; a mismatch between capability and the likely object type
    // means the fast path would rarely be taken.
    Assert(arrayOpnd);
    const ValueType arrayValueType(arrayOpnd->GetValueType());
    if(arrayValueType.IsUninitialized())
    {
        // Don't have info about the value type, better to generate the fast path anyway
        return true;
    }
    if (!arrayValueType.IsLikelyObject())
    {
        if (!arrayValueType.HasBeenObject() || arrayValueType.IsLikelyString())
        {
            // Never (or almost never) an object here — the fast path can't hit.
            return false;
        }
        //We have seen at least once there is an object in the code path. Generate fastpath hoping it to be array.
        //Its nice if we can get all the attributes set but valueType is only 16 bits. Consider expanding the same.
        return true;
    }
    if( (!supportsObjectsWithArrays && arrayValueType.GetObjectType() == ObjectType::ObjectWithArray) ||
        (!supportsTypedArrays && arrayValueType.IsLikelyTypedArray()) )
    {
        // The fast path likely would not hit
        return false;
    }
    if(arrayValueType.GetObjectType() == ObjectType::UninitializedObject)
    {
        // Don't have info about the object type, better to generate the fast path anyway
        return true;
    }
#ifdef _M_IX86
    // x86-only: float typed-array fast paths require SSE2 instructions.
    if(requiresSse2ForFloatArrays &&
        (
            arrayValueType.GetObjectType() == ObjectType::Float32Array ||
            arrayValueType.GetObjectType() == ObjectType::Float64Array
        ) &&
        !AutoSystemInfo::Data.SSE2Available())
    {
        // Fast paths for float arrays rely on SSE2
        return false;
    }
#endif
    // Finally, skip array kinds the optimizer has marked as un-optimizable.
    return !arrayValueType.IsLikelyAnyUnOptimizedArray();
}
  12847. IR::RegOpnd *Lowerer::LoadObjectArray(IR::RegOpnd *const baseOpnd, IR::Instr *const insertBeforeInstr)
  12848. {
  12849. Assert(baseOpnd);
  12850. Assert(
  12851. baseOpnd->GetValueType().IsLikelyObject() &&
  12852. baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray);
  12853. Assert(insertBeforeInstr);
  12854. Func *const func = insertBeforeInstr->m_func;
  12855. // mov array, [base + offsetOf(objectArrayOrFlags)]
  12856. IR::RegOpnd *const arrayOpnd =
  12857. baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd()->CopyAsRegOpnd(func) : baseOpnd->Copy(func)->AsRegOpnd();
  12858. arrayOpnd->m_sym = StackSym::New(TyVar, func);
  12859. arrayOpnd->SetValueType(arrayOpnd->GetValueType().ToArray());
  12860. const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, func, false /* autoDelete */);
  12861. InsertMove(
  12862. arrayOpnd,
  12863. IR::IndirOpnd::New(
  12864. baseOpnd,
  12865. Js::DynamicObject::GetOffsetOfObjectArray(),
  12866. arrayOpnd->GetType(),
  12867. func),
  12868. insertBeforeInstr);
  12869. return arrayOpnd;
  12870. }
  12871. void
  12872. Lowerer::GenerateIsEnabledArraySetElementFastPathCheck(
  12873. IR::LabelInstr * isDisabledLabel,
  12874. IR::Instr * const insertBeforeInstr)
  12875. {
  12876. InsertCompareBranch(
  12877. this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable),
  12878. LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableInvalid),
  12879. Js::OpCode::BrEq_A,
  12880. isDisabledLabel,
  12881. insertBeforeInstr);
  12882. }
IR::RegOpnd *Lowerer::GenerateArrayTest(
    IR::RegOpnd *const baseOpnd,
    IR::LabelInstr *const isNotObjectLabel,
    IR::LabelInstr *const isNotArrayLabel,
    IR::Instr *const insertBeforeInstr,
    const bool forceFloat,
    const bool isStore,
    const bool allowDefiniteArray)
{
    // Emits runtime checks proving that baseOpnd really is the kind of array
    // its profiled ValueType predicts. Branches to isNotObjectLabel when the
    // value is not an object at all and to isNotArrayLabel when it is an object
    // of the wrong kind (for bailout). Returns a reg opnd holding the verified
    // array — for ObjectWithArray, the loaded object-array; otherwise a copy of
    // baseOpnd — with its ValueType upgraded to a definite object.
    //
    // forceFloat: the caller needs a native float array; a native int array is
    //   converted in place via a helper and then bailed out (array checks may
    //   have been invalidated by the conversion).
    // isStore: set-element path — vtable checks go through the optimization-
    //   override slots so the fast path can be globally disabled.
    Assert(baseOpnd);
    const ValueType baseValueType(baseOpnd->GetValueType());
    // Shouldn't request to do an array test when it's already known to be an array, or if it's unlikely to be an array
    Assert(!baseValueType.IsAnyOptimizedArray() || allowDefiniteArray || baseValueType.IsNativeArray());
    Assert(baseValueType.IsUninitialized() || baseValueType.HasBeenObject());
    Assert(isNotObjectLabel);
    Assert(isNotArrayLabel);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;
    IR::RegOpnd *arrayOpnd;
    IR::AutoReuseOpnd autoReuseArrayOpnd;
    if(baseValueType.IsLikelyObject() && baseValueType.GetObjectType() == ObjectType::ObjectWithArray)
    {
        // Only DynamicObject is allowed (DynamicObject vtable is ensured) because some object types have special handling for
        // index properties - arguments object, string object, external object, etc.
        // JavascriptArray::Jit_TryGetArrayForObjectWithArray as well.
        GenerateObjectTypeTest(baseOpnd, insertBeforeInstr, isNotObjectLabel);
        // Object-header-inlined objects keep inline slots where the objectArray
        // pointer would be, so they cannot take this path.
        GenerateObjectHeaderInliningTest(baseOpnd, isNotArrayLabel, insertBeforeInstr);
        arrayOpnd = LoadObjectArray(baseOpnd, insertBeforeInstr);
        autoReuseArrayOpnd.Initialize(arrayOpnd, func, false /* autoDelete */);
        // The objectArray slot may be null, or may hold tagged flags rather
        // than an array pointer; reject both cases.
        // test array, array
        // je $isNotArrayLabel
        // test array, 1
        // jne $isNotArrayLabel
        InsertTestBranch(
            arrayOpnd,
            arrayOpnd,
            Js::OpCode::BrEq_A,
            isNotArrayLabel,
            insertBeforeInstr);
        InsertTestBranch(
            arrayOpnd,
            IR::IntConstOpnd::New(1, TyUint8, func, true),
            Js::OpCode::BrNeq_A,
            isNotArrayLabel,
            insertBeforeInstr);
    }
    else
    {
        if(!baseOpnd->IsNotTaggedValue())
        {
            m_lowererMD.GenerateObjectTest(baseOpnd, insertBeforeInstr, isNotObjectLabel);
        }
        arrayOpnd = baseOpnd->Copy(func)->AsRegOpnd();
        if(!baseValueType.IsLikelyAnyOptimizedArray())
        {
            // No array info profiled: assume a plain likely-JavascriptArray
            // (conservatively with possible missing values) for the check below.
            arrayOpnd->SetValueType(
                ValueType::GetObject(ObjectType::Array)
                    .ToLikely()
                    .SetHasNoMissingValues(false)
                    .SetArrayTypeId(Js::TypeIds_Array));
        }
        autoReuseArrayOpnd.Initialize(arrayOpnd, func, false /* autoDelete */);
    }

    // Pick the vtable(s) the object must match.
    VTableValue vtableAddress = baseValueType.IsLikelyAnyOptimizedArray()
        ? GetArrayVtableAddress(baseValueType)
        : VTableValue::VtableJavascriptArray;
    VTableValue virtualVtableAddress = VTableValue::VtableInvalid;
    if (baseValueType.IsLikelyMixedTypedArrayType())
    {
        // Mixed typed-array types must accept either the normal or the virtual
        // typed-array vtable.
        virtualVtableAddress = GetArrayVtableAddress(baseValueType, true);
    }
    IR::Opnd * vtableOpnd;
    IR::Opnd * vtableVirtualOpnd = nullptr;
    if (isStore &&
        (vtableAddress == VTableValue::VtableJavascriptArray ||
            baseValueType.IsLikelyNativeArray()))
    {
        // Stores load the expected vtable from the optimization-override slot,
        // so setting that slot to an invalid vtable disables the fast path.
        vtableOpnd = IR::RegOpnd::New(TyMachPtr, func);
        if (baseValueType.IsLikelyNativeArray())
        {
            if (baseValueType.HasIntElements())
            {
                InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesIntArraySetElementFastPathVtable), insertBeforeInstr);
            }
            else
            {
                Assert(baseValueType.HasFloatElements());
                InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesFloatArraySetElementFastPathVtable), insertBeforeInstr);
            }
        }
        else
        {
            InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable), insertBeforeInstr);
        }
    }
    else
    {
        vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, vtableAddress);
    }

    // cmp [array], vtableAddress
    // jne $isNotArrayLabel
    if (forceFloat && baseValueType.IsLikelyNativeFloatArray())
    {
        // We expect a native float array. If we get native int instead, convert it on the spot and bail out afterward.
        const auto goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::BranchInstr* branchInstr = InsertCompareBranch(
            IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
            vtableOpnd,
            Js::OpCode::BrEq_A,
            goodArrayLabel,
            insertBeforeInstr);
        InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr, isStore);
        IR::LabelInstr *notFloatArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        insertBeforeInstr->InsertBefore(notFloatArrayLabel);
        // Second check: is it a native int array? (Again via the override slot
        // on stores, so the conversion path honors the fast-path kill switch.)
        if (isStore)
        {
            vtableOpnd = IR::RegOpnd::New(TyMachPtr, func);
            InsertMove(vtableOpnd, IR::MemRefOpnd::New(
                func->GetScriptContextInfo()->GetIntArraySetElementFastPathVtableAddr(),
                TyMachPtr, func), insertBeforeInstr);
        }
        else
        {
            vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableJavascriptNativeIntArray);
        }
        branchInstr = InsertCompareBranch(
            IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
            vtableOpnd,
            Js::OpCode::BrNeq_A,
            isNotArrayLabel,
            insertBeforeInstr);
        InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr, isStore);
        // Native int array: convert it to a native float array via helper.
        m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayOpnd);
        IR::Instr *helperInstr = IR::Instr::New(Js::OpCode::Call, m_func);
        insertBeforeInstr->InsertBefore(helperInstr);
        m_lowererMD.ChangeToHelperCall(helperInstr, IR::HelperIntArr_ToNativeFloatArray);
        // Branch to the (bailout) label, because converting the array may have made our array checks unsafe.
        InsertBranch(Js::OpCode::Br, isNotArrayLabel, insertBeforeInstr);
        insertBeforeInstr->InsertBefore(goodArrayLabel);
    }
    else
    {
        IR::LabelInstr* goodArrayLabel = nullptr;
        if (baseValueType.IsLikelyMixedTypedArrayType())
        {
            // Accept either the normal or the virtual typed-array vtable.
            goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableOpnd,
                Js::OpCode::BrEq_A,
                goodArrayLabel,
                insertBeforeInstr);
            Assert(virtualVtableAddress);
            vtableVirtualOpnd = LoadVTableValueOpnd(insertBeforeInstr, virtualVtableAddress);
            Assert(vtableVirtualOpnd);
            IR::BranchInstr* branchInstr = InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableVirtualOpnd,
                Js::OpCode::BrNeq_A,
                isNotArrayLabel,
                insertBeforeInstr);
            InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr, isStore);
            insertBeforeInstr->InsertBefore(goodArrayLabel);
        }
        else
        {
            // Single vtable check.
            IR::BranchInstr *branchInstr = InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableOpnd,
                Js::OpCode::BrNeq_A,
                isNotArrayLabel,
                insertBeforeInstr);
            InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr, isStore);
        }
    }

    // The checks above prove objectness; upgrade the returned ValueType. For
    // JS arrays not yet known definite, missing values can no longer be ruled out.
    ValueType arrayValueType(arrayOpnd->GetValueType());
    if(arrayValueType.IsLikelyArrayOrObjectWithArray() && !arrayValueType.IsObject())
    {
        arrayValueType = arrayValueType.SetHasNoMissingValues(false);
    }
    arrayValueType = arrayValueType.ToDefiniteObject();
    arrayOpnd->SetValueType(arrayValueType);
    return arrayOpnd;
}
  13067. ///----------------------------------------------------------------------------
  13068. ///
  13069. /// Lowerer::HoistIndirOffset
  13070. ///
  13071. /// Replace the offset of the given indir with a new symbol, which becomes the indir index.
  13072. /// Assign the new symbol by creating an assignment from the constant offset.
  13073. ///
  13074. ///----------------------------------------------------------------------------
IR::Instr *Lowerer::HoistIndirOffset(IR::Instr* instr, IR::IndirOpnd *indirOpnd, RegNum regNum)
{
    // Replaces the indir's constant offset with a register: the offset is moved
    // into a new sym (assigned regNum) which becomes the indir's index opnd.
    // If the indir already has an index opnd, the offset is instead folded into
    // the base via an ADD (HoistIndirOffsetAsAdd). Returns the inserted
    // assignment/add instruction.
    int32 offset = indirOpnd->GetOffset();
    if (indirOpnd->GetIndexOpnd())
    {
        // Both index slots are taken; fold the offset into a new base register.
        Assert(indirOpnd->GetBaseOpnd());
        return Lowerer::HoistIndirOffsetAsAdd(instr, indirOpnd, indirOpnd->GetBaseOpnd(), offset, regNum);
    }
    IR::IntConstOpnd *offsetOpnd = IR::IntConstOpnd::New(offset, TyInt32, instr->m_func);
    IR::RegOpnd *indexOpnd = IR::RegOpnd::New(StackSym::New(TyMachReg, instr->m_func), regNum, TyMachReg, instr->m_func);
#if defined(DBG) && defined(_M_ARM)
    // ARM debug-only sanity check: when the scratch register is requested, make
    // sure no other operand of the instruction already occupies it.
    if (regNum == SCRATCH_REG)
    {
        AssertMsg(indirOpnd->GetBaseOpnd()->GetReg()!= SCRATCH_REG, "Why both are SCRATCH_REG");
        if (instr->GetSrc1() && instr->GetSrc1()->IsRegOpnd())
        {
            Assert(instr->GetSrc1()->AsRegOpnd()->GetReg() != SCRATCH_REG);
        }
        if (instr->GetSrc2() && instr->GetSrc2()->IsRegOpnd())
        {
            Assert(instr->GetSrc2()->AsRegOpnd()->GetReg() != SCRATCH_REG);
        }
        if (instr->GetDst() && instr->GetDst()->IsRegOpnd())
        {
            Assert(instr->GetDst()->AsRegOpnd()->GetReg() != SCRATCH_REG);
        }
    }
#endif
    // Clear the offset and add a new reg as the index.
    indirOpnd->SetOffset(0);
    indirOpnd->SetIndexOpnd(indexOpnd);
    IR::Instr *instrAssign = Lowerer::InsertMove(indexOpnd, offsetOpnd, instr);
    // Record the constant on the sym so later phases can still see its value.
    indexOpnd->m_sym->SetIsIntConst(offset);
    return instrAssign;
}
  13110. IR::Instr *Lowerer::HoistIndirOffsetAsAdd(IR::Instr* instr, IR::IndirOpnd *orgOpnd, IR::Opnd *baseOpnd, int offset, RegNum regNum)
  13111. {
  13112. IR::RegOpnd *newBaseOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, instr->m_func), regNum, TyMachPtr, instr->m_func);
  13113. IR::IntConstOpnd *src2 = IR::IntConstOpnd::New(offset, TyInt32, instr->m_func);
  13114. IR::Instr * instrAdd = IR::Instr::New(Js::OpCode::Add_A, newBaseOpnd, baseOpnd, src2, instr->m_func);
  13115. LowererMD::ChangeToAdd(instrAdd, false);
  13116. instr->InsertBefore(instrAdd);
  13117. orgOpnd->ReplaceBaseOpnd(newBaseOpnd);
  13118. orgOpnd->SetOffset(0);
  13119. return instrAdd;
  13120. }
  13121. IR::Instr *Lowerer::HoistIndirIndexOpndAsAdd(IR::Instr* instr, IR::IndirOpnd *orgOpnd, IR::Opnd *baseOpnd, IR::Opnd *indexOpnd, RegNum regNum)
  13122. {
  13123. IR::RegOpnd *newBaseOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, instr->m_func), regNum, TyMachPtr, instr->m_func);
  13124. IR::Instr * instrAdd = IR::Instr::New(Js::OpCode::Add_A, newBaseOpnd, baseOpnd, indexOpnd->UseWithNewType(TyMachPtr, instr->m_func), instr->m_func);
  13125. LowererMD::ChangeToAdd(instrAdd, false);
  13126. instr->InsertBefore(instrAdd);
  13127. orgOpnd->ReplaceBaseOpnd(newBaseOpnd);
  13128. orgOpnd->SetIndexOpnd(nullptr);
  13129. return instrAdd;
  13130. }
  13131. ///----------------------------------------------------------------------------
  13132. ///
  13133. /// Lowerer::HoistSymOffset
  13134. ///
  13135. /// Replace the given sym with an indir using the given base and offset.
  13136. /// (This is used, for instance, to hoist a sym offset that is too large to encode.)
  13137. ///
  13138. ///----------------------------------------------------------------------------
  13139. IR::Instr *Lowerer::HoistSymOffset(IR::Instr *instr, IR::SymOpnd *symOpnd, RegNum baseReg, uint32 offset, RegNum regNum)
  13140. {
  13141. IR::RegOpnd *baseOpnd = IR::RegOpnd::New(nullptr, baseReg, TyMachPtr, instr->m_func);
  13142. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(baseOpnd, offset, symOpnd->GetType(), instr->m_func);
  13143. if (symOpnd == instr->GetDst())
  13144. {
  13145. instr->ReplaceDst(indirOpnd);
  13146. }
  13147. else
  13148. {
  13149. instr->ReplaceSrc(symOpnd, indirOpnd);
  13150. }
  13151. return Lowerer::HoistIndirOffset(instr, indirOpnd, regNum);
  13152. }
  13153. IR::Instr *Lowerer::HoistSymOffsetAsAdd(IR::Instr* instr, IR::SymOpnd *orgOpnd, IR::Opnd *baseOpnd, int offset, RegNum regNum)
  13154. {
  13155. IR::IndirOpnd *newIndirOpnd = IR::IndirOpnd::New(baseOpnd->AsRegOpnd(), 0, TyMachPtr, instr->m_func);
  13156. instr->Replace(orgOpnd, newIndirOpnd); // Replace SymOpnd with IndirOpnd
  13157. return Lowerer::HoistIndirOffsetAsAdd(instr, newIndirOpnd, baseOpnd, offset, regNum);
  13158. }
  13159. IR::LabelInstr *Lowerer::InsertLabel(const bool isHelper, IR::Instr *const insertBeforeInstr)
  13160. {
  13161. Assert(insertBeforeInstr);
  13162. Func *const func = insertBeforeInstr->m_func;
  13163. IR::LabelInstr *const instr = IR::LabelInstr::New(Js::OpCode::Label, func, isHelper);
  13164. insertBeforeInstr->InsertBefore(instr);
  13165. return instr;
  13166. }
  13167. IR::Instr *Lowerer::InsertMoveWithBarrier(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr)
  13168. {
  13169. return Lowerer::InsertMove(dst, src, insertBeforeInstr, true);
  13170. }
/// Insert a move from src to dst before insertBeforeInstr, legalizing for the
/// target: float-from-constant loads take a dedicated path, narrowing moves
/// retype (or split, for x86 int64) the source, and the write barrier is
/// applied or skipped per generateWriteBarrier.
/// Returns the (possibly rewritten) move instruction.
IR::Instr *Lowerer::InsertMove(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr, bool generateWriteBarrier)
{
    Assert(dst);
    Assert(src);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    if(dst->IsFloat() && src->IsConstOpnd())
    {
        // Float constants can't be moved with a plain register move.
        return LoadFloatFromNonReg(src, dst, insertBeforeInstr);
    }
    if(TySize[dst->GetType()] < TySize[src->GetType()])
    {
#if _M_IX86
        if (IRType_IsInt64(src->GetType()))
        {
            // On x86, if we are trying to move an int64 to a smaller type
            // Insert a move of the low bits into dst
            return InsertMove(dst, func->FindOrCreateInt64Pair(src).low, insertBeforeInstr, generateWriteBarrier);
        }
        else
#endif
        {
            // Truncating move: reuse the source at the destination's type.
            src = src->UseWithNewType(dst->GetType(), func);
        }
    }
    IR::Instr * instr = IR::Instr::New(Js::OpCode::Ld_A, dst, src, func);
    insertBeforeInstr->InsertBefore(instr);
    if (generateWriteBarrier)
    {
        // May expand into multiple instructions; the returned instr is the
        // rewritten assignment.
        instr = LowererMD::ChangeToWriteBarrierAssign(instr, func);
    }
    else
    {
        LowererMD::ChangeToAssignNoBarrierCheck(instr);
    }
    return instr;
}
  13208. IR::BranchInstr *Lowerer::InsertBranch(
  13209. const Js::OpCode opCode,
  13210. IR::LabelInstr *const target,
  13211. IR::Instr *const insertBeforeInstr)
  13212. {
  13213. return InsertBranch(opCode, false /* isUnsigned */, target, insertBeforeInstr);
  13214. }
  13215. IR::BranchInstr *Lowerer::InsertBranch(
  13216. const Js::OpCode opCode,
  13217. const bool isUnsigned,
  13218. IR::LabelInstr *const target,
  13219. IR::Instr *const insertBeforeInstr)
  13220. {
  13221. Assert(target);
  13222. Assert(insertBeforeInstr);
  13223. Func *const func = insertBeforeInstr->m_func;
  13224. IR::BranchInstr *const instr = IR::BranchInstr::New(opCode, target, func);
  13225. if(!instr->IsLowered())
  13226. {
  13227. if(opCode == Js::OpCode::Br)
  13228. {
  13229. instr->m_opcode = LowererMD::MDUncondBranchOpcode;
  13230. }
  13231. else if(isUnsigned)
  13232. {
  13233. instr->m_opcode = LowererMD::MDUnsignedBranchOpcode(opCode);
  13234. }
  13235. else
  13236. {
  13237. instr->m_opcode = LowererMD::MDBranchOpcode(opCode);
  13238. }
  13239. }
  13240. insertBeforeInstr->InsertBefore(instr);
  13241. return instr;
  13242. }
  13243. IR::Instr *Lowerer::InsertCompare(IR::Opnd *const src1, IR::Opnd *const src2, IR::Instr *const insertBeforeInstr)
  13244. {
  13245. Assert(src1);
  13246. Assert(!src1->IsFloat64()); // not implemented
  13247. Assert(src2);
  13248. Assert(!src2->IsFloat64()); // not implemented
  13249. Assert(!src1->IsEqual(src2));
  13250. Assert(insertBeforeInstr);
  13251. Func *const func = insertBeforeInstr->m_func;
  13252. IR::Instr *const instr = IR::Instr::New(Js::OpCode::CMP, func);
  13253. instr->SetSrc1(src1);
  13254. instr->SetSrc2(src2);
  13255. insertBeforeInstr->InsertBefore(instr);
  13256. LowererMD::Legalize(instr);
  13257. return instr;
  13258. }
  13259. IR::BranchInstr *Lowerer::InsertCompareBranch(
  13260. IR::Opnd *const compareSrc1,
  13261. IR::Opnd *const compareSrc2,
  13262. Js::OpCode branchOpCode,
  13263. IR::LabelInstr *const target,
  13264. IR::Instr *const insertBeforeInstr,
  13265. const bool ignoreNaN)
  13266. {
  13267. return InsertCompareBranch(compareSrc1, compareSrc2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr, ignoreNaN);
  13268. }
/// Insert a compare followed by a conditional branch to 'target'.
/// Floats go through the float-branch lowering; x86 int64 pairs go through
/// the int64 emitter. For integer compares, two peepholes apply:
///   1. if only src1 is a foldable constant, swap the sources (mirroring the
///      branch condition) so the constant lands in the CMP's second operand;
///   2. a compare against zero is emitted as TEST src1, src1 instead of CMP,
///      with BrGe/BrLt mapped to the sign-flag branch (signed only).
/// Returns the inserted branch instruction.
IR::BranchInstr *Lowerer::InsertCompareBranch(
    IR::Opnd *compareSrc1,
    IR::Opnd *compareSrc2,
    Js::OpCode branchOpCode,
    const bool isUnsigned,
    IR::LabelInstr *const target,
    IR::Instr *const insertBeforeInstr,
    const bool ignoreNaN)
{
    Assert(compareSrc1);
    Assert(compareSrc2);
    Func *const func = insertBeforeInstr->m_func;
    if(compareSrc1->IsFloat())
    {
        Assert(compareSrc2->IsFloat());
        Assert(!isUnsigned);
        IR::BranchInstr *const instr = IR::BranchInstr::New(branchOpCode, target, compareSrc1, compareSrc2, func);
        insertBeforeInstr->InsertBefore(instr);
        return LowererMD::LowerFloatCondBranch(instr, ignoreNaN);
    }
#ifdef _M_IX86
    else if (compareSrc1->IsInt64())
    {
        // 32-bit target: int64 values live in register pairs and need the
        // dedicated int64 lowering.
        Assert(compareSrc2->IsInt64());
        IR::BranchInstr *const instr = IR::BranchInstr::New(branchOpCode, target, compareSrc1, compareSrc2, func);
        insertBeforeInstr->InsertBefore(instr);
        m_lowererMD.EmitInt64Instr(instr);
        return instr;
    }
#endif
    // For each relational opcode, the opcode that preserves meaning when the
    // two sources are swapped (Eq/Neq are symmetric; Ge<->Le, Gt<->Lt).
    Js::OpCode swapSrcsBranchOpCode;
    switch(branchOpCode)
    {
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrNeq_A:
            swapSrcsBranchOpCode = branchOpCode;
            goto Common_BrEqNeqGeGtLeLt;
        case Js::OpCode::BrGe_A:
            swapSrcsBranchOpCode = Js::OpCode::BrLe_A;
            goto Common_BrEqNeqGeGtLeLt;
        case Js::OpCode::BrGt_A:
            swapSrcsBranchOpCode = Js::OpCode::BrLt_A;
            goto Common_BrEqNeqGeGtLeLt;
        case Js::OpCode::BrLe_A:
            swapSrcsBranchOpCode = Js::OpCode::BrGe_A;
            goto Common_BrEqNeqGeGtLeLt;
        case Js::OpCode::BrLt_A:
            swapSrcsBranchOpCode = Js::OpCode::BrGt_A;
            // fall through
        Common_BrEqNeqGeGtLeLt:
            // Check if src1 is a constant and src2 is not, and facilitate folding the constant into the Cmp instruction
            if( (
                    compareSrc1->IsIntConstOpnd() ||
                    (
                        compareSrc1->IsAddrOpnd() &&
                        Math::FitsInDWord(reinterpret_cast<size_t>(compareSrc1->AsAddrOpnd()->m_address))
                    )
                ) &&
                !compareSrc2->IsIntConstOpnd() &&
                !compareSrc2->IsAddrOpnd())
            {
                // Swap the sources and branch
                IR::Opnd *const tempSrc = compareSrc1;
                compareSrc1 = compareSrc2;
                compareSrc2 = tempSrc;
                branchOpCode = swapSrcsBranchOpCode;
            }
            // Check for compare with zero, to prefer using Test instead of Cmp
            if( !compareSrc1->IsRegOpnd() ||
                !(
                    (compareSrc2->IsIntConstOpnd() && compareSrc2->AsIntConstOpnd()->GetValue() == 0) ||
                    (compareSrc2->IsAddrOpnd() && !compareSrc2->AsAddrOpnd()->m_address)
                ) ||
                branchOpCode == Js::OpCode::BrGt_A || branchOpCode == Js::OpCode::BrLe_A)
            {
                // BrGt/BrLe against zero still need a real CMP (TEST does not
                // give the needed flag combination), as do non-reg sources.
                goto Default;
            }
            if(branchOpCode == Js::OpCode::BrGe_A || branchOpCode == Js::OpCode::BrLt_A)
            {
                if(isUnsigned)
                {
                    // Unsigned >=0 / <0 against zero is degenerate; let the
                    // generic path handle it.
                    goto Default;
                }
                // Signed >=0 / <0 becomes a sign-flag branch after TEST.
                branchOpCode = LowererMD::MDCompareWithZeroBranchOpcode(branchOpCode);
            }
            if(!compareSrc2->IsInUse())
            {
                // The zero operand is dropped entirely by the TEST form.
                compareSrc2->Free(func);
            }
            InsertTest(compareSrc1, compareSrc1, insertBeforeInstr);
            break;
        default:
        Default:
            InsertCompare(compareSrc1, compareSrc2, insertBeforeInstr);
            break;
    }
    return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
}
  13367. IR::Instr *Lowerer::InsertTest(IR::Opnd *const src1, IR::Opnd *const src2, IR::Instr *const insertBeforeInstr)
  13368. {
  13369. Assert(src1);
  13370. Assert(!src1->IsFloat64()); // not implemented
  13371. Assert(src2);
  13372. Assert(!src2->IsFloat64()); // not implemented
  13373. #if !TARGET_64
  13374. Assert(!src1->IsInt64()); // not implemented
  13375. Assert(!src2->IsInt64()); // not implemented
  13376. #endif
  13377. Assert(insertBeforeInstr);
  13378. Func *const func = insertBeforeInstr->m_func;
  13379. IR::Instr *const instr = IR::Instr::New(LowererMD::MDTestOpcode, func);
  13380. instr->SetSrc1(src1);
  13381. instr->SetSrc2(src2);
  13382. insertBeforeInstr->InsertBefore(instr);
  13383. LowererMD::Legalize(instr);
  13384. return instr;
  13385. }
  13386. IR::BranchInstr *Lowerer::InsertTestBranch(
  13387. IR::Opnd *const testSrc1,
  13388. IR::Opnd *const testSrc2,
  13389. const Js::OpCode branchOpCode,
  13390. IR::LabelInstr *const target,
  13391. IR::Instr *const insertBeforeInstr)
  13392. {
  13393. return InsertTestBranch(testSrc1, testSrc2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr);
  13394. }
  13395. IR::BranchInstr *Lowerer::InsertTestBranch(
  13396. IR::Opnd *const testSrc1,
  13397. IR::Opnd *const testSrc2,
  13398. const Js::OpCode branchOpCode,
  13399. const bool isUnsigned,
  13400. IR::LabelInstr *const target,
  13401. IR::Instr *const insertBeforeInstr)
  13402. {
  13403. InsertTest(testSrc1, testSrc2, insertBeforeInstr);
  13404. return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
  13405. }
  13406. /* Inserts add with an overflow check, if we overflow throw OOM
  13407. * add dst, src
  13408. * jno $continueLabel
  13409. * overflow code
  13410. * $continueLabel : fall through
  13411. */
  13412. void Lowerer::InsertAddWithOverflowCheck(
  13413. const bool needFlags,
  13414. IR::Opnd *const dst,
  13415. IR::Opnd *src1,
  13416. IR::Opnd *src2,
  13417. IR::Instr *const insertBeforeInstr,
  13418. IR::Instr **const onOverflowInsertBeforeInstrRef)
  13419. {
  13420. Func * func = insertBeforeInstr->m_func;
  13421. InsertAdd(needFlags, dst, src1, src2, insertBeforeInstr);
  13422. IR::LabelInstr *const continueLabel = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  13423. InsertBranch(LowererMD::MDNotOverflowBranchOpcode, continueLabel, insertBeforeInstr);
  13424. *onOverflowInsertBeforeInstrRef = continueLabel;
  13425. }
/// Insert an add (dst = src1 + src2) before insertBeforeInstr.
/// A negative constant operand is canonicalized into a subtraction of its
/// positive value (IntConstMin is excluded because -IntConstMin overflows).
/// Returns the inserted, legalized instruction.
IR::Instr *Lowerer::InsertAdd(
    const bool needFlags,
    IR::Opnd *const dst,
    IR::Opnd *src1,
    IR::Opnd *src2,
    IR::Instr *const insertBeforeInstr)
{
    Assert(dst);
    Assert(src1);
    Assert(src2);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    if(src2->IsIntConstOpnd())
    {
        IR::IntConstOpnd *const intConstOpnd = src2->AsIntConstOpnd();
        const IntConstType value = intConstOpnd->GetValue();
        if(value < 0 && value != IntConstMin)
        {
            // Change (s1 = s1 + -5) into (s1 = s1 - 5)
            IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
            newSrc2->SetValue(-value);
            return InsertSub(needFlags, dst, src1, newSrc2, insertBeforeInstr);
        }
    }
    else if(src1->IsIntConstOpnd())
    {
        IR::IntConstOpnd *const intConstOpnd = src1->AsIntConstOpnd();
        const IntConstType value = intConstOpnd->GetValue();
        if(value < 0 && value != IntConstMin)
        {
            // Change (s1 = -5 + s1) into (s1 = s1 - 5)
            IR::Opnd *const newSrc1 = src2;
            IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
            newSrc2->SetValue(-value);
            return InsertSub(needFlags, dst, newSrc1, newSrc2, insertBeforeInstr);
        }
    }
    IR::Instr *const instr = IR::Instr::New(Js::OpCode::Add_A, dst, src1, src2, func);
    insertBeforeInstr->InsertBefore(instr);
    LowererMD::ChangeToAdd(instr, needFlags);
    LowererMD::Legalize(instr);
    return instr;
}
/// Insert a subtract (dst = src1 - src2) before insertBeforeInstr.
/// A negative constant src2 is canonicalized into an addition of its positive
/// value (IntConstMin excluded: negating it would overflow).
/// Returns the inserted, legalized instruction.
IR::Instr *Lowerer::InsertSub(
    const bool needFlags,
    IR::Opnd *const dst,
    IR::Opnd *src1,
    IR::Opnd *src2,
    IR::Instr *const insertBeforeInstr)
{
    Assert(dst);
    Assert(src1);
    Assert(src2);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    if(src2->IsIntConstOpnd())
    {
        IR::IntConstOpnd *const intConstOpnd = src2->AsIntConstOpnd();
        const IntConstType value = intConstOpnd->GetValue();
        if(value < 0 && value != IntConstMin)
        {
            // Change (s1 = s1 - -5) into (s1 = s1 + 5)
            IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
            newSrc2->SetValue(-value);
            return InsertAdd(needFlags, dst, src1, newSrc2, insertBeforeInstr);
        }
    }
    IR::Instr *const instr = IR::Instr::New(Js::OpCode::Sub_A, dst, src1, src2, func);
    insertBeforeInstr->InsertBefore(instr);
    LowererMD::ChangeToSub(instr, needFlags);
    LowererMD::Legalize(instr);
    return instr;
}
  13499. IR::Instr *Lowerer::InsertLea(IR::RegOpnd *const dst, IR::Opnd *const src, IR::Instr *const insertBeforeInstr)
  13500. {
  13501. Assert(dst);
  13502. Assert(src);
  13503. Assert(src->IsIndirOpnd() || src->IsSymOpnd());
  13504. Assert(insertBeforeInstr);
  13505. Func *const func = insertBeforeInstr->m_func;
  13506. IR::Instr *const instr = IR::Instr::New(LowererMD::MDLea, dst, src, func);
  13507. insertBeforeInstr->InsertBefore(instr);
  13508. return ChangeToLea(instr);
  13509. }
/// Rewrite an already-inserted instruction in place into a machine LEA and
/// legalize it. Requires a reg dst and a single indir/sym source.
IR::Instr *
Lowerer::ChangeToLea(IR::Instr * instr)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsRegOpnd());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsIndirOpnd() || instr->GetSrc1()->IsSymOpnd());
    Assert(!instr->GetSrc2());
    instr->m_opcode = LowererMD::MDLea;
    LowererMD::Legalize(instr);
    return instr;
}
  13523. #if _M_X64
  13524. IR::Instr *Lowerer::InsertMoveBitCast(
  13525. IR::Opnd *const dst,
  13526. IR::Opnd *const src1,
  13527. IR::Instr *const insertBeforeInstr)
  13528. {
  13529. Assert(dst);
  13530. Assert(dst->GetType() == TyFloat64);
  13531. Assert(src1);
  13532. Assert(src1->GetType() == TyUint64);
  13533. Assert(insertBeforeInstr);
  13534. Func *const func = insertBeforeInstr->m_func;
  13535. IR::Instr *const instr = IR::Instr::New(LowererMD::MDMovUint64ToFloat64Opcode, dst, src1, func);
  13536. insertBeforeInstr->InsertBefore(instr);
  13537. LowererMD::Legalize(instr);
  13538. return instr;
  13539. }
  13540. #endif
  13541. IR::Instr *Lowerer::InsertXor(
  13542. IR::Opnd *const dst,
  13543. IR::Opnd *const src1,
  13544. IR::Opnd *const src2,
  13545. IR::Instr *const insertBeforeInstr)
  13546. {
  13547. Assert(dst);
  13548. Assert(src1);
  13549. Assert(src2);
  13550. Assert(insertBeforeInstr);
  13551. Func *const func = insertBeforeInstr->m_func;
  13552. IR::Instr *const instr = IR::Instr::New(LowererMD::MDXorOpcode, dst, src1, src2, func);
  13553. insertBeforeInstr->InsertBefore(instr);
  13554. LowererMD::Legalize(instr);
  13555. return instr;
  13556. }
  13557. IR::Instr *Lowerer::InsertAnd(
  13558. IR::Opnd *const dst,
  13559. IR::Opnd *const src1,
  13560. IR::Opnd *const src2,
  13561. IR::Instr *const insertBeforeInstr)
  13562. {
  13563. Assert(dst);
  13564. Assert(src1);
  13565. Assert(src2);
  13566. Assert(insertBeforeInstr);
  13567. Func *const func = insertBeforeInstr->m_func;
  13568. IR::Instr *const instr = IR::Instr::New(Js::OpCode::AND, dst, src1, src2, func);
  13569. insertBeforeInstr->InsertBefore(instr);
  13570. LowererMD::Legalize(instr);
  13571. return instr;
  13572. }
  13573. IR::Instr *Lowerer::InsertOr(
  13574. IR::Opnd *const dst,
  13575. IR::Opnd *const src1,
  13576. IR::Opnd *const src2,
  13577. IR::Instr *const insertBeforeInstr)
  13578. {
  13579. Assert(dst);
  13580. Assert(src1);
  13581. Assert(src2);
  13582. Assert(insertBeforeInstr);
  13583. Func *const func = insertBeforeInstr->m_func;
  13584. IR::Instr *const instr = IR::Instr::New(LowererMD::MDOrOpcode, dst, src1, src2, func);
  13585. insertBeforeInstr->InsertBefore(instr);
  13586. LowererMD::Legalize(instr);
  13587. return instr;
  13588. }
  13589. IR::Instr *Lowerer::InsertShift(
  13590. const Js::OpCode opCode,
  13591. const bool needFlags,
  13592. IR::Opnd *const dst,
  13593. IR::Opnd *const src1,
  13594. IR::Opnd *const src2,
  13595. IR::Instr *const insertBeforeInstr)
  13596. {
  13597. Assert(dst);
  13598. Assert(!dst->IsFloat64()); // not implemented
  13599. Assert(src1);
  13600. Assert(!src1->IsFloat64()); // not implemented
  13601. Assert(src2);
  13602. Assert(!src2->IsFloat64()); // not implemented
  13603. Assert(insertBeforeInstr);
  13604. Func *const func = insertBeforeInstr->m_func;
  13605. IR::Instr *const instr = IR::Instr::New(opCode, dst, src1, src2, func);
  13606. insertBeforeInstr->InsertBefore(instr);
  13607. LowererMD::ChangeToShift(instr, needFlags);
  13608. LowererMD::Legalize(instr);
  13609. return instr;
  13610. }
  13611. IR::Instr *Lowerer::InsertShiftBranch(
  13612. const Js::OpCode shiftOpCode,
  13613. IR::Opnd *const dst,
  13614. IR::Opnd *const src1,
  13615. IR::Opnd *const src2,
  13616. const Js::OpCode branchOpCode,
  13617. IR::LabelInstr *const target,
  13618. IR::Instr *const insertBeforeInstr)
  13619. {
  13620. return InsertShiftBranch(shiftOpCode, dst, src1, src2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr);
  13621. }
  13622. IR::Instr *Lowerer::InsertShiftBranch(
  13623. const Js::OpCode shiftOpCode,
  13624. IR::Opnd *const dst,
  13625. IR::Opnd *const src1,
  13626. IR::Opnd *const src2,
  13627. const Js::OpCode branchOpCode,
  13628. const bool isUnsigned,
  13629. IR::LabelInstr *const target,
  13630. IR::Instr *const insertBeforeInstr)
  13631. {
  13632. InsertShift(shiftOpCode, true /* needFlags */, dst, src1, src2, insertBeforeInstr);
  13633. return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
  13634. }
  13635. IR::Instr *Lowerer::InsertConvertFloat32ToFloat64(
  13636. IR::Opnd *const dst,
  13637. IR::Opnd *const src,
  13638. IR::Instr *const insertBeforeInstr)
  13639. {
  13640. Assert(dst);
  13641. Assert(dst->IsFloat64());
  13642. Assert(src);
  13643. Assert(src->IsFloat32());
  13644. Assert(insertBeforeInstr);
  13645. Func *const func = insertBeforeInstr->m_func;
  13646. IR::Instr *const instr = IR::Instr::New(LowererMD::MDConvertFloat32ToFloat64Opcode, dst, src, func);
  13647. insertBeforeInstr->InsertBefore(instr);
  13648. LowererMD::Legalize(instr);
  13649. return instr;
  13650. }
  13651. IR::Instr *Lowerer::InsertConvertFloat64ToFloat32(
  13652. IR::Opnd *const dst,
  13653. IR::Opnd *const src,
  13654. IR::Instr *const insertBeforeInstr)
  13655. {
  13656. Assert(dst);
  13657. Assert(dst->IsFloat32());
  13658. Assert(src);
  13659. Assert(src->IsFloat64());
  13660. Assert(insertBeforeInstr);
  13661. Func *const func = insertBeforeInstr->m_func;
  13662. IR::Instr *const instr = IR::Instr::New(LowererMD::MDConvertFloat64ToFloat32Opcode, dst, src, func);
  13663. insertBeforeInstr->InsertBefore(instr);
  13664. LowererMD::Legalize(instr);
  13665. return instr;
  13666. }
/// Emit a saturating uint32 decrement: dst = (src == 0) ? 0 : src - 1.
/// Each Insert* call below takes a label as its insertion point, so the
/// emitted order relative to $overflow/$continue is determined by which
/// label each piece is inserted before.
/// If onOverflowInsertBeforeInstrRef is provided, it receives the label
/// before which the caller can insert extra on-underflow code.
void Lowerer::InsertDecUInt32PreventOverflow(
    IR::Opnd *const dst,
    IR::Opnd *const src,
    IR::Instr *const insertBeforeInstr,
    IR::Instr * *const onOverflowInsertBeforeInstrRef)
{
    Assert(dst);
    Assert(dst->GetType() == TyUint32);
    Assert(src);
    Assert(src->GetType() == TyUint32);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    // Generate:
    //     subs temp, src, 1
    //     bcs $overflow
    //     mov dst, temp
    //     b $continue
    //   $overflow:
    //     mov dst, 0
    //   $continue:
    IR::LabelInstr *const overflowLabel = Lowerer::InsertLabel(false, insertBeforeInstr);
    // subs temp, src, 1
    IR::RegOpnd *const tempOpnd = IR::RegOpnd::New(StackSym::New(TyUint32, func), TyUint32, func);
    const IR::AutoReuseOpnd autoReuseTempOpnd(tempOpnd, func);
    Lowerer::InsertSub(true, tempOpnd, src, IR::IntConstOpnd::New(1, TyUint32, func, true), overflowLabel);
    // bcs $overflow — unsigned borrow (src was 0) takes the overflow path
    Lowerer::InsertBranch(Js::OpCode::BrLt_A, true, overflowLabel, overflowLabel);
    // mov dst, temp
    Lowerer::InsertMove(dst, tempOpnd, overflowLabel);
    const bool dstEqualsSrc = dst->IsEqual(src);
    if(!dstEqualsSrc || onOverflowInsertBeforeInstrRef)
    {
        // Need a distinct overflow section: either dst must be zeroed, or the
        // caller wants a hook for extra overflow code.
        // b $continue
        // $overflow:
        //     mov dst, 0
        // $continue:
        IR::LabelInstr *const continueLabel = Lowerer::InsertLabel(false, insertBeforeInstr);
        Lowerer::InsertBranch(Js::OpCode::Br, continueLabel, overflowLabel);
        if(!dstEqualsSrc)
        {
            Lowerer::InsertMove(dst, IR::IntConstOpnd::New(0, TyUint32, func, true), continueLabel);
        }
        if(onOverflowInsertBeforeInstrRef)
        {
            *onOverflowInsertBeforeInstrRef = continueLabel;
        }
    }
    else
    {
        // dst == src and no hook requested: on underflow dst already holds 0,
        // so $overflow can just fall through.
        // $overflow:
    }
}
/// Branch to 'target' when the float64 src is (not) zero-or-NaN, depending on
/// branchOnZeroOrNan. On ARM an extra overflow-set branch is emitted to fold
/// the unordered (NaN) case into the right destination; see comments below.
void Lowerer::InsertFloatCheckForZeroOrNanBranch(
    IR::Opnd *const src,
    const bool branchOnZeroOrNan,
    IR::LabelInstr *const target,
    IR::LabelInstr *const fallthroughLabel,
    IR::Instr *const insertBeforeInstr)
{
    Assert(src);
    Assert(src->IsFloat64());
    Assert(target);
    Assert(!fallthroughLabel || fallthroughLabel != target);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    IR::BranchInstr *const branchOnEqualOrNotEqual =
        InsertCompareBranch(
            src,
            IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyFloat64, func),
            branchOnZeroOrNan ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A,
            target,
            insertBeforeInstr,
            true /* ignoreNaN */);
    // x86/x64
    // When NaN is ignored, on x86 and x64, JE branches when equal or unordered since an unordered result sets the zero
    // flag, and JNE branches when not equal and not unordered. By comparing with zero, JE will branch when src is zero or
    // NaN, and JNE will branch when src is not zero and not NaN.
    //
    // ARM
    // When NaN is ignored, BEQ branches when equal and not unordered, and BNE branches when not equal or unordered. So,
    // when comparing src with zero, an unordered check needs to be added before the BEQ/BNE.
    branchOnEqualOrNotEqual; // satisfy the compiler
#ifdef _M_ARM32_OR_ARM64
    // Unordered (NaN) case: send it to 'target' when NaN counts as
    // zero-or-NaN, otherwise to the fallthrough (created on demand).
    InsertBranch(
        Js::OpCode::BVS,
        branchOnZeroOrNan
            ? target
            : fallthroughLabel ? fallthroughLabel : insertBeforeInstr->m_prev->GetOrCreateContinueLabel(),
        branchOnEqualOrNotEqual);
#endif
}
/// Common entry for the element-access (LdElem/StElem) fast paths.
/// Dispatches on the index operand's profiled value type: string indices and
/// symbol indices get their own fast paths (when profile data says the base
/// is not a typed array); everything else falls through to the int-index
/// fast path. Returns the indir to use for the element access, or null when
/// no fast path applies (caller must use the helper).
/// Out-params are all initialized up front; emitBailoutRef/maskOpnd/
/// pLabelSegmentLengthIncreased/indirOpndOverflowed are filled in by the
/// int-index path when taken.
IR::IndirOpnd*
Lowerer::GenerateFastElemICommon(
    _In_ IR::Instr* elemInstr,
    _In_ bool isStore,
    _In_ IR::IndirOpnd* indirOpnd,
    _In_ IR::LabelInstr* labelHelper,
    _In_ IR::LabelInstr* labelCantUseArray,
    _In_opt_ IR::LabelInstr* labelFallthrough,
    _Out_ bool* pIsTypedArrayElement,
    _Out_ bool* pIsStringIndex,
    _Out_opt_ bool* emitBailoutRef,
    _Outptr_opt_result_maybenull_ IR::Opnd** maskOpnd,
    _Outptr_opt_result_maybenull_ IR::LabelInstr** pLabelSegmentLengthIncreased, // = nullptr
    _In_ bool checkArrayLengthOverflow, // = true
    _In_ bool forceGenerateFastPath, // = false
    _In_ bool returnLength, // = false
    _In_opt_ IR::LabelInstr* bailOutLabelInstr, // = nullptr
    _Out_opt_ bool* indirOpndOverflowed, // = nullptr
    _In_ Js::FldInfoFlags flags) // = Js::FldInfo_NoInfo
{
    // Default all out-params before any early return.
    *pIsTypedArrayElement = false;
    *pIsStringIndex = false;
    if(pLabelSegmentLengthIncreased)
    {
        *pLabelSegmentLengthIncreased = nullptr;
    }
    if (maskOpnd)
    {
        *maskOpnd = nullptr;
    }
    if (indirOpndOverflowed)
    {
        *indirOpndOverflowed = false;
    }
    if (emitBailoutRef)
    {
        *emitBailoutRef = false;
    }
    IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
    AssertMsg(baseOpnd, "This shouldn't be NULL");
    // Caution: If making changes to the conditions under which we don't emit the typical array checks, make sure
    // the code in GlobOpt::ShouldAssumeIndirOpndHasNonNegativeIntIndex is updated accordingly. We don't want the
    // global optimizer to type specialize instructions, for which the lowerer is forced to emit unconditional
    // bailouts.
    if (baseOpnd->IsTaggedInt())
    {
        // A tagged-int base can never be an array; no fast path.
        return NULL;
    }
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    if (indexOpnd)
    {
        // Only emit the property fast path when the profile shows a normal
        // object-property access (local/proto location, inline/aux slots).
        const bool normalLocation = (flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromProto | Js::FldInfo_FromLocalWithoutProperty)) != 0;
        const bool normalSlots = (flags & (Js::FldInfo_FromAuxSlots | Js::FldInfo_FromInlineSlots)) != 0;
        const bool generateFastpath = !baseOpnd->GetValueType().IsLikelyOptimizedTypedArray() && normalLocation && normalSlots && flags != Js::FldInfo_NoInfo;
        if (indexOpnd->GetValueType().IsLikelyString())
        {
            if (generateFastpath)
            {
                // If profile data says that it's a typed array - do not generate the property string fast path as the src. could be a temp and that would cause a bug.
                *pIsTypedArrayElement = false;
                *pIsStringIndex = true;
                return GenerateFastElemIStringIndexCommon(elemInstr, isStore, indirOpnd, labelHelper, flags);
            }
            else
            {
                // There's no point in generating the int index fast path if we know the index has a string value.
                return nullptr;
            }
        }
        else if (indexOpnd->GetValueType().IsLikelySymbol())
        {
            if (generateFastpath)
            {
                // If profile data says that it's a typed array - do not generate the symbol fast path as the src. could be a temp and that would cause a bug.
                return GenerateFastElemISymbolIndexCommon(elemInstr, isStore, indirOpnd, labelHelper, flags);
            }
            else
            {
                // There's no point in generating the int index fast path if we know the index has a symbol value.
                return nullptr;
            }
        }
    }
    // Default: integer-index fast path.
    return
        GenerateFastElemIIntIndexCommon(
            elemInstr,
            isStore,
            indirOpnd,
            labelHelper,
            labelCantUseArray,
            labelFallthrough,
            pIsTypedArrayElement,
            emitBailoutRef,
            pLabelSegmentLengthIncreased,
            checkArrayLengthOverflow,
            maskOpnd,
            false,
            returnLength,
            bailOutLabelInstr,
            indirOpndOverflowed);
}
/// Compute the address of the polymorphic inline cache entry for a given
/// object type at runtime: hash the type pointer into a cache index, mask by
/// (size - 1) (size is a power of two — the mask is formed by size - 1),
/// scale by sizeof(InlineCache), and leave the entry's address in
/// inlineCacheOpnd.
void
Lowerer::GenerateDynamicLoadPolymorphicInlineCacheSlot(IR::Instr * instrInsert, IR::RegOpnd * inlineCacheOpnd, IR::Opnd * objectTypeOpnd)
{
    // Generates:
    //      MOV r1, objectTypeOpnd
    //      SHR r1, PolymorphicInlineCacheShift
    //      MOVZX r2, inlineCacheOpnd->size
    //      DEC r2
    //      AND r1, r2
    //      SHL r1, Math::Log2(sizeof(Js::InlineCache))
    //      MOV inlineCacheOpnd, inlineCacheOpnd->inlineCaches
    //      LEA inlineCacheOpnd, [inlineCacheOpnd + r1]
    IntConstType rightShiftAmount = PolymorphicInlineCacheShift;
    IntConstType leftShiftAmount = Math::Log2(sizeof(Js::InlineCache));
    // The net shift must be to the right, or the hash would lose the type's
    // distinguishing bits.
    Assert(rightShiftAmount > leftShiftAmount);
    IR::RegOpnd * opndOffset = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertShift(Js::OpCode::ShrU_A, false, opndOffset, objectTypeOpnd, IR::IntConstOpnd::New(rightShiftAmount, TyUint8, m_func, true), instrInsert);
    // Load the cache's size (uint16) and turn it into the index mask.
    IR::RegOpnd * cacheIndexOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(cacheIndexOpnd, IR::IndirOpnd::New(inlineCacheOpnd, Js::PolymorphicInlineCache::GetOffsetOfSize(), TyUint16, m_func), instrInsert);
    InsertSub(false, cacheIndexOpnd, cacheIndexOpnd, IR::IntConstOpnd::New(1, TyMachPtr, m_func), instrInsert);
    InsertAnd(opndOffset, opndOffset, cacheIndexOpnd, instrInsert);
    // Scale the masked index to a byte offset into the cache array.
    InsertShift(Js::OpCode::Shl_A, false, opndOffset, opndOffset, IR::IntConstOpnd::New(leftShiftAmount, TyUint8, m_func), instrInsert);
    InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCacheOpnd, Js::PolymorphicInlineCache::GetOffsetOfInlineCaches(), TyMachPtr, m_func), instrInsert);
    InsertLea(inlineCacheOpnd, IR::IndirOpnd::New(inlineCacheOpnd, opndOffset, TyMachPtr, m_func), instrInsert);
}
  13884. // Test that the operand is a PropertyString, or bail to helper
  13885. void
  13886. Lowerer::GeneratePropertyStringTest(IR::RegOpnd *srcReg, IR::Instr *instrInsert, IR::LabelInstr *labelHelper, bool isStore)
  13887. {
  13888. // Generates:
  13889. // StringTest(srcReg, $helper) ; verify index is string type
  13890. // CMP srcReg, PropertyString::`vtable' ; verify index is property string
  13891. // JNE $helper
  13892. GenerateStringTest(srcReg, instrInsert, labelHelper);
  13893. IR::LabelInstr * notPropStrLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  13894. IR::LabelInstr * propStrLoadedLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  13895. IR::BranchInstr *branchInstr = InsertCompareBranch(
  13896. IR::IndirOpnd::New(srcReg, 0, TyMachPtr, m_func),
  13897. LoadVTableValueOpnd(instrInsert, VTableValue::VtablePropertyString),
  13898. Js::OpCode::BrNeq_A, notPropStrLabel, instrInsert);
  13899. InsertObjectPoison(srcReg, branchInstr, instrInsert, isStore);
  13900. InsertBranch(Js::OpCode::Br, propStrLoadedLabel, instrInsert);
  13901. InsertBranch(Js::OpCode::Br, propStrLoadedLabel, instrInsert);
  13902. instrInsert->InsertBefore(notPropStrLabel);
  13903. branchInstr = InsertCompareBranch(
  13904. IR::IndirOpnd::New(srcReg, 0, TyMachPtr, m_func),
  13905. LoadVTableValueOpnd(instrInsert, VTableValue::VtableLiteralStringWithPropertyStringPtr),
  13906. Js::OpCode::BrNeq_A, labelHelper, instrInsert);
  13907. InsertObjectPoison(srcReg, branchInstr, instrInsert, isStore);
  13908. IR::IndirOpnd * propStrOpnd = IR::IndirOpnd::New(srcReg, Js::LiteralStringWithPropertyStringPtr::GetOffsetOfPropertyString(), TyMachPtr, m_func);
  13909. InsertCompareBranch(propStrOpnd, IR::IntConstOpnd::New(NULL, TyMachPtr, m_func), Js::OpCode::BrNeq_A, labelHelper, instrInsert);
  13910. // We don't really own srcReg, but it is fine to update it to be the PropertyString, since that is better to have anyway
  13911. InsertMove(srcReg, propStrOpnd, instrInsert);
  13912. instrInsert->InsertBefore(propStrLoadedLabel);
  13913. }
  13914. IR::IndirOpnd*
  13915. Lowerer::GenerateFastElemIStringIndexCommon(
  13916. _In_ IR::Instr* elemInstr,
  13917. _In_ bool isStore,
  13918. _In_ IR::IndirOpnd* indirOpnd,
  13919. _In_ IR::LabelInstr* labelHelper,
  13920. _In_ Js::FldInfoFlags flags)
  13921. {
  13922. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  13923. IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
  13924. Assert(baseOpnd != nullptr);
  13925. Assert(indexOpnd->GetValueType().IsLikelyString());
  13926. // Generates:
  13927. // PropertyStringTest(indexOpnd, $helper) ; verify index is string type
  13928. // FastElemISymbolOrStringIndexCommon(indexOpnd, baseOpnd, $helper) ; shared code with JavascriptSymbol
  13929. GeneratePropertyStringTest(indexOpnd, elemInstr, labelHelper, isStore);
  13930. const uint32 inlineCacheOffset = isStore ? Js::PropertyString::GetOffsetOfStElemInlineCache() : Js::PropertyString::GetOffsetOfLdElemInlineCache();
  13931. const uint32 hitRateOffset = Js::PropertyString::GetOffsetOfHitRate();
  13932. return GenerateFastElemISymbolOrStringIndexCommon(elemInstr, indexOpnd, baseOpnd, inlineCacheOffset, hitRateOffset, labelHelper, flags);
  13933. }
  13934. IR::IndirOpnd*
  13935. Lowerer::GenerateFastElemISymbolIndexCommon(
  13936. _In_ IR::Instr* elemInstr,
  13937. _In_ bool isStore,
  13938. _In_ IR::IndirOpnd* indirOpnd,
  13939. _In_ IR::LabelInstr* labelHelper,
  13940. _In_ Js::FldInfoFlags flags)
  13941. {
  13942. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  13943. IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
  13944. Assert(baseOpnd != nullptr);
  13945. Assert(indexOpnd->GetValueType().IsLikelySymbol());
  13946. // Generates:
  13947. // SymbolTest(indexOpnd, $helper) ; verify index is symbol type
  13948. // FastElemISymbolOrStringIndexCommon(indexOpnd, baseOpnd, $helper) ; shared code with PropertyString
  13949. GenerateSymbolTest(indexOpnd, elemInstr, labelHelper);
  13950. const uint32 inlineCacheOffset = isStore ? Js::JavascriptSymbol::GetOffsetOfStElemInlineCache() : Js::JavascriptSymbol::GetOffsetOfLdElemInlineCache();
  13951. const uint32 hitRateOffset = Js::JavascriptSymbol::GetOffsetOfHitRate();
  13952. return GenerateFastElemISymbolOrStringIndexCommon(elemInstr, indexOpnd, baseOpnd, inlineCacheOffset, hitRateOffset, labelHelper, flags);
  13953. }
  13954. void
  13955. Lowerer::GenerateFastIsInSymbolOrStringIndex(IR::Instr * instrInsert, IR::RegOpnd *indexOpnd, IR::RegOpnd *baseOpnd, IR::Opnd *dest, uint32 inlineCacheOffset, const uint32 hitRateOffset, IR::LabelInstr * labelHelper, IR::LabelInstr * labelDone)
  13956. {
  13957. // Try to look up the property in the cache, or bail to helper
  13958. GenerateLookUpInIndexCache(instrInsert, indexOpnd, baseOpnd, nullptr /*opndSlotArray*/, nullptr /*opndSlotIndex*/, inlineCacheOffset, hitRateOffset, labelHelper);
  13959. // MOV dest, true
  13960. InsertMove(dest, LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueTrue), instrInsert);
  13961. // JMP labelDone
  13962. InsertBranch(Js::OpCode::Br, labelDone, instrInsert);
  13963. }
  13964. IR::IndirOpnd*
  13965. Lowerer::GenerateFastElemISymbolOrStringIndexCommon(
  13966. _In_ IR::Instr* instrInsert,
  13967. _In_ IR::RegOpnd* indexOpnd,
  13968. _In_ IR::RegOpnd* baseOpnd,
  13969. _In_ const uint32 inlineCacheOffset,
  13970. _In_ const uint32 hitRateOffset,
  13971. _In_ IR::LabelInstr* labelHelper,
  13972. _In_ Js::FldInfoFlags flags)
  13973. {
  13974. // Try to look up the property in the cache, or bail to helper
  13975. IR::RegOpnd * opndSlotArray = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
  13976. IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
  13977. GenerateLookUpInIndexCache(instrInsert, indexOpnd, baseOpnd, opndSlotArray, opndSlotIndex, inlineCacheOffset, hitRateOffset, labelHelper, flags);
  13978. // return [opndSlotArray + opndSlotIndex * PtrSize]
  13979. return IR::IndirOpnd::New(opndSlotArray, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, instrInsert->m_func);
  13980. }
// Look up a value from the polymorphic inline cache on a PropertyString or Symbol. Offsets are relative to indexOpnd.
// Checks local and/or proto caches based on profile data. If the property is not found, jump to the helper.
// opndSlotArray is optional; if provided, it will receive the base address of the slot array that contains the property.
// opndSlotIndex is optional; if provided, it will receive the index of the match within the slot array.
void
Lowerer::GenerateLookUpInIndexCache(
    _In_ IR::Instr* instrInsert,
    _In_ IR::RegOpnd* indexOpnd,
    _In_ IR::RegOpnd* baseOpnd,
    _In_opt_ IR::RegOpnd* opndSlotArray,
    _In_opt_ IR::RegOpnd* opndSlotIndex,
    _In_ const uint32 inlineCacheOffset,
    _In_ const uint32 hitRateOffset,
    _In_ IR::LabelInstr* labelHelper,
    _In_ Js::FldInfoFlags flags) // = Js::FldInfo_NoInfo
{
    // Generates:
    // MOV inlineCacheOpnd, index->inlineCache
    // GenerateObjectTest(baseOpnd, $helper)                                            ; verify base is an object
    // MOV objectTypeOpnd, baseOpnd->type
    // GenerateDynamicLoadPolymorphicInlineCacheSlot(inlineCacheOpnd, objectTypeOpnd)   ; loads inline cache for given type
    // if (checkLocalInlineSlots)
    //     GenerateLookUpInIndexCacheHelper<CheckLocal, CheckInlineSlot>         // checks local inline slots, goes to next on failure
    // if (checkLocalAuxSlots)
    //     GenerateLookUpInIndexCacheHelper<CheckLocal, CheckAuxSlot>            // checks local aux slots, goes to next on failure
    // if (fromProto && fromInlineSlots)
    //     GenerateLookUpInIndexCacheHelper<CheckProto, CheckInlineSlot>         // checks proto inline slots, goes to next on failure
    // if (fromProto && fromAuxSlots)
    //     GenerateLookUpInIndexCacheHelper<CheckProto, CheckAuxSlot>            // checks proto aux slots, goes to next on failure
    // if (doAdd && fromInlineSlots)
    //     GenerateLookUpInIndexCacheHelper<CheckLocal, CheckInlineSlot, DoAdd>  // checks typeWithoutProperty inline slots, goes to next on failure
    // if (doAdd && fromAuxSlots)
    //     GenerateLookUpInIndexCacheHelper<CheckLocal, CheckAuxSlot, DoAdd>     // checks typeWithoutProperty aux slots, goes to helper on failure
    // $slotIndexLoadedLabel
    // INC indexOpnd->hitRate

    // Decode the profile flags into the set of cache layouts worth checking inline.
    // With no profile info (FldInfo_NoInfo) the local checks are always emitted.
    const bool fromInlineSlots = (flags & Js::FldInfo_FromInlineSlots) == Js::FldInfo_FromInlineSlots;
    const bool fromAuxSlots = (flags & Js::FldInfo_FromAuxSlots) == Js::FldInfo_FromAuxSlots;
    const bool fromLocal = (flags & Js::FldInfo_FromLocal) == Js::FldInfo_FromLocal;
    const bool fromProto = (flags & Js::FldInfo_FromProto) == Js::FldInfo_FromProto;
    const bool doAdd = (flags & Js::FldInfo_FromLocalWithoutProperty) == Js::FldInfo_FromLocalWithoutProperty;
    const bool checkLocalInlineSlots = flags == Js::FldInfo_NoInfo || (fromInlineSlots && fromLocal);
    const bool checkLocalAuxSlots = flags == Js::FldInfo_NoInfo || (fromAuxSlots && fromLocal);

    // Base must be a (non-tagged) object before we can read its type.
    m_lowererMD.GenerateObjectTest(baseOpnd, instrInsert, labelHelper);
    IR::RegOpnd * objectTypeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(objectTypeOpnd, IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, m_func), instrInsert);
    // Load the polymorphic cache hanging off the index (PropertyString/Symbol),
    // then narrow it to the single InlineCache entry selected by the object's type.
    IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(indexOpnd, inlineCacheOffset, TyMachPtr, m_func), instrInsert);
    GenerateDynamicLoadPolymorphicInlineCacheSlot(instrInsert, inlineCacheOpnd, objectTypeOpnd);

    // Each helper call below emits one cache check and, on failure, branches to a
    // fresh "next" label so the following check can chain onto it. branchToPatch /
    // nextLabel always track the most recently emitted check; after the last check,
    // that dangling branch is retargeted to labelHelper (see below).
    // taggedTypeOpnd is lazily created by the first aux-slot check and shared by the rest.
    IR::LabelInstr* slotIndexLoadedLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::BranchInstr* branchToPatch = nullptr;
    IR::LabelInstr* nextLabel = nullptr;
    IR::RegOpnd* taggedTypeOpnd = nullptr;
    if (checkLocalInlineSlots)
    {
        GenerateLookUpInIndexCacheHelper<true /* CheckLocal */, true /* CheckInlineSlot */, false /* DoAdd */>(
            instrInsert,
            baseOpnd,
            opndSlotArray,
            opndSlotIndex,
            objectTypeOpnd,
            inlineCacheOpnd,
            slotIndexLoadedLabel,
            labelHelper,
            &nextLabel,
            &branchToPatch,
            &taggedTypeOpnd);
    }
    if (checkLocalAuxSlots)
    {
        GenerateLookUpInIndexCacheHelper<true /* CheckLocal */, false /* CheckInlineSlot */, false /* DoAdd */>(
            instrInsert,
            baseOpnd,
            opndSlotArray,
            opndSlotIndex,
            objectTypeOpnd,
            inlineCacheOpnd,
            slotIndexLoadedLabel,
            labelHelper,
            &nextLabel,
            &branchToPatch,
            &taggedTypeOpnd);
    }
    if (fromProto)
    {
        if (fromInlineSlots)
        {
            GenerateLookUpInIndexCacheHelper<false /* CheckLocal */, true /* CheckInlineSlot */, false /* DoAdd */>(
                instrInsert,
                baseOpnd,
                opndSlotArray,
                opndSlotIndex,
                objectTypeOpnd,
                inlineCacheOpnd,
                slotIndexLoadedLabel,
                labelHelper,
                &nextLabel,
                &branchToPatch,
                &taggedTypeOpnd);
        }
        if (fromAuxSlots)
        {
            GenerateLookUpInIndexCacheHelper<false /* CheckLocal */, false /* CheckInlineSlot */, false /* DoAdd */>(
                instrInsert,
                baseOpnd,
                opndSlotArray,
                opndSlotIndex,
                objectTypeOpnd,
                inlineCacheOpnd,
                slotIndexLoadedLabel,
                labelHelper,
                &nextLabel,
                &branchToPatch,
                &taggedTypeOpnd);
        }
    }
    if (doAdd)
    {
        // Adding a property writes through the slot array, so the caller must want it.
        Assert(opndSlotArray);
        if (fromInlineSlots)
        {
            GenerateLookUpInIndexCacheHelper<true /* CheckLocal */, true /* CheckInlineSlot */, true /* DoAdd */>(
                instrInsert,
                baseOpnd,
                opndSlotArray,
                opndSlotIndex,
                objectTypeOpnd,
                inlineCacheOpnd,
                slotIndexLoadedLabel,
                labelHelper,
                &nextLabel,
                &branchToPatch,
                &taggedTypeOpnd);
        }
        if (fromAuxSlots)
        {
            GenerateLookUpInIndexCacheHelper<true /* CheckLocal */, false /* CheckInlineSlot */, true /* DoAdd */>(
                instrInsert,
                baseOpnd,
                opndSlotArray,
                opndSlotIndex,
                objectTypeOpnd,
                inlineCacheOpnd,
                slotIndexLoadedLabel,
                labelHelper,
                &nextLabel,
                &branchToPatch,
                &taggedTypeOpnd);
        }
    }
    // At least one check was emitted (checkLocalInlineSlots/checkLocalAuxSlots cover
    // the NoInfo case). The last check's failure branch still targets its private
    // "next" label; retarget it to the helper and drop the now-unreferenced label.
    Assert(branchToPatch);
    Assert(nextLabel);
    Assert(nextLabel->labelRefs.Count() == 1 && nextLabel->labelRefs.Head() == branchToPatch);
    branchToPatch->SetTarget(labelHelper);
    nextLabel->Remove();
    instrInsert->InsertBefore(slotIndexLoadedLabel);

    // All successful checks land here: bump the index's hit-rate counter.
    IR::IndirOpnd * hitRateOpnd = IR::IndirOpnd::New(indexOpnd, hitRateOffset, TyInt32, m_func);
    IR::IntConstOpnd * incOpnd = IR::IntConstOpnd::New(1, TyInt32, m_func);
    // overflow check: not needed here, we don't allocate anything with hitrate
    InsertAdd(false, hitRateOpnd, hitRateOpnd, incOpnd, instrInsert);
}
// Emits one inline-cache check for GenerateLookUpInIndexCache.
//
// Template parameters select the cache layout being probed:
//   CheckLocal      - probe u.local (own property) vs. u.proto (prototype property)
//   CheckInlineSlot - property lives in inline slots vs. aux slots
//   DoAdd           - probe u.local.typeWithoutProperty to add the property (implies CheckLocal)
//
// On a cache hit, optionally loads the slot array base into opndSlotArray and the
// slot index into opndSlotIndex, then jumps to doneLabel. On a miss, falls through
// to a freshly created *nextLabel; the miss branch is also returned via *branchToPatch
// so the caller can retarget the final check's miss branch to the helper.
// *taggedTypeOpnd caches the tagged form of the object's type across calls so it is
// computed at most once per lookup sequence.
template <bool CheckLocal, bool CheckInlineSlot, bool DoAdd>
void
Lowerer::GenerateLookUpInIndexCacheHelper(
    _In_ IR::Instr* insertInstr,
    _In_ IR::RegOpnd* baseOpnd,
    _In_opt_ IR::RegOpnd* opndSlotArray,
    _In_opt_ IR::RegOpnd* opndSlotIndex,
    _In_ IR::RegOpnd* objectTypeOpnd,
    _In_ IR::RegOpnd* inlineCacheOpnd,
    _In_ IR::LabelInstr* doneLabel,
    _In_ IR::LabelInstr* helperLabel,
    _Outptr_ IR::LabelInstr** nextLabel,
    _Outptr_ IR::BranchInstr** branchToPatch,
    _Inout_ IR::RegOpnd** taggedTypeOpnd)
{
    // Adding a property only makes sense on the object itself, never via a proto cache.
    CompileAssert(!DoAdd || CheckLocal);
    AnalysisAssert(!opndSlotArray || opndSlotIndex);
    *nextLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    // Aux-slot cache entries store the type with a tag bit set, so compare against
    // the tagged type; inline-slot entries store the raw type pointer.
    IR::RegOpnd* typeOpnd = nullptr;
    if (CheckInlineSlot)
    {
        typeOpnd = objectTypeOpnd;
    }
    else
    {
        if (*taggedTypeOpnd == nullptr)
        {
            // First aux-slot check in the sequence: materialize the tagged type once.
            *taggedTypeOpnd = IR::RegOpnd::New(TyMachReg, m_func);
            m_lowererMD.GenerateLoadTaggedType(insertInstr, objectTypeOpnd, *taggedTypeOpnd);
        }
        typeOpnd = *taggedTypeOpnd;
    }

    // objectOpnd is the object whose slots hold the property: the base itself for a
    // local hit, or the prototype object recorded in the cache for a proto hit.
    IR::RegOpnd* objectOpnd = nullptr;
    if (CheckLocal)
    {
        *branchToPatch = GenerateLocalInlineCacheCheck(insertInstr, typeOpnd, inlineCacheOpnd, *nextLabel, DoAdd);
        if (DoAdd)
        {
            if (!CheckInlineSlot)
            {
                // Adding into aux slots may require growing the aux-slot array;
                // bail to the helper if the cache says an adjustment is needed.
                GenerateAuxSlotAdjustmentRequiredCheck(insertInstr, inlineCacheOpnd, helperLabel);
            }
            // Transition the object to the cache's new type (the one that has the property).
            GenerateSetObjectTypeFromInlineCache(insertInstr, baseOpnd, inlineCacheOpnd, !CheckInlineSlot);
        }
        objectOpnd = baseOpnd;
    }
    else
    {
        *branchToPatch = GenerateProtoInlineCacheCheck(insertInstr, typeOpnd, inlineCacheOpnd, *nextLabel);
        // Proto hit: the property lives on the prototype object cached in u.proto.
        IR::RegOpnd* protoOpnd = IR::RegOpnd::New(TyMachReg, m_func);
        int32 protoObjOffset = (int32)offsetof(Js::InlineCache, u.proto.prototypeObject);
        IR::IndirOpnd* protoIndir = IR::IndirOpnd::New(inlineCacheOpnd, protoObjOffset, TyMachReg, m_func);
        InsertMove(protoOpnd, protoIndir, insertInstr);
        objectOpnd = protoOpnd;
    }

    if (opndSlotArray)
    {
        // Inline slots live directly in the object; aux slots are a separate array
        // reached through the object's auxSlots pointer.
        if (CheckInlineSlot)
        {
            InsertMove(opndSlotArray, objectOpnd, insertInstr);
        }
        else
        {
            IR::IndirOpnd* auxIndir = IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, m_func);
            InsertMove(opndSlotArray, auxIndir, insertInstr);
        }
        // Load the (16-bit) slot index recorded in the matching cache union member.
        size_t slotIndexOffset = CheckLocal ? offsetof(Js::InlineCache, u.local.slotIndex) : offsetof(Js::InlineCache, u.proto.slotIndex);
        IR::IndirOpnd* slotOffsetIndir = IR::IndirOpnd::New(inlineCacheOpnd, (int32)slotIndexOffset, TyUint16, m_func);
        InsertMove(opndSlotIndex, slotOffsetIndir, insertInstr);
    }

    // Hit: skip the remaining checks. Miss branches (emitted above) fall through here.
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);
    insertInstr->InsertBefore(*nextLabel);
}
  14215. IR::IndirOpnd *
  14216. Lowerer::GenerateFastElemIIntIndexCommon(
  14217. IR::Instr * instr,
  14218. bool isStore,
  14219. IR::IndirOpnd * indirOpnd,
  14220. IR::LabelInstr * labelHelper,
  14221. IR::LabelInstr * labelCantUseArray,
  14222. IR::LabelInstr *labelFallthrough,
  14223. bool * pIsTypedArrayElement,
  14224. bool *emitBailoutRef,
  14225. IR::LabelInstr **pLabelSegmentLengthIncreased,
  14226. bool checkArrayLengthOverflow /*= true*/,
  14227. IR::Opnd** maskOpnd,
  14228. bool forceGenerateFastPath /* = false */,
  14229. bool returnLength,
  14230. IR::LabelInstr *bailOutLabelInstr /* = nullptr*/,
  14231. bool * indirOpndOverflowed /* = nullptr */)
  14232. {
  14233. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  14234. IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
  14235. Assert(!baseOpnd->IsTaggedInt() || (indexOpnd && indexOpnd->IsNotInt()));
  14236. if (indirOpndOverflowed != nullptr)
  14237. {
  14238. *indirOpndOverflowed = false;
  14239. }
  14240. BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
  14241. IRType indirType = TyVar;
  14242. const ValueType baseValueType(baseOpnd->GetValueType());
  14243. // TEST base, AtomTag -- check base not tagged int
  14244. // JNE $helper
  14245. // if (base.GetValueType() != Array) {
  14246. // CMP [base], JavascriptArray::`vtable'
  14247. // JNE $helper
  14248. // }
  14249. // TEST index, 1 -- index tagged int
  14250. // JEQ $helper
  14251. // if (inputIndex is not int const) {
  14252. // MOV index, inputIndex
  14253. // SAR index, Js::VarTag_Shift -- remote atom tag
  14254. // JS $helper -- exclude negative index
  14255. // }
  14256. // MOV headSegment, [base + offset(head)]
  14257. // CMP [headSegment + offset(length)], index -- bounds check
  14258. // if (opcode == StElemI_A) {
  14259. // JA $done (for typedarray, JA $toNumberHelper)
  14260. // CMP [headSegment + offset(size)], index -- chunk has room?
  14261. // JBE $helper
  14262. // if (index is not int const) {
  14263. // LEA newLength, [index + 1]
  14264. // } else {
  14265. // newLength = index + 1
  14266. // }
  14267. // if(BailOutOnInvalidatedArrayLength) {
  14268. // CMP [base + offset(length)], newlength
  14269. // JB $helper
  14270. // }
  14271. // MOV [headSegment + offset(length)], newLength -- update length on chunk
  14272. // CMP [base + offset(length)], newLength
  14273. // JAE $done
  14274. // MOV [base + offset(length)], newLength -- update length on array
  14275. // if(length to be returned){
  14276. // SHL newLength, AtomTag
  14277. // INC newLength
  14278. // MOV dst, newLength
  14279. // }
  14280. // JMP $done
  14281. //
  14282. // $toNumberHelper: Call HelperOp_ConvNumber_Full
  14283. // JMP $done
  14284. // $done
  14285. // } else {la
  14286. // JBE $helper
  14287. // }
  14288. // return [headSegment + offset(elements) + index]
  14289. // Caution: If making changes to the conditions under which we don't emit the typical array checks, make sure
  14290. // the code in GlobOpt::ShouldAssumeIndirOpndHasNonNegativeIntIndex is updated accordingly. We don't want the
  14291. // global optimizer to type specialize instructions, for which the lowerer is forced to emit unconditional
  14292. // bailouts.
  14293. bool isIndexNotInt = false;
  14294. IntConstType value = 0;
  14295. IR::Opnd * indexValueOpnd = nullptr;
  14296. bool invertBoundCheckComparison = false;
  14297. bool checkIndexConstOverflowed = false;
  14298. if (indirOpnd->TryGetIntConstIndexValue(true, &value, &isIndexNotInt))
  14299. {
  14300. if (value >= 0)
  14301. {
  14302. indexValueOpnd = IR::IntConstOpnd::New(value, TyUint32, this->m_func);
  14303. invertBoundCheckComparison = true; // facilitate folding the constant index into the compare instruction
  14304. checkIndexConstOverflowed = true;
  14305. }
  14306. else
  14307. {
  14308. // If the index is a negative int constant we go directly to helper.
  14309. Assert(!forceGenerateFastPath);
  14310. return nullptr;
  14311. }
  14312. }
  14313. else if (isIndexNotInt)
  14314. {
  14315. // If we know the index is not an int we go directly to helper.
  14316. Assert(!forceGenerateFastPath);
  14317. return nullptr;
  14318. }
  14319. //At this point indexValueOpnd is either NULL or contains the valueOpnd
  14320. if(!forceGenerateFastPath && !ShouldGenerateArrayFastPath(baseOpnd, true, true, true))
  14321. {
  14322. return nullptr;
  14323. }
  14324. if(baseValueType.IsLikelyAnyOptimizedArray())
  14325. {
  14326. indirScale = GetArrayIndirScale(baseValueType);
  14327. indirType = GetArrayIndirType(baseValueType);
  14328. }
  14329. if (checkIndexConstOverflowed && (static_cast<uint64>(value) << indirScale) > INT32_MAX &&
  14330. indirOpndOverflowed != nullptr)
  14331. {
  14332. *indirOpndOverflowed = true;
  14333. return nullptr;
  14334. }
  14335. IRType elementType = TyIllegal;
  14336. IR::Opnd * element = nullptr;
  14337. if(instr->m_opcode == Js::OpCode::InlineArrayPush)
  14338. {
  14339. element = instr->GetSrc2();
  14340. elementType = element->GetType();
  14341. }
  14342. else if(isStore && instr->GetSrc1())
  14343. {
  14344. element = instr->GetSrc1();
  14345. elementType = element->GetType();
  14346. }
  14347. Assert(isStore || (element == nullptr && elementType == TyIllegal));
  14348. if (isStore && baseValueType.IsLikelyNativeArray() && indirType != elementType)
  14349. {
  14350. // We're trying to write a value of the wrong type, which should force a conversion of the array.
  14351. // Go to the helper for that.
  14352. return nullptr;
  14353. }
  14354. IR::RegOpnd *arrayOpnd = baseOpnd;
  14355. IR::RegOpnd *headSegmentOpnd = nullptr;
  14356. IR::Opnd *headSegmentLengthOpnd = nullptr;
  14357. IR::AutoReuseOpnd autoReuseHeadSegmentOpnd, autoReuseHeadSegmentLengthOpnd;
  14358. bool indexIsNonnegative = indexValueOpnd || indexOpnd->GetType() == TyUint32 || !checkArrayLengthOverflow;
  14359. bool indexIsLessThanHeadSegmentLength = false;
  14360. if(!baseValueType.IsAnyOptimizedArray())
  14361. {
  14362. arrayOpnd = GenerateArrayTest(baseOpnd, labelCantUseArray, labelCantUseArray, instr, true, isStore);
  14363. }
  14364. else
  14365. {
  14366. if(arrayOpnd->IsArrayRegOpnd())
  14367. {
  14368. IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
  14369. if(arrayRegOpnd->HeadSegmentSym())
  14370. {
  14371. headSegmentOpnd = IR::RegOpnd::New(arrayRegOpnd->HeadSegmentSym(), TyMachPtr, m_func);
  14372. DebugOnly(headSegmentOpnd->FreezeSymValue());
  14373. autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
  14374. }
  14375. if(arrayRegOpnd->HeadSegmentLengthSym())
  14376. {
  14377. headSegmentLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->HeadSegmentLengthSym(), TyUint32, m_func);
  14378. // This value can change over the course of this function
  14379. //DebugOnly(headSegmentLengthOpnd->AsRegOpnd()->FreezeSymValue());
  14380. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  14381. }
  14382. if (arrayRegOpnd->EliminatedLowerBoundCheck())
  14383. {
  14384. indexIsNonnegative = true;
  14385. }
  14386. if(arrayRegOpnd->EliminatedUpperBoundCheck())
  14387. {
  14388. indexIsLessThanHeadSegmentLength = true;
  14389. }
  14390. }
  14391. }
  14392. IR::AutoReuseOpnd autoReuseArrayOpnd;
  14393. if(arrayOpnd->GetValueType().GetObjectType() != ObjectType::ObjectWithArray)
  14394. {
  14395. autoReuseArrayOpnd.Initialize(arrayOpnd, m_func);
  14396. }
  14397. const auto EnsureObjectArrayLoaded = [&]()
  14398. {
  14399. if(arrayOpnd->GetValueType().GetObjectType() != ObjectType::ObjectWithArray)
  14400. {
  14401. return;
  14402. }
  14403. arrayOpnd = LoadObjectArray(arrayOpnd, instr);
  14404. autoReuseArrayOpnd.Initialize(arrayOpnd, m_func);
  14405. };
  14406. const bool doUpperBoundCheck = checkArrayLengthOverflow && !indexIsLessThanHeadSegmentLength;
  14407. if(!indexValueOpnd)
  14408. {
  14409. indexValueOpnd =
  14410. m_lowererMD.LoadNonnegativeIndex(
  14411. indexOpnd,
  14412. (
  14413. indexIsNonnegative
  14414. #if !INT32VAR
  14415. ||
  14416. // On 32-bit platforms, skip the negative check since for now, the unsigned upper bound check covers it
  14417. doUpperBoundCheck
  14418. #endif
  14419. ),
  14420. labelCantUseArray,
  14421. labelHelper,
  14422. instr);
  14423. }
  14424. const IR::AutoReuseOpnd autoReuseIndexValueOpnd(indexValueOpnd, m_func);
  14425. if (baseValueType.IsLikelyTypedArray())
  14426. {
  14427. *pIsTypedArrayElement = true;
  14428. if(doUpperBoundCheck)
  14429. {
  14430. if(!headSegmentLengthOpnd)
  14431. {
  14432. // (headSegmentLength = [base + offset(length)])
  14433. int lengthOffset;
  14434. lengthOffset = Js::Float64Array::GetOffsetOfLength();
  14435. headSegmentLengthOpnd = IR::IndirOpnd::New(arrayOpnd, lengthOffset, TyUint32, m_func);
  14436. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  14437. }
  14438. // CMP index, headSegmentLength -- upper bound check
  14439. if(!invertBoundCheckComparison)
  14440. {
  14441. InsertCompare(indexValueOpnd, headSegmentLengthOpnd, instr);
  14442. }
  14443. else
  14444. {
  14445. InsertCompare(headSegmentLengthOpnd, indexValueOpnd, instr);
  14446. }
  14447. }
  14448. }
  14449. else
  14450. {
  14451. *pIsTypedArrayElement = false;
  14452. if (isStore &&
  14453. baseValueType.IsLikelyNativeIntArray() &&
  14454. (!element->IsIntConstOpnd() || Js::SparseArraySegment<int32>::GetMissingItem() == element->AsIntConstOpnd()->AsInt32()))
  14455. {
  14456. Assert(instr->m_opcode != Js::OpCode::InlineArrayPush || bailOutLabelInstr);
  14457. // Check for a write of the MissingItem value.
  14458. InsertMissingItemCompareBranch(
  14459. element,
  14460. Js::OpCode::BrEq_A,
  14461. instr->m_opcode == Js::OpCode::InlineArrayPush ? bailOutLabelInstr : labelCantUseArray,
  14462. instr);
  14463. }
  14464. if(!headSegmentOpnd)
  14465. {
  14466. EnsureObjectArrayLoaded();
  14467. // MOV headSegment, [base + offset(head)]
  14468. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, this->m_func);
  14469. headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  14470. autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
  14471. InsertMove(headSegmentOpnd, indirOpnd, instr);
  14472. }
  14473. if(doUpperBoundCheck)
  14474. {
  14475. if(!headSegmentLengthOpnd)
  14476. {
  14477. // (headSegmentLength = [headSegment + offset(length)])
  14478. headSegmentLengthOpnd =
  14479. IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
  14480. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  14481. }
  14482. // CMP index, headSegmentLength -- upper bound check
  14483. if(!invertBoundCheckComparison)
  14484. {
  14485. InsertCompare(indexValueOpnd, headSegmentLengthOpnd, instr);
  14486. }
  14487. else
  14488. {
  14489. InsertCompare(headSegmentLengthOpnd, indexValueOpnd, instr);
  14490. }
  14491. }
  14492. }
  14493. const IR::BailOutKind bailOutKind = instr->HasBailOutInfo() ? instr->GetBailOutKind() : IR::BailOutInvalid;
  14494. const bool needBailOutOnInvalidLength = !!(bailOutKind & (IR::BailOutOnInvalidatedArrayHeadSegment));
  14495. const bool needBailOutToHelper = !!(bailOutKind & (IR::BailOutOnArrayAccessHelperCall));
  14496. const bool needBailOutOnSegmentLengthCompare = needBailOutToHelper || needBailOutOnInvalidLength;
  14497. bool usingSegmentLengthIncreasedLabel = false;
  14498. if(indexIsLessThanHeadSegmentLength || needBailOutOnSegmentLengthCompare)
  14499. {
  14500. if (needBailOutOnSegmentLengthCompare)
  14501. {
  14502. // The bailout must be pre-op because it will not have completed the operation
  14503. Assert(instr->GetBailOutInfo()->bailOutOffset == instr->GetByteCodeOffset());
  14504. // TODO: Check this with lazy bailout
  14505. // Verify other bailouts these can be combined with
  14506. Assert(
  14507. !(
  14508. bailOutKind &
  14509. IR::BailOutKindBits &
  14510. ~(
  14511. IR::LazyBailOut |
  14512. IR::BailOutOnArrayAccessHelperCall |
  14513. IR::BailOutOnInvalidatedArrayHeadSegment |
  14514. IR::BailOutOnInvalidatedArrayLength |
  14515. IR::BailOutConventionalNativeArrayAccessOnly |
  14516. IR::BailOutOnMissingValue |
  14517. (bailOutKind & IR::BailOutOnArrayAccessHelperCall ? IR::BailOutInvalid : IR::BailOutConvertedNativeArray)
  14518. )
  14519. )
  14520. );
  14521. if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
  14522. {
  14523. // Omit the helper call and generate a bailout instead
  14524. Assert(emitBailoutRef);
  14525. *emitBailoutRef = true;
  14526. }
  14527. }
  14528. if (indexIsLessThanHeadSegmentLength)
  14529. {
  14530. Assert(!(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment));
  14531. }
  14532. else
  14533. {
  14534. IR::LabelInstr *bailOutLabel;
  14535. if (needBailOutOnInvalidLength)
  14536. {
  14537. Assert(isStore);
  14538. // Lower a separate (but shared) bailout for this case, and preserve the bailout kind in the instruction if the
  14539. // helper call is going to be generated, because the bailout kind needs to be lowered again and differently in the
  14540. // helper call path.
  14541. //
  14542. // Generate:
  14543. // (instr)
  14544. // jmp $continue
  14545. // $bailOut:
  14546. // Bail out with IR::BailOutOnInvalidatedArrayHeadSegment
  14547. // $continue:
  14548. LowerOneBailOutKind(
  14549. instr,
  14550. IR::BailOutOnInvalidatedArrayHeadSegment,
  14551. false,
  14552. !(bailOutKind & IR::BailOutOnArrayAccessHelperCall));
  14553. bailOutLabel = instr->GetOrCreateContinueLabel(true);
  14554. InsertBranch(Js::OpCode::Br, labelFallthrough, bailOutLabel);
  14555. }
  14556. else
  14557. {
  14558. Assert(needBailOutToHelper);
  14559. bailOutLabel = labelHelper;
  14560. }
  14561. // Bail out if the index is outside the head segment bounds
  14562. // jae $bailOut
  14563. Assert(checkArrayLengthOverflow);
  14564. InsertBranch(
  14565. !invertBoundCheckComparison ? Js::OpCode::BrGe_A : Js::OpCode::BrLe_A,
  14566. true /* isUnsigned */,
  14567. bailOutLabel,
  14568. instr);
  14569. }
  14570. }
  14571. else if (isStore && !baseValueType.IsLikelyTypedArray()) // #if (opcode == StElemI_A)
  14572. {
  14573. IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  14574. LABELNAME(labelDone);
  14575. IR::LabelInstr *labelSegmentLengthIncreased = nullptr;
  14576. const bool isPush = instr->m_opcode != Js::OpCode::StElemI_A && instr->m_opcode != Js::OpCode::StElemI_A_Strict;
  14577. // Put the head segment size check and length updates in a helper block since they're not the common path for StElem.
  14578. // For push, that is the common path so keep it in a non-helper block.
  14579. const bool isInHelperBlock = !isPush;
  14580. if(checkArrayLengthOverflow)
  14581. {
  14582. if(pLabelSegmentLengthIncreased &&
  14583. !(
  14584. (baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()) ||
  14585. ((instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict) &&
  14586. instr->IsProfiledInstr() && !instr->AsProfiledInstr()->u.stElemInfo->LikelyFillsMissingValue())
  14587. ))
  14588. {
  14589. // For arrays that are not guaranteed to have no missing values, before storing to an element where
  14590. // (index < length), the element value needs to be checked to see if it's a missing value, and if so, fall back
  14591. // to the helper. This is done to keep the missing value tracking precise in arrays. So, create a separate label
  14592. // for the case where the length was increased (index >= length), and pass it back to GenerateFastStElemI, which
  14593. // will fill in the rest.
  14594. labelSegmentLengthIncreased = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelperBlock);
  14595. LABELNAME(labelSegmentLengthIncreased);
  14596. *pLabelSegmentLengthIncreased = labelSegmentLengthIncreased;
  14597. // Since this is effectively a separate exit point, we need to do the spectre mitigations in this place as well.
  14598. usingSegmentLengthIncreasedLabel = true;
  14599. }
  14600. else
  14601. {
  14602. labelSegmentLengthIncreased = labelDone;
  14603. }
  14604. // JB $done
  14605. InsertBranch(
  14606. !invertBoundCheckComparison ? Js::OpCode::BrLt_A : Js::OpCode::BrGt_A,
  14607. true /* isUnsigned */,
  14608. labelDone,
  14609. instr);
  14610. }
  14611. if(isInHelperBlock)
  14612. {
  14613. InsertLabel(true /* isHelper */, instr);
  14614. }
  14615. EnsureObjectArrayLoaded();
  14616. do // while(false);
  14617. {
  14618. if(checkArrayLengthOverflow)
  14619. {
  14620. if(instr->HasBailOutInfo() && instr->GetBailOutKind() & IR::BailOutOnMissingValue)
  14621. {
  14622. // Need to bail out if this store would create a missing value. The store would cause a missing value to be
  14623. // created if (index > length && index < size). If (index >= size) we would go to helper anyway, and the bailout
  14624. // handling for this is done after the helper call, so just go to helper if (index > length).
  14625. //
  14626. // jne $helper // branch for (cmp index, headSegmentLength)
  14627. InsertBranch(Js::OpCode::BrNeq_A, labelHelper, instr);
  14628. }
  14629. else
  14630. {
  14631. // If (index < size) we will not call the helper, so the array flags must be updated to reflect that it no
  14632. // longer has no missing values.
  14633. //
  14634. // jne indexGreaterThanLength // branch for (cmp index, headSegmentLength)
  14635. // cmp index, [headSegment + offset(size)]
  14636. // jae $helper
  14637. // jmp indexLessThanSize
  14638. // indexGreaterThanLength:
  14639. // cmp index, [headSegment + offset(size)]
  14640. // jae $helper
  14641. // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
  14642. // indexLessThanSize:
  14643. // if(!index->IsConstOpnd()) {
  14644. // sub temp, index, [headSegment + offset(size)]
  14645. // sar temp, 31
  14646. // and index, temp
  14647. // }
  14648. IR::LabelInstr *const indexGreaterThanLengthLabel = InsertLabel(true /* isHelper */, instr);
  14649. LABELNAME(indexGreaterThanLengthLabel);
  14650. IR::LabelInstr *const indexLessThanSizeLabel = InsertLabel(isInHelperBlock, instr);
  14651. LABELNAME(indexLessThanSizeLabel);
  14652. // jne indexGreaterThanLength // branch for (cmp index, headSegmentLength)
  14653. InsertBranch(Js::OpCode::BrNeq_A, indexGreaterThanLengthLabel, indexGreaterThanLengthLabel);
  14654. // cmp index, [headSegment + offset(size)]
  14655. // jae $helper
  14656. // jmp indexLessThanSize
  14657. // indexGreaterThanLength:
  14658. InsertCompareBranch(
  14659. indexValueOpnd,
  14660. IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
  14661. Js::OpCode::BrGe_A,
  14662. true /* isUnsigned */,
  14663. labelHelper,
  14664. indexGreaterThanLengthLabel);
  14665. InsertBranch(Js::OpCode::Br, indexLessThanSizeLabel, indexGreaterThanLengthLabel);
  14666. // indexGreaterThanLength:
  14667. // cmp index, [headSegment + offset(size)]
  14668. // jae $helper
  14669. // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
  14670. // indexLessThanSize:
  14671. InsertCompareBranch(
  14672. indexValueOpnd,
  14673. IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
  14674. Js::OpCode::BrGe_A,
  14675. true /* isUnsigned */,
  14676. labelHelper,
  14677. indexLessThanSizeLabel);
  14678. CompileAssert(
  14679. static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
  14680. Js::DynamicObjectFlags::HasNoMissingValues);
  14681. InsertAnd(
  14682. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  14683. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  14684. IR::IntConstOpnd::New(
  14685. static_cast<uint8>(~Js::DynamicObjectFlags::HasNoMissingValues),
  14686. TyUint8,
  14687. m_func,
  14688. true),
  14689. indexLessThanSizeLabel);
  14690. // In speculative cases, we want to avoid a write to an array setting the length to something huge, which
  14691. // would then allow subsequent reads to hit arbitrary memory (in the speculative path). This is done with
  14692. // a mask generated from the difference between the index and the size. Since we should have already gone
  14693. // to the helper in any case where this would execute, it's a functional no-op.
  14694. // indexLessThanSize:
  14695. // In speculative cases, we want to avoid a write to an array setting the length to something huge, which
  14696. // would then allow subsequent reads to hit arbitrary memory (in the speculative path). This is done with
  14697. // a mask generated from the difference between the index and the size. Since we should have already gone
  14698. // to the helper in any case where this would execute, it's a functional no-op.
  14699. // if(!index->IsConstOpnd()) {
  14700. // sub temp, index, [headSegment + offset(size)]
  14701. // sar temp, 31
  14702. // and index, temp
  14703. // }
  14704. if (!indexValueOpnd->IsConstOpnd()
  14705. && (baseValueType.IsLikelyTypedArray()
  14706. ? CONFIG_FLAG_RELEASE(PoisonTypedArrayStore)
  14707. : ((indirType == TyVar && CONFIG_FLAG_RELEASE(PoisonVarArrayStore))
  14708. || (IRType_IsNativeInt(indirType) && CONFIG_FLAG_RELEASE(PoisonIntArrayStore))
  14709. || (IRType_IsFloat(indirType) && CONFIG_FLAG_RELEASE(PoisonFloatArrayStore)))
  14710. )
  14711. )
  14712. {
  14713. IR::RegOpnd* temp = IR::RegOpnd::New(TyUint32, m_func);
  14714. InsertSub(
  14715. false,
  14716. temp,
  14717. indexValueOpnd,
  14718. IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
  14719. instr);
  14720. InsertShift(Js::OpCode::Shr_A, false, temp, temp, IR::IntConstOpnd::New(31, TyInt8, m_func), instr);
  14721. InsertAnd(indexValueOpnd, indexValueOpnd, temp, instr);
  14722. }
  14723. break;
  14724. }
  14725. }
  14726. // CMP index, [headSegment + offset(size)]
  14727. // JAE $helper
  14728. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, this->m_func);
  14729. InsertCompareBranch(indexValueOpnd, indirOpnd, Js::OpCode::BrGe_A, true /* isUnsigned */, labelHelper, instr);
  14730. } while(false);
  14731. if(isPush)
  14732. {
  14733. IR::LabelInstr *const updateLengthLabel = InsertLabel(isInHelperBlock, instr);
  14734. LABELNAME(updateLengthLabel);
  14735. if(!doUpperBoundCheck && !headSegmentLengthOpnd)
  14736. {
  14737. // (headSegmentLength = [headSegment + offset(length)])
  14738. headSegmentLengthOpnd =
  14739. IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
  14740. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  14741. }
  14742. // For push, it is guaranteed that (index >= length). We already know that (index < size), but we need to check if
  14743. // (index > length) because in that case a missing value will be created and the missing value tracking in the array
  14744. // needs to be updated.
  14745. //
  14746. // cmp index, headSegmentLength
  14747. // je $updateLength
  14748. // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
  14749. // updateLength:
  14750. InsertCompareBranch(
  14751. indexValueOpnd,
  14752. headSegmentLengthOpnd,
  14753. Js::OpCode::BrEq_A,
  14754. updateLengthLabel,
  14755. updateLengthLabel);
  14756. CompileAssert(
  14757. static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
  14758. Js::DynamicObjectFlags::HasNoMissingValues);
  14759. InsertAnd(
  14760. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  14761. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  14762. IR::IntConstOpnd::New(
  14763. static_cast<uint8>(~Js::DynamicObjectFlags::HasNoMissingValues),
  14764. TyUint8,
  14765. m_func,
  14766. true),
  14767. updateLengthLabel);
  14768. }
  14769. if (baseValueType.IsArrayOrObjectWithArray())
  14770. {
  14771. // We didn't emit an array check, but if we are going to grow the array
  14772. // We need to go to helper if there is an ES5 array/objectarray used as prototype
  14773. GenerateIsEnabledArraySetElementFastPathCheck(labelHelper, instr);
  14774. }
  14775. IR::Opnd *newLengthOpnd;
  14776. IR::AutoReuseOpnd autoReuseNewLengthOpnd;
  14777. if (indexValueOpnd->IsRegOpnd())
  14778. {
  14779. // LEA newLength, [index + 1]
  14780. newLengthOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
  14781. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  14782. InsertAdd(false /* needFlags */, newLengthOpnd, indexValueOpnd, IR::IntConstOpnd::New(1, TyUint32, m_func), instr);
  14783. }
  14784. else
  14785. {
  14786. newLengthOpnd = IR::IntConstOpnd::New(value + 1, TyUint32, this->m_func);
  14787. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  14788. }
  14789. // This is a common enough case that we want to go through this path instead of the simpler one, since doing it this way is faster for preallocated but un-filled arrays.
  14790. if (!!(bailOutKind & IR::BailOutOnInvalidatedArrayLength))
  14791. {
  14792. // If we'd increase the array length, go to the helper
  14793. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  14794. InsertCompareBranch(
  14795. newLengthOpnd,
  14796. indirOpnd,
  14797. Js::OpCode::BrGt_A,
  14798. true,
  14799. labelHelper,
  14800. instr);
  14801. }
  14802. // MOV [headSegment + offset(length)], newLength
  14803. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, length), TyUint32, this->m_func);
  14804. InsertMove(indirOpnd, newLengthOpnd, instr);
  14805. // We've changed the head segment length, so we may need to change the head segment length opnd
  14806. if (headSegmentLengthOpnd != nullptr && !headSegmentLengthOpnd->IsIndirOpnd())
  14807. {
  14808. InsertMove(headSegmentLengthOpnd, newLengthOpnd, instr);
  14809. }
  14810. if (checkArrayLengthOverflow)
  14811. {
  14812. // CMP newLength, [base + offset(length)]
  14813. // JBE $segmentLengthIncreased
  14814. Assert(labelSegmentLengthIncreased);
  14815. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  14816. InsertCompareBranch(
  14817. newLengthOpnd,
  14818. indirOpnd,
  14819. Js::OpCode::BrLe_A,
  14820. true /* isUnsigned */,
  14821. labelSegmentLengthIncreased,
  14822. instr);
  14823. if(!isInHelperBlock)
  14824. {
  14825. InsertLabel(true /* isHelper */, instr);
  14826. }
  14827. }
  14828. // MOV [base + offset(length)], newLength
  14829. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  14830. InsertMove(indirOpnd, newLengthOpnd, instr);
  14831. if(returnLength)
  14832. {
  14833. if(newLengthOpnd->GetSize() != MachPtr)
  14834. {
  14835. newLengthOpnd = newLengthOpnd->UseWithNewType(TyMachPtr, m_func)->AsRegOpnd();
  14836. }
  14837. // SHL newLength, AtomTag
  14838. // INC newLength
  14839. this->m_lowererMD.GenerateInt32ToVarConversion(newLengthOpnd, instr);
  14840. // MOV dst, newLength
  14841. InsertMove(instr->GetDst(), newLengthOpnd, instr);
  14842. }
  14843. // Calling code assumes that indirOpnd is initialized before labelSegmentLengthIncreased is reached
  14844. if(labelSegmentLengthIncreased && labelSegmentLengthIncreased != labelDone)
  14845. {
  14846. // labelSegmentLengthIncreased:
  14847. instr->InsertBefore(labelSegmentLengthIncreased);
  14848. }
  14849. // $done
  14850. instr->InsertBefore(labelDone);
  14851. }
  14852. else // #else
  14853. {
  14854. if (checkArrayLengthOverflow)
  14855. {
  14856. if (*pIsTypedArrayElement && isStore)
  14857. {
  14858. IR::LabelInstr *labelInlineSet = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  14859. LABELNAME(labelInlineSet);
  14860. //For positive index beyond length or negative index its essentially nop for typed array store
  14861. InsertBranch(
  14862. !invertBoundCheckComparison ? Js::OpCode::BrLt_A : Js::OpCode::BrGt_A,
  14863. true /* isUnsigned */,
  14864. labelInlineSet,
  14865. instr);
  14866. // For typed array, call ToNumber before we fallThrough.
  14867. if (instr->GetSrc1()->GetType() == TyVar && !instr->GetSrc1()->GetValueType().IsPrimitive())
  14868. {
  14869. // Enter an ophelper block
  14870. IR::LabelInstr * opHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  14871. LABELNAME(opHelper);
  14872. instr->InsertBefore(opHelper);
  14873. IR::Instr *toNumberInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
  14874. toNumberInstr->SetSrc1(instr->GetSrc1());
  14875. instr->InsertBefore(toNumberInstr);
  14876. if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
  14877. {
  14878. // Bail out if this conversion triggers implicit calls.
  14879. toNumberInstr = this->AddBailoutToHelperCallInstr(toNumberInstr, instr->GetBailOutInfo(), bailOutKind, instr);
  14880. }
  14881. LowerUnaryHelperMem(toNumberInstr, IR::HelperOp_ConvNumber_Full);
  14882. }
  14883. InsertBranch(Js::OpCode::Br, labelFallthrough, instr); //Jump to fallThrough
  14884. instr->InsertBefore(labelInlineSet);
  14885. }
  14886. else
  14887. {
  14888. // JAE $helper
  14889. InsertBranch(
  14890. !invertBoundCheckComparison ? Js::OpCode::BrGe_A : Js::OpCode::BrLe_A,
  14891. true /* isUnsigned */,
  14892. labelHelper,
  14893. instr);
  14894. }
  14895. }
  14896. EnsureObjectArrayLoaded();
  14897. if (instr->m_opcode == Js::OpCode::InlineArrayPop)
  14898. {
  14899. Assert(!baseValueType.IsLikelyTypedArray());
  14900. Assert(bailOutLabelInstr);
  14901. if (indexValueOpnd->IsIntConstOpnd())
  14902. {
  14903. // indirOpnd = [headSegment + index + offset(elements)]
  14904. IntConstType offset = offsetof(Js::SparseArraySegment<Js::Var>, elements) + (value << indirScale);
  14905. // TODO: Assert(Math::FitsInDWord(offset));
  14906. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
  14907. }
  14908. else
  14909. {
  14910. // indirOpnd = [headSegment + offset(elements) + (index << scale)]
  14911. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
  14912. indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
  14913. }
  14914. IR::Opnd * tmpDst = nullptr;
  14915. IR::Opnd * dst = instr->GetDst();
  14916. // Pop might not have a dst, if not don't worry about returning the last element. But we still have to
  14917. // worry about gaps, because these force us to access the prototype chain, which may have side-effects.
  14918. if (dst || !baseValueType.HasNoMissingValues())
  14919. {
  14920. if (!dst)
  14921. {
  14922. dst = IR::RegOpnd::New(indirType, this->m_func);
  14923. }
  14924. else if (dst->AsRegOpnd()->m_sym == arrayOpnd->m_sym)
  14925. {
  14926. tmpDst = IR::RegOpnd::New(TyVar, this->m_func);
  14927. dst = tmpDst;
  14928. }
  14929. // Use a mask to prevent arbitrary speculative reads
  14930. // If you think this code looks highly similar to the code later in this function,
  14931. // you'd be right. Unfortunately, I wasn't able to find a way to reduce duplication
  14932. // here without significantly complicating the code structure.
  14933. if (!headSegmentLengthOpnd)
  14934. {
  14935. headSegmentLengthOpnd =
  14936. IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
  14937. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  14938. }
  14939. IR::RegOpnd* localMaskOpnd = nullptr;
  14940. #if TARGET_64
  14941. IR::Opnd* lengthOpnd = nullptr;
  14942. AnalysisAssert(headSegmentLengthOpnd != nullptr);
  14943. lengthOpnd = IR::RegOpnd::New(headSegmentLengthOpnd->GetType(), m_func);
  14944. {
  14945. IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, lengthOpnd, headSegmentLengthOpnd, m_func);
  14946. instr->InsertBefore(instrMov);
  14947. LowererMD::Legalize(instrMov);
  14948. }
  14949. if (lengthOpnd->GetSize() != MachPtr)
  14950. {
  14951. lengthOpnd = lengthOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
  14952. }
  14953. // MOV r1, [opnd + offset(type)]
  14954. IR::RegOpnd* indexValueRegOpnd = IR::RegOpnd::New(indexValueOpnd->GetType(), m_func);
  14955. {
  14956. IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, indexValueRegOpnd, indexValueOpnd, m_func);
  14957. instr->InsertBefore(instrMov);
  14958. LowererMD::Legalize(instrMov);
  14959. }
  14960. if (indexValueRegOpnd->GetSize() != MachPtr)
  14961. {
  14962. indexValueRegOpnd = indexValueRegOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
  14963. }
  14964. localMaskOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
  14965. InsertSub(false, localMaskOpnd, indexValueRegOpnd, lengthOpnd, instr);
  14966. InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(63, TyInt8, m_func), instr);
  14967. #else
  14968. localMaskOpnd = IR::RegOpnd::New(TyInt32, m_func);
  14969. InsertSub(false, localMaskOpnd, indexValueOpnd, headSegmentLengthOpnd, instr);
  14970. InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), instr);
  14971. #endif
  14972. // for pop we always do the masking before the load in cases where we load a value
  14973. IR::RegOpnd* loadAddr = IR::RegOpnd::New(TyMachPtr, m_func);
  14974. #if _M_ARM32_OR_ARM64
  14975. if (indirOpnd->GetIndexOpnd() != nullptr && indirOpnd->GetScale() > 0)
  14976. {
  14977. // We don't support encoding for LEA with scale on ARM/ARM64, so do the scale calculation as a separate instruction
  14978. IR::RegOpnd* fullIndexOpnd = IR::RegOpnd::New(indirOpnd->GetIndexOpnd()->GetType(), m_func);
  14979. InsertShift(Js::OpCode::Shl_A, false, fullIndexOpnd, indirOpnd->GetIndexOpnd(), IR::IntConstOpnd::New(indirOpnd->GetScale(), TyInt8, m_func), instr);
  14980. IR::IndirOpnd* newIndir = IR::IndirOpnd::New(indirOpnd->GetBaseOpnd(), fullIndexOpnd, indirType, m_func);
  14981. if (indirOpnd->GetOffset() != 0)
  14982. {
  14983. newIndir->SetOffset(indirOpnd->GetOffset());
  14984. }
  14985. indirOpnd = newIndir;
  14986. }
  14987. #endif
  14988. IR::AutoReuseOpnd reuseIndir(indirOpnd, m_func);
  14989. InsertLea(loadAddr, indirOpnd, instr);
  14990. InsertAnd(loadAddr, loadAddr, localMaskOpnd, instr);
  14991. indirOpnd = IR::IndirOpnd::New(loadAddr, 0, indirType, m_func);
  14992. // MOV dst, [head + offset]
  14993. InsertMove(dst, indirOpnd, instr);
  14994. //If the array has missing values, check for one
  14995. if (!baseValueType.HasNoMissingValues())
  14996. {
  14997. InsertMissingItemCompareBranch(
  14998. dst,
  14999. Js::OpCode::BrEq_A,
  15000. bailOutLabelInstr,
  15001. instr);
  15002. }
  15003. }
  15004. // MOV [head + offset], missing
  15005. InsertMove(indirOpnd, GetMissingItemOpndForAssignment(indirType, m_func), instr);
  15006. IR::Opnd *newLengthOpnd;
  15007. IR::AutoReuseOpnd autoReuseNewLengthOpnd;
  15008. if (indexValueOpnd->IsRegOpnd())
  15009. {
  15010. // LEA newLength, [index]
  15011. newLengthOpnd = indexValueOpnd;
  15012. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  15013. }
  15014. else
  15015. {
  15016. newLengthOpnd = IR::IntConstOpnd::New(value, TyUint32, this->m_func);
  15017. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  15018. }
  15019. //update segment length and array length
  15020. // MOV [headSegment + offset(length)], newLength
  15021. IR::IndirOpnd *lengthIndirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, length), TyUint32, this->m_func);
  15022. InsertMove(lengthIndirOpnd, newLengthOpnd, instr);
  15023. // We've changed the head segment length, so we may need to change the head segment length opnd
  15024. if (headSegmentLengthOpnd != nullptr && !headSegmentLengthOpnd->IsIndirOpnd())
  15025. {
  15026. InsertMove(headSegmentLengthOpnd, newLengthOpnd, instr);
  15027. }
  15028. // MOV [base + offset(length)], newLength
  15029. lengthIndirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  15030. InsertMove(lengthIndirOpnd, newLengthOpnd, instr);
  15031. if (tmpDst)
  15032. {
  15033. // The array opnd and the destination is the same, need to move the value in the tmp dst
  15034. // to the actual dst
  15035. InsertMove(instr->GetDst(), tmpDst, instr);
  15036. }
  15037. return indirOpnd;
  15038. }
  15039. } // #endif
  15040. // Should we poison the load of the address to/from which the store/load happens?
  15041. bool shouldPoisonLoad = maskOpnd != nullptr
  15042. && (
  15043. (!isStore && (!instr->IsSafeToSpeculate()) &&
  15044. (baseValueType.IsLikelyTypedArray()
  15045. ? CONFIG_FLAG_RELEASE(PoisonTypedArrayLoad)
  15046. : ((indirType == TyVar && CONFIG_FLAG_RELEASE(PoisonVarArrayLoad))
  15047. || (IRType_IsNativeInt(indirType) && CONFIG_FLAG_RELEASE(PoisonIntArrayLoad))
  15048. || (IRType_IsFloat(indirType) && CONFIG_FLAG_RELEASE(PoisonFloatArrayLoad)))
  15049. )
  15050. )
  15051. ||
  15052. (isStore &&
  15053. (baseValueType.IsLikelyTypedArray()
  15054. ? CONFIG_FLAG_RELEASE(PoisonTypedArrayStore)
  15055. : ((indirType == TyVar && CONFIG_FLAG_RELEASE(PoisonVarArrayStore))
  15056. || (IRType_IsNativeInt(indirType) && CONFIG_FLAG_RELEASE(PoisonIntArrayStore))
  15057. || (IRType_IsFloat(indirType) && CONFIG_FLAG_RELEASE(PoisonFloatArrayStore)))
  15058. )
  15059. )
  15060. )
  15061. ;
  15062. // We have two exit paths for this function in the store case when we might grow the head
  15063. // segment, due to tracking for missing elements. This unfortunately means that we need a
  15064. // copy of the poisoning code on the other exit path, since the determination of the path
  15065. // and the use of the path determination to decide whether we found the missing value are
  15066. // things that have to happen on opposite sides of the poisoning.
  15067. IR::Instr* insertForSegmentLengthIncreased = nullptr;
  15068. if (shouldPoisonLoad && usingSegmentLengthIncreasedLabel)
  15069. {
  15070. insertForSegmentLengthIncreased = (*pLabelSegmentLengthIncreased)->m_next;
  15071. }
  15072. #if TARGET_32
  15073. if (shouldPoisonLoad)
  15074. {
  15075. // Prevent index from being negative, which would break the poisoning
  15076. if (indexValueOpnd->IsIntConstOpnd())
  15077. {
  15078. indexValueOpnd = IR::IntConstOpnd::New(value & INT32_MAX, TyUint32, m_func);
  15079. }
  15080. else
  15081. {
  15082. IR::RegOpnd* newIndexValueOpnd = IR::RegOpnd::New(TyUint32, m_func);
  15083. InsertAnd(newIndexValueOpnd, indexValueOpnd, IR::IntConstOpnd::New(INT32_MAX, TyUint32, m_func), instr);
  15084. if(insertForSegmentLengthIncreased != nullptr)
  15085. {
  15086. InsertAnd(newIndexValueOpnd, indexValueOpnd, IR::IntConstOpnd::New(INT32_MAX, TyUint32, m_func), insertForSegmentLengthIncreased);
  15087. }
  15088. indexValueOpnd = newIndexValueOpnd;
  15089. }
  15090. }
  15091. #endif
  15092. if (baseValueType.IsLikelyTypedArray())
  15093. {
  15094. if(!headSegmentOpnd)
  15095. {
  15096. // MOV headSegment, [base + offset(arrayBuffer)]
  15097. int bufferOffset;
  15098. bufferOffset = Js::Float64Array::GetOffsetOfBuffer();
  15099. indirOpnd = IR::IndirOpnd::New(arrayOpnd, bufferOffset, TyMachPtr, this->m_func);
  15100. headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  15101. autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
  15102. IR::AutoReuseOpnd reuseIndir(indirOpnd, m_func);
  15103. InsertMove(headSegmentOpnd, indirOpnd, instr);
  15104. if(insertForSegmentLengthIncreased != nullptr)
  15105. {
  15106. InsertMove(headSegmentOpnd, indirOpnd, insertForSegmentLengthIncreased);
  15107. }
  15108. }
  15109. // indirOpnd = [headSegment + index]
  15110. if (indexValueOpnd->IsIntConstOpnd())
  15111. {
  15112. IntConstType offset = (value << indirScale);
  15113. // TODO: Assert(Math::FitsInDWord(offset));
  15114. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
  15115. }
  15116. else
  15117. {
  15118. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
  15119. }
  15120. }
  15121. else if (indexValueOpnd->IsIntConstOpnd())
  15122. {
  15123. // indirOpnd = [headSegment + index + offset(elements)]
  15124. IntConstType offset = offsetof(Js::SparseArraySegment<Js::Var>, elements) + (value << indirScale);
  15125. // TODO: Assert(Math::FitsInDWord(offset));
  15126. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
  15127. }
  15128. else
  15129. {
  15130. // indirOpnd = [headSegment + offset(elements) + (index << scale)]
  15131. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
  15132. indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
  15133. }
  15134. if (shouldPoisonLoad)
  15135. {
  15136. // Use a mask to prevent arbitrary speculative reads
  15137. if (!headSegmentLengthOpnd
  15138. #if ENABLE_FAST_ARRAYBUFFER
  15139. && !baseValueType.IsLikelyOptimizedVirtualTypedArray()
  15140. #endif
  15141. )
  15142. {
  15143. if (baseValueType.IsLikelyTypedArray())
  15144. {
  15145. int lengthOffset;
  15146. lengthOffset = GetArrayOffsetOfLength(baseValueType);
  15147. headSegmentLengthOpnd = IR::IndirOpnd::New(arrayOpnd, lengthOffset, TyUint32, m_func);
  15148. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  15149. }
  15150. else
  15151. {
  15152. headSegmentLengthOpnd =
  15153. IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
  15154. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  15155. }
  15156. }
  15157. IR::RegOpnd* localMaskOpnd = nullptr;
  15158. #if TARGET_64
  15159. IR::Opnd* lengthOpnd = nullptr;
  15160. #if ENABLE_FAST_ARRAYBUFFER
  15161. if (baseValueType.IsLikelyOptimizedVirtualTypedArray())
  15162. {
  15163. lengthOpnd = IR::IntConstOpnd::New(MAX_ASMJS_ARRAYBUFFER_LENGTH >> indirScale, TyMachReg, m_func);
  15164. }
  15165. else
  15166. #endif
  15167. {
  15168. AnalysisAssert(headSegmentLengthOpnd != nullptr);
  15169. lengthOpnd = IR::RegOpnd::New(headSegmentLengthOpnd->GetType(), m_func);
  15170. IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, lengthOpnd, headSegmentLengthOpnd, m_func);
  15171. instr->InsertBefore(instrMov);
  15172. LowererMD::Legalize(instrMov);
  15173. if (insertForSegmentLengthIncreased != nullptr)
  15174. {
  15175. IR::Instr * instrMov2 = IR::Instr::New(Js::OpCode::MOV_TRUNC, lengthOpnd, headSegmentLengthOpnd, m_func);
  15176. insertForSegmentLengthIncreased->InsertBefore(instrMov2);
  15177. LowererMD::Legalize(instrMov2);
  15178. }
  15179. if (lengthOpnd->GetSize() != MachPtr)
  15180. {
  15181. lengthOpnd = lengthOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
  15182. }
  15183. }
  15184. // MOV r1, [opnd + offset(type)]
  15185. IR::RegOpnd* indexValueRegOpnd = IR::RegOpnd::New(indexValueOpnd->GetType(), m_func);
  15186. IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, indexValueRegOpnd, indexValueOpnd, m_func);
  15187. instr->InsertBefore(instrMov);
  15188. LowererMD::Legalize(instrMov);
  15189. if (insertForSegmentLengthIncreased != nullptr)
  15190. {
  15191. IR::Instr * instrMov2 = IR::Instr::New(Js::OpCode::MOV_TRUNC, indexValueRegOpnd, indexValueOpnd, m_func);
  15192. insertForSegmentLengthIncreased->InsertBefore(instrMov2);
  15193. LowererMD::Legalize(instrMov2);
  15194. }
  15195. if (indexValueRegOpnd->GetSize() != MachPtr)
  15196. {
  15197. indexValueRegOpnd = indexValueRegOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
  15198. }
  15199. localMaskOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
  15200. InsertSub(false, localMaskOpnd, indexValueRegOpnd, lengthOpnd, instr);
  15201. InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(63, TyInt8, m_func), instr);
  15202. if (insertForSegmentLengthIncreased != nullptr)
  15203. {
  15204. InsertSub(false, localMaskOpnd, indexValueRegOpnd, lengthOpnd, insertForSegmentLengthIncreased);
  15205. InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(63, TyInt8, m_func), insertForSegmentLengthIncreased);
  15206. }
  15207. #else
  15208. localMaskOpnd = IR::RegOpnd::New(TyInt32, m_func);
  15209. InsertSub(false, localMaskOpnd, indexValueOpnd, headSegmentLengthOpnd, instr);
  15210. InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), instr);
  15211. if (insertForSegmentLengthIncreased != nullptr)
  15212. {
  15213. InsertSub(false, localMaskOpnd, indexValueOpnd, headSegmentLengthOpnd, insertForSegmentLengthIncreased);
  15214. InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), insertForSegmentLengthIncreased);
  15215. }
  15216. #endif
  15217. if ((IRType_IsNativeInt(indirType) || indirType == TyVar) && !isStore)
  15218. {
  15219. *maskOpnd = localMaskOpnd;
  15220. }
  15221. else
  15222. {
  15223. // for float values, do the poisoning before the load to avoid needing slow floating point conversions
  15224. IR::RegOpnd* loadAddr = IR::RegOpnd::New(TyMachPtr, m_func);
  15225. #if _M_ARM32_OR_ARM64
  15226. if (indirOpnd->GetIndexOpnd() != nullptr && indirOpnd->GetScale() > 0)
  15227. {
  15228. // We don't support encoding for LEA with scale on ARM/ARM64, so do the scale calculation as a separate instruction
  15229. IR::RegOpnd* fullIndexOpnd = IR::RegOpnd::New(indirOpnd->GetIndexOpnd()->GetType(), m_func);
  15230. InsertShift(Js::OpCode::Shl_A, false, fullIndexOpnd, indirOpnd->GetIndexOpnd(), IR::IntConstOpnd::New(indirOpnd->GetScale(), TyInt8, m_func), instr);
  15231. IR::IndirOpnd* newIndir = IR::IndirOpnd::New(indirOpnd->GetBaseOpnd(), fullIndexOpnd, indirType, m_func);
  15232. if (insertForSegmentLengthIncreased != nullptr)
  15233. {
  15234. InsertShift(Js::OpCode::Shl_A, false, fullIndexOpnd, indirOpnd->GetIndexOpnd(), IR::IntConstOpnd::New(indirOpnd->GetScale(), TyInt8, m_func), insertForSegmentLengthIncreased);
  15235. }
  15236. if (indirOpnd->GetOffset() != 0)
  15237. {
  15238. newIndir->SetOffset(indirOpnd->GetOffset());
  15239. }
  15240. indirOpnd = newIndir;
  15241. }
  15242. #endif
  15243. IR::AutoReuseOpnd reuseIndir(indirOpnd, m_func);
  15244. InsertLea(loadAddr, indirOpnd, instr);
  15245. InsertAnd(loadAddr, loadAddr, localMaskOpnd, instr);
  15246. if (insertForSegmentLengthIncreased != nullptr)
  15247. {
  15248. InsertLea(loadAddr, indirOpnd, insertForSegmentLengthIncreased);
  15249. InsertAnd(loadAddr, loadAddr, localMaskOpnd, insertForSegmentLengthIncreased);
  15250. // We want to export a segmentLengthIncreasedLabel to the caller that is after the poisoning
  15251. // code, since that's also the code that generates indirOpnd in this case.
  15252. IR::LabelInstr* exportedSegmentLengthIncreasedLabel = IR::LabelInstr::New(Js::OpCode::Label, insertForSegmentLengthIncreased->m_func, (*pLabelSegmentLengthIncreased)->isOpHelper);
  15253. LABELNAME(exportedSegmentLengthIncreasedLabel);
  15254. insertForSegmentLengthIncreased->InsertBefore(exportedSegmentLengthIncreasedLabel);
  15255. *pLabelSegmentLengthIncreased = exportedSegmentLengthIncreasedLabel;
  15256. }
  15257. indirOpnd = IR::IndirOpnd::New(loadAddr, 0, indirType, m_func);
  15258. }
  15259. }
  15260. return indirOpnd;
  15261. }
  15262. IR::BranchInstr*
  15263. Lowerer::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr)
  15264. {
  15265. IR::Opnd* missingItemOpnd = GetMissingItemOpndForCompare(compareSrc->GetType(), m_func);
  15266. if (compareSrc->IsFloat64())
  15267. {
  15268. Assert(compareSrc->IsRegOpnd() || compareSrc->IsIndirOpnd());
  15269. return m_lowererMD.InsertMissingItemCompareBranch(compareSrc, missingItemOpnd, opcode, target, insertBeforeInstr);
  15270. }
  15271. else
  15272. {
  15273. Assert(compareSrc->IsInt32() || compareSrc->IsVar());
  15274. return InsertCompareBranch(missingItemOpnd, compareSrc, opcode, target, insertBeforeInstr, true);
  15275. }
  15276. }
  15277. IR::RegOpnd *
  15278. Lowerer::GenerateUntagVar(IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateTagCheck)
  15279. {
  15280. if (!opnd->IsVar())
  15281. {
  15282. AssertMsg(opnd->GetSize() == 4, "This should be 32-bit wide");
  15283. return opnd;
  15284. }
  15285. AssertMsg(!opnd->IsNotInt(), "An opnd we know is not an int should not try to untag it as it will always fail");
  15286. if (opnd->m_sym->IsIntConst())
  15287. {
  15288. int32 constValue = opnd->m_sym->GetIntConstValue();
  15289. IR::IntConstOpnd* constOpnd = IR::IntConstOpnd::New(constValue, TyInt32, this->m_func);
  15290. IR::RegOpnd* regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  15291. InsertMove(regOpnd, constOpnd, insertBeforeInstr);
  15292. return regOpnd;
  15293. }
  15294. return m_lowererMD.GenerateUntagVar(opnd, labelFail, insertBeforeInstr, generateTagCheck && !opnd->IsTaggedInt());
  15295. }
// Emit a test of opndSrc against itself and branch to isZeroLabel when the
// value is zero (BrEq_A taken on equal-to-zero). Despite the name, the emitted
// code *falls through* on a non-zero value; the branch is the zero case.
void
Lowerer::GenerateNotZeroTest( IR::Opnd * opndSrc, IR::LabelInstr * isZeroLabel, IR::Instr * insertBeforeInstr)
{
    InsertTestBranch(opndSrc, opndSrc, Js::OpCode::BrEq_A, isZeroLabel, insertBeforeInstr);
}
// Attempt to emit the fast path for an indexed load from a string
// (str[i] -> single-character string from the char-string cache).
// Returns true if the fast path was emitted; false if the caller should not
// use a fast path (base not likely a string, or index not likely int).
// On the emitted path, any failing check jumps to labelHelper; success stores
// the cached single-char string into ldElem's dst and jumps to labelFallThru.
bool
Lowerer::GenerateFastStringLdElem(IR::Instr * ldElem, IR::LabelInstr * labelHelper, IR::LabelInstr * labelFallThru)
{
    IR::IndirOpnd * indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
    IR::RegOpnd * baseOpnd = indirOpnd->GetBaseOpnd();

    // don't generate the fast path if the instance is not likely string
    if (!baseOpnd->GetValueType().IsLikelyString())
    {
        return false;
    }
    Assert(!baseOpnd->IsTaggedInt());

    IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
    // Don't generate the fast path if the index operand is not likely int
    if (indexOpnd && !indexOpnd->GetValueType().IsLikelyInt())
    {
        return false;
    }

    // Make sure the instance is a string
    Assert(!indexOpnd || !indexOpnd->IsNotInt());
    GenerateStringTest(baseOpnd, ldElem, labelHelper);

    IR::Opnd * index32CmpOpnd;
    IR::RegOpnd * bufferOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseBufferOpnd(bufferOpnd, m_func);
    IR::IndirOpnd * charIndirOpnd;
    if (indexOpnd)
    {
        // Untag the var and generate the indir into the string buffer
        // (scale 1 because the index is later scaled for 16-bit chars via the indir).
        IR::RegOpnd * index32Opnd = GenerateUntagVar(indexOpnd, labelHelper, ldElem);
        charIndirOpnd = IR::IndirOpnd::New(bufferOpnd, index32Opnd, 1, TyUint16, this->m_func);
        index32CmpOpnd = index32Opnd;
    }
    else
    {
        // Just use the offset to indirect into the string buffer
        charIndirOpnd = IR::IndirOpnd::New(bufferOpnd, indirOpnd->GetOffset() * sizeof(char16), TyUint16, this->m_func);
        index32CmpOpnd = IR::IntConstOpnd::New((uint32)indirOpnd->GetOffset(), TyUint32, this->m_func);
    }

    // Check if the index is in range of the string length
    //  CMP [baseOpnd + offset(length)], indexOpnd     --  string length
    //  JBE $helper                                    -- unsigned compare, and string length are at most INT_MAX - 1
    //                                                 -- so that even if we have a negative index, this will fail
    IR::RegOpnd* lengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(lengthOpnd, IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func), ldElem);
    InsertCompareBranch(lengthOpnd, index32CmpOpnd, Js::OpCode::BrLe_A, true, labelHelper, ldElem);

    // Load the string buffer and make sure it is not null
    //  MOV bufferOpnd, [baseOpnd + offset(m_pszValue)]
    //  TEST bufferOpnd, bufferOpnd
    //  JEQ $labelHelper
    indirOpnd = IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func);
    InsertMove(bufferOpnd, indirOpnd, ldElem);
    GenerateNotZeroTest(bufferOpnd, labelHelper, ldElem);

    // Spectre-style mitigation: compute a mask that zeroes the loaded char on
    // an out-of-bounds (speculative) access.
    IR::RegOpnd* maskOpnd = nullptr;
    if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
    {
        // Mask off the sign before loading so that poisoning will work for negative indices
        if (index32CmpOpnd->IsIntConstOpnd())
        {
            charIndirOpnd->SetOffset((index32CmpOpnd->AsIntConstOpnd()->AsUint32() & INT32_MAX) * sizeof(char16));
        }
        else
        {
            InsertAnd(index32CmpOpnd, index32CmpOpnd, IR::IntConstOpnd::New(INT32_MAX, TyInt32, m_func), ldElem);
        }

        // All bits in mask will be 1 for a valid index or 0 for an OOB index
        // ((index - length) >> 31 is all-ones iff index < length).
        maskOpnd = IR::RegOpnd::New(TyInt32, m_func);
        InsertSub(false, maskOpnd, index32CmpOpnd, lengthOpnd, ldElem);
        InsertShift(Js::OpCode::Shr_A, false, maskOpnd, maskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), ldElem);
    }

    // Load the character and check if it is 7bit ASCI (which we have the cache for)
    //  MOV charOpnd, [bufferOpnd + index32Opnd]
    //  CMP charOpnd, 0x80
    //  JAE $helper
    IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
    const IR::AutoReuseOpnd autoReuseCharOpnd(charOpnd, m_func);
    InsertMove(charOpnd, charIndirOpnd, ldElem);
    if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
    {
        InsertAnd(charOpnd, charOpnd, maskOpnd, ldElem);
    }
    InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint16, this->m_func),
        Js::OpCode::BrGe_A, true, labelHelper, ldElem);

    // Load the string from the cache
    //  MOV charStringCache, <charStringCache, address>
    //  MOV stringOpnd, [charStringCache + charOpnd * 4]
    IR::RegOpnd * cacheOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseCacheOpnd(cacheOpnd, m_func);
    Assert(Js::JavascriptLibrary::GetCharStringCacheAOffset() == Js::JavascriptLibrary::GetCharStringCacheOffset());
    InsertMove(cacheOpnd, this->LoadLibraryValueOpnd(ldElem, LibraryValue::ValueCharStringCache), ldElem);

    // Check if we have created the string or not
    //  TEST stringOpnd, stringOpnd
    //  JE $helper
    IR::RegOpnd * stringOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseStringOpnd(stringOpnd, m_func);
    InsertMove(stringOpnd, IR::IndirOpnd::New(cacheOpnd, charOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyVar, this->m_func), ldElem);
    GenerateNotZeroTest(stringOpnd, labelHelper, ldElem);

    InsertMove(ldElem->GetDst(), stringOpnd, ldElem);
    InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
    return true;
}
// Lower the fast path(s) for an indexed element load (LdElemI and friends).
// On return, *instrIsInHelperBlockRef tells the caller whether ldElem now sits
// in a helper block. The return value tells the caller whether it still needs
// to generate the helper call for ldElem (true) or the instruction was fully
// consumed here, e.g. turned into a bailout (false).
// The fast path dispatches to: stack-args load, string character load, or the
// general array/typed-array element load emitted via GenerateFastElemICommon.
bool
Lowerer::GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef)
{
    Assert(instrIsInHelperBlockRef);
    bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
    instrIsInHelperBlock = false;

    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelFallThru;
    IR::LabelInstr * labelBailOut = nullptr;
    IR::LabelInstr * labelMissingNative = nullptr;
    IR::Opnd *src1 = ldElem->GetSrc1();

    AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd on LdElementI");
    IR::IndirOpnd * indirOpnd = src1->AsIndirOpnd();

    // From FastElemICommon:
    //  TEST base, AtomTag                  -- check base not tagged int
    //  JNE $helper
    //  MOV r1, [base + offset(type)]       -- check base isArray
    //  CMP [r1 + offset(typeId)], TypeIds_Array
    //  JNE $helper
    //  TEST index, 1                       -- index tagged int
    //  JEQ $helper
    //  MOV r2, index
    //  SAR r2, Js::VarTag_Shift            -- remove atom tag
    //  JS $helper                          -- exclude negative index
    //  MOV r4, [base + offset(head)]
    //  CMP r2, [r4 + offset(length)]       -- bounds check
    //  JAE $helper
    //  MOV r3, [r4 + offset(elements)]
    //
    // Generated here:
    //  MOV dst, [r3 + r2]
    //  TEST dst, dst
    //  JNE $fallthrough
    if(ldElem->m_opcode == Js::OpCode::LdMethodElem && indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyOptimizedTypedArray())
    {
        // Typed arrays don't return objects, so it's not worth generating a fast path for LdMethodElem. Calling the helper also
        // generates a better error message. Skip the fast path and just generate a helper call.
        return true;
    }

    labelFallThru = ldElem->GetOrCreateContinueLabel();
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    // If we know for sure (based on flow graph) we're loading from the arguments object, then ignore the (path-based) profile info.
    bool isNativeArrayLoad = !ldElem->DoStackArgsOpt() && indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyNativeArray();
    bool needMissingValueCheck = true;
    bool emittedFastPath = false;
    bool emitBailout = false;

    if (ldElem->DoStackArgsOpt())
    {
        // Known arguments-object load.
        emittedFastPath = GenerateFastArgumentsLdElemI(ldElem, labelFallThru);
        emitBailout = true;
    }
    else if (GenerateFastStringLdElem(ldElem, labelHelper, labelFallThru))
    {
        // String character load fast path was emitted.
        emittedFastPath = true;
    }
    else
    {
        IR::LabelInstr * labelCantUseArray = labelHelper;
        if (isNativeArrayLoad)
        {
            if (ldElem->GetDst()->GetType() == TyVar)
            {
                // Skip the fast path and just generate a helper call
                return true;
            }

            // Specialized native array lowering for LdElem requires that it is profiled. When not profiled, GlobOpt should not
            // have specialized it.
            Assert(ldElem->IsProfiledInstr());

            // Native array loads bail out (rather than fall to the helper) when
            // the array can't be used as-is.
            labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            labelCantUseArray = labelBailOut;
        }

        Js::FldInfoFlags flags = Js::FldInfo_NoInfo;
        if (ldElem->IsProfiledInstr())
        {
            flags = ldElem->AsProfiledInstr()->u.ldElemInfo->flags;
        }

        bool isTypedArrayElement, isStringIndex, indirOpndOverflowed = false;
        IR::Opnd* maskOpnd = nullptr;
        indirOpnd =
            GenerateFastElemICommon(
                ldElem,
                false,
                src1->AsIndirOpnd(),
                labelHelper,
                labelCantUseArray,
                labelFallThru,
                &isTypedArrayElement,
                &isStringIndex,
                &emitBailout,
                &maskOpnd,
                nullptr, /* pLabelSegmentLengthIncreased */
                true,    /* checkArrayLengthOverflow */
                false,   /* forceGenerateFastPath */
                false,   /* returnLength */
                nullptr, /* bailOutLabelInstr */
                &indirOpndOverflowed,
                flags);

        IR::Opnd *dst = ldElem->GetDst();
        IRType dstType = dst->AsRegOpnd()->GetType();

        // The index is negative or not int.
        if (indirOpnd == nullptr)
        {
            // could have bailout kind BailOutOnArrayAccessHelperCall if indirOpnd overflows
            Assert(!(ldElem->HasBailOutInfo() && ldElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall) || indirOpndOverflowed);

            // don't check fast path without bailout because it might not be TypedArray
            if (indirOpndOverflowed && ldElem->HasBailOutInfo())
            {
                bool bailoutForOpndOverflow = false;
                const IR::BailOutKind bailOutKind = ldElem->GetBailOutKind();

                // return undefined for typed array if load dest is var, bailout otherwise
                if ((bailOutKind & ~IR::BailOutKindBits) == IR::BailOutConventionalTypedArrayAccessOnly)
                {
                    if (dst->IsVar())
                    {
                        // returns undefined in case of indirOpnd overflow which is consistent with behavior of interpreter
                        IR::Opnd * undefinedOpnd = this->LoadLibraryValueOpnd(ldElem, LibraryValue::ValueUndefined);
                        InsertMove(dst, undefinedOpnd, ldElem);
                        ldElem->FreeSrc1();
                        ldElem->FreeDst();
                        ldElem->Remove();
                        emittedFastPath = true;
                    }
                    else
                    {
                        bailoutForOpndOverflow = true;
                    }
                }

                if (bailoutForOpndOverflow || (bailOutKind & (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)))
                {
                    // Populate the (type-specialized) dst with a dummy value to
                    // satisfy downstream code, then bail out unconditionally.
                    IR::Opnd * constOpnd = nullptr;
                    if (dst->IsFloat())
                    {
                        constOpnd = IR::FloatConstOpnd::New(Js::JavascriptNumber::NaN, TyFloat64, m_func);
                    }
                    else
                    {
                        constOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func, true);
                    }
                    InsertMove(dst, constOpnd, ldElem);
                    ldElem->FreeSrc1();
                    ldElem->FreeDst();
                    GenerateBailOut(ldElem, nullptr, nullptr);
                    emittedFastPath = true;
                }
                return !emittedFastPath;
            }
            // The global optimizer should never type specialize a LdElem for which the index is not int or an integer constant
            // with a negative value. This would force an unconditional bail out on the main code path.
            else if (dst->IsVar())
            {
                if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
                {
                    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Output::Print(_u("Typed Array Lowering: function: %s (%s): instr %s, not specialized by glob opt due to negative or not likely int index.\n"),
                        this->m_func->GetJITFunctionBody()->GetDisplayName(),
                        this->m_func->GetDebugNumberSet(debugStringBuffer),
                        Js::OpCodeUtil::GetOpCodeName(ldElem->m_opcode));
                    Output::Flush();
                }
                // We must be dealing with some unconventional index value. Don't emit fast path, but go directly to helper.
                emittedFastPath = false;
                return true;
            }
            else
            {
                AssertMsg(false, "Global optimizer shouldn't have specialized this instruction.");
                Assert(dst->IsRegOpnd());
                // If global optimizer failed to notice the unconventional index and type specialized the dst,
                // there is nothing to do but bail out. This could happen if global optimizer's information based
                // on value tracking fails to recognize a non-integer index or a constant int index that is negative.
                // The bailout below ensures that we behave correctly in retail builds even under
                // these (unlikely) conditions. To satisfy the downstream code we must populate the type specialized operand
                // with some made up values, even though we will unconditionally bail out here and the values will never be
                // used.
                IR::IntConstOpnd *constOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func, true);
                InsertMove(dst, constOpnd, ldElem);
                ldElem->FreeSrc1();
                ldElem->FreeDst();
                GenerateBailOut(ldElem, nullptr, nullptr);
                return false;
            }
        }

        const IR::AutoReuseOpnd autoReuseIndirOpnd(indirOpnd, m_func);
        const ValueType baseValueType(src1->AsIndirOpnd()->GetBaseOpnd()->GetValueType());

        if ((ldElem->HasBailOutInfo() &&
             ldElem->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset &&
             ldElem->GetBailOutInfo()->bailOutOffset <= ldElem->GetByteCodeOffset() &&
             dst->IsEqual(src1->AsIndirOpnd()->GetBaseOpnd())) ||
            (src1->AsIndirOpnd()->GetIndexOpnd() && dst->IsEqual(src1->AsIndirOpnd()->GetIndexOpnd())))
        {
            // This is a pre-op bailout where the dst is the same as one of the srcs. The dst may be trashed before bailing out,
            // but since the operation will be processed again in the interpreter, src values need to be kept intact. Use a
            // temporary dst until after the operation is complete.
            IR::Instr *instrSink = ldElem->SinkDst(Js::OpCode::Ld_A);

            // The sink instruction needs to be on the fall-through path
            instrSink->Unlink();
            labelFallThru->InsertAfter(instrSink);

            LowererMD::ChangeToAssign(instrSink);
            dst = ldElem->GetDst();
        }

        if (isTypedArrayElement)
        {
            // For typedArrays, convert the loaded element to the appropriate type
            IR::RegOpnd *reg;
            IR::AutoReuseOpnd autoReuseReg;
            Assert(dst->IsRegOpnd());

            if(indirOpnd->IsFloat())
            {
                AssertMsg((dstType == TyFloat64) || (dstType == TyVar), "For Float32Array LdElemI's dst should be specialized to TyFloat64 or not at all.");
                if(indirOpnd->IsFloat32())
                {
                    // MOVSS reg32.f32, indirOpnd.f32
                    IR::RegOpnd *reg32 = IR::RegOpnd::New(TyFloat32, this->m_func);
                    const IR::AutoReuseOpnd autoReuseReg32(reg32, m_func);
                    InsertMove(reg32, indirOpnd, ldElem);

                    // CVTPS2PD dst/reg.f64, reg32.f64
                    reg = dstType == TyFloat64 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyFloat64, this->m_func);
                    autoReuseReg.Initialize(reg, m_func);
                    InsertConvertFloat32ToFloat64(reg, reg32, ldElem);
                }
                else
                {
                    Assert(indirOpnd->IsFloat64());
                    // MOVSD dst/reg.f64, indirOpnd.f64
                    reg = dstType == TyFloat64 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyFloat64, this->m_func);
                    autoReuseReg.Initialize(reg, m_func);
                    InsertMove(reg, indirOpnd, ldElem);
                }

                if (dstType != TyFloat64)
                {
                    // Convert reg.f64 to var
                    m_lowererMD.SaveDoubleToVar(dst->AsRegOpnd(), reg, ldElem, ldElem);
                }

#if FLOATVAR
                // For NaNs, go to the helper to guarantee we don't have an illegal NaN
                // TODO(magardn): move this to MD code.
#if _M_X64
                // UCOMISD reg, reg  (NaN compares unordered; PF is set)
                {
                    IR::Instr *const instr = IR::Instr::New(Js::OpCode::UCOMISD, this->m_func);
                    instr->SetSrc1(reg);
                    instr->SetSrc2(reg);
                    ldElem->InsertBefore(instr);
                }
                // JP $helper
                {
                    IR::Instr *const instr = IR::BranchInstr::New(Js::OpCode::JP, labelHelper, this->m_func);
                    ldElem->InsertBefore(instr);
                }
#elif _M_ARM64
                // FCMP reg, reg
                {
                    IR::Instr *const instr = IR::Instr::New(Js::OpCode::FCMP, this->m_func);
                    instr->SetSrc1(reg);
                    instr->SetSrc2(reg);
                    ldElem->InsertBefore(instr);
                }
                // BVS $helper
                {
                    IR::Instr *const instr = IR::BranchInstr::New(Js::OpCode::BVS, labelHelper, this->m_func);
                    ldElem->InsertBefore(instr);
                }
#endif
#endif
                if(dstType == TyFloat64)
                {
                    emitBailout = true;
                }
            }
            else
            {
                AssertMsg((dstType == TyInt32) || (dstType == TyVar), "For Int/UintArray LdElemI's dst should be specialized to TyInt32 or not at all.");
                reg = dstType == TyInt32 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyInt32, this->m_func);
                autoReuseReg.Initialize(reg, m_func);

                // Int32 and Uint32 arrays could overflow an int31, but the others can't
                if (indirOpnd->GetType() != TyUint32
#if !INT32VAR
                    && indirOpnd->GetType() != TyInt32
#endif
                    )
                {
                    reg->SetValueType(ValueType::GetTaggedInt()); // Fits as a tagged-int
                }

                // MOV/MOVZX/MOVSX dst/reg.int32, IndirOpnd.type
                IR::Instr* instrMov = InsertMove(reg, indirOpnd, ldElem);

                if (maskOpnd)
                {
                    // Apply the speculation-poisoning mask produced by
                    // GenerateFastElemICommon to the loaded value.
#if TARGET_64
                    if (maskOpnd->GetSize() != reg->GetType())
                    {
                        maskOpnd = maskOpnd->UseWithNewType(reg->GetType(), m_func)->AsRegOpnd();
                    }
#endif
                    instrMov = InsertAnd(reg, reg, maskOpnd, ldElem);
                }

                if (dstType == TyInt32)
                {
                    instrMov->dstIsTempNumber = ldElem->dstIsTempNumber;
                    instrMov->dstIsTempNumberTransferred = ldElem->dstIsTempNumberTransferred;
                    if (indirOpnd->GetType() == TyUint32)
                    {
                        // TEST dst, dst
                        // JSB $helper (bailout) -- a Uint32 value with the sign
                        // bit set does not fit in an int32-specialized dst
                        InsertCompareBranch(
                            reg,
                            IR::IntConstOpnd::New(0, TyUint32, this->m_func, /* dontEncode = */ true),
                            Js::OpCode::BrLt_A,
                            labelHelper,
                            ldElem);
                    }
                    emitBailout = true;
                }
                else
                {
                    // MOV dst, reg
                    IR::Instr *const instr = IR::Instr::New(Js::OpCode::ToVar, dst, reg, this->m_func);
                    instr->dstIsTempNumber = ldElem->dstIsTempNumber;
                    instr->dstIsTempNumberTransferred = ldElem->dstIsTempNumberTransferred;
                    ldElem->InsertBefore(instr);

                    // Convert dst to var
                    m_lowererMD.EmitLoadVar(instr, /* isFromUint32 = */ (indirOpnd->GetType() == TyUint32));
                }
            }

            // JMP $fallthrough
            InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);

            emittedFastPath = true;

            if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
            {
                char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                baseValueType.ToString(baseValueTypeStr);
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(_u("Typed Array Lowering: function: %s (%s), instr: %s, base value type: %S, %s."),
                    this->m_func->GetJITFunctionBody()->GetDisplayName(),
                    this->m_func->GetDebugNumberSet(debugStringBuffer),
                    Js::OpCodeUtil::GetOpCodeName(ldElem->m_opcode),
                    baseValueTypeStr,
                    (!dst->IsVar() ? _u("specialized") : _u("not specialized")));
                Output::Print(_u("\n"));
                Output::Flush();
            }
        }
        else
        {
            // Non-typed-array (Js array / object-with-array) element load.
            // MOV dst, indirOpnd
            InsertMove(dst, indirOpnd, ldElem);
            if (maskOpnd)
            {
#if TARGET_64
                if (maskOpnd->GetSize() != dst->GetType())
                {
                    maskOpnd = maskOpnd->UseWithNewType(dst->GetType(), m_func)->AsRegOpnd();
                }
#endif
                InsertAnd(dst, dst, maskOpnd, ldElem);
            }

            // The string index fast path does not operate on index properties (we don't get a PropertyString in that case), so
            // we don't need to do any further checks in that case

            // For LdMethodElem, if the loaded value is a tagged number, the error message generated by the helper call is
            // better than if we were to just try to call the number. Also, the call arguments need to be evaluated before
            // throwing the error, so just test whether it's an object and jump to helper if it's not.
            const bool needObjectTest = !isStringIndex && !isNativeArrayLoad && ldElem->m_opcode == Js::OpCode::LdMethodElem;
            needMissingValueCheck =
                !isStringIndex && !(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues());
            if(needMissingValueCheck)
            {
                // TEST dst, dst
                // JEQ $helper | JNE $fallthrough
                InsertMissingItemCompareBranch(
                    dst,
                    needObjectTest ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A,
                    needObjectTest ? labelHelper : labelFallThru,
                    ldElem);

                if (isNativeArrayLoad)
                {
                    Assert(!needObjectTest);
                    Assert(labelHelper != labelBailOut);
                    if(ldElem->AsProfiledInstr()->u.ldElemInfo->GetElementType().HasBeenUndefined())
                    {
                        // We're going to bail out trying to load "missing value" into a type-spec'd opnd.
                        // Branch to a point where we'll convert the array so that we don't keep bailing here.
                        // (Gappy arrays are not well-suited to nativeness.)
                        labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
                        InsertBranch(Js::OpCode::Br, labelMissingNative, ldElem);
                    }
                    else
                    {
                        // If the value has not been profiled to be undefined at some point, jump directly to bail out
                        InsertBranch(Js::OpCode::Br, labelBailOut, ldElem);
                    }
                }
            }

            if(needObjectTest)
            {
                // GenerateObjectTest(dst)
                // JIsObject $fallthrough
                m_lowererMD.GenerateObjectTest(dst, ldElem, labelFallThru, true);
            }
            else if(!needMissingValueCheck)
            {
                // JMP $fallthrough
                InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
            }
            emittedFastPath = true;
        }
    }

    // $helper:
    //      bailout or caller generated helper call
    // $fallthru:
    if (!emittedFastPath)
    {
        // No fast path was emitted, so the "helper" code is really the main path.
        labelHelper->isOpHelper = false;
    }
    ldElem->InsertBefore(labelHelper);
    instrIsInHelperBlock = true;

    if (isNativeArrayLoad)
    {
        Assert(ldElem->HasBailOutInfo());
        Assert(labelHelper != labelBailOut);

        // Transform the original instr:
        //
        // $helper:
        //     dst = LdElemI_A src (BailOut)
        // $fallthrough:
        //
        // to:
        //
        //     b $fallthru  <--- we get here if we loaded a valid element directly
        // $helper:
        //     dst = LdElemI_A src
        //     cmp dst, MissingItem
        //     bne $fallthrough
        // $bailout:
        //     BailOut
        // $fallthrough:
        LowerOneBailOutKind(ldElem, IR::BailOutConventionalNativeArrayAccessOnly, instrIsInHelperBlock);
        IR::Instr *const insertBeforeInstr = ldElem->m_next;

        // Do missing value check on value returned from helper so that we don't have to check the index against
        // array length. (We already checked it above against the segment length.)
        bool hasBeenUndefined = ldElem->AsProfiledInstr()->u.ldElemInfo->GetElementType().HasBeenUndefined();
        if (hasBeenUndefined)
        {
            if(!emitBailout)
            {
                if (labelMissingNative == nullptr)
                {
                    labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
#if DBG
                    labelMissingNative->m_noLazyHelperAssert = true;
#endif
                }
                InsertMissingItemCompareBranch(ldElem->GetDst(), Js::OpCode::BrEq_A, labelMissingNative, insertBeforeInstr);
            }
            InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);

            if(labelMissingNative)
            {
                // We're going to bail out on a load from a gap, but convert the array to Var first, so we don't just
                // bail here over and over. Gappy arrays are not well suited to nativeness.
                // NOTE: only emit this call if the profile tells us that this has happened before ("hasBeenUndefined").
                // Emitting this in Navier-Stokes brutalizes the score.
                insertBeforeInstr->InsertBefore(labelMissingNative);

                IR::JnHelperMethod helperMethod;
                indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
                if (indirOpnd->GetBaseOpnd()->GetValueType().HasIntElements())
                {
                    helperMethod = IR::HelperIntArr_ToVarArray;
                }
                else
                {
                    Assert(indirOpnd->GetBaseOpnd()->GetValueType().HasFloatElements());
                    helperMethod = IR::HelperFloatArr_ToVarArray;
                }
                m_lowererMD.LoadHelperArgument(insertBeforeInstr, indirOpnd->GetBaseOpnd());
                IR::Instr *instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
                instrHelper->SetSrc1(IR::HelperCallOpnd::New(helperMethod, m_func));
                insertBeforeInstr->InsertBefore(instrHelper);
                m_lowererMD.LowerCall(instrHelper, 0);
            }
        }
        else
        {
            if(!emitBailout)
            {
                InsertMissingItemCompareBranch(ldElem->GetDst(), Js::OpCode::BrEq_A, labelBailOut, insertBeforeInstr);
            }
            InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
        }

        insertBeforeInstr->InsertBefore(labelBailOut);
    }

    if (emitBailout)
    {
        // The instruction itself becomes the bailout; no helper call remains.
        ldElem->UnlinkSrc1();
        ldElem->UnlinkDst();
        GenerateBailOut(ldElem, nullptr, nullptr);
    }

    return !emitBailout;
}
  15895. IR::Opnd *
  15896. Lowerer::GetMissingItemOpnd(IRType type, Func *func)
  15897. {
  15898. if (type == TyVar)
  15899. {
  15900. return IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstantAddress, func, true);
  15901. }
  15902. if (type == TyInt32)
  15903. {
  15904. return IR::IntConstOpnd::New(Js::JavascriptNativeIntArray::MissingItem, TyInt32, func, true);
  15905. }
  15906. AssertMsg(false, "Only expecting TyVar and TyInt32 in Lowerer::GetMissingItemOpnd");
  15907. __assume(false);
  15908. }
  15909. IR::Opnd*
  15910. Lowerer::GetMissingItemOpndForAssignment(IRType type, Func *func)
  15911. {
  15912. switch (type)
  15913. {
  15914. case TyVar:
  15915. case TyInt32:
  15916. return GetMissingItemOpnd(type, func);
  15917. case TyFloat64:
  15918. return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetNativeFloatArrayMissingItemAddr(), TyFloat64, func);
  15919. default:
  15920. AnalysisAssertMsg(false, "Unexpected type in Lowerer::GetMissingItemOpndForAssignment");
  15921. __assume(false);
  15922. }
  15923. }
  15924. IR::Opnd *
  15925. Lowerer::GetMissingItemOpndForCompare(IRType type, Func *func)
  15926. {
  15927. switch (type)
  15928. {
  15929. case TyVar:
  15930. case TyInt32:
  15931. return GetMissingItemOpnd(type, func);
  15932. case TyFloat64:
  15933. #if TARGET_64
  15934. return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetNativeFloatArrayMissingItemAddr(), TyUint64, func);
  15935. #else
  15936. return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetNativeFloatArrayMissingItemAddr(), TyUint32, func);
  15937. #endif
  15938. default:
  15939. AnalysisAssertMsg(false, "Unexpected type in Lowerer::GetMissingItemOpndForCompare");
  15940. __assume(false);
  15941. }
  15942. }
// Emits the inline fast path for a StElemI (store-element) instruction.
//
// stElem's dst is an IndirOpnd (base[index]). GenerateFastElemICommon emits
// the shared array-type / index / bounds checks and returns the element
// IndirOpnd to store through (or nullptr when no usable indir could be
// formed). This function then emits the type-appropriate store:
//   - typed arrays: specialized int32/float stores, Uint8ClampedArray
//     clamping, or Var conversion with helper/bailout fallbacks
//   - Js arrays / objects-with-arrays: missing-value tracking check plus a
//     write-barriered store
//
// Returns true when the caller still needs to emit the helper call for
// stElem; returns false when stElem was fully consumed here (removed, or a
// bailout was generated in its place).
// *instrIsInHelperBlockRef is set to true when stElem ends up inside helper
// (cold) code, so the caller lowers the remaining helper call accordingly.
bool
Lowerer::GenerateFastStElemI(IR::Instr *& stElem, bool *instrIsInHelperBlockRef)
{
    Assert(instrIsInHelperBlockRef);
    bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
    instrIsInHelperBlock = false;

    IR::LabelInstr * labelHelper;
    IR::LabelInstr * labelSegmentLengthIncreased;
    IR::LabelInstr * labelFallThru;
    IR::LabelInstr * labelBailOut = nullptr;

    IR::Opnd *dst = stElem->GetDst();
    IR::IndirOpnd * indirOpnd = dst->AsIndirOpnd();
    AssertMsg(dst->IsIndirOpnd(), "Expected indirOpnd on StElementI");

    // Shape of the checks emitted by GenerateFastElemICommon below:
    //  From FastElemICommon:
    //       TEST base, AtomTag                  -- check base not tagged int
    //       JNE $helper
    //       MOV r1, [base + offset(type)]       -- check base isArray
    //       CMP [r1 + offset(typeId)], TypeIds_Array
    //       JNE $helper
    //       TEST index, 1                       -- index tagged int
    //       JEQ $helper
    //       MOV r2, index
    //       SAR r2, Js::VarTag_Shift            -- remove atom tag
    //       JS $helper                          -- exclude negative index
    //       MOV r4, [base + offset(head)]
    //       CMP r2, [r4 + offset(length)]       -- bounds check
    //       JB $done
    //       CMP r2, [r4 + offset(size)]         -- chunk has room?
    //       JAE $helper
    //       LEA r5, [r2 + 1]
    //       MOV [r4 + offset(length)], r5       -- update length on chunk
    //       CMP r5, [base + offset(length)]
    //       JBE $done
    //       MOV [base + offset(length)], r5     -- update length on array
    //  $done
    //       LEA r3, [r4 + offset(elements)]
    //
    //  Generated here:
    //       MOV [r3 + r2], src
    labelFallThru = stElem->GetOrCreateContinueLabel();
    labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    bool emitBailout = false;

    bool isNativeArrayStore = indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyNativeArray();
    IR::LabelInstr * labelCantUseArray = labelHelper;
    if (isNativeArrayStore)
    {
        if (stElem->GetSrc1()->GetType() != GetArrayIndirType(indirOpnd->GetBaseOpnd()->GetValueType()))
        {
            // Source type doesn't match the native array's element type.
            // Skip the fast path and just generate a helper call
            return true;
        }

        if(stElem->HasBailOutInfo())
        {
            const IR::BailOutKind bailOutKind = stElem->GetBailOutKind();
            if (bailOutKind & IR::BailOutConventionalNativeArrayAccessOnly)
            {
                // A failed array check must bail out directly (not call the
                // helper), so route "can't use array" to a separate label.
                labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
                labelCantUseArray = labelBailOut;
            }
        }
    }

    // Pick up profiling hints (e.g. whether this store tends to fill holes).
    Js::FldInfoFlags flags = Js::FldInfo_NoInfo;
    if (stElem->IsProfiledInstr())
    {
        flags = stElem->AsProfiledInstr()->u.stElemInfo->flags;
    }

    bool isTypedArrayElement, isStringIndex, indirOpndOverflowed = false;
    IR::Opnd* maskOpnd = nullptr;
    indirOpnd =
        GenerateFastElemICommon(
            stElem,
            true,
            indirOpnd,
            labelHelper,
            labelCantUseArray,
            labelFallThru,
            &isTypedArrayElement,
            &isStringIndex,
            &emitBailout,
            &maskOpnd,
            &labelSegmentLengthIncreased,
            true, /* checkArrayLengthOverflow */
            false, /* forceGenerateFastPath */
            false, /* returnLength */
            nullptr, /* bailOutLabelInstr */
            &indirOpndOverflowed,
            flags);

    IR::Opnd *src = stElem->GetSrc1();
    const IR::AutoReuseOpnd autoReuseSrc(src, m_func);

    // The index is negative or not int.
    if (indirOpnd == nullptr)
    {
        Assert(!(stElem->HasBailOutInfo() && stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall) || indirOpndOverflowed);
        if (indirOpndOverflowed && stElem->HasBailOutInfo())
        {
            bool emittedFastPath = false;
            const IR::BailOutKind bailOutKind = stElem->GetBailOutKind();
            // ignore StElemI in case of indirOpnd overflow only for typed array which is consistent with behavior of interpreter
            if ((bailOutKind & ~IR::BailOutKindBits) == IR::BailOutConventionalTypedArrayAccessOnly)
            {
                // Out-of-range typed array store is a no-op: drop the instruction.
                stElem->FreeSrc1();
                stElem->FreeDst();
                stElem->Remove();
                emittedFastPath = true;
            }
            if (!emittedFastPath && (bailOutKind & (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)))
            {
                // Otherwise replace the store with an unconditional bailout.
                stElem->FreeSrc1();
                stElem->FreeDst();
                GenerateBailOut(stElem, nullptr, nullptr);
                emittedFastPath = true;
            }
            return !emittedFastPath;
        }
        // The global optimizer should never type specialize a StElem for which we know the index is not int or is a negative
        // int constant. This would result in an unconditional bailout on the main code path.
        else if (src->IsVar())
        {
            if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(_u("Typed Array Lowering: function: %s (%s): instr %s, not specialized by glob opt due to negative or not likely int index.\n"),
                    this->m_func->GetJITFunctionBody()->GetDisplayName(),
                    this->m_func->GetDebugNumberSet(debugStringBuffer),
                    Js::OpCodeUtil::GetOpCodeName(stElem->m_opcode));
                Output::Flush();
            }

            // We must be dealing with some atypical index value. Don't emit fast path, but go directly to helper.
            return true;
        }
        else
        {
            // If global optimizer failed to notice the unconventional index and type specialized the src,
            // there is nothing to do but bail out. We should never hit this code path, unless the global optimizer's conditions
            // for not specializing the instruction don't match the lowerer's conditions for not emitting the array checks (see above).
            // This could happen if global optimizer's information based on value tracking fails to recognize a non-integer index or
            // a constant int index that is negative. The bailout below ensures that we behave correctly in retail builds even under
            // these (unlikely) conditions.
            AssertMsg(false, "Global optimizer shouldn't have specialized this instruction.");

            stElem->FreeSrc1();
            stElem->FreeDst();
            GenerateBailOut(stElem, nullptr, nullptr);
            return false;
        }
    }

    const IR::AutoReuseOpnd autoReuseIndirOpnd(indirOpnd, m_func);

    const ValueType baseValueType(dst->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
    if (isTypedArrayElement)
    {
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
        {
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            Output::Print(_u("Typed Array Lowering: function: %s (%s), instr: %s, base value type: %S, %s."),
                this->m_func->GetJITFunctionBody()->GetDisplayName(),
                this->m_func->GetDebugNumberSet(debugStringBuffer),
                Js::OpCodeUtil::GetOpCodeName(stElem->m_opcode),
                baseValueTypeStr,
                (!src->IsVar() ? _u("specialized") : _u("not specialized")));
            Output::Print(_u("\n"));
            Output::Flush();
        }

        ObjectType objectType = baseValueType.GetObjectType();

        if(indirOpnd->IsFloat())
        {
            // Float32Array/Float64Array element store.
            if (src->GetType() == TyFloat64)
            {
                IR::RegOpnd *const regSrc = src->AsRegOpnd();

                if (indirOpnd->IsFloat32())
                {
                    // CVTSD2SS reg.f32, regSrc.f64    -- Convert regSrc from f64 to f32
                    IR::RegOpnd *const reg = IR::RegOpnd::New(TyFloat32, this->m_func);
                    const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
                    InsertConvertFloat64ToFloat32(reg, regSrc, stElem);

                    // MOVSS indirOpnd, reg
                    InsertMove(indirOpnd, reg, stElem, false);
                }
                else
                {
                    // MOVSD indirOpnd, regSrc
                    InsertMove(indirOpnd, regSrc, stElem, false);
                }

                emitBailout = true;
            }
            else
            {
                Assert(src->GetType() == TyVar);

                // MOV reg, src
                IR::RegOpnd *const reg = IR::RegOpnd::New(TyVar, this->m_func);
                const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
                InsertMove(reg, src, stElem);

                // Convert to float, and assign to indirOpnd
                if (baseValueType.IsLikelyOptimizedVirtualTypedArray())
                {
                    // Convert into a temp register first, then store.
                    IR::RegOpnd* dstReg = IR::RegOpnd::New(indirOpnd->GetType(), this->m_func);
                    m_lowererMD.EmitLoadFloat(dstReg, reg, stElem, stElem, labelHelper);
                    InsertMove(indirOpnd, dstReg, stElem);
                }
                else
                {
                    m_lowererMD.EmitLoadFloat(indirOpnd, reg, stElem, stElem, labelHelper);
                }
            }
        }
        else if (objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedVirtualArray || objectType == ObjectType::Uint8ClampedMixedArray)
        {
            // Uint8ClampedArray: value must be clamped to [0, 255] before the store.
            Assert(indirOpnd->GetType() == TyUint8);

            IR::RegOpnd *regSrc;
            IR::AutoReuseOpnd autoReuseRegSrc;
            if(src->IsRegOpnd())
            {
                regSrc = src->AsRegOpnd();
            }
            else
            {
                regSrc = IR::RegOpnd::New(StackSym::New(src->GetType(), m_func), src->GetType(), m_func);
                autoReuseRegSrc.Initialize(regSrc, m_func);

                InsertMove(regSrc, src, stElem);
            }

            IR::Opnd *bitMaskOpnd;
            IRType srcType = regSrc->GetType();
            if ((srcType == TyFloat64) || (srcType == TyInt32))
            {
                // Specialized source: clamp inline. Shape of the emitted code:
                //
                // if (srcType == TyInt32) {
                //     TEST regSrc, ~255
                //     JE $storeValue
                //     JSB $handleNegative
                //     MOV indirOpnd, 255
                //     JMP $fallThru
                // $handleNegative: [isHelper = false]
                //     MOV indirOpnd, 0
                //     JMP $fallThru
                // $storeValue:
                //     MOV indirOpnd, regSrc
                // }
                // else {
                //     MOVSD regTmp, regSrc
                //     ADDSD regTmp, 0.5
                //     CVTTSD2SI regOpnd, regTmp
                //     TEST regOpnd, ~255
                //     JE $storeValue
                // $handleOutOfBounds: [isHelper = true]
                //     COMISD regSrc, [&FloatZero]
                //     JB $handleNegative
                //     MOV regOpnd, 255
                //     JMP $storeValue
                // $handleNegative: [isHelper = true]
                //     MOV regOpnd, 0
                // $storeValue:
                //     MOV indirOpnd, regOpnd
                // }
                // $fallThru:
                IR::RegOpnd *regOpnd;
                IR::AutoReuseOpnd autoReuseRegOpnd;
                if (srcType == TyInt32)
                {
                    // When srcType == TyInt32 we will never call the helper and we will never
                    // modify the regOpnd. Therefore, it's okay to use regSrc directly, and it
                    // reduces register pressure.
                    regOpnd = regSrc;
                }
                else
                {
#ifdef _M_IX86
                    AssertMsg(AutoSystemInfo::Data.SSE2Available(), "GlobOpt shouldn't have specialized Uint8ClampedArray StElem to float64 if SSE2 is unavailable.");
#endif
                    regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
                    autoReuseRegOpnd.Initialize(regOpnd, m_func);

                    Assert(objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedVirtualArray || objectType == ObjectType::Uint8ClampedMixedArray);
                    // Uint8ClampedArray follows IEEE 754 rounding rules for ties which round up
                    // odd integers and round down even integers. Both ties result in the nearest
                    // even integer value.
                    //
                    // CVTSD2SI regOpnd, regSrc
                    LowererMD::InsertConvertFloat64ToInt32(RoundModeHalfToEven, regOpnd, regSrc, stElem);
                }

                IR::LabelInstr *labelStoreValue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
#ifndef _M_ARM
                // TEST regOpnd, ~255
                // JE $storeValue
                bitMaskOpnd = IR::IntConstOpnd::New(~255, TyInt32, this->m_func, true);
                InsertTestBranch(regOpnd, bitMaskOpnd, Js::OpCode::BrEq_A, labelStoreValue, stElem);
#else // ARM
                // Special case for ARM, a shift may be better
                //
                // ASRS tempReg, src, 8
                // BEQ $inlineSet
                InsertShiftBranch(
                    Js::OpCode::Shr_A,
                    IR::RegOpnd::New(TyInt32, this->m_func),
                    regOpnd,
                    IR::IntConstOpnd::New(8, TyInt8, this->m_func),
                    Js::OpCode::BrEq_A,
                    labelStoreValue,
                    stElem);
#endif

                IR::LabelInstr *labelHandleNegative = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, srcType == TyFloat64);
                if (srcType == TyInt32)
                {
                    // JSB $handleNegativeOrOverflow
                    InsertBranch(
                        LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A),
                        labelHandleNegative,
                        stElem);

                    // MOV IndirOpnd.u8, 255
                    InsertMove(indirOpnd, IR::IntConstOpnd::New(255, TyUint8, this->m_func, true), stElem);

                    // JMP $fallThru
                    InsertBranch(Js::OpCode::Br, labelFallThru, stElem);

                    // $handleNegative [isHelper = false]
                    stElem->InsertBefore(labelHandleNegative);

                    // MOV IndirOpnd.u8, 0
                    InsertMove(indirOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func, true), stElem);

                    // JMP $fallThru
                    InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
                }
                else
                {
                    Assert(regOpnd != regSrc);

                    // This label is just to ensure the following code is moved to the helper block.
                    // $handleOutOfBounds [isHelper = true]
                    IR::LabelInstr *labelHandleOutOfBounds = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
                    stElem->InsertBefore(labelHandleOutOfBounds);

                    // COMISD regSrc, FloatZero
                    // JB labelHandleNegative
                    IR::MemRefOpnd * zeroOpnd = IR::MemRefOpnd::New(this->m_func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyMachDouble, this->m_func);
                    InsertCompareBranch(regSrc, zeroOpnd, Js::OpCode::BrNotGe_A, labelHandleNegative, stElem);

                    // MOV regOpnd, 255
                    InsertMove(regOpnd, IR::IntConstOpnd::New(255, TyUint8, this->m_func, true), stElem);

                    // JMP $storeValue
                    InsertBranch(Js::OpCode::Br, labelStoreValue, stElem);

                    // $handleNegative [isHelper = true]
                    stElem->InsertBefore(labelHandleNegative);

                    // MOV regOpnd, 0
                    InsertMove(regOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func, true), stElem);
                }
                // $storeValue
                stElem->InsertBefore(labelStoreValue);

                // MOV IndirOpnd.u8, regOpnd.u8
                InsertMove(indirOpnd, regOpnd, stElem);

                emitBailout = true;
            }
            else
            {
                // Unspecialized (Var) source: fast-path only for tagged ints
                // already in [0, 255]; everything else goes to the helper.
                Assert(srcType == TyVar);

#if INT32VAR
                bitMaskOpnd = IR::AddrOpnd::New((Js::Var)~(INT_PTR)(Js::TaggedInt::ToVarUnchecked(255)), IR::AddrOpndKindConstantVar, this->m_func, true);
#else
                bitMaskOpnd = IR::IntConstOpnd::New(~(INT_PTR)(Js::TaggedInt::ToVarUnchecked(255)), TyMachReg, this->m_func, true);
#endif
                // Note: We are assuming that if no bits other than ~(TaggedInt(255)) are 1, that we have a tagged
                // int value between 0 - 255.
                // #if INT32VAR
                // This works for pointers because tagged int bit can't be on, and first 64k are not valid addresses
                // This works for floats because a valid float would have one of the upper 13 bits on.
                // #else
                // Any pointer is larger than 512 because first 64k memory is reserved by the OS
                // #endif
                IR::LabelInstr *labelInlineSet = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
#ifndef _M_ARM
                // TEST src, ~(TaggedInt(255))     -- Check for tagged int >= 255 and <= 0
                // JEQ $inlineSet
                InsertTestBranch(regSrc, bitMaskOpnd, Js::OpCode::BrEq_A, labelInlineSet, stElem);
#else // ARM
                // Special case for ARM, a shift may be better
                //
                // ASRS tempReg, src, 8
                // BEQ $inlineSet
                InsertShiftBranch(
                    Js::OpCode::Shr_A,
                    IR::RegOpnd::New(TyInt32, this->m_func),
                    regSrc,
                    IR::IntConstOpnd::New(8, TyInt8, this->m_func),
                    Js::OpCode::BrEq_A,
                    labelInlineSet,
                    stElem);
#endif

                // Uint8ClampedArray::DirectSetItem(array, index, value);
                // Inserting a helper call. Make sure it observes the main instructions's requirements regarding implicit calls.
                if (!instrIsInHelperBlock)
                {
                    stElem->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, m_func, true));
                }

                if (stElem->HasBailOutInfo() && (stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall))
                {
                    // Bail out instead of doing the helper call.
                    Assert(labelHelper);
                    this->InsertBranch(Js::OpCode::Br, labelHelper, stElem);
                }
                else
                {
                    IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
                    stElem->InsertBefore(instr);

                    if (stElem->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(stElem->GetBailOutKind()))
                    {
                        // Bail out if this helper triggers implicit calls.
                        instr = this->AddBailoutToHelperCallInstr(instr, stElem->GetBailOutInfo(), stElem->GetBailOutKind(), stElem);
                    }

                    m_lowererMD.LoadHelperArgument(instr, regSrc);
                    IR::Opnd *indexOpnd = indirOpnd->GetIndexOpnd();
                    if (indexOpnd == nullptr)
                    {
                        if (indirOpnd->GetOffset() == 0)
                        {
                            // There are two ways that we can get an indirOpnd with no index and 0 offset.
                            // The first is that we're storing to element 0 in the array by constant offset.
                            // The second is that we got a pointer back that has spectre masking, so it's going
                            // to not have the appropriate index into the array. In that case, we need to regen
                            // the index.
                            // The plan is
                            // 1. get the backing buffer pointer
                            // 2. subtract that from the indexOpnd to get the numeric index
                            // This is unfortunately slightly worse perf for constant writes of vars to index 0
                            // of Uint8ClampedArrays, but that's hopefully uncommon enough that the impact will
                            // be minimal
                            // MOV backingBufferOpnd, [base + offset(arrayBuffer)]
                            // SUB indexOpnd, backingBufferOpnd
                            int bufferOffset = GetArrayOffsetOfHeadSegment(baseValueType);
                            IR::IndirOpnd* arrayBufferOpnd = IR::IndirOpnd::New(stElem->GetDst()->AsIndirOpnd()->GetBaseOpnd(), bufferOffset, TyMachPtr, this->m_func);
                            IR::RegOpnd* backingBufferOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
                            InsertMove(backingBufferOpnd, arrayBufferOpnd, instr);
                            IR::RegOpnd* tempIndexOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
                            InsertSub(false, tempIndexOpnd, indirOpnd->GetBaseOpnd(), backingBufferOpnd, instr);
                            indexOpnd = tempIndexOpnd->UseWithNewType(TyInt32, this->m_func);
                        }
                        else
                        {
                            indexOpnd = IR::IntConstOpnd::New(indirOpnd->GetOffset(), TyInt32, this->m_func);
                        }
                    }
                    else
                    {
                        Assert(indirOpnd->GetOffset() == 0);
                    }
                    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
                    m_lowererMD.LoadHelperArgument(instr, stElem->GetDst()->AsIndirOpnd()->GetBaseOpnd());

                    Assert(objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedMixedArray || objectType == ObjectType::Uint8ClampedVirtualArray);
                    m_lowererMD.ChangeToHelperCall(instr, IR::JnHelperMethod::HelperUint8ClampedArraySetItem);

                    // JMP $fallThrough
                    InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
                }
                //$inlineSet
                stElem->InsertBefore(labelInlineSet);

                IR::RegOpnd *regOpnd;
                IR::AutoReuseOpnd autoReuseRegOpnd;
#if INT32VAR
                regOpnd = regSrc;
#else
                // MOV r1, src
                // SAR r1, 1
                regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
                autoReuseRegOpnd.Initialize(regOpnd, m_func);
                InsertShift(
                    Js::OpCode::Shr_A,
                    false /* needFlags */,
                    regOpnd,
                    regSrc,
                    IR::IntConstOpnd::New(1, TyInt8, this->m_func),
                    stElem);
#endif

                // MOV IndirOpnd.u8, reg.u8
                InsertMove(indirOpnd, regOpnd, stElem);
            }
        }
        else
        {
            // Integer-typed array store (non-clamped).
            if (src->IsInt32())
            {
                // MOV indirOpnd, src
                InsertMove(indirOpnd, src, stElem);

                emitBailout = true;
            }
            else if (src->IsFloat64())
            {
                AssertMsg(indirOpnd->GetType() == TyUint32, "Only StElemI to Uint32Array could be specialized to float64.");
#ifdef _M_IX86
                AssertMsg(AutoSystemInfo::Data.SSE2Available(), "GloOpt shouldn't have specialized Uint32Array StElemI to float64 if SSE2 is unavailable.");
#endif

                bool bailOutOnHelperCall = stElem->HasBailOutInfo() ? !!(stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall) : false;
                if (bailOutOnHelperCall)
                {
                    // The float->int conversion may need a helper call, which this bailout
                    // kind forbids; rejit (or abort if rejit can't help).
                    if(!GlobOpt::DoEliminateArrayAccessHelperCall(this->m_func))
                    {
                        // Array access helper call removal is already off for some reason. Prevent trying to rejit again
                        // because it won't help and the same thing will happen again. Just abort jitting this function.
                        if(PHASE_TRACE(Js::BailOutPhase, this->m_func))
                        {
                            Output::Print(_u("    Aborting JIT because EliminateArrayAccessHelperCall is already off\n"));
                            Output::Flush();
                        }
                        throw Js::OperationAbortedException();
                    }

                    throw Js::RejitException(RejitReason::ArrayAccessHelperCallEliminationDisabled);
                }

                IR::RegOpnd *const reg = IR::RegOpnd::New(TyInt32, this->m_func);
                const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
                m_lowererMD.EmitFloatToInt(reg, src, stElem, stElem, labelHelper);

                // MOV indirOpnd, reg
                InsertMove(indirOpnd, reg, stElem);

                emitBailout = true;
            }
            else
            {
                Assert(src->IsVar());

                if(src->IsAddrOpnd())
                {
                    // Constant tagged-int source: store the untagged value directly.
                    IR::AddrOpnd *const addrSrc = src->AsAddrOpnd();
                    Assert(addrSrc->IsVar());
                    Assert(Js::TaggedInt::Is(addrSrc->m_address));

                    // MOV indirOpnd, intValue
                    InsertMove(
                        indirOpnd,
                        IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(addrSrc->m_address), TyInt32, m_func),
                        stElem);
                }
                else
                {
                    IR::RegOpnd *const regSrc = src->AsRegOpnd();

                    // FromVar reg, Src
                    IR::RegOpnd *const reg = IR::RegOpnd::New(TyInt32, this->m_func);
                    const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
                    IR::Instr * instr = IR::Instr::New(Js::OpCode::FromVar, reg, regSrc, stElem->m_func);
                    stElem->InsertBefore(instr);

                    // Convert reg to int32
                    // Note: ToUint32 is implemented as (uint32)ToInt32()
                    IR::BailOutKind bailOutKind = stElem->HasBailOutInfo() ? stElem->GetBailOutKind() : IR::BailOutInvalid;
                    if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
                    {
                        instr = this->AddBailoutToHelperCallInstr(instr, stElem->GetBailOutInfo(), bailOutKind, stElem);
                    }
                    bool bailOutOnHelperCall = !!(bailOutKind & IR::BailOutOnArrayAccessHelperCall);
                    m_lowererMD.EmitLoadInt32(instr, true /*conversionFromObjectAllowed*/, bailOutOnHelperCall, labelHelper);

                    // MOV indirOpnd, reg
                    InsertMove(indirOpnd, reg, stElem);
                }
            }
        }
    }
    else
    {
        // Js array (non-typed-array) store path.
        if(labelSegmentLengthIncreased)
        {
            IR::Instr *const insertBeforeInstr = labelSegmentLengthIncreased->m_next;

            // We might be changing the array to have missing values here, or we might be
            // changing it to extend it; in either case, we're not going to make it _not_
            // have missing values after this operation, so just write and fallthrough.
            // labelSegmentLengthIncreased:
            //     mov [segment + index], src
            //     jmp $fallThru
            InsertMove(indirOpnd, src, insertBeforeInstr);
            InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
        }

        if (!(isStringIndex || (baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())))
        {
            if(!stElem->IsProfiledInstr() || stElem->AsProfiledInstr()->u.stElemInfo->LikelyFillsMissingValue())
            {
                // Check whether the store is filling a missing value. If so, fall back to the helper so that it can check whether
                // this store is filling the last missing value in the array. This is necessary to keep the missing value tracking
                // in arrays precise. The check is omitted when profile data says that the store is likely to create missing values.
                //
                // cmp  [segment + index], Js::SparseArraySegment::MissingValue
                // je   $helper
                InsertMissingItemCompareBranch(
                    indirOpnd,
                    Js::OpCode::BrEq_A,
                    labelHelper,
                    stElem);
            }
            else
            {
                GenerateIsEnabledArraySetElementFastPathCheck(labelHelper, stElem);
            }
        }

        // MOV [r3 + r2], src
        InsertMoveWithBarrier(indirOpnd, src, stElem);
    }

    // JMP $fallThru
    InsertBranch(Js::OpCode::Br, labelFallThru, stElem);

    // $helper:
    //      bailout or caller generated helper call
    // $fallThru:
    stElem->InsertBefore(labelHelper);
    instrIsInHelperBlock = true;

    if (isNativeArrayStore && !isStringIndex)
    {
        Assert(stElem->HasBailOutInfo());
        Assert(labelHelper != labelBailOut);

        // Transform the original instr:
        //
        // $helper:
        //     dst = LdElemI_A src (BailOut)
        // $fallthrough:
        //
        // to:
        //
        // $helper:
        //     dst = LdElemI_A src
        //     b $fallthrough
        // $bailout:
        //     BailOut
        // $fallthrough:
        LowerOneBailOutKind(stElem, IR::BailOutConventionalNativeArrayAccessOnly, instrIsInHelperBlock);
        IR::Instr *const insertBeforeInstr = stElem->m_next;
        InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
        insertBeforeInstr->InsertBefore(labelBailOut);
    }

    if (emitBailout)
    {
        // The fast path fully handled the store; replace the original
        // instruction with a bailout for the slow cases.
        stElem->FreeSrc1();
        stElem->FreeDst();
        GenerateBailOut(stElem, nullptr, nullptr);
    }

    return !emitBailout;
}
  16556. bool
  16557. Lowerer::GenerateFastLdLen(IR::Instr *ldLen, bool *instrIsInHelperBlockRef)
  16558. {
  16559. Assert(instrIsInHelperBlockRef);
  16560. bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
  16561. instrIsInHelperBlock = false;
  16562. // TEST src, AtomTag -- check src not tagged int
  16563. // JNE $helper
  16564. // CMP [src], JavascriptArray::`vtable' -- check base isArray
  16565. // JNE $string
  16566. // MOV length, [src + offset(length)] -- Load array length
  16567. // JMP $tovar
  16568. // $string:
  16569. // CMP [src + offset(type)], static_string_type -- check src isString
  16570. // JNE $helper
  16571. // MOV length, [src + offset(length)] -- Load string length
  16572. // $toVar:
  16573. // TEST length, 0xC0000000 -- test for overflow of SHL, or negative
  16574. // JNE $helper
  16575. // SHL length, Js::VarTag_Shift -- restore the var tag on the result
  16576. // INC length
  16577. // MOV dst, length
  16578. // JMP $fallthru
  16579. // $helper:
  16580. // CALL GetProperty(src, length_property_id, scriptContext)
  16581. // $fallthru:
  16582. IR::Opnd * opnd = ldLen->GetSrc1();
  16583. IR::RegOpnd * dst = ldLen->GetDst()->AsRegOpnd();
  16584. const ValueType srcValueType(opnd->GetValueType());
  16585. IR::LabelInstr *const labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  16586. if (ldLen->DoStackArgsOpt())
  16587. {
  16588. GenerateFastArgumentsLdLen(ldLen, ldLen->GetOrCreateContinueLabel());
  16589. ldLen->Remove();
  16590. return false;
  16591. }
  16592. else
  16593. {
  16594. const bool arrayFastPath = ShouldGenerateArrayFastPath(opnd, false, true, false);
  16595. // HasBeenString instead of IsLikelyString because it could be a merge between StringObject and String, and this
  16596. // information about whether it's a StringObject or some other object is not available in the profile data
  16597. const bool stringFastPath = srcValueType.IsUninitialized() || srcValueType.HasBeenString();
  16598. if(!(arrayFastPath || stringFastPath))
  16599. {
  16600. return true;
  16601. }
  16602. IR::RegOpnd * src;
  16603. if (opnd->IsRegOpnd())
  16604. {
  16605. src = opnd->AsRegOpnd();
  16606. }
  16607. else
  16608. {
  16609. // LdLen has a PropertySymOpnd until globopt where the decision whether to convert it to LdFld is made. If globopt is skipped, the opnd will
  16610. // still be a PropertySymOpnd here. In that case, do the conversion here.
  16611. IR::SymOpnd * symOpnd = opnd->AsSymOpnd();
  16612. PropertySym * propertySym = symOpnd->m_sym->AsPropertySym();
  16613. src = IR::RegOpnd::New(propertySym->m_stackSym, IRType::TyVar, this->m_func);
  16614. ldLen->ReplaceSrc1(src);
  16615. opnd = src;
  16616. }
  16617. const int32 arrayOffsetOfLength =
  16618. srcValueType.IsLikelyAnyOptimizedArray()
  16619. ? GetArrayOffsetOfLength(srcValueType)
  16620. : Js::JavascriptArray::GetOffsetOfLength();
  16621. IR::LabelInstr *labelString = nullptr;
  16622. IR::RegOpnd *arrayOpnd = src;
  16623. IR::RegOpnd *arrayLengthOpnd = nullptr;
  16624. IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
  16625. if(arrayFastPath)
  16626. {
  16627. if(!srcValueType.IsAnyOptimizedArray())
  16628. {
  16629. if(stringFastPath)
  16630. {
  16631. // If we don't have info about the src value type or its object type, the array and string fast paths are
  16632. // generated
  16633. labelString = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  16634. }
  16635. arrayOpnd = GenerateArrayTest(src, labelHelper, stringFastPath ? labelString : labelHelper, ldLen, false);
  16636. }
  16637. else if(src->IsArrayRegOpnd())
  16638. {
  16639. IR::ArrayRegOpnd *const arrayRegOpnd = src->AsArrayRegOpnd();
  16640. if(arrayRegOpnd->LengthSym())
  16641. {
  16642. arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), TyUint32, m_func);
  16643. DebugOnly(arrayLengthOpnd->FreezeSymValue());
  16644. autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
  16645. }
  16646. }
  16647. }
  16648. const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);
  16649. IR::RegOpnd *lengthOpnd = nullptr;
  16650. IR::AutoReuseOpnd autoReuseLengthOpnd;
  16651. const auto EnsureLengthOpnd = [&]()
  16652. {
  16653. if(lengthOpnd)
  16654. {
  16655. return;
  16656. }
  16657. lengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
  16658. autoReuseLengthOpnd.Initialize(lengthOpnd, m_func);
  16659. };
  16660. if(arrayFastPath)
  16661. {
  16662. if(arrayLengthOpnd)
  16663. {
  16664. lengthOpnd = arrayLengthOpnd;
  16665. autoReuseLengthOpnd.Initialize(lengthOpnd, m_func);
  16666. Assert(!stringFastPath);
  16667. }
  16668. else
  16669. {
  16670. // MOV length, [array + offset(length)] -- Load array length
  16671. EnsureLengthOpnd();
  16672. IR::IndirOpnd *const indirOpnd = IR::IndirOpnd::New(arrayOpnd, arrayOffsetOfLength, TyUint32, this->m_func);
  16673. InsertMove(lengthOpnd, indirOpnd, ldLen);
  16674. }
  16675. }
  16676. if(stringFastPath)
  16677. {
  16678. IR::LabelInstr *labelToVar = nullptr;
  16679. if(arrayFastPath)
  16680. {
  16681. // JMP $tovar
  16682. labelToVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  16683. InsertBranch(Js::OpCode::Br, labelToVar, ldLen);
  16684. // $string:
  16685. ldLen->InsertBefore(labelString);
  16686. }
  16687. // CMP [src + offset(type)], static_stringtype -- check src isString
  16688. // JNE $helper
  16689. GenerateStringTest(src, ldLen, labelHelper, nullptr, !arrayFastPath);
  16690. // MOV length, [src + offset(length)] -- Load string length
  16691. EnsureLengthOpnd();
  16692. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(src, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func);
  16693. InsertMove(lengthOpnd, indirOpnd, ldLen);
  16694. if(arrayFastPath)
  16695. {
  16696. // $toVar:
  16697. ldLen->InsertBefore(labelToVar);
  16698. }
  16699. }
  16700. Assert(lengthOpnd);
  16701. if(ldLen->HasBailOutInfo() && (ldLen->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnIrregularLength)
  16702. {
  16703. Assert(ldLen->GetBailOutKind() == IR::BailOutOnIrregularLength || ldLen->HasLazyBailOut());
  16704. Assert(dst->IsInt32());
  16705. // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative
  16706. // test length, length
  16707. // js $helper
  16708. // mov dst, length
  16709. // jmp $fallthrough
  16710. InsertCompareBranch(
  16711. lengthOpnd,
  16712. IR::IntConstOpnd::New(0, lengthOpnd->GetType(), m_func, true),
  16713. Js::OpCode::BrLt_A,
  16714. labelHelper,
  16715. ldLen);
  16716. InsertMove(dst, lengthOpnd, ldLen);
  16717. InsertBranch(Js::OpCode::Br, ldLen->GetOrCreateContinueLabel(), ldLen);
  16718. // $helper:
  16719. // (Bail out with IR::BailOutOnIrregularLength)
  16720. ldLen->InsertBefore(labelHelper);
  16721. instrIsInHelperBlock = true;
  16722. ldLen->FreeDst();
  16723. ldLen->FreeSrc1();
  16724. GenerateBailOut(ldLen);
  16725. return false;
  16726. }
  16727. #if INT32VAR
  16728. // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative
  16729. // test length, length
  16730. // js $helper
  16731. InsertCompareBranch(
  16732. lengthOpnd,
  16733. IR::IntConstOpnd::New(0, lengthOpnd->GetType(), m_func, true),
  16734. Js::OpCode::BrLt_A,
  16735. labelHelper,
  16736. ldLen);
  16737. #else
  16738. // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative.
  16739. // Additionally, verify that the signed value's width is not greater than 31 bits, since it needs to be tagged.
  16740. // test length, 0xC0000000
  16741. // jne $helper
  16742. InsertTestBranch(
  16743. lengthOpnd,
  16744. IR::IntConstOpnd::New(0xC0000000, TyUint32, this->m_func, true),
  16745. Js::OpCode::BrNeq_A,
  16746. labelHelper,
  16747. ldLen);
  16748. #endif
  16749. #if INT32VAR
  16750. //
  16751. // dst_32 = MOV length
  16752. // dst_64 = OR dst_64, Js::AtomTag_IntPtr
  16753. //
  16754. Assert(dst->GetType() == TyVar);
  16755. IR::Opnd *dst32 = dst->Copy(this->m_func);
  16756. dst32->SetType(TyInt32);
  16757. // This will clear the top bits.
  16758. InsertMove(dst32, lengthOpnd, ldLen);
  16759. m_lowererMD.GenerateInt32ToVarConversion(dst, ldLen);
  16760. #else
  16761. // dst = SHL length, Js::VarTag_Shift -- restore the var tag on the result
  16762. InsertShift(
  16763. Js::OpCode::Shl_A,
  16764. false /* needFlags */,
  16765. dst,
  16766. lengthOpnd,
  16767. IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func),
  16768. ldLen);
  16769. // dst = ADD dst, AtomTag
  16770. InsertAdd(
  16771. false /* needFlags */,
  16772. dst,
  16773. dst,
  16774. IR::IntConstOpnd::New(Js::AtomTag_Int32, TyUint32, m_func, true),
  16775. ldLen);
  16776. #endif
  16777. // JMP $fallthrough
  16778. InsertBranch(Js::OpCode::Br, ldLen->GetOrCreateContinueLabel(), ldLen);
  16779. }
  16780. // $helper:
  16781. // (caller generates helper call)
  16782. ldLen->InsertBefore(labelHelper);
  16783. instrIsInHelperBlock = true;
  16784. return true; // fast path was generated, helper call will be in a helper block
  16785. }
void
Lowerer::GenerateFastInlineStringCodePointAt(IR::Instr* lastInstr, Func* func, IR::Opnd *strLength, IR::Opnd *srcIndex, IR::RegOpnd *lowerChar, IR::RegOpnd *strPtr)
{
    // Emits (before 'lastInstr') the surrogate-pair combining fast path for
    // String.prototype.codePointAt. The caller has already loaded the char at
    // srcIndex into 'lowerChar'. If a following char exists and the two chars
    // form a valid UTF-16 surrogate pair, lowerChar is replaced in place with
    // the combined code point; on any failed check control falls through to the
    // local $charCodeAt label, leaving lowerChar holding the plain char code.
    //
    //// Required State:
    // strLength - UInt32
    // srcIndex - TyVar if not Address
    // lowerChar - TyMachReg
    // strPtr - Addr
    //// Instructions
    // CMP [strLength], srcIndex + 1
    // JBE charCodeAt
    // CMP lowerChar 0xDC00
    // JGE charCodeAt
    // CMP lowerChar 0xD7FF
    // JLE charCodeAt
    // upperChar = MOVZX [strPtr + srcIndex + 1]
    // CMP upperChar 0xE000
    // JGE charCodeAt
    // CMP upperChar 0xDBFF
    // JLE charCodeAt
    // lowerChar = SUB lowerChar - 0xD800
    // lowerChar = SHL lowerChar, 10
    // lowerChar = ADD lowerChar + upperChar
    // lowerChar = ADD lowerChar + 0x2400
    // :charCodeAt
    // :done

    // Asserts
    // Arm should change to Uint32 for the strLength
    Assert(strLength->GetType() == TyUint32 || strLength->GetType() == TyMachReg);
    Assert(srcIndex->GetType() == TyVar || srcIndex->IsAddrOpnd());
    Assert(lowerChar->GetType() == TyMachReg || lowerChar->GetType() == TyUint32);
    Assert(strPtr->IsRegOpnd());

    IR::RegOpnd *tempReg = IR::RegOpnd::New(TyMachReg, func);
    IR::LabelInstr *labelCharCodeAt = IR::LabelInstr::New(Js::OpCode::Label, func);

    IR::IndirOpnd *tempIndirOpnd;
    if (srcIndex->IsAddrOpnd())
    {
        // Constant (tagged-int) index: fold "index + 1" and the load offset at JIT time.
        uint32 length = Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address) + 1U;
        InsertCompareBranch(strLength, IR::IntConstOpnd::New(length, TyUint32, func), Js::OpCode::BrLe_A, true, labelCharCodeAt, lastInstr);
        tempIndirOpnd = IR::IndirOpnd::New(strPtr, (length) * sizeof(char16), TyUint16, func);
    }
    else
    {
        // Dynamic index: untag the var int into tempReg, add 1, and verify
        // strLength > index + 1 (unsigned) so the upper surrogate load is in bounds.
        InsertMove(tempReg, srcIndex, lastInstr);
#if INT32VAR
        // Tagged int payload lives in the low 32 bits; reinterpret instead of shifting.
        IR::Opnd * reg32Bit = tempReg->UseWithNewType(TyInt32, func);
        InsertMove(tempReg, reg32Bit, lastInstr);
        tempReg = reg32Bit->AsRegOpnd();
#else
        InsertShift(Js::OpCode::Shr_A, false, tempReg, tempReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, func), lastInstr);
#endif
        InsertAdd(false, tempReg, tempReg, IR::IntConstOpnd::New(1, TyInt32, func), lastInstr);
        InsertCompareBranch(strLength, tempReg, Js::OpCode::BrLe_A, true, labelCharCodeAt, lastInstr);
        // Widen the index back to pointer size before using it in an indir.
        if(tempReg->GetSize() != MachPtr)
        {
            tempReg = tempReg->UseWithNewType(TyMachPtr, func)->AsRegOpnd();
        }
        tempIndirOpnd = IR::IndirOpnd::New(strPtr, tempReg, 1, TyUint16, func);
    }

    // By this point, we have added instructions before labelCharCodeAt to check for extra length required for the surrogate pair
    // The branching for that is already handled, all we have to do now is to check for correct values.

    // Validate char is in range [D800, DBFF]; otherwise just get a charCodeAt
    InsertCompareBranch(lowerChar, IR::IntConstOpnd::New(0xDC00, TyUint32, func), Js::OpCode::BrGe_A, labelCharCodeAt, lastInstr);
    InsertCompareBranch(lowerChar, IR::IntConstOpnd::New(0xD7FF, TyUint32, func), Js::OpCode::BrLe_A, labelCharCodeAt, lastInstr);

    // upperChar = MOVZX r3, [r1 + r3 * 2] -- this is the value of the upper surrogate pair char
    IR::RegOpnd *upperChar = IR::RegOpnd::New(TyInt32, func);
    InsertMove(upperChar, tempIndirOpnd, lastInstr);

    // Validate upper is in range [DC00, DFFF]; otherwise just get a charCodeAt
    InsertCompareBranch(upperChar, IR::IntConstOpnd::New(0xE000, TyUint32, func), Js::OpCode::BrGe_A, labelCharCodeAt, lastInstr);
    InsertCompareBranch(upperChar, IR::IntConstOpnd::New(0xDBFF, TyUint32, func), Js::OpCode::BrLe_A, labelCharCodeAt, lastInstr);

    // Combine the pair: (lower - 0xD800) << 10 + upper - 0xDC00 + 0x10000,
    // where 0x10000 - 0xDC00 = 0x2400 folds the last two constants together.
    // lowerChar = SUB lowerChar - 0xD800
    // lowerChar = SHL lowerChar, 10
    // lowerChar = ADD lowerChar + upperChar
    // lowerChar = ADD lowerChar + 0x2400
    InsertSub(false, lowerChar, lowerChar, IR::IntConstOpnd::New(0xD800, TyUint32, func), lastInstr);
    InsertShift(Js::OpCode::Shl_A, false, lowerChar, lowerChar, IR::IntConstOpnd::New(10, TyUint32, func), lastInstr);
    InsertAdd(false, lowerChar, lowerChar, upperChar, lastInstr);
    InsertAdd(false, lowerChar, lowerChar, IR::IntConstOpnd::New(0x2400, TyUint32, func), lastInstr);

    lastInstr->InsertBefore(labelCharCodeAt);
}
bool
Lowerer::GenerateFastInlineStringFromCodePoint(IR::Instr* instr)
{
    // Emits a fast path for String.fromCodePoint when the argument is likely a
    // tagged int: probe the char-string cache (via GenerateGetSingleCharString)
    // and only fall back to the CallDirect helper on a cache miss or non-int
    // input. Always returns true; when the argument is not likely int, no fast
    // path is emitted and the caller's helper call stands alone.
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    // Walk the ArgOut chain back from CallDirect's src2 to find the first
    // explicit argument. Typical sequence:
    // s8.var = StartCall 2 (0x2).i32 #000c
    // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, String object
    // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to FromCodePoint
    // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
    // s0[LikelyTaggedInt].var = CallDirect String_FromCodePoint.u32, arg1(s11)<0>.u32 #001c
    IR::Opnd * linkOpnd = instr->GetSrc2();
    IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    linkOpnd = tmpInstr->GetSrc2();

#if DBG
    IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
    Assert(argCount == 2);
#endif

    IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
    IR::Opnd *src1 = argInstr->GetSrc1();

    if (src1->GetValueType().IsLikelyInt())
    {
        //Trying to generate this code
        // MOV resultOpnd, dst
        // MOV fromCodePointIntArgOpnd, src1
        // SAR fromCodePointIntArgOpnd, Js::VarTag_Shift
        // JAE $Helper
        // CMP fromCodePointIntArgOpnd, Js::ScriptContext::CharStringCacheSize
        //
        // JAE $labelWCharStringCheck <
        // MOV resultOpnd, GetCharStringCache[fromCodePointIntArgOpnd]
        // TST resultOpnd, resultOpnd //Check for null
        // JEQ $helper
        // JMP $Done
        //
        //$labelWCharStringCheck:
        // resultOpnd = Call HelperGetStringForCharCodePoint
        // JMP $Done
        //$helper:

        // Use the call's dst as the result register, unless it is not a register
        // or aliases the source (which the fast path reads after writing dst).
        IR::RegOpnd * resultOpnd = nullptr;
        if (!instr->GetDst()->IsRegOpnd() || instr->GetDst()->IsEqual(src1))
        {
            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        }
        else
        {
            resultOpnd = instr->GetDst()->AsRegOpnd();
        }

        IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

        IR::RegOpnd * fromCodePointIntArgOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
        IR::AutoReuseOpnd autoReuseFromCodePointIntArgOpnd(fromCodePointIntArgOpnd, instr->m_func);
        InsertMove(fromCodePointIntArgOpnd, src1, instr);

        //Check for tagged int and get the untagged version.
        fromCodePointIntArgOpnd = GenerateUntagVar(fromCodePointIntArgOpnd, labelHelper, instr);
        GenerateGetSingleCharString(fromCodePointIntArgOpnd, resultOpnd, labelHelper, doneLabel, instr, true);

        instr->InsertBefore(labelHelper);
        instr->InsertAfter(doneLabel);

        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
    }

    return true;
}
  16929. bool
  16930. Lowerer::GenerateFastInlineStringFromCharCode(IR::Instr* instr)
  16931. {
  16932. Assert(instr->m_opcode == Js::OpCode::CallDirect);
  16933. // ArgOut sequence
  16934. // s8.var = StartCall 2 (0x2).i32 #000c
  16935. // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, String object
  16936. // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to FromCharCode
  16937. // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
  16938. // s0[LikelyTaggedInt].var = CallDirect String_FromCharCode.u32, arg1(s11)<0>.u32 #001c
  16939. IR::Opnd * linkOpnd = instr->GetSrc2();
  16940. IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
  16941. linkOpnd = tmpInstr->GetSrc2();
  16942. #if DBG
  16943. IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
  16944. Assert(argCount == 2);
  16945. #endif
  16946. IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
  16947. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
  16948. IR::Opnd *src1 = argInstr->GetSrc1();
  16949. if (src1->GetValueType().IsLikelyInt())
  16950. {
  16951. //Trying to generate this code
  16952. // MOV resultOpnd, dst
  16953. // MOV fromCharCodeIntArgOpnd, src1
  16954. // SAR fromCharCodeIntArgOpnd, Js::VarTag_Shift
  16955. // JAE $Helper
  16956. // CMP fromCharCodeIntArgOpnd, Js::ScriptContext::CharStringCacheSize
  16957. //
  16958. // JAE $labelWCharStringCheck <
  16959. // MOV resultOpnd, GetCharStringCache[fromCharCodeIntArgOpnd]
  16960. // TST resultOpnd, resultOpnd //Check for null
  16961. // JEQ $helper
  16962. // JMP $Done
  16963. //
  16964. //$labelWCharStringCheck:
  16965. // resultOpnd = Call HelperGetStringForCharW
  16966. // JMP $Done
  16967. //$helper:
  16968. IR::RegOpnd * resultOpnd = nullptr;
  16969. if (!instr->GetDst()->IsRegOpnd() || instr->GetDst()->IsEqual(src1))
  16970. {
  16971. resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
  16972. }
  16973. else
  16974. {
  16975. resultOpnd = instr->GetDst()->AsRegOpnd();
  16976. }
  16977. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  16978. IR::RegOpnd * fromCharCodeIntArgOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
  16979. IR::AutoReuseOpnd autoReuseFromCharCodeIntArgOpnd(fromCharCodeIntArgOpnd, instr->m_func);
  16980. InsertMove(fromCharCodeIntArgOpnd, src1, instr);
  16981. //Check for tagged int and get the untagged version.
  16982. fromCharCodeIntArgOpnd = GenerateUntagVar(fromCharCodeIntArgOpnd, labelHelper, instr);
  16983. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  16984. GenerateGetSingleCharString(fromCharCodeIntArgOpnd, resultOpnd, labelHelper, doneLabel, instr, false);
  16985. instr->InsertBefore(labelHelper);
  16986. instr->InsertAfter(doneLabel);
  16987. RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
  16988. }
  16989. return true;
  16990. }
void
Lowerer::GenerateGetSingleCharString(IR::RegOpnd * charCodeOpnd, IR::Opnd * resultOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * doneLabel, IR::Instr * instr, bool isCodePoint)
{
    // Emits (before 'instr') the cache lookup for a single-character string:
    // probe the direct CharStringCacheA array for small char codes; for codes
    // >= CharStringCacheSize call the GetStringForChar/CodePoint runtime helper.
    // A null entry in the direct cache jumps to labelHelper (caller's slow path);
    // every successful path writes instr->GetDst() and jumps to doneLabel.
    // Note: neither labelHelper nor doneLabel is inserted here - the caller
    // places both.
    //
    // MOV cacheReg, CharStringCache
    // CMP charCodeOpnd, Js::ScriptContext::CharStringCacheSize
    // JAE $labelWCharStringCheck <
    // MOV resultOpnd, cacheReg[charCodeOpnd]
    // TST resultOpnd, resultOpnd //Check for null
    // JEQ $helper
    // JMP $Done
    //
    //$labelWCharStringCheck:
    // Arg1 = charCodeOpnd
    // Arg0 = cacheReg
    // resultOpnd = Call HelperGetStringForCharW/CodePoint
    // JMP $Done
    //$helper:

    IR::LabelInstr *labelWCharStringCheck = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    //Try to load from in CharStringCacheA
    IR::RegOpnd *cacheRegOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
    IR::AutoReuseOpnd autoReuseCacheRegOpnd(cacheRegOpnd, instr->m_func);

    // The A-cache sits at offset 0 of the CharStringCache, so one base pointer
    // serves both the direct-array probe and the helper call.
    Assert(Js::JavascriptLibrary::GetCharStringCacheAOffset() == Js::JavascriptLibrary::GetCharStringCacheOffset());
    InsertMove(cacheRegOpnd, this->LoadLibraryValueOpnd(instr, LibraryValue::ValueCharStringCache), instr);
    // Unsigned compare: char codes >= cache size go to the helper-call path.
    InsertCompareBranch(charCodeOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint32, this->m_func), Js::OpCode::BrGe_A, true, labelWCharStringCheck, instr);

    InsertMove(resultOpnd, IR::IndirOpnd::New(cacheRegOpnd, charCodeOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyVar, instr->m_func), instr);
    // A null cache slot means the string was never created: fall back to labelHelper.
    InsertTestBranch(resultOpnd, resultOpnd, Js::OpCode::BrEq_A, labelHelper, instr);

    InsertMove(instr->GetDst(), resultOpnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);

    instr->InsertBefore(labelWCharStringCheck);

    IR::JnHelperMethod helperMethod;
    if (isCodePoint)
    {
        helperMethod = IR::HelperGetStringForCharCodePoint;
    }
    else
    {
        // fromCharCode truncates to 16 bits; the code-point helper takes the full value.
        InsertMove(charCodeOpnd, charCodeOpnd->UseWithNewType(TyUint16, instr->m_func), instr);
        helperMethod = IR::HelperGetStringForChar;
    }

    //Try to load from in CharStringCacheW or CharStringCacheCodePoint, this is a helper call.
    // Arguments are pushed in reverse order: charCode first, then the cache pointer.
    this->m_lowererMD.LoadHelperArgument(instr, charCodeOpnd);
    this->m_lowererMD.LoadHelperArgument(instr, cacheRegOpnd);

    IR::Instr* helperCallInstr = IR::Instr::New(Js::OpCode::Call, resultOpnd, IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
    instr->InsertBefore(helperCallInstr);
    this->m_lowererMD.LowerCall(helperCallInstr, 0);

    InsertMove(instr->GetDst(), resultOpnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
}
  17039. bool
  17040. Lowerer::GenerateFastInlineGlobalObjectParseInt(IR::Instr *instr)
  17041. {
  17042. Assert(instr->m_opcode == Js::OpCode::CallDirect);
  17043. // ArgOut sequence
  17044. // s8.var = StartCall 2 (0x2).i32 #000c
  17045. // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, global object
  17046. // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to parseInt
  17047. // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
  17048. // s0[LikelyTaggedInt].var = CallDirect GlobalObject_ParseInt.u32, arg1(s11)<0>.u32 #001c
  17049. IR::Opnd * linkOpnd = instr->GetSrc2();
  17050. IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
  17051. linkOpnd = tmpInstr->GetSrc2();
  17052. #if DBG
  17053. IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
  17054. Assert(argCount == 2);
  17055. #endif
  17056. IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
  17057. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
  17058. IR::Opnd *parseIntArgOpnd = argInstr->GetSrc1();
  17059. if (parseIntArgOpnd->GetValueType().IsLikelyNumber())
  17060. {
  17061. //If likely int check for tagged int and set the dst
  17062. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  17063. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  17064. if (!parseIntArgOpnd->IsTaggedInt())
  17065. {
  17066. this->m_lowererMD.GenerateSmIntTest(parseIntArgOpnd, instr, labelHelper);
  17067. }
  17068. if (instr->GetDst())
  17069. {
  17070. this->InsertMove(instr->GetDst(), parseIntArgOpnd, instr);
  17071. }
  17072. InsertBranch(Js::OpCode::Br, doneLabel, instr);
  17073. instr->InsertBefore(labelHelper);
  17074. instr->InsertAfter(doneLabel);
  17075. RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
  17076. }
  17077. return true;
  17078. }
  17079. void
  17080. Lowerer::GenerateFastInlineArrayPop(IR::Instr * instr)
  17081. {
  17082. Assert(instr->m_opcode == Js::OpCode::InlineArrayPop);
  17083. IR::Opnd *arrayOpnd = instr->GetSrc1();
  17084. IR::LabelInstr *bailOutLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  17085. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  17086. bool isLikelyNativeArray = arrayOpnd->GetValueType().IsLikelyNativeArray();
  17087. if (ShouldGenerateArrayFastPath(arrayOpnd, false, false, false))
  17088. {
  17089. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  17090. if(isLikelyNativeArray)
  17091. {
  17092. //We bailOut on cases like length == 0, Array Test failing cases (Runtime helper cannot handle these cases)
  17093. GenerateFastPop(arrayOpnd, instr, labelHelper, doneLabel, bailOutLabelHelper);
  17094. }
  17095. else
  17096. {
  17097. //We jump to helper on cases like length == 0, Array Test failing cases
  17098. GenerateFastPop(arrayOpnd, instr, labelHelper, doneLabel, labelHelper);
  17099. }
  17100. instr->InsertBefore(labelHelper);
  17101. ///JMP to $doneLabel
  17102. InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
  17103. }
  17104. else
  17105. {
  17106. //We assume here that the array will be a Var array. - Runtime Helper calls assume this.
  17107. Assert(!isLikelyNativeArray);
  17108. }
  17109. instr->InsertAfter(doneLabel);
  17110. if(isLikelyNativeArray)
  17111. {
  17112. //Lower IR::BailOutConventionalNativeArrayAccessOnly here.
  17113. LowerOneBailOutKind(instr, IR::BailOutConventionalNativeArrayAccessOnly, false, false);
  17114. instr->InsertAfter(bailOutLabelHelper);
  17115. }
  17116. GenerateHelperToArrayPopFastPath(instr, doneLabel, bailOutLabelHelper);
  17117. }
void
Lowerer::GenerateFastInlineIsArray(IR::Instr * instr)
{
    // Emits a fast path for Array.isArray: inspect the argument's typeId and
    // set dst to true for JS array type ids, false for everything else that is
    // provably not an array, and defer to the CallDirect helper for Proxy /
    // HostDispatch (which can wrap arrays and need a runtime check).
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    IR::Opnd * dst = instr->GetDst();
    Assert(dst);

    //CallDirect src2
    IR::Opnd * linkOpnd = instr->GetSrc2();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    // argsOpnd[0] = implicit this, argsOpnd[1] = the value being tested.
    IR::Opnd * argsOpnd[2] = { 0 };
    bool result = instr->FetchOperands(argsOpnd, 2);
    Assert(result);
    AnalysisAssert(argsOpnd[1]);

    IR::LabelInstr *helperLabel = InsertLabel(true, instr);
    IR::Instr * insertInstr = helperLabel;
    IR::LabelInstr *doneLabel = InsertLabel(false, instr->m_next);

    ValueType valueType = argsOpnd[1]->GetValueType();
    IR::RegOpnd * src = GetRegOpnd(argsOpnd[1], insertInstr, m_func, argsOpnd[1]->GetType());

    // When the value is likely an array, the not-array paths are cold, so the
    // labels are marked as helper blocks in that case.
    IR::LabelInstr *checkNotArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, valueType.IsLikelyArray());
    IR::LabelInstr *notArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, valueType.IsLikelyArray());

    // Tagged values (ints/floats) are never arrays: go straight to $notArray.
    if (!src->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(src, insertInstr, notArrayLabel);
    }

    // MOV typeOpnd, [opnd + offset(type)]
    IR::RegOpnd *typeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    const IR::AutoReuseOpnd autoReuseTypeOpnd(typeOpnd, m_func);
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, m_func);
    InsertMove(typeOpnd, indirOpnd, insertInstr);

    // MOV typeIdOpnd, [typeOpnd + offset(typeId)]
    IR::RegOpnd *typeIdOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    const IR::AutoReuseOpnd autoReuseTypeIdOpnd(typeIdOpnd, m_func);
    indirOpnd = IR::IndirOpnd::New(typeOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func);
    InsertMove(typeIdOpnd, indirOpnd, insertInstr);

    // CMP typeIdOpnd, TypeIds_ArrayFirst
    // JLT $checkNotArray   -- below the array range: may still be Proxy/HostDispatch
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_ArrayFirst, TyInt32, m_func),
        Js::OpCode::BrLt_A,
        checkNotArrayLabel,
        insertInstr);

    // CMP typeIdOpnd, TypeIds_ArrayLastWithES5
    // JGT $notArray
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_ArrayLastWithES5, TyInt32, m_func),
        Js::OpCode::BrGt_A,
        notArrayLabel,
        insertInstr);

    // In [ArrayFirst, ArrayLastWithES5]: definitely an array.
    // MOV dst, True
    InsertMove(dst, LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);

    // JMP $done
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    // $checkNotArray:
    insertInstr->InsertBefore(checkNotArrayLabel);

    // Proxy and HostDispatch may wrap an array, so only the runtime helper can
    // answer for them. Both type ids sit below TypeIds_ArrayFirst, which is why
    // this check lives on the JLT path (asserted by the CompileAsserts below).
    // CMP typeIdOpnd, TypeIds_Proxy
    // JEQ $helperLabel
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
        Js::OpCode::BrEq_A,
        helperLabel,
        insertInstr);
    CompileAssert(Js::TypeIds_Proxy < Js::TypeIds_ArrayFirst);

    // CMP typeIdOpnd, TypeIds_HostDispatch
    // JEQ $helperLabel
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
        Js::OpCode::BrEq_A,
        helperLabel,
        insertInstr);
    CompileAssert(Js::TypeIds_HostDispatch < Js::TypeIds_ArrayFirst);

    // $notArray:
    insertInstr->InsertBefore(notArrayLabel);

    // MOV dst, False
    InsertMove(dst, LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    RelocateCallDirectToHelperPath(tmpInstr, helperLabel);
}
void
Lowerer::GenerateFastInlineHasOwnProperty(IR::Instr * instr)
{
    // Emits a fast path for Object.prototype.hasOwnProperty when the property
    // argument is a PropertyString: probe the string's ldElem inline cache
    // against the object's type. When the property name comes from a for-in
    // enumerator (BrOnEmpty/BrOnNotEmpty def), an additional fast path answers
    // from the enumerator's cached state on an inline-cache miss.
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    //CallDirect src2
    IR::Opnd * linkOpnd = instr->GetSrc2();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    // argsOpnd[0] = this (the object), argsOpnd[1] = the property name.
    IR::Opnd * argsOpnd[2] = { 0 };
    bool result = instr->FetchOperands(argsOpnd, 2);
    Assert(result);
    AnalysisAssert(argsOpnd[0] && argsOpnd[1]);

    // No fast path unless both operands are registers and could plausibly be
    // an object receiver with a string property name.
    if (argsOpnd[1]->GetValueType().IsNotString()
        || argsOpnd[0]->GetValueType().IsNotObject()
        || !argsOpnd[0]->IsRegOpnd()
        || !argsOpnd[1]->IsRegOpnd())
    {
        return;
    }

    IR::RegOpnd * thisObj = argsOpnd[0]->AsRegOpnd();
    IR::RegOpnd * propOpnd = argsOpnd[1]->AsRegOpnd();

    // fast path case where hasOwnProperty is being called using a property name loaded via a for-in loop
    bool generateForInFastpath = propOpnd->GetValueType().IsString()
        && propOpnd->m_sym->m_isSingleDef
        && (propOpnd->m_sym->m_instrDef->m_opcode == Js::OpCode::BrOnEmpty
            || propOpnd->m_sym->m_instrDef->m_opcode == Js::OpCode::BrOnNotEmpty);

    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    IR::LabelInstr * labelHelper = InsertLabel(true, instr);
    // With the for-in fast path, an inline-cache miss gets a second chance at
    // $cacheMissLabel before falling back to the helper; otherwise a miss goes
    // straight to the helper.
    IR::LabelInstr * cacheMissLabel = generateForInFastpath ? IR::LabelInstr::New(Js::OpCode::Label, m_func, true) : labelHelper;
    IR::Instr * insertInstr = labelHelper;

    // GenerateObjectTest(propOpnd, $labelHelper)
    // CMP indexOpnd, PropertyString::`vtable'
    // JNE $helper
    // GenerateObjectTest(thisObj, $labelHelper)
    // MOV inlineCacheOpnd, propOpnd->lsElemInlineCache
    // MOV objectTypeOpnd, thisObj->type
    // GenerateDynamicLoadPolymorphicInlineCacheSlot(inlineCacheOpnd, objectTypeOpnd) ; loads inline cache for given type
    // GenerateLocalInlineCacheCheck(objectTypeOpnd, inlineCacheOpnd, $notInlineSlotsLabel) ; check for type in inline slots, jump to $notInlineSlotsLabel on failure
    // MOV dst, ValueTrue
    // JMP $done
    // $notInlineSlotsLabel:
    // GenerateLoadTaggedType(objectTypeOpnd, opndTaggedType)
    // GenerateLocalInlineCacheCheck(opndTaggedType, inlineCacheOpnd, $cacheMissLabel) ; check for type in aux slot, jump to $cacheMissLabel on failure
    // MOV dst, ValueTrue
    // JMP $done

    m_lowererMD.GenerateObjectTest(propOpnd, insertInstr, labelHelper);
    // The vtable compare ensures the property operand really is a PropertyString
    // (which carries the ldElem inline cache used below).
    InsertCompareBranch(IR::IndirOpnd::New(propOpnd, 0, TyMachPtr, m_func), LoadVTableValueOpnd(insertInstr, VTableValue::VtablePropertyString), Js::OpCode::BrNeq_A, labelHelper, insertInstr);
    m_lowererMD.GenerateObjectTest(thisObj, insertInstr, labelHelper);

    IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(propOpnd, Js::PropertyString::GetOffsetOfLdElemInlineCache(), TyMachPtr, m_func), insertInstr);

    IR::RegOpnd * objectTypeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(objectTypeOpnd, IR::IndirOpnd::New(thisObj, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, m_func), insertInstr);

    GenerateDynamicLoadPolymorphicInlineCacheSlot(insertInstr, inlineCacheOpnd, objectTypeOpnd);

    // Cache hit on the inline-slots entry => the object owns the property.
    IR::LabelInstr * notInlineSlotsLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    GenerateLocalInlineCacheCheck(insertInstr, objectTypeOpnd, inlineCacheOpnd, notInlineSlotsLabel);
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    insertInstr->InsertBefore(notInlineSlotsLabel);

    // Retry against the tagged type, which matches aux-slot cache entries.
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, m_func);
    m_lowererMD.GenerateLoadTaggedType(insertInstr, objectTypeOpnd, opndTaggedType);
    GenerateLocalInlineCacheCheck(insertInstr, opndTaggedType, inlineCacheOpnd, cacheMissLabel);
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    if (!generateForInFastpath)
    {
        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
        return;
    }

    insertInstr->InsertBefore(cacheMissLabel);

    // CMP forInEnumeratorOpnd->canUseJitFastPath, 0
    // JEQ $labelHelper
    // MOV cachedDataTypeOpnd, forInEnumeratorOpnd->enumeratorInitialType
    // CMP thisObj->type, cachedDataTypeOpnd
    // JNE $labelHelper
    // CMP forInEnumeratorOpnd->enumeratingPrototype, 0
    // JNE $falseLabel
    // MOV dst, True
    // JMP $doneLabel
    // $falseLabel: [helper]
    // MOV dst, False
    // JMP $doneLabel
    // $labelHelper: [helper]
    // CallDirect code
    // ...
    // $doneLabel:

    // The for-in branch instr's src1 is the enumerator backing this loop.
    IR::Opnd * forInEnumeratorOpnd = argsOpnd[1]->AsRegOpnd()->m_sym->m_instrDef->GetSrc1();

    // go to helper if we can't use JIT fastpath
    IR::Opnd * canUseJitFastPathOpnd = GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfCanUseJitFastPath(), TyInt8);
    InsertCompareBranch(canUseJitFastPathOpnd, IR::IntConstOpnd::New(0, TyInt8, m_func), Js::OpCode::BrEq_A, labelHelper, insertInstr);

    // go to helper if initial type is not same as the object we are querying
    IR::RegOpnd * cachedDataTypeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(cachedDataTypeOpnd, GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialType(), TyMachPtr), insertInstr);
    InsertCompareBranch(cachedDataTypeOpnd, IR::IndirOpnd::New(thisObj, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, m_func), Js::OpCode::BrNeq_A, labelHelper, insertInstr);

    // if we haven't yet gone to helper, then we can check if we are enumerating the prototype to know if property is an own property
    IR::LabelInstr *falseLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::Opnd * enumeratingPrototype = GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratingPrototype(), TyInt8);
    InsertCompareBranch(enumeratingPrototype, IR::IntConstOpnd::New(0, TyInt8, m_func), Js::OpCode::BrNeq_A, falseLabel, insertInstr);

    // assume true is the main path
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    // load false on helper path
    insertInstr->InsertBefore(falseLabel);
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
  17306. bool
  17307. Lowerer::ShouldGenerateStringReplaceFastPath(IR::Instr * callInstr, IntConstType argCount)
  17308. {
  17309. // a.replace(b,c)
  17310. // We want to emit the fast path if 'a' and 'c' are strings and 'b' is a regex
  17311. //
  17312. // argout sequence:
  17313. // arg1(s12)<0>.var = ArgOut_A s2.var, s11.var #0014 <---- a
  17314. // arg2(s13)<4>.var = ArgOut_A s3.var, arg1(s12)<0>.var #0018 <---- b
  17315. // arg3(s14)<8>.var = ArgOut_A s4.var, arg2(s13)<4>.var #001c <---- c
  17316. // s0[LikelyString].var = CallI s5[ffunc].var, arg3(s14)<8>.var #0020
  17317. IR::Opnd *linkOpnd = callInstr->GetSrc2();
  17318. Assert(argCount == 2);
  17319. while(linkOpnd->IsSymOpnd())
  17320. {
  17321. IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
  17322. StackSym *sym = src2->m_sym->AsStackSym();
  17323. Assert(sym->m_isSingleDef);
  17324. IR::Instr *argInstr = sym->m_instrDef;
  17325. Assert(argCount >= 0);
  17326. // check to see if 'a' and 'c' are likely strings
  17327. if((argCount == 2 || argCount == 0) && (!argInstr->GetSrc1()->GetValueType().IsLikelyString()))
  17328. {
  17329. return false;
  17330. }
  17331. // we want 'b' to be regex. Don't generate fastpath if it is a tagged int
  17332. if((argCount == 1) && (argInstr->GetSrc1()->IsTaggedInt()))
  17333. {
  17334. return false;
  17335. }
  17336. argCount--;
  17337. linkOpnd = argInstr->GetSrc2();
  17338. }
  17339. return true;
  17340. }
bool
Lowerer::GenerateFastReplace(IR::Opnd* strOpnd, IR::Opnd* src1, IR::Opnd* src2, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
    // a.replace(b,c)
    // We want to emit the fast path if 'a' and 'c' are strings and 'b' is a regex
    //
    // strOpnd --> a
    // src1 --> b
    // src2 --> c
    //
    // Emits runtime type checks that branch to labelHelper when the operands don't have
    // the expected types, then an inline call to the RegExp replace helper.
    // Always returns true (the bool return lets callers share the fast-path pattern).
    // doneLabel is unused here; control falls through past the emitted helper call.
    IR::Opnd * callDst = callInstr->GetDst();
    Assert(strOpnd->GetValueType().IsLikelyString() && src2->GetValueType().IsLikelyString());
    // Runtime check that 'a' is a string, unless statically known.
    if(!strOpnd->GetValueType().IsString())
    {
        strOpnd = GetRegOpnd(strOpnd, insertInstr, m_func, TyVar);
        this->GenerateStringTest(strOpnd->AsRegOpnd(), insertInstr, labelHelper);
    }
    // 'b' must be an object before its vtable can be inspected.
    if(!src1->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(src1, insertInstr, labelHelper);
    }
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptRegExp);
    // Check 'b' is exactly a JavascriptRegExp by comparing its vtable:
    // cmp [regex], vtableAddress
    // jne $labelHelper
    src1 = GetRegOpnd(src1, insertInstr, m_func, TyVar);
    InsertCompareBranch(
        IR::IndirOpnd::New(src1->AsRegOpnd(), 0, TyMachPtr, insertInstr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertInstr);
    // Runtime check that 'c' is a string, unless statically known.
    if(!src2->GetValueType().IsString())
    {
        src2 = GetRegOpnd(src2, insertInstr, m_func, TyVar);
        this->GenerateStringTest(src2->AsRegOpnd(), insertInstr, labelHelper);
    }
    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, insertInstr->m_func);
    if (callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    insertInstr->InsertBefore(helperCallInstr);
    // Preserve bailout-on-implicit-calls behavior on the inlined helper call.
    if (insertInstr->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(insertInstr->GetBailOutKind()))
    {
        helperCallInstr = AddBailoutToHelperCallInstr(helperCallInstr, insertInstr->GetBailOutInfo(), insertInstr->GetBailOutKind(), insertInstr);
    }
    //scriptContext, pRegEx, pThis, pReplace (to be pushed in reverse order)
    // pReplace, pThis, pRegEx
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, src2);
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, strOpnd);
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, src1);
    // script context
    LoadScriptContext(helperCallInstr);
    // Pick the helper variant depending on whether the call's result is observed.
    if(callDst)
    {
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperRegExp_ReplaceStringResultUsed);
    }
    else
    {
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperRegExp_ReplaceStringResultNotUsed);
    }
    return true;
}
  17403. ///----
void
Lowerer::GenerateFastInlineStringSplitMatch(IR::Instr * instr)
{
    // a.split(b,c (optional) )
    // We want to emit the fast path when
    //   1. c is not present, and
    //   2. 'a' is a string and 'b' is a regex.
    //
    // a.match(b)
    // We want to emit the fast path when 'a' is a string and 'b' is a regex.
    //
    // On any failed runtime check, control branches to labelHelper, which falls into
    // the original CallDirect (relocated argouts included).
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    IR::Opnd * callDst = instr->GetDst();
    //helperCallOpnd
    IR::Opnd * src1 = instr->GetSrc1();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = instr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    IR::Opnd * argsOpnd[2];
    // Only the two-operand form ('this' + one argument) gets a fast path.
    if(!instr->FetchOperands(argsOpnd, 2))
    {
        return;
    }
    // Skip fast-path generation when 'a' is not likely a string, or 'b' is a
    // tagged int (which can never be a regex object).
    if(!argsOpnd[0]->GetValueType().IsLikelyString() || argsOpnd[1]->IsTaggedInt())
    {
        return;
    }
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    // Runtime check that 'a' is a string, unless statically known.
    if(!argsOpnd[0]->GetValueType().IsString())
    {
        argsOpnd[0] = GetRegOpnd(argsOpnd[0], instr, m_func, TyVar);
        this->GenerateStringTest(argsOpnd[0]->AsRegOpnd(), instr, labelHelper);
    }
    // 'b' must be an object before its vtable can be inspected.
    if(!argsOpnd[1]->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(argsOpnd[1], instr, labelHelper);
    }
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    // Check 'b' is exactly a JavascriptRegExp by comparing its vtable:
    // cmp [regex], vtableAddress
    // jne $labelHelper
    argsOpnd[1] = GetRegOpnd(argsOpnd[1], instr, m_func, TyVar);
    InsertCompareBranch(
        IR::IndirOpnd::New(argsOpnd[1]->AsRegOpnd(), 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        instr);
    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    if (callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    instr->InsertBefore(helperCallInstr);
    // Preserve bailout-on-implicit-calls behavior on the inlined helper call.
    if (instr->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(instr->GetBailOutKind()))
    {
        helperCallInstr = AddBailoutToHelperCallInstr(helperCallInstr, instr->GetBailOutInfo(), instr->GetBailOutKind(), instr);
    }
    // [stackAllocationPointer, ]scriptcontext, regexp, input[, limit] (to be pushed in reverse order)
    if(src1->AsHelperCallOpnd()->m_fnHelper == IR::JnHelperMethod::HelperString_Split)
    {
        //limit
        //As we are optimizing only for two operands, make limit UINT_MAX
        IR::Opnd* limit = IR::IntConstOpnd::New(UINT_MAX, TyUint32, instr->m_func);
        this->m_lowererMD.LoadHelperArgument(helperCallInstr, limit);
    }
    //input, regexp
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, argsOpnd[0]);
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, argsOpnd[1]);
    // script context
    LoadScriptContext(helperCallInstr);
    IR::JnHelperMethod helperMethod = IR::JnHelperMethod::HelperInvalid;
    IR::AutoReuseOpnd autoReuseStackAllocationOpnd;
    // When the result is used but may be a temp object, pass stack space for the
    // result array and use the "...AndMayBeTemp" helper variants.
    if(callDst && instr->dstIsTempObject)
    {
        switch(src1->AsHelperCallOpnd()->m_fnHelper)
        {
        case IR::JnHelperMethod::HelperString_Split:
            helperMethod = IR::JnHelperMethod::HelperRegExp_SplitResultUsedAndMayBeTemp;
            break;
        case IR::JnHelperMethod::HelperString_Match:
            helperMethod = IR::JnHelperMethod::HelperRegExp_MatchResultUsedAndMayBeTemp;
            break;
        default:
            Assert(false);
            __assume(false);
        }
        // Allocate some space on the stack for the result array
        IR::RegOpnd *const stackAllocationOpnd = IR::RegOpnd::New(TyVar, m_func);
        autoReuseStackAllocationOpnd.Initialize(stackAllocationOpnd, m_func);
        stackAllocationOpnd->SetValueType(callDst->GetValueType());
        GenerateMarkTempAlloc(stackAllocationOpnd, Js::JavascriptArray::StackAllocationSize, helperCallInstr);
        m_lowererMD.LoadHelperArgument(helperCallInstr, stackAllocationOpnd);
    }
    else
    {
        // Otherwise pick the used / not-used variant based on whether the result is observed.
        switch(src1->AsHelperCallOpnd()->m_fnHelper)
        {
        case IR::JnHelperMethod::HelperString_Split:
            helperMethod =
                callDst
                    ? IR::JnHelperMethod::HelperRegExp_SplitResultUsed
                    : IR::JnHelperMethod::HelperRegExp_SplitResultNotUsed;
            break;
        case IR::JnHelperMethod::HelperString_Match:
            helperMethod =
                callDst
                    ? IR::JnHelperMethod::HelperRegExp_MatchResultUsed
                    : IR::JnHelperMethod::HelperRegExp_MatchResultNotUsed;
            break;
        default:
            Assert(false);
            __assume(false);
        }
    }
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);
    // The fast path jumps over the original CallDirect (now the helper path).
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);
    instr->InsertBefore(labelHelper);
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
void
Lowerer::GenerateFastInlineRegExpExec(IR::Instr * instr)
{
    // a.exec(b)
    // We want to emit the fast path when 'a' is a regex and 'b' is a string
    //
    // Two layers of fast path are emitted:
    //   1. Type checks (string + regex vtable) guarding an inline helper call.
    //   2. Optionally (ExecBOIFastPath phase), a "begin-of-input literal" early-out:
    //      if the compiled regex program is a BOILiteral2 (appears to be a 2-char
    //      literal anchored at the start -- see tag check below) and not global, a
    //      mismatch of the string's first DWORD yields null without calling the helper.
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    IR::Opnd * callDst = instr->GetDst();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = instr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    IR::Opnd * argsOpnd[2];
    if (!instr->FetchOperands(argsOpnd, 2))
    {
        return;
    }
    IR::Opnd *opndString = argsOpnd[1];
    // Skip fast-path generation when 'b' is not likely a string, or 'a' is a
    // tagged int (which can never be a regex object).
    if(!opndString->GetValueType().IsLikelyString() || argsOpnd[0]->IsTaggedInt())
    {
        return;
    }
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    // Runtime check that 'b' is a string, unless statically known.
    if(!opndString->GetValueType().IsString())
    {
        opndString = GetRegOpnd(opndString, instr, m_func, TyVar);
        this->GenerateStringTest(opndString->AsRegOpnd(), instr, labelHelper);
    }
    IR::Opnd *opndRegex = argsOpnd[0];
    // 'a' must be an object before its vtable can be inspected.
    if(!opndRegex->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(opndRegex, instr, labelHelper);
    }
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    // Check 'a' is exactly a JavascriptRegExp by comparing its vtable:
    // cmp [regex], vtableAddress
    // jne $labelHelper
    opndRegex = GetRegOpnd(opndRegex, instr, m_func, TyVar);
    InsertCompareBranch(
        IR::IndirOpnd::New(opndRegex->AsRegOpnd(), 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        instr);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    if (!PHASE_OFF(Js::ExecBOIFastPathPhase, m_func))
    {
        // Load pattern from regex operand
        IR::RegOpnd *opndPattern = IR::RegOpnd::New(TyMachPtr, m_func);
        Lowerer::InsertMove(
            opndPattern,
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfPattern(), TyMachPtr, m_func),
            instr);
        // Load program from pattern
        IR::RegOpnd *opndProgram = IR::RegOpnd::New(TyMachPtr, m_func);
        Lowerer::InsertMove(
            opndProgram,
            IR::IndirOpnd::New(opndPattern, offsetof(UnifiedRegex::RegexPattern, rep) + offsetof(UnifiedRegex::RegexPattern::UnifiedRep, program), TyMachPtr, m_func),
            instr);
        IR::LabelInstr *labelFastHelper = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        // We want the program's tag to be BOILiteral2Tag
        InsertCompareBranch(
            IR::IndirOpnd::New(opndProgram, (int32)UnifiedRegex::Program::GetOffsetOfTag(), TyUint8, m_func),
            IR::IntConstOpnd::New((IntConstType)UnifiedRegex::Program::GetBOILiteral2Tag(), TyUint8, m_func),
            Js::OpCode::BrNeq_A,
            labelFastHelper,
            instr);
        // Test the program's flags for "global"
        InsertTestBranch(
            IR::IndirOpnd::New(opndProgram, offsetof(UnifiedRegex::Program, flags), TyUint8, m_func),
            IR::IntConstOpnd::New(UnifiedRegex::GlobalRegexFlag, TyUint8, m_func),
            Js::OpCode::BrNeq_A,
            labelFastHelper,
            instr);
        IR::LabelInstr *labelNoMatch = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        // If string length < 2...
        InsertCompareBranch(
            IR::IndirOpnd::New(opndString->AsRegOpnd(), offsetof(Js::JavascriptString, m_charLength), TyUint32, m_func),
            IR::IntConstOpnd::New(2, TyUint32, m_func),
            Js::OpCode::BrLt_A,
            labelNoMatch,
            instr);
        // ...or the DWORD doesn't match the pattern...
        IR::RegOpnd *opndBuffer = IR::RegOpnd::New(TyMachReg, m_func);
        Lowerer::InsertMove(
            opndBuffer,
            IR::IndirOpnd::New(opndString->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, m_func),
            instr);
        IR::LabelInstr *labelGotString = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        // A null buffer means the string is not flattened yet; call String_GetSz
        // to materialize the character buffer.
        InsertTestBranch(opndBuffer, opndBuffer, Js::OpCode::BrNeq_A, labelGotString, instr);
        m_lowererMD.LoadHelperArgument(instr, opndString);
        IR::Instr *instrCall = IR::Instr::New(Js::OpCode::Call, opndBuffer, IR::HelperCallOpnd::New(IR::HelperString_GetSz, m_func), m_func);
        instr->InsertBefore(instrCall);
        m_lowererMD.LowerCall(instrCall, 0);
        instr->InsertBefore(labelGotString);
        // Compare the first two characters (one DWORD) of the input against the
        // program's stored 2-char literal; equal means a possible match, so take
        // the regular helper path.
        IR::RegOpnd *opndBufferDWORD = IR::RegOpnd::New(TyUint32, m_func);
        Lowerer::InsertMove(
            opndBufferDWORD,
            IR::IndirOpnd::New(opndBuffer, 0, TyUint32, m_func),
            instr);
        InsertCompareBranch(
            IR::IndirOpnd::New(opndProgram, (int32)(UnifiedRegex::Program::GetOffsetOfRep() + UnifiedRegex::Program::GetOffsetOfBOILiteral2Literal()), TyUint32, m_func),
            opndBufferDWORD,
            Js::OpCode::BrEq_A,
            labelFastHelper,
            instr);
        // ...then set the last index to 0...
        instr->InsertBefore(labelNoMatch);
        Lowerer::InsertMove(
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), TyVar, m_func),
            IR::AddrOpnd::NewNull(m_func),
            instr);
        Lowerer::InsertMove(
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), TyUint32, m_func),
            IR::IntConstOpnd::New(0, TyUint32, m_func),
            instr);
        // ...and set the dst to null...
        if (callDst)
        {
            Lowerer::InsertMove(
                callDst,
                LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
                instr);
        }
        // ...and we're done.
        this->InsertBranch(Js::OpCode::Br, doneLabel, instr);
        instr->InsertBefore(labelFastHelper);
    }
    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    if (callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    instr->InsertBefore(helperCallInstr);
    // Preserve bailout-on-implicit-calls behavior on the inlined helper call.
    if (instr->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(instr->GetBailOutKind()))
    {
        helperCallInstr = AddBailoutToHelperCallInstr(helperCallInstr, instr->GetBailOutInfo(), instr->GetBailOutKind(), instr);
    }
    // [stackAllocationPointer, ]scriptcontext, regexp, string (to be pushed in reverse order)
    //string, regexp
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, opndString);
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, opndRegex);
    // script context
    LoadScriptContext(helperCallInstr);
    IR::JnHelperMethod helperMethod;
    IR::AutoReuseOpnd autoReuseStackAllocationOpnd;
    if (callDst)
    {
        // Result used: when the result may be a temp object, also pass stack space
        // for the result array.
        if (instr->dstIsTempObject)
        {
            helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultUsedAndMayBeTemp;
            // Allocate some space on the stack for the result array
            IR::RegOpnd *const stackAllocationOpnd = IR::RegOpnd::New(TyVar, m_func);
            autoReuseStackAllocationOpnd.Initialize(stackAllocationOpnd, m_func);
            stackAllocationOpnd->SetValueType(callDst->GetValueType());
            GenerateMarkTempAlloc(stackAllocationOpnd, Js::JavascriptArray::StackAllocationSize, helperCallInstr);
            m_lowererMD.LoadHelperArgument(helperCallInstr, stackAllocationOpnd);
        }
        else
        {
            helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultUsed;
        }
    }
    else
    {
        helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultNotUsed;
    }
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);
    // The fast path jumps over the original CallDirect (now the helper path).
    instr->InsertAfter(doneLabel);
    instr->InsertBefore(labelHelper);
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
// Generate a fast path for the "in" operator: quickly check that the object is a
// JavascriptArray (any of the three element kinds) with no missing values, and that
// the integer index falls within the head segment's length.
void Lowerer::GenerateFastArrayIsIn(IR::Instr * instr)
{
    // operator "foo in bar"
    IR::Opnd* src1 = instr->GetSrc1(); // foo
    IR::Opnd* src2 = instr->GetSrc2(); // bar
    // Only profitable when the index is likely an int and the array profile says
    // there are no missing values (a hole would require a prototype lookup).
    if (
        !src1->GetValueType().IsLikelyInt() ||
        // Do not do a fast path if we know for sure we don't have an int
        src1->IsNotInt() ||
        !src2->GetValueType().IsLikelyArray() ||
        !src2->GetValueType().HasNoMissingValues())
    {
        return;
    }
    IR::LabelInstr* helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr* isArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    // Untag the index; branches to helper if it is not a tagged int at runtime.
    IR::RegOpnd* src1Untagged = GenerateUntagVar(src1->AsRegOpnd(), helperLabel, instr);
    IR::RegOpnd* src2RegOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(src2RegOpnd, src2, instr);
    IR::AutoReuseOpnd autoReuseArrayOpnd;
    m_lowererMD.GenerateObjectTest(src2RegOpnd, instr, helperLabel);
    IR::RegOpnd* arrayOpnd = src2RegOpnd->Copy(instr->m_func)->AsRegOpnd();
    autoReuseArrayOpnd.Initialize(arrayOpnd, instr->m_func, false /* autoDelete */);
    // Accept the var-array, native-int-array, and native-float-array vtables;
    // anything else goes to the helper.
    IR::Opnd* vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptArray);
    InsertCompareBranch(
        IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrEq_A,
        isArrayLabel,
        instr);
    vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableNativeIntArray);
    InsertCompareBranch(
        IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrEq_A,
        isArrayLabel,
        instr);
    vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableNativeFloatArray);
    InsertCompareBranch(
        IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        helperLabel,
        instr);
    instr->InsertBefore(isArrayLabel);
    // The profile said no missing values; verify the flag at runtime and go to
    // the helper if the HasNoMissingValues bit is not set.
    InsertTestBranch(
        IR::IndirOpnd::New(src2RegOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
        IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, m_func, true),
        Js::OpCode::BrEq_A,
        helperLabel,
        instr);
    // Bounds check against the head segment's length: 0 <= index < length,
    // otherwise fall back to the helper.
    IR::AutoReuseOpnd autoReuseHeadSegmentOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentLengthOpnd;
    IR::IndirOpnd* indirOpnd = IR::IndirOpnd::New(src2RegOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, this->m_func);
    IR::RegOpnd* headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
    InsertMove(headSegmentOpnd, indirOpnd, instr);
    IR::Opnd* headSegmentLengthOpnd = IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
    autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
    InsertCompareBranch(
        src1Untagged,
        headSegmentLengthOpnd,
        Js::OpCode::BrGe_A,
        helperLabel,
        instr);
    InsertCompareBranch(
        src1Untagged,
        IR::IntConstOpnd::New(0, src1Untagged->GetType(), this->m_func),
        Js::OpCode::BrLt_A,
        helperLabel,
        instr);
    // All checks passed: the index is present, so the result is 'true'.
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    // The helper path falls through into the original instruction's lowering.
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
  17770. // Generate a fast path for the "in" operator to use the cache where the key may be a PropertyString or Symbol.
  17771. void Lowerer::GenerateFastObjectIsIn(IR::Instr * instr)
  17772. {
  17773. IR::RegOpnd* baseOpnd = GetRegOpnd(instr->GetSrc2(), instr, m_func, TyVar);
  17774. IR::RegOpnd* indexOpnd = GetRegOpnd(instr->GetSrc1(), instr, m_func, TyVar);
  17775. bool likelyStringIndex = indexOpnd->GetValueType().IsLikelyString();
  17776. bool likelySymbolIndex = indexOpnd->GetValueType().IsLikelySymbol();
  17777. if (!baseOpnd->GetValueType().IsLikelyObject() || !(likelyStringIndex || likelySymbolIndex))
  17778. {
  17779. return;
  17780. }
  17781. IR::LabelInstr* helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  17782. IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  17783. if (likelyStringIndex)
  17784. {
  17785. GeneratePropertyStringTest(indexOpnd, instr, helperLabel, false /*isStore*/);
  17786. const uint32 inlineCacheOffset = Js::PropertyString::GetOffsetOfLdElemInlineCache();
  17787. const uint32 hitRateOffset = Js::PropertyString::GetOffsetOfHitRate();
  17788. GenerateFastIsInSymbolOrStringIndex(instr, indexOpnd, baseOpnd, instr->GetDst(), inlineCacheOffset, hitRateOffset, helperLabel, doneLabel);
  17789. }
  17790. else
  17791. {
  17792. Assert(likelySymbolIndex);
  17793. GenerateSymbolTest(indexOpnd, instr, helperLabel);
  17794. const uint32 inlineCacheOffset = Js::JavascriptSymbol::GetOffsetOfLdElemInlineCache();
  17795. const uint32 hitRateOffset = Js::JavascriptSymbol::GetOffsetOfHitRate();
  17796. GenerateFastIsInSymbolOrStringIndex(instr, indexOpnd, baseOpnd, instr->GetDst(), inlineCacheOffset, hitRateOffset, helperLabel, doneLabel);
  17797. }
  17798. instr->InsertBefore(helperLabel);
  17799. instr->InsertAfter(doneLabel);
  17800. }
  17801. // Given an operand, either cast it or move it to a register
  17802. IR::RegOpnd * Lowerer::GetRegOpnd(IR::Opnd* opnd, IR::Instr* insertInstr, Func* func, IRType type)
  17803. {
  17804. if (opnd->IsRegOpnd())
  17805. {
  17806. return opnd->AsRegOpnd();
  17807. }
  17808. IR::RegOpnd *regOpnd = IR::RegOpnd::New(type, func);
  17809. InsertMove(regOpnd, opnd, insertInstr);
  17810. return regOpnd;
  17811. }
  17812. template <bool Saturate>
  17813. void Lowerer::GenerateTruncWithCheck(_In_ IR::Instr* instr)
  17814. {
  17815. Assert(instr->GetSrc1()->IsFloat());
  17816. if (instr->GetDst()->IsInt32() || instr->GetDst()->IsUInt32())
  17817. {
  17818. m_lowererMD.GenerateTruncWithCheck<Saturate>(instr);
  17819. }
  17820. else
  17821. {
  17822. Assert(instr->GetDst()->IsInt64());
  17823. LoadScriptContext(instr);
  17824. if (instr->GetSrc1()->IsFloat32())
  17825. {
  17826. m_lowererMD.LoadFloatHelperArgument(instr, instr->GetSrc1());
  17827. }
  17828. else
  17829. {
  17830. m_lowererMD.LoadDoubleHelperArgument(instr, instr->GetSrc1());
  17831. }
  17832. IR::JnHelperMethod helper;
  17833. if (Saturate)
  17834. {
  17835. IR::JnHelperMethod helperList[2][2] = { IR::HelperF32ToI64Sat, IR::HelperF32ToU64Sat, IR::HelperF64ToI64Sat ,IR::HelperF64ToU64Sat };
  17836. helper = helperList[instr->GetSrc1()->GetType() != TyFloat32][instr->GetDst()->GetType() == TyUint64];
  17837. }
  17838. else
  17839. {
  17840. IR::JnHelperMethod helperList[2][2] = { IR::HelperF32ToI64, IR::HelperF32ToU64, IR::HelperF64ToI64 ,IR::HelperF64ToU64 };
  17841. helper = helperList[instr->GetSrc1()->GetType() != TyFloat32][instr->GetDst()->GetType() == TyUint64];
  17842. }
  17843. instr->UnlinkSrc1();
  17844. this->m_lowererMD.ChangeToHelperCall(instr, helper);
  17845. }
  17846. }
  17847. void
  17848. Lowerer::RelocateCallDirectToHelperPath(IR::Instr* argoutInlineSpecialized, IR::LabelInstr* labelHelper)
  17849. {
  17850. IR::Opnd *linkOpnd = argoutInlineSpecialized->GetSrc2(); //ArgOut_A_InlineSpecialized src2; link to actual argouts.
  17851. argoutInlineSpecialized->Unlink();
  17852. labelHelper->InsertAfter(argoutInlineSpecialized);
  17853. while(linkOpnd->IsSymOpnd())
  17854. {
  17855. IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
  17856. StackSym *sym = src2->m_sym->AsStackSym();
  17857. Assert(sym->m_isSingleDef);
  17858. IR::Instr *argInstr = sym->m_instrDef;
  17859. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
  17860. argInstr->Unlink();
  17861. labelHelper->InsertAfter(argInstr);
  17862. linkOpnd = argInstr->GetSrc2();
  17863. }
  17864. // Move startcall
  17865. Assert(linkOpnd->IsRegOpnd());
  17866. StackSym *sym = linkOpnd->AsRegOpnd()->m_sym;
  17867. Assert(sym->m_isSingleDef);
  17868. IR::Instr *startCall = sym->m_instrDef;
  17869. Assert(startCall->m_opcode == Js::OpCode::StartCall);
  17870. startCall->Unlink();
  17871. labelHelper->InsertAfter(startCall);
  17872. }
  17873. bool
  17874. Lowerer::GenerateFastInlineStringCharCodeAt(IR::Instr * instr, Js::BuiltinFunction index)
  17875. {
  17876. Assert(instr->m_opcode == Js::OpCode::CallDirect);
  17877. //CallDirect src2
  17878. IR::Opnd * linkOpnd = instr->GetSrc2();
  17879. //ArgOut_A_InlineSpecialized
  17880. IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
  17881. IR::Opnd * argsOpnd[2] = {0};
  17882. bool result = instr->FetchOperands(argsOpnd, 2);
  17883. Assert(result);
  17884. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  17885. instr->InsertAfter(doneLabel);
  17886. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  17887. bool success = GenerateFastCharAt(index, instr->GetDst(), argsOpnd[0], argsOpnd[1],
  17888. instr, instr, labelHelper, doneLabel);
  17889. instr->InsertBefore(labelHelper);
  17890. if (!success)
  17891. {
  17892. return false;
  17893. }
  17894. InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
  17895. RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
  17896. return true;
  17897. }
  17898. void
  17899. Lowerer::GenerateCtz(IR::Instr* instr)
  17900. {
  17901. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsInt64());
  17902. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsInt64());
  17903. m_lowererMD.GenerateCtz(instr);
  17904. }
  17905. void
  17906. Lowerer::GeneratePopCnt(IR::Instr* instr)
  17907. {
  17908. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
  17909. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsUInt32() || instr->GetDst()->IsInt64());
  17910. m_lowererMD.GeneratePopCnt(instr);
  17911. }
  17912. void
  17913. Lowerer::GenerateFastInlineMathClz(IR::Instr* instr)
  17914. {
  17915. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsInt64());
  17916. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsInt64());
  17917. m_lowererMD.GenerateClz(instr);
  17918. }
  17919. void
  17920. Lowerer::GenerateFastInlineMathImul(IR::Instr* instr)
  17921. {
  17922. IR::Opnd* src1 = instr->GetSrc1();
  17923. IR::Opnd* src2 = instr->GetSrc2();
  17924. IR::Opnd* dst = instr->GetDst();
  17925. Assert(dst->IsInt32());
  17926. Assert(src1->IsInt32());
  17927. Assert(src2->IsInt32());
  17928. IR::Instr* imul = IR::Instr::New(LowererMD::MDImulOpcode, dst, src1, src2, instr->m_func);
  17929. instr->InsertBefore(imul);
  17930. LowererMD::Legalize(imul);
  17931. instr->Remove();
  17932. }
  17933. void
  17934. Lowerer::LowerReinterpretPrimitive(IR::Instr* instr)
  17935. {
  17936. Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
  17937. IR::Opnd* src1 = instr->GetSrc1();
  17938. IR::Opnd* dst = instr->GetDst();
  17939. Assert(dst->GetSize() == src1->GetSize());
  17940. Assert((dst->IsFloat32() && src1->IsInt32()) ||
  17941. (dst->IsInt32() && src1->IsFloat32()) ||
  17942. (dst->IsInt64() && src1->IsFloat64()) ||
  17943. (dst->IsFloat64() && src1->IsInt64()) );
  17944. m_lowererMD.EmitReinterpretPrimitive(dst, src1, instr);
  17945. instr->Remove();
  17946. }
  17947. void
  17948. Lowerer::GenerateFastInlineMathFround(IR::Instr* instr)
  17949. {
  17950. IR::Opnd* src1 = instr->GetSrc1();
  17951. IR::Opnd* dst = instr->GetDst();
  17952. Assert(dst->IsFloat());
  17953. Assert(src1->IsFloat());
  17954. // This function is supposed to convert a float to the closest float32 representation.
  17955. // However, it is a bit loose about types, which the ARM64 encoder takes issue with.
  17956. #ifdef _M_ARM64
  17957. LowererMD::GenerateFastInlineMathFround(instr);
  17958. #else
  17959. IR::Instr* fcvt64to32 = IR::Instr::New(LowererMD::MDConvertFloat64ToFloat32Opcode, dst, src1, instr->m_func);
  17960. instr->InsertBefore(fcvt64to32);
  17961. LowererMD::Legalize(fcvt64to32);
  17962. if (dst->IsFloat64())
  17963. {
  17964. IR::Instr* fcvt32to64 = IR::Instr::New(LowererMD::MDConvertFloat32ToFloat64Opcode, dst, dst, instr->m_func);
  17965. instr->InsertBefore(fcvt32to64);
  17966. LowererMD::Legalize(fcvt32to64);
  17967. }
  17968. instr->Remove();
  17969. #endif
  17970. return;
  17971. }
  17972. bool
  17973. Lowerer::GenerateFastInlineStringReplace(IR::Instr * instr)
  17974. {
  17975. Assert(instr->m_opcode == Js::OpCode::CallDirect);
  17976. //CallDirect src2
  17977. IR::Opnd * linkOpnd = instr->GetSrc2();
  17978. //ArgOut_A_InlineSpecialized
  17979. IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
  17980. IR::Opnd * argsOpnd[3] = {0};
  17981. bool result = instr->FetchOperands(argsOpnd, 3);
  17982. Assert(result);
  17983. AnalysisAssert(argsOpnd[0] && argsOpnd[1] && argsOpnd[2]);
  17984. if (!argsOpnd[0]->GetValueType().IsLikelyString()
  17985. || argsOpnd[1]->GetValueType().IsNotObject()
  17986. || !argsOpnd[2]->GetValueType().IsLikelyString())
  17987. {
  17988. return false;
  17989. }
  17990. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  17991. instr->InsertAfter(doneLabel);
  17992. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  17993. bool success = this->GenerateFastReplace(argsOpnd[0], argsOpnd[1], argsOpnd[2],
  17994. instr, instr, labelHelper, doneLabel);
  17995. instr->InsertBefore(labelHelper);
  17996. if (!success)
  17997. {
  17998. return false;
  17999. }
  18000. InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
  18001. RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
  18002. return true;
  18003. }
  18004. #ifdef ENABLE_DOM_FAST_PATH
  18005. /*
  18006. Lower the DOMFastPathGetter opcode
  18007. We have inliner generated bytecode:
  18008. (dst)helpArg1: ExtendArg_A (src1)thisObject (src2)null
  18009. (dst)helpArg2: ExtendArg_A (src1)funcObject (src2)helpArg1
  18010. method: DOMFastPathGetter (src1)HelperCall (src2)helpArg2
  18011. We'll convert it to a JavascriptFunction entry method call:
  18012. CALL Helper funcObject CallInfo(CallFlags_Value, 3) thisObj
  18013. */
// Lower a DOMFastPathGetter into a direct helper call. Walks the ExtendArg_A
// chain hanging off src2 to recover the function object and the 'this' object,
// then rewrites the instruction as: CALL helper(funcObj, callInfo, thisObj).
void
Lowerer::LowerFastInlineDOMFastPathGetter(IR::Instr* instr)
{
    // src1 holds the helper to call; src2 links to the ExtendArg_A chain.
    IR::Opnd* helperOpnd = instr->UnlinkSrc1();
    Assert(helperOpnd->IsHelperCallOpnd());
    IR::Opnd *linkOpnd = instr->UnlinkSrc2();
    Assert(linkOpnd->IsRegOpnd());

    // First ExtendArg_A in the chain carries the function object in its src1.
    IR::Instr* prevInstr = linkOpnd->AsRegOpnd()->m_sym->m_instrDef;
    Assert(prevInstr->m_opcode == Js::OpCode::ExtendArg_A);
    IR::Opnd* funcObj = prevInstr->GetSrc1();
    Assert(funcObj->IsRegOpnd());

    // If the Extended_arg was CSE'd across a loop or hoisted out of a loop,
    // adding a new reference down here might cause funcObj to now be liveOnBackEdge.
    // Use the addToLiveOnBackEdgeSyms bit vector to add it to a loop if we encounter one.
    // We'll clear it once we reach the Extended arg.
    this->addToLiveOnBackEdgeSyms->Set(funcObj->AsRegOpnd()->m_sym->m_id);

    // Second (and last) ExtendArg_A carries the 'this' object in its src1.
    Assert(prevInstr->GetSrc2() != nullptr);
    prevInstr = prevInstr->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;
    Assert(prevInstr->m_opcode == Js::OpCode::ExtendArg_A);
    IR::Opnd* thisObj = prevInstr->GetSrc1();
    Assert(prevInstr->GetSrc2() == nullptr);
    Assert(thisObj->IsRegOpnd());
    this->addToLiveOnBackEdgeSyms->Set(thisObj->AsRegOpnd()->m_sym->m_id);

    const auto info = Lowerer::MakeCallInfoConst(Js::CallFlags_Value, 1, m_func);

    // Helper arguments are loaded in reverse order: thisObj, callInfo, funcObj.
    m_lowererMD.LoadHelperArgument(instr, thisObj);
    m_lowererMD.LoadHelperArgument(instr, info);
    m_lowererMD.LoadHelperArgument(instr, funcObj);

    instr->m_opcode = Js::OpCode::Call;
    IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperOpnd->AsHelperCallOpnd()->m_fnHelper, 3, m_func);
    instr->SetSrc1(helperCallOpnd);

    m_lowererMD.LowerCall(instr, 3); // we have funcobj, callInfo, and this.
}
  18046. #endif
// Emit the inline fast path for Array.prototype.push (InlineArrayPush),
// falling back to a helper call for cases the fast path cannot handle, and to
// a bailout label for conventional native-array access violations.
void
Lowerer::GenerateFastInlineArrayPush(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::InlineArrayPush);

    IR::Opnd * baseOpnd = instr->GetSrc1();  // the array
    IR::Opnd * srcOpnd = instr->GetSrc2();   // the element to push

    // push() returns the new length; only materialize it when the dst is used.
    bool returnLength = false;
    if(instr->GetDst())
    {
        returnLength = true;
    }

    IR::LabelInstr * bailOutLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    // Don't generate fast path according to ShouldGenerateArrayFastPath()
    // AND don't generate fast path if the array is LikelyNative and the element is not specialized
    if(ShouldGenerateArrayFastPath(baseOpnd, false, false, false) &&
        !(baseOpnd->GetValueType().IsLikelyNativeArray() && srcOpnd->IsVar()))
    {
        GenerateFastPush(baseOpnd, srcOpnd, instr, instr, labelHelper, doneLabel, bailOutLabelHelper, returnLength);
        instr->InsertBefore(labelHelper);
        InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    }

    if(baseOpnd->GetValueType().IsLikelyNativeArray())
    {
        // Lower IR::BailOutConventionalNativeArrayAccessOnly here.
        LowerOneBailOutKind(instr, IR::BailOutConventionalNativeArrayAccessOnly, false, false);
        instr->InsertAfter(bailOutLabelHelper);
        InsertBranch(Js::OpCode::Br, doneLabel, bailOutLabelHelper);
    }

    GenerateHelperToArrayPushFastPath(instr, bailOutLabelHelper);
}
// Emit the inline fast path for Array.prototype.pop: type-check the array if
// its value type is not already proven, load and decrement the length, then
// let GenerateFastElemICommon load the popped element. Jumps to
// bailOutLabelHelper for an empty array or a failed array test, and to
// labelHelper when the element fast path cannot handle the access.
bool Lowerer::GenerateFastPop(IR::Opnd *baseOpndParam, IR::Instr *callInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel, IR::LabelInstr * bailOutLabelHelper)
{
    Assert(ShouldGenerateArrayFastPath(baseOpndParam, false, false, false));

    //     TEST baseOpnd, AtomTag                     -- check baseOpnd not tagged int
    //     JNE $helper
    //     CMP [baseOpnd], JavascriptArray::`vtable'  -- check baseOpnd isArray
    //     JNE $helper
    //     MOV r2, [baseOpnd + offset(length)]        -- Load array length

    IR::RegOpnd * baseOpnd = baseOpndParam->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
    ValueType arrValueType(baseOpndParam->GetValueType());
    IR::RegOpnd *arrayOpnd = baseOpnd;
    IR::RegOpnd *arrayLengthOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
    if(!arrValueType.IsAnyOptimizedArray())
    {
        // Not proven to be an array: emit the runtime array test, bailing out on failure.
        arrayOpnd = GenerateArrayTest(baseOpnd, bailOutLabelHelper, bailOutLabelHelper, callInstr, false, true);
        arrValueType = arrayOpnd->GetValueType().ToDefiniteObject().SetHasNoMissingValues(false);
    }
    else if(arrayOpnd->IsArrayRegOpnd())
    {
        // Reuse a length sym already tracked on the array opnd, if available.
        IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
        if(arrayRegOpnd->LengthSym())
        {
            arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), arrayRegOpnd->LengthSym()->GetType(), m_func);
            DebugOnly(arrayLengthOpnd->FreezeSymValue());
            autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        }
    }
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);

    // The length is decremented below, so work on a mutable copy.
    IR::AutoReuseOpnd autoReuseMutableArrayLengthOpnd;
    {
        IR::RegOpnd *const mutableArrayLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
        autoReuseMutableArrayLengthOpnd.Initialize(mutableArrayLengthOpnd, m_func);
        if(arrayLengthOpnd)
        {
            // mov mutableArrayLength, arrayLength
            InsertMove(mutableArrayLengthOpnd, arrayLengthOpnd, callInstr);
        }
        else
        {
            // MOV mutableArrayLength, [array + offset(length)] -- Load array length
            // We know this index is safe, so mark it as UInt32 to avoid unnecessary conversion/checks
            InsertMove(
                mutableArrayLengthOpnd,
                IR::IndirOpnd::New(
                    arrayOpnd,
                    Js::JavascriptArray::GetOffsetOfLength(),
                    mutableArrayLengthOpnd->GetType(),
                    this->m_func),
                callInstr);
        }
        arrayLengthOpnd = mutableArrayLengthOpnd;
    }

    // Empty array: nothing to pop, bail out.
    InsertCompareBranch(arrayLengthOpnd, IR::IntConstOpnd::New(0, TyUint32, this->m_func), Js::OpCode::BrEq_A, true, bailOutLabelHelper, callInstr);

    // length - 1 is the index of the element to pop.
    InsertSub(false, arrayLengthOpnd, arrayLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), callInstr);
    IR::IndirOpnd *arrayRef = IR::IndirOpnd::New(arrayOpnd, arrayLengthOpnd, TyVar, this->m_func);
    arrayRef->GetBaseOpnd()->SetValueType(arrValueType);

    // Array length is going to overflow, hence don't check for Array.length and Segment.length overflow.
    bool isTypedArrayElement, isStringIndex;
    IR::IndirOpnd *const indirOpnd =
        GenerateFastElemICommon(
            callInstr,
            false,
            arrayRef,
            labelHelper,
            labelHelper,
            nullptr,
            &isTypedArrayElement,
            &isStringIndex,
            nullptr,
            nullptr,
            nullptr /*pLabelSegmentLengthIncreased*/,
            true /*checkArrayLengthOverflow*/,
            true /* forceGenerateFastPath */,
            false/* = returnLength */,
            bailOutLabelHelper /* = bailOutLabelInstr*/);
    Assert(!isTypedArrayElement);
    Assert(indirOpnd);
    return true;
}
// Emit the inline fast path for pushing a single element: type-check the array
// if needed, load the length, let GenerateFastElemICommon validate/grow the
// segment (producing the new length when returnLength is true), then store the
// element. Jumps to labelHelper when the fast path cannot handle the store.
bool Lowerer::GenerateFastPush(IR::Opnd *baseOpndParam, IR::Opnd *src, IR::Instr *callInstr,
    IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel, IR::LabelInstr * bailOutLabelHelper, bool returnLength)
{
    Assert(ShouldGenerateArrayFastPath(baseOpndParam, false, false, false));

    //     TEST baseOpnd, AtomTag                     -- check baseOpnd not tagged int
    //     JNE $helper
    //     CMP [baseOpnd], JavascriptArray::`vtable'  -- check baseOpnd isArray
    //     JNE $helper
    //     MOV r2, [baseOpnd + offset(length)]        -- Load array length

    IR::RegOpnd * baseOpnd = baseOpndParam->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
    ValueType arrValueType(baseOpndParam->GetValueType());
    IR::RegOpnd *arrayOpnd = baseOpnd;
    IR::RegOpnd *arrayLengthOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
    if(!arrValueType.IsAnyOptimizedArray())
    {
        // Not proven to be an array: emit the runtime array test, going to the helper on failure.
        arrayOpnd = GenerateArrayTest(baseOpnd, labelHelper, labelHelper, insertInstr, false, true);
        arrValueType = arrayOpnd->GetValueType().ToDefiniteObject().SetHasNoMissingValues(false);
    }
    else if(arrayOpnd->IsArrayRegOpnd())
    {
        // Reuse a length sym already tracked on the array opnd, if available.
        IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
        if(arrayRegOpnd->LengthSym())
        {
            arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), arrayRegOpnd->LengthSym()->GetType(), m_func);
            DebugOnly(arrayLengthOpnd->FreezeSymValue());
            autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        }
    }
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);
    if(!arrayLengthOpnd)
    {
        // MOV arrayLength, [array + offset(length)] -- Load array length
        // We know this index is safe, so mark it as UInt32 to avoid unnecessary conversion/checks
        arrayLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
        autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        InsertMove(
            arrayLengthOpnd,
            IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), arrayLengthOpnd->GetType(), this->m_func),
            insertInstr);
    }
    // The store target is [array + length], i.e. one past the current last element.
    IR::IndirOpnd *arrayRef = IR::IndirOpnd::New(arrayOpnd, arrayLengthOpnd, TyVar, this->m_func);
    arrayRef->GetBaseOpnd()->SetValueType(arrValueType);

    if (returnLength && src->IsEqual(insertInstr->GetDst()))
    {
        // If the dst is same as the src, then dst is going to be overridden by GenerateFastElemICommon in process of updating the length.
        // Save it in a temp register.
        IR::RegOpnd *opnd = IR::RegOpnd::New(src->GetType(), this->m_func);
        InsertMove(opnd, src, insertInstr);
        src = opnd;
    }

    // Array length is going to overflow, hence don't check for Array.length and Segment.length overflow.
    bool isTypedArrayElement, isStringIndex;
    IR::IndirOpnd *const indirOpnd =
        GenerateFastElemICommon(
            insertInstr,
            true,
            arrayRef,
            labelHelper,
            labelHelper,
            nullptr,
            &isTypedArrayElement,
            &isStringIndex,
            nullptr,
            nullptr,
            nullptr /*pLabelSegmentLengthIncreased*/,
            false /*checkArrayLengthOverflow*/,
            true /* forceGenerateFastPath */,
            returnLength,
            bailOutLabelHelper);
    Assert(!isTypedArrayElement);
    Assert(indirOpnd);

    // MOV [r3 + r2], src -- store the new element (InsertMoveWithBarrier handles any required write barrier)
    InsertMoveWithBarrier(indirOpnd, src, insertInstr);
    return true;
}
// Emit the inline fast path for String.prototype.charAt / charCodeAt /
// codePointAt (selected by 'index'). Loads the character directly out of the
// string's flat buffer, jumping to labelHelper when the receiver is not a flat
// string, the index is not an int, or the index is out of range. Returns false
// when the fast path degenerates into an unconditional jump to the helper
// (constant negative index); true otherwise.
bool
Lowerer::GenerateFastCharAt(Js::BuiltinFunction index, IR::Opnd *dst, IR::Opnd *srcStr, IR::Opnd *srcIndex, IR::Instr *callInstr,
    IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
    //  if regSrcStr is not object, JMP $helper
    //  CMP [regSrcStr + offset(type)] , static string type   -- check base string type
    //  JNE $helper
    //  MOV r1, [regSrcStr + offset(m_pszValue)]
    //  TEST r1, r1
    //  JEQ $helper
    //  MOV r2, srcIndex
    //  If r2 is not int, JMP $helper
    //  Convert r2 to int
    //  CMP [regSrcStr + offsetof(length)], r2
    //  JBE $helper
    //  MOVZX r2, [r1 + r2 * 2]
    //  if (charAt)
    //      PUSH r1
    //      PUSH scriptContext
    //      CALL GetStringFromChar
    //      MOV dst, EAX
    //  else (charCodeAt)
    //      if (codePointAt)
    //          Lowerer.GenerateFastCodePointAt -- Common inline functions
    //      Convert r2 to Var
    //      MOV dst, r2
    bool isInt = false;
    bool isNotTaggedValue = false;
    // Use any tag information already proven for the receiver.
    if (srcStr->IsRegOpnd())
    {
        if (srcStr->AsRegOpnd()->IsTaggedInt())
        {
            isInt = true;
        }
        else if (srcStr->AsRegOpnd()->IsNotTaggedValue())
        {
            isNotTaggedValue = true;
        }
    }
    IR::RegOpnd *regSrcStr = GetRegOpnd(srcStr, insertInstr, m_func, TyVar);
    if (!isNotTaggedValue)
    {
        if (!isInt)
        {
            // Unknown tag: test at runtime and go to the helper for non-objects.
            m_lowererMD.GenerateObjectTest(regSrcStr, insertInstr, labelHelper);
        }
        else
        {
            // Receiver is known to be a tagged int, so the fast path can never apply.
            // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
            IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
            insertInstr->InsertBefore(fakeBr);
            InsertBranch(Js::OpCode::Br, labelHelper, insertInstr);
        }
    }
    // Bail out if index is a constant and is less than zero.
    if (srcIndex->IsAddrOpnd() && Js::TaggedInt::ToInt32(srcIndex->AsAddrOpnd()->m_address) < 0)
    {
        labelHelper->isOpHelper = false;
        InsertBranch(Js::OpCode::Br, labelHelper, insertInstr);
        return false;
    }
    GenerateStringTest(regSrcStr, insertInstr, labelHelper, nullptr, false);
    // r1 contains the value of the char16* pointer inside JavascriptString.
    // MOV r1, [regSrcStr + offset(m_pszValue)]
    IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(regSrcStr->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, this->m_func);
    InsertMove(r1, indirOpnd, insertInstr);
    // TEST r1, r1 -- Null pointer test (string not flattened yet)
    // JEQ $helper
    InsertTestBranch(r1, r1, Js::OpCode::BrEq_A, labelHelper, insertInstr);
    IR::RegOpnd *strLength = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(strLength, IR::IndirOpnd::New(regSrcStr, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func), insertInstr);
    IR::Opnd* indexOpnd = nullptr;
    if (srcIndex->IsAddrOpnd())
    {
        // Constant index: bounds check against the constant and address the char directly.
        uint32 indexValue = Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address);
        // CMP [regSrcStr + offsetof(length)], index
        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
        // JBE $helper
        InsertCompareBranch(strLength, IR::IntConstOpnd::New(indexValue, TyUint32, m_func), Js::OpCode::BrLe_A, true, labelHelper, insertInstr);
        // Mask off the sign so that poisoning will work for negative indices
#if TARGET_32
        uint32 maskedIndex = CONFIG_FLAG_RELEASE(PoisonStringLoad) ? (indexValue & INT32_MAX) : indexValue;
#else
        uint32 maskedIndex = indexValue;
#endif
        indirOpnd = IR::IndirOpnd::New(r1, maskedIndex * sizeof(char16), TyUint16, this->m_func);
        indexOpnd = IR::IntConstOpnd::New(maskedIndex, TyMachPtr, m_func);
    }
    else
    {
        // Dynamic index: untag (helper if not a tagged int), then bounds check.
        IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
        // MOV r2, srcIndex
        InsertMove(r2, srcIndex, insertInstr);
        r2 = GenerateUntagVar(r2, labelHelper, insertInstr);
        // CMP [regSrcStr + offsetof(length)], r2
        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
        // JBE $helper
        InsertCompareBranch(strLength, r2, Js::OpCode::BrLe_A, true, labelHelper, insertInstr);
#if TARGET_32
        if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
        {
            // Mask off the sign so that poisoning will work for negative indices
            InsertAnd(r2, r2, IR::IntConstOpnd::New(INT32_MAX, TyInt32, m_func), insertInstr);
        }
#endif
        if (r2->GetSize() != MachPtr)
        {
            r2 = r2->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
        }
        indexOpnd = r2;
        indirOpnd = IR::IndirOpnd::New(r1, r2, 1, TyUint16, this->m_func);
    }
    // Build the poison mask: (index - length) shifted right by (reg bits - 1).
    // The loaded char is ANDed with this mask below so that a speculative
    // out-of-bounds load yields zero rather than leaking data.
    IR::RegOpnd* maskOpnd = nullptr;
    if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
    {
        maskOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
        if (strLength->GetSize() != MachPtr)
        {
            strLength = strLength->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
        }
        InsertSub(false, maskOpnd, indexOpnd, strLength, insertInstr);
        InsertShift(Js::OpCode::Shr_A, false, maskOpnd, maskOpnd, IR::IntConstOpnd::New(MachRegInt * 8 - 1, TyInt8, m_func), insertInstr);
        // NOTE(review): this compares a byte size against the IRType enum value
        // TyUint32 rather than a size constant — confirm intent; UseWithNewType
        // to TyUint32 appears benign either way.
        if (maskOpnd->GetSize() != TyUint32)
        {
            maskOpnd = maskOpnd->UseWithNewType(TyUint32, this->m_func)->AsRegOpnd();
        }
    }
    // MOVZX charReg, [r1 + r2 * 2] -- this is the value of the char
    IR::RegOpnd *charReg = IR::RegOpnd::New(TyUint32, this->m_func);
    InsertMove(charReg, indirOpnd, insertInstr);
    if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
    {
        InsertAnd(charReg, charReg, maskOpnd, insertInstr);
    }
    if (index == Js::BuiltinFunction::JavascriptString_CharAt)
    {
        // charAt: produce a (usually cached) single-character string.
        IR::Opnd *resultOpnd;
        if (dst->IsEqual(srcStr))
        {
            // Don't clobber the source string; build the result in a temp.
            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        }
        else
        {
            resultOpnd = dst;
        }
        GenerateGetSingleCharString(charReg, resultOpnd, labelHelper, doneLabel, insertInstr, false);
    }
    else
    {
        Assert(index == Js::BuiltinFunction::JavascriptString_CharCodeAt || index == Js::BuiltinFunction::JavascriptString_CodePointAt);
        if (index == Js::BuiltinFunction::JavascriptString_CodePointAt)
        {
            // codePointAt additionally combines surrogate pairs.
            GenerateFastInlineStringCodePointAt(insertInstr, this->m_func, strLength, srcIndex, charReg, r1);
        }
        if (charReg->GetSize() != MachPtr)
        {
            charReg = charReg->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
        }
        // Tag the char code as a Var int and move it into the destination.
        m_lowererMD.GenerateInt32ToVarConversion(charReg, insertInstr);
        // MOV dst, charReg
        InsertMove(dst, charReg, insertInstr);
    }
    return true;
}
// Emit ArgOut_A_Dynamic instructions for the stack arguments of an inlinee
// (f.apply(x, arguments) inside an inlined function). The inlinee's actual
// argument count is known at JIT time, so each argument is loaded from the
// inlinee argv area. Returns the actual-argument count (including 'this') as a
// constant opnd.
IR::Opnd*
Lowerer::GenerateArgOutForInlineeStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
{
    Assert(callInstr->m_func->IsInlinee());
    Func *func = callInstr->m_func;
    uint32 actualCount = func->actualCount - 1; // don't count this pointer
    Assert(actualCount < Js::InlineeCallInfo::MaxInlineeArgoutCount);

    const auto firstRealArgStackSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
    this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr); // Start after this pointer
    IR::SymOpnd *firstArg = IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseFirstArg(firstArg, func);

    // argInOpnd = address of the first real (non-'this') argument.
    IR::RegOpnd* argInOpnd = IR::RegOpnd::New(TyMachReg, func);
    const IR::AutoReuseOpnd autoReuseArgInOpnd(argInOpnd, func);
    InsertLea(argInOpnd, firstArg, callInstr);

    IR::IndirOpnd *argIndirOpnd = nullptr;
    IR::Instr* argout = nullptr;

#if defined(_M_IX86)
    // Maintain alignment: push a dummy slot when the count is even.
    if ((actualCount & 1) == 0)
    {
        IR::Instr *alignPush = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
        alignPush->SetSrc1(IR::IntConstOpnd::New(1, TyInt32, this->m_func));
        callInstr->InsertBefore(alignPush);
    }
#endif

    // Emit the arguments from last to first.
    for(uint i = actualCount; i > 0; i--)
    {
        argIndirOpnd = IR::IndirOpnd::New(argInOpnd, (i - 1) * MachPtr, TyMachReg, func);
        argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
        argout->SetSrc1(argIndirOpnd);
        callInstr->InsertBefore(argout);
        // i represents the ith argument from actuals, which is i + 3 counting this, callInfo and function object
        this->m_lowererMD.LoadDynamicArgument(argout, i + 3);
    }
    return IR::IntConstOpnd::New(func->actualCount, TyMachReg, func);
}
// For AMD64 and ARM only.
// Emits the loop that walks a spread array's elements from the last down to
// the second, generating an ArgOut_A_Dynamic for each. The first element is
// emitted outside the loop into fixed argument slot 4 (after this, callInfo,
// and the function object).
void
Lowerer::LowerInlineSpreadArgOutLoopUsingRegisters(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
{
    Func *const func = callInstr->m_func;

    // Skip the loop entirely when only one element remains.
    IR::LabelInstr *oneArgLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertCompareBranch(indexOpnd, IR::IntConstOpnd::New(1, TyUint8, func), Js::OpCode::BrEq_A, true, oneArgLabel, callInstr);

    IR::LabelInstr *startLoopLabel = InsertLoopTopLabel(callInstr);
    Loop * loop = startLoopLabel->GetLoop();
    // Both the index and the element base pointer are read on every iteration.
    loop->regAlloc.liveOnBackEdgeSyms->Set(indexOpnd->m_sym->m_id);
    loop->regAlloc.liveOnBackEdgeSyms->Set(arrayElementsStartOpnd->m_sym->m_id);

    InsertSub(false, indexOpnd, indexOpnd, IR::IntConstOpnd::New(1, TyInt8, func), callInstr);

    IR::IndirOpnd *elemPtrOpnd = IR::IndirOpnd::New(arrayElementsStartOpnd, indexOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyMachPtr, func);

    // Generate argout for the (n+2)th arg (skipping function object + this)
    IR::Instr *argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);

    // X64 requires a reg opnd
    IR::RegOpnd *elemRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    Lowerer::InsertMove(elemRegOpnd, elemPtrOpnd, callInstr);
    argout->SetSrc1(elemRegOpnd);
    argout->SetSrc2(indexOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgumentUsingLength(argout);

    // Loop back until the index reaches 1.
    InsertCompareBranch(indexOpnd, IR::IntConstOpnd::New(1, TyUint8, func), Js::OpCode::BrNeq_A, true, startLoopLabel, callInstr);

    // Emit final argument into register 4 on AMD64 and ARM
    callInstr->InsertBefore(oneArgLabel);
    argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(elemPtrOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgument(argout, 4); // 4 to denote this is the 4th register after this, callinfo & function object
}
// Lower CallIDynamicSpread: a call f(...array) whose only argument is a spread
// array. Unlinks the ArgOut_A_SpreadArg / 'this' ArgOut / StartCall chain,
// loads the array length, expands the elements into dynamic ArgOuts via the MD
// spread loop, then lowers the remaining call as CallIDynamic.
IR::Instr *
Lowerer::LowerCallIDynamicSpread(IR::Instr *callInstr, ushort callFlags)
{
    Assert(callInstr->m_opcode == Js::OpCode::CallIDynamicSpread);

    IR::Instr * insertBeforeInstrForCFG = nullptr;
    Func *const func = callInstr->m_func;

    if (func->IsInlinee())
    {
        // Spread inside an inlinee is not supported; force a rejit without inline spread.
        throw Js::RejitException(RejitReason::InlineSpreadDisabled);
    }

    // src2 links to the ArgOut_A_SpreadArg that holds the spread array.
    IR::Instr *spreadArrayInstr = callInstr;
    IR::SymOpnd *argLinkOpnd = spreadArrayInstr->UnlinkSrc2()->AsSymOpnd();
    StackSym *argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);
    spreadArrayInstr = argLinkSym->m_instrDef;
    Assert(spreadArrayInstr->m_opcode == Js::OpCode::ArgOut_A_SpreadArg);
    IR::Opnd *arraySrcOpnd = spreadArrayInstr->UnlinkSrc1();
    IR::RegOpnd *arrayOpnd = GetRegOpnd(arraySrcOpnd, spreadArrayInstr, func, TyMachPtr);
    argLinkOpnd = spreadArrayInstr->UnlinkSrc2()->AsSymOpnd();

    // Walk the arg chain and find the start call
    argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // Nothing to be done for the function object, emit as normal
    IR::Instr *thisInstr = argLinkSym->m_instrDef;
    IR::RegOpnd *thisOpnd = thisInstr->UnlinkSrc2()->AsRegOpnd();
    argLinkSym = thisOpnd->m_sym->AsStackSym();
    thisInstr->Unlink();
    thisInstr->FreeDst();

    // Remove the array ArgOut instr and StartCall, they are no longer needed
    spreadArrayInstr->Unlink();
    spreadArrayInstr->FreeDst();

    IR::Instr *startCallInstr = argLinkSym->m_instrDef;
    Assert(startCallInstr->m_opcode == Js::OpCode::StartCall);
    insertBeforeInstrForCFG = startCallInstr->GetNextRealInstr();
    startCallInstr->Remove();

    // argsLength = array.length
    IR::RegOpnd *argsLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::IndirOpnd *arrayLengthPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, func);
    Lowerer::InsertMove(argsLengthOpnd, arrayLengthPtrOpnd, callInstr);

    // Don't bother expanding args if there are zero
    IR::LabelInstr *zeroArgsLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertCompareBranch(argsLengthOpnd, IR::IntConstOpnd::New(0, TyInt8, func), Js::OpCode::BrEq_A, true, zeroArgsLabel, callInstr);

    IR::RegOpnd *indexOpnd = IR::RegOpnd::New(TyUint32, func);
    Lowerer::InsertMove(indexOpnd, argsLengthOpnd, callInstr);

    // Get the array head offset and compute the address of the first element
    IR::IndirOpnd *arrayHeadPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, func);
    IR::RegOpnd *arrayElementsStartOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertAdd(false, arrayElementsStartOpnd, arrayHeadPtrOpnd, IR::IntConstOpnd::New(offsetof(Js::SparseArraySegment<Js::Var>, elements), TyUint8, func), callInstr);

    this->m_lowererMD.LowerInlineSpreadArgOutLoop(callInstr, indexOpnd, arrayElementsStartOpnd);

    // Resume if we have zero args
    callInstr->InsertBefore(zeroArgsLabel);

    // Lower call
    callInstr->m_opcode = Js::OpCode::CallIDynamic;
    callInstr = m_lowererMD.LowerCallIDynamic(callInstr, thisInstr, argsLengthOpnd, callFlags, insertBeforeInstrForCFG);
    return callInstr;
}
// Lower CallIDynamic: a call whose arguments come from the stack 'arguments'
// object (f.apply(x, arguments) pattern). Requires stack args to be available;
// walks the ArgOut chain ('this' ArgOut_A_Dynamic, ArgOut_A_FromStackArgs,
// StartCall), expands the stack args, and delegates final lowering to the MD
// lowerer.
IR::Instr *
Lowerer::LowerCallIDynamic(IR::Instr * callInstr, ushort callFlags)
{
    if (!this->m_func->GetHasStackArgs())
    {
        // Cannot expand the arguments without stack args; rejit with apply inlining disabled.
        throw Js::RejitException(RejitReason::InlineApplyDisabled);
    }

    IR::Instr * insertBeforeInstrForCFG = nullptr;

    // Lower args and look for StartCall
    IR::Instr * argInstr = callInstr;
    IR::SymOpnd * argLinkOpnd = argInstr->UnlinkSrc2()->AsSymOpnd();
    StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // First link: the ArgOut_A_Dynamic passing 'this'; keep it for the MD lowerer.
    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
    IR::Instr* saveThisArgOutInstr = argInstr;
    saveThisArgOutInstr->Unlink();
    saveThisArgOutInstr->FreeDst();

    argLinkOpnd = argInstr->UnlinkSrc2()->AsSymOpnd();
    argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // Second link: ArgOut_A_FromStackArgs -- expand the stack arguments here.
    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
    IR::Opnd* argsLength = m_lowererMD.GenerateArgOutForStackArgs(callInstr, argInstr);

    IR::RegOpnd* startCallDstOpnd = argInstr->UnlinkSrc2()->AsRegOpnd();
    argLinkSym = startCallDstOpnd->m_sym->AsStackSym();
    startCallDstOpnd->Free(this->m_func);
    argInstr->Remove(); // Remove ArgOut_A_FromStackArgs

    // Final link: the StartCall, no longer needed once the args are expanded.
    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::StartCall);
    insertBeforeInstrForCFG = argInstr->GetNextRealInstr();
    argInstr->Remove(); // Remove start call

    return m_lowererMD.LowerCallIDynamic(callInstr, saveThisArgOutInstr, argsLength, callFlags, insertBeforeInstrForCFG);
}
  18562. //This is only for x64 & ARM.
  18563. IR::Opnd*
  18564. Lowerer::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
  18565. {
  18566. // For architectures were we only pass 4 parameters in registers, the
  18567. // generated code looks something like this:
  18568. // s25.var = LdLen_A s4.var
  18569. // s26.var = Ld_A s25.var
  18570. // BrEq_I4 $L3, s25.var,0 // If we have no further arguments to pass, don't pass them
  18571. // $L2:
  18572. // BrEq_I4 $L4, s25.var,1 // Loop through the rest of the arguments, putting them on the stack
  18573. // s25.var = SUB_I4 s25.var, 0x1
  18574. // s10.var = LdElemI_A [s4.var+s25.var].var
  18575. // ArgOut_A_Dynamic s10.var, s25.var
  18576. // Br $L2
  18577. // $L4:
  18578. // s25.var = LdImm 0 // set s25 to 0, since it'll be 1 on the way into this block
  18579. // s10.var = LdElemI_A [s4.var + 0 * MachReg].var // The last one has to be put into argslot 4, since this is likely a register, not a stack location.
  18580. // ArgOut_A_Dynamic s10.var, 4
  18581. // $L3:
  18582. //
  18583. // Generalizing this for more register-passed parameters gives us code
  18584. // something like this:
  18585. // s25.var = LdLen_A s4.var
  18586. // s26.var = Ld_A s25.var
  18587. // BrLe_I4 $L3, s25.var,0 // If we have no further arguments to pass, don't pass them
  18588. // $L2:
  18589. // BrLe_I4 $L4, s25.var,INT_REG_COUNT-3 // Loop through the rest of the arguments up to the number passed in registers, putting them on the stack
  18590. // s25.var = SUB_I4 s25.var, 0x1
  18591. // s10.var = LdElemI_A [s4.var+s25.var].var
  18592. // ArgOut_A_Dynamic s10.var, s25.var
  18593. // Br $L2
  18594. // $L4:
  18595. // foreach of the remaining ones, N going down from (the number we can pass in regs -1) to 1 (0 omitted as we know that it'll be at least one register argument):
  18596. // BrEq_I4 $L__N, s25.var, N
  18597. // end foreach
  18598. // foreach of the remaining ones, N going down from (the number we can pass in regs -1) to 0:
  18599. // $L__N:
  18600. // s10.var = LdElemI_A [s4.var + N * MachReg].var // The last one has to be put into argslot 4, since this is likely a register, not a stack location.
  18601. // ArgOut_A_Dynamic s10.var, N+3
  18602. // end foreach
  18603. // $L3:
  18604. #if defined(_M_IX86)
  18605. // We get a compilation error on x86 due to assigning a negative to a uint
  18606. // TODO: don't even define this function on x86 - we Assert(false) anyway there.
  18607. // Alternatively, don't define when INT_ARG_REG_COUNT - 4 < 0
  18608. AssertOrFailFast(false);
  18609. return nullptr;
  18610. #else
  18611. Assert(stackArgsInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
  18612. Assert(callInstr->m_opcode == Js::OpCode::CallIDynamic);
  18613. this->m_lowererMD.GenerateFunctionObjectTest(callInstr, callInstr->GetSrc1()->AsRegOpnd(), false);
  18614. if (callInstr->m_func->IsInlinee())
  18615. {
  18616. return this->GenerateArgOutForInlineeStackArgs(callInstr, stackArgsInstr);
  18617. }
  18618. Func *func = callInstr->m_func;
  18619. IR::RegOpnd* stackArgs = stackArgsInstr->GetSrc1()->AsRegOpnd();
  18620. IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyMachReg, func);
  18621. const IR::AutoReuseOpnd autoReuseLdLenDstOpnd(ldLenDstOpnd, func);
  18622. IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd ,stackArgs, func);
  18623. ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); /*LdLen_A works only on stack arguments*/
  18624. callInstr->InsertBefore(ldLen);
  18625. GenerateFastRealStackArgumentsLdLen(ldLen);
  18626. IR::Instr* saveLenInstr = IR::Instr::New(Js::OpCode::MOV, IR::RegOpnd::New(TyMachReg, func), ldLenDstOpnd, func);
  18627. saveLenInstr->GetDst()->SetValueType(ValueType::GetTaggedInt());
  18628. callInstr->InsertBefore(saveLenInstr);
  18629. IR::LabelInstr* doneArgs = IR::LabelInstr::New(Js::OpCode::Label, func);
  18630. IR::Instr* branchDoneArgs = IR::BranchInstr::New(Js::OpCode::BrEq_I4, doneArgs, ldLenDstOpnd, IR::IntConstOpnd::New(0, TyInt8, func),func);
  18631. callInstr->InsertBefore(branchDoneArgs);
  18632. this->m_lowererMD.EmitInt4Instr(branchDoneArgs);
  18633. IR::LabelInstr* startLoop = InsertLoopTopLabel(callInstr);
  18634. Loop * loop = startLoop->GetLoop();
  18635. IR::LabelInstr* endLoop = IR::LabelInstr::New(Js::OpCode::Label, func);
  18636. IR::Instr* branchOutOfLoop = IR::BranchInstr::New(Js::OpCode::BrLe_I4, endLoop, ldLenDstOpnd, IR::IntConstOpnd::New(INT_ARG_REG_COUNT - 3, TyInt8, func),func);
  18637. callInstr->InsertBefore(branchOutOfLoop);
  18638. this->m_lowererMD.EmitInt4Instr(branchOutOfLoop);
  18639. IR::Instr* subInstr = IR::Instr::New(Js::OpCode::Sub_I4, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyMachReg, func),func);
  18640. callInstr->InsertBefore(subInstr);
  18641. this->m_lowererMD.EmitInt4Instr(subInstr);
  18642. IR::IndirOpnd *nthArgument = IR::IndirOpnd::New(stackArgs, ldLenDstOpnd, TyMachReg, func);
  18643. IR::RegOpnd* ldElemDstOpnd = IR::RegOpnd::New(TyMachReg,func);
  18644. const IR::AutoReuseOpnd autoReuseldElemDstOpnd(ldElemDstOpnd, func);
  18645. IR::Instr* ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
  18646. callInstr->InsertBefore(ldElem);
  18647. GenerateFastStackArgumentsLdElemI(ldElem);
  18648. IR::Instr* argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
  18649. argout->SetSrc1(ldElemDstOpnd);
  18650. argout->SetSrc2(ldLenDstOpnd);
  18651. callInstr->InsertBefore(argout);
  18652. this->m_lowererMD.LoadDynamicArgumentUsingLength(argout);
  18653. IR::BranchInstr *tailBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, startLoop, func);
  18654. callInstr->InsertBefore(tailBranch);
  18655. callInstr->InsertBefore(endLoop);
  18656. loop->regAlloc.liveOnBackEdgeSyms->Set(ldLenDstOpnd->m_sym->m_id);
  18657. // Note: This loop iteratively adds instructions in two locations; in the block
  18658. // of branches that jump to the "load elements to argOuts" instructions, and in
// the block of load-elements-to-argOuts instructions themselves.
  18660. // 4 to denote this is 4th register after this, callinfo & function object
  18661. // INT_ARG_REG_COUNT is the number of parameters passed in int regs
  18662. uint current_reg_pass = INT_ARG_REG_COUNT - 4;
  18663. do
  18664. {
  18665. // If we're on this pass we know we have to do at least one of these, so skip
  18666. // the branch if we're on the last one.
  18667. if (current_reg_pass != INT_ARG_REG_COUNT - 4)
  18668. {
  18669. IR::LabelInstr* loadBlockLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  18670. IR::Instr* branchToBlock = IR::BranchInstr::New(Js::OpCode::BrEq_I4, loadBlockLabel, ldLenDstOpnd, IR::IntConstOpnd::New(current_reg_pass + 1, TyInt8, func), func);
  18671. endLoop->InsertAfter(branchToBlock);
  18672. callInstr->InsertBefore(loadBlockLabel);
  18673. }
  18674. // TODO: We can further optimize this with a GenerateFastStackArgumentsLdElemI that can
  18675. // handle us passing along constant argument references and encode them into the offset
  18676. // instead of having to use an IndirOpnd; this would allow us to save a few bytes here,
  18677. // and reduce register pressure a hair
  18678. // stemp.var = LdImm current_reg_pass
  18679. IR::RegOpnd* localTemp = IR::RegOpnd::New(TyInt32, func);
  18680. // We need to make it a tagged int because GenerateFastStackArgumentsLdElemI asserts if
  18681. // it is not.
  18682. localTemp->SetValueType(ValueType::GetTaggedInt());
  18683. const IR::AutoReuseOpnd autoReuseldElemDstOpnd3(localTemp, func);
  18684. this->InsertMove(localTemp, IR::IntConstOpnd::New(current_reg_pass, TyInt8, func, true), callInstr);
  18685. // sTemp = LdElem_I [s4.var + current_reg_pass (aka stemp.var) ]
  18686. nthArgument = IR::IndirOpnd::New(stackArgs, localTemp, TyMachReg, func);
  18687. ldElemDstOpnd = IR::RegOpnd::New(TyMachReg, func);
  18688. const IR::AutoReuseOpnd autoReuseldElemDstOpnd2(ldElemDstOpnd, func);
  18689. ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
  18690. callInstr->InsertBefore(ldElem);
  18691. GenerateFastStackArgumentsLdElemI(ldElem);
  18692. argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
  18693. argout->SetSrc1(ldElemDstOpnd);
  18694. callInstr->InsertBefore(argout);
  18695. this->m_lowererMD.LoadDynamicArgument(argout, current_reg_pass + 4);
  18696. }
  18697. while (current_reg_pass-- != 0);
  18698. callInstr->InsertBefore(doneArgs);
  18699. /*return the length which will be used for callInfo generations & stack allocation*/
  18700. return saveLenInstr->GetDst()->AsRegOpnd();
  18701. #endif
  18702. }
void
Lowerer::GenerateLoadStackArgumentByIndex(IR::Opnd *dst, IR::RegOpnd *indexOpnd, IR::Instr *instr, int32 offset, Func *func)
{
    // Emits (before 'instr') a load of an actual argument from the caller's frame,
    // indexed dynamically by a register:
    //     dst = [frame pointer + (GetFormalParamOffset() + offset + indexOpnd) * MachPtr]
    //
    // dst       - destination operand that receives the argument
    // indexOpnd - register holding the argument index
    // instr     - insertion point; the MOV is inserted before it
    // offset    - extra slot displacement added to the formal-param offset
    //             (callers pass indir offset + 1 to skip 'this')
    // func      - function whose frame pointer operand is used

    // Load argument set dst = [ebp + index].
    IR::RegOpnd *ebpOpnd = IR::Opnd::CreateFramePointerOpnd(func);
    IR::IndirOpnd *argIndirOpnd = nullptr;

    // The stack looks like this:
    //     [new.target or FrameDisplay] <== EBP + formalParamOffset (4) + callInfo.Count
    //     arguments[n]                 <== EBP + formalParamOffset (4) + n
    //     ...
    //     arguments[1]                 <== EBP + formalParamOffset (4) + 2
    //     arguments[0]                 <== EBP + formalParamOffset (4) + 1
    //     this or new.target           <== EBP + formalParamOffset (4)
    //     callinfo
    //     function object
    //     return addr
    //     EBP-> EBP chain

    // Actual arguments offset is LowererMD::GetFormalParamOffset() + 1 (this)
    int32 actualOffset = GetFormalParamOffset() + offset;
    Assert(GetFormalParamOffset() == 4);

    // Scale the register index by the default indir scale (machine pointer size)
    // and fold the constant part of the displacement, pre-scaled, into the indir.
    const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
    argIndirOpnd = IR::IndirOpnd::New(ebpOpnd, indexOpnd, indirScale, TyMachReg, this->m_func);
    argIndirOpnd->SetOffset(actualOffset << indirScale);

    Lowerer::InsertMove(dst, argIndirOpnd, instr);
}
// This function assumes there is a stackargs bailout and that the index is always in range.
bool
Lowerer::GenerateFastStackArgumentsLdElemI(IR::Instr* ldElem)
{
    // Lowers an LdElemI_A on the real (non-heap) stack arguments object into a
    // direct load from the caller's frame:
    //     MOV dst, ebp [(valueOpnd + 5) *4] // 5 for the stack layout
    // For inlinees, the load reads the inlinee's argument slots instead.
    // 'ldElem' is removed once the load is emitted; always returns false
    // (no helper path is generated).
    IR::IndirOpnd *indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();

    // Now load the index and check if it is an integer.
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    Assert (indexOpnd && indexOpnd->IsTaggedInt());

    if(ldElem->m_func->IsInlinee())
    {
        // Inlinee: address the inlinee's argv slots rather than the physical frame.
        IR::IndirOpnd *argIndirOpnd = GetArgsIndirOpndForInlinee(ldElem, indexOpnd);
        Lowerer::InsertMove(ldElem->GetDst(), argIndirOpnd, ldElem);
    }
    else
    {
        GenerateLoadStackArgumentByIndex(ldElem->GetDst(), indexOpnd, ldElem, indirOpnd->GetOffset() + 1, m_func); // +1 to offset 'this'
    }

    ldElem->Remove();
    return false;
}
IR::IndirOpnd*
Lowerer::GetArgsIndirOpndForInlinee(IR::Instr* ldElem, IR::Opnd* valueOpnd)
{
    // Builds an indir operand addressing an inlinee's actual argument:
    //     base = LEA of the inlinee's argv slot, advanced past 'this'
    //     returns [base + const * MachPtr] or [base + valueOpnd * scale]
    // depending on whether the index ('valueOpnd') is a constant or a register.
    // Instructions needed to form the base are inserted before 'ldElem'.
    Assert(ldElem->m_func->IsInlinee());
    IR::IndirOpnd* argIndirOpnd = nullptr;

    // Address of argument after 'this'
    const auto firstRealArgStackSym = ldElem->m_func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
    this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr); //Start after this pointer
    IR::SymOpnd *firstArg = IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, ldElem->m_func);
    const IR::AutoReuseOpnd autoReuseFirstArg(firstArg, m_func);

    // Materialize the address of the first real argument in a register.
    IR::RegOpnd *const baseOpnd = IR::RegOpnd::New(TyMachReg, ldElem->m_func);
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
    InsertLea(baseOpnd, firstArg, ldElem);

    if (valueOpnd->IsIntConstOpnd())
    {
        // Constant index: fold index * MachPtr into the indir's displacement.
        IntConstType offset = valueOpnd->AsIntConstOpnd()->GetValue() * MachPtr;
        // TODO: Assert(Math::FitsInDWord(offset));
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, ldElem->m_func);
    }
    else
    {
        // Register index: use scaled-index addressing.
        Assert(valueOpnd->IsRegOpnd());
        const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, valueOpnd->AsRegOpnd(), indirScale, TyMachReg, ldElem->m_func);
    }
    return argIndirOpnd;
}
IR::IndirOpnd*
Lowerer::GetArgsIndirOpndForTopFunction(IR::Instr* ldElem, IR::Opnd* valueOpnd)
{
    // Builds an indir operand addressing an actual argument of the top-level
    // (non-inlined) function. The base is the frame pointer — or, for coroutines,
    // the generator's args pointer loaded before 'ldElem'. 'valueOpnd' is either a
    // constant or a register index into the actuals.

    // Load argument set dst = [ebp + index] (or grab from the generator object if m_func is a generator function).
    IR::RegOpnd *baseOpnd = m_func->GetJITFunctionBody()->IsCoroutine() ? LoadGeneratorArgsPtr(ldElem) : IR::Opnd::CreateFramePointerOpnd(m_func);
    IR::IndirOpnd* argIndirOpnd = nullptr;

    // The stack looks like this:
    //     ...
    //     arguments[1]
    //     arguments[0]
    //     this
    //     callinfo
    //     function object
    //     return addr
    //     EBP-> EBP chain

    // Actual arguments offset is LowererMD::GetFormalParamOffset() + 1 (this);
    // the generator args pointer already points past the frame, so only skip 'this' there.
    uint16 actualOffset = m_func->GetJITFunctionBody()->IsCoroutine() ? 1 : GetFormalParamOffset() + 1; //5
    Assert(actualOffset == 5 || m_func->GetJITFunctionBody()->IsGenerator());

    if (valueOpnd->IsIntConstOpnd())
    {
        // Constant index: fold (index + actualOffset) * MachPtr into the displacement.
        IntConstType offset = (valueOpnd->AsIntConstOpnd()->GetValue() + actualOffset) * MachPtr;
        // TODO: Assert(Math::FitsInDWord(offset));
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, this->m_func);
    }
    else
    {
        const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd->AsRegOpnd(), valueOpnd->AsRegOpnd(), indirScale, TyMachReg, this->m_func);
        // Need to offset valueOpnd by 5. Instead of changing valueOpnd, we can just add an offset to the indir. Changing
        // valueOpnd requires creation of a temp sym (if it's not already a temp) so that the value of the sym that
        // valueOpnd represents is not changed.
        argIndirOpnd->SetOffset(actualOffset << indirScale);
    }
    return argIndirOpnd;
}
void
Lowerer::GenerateCheckForArgumentsLength(IR::Instr* ldElem, IR::LabelInstr* labelCreateHeapArgs, IR::Opnd* actualParamOpnd, IR::Opnd* valueOpnd, Js::OpCode opcode)
{
    // Emits a bounds check of a dynamic arguments index against the actual count,
    // branching to 'labelCreateHeapArgs' (the heap-arguments/bailout path) when the
    // index is out of range. 'opcode' selects the branch condition so callers can
    // pass the two operands in either order (e.g. BrLe_A vs BrGe_A).

    // Check if index < nr_actuals.
    InsertCompare(actualParamOpnd, valueOpnd, ldElem);
    // Jump to helper if index >= nr_actuals.
    // Do an unsigned check here so that a negative index will also fail.
    // (GenerateLdValueFromCheckedIndexOpnd does not guarantee positive index on x86.)
    InsertBranch(opcode, true, labelCreateHeapArgs, ldElem);
}
bool
Lowerer::GenerateFastArgumentsLdElemI(IR::Instr* ldElem, IR::LabelInstr *labelFallThru)
{
    // Fast path for arguments[i] under the stack-arguments optimization (heap
    // Arguments object not yet created). Emitted shape:
    //
    //   ---GenerateSmIntTest
    //   ---GenerateLdValueFromCheckedIndexOpnd
    //   ---LoadInputParamCount
    //   CMP actualParamOpnd, valueOpnd // Compare between the actual count & the index count (say i in arguments[i])
    //   JLE $labelCreateHeapArgs
    //   MOV dst, ebp [(valueOpnd + 5) *4] // 5 for the stack layout
    //   JMP $fallthrough
    //
    //  labelCreateHeapArgs:
    //   ---Bail out to create Heap Arguments object
    //
    // Returns true when the fast path was emitted; if no fast path is possible,
    // throws a RejitException to recompile with the stack-arg optimization disabled.
    Assert(ldElem->DoStackArgsOpt());

    IR::IndirOpnd *indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
    bool isInlinee = ldElem->m_func->IsInlinee();
    Func *func = ldElem->m_func;
    IR::LabelInstr *labelCreateHeapArgs = IR::LabelInstr::New(Js::OpCode::Label, func, true);

    // Now load the index and check if it is an integer.
    bool emittedFastPath = false;
    bool isNotInt = false;
    IntConstType value = 0;
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    IR::Opnd *valueOpnd = nullptr;
    IR::Opnd *actualParamOpnd = nullptr;

    bool hasIntConstIndex = indirOpnd->TryGetIntConstIndexValue(true, &value, &isNotInt);

    if (isNotInt || (isInlinee && hasIntConstIndex && value >= (ldElem->m_func->actualCount - 1)))
    {
        // Outside the range of actuals, skip
    }
    else if (labelFallThru != nullptr && !(hasIntConstIndex && value < 0)) // if index is not a negative int constant
    {
        if (isInlinee)
        {
            // Inlinee actual count is known at JIT time (-1 excludes 'this').
            actualParamOpnd = IR::IntConstOpnd::New(ldElem->m_func->actualCount - 1, TyInt32, func);
        }
        else
        {
            // Load actuals count, LoadHeapArguments will reuse the generated instructions here
            IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldElem, -1 /* don't include 'this' while counting actuals. */);
            actualParamOpnd = loadInputParamCountInstr->GetDst()->UseWithNewType(TyInt32, this->m_func);
        }

        if (hasIntConstIndex)
        {
            // Constant index
            valueOpnd = IR::IntConstOpnd::New(value, TyInt32, func);
        }
        else
        {
            // Load valueOpnd from the index
            valueOpnd =
                m_lowererMD.LoadNonnegativeIndex(
                    indexOpnd,
                    (
#if INT32VAR
                        indexOpnd->GetType() == TyUint32
#else
                        // On 32-bit platforms, skip the negative check since for now, the unsigned upper bound check covers it
                        true
#endif
                    ),
                    labelCreateHeapArgs,
                    labelCreateHeapArgs,
                    ldElem);
        }

        if (isInlinee)
        {
            if (!hasIntConstIndex)
            {
                // Runtime check to make sure the index is within the arguments.length range.
                // (Operand order is swapped vs. the non-inlinee case, hence BrGe vs BrLe.)
                GenerateCheckForArgumentsLength(ldElem, labelCreateHeapArgs, valueOpnd, actualParamOpnd, Js::OpCode::BrGe_A);
            }
        }
        else
        {
            GenerateCheckForArgumentsLength(ldElem, labelCreateHeapArgs, actualParamOpnd, valueOpnd, Js::OpCode::BrLe_A);
        }

        IR::Opnd *argIndirOpnd = nullptr;
        if (isInlinee)
        {
            argIndirOpnd = GetArgsIndirOpndForInlinee(ldElem, valueOpnd);
        }
        else
        {
            argIndirOpnd = GetArgsIndirOpndForTopFunction(ldElem, valueOpnd);
        }

        Lowerer::InsertMove(ldElem->GetDst(), argIndirOpnd, ldElem);

        // JMP $done
        InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);

        // $labelCreateHeapArgs:
        ldElem->InsertBefore(labelCreateHeapArgs);
        emittedFastPath = true;
    }

    if (!emittedFastPath)
    {
        // No fast path could be emitted: force a rejit without the stack-arg opt
        // rather than falling through to incorrect code.
        throw Js::RejitException(RejitReason::DisableStackArgOpt);
    }
    return emittedFastPath;
}
bool
Lowerer::GenerateFastRealStackArgumentsLdLen(IR::Instr *ldLen)
{
    // Lowers an arguments-length load (raw, untagged count) when the real stack
    // arguments are used: for inlinees the count is a JIT-time constant; otherwise
    // it is read from the frame's callinfo. Removes 'ldLen' after emitting the
    // replacement; always returns false (no helper path).
    if(ldLen->m_func->IsInlinee())
    {
        // Get the length of the arguments (-1 excludes the 'this' pointer).
        Lowerer::InsertMove(ldLen->GetDst(),
            IR::IntConstOpnd::New(ldLen->m_func->actualCount - 1, TyUint32, ldLen->m_func),
            ldLen);
    }
    else
    {
        IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldLen, -1);
        IR::RegOpnd *actualCountOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
        Lowerer::InsertMove(ldLen->GetDst(), actualCountOpnd, ldLen);
    }
    ldLen->Remove();
    return false;
}
bool
Lowerer::GenerateFastArgumentsLdLen(IR::Instr *ldLen, IR::LabelInstr* labelFallThru)
{
    // Fast path for arguments.length under the stack-args optimization; the result
    // is a tagged-int Var (unlike GenerateFastRealStackArgumentsLdLen, which
    // produces a raw count). Returns true: 'ldLen' stays in place for the caller
    // to finish lowering the helper path.
    //
    //   TEST argslot, argslot // Test if the arguments slot is zero
    //   JNE $helper
    //   actualCountOpnd <- LoadInputParamCount fastpath
    //   SHL actualCountOpnd, actualCountOpnd, 1 // Left shift for tagging
    //   INC actualCountOpnd // Tagging
    //   MOV dst, actualCountOpnd
    //   JMP $fallthrough
    // $helper:
    Assert(ldLen->DoStackArgsOpt());
    if(ldLen->m_func->IsInlinee())
    {
        // Get the length of the arguments as a pre-tagged constant Var.
        Lowerer::InsertMove(ldLen->GetDst(),
            IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(ldLen->m_func->actualCount - 1), IR::AddrOpndKindConstantVar, ldLen->m_func), // -1 to exclude this pointer
            ldLen);
    }
    else
    {
        IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldLen, -1);
        IR::RegOpnd *actualCountOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
        // Tag the raw count as an int Var before storing it to the destination.
        this->m_lowererMD.GenerateInt32ToVarConversion(actualCountOpnd, ldLen);
        Lowerer::InsertMove(ldLen->GetDst(), actualCountOpnd, ldLen);
    }
    return true;
}
  18968. IR::RegOpnd*
  18969. Lowerer::GenerateFunctionTypeFromFixedFunctionObject(IR::Instr *insertInstrPt, IR::Opnd* functionObjOpnd)
  18970. {
  18971. IR::RegOpnd * functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  18972. IR::Opnd *functionTypeOpnd = nullptr;
  18973. if(functionObjOpnd->IsAddrOpnd())
  18974. {
  18975. IR::AddrOpnd* functionObjAddrOpnd = functionObjOpnd->AsAddrOpnd();
  18976. // functionTypeRegOpnd = MOV [fixed function address + type offset]
  18977. functionObjAddrOpnd->m_address;
  18978. functionTypeOpnd = IR::MemRefOpnd::New((void *)((intptr_t)functionObjAddrOpnd->m_address + Js::RecyclableObject::GetOffsetOfType()), TyMachPtr, this->m_func,
  18979. IR::AddrOpndKindDynamicObjectTypeRef);
  18980. }
  18981. else
  18982. {
  18983. functionTypeOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, this->m_func);
  18984. }
  18985. Lowerer::InsertMove(functionTypeRegOpnd, functionTypeOpnd, insertInstrPt);
  18986. return functionTypeRegOpnd;
  18987. }
void
Lowerer::FinalLower()
{
    // Final machine-dependent lowering pass, plus end-of-lowering fixups:
    // the lazy-bailout thunk (if any lazy bailouts exist) and the function
    // start/end labels bracketing the prolog and epilog.
    this->m_lowererMD.FinalLower();

    // We check if there are any lazy bailouts in
    // LowererMD::FinalLower, so only insert the thunk
    // if needed
    if (this->m_func->HasLazyBailOut())
    {
        this->InsertLazyBailOutThunk();
    }

    // Ensure that the StartLabel and EndLabel are inserted
    // before the prolog and after the epilog respectively
    IR::LabelInstr * startLabel = m_func->GetFuncStartLabel();
    if (startLabel != nullptr)
    {
        m_func->m_headInstr->InsertAfter(startLabel);
    }

    IR::LabelInstr * endLabel = m_func->GetFuncEndLabel();
    if (endLabel != nullptr)
    {
        // Place the end label just before the last real instruction of the function.
        m_func->m_tailInstr->GetPrevRealInstr()->InsertBefore(endLabel);
    }
}
void
Lowerer::InsertLazyBailOutThunk()
{
    // Appends one shared lazy-bailout thunk at the end of the top-level function
    // (x86/x64 only). Lazy-bailout sites jump here; the thunk saves parameter
    // registers, re-enables implicit calls, loads the bailout record, calls
    // SaveAllRegistersAndBailOut, and jumps to the epilog.
    // Note: this runs post-register-allocation, so registers are assigned by hand.
#if defined(_M_IX86) || defined(_M_X64)
    if (!this->m_func->IsTopFunc())
    {
        // Only the top-level function owns the thunk; inlinees share it.
        return;
    }
    Assert(this->m_func->GetLazyBailOutRecordSlot() != nullptr);

    IR::Instr *tailInstr = this->m_func->m_tailInstr;

    // Label (LazyBailOutThunk):
    IR::LabelInstr *lazyBailOutLabel = IR::LabelInstr::New(Js::OpCode::LazyBailOutThunkLabel, this->m_func, true /* isOpHelper */);
    lazyBailOutLabel->m_hasNonBranchRef = true; // Make sure that this label isn't removed
    LABELNAMESET(lazyBailOutLabel, "LazyBailOutThunk");
    tailInstr->InsertBefore(lazyBailOutLabel);

#ifdef _M_X64
    // 1. Save registers used for parameters, and rax, if necessary, into the shadow space allocated for register parameters:
    //    mov [rsp + 16], RegArg1 (if branchConditionOpnd)
    //    mov [rsp + 8], RegArg0
    //    mov [rsp], rax
    extern const IRType RegTypes[RegNumCount];
    const RegNum regs[3] = { RegRAX, RegArg0, RegArg1 };
    for (int i = 2; i >= 0; i--)
    {
        RegNum reg = regs[i];
        const IRType regType = RegTypes[reg];
        Lowerer::InsertMove(
            IR::SymOpnd::New(this->m_func->m_symTable->GetArgSlotSym(static_cast<Js::ArgSlot>(i + 1)), regType, this->m_func),
            IR::RegOpnd::New(nullptr, reg, regType, this->m_func),
            tailInstr
        );
    }
#endif

    // 2. Always enable implicit call flag
    // If StFld/StElem instructions have both LazyBailOut and BailOnImplicitCallPreop and the operation turns out to not
    // be an implicit call, at that point, we have already disabled the implicit calls flag. We would then do lazy bailout
    // and not go back to the remaining code. Therefore, we need to re-enable implicit calls again in the thunk.
    IR::Opnd *disableImplicitCallFlagAddress = this->m_lowererMD.GenerateMemRef(
        this->m_func->GetThreadContextInfo()->GetDisableImplicitFlagsAddr(),
        TyInt8,
        tailInstr /* insertBeforeInstr */
    );

#ifdef _M_X64
    // On x64, we might decide to load the address of implicit flag to a register,
    // but since we are in Lowerer (past RegAlloc), all the operands won't have any
    // registers assigned to them. We force them to be rcx (because they are going
    // to be replaced anyway).
    // TODO: This hack doesn't work with ARM/ARM64
    // Will need to revisit this if we decide to do lazy bailout on those platforms
    IR::Instr *moveInstr = Lowerer::InsertMove(
        disableImplicitCallFlagAddress,
        IR::IntConstOpnd::New(DisableImplicitNoFlag, TyInt8, this->m_func, true),
        tailInstr /* insertBeforeInstr */
    );
    if (moveInstr->GetDst()->IsIndirOpnd())
    {
        moveInstr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->AsRegOpnd()->SetReg(RegArg0);
    }
    if (moveInstr->m_prev->GetDst()->IsRegOpnd())
    {
        moveInstr->m_prev->GetDst()->AsRegOpnd()->SetReg(RegArg0);
    }
#else
    Lowerer::InsertMove(
        disableImplicitCallFlagAddress,
        IR::IntConstOpnd::New(DisableImplicitNoFlag, TyInt8, this->m_func, true),
        tailInstr /* insertBeforeInstr */
    );
#endif

#ifdef _M_X64
    // 3. mov rcx, [rbp + offset] ; for bailout record
    IR::RegOpnd *arg0 = IR::RegOpnd::New(nullptr, RegArg0, TyMachPtr, this->m_func);
    IR::SymOpnd *bailOutRecordAddr = IR::SymOpnd::New(this->m_func->GetLazyBailOutRecordSlot(), TyMachPtr, this->m_func);
    Lowerer::InsertMove(arg0, bailOutRecordAddr, tailInstr, false /* generateWriteBarrier */);
#else
    // 3. Put the BailOutRecord on the stack for x86
    IR::Instr *const newInstr = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
    IR::SymOpnd *bailOutRecordAddr = IR::SymOpnd::New(this->m_func->GetLazyBailOutRecordSlot(), TyMachPtr, this->m_func);
    newInstr->SetSrc1(bailOutRecordAddr);
    tailInstr->InsertBefore(newInstr);
#endif

    // 4. call SaveAllRegistersAndBailOut
    IR::Instr *callInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSaveAllRegistersAndBailOut, this->m_func));
    tailInstr->InsertBefore(callInstr);
    m_lowererMD.LowerCall(callInstr, 0);

    // 5. jmp to function's epilog
    IR::LabelInstr *exitLabel = this->m_func->m_exitInstr->GetPrevLabelInstr();
    IR::BranchInstr *branchInstr = IR::BranchInstr::New(Js::OpCode::JMP, exitLabel, this->m_func);
    tailInstr->InsertBefore(branchInstr);
#endif
}
void
Lowerer::EHBailoutPatchUp()
{
    // Post-layout patch-up for bailouts inside exception-handling (try/catch/finally)
    // regions. Walks every instruction, tracking the current EH region via labels.
    Assert(this->m_func->isPostLayout);

    // 1. Insert return thunks for all the regions.
    // 2. Set the hasBailedOut bit to true on all bailout paths in EH regions.
    // 3. Insert code after every bailout in a try or catch region to save the return value on the stack, and jump to the return thunk (See Region.h) of that region.
    // 4. Insert code right before the epilog, to restore the return value (saved in 2.) from a bailout into eax.
    IR::LabelInstr * restoreReturnValueFromBailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Find (or create) the label at the start of the epilog, used as the jump
    // target after restoring the saved return value.
    IR::LabelInstr * epilogLabel;
    IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->GetPrevRealInstrOrLabel();
    if (exitPrevInstr->IsLabelInstr())
    {
        epilogLabel = exitPrevInstr->AsLabelInstr();
    }
    else
    {
        epilogLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        this->m_func->m_exitInstr->InsertBefore(epilogLabel);
    }

    IR::Instr * tmpInstr = nullptr;
    // The restore-return-value block is shared; emit it only once.
    bool restoreReturnFromBailoutEmitted = false;
    FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, this->m_func)
    {
        if (instr->IsLabelInstr())
        {
            // Labels carry the region they belong to; update the tracker.
            this->currentRegion = instr->AsLabelInstr()->GetRegion();
        }
        // Consider (radua): Assert(this->currentRegion) here?
        if (this->currentRegion)
        {
            RegionType currentRegionType = this->currentRegion->GetType();
            if (currentRegionType == RegionTypeTry || currentRegionType == RegionTypeCatch || currentRegionType == RegionTypeFinally)
            {
                if (this->currentRegion->IsNonExceptingFinally())
                {
                    // A non-excepting finally nested (transitively) directly under the
                    // root region needs no patch-up.
                    Region * parent = this->currentRegion->GetParent();
                    while (parent->IsNonExceptingFinally())
                    {
                        parent = parent->GetParent();
                    }
                    if (parent->GetType() == RegionTypeRoot)
                    {
                        continue;
                    }
                }
                this->InsertReturnThunkForRegion(this->currentRegion, restoreReturnValueFromBailoutLabel);
                if (instr->HasBailOutInfo())
                {
                    if (instr->GetBailOutInfo()->bailOutFunc == this->m_func)
                    {
                        // We dont set this bit for inlined code, if there was a bailout in the inlined code,
                        // and an exception was thrown, we want the caller's handler to handle the exception accordingly.
                        // TODO : Revisit when we start inlining functions with try-catch/try-finally
                        this->SetHasBailedOut(instr);
                    }
                    tmpInstr = this->EmitEHBailoutStackRestore(instr);
                    this->EmitSaveEHBailoutReturnValueAndJumpToRetThunk(tmpInstr);
                    if (!restoreReturnFromBailoutEmitted)
                    {
                        this->EmitRestoreReturnValueFromEHBailout(restoreReturnValueFromBailoutLabel, epilogLabel);
                        restoreReturnFromBailoutEmitted = true;
                    }
                }
            }
        }
    }
    NEXT_INSTR_IN_FUNC_EDITING
}
  19173. bool
  19174. Lowerer::GenerateFastLdFld(IR::Instr * const instrLdFld, IR::JnHelperMethod helperMethod, IR::JnHelperMethod polymorphicHelperMethod,
  19175. IR::LabelInstr ** labelBailOut, IR::RegOpnd* typeOpnd, bool* pIsHelper, IR::LabelInstr** pLabelHelper)
  19176. {
  19177. // Generates:
  19178. //
  19179. // r1 = object->type
  19180. // if (r1 is taggedInt) goto helper
  19181. // Load inline cache
  19182. // if monomorphic
  19183. // r2 = address of the monomorphic inline cache
  19184. // if polymorphic
  19185. // r2 = address of the polymorphic inline cache array
  19186. // r3 = (type >> PIC shift amount) & (PIC size - 1)
  19187. // r2 = r2 + r3
  19188. // Try load property using proto cache (if protoFirst)
  19189. // Try load property using local cache
  19190. // Try loading property using proto cache (if !protoFirst)
  19191. // Try loading property using flags cache
  19192. //
  19193. // Loading property using local cache:
  19194. // if (r1 == r2->u.local.type)
  19195. // result = load inline slot r2->u.local.slotIndex from r1
  19196. // goto fallthru
  19197. // if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.local.type)
  19198. // result = load aux slot r2->u.local.slotIndex from r1
  19199. // goto fallthru
  19200. //
  19201. // Loading property using proto cache:
  19202. // if (r1 == r2->u.proto.type)
  19203. // r3 = r2->u.proto.prototypeObject
  19204. // result = load inline slot r2->u.proto.slotIndex from r3
  19205. // goto fallthru
  19206. // if (r1 | InlineCacheAuxSlotTypeTag) == r2.u.proto.type)
  19207. // r3 = r2->u.proto.prototypeObject
  19208. // result = load aux slot r2->u.proto.slotIndex from r3
  19209. // goto fallthru
  19210. //
  19211. // Loading property using flags cache:
  19212. // if (r2->u.accessor.flags & (Js::InlineCacheGetterFlag | Js::InlineCacheSetterFlag) == 0)
  19213. // if (r1 == r2->u.accessor.type)
  19214. // result = load inline slot r2->u.accessor.slotIndex from r1
  19215. // goto fallthru
  19216. // if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.accessor.type)
  19217. // result = load aux slot r2->u.accessor.slotIndex from r1
  19218. // goto fallthru
  19219. //
  19220. // Loading an inline slot:
  19221. // result = [r1 + slotIndex * sizeof(Var)]
  19222. //
  19223. // Loading an aux slot:
  19224. // slotArray = r1->auxSlots
  19225. // result = [slotArray + slotIndex * sizeof(Var)]
  19226. //
  19227. // We only emit the code block for a type of cache (local/proto/flags) if the profile data
  19228. // indicates that type of cache was used to load the property in the past.
  19229. // We don't emit the type check with aux slot tag if the profile data indicates that we didn't
  19230. // load the property from an aux slot before.
  19231. // We don't emit the type check without an aux slot tag if the profile data indicates that we didn't
  19232. // load the property from an inline slot before.
  19233. IR::Opnd * opndSrc = instrLdFld->GetSrc1();
  19234. AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(), "Expected PropertySym as src of LdFld");
  19235. Assert(!instrLdFld->DoStackArgsOpt());
  19236. IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();
  19237. PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
  19238. PHASE_PRINT_TESTTRACE(
  19239. Js::ObjTypeSpecPhase,
  19240. this->m_func,
  19241. _u("Field load: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: false\n"),
  19242. Js::OpCodeUtil::GetOpCodeName(instrLdFld->m_opcode),
  19243. propertySym->m_propertyId,
  19244. this->m_func->GetJITFunctionBody()->GetDisplayName(),
  19245. propertySymOpnd->m_inlineCacheIndex);
  19246. Assert(pIsHelper != nullptr);
  19247. bool& isHelper = *pIsHelper;
  19248. Assert(pLabelHelper != nullptr);
  19249. IR::LabelInstr*& labelHelper = *pLabelHelper;
  19250. bool doLocal = true;
  19251. bool doProto = instrLdFld->m_opcode == Js::OpCode::LdMethodFld
  19252. || instrLdFld->m_opcode == Js::OpCode::LdRootMethodFld
  19253. || instrLdFld->m_opcode == Js::OpCode::ScopedLdMethodFld;
  19254. bool doProtoFirst = doProto;
  19255. bool doInlineSlots = true;
  19256. bool doAuxSlots = true;
  19257. if (!PHASE_OFF(Js::ProfileBasedFldFastPathPhase, this->m_func) && instrLdFld->IsProfiledInstr())
  19258. {
  19259. IR::ProfiledInstr * profiledInstrLdFld = instrLdFld->AsProfiledInstr();
  19260. if (profiledInstrLdFld->u.FldInfo().flags != Js::FldInfo_NoInfo)
  19261. {
  19262. doProto = !!(profiledInstrLdFld->u.FldInfo().flags & Js::FldInfo_FromProto);
  19263. doLocal = !!(profiledInstrLdFld->u.FldInfo().flags & Js::FldInfo_FromLocal);
  19264. if ((profiledInstrLdFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromInlineSlots)
  19265. {
  19266. // If the inline slots flag is set and the aux slots flag is not, only generate the inline slots check
  19267. doAuxSlots = false;
  19268. }
  19269. else if ((profiledInstrLdFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromAuxSlots)
  19270. {
  19271. // If the aux slots flag is set and the inline slots flag is not, only generate the aux slots check
  19272. doInlineSlots = false;
  19273. }
  19274. }
  19275. else if (!profiledInstrLdFld->u.FldInfo().valueType.IsUninitialized())
  19276. {
  19277. // We have value type info about the field but no flags. This means we shouldn't generate any
  19278. // fast paths for this field load.
  19279. doLocal = false;
  19280. doProto = false;
  19281. }
  19282. }
  19283. if (!doLocal && !doProto)
  19284. {
  19285. return false;
  19286. }
  19287. IR::LabelInstr * labelFallThru = instrLdFld->GetOrCreateContinueLabel();
  19288. if (labelHelper == nullptr)
  19289. {
  19290. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
  19291. }
  19292. IR::RegOpnd * opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  19293. bool usePolymorphicInlineCache = !!propertySymOpnd->m_runtimePolymorphicInlineCache;
  19294. IR::RegOpnd * opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
  19295. if (usePolymorphicInlineCache)
  19296. {
  19297. Lowerer::InsertMove(opndInlineCache, IR::AddrOpnd::New(propertySymOpnd->m_runtimePolymorphicInlineCache->GetInlineCachesAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func, true), instrLdFld);
  19298. }
  19299. else
  19300. {
  19301. Lowerer::InsertMove(opndInlineCache, this->LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd, isHelper), instrLdFld);
  19302. }
  19303. if (typeOpnd == nullptr)
  19304. {
  19305. typeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  19306. GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, typeOpnd, labelHelper);
  19307. }
  19308. if (usePolymorphicInlineCache)
  19309. {
  19310. LowererMD::GenerateLoadPolymorphicInlineCacheSlot(instrLdFld, opndInlineCache, typeOpnd, propertySymOpnd->m_runtimePolymorphicInlineCache->GetSize());
  19311. }
  19312. IR::LabelInstr * labelNext = nullptr;
  19313. IR::Opnd * opndDst = instrLdFld->GetDst();
  19314. IR::RegOpnd * opndTaggedType = nullptr;
  19315. IR::BranchInstr * labelNextBranchToPatch = nullptr;
  19316. if (doProto && doProtoFirst)
  19317. {
  19318. if (doInlineSlots)
  19319. {
  19320. labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
  19321. labelNextBranchToPatch = GenerateProtoInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
  19322. GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
  19323. instrLdFld->InsertBefore(labelNext);
  19324. }
  19325. if (doAuxSlots)
  19326. {
  19327. if (opndTaggedType == nullptr)
  19328. {
  19329. opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
  19330. LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
  19331. }
  19332. labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
  19333. labelNextBranchToPatch = GenerateProtoInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
  19334. GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
  19335. instrLdFld->InsertBefore(labelNext);
  19336. }
  19337. }
  19338. if (doLocal)
  19339. {
  19340. if (doInlineSlots)
  19341. {
  19342. labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
  19343. labelNextBranchToPatch = GenerateLocalInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
  19344. GenerateLdFldFromLocalInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
  19345. instrLdFld->InsertBefore(labelNext);
  19346. }
  19347. if (doAuxSlots)
  19348. {
  19349. if (opndTaggedType == nullptr)
  19350. {
  19351. opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
  19352. LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
  19353. }
  19354. labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
  19355. labelNextBranchToPatch = GenerateLocalInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
  19356. GenerateLdFldFromLocalInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
  19357. instrLdFld->InsertBefore(labelNext);
  19358. }
  19359. }
  19360. if (doProto && !doProtoFirst)
  19361. {
  19362. if (doInlineSlots)
  19363. {
  19364. labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
  19365. labelNextBranchToPatch = GenerateProtoInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
  19366. GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
  19367. instrLdFld->InsertBefore(labelNext);
  19368. }
  19369. if (doAuxSlots)
  19370. {
  19371. if (opndTaggedType == nullptr)
  19372. {
  19373. opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
  19374. LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
  19375. }
  19376. labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
  19377. labelNextBranchToPatch = GenerateProtoInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
  19378. GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
  19379. instrLdFld->InsertBefore(labelNext);
  19380. }
  19381. }
  19382. Assert(labelNextBranchToPatch);
  19383. labelNextBranchToPatch->SetTarget(labelHelper);
  19384. labelNext->Remove();
  19385. // $helper:
  19386. // dst = CALL Helper(inlineCache, base, field, scriptContext)
  19387. // $fallthru:
  19388. isHelper = true;
  19389. // Return false to indicate the original instruction was not lowered. Caller will insert the helper label.
  19390. return false;
  19391. }
void
Lowerer::GenerateAuxSlotAdjustmentRequiredCheck(
    IR::Instr * instrToInsertBefore,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelHelper)
{
    // Branches to labelHelper when the inline cache's raw slot word indicates that
    // the cached aux-slot store cannot proceed directly (bits above the cache layout
    // selector are significant), so the store must take the helper path.
    //
    // regSlotCap = MOV [&(inlineCache->u.local.rawUInt16)] // sized to 16 bits
    IR::RegOpnd * regSlotCap = IR::RegOpnd::New(TyMachReg, instrToInsertBefore->m_func);
    IR::IndirOpnd * memSlotCap = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.rawUInt16), TyUint16, instrToInsertBefore->m_func);
    InsertMove(regSlotCap, memSlotCap, instrToInsertBefore);

    IR::IntConstOpnd * constSelectorBitCount = IR::IntConstOpnd::New(Js::InlineCache::CacheLayoutSelectorBitCount, TyUint16, instrToInsertBefore->m_func, /* dontEncode = */ true);

#if _M_ARM64
    // ARM64 uses the dedicated test-bit-and-branch form: TBZ tests the bit at
    // position CacheLayoutSelectorBitCount of regSlotCap and branches to $helper.
    // NOTE(review): TBZ branches when the tested bit is ZERO, while the non-ARM64
    // path below branches when the bits above the selector are NON-zero -- this
    // relies on the InlineCache rawUInt16 bit layout; verify against Js::InlineCache.
    IR::Instr * testBranch = InsertBranch(Js::OpCode::TBZ, labelHelper, instrToInsertBefore);
    testBranch->SetSrc1(regSlotCap);
    testBranch->SetSrc2(constSelectorBitCount);
#else
    // SHR regSlotCap, Js::InlineCache::CacheLayoutSelectorBitCount  (Shr_A is a
    // logical shift right, not SAR)
    // JNE $helper -- any bits remaining above the selector trigger the helper path
    InsertShiftBranch(Js::OpCode::Shr_A, regSlotCap, regSlotCap, constSelectorBitCount, Js::OpCode::BrNeq_A, true, labelHelper, instrToInsertBefore);
#endif
}
  19412. void
  19413. Lowerer::GenerateSetObjectTypeFromInlineCache(
  19414. IR::Instr * instrToInsertBefore,
  19415. IR::RegOpnd * opndBase,
  19416. IR::RegOpnd * opndInlineCache,
  19417. bool isTypeTagged)
  19418. {
  19419. // regNewType = MOV [&(inlineCache->u.local.type)]
  19420. IR::RegOpnd * regNewType = IR::RegOpnd::New(TyMachReg, instrToInsertBefore->m_func);
  19421. IR::IndirOpnd * memNewType = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.type), TyMachReg, instrToInsertBefore->m_func);
  19422. InsertMove(regNewType, memNewType, instrToInsertBefore);
  19423. // AND regNewType, ~InlineCacheAuxSlotTypeTag
  19424. if (isTypeTagged)
  19425. {
  19426. // On 64-bit platforms IntConstOpnd isn't big enough to hold TyMachReg values.
  19427. IR::IntConstOpnd * constTypeTagComplement = IR::IntConstOpnd::New(~InlineCacheAuxSlotTypeTag, TyMachReg, instrToInsertBefore->m_func, /* dontEncode = */ true);
  19428. InsertAnd(regNewType, regNewType, constTypeTagComplement, instrToInsertBefore);
  19429. }
  19430. // MOV base->type, regNewType
  19431. IR::IndirOpnd * memObjType = IR::IndirOpnd::New(opndBase, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrToInsertBefore->m_func);
  19432. InsertMove(memObjType, regNewType, instrToInsertBefore);
  19433. }
bool
Lowerer::GenerateFastStFld(IR::Instr * const instrStFld, IR::JnHelperMethod helperMethod, IR::JnHelperMethod polymorphicHelperMethod, IR::LabelInstr ** labelBailOut, IR::RegOpnd* typeOpnd,
    bool* pIsHelper, IR::LabelInstr** pLabelHelper, bool withPutFlags, Js::PropertyOperationFlags flags)
{
    // Emits the inline-cache fast path for a property store; always returns false so
    // the caller emits the generic helper call under $helper. Generates:
    //
    // r1 = object->type
    // if (r1 is taggedInt) goto helper
    // Load inline cache
    //     if monomorphic
    //         r2 = address of the monomorphic inline cache
    //     if polymorphic
    //         r2 = address of the polymorphic inline cache array
    //         r3 = (type >> PIC shift amount) & (PIC size - 1)
    //         r2 = r2 + r3
    // Try store property using local cache
    //
    // Storing property using local cache:
    //     if (r1 == r2->u.local.type)
    //         store value to inline slot r2->u.local.slotIndex on r1
    //         goto fallthru
    //     if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.local.type)
    //         store value to aux slot r2->u.local.slotIndex on r1
    //         goto fallthru
    //
    // Storing to an inline slot:
    //     [r1 + slotIndex * sizeof(Var)] = value
    //
    // Storing to an aux slot:
    //     slotArray = r1->auxSlots
    //     [slotArray + slotIndex * sizeof(Var)] = value
    //
    // We don't emit the type check with aux slot tag if the profile data indicates that we didn't
    // store the property to an aux slot before.
    // We don't emit the type check without an aux slot tag if the profile data indicates that we didn't
    // store the property to an inline slot before.
    //
    // NOTE(review): helperMethod, polymorphicHelperMethod, labelBailOut, withPutFlags
    // and flags are not referenced in this function body; presumably they are used by
    // the caller when emitting the helper call -- confirm against the call sites.

    IR::Opnd * opndSrc = instrStFld->GetSrc1();
    IR::Opnd * opndDst = instrStFld->GetDst();
    AssertMsg(opndDst->IsSymOpnd() && opndDst->AsSymOpnd()->IsPropertySymOpnd() && opndDst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected PropertySym as dst of StFld");
    IR::PropertySymOpnd * propertySymOpnd = opndDst->AsPropertySymOpnd();
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();

    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Field store: %s, property ID: %u, func: %s, cache ID: %d, cloned cache: false\n"),
        Js::OpCodeUtil::GetOpCodeName(instrStFld->m_opcode),
        propertySym->m_propertyId,
        this->m_func->GetJITFunctionBody()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex);

    // Out parameters shared with the caller: isHelper marks whether subsequently
    // emitted code lies on a helper path; labelHelper is created lazily and reused.
    Assert(pIsHelper != nullptr);
    bool& isHelper = *pIsHelper;

    Assert(pLabelHelper != nullptr);
    IR::LabelInstr*& labelHelper = *pLabelHelper;

    // doStore: emit the "property already present" store path.
    // doAdd:   emit the "add new property" path (object type transition via the cache).
    bool doStore = true;
    bool doAdd = false;
    bool doInlineSlots = true;
    bool doAuxSlots = true;
    if (!PHASE_OFF(Js::ProfileBasedFldFastPathPhase, this->m_func) && instrStFld->IsProfiledInstr())
    {
        IR::ProfiledInstr * profiledInstrStFld = instrStFld->AsProfiledInstr();
        if (profiledInstrStFld->u.FldInfo().flags != Js::FldInfo_NoInfo)
        {
            // No history of local stores (set or add) at all => no fast path.
            if (!(profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty)))
            {
                return false;
            }

            if (!PHASE_OFF(Js::AddFldFastPathPhase, this->m_func))
            {
                // We always try to do the store field fast path, unless the profile specifically says we never set, but always add a property here.
                if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty)) == Js::FldInfo_FromLocalWithoutProperty)
                {
                    doStore = false;
                }

                // On the other hand, we only emit the add field fast path, if the profile explicitly says we do add properties here.
                if (!!(profiledInstrStFld->u.FldInfo().flags & Js::FldInfo_FromLocalWithoutProperty))
                {
                    doAdd = true;
                }
            }
            else
            {
#if ENABLE_DEBUG_CONFIG_OPTIONS
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
#endif
                PHASE_PRINT_TRACE(Js::AddFldFastPathPhase, this->m_func,
                    _u("AddFldFastPath: function: %s(%s) property ID: %u no fast path, because the phase is off.\n"),
                    this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
                    propertySym->m_propertyId);
            }

            if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromInlineSlots)
            {
                // If the inline slots flag is set and the aux slots flag is not, only generate the inline slots check
                doAuxSlots = false;
            }
            else if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromAuxSlots)
            {
                // If the aux slots flag is set and the inline slots flag is not, only generate the aux slots check
                doInlineSlots = false;
            }
        }
        else if (!profiledInstrStFld->u.FldInfo().valueType.IsUninitialized())
        {
            // We have value type info about the field but no flags. This means we shouldn't generate any
            // fast paths for this field store.
            return false;
        }
    }

    Assert(doStore || doAdd);

    if (labelHelper == nullptr)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    }

    IR::LabelInstr * labelFallThru = instrStFld->GetOrCreateContinueLabel();
    IR::RegOpnd * opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    bool usePolymorphicInlineCache = !!propertySymOpnd->m_runtimePolymorphicInlineCache;

    if (doAdd)
    {
#if ENABLE_DEBUG_CONFIG_OPTIONS
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
#endif
        PHASE_PRINT_TRACE(Js::AddFldFastPathPhase, this->m_func,
            _u("AddFldFastPath: function: %s(%s) property ID: %d %s fast path for %s.\n"),
            this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
            propertySym->m_propertyId,
            usePolymorphicInlineCache ? _u("poly") : _u("mono"), doStore ? _u("store and add") : _u("add only"));
    }

    // r2 = address of the monomorphic cache, or of the polymorphic cache array
    // (indexed by type further below).
    IR::RegOpnd * opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    if (usePolymorphicInlineCache)
    {
        Lowerer::InsertMove(opndInlineCache, IR::AddrOpnd::New(propertySymOpnd->m_runtimePolymorphicInlineCache->GetInlineCachesAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func, true), instrStFld);
    }
    else
    {
        Lowerer::InsertMove(opndInlineCache, this->LoadRuntimeInlineCacheOpnd(instrStFld, propertySymOpnd, isHelper), instrStFld);
    }

    if (typeOpnd == nullptr)
    {
        // r1 = object->type, branching to $helper when the base is a tagged value.
        typeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
        GenerateObjectTestAndTypeLoad(instrStFld, opndBase, typeOpnd, labelHelper);
    }

    if (usePolymorphicInlineCache)
    {
        // Select the PIC bucket for this type: r2 += (type >> shift) & (size - 1).
        LowererMD::GenerateLoadPolymorphicInlineCacheSlot(instrStFld, opndInlineCache, typeOpnd, propertySymOpnd->m_runtimePolymorphicInlineCache->GetSize());
    }

    IR::LabelInstr * labelNext = nullptr;
    IR::RegOpnd * opndTaggedType = nullptr;
    IR::BranchInstr * lastBranchToNext = nullptr;

    if (doStore)
    {
        if (doInlineSlots)
        {
            // Untagged type match => property lives in an inline slot.
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            lastBranchToNext = GenerateLocalInlineCacheCheck(instrStFld, typeOpnd, opndInlineCache, labelNext);
            this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, true);
            instrStFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                // taggedType = type | InlineCacheAuxSlotTypeTag; computed once, reused below.
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrStFld, typeOpnd, opndTaggedType);
            }

            // Tagged type match => property lives in an aux slot.
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            lastBranchToNext = GenerateLocalInlineCacheCheck(instrStFld, opndTaggedType, opndInlineCache, labelNext);
            this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, false);
            instrStFld->InsertBefore(labelNext);
        }
    }

    if (doAdd)
    {
        if (doInlineSlots)
        {
            // Add-property path: the extra 'true' argument selects the add-property
            // form of the cache check; on a hit the object's type is switched to the
            // cache's new type before the value is stored.
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            lastBranchToNext = GenerateLocalInlineCacheCheck(instrStFld, typeOpnd, opndInlineCache, labelNext, true);
            GenerateSetObjectTypeFromInlineCache(instrStFld, opndBase, opndInlineCache, false);
            this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, true);
            instrStFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrStFld, typeOpnd, opndTaggedType);
            }

            // NOTE(review): unlike every other labelNext allocation in this function,
            // this one does not pass isHelper -- confirm whether that is intentional.
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            lastBranchToNext = GenerateLocalInlineCacheCheck(instrStFld, opndTaggedType, opndInlineCache, labelNext, true);
            // Adding into aux slots may require growing the aux slot array: punt to
            // $helper when the cache says an adjustment is needed.
            GenerateAuxSlotAdjustmentRequiredCheck(instrStFld, opndInlineCache, labelHelper);
            GenerateSetObjectTypeFromInlineCache(instrStFld, opndBase, opndInlineCache, true);
            this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, false);
            instrStFld->InsertBefore(labelNext);
        }
    }

    // The final cache check should fall through to $helper rather than to the last
    // (now empty) labelNext, so retarget that branch and remove the label.
    Assert(lastBranchToNext);
    lastBranchToNext->SetTarget(labelHelper);
    labelNext->Remove();

    // $helper:
    //     CALL Helper(inlineCache, base, field, src, scriptContext)
    // $fallthru:
    isHelper = true;

    // Return false to indicate the original instruction was not lowered. Caller will insert the helper label.
    return false;
}
bool Lowerer::GenerateFastStFldForCustomProperty(IR::Instr *const instr, IR::LabelInstr * *const labelHelperRef)
{
    // Fast path for StFld/StFldStrict of "lastIndex" on a likely-RegExp object:
    // write the value straight into the RegExp's lastIndexVar field and mark the
    // cached numeric lastIndex as invalid, bypassing the generic store helper.
    // Returns true when the store is fully lowered here; false to fall back to the
    // normal lowering. *labelHelperRef receives a lazily created helper label when
    // guard checks are emitted.
    Assert(instr);
    Assert(labelHelperRef);
    Assert(!*labelHelperRef);

    switch(instr->m_opcode)
    {
        case Js::OpCode::StFld:
        case Js::OpCode::StFldStrict:
            break;

        default:
            return false;
    }

    IR::SymOpnd *const symOpnd = instr->GetDst()->AsSymOpnd();
    PropertySym *const propertySym = symOpnd->m_sym->AsPropertySym();
    // Only the "lastIndex" property of a (likely) RegExp object is special-cased.
    if(propertySym->m_propertyId != Js::PropertyIds::lastIndex || !symOpnd->IsPropertySymOpnd())
    {
        return false;
    }
    const ValueType objectValueType(symOpnd->GetPropertyOwnerValueType());
    if(!objectValueType.IsLikelyRegExp())
    {
        return false;
    }

    if(instr->HasBailOutInfo())
    {
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if(!BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind) || bailOutKind & IR::BailOutKindBits)
        {
            // Other bailout kinds will likely need bailout checks that would not be generated here. In particular, if a type
            // check is necessary here to guard against downstream property accesses on the same object, the type check will
            // fail and cause a bailout if the object is a RegExp object since the "lastIndex" property accesses are not cached.
            return false;
        }
    }

    Func *const func = instr->m_func;
    IR::RegOpnd *const objectOpnd = symOpnd->CreatePropertyOwnerOpnd(func);
    const IR::AutoReuseOpnd autoReuseObjectOpnd(objectOpnd, func);

    IR::LabelInstr *labelHelper = nullptr;
    if(!objectOpnd->IsNotTaggedValue())
    {
        // Guard: tagged values are not objects.
        // test object, 1
        // jnz $helper
        if(!labelHelper)
        {
            *labelHelperRef = labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        m_lowererMD.GenerateObjectTest(objectOpnd, instr, labelHelper);
    }
    if(!objectValueType.IsObject())
    {
        // Guard: not provably an object yet, so verify it really is a RegExp by vtable.
        // cmp [object], Js::JavascriptRegExp::vtable
        // jne $helper
        if(!labelHelper)
        {
            *labelHelperRef = labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        InsertCompareBranch(
            IR::IndirOpnd::New(objectOpnd, 0, TyMachPtr, func),
            LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp),
            Js::OpCode::BrNeq_A,
            labelHelper,
            instr);
        // After the vtable check succeeds the value is a definite object.
        objectOpnd->SetValueType(objectValueType.ToDefiniteObject());
    }

    // mov [object + offset(lastIndexVar)], src
    // mov [object + offset(lastIndexOrFlag)], Js::JavascriptRegExp::NotCachedValue
    // jmp $done
    InsertMove(
        IR::IndirOpnd::New(objectOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), TyVar, func),
        instr->GetSrc1(),
        instr);
    InsertMove(
        IR::IndirOpnd::New(objectOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), TyUint32, func),
        IR::IntConstOpnd::New(Js::JavascriptRegExp::NotCachedValue, TyUint32, func, true),
        instr);
    InsertBranch(Js::OpCode::Br, instr->GetOrCreateContinueLabel(), instr);
    return true;
}
IR::RegOpnd *
Lowerer::GenerateIsBuiltinRecyclableObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool checkObjectAndDynamicObject, IR::LabelInstr *labelContinue, bool isInHelper)
{
    // Emits a check that regOpnd is a DynamicObject (matched by vtable) or an object
    // whose type id lies in (TypeIds_LastStaticType, TypeIds_LastBuiltinDynamicObject].
    // When labelContinue is supplied, success branches there; otherwise failure
    // branches to labelHelper. Returns the register that holds the loaded type
    // pointer so callers can reuse it.
    //
    // CMP [srcReg], Js::DynamicObject::`vtable'
    // JEQ $fallThough
    // MOV r1, [src1 + offset(type)] -- get the type id
    // MOV r1, [r1 + offset(typeId)]
    // ADD r1, ~TypeIds_LastStaticType -- if (typeId > TypeIds_LastStaticType && typeId <= TypeIds_LastBuiltinDynamicObject)
    // CMP r1, (TypeIds_LastBuiltinDynamicObject - TypeIds_LastStaticType - 1)
    // JA $helper
    //fallThrough:
    IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);

    if (checkObjectAndDynamicObject)
    {
        if (!regOpnd->IsNotTaggedValue())
        {
            // Tagged values cannot be objects at all.
            m_lowererMD.GenerateObjectTest(regOpnd, insertInstr, labelHelper);
        }

        // DynamicObject instances pass immediately (vtable match branches to $fallThrough).
        GenerateIsDynamicObject(regOpnd, insertInstr, labelFallthrough, true);
    }

    IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::RegOpnd * typeIdRegOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
    IR::IndirOpnd *indirOpnd;

    // MOV typeRegOpnd, [src1 + offset(type)]
    indirOpnd = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
    InsertMove(typeRegOpnd, indirOpnd, insertInstr);

    // MOV typeIdRegOpnd, [typeRegOpnd + offset(typeId)]
    indirOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
    InsertMove(typeIdRegOpnd, indirOpnd, insertInstr);

    // ADD typeIdRegOpnd, ~TypeIds_LastStaticType
    // (typeId + ~Last == typeId - Last - 1: folds the two-sided range test into a
    // single unsigned compare against the range width.)
    InsertAdd(false, typeIdRegOpnd, typeIdRegOpnd,
        IR::IntConstOpnd::New(~Js::TypeIds_LastStaticType, TyInt32, this->m_func, true), insertInstr);

    // CMP typeIdRegOpnd, (TypeIds_LastBuiltinDynamicObject - TypeIds_LastStaticType - 1)
    InsertCompare(
        typeIdRegOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_LastBuiltinDynamicObject - Js::TypeIds_LastStaticType - 1, TyInt32, this->m_func),
        insertInstr);

    if (labelContinue)
    {
        // On success, go to continuation label.
        InsertBranch(Js::OpCode::BrLe_A, true, labelContinue, insertInstr);
    }
    else
    {
        // On failure, go to helper.
        InsertBranch(Js::OpCode::BrGt_A, true, labelHelper, insertInstr);
    }

    // $fallThrough
    insertInstr->InsertBefore(labelFallthrough);

    return typeRegOpnd;
}
  19768. void Lowerer::GenerateIsDynamicObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool fContinueLabel)
  19769. {
  19770. // CMP [srcReg], Js::DynamicObject::`vtable'
  19771. InsertCompare(
  19772. IR::IndirOpnd::New(regOpnd, 0, TyMachPtr, m_func),
  19773. LoadVTableValueOpnd(insertInstr, VTableValue::VtableDynamicObject),
  19774. insertInstr);
  19775. if (fContinueLabel)
  19776. {
  19777. // JEQ $fallThough
  19778. Lowerer::InsertBranch(Js::OpCode::BrEq_A, labelHelper, insertInstr);
  19779. }
  19780. else
  19781. {
  19782. // JNE $helper
  19783. Lowerer::InsertBranch(Js::OpCode::BrNeq_A, labelHelper, insertInstr);
  19784. }
  19785. }
  19786. void Lowerer::GenerateIsRecyclableObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool checkObjectAndDynamicObject)
  19787. {
  19788. // CMP [srcReg], Js::DynamicObject::`vtable'
  19789. // JEQ $fallThough
  19790. // MOV r1, [src1 + offset(type)] -- get the type id
  19791. // MOV r1, [r1 + offset(typeId)]
  19792. // ADD r1, ~TypeIds_LastJavascriptPrimitiveType -- if (typeId > TypeIds_LastJavascriptPrimitiveType && typeId <= TypeIds_LastTrueJavascriptObjectType)
  19793. // CMP r1, (TypeIds_LastTrueJavascriptObjectType - TypeIds_LastJavascriptPrimitiveType - 1)
  19794. // JA $helper
  19795. //fallThrough:
  19796. IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  19797. if (checkObjectAndDynamicObject)
  19798. {
  19799. if (!regOpnd->IsNotTaggedValue())
  19800. {
  19801. m_lowererMD.GenerateObjectTest(regOpnd, insertInstr, labelHelper);
  19802. }
  19803. this->GenerateIsDynamicObject(regOpnd, insertInstr, labelFallthrough, true);
  19804. }
  19805. IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  19806. IR::RegOpnd * typeIdRegOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  19807. // MOV r1, [src1 + offset(type)]
  19808. InsertMove(typeRegOpnd, IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func), insertInstr);
  19809. // MOV r1, [r1 + offset(typeId)]
  19810. InsertMove(typeIdRegOpnd, IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func), insertInstr);
  19811. // ADD r1, ~TypeIds_LastJavascriptPrimitiveType
  19812. InsertAdd(false, typeIdRegOpnd, typeIdRegOpnd, IR::IntConstOpnd::New(~Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, this->m_func, true), insertInstr);
  19813. // CMP r1, (TypeIds_LastTrueJavascriptObjectType - TypeIds_LastJavascriptPrimitiveType - 1)
  19814. InsertCompare(
  19815. typeIdRegOpnd,
  19816. IR::IntConstOpnd::New(Js::TypeIds_LastTrueJavascriptObjectType - Js::TypeIds_LastJavascriptPrimitiveType - 1, TyInt32, this->m_func),
  19817. insertInstr);
  19818. // JA $helper
  19819. InsertBranch(Js::OpCode::BrGe_A, true, labelHelper, insertInstr);
  19820. // $fallThrough
  19821. insertInstr->InsertBefore(labelFallthrough);
  19822. }
  19823. bool
  19824. Lowerer::GenerateLdThisCheck(IR::Instr * instr)
  19825. {
  19826. //
  19827. // If not a recyclable object, jump to $helper
  19828. // MOV dst, src1 -- return the object itself
  19829. // JMP $fallthrough
  19830. // $helper:
  19831. // (caller generates helper call)
  19832. // $fallthrough:
  19833. //
  19834. IR::RegOpnd * src1 = instr->GetSrc1()->AsRegOpnd();
  19835. IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  19836. IR::LabelInstr * fallthrough = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  19837. GenerateIsRecyclableObject(src1, instr, helper);
  19838. // MOV dst, src1
  19839. if (instr->GetDst() && !instr->GetDst()->IsEqual(src1))
  19840. {
  19841. InsertMove(instr->GetDst(), src1, instr);
  19842. }
  19843. // JMP $fallthrough
  19844. InsertBranch(Js::OpCode::Br, fallthrough, instr);
  19845. // $helper:
  19846. // (caller generates helper call)
  19847. // $fallthrough:
  19848. instr->InsertBefore(helper);
  19849. instr->InsertAfter(fallthrough);
  19850. return true;
  19851. }
  19852. //
  19853. // TEST src, Js::AtomTag
  19854. // JNE $done
  19855. // MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
  19856. // CMP [typeReg + offsetof(Type::typeid)], TypeIds_ActivationObject
  19857. // JEQ $helper
  19858. // $done:
  19859. // MOV dst, src
  19860. // JMP $fallthru
  19861. // helper:
  19862. // MOV dst, undefined
  19863. // $fallthru:
  19864. bool
  19865. Lowerer::GenerateLdThisStrict(IR::Instr* instr)
  19866. {
  19867. IR::RegOpnd * src1 = instr->GetSrc1()->AsRegOpnd();
  19868. IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
  19869. IR::LabelInstr * done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  19870. IR::LabelInstr * fallthru = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  19871. IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*helper*/true);
  19872. bool assign = instr->GetDst() && !instr->GetDst()->IsEqual(src1);
  19873. if (!src1->IsNotTaggedValue())
  19874. {
  19875. // TEST src1, Js::AtomTag
  19876. // JNE $done
  19877. this->m_lowererMD.GenerateObjectTest(src1, instr, assign ? done : fallthru);
  19878. }
  19879. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(src1, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  19880. Lowerer::InsertMove(typeReg, indirOpnd, instr);
  19881. IR::IndirOpnd * typeID = IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
  19882. IR::Opnd * activationObject = IR::IntConstOpnd::New(Js::TypeIds_ActivationObject, TyMachReg, this->m_func);
  19883. Lowerer::InsertCompare(typeID, activationObject, instr);
  19884. // JEQ $helper
  19885. Lowerer::InsertBranch(Js::OpCode::BrEq_A, helper, instr);
  19886. if (assign)
  19887. {
  19888. // $done:
  19889. instr->InsertBefore(done);
  19890. // MOV dst, src
  19891. Lowerer::InsertMove(instr->GetDst(), src1, instr);
  19892. }
  19893. // JMP $fallthru
  19894. Lowerer::InsertBranch(Js::OpCode::Br, fallthru, instr);
  19895. instr->InsertBefore(helper);
  19896. if (instr->GetDst())
  19897. {
  19898. // MOV dst, undefined
  19899. Lowerer::InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined), instr);
  19900. }
  19901. // $fallthru:
  19902. instr->InsertAfter(fallthru);
  19903. return true;
  19904. }
  19905. // given object instanceof function, functionReg is a register with function,
  19906. // objectReg is a register with instance and inlineCache is an InstIsInlineCache.
  19907. // We want to generate:
  19908. //
  19909. // fallback on helper (will patch the inline cache) if function does not match the cache
  19910. // MOV dst, Js::false
  19911. // CMP functionReg, [&(inlineCache->function)]
  19912. // JNE helper
  19913. //
  19914. // fallback if object is a tagged int
  19915. // TEST objectReg, Js::AtomTag
  19916. // JNE done
  19917. //
// fallback if object's type is not the cached type
// MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
// return false if object is a primitive
// CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
// JLE done
// CMP typeReg, [&(inlineCache->type)]
// JNE helper
  19925. // use the cached result and fallthrough
  19926. // MOV dst, [&(inlineCache->result)]
  19927. // JMP done
  19928. //
  19929. //
  19930. // $helper
  19931. // $done
// Emits the inline-cache fast path for `instanceof` (see the sequence sketched
// in the comment above). Returns true; the caller generates the helper call at
// $helper using the ArgOut_A instructions that are deliberately left in place.
bool
Lowerer::GenerateFastIsInst(IR::Instr * instr)
{
    IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Opnd * objectSrc;
    IR::Opnd * functionSrc;
    intptr_t inlineCache;
    IR::Instr * instrArg;
    // We are going to use the extra ArgOut_A instructions to lower the helper call later,
    // so we leave them alone here and clean them up then.
    // src1 is the inline-cache index; resolve it to the IsInst cache's address.
    inlineCache = instr->m_func->GetJITFunctionBody()->GetIsInstInlineCache(instr->GetSrc1()->AsIntConstOpnd()->AsUint32());
    // Walk the ArgOut_A chain hanging off src2: the first link carries the
    // object (instance), the second carries the function (constructor). The
    // single-def asserts guarantee the chain is well-formed.
    Assert(instr->GetSrc2()->AsRegOpnd()->m_sym->m_isSingleDef);
    instrArg = instr->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;
    objectSrc = instrArg->GetSrc1();
    Assert(instrArg->GetSrc2()->AsRegOpnd()->m_sym->m_isSingleDef);
    instrArg = instrArg->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;
    functionSrc = instrArg->GetSrc1();
    // End of the ArgOut chain.
    Assert(instrArg->GetSrc2() == nullptr);
    // MOV dst, Js::false
    // Default result; overwritten on the cache-hit path below.
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
    IR::RegOpnd * functionReg = GetRegOpnd(functionSrc, instr, m_func, TyMachReg);
    // CMP functionReg, [&(inlineCache->function)]
    // Cache is only valid for the function it was populated with.
    {
        IR::Opnd* cacheFunction = IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfFunction(), TyMachReg, m_func, IR::AddrOpndKindDynamicIsInstInlineCacheFunctionRef);
        InsertCompare(functionReg, cacheFunction, instr);
    }
    // JNE helper
    InsertBranch(Js::OpCode::BrNeq_A, helper, instr);
    IR::RegOpnd * objectReg = GetRegOpnd(objectSrc, instr, m_func, TyMachReg);
    // TEST objectReg, Js::AtomTag
    // JNE done
    // Tagged (non-object) instances fall through with the default false result.
    m_lowererMD.GenerateObjectTest(objectReg, instr, done);
    // MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
    InsertMove(typeReg, IR::IndirOpnd::New(objectReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func), instr);
    // CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
    // Primitives are never instances; keep the default false result.
    {
        IR::IndirOpnd * typeId = IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func);
        IR::IntConstOpnd * lastPrimitive = IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastJavascriptPrimitiveType, TyInt32, m_func);
        InsertCompare(typeId, lastPrimitive, instr);
    }
    // JLE done
    InsertBranch(Js::OpCode::BrLe_A, done, instr);
    // CMP typeReg, [&(inlineCache->type)]
    // Cache hit requires the instance's type to match the cached type exactly.
    {
        IR::Opnd * cacheType = IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfType(), TyMachReg, m_func, IR::AddrOpndKindDynamicIsInstInlineCacheTypeRef);
        InsertCompare(typeReg, cacheType, instr);
    }
    // JNE helper
    InsertBranch(Js::OpCode::BrNeq_A, helper, instr);
    // MOV dst, [&(inlineCache->result)]
    // Cache hit: the cached boolean result is the answer.
    {
        IR::Opnd * cacheResult = IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfResult(), TyMachReg, m_func, IR::AddrOpndKindDynamicIsInstInlineCacheResultRef);
        InsertMove(instr->GetDst(), cacheResult, instr);
    }
    // JMP done
    InsertBranch(Js::OpCode::Br, done, instr);
    // LABEL helper
    // The caller lowers the slow-path helper call (which patches the cache) here.
    instr->InsertBefore(helper);
    instr->InsertAfter(done);
    return true;
}
  19995. void Lowerer::GenerateBooleanNegate(IR::Instr * instr, IR::Opnd * srcBool, IR::Opnd * dst)
  19996. {
  19997. // dst = src
  19998. // dst = dst ^ (true ^ false) (= !src)
  19999. Lowerer::InsertMove(dst, srcBool, instr);
  20000. ScriptContextInfo* sci = instr->m_func->GetScriptContextInfo();
  20001. IR::AddrOpnd* xorval = IR::AddrOpnd::New(sci->GetTrueAddr() ^ sci->GetFalseAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func, true);
  20002. InsertXor(dst, dst, xorval, instr);
  20003. }
  20004. bool Lowerer::GenerateJSBooleanTest(IR::RegOpnd * regSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
  20005. {
  20006. if (regSrc->GetValueType().IsBoolean())
  20007. {
  20008. if (fContinueLabel)
  20009. {
  20010. // JMP $labelTarget
  20011. InsertBranch(Js::OpCode::Br, labelTarget, insertInstr);
  20012. #if DBG
  20013. if (labelTarget->isOpHelper)
  20014. {
  20015. labelTarget->m_noHelperAssert = true;
  20016. }
  20017. #endif
  20018. }
  20019. return false;
  20020. }
  20021. IR::IndirOpnd * vtablePtrOpnd = IR::IndirOpnd::New(regSrc, 0, TyMachPtr, this->m_func);
  20022. IR::Opnd * jsBooleanVTable = LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptBoolean);
  20023. InsertCompare(vtablePtrOpnd, jsBooleanVTable, insertInstr);
  20024. if (fContinueLabel)
  20025. {
  20026. // JEQ $labelTarget
  20027. InsertBranch(Js::OpCode::BrEq_A, labelTarget, insertInstr);
  20028. // $helper
  20029. InsertLabel(true, insertInstr);
  20030. }
  20031. else
  20032. {
  20033. // JNE $labelTarget
  20034. InsertBranch(Js::OpCode::BrNeq_A, labelTarget, insertInstr);
  20035. }
  20036. return true;
  20037. }
  20038. bool Lowerer::GenerateFastEqBoolInt(IR::Instr * instr, bool *pNeedHelper, bool isInHelper)
  20039. {
  20040. Assert(instr);
  20041. // There's a total of 8 modes for this function, based on these inferred flags
  20042. bool isBranchNotCompare = instr->IsBranchInstr();
  20043. bool isStrict = false;
  20044. bool isNegOp = false;
  20045. switch (instr->m_opcode)
  20046. {
  20047. case Js::OpCode::BrSrEq_A:
  20048. case Js::OpCode::BrSrNotNeq_A:
  20049. case Js::OpCode::BrSrNeq_A:
  20050. case Js::OpCode::BrSrNotEq_A:
  20051. case Js::OpCode::CmSrEq_A:
  20052. case Js::OpCode::CmSrNeq_A:
  20053. isStrict = true;
  20054. break;
  20055. default:
  20056. break;
  20057. }
  20058. switch (instr->m_opcode)
  20059. {
  20060. case Js::OpCode::BrSrEq_A:
  20061. case Js::OpCode::BrSrNotNeq_A:
  20062. case Js::OpCode::CmSrEq_A:
  20063. case Js::OpCode::BrEq_A:
  20064. case Js::OpCode::BrNotNeq_A:
  20065. case Js::OpCode::CmEq_A:
  20066. isNegOp = false;
  20067. break;
  20068. case Js::OpCode::BrSrNeq_A:
  20069. case Js::OpCode::BrSrNotEq_A:
  20070. case Js::OpCode::CmSrNeq_A:
  20071. case Js::OpCode::BrNeq_A:
  20072. case Js::OpCode::BrNotEq_A:
  20073. case Js::OpCode::CmNeq_A:
  20074. isNegOp = true;
  20075. break;
  20076. default:
  20077. // This opcode is not one of the ones that should be handled here.
  20078. return false;
  20079. break;
  20080. }
  20081. IR::Opnd *src1 = instr->GetSrc1();
  20082. IR::Opnd *src2 = instr->GetSrc2();
  20083. // The instrucions given to this _should_ all be 2-arg.
  20084. Assert(src1 && src2);
  20085. if (!(src1 && src2))
  20086. {
  20087. return false;
  20088. }
  20089. // If it's a branch instruction, we'll want these to be defined
  20090. //IR::BranchInstr *instrBranch = nullptr;
  20091. IR::LabelInstr *targetInstr = nullptr;
  20092. IR::LabelInstr *labelFallthrough = nullptr;
  20093. if (isBranchNotCompare)
  20094. {
  20095. IR::BranchInstr * instrBranch = instr->AsBranchInstr();
  20096. targetInstr = instrBranch->GetTarget();
  20097. labelFallthrough = instrBranch->GetOrCreateContinueLabel(isInHelper);
  20098. }
  20099. // Assume we need the helper until we can show otherwise.
  20100. *pNeedHelper = true;
  20101. // If we don't know the final types well enough at JIT time, a helper block to set
  20102. // the inputs to the correct types will be needed.
  20103. IR::LabelInstr *labelHelper = nullptr;
  20104. // If we're doing a compare and can handle it early, then we want to skip the helper
  20105. IR::LabelInstr *labelDone = instr->GetOrCreateContinueLabel(isInHelper);
  20106. // Normallize for orderings
  20107. IR::Opnd *srcBool = nullptr;
  20108. IR::Opnd *srcInt = nullptr;
  20109. if (src1->GetValueType().IsLikelyBoolean() && src2->GetValueType().IsLikelyTaggedInt())
  20110. {
  20111. srcBool = src1;
  20112. srcInt = src2;
  20113. }
  20114. else if (src1->GetValueType().IsLikelyTaggedInt() && src2->GetValueType().IsLikelyBoolean())
  20115. {
  20116. srcInt = src1;
  20117. srcBool = src2;
  20118. }
  20119. else
  20120. {
  20121. return false;
  20122. }
  20123. // If either instruction is constant, we can simplify the check. If both are constant, we can eliminate it
  20124. bool srcIntConst = false;
  20125. bool srcIntConstVal = false;
  20126. // If we're comparing with a number that is not 0 or 1, then the two are inequal by default
  20127. bool srcIntIsBoolable = false;
  20128. bool srcBoolConst = false;
  20129. bool srcBoolConstVal = false;
  20130. if (srcInt->IsIntConstOpnd())
  20131. {
  20132. IR::IntConstOpnd * constSrcInt = srcInt->AsIntConstOpnd();
  20133. IntConstType constIntVal = constSrcInt->GetValue();
  20134. srcIntConst = true;
  20135. if (constIntVal == 0)
  20136. {
  20137. srcIntConstVal = false;
  20138. srcIntIsBoolable = true;
  20139. }
  20140. else if (constIntVal == 1)
  20141. {
  20142. srcIntConstVal = true;
  20143. srcIntIsBoolable = true;
  20144. }
  20145. }
  20146. else if (srcInt->IsAddrOpnd())
  20147. {
  20148. IR::AddrOpnd * addrSrcInt = srcInt->AsAddrOpnd();
  20149. if (!(addrSrcInt && addrSrcInt->IsVar() && Js::TaggedInt::Is(addrSrcInt->m_address)))
  20150. {
  20151. return false;
  20152. }
  20153. int32 constIntVal = Js::TaggedInt::ToInt32(addrSrcInt->m_address);
  20154. srcIntConst = true;
  20155. if (constIntVal == 0)
  20156. {
  20157. srcIntConstVal = false;
  20158. srcIntIsBoolable = true;
  20159. }
  20160. else if (constIntVal == 1)
  20161. {
  20162. srcIntConstVal = true;
  20163. srcIntIsBoolable = true;
  20164. }
  20165. }
  20166. else if (srcInt->IsConstOpnd())
  20167. {
  20168. // Not handled yet
  20169. return false;
  20170. }
  20171. if (srcBool->IsIntConstOpnd())
  20172. {
  20173. IR::IntConstOpnd * constSrcBool = srcBool->AsIntConstOpnd();
  20174. IntConstType constIntVal = constSrcBool->GetValue();
  20175. srcBoolConst = true;
  20176. srcBoolConstVal = constIntVal != 0;
  20177. }
  20178. else if (srcBool->IsAddrOpnd())
  20179. {
  20180. IR::AddrOpnd * addrSrcBool = srcInt->AsAddrOpnd();
  20181. if (!(addrSrcBool && addrSrcBool->IsVar() && Js::TaggedInt::Is(addrSrcBool->m_address)))
  20182. {
  20183. return false;
  20184. }
  20185. int32 value = Js::TaggedInt::ToInt32(addrSrcBool->m_address);
  20186. srcBoolConst = true;
  20187. srcBoolConstVal = value != 0;
  20188. }
  20189. else if (srcBool->IsConstOpnd())
  20190. {
  20191. // Not handled yet
  20192. return false;
  20193. }
  20194. // Do these checks here, since that way we avoid emitting instructions before exiting earlier
  20195. if (srcInt->GetValueType().IsTaggedInt() && srcBool->GetValueType().IsBoolean()) {
  20196. // ok, we know the types, so no helper needed
  20197. *pNeedHelper = false;
  20198. }
  20199. else
  20200. {
  20201. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  20202. // check the types and jump to the helper if incorrect
  20203. if (!srcInt->IsConstOpnd() && !srcInt->GetValueType().IsTaggedInt())
  20204. {
  20205. this->m_lowererMD.GenerateSmIntTest(srcInt->AsRegOpnd(), instr, labelHelper);
  20206. }
  20207. if (!srcBool->IsConstOpnd() && !srcBool->GetValueType().IsBoolean())
  20208. {
  20209. if (!srcBool->GetValueType().IsObject())
  20210. {
  20211. this->m_lowererMD.GenerateObjectTest(srcBool->AsRegOpnd(), instr, labelHelper, false);
  20212. }
  20213. GenerateJSBooleanTest(srcBool->AsRegOpnd(), instr, labelHelper, false);
  20214. }
  20215. }
  20216. // At this point, we know both which operand is an integer and which is a boolean,
  20217. // whether either operand is constant, and what the constant true/false values are
  20218. // for any constant operands. This should allow us to emit some decent code.
  20219. LibraryValue equalResultValue = !isNegOp ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
  20220. LibraryValue inequalResultValue = !isNegOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;
  20221. IR::LabelInstr *equalResultTarget = !isNegOp ? targetInstr : labelFallthrough;
  20222. IR::LabelInstr *inequalResultTarget = !isNegOp ? labelFallthrough : targetInstr;
  20223. // For the Sr instructions, we now know that the types are different, so we can immediately
  20224. // decide what the result will be.
  20225. if (isStrict)
  20226. {
  20227. if (isBranchNotCompare)
  20228. {
  20229. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, inequalResultTarget, this->m_func));
  20230. #if DBG
  20231. // Since we're not making a non-helper path to one of the branches, we need to tell
  20232. // DbCheckPostLower that we are going to have a non-helper label without non-helper
  20233. // branches.
  20234. // Note: this following line isn't good practice in general
  20235. equalResultTarget->m_noHelperAssert = true;
  20236. #endif
  20237. }
  20238. else
  20239. {
  20240. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, inequalResultValue), instr);
  20241. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  20242. }
  20243. }
  20244. // Now that we've checked the types, we can lower some instructions to quickly do the check
  20245. // in the case that it's not a type-strict strict equality/inequality check.
  20246. else if (srcIntConst && srcBoolConst)
  20247. {
  20248. // If both arguments are constant, we can statically determine the result.
  20249. bool sameVal = srcIntConstVal == srcBoolConstVal;
  20250. if (isBranchNotCompare)
  20251. {
  20252. // For constant branches, branch to the target
  20253. Assert(instr);
  20254. IR::LabelInstr * target = sameVal && srcIntIsBoolable ? equalResultTarget : inequalResultTarget;
  20255. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, target, this->m_func));
  20256. #if DBG
  20257. // Since we're not making a non-helper path to one of the branches, we need to tell
  20258. // DbCheckPostLower that we are going to have a non-helper label without non-helper
  20259. // branches.
  20260. // Note: this following line isn't good practice in general
  20261. (sameVal && srcIntIsBoolable ? inequalResultTarget : equalResultTarget)->m_noHelperAssert = true;
  20262. #endif
  20263. }
  20264. else
  20265. {
  20266. // For constant compares, load the constant result
  20267. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, sameVal && srcIntIsBoolable ? equalResultValue : inequalResultValue), instr);
  20268. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  20269. }
  20270. }
  20271. else if (!srcIntConst && !srcBoolConst)
  20272. {
  20273. // If neither is constant, we can still do a bit better than loading the helper
  20274. IR::LabelInstr * firstFalse = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  20275. IR::LabelInstr * forceInequal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  20276. // We branch based on the zero-ness of the integer argument to two checks against the boolean argument
  20277. this->m_lowererMD.GenerateTaggedZeroTest(srcInt->AsRegOpnd(), instr, firstFalse);
  20278. // If it's not zero, then it's either 1, in which case it's true, or it's something else, in which
  20279. // case the two will compare as inequal
  20280. InsertCompareBranch(
  20281. IR::IntConstOpnd::New((((IntConstType)1) << Js::VarTag_Shift) + Js::AtomTag, IRType::TyVar, this->m_func, true),
  20282. srcInt->AsRegOpnd(),
  20283. Js::OpCode::BrNeq_A,
  20284. isBranchNotCompare ? inequalResultTarget : forceInequal, // in the case of branching, we can go straight to the inequal target; for compares, we need to load the value
  20285. instr,
  20286. true);
  20287. if (isBranchNotCompare)
  20288. {
  20289. // if the int evaluates to 1 (true)
  20290. InsertCompareBranch(
  20291. srcBool,
  20292. LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue),
  20293. instr->m_opcode,
  20294. targetInstr,
  20295. instr);
  20296. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallthrough, this->m_func));
  20297. // if the int evaluates to 0 (false)
  20298. instr->InsertBefore(firstFalse);
  20299. InsertCompareBranch(
  20300. srcBool,
  20301. LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
  20302. instr->m_opcode,
  20303. targetInstr,
  20304. instr);
  20305. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallthrough, this->m_func));
  20306. }
  20307. else
  20308. {
  20309. // the int resolves to 1 (true)
  20310. // Load either the bool or its complement into the dst reg, depending on the opcode
  20311. if (isNegOp)
  20312. {
  20313. GenerateBooleanNegate(instr, srcBool, instr->GetDst());
  20314. }
  20315. else
  20316. {
  20317. this->InsertMove(instr->GetDst(), srcBool, instr);
  20318. }
  20319. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  20320. // the int resolves to 0 (false)
  20321. // Handle the complement case
  20322. instr->InsertBefore(firstFalse);
  20323. if (!isNegOp)
  20324. {
  20325. GenerateBooleanNegate(instr, srcBool, instr->GetDst());
  20326. }
  20327. else
  20328. {
  20329. this->InsertMove(instr->GetDst(), srcBool, instr);
  20330. }
  20331. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  20332. // the int resolves to something other than 0 or 1 (inequal to a bool)
  20333. instr->InsertBefore(forceInequal);
  20334. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, inequalResultValue), instr);
  20335. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  20336. }
  20337. }
  20338. else if (srcIntConst)
  20339. {
  20340. if (isBranchNotCompare)
  20341. {
  20342. if (srcIntIsBoolable)
  20343. {
  20344. LibraryValue intval = srcIntConstVal ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
  20345. InsertCompareBranch(
  20346. srcBool,
  20347. LoadLibraryValueOpnd(instr, intval),
  20348. instr->m_opcode,
  20349. targetInstr,
  20350. instr);
  20351. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallthrough, this->m_func));
  20352. }
  20353. else
  20354. {
  20355. // Since a constant int that isn't 0 or 1 will always be inequal to bools, just jump to the inequal result
  20356. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, inequalResultTarget, this->m_func));
  20357. #if DBG
  20358. // Since we're not making a non-helper path to one of the branches, we need to tell
  20359. // DbCheckPostLower that we are going to have a non-helper label without non-helper
  20360. // branches.
  20361. // Note: this following line isn't good practice in general
  20362. equalResultTarget->m_noHelperAssert = true;
  20363. #endif
  20364. }
  20365. }
  20366. else
  20367. {
  20368. if (srcIntIsBoolable)
  20369. {
  20370. bool directPassthrough = isNegOp != srcIntConstVal;
  20371. if (directPassthrough)
  20372. {
  20373. // If this case is hit, the result value is the same as the value in srcBool
  20374. this->InsertMove(instr->GetDst(), srcBool, instr);
  20375. }
  20376. else
  20377. {
  20378. // Otherwise, the result value is the negation of the value in srcBool
  20379. GenerateBooleanNegate(instr, srcBool, instr->GetDst());
  20380. }
  20381. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  20382. }
  20383. else
  20384. {
  20385. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, inequalResultValue), instr);
  20386. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  20387. }
  20388. }
  20389. }
  20390. else if (srcBoolConst)
  20391. {
  20392. if (isBranchNotCompare)
  20393. {
  20394. this->m_lowererMD.GenerateTaggedZeroTest(srcInt->AsRegOpnd(), instr, srcBoolConstVal ? inequalResultTarget : equalResultTarget);
  20395. if (srcBoolConstVal)
  20396. {
  20397. // If it's not zero, then it's either 1, in which case it's true, or it's something else, in which
  20398. // case we have an issue.
  20399. InsertCompareBranch(IR::IntConstOpnd::New((((IntConstType)1) << Js::VarTag_Shift) + Js::AtomTag, IRType::TyVar, this->m_func), srcInt->AsRegOpnd(), Js::OpCode::BrNeq_A, inequalResultTarget, instr, true);
  20400. }
  20401. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, srcBoolConstVal ? equalResultTarget : inequalResultTarget, this->m_func));
  20402. }
  20403. else
  20404. {
  20405. IR::LabelInstr* isNonZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  20406. IR::LabelInstr* isZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  20407. this->m_lowererMD.GenerateTaggedZeroTest(srcInt->AsRegOpnd(), instr, isZero);
  20408. if (srcBoolConstVal)
  20409. {
  20410. // If it's not zero, then it's either 1, in which case it's true, or it's something else, in which
  20411. // case we have an issue.
  20412. InsertCompareBranch(IR::IntConstOpnd::New((((IntConstType)1) << Js::VarTag_Shift) + Js::AtomTag, IRType::TyVar, this->m_func), srcInt->AsRegOpnd(), Js::OpCode::BrNeq_A, isZero, instr, true);
  20413. }
  20414. instr->InsertBefore(isNonZero);
  20415. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, srcBoolConstVal ? equalResultValue : inequalResultValue), instr);
  20416. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  20417. instr->InsertBefore(isZero);
  20418. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, !srcBoolConstVal ? equalResultValue : inequalResultValue), instr);
  20419. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  20420. }
  20421. }
  20422. if (*pNeedHelper)
  20423. {
  20424. instr->InsertBefore(labelHelper);
  20425. }
  20426. return true;
  20427. }
// Generate fast path for StrictEquals when at least one source has a definite
// valuetype.
// Returns false when no fast path applies (neither type definite, or src1 ==
// src2); returns true otherwise. Note that some paths return true without
// emitting any comparison code (e.g. both definite with a tagged value) —
// presumably the caller then falls back to the full lowering; TODO confirm
// against the call sites. (helperMethod is not referenced by this fast path.)
// *pNeedHelper is cleared on paths where the emitted code fully decides the
// result.
bool Lowerer::GenerateFastBrOrCmEqDefinite(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool *pNeedHelper, bool isBranch, bool isInHelper)
{
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();
    // At least one side must have a definite value type for this fast path.
    if (!src1->GetValueType().IsDefinite() && !src2->GetValueType().IsDefinite())
    {
        return false;
    }
    // Identical operands: leave to the generic lowering.
    if (src1->IsEqual(src2))
    {
        return false;
    }
    if (src1->GetValueType().IsDefinite() && src2->GetValueType().IsDefinite())
    {
        if (src1->IsTaggedValue() || src2->IsTaggedValue())
        {
            // Nothing emitted here; *pNeedHelper is left as the caller set it.
            return true;
        }
    }
    IR::LabelInstr * labelBranchSuccess = nullptr;
    IR::LabelInstr * labelBranchFailure = nullptr;
    IR::LabelInstr * labelFallThrough = instr->GetOrCreateContinueLabel();
    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
    LibraryValue successValueType = ValueInvalid;
    LibraryValue failureValueType = ValueInvalid;
    // definiteSrc drives which comparison strategy is used below.
    IR::Opnd * definiteSrc = src1->GetValueType().IsDefinite() ? src1 : src2;
    IR::Opnd * likelySrc = src1->GetValueType().IsDefinite() ? src2 : src1;
    bool isEqual = !instr->IsNeq();
    if (!isBranch)
    {
        // Compare form: success/failure labels load the boolean result into dst.
        labelBranchSuccess = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
        labelBranchFailure = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
        successValueType = isEqual ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
        failureValueType = isEqual ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;
    }
    else
    {
        // Branch form: route equality/inequality to the branch target or fall-through.
        labelBranchSuccess = isEqual ? instr->AsBranchInstr()->GetTarget() : labelFallThrough;
        labelBranchFailure = isEqual ? labelFallThrough : instr->AsBranchInstr()->GetTarget();
    }
    Assert(likelySrc->IsRegOpnd());
    if (definiteSrc->GetValueType().IsAnyArray() || definiteSrc->GetValueType().IsSymbol() || definiteSrc->GetValueType().IsBoolean() || definiteSrc->GetValueType().IsPrimitiveOrObject())
    {
        // These types strict-compare by identity: a pointer compare decides fully.
        InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, labelBranchSuccess, instr);
        IR::BranchInstr * branch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelBranchFailure, this->m_func);
        instr->InsertBefore(branch);
        *pNeedHelper = false;
    }
    else if (definiteSrc->GetValueType().IsObject() && !CONFIG_FLAG(ESBigInt))
    {
        InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, labelBranchSuccess, instr);
        if (!likelySrc->GetValueType().IsDefinite())
        {
            // The other side is only "likely": tagged values are unequal to the
            // object, but external-engine objects need the helper.
            m_lowererMD.GenerateObjectTest(likelySrc->AsRegOpnd(), instr, labelBranchFailure);
            IR::RegOpnd * likelyTypeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
            IR::IndirOpnd * likelyType = IR::IndirOpnd::New(likelySrc->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
            Lowerer::InsertMove(likelyTypeReg, likelyType, instr);
            IR::Opnd *likelyFlags = IR::IndirOpnd::New(likelyTypeReg, Js::Type::GetOffsetOfFlags(), TyInt8, this->m_func);
            InsertTestBranch(likelyFlags, IR::IntConstOpnd::New(TypeFlagMask_EngineExternal, TyInt8, this->m_func), Js::OpCode::BrNeq_A, labelHelper, instr);
        }
        else
        {
            *pNeedHelper = false;
        }
        IR::BranchInstr * branch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelBranchFailure, this->m_func);
        instr->InsertBefore(branch);
    }
    else if (definiteSrc->IsTaggedInt())
    {
        // Equal pointers means equal tagged ints; otherwise fall to the helper
        // (the other side could still be an equal boxed number).
        InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, labelBranchSuccess, instr);
        IR::BranchInstr * branch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func);
        instr->InsertBefore(branch);
    }
    else
    {
        // No code emitted for other definite types; caller handles it.
        return true;
    }
    if (!isBranch)
    {
        // Materialize the boolean result for the compare form.
        instr->InsertBefore(labelBranchSuccess);
        InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, successValueType), instr);
        InsertBranch(Js::OpCode::Br, labelFallThrough, instr);
        instr->InsertBefore(labelBranchFailure);
        InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, failureValueType), instr);
        InsertBranch(Js::OpCode::Br, labelFallThrough, instr);
    }
    instr->InsertBefore(labelHelper);
    return true;
}
  20518. // Generate fast path for Strict Equals when both sources are likely boolean/likely object/likely symbol
  20519. bool Lowerer::GenerateFastBrEqLikely(IR::BranchInstr * instrBranch, bool *pNeedHelper, bool isInHelper)
  20520. {
  20521. IR::Opnd *src1 = instrBranch->GetSrc1();
  20522. IR::Opnd *src2 = instrBranch->GetSrc2();
  20523. IR::LabelInstr *targetInstr = instrBranch->GetTarget();
  20524. IR::LabelInstr *labelEqualLikely = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
  20525. IR::LabelInstr *labelTrue = instrBranch->GetOrCreateContinueLabel(isInHelper);
  20526. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  20527. *pNeedHelper = true;
  20528. if (!this->GenerateFastBooleanAndObjectEqLikely(instrBranch, src1, src2, labelHelper, labelEqualLikely, pNeedHelper, isInHelper))
  20529. {
  20530. return false;
  20531. }
  20532. instrBranch->InsertBefore(labelEqualLikely);
  20533. IR::BranchInstr *newBranch = IR::BranchInstr::New(instrBranch->m_opcode, targetInstr, src1, src2, this->m_func);
  20534. instrBranch->InsertBefore(newBranch);
  20535. this->m_lowererMD.LowerCondBranch(newBranch);
  20536. newBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelTrue, this->m_func);
  20537. instrBranch->InsertBefore(newBranch);
  20538. instrBranch->InsertBefore(labelHelper);
  20539. return true;
  20540. }
// Emits the shared "likely boolean / object / symbol" equality fast path used by
// both the Cm* and Br* equality lowerings.
// On success (returns true), the emitted code routes control to:
//   - labelEqualLikely : the operands passed the type checks and a direct
//     value comparison by the caller is meaningful,
//   - labelHelper      : the inline checks could not decide; the runtime helper
//     must resolve the comparison.
// Returns false (nothing emitted) when neither operand's profiled value type
// qualifies for this fast path.
// *pNeedHelper is cleared only when the inline sequence is provably complete
// and no helper call is required (strict compare where one side is a known
// boolean).
// isInHelper indicates the caller is already on a helper path, so new labels
// inherit that marking.
bool Lowerer::GenerateFastBooleanAndObjectEqLikely(IR::Instr * instr, IR::Opnd *src1, IR::Opnd *src2, IR::LabelInstr * labelHelper, IR::LabelInstr * labelEqualLikely, bool *pNeedHelper, bool isInHelper)
{
    *pNeedHelper = true;

    if (!src1 || !src2)
    {
        return false;
    }

    bool isStrictCompare = false;
    bool isStrictMode = this->m_func->GetJITFunctionBody()->IsStrictMode();

    // Strict (===, !==) opcodes never coerce, which lets the object path below
    // skip the "both sides must be dynamic objects" requirement.
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmSrNeq_A:
        isStrictCompare = true;
        break;
    }

    if (src1->GetValueType().IsLikelyBoolean() && src2->GetValueType().IsLikelyBoolean())
    {
        //
        // Booleans
        //
        if (isStrictCompare)
        {
            if (!src1->GetValueType().IsBoolean() && !src2->GetValueType().IsBoolean())
            {
                // Neither side is proven boolean: verify src2 at runtime. If
                // src2 is a JS boolean, jump to labelEqualLikely so the caller
                // can compare directly; otherwise fall through to the helper.
                this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
                if (GenerateJSBooleanTest(src2->AsRegOpnd(), instr, labelEqualLikely, true))
                {
                    instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func));
                }
            }
            else
            {
                // At least one side is a known boolean: the caller's strict
                // comparison is sufficient on its own and no helper is needed.
                *pNeedHelper = false;
            }
        }
        else
        {
            // Loose compare: both sides must be verified as actual booleans for
            // the fast path; any failure defers to the helper (coercion rules).
            this->m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instr, labelHelper, false);
            GenerateJSBooleanTest(src1->AsRegOpnd(), instr, labelHelper, false);
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
            if (GenerateJSBooleanTest(src2->AsRegOpnd(), instr, labelEqualLikely, true))
            {
                instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func));
            }
        }
    }
    else if (src1->GetValueType().HasBeenObject() && src2->GetValueType().HasBeenObject())
    {
        //
        // Objects
        //
        IR::LabelInstr *labelTypeIdCheck = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
        if (!isStrictCompare)
        {
            // If not strictBr, verify both sides are dynamic objects
            this->m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instr, labelHelper, false);
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
            GenerateIsDynamicObject(src1->AsRegOpnd(), instr, labelTypeIdCheck, false);
        }
        else
        {
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
        }
        GenerateIsDynamicObject(src2->AsRegOpnd(), instr, labelEqualLikely, true);

        // $labelTypeIdCheck: src2 (and, for loose compares, src1) was not a
        // plain DynamicObject; fall back to type-id based checks to see whether
        // a direct comparison is still valid.
        instr->InsertBefore(labelTypeIdCheck);
        if (isStrictMode)
        {
            // Strict-mode code skips the type-id fast checks entirely and
            // always defers to the helper from here.
            labelTypeIdCheck->isOpHelper = true;
            IR::BranchInstr *branchToHelper = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func);
            instr->InsertBefore(branchToHelper);
        }
        else
        {
            // Try the external-object fast test first; if it does not apply,
            // require built-in recyclable objects (otherwise go to the helper).
            if (!ExternalLowerer::TryGenerateFastExternalEqTest(src1, src2, instr, labelHelper, labelEqualLikely, this, isStrictCompare, isInHelper))
            {
                if (!isStrictCompare)
                {
                    GenerateIsBuiltinRecyclableObject(src1->AsRegOpnd(), instr, labelHelper, false /*checkObjectAndDynamicObject*/, nullptr /*labelContinue*/, isInHelper);
                }
                GenerateIsBuiltinRecyclableObject(src2->AsRegOpnd(), instr, labelHelper, false /*checkObjectAndDynamicObject*/, nullptr /*labelContinue*/, isInHelper);
            }
        }
    }
    else if (src1->GetValueType().IsLikelySymbol() && src2->GetValueType().IsLikelySymbol())
    {
        // Symbols: once both sides are proven symbols, the caller's direct
        // comparison is sufficient; any failed test goes to the helper.
        this->GenerateSymbolTest(src1->AsRegOpnd(), instr, labelHelper, nullptr, true);
        this->GenerateSymbolTest(src2->AsRegOpnd(), instr, labelHelper, nullptr, true);
    }
    else
    {
        // No qualifying type profile - the caller should try other fast paths.
        return false;
    }
    return true;
}
// Lowers CmEq_A / CmNeq_A / CmSrEq_A / CmSrNeq_A via the likely
// boolean/object/symbol fast path.
// Returns true when a fast path was emitted; in that case *pNeedHelper reports
// whether a helper call must still be generated after it (the caller emits the
// helper call after $labelHelper). Returns false when no fast path applies.
bool Lowerer::GenerateFastCmEqLikely(IR::Instr * instr, bool *pNeedHelper, bool isInHelper)
{
    *pNeedHelper = false;

    Assert(instr->m_opcode == Js::OpCode::CmSrEq_A ||
        instr->m_opcode == Js::OpCode::CmSrNeq_A ||
        instr->m_opcode == Js::OpCode::CmEq_A ||
        instr->m_opcode == Js::OpCode::CmNeq_A);

    bool isNegOp = false;
    bool isStrict = false;
    switch (instr->m_opcode)
    {
    case Js::OpCode::CmSrEq_A:
        isStrict = true;
        break;

    case Js::OpCode::CmSrNeq_A:
        isStrict = true;
        // intentional fall-through: CmSrNeq_A is also a negated compare
    case Js::OpCode::CmNeq_A:
        isNegOp = true;
        break;
    }

    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();

    IR::LabelInstr *labelEqualLikely = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (!this->GenerateFastBooleanAndObjectEqLikely(instr, src1, src2, labelHelper, labelEqualLikely, pNeedHelper, isInHelper))
    {
        return false;
    }

    instr->InsertBefore(labelEqualLikely);

    // $labelEqualLikely
    //
    // Will only come here for
    // if src2 is dynamic object(matches Js::DynamicObject::`vtable'), for non strict cm both src1 and src2 should be dynamic object
    // or if src2 is builtin recyclableobject(typeId > TypeIds_LastStaticType && typeId <= TypeIds_LastBuiltinDynamicObject)
    // or if CustomExternalType with no operations usage flags
    //
    // src1->IsEqual(src2)
    // MOV DST SUCCESS
    // JMP $DONE
    // CMP src1, src2
    // MOV DST SUCCESS
    // JEQ $DONE
    // MOV DST FAILURE
    // JMP $DONE

    // For the negated opcodes (CmNeq/CmSrNeq) "success" means an equal compare
    // should produce false, so the library values are swapped.
    LibraryValue successValueType = !isNegOp ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
    LibraryValue failureValueType = !isNegOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;

    if (src1->IsEqual(src2))
    {
        // Same operand on both sides: trivially equal, no runtime compare.
        Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, successValueType), instr);
        instr->InsertBefore(IR::BranchInstr::New(this->m_lowererMD.MDUncondBranchOpcode, labelDone, this->m_func));
    }
    else
    {
        IR::LabelInstr *cmEqual = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
        this->InsertCompareBranch(src1, src2, isStrict ? Js::OpCode::BrSrEq_A : Js::OpCode::BrEq_A, cmEqual, instr);
        Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, failureValueType), instr);
        instr->InsertBefore(IR::BranchInstr::New(this->m_lowererMD.MDUncondBranchOpcode, labelDone, this->m_func));

        instr->InsertBefore(cmEqual);
        Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, successValueType), instr);
        instr->InsertBefore(IR::BranchInstr::New(this->m_lowererMD.MDUncondBranchOpcode, labelDone, this->m_func));
    }

    // $labelHelper precedes instr (the caller lowers the helper call there);
    // $labelDone follows it so both paths converge after the instruction.
    instr->InsertBefore(labelHelper);
    instr->InsertAfter(labelDone);

    return true;
}
// Emits the string-equality fast path for the Br*/Cm* equality opcodes when
// profile data says both operands have been (or are) strings.
// Returns false (nothing emitted) when either operand is missing, is a tagged
// int, or the string profile does not apply; returns true after wiring the
// fast path around a $helper label that the caller lowers the slow call into.
bool
Lowerer::GenerateFastBrOrCmString(IR::Instr* instr)
{
    IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
    IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;

    if (!srcReg1 ||
        !srcReg2 ||
        srcReg1->IsTaggedInt() ||
        srcReg2->IsTaggedInt() ||
        (!srcReg1->GetValueType().HasHadStringTag() && !srcReg2->GetValueType().IsString()) ||
        (!srcReg2->GetValueType().HasHadStringTag() && !srcReg1->GetValueType().IsString()))
    {
        return false;
    }

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *labelBranchFail = nullptr;
    IR::LabelInstr *labelBranchSuccess = nullptr;

    bool isEqual = false;
    bool isStrict = false;
    bool isBranch = true;
    bool isCmNegOp = false;

    // For branch forms, one outcome jumps to the branch target and the other
    // falls through past the instruction; each strict case intentionally falls
    // through to its non-strict sibling.
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
        isStrict = true;
        // intentional fall-through
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
        labelBranchFail = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        labelBranchSuccess = instr->AsBranchInstr()->GetTarget();
        instr->InsertAfter(labelBranchFail);
        isEqual = true;
        break;

    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
        isStrict = true;
        // intentional fall-through
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrNotEq_A:
        labelBranchSuccess = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        labelBranchFail = instr->AsBranchInstr()->GetTarget();
        instr->InsertAfter(labelBranchSuccess);
        isEqual = false;
        break;

    case Js::OpCode::CmSrEq_A:
        isStrict = true;
        // intentional fall-through
    case Js::OpCode::CmEq_A:
        isEqual = true;
        isBranch = false;
        break;

    case Js::OpCode::CmSrNeq_A:
        isStrict = true;
        // intentional fall-through
    case Js::OpCode::CmNeq_A:
        isEqual = false;
        isBranch = false;
        isCmNegOp = true;
        break;

    default:
        Assume(UNREACHED);
    }

    if (!isBranch)
    {
        // Compare forms materialize a boolean, so both outcomes get local labels.
        labelBranchSuccess = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        labelBranchFail = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    }

    GenerateFastStringCheck(instr, srcReg1, srcReg2, isEqual, isStrict, labelHelper, labelBranchSuccess, labelBranchFail);

    IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    if (!isBranch)
    {
        // Cm*: load true/false into dst at the success/fail labels and jump to
        // the common fallthrough. Negated compares swap the two library values.
        const LibraryValue successValueType = !isCmNegOp ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
        const LibraryValue failureValueType = !isCmNegOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;

        instr->InsertBefore(labelBranchSuccess);
        InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, successValueType), instr);
        InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        instr->InsertBefore(labelBranchFail);
        InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, failureValueType), instr);
        InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
    }

    instr->InsertBefore(labelHelper);
    instr->InsertAfter(labelFallthrough);

#if DBG
    // The fast-path for strings assumes the case where 2 strings are equal is rare, and marks that path as 'helper'.
    // This breaks the helper label dbchecks as it can result in non-helper blocks be reachable only from helper blocks.
    // Use m_isHelperToNonHelperBranch and m_noHelperAssert to fix this.
    IR::Instr *blockEndInstr;
    if (isEqual)
    {
        blockEndInstr = labelHelper->GetNextBranchOrLabel();
    }
    else
    {
        blockEndInstr = instr->GetNextBranchOrLabel();
    }

    if (blockEndInstr->IsBranchInstr())
    {
        blockEndInstr->AsBranchInstr()->m_isHelperToNonHelperBranch = true;
    }
    labelFallthrough->m_noHelperAssert = true;
#endif

    return true;
}
// Emits the inline string comparison used by GenerateFastBrOrCmString:
// type checks on both operands, a pointer-identity shortcut, a length compare,
// a flat-buffer (m_pszValue) availability check, a first-character compare,
// and finally a call to the wmemcmp helper over the flat buffers.
// Branches to labelBranchSuccess / labelBranchFail on a decided compare and to
// labelHelper when the inline checks cannot decide (e.g. non-flat strings).
// Always returns true (the operands were already validated by the caller).
bool
Lowerer::GenerateFastStringCheck(IR::Instr *instr, IR::RegOpnd *srcReg1, IR::RegOpnd *srcReg2, bool isEqual, bool isStrict, IR::LabelInstr *labelHelper, IR::LabelInstr *labelBranchSuccess, IR::LabelInstr *labelBranchFail)
{
    Assert(instr->m_opcode == Js::OpCode::BrSrEq_A ||
        instr->m_opcode == Js::OpCode::BrSrNeq_A ||
        instr->m_opcode == Js::OpCode::BrEq_A ||
        instr->m_opcode == Js::OpCode::BrNeq_A ||
        instr->m_opcode == Js::OpCode::BrSrNotEq_A ||
        instr->m_opcode == Js::OpCode::BrSrNotNeq_A ||
        instr->m_opcode == Js::OpCode::BrNotEq_A ||
        instr->m_opcode == Js::OpCode::BrNotNeq_A ||
        instr->m_opcode == Js::OpCode::CmEq_A ||
        instr->m_opcode == Js::OpCode::CmNeq_A ||
        instr->m_opcode == Js::OpCode::CmSrEq_A ||
        instr->m_opcode == Js::OpCode::CmSrNeq_A);

    // if src1 is not string
    // generate object test, if not equal jump to $helper
    // compare type check to string, if not jump to $helper
    //
    // if strict mode generate string test as above for src2 and jump to $failure if failed any time
    // else if not strict generate string test as above for src2 and jump to $helper if failed any time
    //
    // Compare length of src1 and src2 if not equal goto $failure
    //
    // if src1 is not flat string jump to $helper
    //
    // if src1 and src2 m_pszValue pointer match goto $success
    //
    // if src2 is not flat string jump to $helper
    //
    // if first character of src1 and src2 doesn't match goto $failure
    //
    // wmemcmp src1 and src2 flat strings over the character length
    //
    // test eax (result of wmemcmp)
    // if equal jump to $success else to $failure
    //
    // $success
    // jmp to $fallthrough
    // $failure
    // jmp to $fallthrough
    // $helper
    //
    // $fallthrough

    // Generates:
    // GenerateObjectTest(src1);
    // CMP srcReg1, srcReg2
    // JEQ $success
    // MOV s1, [srcReg1 + offset(Type)]
    // CMP type, static_string_type
    // JNE $helper
    // GenerateObjectTest(src2);
    // MOV s2, [srcReg2 + offset(Type)]
    // CMP type, static_string_type
    // JNE $fail                       ; if src1 is string but not src2, src1 !== src2 if isStrict
    // MOV s3, [srcReg1,offset(m_charLength)]
    // CMP [srcReg2,offset(m_charLength)], s3
    // JNE $fail                       <--- length check done
    // MOV s4, [srcReg1,offset(m_pszValue)]
    // CMP s4, 0
    // JEQ $helper
    // MOV s5, [srcReg2,offset(m_pszValue)]
    // CMP s5, 0
    // JEQ $helper
    // MOV s6,[s4]
    // CMP [s5], s6                    -First character comparison
    // JNE $fail
    // eax = wmemcmp(src1String, src2String, length) ; length is in characters (wmemcmp counts wide chars)
    // TEST eax, eax
    // JEQ $success
    // JMP $fail

    IR::Instr* instrInsert = instr;
    GenerateStringTest(srcReg1, instrInsert, labelHelper);

    if (srcReg1->IsEqual(srcReg2))
    {
        // Same register on both sides: trivially equal.
        InsertBranch(Js::OpCode::Br, labelBranchSuccess, instrInsert);
#if DBG
        if (instr->IsBranchInstr())
        {
            // we might have other cases on helper path which will generate branch to the target
            instr->AsBranchInstr()->GetTarget()->m_noHelperAssert = true;
        }
#endif
        return true;
    }

    // CMP srcReg1, srcReg2 - Ptr comparison
    // JEQ $branchSuccess
    InsertCompareBranch(srcReg1, srcReg2, Js::OpCode::BrEq_A, labelBranchSuccess, instrInsert);

    if (isStrict)
    {
        // Strict compare: string vs. non-string is definitely not equal.
        GenerateStringTest(srcReg2, instrInsert, labelBranchFail);
    }
    else
    {
        // Loose compare: a non-string src2 may still coerce equal - use helper.
        GenerateStringTest(srcReg2, instrInsert, labelHelper);
    }

    if (isStrict && (srcReg1->m_sym->m_isStrEmpty || srcReg2->m_sym->m_isStrEmpty))
    {
        // One side is known to be the empty string: equality reduces to
        // "is the other side's length zero".
        IR::RegOpnd* otherOpnd = srcReg1->m_sym->m_isStrEmpty ? srcReg2 : srcReg1;
        InsertCompareBranch(IR::IndirOpnd::New(otherOpnd, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func), IR::IntConstOpnd::New(0, TyUint32, this->m_func, true), Js::OpCode::BrNeq_A, labelBranchFail, instrInsert);
        return true;
    }

    // MOV s3, [srcReg1,offset(m_charLength)]
    // CMP [srcReg2,offset(m_charLength)], s3
    // JNE $branchfail
    IR::RegOpnd * src1LengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(src1LengthOpnd, IR::IndirOpnd::New(srcReg1, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func), instrInsert);
    InsertCompareBranch(IR::IndirOpnd::New(srcReg2, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func), src1LengthOpnd, Js::OpCode::BrNeq_A, labelBranchFail, instrInsert);

    // MOV s4, [src1,offset(m_pszValue)]
    // CMP s4, 0
    // JEQ $helper        ; null m_pszValue means the string isn't flattened yet
    // MOV s5, [src2,offset(m_pszValue)]
    // CMP s5, 0
    // JEQ $helper
    IR::RegOpnd * src1FlatString = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(src1FlatString, IR::IndirOpnd::New(srcReg1, Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, m_func), instrInsert);
    InsertCompareBranch(src1FlatString, IR::IntConstOpnd::New(0, TyUint32, m_func), Js::OpCode::BrEq_A, labelHelper, instrInsert);

    IR::RegOpnd * src2FlatString = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(src2FlatString, IR::IndirOpnd::New(srcReg2, Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, m_func), instrInsert);
    InsertCompareBranch(src2FlatString, IR::IntConstOpnd::New(0, TyUint32, m_func), Js::OpCode::BrEq_A, labelHelper, instrInsert);

    // MOV s6,[s4]
    // CMP [s5], s6 -First character comparison
    // JNE $branchfail
    IR::RegOpnd * src1FirstChar = IR::RegOpnd::New(TyUint16, m_func);
    InsertMove(src1FirstChar, IR::IndirOpnd::New(src1FlatString, 0, TyUint16, m_func), instrInsert);
    InsertCompareBranch(IR::IndirOpnd::New(src2FlatString, 0, TyUint16, m_func), src1FirstChar, Js::OpCode::BrNeq_A, labelBranchFail, instrInsert);

    // eax = wmemcmp(src1String, src2String, length)
    // Arguments are pushed length, src1, src2; length is the character count.
    m_lowererMD.LoadHelperArgument(instr, src1LengthOpnd);
    m_lowererMD.LoadHelperArgument(instr, src1FlatString);
    m_lowererMD.LoadHelperArgument(instr, src2FlatString);

    IR::RegOpnd *dstOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
    IR::Instr *instrCall = IR::Instr::New(Js::OpCode::Call, dstOpnd, IR::HelperCallOpnd::New(IR::HelperWMemCmp, m_func), m_func);
    instr->InsertBefore(instrCall);
    m_lowererMD.LowerCall(instrCall, 3);

    // TEST eax, eax
    // JEQ success
    InsertTestBranch(dstOpnd, dstOpnd, Js::OpCode::BrEq_A, labelBranchSuccess, instrInsert);

    // JMP fail
    InsertBranch(Js::OpCode::Br, labelBranchFail, instrInsert);

    return true;
}
// Lowers BrTrue_A / BrFalse_A with a chain of inline, type-specific truthiness
// checks (typespec'd float, null, undefined, tagged int, boxed float, boolean,
// string, object). Each fast path either fully resolves the branch (the
// instruction is removed and we return) or narrows srcValueType and falls
// through to the next check; anything left over is resolved by calling the
// JavascriptConversion::ToBoolean helper.
// Always returns false: the branch has been fully rewritten here and the caller
// must not lower it again.
bool Lowerer::GenerateFastBrBool(IR::BranchInstr *const instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A);

    Func *const func = instr->m_func;

    if(!instr->GetSrc1()->IsRegOpnd())
    {
        // Hoist a non-register source into a register so it can be tested.
        LowererMD::ChangeToAssign(instr->HoistSrc1(Js::OpCode::Ld_A));
    }
    IR::RegOpnd *const src = instr->GetSrc1()->Copy(func)->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseSrc(src, func);
    // srcValueType is progressively narrowed (SetIsNotAnyOf) as each fast path
    // rules a type out; srcOriginalValueType remembers the profile we started
    // with (Uninitialized profiles get every fast path).
    const ValueType srcOriginalValueType(src->GetValueType());
    ValueType srcValueType(srcOriginalValueType);

    IR::LabelInstr *const labelTarget = instr->GetTarget();
    IR::LabelInstr *const labelFallthrough = instr->GetOrCreateContinueLabel();
    if(labelTarget == labelFallthrough)
    {
        // Nothing to do
        instr->Remove();
        return false;
    }

    const bool branchOnFalse = instr->m_opcode == Js::OpCode::BrFalse_A;
    IR::LabelInstr *const labelFalse = branchOnFalse ? labelTarget : labelFallthrough;
    IR::LabelInstr *const labelTrue = branchOnFalse ? labelFallthrough : labelTarget;
    // Opcode for "compare against 'false' and branch to the target": equality
    // for BrFalse, inequality for BrTrue.
    const Js::OpCode compareWithFalseBranchToTargetOpCode = branchOnFalse ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A;
    // Last inline label emitted immediately before the helper section; it gets
    // marked isOpHelper if the helper call ends up directly following it.
    IR::LabelInstr *lastLabelBeforeHelper = nullptr;

    /// Typespec'd float
    if (instr->GetSrc1()->GetType() == TyFloat64)
    {
        // Type-specialized float: falsy iff zero or NaN; fully resolved inline.
        InsertFloatCheckForZeroOrNanBranch(instr->GetSrc1(), branchOnFalse, labelTarget, labelFallthrough, instr);
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
        instr->Remove();
        return false;
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Null fast path
    if (srcValueType.HasBeenNull() || srcOriginalValueType.IsUninitialized())
    {
        if(srcValueType.IsNull())
        {
            // jmp $false
            InsertBranch(Js::OpCode::Br, labelFalse, instr);

            // Skip lowering call to helper
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // cmp src, null
        // je $false
        InsertCompareBranch(
            src,
            LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
            Js::OpCode::BrEq_A,
            labelFalse,
            instr);
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Null));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Undefined fast path
    if(srcValueType.HasBeenUndefined() || srcOriginalValueType.IsUninitialized())
    {
        if(srcValueType.IsUndefined())
        {
            // jmp $false
            InsertBranch(Js::OpCode::Br, labelFalse, instr);

            // Skip lowering call to helper
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // cmp src, undefined
        // je $false
        InsertCompareBranch(
            src,
            LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
            Js::OpCode::BrEq_A,
            labelFalse,
            instr);
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Undefined));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Tagged int fast path
    const bool isNotInt = src->IsNotInt();
    bool checkedForTaggedInt = isNotInt;
    if( (
            srcValueType.HasBeenInt() ||
            srcValueType.HasBeenUnknownNumber() ||
            srcOriginalValueType.IsUninitialized()
        ) && !isNotInt)
    {
        checkedForTaggedInt = true;
        IR::LabelInstr *notTaggedIntLabel = nullptr;
        if(!src->IsTaggedInt())
        {
            // test src, 1
            // jz $notTaggedInt
            notTaggedIntLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
            m_lowererMD.GenerateSmIntTest(src, instr, notTaggedIntLabel);
        }

        // A tagged int is falsy iff its value is 0.
        // cmp src, tag(0)
        // je/jne $target
        m_lowererMD.GenerateTaggedZeroTest(src, instr);
        Lowerer::InsertBranch(compareWithFalseBranchToTargetOpCode, labelTarget, instr);

        if(src->IsTaggedInt())
        {
            // Skip lowering call to helper
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // jmp $fallthrough
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        // $notTaggedInt:
        if(notTaggedIntLabel)
        {
            instr->InsertBefore(notTaggedIntLabel);
            lastLabelBeforeHelper = notTaggedIntLabel;
        }
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Float fast path
    bool generateFloatTest = srcValueType.IsLikelyFloat();
#ifdef _M_IX86
    if (!AutoSystemInfo::Data.SSE2Available())
    {
        generateFloatTest = false;
    }
#endif
    bool checkedForTaggedFloat =
#if FLOATVAR
        srcValueType.IsNotNumber();
#else
        true; // there are no tagged floats, indicate that it has been checked
#endif
    if (generateFloatTest)
    {
        // if(srcValueType.IsFloat()) // skip tagged int check?
        //
        // ValueType::IsFloat() does not guarantee that the storage is not in a tagged int.
        // The tagged int check is necessary. It does, however, guarantee that as long as the value is not
        // stored in a tagged int, that it is definitely stored in a JavascriptNumber/TaggedFloat.
        IR::LabelInstr *const notFloatLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
        if(!checkedForTaggedInt)
        {
            checkedForTaggedInt = true;
            m_lowererMD.GenerateSmIntTest(src, instr, notFloatLabel, nullptr, true);
        }

        // cmp [src], JavascriptNumber::vtable
        // jne $notFloat
#if FLOATVAR
        checkedForTaggedFloat = true;
        IR::RegOpnd *const floatOpnd = m_lowererMD.CheckFloatAndUntag(src, instr, notFloatLabel);
#else
        m_lowererMD.GenerateFloatTest(src, instr, notFloatLabel);
        IR::IndirOpnd *const floatOpnd = IR::IndirOpnd::New(src, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, func);
#endif

        // cmp src, 0.0
        // jp $false          ; NaN compares unordered -> falsy
        // je/jne $target
        // jmp $fallthrough
        InsertFloatCheckForZeroOrNanBranch(floatOpnd, branchOnFalse, labelTarget, labelFallthrough, instr);
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        // $notFloat:
        instr->InsertBefore(notFloatLabel);
        lastLabelBeforeHelper = notFloatLabel;
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::AnyNumber));
    }

    IR::LabelInstr *labelHelper = nullptr;
    // The object test (is this a pointer, not a tagged value?) is only needed
    // once, and only if the tagged-int/tagged-float checks above didn't already
    // cover the tagged encodings. EnsureObjectTest emits it lazily, creating
    // the helper label on demand.
    bool _didObjectTest = checkedForTaggedInt && checkedForTaggedFloat;
    const auto EnsureObjectTest = [&]()
    {
        if(_didObjectTest)
        {
            return;
        }
        if(!labelHelper)
        {
            labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        m_lowererMD.GenerateObjectTest(src, instr, labelHelper);
        _didObjectTest = true;
    };

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Boolean fast path
    if (srcValueType.HasBeenBoolean() || srcOriginalValueType.IsUninitialized())
    {
        IR::LabelInstr *notBooleanLabel = nullptr;
        if (!srcValueType.IsBoolean())
        {
            EnsureObjectTest();

            // cmp [src], JavascriptBoolean::vtable
            // jne $notBoolean
            notBooleanLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
            InsertCompareBranch(
                IR::IndirOpnd::New(src, 0, TyMachPtr, func),
                LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptBoolean),
                Js::OpCode::BrNeq_A,
                notBooleanLabel,
                instr);
        }

        // cmp src, false
        // je/jne $target
        InsertCompareBranch(
            src,
            LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
            compareWithFalseBranchToTargetOpCode,
            labelTarget,
            instr);

        if (srcValueType.IsBoolean())
        {
            // Skip lowering call to helper
            Assert(!labelHelper);
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // jmp $fallthrough
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        if (notBooleanLabel)
        {
            instr->InsertBefore(notBooleanLabel);
            lastLabelBeforeHelper = notBooleanLabel;
        }
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Boolean));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // String fast path
    if(srcValueType.HasBeenString())
    {
        IR::LabelInstr *notStringLabel = nullptr;
        if(!srcValueType.IsString())
        {
            EnsureObjectTest();

            notStringLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
            GenerateStringTest(src, instr, notStringLabel, nullptr, false);
        }

        // A string is falsy iff it is empty.
        // cmp [src + offset(length)], 0
        // jeq/jne $target
        InsertCompareBranch(
            IR::IndirOpnd::New(src, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func),
            IR::IntConstOpnd::New(0, TyUint32, func, true),
            compareWithFalseBranchToTargetOpCode,
            labelTarget,
            instr);

        if(srcValueType.IsString())
        {
            // Skip lowering call to helper
            Assert(!labelHelper);
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        // jmp $fallthrough
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        if(notStringLabel)
        {
            instr->InsertBefore(notStringLabel);
            lastLabelBeforeHelper = notStringLabel;
        }
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::String));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Object fast path
    if (srcValueType.IsLikelyObject())
    {
        if(srcValueType.IsObject())
        {
            if(srcValueType.GetObjectType() > ObjectType::Object)
            {
                // Specific object types that are tracked are equivalent to 'true'
                // jmp $true
                InsertBranch(Js::OpCode::Br, labelTrue, instr);

                // Skip lowering call to helper
                Assert(!labelHelper);
                Assert(instr->m_prev->IsBranchInstr());
                instr->Remove();
                return false;
            }
        }
        else
        {
            EnsureObjectTest();
        }

        // mov srcType, [src + offset(type)] -- load type
        IR::RegOpnd *const srcType = IR::RegOpnd::New(TyMachPtr, func);
        const IR::AutoReuseOpnd autoReuseR1(srcType, func);
        InsertMove(srcType, IR::IndirOpnd::New(src, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func), instr);

        // test [srcType + offset(flags)], TypeFlagMask_IsFalsy -- check if falsy
        // jnz $false
        InsertTestBranch(
            IR::IndirOpnd::New(srcType, Js::Type::GetOffsetOfFlags(), TyUint8, func),
            IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyUint8, func),
            Js::OpCode::BrNeq_A,
            labelFalse,
            instr);

        // cmp [srcType + offset(typeId)], TypeIds_LastJavascriptPrimitiveType -- check base TypeIds_LastJavascriptPrimitiveType
        // ja $true
        InsertCompareBranch(
            IR::IndirOpnd::New(srcType, Js::Type::GetOffsetOfTypeId(), TyInt32, func),
            IR::IntConstOpnd::New(Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, func),
            Js::OpCode::BrGt_A,
            true /* isUnsigned */,
            labelTrue,
            instr);

        if(!labelHelper)
        {
            labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        // The object checks fall straight into the helper, so the preceding
        // inline label must not be re-marked as a helper block.
        lastLabelBeforeHelper = nullptr;
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Helper call

    // $helper:
    if(lastLabelBeforeHelper)
    {
        Assert(instr->m_prev == lastLabelBeforeHelper);
        lastLabelBeforeHelper->isOpHelper = true;
    }
    if (labelHelper)
    {
        Assert(labelHelper->isOpHelper);
        instr->InsertBefore(labelHelper);
    }

    // call JavascriptConversion::ToBoolean
    IR::RegOpnd *const toBoolDst = IR::RegOpnd::New(TyInt32, func);
    const IR::AutoReuseOpnd autoReuseToBoolDst(toBoolDst, func);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, toBoolDst, instr->GetSrc1(), func);
    instr->InsertBefore(callInstr);
    LowerUnaryHelperMem(callInstr, IR::HelperConv_ToBoolean);

    // test eax, eax
    InsertTest(toBoolDst, toBoolDst, instr);

    // je/jne $target
    // The original branch instruction is reused as the final conditional jump.
    Assert(instr->IsBranchInstr());
    instr->FreeSrc1();
    instr->m_opcode = LowererMD::MDBranchOpcode(compareWithFalseBranchToTargetOpCode);
    Assert(instr->AsBranchInstr()->GetTarget() == labelTarget);

    // Skip lowering another call to helper
    return false;
}
  21289. // Helper method used in LowerMD by all platforms.
  21290. // Creates HelperCallOpnd or DiagHelperCallOpnd, based on helperMethod and state.
  21291. // static
  21292. IR::HelperCallOpnd*
  21293. Lowerer::CreateHelperCallOpnd(IR::JnHelperMethod helperMethod, int helperArgCount, Func* func)
  21294. {
  21295. Assert(func);
  21296. IR::HelperCallOpnd* helperCallOpnd;
  21297. if (CONFIG_FLAG(EnableContinueAfterExceptionWrappersForHelpers) &&
  21298. func->IsJitInDebugMode() &&
  21299. HelperMethodAttributes::CanThrow(helperMethod))
  21300. {
  21301. // Create DiagHelperCallOpnd to indicate that it's needed to wrap original helper with try-catch wrapper,
  21302. // so that we can ignore exception and bailout to next stmt in debugger.
  21303. // For details, see: Lib\Runtime\Debug\DiagHelperMethodWrapper.{h,cpp}.
  21304. helperCallOpnd = IR::DiagHelperCallOpnd::New(helperMethod, func, helperArgCount);
  21305. }
  21306. else
  21307. {
  21308. helperCallOpnd = IR::HelperCallOpnd::New(helperMethod, func);
  21309. }
  21310. return helperCallOpnd;
  21311. }
// Tries to recognize the pattern "typeof x ==/!= <string const>" (and the
// related "typeof x ==/!= typeof y") feeding this compare/branch, and lower it
// to a specialized inline sequence instead of the generic equality helper.
//
// Returns true when the pattern was matched and handled here.
// On the typeof-vs-string-constant match, *prev is set to the instruction
// before the Typeof so the caller can resume lowering from there, and
// *pfNoLower reports whether any further lowering of the compare is needed.
bool
Lowerer::TryGenerateFastBrOrCmTypeOf(IR::Instr *instr, IR::Instr **prev, bool isNeqOp, bool *pfNoLower)
{
    Assert(prev);
    Assert(instr->m_opcode == Js::OpCode::BrSrEq_A ||
        instr->m_opcode == Js::OpCode::BrSrNeq_A ||
        instr->m_opcode == Js::OpCode::BrSrNotEq_A ||
        instr->m_opcode == Js::OpCode::BrSrNotNeq_A ||
        instr->m_opcode == Js::OpCode::CmSrEq_A ||
        instr->m_opcode == Js::OpCode::CmSrNeq_A ||
        instr->m_opcode == Js::OpCode::BrEq_A ||
        instr->m_opcode == Js::OpCode::BrNeq_A ||
        instr->m_opcode == Js::OpCode::BrNotEq_A ||
        instr->m_opcode == Js::OpCode::BrNotNeq_A ||
        instr->m_opcode == Js::OpCode::CmEq_A ||
        instr->m_opcode == Js::OpCode::CmNeq_A);

    //
    // instr         - (Br/Cm)(Sr)(N(ot))eq_A
    // instr->m_prev - typeOf
    //
    IR::Instr *instrLd = instr->GetPrevRealInstrOrLabel();
    bool skippedLoads = false;

    // Skip intermediate Ld_A which might be inserted by flow graph peeps.
    // Only flow-graph-peep temps without bailout info are safe to look through.
    while (instrLd && instrLd->m_opcode == Js::OpCode::Ld_A )
    {
        if (!(instrLd->GetDst()->IsRegOpnd() && instrLd->GetDst()->AsRegOpnd()->m_fgPeepTmp))
        {
            return false;
        }
        if (instrLd->HasBailOutInfo())
        {
            return false;
        }
        instrLd = instrLd->GetPrevRealInstrOrLabel();
        skippedLoads = true;
    }

    IR::Instr *typeOf = instrLd;

    IR::RegOpnd *instrSrc1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
    IR::RegOpnd *instrSrc2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;

    if (typeOf && (typeOf->m_opcode == Js::OpCode::Typeof))
    {
        IR::RegOpnd *typeOfDst = typeOf->GetDst()->IsRegOpnd() ? typeOf->GetDst()->AsRegOpnd() : nullptr;

        if (typeOfDst && instrSrc1 && instrSrc2)
        {
            do
            {
                // Figure out which compare source is the typeof result and
                // which should be the string-constant type name.
                IR::RegOpnd *typeOpnd = nullptr;
                IR::RegOpnd *idOpnd = nullptr;
                if (instrSrc1->m_sym == typeOfDst->m_sym)
                {
                    typeOpnd = instrSrc1;
                    idOpnd = instrSrc2;
                }
                else if (instrSrc2->m_sym == typeOfDst->m_sym)
                {
                    typeOpnd = instrSrc2;
                    idOpnd = instrSrc1;
                }
                else
                {
                    // Neither source turned out to be the typeOpnd
                    break;
                }

                // The typeof result must die here; otherwise we can't elide the
                // Typeof instruction. Fall through to the typeof-vs-typeof path.
                if (!typeOpnd->m_isTempLastUse)
                {
                    break;
                }

                if (!(idOpnd->m_sym->m_isSingleDef && idOpnd->m_sym->m_isStrConst))
                {
                    return false;
                }

                // The second argument to [Cm|Br]TypeOf is the typeid.
                IR::IntConstOpnd *typeIdOpnd = nullptr;

                Assert(idOpnd->m_sym->m_isSingleDef);
                Assert(idOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd());

                // We can't optimize non-javascript type strings.
                // Map the literal type name to the TypeId we will compare against.
                JITJavascriptString *typeNameJsString = JITJavascriptString::FromVar(idOpnd->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_localAddress);
                const char16 *typeName = typeNameJsString->GetString();

                Js::InternalString typeNameString(typeName, typeNameJsString->GetLength());
                if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::UndefinedTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Undefined, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::ObjectTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Object, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::BooleanTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Boolean, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::NumberTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Number, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::StringTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_String, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::FunctionTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, instr->m_func);
                }
                else
                {
                    return false;
                }

                if (skippedLoads)
                {
                    // validate none of dst of Ld_A overlaps with typeof src or dst
                    // before moving the Typeof down next to the compare.
                    IR::Opnd* typeOfSrc = typeOf->GetSrc1();
                    instrLd = typeOf->GetNextRealInstr();
                    while (instrLd != instr)
                    {
                        if (instrLd->GetDst()->IsEqual(typeOfDst) || instrLd->GetDst()->IsEqual(typeOfSrc))
                        {
                            return false;
                        }
                        instrLd = instrLd->GetNextRealInstr();
                    }
                    typeOf->Unlink();
                    instr->InsertBefore(typeOf);
                }

                // The first argument to [Cm|Br]TypeOf is the first arg to the TypeOf instruction.
                IR::Opnd *objectOpnd = typeOf->GetSrc1();
                Assert(objectOpnd->IsRegOpnd());

                // Now emit this instruction and remove the ldstr and typeOf.
                *prev = typeOf->m_prev;
                *pfNoLower = false;
                if (instr->IsBranchInstr())
                {
                    GenerateFastBrTypeOf(instr, objectOpnd->AsRegOpnd(), typeIdOpnd, typeOf, pfNoLower, isNeqOp);
                }
                else
                {
                    GenerateFastCmTypeOf(instr, objectOpnd->AsRegOpnd(), typeIdOpnd, typeOf, pfNoLower, isNeqOp);
                }

                return true;
            } while (false);
        }
    }

    // Fallback pattern: both sources are single-def and each is either a typeof
    // result or a string constant (with at least one typeof). Since typeof
    // always yields an interned type-name string, a raw pointer compare suffices.
    if (instrSrc1 && instrSrc1->GetStackSym()->IsSingleDef() && instrSrc2 && instrSrc2->GetStackSym()->IsSingleDef() &&
        (
            ((instrSrc1->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::Typeof) &&
             ((instrSrc2->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::Typeof) || instrSrc2->GetStackSym()->GetIsStrConst()))
            ||
            ((instrSrc2->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::Typeof) &&
             ((instrSrc1->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::Typeof) || instrSrc1->GetStackSym()->GetIsStrConst()))
        )
       )
    {
        *pfNoLower = true;
        if (instr->IsBranchInstr())
        {
            InsertCompareBranch(instrSrc1, instrSrc2, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, instr->AsBranchInstr()->GetTarget(), instr);
            instr->Remove();
        }
        else
        {
            if (instrSrc1->IsEqual(instrSrc2))
            {
                // Same operand on both sides: result is statically known.
                InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, isNeqOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue), instr);
            }
            else
            {
                // t1 = typeof o1
                // t2 = typeof o2
                // dst = t1 == t2

                // MOV dst, true
                // CMP t1, t2
                // x86, amd64
                //   CMOVNE dst, false
                // arm
                //   BEQ $done
                //   MOV dst, false
                // $done

                // If dst aliases a source, hoist that source first so the
                // unconditional "MOV dst, true" doesn't clobber it.
                if (instr->GetDst()->IsEqual(instrSrc1))
                {
                    IR::Instr* hoistInstr = m_lowererMD.ChangeToAssign(instr->HoistSrc1(Js::OpCode::Ld_A));
                    instrSrc1 = hoistInstr->GetDst()->AsRegOpnd();
                }
                if (instr->GetDst()->IsEqual(instrSrc2))
                {
                    IR::Instr* hoistInstr = m_lowererMD.ChangeToAssign(instr->HoistSrc2(Js::OpCode::Ld_A));
                    instrSrc2 = hoistInstr->GetDst()->AsRegOpnd();
                }

                InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), instr);
#if defined(_M_ARM32_OR_ARM64)
                IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
                InsertCompareBranch(instrSrc1, instrSrc2, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, doneLabel, instr);
                InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
                instr->InsertBefore(doneLabel);
#else
                InsertCompare(instrSrc1, instrSrc2, instr);
                LowererMD::InsertCmovCC(isNeqOp ? Js::OpCode::CMOVE : Js::OpCode::CMOVNE, instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
#endif
            }
            instr->Remove();
        }
        return true;
    }

    return false;
}
  21515. void
  21516. Lowerer::GenerateFalsyObjectTest(IR::Instr * insertInstr, IR::RegOpnd * typeOpnd, IR::LabelInstr * falsyLabel)
  21517. {
  21518. IR::Opnd *flagsOpnd = IR::IndirOpnd::New(typeOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
  21519. InsertTestBranch(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), Js::OpCode::BrNeq_A, falsyLabel, insertInstr);
  21520. }
  21521. void
  21522. Lowerer::GenerateFalsyObjectTest(IR::Instr *insertInstr, IR::RegOpnd *typeOpnd, Js::TypeId typeIdToCheck, IR::LabelInstr* target, IR::LabelInstr* done, bool isNeqOp)
  21523. {
  21524. if (!this->m_func->GetThreadContextInfo()->CanBeFalsy(typeIdToCheck) && typeIdToCheck != Js::TypeIds_Undefined)
  21525. {
  21526. // Don't need the check for falsy, the typeId we are looking for doesn't care
  21527. return;
  21528. }
  21529. IR::Opnd *flagsOpnd = IR::IndirOpnd::New(typeOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
  21530. InsertTest(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), insertInstr);
  21531. if (typeIdToCheck == Js::TypeIds_Undefined)
  21532. {
  21533. //Falsy object returns true for undefined ((typeof falsyObj) == "undefined")
  21534. InsertBranch( Js::OpCode::BrNeq_A, true, isNeqOp ? done : target, insertInstr);
  21535. }
  21536. else
  21537. {
  21538. //Falsy object returns false for all other types ((typeof falsyObj) != "function")
  21539. InsertBranch( Js::OpCode::BrNeq_A, true, isNeqOp? target : done , insertInstr);
  21540. }
  21541. }
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastBrTypeOf
///
/// Lowers "Br on (typeof obj ==/!= <known type name>)" into an inline branch
/// sequence that inspects the object's TypeId directly, without materializing
/// the typeof result string. Only the "object"/"function" cases against Proxy
/// objects fall back to the real Op_Typeof helper.
///
///----------------------------------------------------------------------------
void
Lowerer::GenerateFastBrTypeOf(IR::Instr *branch, IR::RegOpnd *object, IR::IntConstOpnd *typeIdOpnd, IR::Instr *typeOf, bool *pfNoLower, bool isNeqOp)
{
    Js::TypeId typeId = static_cast<Js::TypeId>(typeIdOpnd->GetValue());
    IR::LabelInstr *target = branch->AsBranchInstr()->GetTarget();
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);

    // Only the equality-style branch opcodes are expected here.
    switch(branch->m_opcode)
    {
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrNotNeq_A:
        break;

    default:
        Assert(UNREACHED);
        __assume(UNREACHED);
    }

    // Tagged (non-pointer) values are numbers:
    // JNE/BNE (typeId == Js::TypeIds_Number) ? $target : $done
    IR::LabelInstr *label = (typeId == Js::TypeIds_Number) ? target : done;
    if (isNeqOp)
        label = (label == target) ? done : target;
    m_lowererMD.GenerateObjectTest(object, branch, label);

    // MOV typeRegOpnd, [object + offset(Type)]
    InsertMove(typeRegOpnd,
               IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
               branch);

    // Falsy objects report "undefined" from typeof; handle them before the
    // TypeId comparison (no-op unless this typeId can be falsy).
    GenerateFalsyObjectTest(branch, typeRegOpnd, typeId, target, done, isNeqOp);

    // MOV objTypeId, [typeRegOpnd + offset(TypeId)]
    IR::RegOpnd* objTypeIdOpnd = IR::RegOpnd::New(TyInt32, m_func);
    InsertMove(objTypeIdOpnd,
               IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
               branch);

    // CMP objTypeId, typeId
    // JEQ/JGE $done
    if (typeId == Js::TypeIds_Object)
    {
        // "object" matches any TypeId >= TypeIds_Object.
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrGe_A, isNeqOp ? done : target, branch);
    }
    else if (typeId == Js::TypeIds_Function)
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrEq_A, isNeqOp ? done : target, branch);
    }
    else if (typeId == Js::TypeIds_Number)
    {
        // Check for the typeIds between TypeIds_FirstNumberType <= typeIds <= TypeIds_LastNumberType,
        // done as a single unsigned range check after biasing by the lower bound.
        InsertSub(false, objTypeIdOpnd, objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_FirstNumberType, TyInt32, branch->m_func),branch);

        InsertCompare(objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_LastNumberType - Js::TypeIds_FirstNumberType, TyInt32, branch->m_func), branch);

        InsertBranch(isNeqOp ? Js::OpCode::BrGt_A : Js::OpCode::BrLe_A, true, target, branch);
    }
    else
    {
        InsertCompare(objTypeIdOpnd, typeIdOpnd, branch);
        InsertBranch(isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, target, branch);
    }

    // This could be 'null' which, for historical reasons, has a TypeId < TypeIds_Object but
    // is still a Javascript "object."
    if (typeId == Js::TypeIds_Object)
    {
        // CMP object, 0xXXXXXXXX
        // JEQ isNeqOp ? $done : $target
        InsertCompareBranch(object,
                            LoadLibraryValueOpnd(branch, LibraryValue::ValueNull),
                            Js::OpCode::BrEq_A,
                            isNeqOp ? done : target,
                            branch);
    }

    branch->InsertAfter(done); // Get this label first

    // "object" or "function" may come from HostDispatch. Needs helper if that's the case.
    if (typeId == Js::TypeIds_Object || typeId == Js::TypeIds_Function)
    {
        // CMP objTypeId, TypeIds_Proxy. typeof proxy could be 'object' or 'function' depends on the target
        // JEQ $helper
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
                            Js::OpCode::BrEq_A,
                            helper,
                            branch);

        // CMP objTypeId, TypeIds_HostDispatch
        // JNE isNeqOp ? $target : $done
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
                            Js::OpCode::BrNeq_A,
                            isNeqOp ? target : done,
                            branch);

        // Now emit Typeof and lower it like we would've for the helper call.
        // The branch's implicit-call bailout (if any) is transferred to the
        // Typeof so the helper call remains guarded.
        {
            branch->InsertBefore(helper);
            typeOf->Unlink();
            branch->InsertBefore(typeOf);
            if (branch->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(branch->GetBailOutKind()) &&
                (!typeOf->HasBailOutInfo() || !BailOutInfo::IsBailOutOnImplicitCalls(typeOf->GetBailOutKind())))
            {
                typeOf = AddBailoutToHelperCallInstr(typeOf, branch->GetBailOutInfo(), branch->GetBailOutKind(), branch);
            }
            LowerUnaryHelperMem(typeOf, IR::HelperOp_Typeof);
        }
    }
    else // Other primitive types don't need helper
    {
        typeOf->Remove();
        branch->Remove();
        *pfNoLower = true;
    }
    // $done:
}
///----------------------------------------------------------------------------
///
/// LowererMD::GenerateFastCmTypeOf
///
/// Lowers "dst = (typeof obj ==/!= <known type name>)" into an inline sequence
/// that inspects the object's TypeId and produces a boolean Var, without
/// materializing the typeof result string. Only "object"/"function" against
/// Proxy objects fall back to the real Op_Typeof helper.
///
///----------------------------------------------------------------------------
void
Lowerer::GenerateFastCmTypeOf(IR::Instr *compare, IR::RegOpnd *object, IR::IntConstOpnd *typeIdOpnd, IR::Instr *typeOf, bool *pfNoLower, bool isNeqOp)
{
    Assert(compare->m_opcode == Js::OpCode::CmSrEq_A ||
           compare->m_opcode == Js::OpCode::CmEq_A ||
           compare->m_opcode == Js::OpCode::CmSrNeq_A ||
           compare->m_opcode == Js::OpCode::CmNeq_A);

    Js::TypeId typeId = static_cast<Js::TypeId>(typeIdOpnd->GetValue());
    IR::LabelInstr *movFalse = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *helper= IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::RegOpnd *dst = compare->GetDst()->IsRegOpnd() ? compare->GetDst()->AsRegOpnd() : nullptr;
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);

    Assert(dst);

    if (dst->IsEqual(object))
    {
        // dst same as the src of typeof. As we need to move true to dst first we need to save the src to a new opnd
        IR::RegOpnd *newObject = IR::RegOpnd::New(object->GetType(), m_func);
        InsertMove(newObject, object, compare); //Save src
        object = newObject;
    }

    // mov dst, 'true'
    InsertMove(dst,
               LoadLibraryValueOpnd(compare, LibraryValue::ValueTrue),
               compare);

    // Tagged (non-pointer) values are numbers:
    // TEST object, 1
    // JNE (typeId == Js::TypeIds_Number) ? $done : $movFalse
    IR::LabelInstr *target = (typeId == Js::TypeIds_Number) ? done : movFalse;
    if (isNeqOp)
    {
        target = (target == done) ? movFalse : done;
    }
    m_lowererMD.GenerateObjectTest(object, compare, target);

    // MOV typeRegOpnd, [object + offset(Type)]
    InsertMove(typeRegOpnd,
               IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
               compare);

    // Falsy objects report "undefined" from typeof; handle them before the
    // TypeId comparison (no-op unless this typeId can be falsy).
    GenerateFalsyObjectTest(compare, typeRegOpnd, typeId, done, movFalse, isNeqOp);

    // MOV objTypeId, [typeRegOpnd + offset(TypeId)]
    IR::RegOpnd* objTypeIdOpnd = IR::RegOpnd::New(TyInt32, m_func);
    InsertMove(objTypeIdOpnd,
               IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
               compare);

    // CMP objTypeId, typeId
    // JEQ/JGE $done
    if (typeId == Js::TypeIds_Object)
    {
        // "object" matches any TypeId >= TypeIds_Object.
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrGe_A, isNeqOp ? movFalse : done, compare);
    }
    else if (typeId == Js::TypeIds_Function)
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrEq_A, isNeqOp ? movFalse : done, compare);
    }
    else if (typeId == Js::TypeIds_Number)
    {
        // Check for the typeIds between TypeIds_FirstNumberType <= typeIds <= TypeIds_LastNumberType
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_LastNumberType, TyInt32, compare->m_func),
                            Js::OpCode::BrGt_A,
                            isNeqOp ? done : movFalse,
                            compare);

        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_FirstNumberType, TyInt32, compare->m_func),
                            isNeqOp? Js::OpCode::BrLt_A : Js::OpCode::BrGe_A,
                            done,
                            compare);
    }
    else
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, done, compare);
    }

    // This could be 'null' which, for historical reasons, has a TypeId < TypeIds_Object but
    // is still a Javascript "object."
    if (typeId == Js::TypeIds_Object)
    {
        // CMP object, 0xXXXXXXXX
        // JEQ isNeqOp ? $movFalse : $done
        InsertCompareBranch(object,
                            LoadLibraryValueOpnd(compare, LibraryValue::ValueNull),
                            Js::OpCode::BrEq_A,
                            isNeqOp ? movFalse : done,
                            compare);
    }

    compare->InsertAfter(done); // Get this label first

    // "object" or "function" may come from HostDispatch. Needs helper if that's the case.
    if (typeId == Js::TypeIds_Object || typeId == Js::TypeIds_Function)
    {
        // CMP objTypeId, TypeIds_Proxy. typeof proxy could be 'object' or 'function' depending on the target
        // JEQ $helper
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
                            Js::OpCode::BrEq_A,
                            helper,
                            compare);

        // CMP objTypeId, TypeIds_HostDispatch
        // JNE isNeqOp ? $done : $movFalse
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
                            Js::OpCode::BrNeq_A,
                            isNeqOp ? done : movFalse,
                            compare);

        // Now emit Typeof like we would've for the helper call.
        // The compare's implicit-call bailout (if any) is transferred to the
        // Typeof so the helper call remains guarded.
        {
            compare->InsertBefore(helper);
            typeOf->Unlink();
            compare->InsertBefore(typeOf);
            if (compare->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(compare->GetBailOutKind()) &&
                (!typeOf->HasBailOutInfo() || !BailOutInfo::IsBailOutOnImplicitCalls(typeOf->GetBailOutKind())))
            {
                typeOf = AddBailoutToHelperCallInstr(typeOf, compare->GetBailOutInfo(), compare->GetBailOutKind(), compare);
            }
            LowerUnaryHelperMem(typeOf, IR::HelperOp_Typeof);
        }

        // JMP/B $done
        InsertBranch(Js::OpCode::Br, done, done);
    }
    else // Other primitive types don't need helper
    {
        typeOf->Remove();
        dst = compare->UnlinkDst()->AsRegOpnd();
        compare->Remove();
        *pfNoLower = true;
    }

    // $movFalse: (insert before $done)
    done->InsertBefore(movFalse);

    // MOV dst, 'false'
    InsertMove(dst, LoadLibraryValueOpnd(done, LibraryValue::ValueFalse), done);

    // $done:
}
// Emits a runtime guard that the current (non-inlined) frame was invoked with
// the CallFlags_New flag; if not, calls the RuntimeTypeError helper with
// JSERR_ClassConstructorCannotBeCalledWithoutNew. Replaces instrInsert.
void
Lowerer::GenerateCheckForCallFlagNew(IR::Instr* instrInsert)
{
    Func *func = instrInsert->m_func;
    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);

    Assert(!func->IsInlinee());

    // MOV  s1, [ebp + 4]              // s1 = call info
    // AND  s2, s1, Js::CallFlags_New  // s2 = s1 & Js::CallFlags_New
    // CMP  s2, 0
    // JNE  $Done
    // CALL RuntimeTypeError
    // $Done

    IR::SymOpnd* callInfoOpnd = Lowerer::LoadCallInfo(instrInsert);
    // The flags are shifted above the CallInfo count field, so the mask is
    // CallFlags_New << ksizeofCount; the assert guards against a layout change.
    Assert(Js::CallInfo::ksizeofCount == 24);

    IR::RegOpnd* isNewFlagSetRegOpnd = IR::RegOpnd::New(TyMachReg, func);
    InsertAnd(isNewFlagSetRegOpnd, callInfoOpnd, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_New << Js::CallInfo::ksizeofCount, TyMachReg, func, true), instrInsert);
    InsertTestBranch(isNewFlagSetRegOpnd, isNewFlagSetRegOpnd, Js::OpCode::BrNeq_A, labelDone, instrInsert);

    // Flag not set: throw via the RuntimeTypeError helper.
    IR::Instr *throwInstr = IR::Instr::New(
        Js::OpCode::RuntimeTypeError,
        IR::RegOpnd::New(TyMachReg, m_func),
        IR::IntConstOpnd::New(SCODE_CODE(JSERR_ClassConstructorCannotBeCalledWithoutNew), TyInt32, m_func),
        m_func);
    instrInsert->InsertBefore(throwInstr);
    this->LowerUnaryHelperMem(throwInstr, IR::HelperOp_RuntimeTypeError);

    instrInsert->InsertBefore(labelDone);
    instrInsert->Remove();
}
// Emits the inline equivalent of JavascriptOperators::IsConstructor: unwraps
// proxy targets in a loop until a non-proxy is found, then branches to
// labelReturnTrue iff that value is a function whose FunctionInfo attributes
// do not include ErrorOnNew; otherwise branches to labelReturnFalse.
// instanceRegOpnd is clobbered during the walk.
void
Lowerer::GenerateJavascriptOperatorsIsConstructorGotoElse(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelReturnTrue, IR::LabelInstr *labelReturnFalse)
{
    //  $ProxyLoop:
    //  // if (!VarIs<RecyclableObject>(instance)) { goto $ReturnFalse }; // omitted: VarIs<RecyclableObject>(instance) always true
    //  MOV s0, instance->type
    //  MOV s1, s0->typeId
    //  CMP s1, TypeIds_Proxy
    //  JNE $NotProxy
    //
    //  MOV instance, instance->target
    //  JMP $ProxyLoop
    //
    //  $NotProxy:
    //  CMP s1, TypeIds_Function
    //  JNE $ReturnFalse   // external
    //
    //  MOV s0, instance->functionInfo
    //  MOV s1, s0->attributes
    //  TEST s1, ErrorOnNew
    //  JNE $ReturnFalse   // external
    //
    //  JMP $ReturnTrue    // external

    Func *func = instrInsert->m_func;

    IR::LabelInstr *labelProxyLoop = InsertLoopTopLabel(instrInsert);
    IR::LabelInstr *labelNotProxy = IR::LabelInstr::New(Js::OpCode::Label, func, false);

    IR::RegOpnd *indir0RegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd *indir1RegOpnd = IR::RegOpnd::New(TyUint32, func);

    // instance is redefined inside the loop, so it must be kept live across
    // the back edge for the register allocator.
    Loop * loop = labelProxyLoop->GetLoop();
    loop->regAlloc.liveOnBackEdgeSyms->Set(instanceRegOpnd->m_sym->m_id);

    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func);
    Lowerer::InsertMove(indir0RegOpnd, indirOpnd, instrInsert);

    indirOpnd = IR::IndirOpnd::New(indir0RegOpnd, Js::Type::GetOffsetOfTypeId(), TyUint32, func);
    Lowerer::InsertMove(indir1RegOpnd, indirOpnd, instrInsert);

    InsertCompareBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyUint32, func, true), Js::OpCode::BrNeq_A, labelNotProxy, instrInsert);

    // Proxy: continue the walk on the proxy's target.
    indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::JavascriptProxy::GetOffsetOfTarget(), TyMachPtr, func);
    Lowerer::InsertMove(instanceRegOpnd, indirOpnd, instrInsert);

    InsertBranch(Js::OpCode::Br, labelProxyLoop, instrInsert);

    instrInsert->InsertBefore(labelNotProxy);

    InsertCompareBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::TypeIds_Function, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnFalse, instrInsert);

    indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::JavascriptFunction::GetOffsetOfFunctionInfo(), TyMachPtr, func);
    Lowerer::InsertMove(indir0RegOpnd, indirOpnd, instrInsert);

    indirOpnd = IR::IndirOpnd::New(indir0RegOpnd, Js::FunctionInfo::GetAttributesOffset(), TyUint32, func);
    Lowerer::InsertMove(indir1RegOpnd, indirOpnd, instrInsert);

    InsertTestBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::FunctionInfo::Attributes::ErrorOnNew, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnFalse, instrInsert);

    InsertBranch(Js::OpCode::Br, labelReturnTrue, instrInsert);
}
  21866. void
  21867. Lowerer::GenerateRecyclableObjectGetPrototypeNullptrGoto(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelReturnNullptr)
  21868. {
  21869. // MOV instance, instance->type
  21870. // MOV flags, instance->flags
  21871. // TEST flags, TypeFlagMask_HasSpecialPrototype
  21872. // JNE $ReturnNullptr // external, bypassing nullptr check
  21873. // MOV instance, instance->prototype
  21874. Func *func = instrInsert->m_func;
  21875. IR::RegOpnd *flagsRegOpnd = IR::RegOpnd::New(TyUint32, func);
  21876. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func);
  21877. Lowerer::InsertMove(instanceRegOpnd, indirOpnd, instrInsert);
  21878. indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::Type::GetOffsetOfFlags(), TyUint32, func);
  21879. Lowerer::InsertMove(flagsRegOpnd, indirOpnd, instrInsert);
  21880. InsertTestBranch(flagsRegOpnd, IR::IntConstOpnd::New(TypeFlagMask_HasSpecialPrototype, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnNullptr, instrInsert);
  21881. indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::Type::GetOffsetOfPrototype(), TyMachPtr, func);
  21882. Lowerer::InsertMove(instanceRegOpnd, indirOpnd, instrInsert);
  21883. }
  21884. void
  21885. Lowerer::GenerateRecyclableObjectIsElse(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelFalse)
  21886. {
  21887. Func *func = instrInsert->m_func;
  21888. #if INT32VAR
  21889. InsertTestBranch(instanceRegOpnd, IR::AddrOpnd::New((Js::Var)0xffff000000000000, IR::AddrOpndKindConstantVar, func, true), Js::OpCode::BrNeq_A, labelFalse, instrInsert);
  21890. #else
  21891. InsertTestBranch(instanceRegOpnd, IR::IntConstOpnd::New(Js::AtomTag, TyUint32, func, true), Js::OpCode::BrNeq_A, labelFalse, instrInsert);
  21892. #endif
  21893. }
// Lowers LdHomeObj: loads the function's home object into dst, leaving
// undefined when the function is a stack script function or has no home
// object. Dispatches on the function's vtable to find the right homeObj
// field offset among the ScriptFunction layout variants. Replaces instr.
void
Lowerer::GenerateLdHomeObj(IR::Instr* instr)
{
    //  MOV dst, undefined
    //  MOV instance, functionObject  // functionObject through stack params or src1
    //  CMP [instance], VtableStackScriptFunction
    //  JE  $Done
    //  MOV instance, instance->homeObj
    //  TEST instance, instance
    //  JZ  $Done
    //  MOV dst, instance
    //  $Done:

    Func *func = instr->m_func;

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *labelInlineFunc = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *testLabel = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *scriptFuncLabel = IR::LabelInstr::New(Js::OpCode::Label, func, false);

    IR::Opnd *opndUndefAddress = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
    IR::RegOpnd *instanceRegOpnd = IR::RegOpnd::New(TyMachPtr, func);

    IR::Opnd *dstOpnd = instr->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    Lowerer::InsertMove(dstOpnd, opndUndefAddress, instr);

    IR::Opnd * functionObjOpnd = nullptr;
    m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
    Lowerer::InsertMove(instanceRegOpnd, functionObjOpnd, instr);

    // Stack script functions have no home object: result stays undefined.
    IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(instr, VTableValue::VtableStackScriptFunction);
    IR::BranchInstr* branchInstr = InsertCompareBranch(IR::IndirOpnd::New(instanceRegOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
        Js::OpCode::BrEq_A, true, labelDone, instr);
    InsertObjectPoison(instanceRegOpnd, branchInstr, instr, false);

    if (func->GetJITFunctionBody()->HasHomeObj())
    {
        // The homeObj field offset differs per layout variant, so compare the
        // vtable to pick the right one.

        // Is this an function with inline cache and home obj??
        IR::Opnd * vtableAddressInlineFuncHomObjOpnd = this->LoadVTableValueOpnd(instr, VTableValue::VtableScriptFunctionWithInlineCacheAndHomeObj);
        IR::BranchInstr* inlineFuncHomObjOpndBr = InsertCompareBranch(IR::IndirOpnd::New(instanceRegOpnd, 0, TyMachPtr, func), vtableAddressInlineFuncHomObjOpnd, Js::OpCode::BrNeq_A, labelInlineFunc, instr);
        InsertObjectPoison(instanceRegOpnd, inlineFuncHomObjOpndBr, instr, false);
        IR::IndirOpnd *indirInlineFuncHomeObjOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::FunctionWithHomeObj<Js::ScriptFunctionWithInlineCache>::GetOffsetOfHomeObj(), TyMachPtr, func);
        Lowerer::InsertMove(instanceRegOpnd, indirInlineFuncHomeObjOpnd, instr);
        InsertBranch(Js::OpCode::Br, testLabel, instr);

        instr->InsertBefore(labelInlineFunc);

        // Is this a function with inline cache, home obj and computed name??
        IR::Opnd * vtableAddressInlineFuncHomObjCompNameOpnd = this->LoadVTableValueOpnd(instr, VTableValue::VtableScriptFunctionWithInlineCacheHomeObjAndComputedName);
        IR::BranchInstr* inlineFuncHomObjCompNameBr = InsertCompareBranch(IR::IndirOpnd::New(instanceRegOpnd, 0, TyMachPtr, func), vtableAddressInlineFuncHomObjCompNameOpnd, Js::OpCode::BrNeq_A, scriptFuncLabel, instr);
        InsertObjectPoison(instanceRegOpnd, inlineFuncHomObjCompNameBr, instr, false);
        IR::IndirOpnd *indirInlineFuncHomeObjCompNameOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::FunctionWithComputedName<Js::FunctionWithHomeObj<Js::ScriptFunctionWithInlineCache>>::GetOffsetOfHomeObj(), TyMachPtr, func);
        Lowerer::InsertMove(instanceRegOpnd, indirInlineFuncHomeObjCompNameOpnd, instr);
        InsertBranch(Js::OpCode::Br, testLabel, instr);

        instr->InsertBefore(scriptFuncLabel);

        // Plain ScriptFunctionWithHomeObj layout.
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::ScriptFunctionWithHomeObj::GetOffsetOfHomeObj(), TyMachPtr, func);
        Lowerer::InsertMove(instanceRegOpnd, indirOpnd, instr);
    }
    else
    {
        // Even if the function does not have home object in eval cases we still have the LdHomeObj opcode
        InsertBranch(Js::OpCode::Br, labelDone, instr);
    }

    instr->InsertBefore(testLabel);

    // A null homeObj leaves dst as undefined.
    InsertTestBranch(instanceRegOpnd, instanceRegOpnd, Js::OpCode::BrEq_A, labelDone, instr);

    Lowerer::InsertMove(dstOpnd, instanceRegOpnd, instr);

    instr->InsertBefore(labelDone);
    instr->Remove();
}
void
Lowerer::GenerateLdHomeObjProto(IR::Instr* instr)
{
    // Lowers LdHomeObjProto: dst = prototype of the home object (src1).
    // dst defaults to undefined; a null or non-RecyclableObject home object
    // falls through to $Done, while a JS null/undefined home object raises
    // JSERR_BadSuperReference.
    //
    // MOV dst, undefined
    // MOV instance, src1       // homeObj
    // TEST instance, instance
    // JZ $Done
    //
    // if (!VarIs<RecyclableObject>(instance)) goto $Done
    // MOV type, [instance+Offset(type)]
    // MOV typeId, [type+Offset(typeId)]
    // CMP typeId, TypeIds_Null
    // JEQ $Err
    // CMP typeId, TypeIds_Undefined
    // JNE $NoErr
    //
    // $Err:
    //     ThrowRuntimeReferenceError(JSERR_BadSuperReference);
    //
    // $NoErr:
    //     instance = ((RecyclableObject*)instance)->GetPrototype();
    //     if (instance == nullptr) goto $Done;
    //
    //     if (!VarIs<RecyclableObject>(instance)) goto $Done
    //
    // MOV dst, instance
    // $Done:
    Func *func = instr->m_func;
    IR::Opnd *src1Opnd = instr->UnlinkSrc1();
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *labelErr = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *labelNoErr = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::Opnd *opndUndefAddress = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
    IR::RegOpnd *instanceRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd *typeIdRegOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::Opnd *dstOpnd = instr->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    // dst = undefined: this is the result for every early exit to $Done.
    Lowerer::InsertMove(dstOpnd, opndUndefAddress, instr);
    Lowerer::InsertMove(instanceRegOpnd, src1Opnd, instr);
    // A null home object leaves dst as undefined.
    InsertTestBranch(instanceRegOpnd, instanceRegOpnd, Js::OpCode::BrEq_A, labelDone, instr);
    this->GenerateRecyclableObjectIsElse(instr, instanceRegOpnd, labelDone);
    // Load the type id to detect the JS null/undefined objects, which are
    // invalid super references.
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func);
    Lowerer::InsertMove(typeRegOpnd, indirOpnd, instr);
    indirOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyUint32, func);
    Lowerer::InsertMove(typeIdRegOpnd, indirOpnd, instr);
    InsertCompareBranch(typeIdRegOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Null, TyUint32, func, true), Js::OpCode::BrEq_A, labelErr, instr);
    InsertCompareBranch(typeIdRegOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Undefined, TyUint32, func, true), Js::OpCode::BrNeq_A, labelNoErr, instr);
    instr->InsertBefore(labelErr);
    this->GenerateRuntimeError(instr, JSERR_BadSuperReference, IR::HelperOp_RuntimeReferenceError);
    instr->InsertBefore(labelNoErr);
    // instance = instance->GetPrototype(); a nullptr or non-object prototype
    // leaves dst as undefined.
    this->GenerateRecyclableObjectGetPrototypeNullptrGoto(instr, instanceRegOpnd, labelDone);
    this->GenerateRecyclableObjectIsElse(instr, instanceRegOpnd, labelDone);
    Lowerer::InsertMove(dstOpnd, instanceRegOpnd, instr);
    instr->InsertBefore(labelDone);
    instr->Remove();
}
  22012. void
  22013. Lowerer::GenerateLdFuncObj(IR::Instr* instr)
  22014. {
  22015. // MOV dst, functionObject // functionObject through stack params or src1
  22016. IR::Opnd *dstOpnd = instr->GetDst();
  22017. IR::Opnd *functionObjOpnd = nullptr;
  22018. m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  22019. Lowerer::InsertMove(dstOpnd, functionObjOpnd, instr);
  22020. instr->Remove();
  22021. }
void
Lowerer::GenerateLdFuncObjProto(IR::Instr* instr)
{
    // Lowers LdFuncObjProto: dst = prototype of the function object in src1.
    // A nullptr prototype, or a prototype that is not a constructor, throws
    // JSERR_NotAConstructor.
    //
    // MOV instance, src1
    //
    // instance = ((RecyclableObject*)instance)->GetPrototype();
    // if (instance == nullptr) goto $ThrowTypeError;
    //
    // MOV dst, instance
    //
    // if (!JavascriptOperators::IsConstructor(instance))
    //     goto $ThrowTypeError;
    // else
    //     goto $Done;
    //
    // $helperLabelThrowTypeError:
    //     ThrowRuntimeTypeError(JSERR_NotAConstructor);
    //
    // $Done:
    Func *func = instr->m_func;
    IR::Opnd *src1Opnd = instr->UnlinkSrc1();
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *helperLabelThrowTypeError = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::RegOpnd *instanceRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::Opnd *dstOpnd = instr->GetDst();
    Lowerer::InsertMove(instanceRegOpnd, src1Opnd, instr);
    this->GenerateRecyclableObjectGetPrototypeNullptrGoto(instr, instanceRegOpnd, helperLabelThrowTypeError);
    // dst is written before the IsConstructor check; on the error path the
    // throw prevents the value from being observed.
    Lowerer::InsertMove(dstOpnd, instanceRegOpnd, instr);
    this->GenerateJavascriptOperatorsIsConstructorGotoElse(instr, instanceRegOpnd, labelDone, helperLabelThrowTypeError);
    instr->InsertBefore(helperLabelThrowTypeError);
    this->GenerateRuntimeError(instr, JSERR_NotAConstructor, IR::HelperOp_RuntimeTypeError);
    instr->InsertBefore(labelDone);
    instr->Remove();
}
void
Lowerer::GenerateLoadNewTarget(IR::Instr* instrInsert)
{
    // Lowers LdNewTarget for a non-inlined frame:
    //   - coroutines (generators/async): new.target is simply undefined here;
    //   - CallFlags_NewTarget set: new.target was pushed as an extra trailing
    //     argument, load it from the stack;
    //   - CallFlags_New set (plain 'new f()'): new.target is the function itself;
    //   - otherwise: undefined.
    Func *func = instrInsert->m_func;
    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr * labelLoadArgNewTarget = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::Opnd* opndUndefAddress = this->LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndefined);
    Assert(!func->IsInlinee());
    if (func->GetJITFunctionBody()->IsCoroutine())
    {
        // Coroutine frames: produce undefined via a simple assign.
        instrInsert->SetSrc1(opndUndefAddress);
        LowererMD::ChangeToAssign(instrInsert);
        return;
    }
    // MOV dst, undefined                       // dst = undefined
    // MOV s1, callInfo                         // s1 = callInfo
    // TEST s1, Js::CallFlags_NewTarget << 24   // if (callInfo.Flags & Js::CallFlags_NewTarget)
    // JNE $LoadLastArgument                    //     goto $LoadLastArgument
    // TEST s1, Js::CallFlags_New << 24         // if (!(callInfo.Flags & Js::CallFlags_New))
    // JE $Done                                 //     goto $Done
    // MOV dst, functionObject                  // dst = functionObject
    // JMP $Done                                // goto $Done
    // $LoadLastArgument
    // AND s1, s1, (0x00FFFFFF)                 // s2 = callInfo.Count == arguments.length + 2
    // MOV dst, [ebp + (s1 - 1) * sizeof(Var) + formalParamOffset * sizeof(Var) ] // points to new.target
    // $Done
    IR::Opnd *dstOpnd = instrInsert->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    Lowerer::InsertMove(dstOpnd, opndUndefAddress, instrInsert);
    IR::SymOpnd *callInfoOpnd = Lowerer::LoadCallInfo(instrInsert);
    // The call flags live above the 24-bit count field of CallInfo, hence the
    // << ksizeofCount shifts below.
    Assert(Js::CallInfo::ksizeofCount == 24);
    IR::RegOpnd *s1 = IR::RegOpnd::New(TyUint32, func);
    Lowerer::InsertMove(s1, callInfoOpnd, instrInsert);
    InsertTestBranch(s1, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_NewTarget << Js::CallInfo::ksizeofCount, TyUint32, func, true), Js::OpCode::BrNeq_A, labelLoadArgNewTarget, instrInsert);
    InsertTestBranch(s1, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_New << Js::CallInfo::ksizeofCount, TyUint32, func, true), Js::OpCode::BrEq_A, labelDone, instrInsert);
    // Placeholder instruction; LoadFuncExpression supplies the actual
    // function-object load into dst (the AND opcode here appears to be a
    // placeholder that gets replaced — TODO confirm).
    IR::Instr* loadFuncInstr = IR::Instr::New(Js::OpCode::AND, func);
    loadFuncInstr->SetDst(instrInsert->GetDst());
    LoadFuncExpression(loadFuncInstr);
    instrInsert->InsertBefore(loadFuncInstr);
    InsertBranch(Js::OpCode::Br, labelDone, instrInsert);
    instrInsert->InsertBefore(labelLoadArgNewTarget);
    // Mask off the flags byte to recover the argument count.
    InsertAnd(s1, s1, IR::IntConstOpnd::New(0x00FFFFFF, TyUint32, func, true), instrInsert); // callInfo.Count
    // [formalOffset (4) + callInfo.Count] points to 'new.target' - see diagram in GenerateLoadStackArgumentByIndex()
    GenerateLoadStackArgumentByIndex(dstOpnd, s1, instrInsert, 0, m_func);
    instrInsert->InsertBefore(labelDone);
    instrInsert->Remove();
}
void
Lowerer::GenerateGetCurrentFunctionObject(IR::Instr * instr)
{
    // After 'instr' has produced the current function object, substitute its
    // boxed counterpart when the object is a StackScriptFunction that has been
    // boxed: if the vtable matches VtableStackScriptFunction and the boxed
    // pointer is non-null, replace the register's value with the boxed object.
    Func * func = this->m_func;
    IR::Instr * insertBeforeInstr = instr->m_next;
    IR::RegOpnd * functionObjectOpnd = instr->GetDst()->AsRegOpnd();
    IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableStackScriptFunction);
    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    // Not a stack function => nothing to do.
    IR::BranchInstr *branchInstr = InsertCompareBranch(IR::IndirOpnd::New(functionObjectOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
        Js::OpCode::BrNeq_A, true, labelDone, insertBeforeInstr);
    InsertObjectPoison(functionObjectOpnd, branchInstr, insertBeforeInstr, false);
    IR::RegOpnd * boxedFunctionObjectOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(boxedFunctionObjectOpnd, IR::IndirOpnd::New(functionObjectOpnd,
        Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func), insertBeforeInstr);
    // No boxed copy exists => keep the stack function object.
    InsertTestBranch(boxedFunctionObjectOpnd, boxedFunctionObjectOpnd, Js::OpCode::BrEq_A, true, labelDone, insertBeforeInstr);
    InsertMove(functionObjectOpnd, boxedFunctionObjectOpnd, insertBeforeInstr);
    insertBeforeInstr->InsertBefore(labelDone);
}
IR::Opnd *
Lowerer::GetInlineCacheFromFuncObjectForRuntimeUse(IR::Instr * instr, IR::PropertySymOpnd * propSymOpnd, bool isHelper)
{
    // Returns an opnd holding the inline cache to use at runtime for this
    // property access: if the current function object carries its own inline
    // caches, pick inlineCaches[propSymOpnd->m_inlineCacheIndex]; otherwise
    // fall back to the cache recorded on the PropertySymOpnd.
    //
    // MOV s1, [ebp + 8]                      //s1 = function object
    // MOV s2, [s1 + offset(hasInlineCaches)]
    // TEST s2, s2
    // JE $L1
    // MOV s3, [s1 + offset(m_inlineCaches)]  //s3 = inlineCaches from function object
    // MOV s4, [s3 + index*scale]             //s4 = inlineCaches[index]
    // JMP $L2
    // $L1
    // MOV s3, propSym->m_runtimeCache
    // $L2
    byte indirScale = this->m_lowererMD.GetDefaultIndirScale();
    IR::RegOpnd * funcObjOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::Instr * funcObjInstr = IR::Instr::New(Js::OpCode::Ld_A, funcObjOpnd, instr->m_func);
    instr->InsertBefore(funcObjInstr);
    LoadFuncExpression(funcObjInstr);
    IR::RegOpnd * funcObjHasInlineCachesOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    this->InsertMove(funcObjHasInlineCachesOpnd, IR::IndirOpnd::New(funcObjOpnd, Js::ScriptFunction::GetOffsetOfHasInlineCaches(), TyUint8, instr->m_func), instr);
    IR::LabelInstr * inlineCachesNullLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, isHelper);
    InsertTestBranch(funcObjHasInlineCachesOpnd, funcObjHasInlineCachesOpnd, Js::OpCode::BrEq_A, inlineCachesNullLabel, instr);
    IR::RegOpnd * inlineCachesOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    Lowerer::InsertMove(inlineCachesOpnd, IR::IndirOpnd::New(funcObjOpnd, Js::ScriptFunctionWithInlineCache::GetOffsetOfInlineCaches(), TyMachPtr, instr->m_func), instr);
    IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    IR::RegOpnd * indexOpnd = IR::RegOpnd::New(TyMachReg, instr->m_func);
    int inlineCacheOffset;
    // Use a constant byte offset when index * sizeof(InlineCache*) fits in an
    // int32 (Int32Math::Mul appears to report overflow via its return value —
    // hence the negation); otherwise address with a scaled index register.
    if (!Int32Math::Mul(sizeof(Js::InlineCache *), propSymOpnd->m_inlineCacheIndex, &inlineCacheOffset))
    {
        Lowerer::InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCachesOpnd, inlineCacheOffset, TyMachPtr, instr->m_func), instr);
    }
    else
    {
        Lowerer::InsertMove(indexOpnd, IR::IntConstOpnd::New(propSymOpnd->m_inlineCacheIndex, TyUint32, instr->m_func), instr);
        Lowerer::InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCachesOpnd, indexOpnd, indirScale, TyMachPtr, instr->m_func), instr);
    }
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, isHelper);
    InsertBranch(LowererMD::MDUncondBranchOpcode, continueLabel, instr);
    // Fallback path: the function object has no caches; use the JIT-time cache.
    IR::Instr * ldCacheFromPropSymOpndInstr = this->InsertMove(inlineCacheOpnd, IR::AddrOpnd::New(propSymOpnd->m_runtimeInlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func), instr);
    ldCacheFromPropSymOpndInstr->InsertBefore(inlineCachesNullLabel);
    ldCacheFromPropSymOpndInstr->InsertAfter(continueLabel);
    return inlineCacheOpnd;
}
  22164. IR::Instr *
  22165. Lowerer::LowerInitClass(IR::Instr * instr)
  22166. {
  22167. // scriptContext
  22168. IR::Instr * prevInstr = LoadScriptContext(instr);
  22169. // extends
  22170. if (instr->GetSrc2() != nullptr)
  22171. {
  22172. IR::Opnd * extendsOpnd = instr->UnlinkSrc2();
  22173. m_lowererMD.LoadHelperArgument(instr, extendsOpnd);
  22174. }
  22175. else
  22176. {
  22177. IR::AddrOpnd* extendsOpnd = IR::AddrOpnd::NewNull(this->m_func);
  22178. m_lowererMD.LoadHelperArgument(instr, extendsOpnd);
  22179. }
  22180. // constructor
  22181. IR::Opnd * ctorOpnd = instr->UnlinkSrc1();
  22182. m_lowererMD.LoadHelperArgument(instr, ctorOpnd);
  22183. // call
  22184. m_lowererMD.ChangeToHelperCall(instr, IR::HelperOP_InitClass);
  22185. return prevInstr;
  22186. }
void
Lowerer::LowerNewConcatStrMulti(IR::Instr * instr)
{
    // Allocates a ConcatStringMulti with 'count' slots and initializes its
    // header fields (vtable, type, lazy psz value, char length, slot count).
    // The slots themselves are filled by lowered SetConcatStrMultiItem stores.
    IR::IntConstOpnd * countOpnd = instr->UnlinkSrc1()->AsIntConstOpnd();
    IR::RegOpnd * dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    uint8 count = (uint8)countOpnd->GetValue();
    Assert(dstOpnd->GetValueType().IsString());
    GenerateRecyclerAlloc(IR::HelperAllocMemForConcatStringMulti, Js::ConcatStringMulti::GetAllocSize(count), dstOpnd, instr);
    GenerateRecyclerMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, VTableValue::VtableConcatStringMulti), instr);
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfType(),
        this->LoadLibraryValueOpnd(instr, LibraryValue::ValueStringTypeStatic), instr);
    // The flattened string pointer starts out null.
    GenerateRecyclerMemInitNull(dstOpnd, Js::ConcatStringMulti::GetOffsetOfpszValue(), instr);
    // charLength starts at 0; LowerSetConcatStrMultiItem accumulates into it.
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), 0, instr);
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfSlotCount(), countOpnd->AsUint32(), instr);
    instr->Remove();
}
void
Lowerer::LowerNewConcatStrMultiBE(IR::Instr * instr)
{
    // Lower
    // t1 = SetConcatStrMultiBE s1
    // t2 = SetConcatStrMultiBE s2, t1
    // t3 = SetConcatStrMultiBE s3, t2
    // s  = NewConcatStrMultiBE 3, t3
    // to
    // s = new concat string
    // s+0 = s1
    // s+1 = s2
    // s+2 = s3
    //
    // Walks the SetConcatStrMultiItemBE def chain backwards through src2 links
    // (highest slot index first) and emits one SetConcatStrMultiItem store per
    // item after 'instr', then lowers 'instr' itself into the allocation.
    Assert(instr->GetSrc1()->IsConstOpnd());
    Assert(instr->GetDst()->IsRegOpnd());
    IR::RegOpnd * newString = instr->GetDst()->AsRegOpnd();
    IR::Opnd * newConcatItemOpnd = nullptr;
    uint index = instr->GetSrc1()->AsIntConstOpnd()->AsUint32() - 1;
    IR::Instr * concatItemInstr = nullptr;
    IR::Opnd * linkOpnd = instr->GetSrc2();
    while (linkOpnd)
    {
        Assert(linkOpnd->IsRegOpnd());
        concatItemInstr = linkOpnd->GetStackSym()->GetInstrDef();
        Assert(concatItemInstr->m_opcode == Js::OpCode::SetConcatStrMultiItemBE);
        IR::Opnd * concatItemOpnd = concatItemInstr->GetSrc1();
        Assert(concatItemOpnd->IsRegOpnd());
        // If one of the concat items is equal to the dst of the concat expressions (s = s + a + b),
        // hoist the load of that item to before the setting of the new string to the dst.
        if (concatItemOpnd->IsEqual(newString))
        {
            if (!newConcatItemOpnd)
            {
                IR::Instr * hoistSrcInstr = concatItemInstr->HoistSrc1(Js::OpCode::Ld_A);
                newConcatItemOpnd = hoistSrcInstr->GetDst();
            }
            concatItemOpnd = newConcatItemOpnd;
        }
        else
        {
            // If only some of the SetConcatStrMultiItemBE instructions were CSE'd and the rest, along with the NewConcatStrMultiBE
            // instruction, were in a loop, the strings on the CSE'd Set*BE instructions will become live on back edge. Add them to
            // addToLiveOnBackEdgeSyms here and clear when we reach the Set*BE instruction.
            // Note that we are doing this only for string opnds which are not the same as the dst of the concat expression. Reasoning
            // behind this is that if a loop has a concat expression with one of its sources same as the dst, the Set*BE instruction
            // for the dst wouldn't have been CSE'd as the dst's value is changing in the loop and the backward pass should have set the
            // symbol as live on backedge.
            this->addToLiveOnBackEdgeSyms->Set(concatItemOpnd->GetStackSym()->m_id);
        }
        // Store this item into slot 'index'; the indir offset holds the slot
        // index until LowerSetConcatStrMultiItem converts it to a byte offset.
        IR::Instr * newConcatItemInstr = IR::Instr::New(Js::OpCode::SetConcatStrMultiItem,
            IR::IndirOpnd::New(newString, index, TyVar, instr->m_func),
            concatItemOpnd,
            instr->m_func);
        instr->InsertAfter(newConcatItemInstr);
        this->LowerSetConcatStrMultiItem(newConcatItemInstr);
        linkOpnd = concatItemInstr->GetSrc2();
        index--;
    }
    // 'index' is unsigned, so after processing slot 0 it wraps; comparing
    // against -1 (converted to the same unsigned value) checks we consumed
    // exactly 'count' items.
    Assert(index == -1);
    this->LowerNewConcatStrMulti(instr);
}
void
Lowerer::LowerSetConcatStrMultiItem(IR::Instr * instr)
{
    // Stores one string into a ConcatStringMulti slot:
    //   - routes CompoundString sources through a helper so the stored string
    //     is safe to keep (see GenerateGetImmutableOrScriptUnreferencedString)
    //   - adds the source length into the concat string's charLength, calling
    //     the OutOfMemoryError helper if the 32-bit addition overflows
    //   - converts the indir offset from a slot index to a byte offset and
    //     turns the instruction into a write-barrier store.
    Func * func = this->m_func;
    IR::IndirOpnd * dstOpnd = instr->GetDst()->AsIndirOpnd();
    IR::RegOpnd * concatStrOpnd = dstOpnd->GetBaseOpnd();
    IR::RegOpnd * srcOpnd = instr->UnlinkSrc1()->AsRegOpnd();
    Assert(concatStrOpnd->GetValueType().IsString());
    Assert(srcOpnd->GetValueType().IsString());
    srcOpnd = GenerateGetImmutableOrScriptUnreferencedString(srcOpnd, instr, IR::HelperOp_CompoundStringCloneForConcat);
    instr->SetSrc1(srcOpnd);
    IR::IndirOpnd * dstLength = IR::IndirOpnd::New(concatStrOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), TyUint32, func);
    IR::Opnd * srcLength;
    if (srcOpnd->m_sym->m_isStrConst)
    {
        // Constant string: its length is known at JIT time.
        srcLength = IR::IntConstOpnd::New(JITJavascriptString::FromVar(srcOpnd->m_sym->GetConstAddress(true))->GetLength(), TyUint32, func);
    }
    else
    {
        srcLength = IR::RegOpnd::New(TyUint32, func);
        InsertMove(srcLength, IR::IndirOpnd::New(srcOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), TyUint32, func), instr);
    }
    // charLength += srcLength; overflow branches to the OOM helper call below.
    IR::Instr *onOverflowInsertBeforeInstr;
    InsertAddWithOverflowCheck(false, dstLength, dstLength, srcLength, instr, &onOverflowInsertBeforeInstr);
    IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperOp_OutOfMemoryError, func));
    instr->InsertBefore(onOverflowInsertBeforeInstr);
    onOverflowInsertBeforeInstr->InsertBefore(callInstr);
    this->m_lowererMD.LowerCall(callInstr, 0);
    // The indir's offset currently holds the slot index; convert it into a
    // byte offset within the slots array.
    dstOpnd->SetOffset(dstOpnd->GetOffset() * sizeof(Js::JavascriptString *) + Js::ConcatStringMulti::GetOffsetOfSlots());
    LowererMD::ChangeToWriteBarrierAssign(instr, func);
}
IR::RegOpnd *
Lowerer::GenerateGetImmutableOrScriptUnreferencedString(IR::RegOpnd * strOpnd, IR::Instr * insertBeforeInstr, IR::JnHelperMethod helperMethod, bool reloadDst)
{
    // Ensures the string in strOpnd is safe to store: values whose vtable is
    // CompoundString are passed through 'helperMethod' (e.g. clone-for-concat);
    // everything else is used as-is. Returns the opnd holding the result; when
    // reloadDst is true the result lives in a fresh register so the original
    // opnd is left untouched.
    if (strOpnd->m_sym->m_isStrConst)
    {
        // Constant strings are already immutable.
        return strOpnd;
    }
    Func * const func = this->m_func;
    IR::RegOpnd *dstOpnd = reloadDst == true ? IR::RegOpnd::New(TyVar, func) : strOpnd;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    if (!strOpnd->IsNotTaggedValue())
    {
        // Tagged (non-object) values cannot have the CompoundString vtable;
        // jump straight to done.
        this->m_lowererMD.GenerateObjectTest(strOpnd, insertBeforeInstr, doneLabel);
    }
    // CMP [strOpnd], Js::CompoundString::`vtable'
    // JEQ $helper
    InsertCompareBranch(
        IR::IndirOpnd::New(strOpnd, 0, TyMachPtr, func),
        this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
        Js::OpCode::BrEq_A,
        helperLabel,
        insertBeforeInstr);
    if (reloadDst)
    {
        InsertMove(dstOpnd, strOpnd, insertBeforeInstr);
    }
    InsertBranch(Js::OpCode::Br, doneLabel, insertBeforeInstr);
    insertBeforeInstr->InsertBefore(helperLabel);
    // Helper path: dst = helperMethod(strOpnd)
    this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, strOpnd);
    IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, dstOpnd, func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
    insertBeforeInstr->InsertBefore(callInstr);
    this->m_lowererMD.LowerCall(callInstr, 0);
    insertBeforeInstr->InsertBefore(doneLabel);
    return dstOpnd;
}
void
Lowerer::LowerConvStrCommon(IR::JnHelperMethod helper, IR::Instr * instr)
{
    // Shared lowering for the string-conversion opcodes: when src1 may already
    // be a string, emit a fast path that simply moves it to dst; otherwise (or
    // when the fast-path test fails) call 'helper'(scriptContext, src1[, src2]).
    IR::RegOpnd * src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
    if (!src1Opnd->GetValueType().IsNotString())
    {
        IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        // Fast path: already a string => dst = src1 and skip the helper.
        this->GenerateStringTest(src1Opnd, instr, helperLabel);
        InsertMove(instr->GetDst(), src1Opnd, instr);
        InsertBranch(Js::OpCode::Br, doneLabel, instr);
        instr->InsertBefore(helperLabel);
        instr->InsertAfter(doneLabel);
    }
    // Helper arguments are pushed in reverse order.
    if (instr->GetSrc2())
    {
        this->m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
    }
    this->LoadScriptContext(instr);
    this->m_lowererMD.LoadHelperArgument(instr, src1Opnd);
    this->m_lowererMD.ChangeToHelperCall(instr, helper);
}
  22355. void
  22356. Lowerer::LowerConvStr(IR::Instr * instr)
  22357. {
  22358. LowerConvStrCommon(IR::HelperOp_ConvString, instr);
  22359. }
  22360. void
  22361. Lowerer::LowerCoerseStr(IR::Instr* instr)
  22362. {
  22363. LowerConvStrCommon(IR::HelperOp_CoerseString, instr);
  22364. }
  22365. ///----------------------------------------------------------------------------
  22366. ///
  22367. /// Lowerer::LowerCoerseStrOrRegex - This method is used for String.Replace(arg1, arg2)
///                                   where arg1 is a regex or a string
///                                   if arg1 is not a regex, then do String.Replace(CoerseStr(arg1), arg2);
  22370. ///
  22371. /// CoerseStrOrRegex arg1
  22372. ///
  22373. /// if (value == regex) goto :done
  22374. /// else
  22375. ///helper:
  22376. /// ConvStr value
  22377. ///done:
  22378. ///----------------------------------------------------------------------------
void
Lowerer::LowerCoerseStrOrRegex(IR::Instr* instr)
{
    // If src1 is a JavascriptRegExp (vtable match), pass it through unchanged;
    // otherwise fall into the ConvStr lowering so the value is converted to a
    // string. See the banner comment above for the String.Replace use case.
    IR::RegOpnd * src1Opnd = instr->GetSrc1()->AsRegOpnd();
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    // if (value == regex) goto :done
    if (!src1Opnd->IsNotTaggedValue())
    {
        // Tagged values cannot be regex objects; go convert them.
        this->m_lowererMD.GenerateObjectTest(src1Opnd, instr, helperLabel);
    }
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    InsertCompareBranch(IR::IndirOpnd::New(src1Opnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);
    InsertMove(instr->GetDst(), src1Opnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
    // helper: ConvStr value
    LowerConvStr(instr);
}
  22400. ///----------------------------------------------------------------------------
  22401. ///
  22402. /// Lowerer::LowerCoerseRegex - This method is used for String.Match(arg1)
  22403. /// if arg1 is regex, then pass CreateRegEx(arg1) to String.Match
  22404. ///
  22405. ///----------------------------------------------------------------------------
void
Lowerer::LowerCoerseRegex(IR::Instr* instr)
{
    // If src1 is already a JavascriptRegExp (vtable match), pass it through;
    // otherwise call the Op_CoerseRegex helper to build a regex from it.
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd * src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
    if (!src1Opnd->IsNotTaggedValue())
    {
        // Tagged values cannot be regex objects; go to the helper.
        this->m_lowererMD.GenerateObjectTest(src1Opnd, instr, helperLabel);
    }
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    InsertCompareBranch(IR::IndirOpnd::New(src1Opnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);
    InsertMove(instr->GetDst(), src1Opnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
    // Helper path: dst = Op_CoerseRegex(src1, nullptr /* option */, scriptContext)
    this->LoadScriptContext(instr);
    this->m_lowererMD.LoadHelperArgument(instr, IR::AddrOpnd::NewNull(instr->m_func)); // option
    this->m_lowererMD.LoadHelperArgument(instr, src1Opnd); // regex
    this->m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_CoerseRegex);
}
  22428. void
  22429. Lowerer::LowerConvPrimStr(IR::Instr * instr)
  22430. {
  22431. LowerConvStrCommon(IR::HelperOp_ConvPrimitiveString, instr);
  22432. }
  22433. void
  22434. Lowerer::GenerateRecyclerAlloc(IR::JnHelperMethod allocHelper, size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, bool inOpHelper)
  22435. {
  22436. size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);
  22437. this->GenerateRecyclerAllocAligned(allocHelper, alignedSize, newObjDst, insertionPointInstr, inOpHelper);
  22438. }
  22439. void
  22440. Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, int32 value, IR::Instr * insertBeforeInstr, bool isZeroed)
  22441. {
  22442. IRType type = TyInt32;
  22443. if (isZeroed)
  22444. {
  22445. if (value == 0)
  22446. {
  22447. // Recycler memory are zero initialized
  22448. return;
  22449. }
  22450. if (value > 0 && value <= USHORT_MAX)
  22451. {
  22452. // Recycler memory are zero initialized, so we can just initialize the 8 or 16 bits of value
  22453. type = (value <= UCHAR_MAX)? TyUint8 : TyUint16;
  22454. }
  22455. }
  22456. Func * func = this->m_func;
  22457. InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
  22458. }
  22459. void
  22460. Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, uint32 value, IR::Instr * insertBeforeInstr, bool isZeroed)
  22461. {
  22462. IRType type = TyUint32;
  22463. if (isZeroed)
  22464. {
  22465. if (value == 0)
  22466. {
  22467. // Recycler memory are zero initialized
  22468. return;
  22469. }
  22470. if (value <= USHORT_MAX)
  22471. {
  22472. // Recycler memory are zero initialized, so we can just initialize the 8 or 16 bits of value
  22473. type = (value <= UCHAR_MAX)? TyUint8 : TyUint16;
  22474. }
  22475. }
  22476. Func * func = this->m_func;
  22477. InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
  22478. }
  22479. void
  22480. Lowerer::GenerateMemInitNull(IR::RegOpnd * opnd, int32 offset, IR::Instr * insertBeforeInstr, bool isZeroed)
  22481. {
  22482. if (isZeroed)
  22483. {
  22484. return;
  22485. }
  22486. GenerateMemInit(opnd, offset, IR::AddrOpnd::NewNull(m_func), insertBeforeInstr);
  22487. }
void
Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, IR::Opnd * value, IR::Instr * insertBeforeInstr, bool isZeroed)
{
    // Store an arbitrary operand into [opnd + offset], using the operand's own
    // type as the store width. Note: isZeroed is unused in this overload — the
    // value is not a compile-time constant, so the store can be neither elided
    // nor narrowed.
    IRType type = value->GetType();
    Func * func = this->m_func;
    InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), value, insertBeforeInstr);
}
void
Lowerer::GenerateMemInit(IR::RegOpnd * opnd, IR::RegOpnd * offset, IR::Opnd * value, IR::Instr * insertBeforeInstr, bool isZeroed)
{
    // Store an arbitrary operand into [opnd + offset] with a register offset,
    // using the operand's own type as the store width. Note: isZeroed is
    // unused in this overload — the value is not a compile-time constant, so
    // the store can be neither elided nor narrowed.
    IRType type = value->GetType();
    Func * func = this->m_func;
    InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), value, insertBeforeInstr);
}
  22502. void
  22503. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, int32 value, IR::Instr * insertBeforeInstr)
  22504. {
  22505. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  22506. }
  22507. void
  22508. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, uint32 value, IR::Instr * insertBeforeInstr)
  22509. {
  22510. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  22511. }
  22512. void
  22513. Lowerer::GenerateRecyclerMemInitNull(IR::RegOpnd * opnd, int32 offset, IR::Instr * insertBeforeInstr)
  22514. {
  22515. GenerateMemInitNull(opnd, offset, insertBeforeInstr, true);
  22516. }
  22517. void
  22518. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, IR::Opnd * value, IR::Instr * insertBeforeInstr)
  22519. {
  22520. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  22521. }
  22522. void
  22523. Lowerer::GenerateMemCopy(IR::Opnd * dst, IR::Opnd * src, uint32 size, IR::Instr * insertBeforeInstr)
  22524. {
  22525. Func * func = this->m_func;
  22526. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, IR::IntConstOpnd::New(size, TyUint32, func));
  22527. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, src);
  22528. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, dst);
  22529. IR::Instr * memcpyInstr = IR::Instr::New(Js::OpCode::Call, func);
  22530. memcpyInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperMemCpy, func));
  22531. insertBeforeInstr->InsertBefore(memcpyInstr);
  22532. m_lowererMD.LowerCall(memcpyInstr, 3);
  22533. }
bool
Lowerer::GenerateSimplifiedInt4Rem(
    IR::Instr *const remInstr,
    IR::LabelInstr *const skipBailOutLabel) const
{
    // Emits a fast path for integer remainder when the divisor is a positive
    // power of two — either a compile-time constant, or speculated via the
    // BailOnModByPowerOf2 bailout kind with a runtime power-of-two check.
    // Non-negative dividends then reduce to a single AND with (divisor - 1).
    // Returns true when the fast path was generated; the original instruction
    // remains in place as the slow path.
    Assert(remInstr);
    Assert(remInstr->m_opcode == Js::OpCode::Rem_I4 || remInstr->m_opcode == Js::OpCode::RemU_I4);
    auto *dst = remInstr->GetDst(), *src1 = remInstr->GetSrc1(), *src2 = remInstr->GetSrc2();
    Assert(src1 && src2);
    Assert(dst->IsRegOpnd());
    bool isModByPowerOf2 = (remInstr->HasBailOutInfo() && remInstr->GetBailOutKind() == IR::BailOnModByPowerOf2);
    if (PHASE_OFF(Js::Phase::MathFastPathPhase, remInstr->m_func->GetTopFunc()) && !isModByPowerOf2)
        return false;
    // Fast path only applies to a constant power-of-two divisor, or to the
    // speculative mod-by-power-of-2 bailout form.
    if (!(src2->IsIntConstOpnd() && Math::IsPow2(src2->AsIntConstOpnd()->AsInt32())) && !isModByPowerOf2)
    {
        return false;
    }
    // We have:
    //     s3 = s1 % s2 , where s2 = +2^i
    //
    // Generate:
    //     test s1, s1
    //     js   $slowPathLabel
    //     s3 = and s1, 0x00..fff (2^i - 1)
    //     jmp  $doneLabel
    // $slowPathLabel:
    //     (Slow path)
    //     (Neg zero check)
    //     (Bailout code)
    // $doneLabel:
    IR::LabelInstr *doneLabel = skipBailOutLabel, *slowPathLabel;
    if (!doneLabel)
    {
        doneLabel = IR::LabelInstr::New(Js::OpCode::Label, remInstr->m_func);
        remInstr->InsertAfter(doneLabel);
    }
    slowPathLabel = IR::LabelInstr::New(Js::OpCode::Label, remInstr->m_func, isModByPowerOf2);
    remInstr->InsertBefore(slowPathLabel);
    // Negative dividends take the slow path (AND would give the wrong sign).
    // test s1, s1
    InsertTest(src1, src1, slowPathLabel);
    // jsb $slowPathLabel
    InsertBranch(LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), slowPathLabel, slowPathLabel);
    // s3 = and s1, 0x00..fff (2^i - 1)
    IR::Opnd* maskOpnd;
    if(isModByPowerOf2)
    {
        Assert(isModByPowerOf2);
        // Divisor is not a compile-time constant: verify at runtime that it is
        // a positive power of two (s2 > 0 and s2 & (s2 - 1) == 0), computing
        // the mask (s2 - 1) along the way; otherwise take the slow path.
        maskOpnd = IR::RegOpnd::New(TyInt32, remInstr->m_func);
        // mov maskOpnd, s2
        InsertMove(maskOpnd, src2, slowPathLabel);
        // dec maskOpnd
        InsertSub(/*needFlags*/ true, maskOpnd, maskOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func, /*dontEncode*/true), slowPathLabel);
        // maskOpnd < 0 goto $slowPath
        InsertBranch(LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), slowPathLabel, slowPathLabel);
        // TEST src2, maskOpnd
        InsertTestBranch(src2, maskOpnd, Js::OpCode::BrNeq_A, slowPathLabel, slowPathLabel);
    }
    else
    {
        Assert(src2->IsIntConstOpnd());
        int32 mask = src2->AsIntConstOpnd()->AsInt32() - 1;
        maskOpnd = IR::IntConstOpnd::New(mask, TyInt32, remInstr->m_func);
    }
    // dst = src1 & maskOpnd
    InsertAnd(dst, src1, maskOpnd, slowPathLabel);
    // jmp $doneLabel
    InsertBranch(Js::OpCode::Br, doneLabel, slowPathLabel);
    return true;
}
#if DBG
// Debug-only sanity check run after lowering: returns true when instr's opcode
// is still legitimate in the post-lower IR. Machine-dependent opcodes (above
// MDStart) are always valid; the switch whitelists machine-independent opcodes
// that deliberately survive lowering, some only until a later phase
// (peeps / final lower) removes them.
bool
Lowerer::ValidOpcodeAfterLower(IR::Instr* instr, Func * func)
{
Js::OpCode opcode = instr->m_opcode;
if (opcode > Js::OpCode::MDStart)
{
// Machine-dependent opcode: always valid after lowering.
return true;
}
switch (opcode)
{
// Pseudo-ops that remain valid through the rest of the backend.
case Js::OpCode::Ret:
case Js::OpCode::Label:
case Js::OpCode::StatementBoundary:
case Js::OpCode::DeletedNonHelperBranch:
case Js::OpCode::FunctionEntry:
case Js::OpCode::FunctionExit:
case Js::OpCode::TryCatch:
case Js::OpCode::TryFinally:
case Js::OpCode::Catch:
case Js::OpCode::GeneratorResumeJumpTable:
case Js::OpCode::Break:
#ifdef _M_X64
case Js::OpCode::PrologStart:
case Js::OpCode::PrologEnd:
#endif
#ifdef _M_IX86
case Js::OpCode::BailOutStackRestore:
#endif
return true;
// Only valid once register allocation has run.
case Js::OpCode::RestoreOutParam:
Assert(func->isPostRegAlloc);
return true;
// These may be removed by peep
case Js::OpCode::StartCall:
case Js::OpCode::LoweredStartCall:
case Js::OpCode::Nop:
case Js::OpCode::ArgOut_A_InlineBuiltIn:
return func && !func->isPostPeeps;
// Kept only while inline-args optimization bookkeeping is live.
case Js::OpCode::InlineeStart:
case Js::OpCode::InlineeEnd:
return instr->m_func->m_hasInlineArgsOpt;
#ifdef _M_X64
case Js::OpCode::LdArgSize:
case Js::OpCode::LdSpillSize:
return func && !func->isPostFinalLower;
#endif
case Js::OpCode::Leave:
Assert(!func->IsLoopBodyInTry());
Assert(func->HasTry() && func->DoOptimizeTry());
return func && !func->isPostFinalLower; //Lowered in FinalLower phase
case Js::OpCode::LazyBailOutThunkLabel:
return func && func->HasLazyBailOut() && func->isPostFinalLower; //Lowered in FinalLower phase
};
// Anything else should have been lowered away by now.
return false;
}
#endif
  22660. void Lowerer::LowerProfiledBeginSwitch(IR::JitProfilingInstr* instr)
  22661. {
  22662. Assert(instr->isBeginSwitch);
  22663. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  22664. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
  22665. m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
  22666. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleProfiledSwitch, m_func));
  22667. m_lowererMD.LowerCall(instr, 0);
  22668. }
  22669. void Lowerer::LowerProfiledBinaryOp(IR::JitProfilingInstr* instr, IR::JnHelperMethod meth)
  22670. {
  22671. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
  22672. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  22673. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
  22674. m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
  22675. instr->SetSrc1(IR::HelperCallOpnd::New(meth, m_func));
  22676. m_lowererMD.LowerCall(instr, 0);
  22677. }
  22678. void Lowerer::GenerateNullOutGeneratorFrame(IR::Instr* insertInstr)
  22679. {
  22680. // null out frame pointer on generator object to signal completion to JavascriptGenerator::CallGenerator
  22681. // s = MOV prm1
  22682. // s[offset of JavascriptGenerator::frame] = MOV nullptr
  22683. StackSym *symSrc = StackSym::NewImplicitParamSym(3, m_func);
  22684. m_func->SetArgOffset(symSrc, LowererMD::GetFormalParamOffset() * MachPtr);
  22685. IR::SymOpnd *srcOpnd = IR::SymOpnd::New(symSrc, TyMachPtr, m_func);
  22686. IR::RegOpnd *dstOpnd = IR::RegOpnd::New(TyMachReg, m_func);
  22687. InsertMove(dstOpnd, srcOpnd, insertInstr);
  22688. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(dstOpnd, Js::JavascriptGenerator::GetFrameOffset(), TyMachPtr, m_func);
  22689. IR::AddrOpnd *addrOpnd = IR::AddrOpnd::NewNull(m_func);
  22690. InsertMove(indirOpnd, addrOpnd, insertInstr);
  22691. }
  22692. void Lowerer::LowerFunctionExit(IR::Instr* funcExit)
  22693. {
  22694. if (m_func->GetJITFunctionBody()->IsCoroutine())
  22695. {
  22696. GenerateNullOutGeneratorFrame(funcExit->m_prev);
  22697. }
  22698. if (!m_func->DoSimpleJitDynamicProfile())
  22699. {
  22700. return;
  22701. }
  22702. IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
  22703. callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleCleanImplicitCallFlags, m_func));
  22704. funcExit->m_prev->InsertBefore(callInstr);
  22705. m_lowererMD.LoadHelperArgument(callInstr, CreateFunctionBodyOpnd(funcExit->m_func));
  22706. m_lowererMD.LowerCall(callInstr, 0);
  22707. }
  22708. void Lowerer::LowerFunctionEntry(IR::Instr* funcEntry)
  22709. {
  22710. Assert(funcEntry->m_opcode == Js::OpCode::FunctionEntry);
  22711. //Don't do a body call increment for loops or asm.js
  22712. if (m_func->IsLoopBody() || m_func->GetJITFunctionBody()->IsAsmJsMode())
  22713. {
  22714. return;
  22715. }
  22716. IR::Instr *const insertBeforeInstr = this->m_func->GetFunctionEntryInsertionPoint();
  22717. LowerFunctionBodyCallCountChange(insertBeforeInstr);
  22718. if (m_func->DoSimpleJitDynamicProfile())
  22719. {
  22720. // Only generate the argument profiling if the function expects to have some arguments to profile and only if
  22721. // it has implicit ArgIns (the latter is a restriction imposed by the Interpreter, so it is mirrored in SimpleJit)
  22722. if (m_func->GetJITFunctionBody()->GetInParamsCount() > 1 && m_func->GetJITFunctionBody()->HasImplicitArgIns())
  22723. {
  22724. // Call out to the argument profiling helper
  22725. IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
  22726. callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleProfileParameters, m_func));
  22727. insertBeforeInstr->InsertBefore(callInstr);
  22728. m_lowererMD.LoadHelperArgument(callInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
  22729. m_lowererMD.LowerCall(callInstr, 0);
  22730. }
  22731. // Clear existing ImplicitCallFlags
  22732. const auto starFlag = GetImplicitCallFlagsOpnd();
  22733. this->InsertMove(starFlag, CreateClearImplicitCallFlagsOpnd(), insertBeforeInstr);
  22734. }
  22735. }
  22736. void Lowerer::LowerFunctionBodyCallCountChange(IR::Instr *const insertBeforeInstr)
  22737. {
  22738. Assert(insertBeforeInstr);
  22739. Func *const func = insertBeforeInstr->m_func;
  22740. const bool isSimpleJit = func->IsSimpleJit();
  22741. if ((isSimpleJit && PHASE_OFF(Js::FullJitPhase, m_func)))
  22742. {
  22743. return;
  22744. }
  22745. // mov countAddress, <countAddress>
  22746. IR::RegOpnd *const countAddressOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, func), TyMachPtr, func);
  22747. const IR::AutoReuseOpnd autoReuseCountAddressOpnd(countAddressOpnd, func);
  22748. InsertMove(
  22749. countAddressOpnd,
  22750. IR::AddrOpnd::New((Js::Var)func->GetWorkItem()->GetCallsCountAddress(), IR::AddrOpndKindDynamicMisc, func, true),
  22751. insertBeforeInstr);
  22752. IR::IndirOpnd *const countOpnd = IR::IndirOpnd::New(countAddressOpnd, 0, TyUint32, func);
  22753. const IR::AutoReuseOpnd autoReuseCountOpnd(countOpnd, func);
  22754. if(!isSimpleJit)
  22755. {
  22756. InsertAdd(false, countOpnd, countOpnd, IR::IntConstOpnd::New(1, TyUint32, func), insertBeforeInstr);
  22757. return;
  22758. }
  22759. IR::Instr *onOverflowInsertBeforeInstr;
  22760. InsertDecUInt32PreventOverflow(
  22761. countOpnd,
  22762. countOpnd,
  22763. insertBeforeInstr,
  22764. &onOverflowInsertBeforeInstr);
  22765. // ($overflow:)
  22766. // TransitionFromSimpleJit(framePointer)
  22767. m_lowererMD.LoadHelperArgument(onOverflowInsertBeforeInstr, IR::Opnd::CreateFramePointerOpnd(func));
  22768. IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
  22769. callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperTransitionFromSimpleJit, func));
  22770. onOverflowInsertBeforeInstr->InsertBefore(callInstr);
  22771. m_lowererMD.LowerCall(callInstr, 0);
  22772. }
  22773. IR::Opnd*
  22774. Lowerer::GetImplicitCallFlagsOpnd()
  22775. {
  22776. return GetImplicitCallFlagsOpnd(m_func);
  22777. }
  22778. IR::Opnd*
  22779. Lowerer::GetImplicitCallFlagsOpnd(Func * func)
  22780. {
  22781. return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetImplicitCallFlagsAddr(), GetImplicitCallFlagsType(), func);
  22782. }
  22783. IR::Opnd*
  22784. Lowerer::CreateClearImplicitCallFlagsOpnd()
  22785. {
  22786. return IR::IntConstOpnd::New(Js::ImplicitCall_None, GetImplicitCallFlagsType(), m_func);
  22787. }
  22788. void
  22789. Lowerer::GenerateFlagInlineCacheCheckForGetterSetter(
  22790. IR::Instr * insertBeforeInstr,
  22791. IR::RegOpnd * opndInlineCache,
  22792. IR::LabelInstr * labelNext)
  22793. {
  22794. uint accessorFlagMask;
  22795. if (PHASE_OFF(Js::InlineGettersPhase, insertBeforeInstr->m_func))
  22796. {
  22797. accessorFlagMask = Js::InlineCache::GetSetterFlagMask();
  22798. }
  22799. else if (PHASE_OFF(Js::InlineSettersPhase, insertBeforeInstr->m_func))
  22800. {
  22801. accessorFlagMask = Js::InlineCache::GetGetterFlagMask();
  22802. }
  22803. else
  22804. {
  22805. accessorFlagMask = Js::InlineCache::GetGetterSetterFlagMask();
  22806. }
  22807. // Generate:
  22808. //
  22809. // TEST [&(inlineCache->u.accessor.flags)], Js::InlineCacheGetterFlag | Js::InlineCacheSetterFlag
  22810. // JEQ $next
  22811. IR::Opnd * flagsOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.rawUInt16), TyInt8, insertBeforeInstr->m_func);
  22812. IR::Opnd * accessorOpnd = IR::IntConstOpnd::New(accessorFlagMask, TyInt8, this->m_func);
  22813. InsertTestBranch(flagsOpnd, accessorOpnd, Js::OpCode::BrEq_A, labelNext, insertBeforeInstr);
  22814. }
  22815. IR::BranchInstr *
  22816. Lowerer::GenerateLocalInlineCacheCheck(
  22817. IR::Instr * instrLdSt,
  22818. IR::RegOpnd * opndType,
  22819. IR::RegOpnd * inlineCache,
  22820. IR::LabelInstr * labelNext,
  22821. bool checkTypeWithoutProperty)
  22822. {
  22823. // Generate:
  22824. //
  22825. // CMP s1, [&(inlineCache->u.local.type/typeWithoutProperty)]
  22826. // JNE $next
  22827. IR::Opnd* typeOpnd;
  22828. if (checkTypeWithoutProperty)
  22829. {
  22830. typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.typeWithoutProperty), TyMachReg, instrLdSt->m_func);
  22831. }
  22832. else
  22833. {
  22834. typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.type), TyMachReg, instrLdSt->m_func);
  22835. }
  22836. InsertCompare(opndType, typeOpnd, instrLdSt);
  22837. return InsertBranch(Js::OpCode::BrNeq_A, labelNext, instrLdSt);
  22838. }
  22839. IR::BranchInstr *
  22840. Lowerer::GenerateProtoInlineCacheCheck(
  22841. IR::Instr * instrLdSt,
  22842. IR::RegOpnd * opndType,
  22843. IR::RegOpnd * inlineCache,
  22844. IR::LabelInstr * labelNext)
  22845. {
  22846. // Generate:
  22847. //
  22848. // CMP s1, [&(inlineCache->u.proto.type)]
  22849. // JNE $next
  22850. IR::Opnd* typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.type), TyMachReg, instrLdSt->m_func);
  22851. InsertCompare(opndType, typeOpnd, instrLdSt);
  22852. return InsertBranch(Js::OpCode::BrNeq_A, labelNext, instrLdSt);
  22853. }
  22854. void
  22855. Lowerer::GenerateFlagInlineCacheCheck(
  22856. IR::Instr * instrLdSt,
  22857. IR::RegOpnd * opndType,
  22858. IR::RegOpnd * opndInlineCache,
  22859. IR::LabelInstr * labelNext)
  22860. {
  22861. // Generate:
  22862. //
  22863. // CMP s1, [&(inlineCache->u.accessor.type)]
  22864. // JNE $next
  22865. IR::Opnd* typeOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.type), TyMachReg, instrLdSt->m_func);
  22866. // CMP s1, [&(inlineCache->u.flag.type)]
  22867. InsertCompareBranch(opndType, typeOpnd, Js::OpCode::BrNeq_A, labelNext, instrLdSt);
  22868. }
  22869. void
  22870. Lowerer::GenerateLdFldFromLocalInlineCache(
  22871. IR::Instr * instrLdFld,
  22872. IR::RegOpnd * opndBase,
  22873. IR::Opnd * opndDst,
  22874. IR::RegOpnd * opndInlineCache,
  22875. IR::LabelInstr * labelFallThru,
  22876. bool isInlineSlot)
  22877. {
  22878. // Generate:
  22879. //
  22880. // s1 = MOV base->slots -- load the slot array
  22881. // s2 = MOVZXw [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
  22882. // dst = MOV [s1 + s2 * Scale] -- load the value directly from the slot
  22883. // JMP $fallthru
  22884. IR::IndirOpnd * opndIndir = nullptr;
  22885. IR::RegOpnd * opndSlotArray = nullptr;
  22886. if (!isInlineSlot)
  22887. {
  22888. opndSlotArray = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  22889. opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
  22890. InsertMove(opndSlotArray, opndIndir, instrLdFld);
  22891. }
  22892. // s2 = MOVZXw [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
  22893. IR::RegOpnd * opndReg2 = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  22894. opndIndir = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.slotIndex), TyUint16, instrLdFld->m_func);
  22895. InsertMove(opndReg2, opndIndir, instrLdFld);
  22896. if (isInlineSlot)
  22897. {
  22898. // dst = MOV [base + s2 * Scale] -- load the value directly from the slot
  22899. opndIndir = IR::IndirOpnd::New(opndBase, opndReg2, LowererMD::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
  22900. InsertMove(opndDst, opndIndir, instrLdFld);
  22901. }
  22902. else
  22903. {
  22904. // dst = MOV [s1 + s2 * Scale] -- load the value directly from the slot
  22905. opndIndir = IR::IndirOpnd::New(opndSlotArray, opndReg2, LowererMD::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
  22906. InsertMove(opndDst, opndIndir, instrLdFld);
  22907. }
  22908. // JMP $fallthru
  22909. InsertBranch(Js::OpCode::Br, labelFallThru, instrLdFld);
  22910. }
  22911. void
  22912. Lowerer::GenerateLdFldFromProtoInlineCache(
  22913. IR::Instr * instrLdFld,
  22914. IR::RegOpnd * opndBase,
  22915. IR::Opnd * opndDst,
  22916. IR::RegOpnd * inlineCache,
  22917. IR::LabelInstr * labelFallThru,
  22918. bool isInlineSlot)
  22919. {
  22920. // Generate:
  22921. //
  22922. // s1 = MOV [&(inlineCache->u.proto.prototypeObject)] -- load the cached prototype object
  22923. // s1 = MOV [&s1->slots] -- load the slot array
  22924. // s2 = MOVZXW [&(inlineCache->u.proto.slotIndex)] -- load the cached slot index
  22925. // dst = MOV [s1 + s2*4]
  22926. // JMP $fallthru
  22927. IR::IndirOpnd * opndIndir = nullptr;
  22928. IR::RegOpnd * opndProtoSlots = nullptr;
  22929. // s1 = MOV [&(inlineCache->u.proto.prototypeObject)] -- load the cached prototype object
  22930. IR::RegOpnd * opndProto = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  22931. opndIndir = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.prototypeObject), TyMachReg, instrLdFld->m_func);
  22932. InsertMove(opndProto, opndIndir, instrLdFld);
  22933. if (!isInlineSlot)
  22934. {
  22935. // s1 = MOV [&s1->slots] -- load the slot array
  22936. opndProtoSlots = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  22937. opndIndir = IR::IndirOpnd::New(opndProto, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
  22938. InsertMove(opndProtoSlots, opndIndir, instrLdFld);
  22939. }
  22940. // s2 = MOVZXW [&(inlineCache->u.proto.slotIndex)] -- load the cached slot index
  22941. IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  22942. opndIndir = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.slotIndex), TyUint16, instrLdFld->m_func);
  22943. InsertMove(opndSlotIndex, opndIndir, instrLdFld);
  22944. if (isInlineSlot)
  22945. {
  22946. // dst = MOV [s1 + s2*4]
  22947. opndIndir = IR::IndirOpnd::New(opndProto, opndSlotIndex, LowererMD::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
  22948. InsertMove(opndDst, opndIndir, instrLdFld);
  22949. }
  22950. else
  22951. {
  22952. // dst = MOV [s1 + s2*4]
  22953. opndIndir = IR::IndirOpnd::New(opndProtoSlots, opndSlotIndex, LowererMD::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
  22954. InsertMove(opndDst, opndIndir, instrLdFld);
  22955. }
  22956. // JMP $fallthru
  22957. InsertBranch(Js::OpCode::Br, labelFallThru, instrLdFld);
  22958. }
  22959. void
  22960. Lowerer::GenerateLdFldFromFlagInlineCache(
  22961. IR::Instr * insertBeforeInstr,
  22962. IR::RegOpnd * opndBase,
  22963. IR::Opnd * opndDst,
  22964. IR::RegOpnd * opndInlineCache,
  22965. IR::LabelInstr * labelFallThru,
  22966. bool isInlineSlot)
  22967. {
  22968. // Generate:
  22969. //
  22970. // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
  22971. // s1 = MOV [&s1->slots] -- load the slot array
  22972. // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
  22973. // dst = MOV [s1 + s2 * 4]
  22974. // JMP $fallthru
  22975. IR::IndirOpnd * opndIndir = nullptr;
  22976. IR::RegOpnd * opndObjSlots = nullptr;
  22977. // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
  22978. IR::RegOpnd * opndObject = IR::RegOpnd::New(TyMachReg, this->m_func);
  22979. opndIndir = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.object), TyMachReg, this->m_func);
  22980. InsertMove(opndObject, opndIndir, insertBeforeInstr);
  22981. if (!isInlineSlot)
  22982. {
  22983. // s1 = MOV [&s1->slots] -- load the slot array
  22984. opndObjSlots = IR::RegOpnd::New(TyMachReg, this->m_func);
  22985. opndIndir = IR::IndirOpnd::New(opndObject, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
  22986. InsertMove(opndObjSlots, opndIndir, insertBeforeInstr);
  22987. }
  22988. // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
  22989. IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, this->m_func);
  22990. opndIndir = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, this->m_func);
  22991. InsertMove(opndSlotIndex, opndIndir, insertBeforeInstr);
  22992. if (isInlineSlot)
  22993. {
  22994. // dst = MOV [s1 + s2 * 4]
  22995. opndIndir = IR::IndirOpnd::New(opndObject, opndSlotIndex, this->m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
  22996. InsertMove(opndDst, opndIndir, insertBeforeInstr);
  22997. }
  22998. else
  22999. {
  23000. // dst = MOV [s1 + s2 * 4]
  23001. opndIndir = IR::IndirOpnd::New(opndObjSlots, opndSlotIndex, this->m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
  23002. InsertMove(opndDst, opndIndir, insertBeforeInstr);
  23003. }
  23004. // JMP $fallthru
  23005. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  23006. }
  23007. void
  23008. Lowerer::LowerSpreadArrayLiteral(IR::Instr *instr)
  23009. {
  23010. LoadScriptContext(instr);
  23011. IR::Opnd *src2Opnd = instr->UnlinkSrc2();
  23012. m_lowererMD.LoadHelperArgument(instr, src2Opnd);
  23013. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  23014. m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  23015. this->m_lowererMD.ChangeToHelperCall(instr, IR::HelperSpreadArrayLiteral);
  23016. }
// Lowers a call that has spread arguments into a call to the spread helper
// (or its profiled new-object variant). Returns the lowered call instruction.
//   instr                - the call; src1 is the callee, src2 the arg chain.
//   callFlags            - call flags; CallFlags_New skips the callee test.
//   setupProfiledVersion - emit HelperProfiledNewScObjArraySpread with the
//                          extra profiling arguments.
IR::Instr *
Lowerer::LowerSpreadCall(IR::Instr *instr, Js::CallFlags callFlags, bool setupProfiledVersion)
{
// Get the target function object, and emit function object test.
IR::RegOpnd * functionObjOpnd = instr->UnlinkSrc1()->AsRegOpnd();
functionObjOpnd->m_isCallArg = true;
if (!(callFlags & Js::CallFlags_New) && !setupProfiledVersion)
{
IR::LabelInstr* continueAfterExLabel = InsertContinueAfterExceptionLabelForDebugger(m_func, instr, false);
this->m_lowererMD.GenerateFunctionObjectTest(instr, functionObjOpnd, false, continueAfterExLabel);
}
// The LdSpreadIndices instruction sits in the arg chain and carries the
// AuxArray of spread indices as its src1.
IR::Instr *spreadIndicesInstr;
spreadIndicesInstr = GetLdSpreadIndicesInstr(instr);
Assert(spreadIndicesInstr->m_opcode == Js::OpCode::LdSpreadIndices);
// Get AuxArray
IR::Opnd *spreadIndicesOpnd = spreadIndicesInstr->UnlinkSrc1();
// Remove LdSpreadIndices from the argument chain
instr->ReplaceSrc2(spreadIndicesInstr->UnlinkSrc2());
// Emit the normal args
if (!(callFlags & Js::CallFlags_New))
{
callFlags = (Js::CallFlags)(callFlags | (instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed));
}
// Profiled helper call requires three more parameters, ArrayProfileId, profileId, and the frame pointer.
// This is just following the convention of HelperProfiledNewScObjArray call.
const unsigned short extraArgsCount = setupProfiledVersion ? 5 : 2; // function object and AuxArray
int32 argCount = this->m_lowererMD.LowerCallArgs(instr, (ushort)callFlags, extraArgsCount);
// Emit our extra (first) args for the Spread helper in reverse order
if (setupProfiledVersion)
{
IR::JitProfilingInstr* jitInstr = (IR::JitProfilingInstr*)instr;
m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(jitInstr->arrayProfileId, m_func));
m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(jitInstr->profileId, m_func));
m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
}
m_lowererMD.LoadHelperArgument(instr, functionObjOpnd);
m_lowererMD.LoadHelperArgument(instr, spreadIndicesOpnd);
// Change the call target to our helper
IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(setupProfiledVersion ? IR::HelperProfiledNewScObjArraySpread : IR::HelperSpreadCall, this->m_func);
instr->SetSrc1(helperOpnd);
return this->m_lowererMD.LowerCall(instr, (Js::ArgSlot)argCount);
}
// Lowers an asm.js integer Div/Rem, wrapping the machine divide with the
// explicit checks asm.js semantics require: divide-by-zero produces 0, and
// MIN_INT / -1 produces src1 (div) or 0 (rem) instead of faulting.
void
Lowerer::LowerDivI4Common(IR::Instr * instr)
{
Assert(instr);
Assert((instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::Div_I4) ||
(instr->m_opcode == Js::OpCode::RemU_I4 || instr->m_opcode == Js::OpCode::DivU_I4));
Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
const bool isRem = instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::RemU_I4;
// MIN_INT/-1 path is only needed for signed operations
// TEST src2, src2
// JEQ $div0
// CMP src1, MIN_INT
// JEQ $minInt
// JMP $div
// $div0: [helper]
// MOV dst, 0
// JMP $done
// $minInt: [helper]
// CMP src2, -1
// JNE $div
// dst = MOV src1 / 0
// JMP $done
// $div:
// dst = IDIV src2, src1
// $done:
IR::LabelInstr * div0Label = InsertLabel(true, instr);
IR::LabelInstr * divLabel = InsertLabel(false, instr);
IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
IR::Opnd * dst = instr->GetDst();
IR::Opnd * src1 = instr->GetSrc1();
IR::Opnd * src2 = instr->GetSrc2();
bool isWasm = m_func->GetJITFunctionBody()->IsWasmFunction();
// Wasm only reaches this path for Rem (see assert); its div-by-zero behavior
// is handled elsewhere, so the zero-result-on-div0 sequence is asm.js-only.
Assert(!isWasm || isRem);
if (!isWasm)
{
InsertTestBranch(src2, src2, Js::OpCode::BrEq_A, div0Label, div0Label);
InsertMove(dst, IR::IntConstOpnd::NewFromType(0, dst->GetType(), m_func), divLabel);
InsertBranch(Js::OpCode::Br, doneLabel, divLabel);
}
if (instr->GetSrc1()->IsSigned())
{
IR::LabelInstr * minIntLabel = nullptr;
// we need to check for INT_MIN/-1 if divisor is either -1 or variable, and dividend is either INT_MIN or variable
int64 intMin = IRType_IsInt64(src1->GetType()) ? LONGLONG_MIN : INT_MIN;
bool needsMinOverNeg1Check = !(src2->IsImmediateOpnd() && src2->GetImmediateValue(m_func) != -1);
if (src1->IsImmediateOpnd())
{
if (needsMinOverNeg1Check && src1->GetImmediateValue(m_func) == intMin)
{
// Dividend is known to be MIN_INT: go straight to the -1 divisor check.
minIntLabel = InsertLabel(true, divLabel);
InsertBranch(Js::OpCode::Br, minIntLabel, div0Label);
}
else
{
needsMinOverNeg1Check = false;
}
}
else if(needsMinOverNeg1Check)
{
// Dividend unknown: compare it against MIN_INT at run time.
minIntLabel = InsertLabel(true, divLabel);
InsertCompareBranch(src1, IR::IntConstOpnd::NewFromType(intMin, src1->GetType(), m_func), Js::OpCode::BrEq_A, minIntLabel, div0Label);
}
if (needsMinOverNeg1Check)
{
Assert(minIntLabel);
Assert(!src2->IsImmediateOpnd() || src2->GetImmediateValue(m_func) == -1);
if (!src2->IsImmediateOpnd())
{
InsertCompareBranch(src2, IR::IntConstOpnd::NewFromType(-1, src2->GetType(), m_func), Js::OpCode::BrNeq_A, divLabel, divLabel);
}
// MIN_INT / -1: div yields src1, rem yields 0; skip the hardware divide.
InsertMove(dst, !isRem ? src1 : IR::IntConstOpnd::NewFromType(0, dst->GetType(), m_func), divLabel);
InsertBranch(Js::OpCode::Br, doneLabel, divLabel);
}
}
InsertBranch(Js::OpCode::Br, divLabel, div0Label);
// Emit the actual machine divide at $div.
m_lowererMD.EmitInt4Instr(instr);
}
  23136. void
  23137. Lowerer::LowerRemI4(IR::Instr * instr)
  23138. {
  23139. Assert(instr);
  23140. Assert(instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::RemU_I4);
  23141. //Generate fast path for const divisors
  23142. if (m_lowererMD.GenerateFastDivAndRem(instr))
  23143. {
  23144. return;
  23145. }
  23146. if (m_func->GetJITFunctionBody()->IsAsmJsMode())
  23147. {
  23148. LowerDivI4Common(instr);
  23149. }
  23150. else
  23151. {
  23152. m_lowererMD.EmitInt4Instr(instr);
  23153. }
  23154. }
  23155. void
  23156. Lowerer::LowerTrapIfZero(IR::Instr * const instr)
  23157. {
  23158. Assert(instr);
  23159. Assert(instr->m_opcode == Js::OpCode::TrapIfZero);
  23160. Assert(instr->GetSrc1());
  23161. Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
  23162. IR::Opnd * src1 = instr->GetSrc1();
  23163. if (src1->IsImmediateOpnd())
  23164. {
  23165. if (src1->GetImmediateValue(m_func) == 0)
  23166. {
  23167. GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_DivideByZero), TyInt32, m_func), instr);
  23168. }
  23169. }
  23170. else
  23171. {
  23172. IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
  23173. InsertCompareBranch(src1, IR::IntConstOpnd::NewFromType(0, src1->GetType(), m_func), Js::OpCode::BrNeq_A, doneLabel, doneLabel);
  23174. InsertLabel(true, doneLabel);
  23175. GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_DivideByZero), TyInt32, m_func), doneLabel);
  23176. }
  23177. LowererMD::ChangeToAssign(instr);
  23178. }
  23179. IR::Instr*
  23180. Lowerer::LowerTrapIfUnalignedAccess(IR::Instr * const instr)
  23181. {
  23182. IR::Opnd* dst = instr->UnlinkDst();
  23183. IR::Opnd* src1 = instr->UnlinkSrc1();
  23184. IR::Opnd* src2 = instr->GetSrc2();
  23185. Assert(instr);
  23186. Assert(instr->m_opcode == Js::OpCode::TrapIfUnalignedAccess);
  23187. Assert(src1 && !src1->IsVar());
  23188. Assert(src2 && src2->IsImmediateOpnd());
  23189. Assert(src2->GetSize() > 1);
  23190. uint32 mask = src2->GetSize() - 1;
  23191. uint32 cmpValue = (uint32)src2->GetImmediateValue(m_func);
  23192. InsertMove(dst, src1, instr);
  23193. IR::IntConstOpnd* maskOpnd = IR::IntConstOpnd::New(mask, src1->GetType(), m_func);
  23194. IR::RegOpnd* maskedOpnd = IR::RegOpnd::New(src1->GetType(), m_func);
  23195. IR::Instr* maskInstr = IR::Instr::New(Js::OpCode::And_I4, maskedOpnd, src1, maskOpnd, m_func);
  23196. instr->InsertBefore(maskInstr);
  23197. IR::IntConstOpnd* cmpOpnd = IR::IntConstOpnd::New(cmpValue, maskedOpnd->GetType(), m_func, true);
  23198. IR::LabelInstr* alignedLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  23199. IR::Instr* branch = IR::BranchInstr::New(Js::OpCode::BrEq_I4, alignedLabel, maskedOpnd, cmpOpnd, m_func);
  23200. instr->InsertBefore(branch);
  23201. InsertLabel(true, instr);
  23202. GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_UnalignedAtomicAccess), TyInt32, m_func), instr);
  23203. instr->InsertBefore(alignedLabel);
  23204. instr->Remove();
  23205. // The check and branch are not fully lowered yet, let them go in the lower loop.
  23206. return branch;
  23207. }
// Wasm signed-overflow trap: throws VBSERR_Overflow when src1 == MIN_INT and
// src2 == -1 (the one signed division that overflows), then turns the
// instruction into a plain assign. Checks that are decidable at compile time
// are folded away.
void
Lowerer::LowerTrapIfMinIntOverNegOne(IR::Instr * const instr)
{
Assert(instr);
Assert(instr->m_opcode == Js::OpCode::TrapIfMinIntOverNegOne);
Assert(instr->GetSrc1());
Assert(instr->GetSrc2());
Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
IR::Opnd * src1 = instr->GetSrc1();
IR::Opnd * src2 = instr->UnlinkSrc2();
int64 intMin = src1->IsInt64() ? LONGLONG_MIN : INT_MIN;
if (src1->IsImmediateOpnd())
{
if (src1->GetImmediateValue(m_func) != intMin)
{
// Const value not min int, will not trap
doneLabel->Remove();
src2->Free(m_func);
LowererMD::ChangeToAssign(instr);
return;
}
// Is min int no need to do check
}
else
{
// Runtime dividend: skip the trap unless it is MIN_INT.
InsertCompareBranch(src1, IR::IntConstOpnd::NewFromType(intMin, src1->GetType(), m_func), Js::OpCode::BrNeq_A, doneLabel, doneLabel);
}
if (src2->IsImmediateOpnd())
{
if (src2->GetImmediateValue(m_func) != -1)
{
// Const value not -1, will not trap
doneLabel->Remove();
src2->Free(m_func);
LowererMD::ChangeToAssign(instr);
return;
}
// Is -1 no need to do check
src2->Free(m_func);
}
else
{
// Runtime divisor: skip the trap unless it is -1.
InsertCompareBranch(src2, IR::IntConstOpnd::NewFromType(-1, src2->GetType(), m_func), Js::OpCode::BrNeq_A, doneLabel, doneLabel);
}
// Both checks fell through: MIN_INT / -1 -> overflow.
InsertLabel(true, doneLabel);
GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(VBSERR_Overflow), TyInt32, m_func), doneLabel);
LowererMD::ChangeToAssign(instr);
}
  23257. void
  23258. Lowerer::GenerateThrow(IR::Opnd* errorCode, IR::Instr * instr)
  23259. {
  23260. IR::Instr *throwInstr = IR::Instr::New(Js::OpCode::RuntimeTypeError, IR::RegOpnd::New(TyMachReg, m_func), errorCode, m_func);
  23261. instr->InsertBefore(throwInstr);
  23262. const bool isWasm = m_func->GetJITFunctionBody() && m_func->GetJITFunctionBody()->IsWasmFunction();
  23263. LowerUnaryHelperMem(throwInstr, isWasm ? IR::HelperOp_WebAssemblyRuntimeError : IR::HelperOp_RuntimeTypeError);
  23264. }
  23265. void
  23266. Lowerer::LowerDivI4(IR::Instr * instr)
  23267. {
  23268. Assert(instr);
  23269. Assert(instr->m_opcode == Js::OpCode::Div_I4 || instr->m_opcode == Js::OpCode::DivU_I4);
  23270. #ifdef _M_IX86
  23271. if (
  23272. instr->GetDst() && instr->GetDst()->IsInt64() ||
  23273. instr->GetSrc1() && instr->GetSrc1()->IsInt64() ||
  23274. instr->GetSrc2() && instr->GetSrc2()->IsInt64()
  23275. )
  23276. {
  23277. m_lowererMD.EmitInt64Instr(instr);
  23278. return;
  23279. }
  23280. #endif
  23281. Assert(instr->GetSrc2());
  23282. if (m_func->GetJITFunctionBody()->IsWasmFunction())
  23283. {
  23284. if (!m_lowererMD.GenerateFastDivAndRem(instr))
  23285. {
  23286. m_lowererMD.EmitInt4Instr(instr);
  23287. }
  23288. return;
  23289. }
  23290. if (m_func->GetJITFunctionBody()->IsAsmJsMode())
  23291. {
  23292. if (!m_lowererMD.GenerateFastDivAndRem(instr))
  23293. {
  23294. LowerDivI4Common(instr);
  23295. }
  23296. return;
  23297. }
  23298. if(!instr->HasBailOutInfo())
  23299. {
  23300. if (!m_lowererMD.GenerateFastDivAndRem(instr))
  23301. {
  23302. m_lowererMD.EmitInt4Instr(instr);
  23303. }
  23304. return;
  23305. }
  23306. Assert(!(instr->GetBailOutKind() & ~(IR::BailOnDivResultNotInt | IR::BailOutOnNegativeZero | IR::BailOutOnDivByZero | IR::BailOutOnDivOfMinInt)));
  23307. IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  23308. // Split out and generate the bailout instruction
  23309. const auto nonBailOutInstr = IR::Instr::New(instr->m_opcode, instr->m_func);
  23310. instr->TransferTo(nonBailOutInstr);
  23311. instr->InsertBefore(nonBailOutInstr);
  23312. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
  23313. instr->InsertAfter(doneLabel);
  23314. // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
  23315. // ordering instructions anymore.
  23316. IR::LabelInstr * bailOutLabel = GenerateBailOut(instr);
  23317. IR::Opnd * denominatorOpnd = nonBailOutInstr->GetSrc2();
  23318. IR::Opnd * nominatorOpnd = nonBailOutInstr->GetSrc1();
  23319. bool isFastDiv = false;
  23320. if (bailOutKind & IR::BailOutOnDivOfMinInt)
  23321. {
  23322. // Bailout if numerator is MIN_INT (could also check for denominator being -1
  23323. // before bailing out, but does not seem worth the extra code..)
  23324. InsertCompareBranch(nominatorOpnd, IR::IntConstOpnd::New(INT32_MIN, TyInt32, this->m_func, true), Js::OpCode::BrEq_A, bailOutLabel, nonBailOutInstr);
  23325. }
  23326. if (denominatorOpnd->IsIntConstOpnd() && Math::IsPow2(denominatorOpnd->AsIntConstOpnd()->AsInt32()))
  23327. {
  23328. Assert((bailOutKind & (IR::BailOutOnNegativeZero | IR::BailOutOnDivByZero)) == 0);
  23329. if (Math::IsPow2(denominatorOpnd->AsIntConstOpnd()->AsInt32()))
  23330. {
  23331. int pow2 = denominatorOpnd->AsIntConstOpnd()->AsInt32();
  23332. InsertTestBranch(nominatorOpnd, IR::IntConstOpnd::New(pow2 - 1, TyInt32, this->m_func),
  23333. Js::OpCode::BrNeq_A, bailOutLabel, nonBailOutInstr);
  23334. nonBailOutInstr->m_opcode = Js::OpCode::Shr_A;
  23335. nonBailOutInstr->ReplaceSrc2(IR::IntConstOpnd::New(Math::Log2(pow2), TyInt32, this->m_func));
  23336. LowererMD::ChangeToShift(nonBailOutInstr, false);
  23337. LowererMD::Legalize(nonBailOutInstr);
  23338. isFastDiv = true;
  23339. }
  23340. else
  23341. {
  23342. isFastDiv = m_lowererMD.GenerateFastDivAndRem(nonBailOutInstr, bailOutLabel);
  23343. }
  23344. }
  23345. if (!isFastDiv)
  23346. {
  23347. if (bailOutKind & IR::BailOutOnDivByZero)
  23348. {
  23349. // Bailout if denominator is 0
  23350. InsertTestBranch(denominatorOpnd, denominatorOpnd, Js::OpCode::BrEq_A, bailOutLabel, nonBailOutInstr);
  23351. }
  23352. // Lower the div and bailout if there is a reminder (machine specific)
  23353. IR::Instr * insertBeforeInstr = m_lowererMD.LowerDivI4AndBailOnReminder(nonBailOutInstr, bailOutLabel);
  23354. IR::Opnd * resultOpnd = nonBailOutInstr->GetDst();
  23355. if (bailOutKind & IR::BailOutOnNegativeZero)
  23356. {
  23357. // TEST result, result
  23358. // JNE skipNegDenominatorCheckLabel // Result not 0
  23359. // TEST denominator, denominator
  23360. // JNSB/BMI bailout // bail if negative
  23361. // skipNegDenominatorCheckLabel:
  23362. IR::LabelInstr * skipNegDenominatorCheckLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  23363. // Skip negative denominator check if the result is not 0
  23364. InsertTestBranch(resultOpnd, resultOpnd, Js::OpCode::BrNeq_A, skipNegDenominatorCheckLabel, insertBeforeInstr);
  23365. IR::LabelInstr * negDenominatorCheckLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  23366. insertBeforeInstr->InsertBefore(negDenominatorCheckLabel);
  23367. // Jump to done if the denominator is not negative
  23368. InsertTestBranch(denominatorOpnd, denominatorOpnd,
  23369. LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), bailOutLabel, insertBeforeInstr);
  23370. insertBeforeInstr->InsertBefore(skipNegDenominatorCheckLabel);
  23371. }
  23372. }
  23373. // We are all fine, jump around the bailout to done
  23374. InsertBranch(Js::OpCode::Br, doneLabel, bailOutLabel);
  23375. }
  23376. void
  23377. Lowerer::LowerRemR8(IR::Instr * instr)
  23378. {
  23379. Assert(instr);
  23380. Assert(instr->m_opcode == Js::OpCode::Rem_A);
  23381. Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
  23382. m_lowererMD.LoadDoubleHelperArgument(instr, instr->UnlinkSrc2());
  23383. m_lowererMD.LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
  23384. instr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperOp_Rem_Double, m_func));
  23385. m_lowererMD.LowerCall(instr, 0);
  23386. }
void
Lowerer::LowerNewScopeSlots(IR::Instr * instr, bool doStackSlots)
{
    // Lowers NewScopeSlots: allocate a scope-slot Var array of 'count' entries
    // (src1 is the constant count) and initialize it — header slots first, then
    // every user slot set to 'undefined'. When doStackSlots is true the storage
    // was already stack-allocated elsewhere, so only the initialization is emitted.
    Func * func = m_func;
    if (PHASE_OFF(Js::NewScopeSlotFastPathPhase, func))
    {
        // Fast path disabled: fall back to the runtime helper.
        this->LowerUnaryHelperMemWithFunctionInfo(instr, IR::HelperOP_NewScopeSlots);
        return;
    }
    uint const count = instr->GetSrc1()->AsIntConstOpnd()->AsUint32();
    uint const allocSize = count * sizeof(Js::Var);
    // User-visible slots exclude the reserved header slots at the front.
    uint const actualSlotCount = count - Js::ScopeSlots::FirstSlotIndex;
    IR::RegOpnd * dst = instr->UnlinkDst()->AsRegOpnd();
    // dst = RecyclerAlloc(allocSize)
    // dst[EncodedSlotCountSlotIndex] = min(actualSlotCount, MaxEncodedSlotCount);
    // dst[ScopeMetadataSlotIndex] = FunctionBody;
    // mov undefinedOpnd, undefined
    // dst[FirstSlotIndex..count] = undefinedOpnd;
    // Note: stack allocation of both scope slots and frame display are done together
    // in lowering of NewStackFrameDisplay
    if (!doStackSlots)
    {
        GenerateRecyclerAlloc(IR::HelperAllocMemForVarArray, allocSize, dst, instr);
    }
    // Header slot: encoded slot count (clamped to the maximum encodable value).
    m_lowererMD.GenerateMemInit(dst, Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
        (size_t)min<uint>(actualSlotCount, Js::ScopeSlots::MaxEncodedSlotCount), instr, !doStackSlots);
    // Header slot: metadata (function info).
    IR::Opnd * functionInfoOpnd = this->LoadFunctionInfoOpnd(instr);
    GenerateMemInit(dst, Js::ScopeSlots::ScopeMetadataSlotIndex * sizeof(Js::Var),
        functionInfoOpnd, instr, !doStackSlots);
    IR::Opnd * undefinedOpnd = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
    const IR::AutoReuseOpnd autoReuseUndefinedOpnd(undefinedOpnd, func);
    // avoid using a register for the undefined pointer if we are going to assign 1 or 2
    if (actualSlotCount > 2)
    {
        undefinedOpnd = GetRegOpnd(undefinedOpnd, instr, func, TyVar);
    }
    int const loopUnrollCount = 8;
    if (actualSlotCount <= loopUnrollCount * 2)
    {
        // Just generate all the assignment in straight line code
        // mov[dst + Js::FirstSlotIndex], undefinedOpnd
        // ...
        // mov[dst + count - 1], undefinedOpnd
        for (unsigned int i = Js::ScopeSlots::FirstSlotIndex; i < count; i++)
        {
            GenerateMemInit(dst, sizeof(Js::Var) * i, undefinedOpnd, instr, !doStackSlots);
        }
    }
    else
    {
        // Just generate all the assignment in loop of loopUnrollCount and the rest as straight line code
        //
        // lea currOpnd, [dst + sizeof(Var) * (loopAssignCount + Js::ScopeSlots::FirstSlotIndex - loopUnrollCount)];
        // mov [currOpnd + loopUnrollCount + leftOverAssignCount - 1] , undefinedOpnd
        // mov [currOpnd + loopUnrollCount + leftOverAssignCount - 2] , undefinedOpnd
        // ...
        // mov [currOpnd + loopUnrollCount], undefinedOpnd
        // $LoopTop:
        // mov [currOpnd + loopUnrollCount - 1], undefinedOpnd
        // mov [currOpnd + loopUnrollCount - 2], undefinedOpnd
        // ...
        // mov [currOpnd], undefinedOpnd
        // lea currOpnd, [currOpnd - loopUnrollCount]
        // cmp dst, currOpnd
        // jlt $Looptop
        uint nLoop = actualSlotCount / loopUnrollCount;
        uint loopAssignCount = nLoop * loopUnrollCount;
        uint leftOverAssignCount = actualSlotCount - loopAssignCount; // The left over assignments
        IR::RegOpnd * currOpnd = IR::RegOpnd::New(TyMachPtr, func);
        const IR::AutoReuseOpnd autoReuseCurrOpnd(currOpnd, m_func);
        // Point currOpnd at the start of the last unrolled iteration's window.
        InsertLea(
            currOpnd,
            IR::IndirOpnd::New(
                dst,
                sizeof(Js::Var) * (loopAssignCount + Js::ScopeSlots::FirstSlotIndex - loopUnrollCount),
                TyMachPtr,
                func),
            instr);
        // Peel the leftover (count % unroll) assignments before entering the loop.
        for (unsigned int i = 0; i < leftOverAssignCount; i++)
        {
            GenerateMemInit(currOpnd, sizeof(Js::Var) * (loopUnrollCount + leftOverAssignCount - i - 1), undefinedOpnd, instr, !doStackSlots);
        }
        IR::LabelInstr * loopTop = InsertLoopTopLabel(instr);
        Loop * loop = loopTop->GetLoop();
        // Unrolled loop body: initialize loopUnrollCount slots per iteration.
        for (unsigned int i = 0; i < loopUnrollCount; i++)
        {
            GenerateMemInit(currOpnd, sizeof(Js::Var) * (loopUnrollCount - i - 1), undefinedOpnd, instr, !doStackSlots);
        }
        // Step backwards by one window and loop while currOpnd is still past dst.
        InsertLea(currOpnd, IR::IndirOpnd::New(currOpnd, -((int)sizeof(Js::Var) * loopUnrollCount), TyMachPtr, func), instr);
        InsertCompareBranch(dst, currOpnd, Js::OpCode::BrLt_A, true, loopTop, instr);
        // These syms live across the back-edge; tell the register allocator.
        loop->regAlloc.liveOnBackEdgeSyms->Set(currOpnd->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(dst->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(undefinedOpnd->AsRegOpnd()->m_sym->m_id);
    }
    if (!doStackSlots)
    {
        // Publish the new slot array as the function's local closure.
        InsertMove(IR::RegOpnd::New(instr->m_func->GetLocalClosureSym(), TyMachPtr, func), dst, instr);
    }
    instr->Remove();
}
  23487. void Lowerer::LowerLdInnerFrameDisplay(IR::Instr *instr)
  23488. {
  23489. bool isStrict = instr->m_func->GetJITFunctionBody()->IsStrictMode();
  23490. if (isStrict)
  23491. {
  23492. if (instr->GetSrc2())
  23493. {
  23494. this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdStrictInnerFrameDisplay);
  23495. }
  23496. else
  23497. {
  23498. #if DBG
  23499. instr->m_opcode = Js::OpCode::LdInnerFrameDisplayNoParent;
  23500. #endif
  23501. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdStrictInnerFrameDisplayNoParent);
  23502. }
  23503. }
  23504. else
  23505. {
  23506. if (instr->GetSrc2())
  23507. {
  23508. this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdInnerFrameDisplay);
  23509. }
  23510. else
  23511. {
  23512. #if DBG
  23513. instr->m_opcode = Js::OpCode::LdInnerFrameDisplayNoParent;
  23514. #endif
  23515. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdInnerFrameDisplayNoParent);
  23516. }
  23517. }
  23518. }
void Lowerer::LowerLdFrameDisplay(IR::Instr *instr, bool doStackFrameDisplay)
{
    // Lowers LdFrameDisplay: build a new FrameDisplay whose scope chain is the
    // current environment (src2, envDepth entries) with the current scope (src1)
    // prepended. Falls back to runtime helpers when the depth is unknown or the
    // fast path is inapplicable; otherwise allocates (stack or recycler) and
    // copies the chain inline.
    bool isStrict = instr->m_func->GetJITFunctionBody()->IsStrictMode();
    uint16 envDepth = instr->m_func->GetJITFunctionBody()->GetEnvDepth();
    Func *func = this->m_func;
    // envDepth of -1 indicates unknown depth (eval expression or HTML event handler).
    // We could still fast-path these by generating a loop over the (dynamically loaded) scope chain length,
    // but I doubt it's worth it.
    // If the dst opnd is a byte code temp, that indicates we're prepending a block scope or some such and
    // shouldn't attempt to do this.
    if (envDepth == (uint16)-1 ||
        (!doStackFrameDisplay && (instr->isNonFastPathFrameDisplay || instr->GetDst()->AsRegOpnd()->m_sym->IsTempReg(instr->m_func))) ||
        PHASE_OFF(Js::FrameDisplayFastPathPhase, func))
    {
        // Helper fallback: pick the strict/non-strict, parent/no-parent variant.
        if (isStrict)
        {
            if (instr->GetSrc2())
            {
                this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdStrictFrameDisplay);
            }
            else
            {
#if DBG
                instr->m_opcode = Js::OpCode::LdFrameDisplayNoParent;
#endif
                this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdStrictFrameDisplayNoParent);
            }
        }
        else
        {
            if (instr->GetSrc2())
            {
                this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdFrameDisplay);
            }
            else
            {
#if DBG
                instr->m_opcode = Js::OpCode::LdFrameDisplayNoParent;
#endif
                this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdFrameDisplayNoParent);
            }
        }
        return;
    }
    // New display holds the existing chain plus the current scope.
    uint16 frameDispLength = envDepth + 1;
    Assert(frameDispLength > 0);
    IR::RegOpnd *dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    IR::RegOpnd *currentFrameOpnd = instr->UnlinkSrc1()->AsRegOpnd();
    uint allocSize = sizeof(Js::FrameDisplay) + (frameDispLength * sizeof(Js::Var));
    if (doStackFrameDisplay)
    {
        IR::Instr *insertInstr = func->GetFunctionEntryInsertionPoint();
        // Initialize stack pointers for scope slots and frame display together at the top of the function
        // (in case we bail out before executing the instructions).
        IR::LabelInstr *labelNoStackFunc = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        // Check whether stack functions have been disabled since we jitted.
        // If they have, then we must allocate closure memory on the heap.
        InsertTestBranch(IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetFlagsAddr(), TyInt8, m_func),
            IR::IntConstOpnd::New(Js::FunctionBody::Flags_StackNestedFunc, TyInt8, m_func, true),
            Js::OpCode::BrEq_A, labelNoStackFunc, insertInstr);
        // allocSize is greater than TyMachPtr and hence changing the initial size to TyMisc
        StackSym * stackSym = StackSym::New(TyMisc, instr->m_func);
        m_func->StackAllocate(stackSym, allocSize);
        InsertLea(dstOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertInstr);
        uint scopeSlotAllocSize =
            (m_func->GetJITFunctionBody()->GetScopeSlotArraySize() + Js::ScopeSlots::FirstSlotIndex) * sizeof(Js::Var);
        stackSym = StackSym::New(TyMisc, instr->m_func);
        m_func->StackAllocate(stackSym, scopeSlotAllocSize);
        InsertLea(currentFrameOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertInstr);
        InsertBranch(Js::OpCode::Br, labelDone, insertInstr);
        // Slow path: stack functions disabled — allocate both from the recycler.
        insertInstr->InsertBefore(labelNoStackFunc);
        GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, insertInstr, true);
        GenerateRecyclerAlloc(IR::HelperAllocMemForVarArray, scopeSlotAllocSize, currentFrameOpnd, insertInstr, true);
        insertInstr->InsertBefore(labelDone);
        // Record the display and closure in their home locations for bailout/stack walks.
        InsertMove(IR::SymOpnd::New(m_func->GetLocalFrameDisplaySym(), 0, TyMachReg, m_func), dstOpnd, insertInstr);
        InsertMove(IR::SymOpnd::New(m_func->GetLocalClosureSym(), 0, TyMachReg, m_func), currentFrameOpnd, insertInstr);
    }
    else
    {
        GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, instr);
    }
    // Copy contents of environment
    // Work back to front to leave the head element(s) in cache
    if (envDepth > 0)
    {
        IR::RegOpnd *envOpnd = instr->UnlinkSrc2()->AsRegOpnd();
        for (uint16 i = envDepth; i >= 1; i--)
        {
            // New display slot i receives environment slot i-1 (shifted by one to
            // make room for the current scope at slot 0).
            IR::Opnd *scopeOpnd = IR::RegOpnd::New(TyMachReg, func);
            IR::Opnd *envLoadOpnd =
                IR::IndirOpnd::New(envOpnd, Js::FrameDisplay::GetOffsetOfScopes() + ((i - 1) * sizeof(Js::Var)), TyMachReg, func);
            InsertMove(scopeOpnd, envLoadOpnd, instr);
            IR::Opnd *dstStoreOpnd =
                IR::IndirOpnd::New(dstOpnd, Js::FrameDisplay::GetOffsetOfScopes() + (i * sizeof(Js::Var)), TyMachReg, func);
            InsertMove(dstStoreOpnd, scopeOpnd, instr);
        }
    }
    // Assign current element.
    InsertMove(
        IR::IndirOpnd::New(dstOpnd, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, func),
        currentFrameOpnd,
        instr);
    // Combine tag, strict mode flag, and length
    // (packed into one machine word using the fields' byte offsets as shifts).
    uintptr_t bits = 1 |
        (isStrict << (Js::FrameDisplay::GetOffsetOfStrictMode() * 8)) |
        (frameDispLength << (Js::FrameDisplay::GetOffsetOfLength() * 8));
    InsertMove(
        IR::IndirOpnd::New(dstOpnd, 0, TyMachReg, func),
        IR::IntConstOpnd::New(bits, TyMachReg, func, true),
        instr);
    instr->Remove();
}
  23632. IR::AddrOpnd *Lowerer::CreateFunctionBodyOpnd(Func *const func) const
  23633. {
  23634. return IR::AddrOpnd::New(func->GetJITFunctionBody()->GetAddr(), IR::AddrOpndKindDynamicFunctionBody, m_func, true);
  23635. }
  23636. IR::AddrOpnd *Lowerer::CreateFunctionBodyOpnd(Js::FunctionBody *const functionBody) const
  23637. {
  23638. // TODO: OOP JIT, CreateFunctionBodyOpnd
  23639. Assert(!m_func->IsOOPJIT());
  23640. return IR::AddrOpnd::New(functionBody, IR::AddrOpndKindDynamicFunctionBody, m_func, true);
  23641. }
  23642. bool
  23643. Lowerer::GenerateRecyclerOrMarkTempAlloc(IR::Instr * instr, IR::RegOpnd * dstOpnd, IR::JnHelperMethod allocHelper, size_t allocSize, IR::SymOpnd ** tempObjectSymOpnd)
  23644. {
  23645. if (instr->dstIsTempObject)
  23646. {
  23647. *tempObjectSymOpnd = GenerateMarkTempAlloc(dstOpnd, allocSize, instr);
  23648. return false;
  23649. }
  23650. this->GenerateRecyclerAlloc(allocHelper, allocSize, dstOpnd, instr);
  23651. *tempObjectSymOpnd = nullptr;
  23652. return true;
  23653. }
IR::SymOpnd *
Lowerer::GenerateMarkTempAlloc(IR::RegOpnd *const dstOpnd, const size_t allocSize, IR::Instr *const insertBeforeInstr)
{
    // Stack-allocates allocSize bytes for a mark-temp object, preceded by one
    // pointer-sized slot that holds the boxed (heap) copy of the object if one is
    // ever created. Returns the SymOpnd for the object itself (offset past the
    // boxed-value slot); dstOpnd is set to its address.
    Assert(dstOpnd);
    Assert(allocSize != 0);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    // Allocate stack space for the reg exp instance, and a slot for the boxed value
    StackSym *const tempObjectSym = StackSym::New(TyMisc, func);
    m_func->StackAllocate(tempObjectSym, (int)(allocSize + sizeof(void *)));
    // The object lives sizeof(void*) past the sym's base; the boxed-value slot sits at the base.
    IR::SymOpnd * tempObjectOpnd = IR::SymOpnd::New(tempObjectSym, sizeof(void *), TyVar, func);
    InsertLea(dstOpnd, tempObjectOpnd, insertBeforeInstr);
    // Initialize the boxed instance slot
    if (this->outerMostLoopLabel == nullptr)
    {
        // Not inside a loop: null the boxed slot right here (slot is at dst - sizeof(void*)).
        GenerateMemInit(dstOpnd, -(int)sizeof(void *), IR::AddrOpnd::NewNull(func), insertBeforeInstr, false);
    }
    else if (!PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func))
    {
        // Inside a loop: hoist the one-time null init above the outermost loop so it
        // isn't re-executed every iteration.
        // NOTE(review): if HoistMarkTempInitPhase is off AND we are in a loop, no init
        // is emitted at all — presumably intentional for that diagnostic mode; confirm.
        InsertMove(IR::SymOpnd::New(tempObjectSym, TyMachPtr, func), IR::AddrOpnd::NewNull(func), this->outerMostLoopLabel, false);
    }
    return tempObjectOpnd;
}
  23677. void Lowerer::LowerBrFncCachedScopeEq(IR::Instr *instr)
  23678. {
  23679. Assert(instr->m_opcode == Js::OpCode::BrFncCachedScopeEq || instr->m_opcode == Js::OpCode::BrFncCachedScopeNeq);
  23680. Js::OpCode opcode = (instr->m_opcode == Js::OpCode::BrFncCachedScopeEq ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A);
  23681. IR::RegOpnd *src1Reg = instr->UnlinkSrc1()->AsRegOpnd();
  23682. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1Reg, Js::ScriptFunction::GetOffsetOfCachedScopeObj(), TyMachReg, this->m_func);
  23683. this->InsertCompareBranch(indirOpnd, instr->UnlinkSrc2(), opcode, false, instr->AsBranchInstr()->GetTarget(), instr->m_next);
  23684. instr->Remove();
  23685. }
  23686. IR::Instr* Lowerer::InsertLoweredRegionStartMarker(IR::Instr* instrToInsertBefore)
  23687. {
  23688. AssertMsg(instrToInsertBefore->m_prev != nullptr, "Can't insert lowered region start marker as the first instr in the func.");
  23689. IR::LabelInstr* startMarkerLabel = IR::LabelInstr::New(Js::OpCode::Label, instrToInsertBefore->m_func);
  23690. instrToInsertBefore->InsertBefore(startMarkerLabel);
  23691. return startMarkerLabel;
  23692. }
  23693. IR::Instr* Lowerer::RemoveLoweredRegionStartMarker(IR::Instr* startMarkerInstr)
  23694. {
  23695. AssertMsg(startMarkerInstr->m_prev != nullptr, "Lowered region start marker became the first instruction in the func after lowering?");
  23696. IR::Instr* prevInstr = startMarkerInstr->m_prev;
  23697. startMarkerInstr->Remove();
  23698. return prevInstr;
  23699. }
  23700. IR::Instr* Lowerer::GetLdSpreadIndicesInstr(IR::Instr *instr)
  23701. {
  23702. IR::Opnd *src2 = instr->GetSrc2();
  23703. if (!src2->IsSymOpnd())
  23704. {
  23705. return nullptr;
  23706. }
  23707. IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
  23708. StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
  23709. Assert(argLinkSym->IsSingleDef());
  23710. return argLinkSym->m_instrDef;
  23711. }
  23712. bool Lowerer::IsSpreadCall(IR::Instr *instr)
  23713. {
  23714. IR::Instr *lastInstr = GetLdSpreadIndicesInstr(instr);
  23715. return lastInstr && lastInstr->m_opcode == Js::OpCode::LdSpreadIndices;
  23716. }
  23717. // When under debugger, generate a new label to be used as safe place to jump after ignore exception,
  23718. // insert it after insertAfterInstr, and return the label inserted.
  23719. // Returns nullptr/NoOP for non-debugger code path.
  23720. //static
  23721. IR::LabelInstr* Lowerer::InsertContinueAfterExceptionLabelForDebugger(Func* func, IR::Instr* insertAfterInstr, bool isHelper)
  23722. {
  23723. Assert(func);
  23724. Assert(insertAfterInstr);
  23725. IR::LabelInstr* continueAfterExLabel = nullptr;
  23726. if (func->IsJitInDebugMode())
  23727. {
  23728. continueAfterExLabel = IR::LabelInstr::New(Js::OpCode::Label, func, isHelper);
  23729. insertAfterInstr->InsertAfter(continueAfterExLabel);
  23730. }
  23731. return continueAfterExLabel;
  23732. }
void Lowerer::GenerateSingleCharStrJumpTableLookup(IR::Instr * instr)
{
    // Lowers a multi-branch switch over single-character strings into a jump-table
    // lookup keyed by the (base-adjusted) character code. Strings of length != 1
    // and characters outside [baseCaseValue, lastCaseValue] go to the default target.
    IR::MultiBranchInstr * multiBrInstr = instr->AsBranchInstr()->AsMultiBrInstr();
    Func * func = instr->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    // MOV strLengthOpnd, str->length
    IR::RegOpnd * strLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(strLengthOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func), instr);
    // CMP strLengthOpnd, 1
    // JNE defaultLabel
    IR::LabelInstr * defaultLabelInstr = (IR::LabelInstr *)multiBrInstr->GetBranchJumpTable()->defaultTarget;
    InsertCompareBranch(strLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
    // MOV strBuffer, str->psz
    IR::RegOpnd * strBufferOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(strBufferOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, func), instr);
    // The flat buffer may not be materialized yet (psz == null); call the helper to get it.
    // TST strBuffer, strBuffer
    // JNE $continue
    InsertTestBranch(strBufferOpnd, strBufferOpnd, Js::OpCode::BrNeq_A, continueLabel, instr);
    // $helper:
    // PUSH str
    // CALL JavascriptString::GetSzHelper
    // MOV strBuffer, eax
    // $continue:
    instr->InsertBefore(helperLabel);
    m_lowererMD.LoadHelperArgument(instr, instr->GetSrc1());
    IR::Instr * instrCall = IR::Instr::New(Js::OpCode::Call, strBufferOpnd, IR::HelperCallOpnd::New(IR::HelperString_GetSz, func), func);
    instr->InsertBefore(instrCall);
    m_lowererMD.LowerCall(instrCall, 0);
    instr->InsertBefore(continueLabel);
    // MOV charOpnd, [strBuffer]  (load the single 16-bit character, zero-extended)
    IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(charOpnd, IR::IndirOpnd::New(strBufferOpnd, 0, TyUint16, func), instr);
    if (multiBrInstr->m_baseCaseValue != 0)
    {
        // SUB charOpnd, baseIndex  (rebase so the jump table starts at 0)
        InsertSub(false, charOpnd, charOpnd, IR::IntConstOpnd::New(multiBrInstr->m_baseCaseValue, TyUint32, func), instr);
    }
    // CMP charOpnd, lastCaseIndex - baseCaseIndex
    // JA defaultLabel   (unsigned compare also rejects chars below the base)
    InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(multiBrInstr->m_lastCaseValue - multiBrInstr->m_baseCaseValue, TyUint32, func),
        Js::OpCode::BrGt_A, true, defaultLabelInstr, instr);
    instr->UnlinkSrc1();
    LowerJumpTableMultiBranch(multiBrInstr, charOpnd);
}
  23778. void Lowerer::GenerateSwitchStringLookup(IR::Instr * instr)
  23779. {
  23780. /* Collect information about string length in all the case*/
  23781. charcount_t minLength = UINT_MAX;
  23782. charcount_t maxLength = 0;
  23783. BVUnit32 bvLength;
  23784. instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary()->dictionary.Map([&](JITJavascriptString * str, void *)
  23785. {
  23786. charcount_t len = str->GetLength();
  23787. minLength = min(minLength, str->GetLength());
  23788. maxLength = max(maxLength, str->GetLength());
  23789. if (len < 32)
  23790. {
  23791. bvLength.Set(len);
  23792. }
  23793. });
  23794. Func * func = instr->m_func;
  23795. IR::RegOpnd * strLengthOpnd = IR::RegOpnd::New(TyUint32, func);
  23796. InsertMove(strLengthOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func), instr);
  23797. IR::LabelInstr * defaultLabelInstr = (IR::LabelInstr *)instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary()->defaultTarget;
  23798. if (minLength == maxLength)
  23799. {
  23800. // Generate single length filter
  23801. InsertCompareBranch(strLengthOpnd, IR::IntConstOpnd::New(minLength, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
  23802. }
  23803. else if (maxLength < 32)
  23804. {
  23805. // Generate bit filter
  23806. // Jump to default label if the bit is not on for the length % 32
  23807. IR::IntConstOpnd * lenBitMaskOpnd = IR::IntConstOpnd::New(bvLength.GetWord(), TyUint32, func);
  23808. InsertBitTestBranch(lenBitMaskOpnd, strLengthOpnd, false, defaultLabelInstr, instr);
  23809. // Jump to default label if the bit is > 32
  23810. InsertTestBranch(strLengthOpnd, IR::IntConstOpnd::New(UINT32_MAX ^ 31, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
  23811. }
  23812. else
  23813. {
  23814. // CONSIDER: Generate range filter
  23815. }
  23816. this->LowerMultiBr(instr, IR::HelperOp_SwitchStringLookUp);
  23817. }
  23818. IR::Instr *
  23819. Lowerer::LowerGetCachedFunc(IR::Instr *instr)
  23820. {
  23821. // src1 is an ActivationObjectEx, and we want to get the function object identified by the index (src2)
  23822. // dst = MOV (src1)->GetFuncCacheEntry(src2)->func
  23823. //
  23824. // => [src1 + (offsetof(src1, cache) + (src2 * sizeof(FuncCacheEntry)) + offsetof(FuncCacheEntry, func))]
  23825. IR::IntConstOpnd *src2Opnd = instr->UnlinkSrc2()->AsIntConstOpnd();
  23826. IR::RegOpnd *src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
  23827. IR::Instr *instrPrev = instr->m_prev;
  23828. instr->SetSrc1(IR::IndirOpnd::New(src1Opnd, int32((src2Opnd->GetValue() * sizeof(Js::FuncCacheEntry)) + Js::ActivationObjectEx::GetOffsetOfCache() + offsetof(Js::FuncCacheEntry, func)), TyVar, this->m_func));
  23829. this->m_lowererMD.ChangeToAssign(instr);
  23830. src2Opnd->Free(this->m_func);
  23831. return instrPrev;
  23832. }
IR::Instr *
Lowerer::LowerCommitScope(IR::Instr *instrCommit)
{
    // Lowers CommitScope: set the activation object's commit flag, then write
    // 'undefined' into every local var slot (the slots at and after the first
    // var slot indicated by the formals property-id array).
    IR::Instr *instrPrev = instrCommit->m_prev;
    IR::RegOpnd *baseOpnd = instrCommit->UnlinkSrc1()->AsRegOpnd();
    IR::Opnd *opnd;
    IR::Instr * insertInstr = instrCommit->m_next;
    // Write undef to all the local var slots.
    // Turn the commit instr itself into: obj->commitFlag = 1 (byte store).
    opnd = IR::IndirOpnd::New(baseOpnd, Js::ActivationObjectEx::GetOffsetOfCommitFlag(), TyInt8, this->m_func);
    instrCommit->SetDst(opnd);
    instrCommit->SetSrc1(IR::IntConstOpnd::New(1, TyInt8, this->m_func));
    LowererMD::ChangeToAssign(instrCommit);
    const Js::PropertyIdArray *propIds = instrCommit->m_func->GetJITFunctionBody()->GetFormalsPropIdArray();
    uint firstVarSlot = (uint)Js::ActivationObjectEx::GetFirstVarSlot(propIds);
    if (firstVarSlot < propIds->count)
    {
        // Instead of re-using the address of "undefined" for each store, put the address in a register and re-use that.
        IR::RegOpnd *undefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        InsertMove(undefOpnd, LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueUndefined), insertInstr);
        IR::RegOpnd *slotBaseOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        // Load a pointer to the aux slots. We assume that all ActivationObject's have only aux slots.
        opnd = IR::IndirOpnd::New(baseOpnd, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        InsertMove(slotBaseOpnd, opnd, insertInstr);
        for (uint i = firstVarSlot; i < propIds->count; i++)
        {
            // Slot offset = index scaled by the machine's default indir scale (slot size).
            opnd = IR::IndirOpnd::New(slotBaseOpnd, i << this->m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
            InsertMove(opnd, undefOpnd, insertInstr);
        }
    }
    return instrPrev;
}
  23864. IR::Instr *
  23865. Lowerer::LowerTry(IR::Instr* instr, bool tryCatch)
  23866. {
  23867. if (this->m_func->hasBailout)
  23868. {
  23869. this->EnsureBailoutReturnValueSym();
  23870. }
  23871. this->EnsureHasBailedOutSym();
  23872. IR::SymOpnd * hasBailedOutOpnd = IR::SymOpnd::New(this->m_func->m_hasBailedOutSym, TyUint32, this->m_func);
  23873. IR::Instr * setInstr = IR::Instr::New(LowererMD::GetStoreOp(TyUint32), hasBailedOutOpnd, IR::IntConstOpnd::New(0, TyUint32, this->m_func), this->m_func);
  23874. instr->InsertBefore(setInstr);
  23875. LowererMD::Legalize(setInstr);
  23876. return m_lowererMD.LowerTry(instr, tryCatch ? IR::HelperOp_TryCatch : ((this->m_func->DoOptimizeTry() || (this->m_func->IsSimpleJit() && this->m_func->hasBailout))? IR::HelperOp_TryFinally : IR::HelperOp_TryFinallyNoOpt));
  23877. }
  23878. IR::Instr *
  23879. Lowerer::LowerCatch(IR::Instr * instr)
  23880. {
  23881. // t1 = catch => t2 = catch
  23882. // => t1 = t2
  23883. IR::Opnd *catchObj = instr->UnlinkDst();
  23884. IR::RegOpnd *catchParamReg = IR::RegOpnd::New(TyMachPtr, this->m_func);
  23885. catchParamReg->SetReg(CATCH_OBJ_REG);
  23886. instr->SetDst(catchParamReg);
  23887. IR::Instr * mov = IR::Instr::New(Js::OpCode::Ld_A, catchObj, catchParamReg, this->m_func);
  23888. this->m_lowererMD.ChangeToAssign(mov);
  23889. instr->InsertAfter(mov);
  23890. return instr->m_prev;
  23891. }
  23892. IR::Instr *
  23893. Lowerer::LowerLeave(IR::Instr * leaveInstr, IR::LabelInstr * targetInstr, bool fromFinalLower, bool isOrphanedLeave)
  23894. {
  23895. if (isOrphanedLeave)
  23896. {
  23897. Assert(this->m_func->IsLoopBodyInTry());
  23898. leaveInstr->m_opcode = LowererMD::MDUncondBranchOpcode;
  23899. return leaveInstr->m_prev;
  23900. }
  23901. IR::Instr * instrPrev = leaveInstr->m_prev;
  23902. IR::LabelOpnd *labelOpnd = IR::LabelOpnd::New(targetInstr, this->m_func);
  23903. m_lowererMD.LowerEHRegionReturn(leaveInstr, labelOpnd);
  23904. if (fromFinalLower)
  23905. {
  23906. instrPrev = leaveInstr->m_prev;
  23907. }
  23908. leaveInstr->Remove();
  23909. return instrPrev;
  23910. }
  23911. void
  23912. Lowerer::EnsureBailoutReturnValueSym()
  23913. {
  23914. if (this->m_func->m_bailoutReturnValueSym == nullptr)
  23915. {
  23916. this->m_func->m_bailoutReturnValueSym = StackSym::New(TyVar, this->m_func);
  23917. this->m_func->StackAllocate(this->m_func->m_bailoutReturnValueSym, sizeof(Js::Var));
  23918. }
  23919. }
  23920. void
  23921. Lowerer::EnsureHasBailedOutSym()
  23922. {
  23923. if (this->m_func->m_hasBailedOutSym == nullptr)
  23924. {
  23925. this->m_func->m_hasBailedOutSym = StackSym::New(TyUint32, this->m_func);
  23926. this->m_func->StackAllocate(this->m_func->m_hasBailedOutSym, MachRegInt);
  23927. }
  23928. }
// Emit (once per region) the "bailout return thunk" that a bailed-out EH
// region returns through. The thunk is appended after the function's exit
// instruction and chains to the parent region's thunk (or to restoreLabel
// when the parent is the root region).
void
Lowerer::InsertReturnThunkForRegion(Region* region, IR::LabelInstr* restoreLabel)
{
    Assert(this->m_func->isPostLayout);
    Assert(region->GetType() == RegionTypeTry || region->GetType() == RegionTypeCatch || region->GetType() == RegionTypeFinally);

    if (!region->returnThunkEmitted)
    {
        this->m_func->m_exitInstr->InsertAfter(region->GetBailoutReturnThunkLabel());

        bool newLastInstrInserted = false;
        IR::Instr * insertBeforeInstr = region->GetBailoutReturnThunkLabel()->m_next;
        if (insertBeforeInstr == nullptr)
        {
            // The thunk label became the very last instruction; insert a
            // placeholder Nop so LowerEHRegionReturn has an insertion point.
            // The placeholder is removed again below.
            Assert(this->m_func->m_exitInstr == this->m_func->m_tailInstr);
            insertBeforeInstr = IR::Instr::New(Js::OpCode::Nop, this->m_func);
            newLastInstrInserted = true;
            region->GetBailoutReturnThunkLabel()->InsertAfter(insertBeforeInstr);
            this->m_func->m_tailInstr = insertBeforeInstr;
        }

        IR::LabelOpnd * continuationAddr;
        // We insert a return thunk to the region's parent's return thunk label.
        // For non-exception finallys, we do not need a return thunk, because we
        // are not calling non-exception finallys from within amd64_callWithFakeFrame.
        // But a non-exception finally may be within other EH regions that need
        // a return thunk, so chain to the first ancestor that is not one.
        if (region->IsNonExceptingFinally())
        {
            Assert(region->GetParent()->GetType() != RegionTypeRoot);
            Region *ancestor = region->GetParent()->GetFirstAncestorOfNonExceptingFinallyParent();
            Assert(ancestor && !ancestor->IsNonExceptingFinally());
            if (ancestor->GetType() != RegionTypeRoot)
            {
                continuationAddr = IR::LabelOpnd::New(ancestor->GetBailoutReturnThunkLabel(), this->m_func);
            }
            else
            {
                continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
            }
        }
        else if (region->GetParent()->IsNonExceptingFinally())
        {
            // Parent is a non-exception finally: skip over it to the nearest
            // ancestor that actually has a thunk.
            Region *ancestor = region->GetFirstAncestorOfNonExceptingFinally();
            if (ancestor && ancestor->GetType() != RegionTypeRoot)
            {
                continuationAddr = IR::LabelOpnd::New(ancestor->GetBailoutReturnThunkLabel(), this->m_func);
            }
            else
            {
                continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
            }
        }
        else if (region->GetParent()->GetType() != RegionTypeRoot)
        {
            continuationAddr = IR::LabelOpnd::New(region->GetParent()->GetBailoutReturnThunkLabel(), this->m_func);
        }
        else
        {
            // Parent is the root: continue at the return-value restore code.
            continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
        }

        IR::Instr * lastInstr = m_lowererMD.LowerEHRegionReturn(insertBeforeInstr, continuationAddr);

        if (newLastInstrInserted)
        {
            // Drop the placeholder Nop and make the emitted return the tail.
            Assert(this->m_func->m_tailInstr == insertBeforeInstr);
            insertBeforeInstr->Remove();
            this->m_func->m_tailInstr = lastInstr;
        }

        region->returnThunkEmitted = true;
    }
}
  23996. void
  23997. Lowerer::SetHasBailedOut(IR::Instr * bailoutInstr)
  23998. {
  23999. Assert(this->m_func->isPostLayout);
  24000. IR::SymOpnd * hasBailedOutOpnd = IR::SymOpnd::New(this->m_func->m_hasBailedOutSym, TyUint32, this->m_func);
  24001. IR::Instr * setInstr = IR::Instr::New(LowererMD::GetStoreOp(TyUint32), hasBailedOutOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), this->m_func);
  24002. bailoutInstr->InsertBefore(setInstr);
  24003. LowererMD::Legalize(setInstr);
  24004. }
// On x86, out-params of calls that were in flight at the bailout point were
// pushed on the stack; pop them back off (via LEA on esp) so the stack is
// balanced before jumping to the return thunk. Other targets need no fixup.
// Returns the last instruction emitted, or bailoutInstr when nothing was.
IR::Instr*
Lowerer::EmitEHBailoutStackRestore(IR::Instr * bailoutInstr)
{
    Assert(this->m_func->isPostLayout);
#ifdef _M_IX86
    BailOutInfo * bailoutInfo = bailoutInstr->GetBailOutInfo();
    uint totalLiveArgCount = 0;
    if (bailoutInfo->startCallCount != 0)
    {
        uint totalStackToBeRestored = 0;
        uint stackAlignmentAdjustment = 0;
        for (uint i = 0; i < bailoutInfo->startCallCount; i++)
        {
            // Orphaned calls never pushed their args; count them as zero.
            uint startCallLiveArgCount = bailoutInfo->startCallInfo[i].isOrphanedCall ? 0 : bailoutInfo->GetStartCallOutParamCount(i);
            // Each call's arg area is aligned up to MachStackAlignment; if the
            // raw arg bytes weren't already aligned, one pad slot was added.
            if ((Math::Align<int32>(startCallLiveArgCount * MachPtr, MachStackAlignment) - (startCallLiveArgCount * MachPtr)) != 0)
            {
                stackAlignmentAdjustment++;
            }
            totalLiveArgCount += startCallLiveArgCount;
        }
        totalStackToBeRestored = (totalLiveArgCount + stackAlignmentAdjustment) * MachPtr;
        // LEA esp, [esp + totalStackToBeRestored]
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegStackPointer(), TyMachReg, this->m_func);
        IR::Opnd * opnd = IR::IndirOpnd::New(espOpnd, totalStackToBeRestored, TyMachReg, this->m_func);
        IR::Instr * stackRestoreInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);
        bailoutInstr->InsertAfter(stackRestoreInstr);
        return stackRestoreInstr;
    }
#endif
    return bailoutInstr;
}
  24035. void
  24036. Lowerer::EmitSaveEHBailoutReturnValueAndJumpToRetThunk(IR::Instr * insertAfterInstr)
  24037. {
  24038. Assert(this->m_func->isPostLayout);
  24039. // After the CALL SaveAllRegistersAndBailout instruction, emit
  24040. //
  24041. // MOV bailoutReturnValueSym, eax
  24042. // JMP $currentRegion->bailoutReturnThunkLabel
  24043. IR::SymOpnd * bailoutReturnValueSymOpnd = IR::SymOpnd::New(this->m_func->m_bailoutReturnValueSym, TyVar, this->m_func);
  24044. IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  24045. IR::Instr * movInstr = IR::Instr::New(LowererMD::GetStoreOp(TyVar), bailoutReturnValueSymOpnd, eaxOpnd, this->m_func);
  24046. insertAfterInstr->InsertAfter(movInstr);
  24047. LowererMD::Legalize(movInstr);
  24048. IR::BranchInstr * jumpInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, this->currentRegion->GetBailoutReturnThunkLabel(), this->m_func);
  24049. movInstr->InsertAfter(jumpInstr);
  24050. }
  24051. void
  24052. Lowerer::EmitRestoreReturnValueFromEHBailout(IR::LabelInstr * restoreLabel, IR::LabelInstr * epilogLabel)
  24053. {
  24054. Assert(this->m_func->isPostLayout);
  24055. // JMP $epilog
  24056. // $restore:
  24057. // MOV eax, bailoutReturnValueSym
  24058. // $epilog:
  24059. IR::SymOpnd * bailoutReturnValueSymOpnd = IR::SymOpnd::New(this->m_func->m_bailoutReturnValueSym, TyVar, this->m_func);
  24060. IR::RegOpnd * eaxOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  24061. IR::Instr * movInstr = IR::Instr::New(LowererMD::GetLoadOp(TyVar), eaxOpnd, bailoutReturnValueSymOpnd, this->m_func);
  24062. epilogLabel->InsertBefore(restoreLabel);
  24063. epilogLabel->InsertBefore(movInstr);
  24064. LowererMD::Legalize(movInstr);
  24065. restoreLabel->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, epilogLabel, this->m_func));
  24066. }
// Emit "test bit `bitIndex` of `bitMaskOpnd` and branch to `targetLabel`"
// before insertBeforeInstr. jumpIfBitOn selects branch-on-set vs branch-on-clear.
// Each target architecture gets its own instruction sequence.
void
Lowerer::InsertBitTestBranch(IR::Opnd * bitMaskOpnd, IR::Opnd * bitIndex, bool jumpIfBitOn, IR::LabelInstr * targetLabel, IR::Instr * insertBeforeInstr)
{
#if defined(_M_IX86) || defined(_M_AMD64)
    // Generate bit test and branch
    //      BT bitMaskOpnd, bitIndex
    //      JB/JAE targetLabel
    Func * func = this->m_func;
    IR::Instr * instr = IR::Instr::New(Js::OpCode::BT, func);
    instr->SetSrc1(bitMaskOpnd);
    instr->SetSrc2(bitIndex);
    insertBeforeInstr->InsertBefore(instr);
    if (!(bitMaskOpnd->IsRegOpnd() || bitMaskOpnd->IsIndirOpnd() || bitMaskOpnd->IsMemRefOpnd()))
    {
        // BT needs a register or memory first operand; hoist anything else.
        instr->HoistSrc1(Js::OpCode::MOV);
    }
    // BT puts the selected bit in CF: JB == bit set, JAE == bit clear.
    InsertBranch(jumpIfBitOn ? Js::OpCode::JB : Js::OpCode::JAE, targetLabel, insertBeforeInstr);
#elif defined(_M_ARM)
    // ARM doesn't have a bit test instruction, so generate
    //      MOV r1, 1
    //      SHL r1, bitIndex
    //      TEST bitMaskOpnd, r1
    //      BEQ/BNEQ targetLabel
    Func * func = this->m_func;
    IR::RegOpnd * lenBitOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(lenBitOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    InsertShift(Js::OpCode::Shl_I4, false, lenBitOpnd, lenBitOpnd, bitIndex, insertBeforeInstr);
    InsertTestBranch(lenBitOpnd, bitMaskOpnd, jumpIfBitOn ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, targetLabel, insertBeforeInstr);
#elif defined(_M_ARM64)
    if (bitIndex->IsImmediateOpnd())
    {
        // TBZ/TBNZ bitMaskOpnd, bitIndex, targetLabel
        IR::Instr* branchInstr = InsertBranch(jumpIfBitOn ? Js::OpCode::TBNZ : Js::OpCode::TBZ, targetLabel, insertBeforeInstr);
        branchInstr->SetSrc1(bitMaskOpnd);
        branchInstr->SetSrc2(bitIndex);
    }
    else
    {
        // TBZ/TBNZ require an immediate for the bit to test, so shift the mask to place the bit we want to test at bit zero, and then test bit zero.
        Func * func = this->m_func;
        IR::RegOpnd * maskOpnd = IR::RegOpnd::New(TyUint32, func);
        InsertShift(Js::OpCode::Shr_I4, false, maskOpnd, bitMaskOpnd, bitIndex, insertBeforeInstr);
        IR::Instr* branchInstr = InsertBranch(jumpIfBitOn ? Js::OpCode::TBNZ : Js::OpCode::TBZ, targetLabel, insertBeforeInstr);
        branchInstr->SetSrc1(maskOpnd);
        branchInstr->SetSrc2(IR::IntConstOpnd::New(0, TyUint32, this->m_func));
    }
#else
    AssertMsg(false, "Not implemented");
#endif
}
  24117. //
  24118. // Generates an object test and then a string test with the static string type
  24119. //
  24120. void
  24121. Lowerer::GenerateStringTest(IR::RegOpnd *srcReg, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr * continueLabel, bool generateObjectCheck)
  24122. {
  24123. Assert(srcReg);
  24124. if (!srcReg->GetValueType().IsString())
  24125. {
  24126. if (generateObjectCheck && !srcReg->IsNotTaggedValue())
  24127. {
  24128. this->m_lowererMD.GenerateObjectTest(srcReg, insertInstr, labelHelper);
  24129. }
  24130. // CMP [regSrcStr + offset(type)] , static string type -- check base string type
  24131. // BrEq/BrNeq labelHelper.
  24132. IR::IndirOpnd * src1 = IR::IndirOpnd::New(srcReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
  24133. IR::Opnd * src2 = this->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueStringTypeStatic);
  24134. IR::BranchInstr* branchInstr = nullptr;
  24135. if (continueLabel)
  24136. {
  24137. branchInstr = InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, continueLabel, insertInstr);
  24138. }
  24139. else
  24140. {
  24141. branchInstr = InsertCompareBranch(src1, src2, Js::OpCode::BrNeq_A, labelHelper, insertInstr);
  24142. }
  24143. InsertObjectPoison(srcReg, branchInstr, insertInstr, false);
  24144. }
  24145. }
  24146. //
  24147. // Generates an object test and then a symbol test with the static symbol type
  24148. //
  24149. void
  24150. Lowerer::GenerateSymbolTest(IR::RegOpnd *srcReg, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr * continueLabel, bool generateObjectCheck)
  24151. {
  24152. Assert(srcReg);
  24153. if (!srcReg->GetValueType().IsSymbol())
  24154. {
  24155. if (generateObjectCheck && !srcReg->IsNotTaggedValue())
  24156. {
  24157. this->m_lowererMD.GenerateObjectTest(srcReg, insertInstr, labelHelper);
  24158. }
  24159. // CMP [regSrcStr + offset(type)] , static symbol type -- check base symbol type
  24160. // BrEq/BrNeq labelHelper.
  24161. IR::IndirOpnd * src1 = IR::IndirOpnd::New(srcReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
  24162. IR::Opnd * src2 = this->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueSymbolTypeStatic);
  24163. if (continueLabel)
  24164. {
  24165. InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, continueLabel, insertInstr);
  24166. }
  24167. else
  24168. {
  24169. InsertCompareBranch(src1, src2, Js::OpCode::BrNeq_A, labelHelper, insertInstr);
  24170. }
  24171. }
  24172. }
// Lower Conv_Num. With fast paths disabled (or a non-register source) the
// whole op becomes a ConvNumber helper call; otherwise emit a tagged-int
// fast path and fall back to the helper only for non-int values.
void
Lowerer::LowerConvNum(IR::Instr *instrLoad, bool noMathFastPath)
{
    if (PHASE_OFF(Js::OtherFastPathPhase, this->m_func) || noMathFastPath || !instrLoad->GetSrc1()->IsRegOpnd())
    {
        this->LowerUnaryHelperMemWithTemp2(instrLoad, IR_HELPER_OP_FULL_OR_INPLACE(ConvNumber));
        return;
    }

    // MOV dst, src1
    // TEST src1, 1
    // JNE $done
    // call ToNumber
    //$done:

    bool isInt = false;
    bool isNotInt = false;
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelDone = NULL;
    IR::Instr *instr;

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }
    if (!isNotInt)
    {
        // Possibly an int: copy src to dst up front.
        // MOV dst, src1
        instr = Lowerer::InsertMove(instrLoad->GetDst(), src1, instrLoad);

        if (!isInt)
        {
            // Not provably an int: emit a runtime test that jumps past the
            // helper (to labelDone) when the value is not an object pointer.
            labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            bool didTest = m_lowererMD.GenerateObjectTest(src1, instrLoad, labelDone);
            if (didTest)
            {
                // This label is needed only to mark the helper block
                IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
                instrLoad->InsertBefore(labelHelper);
            }
        }
    }

    if (!isInt)
    {
        if (labelDone)
        {
            instrLoad->InsertAfter(labelDone);
        }
        // Slow path: the helper performs the actual conversion.
        this->LowerUnaryHelperMemWithTemp2(instrLoad, IR_HELPER_OP_FULL_OR_INPLACE(ConvNumber));
    }
    else
    {
        // Known tagged int: the move above is the whole conversion.
        instrLoad->Remove();
    }
}
// Return an operand for the slot array implied by an inline cache hit on the
// property owner itself: the aux slot array (loading it if needed, possibly
// into the shared aux-slot-ptr sym) or the object base when inline slots are used.
IR::Opnd *
Lowerer::LoadSlotArrayWithCachedLocalType(IR::Instr * instrInsert, IR::PropertySymOpnd *propertySymOpnd)
{
    IR::RegOpnd *opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    if (propertySymOpnd->UsesAuxSlot())
    {
        // If we use the auxiliary slot array, load it and return it
        IR::RegOpnd * opndSlotArray;
        if (propertySymOpnd->IsAuxSlotPtrSymAvailable() || propertySymOpnd->ProducesAuxSlotPtr())
        {
            // We want to reload and/or reuse the shared aux slot ptr sym
            StackSym * auxSlotPtrSym = propertySymOpnd->GetAuxSlotPtrSym();
            Assert(auxSlotPtrSym != nullptr);
            opndSlotArray = IR::RegOpnd::New(auxSlotPtrSym, TyMachReg, this->m_func);
            opndSlotArray->SetIsJITOptimizedReg(true);
            if (!propertySymOpnd->ProducesAuxSlotPtr())
            {
                // No need to reload: the sym already holds the aux slot pointer.
                return opndSlotArray;
            }
        }
        else
        {
            // No shared sym; load into a fresh temp register.
            opndSlotArray = IR::RegOpnd::New(TyMachReg, this->m_func);
        }
        // opndSlotArray = [base + offsetof(auxSlots)]
        IR::Opnd *opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        Lowerer::InsertMove(opndSlotArray, opndIndir, instrInsert);
        return opndSlotArray;
    }
    else
    {
        // If we use inline slots, return the address of the object header.
        return opndBase;
    }
}
  24263. IR::Opnd *
  24264. Lowerer::LoadSlotArrayWithCachedProtoType(IR::Instr * instrInsert, IR::PropertySymOpnd *propertySymOpnd)
  24265. {
  24266. // Get the prototype object from the cache
  24267. intptr_t prototypeObject = propertySymOpnd->GetProtoObject();
  24268. Assert(prototypeObject != 0);
  24269. if (propertySymOpnd->UsesAuxSlot())
  24270. {
  24271. // If we use the auxiliary slot array, load it from the prototype object and return it
  24272. IR::RegOpnd *opndSlotArray = IR::RegOpnd::New(TyMachReg, this->m_func);
  24273. IR::Opnd *opnd = IR::MemRefOpnd::New((char*)prototypeObject + Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func, IR::AddrOpndKindDynamicAuxSlotArrayRef);
  24274. Lowerer::InsertMove(opndSlotArray, opnd, instrInsert);
  24275. return opndSlotArray;
  24276. }
  24277. else
  24278. {
  24279. // If we use inline slot return the address of the prototype object
  24280. return IR::MemRefOpnd::New(prototypeObject, TyMachReg, this->m_func);
  24281. }
  24282. }
  24283. IR::Instr *
  24284. Lowerer::LowerLdAsmJsEnv(IR::Instr * instr)
  24285. {
  24286. Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
  24287. IR::Opnd * functionObjOpnd;
  24288. IR::Instr * instrPrev = this->m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  24289. Assert(!instr->GetSrc1());
  24290. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::AsmJsScriptFunction::GetOffsetOfModuleMemory(), TyMachPtr, m_func);
  24291. instr->SetSrc1(indirOpnd);
  24292. LowererMD::ChangeToAssign(instr);
  24293. return instrPrev;
  24294. }
  24295. IR::Instr *
  24296. Lowerer::LowerLdNativeCodeData(IR::Instr * instr)
  24297. {
  24298. Assert(!instr->GetSrc1());
  24299. Assert(m_func->IsTopFunc());
  24300. IR::Instr * instrPrev = instr->m_prev;
  24301. instr->SetSrc1(IR::MemRefOpnd::New((void*)m_func->GetWorkItem()->GetWorkItemData()->nativeDataAddr, TyMachPtr, m_func, IR::AddrOpndKindDynamicNativeCodeDataRef));
  24302. LowererMD::ChangeToAssign(instr);
  24303. return instrPrev;
  24304. }
  24305. IR::Instr *
  24306. Lowerer::LowerLdEnv(IR::Instr * instr)
  24307. {
  24308. IR::Opnd * src1 = instr->GetSrc1();
  24309. IR::Opnd * functionObjOpnd;
  24310. IR::Instr * instrPrev = this->m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  24311. Assert(!instr->GetSrc1());
  24312. if (src1 == nullptr || functionObjOpnd->IsRegOpnd())
  24313. {
  24314. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
  24315. Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, m_func);
  24316. instr->SetSrc1(indirOpnd);
  24317. }
  24318. else
  24319. {
  24320. Assert(functionObjOpnd->IsAddrOpnd());
  24321. IR::AddrOpnd* functionObjAddrOpnd = functionObjOpnd->AsAddrOpnd();
  24322. IR::MemRefOpnd* functionEnvMemRefOpnd = IR::MemRefOpnd::New((void *)((intptr_t)functionObjAddrOpnd->m_address + Js::ScriptFunction::GetOffsetOfEnvironment()),
  24323. TyMachPtr, this->m_func, IR::AddrOpndKindDynamicFunctionEnvironmentRef);
  24324. instr->SetSrc1(functionEnvMemRefOpnd);
  24325. }
  24326. LowererMD::ChangeToAssign(instr);
  24327. return instrPrev;
  24328. }
  24329. IR::Instr *
  24330. Lowerer::LowerLdSuper(IR::Instr *instr, IR::JnHelperMethod helperOpCode)
  24331. {
  24332. IR::Opnd * functionObjOpnd;
  24333. IR::Instr * instrPrev = m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  24334. LoadScriptContext(instr);
  24335. m_lowererMD.LoadHelperArgument(instr, functionObjOpnd);
  24336. m_lowererMD.ChangeToHelperCall(instr, helperOpCode);
  24337. return instrPrev;
  24338. }
// Lower FrameDisplayCheck: emit runtime validation that the frame display has
// enough scopes and that each checked slot array in the scope chain is large
// enough, per the FrameDisplayCheckRecord in src2. Any failure calls the
// FatalInternalError helper. The instruction itself becomes a plain assign.
IR::Instr *
Lowerer::LowerFrameDisplayCheck(IR::Instr * instr)
{
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *insertInstr = instr->m_next;
    IR::AddrOpnd *addrOpnd = instr->UnlinkSrc2()->AsAddrOpnd();
    FrameDisplayCheckRecord *record = (FrameDisplayCheckRecord*)addrOpnd->m_address;

    IR::LabelInstr *errorLabel = nullptr;
    IR::LabelInstr *continueLabel = nullptr;
    IR::RegOpnd *envOpnd = instr->GetDst()->AsRegOpnd();
    uint32 frameDisplayOffset = Js::FrameDisplay::GetOffsetOfScopes()/sizeof(Js::Var);

    if (record->slotId != (uint32)-1 && record->slotId > frameDisplayOffset)
    {
        // Check that the frame display has enough scopes in it to satisfy the code.
        errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(envOpnd,
                                                       Js::FrameDisplay::GetOffsetOfLength(),
                                                       TyUint16, m_func, true);
        IR::IntConstOpnd *slotIdOpnd = IR::IntConstOpnd::New(record->slotId - frameDisplayOffset, TyUint16, m_func);
        InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrLe_A, true, errorLabel, insertInstr);
    }

    if (record->table)
    {
        // Check the size of each of the slot arrays in the scope chain.
        FOREACH_HASHTABLE_ENTRY(uint32, bucket, record->table)
        {
            uint32 slotId = bucket.element;
            if (slotId != (uint32)-1 && slotId > Js::ScopeSlots::FirstSlotIndex)
            {
                // Labels are created lazily: only if at least one check is emitted.
                if (errorLabel == nullptr)
                {
                    errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
                    continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
                }

                // Load the slot array pointer for this scope-chain entry...
                IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(envOpnd,
                                                               bucket.value * sizeof(Js::Var),
                                                               TyVar, m_func, true);
                IR::RegOpnd * slotArrayOpnd = IR::RegOpnd::New(TyVar, m_func);
                InsertMove(slotArrayOpnd, indirOpnd, insertInstr);

                // ...and compare its encoded slot count against the required slot id.
                indirOpnd = IR::IndirOpnd::New(slotArrayOpnd,
                                               Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
                                               TyVar, m_func, true);
                IR::IntConstOpnd * slotIdOpnd = IR::IntConstOpnd::New(slotId - Js::ScopeSlots::FirstSlotIndex,
                                                                      TyUint32, m_func);
                InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrLe_A, true, errorLabel, insertInstr);
            }
        }
        NEXT_HASHTABLE_ENTRY;
    }

    if (errorLabel)
    {
        // Failure path: call the fatal-error helper; success path jumps over it.
        InsertBranch(Js::OpCode::Br, continueLabel, insertInstr);
        insertInstr->InsertBefore(errorLabel);
        IR::Instr * instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
        insertInstr->InsertBefore(instrHelper);
        m_lowererMD.ChangeToHelperCall(instrHelper, IR::HelperOp_FatalInternalError);
        insertInstr->InsertBefore(continueLabel);
    }

    m_lowererMD.ChangeToAssign(instr);
    return instrPrev;
}
// Lower SlotArrayCheck: emit a runtime check that the slot array's encoded
// slot count covers the slot id in src2; on failure call the
// FatalInternalError helper. The instruction itself becomes a plain assign.
IR::Instr *
Lowerer::LowerSlotArrayCheck(IR::Instr * instr)
{
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *insertInstr = instr->m_next;

    IR::RegOpnd *slotArrayOpnd = instr->GetDst()->AsRegOpnd();
    StackSym *stackSym = slotArrayOpnd->m_sym;

    IR::IntConstOpnd *slotIdOpnd = instr->UnlinkSrc2()->AsIntConstOpnd();
    uint32 slotId = (uint32)slotIdOpnd->GetValue();
    Assert(slotId != (uint32)-1 && slotId >= Js::ScopeSlots::FirstSlotIndex);

    // slotId == FirstSlotIndex needs no check (nothing beyond the header is read).
    if (slotId > Js::ScopeSlots::FirstSlotIndex)
    {
        if (m_func->DoStackFrameDisplay() && stackSym->m_id == m_func->GetLocalClosureSym()->m_id)
        {
            // The pointer we loaded points to the reserved/known address where the slot array can be boxed.
            // Deref to get the real value.
            IR::IndirOpnd * srcOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(stackSym, TyVar, m_func), 0, TyVar, m_func);
            IR::RegOpnd * dstOpnd = IR::RegOpnd::New(TyVar, m_func);
            InsertMove(dstOpnd, srcOpnd, insertInstr);
            stackSym = dstOpnd->m_sym;
        }

        IR::LabelInstr *errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr *continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // Compare the slot array's encoded slot count against the required id.
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(stackSym, TyVar, m_func),
                                                       Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
                                                       TyVar, m_func, true);
        // Reuse the unlinked src2 const opnd, rebased to FirstSlotIndex.
        slotIdOpnd->SetValue(slotId - Js::ScopeSlots::FirstSlotIndex);
        InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrGt_A, true, continueLabel, insertInstr);

        // Failure path: fatal internal error.
        insertInstr->InsertBefore(errorLabel);
        IR::Instr * instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
        insertInstr->InsertBefore(instrHelper);
        m_lowererMD.ChangeToHelperCall(instrHelper, IR::HelperOp_FatalInternalError);

        insertInstr->InsertBefore(continueLabel);
    }

    m_lowererMD.ChangeToAssign(instr);
    return instrPrev;
}
// Convert a profiled likely-float index value to an int32 register.
// Fast-paths the tagged-int case first (unless the value is known non-int),
// then untags/loads the float, converts it to int32 (helper call on the slow
// path), and verifies the conversion was exact by converting back and
// comparing; jumps to notIntLabel on any non-integral value.
IR::RegOpnd *
Lowerer::LoadIndexFromLikelyFloat(
    IR::RegOpnd *indexOpnd,
    const bool skipNegativeCheck,
    IR::LabelInstr *const notIntLabel,
    IR::LabelInstr *const negativeLabel,
    IR::Instr *const insertBeforeInstr)
{
#ifdef _M_IX86
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif
    Func *func = insertBeforeInstr->m_func;
    IR::LabelInstr * fallThrough = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::RegOpnd *int32IndexOpnd = nullptr;

    // If we know for sure that it's not an int, do not check to see if it's a tagged int
    if (indexOpnd->IsNotInt())
    {
        int32IndexOpnd = IR::RegOpnd::New(TyInt32, func);
    }
    else
    {
        IR::LabelInstr * convertToUint = IR::LabelInstr::New(Js::OpCode::Label, func);

        // First generate test for tagged int even though profile data says likely float. Indices are usually int and we need a fast path before we try to convert float to int
        //     mov intIndex, index
        //     sar intIndex, 1
        //     jae convertToInt
        int32IndexOpnd = GenerateUntagVar(indexOpnd, convertToUint, insertBeforeInstr, !indexOpnd->IsTaggedInt());

        if (!skipNegativeCheck)
        {
            //     test index, index
            //     js $notTaggedIntOrNegative
            InsertTestBranch(int32IndexOpnd, int32IndexOpnd, LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), negativeLabel, insertBeforeInstr);
        }
        InsertBranch(Js::OpCode::Br, fallThrough, insertBeforeInstr);

        insertBeforeInstr->InsertBefore(convertToUint);
    }

    // try to convert float to int in a fast path
#if FLOATVAR
    IR::RegOpnd* floatIndexOpnd = m_lowererMD.CheckFloatAndUntag(indexOpnd, insertBeforeInstr, notIntLabel);
#else
    // Boxed doubles: verify it's a JavascriptNumber, then read the value in place.
    m_lowererMD.GenerateFloatTest(indexOpnd, insertBeforeInstr, notIntLabel);
    IR::IndirOpnd * floatIndexOpnd = IR::IndirOpnd::New(indexOpnd, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
#endif

    IR::LabelInstr * doneConvUint32 = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * helperConvUint32 = IR::LabelInstr::New(Js::OpCode::Label, func, true /*helper*/);
    m_lowererMD.ConvertFloatToInt32(int32IndexOpnd, floatIndexOpnd, helperConvUint32, doneConvUint32, insertBeforeInstr);

    // helper path: call Conv_ToUInt32Core on the double.
    insertBeforeInstr->InsertBefore(helperConvUint32);
    m_lowererMD.LoadDoubleHelperArgument(insertBeforeInstr, floatIndexOpnd);
    IR::Instr * helperCall = IR::Instr::New(Js::OpCode::Call, int32IndexOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(helperCall);
#if DBG
    // This call to Conv_ToUint32Core wont be reentrant as we would only call it for floats
    this->ClearAndSaveImplicitCallCheckOnHelperCallCheckState();
#endif
    m_lowererMD.ChangeToHelperCall(helperCall, IR::HelperConv_ToUInt32Core);
#if DBG
    this->RestoreImplicitCallCheckOnHelperCallCheckState();
#endif

    // main path
    insertBeforeInstr->InsertBefore(doneConvUint32);

    // Convert uint32 back to float to verify the conversion was exact:
    // we need floatIndex == (float64)(uint32)floatIndex
    IR::RegOpnd *floatOpndFromUint32 = IR::RegOpnd::New(TyFloat64, func);
    m_lowererMD.EmitUIntToFloat(floatOpndFromUint32, int32IndexOpnd->UseWithNewType(TyUint32, this->m_func), insertBeforeInstr);
    InsertCompareBranch(floatOpndFromUint32, floatIndexOpnd, Js::OpCode::BrNeq_A, notIntLabel, insertBeforeInstr, false);

    insertBeforeInstr->InsertBefore(fallThrough);
    return int32IndexOpnd;
}
  24508. void
  24509. Lowerer::AllocStackForInObjectEnumeratorArray()
  24510. {
  24511. Func * func = this->m_func;
  24512. Assert(func->IsTopFunc());
  24513. if (func->m_forInLoopMaxDepth)
  24514. {
  24515. func->m_forInEnumeratorArrayOffset = func->StackAllocate(sizeof(Js::ForInObjectEnumerator) * this->m_func->m_forInLoopMaxDepth);
  24516. }
  24517. }
// Materialize the address of a ForInObjectEnumerator as a register operand.
// For a SymOpnd, the sym's m_offset initially encodes the for-in loop level;
// it is rewritten here to the real stack offset and marked allocated.
// For an IndirOpnd with offset 0 the base register is returned as-is.
IR::RegOpnd *
Lowerer::GenerateForInEnumeratorLoad(IR::Opnd * forInEnumeratorOpnd, IR::Instr * insertBeforeInstr)
{
    Func * func = insertBeforeInstr->m_func;
    if (forInEnumeratorOpnd->IsSymOpnd())
    {
        StackSym * stackSym = forInEnumeratorOpnd->AsSymOpnd()->GetStackSym();
        Assert(!stackSym->m_allocated);
        // Before allocation, m_offset holds the loop nesting level.
        uint forInLoopLevel = stackSym->m_offset;
        Assert(func->m_forInLoopBaseDepth + forInLoopLevel < this->m_func->m_forInLoopMaxDepth);
        // Rewrite to the slot's real offset in the per-function enumerator array.
        stackSym->m_offset = this->m_func->m_forInEnumeratorArrayOffset + ((func->m_forInLoopBaseDepth + forInLoopLevel) * sizeof(Js::ForInObjectEnumerator));
        stackSym->m_allocated = true;
    }
    else
    {
        Assert(forInEnumeratorOpnd->IsIndirOpnd());
        if (forInEnumeratorOpnd->AsIndirOpnd()->GetOffset() == 0)
        {
            // [base + 0]: the base register already is the address.
            return forInEnumeratorOpnd->AsIndirOpnd()->GetBaseOpnd();
        }
    }
    // LEA the effective address into a fresh register.
    IR::RegOpnd * forInEnumeratorRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertLea(forInEnumeratorRegOpnd, forInEnumeratorOpnd, insertBeforeInstr);
    return forInEnumeratorRegOpnd;
}
// Branch to hasObjectArrayLabel when the object has a real object array;
// fall through when it has none (null pointer, or the field holds tagged
// array flags rather than a pointer, or the type uses header-inlined slots).
void
Lowerer::GenerateHasObjectArrayCheck(IR::RegOpnd * objectOpnd, IR::RegOpnd * typeOpnd, IR::LabelInstr * hasObjectArrayLabel, IR::Instr * insertBeforeInstr)
{
    //      CMP [objectOpnd + offset(objectArray)], nullptr
    //      JEQ $noObjectArrayLabel
    //      TEST [objectOpnd + offset(objectArray)], ObjectArrayFlagsTag    (field used as flags, not a pointer)
    //      JNE $noObjectArrayLabel
    //      MOV typeHandlerOpnd, [typeOpnd + offset(typeHandler)]
    //      CMP typeHandler->OffsetOfInlineSlots, Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots()
    //      JNE $hasObjectArrayLabel
    //   $noObjectArrayLabel: (fall thru)
    Func * func = this->m_func;
    IR::LabelInstr * noObjectArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::IndirOpnd * objectArrayOpnd = IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfObjectArray(), TyMachPtr, func);
    InsertCompareBranch(objectArrayOpnd, IR::AddrOpnd::NewNull(func), Js::OpCode::BrEq_A, noObjectArrayLabel, insertBeforeInstr);
    InsertTestBranch(objectArrayOpnd, IR::IntConstOpnd::New((uint32)Js::DynamicObjectFlags::ObjectArrayFlagsTag, TyUint8, func),
                     Js::OpCode::BrNeq_A, noObjectArrayLabel, insertBeforeInstr);
    IR::RegOpnd * typeHandlerOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(typeHandlerOpnd, IR::IndirOpnd::New(typeOpnd, Js::DynamicType::GetOffsetOfTypeHandler(), TyMachPtr, func), insertBeforeInstr);
    InsertCompareBranch(IR::IndirOpnd::New(typeHandlerOpnd, Js::DynamicTypeHandler::GetOffsetOfOffsetOfInlineSlots(), TyUint16, func),
                        IR::IntConstOpnd::New(Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots(), TyUint16, func),
                        Js::OpCode::BrNeq_A, hasObjectArrayLabel, insertBeforeInstr);
    insertBeforeInstr->InsertBefore(noObjectArrayLabel);
}
void
Lowerer::GenerateInitForInEnumeratorFastPath(IR::Instr * instr, Js::EnumeratorCache * forInCache)
{
    // Emits the inline fast path for initializing a ForInObjectEnumerator using the
    // profiled EnumeratorCache. Guards (type match, enumerable-only cache, no object
    // array anywhere on the relevant prototype chain) jump to $helper, which falls
    // back to the OP_InitForInEnumerator helper call emitted by the caller; on
    // success the enumerator fields are initialized inline and control jumps to
    // $done past the helper call.
    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::RegOpnd * objectOpnd = instr->GetSrc1()->AsRegOpnd();

    // Tagged check and object check
    m_lowererMD.GenerateObjectTest(objectOpnd, instr, helperLabel);
    GenerateIsDynamicObject(objectOpnd, instr, helperLabel);

    // Type check with cache
    //
    //      MOV typeOpnd, [objectOpnd + offset(type)]
    //      CMP [&forInCache->type], typeOpnd
    //      JNE $helper
    IR::RegOpnd * typeOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(typeOpnd, IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, func), instr);
    InsertCompareBranch(IR::MemRefOpnd::New(&forInCache->type, TyMachPtr, func, IR::AddrOpndKindForInCacheType), typeOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);

    // Check forInCacheData->EnumNonEnumerable == false
    //
    //      MOV forInCacheDataOpnd, [&forInCache->data]
    //      CMP forInCacheDataOpnd->enumNonEnumerable, 0
    //      JNE $helper
    IR::RegOpnd * forInCacheDataOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(forInCacheDataOpnd, IR::MemRefOpnd::New(&forInCache->data, TyMachPtr, func, IR::AddrOpndKindForInCacheData), instr);
    InsertCompareBranch(IR::IndirOpnd::New(forInCacheDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataEnumNonEnumerable(), TyUint8, func),
        IR::IntConstOpnd::New(0, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);

    // Check has object array
    GenerateHasObjectArrayCheck(objectOpnd, typeOpnd, helperLabel, instr);

    // Check first prototype with enumerable properties
    //
    //      MOV prototypeObjectOpnd, [type + offset(prototype)]
    //      MOV prototypeTypeOpnd, [prototypeObjectOpnd + offset(type)]
    //      CMP [prototypeTypeOpnd + offset(typeId)], TypeIds_Null
    //      JEQ $noPrototypeWithEnumerablePropertiesLabel
    //
    //  $checkFirstPrototypeLoopTopLabel:
    //      CMP [prototypeTypeOpnd + offset(typeId)], TypeIds_LastStaticType
    //      JLE $helper
    //      CMP [prototypeTypeOpnd + offset(hasNoEnumerableProperties)], 0
    //      JEQ $helper
    //      <hasObjectArrayCheck prototypeObjectOpnd, prototypeTypeOpnd>
    //
    //      MOV prototypeObjectOpnd, [prototypeTypeOpnd + offset(prototype)]   (load next prototype)
    //
    //      MOV prototypeTypeOpnd, [prototypeObjectOpnd + offset(type)]        (tail dup TypeIds_Null check)
    //      CMP [prototypeTypeOpnd + offset(typeId)], TypeIds_Null
    //      JNE $checkFirstPrototypeLoopTopLabel
    //
    //  $noPrototypeWithEnumerablePropertiesLabel:
    //
    IR::LabelInstr * noPrototypeWithEnumerablePropertiesLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::RegOpnd * prototypeObjectOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd * prototypeTypeOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::IndirOpnd * prototypeTypeIdOpnd = IR::IndirOpnd::New(prototypeTypeOpnd, Js::DynamicType::GetOffsetOfTypeId(), TyUint32, func);
    InsertMove(prototypeObjectOpnd, IR::IndirOpnd::New(typeOpnd, Js::DynamicType::GetOffsetOfPrototype(), TyMachPtr, func), instr);
    InsertMove(prototypeTypeOpnd, IR::IndirOpnd::New(prototypeObjectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, func), instr);
    InsertCompareBranch(prototypeTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Null, TyUint32, func), Js::OpCode::BrEq_A, noPrototypeWithEnumerablePropertiesLabel, instr);

    IR::LabelInstr * checkFirstPrototypeLoopTopLabel = InsertLoopTopLabel(instr);
    Loop * loop = checkFirstPrototypeLoopTopLabel->GetLoop();
    // Both prototype registers are redefined on each iteration and consumed at the
    // loop top, so they must be kept live across the back edge for the register allocator.
    loop->regAlloc.liveOnBackEdgeSyms->Set(prototypeObjectOpnd->m_sym->m_id);
    loop->regAlloc.liveOnBackEdgeSyms->Set(prototypeTypeOpnd->m_sym->m_id);

    InsertCompareBranch(prototypeTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastStaticType, TyUint32, func), Js::OpCode::BrLe_A, helperLabel, instr);
    // No need to do EnsureObjectReady. Defer init type may not have this bit set, so we will go to helper and call EnsureObjectReady then
    InsertCompareBranch(IR::IndirOpnd::New(prototypeTypeOpnd, Js::DynamicType::GetOffsetOfHasNoEnumerableProperties(), TyUint8, func),
        IR::IntConstOpnd::New(0, TyUint8, func), Js::OpCode::BrEq_A, helperLabel, instr);
    GenerateHasObjectArrayCheck(prototypeObjectOpnd, prototypeTypeOpnd, helperLabel, instr);
    InsertMove(prototypeObjectOpnd, IR::IndirOpnd::New(prototypeTypeOpnd, Js::DynamicType::GetOffsetOfPrototype(), TyMachPtr, func), instr);
    // Tail dup the TypeIds_Null check
    InsertMove(prototypeTypeOpnd, IR::IndirOpnd::New(prototypeObjectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, func), instr);
    InsertCompareBranch(prototypeTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Null, TyUint32, func), Js::OpCode::BrNeq_A, checkFirstPrototypeLoopTopLabel, instr);
    instr->InsertBefore(noPrototypeWithEnumerablePropertiesLabel);

    // Initialize DynamicObjectPropertyEnumerator fields
    IR::Opnd * forInEnumeratorOpnd = instr->GetSrc2();
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorScriptContext(), TyMachPtr),
        LoadScriptContextOpnd(instr), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObject(), TyMachPtr),
        objectOpnd, instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialType(), TyMachPtr),
        typeOpnd, instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObjectIndex(), TyInt32),
        IR::IntConstOpnd::New(Js::Constants::NoBigSlot, TyInt32, func), instr);
    // Seed initialPropertyCount from the cache so enumeration can detect type changes.
    IR::RegOpnd * initialPropertyCountOpnd = IR::RegOpnd::New(TyInt32, func);
    InsertMove(initialPropertyCountOpnd,
        IR::IndirOpnd::New(forInCacheDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataPropertyCount(), TyInt32, func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialPropertyCount(), TyInt32),
        initialPropertyCountOpnd, instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorEnumeratedCount(), TyInt32),
        IR::IntConstOpnd::New(0, TyInt32, func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorFlags(), TyUint8),
        IR::IntConstOpnd::New((uint8)(Js::EnumeratorFlags::UseCache | Js::EnumeratorFlags::SnapShotSemantics), TyUint8, func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorCachedData(), TyMachPtr),
        forInCacheDataOpnd, instr);

    // Initialize rest of the JavascriptStaticEnumerator fields
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorCurrentEnumerator(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorPrefixEnumerator(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorArrayEnumerator(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);

    // Initialize rest of the ForInObjectEnumerator fields
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfShadowData(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);
    // Initialize canUseJitFastPath = true and enumeratingPrototype = false with a
    // single 16-bit store of 1. (Final IntConstOpnd arg presumably marks the constant
    // as dontEncode — TODO confirm against IntConstOpnd::New's signature.)
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfStates(), TyUint16),
        IR::IntConstOpnd::New(1, TyUint16, func, true), instr);

    // Fast path done: skip over the helper call that the caller will emit at `instr`.
    IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
void
Lowerer::LowerInitForInEnumerator(IR::Instr * instr)
{
    // Lowers InitForInEnumerator. When the instruction is profiled (and we are in
    // full JIT), an inline fast path using the profiled EnumeratorCache is emitted
    // first; the instruction itself then becomes the fallback helper call to
    // OP_InitForInEnumerator.
    Js::EnumeratorCache * forInCache = nullptr;
    Func * func = instr->m_func;
    if (instr->IsProfiledInstr())
    {
        // Fetch the enumerator cache recorded for this profileId.
        uint profileId = instr->AsProfiledInstr()->u.profileId;
        forInCache = instr->m_func->GetJITFunctionBody()->GetForInCache(profileId);
        Assert(forInCache != nullptr);
        if (!func->IsSimpleJit()
#if ENABLE_TTD
            // Skip the fast path under time-travel debugging (in-proc JIT only).
            && (func->IsOOPJIT() || !func->GetScriptContext()->GetThreadContext()->IsRuntimeInTTDMode())
            //TODO: We will need to enable OOPJIT info to exclude this if we have a TTD Runtime
#endif
            )
        {
            GenerateInitForInEnumeratorFastPath(instr, forInCache);
        }
    }
    // Helper fallback: replace src2 with the enumerator's address and pass the
    // cache pointer (null when unprofiled) as an extra helper argument.
    IR::RegOpnd * forInEnumeratorRegOpnd = GenerateForInEnumeratorLoad(instr->UnlinkSrc2(), instr);
    instr->SetSrc2(forInEnumeratorRegOpnd);
    m_lowererMD.LoadHelperArgument(instr, IR::AddrOpnd::New(forInCache, IR::AddrOpndKindForInCache, func));
    this->LowerBinaryHelperMem(instr, IR::HelperOp_OP_InitForInEnumerator);
}
  24702. IR::LabelInstr *
  24703. Lowerer::InsertLoopTopLabel(IR::Instr * insertBeforeInstr)
  24704. {
  24705. Func * func = this->m_func;
  24706. IR::LabelInstr * loopTopLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  24707. loopTopLabel->m_isLoopTop = true;
  24708. Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, func);
  24709. loopTopLabel->SetLoop(loop);
  24710. loop->SetLoopTopInstr(loopTopLabel);
  24711. loop->regAlloc.liveOnBackEdgeSyms = AllocatorNew(JitArenaAllocator, func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
  24712. insertBeforeInstr->InsertBefore(loopTopLabel);
  24713. return loopTopLabel;
  24714. }
  24715. IR::Instr *
  24716. Lowerer::AddBailoutToHelperCallInstr(IR::Instr * helperCallInstr, BailOutInfo * bailoutInfo, IR::BailOutKind bailoutKind, IR::Instr * primaryBailoutInstr)
  24717. {
  24718. helperCallInstr = helperCallInstr->ConvertToBailOutInstr(bailoutInfo, bailoutKind);
  24719. if (bailoutInfo->bailOutInstr == primaryBailoutInstr)
  24720. {
  24721. IR::Instr * instrShare = primaryBailoutInstr->ShareBailOut();
  24722. LowerBailTarget(instrShare);
  24723. }
  24724. return helperCallInstr;
  24725. }
  24726. void
  24727. Lowerer::GenerateAuxSlotPtrLoad(IR::PropertySymOpnd *propertySymOpnd, IR::Instr * instrInsert)
  24728. {
  24729. StackSym * auxSlotPtrSym = propertySymOpnd->GetAuxSlotPtrSym();
  24730. Assert(auxSlotPtrSym);
  24731. Func * func = instrInsert->m_func;
  24732. IR::Opnd *opndIndir = IR::IndirOpnd::New(propertySymOpnd->CreatePropertyOwnerOpnd(func), Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, func);
  24733. IR::RegOpnd *regOpnd = IR::RegOpnd::New(auxSlotPtrSym, TyMachReg, func);
  24734. regOpnd->SetIsJITOptimizedReg(true);
  24735. InsertMove(regOpnd, opndIndir, instrInsert);
  24736. }
void
Lowerer::InsertAndLegalize(IR::Instr * instr, IR::Instr* insertBeforeInstr)
{
    // Inserts instr before insertBeforeInstr, then runs machine-dependent
    // legalization on it to fix up any operand forms the target does not accept.
    insertBeforeInstr->InsertBefore(instr);
    LowererMD::Legalize(instr);
}
  24743. IR::Instr*
  24744. Lowerer::InsertObjectCheck(IR::RegOpnd *funcOpnd, IR::Instr *insertBeforeInstr, IR::BailOutKind bailOutKind, BailOutInfo *bailOutInfo)
  24745. {
  24746. IR::Instr *bailOutIfNotObject = IR::BailOutInstr::New(Js::OpCode::BailOnNotObject, bailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
  24747. // Bailout when funcOpnd is not an object.
  24748. bailOutIfNotObject->SetSrc1(funcOpnd);
  24749. bailOutIfNotObject->SetByteCodeOffset(insertBeforeInstr);
  24750. insertBeforeInstr->InsertBefore(bailOutIfNotObject);
  24751. return bailOutIfNotObject;
  24752. }
  24753. IR::Instr*
  24754. Lowerer::InsertFunctionTypeIdCheck(IR::RegOpnd * funcOpnd, IR::Instr* insertBeforeInstr, IR::BailOutKind bailOutKind, BailOutInfo *bailOutInfo)
  24755. {
  24756. IR::Instr *bailOutIfNotFunction = IR::BailOutInstr::New(Js::OpCode::BailOnNotEqual, bailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
  24757. // functionTypeRegOpnd = Ld functionRegOpnd->type
  24758. IR::IndirOpnd *functionTypeIndirOpnd = IR::IndirOpnd::New(funcOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, insertBeforeInstr->m_func);
  24759. IR::RegOpnd *functionTypeRegOpnd = IR::RegOpnd::New(TyVar, insertBeforeInstr->m_func->GetTopFunc());
  24760. IR::Instr *instr = IR::Instr::New(Js::OpCode::Ld_A, functionTypeRegOpnd, functionTypeIndirOpnd, insertBeforeInstr->m_func);
  24761. if (instr->m_func->HasByteCodeOffset())
  24762. {
  24763. instr->SetByteCodeOffset(insertBeforeInstr);
  24764. }
  24765. insertBeforeInstr->InsertBefore(instr);
  24766. CompileAssert(sizeof(Js::TypeId) == sizeof(int32));
  24767. // if (functionTypeRegOpnd->typeId != TypeIds_Function) goto $noInlineLabel
  24768. // BrNeq_I4 $noInlineLabel, functionTypeRegOpnd->typeId, TypeIds_Function
  24769. IR::IndirOpnd *functionTypeIdIndirOpnd = IR::IndirOpnd::New(functionTypeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, insertBeforeInstr->m_func);
  24770. IR::IntConstOpnd *typeIdFunctionConstOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, insertBeforeInstr->m_func);
  24771. bailOutIfNotFunction->SetSrc1(functionTypeIdIndirOpnd);
  24772. bailOutIfNotFunction->SetSrc2(typeIdFunctionConstOpnd);
  24773. insertBeforeInstr->InsertBefore(bailOutIfNotFunction);
  24774. return bailOutIfNotFunction;
  24775. }
  24776. IR::Instr*
  24777. Lowerer::InsertFunctionInfoCheck(IR::RegOpnd * funcOpnd, IR::Instr *insertBeforeInstr, IR::AddrOpnd* inlinedFuncInfo, IR::BailOutKind bailOutKind, BailOutInfo *bailOutInfo)
  24778. {
  24779. IR::Instr *bailOutIfWrongFuncInfo = IR::BailOutInstr::New(Js::OpCode::BailOnNotEqual, bailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
  24780. // if (VarTo<JavascriptFunction>(r1)->functionInfo != funcInfo) goto noInlineLabel
  24781. // BrNeq_A noInlineLabel, r1->functionInfo, funcInfo
  24782. IR::IndirOpnd* opndFuncInfo = IR::IndirOpnd::New(funcOpnd, Js::JavascriptFunction::GetOffsetOfFunctionInfo(), TyMachPtr, insertBeforeInstr->m_func);
  24783. bailOutIfWrongFuncInfo->SetSrc1(opndFuncInfo);
  24784. bailOutIfWrongFuncInfo->SetSrc2(inlinedFuncInfo);
  24785. insertBeforeInstr->InsertBefore(bailOutIfWrongFuncInfo);
  24786. return bailOutIfWrongFuncInfo;
  24787. }
  24788. #if DBG
void
Lowerer::LegalizeVerifyRange(IR::Instr * instrStart, IR::Instr * instrLast)
{
    // DBG-only: walks [instrStart, instrLast] and runs Legalize in verify mode
    // (template argument true) on each instruction to assert it is already legal
    // for the target.
    FOREACH_INSTR_IN_RANGE(verifyLegalizeInstr, instrStart, instrLast)
    {
        LowererMD::Legalize<true>(verifyLegalizeInstr);
    }
    NEXT_INSTR_IN_RANGE;
}
void
Lowerer::ReconcileWithLowererStateOnHelperCall(IR::Instr * callInstr, IR::JnHelperMethod helperMethod)
{
    // DBG-only sanity check run when a helper call is emitted: verifies that helper
    // calls are allowed in the current lowering context, and that a reentrant
    // helper carries the implicit-call bailout / opcode attributes it needs.
    AssertMsg((this->helperCallCheckState & HelperCallCheckState_NoHelperCalls) == 0, "Emitting an helper call when we didn't allow helper calls");
    if (HelperMethodAttributes::CanBeReentrant(helperMethod))
    {
        if (this->helperCallCheckState & HelperCallCheckState_ImplicitCallsBailout)
        {
            // The current instruction is expected to bail out on implicit calls;
            // a reentrant helper without that bailout kind is a lowering bug.
            if (!callInstr->HasBailOutInfo() ||
                !BailOutInfo::IsBailOutOnImplicitCalls(callInstr->GetBailOutKind()))
            {
                Output::Print(_u("HelperMethod : %s\n"), IR::GetMethodName(helperMethod));
                AssertMsg(false, "Helper call doesn't have BailOutOnImplicitCalls when it should");
            }
        }
        if (!OpCodeAttr::HasImplicitCall(m_currentInstrOpCode) && !OpCodeAttr::OpndHasImplicitCall(m_currentInstrOpCode)
            // Special case: FromVar is exempted even though its opcode attributes
            // do not report implicit calls.
            && m_currentInstrOpCode != Js::OpCode::FromVar
            )
        {
            Output::Print(_u("HelperMethod : %s, OpCode: %s"), IR::GetMethodName(helperMethod), Js::OpCodeUtil::GetOpCodeName(m_currentInstrOpCode));
            callInstr->DumpByteCodeOffset();
            Output::Print(_u("\n"));
            AssertMsg(false, "OpCode and Helper implicit call attribute mismatch");
        }
    }
}
  24825. void
  24826. Lowerer::ClearAndSaveImplicitCallCheckOnHelperCallCheckState()
  24827. {
  24828. this->oldHelperCallCheckState = this->helperCallCheckState;
  24829. this->helperCallCheckState = HelperCallCheckState(this->helperCallCheckState & ~HelperCallCheckState_ImplicitCallsBailout);
  24830. }
  24831. void
  24832. Lowerer::RestoreImplicitCallCheckOnHelperCallCheckState()
  24833. {
  24834. if (this->oldHelperCallCheckState & HelperCallCheckState_ImplicitCallsBailout)
  24835. {
  24836. this->helperCallCheckState = HelperCallCheckState(this->helperCallCheckState | HelperCallCheckState_ImplicitCallsBailout);
  24837. this->oldHelperCallCheckState = HelperCallCheckState_None;
  24838. }
  24839. }
  24840. IR::Instr*
  24841. Lowerer::LowerCheckLowerIntBound(IR::Instr * instr)
  24842. {
  24843. IR::Instr * instrPrev = instr->m_prev;
  24844. IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, false /*isOpHelper*/);
  24845. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32());
  24846. InsertCompareBranch(instr->GetSrc1(), instr->GetSrc2(), Js::OpCode::BrGe_A, continueLabel, instr);
  24847. IR::Instr* helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
  24848. instr->InsertBefore(helperCallInstr);
  24849. m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::HelperIntRangeCheckFailure);
  24850. instr->InsertAfter(continueLabel);
  24851. instr->Remove();
  24852. return instrPrev;
  24853. }
IR::Instr*
Lowerer::LowerCheckUpperIntBound(IR::Instr * instr)
{
    // Lowers CheckUpperIntBound. When the immediately preceding instruction is
    // CheckLowerIntBound, the two checks are fused into one diamond:
    //      if (src1 > upperBound)        goto $helper
    //      if (lowerSrc1 >= lowerBound)  goto $continue
    //  $helper:  call IntRangeCheckFailure
    //  $continue:
    // Otherwise a single "src1 <= upperBound" check guards the helper call.
    bool lowerBoundCheckPresent = instr->m_prev->m_opcode == Js::OpCode::CheckLowerIntBound;
    // Return the instruction preceding everything we touch, so the caller's
    // lowering walk resumes correctly (skip the fused lower-bound check too).
    IR::Instr * instrPrev = lowerBoundCheckPresent ? instr->m_prev->m_prev : instr->m_prev;
    IR::Instr * lowerBoundCheckInstr = lowerBoundCheckPresent ? instr->m_prev : nullptr;
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, false /*isOpHelper*/);
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true /*isOpHelper*/);
    Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32());
    if (lowerBoundCheckInstr)
    {
        // Fused form: fail fast past the upper bound, then fall into the
        // lower-bound compare; only its failure path reaches $helper.
        InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(), Js::OpCode::BrGt_A, helperLabel, instr);
        Assert(lowerBoundCheckInstr->GetSrc1()->IsInt32() || lowerBoundCheckInstr->GetSrc1()->IsUInt32());
        InsertCompareBranch(lowerBoundCheckInstr->UnlinkSrc1(), lowerBoundCheckInstr->UnlinkSrc2(), Js::OpCode::BrGe_A, continueLabel, instr);
    }
    else
    {
        // Standalone form: in-range jumps straight to $continue; fall through to $helper.
        InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(), Js::OpCode::BrLe_A, continueLabel, instr);
    }
    instr->InsertBefore(helperLabel);
    IR::Instr* helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    instr->InsertBefore(helperCallInstr);
    m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::HelperIntRangeCheckFailure);
    instr->InsertAfter(continueLabel);
    instr->Remove();
    if (lowerBoundCheckInstr)
    {
        // The fused CheckLowerIntBound was consumed above (its srcs unlinked);
        // remove the now-empty instruction.
        lowerBoundCheckInstr->Remove();
    }
    return instrPrev;
}
  24885. #endif