Lower.cpp 1.1 MB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211
54221542315424154251542615427154281542915430154311543215433154341543515436154371543815439154401544115442154431544415445154461544715448154491545015451154521545315454154551545615457154581545915460154611546215463154641546515466154671546815469154701547115472154731547415475154761547715478154791548015481154821548315484154851548615487154881548915490154911549215493154941549515496154971549815499155001550115502155031550415505155061550715508155091551015511155121551315514155151551615517155181551915520155211552215523155241552515526155271552815529155301553115532155331553415535155361553715538155391554015541155421554315544155451554615547155481554915550155511555215553155541555515556155571555815559155601556115562155631556415565155661556715568155691557015571155721557315574155751557615577155781557915580155811558215583155841558515586155871558815589155901559115592155931559415595155961559715598155991560015601156021560315604156051560615607156081560915610156111561215613156141561515616156171561815619156201562115622156231562415625156261562715628156291563015631156321563315634156351563615637156381563915640156411564215643156441564515646156471564815649156501565115652156531565415655156561565715658156591566015661156621566315664156651566615667156681566915670156711567215673156741567515676156771567815679156801568115682156831568415685156861568715688156891569015691156921569315694156951569615697156981569915700157011570215703157041570515706157071570815709157101571115712157131571415715157161571715718157191572015721157221572315724157251572615727157281572915730157311573215733157341573515736157371573815739157401574115742157431574415745157461574715748157491575015751157521575315754157551575615757157581575915760157611576215763157641576515766157671576815769157701577115772157731577415775157761577715778157791578015781157821578315784157851578615787157881578915790157911579215793157941579515796157971579815799158001580115802158031580415805158061580715808158091581015811158121581315814158151581615817158181581915820158211
58221582315824158251582615827158281582915830158311583215833158341583515836158371583815839158401584115842158431584415845158461584715848158491585015851158521585315854158551585615857158581585915860158611586215863158641586515866158671586815869158701587115872158731587415875158761587715878158791588015881158821588315884158851588615887158881588915890158911589215893158941589515896158971589815899159001590115902159031590415905159061590715908159091591015911159121591315914159151591615917159181591915920159211592215923159241592515926159271592815929159301593115932159331593415935159361593715938159391594015941159421594315944159451594615947159481594915950159511595215953159541595515956159571595815959159601596115962159631596415965159661596715968159691597015971159721597315974159751597615977159781597915980159811598215983159841598515986159871598815989159901599115992159931599415995159961599715998159991600016001160021600316004160051600616007160081600916010160111601216013160141601516016160171601816019160201602116022160231602416025160261602716028160291603016031160321603316034160351603616037160381603916040160411604216043160441604516046160471604816049160501605116052160531605416055160561605716058160591606016061160621606316064160651606616067160681606916070160711607216073160741607516076160771607816079160801608116082160831608416085160861608716088160891609016091160921609316094160951609616097160981609916100161011610216103161041610516106161071610816109161101611116112161131611416115161161611716118161191612016121161221612316124161251612616127161281612916130161311613216133161341613516136161371613816139161401614116142161431614416145161461614716148161491615016151161521615316154161551615616157161581615916160161611616216163161641616516166161671616816169161701617116172161731617416175161761617716178161791618016181161821618316184161851618616187161881618916190161911619216193161941619516196161971619816199162001620116202162031620416205162061620716208162091621016211162121621316214162151621616217162181621916220162211
62221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211
66221662316624166251662616627166281662916630166311663216633166341663516636166371663816639166401664116642166431664416645166461664716648166491665016651166521665316654166551665616657166581665916660166611666216663166641666516666166671666816669166701667116672166731667416675166761667716678166791668016681166821668316684166851668616687166881668916690166911669216693166941669516696166971669816699167001670116702167031670416705167061670716708167091671016711167121671316714167151671616717167181671916720167211672216723167241672516726167271672816729167301673116732167331673416735167361673716738167391674016741167421674316744167451674616747167481674916750167511675216753167541675516756167571675816759167601676116762167631676416765167661676716768167691677016771167721677316774167751677616777167781677916780167811678216783167841678516786167871678816789167901679116792167931679416795167961679716798167991680016801168021680316804168051680616807168081680916810168111681216813168141681516816168171681816819168201682116822168231682416825168261682716828168291683016831168321683316834168351683616837168381683916840168411684216843168441684516846168471684816849168501685116852168531685416855168561685716858168591686016861168621686316864168651686616867168681686916870168711687216873168741687516876168771687816879168801688116882168831688416885168861688716888168891689016891168921689316894168951689616897168981689916900169011690216903169041690516906169071690816909169101691116912169131691416915169161691716918169191692016921169221692316924169251692616927169281692916930169311693216933169341693516936169371693816939169401694116942169431694416945169461694716948169491695016951169521695316954169551695616957169581695916960169611696216963169641696516966169671696816969169701697116972169731697416975169761697716978169791698016981169821698316984169851698616987169881698916990169911699216993169941699516996169971699816999170001700117002170031700417005170061700717008170091701017011170121701317014170151701617017170181701917020170211
70221702317024170251702617027170281702917030170311703217033170341703517036170371703817039170401704117042170431704417045170461704717048170491705017051170521705317054170551705617057170581705917060170611706217063170641706517066170671706817069170701707117072170731707417075170761707717078170791708017081170821708317084170851708617087170881708917090170911709217093170941709517096170971709817099171001710117102171031710417105171061710717108171091711017111171121711317114171151711617117171181711917120171211712217123171241712517126171271712817129171301713117132171331713417135171361713717138171391714017141171421714317144171451714617147171481714917150171511715217153171541715517156171571715817159171601716117162171631716417165171661716717168171691717017171171721717317174171751717617177171781717917180171811718217183171841718517186171871718817189171901719117192171931719417195171961719717198171991720017201172021720317204172051720617207172081720917210172111721217213172141721517216172171721817219172201722117222172231722417225172261722717228172291723017231172321723317234172351723617237172381723917240172411724217243172441724517246172471724817249172501725117252172531725417255172561725717258172591726017261172621726317264172651726617267172681726917270172711727217273172741727517276172771727817279172801728117282172831728417285172861728717288172891729017291172921729317294172951729617297172981729917300173011730217303173041730517306173071730817309173101731117312173131731417315173161731717318173191732017321173221732317324173251732617327173281732917330173311733217333173341733517336173371733817339173401734117342173431734417345173461734717348173491735017351173521735317354173551735617357173581735917360173611736217363173641736517366173671736817369173701737117372173731737417375173761737717378173791738017381173821738317384173851738617387173881738917390173911739217393173941739517396173971739817399174001740117402174031740417405174061740717408174091741017411174121741317414174151741617417174181741917420174211
74221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211
78221782317824178251782617827178281782917830178311783217833178341783517836178371783817839178401784117842178431784417845178461784717848178491785017851178521785317854178551785617857178581785917860178611786217863178641786517866178671786817869178701787117872178731787417875178761787717878178791788017881178821788317884178851788617887178881788917890178911789217893178941789517896178971789817899179001790117902179031790417905179061790717908179091791017911179121791317914179151791617917179181791917920179211792217923179241792517926179271792817929179301793117932179331793417935179361793717938179391794017941179421794317944179451794617947179481794917950179511795217953179541795517956179571795817959179601796117962179631796417965179661796717968179691797017971179721797317974179751797617977179781797917980179811798217983179841798517986179871798817989179901799117992179931799417995179961799717998179991800018001180021800318004180051800618007180081800918010180111801218013180141801518016180171801818019180201802118022180231802418025180261802718028180291803018031180321803318034180351803618037180381803918040180411804218043180441804518046180471804818049180501805118052180531805418055180561805718058180591806018061180621806318064180651806618067180681806918070180711807218073180741807518076180771807818079180801808118082180831808418085180861808718088180891809018091180921809318094180951809618097180981809918100181011810218103181041810518106181071810818109181101811118112181131811418115181161811718118181191812018121181221812318124181251812618127181281812918130181311813218133181341813518136181371813818139181401814118142181431814418145181461814718148181491815018151181521815318154181551815618157181581815918160181611816218163181641816518166181671816818169181701817118172181731817418175181761817718178181791818018181181821818318184181851818618187181881818918190181911819218193181941819518196181971819818199182001820118202182031820418205182061820718208182091821018211182121821318214182151821618217182181821918220182211
82221822318224182251822618227182281822918230182311823218233182341823518236182371823818239182401824118242182431824418245182461824718248182491825018251182521825318254182551825618257182581825918260182611826218263182641826518266182671826818269182701827118272182731827418275182761827718278182791828018281182821828318284182851828618287182881828918290182911829218293182941829518296182971829818299183001830118302183031830418305183061830718308183091831018311183121831318314183151831618317183181831918320183211832218323183241832518326183271832818329183301833118332183331833418335183361833718338183391834018341183421834318344183451834618347183481834918350183511835218353183541835518356183571835818359183601836118362183631836418365183661836718368183691837018371183721837318374183751837618377183781837918380183811838218383183841838518386183871838818389183901839118392183931839418395183961839718398183991840018401184021840318404184051840618407184081840918410184111841218413184141841518416184171841818419184201842118422184231842418425184261842718428184291843018431184321843318434184351843618437184381843918440184411844218443184441844518446184471844818449184501845118452184531845418455184561845718458184591846018461184621846318464184651846618467184681846918470184711847218473184741847518476184771847818479184801848118482184831848418485184861848718488184891849018491184921849318494184951849618497184981849918500185011850218503185041850518506185071850818509185101851118512185131851418515185161851718518185191852018521185221852318524185251852618527185281852918530185311853218533185341853518536185371853818539185401854118542185431854418545185461854718548185491855018551185521855318554185551855618557185581855918560185611856218563185641856518566185671856818569185701857118572185731857418575185761857718578185791858018581185821858318584185851858618587185881858918590185911859218593185941859518596185971859818599186001860118602186031860418605186061860718608186091861018611186121861318614186151861618617186181861918620186211
86221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211
90221902319024190251902619027190281902919030190311903219033190341903519036190371903819039190401904119042190431904419045190461904719048190491905019051190521905319054190551905619057190581905919060190611906219063190641906519066190671906819069190701907119072190731907419075190761907719078190791908019081190821908319084190851908619087190881908919090190911909219093190941909519096190971909819099191001910119102191031910419105191061910719108191091911019111191121911319114191151911619117191181911919120191211912219123191241912519126191271912819129191301913119132191331913419135191361913719138191391914019141191421914319144191451914619147191481914919150191511915219153191541915519156191571915819159191601916119162191631916419165191661916719168191691917019171191721917319174191751917619177191781917919180191811918219183191841918519186191871918819189191901919119192191931919419195191961919719198191991920019201192021920319204192051920619207192081920919210192111921219213192141921519216192171921819219192201922119222192231922419225192261922719228192291923019231192321923319234192351923619237192381923919240192411924219243192441924519246192471924819249192501925119252192531925419255192561925719258192591926019261192621926319264192651926619267192681926919270192711927219273192741927519276192771927819279192801928119282192831928419285192861928719288192891929019291192921929319294192951929619297192981929919300193011930219303193041930519306193071930819309193101931119312193131931419315193161931719318193191932019321193221932319324193251932619327193281932919330193311933219333193341933519336193371933819339193401934119342193431934419345193461934719348193491935019351193521935319354193551935619357193581935919360193611936219363193641936519366193671936819369193701937119372193731937419375193761937719378193791938019381193821938319384193851938619387193881938919390193911939219393193941939519396193971939819399194001940119402194031940419405194061940719408194091941019411194121941319414194151941619417194181941919420194211
94221942319424194251942619427194281942919430194311943219433194341943519436194371943819439194401944119442194431944419445194461944719448194491945019451194521945319454194551945619457194581945919460194611946219463194641946519466194671946819469194701947119472194731947419475194761947719478194791948019481194821948319484194851948619487194881948919490194911949219493194941949519496194971949819499195001950119502195031950419505195061950719508195091951019511195121951319514195151951619517195181951919520195211952219523195241952519526195271952819529195301953119532195331953419535195361953719538195391954019541195421954319544195451954619547195481954919550195511955219553195541955519556195571955819559195601956119562195631956419565195661956719568195691957019571195721957319574195751957619577195781957919580195811958219583195841958519586195871958819589195901959119592195931959419595195961959719598195991960019601196021960319604196051960619607196081960919610196111961219613196141961519616196171961819619196201962119622196231962419625196261962719628196291963019631196321963319634196351963619637196381963919640196411964219643196441964519646196471964819649196501965119652196531965419655196561965719658196591966019661196621966319664196651966619667196681966919670196711967219673196741967519676196771967819679196801968119682196831968419685196861968719688196891969019691196921969319694196951969619697196981969919700197011970219703197041970519706197071970819709197101971119712197131971419715197161971719718197191972019721197221972319724197251972619727197281972919730197311973219733197341973519736197371973819739197401974119742197431974419745197461974719748197491975019751197521975319754197551975619757197581975919760197611976219763197641976519766197671976819769197701977119772197731977419775197761977719778197791978019781197821978319784197851978619787197881978919790197911979219793197941979519796197971979819799198001980119802198031980419805198061980719808198091981019811198121981319814198151981619817198181981919820198211
98221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212
02222022320224202252022620227202282022920230202312023220233202342023520236202372023820239202402024120242202432024420245202462024720248202492025020251202522025320254202552025620257202582025920260202612026220263202642026520266202672026820269202702027120272202732027420275202762027720278202792028020281202822028320284202852028620287202882028920290202912029220293202942029520296202972029820299203002030120302203032030420305203062030720308203092031020311203122031320314203152031620317203182031920320203212032220323203242032520326203272032820329203302033120332203332033420335203362033720338203392034020341203422034320344203452034620347203482034920350203512035220353203542035520356203572035820359203602036120362203632036420365203662036720368203692037020371203722037320374203752037620377203782037920380203812038220383203842038520386203872038820389203902039120392203932039420395203962039720398203992040020401204022040320404204052040620407204082040920410204112041220413204142041520416204172041820419204202042120422204232042420425204262042720428204292043020431204322043320434204352043620437204382043920440204412044220443204442044520446204472044820449204502045120452204532045420455204562045720458204592046020461204622046320464204652046620467204682046920470204712047220473204742047520476204772047820479204802048120482204832048420485204862048720488204892049020491204922049320494204952049620497204982049920500205012050220503205042050520506205072050820509205102051120512205132051420515205162051720518205192052020521205222052320524205252052620527205282052920530205312053220533205342053520536205372053820539205402054120542205432054420545205462054720548205492055020551205522055320554205552055620557205582055920560205612056220563205642056520566205672056820569205702057120572205732057420575205762057720578205792058020581205822058320584205852058620587205882058920590205912059220593205942059520596205972059820599206002060120602206032060420605206062060720608206092061020611206122061320614206152061620617206182061920620206212
06222062320624206252062620627206282062920630206312063220633206342063520636206372063820639206402064120642206432064420645206462064720648206492065020651206522065320654206552065620657206582065920660206612066220663206642066520666206672066820669206702067120672206732067420675206762067720678206792068020681206822068320684206852068620687206882068920690206912069220693206942069520696206972069820699207002070120702207032070420705207062070720708207092071020711207122071320714207152071620717207182071920720207212072220723207242072520726207272072820729207302073120732207332073420735207362073720738207392074020741207422074320744207452074620747207482074920750207512075220753207542075520756207572075820759207602076120762207632076420765207662076720768207692077020771207722077320774207752077620777207782077920780207812078220783207842078520786207872078820789207902079120792207932079420795207962079720798207992080020801208022080320804208052080620807208082080920810208112081220813208142081520816208172081820819208202082120822208232082420825208262082720828208292083020831208322083320834208352083620837208382083920840208412084220843208442084520846208472084820849208502085120852208532085420855208562085720858208592086020861208622086320864208652086620867208682086920870208712087220873208742087520876208772087820879208802088120882208832088420885208862088720888208892089020891208922089320894208952089620897208982089920900209012090220903209042090520906209072090820909209102091120912209132091420915209162091720918209192092020921209222092320924209252092620927209282092920930209312093220933209342093520936209372093820939209402094120942209432094420945209462094720948209492095020951209522095320954209552095620957209582095920960209612096220963209642096520966209672096820969209702097120972209732097420975209762097720978209792098020981209822098320984209852098620987209882098920990209912099220993209942099520996209972099820999210002100121002210032100421005210062100721008210092101021011210122101321014210152101621017210182101921020210212
10222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182141921420214212
14222142321424214252142621427214282142921430214312143221433214342143521436214372143821439214402144121442214432144421445214462144721448214492145021451214522145321454214552145621457214582145921460214612146221463214642146521466214672146821469214702147121472214732147421475214762147721478214792148021481214822148321484214852148621487214882148921490214912149221493214942149521496214972149821499215002150121502215032150421505215062150721508215092151021511215122151321514215152151621517215182151921520215212152221523215242152521526215272152821529215302153121532215332153421535215362153721538215392154021541215422154321544215452154621547215482154921550215512155221553215542155521556215572155821559215602156121562215632156421565215662156721568215692157021571215722157321574215752157621577215782157921580215812158221583215842158521586215872158821589215902159121592215932159421595215962159721598215992160021601216022160321604216052160621607216082160921610216112161221613216142161521616216172161821619216202162121622216232162421625216262162721628216292163021631216322163321634216352163621637216382163921640216412164221643216442164521646216472164821649216502165121652216532165421655216562165721658216592166021661216622166321664216652166621667216682166921670216712167221673216742167521676216772167821679216802168121682216832168421685216862168721688216892169021691216922169321694216952169621697216982169921700217012170221703217042170521706217072170821709217102171121712217132171421715217162171721718217192172021721217222172321724217252172621727217282172921730217312173221733217342173521736217372173821739217402174121742217432174421745217462174721748217492175021751217522175321754217552175621757217582175921760217612176221763217642176521766217672176821769217702177121772217732177421775217762177721778217792178021781217822178321784217852178621787217882178921790217912179221793217942179521796217972179821799218002180121802218032180421805218062180721808218092181021811218122181321814218152181621817218182181921820218212
18222182321824218252182621827218282182921830218312183221833218342183521836218372183821839218402184121842218432184421845218462184721848218492185021851218522185321854218552185621857218582185921860218612186221863218642186521866218672186821869218702187121872218732187421875218762187721878218792188021881218822188321884218852188621887218882188921890218912189221893218942189521896218972189821899219002190121902219032190421905219062190721908219092191021911219122191321914219152191621917219182191921920219212192221923219242192521926219272192821929219302193121932219332193421935219362193721938219392194021941219422194321944219452194621947219482194921950219512195221953219542195521956219572195821959219602196121962219632196421965219662196721968219692197021971219722197321974219752197621977219782197921980219812198221983219842198521986219872198821989219902199121992219932199421995219962199721998219992200022001220022200322004220052200622007220082200922010220112201222013220142201522016220172201822019220202202122022220232202422025220262202722028220292203022031220322203322034220352203622037220382203922040220412204222043220442204522046220472204822049220502205122052220532205422055220562205722058220592206022061220622206322064220652206622067220682206922070220712207222073220742207522076220772207822079220802208122082220832208422085220862208722088220892209022091220922209322094220952209622097220982209922100221012210222103221042210522106221072210822109221102211122112221132211422115221162211722118221192212022121221222212322124221252212622127221282212922130221312213222133221342213522136221372213822139221402214122142221432214422145221462214722148221492215022151221522215322154221552215622157221582215922160221612216222163221642216522166221672216822169221702217122172221732217422175221762217722178221792218022181221822218322184221852218622187221882218922190221912219222193221942219522196221972219822199222002220122202222032220422205222062220722208222092221022211222122221322214222152221622217222182221922220222212
22222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212
26222262322624226252262622627226282262922630226312263222633226342263522636226372263822639226402264122642226432264422645226462264722648226492265022651226522265322654226552265622657226582265922660226612266222663226642266522666226672266822669226702267122672226732267422675226762267722678226792268022681226822268322684226852268622687226882268922690226912269222693226942269522696226972269822699227002270122702227032270422705227062270722708227092271022711227122271322714227152271622717227182271922720227212272222723227242272522726227272272822729227302273122732227332273422735227362273722738227392274022741227422274322744227452274622747227482274922750227512275222753227542275522756227572275822759227602276122762227632276422765227662276722768227692277022771227722277322774227752277622777227782277922780227812278222783227842278522786227872278822789227902279122792227932279422795227962279722798227992280022801228022280322804228052280622807228082280922810228112281222813228142281522816228172281822819228202282122822228232282422825228262282722828228292283022831228322283322834228352283622837228382283922840228412284222843228442284522846228472284822849228502285122852228532285422855228562285722858228592286022861228622286322864228652286622867228682286922870228712287222873228742287522876228772287822879228802288122882228832288422885228862288722888228892289022891228922289322894228952289622897228982289922900229012290222903229042290522906229072290822909229102291122912229132291422915229162291722918229192292022921229222292322924229252292622927229282292922930229312293222933229342293522936229372293822939229402294122942229432294422945229462294722948229492295022951229522295322954229552295622957229582295922960229612296222963229642296522966229672296822969229702297122972229732297422975229762297722978229792298022981229822298322984229852298622987229882298922990229912299222993229942299522996229972299822999230002300123002230032300423005230062300723008230092301023011230122301323014230152301623017230182301923020230212
30222302323024230252302623027230282302923030230312303223033230342303523036230372303823039230402304123042230432304423045230462304723048230492305023051230522305323054230552305623057230582305923060230612306223063230642306523066230672306823069230702307123072230732307423075230762307723078230792308023081230822308323084230852308623087230882308923090230912309223093230942309523096230972309823099231002310123102231032310423105231062310723108231092311023111231122311323114231152311623117231182311923120231212312223123231242312523126231272312823129231302313123132231332313423135231362313723138231392314023141231422314323144231452314623147231482314923150231512315223153231542315523156231572315823159231602316123162231632316423165231662316723168231692317023171231722317323174231752317623177231782317923180231812318223183231842318523186231872318823189231902319123192231932319423195231962319723198231992320023201232022320323204232052320623207232082320923210232112321223213232142321523216232172321823219232202322123222232232322423225232262322723228232292323023231232322323323234232352323623237232382323923240232412324223243232442324523246232472324823249232502325123252232532325423255232562325723258232592326023261232622326323264232652326623267232682326923270232712327223273232742327523276232772327823279232802328123282232832328423285232862328723288232892329023291232922329323294232952329623297232982329923300233012330223303233042330523306233072330823309233102331123312233132331423315233162331723318233192332023321233222332323324233252332623327233282332923330233312333223333233342333523336233372333823339233402334123342233432334423345233462334723348233492335023351233522335323354233552335623357233582335923360233612336223363233642336523366233672336823369233702337123372233732337423375233762337723378233792338023381233822338323384233852338623387233882338923390233912339223393233942339523396233972339823399234002340123402234032340423405234062340723408234092341023411234122341323414234152341623417234182341923420234212
34222342323424234252342623427234282342923430234312343223433234342343523436234372343823439234402344123442234432344423445234462344723448234492345023451234522345323454234552345623457234582345923460234612346223463234642346523466234672346823469234702347123472234732347423475234762347723478234792348023481234822348323484234852348623487234882348923490234912349223493234942349523496234972349823499235002350123502235032350423505235062350723508235092351023511235122351323514235152351623517235182351923520235212352223523235242352523526235272352823529235302353123532235332353423535235362353723538235392354023541235422354323544235452354623547235482354923550235512355223553235542355523556235572355823559235602356123562235632356423565235662356723568235692357023571235722357323574235752357623577235782357923580235812358223583235842358523586235872358823589235902359123592235932359423595235962359723598235992360023601236022360323604236052360623607236082360923610236112361223613236142361523616236172361823619236202362123622236232362423625236262362723628236292363023631236322363323634236352363623637236382363923640236412364223643236442364523646236472364823649236502365123652236532365423655236562365723658236592366023661236622366323664236652366623667236682366923670236712367223673236742367523676236772367823679236802368123682236832368423685236862368723688236892369023691236922369323694236952369623697236982369923700237012370223703237042370523706237072370823709237102371123712237132371423715237162371723718237192372023721237222372323724237252372623727237282372923730237312373223733237342373523736237372373823739237402374123742237432374423745237462374723748237492375023751237522375323754237552375623757237582375923760237612376223763237642376523766237672376823769237702377123772237732377423775237762377723778237792378023781237822378323784237852378623787237882378923790237912379223793237942379523796237972379823799238002380123802238032380423805238062380723808238092381023811238122381323814238152381623817238182381923820238212
38222382323824238252382623827238282382923830238312383223833238342383523836238372383823839238402384123842238432384423845238462384723848238492385023851238522385323854238552385623857238582385923860238612386223863238642386523866238672386823869238702387123872238732387423875238762387723878238792388023881238822388323884238852388623887238882388923890238912389223893238942389523896238972389823899239002390123902239032390423905239062390723908239092391023911239122391323914239152391623917239182391923920239212392223923239242392523926239272392823929239302393123932239332393423935239362393723938239392394023941239422394323944239452394623947239482394923950239512395223953239542395523956239572395823959239602396123962239632396423965239662396723968239692397023971239722397323974239752397623977239782397923980239812398223983239842398523986239872398823989239902399123992239932399423995239962399723998239992400024001240022400324004240052400624007240082400924010240112401224013240142401524016240172401824019240202402124022240232402424025240262402724028240292403024031240322403324034240352403624037240382403924040240412404224043240442404524046240472404824049240502405124052240532405424055240562405724058240592406024061240622406324064240652406624067240682406924070240712407224073240742407524076240772407824079240802408124082240832408424085240862408724088240892409024091240922409324094240952409624097240982409924100241012410224103241042410524106241072410824109241102411124112241132411424115241162411724118241192412024121241222412324124241252412624127241282412924130241312413224133241342413524136241372413824139241402414124142241432414424145241462414724148241492415024151241522415324154241552415624157241582415924160241612416224163241642416524166241672416824169241702417124172241732417424175241762417724178241792418024181241822418324184241852418624187241882418924190241912419224193241942419524196241972419824199242002420124202242032420424205242062420724208242092421024211242122421324214242152421624217242182421924220242212
42222422324224242252422624227242282422924230242312423224233242342423524236242372423824239242402424124242242432424424245242462424724248242492425024251242522425324254242552425624257242582425924260242612426224263242642426524266242672426824269242702427124272242732427424275242762427724278242792428024281242822428324284242852428624287242882428924290242912429224293242942429524296242972429824299243002430124302243032430424305243062430724308243092431024311243122431324314243152431624317243182431924320243212432224323243242432524326243272432824329243302433124332243332433424335243362433724338243392434024341243422434324344243452434624347243482434924350243512435224353243542435524356243572435824359243602436124362243632436424365243662436724368243692437024371243722437324374243752437624377243782437924380243812438224383243842438524386243872438824389243902439124392243932439424395243962439724398243992440024401244022440324404244052440624407244082440924410244112441224413244142441524416244172441824419244202442124422244232442424425244262442724428244292443024431244322443324434244352443624437244382443924440244412444224443244442444524446244472444824449244502445124452244532445424455244562445724458244592446024461244622446324464244652446624467244682446924470244712447224473244742447524476244772447824479244802448124482244832448424485244862448724488244892449024491244922449324494244952449624497244982449924500245012450224503245042450524506245072450824509245102451124512245132451424515245162451724518245192452024521245222452324524245252452624527245282452924530245312453224533245342453524536245372453824539245402454124542245432454424545245462454724548245492455024551245522455324554245552455624557245582455924560245612456224563245642456524566245672456824569245702457124572245732457424575245762457724578245792458024581245822458324584245852458624587245882458924590245912459224593245942459524596245972459824599246002460124602246032460424605246062460724608246092461024611246122461324614246152461624617246182461924620246212
46222462324624246252462624627246282462924630246312463224633246342463524636246372463824639246402464124642246432464424645246462464724648246492465024651246522465324654246552465624657246582465924660246612466224663246642466524666246672466824669246702467124672246732467424675246762467724678246792468024681246822468324684246852468624687246882468924690246912469224693246942469524696246972469824699247002470124702247032470424705247062470724708247092471024711247122471324714247152471624717247182471924720247212472224723247242472524726247272472824729247302473124732247332473424735247362473724738247392474024741247422474324744247452474624747247482474924750247512475224753247542475524756247572475824759247602476124762247632476424765247662476724768247692477024771247722477324774247752477624777247782477924780247812478224783247842478524786247872478824789247902479124792247932479424795247962479724798247992480024801248022480324804248052480624807248082480924810248112481224813248142481524816248172481824819248202482124822248232482424825248262482724828248292483024831248322483324834248352483624837248382483924840248412484224843248442484524846248472484824849248502485124852248532485424855248562485724858248592486024861248622486324864248652486624867248682486924870248712487224873248742487524876248772487824879248802488124882248832488424885248862488724888248892489024891248922489324894248952489624897248982489924900249012490224903249042490524906249072490824909249102491124912249132491424915249162491724918249192492024921249222492324924249252492624927249282492924930249312493224933249342493524936249372493824939249402494124942249432494424945249462494724948249492495024951249522495324954249552495624957249582495924960249612496224963249642496524966249672496824969249702497124972249732497424975249762497724978249792498024981249822498324984249852498624987249882498924990249912499224993249942499524996249972499824999250002500125002250032500425005250062500725008250092501025011250122501325014250152501625017250182501925020250212
50222502325024250252502625027250282502925030250312503225033250342503525036250372503825039250402504125042250432504425045250462504725048250492505025051250522505325054250552505625057250582505925060250612506225063250642506525066250672506825069250702507125072250732507425075250762507725078250792508025081250822508325084250852508625087250882508925090250912509225093250942509525096250972509825099251002510125102251032510425105251062510725108251092511025111251122511325114251152511625117251182511925120251212512225123251242512525126251272512825129251302513125132251332513425135251362513725138251392514025141251422514325144251452514625147251482514925150251512515225153251542515525156251572515825159251602516125162251632516425165251662516725168251692517025171251722517325174251752517625177251782517925180251812518225183251842518525186251872518825189251902519125192251932519425195251962519725198251992520025201252022520325204252052520625207252082520925210252112521225213252142521525216252172521825219252202522125222252232522425225252262522725228252292523025231252322523325234252352523625237252382523925240252412524225243252442524525246252472524825249252502525125252252532525425255252562525725258252592526025261252622526325264252652526625267252682526925270252712527225273252742527525276252772527825279252802528125282252832528425285252862528725288252892529025291252922529325294252952529625297252982529925300253012530225303253042530525306253072530825309253102531125312253132531425315253162531725318253192532025321253222532325324253252532625327253282532925330253312533225333253342533525336253372533825339253402534125342253432534425345253462534725348253492535025351253522535325354253552535625357253582535925360253612536225363253642536525366253672536825369253702537125372253732537425375253762537725378253792538025381253822538325384253852538625387253882538925390253912539225393253942539525396253972539825399254002540125402254032540425405254062540725408254092541025411254122541325414254152541625417254182541925420254212
54222542325424254252542625427254282542925430254312543225433254342543525436254372543825439254402544125442254432544425445254462544725448254492545025451254522545325454254552545625457254582545925460254612546225463254642546525466254672546825469254702547125472254732547425475254762547725478254792548025481254822548325484254852548625487254882548925490254912549225493254942549525496254972549825499255002550125502255032550425505255062550725508255092551025511255122551325514255152551625517255182551925520255212552225523255242552525526255272552825529255302553125532255332553425535255362553725538255392554025541255422554325544255452554625547255482554925550255512555225553255542555525556255572555825559255602556125562255632556425565255662556725568255692557025571255722557325574255752557625577255782557925580255812558225583255842558525586255872558825589255902559125592255932559425595255962559725598255992560025601256022560325604256052560625607256082560925610256112561225613256142561525616256172561825619256202562125622256232562425625256262562725628256292563025631256322563325634256352563625637256382563925640256412564225643256442564525646256472564825649256502565125652256532565425655256562565725658256592566025661256622566325664256652566625667256682566925670256712567225673256742567525676256772567825679256802568125682256832568425685256862568725688256892569025691256922569325694256952569625697256982569925700257012570225703257042570525706257072570825709257102571125712257132571425715257162571725718257192572025721257222572325724257252572625727257282572925730257312573225733257342573525736257372573825739257402574125742257432574425745257462574725748257492575025751257522575325754257552575625757257582575925760257612576225763257642576525766257672576825769257702577125772257732577425775257762577725778257792578025781257822578325784257852578625787257882578925790257912579225793257942579525796257972579825799258002580125802258032580425805258062580725808258092581025811258122581325814258152581625817258182581925820258212
58222582325824258252582625827258282582925830258312583225833258342583525836258372583825839258402584125842258432584425845258462584725848258492585025851258522585325854258552585625857258582585925860258612586225863258642586525866258672586825869258702587125872258732587425875258762587725878258792588025881258822588325884258852588625887258882588925890258912589225893258942589525896258972589825899259002590125902259032590425905259062590725908259092591025911259122591325914259152591625917259182591925920259212592225923259242592525926259272592825929259302593125932259332593425935259362593725938259392594025941259422594325944259452594625947259482594925950259512595225953259542595525956259572595825959259602596125962259632596425965259662596725968259692597025971259722597325974259752597625977259782597925980259812598225983259842598525986259872598825989259902599125992259932599425995259962599725998259992600026001260022600326004260052600626007260082600926010260112601226013260142601526016260172601826019260202602126022260232602426025260262602726028260292603026031260322603326034260352603626037260382603926040260412604226043260442604526046260472604826049260502605126052260532605426055260562605726058260592606026061260622606326064260652606626067260682606926070260712607226073260742607526076260772607826079260802608126082260832608426085260862608726088260892609026091260922609326094260952609626097260982609926100261012610226103261042610526106261072610826109261102611126112261132611426115261162611726118261192612026121261222612326124261252612626127261282612926130261312613226133261342613526136261372613826139261402614126142261432614426145261462614726148261492615026151261522615326154261552615626157261582615926160261612616226163261642616526166261672616826169261702617126172261732617426175261762617726178261792618026181261822618326184261852618626187261882618926190261912619226193261942619526196261972619826199262002620126202262032620426205262062620726208262092621026211262122621326214262152621626217262182621926220262212
62222622326224262252622626227262282622926230262312623226233262342623526236262372623826239262402624126242262432624426245262462624726248262492625026251262522625326254262552625626257262582625926260262612626226263262642626526266262672626826269262702627126272262732627426275262762627726278262792628026281262822628326284262852628626287262882628926290262912629226293262942629526296262972629826299263002630126302263032630426305263062630726308263092631026311263122631326314263152631626317263182631926320263212632226323263242632526326263272632826329263302633126332263332633426335263362633726338263392634026341263422634326344263452634626347263482634926350263512635226353263542635526356263572635826359263602636126362263632636426365263662636726368263692637026371263722637326374263752637626377263782637926380263812638226383263842638526386263872638826389263902639126392263932639426395263962639726398263992640026401264022640326404264052640626407264082640926410264112641226413264142641526416264172641826419264202642126422264232642426425264262642726428264292643026431264322643326434264352643626437264382643926440264412644226443264442644526446264472644826449264502645126452264532645426455264562645726458264592646026461264622646326464264652646626467264682646926470264712647226473264742647526476264772647826479264802648126482264832648426485264862648726488264892649026491264922649326494264952649626497264982649926500265012650226503265042650526506265072650826509265102651126512265132651426515265162651726518265192652026521265222652326524265252652626527265282652926530265312653226533265342653526536265372653826539265402654126542265432654426545265462654726548265492655026551265522655326554265552655626557265582655926560265612656226563265642656526566265672656826569265702657126572265732657426575265762657726578265792658026581265822658326584265852658626587265882658926590265912659226593265942659526596265972659826599266002660126602266032660426605266062660726608266092661026611266122661326614266152661626617266182661926620266212
66222662326624266252662626627266282662926630266312663226633266342663526636266372663826639266402664126642266432664426645266462664726648266492665026651266522665326654266552665626657266582665926660266612666226663266642666526666266672666826669266702667126672266732667426675266762667726678266792668026681266822668326684266852668626687266882668926690266912669226693266942669526696266972669826699267002670126702267032670426705267062670726708267092671026711267122671326714267152671626717267182671926720267212672226723267242672526726267272672826729267302673126732267332673426735267362673726738267392674026741267422674326744267452674626747267482674926750267512675226753267542675526756267572675826759267602676126762267632676426765267662676726768267692677026771267722677326774267752677626777267782677926780267812678226783267842678526786267872678826789267902679126792267932679426795267962679726798267992680026801268022680326804268052680626807268082680926810268112681226813268142681526816268172681826819268202682126822268232682426825268262682726828268292683026831268322683326834268352683626837268382683926840268412684226843268442684526846268472684826849268502685126852268532685426855268562685726858268592686026861268622686326864268652686626867268682686926870268712687226873268742687526876268772687826879268802688126882268832688426885268862688726888268892689026891268922689326894268952689626897268982689926900269012690226903269042690526906269072690826909269102691126912269132691426915269162691726918269192692026921269222692326924269252692626927269282692926930269312693226933269342693526936269372693826939269402694126942269432694426945269462694726948269492695026951269522695326954269552695626957269582695926960269612696226963269642696526966269672696826969269702697126972269732697426975269762697726978269792698026981269822698326984269852698626987269882698926990269912699226993269942699526996269972699826999270002700127002270032700427005270062700727008270092701027011270122701327014270152701627017270182701927020270212
70222702327024270252702627027270282702927030270312703227033270342703527036270372703827039270402704127042270432704427045270462704727048270492705027051270522705327054270552705627057270582705927060270612706227063270642706527066270672706827069270702707127072270732707427075270762707727078270792708027081270822708327084270852708627087270882708927090270912709227093270942709527096270972709827099271002710127102271032710427105271062710727108271092711027111271122711327114271152711627117271182711927120271212712227123271242712527126271272712827129271302713127132271332713427135271362713727138271392714027141271422714327144271452714627147271482714927150271512715227153271542715527156271572715827159271602716127162271632716427165271662716727168271692717027171271722717327174271752717627177271782717927180271812718227183271842718527186271872718827189271902719127192271932719427195271962719727198271992720027201272022720327204272052720627207272082720927210272112721227213272142721527216272172721827219272202722127222272232722427225272262722727228272292723027231272322723327234272352723627237272382723927240272412724227243272442724527246272472724827249272502725127252272532725427255272562725727258272592726027261272622726327264272652726627267272682726927270272712727227273272742727527276272772727827279272802728127282272832728427285272862728727288272892729027291272922729327294272952729627297272982729927300273012730227303273042730527306273072730827309273102731127312273132731427315273162731727318273192732027321273222732327324273252732627327273282732927330273312733227333273342733527336273372733827339273402734127342273432734427345273462734727348273492735027351273522735327354273552735627357273582735927360273612736227363273642736527366273672736827369273702737127372273732737427375273762737727378273792738027381273822738327384273852738627387273882738927390273912739227393273942739527396273972739827399274002740127402274032740427405274062740727408274092741027411274122741327414274152741627417274182741927420274212
74222742327424274252742627427274282742927430274312743227433274342743527436274372743827439274402744127442274432744427445274462744727448274492745027451274522745327454274552745627457274582745927460274612746227463274642746527466274672746827469274702747127472274732747427475274762747727478274792748027481274822748327484274852748627487274882748927490274912749227493274942749527496274972749827499275002750127502275032750427505275062750727508275092751027511275122751327514275152751627517275182751927520275212752227523275242752527526275272752827529275302753127532275332753427535275362753727538275392754027541275422754327544275452754627547275482754927550275512755227553275542755527556275572755827559275602756127562275632756427565275662756727568275692757027571275722757327574275752757627577275782757927580275812758227583275842758527586275872758827589275902759127592275932759427595275962759727598275992760027601276022760327604276052760627607276082760927610276112761227613276142761527616276172761827619276202762127622276232762427625276262762727628276292763027631276322763327634276352763627637276382763927640276412764227643276442764527646276472764827649276502765127652276532765427655276562765727658276592766027661276622766327664276652766627667276682766927670276712767227673276742767527676276772767827679276802768127682276832768427685276862768727688276892769027691276922769327694276952769627697276982769927700277012770227703277042770527706277072770827709277102771127712277132771427715277162771727718277192772027721277222772327724277252772627727277282772927730277312773227733277342773527736277372773827739277402774127742277432774427745277462774727748277492775027751277522775327754277552775627757277582775927760277612776227763277642776527766277672776827769277702777127772277732777427775277762777727778277792778027781277822778327784277852778627787277882778927790277912779227793277942779527796277972779827799278002780127802278032780427805278062780727808278092781027811278122781327814278152781627817278182781927820278212
78222782327824278252782627827278282782927830278312783227833278342783527836278372783827839278402784127842278432784427845278462784727848278492785027851278522785327854278552785627857278582785927860278612786227863278642786527866278672786827869278702787127872278732787427875278762787727878278792788027881278822788327884278852788627887278882788927890278912789227893278942789527896278972789827899279002790127902279032790427905279062790727908279092791027911279122791327914279152791627917279182791927920279212792227923279242792527926279272792827929279302793127932279332793427935279362793727938279392794027941279422794327944279452794627947279482794927950279512795227953279542795527956279572795827959279602796127962279632796427965279662796727968279692797027971279722797327974279752797627977279782797927980279812798227983279842798527986279872798827989279902799127992279932799427995279962799727998279992800028001280022800328004280052800628007280082800928010280112801228013280142801528016280172801828019280202802128022280232802428025280262802728028280292803028031280322803328034280352803628037280382803928040280412804228043280442804528046280472804828049280502805128052280532805428055280562805728058280592806028061280622806328064280652806628067280682806928070280712807228073280742807528076280772807828079280802808128082280832808428085280862808728088280892809028091280922809328094280952809628097280982809928100281012810228103281042810528106281072810828109281102811128112281132811428115281162811728118281192812028121281222812328124281252812628127281282812928130281312813228133281342813528136281372813828139281402814128142281432814428145281462814728148281492815028151281522815328154281552815628157281582815928160281612816228163281642816528166281672816828169281702817128172281732817428175281762817728178281792818028181281822818328184281852818628187281882818928190281912819228193281942819528196281972819828199282002820128202282032820428205282062820728208282092821028211282122821328214282152821628217282182821928220282212
82222822328224282252822628227282282822928230282312823228233282342823528236282372823828239282402824128242282432824428245282462824728248282492825028251282522825328254282552825628257282582825928260282612826228263282642826528266282672826828269282702827128272282732827428275282762827728278282792828028281282822828328284282852828628287282882828928290282912829228293282942829528296282972829828299283002830128302283032830428305283062830728308283092831028311283122831328314283152831628317283182831928320283212832228323283242832528326283272832828329283302833128332283332833428335283362833728338283392834028341283422834328344283452834628347283482834928350283512835228353283542835528356283572835828359283602836128362283632836428365283662836728368283692837028371283722837328374283752837628377283782837928380283812838228383283842838528386283872838828389283902839128392283932839428395283962839728398283992840028401284022840328404284052840628407284082840928410284112841228413284142841528416284172841828419
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #ifdef ENABLE_SCRIPT_DEBUGGING
  7. #include "Debug/DebuggingFlags.h"
  8. #include "Debug/DiagProbe.h"
  9. #include "Debug/DebugManager.h"
  10. #endif
  11. // Parser includes
  12. #include "RegexCommon.h"
  13. #include "RegexPattern.h"
  14. #include "ExternalLowerer.h"
  15. #include "Types/DynamicObjectPropertyEnumerator.h"
  16. #include "Types/JavascriptStaticEnumerator.h"
  17. #include "Library/ForInObjectEnumerator.h"
  18. ///----------------------------------------------------------------------------
  19. ///
  20. /// Lowerer::Lower
  21. ///
  22. /// Lowerer's main entrypoint. Lowers this function.
  23. ///
  24. ///----------------------------------------------------------------------------
void
Lowerer::Lower()
{
    // Byte-code offsets are no longer tracked once lowering begins.
    this->m_func->StopMaintainByteCodeOffset();

    // Arena for all lowering-phase allocations; released when this function returns,
    // so every pointer into it must be nulled out before exit (see the tail of this function).
    NoRecoverMemoryJitArenaAllocator localAlloc(_u("BE-Lower"), this->m_func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    this->m_alloc = &localAlloc;
    BVSparse<JitArenaAllocator> localInitializedTempSym(&localAlloc);
    this->initializedTempSym = &localInitializedTempSym;
    BVSparse<JitArenaAllocator> localAddToLiveOnBackEdgeSyms(&localAlloc);
    this->addToLiveOnBackEdgeSyms = &localAddToLiveOnBackEdgeSyms;
    Assert(this->m_func->GetCloneMap() == nullptr);

    m_lowererMD.Init(this);

    // Fast-path policy: one default for straight-line code, one for loop bodies.
    bool defaultDoFastPath = this->m_func->DoFastPaths();
    bool loopFastPath = this->m_func->DoLoopFastPaths();

    if (m_func->HasAnyStackNestedFunc())
    {
        EnsureStackFunctionListStackSym();
    }
    if (m_func->DoStackFrameDisplay() && !m_func->IsLoopBody())
    {
        AllocStackClosure();
    }
    AllocStackForInObjectEnumeratorArray();

    if (m_func->IsJitInDebugMode())
    {
        // Initialize metadata of local var slots.
        // Too late to wait until the register allocator, as we need the offset when lowering bailout for debugger.
        int32 hasLocalVarChangedOffset = m_func->GetHasLocalVarChangedOffset();
        if (hasLocalVarChangedOffset != Js::Constants::InvalidOffset)
        {
            // MOV [EBP + m_func->GetHasLocalVarChangedOffset()], 0
            StackSym* sym = StackSym::New(TyInt8, m_func);
            sym->m_offset = hasLocalVarChangedOffset;
            sym->m_allocated = true;
            IR::Opnd* opnd1 = IR::SymOpnd::New(sym, TyInt8, m_func);
            IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func);
            Lowerer::InsertMove(opnd1, opnd2, m_func->GetFunctionEntryInsertionPoint());

#ifdef DBG
            // Pre-fill all local slots with a pattern. This will help identify non-initialized/garbage var values.
            // Note that in the beginning of the function in bytecode we should initialize all locals to undefined.
            uint32 localSlotCount = m_func->GetJITFunctionBody()->GetEndNonTempLocalIndex() - m_func->GetJITFunctionBody()->GetFirstNonTempLocalIndex();
            for (uint i = 0; i < localSlotCount; ++i)
            {
                int offset = m_func->GetLocalVarSlotOffset(i);
                IRType opnd1Type;
#if defined(TARGET_32)
                // 32-bit target: fill each slot with the 4-byte debug pattern.
                opnd1Type = TyInt32;
                opnd2 = IR::IntConstOpnd::New(Func::c_debugFillPattern4, opnd1Type, m_func);
#else
                // 64-bit target: fill each slot with the 8-byte debug pattern.
                opnd1Type = TyInt64;
                opnd2 = IR::IntConstOpnd::New(Func::c_debugFillPattern8, opnd1Type, m_func);
#endif
                sym = StackSym::New(opnd1Type, m_func);
                sym->m_offset = offset;
                sym->m_allocated = true;
                opnd1 = IR::SymOpnd::New(sym, opnd1Type, m_func);
                Lowerer::InsertMove(opnd1, opnd2, m_func->GetFunctionEntryInsertionPoint());
            }
#endif
        }

        // NOTE(review): debug-mode JIT is asserted to be mutually exclusive with stack nested functions.
        Assert(!m_func->HasAnyStackNestedFunc());
    }

    // Lower every instruction in the function, head to tail.
    this->LowerRange(m_func->m_headInstr, m_func->m_tailInstr, defaultDoFastPath, loopFastPath);

#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
    // TODO: (leish)(swb) implement for arm
#if defined(_M_IX86) || defined(_M_AMD64)
    if (CONFIG_FLAG(ForceSoftwareWriteBarrier) && CONFIG_FLAG(VerifyBarrierBit))
    {
        // Find all write-barrier-setting instrs and call Recycler::WBSetBit for verification purposes.
        // Should do this in LowererMD::GenerateWriteBarrier; however, a call instruction can't be inserted there.
        FOREACH_INSTR_EDITING(instr, instrNext, m_func->m_headInstr)
            if (instr->m_src1 && instr->m_src1->IsAddrOpnd())
            {
                IR::AddrOpnd* addrOpnd = instr->m_src1->AsAddrOpnd();
                if (addrOpnd->GetAddrOpndKind() == IR::AddrOpndKindWriteBarrierCardTable)
                {
                    // The card-table store is preceded by the asserted LEA / MOV / SHR triple;
                    // insert the verification helper call just before the MOV.
                    auto& leaInstr = instr->m_prev->m_prev->m_prev;
                    auto& movInstr = instr->m_prev->m_prev;
                    auto& shrInstr = instr->m_prev;
                    Assert(leaInstr->m_opcode == Js::OpCode::LEA);
                    Assert(movInstr->m_opcode == Js::OpCode::MOV);
                    Assert(shrInstr->m_opcode == Js::OpCode::SHR);
                    m_lowererMD.LoadHelperArgument(movInstr, leaInstr->m_dst);
                    IR::Instr* instrCall = IR::Instr::New(Js::OpCode::Call, m_func);
                    movInstr->InsertBefore(instrCall);
                    m_lowererMD.ChangeToHelperCall(instrCall, IR::HelperWriteBarrierSetVerifyBit);
                }
            }
        NEXT_INSTR_EDITING
    }
#endif
#endif

    this->m_func->ClearCloneMap();

    if (m_func->HasAnyStackNestedFunc())
    {
        EnsureZeroLastStackFunctionNext();
    }

    if (!m_func->IsSimpleJit())
    {
#if 0 // TODO michhol oop jit, reenable assert
        Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();
        Assert(entryPointInfo->GetJitTransferData() != nullptr && !entryPointInfo->GetJitTransferData()->GetIsReady());
#endif
    }

    // localAlloc (and the bit vectors allocated from it) dies with this scope;
    // clear the member pointers so nothing dangles.
    this->initializedTempSym = nullptr;
    this->m_alloc = nullptr;
    this->m_func->DisableConstandAddressLoadHoist();
}
  133. void
  134. Lowerer::LowerRange(IR::Instr *instrStart, IR::Instr *instrEnd, bool defaultDoFastPath, bool defaultDoLoopFastPath)
  135. {
  136. bool noMathFastPath;
  137. bool noFieldFastPath;
  138. bool isStrictMode = this->m_func->GetJITFunctionBody()->IsStrictMode();
  139. noFieldFastPath = !defaultDoFastPath;
  140. noMathFastPath = !defaultDoFastPath;
  141. #if DBG_DUMP
  142. char16 * globOptInstrString = nullptr;
  143. #endif
  144. FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrEnd, instrStart)
  145. {
  146. // Try to peep this instruction.
  147. instr = this->PreLowerPeepInstr(instr, &instrPrev);
  148. #if DBG
  149. IR::Instr * verifyLegalizeInstrNext = instr->m_next;
  150. m_currentInstrOpCode = instr->m_opcode;
  151. #endif
  152. // If we have debugger bailout as part of real instr (not separate BailForDebugger instr),
  153. // extract/split out BailOutForDebugger into separate instr, if needed.
  154. // The instr can have just debugger bailout, or debugger bailout + other shared bailout.
  155. // Note that by the time we get here, we should not have aux-only bailout (in globopt we promote it to normal bailout).
  156. if (m_func->IsJitInDebugMode() && instr->HasBailOutInfo() &&
  157. (((instr->GetBailOutKind() & IR::BailOutForDebuggerBits) && instr->m_opcode != Js::OpCode::BailForDebugger) ||
  158. instr->HasAuxBailOut()))
  159. {
  160. instr = this->SplitBailForDebugger(instr); // Change instr, as returned is the one we need to lower next.
  161. instrPrev = instr->m_prev; // Change just in case if instr got changed.
  162. }
  163. #if DBG_DUMP
  164. if (!instr->IsLowered() && !instr->IsLabelInstr()
  165. && (CONFIG_FLAG(ForcePostLowerGlobOptInstrString) ||
  166. PHASE_DUMP(Js::LowererPhase, m_func) ||
  167. PHASE_DUMP(Js::LinearScanPhase, m_func) ||
  168. PHASE_DUMP(Js::RegAllocPhase, m_func) ||
  169. PHASE_DUMP(Js::PeepsPhase, m_func) ||
  170. PHASE_DUMP(Js::LayoutPhase, m_func) ||
  171. PHASE_DUMP(Js::EmitterPhase, m_func) ||
  172. PHASE_DUMP(Js::EncoderPhase, m_func) ||
  173. PHASE_DUMP(Js::BackEndPhase, m_func)))
  174. {
  175. if(instr->m_next && instr->m_next->m_opcode != Js::OpCode::StatementBoundary && !instr->m_next->IsLabelInstr())
  176. {
  177. instr->m_next->globOptInstrString = globOptInstrString;
  178. }
  179. globOptInstrString = instr->DumpString();
  180. }
  181. #endif
  182. if (instr->IsBranchInstr() && !instr->AsBranchInstr()->IsMultiBranch() && instr->AsBranchInstr()->GetTarget()->m_isLoopTop)
  183. {
  184. Loop * loop = instr->AsBranchInstr()->GetTarget()->GetLoop();
  185. if (this->outerMostLoopLabel == nullptr && !loop->isProcessed)
  186. {
  187. while (loop && loop->GetLoopTopInstr()) // some loops are optimized away so that they are not loops anymore.
  188. // They do, however, stay in the loop graph but don't have loop top labels assigned to them
  189. {
  190. this->outerMostLoopLabel = loop->GetLoopTopInstr();
  191. Assert(this->outerMostLoopLabel->m_isLoopTop);
  192. // landing pad must fall through to the loop
  193. Assert(this->outerMostLoopLabel->m_prev->HasFallThrough());
  194. loop = loop->parent;
  195. }
  196. this->initializedTempSym->ClearAll();
  197. }
  198. noFieldFastPath = !defaultDoLoopFastPath;
  199. noMathFastPath = !defaultDoLoopFastPath;
  200. }
  201. #ifdef INLINE_CACHE_STATS
  202. if(PHASE_STATS1(Js::PolymorphicInlineCachePhase))
  203. {
  204. // Always use the slow path, so we can track property accesses
  205. noFieldFastPath = true;
  206. }
  207. #endif
  208. #if DBG
  209. if (instr->HasBailOutInfo())
  210. {
  211. IR::BailOutKind bailoutKind = instr->GetBailOutKind();
  212. if (BailOutInfo::IsBailOutOnImplicitCalls(bailoutKind))
  213. {
  214. this->helperCallCheckState = (HelperCallCheckState)(this->helperCallCheckState | HelperCallCheckState_ImplicitCallsBailout);
  215. }
  216. if ((bailoutKind & IR::BailOutOnArrayAccessHelperCall) != 0 &&
  217. instr->m_opcode != Js::OpCode::Memcopy &&
  218. instr->m_opcode != Js::OpCode::Memset)
  219. {
  220. this->helperCallCheckState = (HelperCallCheckState)(this->helperCallCheckState | HelperCallCheckState_NoHelperCalls);
  221. }
  222. }
  223. #endif
  224. switch (instr->m_opcode)
  225. {
  226. case Js::OpCode::LdHandlerScope:
  227. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdHandlerScope);
  228. break;
  229. case Js::OpCode::InitSetFld:
  230. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitSetter, IR::HelperOP_InitSetter, false);
  231. break;
  232. case Js::OpCode::InitGetFld:
  233. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitGetter, IR::HelperOP_InitGetter, false);
  234. break;
  235. case Js::OpCode::InitProto:
  236. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitProto, IR::HelperOP_InitProto, false);
  237. break;
  238. case Js::OpCode::LdArgCnt:
  239. this->LoadArgumentCount(instr);
  240. break;
  241. case Js::OpCode::LdStackArgPtr:
  242. this->LoadStackArgPtr(instr);
  243. break;
  244. case Js::OpCode::LdHeapArguments:
  245. case Js::OpCode::LdLetHeapArguments:
  246. instrPrev = m_lowererMD.LoadHeapArguments(instr);
  247. break;
  248. case Js::OpCode::LdHeapArgsCached:
  249. case Js::OpCode::LdLetHeapArgsCached:
  250. m_lowererMD.LoadHeapArgsCached(instr);
  251. break;
  252. case Js::OpCode::InvalCachedScope:
  253. this->LowerBinaryHelper(instr, IR::HelperOP_InvalidateCachedScope);
  254. break;
  255. case Js::OpCode::InitCachedScope:
  256. if (instr->m_func->GetJITFunctionBody()->GetDoScopeObjectCreation() || !instr->m_func->IsStackArgsEnabled())
  257. {
  258. instrPrev = this->LowerInitCachedScope(instr);
  259. }
  260. else
  261. {
  262. instr->ReplaceSrc1(IR::AddrOpnd::NewNull(instr->m_func));
  263. instr->m_opcode = Js::OpCode::Ld_A;
  264. instrPrev = instr;
  265. if (PHASE_TRACE1(Js::StackArgFormalsOptPhase))
  266. {
  267. Output::Print(_u("StackArgFormals : %s (%d) :Removing Scope object creation in Lowerer and replacing it with MOV NULL. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(), instr->m_func->GetFunctionNumber());
  268. Output::Flush();
  269. }
  270. }
  271. break;
  272. case Js::OpCode::NewScopeObject:
  273. {
  274. Func * currFunc = instr->m_func;
  275. if (currFunc->GetJITFunctionBody()->GetDoScopeObjectCreation() || !currFunc->IsStackArgsEnabled())
  276. {
  277. //Call Helper that creates scope object and does type transition for the formals
  278. if (currFunc->IsStackArgsEnabled() && currFunc->GetJITFunctionBody()->GetInParamsCount() != 1)
  279. {
  280. // s3 = formals are let decls
  281. this->m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(currFunc->GetHasNonSimpleParams() ? TRUE : FALSE, TyUint8, currFunc));
  282. // s2 = current function.
  283. IR::Opnd * paramOpnd = LoadFunctionBodyOpnd(instr);
  284. this->m_lowererMD.LoadHelperArgument(instr, paramOpnd);
  285. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewScopeObjectWithFormals);
  286. }
  287. else
  288. {
  289. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewScopeObject);
  290. }
  291. }
  292. else
  293. {
  294. instr->SetSrc1(IR::AddrOpnd::NewNull(instr->m_func));
  295. instr->m_opcode = Js::OpCode::Ld_A;
  296. instrPrev = instr;
  297. if (PHASE_TRACE1(Js::StackArgFormalsOptPhase))
  298. {
  299. Output::Print(_u("StackArgFormals : %s (%d) :Removing Scope object creation in Lowerer and replacing it with MOV NULL. \n"), currFunc->GetJITFunctionBody()->GetDisplayName(), currFunc->GetFunctionNumber());
  300. Output::Flush();
  301. }
  302. }
  303. break;
  304. }
  305. case Js::OpCode::NewStackScopeSlots:
  306. this->LowerNewScopeSlots(instr, m_func->DoStackScopeSlots());
  307. break;
  308. case Js::OpCode::NewScopeSlots:
  309. this->LowerNewScopeSlots(instr, false);
  310. break;
  311. case Js::OpCode::InitLocalClosure:
  312. // Real initialization of the stack pointers happens on entry to the function, so this instruction
  313. // (which exists to provide a def in the IR) can go away.
  314. instr->Remove();
  315. break;
  316. case Js::OpCode::NewScopeSlotsWithoutPropIds:
  317. this->LowerBinaryHelperMemWithFuncBody(instr, IR::HelperOP_NewScopeSlotsWithoutPropIds);
  318. break;
  319. case Js::OpCode::NewBlockScope:
  320. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewBlockScope);
  321. break;
  322. case Js::OpCode::NewPseudoScope:
  323. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewPseudoScope);
  324. break;
  325. case Js::OpCode::CloneInnerScopeSlots:
  326. this->LowerUnaryHelperMem(instr, IR::HelperOP_CloneInnerScopeSlots);
  327. break;
  328. case Js::OpCode::CloneBlockScope:
  329. this->LowerUnaryHelperMem(instr, IR::HelperOP_CloneBlockScope);
  330. break;
  331. case Js::OpCode::GetCachedFunc:
  332. this->LowerGetCachedFunc(instr);
  333. break;
  334. case Js::OpCode::BrFncCachedScopeEq:
  335. case Js::OpCode::BrFncCachedScopeNeq:
  336. this->LowerBrFncCachedScopeEq(instr);
  337. break;
  338. case Js::OpCode::CommitScope:
  339. this->LowerCommitScope(instr);
  340. break;
  341. case Js::OpCode::LdFldForTypeOf:
  342. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValueForTypeOf, IR::HelperOp_PatchGetValuePolymorphicForTypeOf,
  343. IR::HelperOp_PatchGetValueForTypeOf, IR::HelperOp_PatchGetValuePolymorphicForTypeOf);
  344. break;
  345. case Js::OpCode::LdFld:
  346. case Js::OpCode::LdFldForCallApplyTarget:
  347. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValue, IR::HelperOp_PatchGetValuePolymorphic,
  348. IR::HelperOp_PatchGetValue, IR::HelperOp_PatchGetValuePolymorphic);
  349. break;
  350. case Js::OpCode::LdSuperFld:
  351. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValueWithThisPtr, IR::HelperOp_PatchGetValuePolymorphicWithThisPtr,
  352. IR::HelperOp_PatchGetValueWithThisPtr, IR::HelperOp_PatchGetValuePolymorphicWithThisPtr);
  353. break;
  354. case Js::OpCode::LdRootFld:
  355. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootValue, IR::HelperOp_PatchGetRootValuePolymorphic,
  356. IR::HelperOp_PatchGetRootValue, IR::HelperOp_PatchGetRootValuePolymorphic);
  357. break;
  358. case Js::OpCode::LdRootFldForTypeOf:
  359. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootValueForTypeOf, IR::HelperOp_PatchGetRootValuePolymorphicForTypeOf,
  360. IR::HelperOp_PatchGetRootValueForTypeOf, IR::HelperOp_PatchGetRootValuePolymorphicForTypeOf);
  361. break;
  362. case Js::OpCode::LdMethodFldPolyInlineMiss:
  363. instrPrev = LowerLdFld(instr, IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic, true, nullptr, true);
  364. break;
  365. case Js::OpCode::LdMethodFld:
  366. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic,
  367. IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic);
  368. break;
  369. case Js::OpCode::LdRootMethodFld:
  370. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootMethod, IR::HelperOp_PatchGetRootMethodPolymorphic,
  371. IR::HelperOp_PatchGetRootMethod, IR::HelperOp_PatchGetRootMethodPolymorphic);
  372. break;
  373. case Js::OpCode::ScopedLdMethodFld:
  374. // "Scoped" in ScopedLdMethodFld is a bit of a misnomer because it doesn't look through a scope chain.
  375. // Instead the op is to allow for either a LdRootMethodFld or LdMethodFld depending on whether the
  376. // object is the root object or not.
  377. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_ScopedGetMethod, IR::HelperOp_ScopedGetMethodPolymorphic,
  378. IR::HelperOp_ScopedGetMethod, IR::HelperOp_ScopedGetMethodPolymorphic);
  379. break;
  380. case Js::OpCode::LdMethodFromFlags:
  381. {
  382. Assert(instr->HasBailOutInfo());
  383. bool success = GenerateFastLdMethodFromFlags(instr);
  384. AssertMsg(success, "Not expected to generate helper block here");
  385. break;
  386. }
  387. case Js::OpCode::CheckFixedFld:
  388. AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instr->m_func) || !PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func), "CheckFixedFld with fixed prop(Data|Method) phase disabled?");
  389. this->GenerateCheckFixedFld(instr);
  390. break;
  391. case Js::OpCode::CheckPropertyGuardAndLoadType:
  392. instrPrev = this->GeneratePropertyGuardCheckBailoutAndLoadType(instr);
  393. break;
  394. case Js::OpCode::CheckObjType:
  395. this->GenerateCheckObjType(instr);
  396. break;
  397. case Js::OpCode::AdjustObjType:
  398. case Js::OpCode::AdjustObjTypeReloadAuxSlotPtr:
  399. this->LowerAdjustObjType(instr);
  400. break;
  401. case Js::OpCode::DeleteFld:
  402. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteProperty, false, false);
  403. break;
  404. case Js::OpCode::DeleteRootFld:
  405. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteRootProperty, false, false);
  406. break;
  407. case Js::OpCode::DeleteFldStrict:
  408. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteProperty, false, true);
  409. break;
  410. case Js::OpCode::DeleteRootFldStrict:
  411. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteRootProperty, false, true);
  412. break;
  413. case Js::OpCode::ScopedLdFldForTypeOf:
  414. if (!noFieldFastPath)
  415. {
  416. m_lowererMD.GenerateFastScopedLdFld(instr);
  417. }
  418. instrPrev = this->LowerScopedLdFld(instr, IR::HelperOp_PatchGetPropertyForTypeOfScoped, true);
  419. break;
  420. case Js::OpCode::ScopedLdFld:
  421. if (!noFieldFastPath)
  422. {
  423. m_lowererMD.GenerateFastScopedLdFld(instr);
  424. }
  425. instrPrev = this->LowerScopedLdFld(instr, IR::HelperOp_PatchGetPropertyScoped, true);
  426. break;
  427. case Js::OpCode::ScopedLdInst:
  428. instrPrev = this->LowerScopedLdInst(instr, IR::HelperOp_GetInstanceScoped);
  429. break;
  430. case Js::OpCode::ScopedDeleteFld:
  431. instrPrev = this->LowerScopedDelFld(instr, IR::HelperOp_DeletePropertyScoped, false, false);
  432. break;
  433. case Js::OpCode::ScopedDeleteFldStrict:
  434. instrPrev = this->LowerScopedDelFld(instr, IR::HelperOp_DeletePropertyScoped, false, true);
  435. break;
  436. case Js::OpCode::NewScFunc:
  437. instrPrev = this->LowerNewScFunc(instr);
  438. break;
  439. case Js::OpCode::NewScFuncHomeObj:
  440. instrPrev = this->LowerNewScFuncHomeObj(instr);
  441. break;
  442. case Js::OpCode::NewScGenFunc:
  443. instrPrev = this->LowerNewScGenFunc(instr);
  444. break;
  445. case Js::OpCode::NewScGenFuncHomeObj:
  446. instrPrev = this->LowerNewScGenFuncHomeObj(instr);
  447. break;
  448. case Js::OpCode::StFld:
  449. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueNoLocalFastPath, IR::HelperOp_PatchPutValueNoLocalFastPathPolymorphic,
  450. IR::HelperOp_PatchPutValue, IR::HelperOp_PatchPutValuePolymorphic, true, Js::PropertyOperation_None);
  451. break;
  452. case Js::OpCode::StSuperFld:
  453. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueWithThisPtrNoLocalFastPath, IR::HelperOp_PatchPutValueWithThisPtrNoLocalFastPathPolymorphic,
  454. IR::HelperOp_PatchPutValueWithThisPtr, IR::HelperOp_PatchPutValueWithThisPtrPolymorphic, true, isStrictMode ? Js::PropertyOperation_StrictMode : Js::PropertyOperation_None);
  455. break;
  456. case Js::OpCode::StRootFld:
  457. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPathPolymorphic,
  458. IR::HelperOp_PatchPutRootValue, IR::HelperOp_PatchPutRootValuePolymorphic, true, Js::PropertyOperation_Root);
  459. break;
  460. case Js::OpCode::StFldStrict:
  461. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueNoLocalFastPath, IR::HelperOp_PatchPutValueNoLocalFastPathPolymorphic,
  462. IR::HelperOp_PatchPutValue, IR::HelperOp_PatchPutValuePolymorphic, true, Js::PropertyOperation_StrictMode);
  463. break;
  464. case Js::OpCode::StRootFldStrict:
  465. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPathPolymorphic,
  466. IR::HelperOp_PatchPutRootValue, IR::HelperOp_PatchPutRootValuePolymorphic, true, Js::PropertyOperation_StrictModeRoot);
  467. break;
  468. case Js::OpCode::InitFld:
  469. case Js::OpCode::InitRootFld:
  470. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchInitValue, IR::HelperOp_PatchInitValuePolymorphic,
  471. IR::HelperOp_PatchInitValue, IR::HelperOp_PatchInitValuePolymorphic, false, Js::PropertyOperation_None);
  472. break;
  473. case Js::OpCode::ScopedInitFunc:
  474. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_InitFuncScoped, false);
  475. break;
  476. case Js::OpCode::ScopedStFld:
  477. case Js::OpCode::ScopedStFldStrict:
  478. if (!noFieldFastPath)
  479. {
  480. m_lowererMD.GenerateFastScopedStFld(instr);
  481. }
  482. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_PatchSetPropertyScoped, true, true,
  483. instr->m_opcode == Js::OpCode::ScopedStFld ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode);
  484. break;
  485. case Js::OpCode::ConsoleScopedStFld:
  486. case Js::OpCode::ConsoleScopedStFldStrict:
  487. {
  488. if (!noFieldFastPath)
  489. {
  490. m_lowererMD.GenerateFastScopedStFld(instr);
  491. }
  492. Js::PropertyOperationFlags flags = static_cast<Js::PropertyOperationFlags>((instr->m_opcode == Js::OpCode::ConsoleScopedStFld ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode) | Js::PropertyOperation_AllowUndeclInConsoleScope);
  493. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_ConsolePatchSetPropertyScoped, true, true, flags);
  494. break;
  495. }
  496. case Js::OpCode::LdStr:
  497. m_lowererMD.ChangeToAssign(instr);
  498. break;
  499. case Js::OpCode::CloneStr:
  500. {
  501. GenerateGetImmutableOrScriptUnreferencedString(instr->GetSrc1()->AsRegOpnd(), instr, IR::HelperOp_CompoundStringCloneForAppending, false);
  502. instr->Remove();
  503. break;
  504. }
  505. case Js::OpCode::NewScObjArray:
  506. instrPrev = this->LowerNewScObjArray(instr);
  507. break;
  508. case Js::OpCode::NewScObject:
  509. case Js::OpCode::NewScObjectSpread:
  510. case Js::OpCode::NewScObjArraySpread:
  511. instrPrev = this->LowerNewScObject(instr, true, true);
  512. break;
  513. case Js::OpCode::NewScObjectNoCtor:
  514. instrPrev = this->LowerNewScObject(instr, false, true);
  515. break;
  516. case Js::OpCode::NewScObjectNoCtorFull:
  517. instrPrev = this->LowerNewScObject(instr, false, true, true);
  518. break;
  519. case Js::OpCode::GetNewScObject:
  520. instrPrev = this->LowerGetNewScObject(instr);
  521. break;
  522. case Js::OpCode::UpdateNewScObjectCache:
  523. instrPrev = instr->m_prev;
  524. this->LowerUpdateNewScObjectCache(instr, instr->GetSrc2(), instr->GetSrc1(), true /* isCtorFunction */);
  525. instr->Remove();
  526. break;
  527. case Js::OpCode::NewScObjectSimple:
  528. this->LowerNewScObjectSimple(instr);
  529. break;
  530. case Js::OpCode::NewScObjectLiteral:
  531. this->LowerNewScObjectLiteral(instr);
  532. break;
  533. case Js::OpCode::LdPropIds:
  534. m_lowererMD.ChangeToAssign(instr);
  535. break;
  536. case Js::OpCode::StArrSegItem_A:
  537. instrPrev = this->LowerArraySegmentVars(instr);
  538. break;
  539. case Js::OpCode::InlineMathAcos:
  540. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Acos);
  541. break;
  542. case Js::OpCode::InlineMathAsin:
  543. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Asin);
  544. break;
  545. case Js::OpCode::InlineMathAtan:
  546. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Atan);
  547. break;
  548. case Js::OpCode::InlineMathAtan2:
  549. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Atan2);
  550. break;
  551. case Js::OpCode::InlineMathCos:
  552. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Cos);
  553. break;
  554. case Js::OpCode::InlineMathExp:
  555. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Exp);
  556. break;
  557. case Js::OpCode::InlineMathLog:
  558. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Log);
  559. break;
  560. case Js::OpCode::InlineMathPow:
  561. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Pow);
  562. break;
  563. case Js::OpCode::InlineMathSin:
  564. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Sin);
  565. break;
// --- Inline Math.* builtins and low-level numeric intrinsics ---
// A helper argument of (IR::JnHelperMethod)0 tells the machine-dependent
// lowerer to emit an inline instruction sequence for the op (no helper),
// whereas a named Helper* routes through the direct-math helper call.
case Js::OpCode::InlineMathSqrt:
    m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
    break;

case Js::OpCode::InlineMathTan:
    // Tan always goes through the direct-math helper.
    m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Tan);
    break;

case Js::OpCode::InlineMathFloor:
#if defined(ASMJS_PLAT) && (defined(_M_X64) || defined(_M_IX86))
    // Without SSE4.1 the inline rounding sequence is unavailable, so asm.js
    // code falls back to the float/double helper pair.
    if (!AutoSystemInfo::Data.SSE4_1Available() && instr->m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_FloorFlt, IR::HelperDirectMath_FloorDb);
        break;
    }
#endif
    m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
    break;

case Js::OpCode::InlineMathCeil:
#if defined(ASMJS_PLAT) && (defined(_M_X64) || defined(_M_IX86))
    // Same SSE4.1 constraint as InlineMathFloor above.
    if (!AutoSystemInfo::Data.SSE4_1Available() && instr->m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_CeilFlt, IR::HelperDirectMath_CeilDb);
        break;
    }
#endif
    m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
    break;

case Js::OpCode::InlineMathRound:
    m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
    break;

case Js::OpCode::InlineMathAbs:
    m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
    break;

case Js::OpCode::InlineMathImul:
    GenerateFastInlineMathImul(instr);
    break;

case Js::OpCode::Ctz:
    // Count trailing zeros.
    GenerateCtz(instr);
    break;

case Js::OpCode::PopCnt:
    // Population count (number of set bits).
    GeneratePopCnt(instr);
    break;

case Js::OpCode::InlineMathClz:
    // Count leading zeros (Math.clz32).
    GenerateFastInlineMathClz(instr);
    break;

case Js::OpCode::InlineMathFround:
    GenerateFastInlineMathFround(instr);
    break;

case Js::OpCode::Reinterpret_Prim:
    // Bit-level reinterpretation between primitive types (e.g. wasm reinterpret).
    LowerReinterpretPrimitive(instr);
    break;

case Js::OpCode::InlineMathMin:
case Js::OpCode::InlineMathMax:
    m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
    break;

case Js::OpCode::InlineMathRandom:
    this->GenerateFastInlineBuiltInMathRandom(instr);
    break;

#ifdef ENABLE_DOM_FAST_PATH
case Js::OpCode::DOMFastPathGetter:
    this->LowerFastInlineDOMFastPathGetter(instr);
    break;
#endif

case Js::OpCode::InlineArrayPush:
    this->GenerateFastInlineArrayPush(instr);
    break;

case Js::OpCode::InlineArrayPop:
    this->GenerateFastInlineArrayPop(instr);
    break;
// Now retrieve the function object from the ArgOut_A_InlineSpecialized instruction opcode
// to push it on the stack after all the other arguments have been pushed.
// The lowering of the direct call to helper is handled by GenerateDirectCall (architecture specific).
case Js::OpCode::CallDirect:
{
    IR::Opnd * src1 = instr->GetSrc1();
    Assert(src1->IsHelperCallOpnd());
    // For a known set of library helpers, emit an inline fast path first;
    // the generic LowerCallDirect below remains as the fallback/slow path.
    switch (src1->AsHelperCallOpnd()->m_fnHelper)
    {
    case IR::JnHelperMethod::HelperString_Split:
    case IR::JnHelperMethod::HelperString_Match:
        GenerateFastInlineStringSplitMatch(instr);
        break;
    case IR::JnHelperMethod::HelperRegExp_Exec:
        GenerateFastInlineRegExpExec(instr);
        break;
    case IR::JnHelperMethod::HelperGlobalObject_ParseInt:
        GenerateFastInlineGlobalObjectParseInt(instr);
        break;
    case IR::JnHelperMethod::HelperString_FromCharCode:
        GenerateFastInlineStringFromCharCode(instr);
        break;
    case IR::JnHelperMethod::HelperString_FromCodePoint:
        GenerateFastInlineStringFromCodePoint(instr);
        break;
    case IR::JnHelperMethod::HelperString_CharAt:
        // CharAt and CharCodeAt share one generator, distinguished by builtin id.
        GenerateFastInlineStringCharCodeAt(instr, Js::BuiltinFunction::JavascriptString_CharAt);
        break;
    case IR::JnHelperMethod::HelperString_CharCodeAt:
        GenerateFastInlineStringCharCodeAt(instr, Js::BuiltinFunction::JavascriptString_CharCodeAt);
        break;
    case IR::JnHelperMethod::HelperString_Replace:
        GenerateFastInlineStringReplace(instr);
        break;
    case IR::JnHelperMethod::HelperObject_HasOwnProperty:
        this->GenerateFastInlineHasOwnProperty(instr);
        break;
    case IR::JnHelperMethod::HelperArray_IsArray:
        this->GenerateFastInlineIsArray(instr);
        break;
    }
    instrPrev = LowerCallDirect(instr);
    break;
}
case Js::OpCode::CallIDynamic:
{
    // A call whose result is never read is flagged NotUsed so the lowering
    // can skip materializing the return value.
    Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
    instrPrev = this->LowerCallIDynamic(instr, (ushort)flags);
    break;
}
case Js::OpCode::CallIDynamicSpread:
{
    Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
    instrPrev = this->LowerCallIDynamicSpread(instr, (ushort)flags);
    break;
}
case Js::OpCode::CallI:
case Js::OpCode::CallINew:
case Js::OpCode::CallIFixed:
case Js::OpCode::CallINewTargetNew:
{
    // Compute the CallFlags that describe how this call site behaves
    // (constructor call, new.target present, result used or not).
    Js::CallFlags flags = Js::CallFlags_None;
    if (instr->isCtorCall)
    {
        flags = Js::CallFlags_New;
    }
    else
    {
        if (instr->m_opcode == Js::OpCode::CallINew)
        {
            flags = Js::CallFlags_New;
        }
        else if (instr->m_opcode == Js::OpCode::CallINewTargetNew)
        {
            // new.target is passed as an extra hidden argument.
            flags = (Js::CallFlags) (Js::CallFlags_New | Js::CallFlags_ExtraArg | Js::CallFlags_NewTarget);
        }
        if (instr->GetDst())
        {
            flags = (Js::CallFlags) (flags | Js::CallFlags_Value);
        }
        else
        {
            flags = (Js::CallFlags) (flags | Js::CallFlags_NotUsed);
        }
    }
    if (!PHASE_OFF(Js::CallFastPathPhase, this->m_func) && !noMathFastPath)
    {
        // We shouldn't have turned this instruction into a fixed method call if we're calling one of the
        // built-ins we still inline in the lowerer.
        Assert(instr->m_opcode != Js::OpCode::CallIFixed || !Func::IsBuiltInInlinedInLowerer(instr->GetSrc1()));
        // Disable InlineBuiltInLibraryCall as it does not work well with 2nd chance reg alloc
        // and may invalidate live on back edge data by introducing refs across loops. See Winblue Bug: 577641
        //// Callee may still be a library built-in; if so, generate it inline.
        //if (this->InlineBuiltInLibraryCall(instr))
        //{
        //    m_lowererMD.LowerCallI(instr, (ushort)flags, true /*isHelper*/);
        //}
        //else
        //{
        m_lowererMD.LowerCallI(instr, (ushort)flags);
        //}
    }
    else
    {
        m_lowererMD.LowerCallI(instr, (ushort)flags);
    }
    break;
}
case Js::OpCode::AsmJsCallI:
    // asm.js internal (typed) call.
    instrPrev = m_lowererMD.LowerAsmJsCallI(instr);
    break;

case Js::OpCode::AsmJsCallE:
    // asm.js external (to JS) call.
    instrPrev = m_lowererMD.LowerAsmJsCallE(instr);
    break;

case Js::OpCode::CallIEval:
{
    // eval call sites always carry CallFlags_ExtraArg (a hidden extra argument).
    Js::CallFlags flags = (Js::CallFlags)(Js::CallFlags_ExtraArg | (instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed));
    if (IsSpreadCall(instr))
    {
        instrPrev = LowerSpreadCall(instr, flags);
    }
    else
    {
        m_lowererMD.LowerCallI(instr, (ushort)flags);
    }
#ifdef PERF_HINT
    // Record that this function calls eval, for perf-hint tracing.
    if (PHASE_TRACE1(Js::PerfHintPhase))
    {
        WritePerfHint(PerfHints::CallsEval, this->m_func, instr->GetByteCodeOffset());
    }
#endif
    break;
}
case Js::OpCode::CallHelper:
    instrPrev = m_lowererMD.LowerCallHelper(instr);
    break;
case Js::OpCode::Ret:
    if (instr->m_next->m_opcode != Js::OpCode::FunctionExit)
    {
        // If this RET isn't at the end of the function, insert a branch to
        // the epilog. Reuse a label immediately before the exit instruction
        // if one already exists; otherwise create it.
        IR::Instr *exitPrev = m_func->m_exitInstr->m_prev;
        if (!exitPrev->IsLabelInstr())
        {
            exitPrev = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            m_func->m_exitInstr->InsertBefore(exitPrev);
        }
        IR::BranchInstr *exitBr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode,
            exitPrev->AsLabelInstr(), m_func);
        instr->InsertAfter(exitBr);
    }
    m_lowererMD.LowerRet(instr);
    break;
case Js::OpCode::LdArgumentsFromFrame:
    this->LoadArgumentsFromFrame(instr);
    break;

case Js::OpCode::LdC_A_I4:
{
    // Load a tagged-int constant: the int32 source is converted to a Var address.
    IR::Opnd *src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsIntConstOpnd(), "Source of LdC_A_I4 should be an IntConst...");
    instrPrev = this->LowerLoadVar(instr,
        IR::AddrOpnd::NewFromNumber(static_cast<int32>(src1->AsIntConstOpnd()->GetValue()), this->m_func));
    src1->Free(this->m_func);
    break;
}
case Js::OpCode::LdC_A_R8:
{
    // Load a double constant boxed as a Var.
    IR::Opnd *src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsFloatConstOpnd(), "Source of LdC_A_R8 should be a FloatConst...");
    instrPrev = this->LowerLoadVar(instr, src1->AsFloatConstOpnd()->GetAddrOpnd(this->m_func));
    src1->Free(this->m_func);
    break;
}
case Js::OpCode::LdC_F8_R8:
{
    // Load a raw (unboxed) float constant into a float register; the original
    // instruction is replaced entirely by the emitted load.
    IR::Opnd *src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsFloatConstOpnd() || src1->IsFloat32ConstOpnd(), "Source of LdC_F8_R8 should be a FloatConst...");
    if (src1->IsFloatConstOpnd())
    {
        instrPrev = m_lowererMD.LoadFloatValue(instr->UnlinkDst()->AsRegOpnd(), src1->AsFloatConstOpnd()->m_value, instr);
    }
    else
    {
        instrPrev = m_lowererMD.LoadFloatValue(instr->UnlinkDst()->AsRegOpnd(), src1->AsFloat32ConstOpnd()->m_value, instr);
    }
    src1->Free(this->m_func);
    instr->Remove();
    break;
}
case Js::OpCode::NewRegEx:
    instrPrev = this->LowerNewRegEx(instr);
    break;

case Js::OpCode::Conv_Obj:
    this->LowerUnaryHelperMem(instr, IR::HelperOp_ConvObject);
    break;

case Js::OpCode::NewUnscopablesWrapperObject:
    this->LowerUnaryHelperMem(instr, IR::HelperOp_NewUnscopablesWrapperObject);
    break;

case Js::OpCode::LdCustomSpreadIteratorList:
    this->LowerUnaryHelperMem(instr, IR::HelperOp_ToSpreadedFunctionArgument);
    break;

case Js::OpCode::Conv_Num:
    this->LowerConvNum(instr, noMathFastPath);
    break;
// Pattern for the unary math ops below: when the fast path is enabled, emit
// the inline tagged-int fast path first, then the helper call as slow path.
case Js::OpCode::Incr_A:
    if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerUnaryHelperMem(instr, IR::HelperOp_Increment);
    }
    else
    {
        // Express ++ as (src + 1) so the generic fast-add sequence can be reused;
        // the temporary src2 is freed before lowering the helper slow path.
        instr->SetSrc2(IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(1), IR::AddrOpndKindConstantVar, this->m_func));
        m_lowererMD.GenerateFastAdd(instr);
        instr->FreeSrc2();
        this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Increment));
    }
    break;

case Js::OpCode::Decr_A:
    if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerUnaryHelperMem(instr, IR::HelperOp_Decrement);
    }
    else
    {
        // Express -- as (src - 1), mirroring Incr_A.
        instr->SetSrc2(IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(1), IR::AddrOpndKindConstantVar, this->m_func));
        m_lowererMD.GenerateFastSub(instr);
        instr->FreeSrc2();
        this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Decrement));
    }
    break;

case Js::OpCode::Neg_A:
    if (instr->GetDst()->IsFloat())
    {
        // Type-specialized float negate lowers to a machine float op.
        Assert(instr->GetSrc1()->IsFloat());
        m_lowererMD.LowerToFloat(instr);
    }
    else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerUnaryHelperMem(instr, IR::HelperOp_Negate);
    }
    else if (m_lowererMD.GenerateFastNeg(instr))
    {
        // GenerateFastNeg returning true means the helper slow path is still needed.
        this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Negate));
    }
    break;

case Js::OpCode::Not_A:
    if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerUnaryHelperMem(instr, IR::HelperOp_Not);
    }
    else if (m_lowererMD.GenerateFastNot(instr))
    {
        this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Not));
    }
    break;
  888. case Js::OpCode::BrEq_I4:
  889. case Js::OpCode::BrNeq_I4:
  890. case Js::OpCode::BrGt_I4:
  891. case Js::OpCode::BrGe_I4:
  892. case Js::OpCode::BrLt_I4:
  893. case Js::OpCode::BrLe_I4:
  894. case Js::OpCode::BrUnGt_I4:
  895. case Js::OpCode::BrUnGe_I4:
  896. case Js::OpCode::BrUnLt_I4:
  897. case Js::OpCode::BrUnLe_I4:
  898. {
  899. // See calls to MarkOneFltTmpSym under BrSrEq. This is to handle the case
  900. // where a branch is type-specialized and uses the result of a float pref op,
  901. // which must then be saved to var at the def.
  902. StackSym *sym = instr->GetSrc1()->GetStackSym();
  903. if (sym)
  904. {
  905. sym = sym->GetVarEquivSym(nullptr);
  906. }
  907. sym = instr->GetSrc2()->GetStackSym();
  908. if (sym)
  909. {
  910. sym = sym->GetVarEquivSym(nullptr);
  911. }
  912. }
  913. // FALLTHROUGH
  914. case Js::OpCode::Neg_I4:
  915. case Js::OpCode::Not_I4:
  916. case Js::OpCode::Add_I4:
  917. case Js::OpCode::Sub_I4:
  918. case Js::OpCode::Mul_I4:
  919. case Js::OpCode::RemU_I4:
  920. case Js::OpCode::Rem_I4:
  921. case Js::OpCode::Or_I4:
  922. case Js::OpCode::Xor_I4:
  923. case Js::OpCode::And_I4:
  924. case Js::OpCode::Shl_I4:
  925. case Js::OpCode::Shr_I4:
  926. case Js::OpCode::ShrU_I4:
  927. case Js::OpCode::Rol_I4:
  928. case Js::OpCode::Ror_I4:
  929. case Js::OpCode::BrTrue_I4:
  930. case Js::OpCode::BrFalse_I4:
  931. #ifdef _M_IX86
  932. if (
  933. instr->GetDst() && instr->GetDst()->IsInt64() ||
  934. instr->GetSrc1() && instr->GetSrc1()->IsInt64() ||
  935. instr->GetSrc2() && instr->GetSrc2()->IsInt64()
  936. )
  937. {
  938. m_lowererMD.EmitInt64Instr(instr);
  939. break;
  940. }
  941. #endif
  942. if (instr->HasBailOutInfo())
  943. {
  944. const auto bailOutKind = instr->GetBailOutKind();
  945. if (bailOutKind & IR::BailOutOnResultConditions ||
  946. bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck)
  947. {
  948. const auto nonBailOutInstr = SplitBailOnResultCondition(instr);
  949. IR::LabelInstr *bailOutLabel, *skipBailOutLabel;
  950. LowerBailOnResultCondition(instr, &bailOutLabel, &skipBailOutLabel);
  951. LowerInstrWithBailOnResultCondition(nonBailOutInstr, bailOutKind, bailOutLabel, skipBailOutLabel);
  952. }
  953. else if (bailOutKind == IR::BailOnModByPowerOf2)
  954. {
  955. Assert(instr->m_opcode == Js::OpCode::Rem_I4);
  956. bool fastPath = GenerateSimplifiedInt4Rem(instr);
  957. Assert(fastPath);
  958. instr->FreeSrc1();
  959. instr->FreeSrc2();
  960. this->GenerateBailOut(instr);
  961. }
  962. }
  963. else
  964. {
  965. if (instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::RemU_I4)
  966. {
  967. // fast path
  968. this->GenerateSimplifiedInt4Rem(instr);
  969. // slow path
  970. this->LowerRemI4(instr);
  971. }
  972. #if defined(_M_IX86) || defined(_M_X64)
  973. else if (instr->m_opcode == Js::OpCode::Mul_I4)
  974. {
  975. if (!LowererMD::GenerateSimplifiedInt4Mul(instr))
  976. {
  977. m_lowererMD.EmitInt4Instr(instr);
  978. }
  979. }
  980. #endif
  981. else
  982. {
  983. m_lowererMD.EmitInt4Instr(instr);
  984. }
  985. }
  986. break;
// --- wasm-style trap checks and integer division ---
case Js::OpCode::TrapIfMinIntOverNegOne:
    // Guards against INT_MIN / -1 (overflowing division).
    LowerTrapIfMinIntOverNegOne(instr);
    break;

case Js::OpCode::TrapIfTruncOverflow:
    // No codegen needed here; becomes a plain assignment.
    LowererMD::ChangeToAssign(instr);
    break;

case Js::OpCode::TrapIfZero:
    LowerTrapIfZero(instr);
    break;

case Js::OpCode::TrapIfUnalignedAccess:
    instrPrev = LowerTrapIfUnalignedAccess(instr);
    break;

case Js::OpCode::DivU_I4:
case Js::OpCode::Div_I4:
    this->LowerDivI4(instr);
    break;

case Js::OpCode::Typeof:
    m_lowererMD.LowerTypeof(instr);
    break;

case Js::OpCode::TypeofElem:
    this->LowerLdElemI(instr, IR::HelperOp_TypeofElem, false);
    break;
case Js::OpCode::LdLen_A:
{
    bool fastPath = !noMathFastPath;
    if (!fastPath && instr->HasBailOutInfo())
    {
        // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
        // bailouts require the fast path.
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if (bailOutKind & IR::BailOutKindBits)
        {
            fastPath = true;
        }
        else
        {
            // Only implicit-call bailouts can be honored on the helper-only path.
            const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
            fastPath =
                bailOutKindMinusBits &&
                bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
                bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
        }
    }
    bool instrIsInHelperBlock = false;
    if (!fastPath)
    {
        LowerLdLen(instr, false);
    }
    else if (GenerateFastLdLen(instr, &instrIsInHelperBlock))
    {
        // Fast path emitted; the remaining helper call is the slow path, which
        // must not be reached for irregular-length bailouts.
        Assert(
            !instr->HasBailOutInfo() ||
            (instr->GetBailOutKind() & ~IR::BailOutKindBits) != IR::BailOutOnIrregularLength);
        LowerLdLen(instr, instrIsInHelperBlock);
    }
    break;
}
case Js::OpCode::LdThis:
{
    if (noFieldFastPath || !GenerateLdThisCheck(instr))
    {
        IR::JnHelperMethod meth;
        if (instr->IsJitProfilingInstr())
        {
            // Simple-jit profiling variant: pass the function body to the
            // profiled helper.
            Assert(instr->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
            m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
            meth = IR::HelperSimpleProfiledLdThis;
            this->LowerBinaryHelper(instr, meth);
        }
        else
        {
            meth = IR::HelperLdThisNoFastPath;
            this->LowerBinaryHelperMem(instr, meth);
        }
    }
    else
    {
        // Inline check succeeded; helper remains as the slow path.
        this->LowerBinaryHelperMem(instr, IR::HelperLdThis);
    }
    break;
}
case Js::OpCode::LdNativeCodeData:
    // Only emitted for out-of-process JIT.
    Assert(m_func->IsOOPJIT());
    instrPrev = LowerLdNativeCodeData(instr);
    break;

case Js::OpCode::StrictLdThis:
    if (noFieldFastPath)
    {
        IR::JnHelperMethod meth;
        if (instr->IsJitProfilingInstr())
        {
            Assert(instr->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
            m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
            meth = IR::HelperSimpleProfiledStrictLdThis;
            this->LowerUnaryHelper(instr, meth);
        }
        else
        {
            meth = IR::HelperStrictLdThis;
            this->LowerUnaryHelperMem(instr, meth);
        }
    }
    else
    {
        // Strict-mode fast path covers all cases; no helper call remains.
        this->GenerateLdThisStrict(instr);
        instr->Remove();
    }
    break;

case Js::OpCode::CheckThis:
    GenerateLdThisCheck(instr);
    instr->FreeSrc1();
    this->GenerateBailOut(instr);
    break;

case Js::OpCode::StrictCheckThis:
    this->GenerateLdThisStrict(instr);
    instr->FreeSrc1();
    this->GenerateBailOut(instr);
    break;
case Js::OpCode::NewScArray:
    instrPrev = this->LowerNewScArray(instr);
    break;

case Js::OpCode::NewScArrayWithMissingValues:
    this->LowerUnaryHelperMem(instr, IR::HelperScrArr_OP_NewScArrayWithMissingValues);
    break;

case Js::OpCode::NewScIntArray:
    instrPrev = this->LowerNewScIntArray(instr);
    break;

case Js::OpCode::NewScFltArray:
    instrPrev = this->LowerNewScFltArray(instr);
    break;

case Js::OpCode::InitForInEnumerator:
    this->LowerInitForInEnumerator(instr);
    break;
case Js::OpCode::Add_A:
    if (instr->GetDst()->IsFloat())
    {
        Assert(instr->GetSrc1()->IsFloat());
        Assert(instr->GetSrc2()->IsFloat());
        // we don't want to mix float32 and float64
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
        m_lowererMD.LowerToFloat(instr);
    }
    else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_Add);
    }
    else if (m_lowererMD.TryGenerateFastMulAdd(instr, &instrPrev))
    {
        // Fused multiply-add fast path handled everything; nothing more to do.
    }
    else
    {
        m_lowererMD.GenerateFastAdd(instr);
        this->LowerBinaryHelperMemWithTemp3(instr, IR_HELPER_OP_FULL_OR_INPLACE(Add), IR::HelperOp_AddLeftDead);
    }
    break;

case Js::OpCode::Div_A:
{
    if (instr->IsJitProfilingInstr()) {
        LowerProfiledBinaryOp(instr->AsJitProfilingInstr(), IR::HelperSimpleProfiledDivide);
    }
    else if (instr->GetDst()->IsFloat())
    {
        Assert(instr->GetSrc1()->IsFloat());
        Assert(instr->GetSrc2()->IsFloat());
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
        m_lowererMD.LowerToFloat(instr);
    }
    else
    {
        if (!PHASE_OFF(Js::MathFastPathPhase, this->m_func) && !noMathFastPath)
        {
            // Division by a tagged-int power-of-2 constant gets a shift-based
            // fast path before the generic helper call.
            IR::AddrOpnd *src2 = instr->GetSrc2()->IsAddrOpnd() ? instr->GetSrc2()->AsAddrOpnd() : nullptr;
            if (src2 && src2->IsVar() && Js::TaggedInt::Is(src2->m_address))
            {
                int32 value = Js::TaggedInt::ToInt32(src2->m_address);
                if (Math::IsPow2(value))
                {
                    m_lowererMD.GenerateFastDivByPow2(instr);
                }
            }
        }
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Divide));
    }
    break;
}
case Js::OpCode::Expo_A:
{
    if (instr->GetDst()->IsFloat())
    {
        Assert(instr->GetSrc1()->IsFloat());
        Assert(instr->GetSrc2()->IsFloat());
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
        m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Pow);
    }
    else
    {
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Exponentiation));
    }
    break;
}
case Js::OpCode::Mul_A:
    if (instr->GetDst()->IsFloat())
    {
        Assert(instr->GetSrc1()->IsFloat());
        Assert(instr->GetSrc2()->IsFloat());
        // we don't want to mix float32 and float64
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
        m_lowererMD.LowerToFloat(instr);
    }
    else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_Multiply);
    }
    else if (m_lowererMD.GenerateFastMul(instr))
    {
        // Fast path emitted but a helper slow path is still required.
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Multiply));
    }
    break;

case Js::OpCode::Rem_A:
    if (instr->GetDst()->IsFloat64())
    {
        this->LowerRemR8(instr);
    }
    else if (instr->IsJitProfilingInstr())
    {
        this->LowerProfiledBinaryOp(instr->AsJitProfilingInstr(), IR::HelperSimpleProfiledRemainder);
    }
    else
    {
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Modulus));
    }
    break;

case Js::OpCode::Sub_A:
    if (instr->GetDst()->IsFloat())
    {
        Assert(instr->GetSrc1()->IsFloat());
        Assert(instr->GetSrc2()->IsFloat());
        Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
        Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
        m_lowererMD.LowerToFloat(instr);
    }
    else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_Subtract);
    }
    else if (m_lowererMD.TryGenerateFastMulAdd(instr, &instrPrev))
    {
        // Fused multiply-sub fast path handled everything; nothing more to do.
    }
    else
    {
        m_lowererMD.GenerateFastSub(instr);
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Subtract));
    }
    break;

// For the bitops below, the GenerateFast* call returning true (or the fast
// path being disabled) means the generic helper call must still be emitted.
case Js::OpCode::And_A:
    if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_And);
    }
    else if (m_lowererMD.GenerateFastAnd(instr))
    {
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(And));
    }
    break;

case Js::OpCode::Or_A:
    if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_Or);
    }
    else if (m_lowererMD.GenerateFastOr(instr))
    {
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Or));
    }
    break;

case Js::OpCode::Xor_A:
    if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastXor(instr))
    {
        this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Xor));
    }
    break;

case Js::OpCode::Shl_A:
    if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftLeft(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftLeft);
    }
    break;

case Js::OpCode::Shr_A:
    if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftRight(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftRight);
    }
    break;

case Js::OpCode::ShrU_A:
    // Unsigned shift shares the signed fast-path generator; only the helper differs.
    if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftRight(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftRightU);
    }
    break;
// --- Comparison operators ---
case Js::OpCode::CmEq_A:
{
    instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmEq_A);
    break;
}
case Js::OpCode::CmNeq_A:
{
    instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmNeq_A);
    break;
}
case Js::OpCode::CmSrEq_A:
    instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmSrEq_A);
    break;

case Js::OpCode::CmSrNeq_A:
    instrPrev = LowerEqualityCompare(instr, IR::HelperOP_CmSrNeq_A);
    break;

// Relational compares: float-specialized operands get an inline float
// compare; otherwise try the tagged-int fast path, falling back to the helper.
case Js::OpCode::CmGt_A:
    if (instr->GetSrc1()->IsFloat())
    {
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        this->m_lowererMD.GenerateFastCmXxR8(instr);
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOP_CmGt_A);
    }
    break;

case Js::OpCode::CmGe_A:
    if (instr->GetSrc1()->IsFloat())
    {
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        this->m_lowererMD.GenerateFastCmXxR8(instr);
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOP_CmGe_A);
    }
    break;

case Js::OpCode::CmLt_A:
    if (instr->GetSrc1()->IsFloat())
    {
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        this->m_lowererMD.GenerateFastCmXxR8(instr);
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOP_CmLt_A);
    }
    break;

case Js::OpCode::CmLe_A:
    if (instr->GetSrc1()->IsFloat())
    {
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        this->m_lowererMD.GenerateFastCmXxR8(instr);
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
    {
        this->LowerBinaryHelperMem(instr, IR::HelperOP_CmLe_A);
    }
    break;

case Js::OpCode::CmEq_I4:
case Js::OpCode::CmNeq_I4:
case Js::OpCode::CmGe_I4:
case Js::OpCode::CmGt_I4:
case Js::OpCode::CmLe_I4:
case Js::OpCode::CmLt_I4:
case Js::OpCode::CmUnGe_I4:
case Js::OpCode::CmUnGt_I4:
case Js::OpCode::CmUnLe_I4:
case Js::OpCode::CmUnLt_I4:
    // Int-specialized compares lower directly to machine compares.
    this->m_lowererMD.GenerateFastCmXxI4(instr);
    break;

case Js::OpCode::Conv_Bool:
    instrPrev = this->m_lowererMD.GenerateConvBool(instr);
    break;

case Js::OpCode::IsInst:
    // Inline instanceof fast path, then the helper call as slow path.
    this->GenerateFastIsInst(instr);
    instrPrev = this->LowerIsInst(instr, IR::HelperScrObj_OP_IsInst);
    break;

case Js::OpCode::IsIn:
    // Inline array and object 'in' fast paths, then the helper slow path.
    this->GenerateFastArrayIsIn(instr);
    this->GenerateFastObjectIsIn(instr);
    this->LowerBinaryHelperMem(instr, IR::HelperOp_IsIn);
    break;
// --- Typed-array view accesses, wasm atomics, and bulk memory ops ---
case Js::OpCode::LdArrViewElem:
    instrPrev = LowerLdArrViewElem(instr);
    break;

case Js::OpCode::StAtomicWasm:
    instrPrev = LowerStAtomicsWasm(instr);
    break;

case Js::OpCode::StArrViewElem:
    instrPrev = LowerStArrViewElem(instr);
    break;

case Js::OpCode::LdAtomicWasm:
    instrPrev = LowerLdAtomicsWasm(instr);
    break;

case Js::OpCode::LdArrViewElemWasm:
    instrPrev = LowerLdArrViewElemWasm(instr);
    break;

case Js::OpCode::Memset:
case Js::OpCode::Memcopy:
{
    instrPrev = LowerMemOp(instr);
    break;
}
case Js::OpCode::ArrayDetachedCheck:
    instrPrev = LowerArrayDetachedCheck(instr);
    break;
case Js::OpCode::StElemI_A:
case Js::OpCode::StElemI_A_Strict:
{
    // Note: under debugger (Fast F12) don't let GenerateFastStElemI which calls into ToNumber_Helper
    // which takes double, and currently our helper wrapper doesn't support double.
    bool fastPath = !noMathFastPath && !m_func->IsJitInDebugMode();
    if (!fastPath && instr->HasBailOutInfo())
    {
        // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
        // bailouts require the fast path.
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        const IR::BailOutKind bailOutKindBits = bailOutKind & IR::BailOutKindBits;
        if (bailOutKindBits & ~(IR::BailOutOnMissingValue | IR::BailOutConvertedNativeArray))
        {
            fastPath = true;
        }
        else
        {
            const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
            fastPath =
                bailOutKindMinusBits &&
                bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
                bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
        }
    }
    // Backfill the base operand's value type from profile data when the
    // symbol is single-def and nothing was recorded on the operand.
    IR::Opnd * opnd = instr->GetDst();
    IR::Opnd * baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
    ValueType profiledBaseValueType = baseOpnd->AsRegOpnd()->GetValueType();
    if (profiledBaseValueType.IsUninitialized() && baseOpnd->AsRegOpnd()->m_sym->IsSingleDef())
    {
        baseOpnd->SetValueType(baseOpnd->FindProfiledValueType());
    }
    bool instrIsInHelperBlock = false;
    if (!fastPath)
    {
        this->LowerStElemI(
            instr,
            instr->m_opcode == Js::OpCode::StElemI_A ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode,
            false);
    }
    else if (GenerateFastStElemI(instr, &instrIsInHelperBlock))
    {
#if DBG
        // Bailout kinds that only the fast path can honor must not reach the
        // helper-call path emitted below.
        if (instr->HasBailOutInfo())
        {
            const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            Assert(
                (bailOutKind & ~IR::BailOutKindBits) != IR::BailOutConventionalTypedArrayAccessOnly &&
                !(
                    bailOutKind &
                    (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)
                ));
        }
#endif
        this->LowerStElemI(
            instr,
            instr->m_opcode == Js::OpCode::StElemI_A ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode,
            instrIsInHelperBlock);
    }
    break;
}
case Js::OpCode::LdElemI_A:
case Js::OpCode::LdMethodElem:
{
    // LdMethodElem only takes the fast path when the base is likely an object.
    bool fastPath =
        !noMathFastPath &&
        (
            instr->m_opcode != Js::OpCode::LdMethodElem ||
            instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyObject()
        );
    if (!fastPath && instr->HasBailOutInfo())
    {
        // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
        // bailouts require the fast path.
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if (bailOutKind & IR::BailOutKindBits)
        {
            fastPath = true;
        }
        else
        {
            const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
            fastPath =
                bailOutKindMinusBits &&
                bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
                bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
        }
    }
    // Backfill the base operand's value type from profile data when the
    // symbol is single-def and nothing was recorded on the operand.
    IR::Opnd * opnd = instr->GetSrc1();
    IR::Opnd * baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
    ValueType profiledBaseValueType = baseOpnd->AsRegOpnd()->GetValueType();
    if (profiledBaseValueType.IsUninitialized() && baseOpnd->AsRegOpnd()->m_sym->IsSingleDef())
    {
        baseOpnd->SetValueType(baseOpnd->FindProfiledValueType());
    }
    bool instrIsInHelperBlock = false;
    if (!fastPath)
    {
        this->LowerLdElemI(
            instr,
            instr->m_opcode == Js::OpCode::LdElemI_A ? IR::HelperOp_GetElementI : IR::HelperOp_GetMethodElement,
            false);
    }
    else if (GenerateFastLdElemI(instr, &instrIsInHelperBlock))
    {
#if DBG
        // Bailout kinds that only the fast path can honor must not reach the
        // helper-call path emitted below.
        if (instr->HasBailOutInfo())
        {
            const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            Assert(
                (bailOutKind & ~IR::BailOutKindBits) != IR::BailOutConventionalTypedArrayAccessOnly &&
                !(
                    bailOutKind &
                    (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)
                ));
        }
#endif
        this->LowerLdElemI(
            instr,
            instr->m_opcode == Js::OpCode::LdElemI_A ? IR::HelperOp_GetElementI : IR::HelperOp_GetMethodElement,
            instrIsInHelperBlock);
    }
    break;
}
    // Computed-property initializers (set/get/plain) all lower through
    // StElemI with a specialized helper that installs the accessor/property.
    case Js::OpCode::InitSetElemI:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitElemSetter);
        break;
    case Js::OpCode::InitGetElemI:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitElemGetter);
        break;
    case Js::OpCode::InitComputedProperty:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitComputedProperty);
        break;
    case Js::OpCode::Delete_A:
        this->LowerUnaryHelperMem(instr, IR::HelperOp_Delete);
        break;
    // delete base[index]; second argument selects strict-mode semantics.
    case Js::OpCode::DeleteElemI_A:
        this->LowerDeleteElemI(instr, false);
        break;
    case Js::OpCode::DeleteElemIStrict_A:
        this->LowerDeleteElemI(instr, true);
        break;
    case Js::OpCode::BytecodeArgOutCapture:
        m_lowererMD.ChangeToAssign(instr);
        break;
    case Js::OpCode::UnwrapWithObj:
        this->LowerUnaryHelper(instr, IR::HelperOp_UnwrapWithObj);
        break;
#ifdef ENABLE_WASM
    case Js::OpCode::CheckWasmSignature:
        this->LowerCheckWasmSignature(instr);
        break;
    case Js::OpCode::LdWasmFunc:
        instrPrev = this->LowerLdWasmFunc(instr);
        break;
    case Js::OpCode::GrowWasmMemory:
        instrPrev = this->LowerGrowWasmMemory(instr);
        break;
#endif
    case Js::OpCode::Ld_I4:
        LowererMD::ChangeToAssign(instr);
        break;
    case Js::OpCode::LdAsmJsFunc:
        // For an indirect asm.js function-table load, fold the machine indir
        // scale into the operand before treating this as a plain assign below.
        if (instr->GetSrc1()->IsIndirOpnd())
        {
            IR::IndirOpnd* indir = instr->GetSrc1()->AsIndirOpnd();
            byte scale = m_lowererMD.GetDefaultIndirScale();
            if (!indir->GetIndexOpnd())
            {
                // If we have a constant offset, we need to apply the scale now
                int32 offset;
                // Overflow in (1 << scale) * offset means the index cannot be
                // represented; emit a range error instead of a wrapped offset.
                if (Int32Math::Shl(1, scale, &offset) || Int32Math::Mul(offset, indir->GetOffset(), &offset))
                {
                    // The constant is too big to offset this array. Throw out of range.
                    // Todo:: throw a better error message for this scenario
                    GenerateRuntimeError(instr, JSERR_ArgumentOutOfRange, IR::HelperOp_RuntimeRangeError);
                }
                indir->SetOffset(offset);
            }
            else
            {
                indir->SetScale(scale);
            }
        }
        //fallthrough
    case Js::OpCode::Ld_A:
    case Js::OpCode::InitConst:
        // Under simple-jit profiling, a begin-switch load gets profiled lowering.
        if (instr->IsJitProfilingInstr() && instr->AsJitProfilingInstr()->isBeginSwitch) {
            LowerProfiledBeginSwitch(instr->AsJitProfilingInstr());
            break;
        }
        m_lowererMD.ChangeToAssign(instr);
        if (instr->HasBailOutInfo())
        {
            IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if (bailOutKind == IR::BailOutExpectingString)
            {
                this->LowerBailOnNotString(instr);
            }
            else
            {
                // Should not reach here as there are only 1 BailOutKind (BailOutExpectingString) currently associated with the Load Instr
                Assert(false);
            }
        }
        break;
    case Js::OpCode::LdIndir:
        // Plain load through an indir operand; the asserts pin the expected
        // shape (reg dst, indir src1, no src2) before converting to an assign.
        Assert(instr->GetDst());
        Assert(instr->GetDst()->IsRegOpnd());
        Assert(instr->GetSrc1());
        Assert(instr->GetSrc1()->IsIndirOpnd());
        Assert(!instr->GetSrc2());
        m_lowererMD.ChangeToAssign(instr);
        break;
    case Js::OpCode::FromVar:
        // Unbox a Var into the destination's machine type; the path taken is
        // selected purely by the dst type.
        Assert(instr->GetSrc1()->GetType() == TyVar);
        if (instr->GetDst()->GetType() == TyInt32)
        {
            if (m_lowererMD.EmitLoadInt32(instr, !(instr->HasBailOutInfo() && (instr->GetBailOutKind() == IR::BailOutOnNotPrimitive))))
            {
                // Bail out instead of calling a helper
                Assert(instr->GetBailOutKind() == IR::BailOutIntOnly || instr->GetBailOutKind() == IR::BailOutExpectingInteger);
                Assert(!instr->GetSrc1()->GetValueType().IsInt()); // when we know it's an int, it should not have bailout info, to avoid generating a bailout path that will never be taken
                instr->UnlinkSrc1();
                instr->UnlinkDst();
                GenerateBailOut(instr);
            }
        }
        else if (instr->GetDst()->IsFloat())
        {
            // asm.js loads the float directly; normal JIT code must convert
            // from a (possibly tagged) number.
            if (m_func->GetJITFunctionBody()->IsAsmJsMode())
            {
                m_lowererMD.EmitLoadFloat(instr->GetDst(), instr->GetSrc1(), instr);
                instr->Remove();
            }
            else
            {
                m_lowererMD.EmitLoadFloatFromNumber(instr->GetDst(), instr->GetSrc1(), instr);
            }
        }
        else if (instr->GetDst()->IsInt64())
        {
            // Var -> int64 is only reachable from Wasm and is always a type
            // error; the instr is kept as a dummy assign so the IR stays valid.
            Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
            GenerateRuntimeError(instr, WASMERR_InvalidTypeConversion);
            instr->ReplaceSrc1(IR::Int64ConstOpnd::New(0, TyInt64, m_func));
            LowererMD::ChangeToAssign(instr);
        }
#ifdef ENABLE_WASM_SIMD
        else if (instr->GetDst()->IsSimd128())
        {
            // Same treatment as int64: unconditional Wasm type error.
            Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
            GenerateRuntimeError(instr, WASMERR_InvalidTypeConversion);
            instr->ReplaceSrc1(IR::Simd128ConstOpnd::New({ 0,0,0,0 }, instr->GetDst()->GetType(), m_func));
            LowererMD::ChangeToAssign(instr);
        }
#endif
        else
        {
            Assert(UNREACHED);
        }
        break;
    case Js::OpCode::ArgOut_A:
        // I don't know if this can happen in asm.js mode, but if it can, we might want to handle differently
        Assert(!m_func->GetJITFunctionBody()->IsAsmJsMode());
        // fall-through
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_Dynamic:
    {
        // ArgOut/StartCall are normally lowered by the lowering of the associated call instr.
        // If the call becomes unreachable, we could end up with an orphan ArgOut or StartCall.
        // Change the ArgOut into a store to the stack for bailouts
        instr->FreeSrc2();
        StackSym *argSym = instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
        argSym->m_offset = this->m_func->StackAllocate(sizeof(Js::Var));
        argSym->m_allocated = true;
        argSym->m_isOrphanedArg = true;
        this->m_lowererMD.ChangeToAssign(instr);
    }
    break;
    case Js::OpCode::LoweredStartCall:
    case Js::OpCode::StartCall:
        // ArgOut/StartCall are normally lowered by the lowering of the associated call instr.
        // If the call becomes unreachable, we could end up with an orphan ArgOut or StartCall.
        // We'll just delete these StartCalls during peeps.
        break;
    case Js::OpCode::ToVar:
        // Box a machine value into a Var; dispatch on the src type.
        Assert(instr->GetDst()->GetType() == TyVar);
        if (instr->GetSrc1()->GetType() == TyInt32)
        {
            m_lowererMD.EmitLoadVar(instr);
        }
        else if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->IsRegOpnd());
            IR::RegOpnd* float64Opnd = instr->GetSrc1()->AsRegOpnd();
            // Widen float32 to float64 first; boxing only handles doubles.
            if (float64Opnd->IsFloat32())
            {
                IR::RegOpnd* float64ConvOpnd = IR::RegOpnd::New(TyFloat64, m_func);
                m_lowererMD.EmitFloat32ToFloat64(float64ConvOpnd, float64Opnd, instr);
                float64Opnd = float64ConvOpnd;
            }
            m_lowererMD.SaveDoubleToVar(
                instr->GetDst()->AsRegOpnd(),
                float64Opnd, instr, instr);
            instr->Remove();
        }
        else if (instr->GetSrc1()->IsInt64() || instr->GetSrc1()->IsSimd128())
        {
            // int64/simd -> Var only occurs in Wasm and is always a type error;
            // keep a dummy assign so the IR stays well-formed after the throw.
            Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
            GenerateRuntimeError(instr, WASMERR_InvalidTypeConversion);
            instr->ReplaceSrc1(IR::IntConstOpnd::New(0, TyMachReg, m_func));
            LowererMD::ChangeToAssign(instr);
        }
        else
        {
            Assert(UNREACHED);
        }
        break;
    case Js::OpCode::Conv_Prim_Sat:
    {
        // Saturating primitive conversion (clamps instead of trapping).
        GenerateTruncWithCheck<true /* Saturate */>(instr);
        break;
    }
    case Js::OpCode::Conv_Prim:
    {
        // Primitive-to-primitive conversion. If the source was produced by a
        // TrapIfTruncOverflow, emit the checked (trapping) truncation instead.
        if (IR::Instr::FindSingleDefInstr(Js::OpCode::TrapIfTruncOverflow, instr->GetSrc1()))
        {
            GenerateTruncWithCheck<false /* Saturate */>(instr);
            break;
        }
        // Otherwise dispatch on (dst type, src type). Each Emit* inserts the
        // machine conversion sequence; the original instr is removed at the end.
        if (instr->GetDst()->IsFloat())
        {
            if (instr->GetSrc1()->IsIntConstOpnd())
            {
                LoadFloatFromNonReg(instr->UnlinkSrc1(), instr->UnlinkDst(), instr);
            }
            else if (instr->GetSrc1()->IsInt32())
            {
                m_lowererMD.EmitIntToFloat(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else if (instr->GetSrc1()->IsUInt32())
            {
                m_lowererMD.EmitUIntToFloat(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else if (instr->GetSrc1()->IsInt64())
            {
                m_lowererMD.EmitInt64toFloat(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else
            {
                // Remaining float-dst case is float32 -> float64 widening.
                Assert(instr->GetDst()->IsFloat64());
                Assert(instr->GetSrc1()->IsFloat32());
                m_lowererMD.EmitFloat32ToFloat64(instr->GetDst(), instr->GetSrc1(), instr);
            }
        }
        else if (instr->GetDst()->IsInt64())
        {
            if (instr->GetSrc1()->IsInt32())
            {
                m_lowererMD.EmitIntToLong(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else if (instr->GetSrc1()->IsUInt32())
            {
                m_lowererMD.EmitUIntToLong(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else if (instr->GetSrc1()->IsInt64() && instr->GetSrc2())
            {
                // src2 presumably encodes the width for sign extension — the
                // Emit helper reads it; confirm against EmitSignExtend.
                m_lowererMD.EmitSignExtend(instr);
            }
            else
            {
                Assert(0);
            }
        }
        else
        {
            Assert(instr->GetDst()->IsInt32());
            if (instr->GetSrc1()->IsInt64())
            {
                m_lowererMD.EmitLongToInt(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else if ((instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32()) && instr->GetSrc2())
            {
                m_lowererMD.EmitSignExtend(instr);
            }
            else
            {
                Assert(instr->GetSrc1()->IsFloat());
                m_lowererMD.EmitFloatToInt(instr->GetDst(), instr->GetSrc1(), instr);
            }
        }
        // The Emit* helpers above inserted the real instructions; drop the
        // original Conv_Prim.
        instr->Remove();
        break;
    }
    case Js::OpCode::FunctionExit:
        LowerFunctionExit(instr);
        // The rest of Epilog generation happens after reg allocation
        break;
    case Js::OpCode::FunctionEntry:
        LowerFunctionEntry(instr);
        // The rest of Prolog generation happens after reg allocation
        break;
    case Js::OpCode::ArgIn_Rest:
    case Js::OpCode::ArgIn_A:
        // asm.js functions (but not loop bodies) load formals differently.
        if (m_func->GetJITFunctionBody()->IsAsmJsMode() && !m_func->IsLoopBody())
        {
            instrPrev = LowerArgInAsmJs(instr);
        }
        else
        {
            instrPrev = LowerArgIn(instr);
        }
        break;
    case Js::OpCode::Label:
        if (instr->AsLabelInstr()->m_isLoopTop)
        {
            // Leaving the outermost loop: restore the default fast-path flags
            // (they may have been overridden for the loop body) and mark the
            // loop processed.
            if (this->outerMostLoopLabel == instr)
            {
                noFieldFastPath = !defaultDoFastPath;
                noMathFastPath = !defaultDoFastPath;
                this->outerMostLoopLabel = nullptr;
                instr->AsLabelInstr()->GetLoop()->isProcessed = true;
            }
            // Keep constant-address syms and the accumulated extra syms alive
            // across the loop's back edge for the register allocator.
            this->m_func->MarkConstantAddressSyms(instr->AsLabelInstr()->GetLoop()->regAlloc.liveOnBackEdgeSyms);
            instr->AsLabelInstr()->GetLoop()->regAlloc.liveOnBackEdgeSyms->Or(this->addToLiveOnBackEdgeSyms);
        }
        break;
    case Js::OpCode::Br:
        instr->m_opcode = LowererMD::MDUncondBranchOpcode;
        break;
    case Js::OpCode::BrFncEqApply:
        LowerBrFncApply(instr, IR::HelperOp_OP_BrFncEqApply);
        break;
    case Js::OpCode::BrFncNeqApply:
        LowerBrFncApply(instr, IR::HelperOp_OP_BrFncNeqApply);
        break;
    case Js::OpCode::BrHasSideEffects:
    case Js::OpCode::BrNotHasSideEffects:
        m_lowererMD.GenerateFastBrS(instr->AsBranchInstr());
        break;
    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrTrue_A:
        // Boolean branch. Floats are handled fully by the fast path; otherwise
        // try the fast path unless disabled — GenerateFastBrBool presumably
        // returns true when a helper call is still required (TODO confirm),
        // in which case the ToBoolean helper lowering runs as well.
        if (instr->GetSrc1()->IsFloat())
        {
            GenerateFastBrBool(instr->AsBranchInstr());
        }
        else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) ||
            noMathFastPath ||
            GenerateFastBrBool(instr->AsBranchInstr()))
        {
            this->LowerBrBMem(instr, IR::HelperConv_ToBoolean);
        }
        break;
    case Js::OpCode::BrOnObject_A:
        if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath)
        {
            this->LowerBrOnObject(instr, IR::HelperOp_IsObject);
        }
        else
        {
            GenerateFastBrOnObject(instr);
        }
        break;
    case Js::OpCode::BrOnBaseConstructorKind:
        this->LowerBrOnClassConstructor(instr, IR::HelperOp_IsBaseConstructorKind);
        break;
    case Js::OpCode::BrOnClassConstructor:
        this->LowerBrOnClassConstructor(instr, IR::HelperOp_IsClassConstructor);
        break;
    // Address/null comparisons need no helper; they lower to machine compares.
    case Js::OpCode::BrAddr_A:
    case Js::OpCode::BrNotAddr_A:
    case Js::OpCode::BrNotNull_A:
        m_lowererMD.LowerCondBranch(instr);
        break;
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
        instrPrev = LowerEqualityBranch(instr, IR::HelperOp_Equal);
        break;
  1874. case Js::OpCode::BrGe_A:
  1875. case Js::OpCode::BrNotGe_A:
  1876. if (instr->GetSrc1()->IsFloat())
  1877. {
  1878. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1879. m_lowererMD.LowerToFloat(instr);
  1880. }
  1881. else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
  1882. {
  1883. this->LowerBrCMem(instr, IR::HelperOp_GreaterEqual, false, false /*isHelper*/);
  1884. }
  1885. else
  1886. {
  1887. this->LowerBrCMem(instr, IR::HelperOp_GreaterEqual, true, false /*isHelper*/);
  1888. }
  1889. break;
  1890. case Js::OpCode::BrGt_A:
  1891. case Js::OpCode::BrNotGt_A:
  1892. if (instr->GetSrc1()->IsFloat())
  1893. {
  1894. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1895. m_lowererMD.LowerToFloat(instr);
  1896. }
  1897. else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
  1898. {
  1899. this->LowerBrCMem(instr, IR::HelperOp_Greater, false, false /*isHelper*/);
  1900. }
  1901. else
  1902. {
  1903. this->LowerBrCMem(instr, IR::HelperOp_Greater, true, false /*isHelper*/);
  1904. }
  1905. break;
  1906. case Js::OpCode::BrLt_A:
  1907. case Js::OpCode::BrNotLt_A:
  1908. if (instr->GetSrc1()->IsFloat())
  1909. {
  1910. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1911. m_lowererMD.LowerToFloat(instr);
  1912. }
  1913. else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
  1914. {
  1915. this->LowerBrCMem(instr, IR::HelperOp_Less, false, false /*isHelper*/);
  1916. }
  1917. else
  1918. {
  1919. this->LowerBrCMem(instr, IR::HelperOp_Less, true, false /*isHelper*/);
  1920. }
  1921. break;
  1922. case Js::OpCode::BrLe_A:
  1923. case Js::OpCode::BrNotLe_A:
  1924. if (instr->GetSrc1()->IsFloat())
  1925. {
  1926. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1927. m_lowererMD.LowerToFloat(instr);
  1928. }
  1929. else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
  1930. {
  1931. this->LowerBrCMem(instr, IR::HelperOp_LessEqual, false, false /*isHelper*/);
  1932. }
  1933. else
  1934. {
  1935. this->LowerBrCMem(instr, IR::HelperOp_LessEqual, true, false /*isHelper*/);
  1936. }
  1937. break;
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrNotEq_A:
        instrPrev = LowerEqualityBranch(instr, IR::HelperOp_NotEqual);
        break;
    case Js::OpCode::MultiBr:
    {
        // Switch-statement dispatch; the kind selects the lookup strategy
        // chosen by the switch optimization (string dictionary, single-char
        // jump table, or integer jump table).
        IR::MultiBranchInstr * multiBranchInstr = instr->AsBranchInstr()->AsMultiBrInstr();
        switch (multiBranchInstr->m_kind)
        {
        case IR::MultiBranchInstr::StrDictionary:
            this->GenerateSwitchStringLookup(instr);
            break;
        case IR::MultiBranchInstr::SingleCharStrJumpTable:
            this->GenerateSingleCharStrJumpTableLookup(instr);
            // Track total jump-table memory for switch statements (stats).
            m_func->m_totalJumpTableSizeInBytesForSwitchStatements += (multiBranchInstr->GetBranchJumpTable()->tableSize * sizeof(void*));
            break;
        case IR::MultiBranchInstr::IntJumpTable:
            this->LowerMultiBr(instr);
            m_func->m_totalJumpTableSizeInBytesForSwitchStatements += (multiBranchInstr->GetBranchJumpTable()->tableSize * sizeof(void*));
            break;
        default:
            Assert(false);
        }
        break;
    }
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
        instrPrev = LowerEqualityBranch(instr, IR::HelperOp_StrictEqual);
        break;
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
        instrPrev = LowerEqualityBranch(instr, IR::HelperOp_NotStrictEqual);
        break;
    case Js::OpCode::BrOnEmpty:
    case Js::OpCode::BrOnNotEmpty:
        // for-in enumeration branch; the bool tells LowerBrBReturn whether a
        // fast path was emitted ahead of the helper call.
        if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func))
        {
            this->GenerateFastBrBReturn(instr);
            this->LowerBrBReturn(instr, IR::HelperOp_OP_BrOnEmpty, true);
        }
        else
        {
            this->LowerBrBReturn(instr, IR::HelperOp_OP_BrOnEmpty, false);
        }
        break;
    case Js::OpCode::BrOnHasProperty:
    case Js::OpCode::BrOnNoProperty:
        this->LowerBrProperty(instr, IR::HelperOp_HasProperty);
        break;
    case Js::OpCode::BrOnException:
        // Only valid when glob opt did not run; the branch is a no-op here.
        Assert(!this->m_func->DoGlobOpt());
        instr->Remove();
        break;
    case Js::OpCode::BrOnNoException:
        instr->m_opcode = LowererMD::MDUncondBranchOpcode;
        break;
    case Js::OpCode::StSlot:
        this->LowerStSlot(instr);
        break;
    case Js::OpCode::StSlotChkUndecl:
        this->LowerStSlotChkUndecl(instr);
        break;
    case Js::OpCode::ProfiledLoopStart:
    {
        // Simple-jit dynamic profiling: at the top of a profiled loop, save and
        // clear the implicit call flags (via the helper instr IRBuilder placed
        // just before us), then call the runtime to fetch any scheduled
        // jit-loop-body entry point.
        Assert(m_func->DoSimpleJitDynamicProfile());
        Assert(instr->IsJitProfilingInstr());
        // Check for the helper instr from IRBuilding (it won't be there if there are no LoopEnds due to an infinite loop)
        auto prev = instr->m_prev;
        if (prev->IsJitProfilingInstr() && prev->AsJitProfilingInstr()->isLoopHelper)
        {
            // Replace the placeholder with: save current flags, then clear them.
            auto saveOpnd = prev->UnlinkDst();
            instrPrev = prev->m_prev;
            prev->Remove();
            const auto starFlag = GetImplicitCallFlagsOpnd();
            IR::AutoReuseOpnd a(starFlag, m_func);
            this->InsertMove(saveOpnd, starFlag, instr);
            this->InsertMove(starFlag, CreateClearImplicitCallFlagsOpnd(), instr);
        }
        else
        {
#if DBG
            // Double check that we indeed do not have a LoopEnd that is part of the same loop for the rest of the function
            auto cur = instr;
            auto loopNumber = instr->AsJitProfilingInstr()->loopNumber;
            while (cur)
            {
                Assert(cur->m_opcode != Js::OpCode::ProfiledLoopEnd || cur->IsJitProfilingInstr() && cur->AsJitProfilingInstr()->loopNumber != loopNumber);
                cur = cur->m_next;
            }
#endif
        }
        // If we turned off fulljit, there's no reason to do this.
        if (PHASE_OFF(Js::FullJitPhase, m_func))
        {
            instr->Remove();
        }
        else
        {
            // dst = HelperSimpleGetScheduledEntryPoint(framePtr, loopNumber)
            Assert(instr->GetDst());
            instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleGetScheduledEntryPoint, m_func));
            m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateUint32Opnd(instr->AsJitProfilingInstr()->loopNumber, m_func));
            m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
            this->m_lowererMD.LowerCall(instr, 0);
        }
        break;
    }
    case Js::OpCode::ProfiledLoopBodyStart:
    {
        // Simple-jit dynamic profiling: record the loop's implicit call flags
        // and compute `dobailout` — whether to bail out to a jitted loop body.
        Assert(m_func->DoSimpleJitDynamicProfile());
        const auto loopNum = instr->AsJitProfilingInstr()->loopNumber;
        Assert(loopNum < m_func->GetJITFunctionBody()->GetLoopCount());
        auto entryPointOpnd = instr->UnlinkSrc1();
        auto dobailout = instr->UnlinkDst();
        const auto dobailoutType = TyUint8;
        // The helper's return type must match the 1-byte dst we store into.
        Assert(dobailout->GetType() == TyUint8 && sizeof(decltype(Js::SimpleJitHelpers::IsLoopCodeGenDone(nullptr))) == 1);
        // Turn this instr into: RecordLoopImplicitCallFlags(framePtr, loopNum, 0).
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(0, TyUint32, m_func)); // zero indicates that we do not want to add flags back in
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(loopNum, TyUint32, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
        instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleRecordLoopImplicitCallFlags, m_func));
        m_lowererMD.LowerCall(instr, 0);
        // Outline of JITed code:
        //
        // LoopStart:
        //     entryPoint = GetScheduledEntryPoint(framePtr, loopNum)
        // LoopBodyStart:
        //     uint8 dobailout;
        //     if (entryPoint) {
        //         dobailout = IsLoopCodeGenDone(entryPoint)
        //     } else {
        //         dobailout = ++interpretCount >= threshold
        //     }
        //     // already exists from IRBuilding:
        //     if (dobailout) {
        //         Bailout
        //     }
        if (PHASE_OFF(Js::FullJitPhase, m_func) || !m_func->GetJITFunctionBody()->DoJITLoopBody())
        {
            // If we're not doing fulljit, we've turned off JitLoopBodies, or if we don't have loop headers allocated (the function has a Try, etc)
            // just move false to dobailout
            this->InsertMove(dobailout, IR::IntConstOpnd::New(0, dobailoutType, m_func, true), instr->m_next);
        }
        else if (m_func->GetWorkItem()->GetJITTimeInfo()->ForceJITLoopBody())
        {
            // If we're forcing jit loop bodies, move true to dobailout
            this->InsertMove(dobailout, IR::IntConstOpnd::New(1, dobailoutType, m_func, true), instr->m_next);
        }
        else
        {
            // Put in the labels
            auto entryPointIsNull = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            auto checkDoBailout = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            instr->InsertAfter(checkDoBailout);
            instr->InsertAfter(entryPointIsNull);
            this->InsertCompareBranch(entryPointOpnd, IR::AddrOpnd::New(nullptr, IR::AddrOpndKindDynamicMisc, m_func), Js::OpCode::BrEq_A, false, entryPointIsNull, instr->m_next);
            // If the entry point is not null
            auto isCodeGenDone = IR::Instr::New(Js::OpCode::Call, dobailout, IR::HelperCallOpnd::New(IR::HelperSimpleIsLoopCodeGenDone, m_func), m_func);
            entryPointIsNull->InsertBefore(isCodeGenDone);
            m_lowererMD.LoadHelperArgument(isCodeGenDone, entryPointOpnd);
            m_lowererMD.LowerCall(isCodeGenDone, 0);
            this->InsertBranch(LowererMD::MDUncondBranchOpcode, true, checkDoBailout, entryPointIsNull);
            // Else: bump the interpret count and set dobailout when it reaches
            // the profiling threshold for this loop header.
            const auto type = TyUint32;
            auto countReg = IR::RegOpnd::New(type, m_func);
            auto countAddr = IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetLoopHeaderAddr(loopNum) + Js::LoopHeader::GetOffsetOfInterpretCount(), type, m_func);
            IR::AutoReuseOpnd a(countReg, m_func), b(countAddr, m_func);
            this->InsertAdd(false, countReg, countAddr, IR::IntConstOpnd::New(1, type, m_func, true), checkDoBailout);
            this->InsertMove(countAddr, countReg, checkDoBailout);
            this->InsertMove(dobailout, IR::IntConstOpnd::New(0, dobailoutType, m_func, true), checkDoBailout);
            this->InsertCompareBranch(countReg, IR::IntConstOpnd::New(m_func->GetJITFunctionBody()->GetLoopHeaderData(loopNum)->interpretCount, type, m_func), Js::OpCode::BrLt_A, checkDoBailout, checkDoBailout);
            this->InsertMove(dobailout, IR::IntConstOpnd::New(1, dobailoutType, m_func, true), checkDoBailout);
            // fallthrough
            // Label checkDoBailout (inserted above)
        }
    }
    break;
    case Js::OpCode::ProfiledLoopEnd:
    {
        // Simple-jit dynamic profiling: at loop end, record the implicit call
        // flags observed in the loop and restore the flags saved at loop start.
        Assert(m_func->DoSimpleJitDynamicProfile());
        // This is set up in IRBuilding
        Assert(instr->GetSrc1());
        IR::Opnd* savedFlags = instr->UnlinkSrc1();
        // Becomes: RecordLoopImplicitCallFlags(framePtr, loopNumber, savedFlags).
        m_lowererMD.LoadHelperArgument(instr, savedFlags);
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateUint32Opnd(instr->AsJitProfilingInstr()->loopNumber, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
        instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleRecordLoopImplicitCallFlags, m_func));
        m_lowererMD.LowerCall(instr, 0);
    }
    break;
    case Js::OpCode::InitLoopBodyCount:
        // Loop-body JIT bookkeeping: counter starts at 0.
        Assert(this->m_func->IsLoopBody());
        instr->SetSrc1(IR::IntConstOpnd::New(0, TyUint32, this->m_func));
        this->m_lowererMD.ChangeToAssign(instr);
        break;
    case Js::OpCode::StLoopBodyCount:
        Assert(this->m_func->IsLoopBody());
        this->LowerStLoopBodyCount(instr);
        break;
    case Js::OpCode::IncrLoopBodyCount:
    {
        // counter += 1, then mirror the new value into the entry point info.
        Assert(this->m_func->IsLoopBody());
        instr->m_opcode = Js::OpCode::Add_I4;
        instr->SetSrc2(IR::IntConstOpnd::New(1, TyUint32, this->m_func));
        this->m_lowererMD.EmitInt4Instr(instr);
        // Update the jittedLoopIterations field on the entryPointInfo
        IR::MemRefOpnd *iterationsAddressOpnd = IR::MemRefOpnd::New(this->m_func->GetJittedLoopIterationsSinceLastBailoutAddress(), TyUint32, this->m_func);
        InsertMove(iterationsAddressOpnd, instr->GetDst(), instr);
        break;
    }
#if !FLOATVAR
    // Only needed when doubles are heap-boxed (non-FLOATVAR builds).
    case Js::OpCode::StSlotBoxTemp:
        this->LowerStSlotBoxTemp(instr);
        break;
#endif
    case Js::OpCode::LdSlot:
    case Js::OpCode::LdSlotArr:
    {
        // Slot load. Capture the profile id (if this is a profiling instr)
        // before lowering destroys the JitProfilingInstr, then emit the
        // profiling call right after the lowered load.
        Js::ProfileId profileId;
        IR::Instr *profileBeforeInstr;
        if (instr->IsJitProfilingInstr())
        {
            profileId = instr->AsJitProfilingInstr()->profileId;
            Assert(profileId != Js::Constants::NoProfileId);
            profileBeforeInstr = instr->m_next;
        }
        else
        {
            profileId = Js::Constants::NoProfileId;
            profileBeforeInstr = nullptr;
        }
        this->LowerLdSlot(instr);
        if (profileId != Js::Constants::NoProfileId)
        {
            LowerProfileLdSlot(instr->GetDst(), instr->m_func, profileId, profileBeforeInstr);
        }
        break;
    }
    // The cases below are simple one-to-one dispatches to a lowering routine
    // or runtime helper for the opcode.
    case Js::OpCode::ChkUndecl:
        instrPrev = this->LowerChkUndecl(instr);
        break;
    case Js::OpCode::LdArrHead:
        this->LowerLdArrHead(instr);
        break;
    case Js::OpCode::StElemC:
    case Js::OpCode::StArrSegElemC:
        this->LowerStElemC(instr);
        break;
    case Js::OpCode::LdEnv:
        instrPrev = this->LowerLdEnv(instr);
        break;
    case Js::OpCode::LdAsmJsEnv:
        instrPrev = this->LowerLdAsmJsEnv(instr);
        break;
    case Js::OpCode::LdElemUndef:
        this->LowerLdElemUndef(instr);
        break;
    case Js::OpCode::LdElemUndefScoped:
        this->LowerElementUndefinedScopedMem(instr, IR::HelperOp_LdElemUndefScoped);
        break;
    case Js::OpCode::EnsureNoRootFld:
        this->LowerElementUndefined(instr, IR::HelperOp_EnsureNoRootProperty);
        break;
    case Js::OpCode::EnsureNoRootRedeclFld:
        this->LowerElementUndefined(instr, IR::HelperOp_EnsureNoRootRedeclProperty);
        break;
    case Js::OpCode::EnsureCanDeclGloFunc:
        this->LowerElementUndefined(instr, IR::HelperOp_EnsureCanDeclGloFunc);
        break;
    case Js::OpCode::ScopedEnsureNoRedeclFld:
        this->LowerElementUndefinedScoped(instr, IR::HelperOp_EnsureNoRedeclPropertyScoped);
        break;
    case Js::OpCode::LdFuncExpr:
        // src = function Expression
        LoadFuncExpression(instr);
        this->GenerateGetCurrentFunctionObject(instr);
        break;
    case Js::OpCode::LdNewTarget:
        this->GenerateLoadNewTarget(instr);
        break;
    case Js::OpCode::ChkNewCallFlag:
        this->GenerateCheckForCallFlagNew(instr);
        break;
    case Js::OpCode::StFuncExpr:
        // object.propid = src
        LowerStFld(instr, IR::HelperOp_StFunctionExpression, IR::HelperOp_StFunctionExpression, false);
        break;
    // let/const/class-member initializers lower through StFld with the
    // corresponding init helper (both polymorphic and monomorphic slots).
    case Js::OpCode::InitLetFld:
    case Js::OpCode::InitRootLetFld:
        LowerStFld(instr, IR::HelperOp_InitLetFld, IR::HelperOp_InitLetFld, false);
        break;
    case Js::OpCode::InitConstFld:
    case Js::OpCode::InitRootConstFld:
        LowerStFld(instr, IR::HelperOp_InitConstFld, IR::HelperOp_InitConstFld, false);
        break;
    case Js::OpCode::InitUndeclRootLetFld:
        LowerElementUndefined(instr, IR::HelperOp_InitUndeclRootLetFld);
        break;
    case Js::OpCode::InitUndeclRootConstFld:
        LowerElementUndefined(instr, IR::HelperOp_InitUndeclRootConstFld);
        break;
    case Js::OpCode::InitUndeclConsoleLetFld:
        LowerElementUndefined(instr, IR::HelperOp_InitUndeclConsoleLetFld);
        break;
    case Js::OpCode::InitUndeclConsoleConstFld:
        LowerElementUndefined(instr, IR::HelperOp_InitUndeclConsoleConstFld);
        break;
    case Js::OpCode::InitClassMember:
        LowerStFld(instr, IR::HelperOp_InitClassMember, IR::HelperOp_InitClassMember, false);
        break;
    // Class members with computed names are element stores, not field stores.
    case Js::OpCode::InitClassMemberComputedName:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberComputedName);
        break;
    case Js::OpCode::InitClassMemberGetComputedName:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberGetComputedName);
        break;
    case Js::OpCode::InitClassMemberSetComputedName:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberSetComputedName);
        break;
    case Js::OpCode::InitClassMemberGet:
        instrPrev = this->LowerStFld(instr, IR::HelperOp_InitClassMemberGet, IR::HelperOp_InitClassMemberGet, false);
        break;
    case Js::OpCode::InitClassMemberSet:
        instrPrev = this->LowerStFld(instr, IR::HelperOp_InitClassMemberSet, IR::HelperOp_InitClassMemberSet, false);
        break;
    case Js::OpCode::NewStackFrameDisplay:
        this->LowerLdFrameDisplay(instr, m_func->DoStackFrameDisplay());
        break;
    case Js::OpCode::LdFrameDisplay:
        this->LowerLdFrameDisplay(instr, false);
        break;
    case Js::OpCode::LdInnerFrameDisplay:
        this->LowerLdInnerFrameDisplay(instr);
        break;
    // Exception handling and miscellaneous control opcodes.
    case Js::OpCode::Throw:
    case Js::OpCode::InlineThrow:
    case Js::OpCode::EHThrow:
        this->LowerUnaryHelperMem(instr, IR::HelperOp_Throw);
        break;
    case Js::OpCode::TryCatch:
        instrPrev = this->LowerTry(instr, true /*try-catch*/);
        break;
    case Js::OpCode::TryFinally:
        instrPrev = this->LowerTry(instr, false /*try-finally*/);
        break;
    case Js::OpCode::Catch:
        instrPrev = this->LowerCatch(instr);
        break;
    case Js::OpCode::Finally:
        instr->Remove();
        break;
    case Js::OpCode::LeaveNull:
        // With optimized try (or simple jit with bailouts) the region exit
        // needs no runtime work; otherwise the MD lowering emits it.
        if (this->m_func->DoOptimizeTry() || (this->m_func->IsSimpleJit() && this->m_func->hasBailout))
        {
            instr->Remove();
        }
        else
        {
            instrPrev = m_lowererMD.LowerLeaveNull(instr);
        }
        break;
    case Js::OpCode::Leave:
        if (this->m_func->HasTry() && this->m_func->DoOptimizeTry())
        {
            // Required in Register Allocator to mark region boundaries
            break;
        }
        instrPrev = this->LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), false /*fromFinalLower*/, instr->AsBranchInstr()->m_isOrphanedLeave);
        break;
    case Js::OpCode::BailOnException:
        instrPrev = this->LowerBailOnException(instr);
        break;
    case Js::OpCode::BailOnEarlyExit:
        instrPrev = this->LowerBailOnEarlyExit(instr);
        break;
    case Js::OpCode::RuntimeTypeError:
    case Js::OpCode::InlineRuntimeTypeError:
        this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeTypeError);
        break;
    case Js::OpCode::RuntimeReferenceError:
    case Js::OpCode::InlineRuntimeReferenceError:
        this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeReferenceError);
        break;
    case Js::OpCode::Break:
        // Inline breakpoint: for now do nothing.
        break;
    case Js::OpCode::Nop:
        // This may need support for debugging the JIT, but for now just remove the instruction.
        instr->Remove();
        break;
    case Js::OpCode::Unused:
        // Currently Unused is used with ScopedLdInst to keep the second dst alive, but we don't need to lower it.
        instr->Remove();
        break;
  2329. case Js::OpCode::StatementBoundary:
  2330. // This instruction is merely to help convey source info through the IR
  2331. // and eventually generate the nativeOffset maps.
  2332. #if DBG_DUMP && DBG
  2333. // If we have a JITStatementBreakpoint, then we should break on this statement
  2334. {
  2335. uint32 statementIndex = instr->AsPragmaInstr()->m_statementIndex;
  2336. if (Js::Configuration::Global.flags.StatementDebugBreak.Contains(instr->m_func->GetSourceContextId(), instr->m_func->GetLocalFunctionId(), statementIndex))
  2337. {
  2338. IR::Instr* tempinstr = instr;
  2339. Assert(tempinstr != nullptr);
  2340. // go past any labels, and then add a debug breakpoint
  2341. while (tempinstr->m_next != nullptr && tempinstr->m_next->m_opcode == Js::OpCode::Label)
  2342. {
  2343. tempinstr = tempinstr->m_next;
  2344. }
  2345. this->m_lowererMD.GenerateDebugBreak(tempinstr);
  2346. }
  2347. }
  2348. #endif
  2349. break;
  2350. case Js::OpCode::BailOnNotPolymorphicInlinee:
  2351. instrPrev = LowerBailOnNotPolymorphicInlinee(instr);
  2352. break;
  2353. case Js::OpCode::BailOnNoSimdTypeSpec:
  2354. case Js::OpCode::BailOnNoProfile:
  2355. this->GenerateBailOut(instr, nullptr, nullptr);
  2356. break;
  2357. case Js::OpCode::BailOnNotSpreadable:
  2358. instrPrev = this->LowerBailOnNotSpreadable(instr);
  2359. break;
  2360. case Js::OpCode::BailOnNotStackArgs:
  2361. instrPrev = this->LowerBailOnNotStackArgs(instr);
  2362. break;
  2363. case Js::OpCode::BailOnEqual:
  2364. case Js::OpCode::BailOnNotEqual:
  2365. instrPrev = this->LowerBailOnEqualOrNotEqual(instr);
  2366. break;
  2367. case Js::OpCode::BailOnNegative:
  2368. LowerBailOnNegative(instr);
  2369. break;
  2370. #ifdef ENABLE_SCRIPT_DEBUGGING
  2371. case Js::OpCode::BailForDebugger:
  2372. instrPrev = this->LowerBailForDebugger(instr);
  2373. break;
  2374. #endif
  2375. case Js::OpCode::BailOnNotObject:
  2376. instrPrev = this->LowerBailOnNotObject(instr);
  2377. break;
  2378. case Js::OpCode::BailOnNotBuiltIn:
  2379. instrPrev = this->LowerBailOnNotBuiltIn(instr);
  2380. break;
  2381. case Js::OpCode::BailOnNotArray:
  2382. {
  2383. IR::Instr *bailOnNotArray = nullptr, *bailOnMissingValue = nullptr;
  2384. SplitBailOnNotArray(instr, &bailOnNotArray, &bailOnMissingValue);
  2385. IR::RegOpnd *const arrayOpnd = LowerBailOnNotArray(bailOnNotArray);
  2386. if (bailOnMissingValue)
  2387. {
  2388. LowerBailOnMissingValue(bailOnMissingValue, arrayOpnd);
  2389. }
  2390. break;
  2391. }
  2392. case Js::OpCode::BoundCheck:
  2393. case Js::OpCode::UnsignedBoundCheck:
  2394. LowerBoundCheck(instr);
  2395. break;
  2396. case Js::OpCode::BailTarget:
  2397. instrPrev = this->LowerBailTarget(instr);
  2398. break;
  2399. case Js::OpCode::InlineeStart:
  2400. this->LowerInlineeStart(instr);
  2401. break;
  2402. case Js::OpCode::EndCallForPolymorphicInlinee:
  2403. instr->Remove();
  2404. break;
  2405. case Js::OpCode::InlineeEnd:
  2406. this->LowerInlineeEnd(instr);
  2407. break;
  2408. case Js::OpCode::InlineBuiltInEnd:
  2409. case Js::OpCode::InlineNonTrackingBuiltInEnd:
  2410. this->LowerInlineBuiltIn(instr);
  2411. break;
  2412. case Js::OpCode::ExtendArg_A:
  2413. if (instr->GetSrc1()->IsRegOpnd())
  2414. {
  2415. IR::RegOpnd *src1 = instr->GetSrc1()->AsRegOpnd();
  2416. this->addToLiveOnBackEdgeSyms->Clear(src1->m_sym->m_id);
  2417. }
  2418. instr->Remove();
  2419. break;
  2420. case Js::OpCode::InlineBuiltInStart:
  2421. case Js::OpCode::BytecodeArgOutUse:
  2422. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  2423. instr->Remove();
  2424. break;
  2425. case Js::OpCode::DeadBrEqual:
  2426. this->LowerBinaryHelperMem(instr, IR::HelperOp_Equal);
  2427. break;
  2428. case Js::OpCode::DeadBrSrEqual:
  2429. this->LowerBinaryHelperMem(instr, IR::HelperOp_StrictEqual);
  2430. break;
  2431. case Js::OpCode::DeadBrRelational:
  2432. this->LowerBinaryHelperMem(instr, IR::HelperOp_Greater);
  2433. break;
  2434. case Js::OpCode::DeadBrOnHasProperty:
  2435. this->LowerUnaryHelperMem(instr, IR::HelperOp_HasProperty);
  2436. break;
  2437. case Js::OpCode::DeletedNonHelperBranch:
  2438. break;
  2439. case Js::OpCode::InitClass:
  2440. instrPrev = this->LowerInitClass(instr);
  2441. break;
  2442. case Js::OpCode::NewConcatStrMulti:
  2443. this->LowerNewConcatStrMulti(instr);
  2444. break;
  2445. case Js::OpCode::NewConcatStrMultiBE:
  2446. this->LowerNewConcatStrMultiBE(instr);
  2447. break;
  2448. case Js::OpCode::SetConcatStrMultiItem:
  2449. this->LowerSetConcatStrMultiItem(instr);
  2450. break;
  2451. case Js::OpCode::SetConcatStrMultiItemBE:
  2452. Assert(instr->GetSrc1()->IsRegOpnd());
  2453. this->addToLiveOnBackEdgeSyms->Clear(instr->GetSrc1()->GetStackSym()->m_id);
  2454. // code corresponding to it should already have been generated while lowering NewConcatStrMultiBE
  2455. instr->Remove();
  2456. break;
  2457. case Js::OpCode::Conv_Str:
  2458. this->LowerConvStr(instr);
  2459. break;
  2460. case Js::OpCode::Coerce_Str:
  2461. this->LowerCoerseStr(instr);
  2462. break;
  2463. case Js::OpCode::Coerce_StrOrRegex:
  2464. this->LowerCoerseStrOrRegex(instr);
  2465. break;
  2466. case Js::OpCode::Coerce_Regex:
  2467. this->LowerCoerseRegex(instr);
  2468. break;
  2469. case Js::OpCode::Conv_PrimStr:
  2470. this->LowerConvPrimStr(instr);
  2471. break;
  2472. case Js::OpCode::ClearAttributes:
  2473. this->LowerBinaryHelper(instr, IR::HelperOP_ClearAttributes);
  2474. break;
  2475. case Js::OpCode::SpreadArrayLiteral:
  2476. this->LowerSpreadArrayLiteral(instr);
  2477. break;
  2478. case Js::OpCode::CallIExtended:
  2479. {
  2480. // Currently, the only use for CallIExtended is a call that uses spread.
  2481. Assert(IsSpreadCall(instr));
  2482. instrPrev = this->LowerSpreadCall(instr, Js::CallFlags_None);
  2483. break;
  2484. }
  2485. case Js::OpCode::CallIExtendedNew:
  2486. {
  2487. // Currently, the only use for CallIExtended is a call that uses spread.
  2488. Assert(IsSpreadCall(instr));
  2489. instrPrev = this->LowerSpreadCall(instr, Js::CallFlags_New);
  2490. break;
  2491. }
  2492. case Js::OpCode::CallIExtendedNewTargetNew:
  2493. {
  2494. // Currently, the only use for CallIExtended is a call that uses spread.
  2495. Assert(IsSpreadCall(instr));
  2496. instrPrev = this->LowerSpreadCall(instr, (Js::CallFlags)(Js::CallFlags_New | Js::CallFlags_ExtraArg | Js::CallFlags_NewTarget));
  2497. break;
  2498. }
  2499. case Js::OpCode::LdSpreadIndices:
  2500. instr->Remove();
  2501. break;
  2502. case Js::OpCode::LdHomeObj:
  2503. this->GenerateLdHomeObj(instr);
  2504. break;
  2505. case Js::OpCode::LdHomeObjProto:
  2506. this->GenerateLdHomeObjProto(instr);
  2507. break;
  2508. case Js::OpCode::LdFuncObj:
  2509. this->GenerateLdFuncObj(instr);
  2510. break;
  2511. case Js::OpCode::LdFuncObjProto:
  2512. this->GenerateLdFuncObjProto(instr);
  2513. break;
  2514. case Js::OpCode::ImportCall:
  2515. {
  2516. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  2517. IR::Opnd *functionObjOpnd = nullptr;
  2518. m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  2519. LoadScriptContext(instr);
  2520. m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  2521. m_lowererMD.LoadHelperArgument(instr, functionObjOpnd);
  2522. m_lowererMD.ChangeToHelperCall(instr, IR::HelperImportCall);
  2523. break;
  2524. }
  2525. case Js::OpCode::SetComputedNameVar:
  2526. {
  2527. IR::Opnd *src2Opnd = instr->UnlinkSrc2();
  2528. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  2529. m_lowererMD.LoadHelperArgument(instr, src2Opnd);
  2530. m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  2531. m_lowererMD.ChangeToHelperCall(instr, IR::HelperSetComputedNameVar);
  2532. break;
  2533. }
  2534. case Js::OpCode::InlineeMetaArg:
  2535. {
  2536. m_lowererMD.ChangeToAssign(instr);
  2537. break;
  2538. }
  2539. case Js::OpCode::Yield:
  2540. {
  2541. instr->FreeSrc1(); // Source is not actually used by the backend other than to calculate lifetime
  2542. IR::Opnd* dstOpnd = instr->UnlinkDst();
  2543. // prm2 is the ResumeYieldData pointer per calling convention established in JavascriptGenerator::CallGenerator
  2544. // This is the value the bytecode expects to be in the dst register of the Yield opcode after resumption.
  2545. // Load it here after the bail-in.
  2546. StackSym *resumeYieldDataSym = StackSym::NewImplicitParamSym(4, m_func);
  2547. m_func->SetArgOffset(resumeYieldDataSym, (LowererMD::GetFormalParamOffset() + 1) * MachPtr);
  2548. IR::SymOpnd * resumeYieldDataOpnd = IR::SymOpnd::New(resumeYieldDataSym, TyMachPtr, m_func);
  2549. AssertMsg(instr->m_next->IsLabelInstr(), "Expect the resume label to immediately follow Yield instruction");
  2550. InsertMove(dstOpnd, resumeYieldDataOpnd, instr->m_next->m_next);
  2551. GenerateBailOut(instr);
  2552. break;
  2553. }
  2554. case Js::OpCode::ResumeYield:
  2555. case Js::OpCode::ResumeYieldStar:
  2556. {
  2557. IR::Opnd *srcOpnd1 = instr->UnlinkSrc1();
  2558. IR::Opnd *srcOpnd2 = instr->m_opcode == Js::OpCode::ResumeYieldStar ? instr->UnlinkSrc2() : IR::AddrOpnd::NewNull(m_func);
  2559. m_lowererMD.LoadHelperArgument(instr, srcOpnd2);
  2560. m_lowererMD.LoadHelperArgument(instr, srcOpnd1);
  2561. m_lowererMD.ChangeToHelperCall(instr, IR::HelperResumeYield);
  2562. break;
  2563. }
  2564. case Js::OpCode::GeneratorResumeJumpTable:
  2565. {
  2566. // Lowered in LowerPrologEpilog so that the jumps introduced are not considered to be part of the flow for the RegAlloc phase.
  2567. // Introduce a BailOutNoSave label if there were yield points that were elided due to optimizations. They could still be hit
  2568. // if an active generator object had been paused at such a yield point when the function body was JITed. So safe guard such a
  2569. // case by having the native code simply jump back to the interpreter for such yield points.
  2570. IR::LabelInstr *bailOutNoSaveLabel = nullptr;
  2571. m_func->MapUntilYieldOffsetResumeLabels([this, &bailOutNoSaveLabel](int, const YieldOffsetResumeLabel& yorl)
  2572. {
  2573. if (yorl.Second() == nullptr)
  2574. {
  2575. if (bailOutNoSaveLabel == nullptr)
  2576. {
  2577. bailOutNoSaveLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  2578. }
  2579. return true;
  2580. }
  2581. return false;
  2582. });
  2583. // Insert the bailoutnosave label somewhere along with a call to BailOutNoSave helper
  2584. if (bailOutNoSaveLabel != nullptr)
  2585. {
  2586. IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->m_prev;
  2587. IR::LabelInstr * exitTargetInstr;
  2588. if (exitPrevInstr->IsLabelInstr())
  2589. {
  2590. exitTargetInstr = exitPrevInstr->AsLabelInstr();
  2591. exitPrevInstr = exitPrevInstr->m_prev;
  2592. }
  2593. else
  2594. {
  2595. exitTargetInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  2596. exitPrevInstr->InsertAfter(exitTargetInstr);
  2597. }
  2598. bailOutNoSaveLabel->m_hasNonBranchRef = true;
  2599. bailOutNoSaveLabel->isOpHelper = true;
  2600. IR::Instr* bailOutCall = IR::Instr::New(Js::OpCode::Call, m_func);
  2601. exitPrevInstr->InsertAfter(bailOutCall);
  2602. exitPrevInstr->InsertAfter(bailOutNoSaveLabel);
  2603. exitPrevInstr->InsertAfter(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, exitTargetInstr, m_func));
  2604. IR::RegOpnd * frameRegOpnd = IR::RegOpnd::New(nullptr, LowererMD::GetRegFramePointer(), TyMachPtr, m_func);
  2605. m_lowererMD.LoadHelperArgument(bailOutCall, frameRegOpnd);
  2606. m_lowererMD.ChangeToHelperCall(bailOutCall, IR::HelperNoSaveRegistersBailOutForElidedYield);
  2607. m_func->m_bailOutNoSaveLabel = bailOutNoSaveLabel;
  2608. }
  2609. break;
  2610. }
  2611. case Js::OpCode::FrameDisplayCheck:
  2612. instrPrev = this->LowerFrameDisplayCheck(instr);
  2613. break;
  2614. case Js::OpCode::SlotArrayCheck:
  2615. instrPrev = this->LowerSlotArrayCheck(instr);
  2616. break;
  2617. #if DBG
  2618. case Js::OpCode::CheckLowerIntBound:
  2619. instrPrev = this->LowerCheckLowerIntBound(instr);
  2620. break;
  2621. case Js::OpCode::CheckUpperIntBound:
  2622. instrPrev = this->LowerCheckUpperIntBound(instr);
  2623. break;
  2624. #endif
  2625. #ifdef ENABLE_WASM
  2626. case Js::OpCode::Copysign_A:
  2627. m_lowererMD.GenerateCopysign(instr);
  2628. break;
  2629. case Js::OpCode::Trunc_A:
  2630. if (!AutoSystemInfo::Data.SSE4_1Available())
  2631. {
  2632. m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_TruncFlt, IR::HelperDirectMath_TruncDb);
  2633. break;
  2634. }
  2635. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  2636. break;
  2637. case Js::OpCode::Nearest_A:
  2638. if (!AutoSystemInfo::Data.SSE4_1Available())
  2639. {
  2640. m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_NearestFlt, IR::HelperDirectMath_NearestDb);
  2641. break;
  2642. }
  2643. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  2644. break;
  2645. case Js::OpCode::ThrowRuntimeError:
  2646. GenerateThrow(instr->UnlinkSrc1(), instr);
  2647. instr->Remove();
  2648. break;
  2649. #endif //ENABLE_WASM
  2650. case Js::OpCode::SpeculatedLoadFence:
  2651. {
  2652. AssertOrFailFast(instr->m_kind == IR::InstrKindByteCodeUses);
  2653. #ifdef _M_ARM
  2654. AssertOrFailFastMsg(false, "We shouldn't perform this hoisting on ARM");
  2655. #else
  2656. IR::ByteCodeUsesInstr* bcuInstr = static_cast<IR::ByteCodeUsesInstr*>(instr);
  2657. // Most of the time we're not going to be able to remove any masking in a loop, and
  2658. // this instruction can be removed.
  2659. if (bcuInstr->GetByteCodeUpwardExposedUsed() != nullptr && !bcuInstr->GetByteCodeUpwardExposedUsed()->IsEmpty())
  2660. {
  2661. // The generated code is:
  2662. //
  2663. // cmp rax, rax
  2664. // for each symbol to mask:
  2665. // reg(sym) = cmovne reg(sym), reg(sym)
  2666. IR::RegOpnd* temp = IR::RegOpnd::New(TyUint8, instr->m_func);
  2667. InsertMove(temp, IR::IntConstOpnd::New(0, TyUint8, instr->m_func), instr);
  2668. IR::Instr * cmp = IR::Instr::New(Js::OpCode::CMP, instr->m_func);
  2669. cmp->SetSrc1(temp);
  2670. cmp->SetSrc2(temp);
  2671. instr->InsertBefore(cmp);
  2672. m_lowererMD.Legalize(cmp);
  2673. FOREACH_BITSET_IN_SPARSEBV(symid, bcuInstr->GetByteCodeUpwardExposedUsed())
  2674. {
  2675. StackSym* thisSym = instr->m_func->m_symTable->Find(symid)->AsStackSym();
  2676. IR::RegOpnd* thisSymReg = IR::RegOpnd::New(thisSym, thisSym->GetType(), instr->m_func);
  2677. Js::OpCode specBlockOp = thisSymReg->IsFloat() ? LowererMD::MDSpecBlockFNEOpcode : LowererMD::MDSpecBlockNEOpcode;
  2678. IR::Instr* cmov = IR::Instr::New(specBlockOp, thisSymReg, thisSymReg, thisSymReg, instr->m_func);
  2679. instr->InsertBefore(cmov);
  2680. m_lowererMD.Legalize(cmov);
  2681. } NEXT_BITSET_IN_SPARSEBV;
  2682. }
  2683. #endif
  2684. instr->Remove();
  2685. break;
  2686. }
  2687. case Js::OpCode::SpreadObjectLiteral:
  2688. this->LowerBinaryHelperMem(instr, IR::HelperSpreadObjectLiteral);
  2689. break;
  2690. case Js::OpCode::Restify:
  2691. instrPrev = this->LowerRestify(instr);
  2692. break;
  2693. case Js::OpCode::NewPropIdArrForCompProps:
  2694. this->LowerUnaryHelperMem(instr, IR::HelperNewPropIdArrForCompProps);
  2695. break;
  2696. case Js::OpCode::StPropIdArrFromVar:
  2697. instrPrev = this->LowerStPropIdArrFromVar(instr);
  2698. break;
  2699. default:
  2700. #ifdef ENABLE_WASM_SIMD
  2701. if (IsSimd128Opcode(instr->m_opcode))
  2702. {
  2703. instrPrev = m_lowererMD.Simd128Instruction(instr);
  2704. break;
  2705. }
  2706. #endif
  2707. AssertMsg(instr->IsLowered(), "Unknown opcode");
  2708. if(!instr->IsLowered())
  2709. {
  2710. Fatal();
  2711. }
  2712. break;
  2713. }
  2714. #if DBG
  2715. LegalizeVerifyRange(instrPrev ? instrPrev->m_next : instrStart,
  2716. verifyLegalizeInstrNext ? verifyLegalizeInstrNext->m_prev : nullptr);
  2717. this->helperCallCheckState = HelperCallCheckState_None;
  2718. #endif
  2719. } NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;
  2720. Assert(this->outerMostLoopLabel == nullptr);
  2721. }
  2722. IR::Opnd *
  2723. Lowerer::LoadFunctionInfoOpnd(IR::Instr * instr)
  2724. {
  2725. return IR::AddrOpnd::New(instr->m_func->GetWorkItem()->GetJITTimeInfo()->GetFunctionInfoAddr(), IR::AddrOpndKindDynamicFunctionInfo, instr->m_func);
  2726. }
  2727. IR::Instr *
  2728. Lowerer::LoadFunctionBody(IR::Instr * instr)
  2729. {
  2730. return m_lowererMD.LoadHelperArgument(instr, LoadFunctionBodyOpnd(instr));
  2731. }
  2732. IR::Instr *
  2733. Lowerer::LoadScriptContext(IR::Instr * instr)
  2734. {
  2735. return m_lowererMD.LoadHelperArgument(instr, LoadScriptContextOpnd(instr));
  2736. }
  2737. IR::Opnd *
  2738. Lowerer::LoadFunctionBodyOpnd(IR::Instr * instr)
  2739. {
  2740. return IR::AddrOpnd::New(instr->m_func->GetJITFunctionBody()->GetAddr(), IR::AddrOpndKindDynamicFunctionBody, instr->m_func);
  2741. }
  2742. IR::Opnd *
  2743. Lowerer::LoadScriptContextOpnd(IR::Instr * instr)
  2744. {
  2745. return IR::AddrOpnd::New(m_func->GetScriptContextInfo()->GetAddr(), IR::AddrOpndKindDynamicScriptContext, this->m_func);
  2746. }
  2747. IR::Opnd *
  2748. Lowerer::LoadScriptContextValueOpnd(IR::Instr * instr, ScriptContextValue valueType)
  2749. {
  2750. ScriptContextInfo *scriptContextInfo = instr->m_func->GetScriptContextInfo();
  2751. switch (valueType)
  2752. {
  2753. case ScriptContextValue::ScriptContextNumberAllocator:
  2754. return IR::AddrOpnd::New(scriptContextInfo->GetNumberAllocatorAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func);
  2755. case ScriptContextValue::ScriptContextRecycler:
  2756. return IR::AddrOpnd::New(scriptContextInfo->GetRecyclerAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func);
  2757. default:
  2758. Assert(false);
  2759. return nullptr;
  2760. }
  2761. }
  2762. IR::Opnd *
  2763. Lowerer::LoadLibraryValueOpnd(IR::Instr * instr, LibraryValue valueType)
  2764. {
  2765. ScriptContextInfo *scriptContextInfo = instr->m_func->GetScriptContextInfo();
  2766. switch (valueType)
  2767. {
  2768. case LibraryValue::ValueEmptyString:
  2769. return IR::AddrOpnd::New(scriptContextInfo->GetEmptyStringAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  2770. case LibraryValue::ValueUndeclBlockVar:
  2771. return IR::AddrOpnd::New(scriptContextInfo->GetUndeclBlockVarAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  2772. case LibraryValue::ValueUndefined:
  2773. return IR::AddrOpnd::New(scriptContextInfo->GetUndefinedAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  2774. case LibraryValue::ValueNull:
  2775. return IR::AddrOpnd::New(scriptContextInfo->GetNullAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  2776. case LibraryValue::ValueTrue:
  2777. return IR::AddrOpnd::New(scriptContextInfo->GetTrueAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  2778. case LibraryValue::ValueFalse:
  2779. return IR::AddrOpnd::New(scriptContextInfo->GetFalseAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  2780. case LibraryValue::ValueNegativeZero:
  2781. return IR::AddrOpnd::New(scriptContextInfo->GetNegativeZeroAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  2782. case LibraryValue::ValueNumberTypeStatic:
  2783. return IR::AddrOpnd::New(scriptContextInfo->GetNumberTypeStaticAddr(), IR::AddrOpndKindDynamicType, instr->m_func, true);
  2784. case LibraryValue::ValueStringTypeStatic:
  2785. return IR::AddrOpnd::New(scriptContextInfo->GetStringTypeStaticAddr(), IR::AddrOpndKindDynamicType, instr->m_func, true);
  2786. case LibraryValue::ValueSymbolTypeStatic:
  2787. return IR::AddrOpnd::New(scriptContextInfo->GetSymbolTypeStaticAddr(), IR::AddrOpndKindDynamicType, instr->m_func, true);
  2788. case LibraryValue::ValueObjectType:
  2789. return IR::AddrOpnd::New(scriptContextInfo->GetObjectTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
  2790. case LibraryValue::ValueObjectHeaderInlinedType:
  2791. return IR::AddrOpnd::New(scriptContextInfo->GetObjectHeaderInlinedTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
  2792. case LibraryValue::ValueRegexType:
  2793. return IR::AddrOpnd::New(scriptContextInfo->GetRegexTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
  2794. case LibraryValue::ValueArrayConstructor:
  2795. return IR::AddrOpnd::New(scriptContextInfo->GetArrayConstructorAddr(), IR::AddrOpndKindDynamicVar, instr->m_func);
  2796. case LibraryValue::ValueJavascriptArrayType:
  2797. return IR::AddrOpnd::New(scriptContextInfo->GetArrayTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
  2798. case LibraryValue::ValueNativeIntArrayType:
  2799. return IR::AddrOpnd::New(scriptContextInfo->GetNativeIntArrayTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
  2800. case LibraryValue::ValueNativeFloatArrayType:
  2801. return IR::AddrOpnd::New(scriptContextInfo->GetNativeFloatArrayTypeAddr(), IR::AddrOpndKindDynamicType, instr->m_func);
  2802. case LibraryValue::ValueConstructorCacheDefaultInstance:
  2803. return IR::AddrOpnd::New(m_func->GetThreadContextInfo()->GetConstructorCacheDefaultInstanceAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func);
  2804. case LibraryValue::ValueAbsDoubleCst:
  2805. return IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetAbsDoubleCstAddr(), TyMachDouble, instr->m_func, IR::AddrOpndKindDynamicDoubleRef);
  2806. case LibraryValue::ValueCharStringCache:
  2807. return IR::AddrOpnd::New(scriptContextInfo->GetCharStringCacheAddr(), IR::AddrOpndKindDynamicCharStringCache, instr->m_func);
  2808. default:
  2809. Assert(UNREACHED);
  2810. return nullptr;
  2811. }
  2812. }
  2813. IR::Opnd *
  2814. Lowerer::LoadVTableValueOpnd(IR::Instr * instr, VTableValue vtableType)
  2815. {
  2816. return IR::AddrOpnd::New((Js::Var)instr->m_func->GetScriptContextInfo()->GetVTableAddress(vtableType), IR::AddrOpndKindDynamicVtable, this->m_func);
  2817. }
  2818. IR::Opnd *
  2819. Lowerer::LoadOptimizationOverridesValueOpnd(IR::Instr *instr, OptimizationOverridesValue valueType)
  2820. {
  2821. switch (valueType)
  2822. {
  2823. case OptimizationOverridesValue::OptimizationOverridesSideEffects:
  2824. return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetSideEffectsAddr(), TyInt32, instr->m_func);
  2825. case OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable:
  2826. return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetArraySetElementFastPathVtableAddr(), TyMachPtr, instr->m_func);
  2827. case OptimizationOverridesValue::OptimizationOverridesIntArraySetElementFastPathVtable:
  2828. return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetIntArraySetElementFastPathVtableAddr(), TyMachPtr, instr->m_func);
  2829. case OptimizationOverridesValue::OptimizationOverridesFloatArraySetElementFastPathVtable:
  2830. return IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetFloatArraySetElementFastPathVtableAddr(), TyMachPtr, instr->m_func);
  2831. default:
  2832. Assert(UNREACHED);
  2833. return nullptr;
  2834. }
  2835. }
  2836. IR::Opnd *
  2837. Lowerer::LoadNumberAllocatorValueOpnd(IR::Instr *instr, NumberAllocatorValue valueType)
  2838. {
  2839. ScriptContextInfo *scriptContext = instr->m_func->GetScriptContextInfo();
  2840. bool allowNativeCodeBumpAllocation = scriptContext->GetRecyclerAllowNativeCodeBumpAllocation();
  2841. switch (valueType)
  2842. {
  2843. case NumberAllocatorValue::NumberAllocatorEndAddress:
  2844. return IR::MemRefOpnd::New(((char *)scriptContext->GetNumberAllocatorAddr()) + Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset(), TyMachPtr, instr->m_func);
  2845. case NumberAllocatorValue::NumberAllocatorFreeObjectList:
  2846. return IR::MemRefOpnd::New(
  2847. ((char *)scriptContext->GetNumberAllocatorAddr()) +
  2848. (allowNativeCodeBumpAllocation ? Js::RecyclerJavascriptNumberAllocator::GetFreeObjectListOffset() : Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset()),
  2849. TyMachPtr, instr->m_func);
  2850. default:
  2851. Assert(false);
  2852. return nullptr;
  2853. }
  2854. }
  2855. IR::Opnd *
  2856. Lowerer::LoadIsInstInlineCacheOpnd(IR::Instr * instr, uint inlineCacheIndex)
  2857. {
  2858. intptr_t inlineCache = instr->m_func->GetJITFunctionBody()->GetIsInstInlineCache(inlineCacheIndex);
  2859. return IR::AddrOpnd::New(inlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func);
  2860. }
  2861. IR::Opnd *
  2862. Lowerer::LoadRuntimeInlineCacheOpnd(IR::Instr * instr, IR::PropertySymOpnd * propertySymOpnd, bool isHelper)
  2863. {
  2864. Assert(propertySymOpnd->m_runtimeInlineCache != 0);
  2865. IR::Opnd * inlineCacheOpnd = nullptr;
  2866. if (instr->m_func->GetJITFunctionBody()->HasInlineCachesOnFunctionObject() && !instr->m_func->IsInlinee())
  2867. {
  2868. inlineCacheOpnd = this->GetInlineCacheFromFuncObjectForRuntimeUse(instr, propertySymOpnd, isHelper);
  2869. }
  2870. else
  2871. {
  2872. intptr_t inlineCache = propertySymOpnd->m_runtimeInlineCache;
  2873. inlineCacheOpnd = IR::AddrOpnd::New(inlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func, /* dontEncode */ true);
  2874. }
  2875. return inlineCacheOpnd;
  2876. }
  2877. bool
  2878. Lowerer::TryGenerateFastCmSrEq(IR::Instr * instr)
  2879. {
  2880. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2881. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2882. if (srcReg2 && IsConstRegOpnd(srcReg2))
  2883. {
  2884. return m_lowererMD.GenerateFastCmSrEqConst(instr);
  2885. }
  2886. else if (srcReg1 && IsConstRegOpnd(srcReg1))
  2887. {
  2888. instr->SwapOpnds();
  2889. return m_lowererMD.GenerateFastCmSrEqConst(instr);
  2890. }
  2891. else if (srcReg2 && (srcReg2->m_sym->m_isStrConst))
  2892. {
  2893. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_String);
  2894. return true;
  2895. }
  2896. else if (srcReg1 && (srcReg1->m_sym->m_isStrConst))
  2897. {
  2898. instr->SwapOpnds();
  2899. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_String);
  2900. return true;
  2901. }
  2902. else if (srcReg2 && (srcReg2->m_sym->m_isStrEmpty))
  2903. {
  2904. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_EmptyString);
  2905. return true;
  2906. }
  2907. else if (srcReg1 && (srcReg1->m_sym->m_isStrEmpty))
  2908. {
  2909. instr->SwapOpnds();
  2910. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_EmptyString);
  2911. return true;
  2912. }
  2913. return false;
  2914. }
  2915. bool
  2916. Lowerer::GenerateFastBrSrEq(IR::Instr * instr, IR::RegOpnd * srcReg1, IR::RegOpnd * srcReg2, IR::Instr ** pInstrPrev, bool noMathFastPath)
  2917. {
  2918. if (srcReg2 && IsConstRegOpnd(srcReg2))
  2919. {
  2920. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg2->m_sym->GetConstOpnd(), true);
  2921. instr->Remove();
  2922. return true;
  2923. }
  2924. else if (srcReg1 && IsConstRegOpnd(srcReg1))
  2925. {
  2926. instr->SwapOpnds();
  2927. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg1->m_sym->GetConstOpnd(), true);
  2928. instr->Remove();
  2929. return true;
  2930. }
  2931. else if (srcReg2 && (srcReg2->m_sym->m_isStrConst))
  2932. {
  2933. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualString, noMathFastPath, false);
  2934. return true;
  2935. }
  2936. else if (srcReg1 && (srcReg1->m_sym->m_isStrConst))
  2937. {
  2938. instr->SwapOpnds();
  2939. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualString, noMathFastPath, false);
  2940. return true;
  2941. }
  2942. else if (srcReg2 && (srcReg2->m_sym->m_isStrEmpty))
  2943. {
  2944. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualEmptyString, noMathFastPath, false);
  2945. return true;
  2946. }
  2947. else if (srcReg1 && (srcReg1->m_sym->m_isStrEmpty))
  2948. {
  2949. instr->SwapOpnds();
  2950. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualEmptyString, noMathFastPath, false);
  2951. return true;
  2952. }
  2953. return false;
  2954. }
  2955. ///----------------------------------------------------------------------------
  2956. ///
  2957. /// Lowerer::GenerateFastBrConst
  2958. ///
  2959. ///----------------------------------------------------------------------------
  2960. IR::BranchInstr *
  2961. Lowerer::GenerateFastBrConst(IR::BranchInstr *branchInstr, IR::Opnd * constOpnd, bool isEqual)
  2962. {
  2963. Assert(constOpnd->IsAddrOpnd() || constOpnd->IsIntConstOpnd());
  2964. //
  2965. // Given:
  2966. // BrSrEq_A $L1, s1, s2
  2967. // where s2 is either 'null', 'undefined', 'true' or 'false'
  2968. //
  2969. // Generate:
  2970. //
  2971. // CMP s1, s2
  2972. // JEQ/JNE $L1
  2973. //
  2974. // TODO: OOP JIT, enable this assert
  2975. //Assert(this->IsConstRegOpnd(branchInstr->GetSrc2()->AsRegOpnd()));
  2976. IR::RegOpnd *opnd = GetRegOpnd(branchInstr->GetSrc1(), branchInstr, m_func, TyVar);
  2977. IR::BranchInstr *newBranch;
  2978. newBranch = InsertCompareBranch(opnd, constOpnd, isEqual ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A, branchInstr->GetTarget(), branchInstr);
  2979. return newBranch;
  2980. }
  2981. bool
  2982. Lowerer::TryGenerateFastBrEq(IR::Instr * instr)
  2983. {
  2984. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2985. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2986. bool isConst = false;
  2987. if (srcReg1 && this->IsNullOrUndefRegOpnd(srcReg1))
  2988. {
  2989. instr->SwapOpnds();
  2990. isConst = true;
  2991. }
  2992. // Fast path for == null or == undefined
  2993. // if (src == null || src == undefined)
  2994. if (isConst || (srcReg2 && this->IsNullOrUndefRegOpnd(srcReg2)))
  2995. {
  2996. IR::BranchInstr *newBranch;
  2997. newBranch = this->GenerateFastBrConst(instr->AsBranchInstr(),
  2998. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
  2999. true);
  3000. this->GenerateFastBrConst(instr->AsBranchInstr(),
  3001. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
  3002. true);
  3003. instr->Remove();
  3004. return true;
  3005. }
  3006. return false;
  3007. }
  3008. bool
  3009. Lowerer::TryGenerateFastBrNeq(IR::Instr * instr)
  3010. {
  3011. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  3012. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  3013. bool isConst = false;
  3014. if (srcReg1 && this->IsNullOrUndefRegOpnd(srcReg1))
  3015. {
  3016. instr->SwapOpnds();
  3017. isConst = true;
  3018. }
  3019. // Fast path for != null or != undefined
  3020. // if (src != null && src != undefined)
  3021. //
  3022. // That is:
  3023. // if (src == NULL) goto labelEq
  3024. // if (src != undef) goto target
  3025. // labelEq:
  3026. if (isConst || (srcReg2 && this->IsNullOrUndefRegOpnd(srcReg2)))
  3027. {
  3028. IR::LabelInstr *labelEq = instr->GetOrCreateContinueLabel();
  3029. IR::BranchInstr *newBranch;
  3030. newBranch = this->GenerateFastBrConst(instr->AsBranchInstr(),
  3031. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
  3032. true);
  3033. newBranch->AsBranchInstr()->SetTarget(labelEq);
  3034. this->GenerateFastBrConst(instr->AsBranchInstr(),
  3035. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
  3036. false);
  3037. instr->Remove();
  3038. return true;
  3039. }
  3040. return false;
  3041. }
  3042. bool
  3043. Lowerer::GenerateFastBrSrNeq(IR::Instr * instr, IR::Instr ** pInstrPrev)
  3044. {
  3045. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  3046. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  3047. if (srcReg2 && IsConstRegOpnd(srcReg2))
  3048. {
  3049. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg2->m_sym->GetConstOpnd(), false);
  3050. instr->Remove();
  3051. return true;
  3052. }
  3053. else if (srcReg1 && IsConstRegOpnd(srcReg1))
  3054. {
  3055. instr->SwapOpnds();
  3056. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg1->m_sym->GetConstOpnd(), false);
  3057. instr->Remove();
  3058. return true;
  3059. }
  3060. return false;
  3061. }
void
Lowerer::GenerateDynamicObjectAlloc(IR::Instr * newObjInstr, uint inlineSlotCount, uint slotCount, IR::RegOpnd * newObjDst, IR::Opnd * typeSrc)
{
    // Emits inline code (before newObjInstr) that allocates and initializes a
    // Js::DynamicObject: header + inline slots are one chunk; aux slots (when
    // slotCount exceeds inlineSlotCount) are a separate allocation linked in.
    size_t headerAllocSize = sizeof(Js::DynamicObject) + inlineSlotCount * sizeof(Js::Var);
    IR::SymOpnd * tempObjectSymOpnd;
    // isZeroed lets the MemInit helpers elide stores of zero into fresh memory.
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(newObjInstr, newObjDst, IR::HelperAllocMemForScObject, headerAllocSize, &tempObjectSymOpnd);
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            LoadVTableValueOpnd(this->outerMostLoopLabel, VTableValue::VtableDynamicObject), this->outerMostLoopLabel, false);
    }
    else
    {
        // MOV [newObjDst + offset(vtable)], DynamicObject::vtable
        GenerateMemInit(newObjDst, 0, LoadVTableValueOpnd(newObjInstr, VTableValue::VtableDynamicObject), newObjInstr, isZeroed);
    }
    // MOV [newObjDst + offset(type)], newObjectType
    GenerateMemInit(newObjDst, Js::DynamicObject::GetOffsetOfType(), typeSrc, newObjInstr, isZeroed);
    // CALL JavascriptOperators::AllocMemForVarArray((slotCount - inlineSlotCount) * sizeof(Js::Var))
    if (slotCount > inlineSlotCount)
    {
        size_t auxSlotsAllocSize = (slotCount - inlineSlotCount) * sizeof(Js::Var);
        IR::RegOpnd* auxSlots = IR::RegOpnd::New(TyMachPtr, m_func);
        GenerateRecyclerAllocAligned(IR::HelperAllocMemForVarArray, auxSlotsAllocSize, auxSlots, newObjInstr);
        GenerateMemInit(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), auxSlots, newObjInstr, isZeroed);
        // NOTE(review): the move below writes auxSlots into the same auxSlots
        // field the GenerateMemInit above just initialized — looks like a
        // duplicate store; confirm whether one of the two is intentional.
        IR::IndirOpnd* newObjAuxSlots = IR::IndirOpnd::New(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachPtr, m_func);
        this->InsertMove(newObjAuxSlots, auxSlots, newObjInstr);
    }
    else
    {
        // No out-of-line slots: null out the auxSlots field.
        GenerateMemInitNull(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), newObjInstr, isZeroed);
    }
    GenerateMemInitNull(newObjDst, Js::DynamicObject::GetOffsetOfObjectArray(), newObjInstr, isZeroed);
}
  3097. void
  3098. Lowerer::LowerNewScObjectSimple(IR::Instr * instr)
  3099. {
  3100. GenerateDynamicObjectAlloc(
  3101. instr,
  3102. 0,
  3103. 0,
  3104. instr->UnlinkDst()->AsRegOpnd(),
  3105. LoadLibraryValueOpnd(
  3106. instr,
  3107. Js::FunctionBody::DoObjectHeaderInliningForEmptyObjects()
  3108. ? LibraryValue::ValueObjectHeaderInlinedType
  3109. : LibraryValue::ValueObjectType));
  3110. instr->Remove();
  3111. }
void
Lowerer::LowerNewScObjectLiteral(IR::Instr *newObjInstr)
{
    // Lowers NewScObjectLiteral. Fast case: the literal's dynamic type is
    // already known and shared, so the object can be allocated inline with
    // that type. Otherwise, emit a runtime check and a helper call
    // (HelperEnsureObjectLiteralType) to obtain/create the type first.
    Func * func = m_func;
    IR::IntConstOpnd * literalObjectIdOpnd = newObjInstr->UnlinkSrc2()->AsIntConstOpnd();
    intptr_t literalTypeRef = newObjInstr->m_func->GetJITFunctionBody()->GetObjectLiteralTypeRef(literalObjectIdOpnd->AsUint32());
    IR::LabelInstr * helperLabel = nullptr;
    IR::LabelInstr * allocLabel = nullptr;
    IR::Opnd * literalTypeRefOpnd;
    IR::Opnd * literalTypeOpnd;
    IR::Opnd * propertyArrayOpnd;
    // src1 identifies the aux-data property-id array describing the literal's shape.
    IR::IntConstOpnd * propertyArrayIdOpnd = newObjInstr->UnlinkSrc1()->AsIntConstOpnd();
    const Js::PropertyIdArray * propIds = newObjInstr->m_func->GetJITFunctionBody()->ReadPropertyIdArrayFromAuxData(propertyArrayIdOpnd->AsUint32());
    intptr_t propArrayAddr = newObjInstr->m_func->GetJITFunctionBody()->GetAuxDataAddr(propertyArrayIdOpnd->AsUint32());
    uint inlineSlotCapacity = Js::JavascriptOperators::GetLiteralInlineSlotCapacity(propIds);
    uint slotCapacity = Js::JavascriptOperators::GetLiteralSlotCapacity(propIds);
    IR::RegOpnd * dstOpnd;
    literalTypeRefOpnd = IR::AddrOpnd::New(literalTypeRef, IR::AddrOpndKindDynamicMisc, this->m_func);
    propertyArrayOpnd = IR::AddrOpnd::New(propArrayAddr, IR::AddrOpndKindDynamicMisc, this->m_func);
    //#if 0 TODO: OOP JIT, obj literal types
    // should pass in isShared bit through RPC, enable for in-proc jit to see perf impact
    // In OOP JIT (or with the missing-opts flag) we cannot dereference the type
    // ref at JIT time, so literalType stays null and the runtime-check path is used.
    Js::DynamicType * literalType = func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts) ? nullptr : *(Js::DynamicType **)literalTypeRef;
    if (literalType == nullptr || !literalType->GetIsShared())
    {
        // Runtime-check path: load the type pointer, bail to helper if it is
        // null or not (yet) shared.
        helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        allocLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
        literalTypeOpnd = IR::RegOpnd::New(TyMachPtr, func);
        InsertMove(literalTypeOpnd, IR::MemRefOpnd::New(literalTypeRef, TyMachPtr, func), newObjInstr);
        InsertTestBranch(literalTypeOpnd, literalTypeOpnd,
            Js::OpCode::BrEq_A, helperLabel, newObjInstr);
        InsertTestBranch(IR::IndirOpnd::New(literalTypeOpnd->AsRegOpnd(), Js::DynamicType::GetOffsetOfIsShared(), TyInt8, func),
            IR::IntConstOpnd::New(1, TyInt8, func, true), Js::OpCode::BrEq_A, helperLabel, newObjInstr);
        dstOpnd = newObjInstr->GetDst()->AsRegOpnd();
    }
    else
    {
        // Fast path: type known shared at JIT time; embed it directly.
        literalTypeOpnd = IR::AddrOpnd::New(literalType, IR::AddrOpndKindDynamicType, func);
        dstOpnd = newObjInstr->UnlinkDst()->AsRegOpnd();
        Assert(inlineSlotCapacity == literalType->GetTypeHandler()->GetInlineSlotCapacity());
        Assert(slotCapacity == (uint)literalType->GetTypeHandler()->GetSlotCapacity());
    }
    if (helperLabel)
    {
        InsertBranch(Js::OpCode::Br, allocLabel, newObjInstr);
        // Slow path to ensure the type is there
        newObjInstr->InsertBefore(helperLabel);
        IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperEnsureObjectLiteralType, func);
        // Helper args pushed in reverse: (scriptContext, propertyArray, literalTypeRef).
        m_lowererMD.LoadHelperArgument(newObjInstr, literalTypeRefOpnd);
        m_lowererMD.LoadHelperArgument(newObjInstr, propertyArrayOpnd);
        LoadScriptContext(newObjInstr);
        IR::Instr * ensureTypeInstr = IR::Instr::New(Js::OpCode::Call, literalTypeOpnd, opndHelper, func);
        newObjInstr->InsertBefore(ensureTypeInstr);
        m_lowererMD.LowerCall(ensureTypeInstr, 0);
        newObjInstr->InsertBefore(allocLabel);
    }
    else
    {
        Assert(allocLabel == nullptr);
    }
    // For the next call:
    // inlineSlotCapacity == Number of slots to allocate beyond the DynamicObject header
    // slotCapacity - inlineSlotCapacity == Number of aux slots to allocate
    if(Js::FunctionBody::DoObjectHeaderInliningForObjectLiteral(propIds))
    {
        // Header-inlined objects store the first few slots inside the header
        // itself, so shrink both capacities by that amount.
        Assert(inlineSlotCapacity >= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity());
        Assert(inlineSlotCapacity == slotCapacity);
        slotCapacity = inlineSlotCapacity -= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity();
    }
    GenerateDynamicObjectAlloc(
        newObjInstr,
        inlineSlotCapacity,
        slotCapacity,
        dstOpnd,
        literalTypeOpnd);
    newObjInstr->Remove();
}
  3188. IR::Instr*
  3189. Lowerer::LowerProfiledNewScArray(IR::JitProfilingInstr* arrInstr)
  3190. {
  3191. IR::Instr *instrPrev = arrInstr->m_prev;
  3192. /*
  3193. JavascriptArray *ProfilingHelpers::ProfiledNewScArray(
  3194. const uint length,
  3195. FunctionBody *const functionBody,
  3196. const ProfileId profileId)
  3197. */
  3198. m_lowererMD.LoadHelperArgument(arrInstr, IR::Opnd::CreateProfileIdOpnd(arrInstr->profileId, m_func));
  3199. m_lowererMD.LoadHelperArgument(arrInstr, CreateFunctionBodyOpnd(arrInstr->m_func));
  3200. m_lowererMD.LoadHelperArgument(arrInstr, arrInstr->UnlinkSrc1());
  3201. arrInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledNewScArray, m_func));
  3202. m_lowererMD.LowerCall(arrInstr, 0);
  3203. return instrPrev;
  3204. }
IR::Instr *
Lowerer::LowerNewScArray(IR::Instr *arrInstr)
{
    // Lowers NewScArray. With profile info, first tries an inline fast path
    // specialized to the profiled array kind; the helper call that follows is
    // the fallback (and is the only path when the fast path is not generated).
    if (arrInstr->IsJitProfilingInstr())
    {
        return LowerProfiledNewScArray(arrInstr->AsJitProfilingInstr());
    }
    IR::Instr *instrPrev = arrInstr->m_prev;
    IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScArray;
    if (arrInstr->IsProfiledInstr() && arrInstr->m_func->HasProfileInfo())
    {
        intptr_t weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
        Assert(weakFuncRef);
        Js::ProfileId profileId = static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
        Js::ArrayCallSiteInfo *arrayInfo = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
        intptr_t arrayInfoAddr = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
        // The array length must be a constant here (literal array allocation).
        Assert(arrInstr->GetSrc1()->IsConstOpnd());
        GenerateProfiledNewScArrayFastPath(arrInstr, arrayInfo, arrayInfoAddr, weakFuncRef, arrInstr->GetSrc1()->AsIntConstOpnd()->AsUint32());
        if (arrInstr->GetDst() && arrInstr->GetDst()->GetValueType().IsLikelyNativeArray())
        {
            // Native-array helper takes two extra args (pushed in reverse order):
            // the call-site info and the weak function reference.
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
            helperMethod = IR::HelperScrArr_ProfiledNewScArray;
        }
    }
    // Common tail: (scriptContext, length) then the helper call itself.
    LoadScriptContext(arrInstr);
    IR::Opnd *src1Opnd = arrInstr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(arrInstr, src1Opnd);
    m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
    return instrPrev;
}
  3236. template <typename ArrayType>
  3237. BOOL Lowerer::IsSmallObject(uint32 length)
  3238. {
  3239. if (ArrayType::HasInlineHeadSegment(length))
  3240. return true;
  3241. uint32 alignedHeadSegmentSize = Js::SparseArraySegment<typename ArrayType::TElement>::GetAlignedSize(length);
  3242. size_t allocSize = sizeof(Js::SparseArraySegment<typename ArrayType::TElement>) + alignedHeadSegmentSize * sizeof(typename ArrayType::TElement);
  3243. return HeapInfo::IsSmallObject(HeapInfo::GetAlignedSizeNoCheck(allocSize));
  3244. }
bool
Lowerer::GenerateProfiledNewScArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef, uint32 length)
{
    // Emits an inline allocation fast path for a profiled array literal of
    // known length, specialized by the dst's likely array kind (int/float/var).
    // Returns false (emitting nothing) when the fast path is disabled or the
    // allocation would be too large for the small-object heap.
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
    {
        return false;
    }
    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    // size is in/out: the alloc helper rounds it up to the aligned segment size.
    uint32 size = length;
    bool isZeroed;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    // i is captured by reference below; the float-array branch rescales it
    // before calling fillMissingItems.
    uint32 i = length;
    auto fillMissingItems = [&](IRType type, uint missingItemCount, uint offsetStart, uint itemSpacing)
    {
        // Fill head-segment elements [i, missingItemCount) with the missing-item
        // sentinel so unwritten slots read back as "missing".
        IR::Opnd * missingItemOpnd = GetMissingItemOpnd(type, func);
#if _M_ARM32_OR_ARM64
        // ARM cannot store an immediate directly to memory; materialize it first.
        IR::Instr * move = this->InsertMove(IR::RegOpnd::New(type, instr->m_func), missingItemOpnd, instr);
        missingItemOpnd = move->GetDst();
#endif
        const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
        const IR::AutoReuseOpnd autoReuseMissingItemOpnd(missingItemOpnd, func);
        for (; i < missingItemCount; i++)
        {
            GenerateMemInit(headOpnd, offsetStart + i * itemSpacing, missingItemOpnd, instr, isZeroed);
        }
    };
    if (instr->GetDst() && instr->GetDst()->GetValueType().IsLikelyNativeIntArray())
    {
        if (!IsSmallObject<Js::JavascriptNativeIntArray>(length))
        {
            return false;
        }
        // Guard: bail to helper if the call site is no longer a native int array.
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isZeroed);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        fillMissingItems(TyInt32, size, sizeof(Js::SparseArraySegmentBase), sizeof(int32));
    }
    else if (instr->GetDst() && instr->GetDst()->GetValueType().IsLikelyNativeFloatArray())
    {
        if (!IsSmallObject<Js::JavascriptNativeFloatArray>(length))
        {
            return false;
        }
        // Guard: bail to helper unless the call site is a float (not int) array.
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isZeroed);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        // MissingItem is written as a Var-sized pattern, so rescale the element
        // count (and the captured start index i) from doubles to Var units.
        uint const missingItemCount = size * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        i = i * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        fillMissingItems(TyVar, missingItemCount, offsetStart, sizeof(Js::JavascriptArray::MissingItem));
    }
    else
    {
        if (!IsSmallObject<Js::JavascriptArray>(length))
        {
            return false;
        }
        headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptArray>(instr, &size, arrayInfo, &isZeroed);
        fillMissingItems(TyVar, size, sizeof(Js::SparseArraySegmentBase), sizeof(Js::Var));
    }
    // Skip pass the helper call
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
    return true;
}
  3316. void
  3317. Lowerer::GenerateArrayInfoIsNativeIntArrayTest(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, IR::LabelInstr * helperLabel)
  3318. {
  3319. Func * func = this->m_func;
  3320. InsertTestBranch(IR::MemRefOpnd::New(((char *)arrayInfoAddr) + Js::ArrayCallSiteInfo::GetOffsetOfBits(), TyUint8, func),
  3321. IR::IntConstOpnd::New(Js::ArrayCallSiteInfo::NotNativeIntBit, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
  3322. }
  3323. void
  3324. Lowerer::GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, IR::LabelInstr * helperLabel)
  3325. {
  3326. Func * func = this->m_func;
  3327. InsertCompareBranch(IR::MemRefOpnd::New(((char *)arrayInfoAddr) + Js::ArrayCallSiteInfo::GetOffsetOfBits(), TyUint8, func),
  3328. IR::IntConstOpnd::New(Js::ArrayCallSiteInfo::NotNativeIntBit, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
  3329. }
// Maps a JavaScript array type to the recycler helper used to allocate its
// backing memory. Only the three specializations below are defined.
template <typename ArrayType>
static IR::JnHelperMethod GetArrayAllocMemHelper();
template <>
IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptArray>()
{
    return IR::HelperAllocMemForJavascriptArray;
}
template <>
IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptNativeIntArray>()
{
    return IR::HelperAllocMemForJavascriptNativeIntArray;
}
template <>
IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptNativeFloatArray>()
{
    return IR::HelperAllocMemForJavascriptNativeFloatArray;
}
// Allocation for array literals: forwards to the shared alloc helper with
// isArrayObjCtor=false (head segment length is set to the literal's count).
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayLiteralsAlloc(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed)
{
    return GenerateArrayAllocHelper<ArrayType>(instr, psize, arrayInfo, pIsHeadSegmentZeroed, false /* isArrayObjCtor */, false /* isNoArgs */);
}
// Allocation for `new Array(...)` objects: forwards to the shared alloc helper
// with isArrayObjCtor=true (head segment length starts at 0).
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayObjectsAlloc(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed, bool isNoArgs)
{
    return GenerateArrayAllocHelper<ArrayType>(instr, psize, arrayInfo, pIsHeadSegmentZeroed, true /* isArrayObjCtor */, isNoArgs);
}
// Emits inline allocation + header initialization for a JavaScript array of a
// statically known element count. On return, *psize holds the aligned head
// segment size and *pIsHeadSegmentZeroed reports whether the head segment
// memory is known zeroed. Returns the register holding the head segment.
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayAllocHelper(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed, bool isArrayObjCtor, bool isNoArgs)
{
    Func * func = this->m_func;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
    // Generate code as in JavascriptArray::NewLiteral
    uint32 count = *psize;
    uint alignedHeadSegmentSize;
    size_t arrayAllocSize;
    IR::RegOpnd * headOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func, false);
    IR::Instr * leaHeadInstr = nullptr;
    bool isHeadSegmentZeroed = false;
    if (ArrayType::HasInlineHeadSegment(count))
    {
        // Head segment is carved out of the same allocation as the array object;
        // compute a combined size.
        if (isArrayObjCtor)
        {
            uint32 allocCount = isNoArgs ? Js::SparseArraySegmentBase::SMALL_CHUNK_SIZE : count;
            arrayAllocSize = Js::JavascriptArray::DetermineAllocationSizeForArrayObjects<ArrayType, 0>(allocCount, nullptr, &alignedHeadSegmentSize);
        }
        else
        {
            uint32 allocCount = count == 0 ? Js::SparseArraySegmentBase::SMALL_CHUNK_SIZE : count;
            arrayAllocSize = Js::JavascriptArray::DetermineAllocationSize<ArrayType, 0>(allocCount, nullptr, &alignedHeadSegmentSize);
        }
        // Note that it is possible for the returned alignedHeadSegmentSize to be greater than INLINE_CHUNK_SIZE because
        // of rounding the *entire* object, including the head segment, to the nearest aligned size. In that case, ensure
        // that this size is still not larger than INLINE_CHUNK_SIZE size because the head segment is still inlined. This
        // keeps consistency with the definition of HasInlineHeadSegment and maintained in the assert below.
        uint inlineChunkSize = Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE;
        alignedHeadSegmentSize = min(alignedHeadSegmentSize, inlineChunkSize);
        Assert(ArrayType::HasInlineHeadSegment(alignedHeadSegmentSize));
        // The head pointer is just dst + sizeof(ArrayType); the LEA is created
        // now but inserted later, after the object allocation succeeds.
        leaHeadInstr = IR::Instr::New(Js::OpCode::LEA, headOpnd,
            IR::IndirOpnd::New(dstOpnd, sizeof(ArrayType), TyMachPtr, func), func);
        isHeadSegmentZeroed = true;
    }
    else
    {
        // Need to allocate the head segment first so that if it throws,
        // we doesn't have the memory assigned to dstOpnd yet
        // Even if the instruction is marked as dstIsTempObject, we still should not allocate
        // that big of a chunk on the stack.
        alignedHeadSegmentSize = Js::SparseArraySegment<typename ArrayType::TElement>::GetAlignedSize(count);
        GenerateRecyclerAlloc(
            IR::HelperAllocMemForSparseArraySegmentBase,
            sizeof(Js::SparseArraySegment<typename ArrayType::TElement>) +
            alignedHeadSegmentSize * sizeof(typename ArrayType::TElement),
            headOpnd,
            instr);
        arrayAllocSize = sizeof(ArrayType);
    }
    *psize = alignedHeadSegmentSize;
    IR::SymOpnd * tempObjectSymOpnd;
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(instr, dstOpnd,
        GetArrayAllocMemHelper<ArrayType>(), arrayAllocSize, &tempObjectSymOpnd);
    // Head segment counts as zeroed only if it was inline AND the object
    // allocation returned zeroed memory.
    isHeadSegmentZeroed = isHeadSegmentZeroed & isZeroed;
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            this->LoadVTableValueOpnd(this->outerMostLoopLabel, ArrayType::VtableHelper()),
            this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, ArrayType::VtableHelper()), instr, isZeroed);
    }
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfType(), this->LoadLibraryValueOpnd(instr, ArrayType::InitialTypeHelper()), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, ArrayType::GetOffsetOfAuxSlots(), instr, isZeroed);
    // Emit the flags and call site index together
    Js::ProfileId arrayCallSiteIndex = (Js::ProfileId)instr->AsProfiledInstr()->u.profileId;
#if DBG
    if (instr->AsProfiledInstr()->u.profileId < Js::Constants::NoProfileId)
    {
        Assert((uint32)(arrayInfo - instr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(0)) == arrayCallSiteIndex);
    }
    else
    {
        Assert(arrayInfo == nullptr);
    }
#endif
    // The same at this:
    // GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), (uint16)Js::DynamicObjectFlags::InitialArrayValue, instr, isZeroed);
    // GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayCallSiteIndex(), arrayCallSiteIndex, instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), (uint)Js::DynamicObjectFlags::InitialArrayValue | ((uint)arrayCallSiteIndex << 16), instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLength(), count, instr, isZeroed);
    if (leaHeadInstr != nullptr)
    {
        instr->InsertBefore(leaHeadInstr);
        ChangeToLea(leaHeadInstr);
    }
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfHead(), headOpnd, instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLastUsedSegmentOrSegmentMap(), headOpnd, instr, isZeroed);
    // Initialize segment head
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLeft(), 0, instr, isHeadSegmentZeroed);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), isArrayObjCtor ? 0 : count, instr, isHeadSegmentZeroed);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfSize(), alignedHeadSegmentSize, instr, isHeadSegmentZeroed);
    GenerateMemInitNull(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfNext(), instr, isHeadSegmentZeroed);
    *pIsHeadSegmentZeroed = isHeadSegmentZeroed;
    return headOpnd;
}
// Emits inline allocation + header initialization for an array whose length is
// only known at runtime (arrayLenOpnd): picks an allocation bucket at runtime,
// calls the recycler helper, then initializes the object and head segment.
// Returns the register holding the head segment pointer.
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayAlloc(IR::Instr *instr, IR::Opnd * arrayLenOpnd, Js::ArrayCallSiteInfo * arrayInfo)
{
    Func * func = this->m_func;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd * headOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func, false);
    IR::Instr * leaHeadInstr = nullptr;
    IR::Opnd * arraySizeOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::Opnd * alignedArrayAllocSizeOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::LabelInstr * doneCalculatingAllocSize = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * skipToNextBucket = nullptr;
    uint8 bucketsCount = ArrayType::AllocationBucketsCount;
    Js::JavascriptArray::EnsureCalculationOfAllocationBuckets<ArrayType>();
    // Emit a chain of compares that selects, from the precomputed allocation
    // buckets, the element count to initialize and the allocation size.
    for (uint8 i = 0;i < bucketsCount;i++)
    {
        uint elementsCountToInitialize = ArrayType::allocationBuckets[i][Js::JavascriptArray::MissingElementsCountIndex];
        uint allocationSize = ArrayType::allocationBuckets[i][Js::JavascriptArray::AllocationSizeIndex];
        // Ensure we already have allocation size calculated and within range
        Assert(elementsCountToInitialize > 0 && elementsCountToInitialize <= ArrayType::allocationBuckets[bucketsCount - 1][Js::JavascriptArray::MissingElementsCountIndex]);
        Assert(allocationSize > 0 && allocationSize <= ArrayType::allocationBuckets[bucketsCount - 1][Js::JavascriptArray::AllocationSizeIndex]);
        // CMP arrayLen, currentBucket
        // JG $checkNextBucket
        // (last bucket needs no compare: it's the fall-through case)
        if (i != (bucketsCount - 1))
        {
            Lowerer::InsertCompare(arrayLenOpnd, IR::IntConstOpnd::New((uint16)ArrayType::allocationBuckets[i][Js::JavascriptArray::AllocationBucketIndex], TyUint32, func), instr);
            skipToNextBucket = IR::LabelInstr::New(Js::OpCode::Label, func);
            Lowerer::InsertBranch(Js::OpCode::BrGt_A, skipToNextBucket, instr);
        }
        // MOV $arrayAlignedSize, <const1>
        // MOV $arrayAllocSize, <const2>
        // NOTE(review): the (uint16) casts would truncate bucket values above
        // 64K — presumably the bucket tables guarantee smaller values; confirm.
        Lowerer::InsertMove(arraySizeOpnd, IR::IntConstOpnd::New((uint16)elementsCountToInitialize, TyUint32, func), instr);
        Lowerer::InsertMove(alignedArrayAllocSizeOpnd, IR::IntConstOpnd::New((uint16)allocationSize, TyUint32, func), instr);
        // JMP $doneCalculatingAllocSize
        if (i != (bucketsCount - 1))
        {
            Lowerer::InsertBranch(Js::OpCode::Br, doneCalculatingAllocSize, instr);
            instr->InsertBefore(skipToNextBucket);
        }
    }
    instr->InsertBefore(doneCalculatingAllocSize);
    // ***** Call to allocation helper *****
    // Args pushed in reverse: (allocSize, recycler).
    this->m_lowererMD.LoadHelperArgument(instr, this->LoadScriptContextValueOpnd(instr, ScriptContextValue::ScriptContextRecycler));
    this->m_lowererMD.LoadHelperArgument(instr, alignedArrayAllocSizeOpnd);
    IR::Instr *newObjCall = IR::Instr::New(Js::OpCode::Call, dstOpnd, IR::HelperCallOpnd::New(GetArrayAllocMemHelper<ArrayType>(), func), func);
    instr->InsertBefore(newObjCall);
    this->m_lowererMD.LowerCall(newObjCall, 0);
    // ***** Load headSeg/initialize it *****
    // Head segment lives inline, immediately after the array object.
    leaHeadInstr = IR::Instr::New(Js::OpCode::LEA, headOpnd,
        IR::IndirOpnd::New(dstOpnd, sizeof(ArrayType), TyMachPtr, func), func);
    // Memory from the helper is treated as zeroed (final `true` args below).
    GenerateMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, ArrayType::VtableHelper()), instr, true);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfType(), this->LoadLibraryValueOpnd(instr, ArrayType::InitialTypeHelper()), instr, true);
    GenerateMemInitNull(dstOpnd, ArrayType::GetOffsetOfAuxSlots(), instr, true);
    Js::ProfileId arrayCallSiteIndex = (Js::ProfileId)instr->AsProfiledInstr()->u.profileId;
#if DBG
    if (instr->AsProfiledInstr()->u.profileId < Js::Constants::NoProfileId)
    {
        Assert((uint32)(arrayInfo - instr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(0)) == arrayCallSiteIndex);
    }
    else
    {
        Assert(arrayInfo == nullptr);
    }
#endif
    // ***** Array object initialization *****
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), IR::IntConstOpnd::New((uint16)Js::DynamicObjectFlags::InitialArrayValue, TyUint16, func), instr, true);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLength(), arrayLenOpnd, instr, true);
    if (leaHeadInstr != nullptr)
    {
        instr->InsertBefore(leaHeadInstr);
        ChangeToLea(leaHeadInstr);
    }
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfHead(), headOpnd, instr, true);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLastUsedSegmentOrSegmentMap(), headOpnd, instr, true);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLeft(), 0, instr, true);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), 0, instr, true); // Set head segment length to 0
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfSize(), arraySizeOpnd, instr, true);
    GenerateMemInitNull(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfNext(), instr, true);
    return headOpnd;
}
bool
Lowerer::GenerateProfiledNewScObjArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef, uint32 length, IR::LabelInstr* labelDone, bool isNoArgs)
{
    // Fast path for `new Array(...)` with a compile-time-known length:
    // allocate inline, specialized by the profiled array kind, filling the
    // head segment with missing-item sentinels. On success, emits a jump over
    // the helper call to labelDone and returns true.
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func))
    {
        return false;
    }
    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    // size is in/out: the alloc helper rounds it up to the aligned segment size.
    uint32 size = length;
    bool isZeroed = false;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId);
    if (arrayInfo && arrayInfo->IsNativeIntArray())
    {
        // Guard: bail to helper if the call site is no longer a native int array.
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayObjectsAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isZeroed, isNoArgs);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex(), IR::IntConstOpnd::New(profileId, TyUint16, func, true), instr, isZeroed);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
                Js::JavascriptNativeIntArray::MissingItem, instr, isZeroed);
        }
    }
    else if (arrayInfo && arrayInfo->IsNativeFloatArray())
    {
        // Guard: bail to helper unless the call site is a float (not int) array.
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
        Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayObjectsAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isZeroed, isNoArgs);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex(), IR::IntConstOpnd::New(profileId, TyUint16, func, true), instr, isZeroed);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        uint const missingItemCount = size * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        for (uint i = 0; i < missingItemCount; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::JavascriptArray::MissingItem),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstantAddress, m_func, true),
                instr, isZeroed);
        }
    }
    else
    {
        // Var array: no call-site guard needed.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        headOpnd = GenerateArrayObjectsAlloc<Js::JavascriptArray>(instr, &size, arrayInfo, &isZeroed, isNoArgs);
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::Var),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstantAddress, m_func, true),
                instr, isZeroed);
        }
    }
    // Skip pass the helper call
    InsertBranch(Js::OpCode::Br, labelDone, instr);
    instr->InsertBefore(helperLabel);
    return true;
}
// Fast path for a profiled `new Array(length)` call site whose length is a
// runtime tagged-int operand. Untags the length, bails to `helperLabel` when
// it exceeds 8, allocates the array from ArrayType's allocation buckets, and
// fills the head segment with missing-item sentinels — branching out to
// `arrayInitDone` at each bucket boundary once enough sentinels for the actual
// length's bucket have been written. For native arrays, also stores the
// call-site index and function-body weak ref.
// Returns false (emitting nothing) when the ArrayCtorFastPath phase is off;
// otherwise emits the path plus a jump to labelDone and the helper label,
// then returns true.
template <typename ArrayType>
bool
Lowerer::GenerateProfiledNewScObjArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef, IR::LabelInstr* helperLabel,
    IR::LabelInstr* labelDone, IR::Opnd* lengthOpnd, uint32 offsetOfCallSiteIndex, uint32 offsetOfWeakFuncRef)
{
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func))
    {
        return false;
    }

    Func * func = this->m_func;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId);

    uint sizeOfElement = 0;
    uint allocationBucketsCount = ArrayType::AllocationBucketsCount;
    uint(*allocationBuckets)[Js::JavascriptArray::AllocationBucketsInfoSize];
    allocationBuckets = ArrayType::allocationBuckets;
    // sizeFactor: number of MissingItem-sized stores needed per logical element
    // (only != 1 for float arrays, where a double may span two MissingItems).
    uint sizeFactor = 1;
    IRType missingItemType = (arrayInfo && arrayInfo->IsNativeIntArray()) ? IRType::TyInt32 : IRType::TyVar;
    IR::LabelInstr * arrayInitDone = IR::LabelInstr::New(Js::OpCode::Label, func);

    bool isNativeArray = arrayInfo && (arrayInfo->IsNativeIntArray() || arrayInfo->IsNativeFloatArray());

    if (arrayInfo && arrayInfo->IsNativeIntArray())
    {
        sizeOfElement = sizeof(int32);
        // Bail to the helper if the site's profile no longer says "native int array".
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
    }
    else if (arrayInfo && arrayInfo->IsNativeFloatArray())
    {
        // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
        sizeFactor = sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        sizeOfElement = sizeof(Js::JavascriptArray::MissingItem);
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
    }
    else
    {
        sizeOfElement = sizeof(Js::Var);
    }

    // Untag the length (bails to helper if it isn't a tagged int).
    lengthOpnd = GenerateUntagVar(lengthOpnd->AsRegOpnd(), helperLabel, instr);

    // Lengths above 8 take the helper path (unsigned compare also rejects negatives).
    IR::Opnd* upperBound = IR::IntConstOpnd::New(8, TyUint8, func, true);
    InsertCompare(lengthOpnd, upperBound, instr);
    InsertBranch(Js::OpCode::BrGt_A, true /* isUnsigned */, helperLabel, instr);

    headOpnd = GenerateArrayAlloc<ArrayType>(instr, lengthOpnd, arrayInfo);

    if (isNativeArray)
    {
        Assert(ArrayType::GetOffsetOfArrayFlags() + sizeof(uint16) == offsetOfCallSiteIndex);
        Assert(offsetOfWeakFuncRef > 0);
        GenerateMemInit(dstOpnd, offsetOfCallSiteIndex, IR::IntConstOpnd::New(profileId, TyUint16, func, true), instr, true /* isZeroed */);
        GenerateMemInit(dstOpnd, offsetOfWeakFuncRef, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, true /* isZeroed */);
    }

    uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
    uint missingItemCount = 0;
    uint missingItemInitializedSoFar = 0;
    uint missingItemIndex = 0;
    uint maxAllocationSize = allocationBuckets[allocationBucketsCount - 1][Js::JavascriptArray::AllocationSizeIndex];
    // Emit sentinel stores bucket by bucket; after each bucket's stores, branch
    // to arrayInitDone if the runtime length fits in that bucket.
    for (uint8 i = 0;i < allocationBucketsCount;i++)
    {
        missingItemCount = allocationBuckets[i][Js::JavascriptArray::MissingElementsCountIndex] * sizeFactor;
        if (i > 0)
        {
            // Reduce missingItemCount we have already set so far
            missingItemCount -= missingItemInitializedSoFar;
        }
        // Generate array initialization with MissingItem
        for (uint j = 0;j < missingItemCount;j++)
        {
            // Ensure we don't write missingItems past allocation size
            Assert(offsetStart + missingItemIndex * sizeOfElement <= maxAllocationSize);
            GenerateMemInit(headOpnd, offsetStart + missingItemIndex * sizeOfElement, GetMissingItemOpndForAssignment(missingItemType, func), instr, true /*isZeroed*/);
            missingItemIndex++;
        }
        // CMP arrayLen, currentBucket
        // JG $checkNextBucket
        if (i != (allocationBucketsCount - 1))
        {
            Lowerer::InsertCompare(lengthOpnd, IR::IntConstOpnd::New(allocationBuckets[i][Js::JavascriptArray::AllocationBucketIndex], TyUint32, func), instr);
            Lowerer::InsertBranch(Js::OpCode::BrLe_A, arrayInitDone, instr);
        }
        missingItemInitializedSoFar += missingItemCount;
    }
    // Ensure no. of missingItems written are same
    Assert(missingItemIndex == missingItemInitializedSoFar);
    // Ensure no. of missingItems match what present in allocationBuckets
    Assert(missingItemIndex == allocationBuckets[allocationBucketsCount - 1][Js::JavascriptArray::MissingElementsCountIndex] * sizeFactor);

    instr->InsertBefore(arrayInitDone);

    Lowerer::InsertBranch(Js::OpCode::Br, labelDone, instr);
    instr->InsertBefore(helperLabel);
    return true;
}
  3692. void
  3693. Lowerer::GenerateProfiledNewScIntArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef)
  3694. {
  3695. // Helper will deal with ForceES5ARray
  3696. if (PHASE_OFF(Js::ArrayLiteralFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
  3697. {
  3698. return;
  3699. }
  3700. if (!arrayInfo->IsNativeIntArray())
  3701. {
  3702. return;
  3703. }
  3704. if (instr->GetSrc1()->AsAddrOpnd()->GetAddrOpndKind() != IR::AddrOpndKindDynamicAuxBufferRef)
  3705. {
  3706. return;
  3707. }
  3708. Func * func = this->m_func;
  3709. IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  3710. GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
  3711. IR::AddrOpnd * elementsOpnd = instr->GetSrc1()->AsAddrOpnd();
  3712. Js::AuxArray<int32> * ints = (Js::AuxArray<int32> *)elementsOpnd->m_metadata;
  3713. uint32 size = ints->count;
  3714. // Generate code as in JavascriptArray::NewLiteral
  3715. bool isHeadSegmentZeroed;
  3716. IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
  3717. Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
  3718. IR::RegOpnd * headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isHeadSegmentZeroed);
  3719. const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
  3720. GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicMisc, m_func), instr, isHeadSegmentZeroed);
  3721. // Initialize the elements
  3722. uint i = 0;
  3723. if (ints->count > 16)
  3724. {
  3725. // Do memcpy if > 16
  3726. IR::RegOpnd * dstElementsOpnd = IR::RegOpnd::New(TyMachPtr, func);
  3727. const IR::AutoReuseOpnd autoReuseDstElementsOpnd(dstElementsOpnd, func);
  3728. IR::Opnd * srcOpnd = IR::AddrOpnd::New((intptr_t)elementsOpnd->m_address + Js::AuxArray<int32>::OffsetOfElements(), IR::AddrOpndKindDynamicMisc, func);
  3729. InsertLea(dstElementsOpnd, IR::IndirOpnd::New(headOpnd, sizeof(Js::SparseArraySegmentBase), TyMachPtr, func), instr);
  3730. GenerateMemCopy(dstElementsOpnd, srcOpnd, ints->count * sizeof(int32), instr);
  3731. i = ints->count;
  3732. }
  3733. else
  3734. {
  3735. for (; i < ints->count; i++)
  3736. {
  3737. GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
  3738. ints->elements[i], instr, isHeadSegmentZeroed);
  3739. }
  3740. }
  3741. Assert(i == ints->count);
  3742. for (; i < size; i++)
  3743. {
  3744. GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
  3745. Js::JavascriptNativeIntArray::MissingItem, instr, isHeadSegmentZeroed);
  3746. }
  3747. // Skip pass the helper call
  3748. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  3749. InsertBranch(Js::OpCode::Br, doneLabel, instr);
  3750. instr->InsertBefore(helperLabel);
  3751. instr->InsertAfter(doneLabel);
  3752. }
  3753. void
  3754. Lowerer::GenerateProfiledNewScFloatArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, intptr_t arrayInfoAddr, intptr_t weakFuncRef)
  3755. {
  3756. if (PHASE_OFF(Js::ArrayLiteralFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
  3757. {
  3758. return;
  3759. }
  3760. if (!arrayInfo->IsNativeFloatArray())
  3761. {
  3762. return;
  3763. }
  3764. if (instr->GetSrc1()->AsAddrOpnd()->GetAddrOpndKind() != IR::AddrOpndKindDynamicAuxBufferRef)
  3765. {
  3766. return;
  3767. }
  3768. Func * func = this->m_func;
  3769. IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  3770. // If the array info hasn't mark as not int array yet, go to the helper and mark it.
  3771. // It really is just for assert purpose in JavascriptNativeFloatArray::ToVarArray
  3772. GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, arrayInfoAddr, helperLabel);
  3773. IR::AddrOpnd * elementsOpnd = instr->GetSrc1()->AsAddrOpnd();
  3774. Js::AuxArray<double> * doubles = (Js::AuxArray<double> *)elementsOpnd->m_metadata;
  3775. uint32 size = doubles->count;
  3776. // Generate code as in JavascriptArray::NewLiteral
  3777. bool isHeadSegmentZeroed;
  3778. IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
  3779. Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
  3780. IR::RegOpnd * headOpnd = GenerateArrayLiteralsAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isHeadSegmentZeroed);
  3781. const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
  3782. GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isHeadSegmentZeroed);
  3783. // Initialize the elements
  3784. IR::RegOpnd * dstElementsOpnd = IR::RegOpnd::New(TyMachPtr, func);
  3785. const IR::AutoReuseOpnd autoReuseDstElementsOpnd(dstElementsOpnd, func);
  3786. IR::Opnd * srcOpnd = IR::AddrOpnd::New((intptr_t)elementsOpnd->m_address + Js::AuxArray<double>::OffsetOfElements(), IR::AddrOpndKindDynamicMisc, func);
  3787. InsertLea(dstElementsOpnd, IR::IndirOpnd::New(headOpnd, sizeof(Js::SparseArraySegmentBase), TyMachPtr, func), instr);
  3788. GenerateMemCopy(dstElementsOpnd, srcOpnd, doubles->count * sizeof(double), instr);
  3789. // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
  3790. uint const offsetStart = sizeof(Js::SparseArraySegmentBase) + doubles->count * sizeof(double);
  3791. uint const missingItem = (size - doubles->count) * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
  3792. for (uint i = 0; i < missingItem; i++)
  3793. {
  3794. GenerateMemInit(headOpnd, offsetStart + i * sizeof(Js::JavascriptArray::MissingItem),
  3795. IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstantAddress, m_func, true), instr, isHeadSegmentZeroed);
  3796. }
  3797. // Skip pass the helper call
  3798. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  3799. InsertBranch(Js::OpCode::Br, doneLabel, instr);
  3800. instr->InsertBefore(helperLabel);
  3801. instr->InsertAfter(doneLabel);
  3802. }
  3803. IR::Instr *
  3804. Lowerer::LowerNewScIntArray(IR::Instr *arrInstr)
  3805. {
  3806. IR::Instr *instrPrev = arrInstr->m_prev;
  3807. IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScIntArray;
  3808. if ((arrInstr->IsJitProfilingInstr() || arrInstr->IsProfiledInstr()) && arrInstr->m_func->HasProfileInfo())
  3809. {
  3810. intptr_t weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
  3811. if (weakFuncRef)
  3812. {
  3813. // Technically a load of the same memory address either way.
  3814. Js::ProfileId profileId =
  3815. arrInstr->IsJitProfilingInstr()
  3816. ? arrInstr->AsJitProfilingInstr()->profileId
  3817. : static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
  3818. Js::ArrayCallSiteInfo *arrayInfo = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
  3819. intptr_t arrayInfoAddr = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
  3820. // Only do fast-path if it isn't a JitProfiling instr and not copy-on-access array
  3821. if (arrInstr->IsProfiledInstr()
  3822. #if ENABLE_COPYONACCESS_ARRAY
  3823. && (PHASE_OFF1(Js::Phase::CopyOnAccessArrayPhase) || arrayInfo->isNotCopyOnAccessArray) && !PHASE_FORCE1(Js::Phase::CopyOnAccessArrayPhase)
  3824. #endif
  3825. )
  3826. {
  3827. GenerateProfiledNewScIntArrayFastPath(arrInstr, arrayInfo, arrayInfoAddr, weakFuncRef);
  3828. }
  3829. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
  3830. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
  3831. helperMethod = IR::HelperScrArr_ProfiledNewScIntArray;
  3832. }
  3833. }
  3834. LoadScriptContext(arrInstr);
  3835. IR::Opnd *elementsOpnd = arrInstr->UnlinkSrc1();
  3836. m_lowererMD.LoadHelperArgument(arrInstr, elementsOpnd);
  3837. m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
  3838. return instrPrev;
  3839. }
  3840. IR::Instr *
  3841. Lowerer::LowerNewScFltArray(IR::Instr *arrInstr)
  3842. {
  3843. IR::Instr *instrPrev = arrInstr->m_prev;
  3844. IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScFltArray;
  3845. if ((arrInstr->IsJitProfilingInstr() || arrInstr->IsProfiledInstr()) && arrInstr->m_func->HasProfileInfo())
  3846. {
  3847. intptr_t weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
  3848. if (weakFuncRef)
  3849. {
  3850. Js::ProfileId profileId =
  3851. arrInstr->IsJitProfilingInstr()
  3852. ? arrInstr->AsJitProfilingInstr()->profileId
  3853. : static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
  3854. Js::ArrayCallSiteInfo *arrayInfo = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
  3855. intptr_t arrayInfoAddr = arrInstr->m_func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
  3856. // Only do fast-path if it isn't a JitProfiling instr
  3857. if (arrInstr->IsProfiledInstr()) {
  3858. GenerateProfiledNewScFloatArrayFastPath(arrInstr, arrayInfo, arrayInfoAddr, weakFuncRef);
  3859. }
  3860. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
  3861. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
  3862. helperMethod = IR::HelperScrArr_ProfiledNewScFltArray;
  3863. }
  3864. }
  3865. LoadScriptContext(arrInstr);
  3866. IR::Opnd *elementsOpnd = arrInstr->UnlinkSrc1();
  3867. m_lowererMD.LoadHelperArgument(arrInstr, elementsOpnd);
  3868. m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
  3869. return instrPrev;
  3870. }
  3871. IR::Instr *
  3872. Lowerer::LowerArraySegmentVars(IR::Instr *arrayInstr)
  3873. {
  3874. IR::Instr * instrPrev;
  3875. IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperArraySegmentVars, m_func);
  3876. instrPrev = m_lowererMD.LoadHelperArgument(arrayInstr, arrayInstr->UnlinkSrc2());
  3877. m_lowererMD.LoadHelperArgument(arrayInstr, arrayInstr->UnlinkSrc1());
  3878. arrayInstr->m_opcode = Js::OpCode::Call;
  3879. arrayInstr->SetSrc1(opndHelper);
  3880. m_lowererMD.LowerCall(arrayInstr, 0);
  3881. return instrPrev;
  3882. }
  3883. IR::Instr* Lowerer::LowerProfiledNewArray(IR::JitProfilingInstr* instr, bool hasArgs)
  3884. {
  3885. // Use the special helper which checks whether Array has been overwritten by the user and if
  3886. // it hasn't, possibly allocates a native array
  3887. // Insert a temporary label before the instruction we're about to lower, so that we can return
  3888. // the first instruction above that needs to be lowered after we're done - regardless of argument
  3889. // list, StartCall, etc.
  3890. IR::Instr* startMarkerInstr = InsertLoweredRegionStartMarker(instr);
  3891. Assert(instr->isNewArray);
  3892. Assert(instr->arrayProfileId != Js::Constants::NoProfileId);
  3893. Assert(instr->profileId != Js::Constants::NoProfileId);
  3894. bool isSpreadCall = instr->m_opcode == Js::OpCode::NewScObjectSpread || instr->m_opcode == Js::OpCode::NewScObjArraySpread;
  3895. m_lowererMD.LoadNewScObjFirstArg(instr, IR::AddrOpnd::New(nullptr, IR::AddrOpndKindConstantVar, m_func, true), isSpreadCall ? 1 : 0);
  3896. if (isSpreadCall)
  3897. {
  3898. this->LowerSpreadCall(instr, Js::CallFlags_New, true);
  3899. }
  3900. else
  3901. {
  3902. const int32 argCount = m_lowererMD.LowerCallArgs(instr, Js::CallFlags_New, 4);
  3903. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->arrayProfileId, m_func));
  3904. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
  3905. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
  3906. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  3907. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledNewScObjArray, m_func));
  3908. m_lowererMD.LowerCall(instr, static_cast<Js::ArgSlot>(argCount));
  3909. }
  3910. return RemoveLoweredRegionStartMarker(startMarkerInstr);
  3911. }
///----------------------------------------------------------------------------
///
/// Lowerer::LowerNewScObject
///
/// Machine independent lowering of a CallI instr.
///
/// Lowers NewScObject and its variants: optionally emits the fixed-ctor-cache
/// fast allocation path, otherwise calls a NewScObject* helper (or emits a
/// bailout), then optionally calls the constructor and selects the value to
/// return (the allocated object vs. the constructor's result).
/// Returns the first instruction above the lowered region.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerNewScObject(IR::Instr *newObjInstr, bool callCtor, bool hasArgs, bool isBaseClassConstructorNewScObject)
{
    // Profiled new-array allocation takes its own dedicated lowering path.
    if (newObjInstr->IsJitProfilingInstr() && newObjInstr->AsJitProfilingInstr()->isNewArray)
    {
        Assert(callCtor);
        return LowerProfiledNewArray(newObjInstr->AsJitProfilingInstr(), hasArgs);
    }

    bool isSpreadCall = newObjInstr->m_opcode == Js::OpCode::NewScObjectSpread ||
        newObjInstr->m_opcode == Js::OpCode::NewScObjArraySpread;

    Func* func = newObjInstr->m_func;

    // Insert a temporary label before the instruction we're about to lower, so that we can return
    // the first instruction above that needs to be lowered after we're done - regardless of argument
    // list, StartCall, etc.
    IR::Instr* startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

    IR::Opnd *ctorOpnd = newObjInstr->GetSrc1();
    IR::RegOpnd *newObjDst = newObjInstr->GetDst()->AsRegOpnd();

    Assert(!callCtor || !hasArgs || (newObjInstr->GetSrc2() != nullptr /*&& newObjInstr->GetSrc2()->IsSymOpnd()*/));

    bool skipNewScObj = false;
    bool returnNewScObj = false;
    bool emitBailOut = false;
    // If we haven't yet split NewScObject into NewScObjectNoCtor and CallI, we will need a temporary register
    // to hold the result of the object allocation.
    IR::RegOpnd* createObjDst = callCtor ? IR::RegOpnd::New(TyVar, func) : newObjDst;
    IR::LabelInstr* helperOrBailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, /* isOpHelper = */ true);
    IR::LabelInstr* callCtorLabel = IR::LabelInstr::New(Js::OpCode::Label, func, /* isOpHelper = */ false);

    // Try to emit the fast allocation and construction path.
    // The out-params describe what the fast path decided:
    //   skipNewScObj   - no default object is allocated (ctor supplies the object)
    //   returnNewScObj - the allocated object (not the ctor result) is the value
    //   emitBailOut    - the slow path must be a bailout instead of a helper call
    bool usedFixedCtorCache = TryLowerNewScObjectWithFixedCtorCache(newObjInstr, createObjDst, helperOrBailoutLabel, callCtorLabel, skipNewScObj, returnNewScObj, emitBailOut);

    AssertMsg(!skipNewScObj || callCtor, "What will we return if we skip the default new object and don't call the ctor?");
    Assert(!skipNewScObj || !returnNewScObj);
    Assert(usedFixedCtorCache || !skipNewScObj);
    Assert(!usedFixedCtorCache || newObjInstr->HasFixedFunctionAddressTarget());
    Assert(!skipNewScObj || !emitBailOut);

#if DBG && 0 // TODO: OOP JIT, enable assert
    if (usedFixedCtorCache)
    {
        Js::JavascriptFunction* ctor = newObjInstr->GetFixedFunction();
        Js::FunctionInfo* ctorInfo = ctor->GetFunctionInfo();
        Assert((ctorInfo->GetAttributes() & Js::FunctionInfo::Attributes::ErrorOnNew) == 0);
        Assert(!!(ctorInfo->GetAttributes() & Js::FunctionInfo::Attributes::SkipDefaultNewObject) == skipNewScObj);
    }
#endif

    // An empty ArgOut chain downgrades this to a no-args construction.
    IR::Instr* startCallInstr = nullptr;
    if (callCtor && hasArgs)
    {
        hasArgs = !newObjInstr->HasEmptyArgOutChain(&startCallInstr);
    }

    // If we're not skipping the default new object, let's emit bailout or a call to NewScObject* helper
    IR::JnHelperMethod newScHelper = IR::HelperInvalid;
    IR::Instr *newScObjCall = nullptr;
    if (!skipNewScObj)
    {
        // If we emitted the fast path, this block is a helper block.
        if (usedFixedCtorCache)
        {
            newObjInstr->InsertBefore(helperOrBailoutLabel);
        }
        if (emitBailOut)
        {
            // Split the original instruction: keep lowering the clone, turn the
            // original into a BailOut at this point.
            IR::Instr* bailOutInstr = newObjInstr;
            newObjInstr = IR::Instr::New(newObjInstr->m_opcode, func);
            bailOutInstr->TransferTo(newObjInstr);
            bailOutInstr->m_opcode = Js::OpCode::BailOut;
            bailOutInstr->InsertAfter(newObjInstr);
            GenerateBailOut(bailOutInstr);
        }
        else
        {
            Assert(!newObjDst->CanStoreTemp());
            // createObjDst = NewScObject...(ctorOpnd)
            // Helper choice depends on whether the ctor will be called here,
            // whether there are args, and whether this is a base-class new.
            newScHelper = !callCtor ?
                (isBaseClassConstructorNewScObject ?
                    (hasArgs ? IR::HelperNewScObjectNoCtorFull : IR::HelperNewScObjectNoArgNoCtorFull) :
                    (hasArgs ? IR::HelperNewScObjectNoCtor : IR::HelperNewScObjectNoArgNoCtor)) :
                (hasArgs || usedFixedCtorCache ? IR::HelperNewScObjectNoCtor : IR::HelperNewScObjectNoArg);

            LoadScriptContext(newObjInstr);
            m_lowererMD.LoadHelperArgument(newObjInstr, newObjInstr->GetSrc1());

            newScObjCall = IR::Instr::New(Js::OpCode::Call, createObjDst, IR::HelperCallOpnd::New(newScHelper, func), func);
            newObjInstr->InsertBefore(newScObjCall);
            m_lowererMD.LowerCall(newScObjCall, 0);
        }
    }

    // If we call HelperNewScObjectNoArg directly, we won't be calling the constructor from here, because the helper will do it.
    // We could probably avoid this complexity by converting NewScObjectNoArg to NewScObject in the IRBuilder, once we have dedicated
    // code paths for new Object() and new Array().
    callCtor &= hasArgs || usedFixedCtorCache;
    AssertMsg(!skipNewScObj || callCtor, "What will we return if we skip the default new object and don't call the ctor?");

    newObjInstr->InsertBefore(callCtorLabel);

    if (callCtor && usedFixedCtorCache)
    {
        IR::JnHelperMethod ctorHelper = IR::JnHelperMethodCount;

        // If we have no arguments (i.e. the argument chain is empty), we can recognize a couple of common special cases, such
        // as new Object() or new Array(), for which we have optimized helpers.
        FixedFieldInfo* ctor = newObjInstr->GetFixedFunction();
        intptr_t ctorInfo = ctor->GetFuncInfoAddr();
        if (!hasArgs && (ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptObjectNewInstanceAddr() || ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptArrayNewInstanceAddr()))
        {
            if (ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptObjectNewInstanceAddr())
            {
                Assert(skipNewScObj);
                ctorHelper = IR::HelperNewJavascriptObjectNoArg;
                callCtor = false;
            }
            else if (ctorInfo == m_func->GetThreadContextInfo()->GetJavascriptArrayNewInstanceAddr())
            {
                Assert(skipNewScObj);
                ctorHelper = IR::HelperNewJavascriptArrayNoArg;
                callCtor = false;
            }
            if (!callCtor)
            {
                // Replace the ctor call with the optimized no-arg helper call.
                LoadScriptContext(newObjInstr);

                IR::Instr *ctorCall = IR::Instr::New(Js::OpCode::Call, newObjDst, IR::HelperCallOpnd::New(ctorHelper, func), func);
                newObjInstr->InsertBefore(ctorCall);
                m_lowererMD.LowerCall(ctorCall, 0);
            }
        }
    }

    IR::AutoReuseOpnd autoReuseSavedCtorOpnd;
    if (callCtor)
    {
        // Load the first argument, which is either the object just created or null. Spread has an extra argument.
        IR::Instr * argInstr = this->m_lowererMD.LoadNewScObjFirstArg(newObjInstr, createObjDst, isSpreadCall ? 1 : 0);

        IR::Instr * insertAfterCtorInstr = newObjInstr->m_next;

        if (skipNewScObj)
        {
            // Since we skipped the default new object, we must be returning whatever the constructor returns
            // (which better be an Object), so let's just use newObjDst directly.
            // newObjDst = newObjInstr->m_src1(createObjDst, ...)
            Assert(newObjInstr->GetDst() == newObjDst);
            if (isSpreadCall)
            {
                newObjInstr = this->LowerSpreadCall(newObjInstr, Js::CallFlags_New);
            }
            else
            {
                newObjInstr = this->m_lowererMD.LowerCallI(newObjInstr, Js::CallFlags_New, false, argInstr);
            }
        }
        else
        {
            // We may need to return the default new object or whatever the constructor returns. Let's stash
            // away the constructor's return in a temporary operand, and do the right check, if necessary.
            // ctorResultObjOpnd = newObjInstr->m_src1(createObjDst, ...)
            IR::RegOpnd *ctorResultObjOpnd = IR::RegOpnd::New(TyVar, func);
            newObjInstr->UnlinkDst();
            newObjInstr->SetDst(ctorResultObjOpnd);

            if (isSpreadCall)
            {
                newObjInstr = this->LowerSpreadCall(newObjInstr, Js::CallFlags_New);
            }
            else
            {
                newObjInstr = this->m_lowererMD.LowerCallI(newObjInstr, Js::CallFlags_New, false, argInstr);
            }

            if (returnNewScObj)
            {
                // MOV newObjDst, createObjDst
                this->InsertMove(newObjDst, createObjDst, insertAfterCtorInstr);
            }
            else
            {
                // Pick between the ctor's result and the allocated object at runtime.
                LowerGetNewScObjectCommon(ctorResultObjOpnd, ctorResultObjOpnd, createObjDst, insertAfterCtorInstr);
                this->InsertMove(newObjDst, ctorResultObjOpnd, insertAfterCtorInstr);
            }
        }

        // We don't ever need to update the constructor cache, if we hard coded it. Caches requiring update after constructor
        // don't get cloned, and those that don't require update will never need one anymore.
        if (!usedFixedCtorCache)
        {
            LowerUpdateNewScObjectCache(insertAfterCtorInstr, newObjDst, ctorOpnd, false /* isCtorFunction */);
        }
    }
    else
    {
        if (newObjInstr->IsJitProfilingInstr())
        {
            Assert(m_func->IsSimpleJit());
            Assert(!CONFIG_FLAG(NewSimpleJit));

            // This path skipped calling the Ctor, which skips calling LowerCallI with newObjInstr, meaning that the call will not be profiled.
            // So we insert it manually here.

            if(newScHelper == IR::HelperNewScObjectNoArg &&
                newObjDst &&
                ctorOpnd->IsRegOpnd() &&
                newObjDst->AsRegOpnd()->m_sym == ctorOpnd->AsRegOpnd()->m_sym)
            {
                Assert(newObjInstr->m_func->IsSimpleJit());
                Assert(createObjDst != newObjDst);

                // The function object sym is going to be overwritten, so save it in a temp for profiling
                IR::RegOpnd *const savedCtorOpnd = IR::RegOpnd::New(ctorOpnd->GetType(), newObjInstr->m_func);
                autoReuseSavedCtorOpnd.Initialize(savedCtorOpnd, newObjInstr->m_func);
                Lowerer::InsertMove(savedCtorOpnd, ctorOpnd, newObjInstr);
                ctorOpnd = savedCtorOpnd;
            }

            // It is a constructor (CallFlags_New) and therefore a single argument (this) would have been given.
            const auto info = Lowerer::MakeCallInfoConst(Js::CallFlags_New, 1, func);

            Assert(newScObjCall);
            IR::JitProfilingInstr *const newObjJitProfilingInstr = newObjInstr->AsJitProfilingInstr();
            GenerateCallProfiling(
                newObjJitProfilingInstr->profileId,
                newObjJitProfilingInstr->inlineCacheIndex,
                createObjDst,
                ctorOpnd,
                info,
                false,
                newScObjCall,
                newObjInstr);
        }

        // MOV newObjDst, createObjDst
        if (!skipNewScObj && createObjDst != newObjDst)
        {
            this->InsertMove(newObjDst, createObjDst, newObjInstr);
        }
        newObjInstr->Remove();
    }

    // Return the first instruction above the region we've just lowered.
    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
// Emits, right after `insertAfter`, a call to one of the SimpleJit profiling
// helpers to record the call that just happened (`callInstr`): its return
// value, callee, and call info, keyed by profileId (and optionally the inline
// cache index). Also saves the implicit-call flags before `callInstr` and
// restores them after the profiling sequence so profiling doesn't perturb them.
// Returns the lowered profiling call (or `insertAfter` unchanged when there is
// nothing to profile).
IR::Instr*
Lowerer::GenerateCallProfiling(Js::ProfileId profileId, Js::InlineCacheIndex inlineCacheIndex, IR::Opnd* retval, IR::Opnd*calleeFunctionObjOpnd, IR::Opnd* callInfo, bool returnTypeOnly, IR::Instr*callInstr,IR::Instr*insertAfter)
{
    // This should only ever happen in profiling simplejit
    Assert(m_func->DoSimpleJitDynamicProfile());

    // Make sure they gave us the correct call instruction
#if defined(_M_IX86) || defined(_M_X64)
    Assert(callInstr->m_opcode == Js::OpCode::CALL);
#elif defined(_M_ARM)
    Assert(callInstr->m_opcode == Js::OpCode::BLX);
#elif defined(_M_ARM64)
    Assert(callInstr->m_opcode == Js::OpCode::BLR);
#endif
    Func*const func = insertAfter->m_func;

    // Scope block: the AutoReuseOpnds only need to live for these two moves.
    {
        // First, we should save the implicit call flags
        const auto starFlag = GetImplicitCallFlagsOpnd();
        const auto saveOpnd = IR::RegOpnd::New(starFlag->GetType(), func);

        IR::AutoReuseOpnd a(starFlag, func), b(saveOpnd, func);
        //Save the flags (before call) and restore them (after the call)
        this->InsertMove(saveOpnd, starFlag, callInstr);
        // Note: On arm this is slightly inefficient because it forces a reload of the memory location to a reg (whereas x86 can load straight from hard-coded memory into a reg)
        //    But it works and making it not reload the memory location would force more refactoring.
        this->InsertMove(starFlag, saveOpnd, insertAfter->m_next);
    }

    // Profile a call that just happened: push some extra info on the stack and call the helper
    if (!retval)
    {
        if (returnTypeOnly)
        {
            // If we are only supposed to profile the return type but don't use the return value, we might
            //    as well do nothing!
            return insertAfter;
        }
        // No return value to record: pass null to the helper.
        retval = IR::AddrOpnd::NewNull(func);
    }

    IR::Instr* profileCall = IR::Instr::New(Js::OpCode::Call, func);

    // Pick the helper: return-type-only, default-inline-cache-index, or full.
    bool needInlineCacheIndex;
    IR::JnHelperMethod helperMethod;
    if (returnTypeOnly)
    {
        needInlineCacheIndex = false;
        helperMethod = IR::HelperSimpleProfileReturnTypeCall;
    }
    else if(inlineCacheIndex == Js::Constants::NoInlineCacheIndex)
    {
        needInlineCacheIndex = false;
        helperMethod = IR::HelperSimpleProfileCall_DefaultInlineCacheIndex;
    }
    else
    {
        needInlineCacheIndex = true;
        helperMethod = IR::HelperSimpleProfileCall;
    }

    profileCall->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
    insertAfter->InsertAfter(profileCall);

    // Helper arguments are loaded in reverse (each load inserts before profileCall).
    m_lowererMD.LoadHelperArgument(profileCall, callInfo);
    m_lowererMD.LoadHelperArgument(profileCall, calleeFunctionObjOpnd);
    m_lowererMD.LoadHelperArgument(profileCall, retval);
    if(needInlineCacheIndex)
    {
        m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateInlineCacheIndexOpnd(inlineCacheIndex, func));
    }
    m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateProfileIdOpnd(profileId, func));
    // Push the frame pointer so that the profiling call can grab the stack layout
    m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateFramePointerOpnd(func));

    // No args: the helper is stdcall
    return m_lowererMD.LowerCall(profileCall, 0);
}
// Try to lower NewScObject using a fixed (hard-coded) constructor cache, so the
// default object can be allocated inline with a known type instead of calling
// the generic NewScObjectCommon helper.
//
// newObjInstr          - the NewScObject* instruction being lowered.
// newObjDst            - destination register for the newly created object.
// helperOrBailoutLabel - branch target when the runtime cache guard has been invalidated.
// callCtorLabel        - branch target that proceeds to call the constructor.
// skipNewScObj [out]   - true when the ctor needs no default object (built-ins); newObjDst is set to null.
// returnNewScObj [out] - true when the ctor has no explicit return, so the created object is the result.
// emitBailOut [in/out] - set to true when the instruction carries a ctor-guard bailout.
//
// Returns true when a fixed-cache lowering was emitted; false to fall back to the generic path.
bool Lowerer::TryLowerNewScObjectWithFixedCtorCache(IR::Instr* newObjInstr, IR::RegOpnd* newObjDst,
    IR::LabelInstr* helperOrBailoutLabel, IR::LabelInstr* callCtorLabel, bool& skipNewScObj, bool& returnNewScObj, bool& emitBailOut)
{
    skipNewScObj = false;
    returnNewScObj = false;

    AssertMsg(!PHASE_OFF(Js::ObjTypeSpecNewObjPhase, this->m_func) || !newObjInstr->HasBailOutInfo(),
        "Why do we have bailout on NewScObject when ObjTypeSpecNewObj is off?");

    // Both relevant optimization phases off: nothing to do here.
    if (PHASE_OFF(Js::FixedNewObjPhase, newObjInstr->m_func) && PHASE_OFF(Js::ObjTypeSpecNewObjPhase, this->m_func))
    {
        return false;
    }

    JITTimeConstructorCache * ctorCache;

    if (newObjInstr->HasBailOutInfo())
    {
        // A ctor-guard bailout implies the globopt already validated and cloned the cache.
        Assert(newObjInstr->IsNewScObjectInstr());
        Assert(newObjInstr->IsProfiledInstr());
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutFailedCtorGuardCheck);
        emitBailOut = true;

        ctorCache = newObjInstr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId));
        Assert(ctorCache != nullptr);
        Assert(!ctorCache->SkipNewScObject());
        Assert(!ctorCache->IsTypeFinal() || ctorCache->CtorHasNoExplicitReturnValue());

        // Register the cache so invalidation of guarded properties also invalidates it.
        LinkCtorCacheToGuardedProperties(ctorCache);
    }
    else
    {
        if (newObjInstr->m_opcode == Js::OpCode::NewScObjArray || newObjInstr->m_opcode == Js::OpCode::NewScObjArraySpread)
        {
            // These instr's carry a profile that indexes the array call site info, not the ctor cache.
            return false;
        }
        ctorCache = newObjInstr->IsProfiledInstr() ? newObjInstr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId)) : nullptr;

        if (ctorCache == nullptr)
        {
            if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(_u("FixedNewObj: function %s (%s): lowering non-fixed new script object for %s, because %s.\n"),
                    newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                    newObjInstr->IsProfiledInstr() ? _u("constructor cache hasn't been cloned") : _u("instruction is not profiled"));
                Output::Flush();
            }
            return false;
        }
    }

    Assert(ctorCache != nullptr);

    // We should only have cloned if the script contexts match.
    // TODO: oop jit, add ctorCache->scriptContext for tracing assert
    // Assert(newObjInstr->m_func->GetScriptContextInfo()->GetAddr() == ctorCache->scriptContext);

    // Built-in constructors don't need a default new object.  Since we know which constructor we're calling, we can skip creating a default
    // object and call a specialized helper (or even constructor, directly) avoiding the checks in generic NewScObjectCommon.
    if (ctorCache->SkipNewScObject())
    {
#if 0 // TODO: oop jit, add constructor info for tracing
        if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
        {
            const Js::JavascriptFunction* ctor = ctorCache->constructor;
            Js::FunctionBody* ctorBody = ctor->GetFunctionInfo()->HasBody() ? ctor->GetFunctionInfo()->GetFunctionBody() : nullptr;
            const char16* ctorName = ctorBody != nullptr ? ctorBody->GetDisplayName() : _u("<unknown>");

            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char16 debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];

            Output::Print(_u("FixedNewObj: function %s (%s): lowering skipped new script object for %s with %s ctor <unknown> (%s %s).\n"),
                newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? _u("inlined") : _u("called"),
                ctorName, ctorBody ? ctorBody->GetDebugNumberSet(debugStringBuffer) : _u("(null)"));
            Output::Flush();
        }
#endif
        // All built-in constructors share a special singleton cache that is never checked and never invalidated.  It cannot be used
        // as a guard to protect any property operations downstream from the constructor.  If this ever becomes a performance issue,
        // we could have a dedicated cache for each built-in constructor, populate it and invalidate it as any other constructor cache.
        AssertMsg(!emitBailOut, "Can't bail out on constructor cache guard for built-in constructors.");

        skipNewScObj = true;
        // Seed newObjDst with null; the ctor itself produces the result object.
        IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
        this->InsertMove(newObjDst, zeroOpnd, newObjInstr);
        return true;
    }

    AssertMsg(ctorCache->GetType() != nullptr, "Why did we hard-code a mismatched, invalidated or polymorphic constructor cache?");

#if 0 // TODO: oop jit, add constructor info for tracing
    if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
    {
        const Js::JavascriptFunction* constructor = ctorCache->constructor;
        Js::FunctionBody* constructorBody = constructor->GetFunctionInfo()->HasBody() ? constructor->GetFunctionInfo()->GetFunctionBody() : nullptr;
        const char16* constructorName = constructorBody != nullptr ? constructorBody->GetDisplayName() : _u("<unknown>");

        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        char16 debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];

        if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func))
        {
            Output::Print(_u("FixedNewObj: function %s (%s): lowering fixed new script object for %s with %s ctor <unknown> (%s %s): type = %p, slots = %d, inlined slots = %d.\n"),
                newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? _u("inlined") : _u("called"),
                constructorName, constructorBody ? constructorBody->GetDebugNumberSet(debugStringBuffer) : _u("(null)"),
                ctorCache->type, ctorCache->slotCount, ctorCache->inlineSlotCount);
        }
        else
        {
            Output::Print(_u("FixedNewObj: function %s (%s): lowering fixed new script object for %s with %s ctor <unknown> (%s %s): slots = %d, inlined slots = %d.\n"),
                newObjInstr->m_func->GetJITFunctionBody()->GetDisplayName(), newObjInstr->m_func->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
                newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? _u("inlined") : _u("called"),
                constructorName, debugStringBuffer, ctorCache->slotCount, ctorCache->inlineSlotCount);
        }
        Output::Flush();
    }
#endif

    // If the constructor has no return statements, we can safely return the object that was created here.
    // No need to check what the constructor returned - it must be undefined.
    returnNewScObj = ctorCache->CtorHasNoExplicitReturnValue();

    // Emit the cache guard check: a zeroed guard value means the cache was invalidated,
    // in which case we branch to the helper/bailout path.
    Assert(Js::ConstructorCache::GetSizeOfGuardValue() == static_cast<size_t>(TySize[TyMachPtr]));
    IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New(ctorCache->GetRuntimeCacheGuardAddr(), TyMachReg, this->m_func,
        IR::AddrOpndKindDynamicGuardValueRef);
    IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
    InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, helperOrBailoutLabel, newObjInstr);

    // If we are calling new on a class constructor, the contract is that we pass new.target as the 'this' argument.
    // function is the constructor on which we called new - which is new.target.
    FixedFieldInfo* ctor = newObjInstr->GetFixedFunction();
    if (ctor->IsClassCtor())
    {
        // MOV newObjDst, function
        this->InsertMove(newObjDst, newObjInstr->GetSrc1(), newObjInstr);
    }
    else
    {
        JITTypeHolder newObjectType(ctorCache->GetType());
        Assert(newObjectType->IsShared());

        IR::AddrOpnd* typeSrc = IR::AddrOpnd::New(newObjectType->GetAddr(), IR::AddrOpndKindDynamicType, m_func);

        // For the next call:
        //     inlineSlotSize == Number of slots to allocate beyond the DynamicObject header
        //     slotSize - inlineSlotSize == Number of aux slots to allocate
        int inlineSlotSize = ctorCache->GetInlineSlotCount();
        int slotSize = ctorCache->GetSlotCount();
        if (newObjectType->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler())
        {
            // Header-inlined slots live inside the DynamicObject header itself,
            // so subtract that capacity from what must be allocated.
            Assert(inlineSlotSize >= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity());
            Assert(inlineSlotSize == slotSize);
            slotSize = inlineSlotSize -= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity();
        }

        // Allocate the default object inline with the hard-coded type.
        GenerateDynamicObjectAlloc(newObjInstr, inlineSlotSize, slotSize, newObjDst, typeSrc);
    }

    // JMP $callCtor
    IR::BranchInstr *callCtorBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, callCtorLabel, m_func);
    newObjInstr->InsertBefore(callCtorBranch);
    return true;
}
  4349. void
  4350. Lowerer::GenerateRecyclerAllocAligned(IR::JnHelperMethod allocHelper, size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, bool inOpHelper)
  4351. {
  4352. IR::LabelInstr * allocDoneLabel = nullptr;
  4353. if (!PHASE_OFF(Js::JitAllocNewObjPhase, insertionPointInstr->m_func) && HeapInfo::IsSmallObject(allocSize))
  4354. {
  4355. IR::LabelInstr * allocHelperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4356. allocDoneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, inOpHelper);
  4357. this->m_lowererMD.GenerateFastRecyclerAlloc(allocSize, newObjDst, insertionPointInstr, allocHelperLabel, allocDoneLabel);
  4358. // $allocHelper:
  4359. insertionPointInstr->InsertBefore(allocHelperLabel);
  4360. }
  4361. // call JavascriptOperators::AllocMemForScObject(allocSize, scriptContext->GetRecycler())
  4362. this->m_lowererMD.LoadHelperArgument(insertionPointInstr, this->LoadScriptContextValueOpnd(insertionPointInstr, ScriptContextValue::ScriptContextRecycler));
  4363. this->m_lowererMD.LoadHelperArgument(insertionPointInstr, IR::IntConstOpnd::New((int32)allocSize, TyUint32, m_func, true));
  4364. IR::Instr *newObjCall = IR::Instr::New(Js::OpCode::Call, newObjDst, IR::HelperCallOpnd::New(allocHelper, m_func), m_func);
  4365. insertionPointInstr->InsertBefore(newObjCall);
  4366. this->m_lowererMD.LowerCall(newObjCall, 0);
  4367. if (allocDoneLabel != nullptr)
  4368. {
  4369. // $allocDone:
  4370. insertionPointInstr->InsertBefore(allocDoneLabel);
  4371. }
  4372. }
  4373. IR::Instr *
  4374. Lowerer::LowerGetNewScObject(IR::Instr *instr)
  4375. {
  4376. Assert(instr);
  4377. Assert(instr->m_opcode == Js::OpCode::GetNewScObject);
  4378. Assert(instr->GetDst());
  4379. Assert(instr->GetSrc1());
  4380. Assert(instr->GetSrc2());
  4381. const auto instrPrev = instr->m_prev;
  4382. Assert(instrPrev);
  4383. LowerGetNewScObjectCommon(
  4384. instr->GetDst()->AsRegOpnd(),
  4385. instr->GetSrc1()->AsRegOpnd(),
  4386. instr->GetSrc2()->AsRegOpnd(),
  4387. instr);
  4388. instr->Remove();
  4389. return instrPrev;
  4390. }
// Emit the result-selection logic for a `new` expression: if the constructor
// returned a JS object, that object is the result; otherwise the originally
// allocated object (the 'this' passed to the constructor) is the result.
// The sequence is inserted before insertBeforeInstr.
//
// resultObjOpnd        - register receiving the selected result.
// constructorReturnOpnd- register holding the constructor's return value.
// newObjOpnd           - register holding the pre-allocated object ('this').
// insertBeforeInstr    - insertion point for the emitted sequence.
void
Lowerer::LowerGetNewScObjectCommon(
    IR::RegOpnd *const resultObjOpnd,
    IR::RegOpnd *const constructorReturnOpnd,
    IR::RegOpnd *const newObjOpnd,
    IR::Instr *insertBeforeInstr)
{
    Assert(resultObjOpnd);
    Assert(constructorReturnOpnd);
    Assert(newObjOpnd);
    Assert(insertBeforeInstr);

    // (newObjOpnd == 'this' value passed to constructor)
    //
    // if (!IsJsObject(constructorReturnOpnd))
    //     goto notObjectLabel
    // newObjOpnd = constructorReturnOpnd
    // notObjectLabel:
    // resultObjOpnd = newObjOpnd
    if(!constructorReturnOpnd->IsEqual(newObjOpnd))
    {
        // Need to check whether the constructor returned an object

        IR::LabelInstr *notObjectLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        Assert(insertBeforeInstr->m_prev);
        // Insert the join label first and emit everything above it.
        IR::LabelInstr *const doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        insertBeforeInstr->InsertBefore(doneLabel);
        insertBeforeInstr = doneLabel;

#if defined(_M_ARM32_OR_ARM64)
        // On ARM the object check is done through the Op_IsObject helper call.
        m_lowererMD.LoadHelperArgument(insertBeforeInstr, constructorReturnOpnd);

        IR::Opnd * targetOpnd = IR::RegOpnd::New(StackSym::New(TyInt32,m_func), TyInt32, m_func);
        IR::Instr * callIsObjectInstr = IR::Instr::New(Js::OpCode::Call, targetOpnd, m_func);
        insertBeforeInstr->InsertBefore(callIsObjectInstr);
        this->m_lowererMD.ChangeToHelperCall(callIsObjectInstr, IR::HelperOp_IsObject);

        InsertTestBranch( targetOpnd, targetOpnd, Js::OpCode::BrEq_A, notObjectLabel,insertBeforeInstr);
#else
        // On x86/x64 an inline type-id check suffices.
        m_lowererMD.GenerateIsJsObjectTest(constructorReturnOpnd, insertBeforeInstr, notObjectLabel);
#endif
        // Value returned by constructor is an object (use constructorReturnOpnd)
        if(!resultObjOpnd->IsEqual(constructorReturnOpnd))
        {
            this->InsertMove(resultObjOpnd, constructorReturnOpnd, insertBeforeInstr);
        }
        insertBeforeInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, doneLabel, m_func));

        // Value returned by constructor is not an object (use newObjOpnd)
        insertBeforeInstr->InsertBefore(notObjectLabel);
    }
    if(!resultObjOpnd->IsEqual(newObjOpnd))
    {
        this->InsertMove(resultObjOpnd, newObjOpnd, insertBeforeInstr);
    }

    // fall through to insertBeforeInstr or doneLabel
}
  4442. ///----------------------------------------------------------------------------
  4443. ///
  4444. /// Lowerer::LowerUpdateNewScObjectCache
  4445. ///
  4446. ///----------------------------------------------------------------------------
  4447. IR::Instr *
  4448. Lowerer::LowerUpdateNewScObjectCache(IR::Instr * insertInstr, IR::Opnd *dst, IR::Opnd *src1, const bool isCtorFunction)
  4449. {
  4450. // if (!isCtorFunction)
  4451. // {
  4452. // MOV r1, [src1 + offset(type)] -- check base TypeIds_Function
  4453. // CMP [r1 + offset(typeId)], TypeIds_Function
  4454. // }
  4455. // JNE $fallThru
  4456. // MOV r2, [src1 + offset(constructorCache)]
  4457. // MOV r3, [r2 + offset(updateAfterCtor)]
  4458. // TEST r3, r3 -- check if updateAfterCtor is 0
  4459. // JEQ $fallThru
  4460. // CALL UpdateNewScObjectCache(src1, dst, scriptContext)
  4461. // $fallThru:
  4462. IR::LabelInstr *labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  4463. src1 = GetRegOpnd(src1, insertInstr, m_func, TyMachReg);
  4464. // Check if constructor is a function if we don't already know it.
  4465. if (!isCtorFunction)
  4466. {
  4467. IR::RegOpnd* src1RegOpnd = src1->AsRegOpnd();
  4468. // MOV r1, [src1 + offset(type)] -- check base TypeIds_Function
  4469. IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
  4470. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1RegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  4471. Lowerer::InsertMove(r1, indirOpnd, insertInstr);
  4472. // CMP [r1 + offset(typeId)], TypeIds_Function
  4473. // JNE $fallThru
  4474. indirOpnd = IR::IndirOpnd::New(r1, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
  4475. IR::IntConstOpnd *intOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, this->m_func, true);
  4476. IR::BranchInstr* branchInstr = InsertCompareBranch(indirOpnd, intOpnd, Js::OpCode::BrNeq_A, labelFallThru, insertInstr);
  4477. InsertObjectPoison(src1RegOpnd, branchInstr, insertInstr, false);
  4478. }
  4479. // Every function has a constructor cache, even if only the default blank one.
  4480. // r2 = MOV JavascriptFunction->constructorCache
  4481. IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
  4482. IR::IndirOpnd *opndIndir = IR::IndirOpnd::New(src1->AsRegOpnd(), Js::JavascriptFunction::GetOffsetOfConstructorCache(), TyMachReg, this->m_func);
  4483. IR::Instr *instr = Lowerer::InsertMove(r2, opndIndir, insertInstr);
  4484. // r3 = constructorCache->updateAfterCtor
  4485. IR::RegOpnd *r3 = IR::RegOpnd::New(TyInt8, this->m_func);
  4486. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(r2, Js::ConstructorCache::GetOffsetOfUpdateAfterCtor(), TyUint8, this->m_func);
  4487. instr = Lowerer::InsertMove(r3, indirOpnd, insertInstr);
  4488. // TEST r3, r3 -- check if updateAfterCtor is 0
  4489. // JEQ $fallThru
  4490. InsertTestBranch(r3, r3, Js::OpCode::BrEq_A, labelFallThru, insertInstr);
  4491. // r2 = UpdateNewScObjectCache(src1, dst, scriptContext)
  4492. insertInstr->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true)); // helper label for uncommon path
  4493. IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperUpdateNewScObjectCache, m_func);
  4494. LoadScriptContext(insertInstr);
  4495. m_lowererMD.LoadHelperArgument(insertInstr, dst);
  4496. m_lowererMD.LoadHelperArgument(insertInstr, src1);
  4497. instr = IR::Instr::New(Js::OpCode::Call, m_func);
  4498. instr->SetSrc1(opndHelper);
  4499. insertInstr->InsertBefore(instr);
  4500. m_lowererMD.LowerCall(instr, 0);
  4501. // $fallThru:
  4502. insertInstr->InsertBefore(labelFallThru);
  4503. return insertInstr;
  4504. }
// Lower NewScObjArray (new Array(...) with arguments): emit a profiled fast
// path for the single-numeric-argument case when array call site info is
// available, falling back to the ScrArr_ProfiledNewInstance helper.  After the
// helper returns, the weak function reference and call site index are stored
// into the result when it is a native array.
// Returns the first instruction above the lowered region.
IR::Instr *
Lowerer::LowerNewScObjArray(IR::Instr *newObjInstr)
{
    // No arguments at all: defer to the no-arg lowering.
    if (newObjInstr->HasEmptyArgOutChain())
    {
        newObjInstr->FreeSrc2();
        return LowerNewScObjArrayNoArg(newObjInstr);
    }

    IR::Instr* startMarkerInstr = nullptr;
    IR::Opnd *targetOpnd = newObjInstr->GetSrc1();
    Func *func = newObjInstr->m_func;

    if (!targetOpnd->IsAddrOpnd())
    {
        // Target is not a known (fixed) function address.
        if (!newObjInstr->HasBailOutInfo())
        {
            return this->LowerNewScObject(newObjInstr, true, true);
        }

        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

        // For whatever reason, we couldn't do a fixed function check on the call target.
        // Generate a runtime check on the target.
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray);
        IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, func);
        InsertCompareBranch(
            targetOpnd,
            LoadLibraryValueOpnd(newObjInstr, LibraryValue::ValueArrayConstructor),
            Js::OpCode::BrEq_A,
            true,
            labelSkipBailOut,
            newObjInstr);

        // Split the instruction: the original becomes the bailout, and a fresh
        // profiled copy (after the skip label) performs the actual lowering.
        IR::ProfiledInstr *instrNew = IR::ProfiledInstr::New(newObjInstr->m_opcode, newObjInstr->UnlinkDst(), newObjInstr->UnlinkSrc1(), newObjInstr->UnlinkSrc2(), func);
        instrNew->u.profileId = newObjInstr->AsProfiledInstr()->u.profileId;
        newObjInstr->InsertAfter(instrNew);
        newObjInstr->m_opcode = Js::OpCode::BailOut;
        GenerateBailOut(newObjInstr);
        instrNew->InsertBefore(labelSkipBailOut);
        newObjInstr = instrNew;
    }
    else
    {
        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
    }

    intptr_t weakFuncRef = 0;
    Js::ArrayCallSiteInfo *arrayInfo = nullptr;
    intptr_t arrayInfoAddr = 0;
    Assert(newObjInstr->IsProfiledInstr());

    IR::RegOpnd *resultObjOpnd = newObjInstr->GetDst()->AsRegOpnd();
    IR::Instr * insertInstr = newObjInstr->m_next;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId);

    // We may not have profileId if we converted a NewScObject to NewScObjArray
    if (profileId != Js::Constants::NoProfileId)
    {
        arrayInfo = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
        arrayInfoAddr = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
        Assert(arrayInfo);
        weakFuncRef = func->GetWeakFuncRef();
        Assert(weakFuncRef);
    }

    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);

    // Walk the ArgOut chain to find the sole constructor argument.
    IR::Opnd *linkOpnd = newObjInstr->GetSrc2();
    Assert(linkOpnd->IsSymOpnd());
    StackSym *linkSym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
    Assert(linkSym->IsSingleDef());
    IR::Instr* argInstr = linkSym->GetInstrDef();
    IR::Opnd *opndOfArrayCtor = argInstr->GetSrc1();
    const uint16 upperBoundValue = 8;

    // Generate fast path only if it meets all the conditions:
    // 1. It is the only parameter and it is a likely int
    // 2a. If 1st parameter is a variable, emit fast path with checks
    // 2b. If 1st parameter is a constant, it is in range 0 and upperBoundValue (inclusive)
    if (opndOfArrayCtor->GetValueType().IsLikelyInt() && (opndOfArrayCtor->IsAddrOpnd() || opndOfArrayCtor->IsRegOpnd())) // #1
    {
        if ((linkSym->GetArgSlotNum() == 2)) // 1. It is the only parameter
        {
            AssertMsg(linkSym->IsArgSlotSym(), "Not an argSlot symbol...");
            linkOpnd = argInstr->GetSrc2();

            bool emittedFastPath = false;
            // 2a. If 1st parameter is a variable, emit fast path with checks
            if (opndOfArrayCtor->IsRegOpnd())
            {
                if (!opndOfArrayCtor->AsRegOpnd()->IsNotInt())
                {
                    // 3. GenerateFastPath: pick the array flavor recorded by the profile.
                    if (arrayInfo && arrayInfo->IsNativeIntArray())
                    {
                        emittedFastPath = GenerateProfiledNewScObjArrayFastPath<Js::JavascriptNativeIntArray>(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, helperLabel, labelDone, opndOfArrayCtor,
                            Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex(),
                            Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef());
                    }
                    else if (arrayInfo && arrayInfo->IsNativeFloatArray())
                    {
                        emittedFastPath = GenerateProfiledNewScObjArrayFastPath<Js::JavascriptNativeFloatArray>(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, helperLabel, labelDone, opndOfArrayCtor,
                            Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex(),
                            Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef());
                    }
                    else
                    {
                        emittedFastPath = GenerateProfiledNewScObjArrayFastPath<Js::JavascriptArray>(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, helperLabel, labelDone, opndOfArrayCtor, 0, 0);
                    }
                }
            }
            // 2b. If 1st parameter is a constant, it is in range 0 and upperBoundValue (inclusive)
            else
            {
                // NOTE(review): length comes from linkSym's int const value here,
                // not from opndOfArrayCtor directly - presumably equivalent for a
                // constant ArgOut; verify against the ArgOut lowering.
                int32 length = linkSym->GetIntConstValue();
                if (length >= 0 && length <= upperBoundValue)
                {
                    emittedFastPath = GenerateProfiledNewScObjArrayFastPath(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, (uint32)length, labelDone, false);
                }
            }

            // Since we emitted fast path above, move the startCall/argOut instruction right before helper
            if (emittedFastPath)
            {
                linkSym = linkOpnd->AsRegOpnd()->m_sym->AsStackSym();
                AssertMsg(!linkSym->IsArgSlotSym() && linkSym->m_isSingleDef, "Arg tree not single def...");
                IR::Instr* startCallInstr = linkSym->m_instrDef;
                AssertMsg(startCallInstr->GetArgOutCount(false) == 2, "Generating ArrayFastPath for more than 1 parameter not allowed.");

                // Since we emitted fast path above, move the startCall/argOut instruction right before helper
                startCallInstr->Move(newObjInstr);
                argInstr->Move(newObjInstr);
            }
        }
    }

    // Slow path: call ScrArr_ProfiledNewInstance with the array info as the hidden first argument.
    newObjInstr->UnlinkSrc1();
    IR::Opnd *profileOpnd = IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, func);
    this->m_lowererMD.LoadNewScObjFirstArg(newObjInstr, profileOpnd);

    IR::JnHelperMethod helperMethod = IR::HelperScrArr_ProfiledNewInstance;

    newObjInstr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
    newObjInstr = GenerateDirectCall(newObjInstr, targetOpnd, Js::CallFlags_New);

    // Only a plain JavascriptArray vtable means the helper produced a non-native
    // array; for native arrays, record the call site index and weak func ref.
    IR::BranchInstr* branchInstr = InsertCompareBranch(
        IR::IndirOpnd::New(resultObjOpnd, 0, TyMachPtr, func),
        LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptArray),
        Js::OpCode::BrEq_A,
        true,
        labelDone,
        insertInstr);
    InsertObjectPoison(resultObjOpnd, branchInstr, insertInstr, true);

    // We know we have a native array, so store the weak ref and call site index.
    InsertMove(
        IR::IndirOpnd::New(resultObjOpnd, Js::JavascriptNativeArray::GetOffsetOfArrayCallSiteIndex(), TyUint16, func),
        IR::Opnd::CreateProfileIdOpnd(profileId, func),
        insertInstr);

    InsertMove(
        IR::IndirOpnd::New(resultObjOpnd, Js::JavascriptNativeArray::GetOffsetOfWeakFuncRef(), TyMachReg, func),
        IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, func),
        insertInstr);

    insertInstr->InsertBefore(labelDone);

    // Return the first instruction above the region we've just lowered.
    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
// Lower NewScObjArray with no arguments (new Array()): emit the profiled
// zero-length fast path when array call site info is available, falling back
// to the ScrArr_ProfiledNewInstanceNoArg helper.
// Returns the first instruction above the lowered region.
IR::Instr *
Lowerer::LowerNewScObjArrayNoArg(IR::Instr *newObjInstr)
{
    IR::Opnd *targetOpnd = newObjInstr->GetSrc1();
    Func *func = newObjInstr->m_func;

    IR::Instr* startMarkerInstr = nullptr;

    if (!targetOpnd->IsAddrOpnd())
    {
        // Target is not a known (fixed) function address.
        if (!newObjInstr->HasBailOutInfo())
        {
            return this->LowerNewScObject(newObjInstr, true, false);
        }

        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

        // For whatever reason, we couldn't do a fixed function check on the call target.
        // Generate a runtime check on the target.
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray);
        IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, func);
        InsertCompareBranch(
            targetOpnd,
            LoadLibraryValueOpnd(newObjInstr, LibraryValue::ValueArrayConstructor),
            Js::OpCode::BrEq_A,
            true,
            labelSkipBailOut,
            newObjInstr);

        // Split the instruction: the original becomes the bailout, and a fresh
        // profiled copy (after the skip label) performs the actual lowering.
        IR::ProfiledInstr *instrNew = IR::ProfiledInstr::New(newObjInstr->m_opcode, newObjInstr->UnlinkDst(), newObjInstr->UnlinkSrc1(), func);
        instrNew->u.profileId = newObjInstr->AsProfiledInstr()->u.profileId;
        newObjInstr->InsertAfter(instrNew);
        newObjInstr->m_opcode = Js::OpCode::BailOut;
        GenerateBailOut(newObjInstr);
        instrNew->InsertBefore(labelSkipBailOut);
        newObjInstr = instrNew;
    }
    else
    {
        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
    }

    Assert(newObjInstr->IsProfiledInstr());

    intptr_t weakFuncRef = 0;
    intptr_t arrayInfoAddr = 0;
    Js::ArrayCallSiteInfo *arrayInfo = nullptr;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId);
    if (profileId != Js::Constants::NoProfileId)
    {
        arrayInfo = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfo(profileId);
        arrayInfoAddr = func->GetReadOnlyProfileInfo()->GetArrayCallSiteInfoAddr(profileId);
        Assert(arrayInfo);
        weakFuncRef = func->GetWeakFuncRef();
        Assert(weakFuncRef);
    }

    // Fast path: allocate a zero-length array inline; falls through to the
    // helper call below on failure, joining at labelDone.
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    GenerateProfiledNewScObjArrayFastPath(newObjInstr, arrayInfo, arrayInfoAddr, weakFuncRef, 0, labelDone, true);
    newObjInstr->InsertAfter(labelDone);

    // Slow path: ScrArr_ProfiledNewInstanceNoArg(function, scriptContext, arrayInfo, weakFuncRef).
    // Arguments are loaded in reverse of the helper's parameter order.
    m_lowererMD.LoadHelperArgument(newObjInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, func));
    m_lowererMD.LoadHelperArgument(newObjInstr, IR::AddrOpnd::New(arrayInfoAddr, IR::AddrOpndKindDynamicArrayCallSiteInfo, func));
    LoadScriptContext(newObjInstr);
    m_lowererMD.LoadHelperArgument(newObjInstr, targetOpnd);

    newObjInstr->UnlinkSrc1();
    newObjInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperScrArr_ProfiledNewInstanceNoArg, func));
    m_lowererMD.LowerCall(newObjInstr, 0);

    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
  4727. ///----------------------------------------------------------------------------
  4728. ///
  4729. /// Lowerer::LowerPrologEpilog
  4730. ///
  4731. ///----------------------------------------------------------------------------
  4732. void
  4733. Lowerer::LowerPrologEpilog()
  4734. {
  4735. if (m_func->GetJITFunctionBody()->IsCoroutine())
  4736. {
  4737. LowerGeneratorResumeJumpTable();
  4738. }
  4739. IR::Instr * instr;
  4740. instr = m_func->m_headInstr;
  4741. AssertMsg(instr->IsEntryInstr(), "First instr isn't an EntryInstr...");
  4742. m_lowererMD.LowerEntryInstr(instr->AsEntryInstr());
  4743. instr = m_func->m_exitInstr;
  4744. AssertMsg(instr->IsExitInstr(), "Last instr isn't an ExitInstr...");
  4745. m_lowererMD.LowerExitInstr(instr->AsExitInstr());
  4746. }
  4747. void
  4748. Lowerer::LowerPrologEpilogAsmJs()
  4749. {
  4750. IR::Instr * instr;
  4751. instr = m_func->m_headInstr;
  4752. AssertMsg(instr->IsEntryInstr(), "First instr isn't an EntryInstr...");
  4753. m_lowererMD.LowerEntryInstr(instr->AsEntryInstr());
  4754. instr = m_func->m_exitInstr;
  4755. AssertMsg(instr->IsExitInstr(), "Last instr isn't an ExitInstr...");
  4756. m_lowererMD.LowerExitInstrAsmJs(instr->AsExitInstr());
  4757. }
// Expand the GeneratorResumeJumpTable pseudo-instruction into a chain of
// compare-and-branch instructions: one per recorded yield offset, each
// dispatching to that yield point's resume label. Labels with no resume
// target fall back to the function's no-save bailout label.
void
Lowerer::LowerGeneratorResumeJumpTable()
{
    Assert(m_func->GetJITFunctionBody()->IsCoroutine());

    IR::Instr * jumpTableInstr = m_func->m_headInstr;
    AssertMsg(jumpTableInstr->IsEntryInstr(), "First instr isn't an EntryInstr...");

    // Hope to do away with this linked list scan by moving this lowering to a
    // post-prolog-epilog/pre-encoder phase that is common to all architectures
    // (currently such phase is only available on amd64/arm)
    while (jumpTableInstr->m_opcode != Js::OpCode::GeneratorResumeJumpTable)
    {
        jumpTableInstr = jumpTableInstr->m_next;
    }

    // src1 holds the resume-point offset value we dispatch on.
    IR::Opnd * srcOpnd = jumpTableInstr->UnlinkSrc1();

    m_func->MapYieldOffsetResumeLabels([&](int i, const YieldOffsetResumeLabel& yorl)
    {
        uint32 offset = yorl.First();
        IR::LabelInstr * label = yorl.Second();

        if (label != nullptr && label->m_hasNonBranchRef)
        {
            // Also fix up the bailout at the label with the jump to epilog that
            // was not emitted in GenerateBailOut()
            Assert(label->m_prev->HasBailOutInfo());
            GenerateJumpToEpilogForBailOut(label->m_prev->GetBailOutInfo(), label->m_prev);
        }
        else if (label == nullptr)
        {
            // No dedicated resume label: dispatch to the shared no-save bailout.
            label = m_func->m_bailOutNoSaveLabel;
        }

        // For each offset/label pair, insert a compare of the offset and
        // branch-if-equal to the label.
        InsertCompareBranch(srcOpnd, IR::IntConstOpnd::New(offset, TyUint32, m_func), Js::OpCode::BrSrEq_A, label, jumpTableInstr);
    });

    // The pseudo-instruction itself is fully replaced by the compare chain.
    jumpTableInstr->Remove();
}
// Assign a sequential number to every instruction in the function and insert
// an interrupt probe (stack-limit check + abort helper call) after every
// loop-top label, so runaway script loops can be interrupted.
void
Lowerer::DoInterruptProbes()
{
    this->m_func->SetHasInstrNumber(true);
    uint instrCount = 1;
    FOREACH_INSTR_IN_FUNC(instr, this->m_func)
    {
        instr->SetNumber(instrCount++);
        if (instr->IsLabelInstr())
        {
            IR::LabelInstr *labelInstr = instr->AsLabelInstr();
            if (labelInstr->m_isLoopTop)
            {
                // For every loop top label, insert the following:
                //       cmp sp, ThreadContext::stackLimitForCurrentThread
                //       bgt $continue
                // $helper:
                //       call JavascriptOperators::ScriptAbort
                //       b $exit
                // $continue:
                // The new label serves as both the probe's insertion point and
                // its "probe passed, resume loop" target.
                IR::LabelInstr *newLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                labelInstr->InsertAfter(newLabel);
                this->InsertOneLoopProbe(newLabel, newLabel);
            }
        }
    }
    NEXT_INSTR_IN_FUNC;
}
// Insert an interrupt probe at each loop back branch. (Currently uncalled, since
// we're inserting probes at loop tops instead of back edges, but kept around
// because it may prove useful.)
//
// Returns the next free instruction number after numbering any instructions
// inserted for the probe.
uint
Lowerer::DoLoopProbeAndNumber(IR::BranchInstr *branchInstr)
{
    IR::LabelInstr *labelInstr = branchInstr->GetTarget();
    if (labelInstr == nullptr || labelInstr->GetNumber() == 0)
    {
        // Forward branch (possibly an indirect jump after try-catch-finally); nothing to do.
        return branchInstr->GetNumber() + 1;
    }
    Assert(labelInstr->m_isLoopTop);

    // Insert a stack probe at this branch. Number all the instructions we insert
    // and return the next instruction number.
    uint number = branchInstr->GetNumber();
    // Remember the neighbors so we can renumber exactly the inserted range below.
    IR::Instr *instrPrev = branchInstr->m_prev;
    IR::Instr *instrNext = branchInstr->m_next;
    if (branchInstr->IsUnconditional())
    {
        // The probe replaces the unconditional back edge entirely:
        //     B $loop          ==>     cmp [], 0
        //                              beq $loop
        //                      $helper:
        //                              call abort
        //                              b $exit
        this->InsertOneLoopProbe(branchInstr, labelInstr);
        branchInstr->Remove();
    }
    else
    {
        // A conditional back edge is inverted to skip the probe on fallthrough:
        //     Bcc $loop        ==>     Binv $notloop
        //                              cmp [], 0
        //                              beq $loop
        //                      $helper:
        //                              call abort
        //                              b $exit
        //                      $notloop:
        IR::LabelInstr *loopExitLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        branchInstr->SetTarget(loopExitLabel);
        LowererMD::InvertBranch(branchInstr);
        branchInstr->InsertAfter(loopExitLabel);
        this->InsertOneLoopProbe(loopExitLabel, labelInstr);
    }

    // Number everything that was inserted between the original neighbors.
    FOREACH_INSTR_IN_RANGE(instr, instrPrev->m_next, instrNext->m_prev)
    {
        instr->SetNumber(number++);
    }
    NEXT_INSTR_IN_RANGE;

    return number;
}
  4868. void
  4869. Lowerer::InsertOneLoopProbe(IR::Instr *insertInstr, IR::LabelInstr *loopLabel)
  4870. {
  4871. // Insert one interrupt probe at the given instruction. Probe the stack and call the abort helper
  4872. // directly if the probe fails.
  4873. IR::Opnd *memRefOpnd = IR::MemRefOpnd::New(
  4874. m_func->GetThreadContextInfo()->GetThreadStackLimitAddr(),
  4875. TyMachReg, this->m_func);
  4876. IR::RegOpnd *regStackPointer = IR::RegOpnd::New(
  4877. NULL, this->m_lowererMD.GetRegStackPointer(), TyMachReg, this->m_func);
  4878. InsertCompareBranch(regStackPointer, memRefOpnd, Js::OpCode::BrGt_A, loopLabel, insertInstr);
  4879. IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4880. insertInstr->InsertBefore(helperLabel);
  4881. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperScriptAbort, this->m_func);
  4882. IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
  4883. instr->SetSrc1(helperOpnd);
  4884. insertInstr->InsertBefore(instr);
  4885. this->m_lowererMD.LowerCall(instr, 0);
  4886. // Jump to the exit after the helper call. This instruction will never be reached, but the jump
  4887. // indicates that nothing is live after the call (to avoid useless spills in code that will
  4888. // be executed).
  4889. instr = this->m_func->m_exitInstr->GetPrevRealInstrOrLabel();
  4890. if (instr->IsLabelInstr())
  4891. {
  4892. helperLabel = instr->AsLabelInstr();
  4893. }
  4894. else
  4895. {
  4896. helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4897. this->m_func->m_exitInstr->InsertBefore(helperLabel);
  4898. }
  4899. instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, helperLabel, this->m_func);
  4900. insertInstr->InsertBefore(instr);
  4901. }
  4902. ///----------------------------------------------------------------------------
  4903. ///
  4904. /// Lowerer::LoadPropertySymAsArgument
  4905. ///
  4906. /// Generate code to pass a fieldSym as argument to a helper.
  4907. ///----------------------------------------------------------------------------
  4908. IR::Instr *
  4909. Lowerer::LoadPropertySymAsArgument(IR::Instr *instr, IR::Opnd *fieldSrc)
  4910. {
  4911. IR::Instr * instrPrev;
  4912. AssertMsg(fieldSrc->IsSymOpnd() && fieldSrc->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as src of LdFld");
  4913. IR::SymOpnd *symOpnd = fieldSrc->AsSymOpnd();
  4914. PropertySym * fieldSym = symOpnd->m_sym->AsPropertySym();
  4915. IR::IntConstOpnd * indexOpnd = IR::IntConstOpnd::New(fieldSym->m_propertyId, TyInt32, m_func, /*dontEncode*/true);
  4916. instrPrev = m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  4917. IR::RegOpnd * instanceOpnd = symOpnd->CreatePropertyOwnerOpnd(m_func);
  4918. m_lowererMD.LoadHelperArgument(instr, instanceOpnd);
  4919. return instrPrev;
  4920. }
  4921. ///----------------------------------------------------------------------------
  4922. ///
  4923. /// Lowerer::LoadFunctionBodyAsArgument
  4924. ///
  4925. /// Special case: the "property ID" is a key into the ScriptContext's FunctionBody map
  4926. ///----------------------------------------------------------------------------
  4927. IR::Instr *
  4928. Lowerer::LoadFunctionBodyAsArgument(IR::Instr *instr, IR::IntConstOpnd * functionBodySlotOpnd, IR::RegOpnd * envOpnd)
  4929. {
  4930. IR::Instr * instrPrev;
  4931. // We need to pass in the function reference, we can't embed the pointer to the function proxy here.
  4932. // The function proxy may be deferred parsed/serialize, and may 'progress' to a real function body after it is undeferred
  4933. // At which point the deferred function proxy may be collect.
  4934. // Just pass it the address where we will find the function proxy/body
  4935. Js::FunctionInfoPtrPtr infoRef = instr->m_func->GetJITFunctionBody()->GetNestedFuncRef((uint)functionBodySlotOpnd->GetValue());
  4936. AssertMsg(infoRef, "Expected FunctionProxy for index of NewScFunc or NewScGenFunc opnd");
  4937. IR::AddrOpnd * indexOpnd = IR::AddrOpnd::New((Js::Var)infoRef, IR::AddrOpndKindDynamicMisc, m_func);
  4938. instrPrev = m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  4939. m_lowererMD.LoadHelperArgument(instr, envOpnd);
  4940. return instrPrev;
  4941. }
// Lower a profiling (JIT-profiled) field load to a call into the matching
// ProfilingHelpers entry point. The opcode selects the helper; the common
// argument setup (frame pointer, inline cache index, property sym) is shared
// across the plain LdFld variants via the ldFldCommon label.
// Returns the instruction preceding the lowered call.
IR::Instr *
Lowerer::LowerProfiledLdFld(IR::JitProfilingInstr *ldFldInstr)
{
    const auto instrPrev = ldFldInstr->m_prev;

    auto src = ldFldInstr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");

    IR::JnHelperMethod helper = IR::HelperInvalid;
    switch (ldFldInstr->m_opcode)
    {
        // Each plain-LdFld variant only differs in which helper it calls;
        // all of them share the ldFldCommon argument setup below.
        case Js::OpCode::LdFld:
            helper = IR::HelperProfiledLdFld;
            goto ldFldCommon;
        case Js::OpCode::LdRootFld:
            helper = IR::HelperProfiledLdRootFld;
            goto ldFldCommon;
        case Js::OpCode::LdMethodFld:
            helper = IR::HelperProfiledLdMethodFld;
            goto ldFldCommon;
        case Js::OpCode::LdRootMethodFld:
            helper = IR::HelperProfiledLdRootMethodFld;
            goto ldFldCommon;
        case Js::OpCode::LdFldForCallApplyTarget:
            helper = IR::HelperProfiledLdFld_CallApplyTarget;
            goto ldFldCommon;
        case Js::OpCode::LdFldForTypeOf:
            helper = IR::HelperProfiledLdFldForTypeOf;
            goto ldFldCommon;
        case Js::OpCode::LdRootFldForTypeOf:
            helper = IR::HelperProfiledLdRootFldForTypeOf;
            goto ldFldCommon;

ldFldCommon:
        {
            Assert(ldFldInstr->profileId == Js::Constants::NoProfileId);

            /*
                Var ProfilingHelpers::ProfiledLdFld_Jit(
                    const Var instance,
                    const PropertyId propertyId,
                    const InlineCacheIndex inlineCacheIndex,
                    void *const framePointer)
            */
            // Arguments are pushed in reverse of the helper's signature order.
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
            m_lowererMD.LoadHelperArgument(
                ldFldInstr,
                IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
            LoadPropertySymAsArgument(ldFldInstr, src);
            break;
        }

        case Js::OpCode::LdSuperFld:
        {
            Assert(ldFldInstr->profileId == Js::Constants::NoProfileId);
            IR::Opnd * src2 = nullptr;
            /*
                Var ProfilingHelpers::ProfiledLdSuperFld_Jit(
                    const Var instance,
                    const PropertyId propertyId,
                    const InlineCacheIndex inlineCacheIndex,
                    void *const framePointer,
                    const Var thisInstance)
            */
            // src2 carries the 'this' instance, pushed first (last parameter).
            src2 = ldFldInstr->UnlinkSrc2();

            m_lowererMD.LoadHelperArgument(ldFldInstr, src2 );
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
            m_lowererMD.LoadHelperArgument(
                ldFldInstr,
                IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
            LoadPropertySymAsArgument(ldFldInstr, src);

            helper = IR::HelperProfiledLdSuperFld;
            break;
        }

        case Js::OpCode::LdLen_A:
            // Unlike the cases above, LdLen carries a profile ID of its own.
            Assert(ldFldInstr->profileId != Js::Constants::NoProfileId);
            /*
                Var ProfilingHelpers::ProfiledLdLen_Jit(
                    const Var instance,
                    const PropertyId propertyId,
                    const InlineCacheIndex inlineCacheIndex,
                    const ProfileId profileId,
                    void *const framePointer)
            */
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateProfileIdOpnd(ldFldInstr->profileId, m_func));
            m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
            LoadPropertySymAsArgument(ldFldInstr, src);
            helper = IR::HelperProfiledLdLen;
            break;

        default:
            Assert(false);
    }

    // Replace the original instruction's src with the chosen helper and lower
    // it to an actual call.
    ldFldInstr->SetSrc1(IR::HelperCallOpnd::New(helper, m_func));
    m_lowererMD.LowerCall(ldFldInstr, 0);

    return instrPrev;
}
// Emit the fast-path load for a flag-inline-cache hit where the property lives
// on the cached prototype object: load the proto from the cache, optionally
// its aux slot array, the cached slot index, then the value, and jump to the
// fall-through label.
void
Lowerer::GenerateProtoLdFldFromFlagInlineCache(
    IR::Instr * insertBeforeInstr,
    IR::Opnd * opndDst,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelFallThru,
    bool isInlineSlot)
{
    // Generate:
    //
    // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
    // s1 = MOV [&s1->slots] -- load the slot array
    // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
    // dst = MOV [s1 + s2*4]
    //      JMP $fallthru
    IR::Opnd* inlineCacheObjOpnd;
    IR::IndirOpnd * opndIndir;
    IR::RegOpnd * opndObjSlots = nullptr;

    inlineCacheObjOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.object), TyMachReg, this->m_func);

    // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
    IR::RegOpnd *opndObject = IR::RegOpnd::New(TyMachReg, this->m_func);
    InsertMove(opndObject, inlineCacheObjOpnd, insertBeforeInstr, false);

    if (!isInlineSlot)
    {
        // s1 = MOV [&s1->slots] -- load the slot array (aux slots of the proto)
        opndObjSlots = IR::RegOpnd::New(TyMachReg, this->m_func);
        opndIndir = IR::IndirOpnd::New(opndObject, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        InsertMove(opndObjSlots, opndIndir, insertBeforeInstr, false);
    }

    // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
    IR::RegOpnd *opndSlotIndex = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::Opnd* slotIndexOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, this->m_func);
    InsertMove(opndSlotIndex, slotIndexOpnd, insertBeforeInstr, false);

    if (isInlineSlot)
    {
        // dst = MOV [s1 + s2*scale] -- inline slots live directly on the object
        opndIndir = IR::IndirOpnd::New(opndObject, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
    }
    else
    {
        // dst = MOV [s1 + s2*scale] -- indexed off the aux slot array
        opndIndir = IR::IndirOpnd::New(opndObjSlots, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
    }
    InsertMove(opndDst, opndIndir, insertBeforeInstr, false);

    // JMP $fallthru
    InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
}
  5081. void
  5082. Lowerer::GenerateLocalLdFldFromFlagInlineCache(
  5083. IR::Instr * insertBeforeInstr,
  5084. IR::RegOpnd * opndBase,
  5085. IR::Opnd * opndDst,
  5086. IR::RegOpnd * opndInlineCache,
  5087. IR::LabelInstr * labelFallThru,
  5088. bool isInlineSlot)
  5089. {
  5090. // Generate:
  5091. //
  5092. // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
  5093. // s1 = MOV [&s1->slots] -- load the slot array
  5094. // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
  5095. // dst = MOV [s1 + s2*4]
  5096. // JMP $fallthru
  5097. IR::IndirOpnd * opndIndir;
  5098. IR::RegOpnd * opndObjSlots = nullptr;
  5099. if (!isInlineSlot)
  5100. {
  5101. // s1 = MOV [&s1->slots] -- load the slot array
  5102. opndObjSlots = IR::RegOpnd::New(TyMachReg, this->m_func);
  5103. opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
  5104. InsertMove(opndObjSlots, opndIndir, insertBeforeInstr, false);
  5105. }
  5106. // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
  5107. IR::RegOpnd *opndSlotIndex = IR::RegOpnd::New(TyMachReg, this->m_func);
  5108. IR::Opnd* slotIndexOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, this->m_func);
  5109. InsertMove(opndSlotIndex, slotIndexOpnd, insertBeforeInstr, false);
  5110. if (isInlineSlot)
  5111. {
  5112. // dst = MOV [s1 + s2*4]
  5113. opndIndir = IR::IndirOpnd::New(opndBase, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
  5114. }
  5115. else
  5116. {
  5117. // dst = MOV [s1 + s2*4]
  5118. opndIndir = IR::IndirOpnd::New(opndObjSlots, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
  5119. }
  5120. InsertMove(opndDst, opndIndir, insertBeforeInstr, false);
  5121. // JMP $fallthru
  5122. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  5123. }
  5124. void
  5125. Lowerer::GenerateFlagProtoCheck(
  5126. IR::Instr * insertBeforeInstr,
  5127. IR::RegOpnd * opndInlineCache,
  5128. IR::LabelInstr * labelNotOnProto)
  5129. {
  5130. // Generate:
  5131. //
  5132. // TEST [&(inlineCache->u.accessor.isOnProto)], Js::FlagIsOnProto
  5133. // JEQ $next
  5134. IR::Opnd* flagsOpnd;
  5135. flagsOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.rawUInt16), TyInt8, insertBeforeInstr->m_func);
  5136. uint isOnProtoFlagMask = Js::InlineCache::GetIsOnProtoFlagMask();
  5137. InsertTestBranch(flagsOpnd, IR::IntConstOpnd::New(isOnProtoFlagMask, TyInt8, this->m_func), Js::OpCode::BrEq_A, labelNotOnProto, insertBeforeInstr);
  5138. }
  5139. ///----------------------------------------------------------------------------
  5140. ///
  5141. /// Lowerer::GenerateFastLdMethodFromFlags
  5142. ///
  5143. /// Make use of the helper to cache the type and slot index used to do a LdFld
  5144. /// and do an inline load from the appropriate slot if the type hasn't changed
  5145. /// since the last time this LdFld was executed.
  5146. ///
  5147. ///----------------------------------------------------------------------------
  5148. bool
  5149. Lowerer::GenerateFastLdMethodFromFlags(IR::Instr * instrLdFld)
  5150. {
  5151. IR::LabelInstr * labelFallThru;
  5152. IR::LabelInstr * bailOutLabel;
  5153. IR::Opnd * opndSrc;
  5154. IR::Opnd * opndDst;
  5155. IR::RegOpnd * opndBase;
  5156. IR::RegOpnd * opndType;
  5157. IR::RegOpnd * opndInlineCache;
  5158. opndSrc = instrLdFld->GetSrc1();
  5159. AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
  5160. "Expected property sym operand as src of LdFldFlags");
  5161. IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();
  5162. Assert(!instrLdFld->DoStackArgsOpt());
  5163. if (propertySymOpnd->IsTypeCheckSeqCandidate())
  5164. {
  5165. AssertMsg(propertySymOpnd->HasObjectTypeSym(), "Type optimized property sym operand without a type sym?");
  5166. StackSym *typeSym = propertySymOpnd->GetObjectTypeSym();
  5167. opndType = IR::RegOpnd::New(typeSym, TyMachReg, this->m_func);
  5168. }
  5169. else
  5170. {
  5171. opndType = IR::RegOpnd::New(TyMachReg, this->m_func);
  5172. }
  5173. opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  5174. opndDst = instrLdFld->GetDst();
  5175. opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
  5176. labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  5177. // Label to jump to (or fall through to) when bailing out
  5178. bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instrLdFld->m_func, true /* isOpHelper */);
  5179. InsertMove(opndInlineCache, LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd), instrLdFld);
  5180. IR::LabelInstr * labelFlagAux = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  5181. // Check the flag cache with the untagged type
  5182. GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, opndType, bailOutLabel);
  5183. GenerateFlagInlineCacheCheck(instrLdFld, opndType, opndInlineCache, labelFlagAux);
  5184. IR::LabelInstr * labelFlagInlineLocal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  5185. GenerateFlagProtoCheck(instrLdFld, opndInlineCache, labelFlagInlineLocal);
  5186. GenerateProtoLdFldFromFlagInlineCache(instrLdFld, opndDst, opndInlineCache, labelFallThru, true);
  5187. instrLdFld->InsertBefore(labelFlagInlineLocal);
  5188. GenerateLocalLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
  5189. // Check the flag cache with the tagged type
  5190. instrLdFld->InsertBefore(labelFlagAux);
  5191. IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, this->m_func);
  5192. m_lowererMD.GenerateLoadTaggedType(instrLdFld, opndType, opndTaggedType);
  5193. GenerateFlagInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, bailOutLabel);
  5194. IR::LabelInstr * labelFlagAuxLocal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  5195. GenerateFlagProtoCheck(instrLdFld, opndInlineCache, labelFlagAuxLocal);
  5196. GenerateProtoLdFldFromFlagInlineCache(instrLdFld, opndDst, opndInlineCache, labelFallThru, false);
  5197. instrLdFld->InsertBefore(labelFlagAuxLocal);
  5198. GenerateLocalLdFldFromFlagInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
  5199. instrLdFld->InsertBefore(bailOutLabel);
  5200. instrLdFld->InsertAfter(labelFallThru);
  5201. // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
  5202. // ordering instructions anymore.
  5203. instrLdFld->UnlinkSrc1();
  5204. GenerateBailOut(instrLdFld);
  5205. return true;
  5206. }
  5207. ///----------------------------------------------------------------------------
  5208. ///
  5209. /// Lowerer::LowerLdFld
  5210. ///
  5211. /// Lower an instruction (LdFld, ScopedLdFld) that takes a property
  5212. /// reference as a source and puts a result in a register.
  5213. ///
  5214. ///----------------------------------------------------------------------------
  5215. IR::Instr *
  5216. Lowerer::LowerLdFld(
  5217. IR::Instr * ldFldInstr,
  5218. IR::JnHelperMethod helperMethod,
  5219. IR::JnHelperMethod polymorphicHelperMethod,
  5220. bool useInlineCache,
  5221. IR::LabelInstr *labelBailOut,
  5222. bool isHelper)
  5223. {
  5224. if (ldFldInstr->IsJitProfilingInstr())
  5225. {
  5226. // If we want to profile then do something completely different
  5227. return this->LowerProfiledLdFld(ldFldInstr->AsJitProfilingInstr());
  5228. }
  5229. IR::Opnd *src;
  5230. IR::Instr *instrPrev = ldFldInstr->m_prev;
  5231. src = ldFldInstr->UnlinkSrc1();
  5232. if (ldFldInstr->m_opcode == Js::OpCode::LdSuperFld)
  5233. {
  5234. IR::Opnd * src2 = nullptr;
  5235. src2 = ldFldInstr->UnlinkSrc2();
  5236. m_lowererMD.LoadHelperArgument(ldFldInstr, src2);
  5237. }
  5238. AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
  5239. if (useInlineCache)
  5240. {
  5241. IR::Opnd * inlineCacheOpnd;
  5242. AssertMsg(src->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
  5243. if (src->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache && polymorphicHelperMethod != helperMethod)
  5244. {
  5245. JITTimePolymorphicInlineCache * polymorphicInlineCache = src->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache;
  5246. helperMethod = polymorphicHelperMethod;
  5247. inlineCacheOpnd = IR::AddrOpnd::New(polymorphicInlineCache->GetAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func);
  5248. }
  5249. else
  5250. {
  5251. // Need to load runtime inline cache opnd first before loading any helper argument
  5252. // because LoadRuntimeInlineCacheOpnd may create labels marked as helper,
  5253. // and cause op helper register push/pop save in x86, messing up with any helper arguments that is already pushed
  5254. inlineCacheOpnd = this->LoadRuntimeInlineCacheOpnd(ldFldInstr, src->AsPropertySymOpnd(), isHelper);
  5255. }
  5256. this->LoadPropertySymAsArgument(ldFldInstr, src);
  5257. this-> m_lowererMD.LoadHelperArgument(
  5258. ldFldInstr,
  5259. IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
  5260. this->m_lowererMD.LoadHelperArgument(ldFldInstr, inlineCacheOpnd);
  5261. this->m_lowererMD.LoadHelperArgument(ldFldInstr, LoadFunctionBodyOpnd(ldFldInstr));
  5262. }
  5263. else
  5264. {
  5265. LoadScriptContext(ldFldInstr);
  5266. this->LoadPropertySymAsArgument(ldFldInstr, src);
  5267. }
  5268. // Do we need to reload the type and slot array after the helper returns?
  5269. // (We do if there's a propertySymOpnd downstream that needs it, i.e., the type is not dead.)
  5270. IR::RegOpnd *opndBase = src->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func);
  5271. m_lowererMD.ChangeToHelperCall(ldFldInstr, helperMethod, labelBailOut, opndBase, src->AsSymOpnd()->IsPropertySymOpnd() ? src->AsSymOpnd()->AsPropertySymOpnd() : nullptr, isHelper);
  5272. return instrPrev;
  5273. }
// Attempt to lower a field load using its object-type-specialized (cached-type)
// information: emit an optional type check, then a direct slot load, replacing
// the helper call entirely.
//
// Returns true when the instruction was fully consumed (either removed
// outright or converted to a bailout). Returns false when the caller must
// still emit the helper path; in that case *continueAsHelperOut/*labelHelperOut
// tell the caller to continue under the object-check-failed label, and
// *typeOpndOut carries the loaded type register (if a check was emitted).
bool
Lowerer::GenerateLdFldWithCachedType(IR::Instr * instrLdFld, bool* continueAsHelperOut, IR::LabelInstr** labelHelperOut, IR::RegOpnd** typeOpndOut)
{
    IR::Instr *instr;
    IR::Opnd *opnd;
    IR::LabelInstr *labelObjCheckFailed = nullptr;
    IR::LabelInstr *labelTypeCheckFailed = nullptr;
    IR::LabelInstr *labelDone = nullptr;

    // Initialize all out-params to their "nothing emitted" state up front.
    Assert(continueAsHelperOut != nullptr);
    *continueAsHelperOut = false;

    Assert(labelHelperOut != nullptr);
    *labelHelperOut = nullptr;

    Assert(typeOpndOut != nullptr);
    *typeOpndOut = nullptr;

    Assert(instrLdFld->GetSrc1()->IsSymOpnd());
    if (!instrLdFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
    {
        return false;
    }

    IR::PropertySymOpnd *propertySymOpnd = instrLdFld->GetSrc1()->AsPropertySymOpnd();
    if (!propertySymOpnd->IsTypeCheckSeqCandidate())
    {
        return false;
    }

    AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");

    if (!propertySymOpnd->IsTypeCheckSeqParticipant() && !propertySymOpnd->NeedsLocalTypeCheck())
    {
        return false;
    }

    Assert(!propertySymOpnd->NeedsTypeCheckAndBailOut() || (instrLdFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrLdFld->GetBailOutKind())));

    // In the backwards pass we only add guarded property operations to instructions that are not already
    // protected by an upstream type check.
    Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);

    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Field load: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n"),
        Js::OpCodeUtil::GetOpCodeName(instrLdFld->m_opcode),
        propertySymOpnd->m_sym->AsPropertySym()->m_propertyId,
        this->m_func->GetJITFunctionBody()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex,
        propertySymOpnd->GetCacheLayoutString(),
        propertySymOpnd->IsTypeChecked() ? _u("true") : _u("false"));

    if (propertySymOpnd->HasFinalType() && !propertySymOpnd->IsLoadedFromProto())
    {
        propertySymOpnd->UpdateSlotForFinalType();
    }

    // TODO (ObjTypeSpec): If ((PropertySym*)propertySymOpnd->m_sym)->m_stackSym->m_isIntConst consider emitting a direct
    // jump to helper or bailout. If we have a type check bailout, we could even abort compilation.

    bool hasTypeCheckBailout = instrLdFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrLdFld->GetBailOutKind());

    // If the hard-coded type is not available here, do a type check, and branch to the helper if the check fails.
    // In the prototype case, we have to check the type even if it was checked upstream, to cover the case where
    // the property has been added locally. Note that this is not necessary if the proto chain has been checked,
    // because then we know there's been no store of the property since the type was checked.
    bool emitPrimaryTypeCheck = propertySymOpnd->NeedsPrimaryTypeCheck();
    bool emitLocalTypeCheck = propertySymOpnd->NeedsLocalTypeCheck();
    bool emitLoadFromProtoTypeCheck = propertySymOpnd->NeedsLoadFromProtoTypeCheck();
    bool emitTypeCheck = emitPrimaryTypeCheck || emitLocalTypeCheck || emitLoadFromProtoTypeCheck;

    if (emitTypeCheck)
    {
        if (emitLoadFromProtoTypeCheck)
        {
            // Record this operation as guarded by the type check we emit.
            propertySymOpnd->EnsureGuardedPropOps(this->m_func->m_alloc);
            propertySymOpnd->SetGuardedPropOp(propertySymOpnd->GetObjTypeSpecFldId());
        }
        labelTypeCheckFailed = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        // With a type-check bailout, the object check and type check share one
        // failure label; otherwise object-check failure gets its own label so
        // the caller can continue with the helper path from there.
        labelObjCheckFailed = hasTypeCheckBailout ? labelTypeCheckFailed : IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        *typeOpndOut = this->GenerateCachedTypeCheck(instrLdFld, propertySymOpnd, labelObjCheckFailed, labelTypeCheckFailed);
    }

    IR::Opnd *opndSlotArray;
    if (propertySymOpnd->IsLoadedFromProto())
    {
        opndSlotArray = this->LoadSlotArrayWithCachedProtoType(instrLdFld, propertySymOpnd);
    }
    else
    {
        opndSlotArray = this->LoadSlotArrayWithCachedLocalType(instrLdFld, propertySymOpnd);
    }

    // Load the value from the slot, getting the slot ID from the cache.
    uint16 index = propertySymOpnd->GetSlotIndex();
    // NOTE(review): index is uint16, so integer promotion makes `index != -1`
    // always true (index can never equal int -1); presumably this meant to
    // compare against (uint16)-1 — confirm intent.
    Assert(index != -1);

    if (opndSlotArray->IsRegOpnd())
    {
        opnd = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, this->m_func);
    }
    else
    {
        Assert(opndSlotArray->IsMemRefOpnd());
        opnd = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, this->m_func, IR::AddrOpndKindDynamicPropertySlotRef);
    }

    Lowerer::InsertMove(instrLdFld->GetDst(), opnd, instrLdFld);

    // We eliminate the helper, or the type check succeeds, or we bail out before the operation.
    // Either delete the original instruction or replace it with a bailout.
    if (!emitPrimaryTypeCheck && !emitLocalTypeCheck && !emitLoadFromProtoTypeCheck)
    {
        Assert(labelTypeCheckFailed == nullptr);
        AssertMsg(!instrLdFld->HasBailOutInfo(), "Why does a direct field load have bailout?");
        instrLdFld->Remove();
        return true;
    }

    // Otherwise, branch around the bailout or helper.
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrLdFld->InsertBefore(instr);

    // Insert the bailout or helper label here.
    instrLdFld->InsertBefore(labelTypeCheckFailed);
    instrLdFld->InsertAfter(labelDone);

    if (hasTypeCheckBailout)
    {
        AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !propertySymOpnd->IsTypeDead(),
                  "Why does a field load have a type check bailout, if its type is dead?");

        // Convert the original instruction to a bailout.
        if (instrLdFld->GetBailOutInfo()->bailOutInstr != instrLdFld)
        {
            // Set the cache index in the bailout info so that the bailout code will write it into the
            // bailout record at runtime.
            instrLdFld->GetBailOutInfo()->polymorphicCacheIndex = propertySymOpnd->m_inlineCacheIndex;
        }

        instrLdFld->FreeDst();
        instrLdFld->FreeSrc1();
        instrLdFld->m_opcode = Js::OpCode::BailOut;
        this->GenerateBailOut(instrLdFld);

        return true;
    }
    else
    {
        // No bailout: the caller finishes the helper path under the
        // object-check-failed label.
        *continueAsHelperOut = true;
        Assert(labelObjCheckFailed != nullptr && labelObjCheckFailed != labelTypeCheckFailed);
        *labelHelperOut = labelObjCheckFailed;

        return false;
    }
}
  5406. template<bool isRoot>
  5407. IR::Instr* Lowerer::GenerateCompleteLdFld(IR::Instr* instr, bool emitFastPath, IR::JnHelperMethod monoHelperAfterFastPath, IR::JnHelperMethod polyHelperAfterFastPath,
  5408. IR::JnHelperMethod monoHelperWithoutFastPath, IR::JnHelperMethod polyHelperWithoutFastPath)
  5409. {
  5410. if(instr->CallsAccessor() && instr->HasBailOutInfo())
  5411. {
  5412. IR::BailOutKind kindMinusBits = instr->GetBailOutKind() & ~IR::BailOutKindBits;
  5413. Assert(kindMinusBits != IR::BailOutOnImplicitCalls && kindMinusBits != IR::BailOutOnImplicitCallsPreOp);
  5414. }
  5415. IR::Instr* prevInstr = instr->m_prev;
  5416. IR::LabelInstr* labelHelper = nullptr;
  5417. IR::LabelInstr* labelBailOut = nullptr;
  5418. bool isHelper = false;
  5419. IR::RegOpnd* typeOpnd = nullptr;
  5420. if (isRoot)
  5421. {
  5422. // Don't do the fast path here if emitFastPath is false, even if we can.
  5423. if (emitFastPath && (this->GenerateLdFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd) || this->GenerateNonConfigurableLdRootFld(instr)))
  5424. {
  5425. Assert(labelHelper == nullptr);
  5426. return prevInstr;
  5427. }
  5428. }
  5429. else
  5430. {
  5431. if (this->GenerateLdFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd))
  5432. {
  5433. Assert(labelHelper == nullptr);
  5434. return prevInstr;
  5435. }
  5436. }
  5437. if (emitFastPath)
  5438. {
  5439. if (!GenerateFastLdFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, &labelBailOut, typeOpnd, &isHelper, &labelHelper))
  5440. {
  5441. if (labelHelper != nullptr)
  5442. {
  5443. labelHelper->isOpHelper = isHelper;
  5444. instr->InsertBefore(labelHelper);
  5445. }
  5446. prevInstr = LowerLdFld(instr, monoHelperAfterFastPath, polyHelperAfterFastPath, true, labelBailOut, isHelper);
  5447. }
  5448. }
  5449. else
  5450. {
  5451. if (labelHelper != nullptr)
  5452. {
  5453. labelHelper->isOpHelper = isHelper;
  5454. instr->InsertBefore(labelHelper);
  5455. }
  5456. prevInstr = LowerLdFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, true, labelBailOut, isHelper);
  5457. }
  5458. return prevInstr;
  5459. }
// Lowers a CheckFixedFld instruction: emits the runtime checks (type check and/or
// property guard check) needed to validate the fixed field (method or data property)
// the JIT specialized on, then converts the original instruction into a BailOut taken
// when a check fails. Returns true, since the instruction is always fully lowered here
// (either removed outright or turned into the bailout).
bool
Lowerer::GenerateCheckFixedFld(IR::Instr * instrChkFld)
{
    IR::Instr *instr;
    IR::LabelInstr *labelBailOut = nullptr;
    IR::LabelInstr *labelDone = nullptr;
    AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instrChkFld->m_func) ||
        !PHASE_OFF(Js::UseFixedDataPropsPhase, instrChkFld->m_func), "Lowering a check fixed field with fixed data/method phase disabled?");
    Assert(instrChkFld->GetSrc1()->IsSymOpnd() && instrChkFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
    IR::PropertySymOpnd *propertySymOpnd = instrChkFld->GetSrc1()->AsPropertySymOpnd();
    AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
    Assert(propertySymOpnd->MayNeedTypeCheckProtection());
    // In the backwards pass we only add guarded property operations to instructions that are not already
    // protected by an upstream type check.
    Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);
    // For the non-configurable properties on the global object we do not need a type check. Otherwise,
    // we need a type check and bailout here unless this operation is part of the type check sequence and
    // is protected by a type check upstream.
    bool emitPrimaryTypeCheck = propertySymOpnd->NeedsPrimaryTypeCheck();
    // In addition, we may also need a local type check in case the property comes from the prototype and
    // it may have been overwritten on the instance after the primary type check upstream. If the property
    // comes from the instance, we must still protect against its value changing after the type check, but
    // for this a cheaper guard check is sufficient (see below).
    bool emitFixedFieldTypeCheck = propertySymOpnd->NeedsCheckFixedFieldTypeCheck() &&
        (!propertySymOpnd->IsTypeChecked() || propertySymOpnd->IsLoadedFromProto());
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
    uint inlineCacheIndex = propertySymOpnd->m_inlineCacheIndex;
    OUTPUT_TRACE_FUNC(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Fixed field check: %s, property ID: %d, cache ID: %u, cloned cache: true, layout: %s, redundant check: %s count of props: %u \n"),
        Js::OpCodeUtil::GetOpCodeName(instrChkFld->m_opcode),
        propertySym->m_propertyId,
        inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(), propertySymOpnd->IsTypeChecked() ? _u("true") : _u("false"),
        propertySymOpnd->GetGuardedPropOps() ? propertySymOpnd->GetGuardedPropOps()->Count() : 0);
    if (emitPrimaryTypeCheck || emitFixedFieldTypeCheck)
    {
        labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        if(emitFixedFieldTypeCheck && propertySymOpnd->IsRootObjectNonConfigurableFieldLoad())
        {
            AssertMsg(!propertySymOpnd->GetGuardedPropOps() || propertySymOpnd->GetGuardedPropOps()->IsEmpty(), "This property Guard is used only for one property");
            // We need only the cheaper guard check, if the property belongs to the GlobalObject.
            GenerateFixedFieldGuardCheck(instrChkFld, propertySymOpnd, labelBailOut);
        }
        else
        {
            if (emitFixedFieldTypeCheck)
            {
                // Record this op as guarded by the type check we are about to emit.
                propertySymOpnd->EnsureGuardedPropOps(this->m_func->m_alloc);
                propertySymOpnd->SetGuardedPropOp(propertySymOpnd->GetObjTypeSpecFldId());
            }
            this->GenerateCachedTypeCheck(instrChkFld, propertySymOpnd, labelBailOut, labelBailOut);
        }
    }
    // We may still need this guard if we didn't emit the write protect type check above. This situation arises if we have
    // a fixed field from the instance (not proto) and a property of the same name has been written somewhere between the
    // primary type check and here. Note that we don't need a type check, because we know the fixed field exists on the
    // object even if it has been written since primary type check, but we need to verify the fixed value didn't get overwritten.
    if (!emitPrimaryTypeCheck && !emitFixedFieldTypeCheck && !propertySymOpnd->IsWriteGuardChecked())
    {
        if (!PHASE_OFF(Js::FixedFieldGuardCheckPhase, this->m_func))
        {
            Assert(labelBailOut == nullptr);
            labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            GenerateFixedFieldGuardCheck(instrChkFld, propertySymOpnd, labelBailOut);
        }
        // NOTE(review): if FixedFieldGuardCheckPhase is off, labelBailOut stays null here and
        // is still passed to InsertBefore below — presumably that phase is never disabled in
        // practice; confirm.
    }
    // Note that a type handler holds only a weak reference to the singleton instance it represents, so
    // it is possible that the instance gets collected before the type and handler do. Hence, the upstream
    // type check may succeed, even as the original instance no longer exists. However, this would happen
    // only if another instance reached the same type (otherwise we wouldn't ever pass the type check
    // upstream). In that case we would have invalidated all fixed fields on that type, and so the type
    // check (or property guard check, if necessary) above would fail. All in all, we would never attempt
    // to access a fixed field from an instance that has been collected.
    if (!emitPrimaryTypeCheck && !emitFixedFieldTypeCheck && propertySymOpnd->IsWriteGuardChecked())
    {
        // Fully protected upstream: no runtime check needed, so the instruction is deleted.
        Assert(labelBailOut == nullptr);
        AssertMsg(!instrChkFld->HasBailOutInfo(), "Why does a direct fixed field check have bailout?");
        if (propertySymOpnd->ProducesAuxSlotPtr())
        {
            this->GenerateAuxSlotPtrLoad(propertySymOpnd, instrChkFld);
        }
        instrChkFld->Remove();
        return true;
    }
    // Branch around the bailout on the success path.
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrChkFld->InsertBefore(instr);
    // Insert the helper label here.
    instrChkFld->InsertBefore(labelBailOut);
    instrChkFld->InsertAfter(labelDone);
    if (propertySymOpnd->ProducesAuxSlotPtr())
    {
        this->GenerateAuxSlotPtrLoad(propertySymOpnd, labelDone->m_next);
    }
    // Convert the original instruction to a bailout.
    Assert(instrChkFld->HasBailOutInfo());
    if (instrChkFld->GetBailOutInfo()->bailOutInstr != instrChkFld)
    {
        // Set the cache index in the bailout info so that the bailout code will write it into the
        // bailout record at runtime.
        instrChkFld->GetBailOutInfo()->polymorphicCacheIndex = inlineCacheIndex;
    }
    instrChkFld->FreeSrc1();
    instrChkFld->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(instrChkFld);
    return true;
}
  5568. void
  5569. Lowerer::GenerateCheckObjType(IR::Instr * instrChkObjType)
  5570. {
  5571. Assert(instrChkObjType->GetSrc1()->IsSymOpnd() && instrChkObjType->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
  5572. IR::PropertySymOpnd *propertySymOpnd = instrChkObjType->GetSrc1()->AsPropertySymOpnd();
  5573. // Why do we have an explicit type check if the cached type has been checked upstream? The dead store pass should have
  5574. // removed this instruction.
  5575. Assert(propertySymOpnd->IsTypeCheckSeqCandidate() && !propertySymOpnd->IsTypeChecked());
  5576. // Why do we have an explicit type check on a non-configurable root field load?
  5577. Assert(!propertySymOpnd->IsRootObjectNonConfigurableFieldLoad());
  5578. PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
  5579. uint inlineCacheIndex = propertySymOpnd->m_inlineCacheIndex;
  5580. PHASE_PRINT_TESTTRACE(
  5581. Js::ObjTypeSpecPhase,
  5582. this->m_func,
  5583. _u("Object type check: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n"),
  5584. Js::OpCodeUtil::GetOpCodeName(instrChkObjType->m_opcode),
  5585. propertySym->m_propertyId,
  5586. this->m_func->GetJITFunctionBody()->GetDisplayName(),
  5587. inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(), _u("false"));
  5588. IR::LabelInstr* labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  5589. this->GenerateCachedTypeCheck(instrChkObjType, propertySymOpnd, labelBailOut, labelBailOut);
  5590. IR::LabelInstr* labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  5591. IR::Instr* instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
  5592. instrChkObjType->InsertBefore(instr);
  5593. // Insert the bailout label here.
  5594. instrChkObjType->InsertBefore(labelBailOut);
  5595. instrChkObjType->InsertAfter(labelDone);
  5596. if (propertySymOpnd->ProducesAuxSlotPtr())
  5597. {
  5598. this->GenerateAuxSlotPtrLoad(propertySymOpnd, labelDone->m_next);
  5599. }
  5600. // Convert the original instruction to a bailout.
  5601. Assert(instrChkObjType->HasBailOutInfo());
  5602. if (instrChkObjType->GetBailOutInfo()->bailOutInstr != instrChkObjType)
  5603. {
  5604. // Set the cache index in the bailout info so that the bailout code will write it into the
  5605. // bailout record at runtime.
  5606. instrChkObjType->GetBailOutInfo()->polymorphicCacheIndex = inlineCacheIndex;
  5607. }
  5608. instrChkObjType->FreeSrc1();
  5609. instrChkObjType->m_opcode = Js::OpCode::BailOut;
  5610. this->GenerateBailOut(instrChkObjType);
  5611. }
  5612. void
  5613. Lowerer::LowerAdjustObjType(IR::Instr * instrAdjustObjType)
  5614. {
  5615. IR::AddrOpnd *finalTypeOpnd = instrAdjustObjType->UnlinkDst()->AsAddrOpnd();
  5616. IR::AddrOpnd *initialTypeOpnd = instrAdjustObjType->UnlinkSrc2()->AsAddrOpnd();
  5617. IR::RegOpnd *baseOpnd = instrAdjustObjType->UnlinkSrc1()->AsRegOpnd();
  5618. bool adjusted = this->GenerateAdjustBaseSlots(
  5619. instrAdjustObjType, baseOpnd, JITTypeHolder((JITType*)initialTypeOpnd->m_metadata), JITTypeHolder((JITType*)finalTypeOpnd->m_metadata));
  5620. if (instrAdjustObjType->m_opcode == Js::OpCode::AdjustObjTypeReloadAuxSlotPtr)
  5621. {
  5622. Assert(adjusted);
  5623. // We reallocated the aux slots, so reload them if necessary.
  5624. StackSym * auxSlotPtrSym = baseOpnd->m_sym->GetAuxSlotPtrSym();
  5625. Assert(auxSlotPtrSym);
  5626. IR::Opnd *opndIndir = IR::IndirOpnd::New(baseOpnd, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
  5627. IR::RegOpnd *regOpnd = IR::RegOpnd::New(auxSlotPtrSym, TyMachReg, this->m_func);
  5628. regOpnd->SetIsJITOptimizedReg(true);
  5629. Lowerer::InsertMove(regOpnd, opndIndir, instrAdjustObjType);
  5630. }
  5631. this->m_func->PinTypeRef((JITType*)finalTypeOpnd->m_metadata);
  5632. IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrAdjustObjType->m_func);
  5633. this->InsertMove(opnd, finalTypeOpnd, instrAdjustObjType);
  5634. initialTypeOpnd->Free(instrAdjustObjType->m_func);
  5635. instrAdjustObjType->Remove();
  5636. }
  5637. bool
  5638. Lowerer::GenerateNonConfigurableLdRootFld(IR::Instr * instrLdFld)
  5639. {
  5640. if (!instrLdFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
  5641. {
  5642. return false;
  5643. }
  5644. IR::PropertySymOpnd *propertySymOpnd = instrLdFld->GetSrc1()->AsPropertySymOpnd();
  5645. if (!propertySymOpnd->IsRootObjectNonConfigurableFieldLoad())
  5646. {
  5647. return false;
  5648. }
  5649. Assert(!PHASE_OFF(Js::RootObjectFldFastPathPhase, this->m_func));
  5650. Assert(!instrLdFld->HasBailOutInfo());
  5651. IR::Opnd * srcOpnd;
  5652. intptr_t rootObject = this->m_func->GetJITFunctionBody()->GetRootObject();
  5653. if (propertySymOpnd->UsesAuxSlot())
  5654. {
  5655. IR::RegOpnd * auxSlotOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  5656. this->InsertMove(auxSlotOpnd, IR::MemRefOpnd::New((byte *)rootObject + Js::DynamicObject::GetOffsetOfAuxSlots(),
  5657. TyMachPtr, this->m_func), instrLdFld);
  5658. srcOpnd = IR::IndirOpnd::New(auxSlotOpnd, propertySymOpnd->GetSlotIndex() * sizeof(Js::Var *),
  5659. TyVar, this->m_func);
  5660. }
  5661. else
  5662. {
  5663. srcOpnd = IR::MemRefOpnd::New((Js::Var *)rootObject + propertySymOpnd->GetSlotIndex(),
  5664. TyVar, this->m_func);
  5665. }
  5666. instrLdFld->ReplaceSrc1(srcOpnd);
  5667. instrLdFld->m_opcode = Js::OpCode::Ld_A;
  5668. LowererMD::ChangeToAssign(instrLdFld);
  5669. return true;
  5670. }
  5671. IR::Instr *
  5672. Lowerer::LowerDelFld(IR::Instr *delFldInstr, IR::JnHelperMethod helperMethod, bool useInlineCache, bool strictMode)
  5673. {
  5674. IR::Instr *instrPrev;
  5675. Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
  5676. if (strictMode)
  5677. {
  5678. propertyOperationFlag = Js::PropertyOperation_StrictMode;
  5679. }
  5680. instrPrev = m_lowererMD.LoadHelperArgument(delFldInstr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
  5681. LowerLdFld(delFldInstr, helperMethod, helperMethod, useInlineCache);
  5682. return instrPrev;
  5683. }
  5684. IR::Instr *
  5685. Lowerer::LowerIsInst(IR::Instr * isInstInstr, IR::JnHelperMethod helperMethod)
  5686. {
  5687. IR::Instr * instrPrev;
  5688. IR::Instr * instrArg;
  5689. IR::RegOpnd * argOpnd;
  5690. // inlineCache
  5691. instrPrev = m_lowererMD.LoadHelperArgument(isInstInstr, LoadIsInstInlineCacheOpnd(isInstInstr, isInstInstr->GetSrc1()->AsIntConstOpnd()->AsUint32()));
  5692. isInstInstr->FreeSrc1();
  5693. argOpnd = isInstInstr->UnlinkSrc2()->AsRegOpnd();
  5694. Assert(argOpnd->m_sym->m_isSingleDef);
  5695. instrArg = argOpnd->m_sym->m_instrDef;
  5696. argOpnd->Free(m_func);
  5697. // scriptContext
  5698. LoadScriptContext(isInstInstr);
  5699. // instance goes last, so remember it now
  5700. IR::Opnd * instanceOpnd = instrArg->UnlinkSrc1();
  5701. argOpnd = instrArg->UnlinkSrc2()->AsRegOpnd();
  5702. Assert(argOpnd->m_sym->m_isSingleDef);
  5703. instrArg->Remove();
  5704. instrArg = argOpnd->m_sym->m_instrDef;
  5705. argOpnd->Free(m_func);
  5706. // function
  5707. IR::Opnd *opnd = instrArg->UnlinkSrc1();
  5708. m_lowererMD.LoadHelperArgument(isInstInstr, opnd);
  5709. Assert(instrArg->GetSrc2() == NULL);
  5710. instrArg->Remove();
  5711. // instance
  5712. m_lowererMD.LoadHelperArgument(isInstInstr, instanceOpnd);
  5713. m_lowererMD.ChangeToHelperCall(isInstInstr, helperMethod);
  5714. return instrPrev;
  5715. }
  5716. void
  5717. Lowerer::GenerateStackScriptFunctionInit(StackSym * stackSym, Js::FunctionInfoPtrPtr nestedInfo)
  5718. {
  5719. Func * func = this->m_func;
  5720. Assert(func->HasAnyStackNestedFunc());
  5721. Assert(nextStackFunctionOpnd);
  5722. IR::Instr * insertBeforeInstr = func->GetFunctionEntryInsertionPoint();
  5723. IR::RegOpnd * addressOpnd = IR::RegOpnd::New(TyMachPtr, func);
  5724. const IR::AutoReuseOpnd autoReuseAddressOpnd(addressOpnd, func);
  5725. InsertLea(addressOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertBeforeInstr);
  5726. // Currently we don't initialize the environment until we actually allocate the function, we also
  5727. // walk the list of stack function when we need to box them. so we should use initialize it to NullFrameDisplay
  5728. GenerateStackScriptFunctionInit(addressOpnd, nestedInfo,
  5729. IR::AddrOpnd::New(func->GetThreadContextInfo()->GetNullFrameDisplayAddr(), IR::AddrOpndKindDynamicMisc, func), insertBeforeInstr);
  5730. // Establish the next link
  5731. InsertMove(nextStackFunctionOpnd, addressOpnd, insertBeforeInstr);
  5732. this->nextStackFunctionOpnd = IR::SymOpnd::New(stackSym, sizeof(Js::StackScriptFunction), TyMachPtr, func);
  5733. }
  5734. void
  5735. Lowerer::GenerateScriptFunctionInit(IR::RegOpnd * regOpnd, IR::Opnd * vtableAddressOpnd,
  5736. Js::FunctionInfoPtrPtr nestedInfo, IR::Opnd * envOpnd, IR::Instr * insertBeforeInstr, bool isZeroed)
  5737. {
  5738. Func * func = this->m_func;
  5739. IR::Opnd * functionInfoOpnd = IR::RegOpnd::New(TyMachPtr, func);
  5740. InsertMove(functionInfoOpnd, IR::MemRefOpnd::New(nestedInfo, TyMachPtr, func), insertBeforeInstr);
  5741. IR::Opnd * functionProxyOpnd = IR::RegOpnd::New(TyMachPtr, func);
  5742. InsertMove(functionProxyOpnd, IR::IndirOpnd::New(functionInfoOpnd->AsRegOpnd(), Js::FunctionInfo::GetOffsetOfFunctionProxy(), TyMachPtr, func), insertBeforeInstr);
  5743. IR::Opnd * typeOpnd = IR::RegOpnd::New(TyMachPtr, func);
  5744. InsertMove(typeOpnd, IR::IndirOpnd::New(functionProxyOpnd->AsRegOpnd(), Js::FunctionProxy::GetOffsetOfDeferredPrototypeType(),
  5745. TyMachPtr, func), insertBeforeInstr);
  5746. IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  5747. InsertTestBranch(typeOpnd, typeOpnd, Js::OpCode::BrEq_A, labelHelper, insertBeforeInstr);
  5748. IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  5749. InsertBranch(Js::OpCode::Br, labelDone, insertBeforeInstr);
  5750. insertBeforeInstr->InsertBefore(labelHelper);
  5751. m_lowererMD.LoadHelperArgument(insertBeforeInstr, functionProxyOpnd);
  5752. IR::Instr * callHelperInstr = IR::Instr::New(Js::OpCode::Call, typeOpnd,
  5753. IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperEnsureFunctionProxyDeferredPrototypeType, func), func);
  5754. insertBeforeInstr->InsertBefore(callHelperInstr);
  5755. m_lowererMD.LowerCall(callHelperInstr, 0);
  5756. insertBeforeInstr->InsertBefore(labelDone);
  5757. GenerateMemInit(regOpnd, 0, vtableAddressOpnd, insertBeforeInstr, isZeroed);
  5758. GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfType(), typeOpnd, insertBeforeInstr, isZeroed);
  5759. GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfAuxSlots(), insertBeforeInstr, isZeroed);
  5760. GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfObjectArray(), insertBeforeInstr, isZeroed);
  5761. GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfConstructorCache(),
  5762. LoadLibraryValueOpnd(insertBeforeInstr, LibraryValue::ValueConstructorCacheDefaultInstance),
  5763. insertBeforeInstr, isZeroed);
  5764. GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfFunctionInfo(), functionInfoOpnd, insertBeforeInstr, isZeroed);
  5765. GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfEnvironment(), envOpnd, insertBeforeInstr, isZeroed);
  5766. GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfCachedScopeObj(), insertBeforeInstr, isZeroed);
  5767. GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfHasInlineCaches(), insertBeforeInstr, isZeroed);
  5768. }
  5769. void
  5770. Lowerer::GenerateStackScriptFunctionInit(IR::RegOpnd * regOpnd, Js::FunctionInfoPtrPtr nestedInfo, IR::Opnd * envOpnd, IR::Instr * insertBeforeInstr)
  5771. {
  5772. Func * func = this->m_func;
  5773. GenerateScriptFunctionInit(regOpnd,
  5774. LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableStackScriptFunction),
  5775. nestedInfo, envOpnd, insertBeforeInstr);
  5776. InsertMove(IR::IndirOpnd::New(regOpnd, Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func),
  5777. IR::AddrOpnd::NewNull(func), insertBeforeInstr);
  5778. }
  5779. void
  5780. Lowerer::EnsureStackFunctionListStackSym()
  5781. {
  5782. Func * func = this->m_func;
  5783. Assert(func->HasAnyStackNestedFunc());
  5784. #if defined(_M_IX86) || defined(_M_X64)
  5785. Assert(func->m_localStackHeight == (func->HasArgumentSlot()? MachArgsSlotOffset : 0));
  5786. StackSym * stackFunctionListStackSym = StackSym::New(TyMachPtr, func);
  5787. func->StackAllocate(stackFunctionListStackSym, sizeof(Js::ScriptFunction *));
  5788. nextStackFunctionOpnd = IR::SymOpnd::New(stackFunctionListStackSym, TyMachPtr, func);
  5789. #else
  5790. Assert(func->m_localStackHeight == 0);
  5791. nextStackFunctionOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(NULL, FRAME_REG, TyMachReg, func),
  5792. -(int32)(Js::Constants::StackNestedFuncList * sizeof(Js::Var)), TyMachPtr, func);
  5793. #endif
  5794. }
  5795. void
  5796. Lowerer::AllocStackClosure()
  5797. {
  5798. m_func->StackAllocate(m_func->GetLocalFrameDisplaySym(), sizeof(Js::Var));
  5799. m_func->StackAllocate(m_func->GetLocalClosureSym(), sizeof(Js::Var));
  5800. }
  5801. void
  5802. Lowerer::EnsureZeroLastStackFunctionNext()
  5803. {
  5804. Assert(nextStackFunctionOpnd != nullptr);
  5805. Func * func = this->m_func;
  5806. IR::Instr * insertBeforeInstr = func->GetFunctionEntryInsertionPoint();
  5807. InsertMove(nextStackFunctionOpnd, IR::AddrOpnd::NewNull(func), insertBeforeInstr);
  5808. }
// Emits the stack-allocation fast path for NewScFunc: when the function body's
// Flags_StackNestedFunc bit is set at runtime, the nested function object is placed on
// the stack (a fresh slot for a normal function; a precomputed slot reached through the
// NewScFuncData pseudo-instr for a loop body) instead of calling the allocation helper.
// On the no-stack-function path, control falls through to the caller-emitted helper
// call. May replace *ppEnvOpnd (loop-body case). Returns the instruction that assigns
// the destination, so the caller can resume lowering from it.
IR::Instr *
Lowerer::GenerateNewStackScFunc(IR::Instr * newScFuncInstr, IR::RegOpnd ** ppEnvOpnd)
{
    Assert(newScFuncInstr->m_func->DoStackNestedFunc());
    Func * func = newScFuncInstr->m_func;
    uint index = newScFuncInstr->GetSrc1()->AsIntConstOpnd()->AsUint32();
    Assert(index < func->GetJITFunctionBody()->GetNestedCount());
    IR::LabelInstr * labelNoStackFunc = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    // Runtime check: skip the stack allocation when the body's StackNestedFunc flag is clear.
    InsertTestBranch(IR::MemRefOpnd::New(func->GetJITFunctionBody()->GetFlagsAddr(), TyInt8, func),
        IR::IntConstOpnd::New(Js::FunctionBody::Flags_StackNestedFunc, TyInt8, func, true),
        Js::OpCode::BrEq_A, labelNoStackFunc, newScFuncInstr);
    Js::FunctionInfoPtrPtr nestedInfo = func->GetJITFunctionBody()->GetNestedFuncRef(index);
    IR::Instr * instrAssignDst;
    IR::RegOpnd * envOpnd = *ppEnvOpnd;
    if (!func->IsLoopBody())
    {
        // the stackAllocate Call below for this sym is passing a size that is not represented by any IRType and hence passing TyMisc for the constructor
        StackSym * stackSym = StackSym::New(TyMisc, func);
        // ScriptFunction and its next pointer
        this->m_func->StackAllocate(stackSym, sizeof(Js::StackScriptFunction) + sizeof(Js::StackScriptFunction *));
        GenerateStackScriptFunctionInit(stackSym, nestedInfo);
        // Store the environment and take the address of the slot as the result.
        InsertMove(IR::SymOpnd::New(stackSym, Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, func),
            envOpnd,
            newScFuncInstr);
        instrAssignDst =
            InsertLea(newScFuncInstr->GetDst()->AsRegOpnd(), IR::SymOpnd::New(stackSym, TyMachPtr, func), newScFuncInstr);
    }
    else
    {
        Assert(func->IsTopFunc());
        Assert(func->m_loopParamSym);
        // In a loop body the environment reg is defined by a NewScFuncData pseudo-instr
        // carrying (src1: environment, src2: base pointer to the stack-function array).
        // Unpack both and retire the pseudo-instr.
        IR::Instr * envDefInstr = envOpnd->AsRegOpnd()->m_sym->m_instrDef;
        Assert(envDefInstr && envDefInstr->m_opcode == Js::OpCode::NewScFuncData);
        IR::RegOpnd * opndFuncPtr = envDefInstr->UnlinkSrc2()->AsRegOpnd();
        Assert(opndFuncPtr);
        envOpnd = envDefInstr->UnlinkSrc1()->AsRegOpnd();
        Assert(envOpnd);
        *ppEnvOpnd = envOpnd;
        envDefInstr->Remove();
        if (index != 0)
        {
            // Step to the index'th StackScriptFunction in the array.
            IR::RegOpnd * opnd = IR::RegOpnd::New(TyVar, func);
            InsertAdd(false, opnd, opndFuncPtr, IR::IntConstOpnd::New(index * sizeof(Js::StackScriptFunction), TyMachPtr, func), newScFuncInstr);
            opndFuncPtr = opnd;
        }
        InsertMove(IR::IndirOpnd::New(opndFuncPtr, Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, func),
            envOpnd, newScFuncInstr);
        instrAssignDst = InsertMove(newScFuncInstr->GetDst(), opndFuncPtr, newScFuncInstr);
    }
    // Jump over the helper-call path; labelDone lands after the original instruction.
    InsertBranch(Js::OpCode::Br, labelDone, newScFuncInstr);
    newScFuncInstr->InsertBefore(labelNoStackFunc);
    newScFuncInstr->InsertAfter(labelDone);
    return instrAssignDst;
}
  5864. IR::Instr *
  5865. Lowerer::LowerNewScFunc(IR::Instr * newScFuncInstr)
  5866. {
  5867. IR::Instr *stackNewScFuncInstr = nullptr;
  5868. IR::RegOpnd * envOpnd = newScFuncInstr->UnlinkSrc2()->AsRegOpnd();
  5869. if (newScFuncInstr->m_func->DoStackNestedFunc())
  5870. {
  5871. stackNewScFuncInstr = GenerateNewStackScFunc(newScFuncInstr, &envOpnd);
  5872. }
  5873. IR::IntConstOpnd * functionBodySlotOpnd = newScFuncInstr->UnlinkSrc1()->AsIntConstOpnd();
  5874. IR::Instr * instrPrev = this->LoadFunctionBodyAsArgument(newScFuncInstr, functionBodySlotOpnd, envOpnd);
  5875. m_lowererMD.ChangeToHelperCall(newScFuncInstr, IR::HelperScrFunc_OP_NewScFunc );
  5876. return stackNewScFuncInstr == nullptr? instrPrev : stackNewScFuncInstr;
  5877. }
  5878. IR::Instr *
  5879. Lowerer::LowerNewScFuncHomeObj(IR::Instr * newScFuncInstr)
  5880. {
  5881. newScFuncInstr->m_opcode = Js::OpCode::CallHelper;
  5882. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperScrFunc_OP_NewScFuncHomeObj, this->m_func);
  5883. IR::Opnd * src1 = newScFuncInstr->UnlinkSrc1();
  5884. newScFuncInstr->SetSrc1(helperOpnd);
  5885. newScFuncInstr->SetSrc2(src1);
  5886. return newScFuncInstr;
  5887. }
  5888. IR::Instr *
  5889. Lowerer::LowerNewScGenFunc(IR::Instr * newScFuncInstr)
  5890. {
  5891. IR::IntConstOpnd * functionBodySlotOpnd = newScFuncInstr->UnlinkSrc1()->AsIntConstOpnd();
  5892. IR::RegOpnd * envOpnd = newScFuncInstr->UnlinkSrc2()->AsRegOpnd();
  5893. IR::Instr * instrPrev = this->LoadFunctionBodyAsArgument(newScFuncInstr, functionBodySlotOpnd, envOpnd);
  5894. m_lowererMD.ChangeToHelperCall(newScFuncInstr, IR::HelperScrFunc_OP_NewScGenFunc );
  5895. return instrPrev;
  5896. }
  5897. IR::Instr *
  5898. Lowerer::LowerNewScGenFuncHomeObj(IR::Instr * newScFuncInstr)
  5899. {
  5900. newScFuncInstr->m_opcode = Js::OpCode::CallHelper;
  5901. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperScrFunc_OP_NewScGenFuncHomeObj, this->m_func);
  5902. IR::Opnd * src1 = newScFuncInstr->UnlinkSrc1();
  5903. newScFuncInstr->SetSrc1(helperOpnd);
  5904. newScFuncInstr->SetSrc2(src1);
  5905. return newScFuncInstr;
  5906. }
  5907. IR::Instr *
  5908. Lowerer::LowerStPropIdArrFromVar(IR::Instr * stPropIdInstr)
  5909. {
  5910. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperStPropIdArrFromVar, this->m_func);
  5911. IR::Opnd * src1 = stPropIdInstr->UnlinkSrc1();
  5912. stPropIdInstr->SetSrc1(helperOpnd);
  5913. stPropIdInstr->SetSrc2(src1);
  5914. return m_lowererMD.LowerCallHelper(stPropIdInstr);
  5915. }
  5916. IR::Instr *
  5917. Lowerer::LowerRestify(IR::Instr * newRestInstr)
  5918. {
  5919. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperRestify, this->m_func);
  5920. IR::Opnd * src1 = newRestInstr->UnlinkSrc1();
  5921. newRestInstr->SetSrc1(helperOpnd);
  5922. newRestInstr->SetSrc2(src1);
  5923. return m_lowererMD.LowerCallHelper(newRestInstr);
  5924. }
  5925. ///----------------------------------------------------------------------------
  5926. ///
  5927. /// Lowerer::LowerScopedLdFld
  5928. ///
  5929. /// Lower a load instruction that takes an additional instance to use as a
  5930. /// a default if the scope chain provided doesn't contain the property.
  5931. ///
  5932. ///----------------------------------------------------------------------------
  5933. IR::Instr *
  5934. Lowerer::LowerScopedLdFld(IR::Instr * ldFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache)
  5935. {
  5936. IR::Opnd *src;
  5937. IR::Instr *instrPrev = ldFldInstr->m_prev;
  5938. if(!withInlineCache)
  5939. {
  5940. LoadScriptContext(ldFldInstr);
  5941. }
  5942. intptr_t rootObject = m_func->GetJITFunctionBody()->GetRootObject();
  5943. src = IR::AddrOpnd::New(rootObject, IR::AddrOpndKindDynamicVar, this->m_func, true);
  5944. instrPrev = m_lowererMD.LoadHelperArgument(ldFldInstr, src);
  5945. src = ldFldInstr->UnlinkSrc1();
  5946. AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
  5947. this->LoadPropertySymAsArgument(ldFldInstr, src);
  5948. if (withInlineCache)
  5949. {
  5950. AssertMsg(src->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
  5951. m_lowererMD.LoadHelperArgument(
  5952. ldFldInstr,
  5953. IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
  5954. // Not using the polymorphic inline cache because the fast path only uses the monomorphic inline cache
  5955. this->m_lowererMD.LoadHelperArgument(ldFldInstr, this->LoadRuntimeInlineCacheOpnd(ldFldInstr, src->AsPropertySymOpnd()));
  5956. m_lowererMD.LoadHelperArgument(ldFldInstr, LoadFunctionBodyOpnd(ldFldInstr));
  5957. }
  5958. m_lowererMD.ChangeToHelperCall(ldFldInstr, helperMethod);
  5959. return instrPrev;
  5960. }
///----------------------------------------------------------------------------
///
/// Lowerer::LowerScopedLdInst
///
/// Lower a scoped load that takes an additional instance (the function's root
/// object) to use as a default if the scope chain provided doesn't contain
/// the property. The helper writes the result through an out-parameter stack
/// slot, which is then copied into the destination register.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerScopedLdInst(IR::Instr *instr, IR::JnHelperMethod helperMethod)
{
    IR::Opnd *src;
    IR::Instr *instrPrev;

    // last argument is the scriptContext
    instrPrev = LoadScriptContext(instr);

    src = instr->UnlinkSrc2();
    AssertMsg(src->IsRegOpnd(), "Expected Reg opnd as src2");

    // __out Var*. The StackSym is allocated in irbuilder, and here we need to insert a lea
    // so the helper receives the address of the stack slot it writes the result into.
    StackSym* dstSym = src->GetStackSym();
    IR::Instr *load = InsertLoadStackAddress(dstSym, instr);
    IR::Opnd* tempOpnd = load->GetDst();
    m_lowererMD.LoadHelperArgument(instr, tempOpnd);

    // now 3rd last argument is the rootObject of the function. Need to add addrOpnd to
    // pass in the address of the rootObject.
    IR::Opnd * srcOpnd;
    intptr_t rootObject = m_func->GetJITFunctionBody()->GetRootObject();
    srcOpnd = IR::AddrOpnd::New(rootObject, IR::AddrOpndKindDynamicVar, instr->m_func, true);
    instrPrev = m_lowererMD.LoadHelperArgument(instr, srcOpnd);

    // no change, the property field built from irbuilder.
    src = instr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
    this->LoadPropertySymAsArgument(instr, src);

    instrPrev = m_lowererMD.ChangeToHelperCall(instr, helperMethod);

    // Copy the helper's out-param value from the stack slot into the destination
    // register. NOTE(review): the move is inserted before the instruction returned
    // by ChangeToHelperCall — presumably that places it after the call itself; confirm.
    IR::RegOpnd* regOpnd = IR::RegOpnd::New(dstSym, TyVar, m_func);
    IR::SymOpnd* symOpnd = IR::SymOpnd::New(dstSym, TyVar, m_func);
    this->InsertMove(regOpnd, symOpnd, instrPrev);

    return instrPrev;
}
  5999. IR::Instr *
  6000. Lowerer::LowerScopedDelFld(IR::Instr * delFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache, bool strictMode)
  6001. {
  6002. IR::Instr *instrPrev;
  6003. Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
  6004. if (strictMode)
  6005. {
  6006. propertyOperationFlag = Js::PropertyOperation_StrictMode;
  6007. }
  6008. instrPrev = m_lowererMD.LoadHelperArgument(delFldInstr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
  6009. LowerScopedLdFld(delFldInstr, helperMethod, withInlineCache);
  6010. return instrPrev;
  6011. }
// Lower a field store under JIT profiling: the store becomes a call to one of
// the ProfilingHelpers::Profiled*Fld_Jit helpers, selected by opcode and
// property operation flags. Helper arguments are pushed in reverse of the
// signatures quoted below.
IR::Instr *
Lowerer::LowerProfiledStFld(IR::JitProfilingInstr *stFldInstr, Js::PropertyOperationFlags flags)
{
    Assert(stFldInstr->profileId == Js::Constants::NoProfileId);

    IR::Instr *const instrPrev = stFldInstr->m_prev;

    /*
        void ProfilingHelpers::ProfiledInitFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer)

        void ProfilingHelpers::ProfiledStFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer)

        void ProfilingHelpers::ProfiledStSuperFld_Jit(
            const Var instance,
            const PropertyId propertyId,
            const InlineCacheIndex inlineCacheIndex,
            const Var value,
            void *const framePointer,
            const Var thisInstance)
    */

    m_lowererMD.LoadHelperArgument(stFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
    if (stFldInstr->m_opcode == Js::OpCode::StSuperFld)
    {
        // StSuperFld carries the 'this' instance as src2; it is the helper's last parameter.
        m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc2());
    }
    // The stored value.
    m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc1());
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");
    m_lowererMD.LoadHelperArgument(
        stFldInstr,
        IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
    // Pushes propertyId and instance from the property sym.
    LoadPropertySymAsArgument(stFldInstr, dst);

    // Pick the profiling helper from the opcode; plain stores further split on
    // root-object and strict-mode flags.
    IR::JnHelperMethod helper;
    switch (stFldInstr->m_opcode)
    {
    case Js::OpCode::InitFld:
    case Js::OpCode::InitRootFld:
        helper = IR::HelperProfiledInitFld;
        break;

    case Js::OpCode::StSuperFld:
        helper = IR::HelperProfiledStSuperFld;
        break;

    default:
        helper =
            flags & Js::PropertyOperation_Root
                ? flags & Js::PropertyOperation_StrictMode ? IR::HelperProfiledStRootFld_Strict : IR::HelperProfiledStRootFld
                : flags & Js::PropertyOperation_StrictMode ? IR::HelperProfiledStFld_Strict : IR::HelperProfiledStFld;
        break;
    }
    stFldInstr->SetSrc1(IR::HelperCallOpnd::New(helper, m_func));
    m_lowererMD.LowerCall(stFldInstr, 0);

    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerStFld
///
/// Lower a field store to a helper call. Optionally passes the inline cache
/// (switching to the polymorphic helper/cache when one is available) and the
/// property operation flags. Returns the instruction preceding the lowered
/// sequence.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerStFld(
    IR::Instr * stFldInstr,
    IR::JnHelperMethod helperMethod,
    IR::JnHelperMethod polymorphicHelperMethod,
    bool withInlineCache,
    IR::LabelInstr *labelBailOut,
    bool isHelper,
    bool withPutFlags,
    Js::PropertyOperationFlags flags)
{
    if (stFldInstr->IsJitProfilingInstr())
    {
        // If we want to profile then do something completely different
        return this->LowerProfiledStFld(stFldInstr->AsJitProfilingInstr(), flags);
    }

    IR::Instr *instrPrev = stFldInstr->m_prev;
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");

    IR::Opnd * inlineCacheOpnd = nullptr;
    if (withInlineCache)
    {
        AssertMsg(dst->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
        if (dst->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache && polymorphicHelperMethod != helperMethod)
        {
            // A runtime polymorphic cache exists and the caller provided a distinct
            // polymorphic helper: use both instead of the monomorphic pair.
            JITTimePolymorphicInlineCache * polymorphicInlineCache = dst->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache;
            helperMethod = polymorphicHelperMethod;
            inlineCacheOpnd = IR::AddrOpnd::New(polymorphicInlineCache->GetAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func);
        }
        else
        {
            // Need to load runtime inline cache opnd first before loading any helper argument
            // because LoadRuntimeInlineCacheOpnd may create labels marked as helper
            // and cause op helper register push/pop save in x86, messing up with any helper arguments that is already pushed
            inlineCacheOpnd = this->LoadRuntimeInlineCacheOpnd(stFldInstr, dst->AsPropertySymOpnd(), isHelper);
        }
    }
    if (withPutFlags)
    {
        m_lowererMD.LoadHelperArgument(stFldInstr,
            IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    }

    IR::Opnd *src = stFldInstr->UnlinkSrc1();
    if (stFldInstr->m_opcode == Js::OpCode::StSuperFld)
    {
        // StSuperFld also passes the 'this' instance carried in src2.
        m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc2());
    }

    // Push the stored value, then the property sym (propertyId + instance).
    m_lowererMD.LoadHelperArgument(stFldInstr, src);
    this->LoadPropertySymAsArgument(stFldInstr, dst);

    if (withInlineCache)
    {
        Assert(inlineCacheOpnd != nullptr);

        this->m_lowererMD.LoadHelperArgument(
            stFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        this->m_lowererMD.LoadHelperArgument(stFldInstr, inlineCacheOpnd);
        this->m_lowererMD.LoadHelperArgument(stFldInstr, LoadFunctionBodyOpnd(stFldInstr));
    }

    IR::RegOpnd *opndBase = dst->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func);
    m_lowererMD.ChangeToHelperCall(stFldInstr, helperMethod, labelBailOut, opndBase, dst->AsSymOpnd()->IsPropertySymOpnd() ? dst->AsSymOpnd()->AsPropertySymOpnd() : nullptr, isHelper);

    return instrPrev;
}
// Generate the complete lowering for a field store: try the custom-property
// fast path, then the cached-type (object type spec) lowering, then the
// generic fast path, each falling back to a helper call when needed.
// Returns the instruction preceding the lowered sequence.
IR::Instr* Lowerer::GenerateCompleteStFld(IR::Instr* instr, bool emitFastPath, IR::JnHelperMethod monoHelperAfterFastPath, IR::JnHelperMethod polyHelperAfterFastPath,
    IR::JnHelperMethod monoHelperWithoutFastPath, IR::JnHelperMethod polyHelperWithoutFastPath, bool withPutFlags, Js::PropertyOperationFlags flags)
{
    if(instr->CallsAccessor() && instr->HasBailOutInfo())
    {
        // A store that may invoke an accessor must not carry an implicit-call bailout.
        IR::BailOutKind kindMinusBits = instr->GetBailOutKind() & ~IR::BailOutKindBits;
        Assert(kindMinusBits != IR::BailOutOnImplicitCalls && kindMinusBits != IR::BailOutOnImplicitCallsPreOp);
    }
    IR::Instr* prevInstr = instr->m_prev;

    IR::LabelInstr* labelBailOut = nullptr;
    IR::LabelInstr* labelHelper = nullptr;
    bool isHelper = false;
    IR::RegOpnd* typeOpnd = nullptr;
    if(emitFastPath && GenerateFastStFldForCustomProperty(instr, &labelHelper))
    {
        if(labelHelper)
        {
            // Fast path emitted but still needs the helper-call fallback.
            Assert(labelHelper->isOpHelper);
            instr->InsertBefore(labelHelper);
            prevInstr = this->LowerStFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
        }
        else
        {
            // Fast path fully handled the store; the original instruction is no longer needed.
            instr->Remove();
            return prevInstr;
        }
    }
    else if (this->GenerateStFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd))
    {
        // Fully lowered via the cached type; nothing more to emit.
        Assert(labelHelper == nullptr);
        return prevInstr;
    }
    else if (emitFastPath)
    {
        if (!GenerateFastStFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, &labelBailOut, typeOpnd, &isHelper, &labelHelper, withPutFlags, flags))
        {
            // Fast path not generated; fall back to the "after fast path" helpers.
            if (labelHelper != nullptr)
            {
                labelHelper->isOpHelper = isHelper;
                instr->InsertBefore(labelHelper);
            }
            prevInstr = this->LowerStFld(instr, monoHelperAfterFastPath, polyHelperAfterFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
        }
    }
    else
    {
        // No fast path requested: go straight to the monomorphic helper call.
        if (labelHelper != nullptr)
        {
            labelHelper->isOpHelper = isHelper;
            instr->InsertBefore(labelHelper);
        }
        prevInstr = this->LowerStFld(instr, monoHelperWithoutFastPath, monoHelperWithoutFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
    }

    return prevInstr;
}
  6194. void
  6195. Lowerer::GenerateDirectFieldStore(IR::Instr* instrStFld, IR::PropertySymOpnd* propertySymOpnd)
  6196. {
  6197. Func* func = instrStFld->m_func;
  6198. IR::Opnd *opndSlotArray = this->LoadSlotArrayWithCachedLocalType(instrStFld, propertySymOpnd);
  6199. // Store the value to the slot, getting the slot index from the cache.
  6200. uint16 index = propertySymOpnd->GetSlotIndex();
  6201. Assert(index != -1);
  6202. #if defined(RECYCLER_WRITE_BARRIER_JIT) && (defined(_M_IX86) || defined(_M_AMD64))
  6203. if (opndSlotArray->IsRegOpnd())
  6204. {
  6205. IR::IndirOpnd * opndDst = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, func);
  6206. this->GetLowererMD()->GenerateWriteBarrierAssign(opndDst, instrStFld->GetSrc1(), instrStFld);
  6207. }
  6208. else
  6209. {
  6210. Assert(opndSlotArray->IsMemRefOpnd());
  6211. IR::MemRefOpnd * opndDst = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, func);
  6212. this->GetLowererMD()->GenerateWriteBarrierAssign(opndDst, instrStFld->GetSrc1(), instrStFld);
  6213. }
  6214. #else
  6215. IR::Opnd *opnd;
  6216. if (opndSlotArray->IsRegOpnd())
  6217. {
  6218. opnd = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, func);
  6219. }
  6220. else
  6221. {
  6222. opnd = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, func);
  6223. }
  6224. this->InsertMove(opnd, instrStFld->GetSrc1(), instrStFld);
  6225. #endif
  6226. }
// Attempt to lower a field store using object type specialization (cached
// types). Returns true when the store was fully lowered here (direct slot
// store, optional add-property transition, and/or bailout); returns false
// when the caller must continue lowering, in which case *continueAsHelperOut
// and *labelHelperOut tell the caller how to resume, and *typeOpndOut holds
// the loaded type register (if a type check was emitted).
bool
Lowerer::GenerateStFldWithCachedType(IR::Instr *instrStFld, bool* continueAsHelperOut, IR::LabelInstr** labelHelperOut, IR::RegOpnd** typeOpndOut)
{
    IR::Instr *instr;
    IR::RegOpnd *typeOpnd = nullptr;
    IR::LabelInstr* labelObjCheckFailed = nullptr;
    IR::LabelInstr *labelTypeCheckFailed = nullptr;
    IR::LabelInstr *labelBothTypeChecksFailed = nullptr;
    IR::LabelInstr *labelDone = nullptr;

    // Initialize the out-params to "nothing emitted".
    Assert(continueAsHelperOut != nullptr);
    *continueAsHelperOut = false;

    Assert(labelHelperOut != nullptr);
    *labelHelperOut = nullptr;

    Assert(typeOpndOut != nullptr);
    *typeOpndOut = nullptr;

    Assert(instrStFld->GetDst()->IsSymOpnd());
    if (!instrStFld->GetDst()->AsSymOpnd()->IsPropertySymOpnd() || !instrStFld->GetDst()->AsPropertySymOpnd()->IsTypeCheckSeqCandidate())
    {
        return false;
    }

    IR::PropertySymOpnd *propertySymOpnd = instrStFld->GetDst()->AsPropertySymOpnd();

    // If we have any object type spec info, we better not believe this is a load from prototype, since this is a store
    // and we never share inline caches between loads and stores.
    Assert(!propertySymOpnd->HasObjTypeSpecFldInfo() || !propertySymOpnd->IsLoadedFromProto());

    AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");

    if (!propertySymOpnd->IsTypeCheckSeqCandidate())
    {
        return false;
    }

    if (!propertySymOpnd->IsTypeCheckSeqParticipant() && !propertySymOpnd->NeedsLocalTypeCheck())
    {
        return false;
    }

    Assert(!propertySymOpnd->NeedsTypeCheckAndBailOut() || (instrStFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrStFld->GetBailOutKind())));

    // In the backwards pass we only add guarded property operations to instructions that are not already
    // protected by an upstream type check.
    Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);

    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Field store: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n"),
        Js::OpCodeUtil::GetOpCodeName(instrStFld->m_opcode),
        propertySymOpnd->m_sym->AsPropertySym()->m_propertyId,
        this->m_func->GetJITFunctionBody()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(),
        propertySymOpnd->IsTypeChecked() ? _u("true") : _u("false"));

    if (propertySymOpnd->HasFinalType() && !propertySymOpnd->IsLoadedFromProto())
    {
        propertySymOpnd->UpdateSlotForFinalType();
    }

    Func* func = instrStFld->m_func;

    // TODO (ObjTypeSpec): If ((PropertySym*)propertySymOpnd->m_sym)->m_stackSym->m_isIntConst consider emitting a direct
    // jump to helper or bailout. If we have a type check bailout, we could even abort compilation.

    bool hasTypeCheckBailout = instrStFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrStFld->GetBailOutKind());

    // If the type hasn't been checked upstream, see if it makes sense to check it here.
    bool isTypeChecked = propertySymOpnd->IsTypeChecked();
    if (!isTypeChecked)
    {
        // If the initial type has been checked, we can do a hard coded type transition without any type checks
        // (see GenerateStFldWithCachedFinalType), which is always worth doing, even if the type is not needed
        // downstream. We're not introducing any additional bailouts.
        if (propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType() && !propertySymOpnd->IsTypeDead())
        {
            // We have a final type in hand, so we can JIT (most of) the type transition work.
            return this->GenerateStFldWithCachedFinalType(instrStFld, propertySymOpnd);
        }

        if (propertySymOpnd->HasTypeMismatch())
        {
            // So we have a type mismatch, which happens when the type (and the type without property if ObjTypeSpecStore
            // is on) on this instruction didn't match the live type value according to the flow. We must have hit some
            // stale inline cache (perhaps inlined from a different function, or on a code path not taken for a while).
            // Either way, we know exactly what type the object must have at this point (fully determined by flow), but
            // we don't know whether that type already has the property we're storing here. All in all, we know exactly
            // what shape the object will have after this operation, but we're not sure what label (type) to give this
            // shape. Thus we can simply let the fast path do its thing based on the live inline cache. The downstream
            // instructions relying only on this shape (loads and stores) are safe, and those that need the next type
            // (i.e. adds) will do the same thing as this instruction.
            return false;
        }

        // If we're still here then we must need a primary type check on this instruction to protect
        // a sequence of field operations downstream, or a local type check for an isolated field store.
        Assert(propertySymOpnd->NeedsPrimaryTypeCheck() || propertySymOpnd->NeedsLocalTypeCheck());

        labelTypeCheckFailed = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        labelBothTypeChecksFailed = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        labelObjCheckFailed = hasTypeCheckBailout ? labelBothTypeChecksFailed : IR::LabelInstr::New(Js::OpCode::Label, func, true);
        typeOpnd = this->GenerateCachedTypeCheck(instrStFld, propertySymOpnd, labelObjCheckFailed, labelBothTypeChecksFailed, labelTypeCheckFailed);
        *typeOpndOut = typeOpnd;
    }

    // Either we are protected by a type check upstream or we just emitted a type check above,
    // now it's time to store the field value.
    GenerateDirectFieldStore(instrStFld, propertySymOpnd);

    // If we are protected by a type check upstream, we don't need a bailout or helper here, delete the instruction
    // and return "true" to indicate that we succeeded in eliminating it.
    if (isTypeChecked)
    {
        Assert(labelTypeCheckFailed == nullptr && labelBothTypeChecksFailed == nullptr);
        AssertMsg(!instrStFld->HasBailOutInfo(), "Why does a direct field store have bailout?");
        instrStFld->Remove();
        return true;
    }

    // Otherwise, branch around the helper on successful type check.
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, func);
    instrStFld->InsertBefore(instr);

    // On failed type check, try the type without property if we've got one.
    instrStFld->InsertBefore(labelTypeCheckFailed);

    // Caution, this is one of the dusty corners of the JIT. We only get here if this is an isolated StFld which adds a property, or
    // ObjTypeSpecStore is off. In the former case no downstream operations depend on the final type produced here, and we can fall
    // back on live cache and helper if the type doesn't match. In the latter we may have a cache with type transition, which must
    // produce a value for the type after transition, because that type is consumed downstream. Thus, if the object's type doesn't
    // match either the type with or the type without the property we're storing, we must bail out here.
    bool emitAddProperty = propertySymOpnd->IsMono() && propertySymOpnd->HasInitialType();

    if (emitAddProperty)
    {
        GenerateCachedTypeWithoutPropertyCheck(instrStFld, propertySymOpnd, typeOpnd, labelBothTypeChecksFailed);
        GenerateFieldStoreWithTypeChange(instrStFld, propertySymOpnd, propertySymOpnd->GetInitialType(), propertySymOpnd->GetType());
        instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, func);
        instrStFld->InsertBefore(instr);
    }

    instrStFld->InsertBefore(labelBothTypeChecksFailed);
    instrStFld->InsertAfter(labelDone);

    if (hasTypeCheckBailout)
    {
        AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !PHASE_ON(Js::DeadStoreTypeChecksOnStoresPhase, this->m_func) || !propertySymOpnd->IsTypeDead() || propertySymOpnd->TypeCheckRequired(),
            "Why does a field store have a type check bailout, if its type is dead?");

        if (instrStFld->GetBailOutInfo()->bailOutInstr != instrStFld)
        {
            // Set the cache index in the bailout info so that the generated code will write it into the
            // bailout record at runtime.
            instrStFld->GetBailOutInfo()->polymorphicCacheIndex = propertySymOpnd->m_inlineCacheIndex;
        }
        else
        {
            Assert(instrStFld->GetBailOutInfo()->polymorphicCacheIndex == propertySymOpnd->m_inlineCacheIndex);
        }

        // Convert the original store into a plain bailout; the store itself was emitted above.
        instrStFld->m_opcode = Js::OpCode::BailOut;
        instrStFld->FreeSrc1();
        instrStFld->FreeDst();

        this->GenerateBailOut(instrStFld);
        return true;
    }
    else
    {
        // No bailout available: let the caller finish lowering as a helper call,
        // resuming at the object-check-failed label.
        *continueAsHelperOut = true;
        Assert(labelObjCheckFailed != nullptr && labelObjCheckFailed != labelBothTypeChecksFailed);
        *labelHelperOut = labelObjCheckFailed;
        return false;
    }
}
// Emit the type check that guards an object-type-specialized field sequence.
// Loads the object's type into a register and compares it against either a
// pinned monomorphic type, a property-guard value, or (for equivalent-type
// checks) a guard table, falling back to the CheckIfTypeIsEquivalent* helper
// on a first-compare miss. On failure control transfers to labelTypeCheckFailed
// (or labelSecondChance, if provided, in the non-equivalent case); tagged
// (non-object) values branch to labelObjCheckFailed. Returns the register
// holding the loaded type.
IR::RegOpnd *
Lowerer::GenerateCachedTypeCheck(IR::Instr *instrChk, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr* labelObjCheckFailed, IR::LabelInstr *labelTypeCheckFailed, IR::LabelInstr *labelSecondChance)
{
    Assert(propertySymOpnd->MayNeedTypeCheckProtection());

    Func* func = instrChk->m_func;
    IR::RegOpnd *regOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(func);
    regOpnd->SetValueType(propertySymOpnd->GetPropertyOwnerValueType());

    // Only objects have a type; reject tagged values first unless flow already proved objectness.
    if (!regOpnd->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(regOpnd, instrChk, labelObjCheckFailed);
    }

    // Load the current object type into typeOpnd.
    IR::RegOpnd* typeOpnd = IR::RegOpnd::New(TyMachReg, func);
    IR::Opnd *sourceType;
    if (regOpnd->m_sym->IsConst() && !regOpnd->m_sym->IsIntConst() && !regOpnd->m_sym->IsFloatConst())
    {
        // Object address is a known constant: read the type field via an absolute memory reference.
        sourceType = IR::MemRefOpnd::New((BYTE*)regOpnd->m_sym->GetConstAddress() +
            Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func, IR::AddrOpndKindDynamicObjectTypeRef);
    }
    else
    {
        sourceType = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func);
    }
    InsertMove(typeOpnd, sourceType, instrChk);

    // Note: don't attempt equivalent type check if we're doing a final type optimization or if we have a monomorphic
    // cache and no type check bailout. In the latter case, we can wind up doing expensive failed equivalence checks
    // repeatedly and never rejit.
    bool doEquivTypeCheck =
        instrChk->HasEquivalentTypeCheckBailOut() ||
        (propertySymOpnd->HasEquivalentTypeSet() &&
         !(propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType()) &&
         !propertySymOpnd->MustDoMonoCheck() &&
         (propertySymOpnd->IsPoly() || instrChk->HasTypeCheckBailOut()));
    Assert(doEquivTypeCheck || !instrChk->HasEquivalentTypeCheckBailOut());

    // Create and initialize the property guard if required. Note that for non-shared monomorphic checks we can refer
    // directly to the (pinned) type and not use a guard.
    Js::PropertyGuard * typeCheckGuard;
    IR::RegOpnd * polyIndexOpnd = nullptr;
    JITTypeHolder monoType = nullptr;
    if (doEquivTypeCheck)
    {
        typeCheckGuard = CreateEquivalentTypeGuardAndLinkToGuardedProperties(propertySymOpnd);
        if (typeCheckGuard->IsPoly())
        {
            // Polymorphic guard: compute an index into its value table from the type pointer.
            Assert(propertySymOpnd->ShouldUsePolyEquivTypeGuard(this->m_func));
            polyIndexOpnd = this->GeneratePolymorphicTypeIndex(typeOpnd, typeCheckGuard, instrChk);
        }
    }
    else
    {
        monoType = propertySymOpnd->MustDoMonoCheck() ? propertySymOpnd->GetMonoGuardType() : propertySymOpnd->GetType();
        typeCheckGuard = CreateTypePropertyGuardForGuardedProperties(monoType, propertySymOpnd);
    }

    // Create the opnd we will check against the current type.
    IR::Opnd *expectedTypeOpnd;
    JITTypeHolder directCheckType = nullptr;
    if (typeCheckGuard == nullptr)
    {
        // No guard: compare directly against the pinned monomorphic type address.
        Assert(monoType != nullptr);
        expectedTypeOpnd = IR::AddrOpnd::New(monoType->GetAddr(), IR::AddrOpndKindDynamicType, func, true);
        directCheckType = monoType;
    }
    else
    {
        Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
        if (this->m_func->IsOOPJIT())
        {
            // OOP JIT: guard data lives in the native code data block; address it
            // relative to the NativeCodeDataSym.
            if (polyIndexOpnd != nullptr)
            {
                IR::RegOpnd * baseOpnd = IR::RegOpnd::New(TyMachPtr, func);
                this->GenerateLeaOfOOPData(baseOpnd, typeCheckGuard, Js::JitPolyEquivalentTypeGuard::GetOffsetOfPolyValues(), instrChk);
                expectedTypeOpnd = IR::IndirOpnd::New(baseOpnd, polyIndexOpnd, m_lowererMD.GetDefaultIndirScale(), TyMachPtr, func);
            }
            else
            {
                expectedTypeOpnd = this->GenerateIndirOfOOPData(typeCheckGuard, 0, instrChk);
            }
            this->addToLiveOnBackEdgeSyms->Set(func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
        else
        {
            // In-proc JIT: the guard value can be referenced by absolute address.
            if (polyIndexOpnd != nullptr)
            {
                IR::RegOpnd * baseOpnd = IR::RegOpnd::New(TyMachPtr, func);
                InsertMove(baseOpnd, IR::AddrOpnd::New((Js::Var)typeCheckGuard->AsPolyTypeCheckGuard()->GetAddressOfPolyValues(), IR::AddrOpndKindDynamicTypeCheckGuard, func, true), instrChk);
                expectedTypeOpnd = IR::IndirOpnd::New(baseOpnd, polyIndexOpnd, m_lowererMD.GetDefaultIndirScale(), TyMachPtr, func);
            }
            else
            {
                expectedTypeOpnd = IR::MemRefOpnd::New((void*)(typeCheckGuard->GetAddressOfValue()), TyMachPtr, func, IR::AddrOpndKindDynamicGuardValueRef);
            }
        }
    }

    if (PHASE_VERBOSE_TRACE(Js::ObjTypeSpecPhase, this->m_func))
    {
        OUTPUT_VERBOSE_TRACE_FUNC(Js::ObjTypeSpecPhase, this->m_func, _u("Emitted %s type check "),
            directCheckType != nullptr ? _u("direct") : propertySymOpnd->IsPoly() ? _u("equivalent") : _u("indirect"));
#if DBG
        if (propertySymOpnd->GetGuardedPropOps() != nullptr)
        {
            Output::Print(_u(" guarding operations:\n    "));
            propertySymOpnd->GetGuardedPropOps()->Dump();
        }
        else
        {
            Output::Print(_u("\n"));
        }
#else
        Output::Print(_u("\n"));
#endif
        Output::Flush();
    }

    if (doEquivTypeCheck)
    {
        // TODO (ObjTypeSpec): For isolated equivalent type checks it would be good to emit a check if the cache is still valid, and
        // if not go straight to live polymorphic cache. This way we wouldn't have to bail out and re-JIT, and also wouldn't continue
        // to try the equivalent type cache, miss it and do the slow comparison. This may be as easy as sticking a null on the main
        // type in the equivalent type cache.
        IR::LabelInstr* labelCheckEquivalentType = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        IR::BranchInstr* branchInstr = InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelCheckEquivalentType, instrChk);
        InsertObjectPoison(regOpnd, branchInstr, instrChk, false);

        IR::LabelInstr *labelTypeCheckSucceeded = IR::LabelInstr::New(Js::OpCode::Label, func, false);
        InsertBranch(Js::OpCode::Br, labelTypeCheckSucceeded, instrChk);

        // Fast compare missed: call the equivalence-check helper with (type, guard[, polyIndex]).
        instrChk->InsertBefore(labelCheckEquivalentType);

        IR::Opnd* typeCheckGuardOpnd = nullptr;
        if (this->m_func->IsOOPJIT())
        {
            typeCheckGuardOpnd = IR::RegOpnd::New(TyMachPtr, func);
            this->GenerateLeaOfOOPData(typeCheckGuardOpnd->AsRegOpnd(), typeCheckGuard, 0, instrChk);
            this->addToLiveOnBackEdgeSyms->Set(func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
        else
        {
            typeCheckGuardOpnd = IR::AddrOpnd::New((Js::Var)typeCheckGuard, IR::AddrOpndKindDynamicTypeCheckGuard, func, true);
        }

        IR::JnHelperMethod helperMethod;
        if (polyIndexOpnd != nullptr)
        {
            helperMethod = propertySymOpnd->HasFixedValue() ? IR::HelperCheckIfPolyTypeIsEquivalentForFixedField : IR::HelperCheckIfPolyTypeIsEquivalent;
            this->m_lowererMD.LoadHelperArgument(instrChk, polyIndexOpnd);
        }
        else
        {
            helperMethod = propertySymOpnd->HasFixedValue() ? IR::HelperCheckIfTypeIsEquivalentForFixedField : IR::HelperCheckIfTypeIsEquivalent;
        }

        this->m_lowererMD.LoadHelperArgument(instrChk, typeCheckGuardOpnd);
        this->m_lowererMD.LoadHelperArgument(instrChk, typeOpnd);

        IR::RegOpnd* equivalentTypeCheckResultOpnd = IR::RegOpnd::New(TyUint8, func);
        IR::HelperCallOpnd* equivalentTypeCheckHelperCallOpnd = IR::HelperCallOpnd::New(helperMethod, func);
        IR::Instr* equivalentTypeCheckCallInstr = IR::Instr::New(Js::OpCode::Call, equivalentTypeCheckResultOpnd, equivalentTypeCheckHelperCallOpnd, func);
        instrChk->InsertBefore(equivalentTypeCheckCallInstr);
        this->m_lowererMD.LowerCall(equivalentTypeCheckCallInstr, 0);

        InsertTestBranch(equivalentTypeCheckResultOpnd, equivalentTypeCheckResultOpnd, Js::OpCode::BrEq_A, labelTypeCheckFailed, instrChk);

        // TODO (ObjTypeSpec): Consider emitting a shared bailout to which a specific bailout kind is written at runtime. This would allow us to distinguish
        // between non-equivalent type and other cases, such as invalidated guard (due to fixed field overwrite, perhaps) or too much thrashing on the
        // equivalent type cache. We could determine bailout kind based on the value returned by the helper. In the case of cache thrashing we could just
        // turn off the whole optimization for a given function.

        instrChk->InsertBefore(labelTypeCheckSucceeded);
    }
    else
    {
        IR::BranchInstr* branchInstr = InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelSecondChance != nullptr ? labelSecondChance : labelTypeCheckFailed, instrChk);
        InsertObjectPoison(regOpnd, branchInstr, instrChk, false);
    }

    // Don't pin the type for polymorphic operations. The code can successfully execute even if this type is no longer referenced by any objects,
    // as long as there are other objects with types equivalent on the properties referenced by this code. The type is kept alive until entry point
    // installation by the JIT transfer data, and after that by the equivalent type cache, so it will stay alive unless or until it gets evicted
    // from the cache.
    if (!doEquivTypeCheck)
    {
        Assert(monoType != nullptr);
        PinTypeRef(monoType, monoType.t, instrChk, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
    }

    return typeOpnd;
}
IR::RegOpnd *
Lowerer::GeneratePolymorphicTypeIndex(IR::RegOpnd * typeOpnd, Js::PropertyGuard * typeCheckGuard, IR::Instr * instrInsert)
{
    // Computes, at JIT time, the code that derives the slot index of a type within a polymorphic
    // type-check guard's value table: index = (type >> PolymorphicInlineCacheShift) & (size - 1).
    // The masking with (size - 1) implies the guard's table size is assumed to be a power of two
    // (NOTE(review): power-of-two assumption inferred from the mask; confirm against the guard's
    // allocation). Returns the register operand holding the computed index.
    IR::RegOpnd * resultOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    InsertMove(resultOpnd, typeOpnd, instrInsert);
    // Discard the low bits that are identical for all recycler-allocated types.
    InsertShift(Js::OpCode::ShrU_A, false, resultOpnd, resultOpnd, IR::IntConstOpnd::New(PolymorphicInlineCacheShift, TyInt8, this->m_func, true), instrInsert);
    InsertAnd(resultOpnd, resultOpnd, IR::IntConstOpnd::New(typeCheckGuard->AsPolyTypeCheckGuard()->GetSize() - 1, TyMachReg, this->m_func, true), instrInsert);
    return resultOpnd;
}
void
Lowerer::GenerateLeaOfOOPData(IR::RegOpnd * regOpnd, void * address, int32 offset, IR::Instr * instrInsert)
{
    // Loads into regOpnd the address of a piece of NativeCodeData for the out-of-process JIT.
    // OOP JIT data is addressed relative to the native-code-data base symbol rather than by an
    // absolute pointer, so this emits LEA regOpnd, [nativeCodeDataSym + totalOffset + offset].
    Func * func = instrInsert->m_func;
    int32 dataOffset;
    // Overflow-checked addition of the data item's offset within the block and the extra offset.
    Int32Math::Add(NativeCodeData::GetDataTotalOffset(address), offset, &dataOffset);
    InsertLea(regOpnd,
        IR::IndirOpnd::New(IR::RegOpnd::New(func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), dataOffset, TyMachPtr,
#if DBG
            // Debug builds attach a human-readable description of the referenced data item.
            NativeCodeData::GetDataDescription(address, func->m_alloc),
#endif
            func, true),
        instrInsert);
}
IR::Opnd *
Lowerer::GenerateIndirOfOOPData(void * address, int32 offset, IR::Instr * instrInsert)
{
    // Builds (without emitting any instruction) an indirect operand that dereferences a piece of
    // NativeCodeData for the out-of-process JIT: [nativeCodeDataSym + totalOffset + offset].
    // Companion to GenerateLeaOfOOPData, which loads the address instead of the value.
    Func * func = instrInsert->m_func;
    int32 dataOffset;
    // Overflow-checked addition of the data item's offset within the block and the extra offset.
    Int32Math::Add(NativeCodeData::GetDataTotalOffset(address), offset, &dataOffset);
    IR::Opnd * opnd = IR::IndirOpnd::New(IR::RegOpnd::New(func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), dataOffset, TyMachPtr,
#if DBG
        // Debug builds attach a human-readable description of the referenced data item.
        NativeCodeData::GetDataDescription(address, func->m_alloc),
#endif
        func, true);
    return opnd;
}
void
Lowerer::InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchInstr, IR::Instr* insertInstr, bool isForStore)
{
    // Delegates to the machine-dependent lowerer to poison an object pointer guarded by the given
    // type-check branch; a no-op on 32-bit ARM. NOTE(review): the "poison" naming suggests a
    // speculative-execution (Spectre-style) mitigation tied to the mispredicted branch path —
    // confirm against LowererMD::InsertObjectPoison.
#ifndef _M_ARM
    LowererMD::InsertObjectPoison(poisonedOpnd, branchInstr, insertInstr, isForStore);
#endif
}
void
Lowerer::PinTypeRef(JITTypeHolder type, void* typeRef, IR::Instr* instr, Js::PropertyId propertyId)
{
    // Registers typeRef with the Func so the referenced type stays alive for the generated code
    // that hard-codes it. For tracing purposes the pin is reported as "strong" when typeRef is the
    // type pointer itself, and "weak" otherwise.
    this->m_func->PinTypeRef(typeRef);

    if (PHASE_TRACE(Js::TracePinnedTypesPhase, this->m_func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        Output::Print(_u("PinnedTypes: function %s(%s) instr %s property ID %u pinned %s reference 0x%p to type 0x%p.\n"),
            this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
            Js::OpCodeUtil::GetOpCodeName(instr->m_opcode), propertyId,
            typeRef == type.t ? _u("strong") : _u("weak"), typeRef, type.t);
        Output::Flush();
    }
}
  6608. void
  6609. Lowerer::GenerateCachedTypeWithoutPropertyCheck(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, IR::Opnd *typeOpnd, IR::LabelInstr *labelTypeCheckFailed)
  6610. {
  6611. Assert(propertySymOpnd->IsMonoObjTypeSpecCandidate());
  6612. Assert(propertySymOpnd->HasInitialType());
  6613. JITTypeHolder typeWithoutProperty = propertySymOpnd->GetInitialType();
  6614. // We should never add properties to objects of static types.
  6615. Assert(Js::DynamicType::Is(typeWithoutProperty->GetTypeId()));
  6616. if (typeOpnd == nullptr)
  6617. {
  6618. // No opnd holding the type was passed in, so we have to load the type here.
  6619. IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  6620. if (!baseOpnd->IsNotTaggedValue())
  6621. {
  6622. m_lowererMD.GenerateObjectTest(baseOpnd, instrInsert, labelTypeCheckFailed);
  6623. }
  6624. IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  6625. typeOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  6626. InsertMove(typeOpnd, opnd, instrInsert);
  6627. }
  6628. Js::JitTypePropertyGuard* typePropertyGuard = CreateTypePropertyGuardForGuardedProperties(typeWithoutProperty, propertySymOpnd);
  6629. IR::Opnd *expectedTypeOpnd;
  6630. if (typePropertyGuard)
  6631. {
  6632. bool emitDirectCheck = true;
  6633. Assert(typePropertyGuard != nullptr);
  6634. Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
  6635. if (this->m_func->IsOOPJIT())
  6636. {
  6637. int typeCheckGuardOffset = NativeCodeData::GetDataTotalOffset(typePropertyGuard);
  6638. expectedTypeOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), typeCheckGuardOffset, TyMachPtr,
  6639. #if DBG
  6640. NativeCodeData::GetDataDescription(typePropertyGuard, this->m_func->m_alloc),
  6641. #endif
  6642. this->m_func, true);
  6643. this->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
  6644. }
  6645. else
  6646. {
  6647. expectedTypeOpnd = IR::MemRefOpnd::New((void*)(typePropertyGuard->GetAddressOfValue()), TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
  6648. }
  6649. emitDirectCheck = false;
  6650. OUTPUT_VERBOSE_TRACE_FUNC(Js::ObjTypeSpecPhase, this->m_func, _u("Emitted %s type check for type 0x%p.\n"),
  6651. emitDirectCheck ? _u("direct") : _u("indirect"), typeWithoutProperty->GetAddr());
  6652. }
  6653. else
  6654. {
  6655. expectedTypeOpnd = IR::AddrOpnd::New(typeWithoutProperty->GetAddr(), IR::AddrOpndKindDynamicType, m_func, true);
  6656. }
  6657. InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelTypeCheckFailed, instrInsert);
  6658. // Technically, it should be enough to pin the final type, because it should keep all of its predecessors alive, but
  6659. // just to be extra cautious, let's pin the initial type as well.
  6660. PinTypeRef(typeWithoutProperty, typeWithoutProperty.t, instrInsert, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
  6661. }
void
Lowerer::GenerateFixedFieldGuardCheck(IR::Instr *insertPointInstr, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
{
    // Thin wrapper: a fixed-field check is lowered as an ordinary property guard check — the guard
    // is zeroed at runtime if the fixed value is overwritten, sending us to labelBailOut.
    GeneratePropertyGuardCheck(insertPointInstr, propertySymOpnd, labelBailOut);
}
Js::JitTypePropertyGuard*
Lowerer::CreateTypePropertyGuardForGuardedProperties(JITTypeHolder type, IR::PropertySymOpnd* propertySymOpnd)
{
    // Creates (at most one) type property guard for the given type and registers it against every
    // property the operand's guarded-property bit vector covers, so runtime property changes can
    // invalidate the emitted type check. Returns nullptr when no guard was created — either
    // because there are no shared property guards for this work item, or because lazy fixed-type
    // bailout is used instead (properties are then recorded in lazyBailoutProperties).

    // We should always have a list of guarded properties.
    Assert(propertySymOpnd->GetGuardedPropOps() != nullptr);

    Js::JitTypePropertyGuard* guard = nullptr;

    if (m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards())
    {
        // Consider (ObjTypeSpec): Because we allocate these guards from the JIT thread we can't share guards for the same type across multiple functions.
        // This leads to proliferation of property guards on the thread context. The alternative would be to pre-allocate shared (by value) guards
        // from the thread context during work item creation. We would create too many of them (because some types aren't actually used as guards),
        // but we could share a guard for a given type between functions. This may ultimately be better.
        LinkGuardToGuardedProperties(propertySymOpnd->GetGuardedPropOps(), [this, type, &guard](Js::PropertyId propertyId)
        {
            if (DoLazyFixedTypeBailout(this->m_func))
            {
                // Lazy bailout mode: no guard object; just remember the property for lazy bailout.
                this->m_func->lazyBailoutProperties.Item(propertyId);
            }
            else
            {
                // Create the guard lazily on the first linked property; reuse it for the rest.
                if (guard == nullptr)
                {
                    guard = this->m_func->GetOrCreateSingleTypeGuard(type->GetAddr());
                }

                if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
                {
                    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Output::Print(_u("ObjTypeSpec: function %s(%s) registered guard 0x%p with value 0x%p for property ID %u.\n"),
                        m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
                        guard, guard->GetValue(), propertyId);
                    Output::Flush();
                }

                this->m_func->EnsurePropertyGuardsByPropertyId();
                this->m_func->LinkGuardToPropertyId(propertyId, guard);
            }
        });
    }

    return guard;
}
Js::JitEquivalentTypeGuard*
Lowerer::CreateEquivalentTypeGuardAndLinkToGuardedProperties(IR::PropertySymOpnd* propertySymOpnd)
{
    // Creates an equivalent-type guard (polymorphic or monomorphic flavor) for the operand,
    // links it to the guarded properties for runtime invalidation, and populates the guard's
    // equivalent-type cache with the candidate types plus a deduplicated array of
    // (propertyId, slotIndex, isAuxSlot, mustBeWritable) entries used by the slow equivalence
    // check at runtime.

    // We should always have a list of guarded properties.
    Assert(propertySymOpnd->HasObjTypeSpecFldInfo() && propertySymOpnd->HasEquivalentTypeSet() && propertySymOpnd->GetGuardedPropOps());

    Js::JitEquivalentTypeGuard* guard;
    if (propertySymOpnd->ShouldUsePolyEquivTypeGuard(this->m_func))
    {
        Js::JitPolyEquivalentTypeGuard *polyGuard = this->m_func->CreatePolyEquivalentTypeGuard(propertySymOpnd->GetObjTypeSpecFldId());

        // Copy types from the type set to the guard's value locations
        Js::EquivalentTypeSet* typeSet = propertySymOpnd->GetEquivalentTypeSet();
        for (uint16 ti = 0; ti < typeSet->GetCount(); ti++)
        {
            intptr_t typeToCache = typeSet->GetType(ti)->GetAddr();
            // Each type is stored at the slot its address hashes to within the poly guard.
            polyGuard->SetPolyValue(typeToCache, polyGuard->GetIndexForValue(typeToCache));
        }
        guard = polyGuard;
    }
    else
    {
        guard = this->m_func->CreateEquivalentTypeGuard(propertySymOpnd->GetFirstEquivalentType(), propertySymOpnd->GetObjTypeSpecFldId());
    }

    if (m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards())
    {
        // Register this guard for every guarded property so the runtime can invalidate it when
        // one of those properties changes.
        LinkGuardToGuardedProperties(propertySymOpnd->GetGuardedPropOps(), [=](Js::PropertyId propertyId)
        {
            if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(_u("ObjTypeSpec: function %s(%s) registered equivalent type spec guard 0x%p with value 0x%p for property ID %u.\n"),
                    this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
                    guard, guard->GetValue(), propertyId);
                Output::Flush();
            }

            this->m_func->EnsurePropertyGuardsByPropertyId();
            this->m_func->LinkGuardToPropertyId(propertyId, guard);
        });
    }

    Assert(guard->GetCache() != nullptr);
    Js::EquivalentTypeCache* cache = guard->GetCache();

    // TODO (ObjTypeSpec): If we delayed populating the types until encoder, we could bulk allocate all equivalent type caches
    // in one block from the heap. This would allow us to not allocate them from the native code data allocator and free them
    // when no longer needed. However, we would need to store the global property operation ID in the guard, so we can look up
    // the info in the encoder. Perhaps we could overload the cache pointer to be the ID until encoder.

    // Copy types from the type set to the guard's cache (capped at the cache capacity).
    Js::EquivalentTypeSet* typeSet = propertySymOpnd->GetEquivalentTypeSet();
    uint16 cachedTypeCount = typeSet->GetCount() < EQUIVALENT_TYPE_CACHE_SIZE ? typeSet->GetCount() : EQUIVALENT_TYPE_CACHE_SIZE;
    for (uint16 ti = 0; ti < cachedTypeCount; ti++)
    {
        cache->types[ti] = (Js::Type*)typeSet->GetType(ti)->GetAddr();
    }

#ifdef DEBUG
    // Sanity check: any null entries must form a contiguous tail; a non-null entry after a null
    // one indicates corrupt cache population.
    bool there_was_a_null_type = false;
    for (uint16 ti = 0; ti < cachedTypeCount; ti++)
    {
        if (cache->types[ti] == nullptr)
        {
            there_was_a_null_type = true;
        }
        else if (there_was_a_null_type)
        {
            AssertMsg(false, "there_was_a_null_type ? something is wrong here.");
        }
    }
#endif

    // Populate property ID and slot index arrays on the guard's cache. We iterate over the
    // bit vector of property operations protected by this guard, but some property operations
    // may be referring to the same property ID (but not share the same cache). We skip
    // redundant entries by maintaining a hash set of property IDs we've already encountered.
    auto propOps = propertySymOpnd->GetGuardedPropOps();
    uint propOpCount = propOps->Count();

    bool isTypeStatic = Js::StaticType::Is(propertySymOpnd->GetFirstEquivalentType()->GetTypeId());
    JsUtil::BaseDictionary<Js::PropertyId, Js::EquivalentPropertyEntry*, JitArenaAllocator> propIds(this->m_alloc, propOpCount);
    Js::EquivalentPropertyEntry* properties = AnewArray(this->m_alloc, Js::EquivalentPropertyEntry, propOpCount);
    uint propIdCount = 0;

    FOREACH_BITSET_IN_SPARSEBV(propOpId, propOps)
    {
        ObjTypeSpecFldInfo* propOpInfo = this->m_func->GetGlobalObjTypeSpecFldInfo(propOpId);
        Js::PropertyId propertyId = propOpInfo->GetPropertyId();
        Js::PropertyIndex propOpIndex = Js::Constants::NoSlot;

        bool hasFixedValue = propOpInfo->HasFixedValue();
        if (hasFixedValue)
        {
            cache->SetHasFixedValue();
        }

        bool isLoadedFromProto = propOpInfo->IsLoadedFromProto();
        if (isLoadedFromProto)
        {
            cache->SetIsLoadedFromProto();
        }
        else
        {
            // Only non-proto accesses carry a concrete slot index to verify at runtime.
            propOpIndex = propOpInfo->GetSlotIndex();
        }

        bool propOpUsesAuxSlot = propOpInfo->UsesAuxSlot();

        AssertMsg(!isTypeStatic || !propOpInfo->IsBeingStored(), "Why are we storing a field to an object of static type?");

        Js::EquivalentPropertyEntry* entry = nullptr;
        if (propIds.TryGetValue(propertyId, &entry))
        {
            if (propOpIndex == entry->slotIndex && propOpUsesAuxSlot == entry->isAuxSlot)
            {
                // Same property, same location: just merge the writability requirement.
                entry->mustBeWritable |= propOpInfo->IsBeingStored();
            }
            else
            {
                // Due to inline cache sharing we have the same property accessed using different caches
                // with inconsistent info. This means a guaranteed bailout on the equivalent type check.
                // We'll just let it happen and turn off the optimization for this function. We could avoid
                // this problem by tracking property information on the value type in glob opt.
                if (PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->m_func))
                {
                    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Output::Print(_u("EquivObjTypeSpec: top function %s (%s): duplicate property clash on %d \n"),
                        m_func->GetJITFunctionBody()->GetDisplayName(), m_func->GetDebugNumberSet(debugStringBuffer), propertyId);
                    Output::Flush();
                }

                // Add a second, conflicting entry for the same property ID (guaranteed bailout).
                Assert(propIdCount < propOpCount);
                __analysis_assume(propIdCount < propOpCount);
                entry = &properties[propIdCount++];

                entry->propertyId = propertyId;
                entry->slotIndex = propOpIndex;
                entry->isAuxSlot = propOpUsesAuxSlot;
                entry->mustBeWritable = propOpInfo->IsBeingStored();
            }
        }
        else
        {
            // First time we see this property ID: append an entry and remember it for dedup.
            Assert(propIdCount < propOpCount);
            __analysis_assume(propIdCount < propOpCount);
            entry = &properties[propIdCount++];

            entry->propertyId = propertyId;
            entry->slotIndex = propOpIndex;
            entry->isAuxSlot = propOpUsesAuxSlot;
            entry->mustBeWritable = propOpInfo->IsBeingStored();

            propIds.AddNew(propertyId, entry);
        }
    }
    NEXT_BITSET_IN_SPARSEBV;

    // Publish the deduplicated entries into the cache record, copying them into memory owned by
    // the native-code-data allocator so they outlive the JIT arena.
    cache->record.propertyCount = propIdCount;
    // Js::EquivalentPropertyEntry does not contain pointer, no need to fixup
    cache->record.properties = NativeCodeDataNewArrayNoFixup(this->m_func->GetNativeCodeDataAllocator(), Js::EquivalentPropertyEntry, propIdCount);
    memcpy(cache->record.properties, properties, propIdCount * sizeof(Js::EquivalentPropertyEntry));

    return guard;
}
bool
Lowerer::LinkCtorCacheToGuardedProperties(JITTimeConstructorCache* ctorCache)
{
    // Registers the constructor cache against every property it guards so runtime property
    // changes can invalidate it. Returns true if at least one property was linked.

    // We do not always have guarded properties. If the constructor is empty and the subsequent code doesn't load or store any of
    // the constructed object's properties, or if all inline caches are empty then this ctor cache doesn't guard any properties.
    if (ctorCache->GetGuardedPropOps() == nullptr)
    {
        return false;
    }

    bool linked = false;
    if (this->m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards())
    {
        linked = LinkGuardToGuardedProperties(ctorCache->GetGuardedPropOps(), [=](Js::PropertyId propertyId)
        {
            if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(_u("ObjTypeSpec: function %s(%s) registered ctor cache 0x%p with value 0x%p for property %u.\n"),
                    this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
                    ctorCache->GetRuntimeCacheAddr(), ctorCache->GetType()->GetAddr(), propertyId);
                Output::Flush();
            }

            this->m_func->EnsureCtorCachesByPropertyId();
            this->m_func->LinkCtorCacheToPropertyId(propertyId, ctorCache);
        });
    }
    return linked;
}
template<typename LinkFunc>
bool
Lowerer::LinkGuardToGuardedProperties(const BVSparse<JitArenaAllocator>* guardedPropOps, LinkFunc link)
{
    // Iterates the guarded-property-operation bit vector and invokes 'link(propertyId)' for each
    // property that actually needs runtime invalidation tracking (being added, loaded from a
    // prototype, or having a fixed value). Returns true if 'link' was invoked at least once.
    // Requires that shared property guards exist for this work item.
    Assert(this->m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuards());
    Assert(guardedPropOps != nullptr);
    bool linked = false;

    // For every entry in the bit vector, register the guard for the corresponding property ID.
    FOREACH_BITSET_IN_SPARSEBV(propertyOpId, guardedPropOps)
    {
        ObjTypeSpecFldInfo* propertyOpInfo = this->m_func->GetGlobalObjTypeSpecFldInfo(propertyOpId);
        Js::PropertyId propertyId = propertyOpInfo->GetPropertyId();

        // It's okay for an equivalent type check to be registered as a guard against a property becoming read-only. This transpires if, there is
        // a different monomorphic type check upstream, which guarantees the actual type of the object needed for the hard-coded type transition,
        // but it is later followed by a sequence of polymorphic inline caches, which do not have that type in the type set. At the beginning of
        // that sequence we'll emit an equivalent type check to verify that the actual type has relevant properties on appropriate slots. Then in
        // the dead store pass we'll walk upwards and encounter this check first, thus we'll drop the guarded properties accumulated thus far
        // (including the one being added) on that check.
        // AssertMsg(!propertyOpInfo->IsBeingAdded() || !isEquivalentTypeGuard, "Why do we have an equivalent type check protecting a property add?");

        if (propertyOpInfo->IsBeingAdded() || propertyOpInfo->IsLoadedFromProto() || propertyOpInfo->HasFixedValue())
        {
            // Equivalent object type spec only supports fixed fields on prototypes. This is to simplify the slow type equivalence check.
            // See JavascriptOperators::CheckIfTypeIsEquivalent.
            Assert(!propertyOpInfo->IsPoly() || (!propertyOpInfo->HasFixedValue() || propertyOpInfo->IsLoadedFromProto() || propertyOpInfo->UsesAccessor()));

            if (this->m_func->GetWorkItem()->GetJITTimeInfo()->HasSharedPropertyGuard(propertyId))
            {
                link(propertyId);
                linked = true;
            }
            else
            {
                AssertMsg(false, "Did we fail to create a shared property guard for a guarded property?");
            }
        }
    }
    NEXT_BITSET_IN_SPARSEBV;

    return linked;
}
void
Lowerer::GeneratePropertyGuardCheck(IR::Instr *insertPointInstr, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
{
    // Emits a runtime check that the operand's property guard has not been invalidated (the
    // runtime zeroes the guard's value slot to invalidate it); branches to labelBailOut when the
    // guard reads zero. In lazy fixed-data-bailout mode no code is emitted — the property is
    // instead recorded for lazy bailout.
    intptr_t guard = propertySymOpnd->GetPropertyGuardValueAddr();
    Assert(guard != 0);

    if (!DoLazyFixedDataBailout(this->m_func))
    {
        // The guard's value slot is pointer-sized, so a single machine compare suffices.
        Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
        IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
        IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New(guard, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
        IR::BranchInstr *branchInstr = InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, labelBailOut, insertPointInstr);
        // Poison the object pointer along the mispredicted (guard-invalidated) path.
        IR::RegOpnd *objPtrReg = IR::RegOpnd::New(propertySymOpnd->GetObjectSym(), TyMachPtr, m_func);
        InsertObjectPoison(objPtrReg, branchInstr, insertPointInstr, false);
    }
    else
    {
        this->m_func->lazyBailoutProperties.Item(propertySymOpnd->GetPropertyId());
    }
}
IR::Instr*
Lowerer::GeneratePropertyGuardCheckBailoutAndLoadType(IR::Instr *insertInstr)
{
    // Lowers an instruction that must (a) verify a property guard (bailing out if invalidated)
    // and (b) load the object's type into the destination. Tagged values (non-objects) get the
    // static number type loaded instead of reading a type pointer. The original instruction is
    // converted into the BailOut on the guard-failed path. Returns the instruction preceding the
    // lowered sequence so the caller can continue iterating.
    IR::Instr* instrPrev = insertInstr->m_prev;

    IR::Opnd* numberTypeOpnd = IR::AddrOpnd::New(insertInstr->m_func->GetScriptContextInfo()->GetNumberTypeStaticAddr(), IR::AddrOpndKindDynamicType, insertInstr->m_func);
    IR::PropertySymOpnd* propertySymOpnd = insertInstr->GetSrc1()->AsPropertySymOpnd();

    IR::LabelInstr* labelBailout = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func, true);
    IR::LabelInstr* labelContinue = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func);
    IR::LabelInstr* loadNumberTypeLabel = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func, true);

    // Guard check: jump to the bailout label when the guard has been invalidated.
    GeneratePropertyGuardCheck(insertInstr, propertySymOpnd, labelBailout);

    // Object path: load the type pointer; tagged values divert to loadNumberTypeLabel.
    IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    GenerateObjectTestAndTypeLoad(insertInstr, baseOpnd, insertInstr->GetDst()->AsRegOpnd(), loadNumberTypeLabel);

    insertInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelContinue, this->m_func));

    // Tagged-number path: the "type" is the shared static number type.
    insertInstr->InsertBefore(loadNumberTypeLabel);
    this->InsertMove(insertInstr->GetDst(), numberTypeOpnd, insertInstr);
    insertInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelContinue, this->m_func));

    // Bailout path: repurpose the original instruction as the BailOut.
    insertInstr->InsertBefore(labelBailout);
    insertInstr->InsertAfter(labelContinue);
    insertInstr->FreeSrc1();
    insertInstr->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(insertInstr);

    return instrPrev;
}
  6958. void
  6959. Lowerer::GenerateAdjustSlots(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, JITTypeHolder initialType, JITTypeHolder finalType)
  6960. {
  6961. IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  6962. bool adjusted = this->GenerateAdjustBaseSlots(instrInsert, baseOpnd, initialType, finalType);
  6963. if (!adjusted)
  6964. {
  6965. baseOpnd->Free(m_func);
  6966. }
  6967. }
bool
Lowerer::GenerateAdjustBaseSlots(IR::Instr *instrInsert, IR::RegOpnd *baseOpnd, JITTypeHolder initialType, JITTypeHolder finalType)
{
    // Emits a call to the AdjustSlots helper when the initial->final type-handler transition
    // needs larger slot capacity. Returns true iff the call was emitted (i.e. baseOpnd was
    // consumed as a helper argument).

    // Possibly allocate new slot capacity to accommodate a type transition.
    AssertMsg(JITTypeHandler::IsTypeHandlerCompatibleForObjectHeaderInlining(initialType->GetTypeHandler(), finalType->GetTypeHandler()),
        "Incompatible typeHandler transition?");
    int oldCount = 0;
    int newCount = 0;
    Js::PropertyIndex inlineSlotCapacity = 0;
    Js::PropertyIndex newInlineSlotCapacity = 0;
    bool needSlotAdjustment =
        JITTypeHandler::NeedSlotAdjustment(initialType->GetTypeHandler(), finalType->GetTypeHandler(), &oldCount, &newCount, &inlineSlotCapacity, &newInlineSlotCapacity);
    if (!needSlotAdjustment)
    {
        return false;
    }

    // Call AdjustSlots using the new counts. Because AdjustSlots uses the "no dispose" flavor of alloc,
    // no implicit calls are possible, and we don't need an implicit call check and bailout.
    // CALL AdjustSlots, instance, newInlineSlotCapacity, newAuxSlotCapacity

    //3rd Param
    Assert(newCount > newInlineSlotCapacity);
    // Aux (out-of-line) slot capacity is whatever of the new count doesn't fit inline.
    const int newAuxSlotCapacity = newCount - newInlineSlotCapacity;
    m_lowererMD.LoadHelperArgument(instrInsert, IR::IntConstOpnd::New(newAuxSlotCapacity, TyInt32, this->m_func));

    //2nd Param
    m_lowererMD.LoadHelperArgument(instrInsert, IR::IntConstOpnd::New(newInlineSlotCapacity, TyUint16, this->m_func));

    //1st Param (instance)
    m_lowererMD.LoadHelperArgument(instrInsert, baseOpnd);

    //CALL HelperAdjustSlots
    IR::Opnd *opnd = IR::HelperCallOpnd::New(IR::HelperAdjustSlots, this->m_func);
    IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    instr->SetSrc1(opnd);
    instrInsert->InsertBefore(instr);
    m_lowererMD.LowerCall(instr, 0);
    return true;
}
void
Lowerer::GenerateFieldStoreWithTypeChange(IR::Instr * instrStFld, IR::PropertySymOpnd *propertySymOpnd, JITTypeHolder initialType, JITTypeHolder finalType)
{
    // Lowers an add-property store as a direct type transition: grow slot capacity if needed,
    // overwrite the object's type pointer with the final type, then store the field directly.

    // Adjust instance slots, if necessary.
    this->GenerateAdjustSlots(instrStFld, propertySymOpnd, initialType, finalType);

    // We should never add properties to objects of static types.
    Assert(Js::DynamicType::Is(finalType->GetTypeId()));

    // Let's pin the final type to be sure its alive when we try to do the type transition.
    PinTypeRef(finalType, finalType.t, instrStFld, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);

    IR::Opnd *finalTypeOpnd = IR::AddrOpnd::New(finalType->GetAddr(), IR::AddrOpndKindDynamicType, instrStFld->m_func, true);

    // Set the new type.
    IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(instrStFld->m_func);
    IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrStFld->m_func);
    this->InsertMove(opnd, finalTypeOpnd, instrStFld);

    // Now do the store.
    GenerateDirectFieldStore(instrStFld, propertySymOpnd);
}
bool
Lowerer::GenerateStFldWithCachedFinalType(IR::Instr * instrStFld, IR::PropertySymOpnd *propertySymOpnd)
{
    // This function tries to treat a sequence of add-property stores as a single type transition.
    // It checks the cached initial type, performs the transition + store on the fast path, and
    // converts the original StFld into the bailout on the slow path. Always returns true
    // (the operation was fully lowered here).
    Assert(propertySymOpnd == instrStFld->GetDst()->AsPropertySymOpnd());
    Assert(propertySymOpnd->IsMonoObjTypeSpecCandidate());
    Assert(propertySymOpnd->HasFinalType());
    Assert(propertySymOpnd->HasInitialType());

    IR::Instr *instr;
    IR::LabelInstr *labelBailOut = nullptr;

    AssertMsg(!propertySymOpnd->IsTypeChecked(), "Why are we doing a type transition when we have the type we want?");

    // If the initial type must be checked here, do it.
    Assert(instrStFld->HasBailOutInfo());
    labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    GenerateCachedTypeWithoutPropertyCheck(instrStFld, propertySymOpnd, nullptr/*typeOpnd*/, labelBailOut);

    // Do the type transition.
    GenerateFieldStoreWithTypeChange(instrStFld, propertySymOpnd, propertySymOpnd->GetInitialType(), propertySymOpnd->GetFinalType());
    // The fast path emitted above fully consumed the store; drop the original operands.
    instrStFld->FreeSrc1();
    instrStFld->FreeDst();

    // Insert the bailout and let the main path branch around it.
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrStFld->InsertBefore(instr);

    if (instrStFld->HasBailOutInfo())
    {
        // Repurpose the original instruction as the bailout on the type-check-failed path.
        Assert(labelBailOut != nullptr);
        instrStFld->InsertBefore(labelBailOut);
        instrStFld->InsertAfter(labelDone);

        instrStFld->m_opcode = Js::OpCode::BailOut;
        this->GenerateBailOut(instrStFld);
    }
    else
    {
        // No bailout info: the original instruction is no longer needed at all.
        instrStFld->InsertAfter(labelDone);
        instrStFld->Remove();
    }

    return true;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerScopedStFld
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerScopedStFld(IR::Instr * stFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache,
    bool withPropertyOperationFlags, Js::PropertyOperationFlags flags)
{
    // Lowers a scoped field store into a helper call. Helper arguments are pushed in reverse
    // order: [flags,] [scriptContext,] defaultInstance (src2), value (src1), property sym (dst),
    // and — when using an inline cache — cache index, runtime cache, and function body.
    // Returns the instruction preceding the call so the caller can continue iterating.
    IR::Instr *instrPrev = stFldInstr->m_prev;

    if (withPropertyOperationFlags)
    {
        m_lowererMD.LoadHelperArgument(stFldInstr,
            IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    }

    if(!withInlineCache)
    {
        // Without a cache the helper needs the script context explicitly.
        LoadScriptContext(stFldInstr);
    }

    // Pass the default instance
    IR::Opnd *src = stFldInstr->UnlinkSrc2();
    m_lowererMD.LoadHelperArgument(stFldInstr, src);

    // Pass the value to store
    src = stFldInstr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(stFldInstr, src);

    // Pass the property sym to store to
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");

    this->LoadPropertySymAsArgument(stFldInstr, dst);

    if (withInlineCache)
    {
        AssertMsg(dst->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");

        m_lowererMD.LoadHelperArgument(
            stFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        // Not using the polymorphic inline cache because the fast path only uses the monomorphic inline cache
        this->m_lowererMD.LoadHelperArgument(stFldInstr, this->LoadRuntimeInlineCacheOpnd(stFldInstr, dst->AsPropertySymOpnd()));

        m_lowererMD.LoadHelperArgument(stFldInstr, LoadFunctionBodyOpnd(stFldInstr));
    }

    m_lowererMD.ChangeToHelperCall(stFldInstr, helperMethod);

    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerLoadVar
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerLoadVar(IR::Instr *instr, IR::Opnd *opnd)
{
    // Lowers a var load by attaching the source operand and converting the instruction into a
    // machine-level assignment (move).
    instr->SetSrc1(opnd);
    return m_lowererMD.ChangeToAssign(instr);
}
  7111. IR::Instr *
  7112. Lowerer::LoadHelperTemp(IR::Instr * instr, IR::Instr * instrInsert)
  7113. {
  7114. IR::Opnd *tempOpnd;
  7115. IR::Opnd *dst = instr->GetDst();
  7116. AssertMsg(dst != nullptr, "Always expect a dst for these.");
  7117. AssertMsg(instr->dstIsTempNumber, "Should only be loading temps here");
  7118. Assert(dst->IsRegOpnd());
  7119. StackSym * tempNumberSym = this->GetTempNumberSym(dst, instr->dstIsTempNumberTransferred);
  7120. IR::Instr *load = InsertLoadStackAddress(tempNumberSym, instrInsert);
  7121. tempOpnd = load->GetDst();
  7122. m_lowererMD.LoadHelperArgument(instrInsert, tempOpnd);
  7123. return load;
  7124. }
void
Lowerer::LoadArgumentCount(IR::Instr *const instr)
{
    // Lowers an argument-count load. Inlinees know their actual count at JIT time, so it becomes
    // a constant; coroutines read it from their call info; everything else defers to the
    // machine-dependent lowerer.
    Assert(instr);
    Assert(instr->GetDst());
    Assert(!instr->GetSrc1());
    Assert(!instr->GetSrc2());

    if(instr->m_func->IsInlinee())
    {
        // Argument count including 'this'
        instr->SetSrc1(IR::IntConstOpnd::New(instr->m_func->actualCount, TyUint32, instr->m_func, true));
        LowererMD::ChangeToAssign(instr);
    }
    else if (instr->m_func->GetJITFunctionBody()->IsCoroutine())
    {
        // Coroutines: the count lives in the call info stored with the generator frame.
        IR::SymOpnd* symOpnd = LoadCallInfo(instr);
        instr->SetSrc1(symOpnd);
        LowererMD::ChangeToAssign(instr);
    }
    else
    {
        m_lowererMD.LoadArgumentCount(instr);
    }
}
void
Lowerer::LoadStackArgPtr(IR::Instr *const instr)
{
    // Lowers a load of the pointer to the stack arguments. For inlinees this is the address of
    // the first argument slot after 'this' within the inlinee arg area; otherwise the
    // machine-dependent lowerer handles it.
    Assert(instr);
    Assert(instr->GetDst());
    Assert(!instr->GetSrc1());
    Assert(!instr->GetSrc2());

    if(instr->m_func->IsInlinee())
    {
        // Address of argument after 'this'
        const auto firstRealArgStackSym = instr->m_func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
        // Skip one machine pointer to step over the 'this' slot.
        this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr);
        instr->SetSrc1(IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, instr->m_func));
        ChangeToLea(instr);
    }
    else
    {
        m_lowererMD.LoadStackArgPtr(instr);
    }
}
  7169. IR::Instr *
  7170. Lowerer::InsertLoadStackAddress(StackSym *sym, IR::Instr * instrInsert, IR::RegOpnd *optionalDstOpnd /* = nullptr */)
  7171. {
  7172. IR::RegOpnd * regDst = optionalDstOpnd != nullptr ? optionalDstOpnd : IR::RegOpnd::New(TyMachReg, this->m_func);
  7173. IR::SymOpnd * symSrc = IR::SymOpnd::New(sym, TyMachPtr, this->m_func);
  7174. return InsertLea(regDst, symSrc, instrInsert);
  7175. }
  7176. void
  7177. Lowerer::LoadArgumentsFromFrame(IR::Instr *const instr)
  7178. {
  7179. Assert(instr);
  7180. Assert(instr->GetDst());
  7181. Assert(!instr->GetSrc1());
  7182. Assert(!instr->GetSrc2());
  7183. if(instr->m_func->IsInlinee())
  7184. {
  7185. // Use the inline object meta arg slot for the arguments object
  7186. instr->SetSrc1(instr->m_func->GetInlineeArgumentsObjectSlotOpnd());
  7187. LowererMD::ChangeToAssign(instr);
  7188. }
  7189. else
  7190. {
  7191. m_lowererMD.LoadArgumentsFromFrame(instr);
  7192. }
  7193. }
  7194. #ifdef ENABLE_WASM
IR::Instr *
Lowerer::LowerCheckWasmSignature(IR::Instr * instr)
{
    // Lowers CheckWasmSignature for a wasm indirect call: src1 is the callee
    // WasmScriptFunction, src2 the expected signature id. On mismatch the
    // generated code throws WASMERR_SignatureMismatch.
    Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2()->IsIntConstOpnd());
    int sigId = instr->UnlinkSrc2()->AsIntConstOpnd()->AsInt32();
    IR::Instr *instrPrev = instr->m_prev;
    // The callee's actual signature pointer: [callee + offsetOfSignature].
    IR::IndirOpnd * actualSig = IR::IndirOpnd::New(instr->UnlinkSrc1()->AsRegOpnd(), Js::WasmScriptFunction::GetOffsetOfSignature(), TyMachReg, m_func);
    Wasm::WasmSignature * expectedSig = m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetWasmSignature(sigId);
    if (expectedSig->GetShortSig() == Js::Constants::InvalidSignature)
    {
        // No compact (short) signature available: call the runtime helper to
        // do a full comparison against the expected signature object.
        intptr_t sigAddr = m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetWasmSignatureAddr(sigId);
        IR::AddrOpnd * expectedOpnd = IR::AddrOpnd::New(sigAddr, IR::AddrOpndKindConstantAddress, m_func);
        m_lowererMD.LoadHelperArgument(instr, expectedOpnd);
        m_lowererMD.LoadHelperArgument(instr, actualSig);
        LoadScriptContext(instr);
        m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_CheckWasmSignature);
    }
    else
    {
        // Fast path: compare the callee's short-signature word against the
        // expected constant inline; branch to the trap label on mismatch.
        // Layout after insertion: [cmp/branch code] trapLabel: throw;
        // labelFallThrough: ...
        IR::LabelInstr * trapLabel = InsertLabel(true, instr);
        IR::LabelInstr * labelFallThrough = InsertLabel(false, instr->m_next);
        IR::RegOpnd * actualRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
        InsertMove(actualRegOpnd, actualSig, trapLabel);
        IR::IndirOpnd * shortSigIndir = IR::IndirOpnd::New(actualRegOpnd, Wasm::WasmSignature::GetOffsetOfShortSig(), TyMachReg, m_func);
        InsertCompareBranch(shortSigIndir, IR::IntConstOpnd::New(expectedSig->GetShortSig(), TyMachReg, m_func), Js::OpCode::BrNeq_A, trapLabel, trapLabel);
        // Matching signature: skip over the trap block.
        InsertBranch(Js::OpCode::Br, labelFallThrough, trapLabel);
        GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_SignatureMismatch), TyInt32, m_func), instr);
        instr->Remove();
    }
    return instrPrev;
}
IR::Instr *
Lowerer::LowerLdWasmFunc(IR::Instr* instr)
{
    // Lowers LdWasmFunc: dst = table->values[index], where src1 is a
    // WebAssemblyTable and src2 the element index. Traps with
    // WASMERR_TableIndexOutOfRange when index >= currentLength, and with
    // WASMERR_NeedWebAssemblyFunc when the slot is null.
    IR::Instr * prev = instr->m_prev;
    IR::RegOpnd * tableReg = instr->UnlinkSrc1()->AsRegOpnd();
    IR::Opnd * indexOpnd = instr->UnlinkSrc2();
    IR::Opnd * dst = instr->UnlinkDst();
    IR::IndirOpnd * lengthOpnd = IR::IndirOpnd::New(tableReg, Js::WebAssemblyTable::GetOffsetOfCurrentLength(), TyUint32, m_func);
    IR::IndirOpnd * valuesIndirOpnd = IR::IndirOpnd::New(tableReg, Js::WebAssemblyTable::GetOffsetOfValues(), TyMachPtr, m_func);
    IR::RegOpnd * valuesRegOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    byte scale = m_lowererMD.GetDefaultIndirScale();
    IR::IndirOpnd * funcIndirOpnd;
    if (indexOpnd->IsIntConstOpnd())
    {
        // Constant index: fold the scaled element offset into the indir.
        funcIndirOpnd = IR::IndirOpnd::New(valuesRegOpnd, indexOpnd->AsIntConstOpnd()->AsInt32() << scale, TyMachPtr, m_func);
    }
    else
    {
        Assert(indexOpnd->IsRegOpnd());
        funcIndirOpnd = IR::IndirOpnd::New(valuesRegOpnd, indexOpnd->AsRegOpnd(), TyMachPtr, m_func);
        funcIndirOpnd->SetScale(scale);
    }
    // Resulting layout: [checks/loads] trapLabel: throw NeedWebAssemblyFunc;
    // trapOutOfBoundsLabel: throw TableIndexOutOfRange; doneLabel: ...
    IR::LabelInstr * trapOutOfBoundsLabel = InsertLabel(true, instr);
    IR::LabelInstr * trapLabel = InsertLabel(true, trapOutOfBoundsLabel);
    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    // if (index >= table->currentLength) goto trapOutOfBounds (unsigned compare)
    InsertCompareBranch(indexOpnd, lengthOpnd, Js::OpCode::BrGe_A, true, trapOutOfBoundsLabel, trapLabel);
    InsertMove(valuesRegOpnd, valuesIndirOpnd, trapLabel);
    InsertMove(dst, funcIndirOpnd, trapLabel);
    // if (dst == nullptr) goto trap — uninitialized table slot.
    InsertCompareBranch(dst, IR::IntConstOpnd::New(0, TyMachPtr, m_func), Js::OpCode::BrEq_A, trapLabel, trapLabel);
    InsertBranch(Js::OpCode::Br, doneLabel, trapLabel);
    GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_NeedWebAssemblyFunc), TyInt32, m_func), trapOutOfBoundsLabel);
    GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_TableIndexOutOfRange), TyInt32, m_func), instr);
    instr->Remove();
    return prev;
}
  7263. IR::Instr *
  7264. Lowerer::LowerGrowWasmMemory(IR::Instr* instr)
  7265. {
  7266. IR::Instr * instrPrev = m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
  7267. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  7268. m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_GrowWasmMemory);
  7269. return instrPrev;
  7270. }
  7271. #endif
  7272. IR::Instr *
  7273. Lowerer::LowerUnaryHelper(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::Opnd* opndBailoutArg)
  7274. {
  7275. IR::Instr *instrPrev;
  7276. IR::Opnd *src1 = instr->UnlinkSrc1();
  7277. instrPrev = m_lowererMD.LoadHelperArgument(instr, src1);
  7278. m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, opndBailoutArg);
  7279. return instrPrev;
  7280. }
  7281. // helper takes memory context as second argument
  7282. IR::Instr *
  7283. Lowerer::LowerUnaryHelperMem(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::Opnd* opndBailoutArg)
  7284. {
  7285. IR::Instr *instrPrev;
  7286. instrPrev = LoadScriptContext(instr);
  7287. return this->LowerUnaryHelper(instr, helperMethod, opndBailoutArg);
  7288. }
  7289. IR::Instr *
  7290. Lowerer::LowerUnaryHelperMemWithFunctionInfo(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  7291. {
  7292. m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionInfoOpnd(instr));
  7293. return this->LowerUnaryHelperMem(instr, helperMethod);
  7294. }
  7295. IR::Instr *
  7296. Lowerer::LowerUnaryHelperMemWithFuncBody(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  7297. {
  7298. m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionBodyOpnd(instr));
  7299. return this->LowerUnaryHelperMem(instr, helperMethod);
  7300. }
  7301. IR::Instr *
  7302. Lowerer::LowerBinaryHelperMemWithFuncBody(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  7303. {
  7304. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
  7305. m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionBodyOpnd(instr));
  7306. return this->LowerBinaryHelperMem(instr, helperMethod);
  7307. }
  7308. IR::Instr *
  7309. Lowerer::LowerUnaryHelperMemWithTemp(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  7310. {
  7311. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2, "Expected a unary instruction...");
  7312. IR::Instr * instrFirst;
  7313. IR::Opnd * tempOpnd;
  7314. if (instr->dstIsTempNumber)
  7315. {
  7316. instrFirst = this->LoadHelperTemp(instr, instr);
  7317. }
  7318. else
  7319. {
  7320. tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  7321. instrFirst = m_lowererMD.LoadHelperArgument(instr, tempOpnd);
  7322. }
  7323. this->LowerUnaryHelperMem(instr, helperMethod);
  7324. return instrFirst;
  7325. }
  7326. IR::Instr *
  7327. Lowerer::LowerUnaryHelperMemWithTemp2(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::JnHelperMethod helperMethodWithTemp)
  7328. {
  7329. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2, "Expected a unary instruction...");
  7330. if (instr->dstIsTempNumber)
  7331. {
  7332. IR::Instr * instrFirst = this->LoadHelperTemp(instr, instr);
  7333. this->LowerUnaryHelperMem(instr, helperMethodWithTemp);
  7334. return instrFirst;
  7335. }
  7336. return this->LowerUnaryHelperMem(instr, helperMethod);
  7337. }
  7338. IR::Instr *
  7339. Lowerer::LowerUnaryHelperMemWithBoolReference(IR::Instr *instr, IR::JnHelperMethod helperMethod, bool useBoolForBailout)
  7340. {
  7341. if (!this->m_func->tempSymBool)
  7342. {
  7343. this->m_func->tempSymBool = StackSym::New(TyUint8, this->m_func);
  7344. this->m_func->StackAllocate(this->m_func->tempSymBool, TySize[TyUint8]);
  7345. }
  7346. IR::SymOpnd * boolOpnd = IR::SymOpnd::New(this->m_func->tempSymBool, TyUint8, this->m_func);
  7347. IR::RegOpnd * boolRefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  7348. InsertLea(boolRefOpnd, boolOpnd, instr);
  7349. m_lowererMD.LoadHelperArgument(instr, boolRefOpnd);
  7350. return this->LowerUnaryHelperMem(instr, helperMethod, useBoolForBailout ? boolOpnd : nullptr);
  7351. }
  7352. IR::Instr *
  7353. Lowerer::LowerInitCachedScope(IR::Instr* instr)
  7354. {
  7355. instr->m_opcode = Js::OpCode::CallHelper;
  7356. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperOP_InitCachedScope, this->m_func);
  7357. IR::Opnd * src1 = instr->UnlinkSrc1();
  7358. instr->SetSrc1(helperOpnd);
  7359. instr->SetSrc2(src1);
  7360. return instr;
  7361. }
  7362. ///----------------------------------------------------------------------------
  7363. ///
  7364. /// Lowerer::LowerBinaryHelper
  7365. ///
  7366. ///----------------------------------------------------------------------------
  7367. IR::Instr *
  7368. Lowerer::LowerBinaryHelper(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  7369. {
  7370. // The only case where this would still be null when we return is when
  7371. // helperMethod == HelperOP_CmSrEq_EmptyString; in which case we ignore
  7372. // instrPrev.
  7373. IR::Instr *instrPrev = nullptr;
  7374. AssertMsg((Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Unsigned1) ||
  7375. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3 ||
  7376. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2 ||
  7377. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2Int1 ||
  7378. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::ElementU ||
  7379. instr->m_opcode == Js::OpCode::InvalCachedScope, "Expected a binary instruction...");
  7380. IR::Opnd *src2 = instr->UnlinkSrc2();
  7381. if (helperMethod != IR::HelperOP_CmSrEq_EmptyString)
  7382. instrPrev = m_lowererMD.LoadHelperArgument(instr, src2);
  7383. IR::Opnd *src1 = instr->UnlinkSrc1();
  7384. m_lowererMD.LoadHelperArgument(instr, src1);
  7385. m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  7386. return instrPrev;
  7387. }
  7388. // helper takes memory context as third argument
  7389. IR::Instr *
  7390. Lowerer::LowerBinaryHelperMem(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  7391. {
  7392. IR::Instr *instrPrev;
  7393. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3 ||
  7394. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2 ||
  7395. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2Int1 ||
  7396. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Unsigned1, "Expected a binary instruction...");
  7397. instrPrev = LoadScriptContext(instr);
  7398. return this->LowerBinaryHelper(instr, helperMethod);
  7399. }
  7400. IR::Instr *
  7401. Lowerer::LowerBinaryHelperMemWithTemp(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  7402. {
  7403. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
  7404. IR::Instr * instrFirst;
  7405. IR::Opnd * tempOpnd;
  7406. if (instr->dstIsTempNumber)
  7407. {
  7408. instrFirst = this->LoadHelperTemp(instr, instr);
  7409. }
  7410. else
  7411. {
  7412. tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  7413. instrFirst = m_lowererMD.LoadHelperArgument(instr, tempOpnd);
  7414. }
  7415. this->LowerBinaryHelperMem(instr, helperMethod);
  7416. return instrFirst;
  7417. }
  7418. IR::Instr *
  7419. Lowerer::LowerBinaryHelperMemWithTemp2(
  7420. IR::Instr *instr,
  7421. IR::JnHelperMethod helperMethod,
  7422. IR::JnHelperMethod helperMethodWithTemp
  7423. )
  7424. {
  7425. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
  7426. if (instr->dstIsTempNumber && instr->GetDst() && instr->GetDst()->GetValueType().HasBeenNumber())
  7427. {
  7428. IR::Instr * instrFirst = this->LoadHelperTemp(instr, instr);
  7429. this->LowerBinaryHelperMem(instr, helperMethodWithTemp);
  7430. return instrFirst;
  7431. }
  7432. return this->LowerBinaryHelperMem(instr, helperMethod);
  7433. }
IR::Instr *
Lowerer::LowerAddLeftDeadForString(IR::Instr *instr)
{
    // Fast path for "left = left + right" where the left string is a dying
    // temp: when left is a CompoundString that owns its last block with spare
    // capacity, and right is a finalized single-character string, append
    // right's character directly into left's buffer in place. Every guard
    // below jumps to labelHelper, which falls into the generic
    // Op_AddLeftDead helper call emitted at the end.
    IR::Opnd * opndLeft;
    IR::Opnd * opndRight;
    opndLeft = instr->GetSrc1();
    opndRight = instr->GetSrc2();
    Assert(opndLeft && opndRight);
    bool generateFastPath = this->m_func->DoFastPaths();
    // Fast path requires reg operands, likely-string value types, dst
    // aliasing the left operand, and left != right.
    if (!generateFastPath
        || !opndLeft->IsRegOpnd()
        || !opndRight->IsRegOpnd()
        || !instr->GetDst()->IsRegOpnd()
        || !opndLeft->GetValueType().IsLikelyString()
        || !opndRight->GetValueType().IsLikelyString()
        || !opndLeft->IsEqual(instr->GetDst()->AsRegOpnd())
        || opndLeft->IsEqual(opndRight))
    {
        return this->LowerBinaryHelperMemWithTemp(instr, IR::HelperOp_AddLeftDead);
    }
    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr * labelFallThrough = instr->GetOrCreateContinueLabel(false);
    IR::LabelInstr *insertBeforeInstr = labelHelper;
    instr->InsertBefore(labelHelper);
    // Guard: left must be a heap object (not a tagged value).
    if (!opndLeft->IsNotTaggedValue())
    {
        this->m_lowererMD.GenerateObjectTest(opndLeft->AsRegOpnd(), insertBeforeInstr, labelHelper);
    }
    // Guard: left's vtable must be exactly CompoundString.
    IR::BranchInstr* branchInstr = InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), 0, TyMachPtr, m_func),
        this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertBeforeInstr);
    InsertObjectPoison(opndLeft->AsRegOpnd(), branchInstr, insertBeforeInstr, false);
    // Guard: right must be a JavascriptString.
    GenerateStringTest(opndRight->AsRegOpnd(), insertBeforeInstr, labelHelper);
    // left->m_charLength <= JavascriptArray::MaxCharLength
    IR::IndirOpnd *indirLeftCharLengthOpnd = IR::IndirOpnd::New(opndLeft->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func);
    IR::RegOpnd *regLeftCharLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(regLeftCharLengthOpnd, indirLeftCharLengthOpnd, insertBeforeInstr);
    InsertCompareBranch(
        regLeftCharLengthOpnd,
        IR::IntConstOpnd::New(Js::JavascriptString::MaxCharLength, TyUint32, m_func),
        Js::OpCode::BrGe_A,
        labelHelper,
        insertBeforeInstr);
    // left->m_pszValue == NULL (!left->IsFinalized())
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func),
        IR::AddrOpnd::NewNull(m_func),
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertBeforeInstr);
    // right->m_pszValue != NULL (right->IsFinalized())
    InsertCompareBranch(
        IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func),
        IR::AddrOpnd::NewNull(m_func),
        Js::OpCode::BrEq_A,
        labelHelper,
        insertBeforeInstr);
    // if ownsLastBlock != 0
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfOwnsLastBlock(), TyUint8, m_func),
        IR::IntConstOpnd::New(0, TyUint8, m_func),
        Js::OpCode::BrEq_A,
        labelHelper,
        insertBeforeInstr);
    // if right->m_charLength == 1
    InsertCompareBranch(IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_charLength), TyUint32, m_func),
        IR::IntConstOpnd::New(1, TyUint32, m_func),
        Js::OpCode::BrNeq_A, labelHelper, insertBeforeInstr);
    // if left->m_directCharLength == -1
    InsertCompareBranch(IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfDirectCharLength(), TyUint32, m_func),
        IR::IntConstOpnd::New(UINT32_MAX, TyUint32, m_func),
        Js::OpCode::BrNeq_A, labelHelper, insertBeforeInstr);
    // if lastBlockInfo.charLength < lastBlockInfo.charCapacity
    IR::IndirOpnd *indirCharLength = IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoCharLength(), TyUint32, m_func);
    IR::RegOpnd *charLengthOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
    InsertMove(charLengthOpnd, indirCharLength, insertBeforeInstr);
    InsertCompareBranch(charLengthOpnd, IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoCharCapacity(), TyUint32, m_func), Js::OpCode::BrGe_A, labelHelper, insertBeforeInstr);
    // load c = right->m_pszValue[0]
    IR::RegOpnd *pszValue0Opnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::IndirOpnd *indirRightPszOpnd = IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func);
    InsertMove(pszValue0Opnd, indirRightPszOpnd, insertBeforeInstr);
    IR::RegOpnd *charResultOpnd = IR::RegOpnd::New(TyUint16, this->m_func);
    InsertMove(charResultOpnd, IR::IndirOpnd::New(pszValue0Opnd, 0, TyUint16, this->m_func), insertBeforeInstr);
    // lastBlockInfo.buffer[blockCharLength] = c;
    IR::RegOpnd *baseOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(baseOpnd, IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoBuffer(), TyMachPtr, m_func), insertBeforeInstr);
    IR::IndirOpnd *indirBufferToStore = IR::IndirOpnd::New(baseOpnd, charLengthOpnd, (byte)Math::Log2(sizeof(char16)), TyUint16, m_func);
    InsertMove(indirBufferToStore, charResultOpnd, insertBeforeInstr);
    // left->m_charLength++
    InsertAdd(false, indirLeftCharLengthOpnd, regLeftCharLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    // lastBlockInfo.charLength++
    InsertAdd(false, indirCharLength, indirCharLength, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    // Fast path done: skip the helper call.
    InsertBranch(Js::OpCode::Br, labelFallThrough, insertBeforeInstr);
    // Slow path (labelHelper falls into this): generic helper lowering.
    return this->LowerBinaryHelperMemWithTemp(instr, IR::HelperOp_AddLeftDead);
}
  7532. IR::Instr *
  7533. Lowerer::LowerBinaryHelperMemWithTemp3(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::JnHelperMethod helperMethodWithTemp, IR::JnHelperMethod helperMethodLeftDead)
  7534. {
  7535. IR::Opnd *src1 = instr->GetSrc1();
  7536. if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_isTempLastUse && !src1->GetValueType().IsNotString())
  7537. {
  7538. Assert(helperMethodLeftDead == IR::HelperOp_AddLeftDead);
  7539. return LowerAddLeftDeadForString(instr);
  7540. }
  7541. else
  7542. {
  7543. return this->LowerBinaryHelperMemWithTemp2(instr, helperMethod, helperMethodWithTemp);
  7544. }
  7545. }
  7546. StackSym *
  7547. Lowerer::GetTempNumberSym(IR::Opnd * opnd, bool isTempTransferred)
  7548. {
  7549. AssertMsg(opnd->IsRegOpnd(), "Expected regOpnd");
  7550. if (isTempTransferred)
  7551. {
  7552. StackSym * tempNumberSym = StackSym::New(TyMisc, m_func);
  7553. this->m_func->StackAllocate(tempNumberSym, sizeof(Js::JavascriptNumber));
  7554. return tempNumberSym;
  7555. }
  7556. StackSym * stackSym = opnd->AsRegOpnd()->m_sym;
  7557. StackSym * tempNumberSym = stackSym->m_tempNumberSym;
  7558. if (tempNumberSym == nullptr)
  7559. {
  7560. tempNumberSym = StackSym::New(TyMisc, m_func);
  7561. this->m_func->StackAllocate(tempNumberSym, sizeof(Js::JavascriptNumber));
  7562. stackSym->m_tempNumberSym = tempNumberSym;
  7563. }
  7564. return tempNumberSym;
  7565. }
void Lowerer::LowerProfiledLdElemI(IR::JitProfilingInstr *const instr)
{
    // Lower a profiled load-element into a direct call to
    // ProfilingHelpers::ProfiledLdElem, pushing arguments right-to-left.
    Assert(instr);
    /*
        Var ProfilingHelpers::ProfiledLdElem(
            const Var base,
            const Var varIndex,
            FunctionBody *const functionBody,
            const ProfileId profileId,
            bool didArrayAccessHelperCall,
            bool bailedOutOnArraySpecialization)
    */
    Func *const func = instr->m_func;
    // The two trailing bool arguments are always false from JIT'd code.
    m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(false, TyInt8, func));
    m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(false, TyInt8, func));
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(func));
    IR::IndirOpnd *const indir = instr->UnlinkSrc1()->AsIndirOpnd();
    IR::Opnd *const indexOpnd = indir->UnlinkIndexOpnd();
    Assert(indexOpnd || indir->GetOffset() >= 0 && !Js::TaggedInt::IsOverflow(indir->GetOffset()));
    // varIndex: the indir's index register, or the constant offset boxed as
    // a tagged-int Var when there is no index operand.
    m_lowererMD.LoadHelperArgument(
        instr,
        indexOpnd
            ? static_cast<IR::Opnd *>(indexOpnd)
            : IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(indir->GetOffset()), IR::AddrOpndKindDynamicVar, func));
    m_lowererMD.LoadHelperArgument(instr, indir->UnlinkBaseOpnd());
    indir->Free(func);
    instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledLdElem, func));
    m_lowererMD.LowerCall(instr, 0);
}
void Lowerer::LowerProfiledStElemI(IR::JitProfilingInstr *const instr, const Js::PropertyOperationFlags flags)
{
    // Lower a profiled store-element into a direct call to
    // ProfilingHelpers::ProfiledStElem, pushing arguments right-to-left.
    Assert(instr);
    /*
        void ProfilingHelpers::ProfiledStElem(
            const Var base,
            const Var varIndex,
            const Var value,
            FunctionBody *const functionBody,
            const ProfileId profileId,
            const PropertyOperationFlags flags,
            bool didArrayAccessHelperCall,
            bool bailedOutOnArraySpecialization)
    */
    Func *const func = instr->m_func;
    IR::JnHelperMethod helper;
    if(flags == Js::PropertyOperation_None)
    {
        // Default flags: use the shorter helper variant and skip pushing
        // the flags and the two trailing bools.
        helper = IR::HelperProfiledStElem_DefaultFlags;
    }
    else
    {
        helper = IR::HelperProfiledStElem;
        // The two trailing bool arguments are always false from JIT'd code.
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(false, TyInt8, func));
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(false, TyInt8, func));
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(flags, TyInt32, func, true));
    }
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(func));
    m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
    IR::IndirOpnd *const indir = instr->UnlinkDst()->AsIndirOpnd();
    IR::Opnd *const indexOpnd = indir->UnlinkIndexOpnd();
    Assert(indexOpnd || indir->GetOffset() >= 0 && !Js::TaggedInt::IsOverflow(indir->GetOffset()));
    // varIndex: the indir's index register, or the constant offset boxed as
    // a tagged-int Var when there is no index operand.
    m_lowererMD.LoadHelperArgument(
        instr,
        indexOpnd
            ? static_cast<IR::Opnd *>(indexOpnd)
            : IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(indir->GetOffset()), IR::AddrOpndKindDynamicVar, func));
    m_lowererMD.LoadHelperArgument(instr, indir->UnlinkBaseOpnd());
    indir->Free(func);
    instr->SetSrc1(IR::HelperCallOpnd::New(helper, func));
    m_lowererMD.LowerCall(instr, 0);
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerStElemI
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerStElemI(IR::Instr * instr, Js::PropertyOperationFlags flags, bool isHelper, IR::JnHelperMethod helperMethod)
{
    // Lowers a store-element (base[index] = value) into a runtime helper
    // call, after peeling off any element-store bailout checks attached to
    // the instruction. The helper is specialized by index type
    // (Var/int32/uint32) and source value type (Var/int32/float64).
    IR::Instr *instrPrev = instr->m_prev;
    if (instr->IsJitProfilingInstr())
    {
        Assert(!isHelper);
        LowerProfiledStElemI(instr->AsJitProfilingInstr(), flags);
        return instrPrev;
    }
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *newDst = nullptr;
    IRType srcType = src1->GetType();
    AssertMsg(dst->IsIndirOpnd(), "Expected indirOpnd on StElementI");
#if !FLOATVAR
    if (dst->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyOptimizedTypedArray() && src1->IsRegOpnd())
    {
        // We allow the source of typedArray StElem to be marked as temp, since we just need the value,
        // however if the array turns out to be a non-typed array, or the index isn't valid (the value is then stored as a property)
        // the temp needs to be boxed if it is a float. The BoxStackNumber helper will box JavascriptNumbers
        // which are on the stack.
        // regVar = BoxStackNumber(src1, scriptContext)
        IR::Instr *newInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
        IR::RegOpnd *regVar = IR::RegOpnd::New(TyVar, this->m_func);
        newInstr->SetDst(regVar);
        newInstr->SetSrc1(src1);
        instr->InsertBefore(newInstr);
        LowerUnaryHelperMem(newInstr, IR::HelperBoxStackNumber);
        // MOV src1, regVar
        newInstr = IR::Instr::New(Js::OpCode::Ld_A, src1, regVar, this->m_func);
        instr->InsertBefore(m_lowererMD.ChangeToAssign(newInstr));
    }
#endif
    if(instr->HasBailOutInfo())
    {
        // Lower each array-store bailout condition in turn, clearing its bit
        // from the local copy as it is handled.
        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
        {
            Assert(!(bailOutKind & IR::BailOutOnMissingValue));
            LowerBailOnInvalidatedArrayHeadSegment(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayHeadSegment;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        else if(bailOutKind & IR::BailOutOnMissingValue)
        {
            LowerBailOnCreatedMissingValue(instr, isHelper);
            bailOutKind ^= IR::BailOutOnMissingValue;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if(bailOutKind & IR::BailOutOnInvalidatedArrayLength)
        {
            LowerBailOnInvalidatedArrayLength(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayLength;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if(bailOutKind & IR::BailOutConvertedNativeArray)
        {
            // Give the helper call a dst (newDst); a zero result skips the
            // bailout, nonzero means the native array was converted.
            IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
            instr->InsertAfter(labelSkipBailOut);
            LowerOneBailOutKind(instr, IR::BailOutConvertedNativeArray, isHelper);
            newDst = IR::RegOpnd::New(TyMachReg, m_func);
            InsertTestBranch(newDst, newDst, Js::OpCode::BrEq_A, labelSkipBailOut, instr->m_next);
        }
    }
    instr->UnlinkDst();
    instr->UnlinkSrc1();
    Assert(
        helperMethod == IR::HelperOP_InitElemGetter ||
        helperMethod == IR::HelperOP_InitElemSetter ||
        helperMethod == IR::HelperOP_InitComputedProperty ||
        helperMethod == IR::HelperOp_SetElementI ||
        helperMethod == IR::HelperOp_InitClassMemberComputedName ||
        helperMethod == IR::HelperOp_InitClassMemberGetComputedName ||
        helperMethod == IR::HelperOp_InitClassMemberSetComputedName
        );
    IR::IndirOpnd* dstIndirOpnd = dst->AsIndirOpnd();
    IR::Opnd *indexOpnd = dstIndirOpnd->UnlinkIndexOpnd();
    if (indexOpnd && indexOpnd->GetType() != TyVar)
    {
        // Type-specialized index: select the int32/uint32 helper variant
        // matching the source element type.
        Assert(
            helperMethod != IR::HelperOP_InitElemGetter &&
            helperMethod != IR::HelperOP_InitElemSetter &&
            helperMethod != IR::HelperOp_InitClassMemberGetComputedName &&
            helperMethod != IR::HelperOp_InitClassMemberSetComputedName
            );
        if (indexOpnd->GetType() == TyInt32)
        {
            helperMethod =
                srcType == TyVar ? IR::HelperOp_SetElementI_Int32 :
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI_Int32 :
                IR::HelperOp_SetNativeFloatElementI_Int32;
        }
        else if (indexOpnd->GetType() == TyUint32)
        {
            helperMethod =
                srcType == TyVar ? IR::HelperOp_SetElementI_UInt32 :
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI_UInt32 :
                IR::HelperOp_SetNativeFloatElementI_UInt32;
        }
        else
        {
            Assert(FALSE);
        }
    }
    else
    {
        if (indexOpnd == nullptr)
        {
            // No index; the offset identifies the element.
            IntConstType offset = (IntConstType)dst->AsIndirOpnd()->GetOffset();
            indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
        }
        if (srcType != TyVar)
        {
            helperMethod =
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI : IR::HelperOp_SetNativeFloatElementI;
        }
    }
    // Push helper arguments right-to-left: base, index, value (via FP
    // argument registers when the value is a double), scriptContext, flags.
    if (srcType == TyFloat64)
    {
        m_lowererMD.LoadDoubleHelperArgument(instr, src1);
    }
    m_lowererMD.LoadHelperArgument(instr,
        IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    LoadScriptContext(instr);
    if (srcType != TyFloat64)
    {
        m_lowererMD.LoadHelperArgument(instr, src1);
    }
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    IR::Opnd *baseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    dst->Free(this->m_func);
    if (newDst)
    {
        instr->SetDst(newDst);
    }
    m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, nullptr, nullptr, isHelper);
    return instrPrev;
}
  7785. ///----------------------------------------------------------------------------
  7786. ///
  7787. /// Lowerer::LowerLdElemI
  7788. ///
  7789. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerLdElemI(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool isHelper)
{
    // Lowers an indexed element load (GetElementI / GetMethodElement / TypeofElem)
    // into a call to the appropriate runtime helper. The helper is specialized on
    // the index operand's type (Var / Int32 / UInt32) and, for GetElementI, on the
    // destination type (Var / native int / native float) when such variants exist.
    // Returns the instruction preceding the lowered sequence.
    IR::Instr *instrPrev = instr->m_prev;

    // Profiled loads take a dedicated lowering path.
    if(instr->IsJitProfilingInstr())
    {
        Assert(helperMethod == IR::HelperOp_GetElementI);
        Assert(!isHelper);
        LowerProfiledLdElemI(instr->AsJitProfilingInstr());
        return instrPrev;
    }

    // Stack-args optimization: emit the fast path for loading from the arguments
    // object; an index outside the actuals range bails out rather than calling a
    // helper.
    if (!isHelper && instr->DoStackArgsOpt())
    {
        IR::LabelInstr * labelLdElem = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
        // Pass in null for labelFallThru to only generate the LdHeapArgument call
        GenerateFastArgumentsLdElemI(instr, nullptr);
        instr->InsertBefore(labelLdElem);
        instr->UnlinkSrc1();
        instr->UnlinkDst();
        Assert(instr->HasBailOutInfo() && instr->GetBailOutKind() == IR::BailOutKind::BailOnStackArgsOutOfActualsRange);
        instr = GenerateBailOut(instr, nullptr, nullptr);
        return instrPrev;
    }

    IR::Opnd *src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd");
    IR::IndirOpnd *indirOpnd = src1->AsIndirOpnd();

    bool loadScriptContext = true;
    IRType dstType = instr->GetDst()->GetType();

    // Select the helper variant based on the index type; a typed (non-Var) index
    // must be Int32 or UInt32.
    IR::Opnd *indexOpnd = indirOpnd->UnlinkIndexOpnd();
    if (indexOpnd && indexOpnd->GetType() != TyVar)
    {
        Assert(indexOpnd->GetType() == TyUint32 || indexOpnd->GetType() == TyInt32);
        switch (helperMethod)
        {
        case IR::HelperOp_GetElementI:
            // GetElementI is additionally specialized on the destination type.
            if (indexOpnd->GetType() == TyUint32)
            {
                helperMethod =
                    dstType == TyVar ? IR::HelperOp_GetElementI_UInt32 :
                    dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI_UInt32 :
                    IR::HelperOp_GetNativeFloatElementI_UInt32;
            }
            else
            {
                helperMethod =
                    dstType == TyVar ? IR::HelperOp_GetElementI_Int32 :
                    dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI_Int32 :
                    IR::HelperOp_GetNativeFloatElementI_Int32;
            }
            break;
        case IR::HelperOp_GetMethodElement:
            Assert(dstType == TyVar);
            helperMethod = indexOpnd->GetType() == TyUint32?
                IR::HelperOp_GetMethodElement_UInt32 : IR::HelperOp_GetMethodElement_Int32;
            break;
        case IR::HelperOp_TypeofElem:
            Assert(dstType == TyVar);
            helperMethod = indexOpnd->GetType() == TyUint32?
                IR::HelperOp_TypeofElem_UInt32 : IR::HelperOp_TypeofElem_Int32;
            break;
        default:
            Assert(false);
        }
    }
    else
    {
        if (indexOpnd == nullptr)
        {
            // No index; the offset identifies the element.
            IntConstType offset = (IntConstType)src1->AsIndirOpnd()->GetOffset();
            indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
        }
        if (dstType != TyVar)
        {
            // The native-element helpers don't take a script context argument.
            loadScriptContext = false;
            helperMethod =
                dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI : IR::HelperOp_GetNativeFloatElementI;
        }
    }

    // Jitted loop bodies have volatile information about values created outside the loop, so don't update array creation site
    // profile data from jitted loop bodies
    if(!m_func->IsLoopBody())
    {
        // When the base is likely a (non-int-element) JS array, use the
        // "Expecting..." helper variants so the runtime can update array
        // creation-site profile info toward float or var elements.
        const ValueType baseValueType(indirOpnd->GetBaseOpnd()->GetValueType());
        if( baseValueType.IsLikelyObject() &&
            baseValueType.GetObjectType() == ObjectType::Array &&
            !baseValueType.HasIntElements())
        {
            switch(helperMethod)
            {
            case IR::HelperOp_GetElementI:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_ExpectingVarArray;
                break;
            case IR::HelperOp_GetElementI_UInt32:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_UInt32_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_UInt32_ExpectingVarArray;
                break;
            case IR::HelperOp_GetElementI_Int32:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_Int32_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_Int32_ExpectingVarArray;
                break;
            }
        }
    }

    // Push helper arguments in reverse order: (scriptContext,) index, base.
    if (loadScriptContext)
    {
        LoadScriptContext(instr);
    }
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);

    IR::Opnd *baseOpnd = indirOpnd->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);

    src1->Free(this->m_func);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, nullptr, nullptr, isHelper);
    return instrPrev;
}
  7912. void Lowerer::LowerLdLen(IR::Instr *const instr, const bool isHelper)
  7913. {
  7914. Assert(instr);
  7915. Assert(instr->m_opcode == Js::OpCode::LdLen_A);
  7916. // LdLen has persisted to this point for the sake of pre-lower opts.
  7917. // Turn it into a LdFld of the "length" property.
  7918. // This is normally a load of the internal "length" of an Array, so it probably doesn't benefit
  7919. // from inline caching.
  7920. if (instr->GetSrc1()->IsRegOpnd())
  7921. {
  7922. IR::RegOpnd * baseOpnd = instr->GetSrc1()->AsRegOpnd();
  7923. PropertySym* fieldSym = PropertySym::FindOrCreate(baseOpnd->m_sym->m_id, Js::PropertyIds::length, (uint32)-1, (uint)-1, PropertyKindData, m_func);
  7924. instr->ReplaceSrc1(IR::SymOpnd::New(fieldSym, TyVar, m_func));
  7925. }
  7926. LowerLdFld(instr, IR::HelperOp_GetProperty, IR::HelperOp_GetProperty, false, nullptr, isHelper);
  7927. }
// Inserts the load/store `dst <- src1` for an asm.js/wasm array-view access,
// optionally wrapped in index/result masking to mitigate out-of-bounds
// speculation. `src2` is the array-size operand from the bounds check,
// `indexOpnd` the index component of the access (may be null for constant
// offsets, in which case no masking is emitted). Returns the inserted move
// instruction. No masking is needed when the fast virtual buffer is in use
// (OOB accesses fault via guard pages — see callers setting hasSideEffects).
IR::Instr* InsertMaskableMove(bool isStore, bool generateWriteBarrier, IR::Opnd* dst, IR::Opnd* src1, IR::Opnd* src2, IR::Opnd* indexOpnd, IR::Instr* insertBeforeInstr, Lowerer* lowerer)
{
    Assert(insertBeforeInstr->m_func->GetJITFunctionBody()->IsAsmJsMode());
    // Mask with the bounds check operand to avoid speculation issues
    const bool usesFastArray = insertBeforeInstr->m_func->GetJITFunctionBody()->UsesWAsmJsFastVirtualBuffer();
    IR::RegOpnd* mask = nullptr;
    bool shouldMaskResult = false;
    if (!usesFastArray)
    {
        // Masking is gated on a config flag, separately for loads and stores.
        bool shouldMask = isStore ? CONFIG_FLAG_RELEASE(PoisonTypedArrayStore) : CONFIG_FLAG_RELEASE(PoisonTypedArrayLoad);
        if (shouldMask && indexOpnd != nullptr)
        {
            // indices in asmjs fit in 32 bits, but we need a mask
            // temp = index + (accessSize - 1), i.e. the last byte touched.
            IR::RegOpnd* temp = IR::RegOpnd::New(indexOpnd->GetType(), insertBeforeInstr->m_func);
            lowerer->InsertMove(temp, indexOpnd, insertBeforeInstr, false);
            lowerer->InsertAdd(false, temp, temp, IR::IntConstOpnd::New((uint32)src1->GetSize() - 1, temp->GetType(), insertBeforeInstr->m_func, true), insertBeforeInstr);
            // For native ints and vars, we do the masking after the load; we don't do this for
            // floats and doubles because the conversion to and from fp regs is slow.
            shouldMaskResult = (!isStore) && IRType_IsNativeIntOrVar(src1->GetType()) && TySize[dst->GetType()] <= TySize[TyMachReg];
            // When we do post-load masking, we AND the mask with dst, so they need to have the
            // same type, as otherwise we'll hit asserts later on. When we do pre-load masking,
            // we AND the mask with the index component of the indir opnd for the move from the
            // array, so we need to align with that type instead.
            mask = IR::RegOpnd::New((shouldMaskResult ? dst : indexOpnd)->GetType(), insertBeforeInstr->m_func);
            if (temp->GetSize() != mask->GetSize())
            {
                // Widen both operands of the subtraction to pointer size so the
                // types agree with the mask register.
                Assert(mask->GetSize() == MachPtr);
                Assert(src2->GetType() == TyUint32);
                temp = temp->UseWithNewType(TyMachPtr, insertBeforeInstr->m_func)->AsRegOpnd();
                src2 = src2->UseWithNewType(TyMachPtr, insertBeforeInstr->m_func)->AsRegOpnd();
            }
            // mask = (temp - arraySize) >> (width - 1): replicates the sign bit
            // of the difference across the register, so an in-bounds access
            // (temp < arraySize, negative difference) yields all-ones and an
            // out-of-bounds one yields zero.
            lowerer->InsertSub(false, mask, temp, src2, insertBeforeInstr);
            lowerer->InsertShift(Js::OpCode::Shr_A, false, mask, mask, IR::IntConstOpnd::New(TySize[mask->GetType()] * 8 - 1, TyInt8, insertBeforeInstr->m_func), insertBeforeInstr);
            // If we're not masking the result, we're masking the index
            if (!shouldMaskResult)
            {
                lowerer->InsertAnd(indexOpnd, indexOpnd, mask, insertBeforeInstr);
            }
        }
    }
    IR::Instr* ret = lowerer->InsertMove(dst, src1, insertBeforeInstr, generateWriteBarrier);
    if(!usesFastArray && shouldMaskResult)
    {
        // Mask the result if we didn't use the mask earlier to mask the index
        lowerer->InsertAnd(dst, dst, mask, insertBeforeInstr);
    }
    return ret;
}
IR::Instr *
Lowerer::LowerLdArrViewElem(IR::Instr * instr)
{
#ifdef ASMJS_PLAT
    // Lowers an asm.js typed-array-view load. Out-of-bounds loads do not throw:
    // a negative constant offset yields NaN (float types) or 0, and the dynamic
    // bounds-check path likewise produces a default value on failure.
    Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdArrViewElem);

    IR::Instr * instrPrev = instr->m_prev;

    IR::RegOpnd * indexOpnd = instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd();
    int32 offset = instr->GetSrc1()->AsIndirOpnd()->GetOffset();
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();

    IR::Instr * done;
    if (offset < 0)
    {
        // Constant negative offset: always out of bounds. Replace the load with
        // an assignment of the type-appropriate OOB value (NaN or 0).
        IR::Opnd * oobValue = nullptr;
        if(dst->IsFloat32())
        {
            oobValue = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetFloatNaNAddr(), TyFloat32, m_func);
        }
        else if(dst->IsFloat64())
        {
            oobValue = IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleNaNAddr(), TyFloat64, m_func);
        }
        else
        {
            oobValue = IR::IntConstOpnd::New(0, dst->GetType(), m_func);
        }
        instr->ReplaceSrc1(oobValue);
        if (src2)
        {
            instr->FreeSrc2();
        }
        return m_lowererMD.ChangeToAssign(instr);
    }
    if (indexOpnd || m_func->GetJITFunctionBody()->GetAsmJsInfo()->AccessNeedsBoundCheck((uint32)offset))
    {
        // Dynamic index, or a constant offset large enough to need a check:
        //     CMP indexOpnd, src2(arrSize)
        //     JA $helper
        //     JMP $load
        // $helper:
        //     MOV dst, 0
        //     JMP $done
        // $load:
        //     MOV dst, src1([arrayBuffer + indexOpnd])
        // $done:
        Assert(!dst->IsFloat32() || src1->IsFloat32());
        Assert(!dst->IsFloat64() || src1->IsFloat64());
        done = m_lowererMD.LowerAsmJsLdElemHelper(instr);
    }
    else
    {
        // any access below 0x10000 is safe
        instr->UnlinkDst();
        instr->UnlinkSrc1();
        if (src2)
        {
            instr->FreeSrc2();
        }
        done = instr;
    }
    // Emit the actual (speculation-masked) load before `done`.
    InsertMaskableMove(false, true, dst, src1, src2, indexOpnd, done, this);

    instr->Remove();
    return instrPrev;
#else
    Assert(UNREACHED);
    return instr;
#endif
}
  8046. IR::Instr *
  8047. Lowerer::LowerWasmArrayBoundsCheck(IR::Instr * instr, IR::Opnd *addrOpnd)
  8048. {
  8049. uint32 offset = addrOpnd->AsIndirOpnd()->GetOffset();
  8050. // don't encode offset for wasm memory reads/writes
  8051. addrOpnd->AsIndirOpnd()->m_dontEncode = true;
  8052. // if offset/size overflow the max length, throw (this also saves us from having to do int64 math)
  8053. int64 constOffset = (int64)addrOpnd->GetSize() + (int64)offset;
  8054. if (constOffset >= Js::ArrayBuffer::MaxArrayBufferLength)
  8055. {
  8056. GenerateRuntimeError(instr, WASMERR_ArrayIndexOutOfRange, IR::HelperOp_WebAssemblyRuntimeError);
  8057. return instr;
  8058. }
  8059. else
  8060. {
  8061. return m_lowererMD.LowerWasmArrayBoundsCheck(instr, addrOpnd);
  8062. }
  8063. }
  8064. IR::Instr *
  8065. Lowerer::LowerLdArrViewElemWasm(IR::Instr * instr)
  8066. {
  8067. #ifdef ENABLE_WASM
  8068. Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
  8069. Assert(instr);
  8070. Assert(instr->m_opcode == Js::OpCode::LdArrViewElemWasm);
  8071. IR::Instr * instrPrev = instr->m_prev;
  8072. IR::Opnd * dst = instr->GetDst();
  8073. IR::Opnd * src1 = instr->GetSrc1();
  8074. Assert(!dst->IsFloat32() || src1->IsFloat32());
  8075. Assert(!dst->IsFloat64() || src1->IsFloat64());
  8076. IR::Instr * done = LowerWasmArrayBoundsCheck(instr, src1);
  8077. IR::Instr* newMove = InsertMaskableMove(false, true, dst, src1, instr->GetSrc2(), src1->AsIndirOpnd()->GetIndexOpnd(), done, this);
  8078. if (m_func->GetJITFunctionBody()->UsesWAsmJsFastVirtualBuffer())
  8079. {
  8080. // We need to have an AV when accessing out of bounds memory even if the dst is not used
  8081. // Make sure LinearScan doesn't dead store this instruction
  8082. newMove->hasSideEffects = true;
  8083. }
  8084. instr->Remove();
  8085. return instrPrev;
  8086. #else
  8087. Assert(UNREACHED);
  8088. return instr;
  8089. #endif
  8090. }
  8091. IR::Instr *
  8092. Lowerer::LowerMemset(IR::Instr * instr, IR::RegOpnd * helperRet)
  8093. {
  8094. IR::Opnd * dst = instr->UnlinkDst();
  8095. IR::Opnd * src1 = instr->UnlinkSrc1();
  8096. Assert(dst->IsIndirOpnd());
  8097. IR::Opnd *baseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
  8098. IR::Opnd *indexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();
  8099. IR::Opnd *sizeOpnd = instr->UnlinkSrc2();
  8100. Assert(baseOpnd);
  8101. Assert(sizeOpnd);
  8102. Assert(indexOpnd);
  8103. IR::JnHelperMethod helperMethod = IR::HelperOp_Memset;
  8104. IR::Instr *instrPrev = nullptr;
  8105. if (src1->IsRegOpnd() && !src1->IsVar())
  8106. {
  8107. IR::RegOpnd* varOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
  8108. instrPrev = IR::Instr::New(Js::OpCode::ToVar, varOpnd, src1, instr->m_func);
  8109. instr->InsertBefore(instrPrev);
  8110. src1 = varOpnd;
  8111. }
  8112. instr->SetDst(helperRet);
  8113. LoadScriptContext(instr);
  8114. m_lowererMD.LoadHelperArgument(instr, sizeOpnd);
  8115. m_lowererMD.LoadHelperArgument(instr, src1);
  8116. m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  8117. m_lowererMD.LoadHelperArgument(instr, baseOpnd);
  8118. m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  8119. dst->Free(m_func);
  8120. return instrPrev;
  8121. }
  8122. IR::Instr *
  8123. Lowerer::LowerMemcopy(IR::Instr * instr, IR::RegOpnd * helperRet)
  8124. {
  8125. IR::Opnd * dst = instr->UnlinkDst();
  8126. IR::Opnd * src = instr->UnlinkSrc1();
  8127. Assert(dst->IsIndirOpnd());
  8128. Assert(src->IsIndirOpnd());
  8129. IR::Opnd *dstBaseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
  8130. IR::Opnd *dstIndexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();
  8131. IR::Opnd *srcBaseOpnd = src->AsIndirOpnd()->UnlinkBaseOpnd();
  8132. IR::Opnd *srcIndexOpnd = src->AsIndirOpnd()->UnlinkIndexOpnd();
  8133. IR::Opnd *sizeOpnd = instr->UnlinkSrc2();
  8134. Assert(sizeOpnd);
  8135. Assert(dstBaseOpnd);
  8136. Assert(dstIndexOpnd);
  8137. Assert(srcBaseOpnd);
  8138. Assert(srcIndexOpnd);
  8139. IR::JnHelperMethod helperMethod = IR::HelperOp_Memcopy;
  8140. instr->SetDst(helperRet);
  8141. LoadScriptContext(instr);
  8142. m_lowererMD.LoadHelperArgument(instr, sizeOpnd);
  8143. m_lowererMD.LoadHelperArgument(instr, srcIndexOpnd);
  8144. m_lowererMD.LoadHelperArgument(instr, srcBaseOpnd);
  8145. m_lowererMD.LoadHelperArgument(instr, dstIndexOpnd);
  8146. m_lowererMD.LoadHelperArgument(instr, dstBaseOpnd);
  8147. m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  8148. dst->Free(m_func);
  8149. src->Free(m_func);
  8150. return nullptr;
  8151. }
IR::Instr *
Lowerer::LowerMemOp(IR::Instr * instr)
{
    // Lowers a Memset/Memcopy instruction: first peels off and lowers each
    // bailout kind attached to the instruction (head-segment invalidation,
    // missing value, length invalidation, and finally the helper-failure
    // bailout keyed off the helper's return value), then emits the actual
    // helper call via LowerMemset/LowerMemcopy.
    Assert(instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    IR::Instr *instrPrev = instr->m_prev;

    // The helper writes a success flag here; it drives the BailOutOnMemOpError check.
    IR::RegOpnd* helperRet = IR::RegOpnd::New(TyInt8, instr->m_func);
    const bool isHelper = false;
    AssertMsg(instr->HasBailOutInfo(), "Expected bailOut on MemOp instruction");
    if (instr->HasBailOutInfo())
    {
        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        // Head-segment invalidation and missing-value bailouts are mutually
        // exclusive; each XOR clears the handled bit and the assert verifies
        // no unexpected bits were consumed along the way.
        if (bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
        {
            Assert(!(bailOutKind & IR::BailOutOnMissingValue));
            LowerBailOnInvalidatedArrayHeadSegment(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayHeadSegment;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        else if (bailOutKind & IR::BailOutOnMissingValue)
        {
            LowerBailOnCreatedMissingValue(instr, isHelper);
            bailOutKind ^= IR::BailOutOnMissingValue;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if (bailOutKind & IR::BailOutOnInvalidatedArrayLength)
        {
            LowerBailOnInvalidatedArrayLength(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayLength;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        AssertMsg(bailOutKind & IR::BailOutOnMemOpError, "Expected BailOutOnMemOpError on MemOp instruction");
        if (bailOutKind & IR::BailOutOnMemOpError)
        {
            // Insert or get continue label
            IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isHelper);
            Func *const func = instr->m_func;
            LowerOneBailOutKind(instr, IR::BailOutOnMemOpError, isHelper);
            IR::Instr *const insertBeforeInstr = instr->m_next;

            // Bail out only when the helper reported failure (helperRet == 0):
            // test helperRet, helperRet
            // jz $skipBailOut
            InsertCompareBranch(
                helperRet,
                IR::IntConstOpnd::New(0, TyInt8, func),
                Js::OpCode::BrNeq_A,
                skipBailOutLabel,
                insertBeforeInstr);

            // (Bail out with IR::BailOutOnMemOpError)

            // $skipBailOut:
            bailOutKind ^= IR::BailOutOnMemOpError;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        instr->ClearBailOutInfo();
    }

    // Emit the actual helper call; use its m_prev only when one was produced
    // (LowerMemset may insert a ToVar; LowerMemcopy never inserts anything).
    IR::Instr* newInstrPrev = nullptr;
    if (instr->m_opcode == Js::OpCode::Memset)
    {
        newInstrPrev = LowerMemset(instr, helperRet);
    }
    else if (instr->m_opcode == Js::OpCode::Memcopy)
    {
        newInstrPrev = LowerMemcopy(instr, helperRet);
    }
    if (newInstrPrev != nullptr)
    {
        instrPrev = newInstrPrev;
    }
    return instrPrev;
}
  8220. IR::Instr*
  8221. Lowerer::LowerStAtomicsWasm(IR::Instr* instr)
  8222. {
  8223. #ifdef ENABLE_WASM
  8224. Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
  8225. Assert(instr);
  8226. Assert(instr->m_opcode == Js::OpCode::StAtomicWasm);
  8227. IR::Instr * instrPrev = instr->m_prev;
  8228. IR::Opnd * dst = instr->GetDst();
  8229. IR::Opnd * src1 = instr->GetSrc1();
  8230. Assert(IRType_IsNativeInt(dst->GetType()));
  8231. IR::Instr * done = LowerWasmArrayBoundsCheck(instr, dst);
  8232. m_lowererMD.LowerAtomicStore(dst, src1, done);
  8233. instr->Remove();
  8234. return instrPrev;
  8235. #else
  8236. Assert(UNREACHED);
  8237. return instr;
  8238. #endif
  8239. }
  8240. IR::Instr * Lowerer::LowerLdAtomicsWasm(IR::Instr * instr)
  8241. {
  8242. #ifdef ENABLE_WASM
  8243. Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
  8244. Assert(instr);
  8245. Assert(instr->m_opcode == Js::OpCode::LdAtomicWasm);
  8246. IR::Instr * instrPrev = instr->m_prev;
  8247. IR::Opnd * dst = instr->GetDst();
  8248. IR::Opnd * src1 = instr->GetSrc1();
  8249. Assert(IRType_IsNativeInt(dst->GetType()));
  8250. IR::Instr * done = LowerWasmArrayBoundsCheck(instr, src1);
  8251. m_lowererMD.LowerAtomicLoad(dst, src1, done);
  8252. instr->Remove();
  8253. return instrPrev;
  8254. #else
  8255. Assert(UNREACHED);
  8256. return instr;
  8257. #endif
  8258. }
IR::Instr *
Lowerer::LowerStArrViewElem(IR::Instr * instr)
{
#ifdef ASMJS_PLAT
    // Lowers an asm.js/wasm typed-array-view store. Wasm accesses get the wasm
    // bounds check (which traps on OOB); asm.js accesses with a negative
    // constant offset are silently dropped, dynamic/large accesses get a
    // bounds-checked store helper, and small constant offsets store directly.
    Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StArrViewElem);

    IR::Instr * instrPrev = instr->m_prev;

    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();

    // type of dst is the type of array
    IR::RegOpnd * indexOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
    int32 offset = dst->AsIndirOpnd()->GetOffset();

    Assert(!dst->IsFloat32() || src1->IsFloat32());
    Assert(!dst->IsFloat64() || src1->IsFloat64());
    Assert(!dst->IsInt64() || src1->IsInt64());

    IR::Instr * done;
    if (m_func->GetJITFunctionBody()->IsWasmFunction())
    {
        done = LowerWasmArrayBoundsCheck(instr, dst);
    }
    else if (offset < 0)
    {
        // Constant negative offset: always out of bounds. asm.js OOB stores
        // are no-ops, so just drop the instruction.
        instr->Remove();
        return instrPrev;
    }
    else if (indexOpnd || m_func->GetJITFunctionBody()->GetAsmJsInfo()->AccessNeedsBoundCheck((uint32)offset))
    {
        // CMP indexOpnd, src2(arrSize)
        // JA $helper
        // JMP $store
        // $helper:
        // JMP $done
        // $store:
        // MOV dst([arrayBuffer + indexOpnd]), src1
        // $done:
        done = m_lowererMD.LowerAsmJsStElemHelper(instr);
    }
    else
    {
        // any constant access below 0x10000 is safe, as that is the min heap size
        instr->UnlinkDst();
        instr->UnlinkSrc1();
        done = instr;
        if (src2)
        {
            instr->FreeSrc2();
        }
    }
    // wasm memory buffer is not recycler allocated, so we shouldn't generate write barrier
    InsertMaskableMove(true, false, dst, src1, src2, indexOpnd, done, this);

    instr->Remove();
    return instrPrev;
#else
    Assert(UNREACHED);
    return instr;
#endif
}
  8318. IR::Instr *
  8319. Lowerer::LowerArrayDetachedCheck(IR::Instr * instr)
  8320. {
  8321. // TEST isDetached, isDetached
  8322. // JE Done
  8323. // Helper:
  8324. // CALL Js::Throw::OutOfMemory
  8325. // Done:
  8326. Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
  8327. IR::Instr * instrPrev = instr->m_prev;
  8328. IR::Opnd * isDetachedOpnd = instr->UnlinkSrc1();
  8329. Assert(isDetachedOpnd->IsIndirOpnd() || isDetachedOpnd->IsMemRefOpnd());
  8330. IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
  8331. IR::LabelInstr * helperLabel = InsertLabel(true, instr);
  8332. InsertTestBranch(isDetachedOpnd, isDetachedOpnd, Js::OpCode::BrNotNeq_A, doneLabel, helperLabel);
  8333. m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_OutOfMemoryError);
  8334. return instrPrev;
  8335. }
  8336. ///----------------------------------------------------------------------------
  8337. ///
  8338. /// Lowerer::LowerDeleteElemI
  8339. ///
  8340. ///----------------------------------------------------------------------------
  8341. IR::Instr *
  8342. Lowerer::LowerDeleteElemI(IR::Instr * instr, bool strictMode)
  8343. {
  8344. IR::Instr *instrPrev;
  8345. IR::Opnd *src1 = instr->UnlinkSrc1();
  8346. AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd on DeleteElementI");
  8347. Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
  8348. if (strictMode)
  8349. {
  8350. propertyOperationFlag = Js::PropertyOperation_StrictMode;
  8351. }
  8352. instrPrev = instr->m_prev;
  8353. IR::JnHelperMethod helperMethod = IR::HelperOp_DeleteElementI;
  8354. IR::Opnd *indexOpnd = src1->AsIndirOpnd()->UnlinkIndexOpnd();
  8355. if (indexOpnd)
  8356. {
  8357. if (indexOpnd->GetType() == TyInt32)
  8358. {
  8359. helperMethod = IR::HelperOp_DeleteElementI_Int32;
  8360. }
  8361. else if (indexOpnd->GetType() == TyUint32)
  8362. {
  8363. helperMethod = IR::HelperOp_DeleteElementI_UInt32;
  8364. }
  8365. else
  8366. {
  8367. Assert(indexOpnd->GetType() == TyVar);
  8368. }
  8369. }
  8370. else
  8371. {
  8372. // No index; the offset identifies the element.
  8373. IntConstType offset = (IntConstType)src1->AsIndirOpnd()->GetOffset();
  8374. indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
  8375. }
  8376. m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
  8377. LoadScriptContext(instr);
  8378. m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  8379. IR::Opnd *baseOpnd = src1->AsIndirOpnd()->UnlinkBaseOpnd();
  8380. m_lowererMD.LoadHelperArgument(instr, baseOpnd);
  8381. src1->Free(this->m_func);
  8382. m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  8383. return instrPrev;
  8384. }
  8385. IR::Opnd *
  8386. Lowerer::GetForInEnumeratorFieldOpnd(IR::Opnd * forInEnumeratorOpnd, uint fieldOffset, IRType type)
  8387. {
  8388. if (forInEnumeratorOpnd->IsSymOpnd())
  8389. {
  8390. IR::SymOpnd * symOpnd = forInEnumeratorOpnd->AsSymOpnd();
  8391. return IR::SymOpnd::New(symOpnd->GetStackSym(), symOpnd->m_offset + fieldOffset, type, this->m_func);
  8392. }
  8393. Assert(forInEnumeratorOpnd->IsIndirOpnd());
  8394. IR::IndirOpnd * indirOpnd = forInEnumeratorOpnd->AsIndirOpnd();
  8395. return IR::IndirOpnd::New(indirOpnd->GetBaseOpnd(), indirOpnd->GetOffset() + fieldOffset, type, this->m_func);
  8396. }
void
Lowerer::GenerateFastBrBReturn(IR::Instr * instr)
{
    // Emits the fast path for BrOnEmpty/BrOnNotEmpty (for-in "get next
    // property"): when the enumerator's cached data is still valid for the
    // object's current type, fetch the next property string and object index
    // directly from the cache and branch, falling back to $helper otherwise.
    Assert(instr->m_opcode == Js::OpCode::BrOnEmpty || instr->m_opcode == Js::OpCode::BrOnNotEmpty);
    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
    IR::Opnd * forInEnumeratorOpnd = instr->GetSrc1();

    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr * loopBody = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // CMP forInEnumerator->canUseJitFastPath, 0
    // JEQ $helper
    IR::Opnd * canUseJitFastPathOpnd = GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfCanUseJitFastPath(), TyInt8);
    InsertCompareBranch(canUseJitFastPathOpnd, IR::IntConstOpnd::New(0, TyInt8, this->m_func), Js::OpCode::BrEq_A, labelHelper, instr);

    // The cache is only valid while the object's type matches the type it was
    // built against:
    // MOV objectOpnd, forInEnumerator->enumerator.object
    // MOV cachedDataTypeOpnd, forInEnumerator->enumerator.cachedDataType
    // CMP cachedDataTypeOpnd, objectOpnd->type
    // JNE $helper
    IR::RegOpnd * objectOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(objectOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObject(), TyMachPtr), instr);
    IR::RegOpnd * cachedDataTypeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(cachedDataTypeOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialType(), TyMachPtr), instr);
    InsertCompareBranch(cachedDataTypeOpnd, IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, this->m_func),
        Js::OpCode::BrNeq_A, labelHelper, instr);

    // More cached properties remaining? If so, jump to the fetch:
    // MOV cachedDataOpnd, forInEnumeratorOpnd->enumerator.cachedData
    // MOV enumeratedCountOpnd, forInEnumeratorOpnd->enumerator.enumeratedCount
    // CMP enumeratedCountOpnd, cachedDataOpnd->cachedCount
    // JLT $loopBody
    IR::RegOpnd * cachedDataOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(cachedDataOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorCachedData(), TyMachPtr), instr);
    IR::RegOpnd * enumeratedCountOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(enumeratedCountOpnd,
        GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorEnumeratedCount(), TyUint32), instr);
    InsertCompareBranch(enumeratedCountOpnd,
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataCachedCount(), TyUint32, this->m_func),
        Js::OpCode::BrLt_A, loopBody, instr);

    // Cache exhausted: if the cached enumeration completed, the collection is
    // empty of further properties — take the "empty" direction. Otherwise go
    // to the helper.
    // CMP cacheData.completed, 0
    // JNE $loopEnd
    // JMP $helper
    IR::LabelInstr * labelAfter = instr->GetOrCreateContinueLabel();
    InsertCompareBranch(
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataCompleted(), TyInt8, this->m_func),
        IR::IntConstOpnd::New(0, TyInt8, this->m_func),
        Js::OpCode::BrNeq_A, instr->m_opcode == Js::OpCode::BrOnNotEmpty ? labelAfter : instr->AsBranchInstr()->GetTarget(), instr);
    InsertBranch(Js::OpCode::Br, labelHelper, instr);

    // $loopBody:
    instr->InsertBefore(loopBody);
    IR::Opnd * opndDst = instr->GetDst(); // ForIn result propertyString
    Assert(opndDst->IsRegOpnd());

    // Load the next cached property string into the result:
    // MOV stringsOpnd, cachedData->strings
    // MOV opndDst, stringsOpnd[enumeratedCount]
    IR::RegOpnd * stringsOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(stringsOpnd,
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataStrings(), TyMachPtr, this->m_func), instr);
    InsertMove(opndDst,
        IR::IndirOpnd::New(stringsOpnd, enumeratedCountOpnd, m_lowererMD.GetDefaultIndirScale(), TyVar, this->m_func), instr);

    // Update the enumerator's object index from the cache:
    // MOV indexesOpnd, cachedData->indexes
    // MOV objectIndexOpnd, indexesOpnd[enumeratedCount]
    // MOV forInEnumeratorOpnd->enumerator.objectIndex, objectIndexOpnd
    IR::RegOpnd * indexesOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(indexesOpnd,
        IR::IndirOpnd::New(cachedDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataIndexes(), TyMachPtr, this->m_func), instr);
    IR::RegOpnd * objectIndexOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(objectIndexOpnd,
        IR::IndirOpnd::New(indexesOpnd, enumeratedCountOpnd, IndirScale4, TyUint32, this->m_func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObjectIndex(), TyUint32),
        objectIndexOpnd, instr);

    // Advance the enumeration count:
    // INC enumeratedCountOpnd
    // MOV forInEnumeratorOpnd->enumerator.enumeratedCount, enumeratedCountOpnd
    InsertAdd(false, enumeratedCountOpnd, enumeratedCountOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorEnumeratedCount(), TyUint32),
        enumeratedCountOpnd, instr);

    // We know result propertyString (opndDst) != NULL
    InsertBranch(Js::OpCode::Br, instr->m_opcode == Js::OpCode::BrOnNotEmpty ? instr->AsBranchInstr()->GetTarget() : labelAfter, instr);

    // $helper
    instr->InsertBefore(labelHelper);
    // $after
}
  8476. ///----------------------------------------------------------------------------
  8477. ///
  8478. /// Lowerer::LowerBrB - lower 1-operand (boolean) conditional branch
  8479. ///
  8480. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerBrBReturn(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool isHelper)
{
    // Slow-path lowering for BrOnEmpty/BrOnNotEmpty: call the for-in helper
    // to fetch the next property (or null), then branch on its result.
    // Returns the instruction preceding the lowered sequence.
    IR::Instr * instrPrev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndDst;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
    Assert(instr->m_opcode == Js::OpCode::BrOnEmpty || instr->m_opcode == Js::OpCode::BrOnNotEmpty);

    // Pass the address of the for-in enumerator as the single helper argument.
    IR::RegOpnd * forInEnumeratorRegOpnd = GenerateForInEnumeratorLoad(instr->UnlinkSrc1(), instr);
    instrPrev = m_lowererMD.LoadHelperArgument(instr, forInEnumeratorRegOpnd);

    // Generate helper call to convert the unknown operand to boolean
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    opndDst = instr->UnlinkDst();
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Branch on the result of the call
    instr->m_opcode = (instr->m_opcode == Js::OpCode::BrOnNotEmpty? Js::OpCode::BrTrue_A : Js::OpCode::BrFalse_A);
    instr->SetSrc1(opndDst);

    IR::Instr *loweredInstr;
    loweredInstr = this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, isHelper);

#if DBG
    // Mark the resulting branch as helper-to-non-helper for helper-block
    // validation in debug builds.
    if (isHelper)
    {
        if (!loweredInstr->IsBranchInstr())
        {
            loweredInstr = loweredInstr->GetNextBranchOrLabel();
        }
        if (loweredInstr->IsBranchInstr())
        {
            loweredInstr->AsBranchInstr()->m_isHelperToNonHelperBranch = true;
        }
    }
#endif
    return instrPrev;
}
  8518. ///----------------------------------------------------------------------------
  8519. ///
  8520. /// Lowerer::LowerMultiBr
  8521. /// - Lowers the instruction for dictionary look up(string case arms)
  8522. ///
  8523. ///----------------------------------------------------------------------------
IR::Instr* Lowerer::LowerMultiBr(IR::Instr * instr, IR::JnHelperMethod helperMethod)
{
    // Lowers a multi-branch whose case arms are strings: calls a helper that
    // looks the switch expression up in a string dictionary and returns the
    // address to jump to, then branches indirectly through that address.
    IR::Instr * instrPrev = instr->m_prev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnd on BrB");

    // Push the args in reverse order.
    // The end and start labels for the function are used to guarantee
    // that the dictionary jump destinations haven't been tampered with, so we
    // will always jump to some location within this function
    IR::LabelOpnd * endFuncOpnd = IR::LabelOpnd::New(m_func->EnsureFuncEndLabel(), m_func);
    m_lowererMD.LoadHelperArgument(instr, endFuncOpnd);
    IR::LabelOpnd * startFuncOpnd = IR::LabelOpnd::New(m_func->EnsureFuncStartLabel(), m_func);
    m_lowererMD.LoadHelperArgument(instr, startFuncOpnd);

    // Load the address of the dictionary pair - Js::StringDictionaryWrapper
    auto dictionary = instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary();
    if (this->m_func->IsOOPJIT())
    {
        // Out-of-proc JIT: the dictionary lives in the NativeCodeData block, so
        // compute its address as base-sym + offset with an LEA.
        auto dictionaryOffset = NativeCodeData::GetDataTotalOffset(dictionary);
        auto addressRegOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
        Lowerer::InsertLea(addressRegOpnd,
            IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), dictionaryOffset, TyMachPtr,
#if DBG
                NativeCodeData::GetDataDescription(dictionary, this->m_func->m_alloc),
#endif
                this->m_func, true), instr);

        // Keep the NativeCodeData base sym live across loop back edges.
        this->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        m_lowererMD.LoadHelperArgument(instr, addressRegOpnd);
    }
    else
    {
        // In-proc JIT: the dictionary address can be baked into the code directly.
        IR::AddrOpnd* nativestringDictionaryOpnd = IR::AddrOpnd::New(dictionary, IR::AddrOpndKindDynamicMisc, this->m_func);
        m_lowererMD.LoadHelperArgument(instr, nativestringDictionaryOpnd);
    }

    // Load the String passed in the Switch expression for look up - JavascriptString
    opndSrc = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call for dictionary lookup; the helper's return value is
    // the branch target address.
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyMachPtr,this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyMachPtr, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Branch indirectly to the returned address.
    instr->SetSrc1(instrCall->GetDst());
    instr->m_opcode = LowererMD::MDMultiBranchOpcode;
    return instrPrev;
}
  8575. void
  8576. Lowerer::LowerJumpTableMultiBranch(IR::MultiBranchInstr * multiBrInstr, IR::RegOpnd * indexOpnd)
  8577. {
  8578. Func * func = this->m_func;
  8579. IR::Opnd * opndDst = IR::RegOpnd::New(TyMachPtr, func);
  8580. //Move the native address of the jump table to a register
  8581. IR::LabelInstr * nativeJumpTableLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  8582. nativeJumpTableLabel->m_isDataLabel = true;
  8583. IR::LabelOpnd * nativeJumpTable = IR::LabelOpnd::New(nativeJumpTableLabel, m_func);
  8584. IR::RegOpnd * nativeJumpTableReg = IR::RegOpnd::New(TyMachPtr, func);
  8585. InsertMove(nativeJumpTableReg, nativeJumpTable, multiBrInstr);
  8586. BranchJumpTableWrapper * branchJumpTable = multiBrInstr->GetBranchJumpTable();
  8587. AssertMsg(branchJumpTable->labelInstr == nullptr, "Should not be already assigned");
  8588. branchJumpTable->labelInstr = nativeJumpTableLabel;
  8589. //Indirect addressing @ target location in the jump table.
  8590. //MOV eax, [nativeJumpTableReg + (offset * indirScale)]
  8591. BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
  8592. IR::Opnd * opndSrc = IR::IndirOpnd::New(nativeJumpTableReg, indexOpnd, indirScale, TyMachReg, this->m_func);
  8593. IR::Instr * indirInstr = InsertMove(opndDst, opndSrc, multiBrInstr);
  8594. //MultiBr eax
  8595. multiBrInstr->SetSrc1(indirInstr->GetDst());
  8596. //Jump to the address at the target location in the jump table
  8597. multiBrInstr->m_opcode = LowererMD::MDMultiBranchOpcode;
  8598. }
  8599. ///----------------------------------------------------------------------------
  8600. ///
  8601. /// Lowerer::LowerMultiBr
  8602. /// - Lowers the instruction for jump table(consecutive integer case arms)
  8603. ///
  8604. ///----------------------------------------------------------------------------
  8605. IR::Instr* Lowerer::LowerMultiBr(IR::Instr * instr)
  8606. {
  8607. IR::Instr * instrPrev = instr->m_prev;
  8608. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnd on BrB");
  8609. AssertMsg(instr->IsBranchInstr() && instr->AsBranchInstr()->IsMultiBranch(), "Bad Instruction Lowering Call to LowerMultiBr()");
  8610. IR::MultiBranchInstr * multiBrInstr = instr->AsBranchInstr()->AsMultiBrInstr();
  8611. IR::RegOpnd * offset = instr->UnlinkSrc1()->AsRegOpnd();
  8612. LowerJumpTableMultiBranch(multiBrInstr, offset);
  8613. return instrPrev;
  8614. }
  8615. IR::Instr* Lowerer::LowerBrBMem(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  8616. {
  8617. IR::Instr * instrPrev;
  8618. IR::Instr * instrCall;
  8619. IR::HelperCallOpnd * opndHelper;
  8620. IR::Opnd * opndSrc;
  8621. IR::Opnd * opndDst;
  8622. StackSym * symDst;
  8623. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
  8624. instrPrev = LoadScriptContext(instr);
  8625. opndSrc = instr->UnlinkSrc1();
  8626. m_lowererMD.LoadHelperArgument(instr, opndSrc);
  8627. // Generate helper call to convert the unknown operand to boolean
  8628. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  8629. symDst = StackSym::New(TyVar, this->m_func);
  8630. opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
  8631. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  8632. instr->InsertBefore(instrCall);
  8633. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  8634. // Branch on the result of the call
  8635. instr->SetSrc1(opndDst);
  8636. m_lowererMD.LowerCondBranch(instr);
  8637. return instrPrev;
  8638. }
  8639. IR::Instr* Lowerer::LowerBrOnObject(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  8640. {
  8641. IR::Instr * instrPrev;
  8642. IR::Instr * instrCall;
  8643. IR::HelperCallOpnd * opndHelper;
  8644. IR::Opnd * opndSrc;
  8645. IR::Opnd * opndDst;
  8646. StackSym * symDst;
  8647. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
  8648. opndSrc = instr->UnlinkSrc1();
  8649. instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);
  8650. // Generate helper call to check if the operand's type is object
  8651. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  8652. symDst = StackSym::New(TyVar, this->m_func);
  8653. opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
  8654. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  8655. instr->InsertBefore(instrCall);
  8656. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  8657. // Branch on the result of the call
  8658. instr->SetSrc1(opndDst);
  8659. m_lowererMD.LowerCondBranch(instr);
  8660. return instrPrev;
  8661. }
  8662. IR::Instr * Lowerer::LowerBrOnClassConstructor(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  8663. {
  8664. IR::Instr * instrPrev;
  8665. IR::Instr * instrCall;
  8666. IR::HelperCallOpnd * opndHelper;
  8667. IR::Opnd * opndSrc;
  8668. IR::Opnd * opndDst;
  8669. StackSym * symDst;
  8670. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
  8671. opndSrc = instr->UnlinkSrc1();
  8672. instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);
  8673. // Generate helper call to check if the operand's type is object
  8674. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  8675. symDst = StackSym::New(TyVar, this->m_func);
  8676. opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
  8677. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  8678. instr->InsertBefore(instrCall);
  8679. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  8680. // Branch on the result of the call
  8681. instr->SetSrc1(opndDst);
  8682. m_lowererMD.LowerCondBranch(instr);
  8683. return instrPrev;
  8684. }
IR::Instr *
Lowerer::LowerEqualityCompare(IR::Instr* instr, IR::JnHelperMethod helper)
{
    // Lowers a value-producing equality compare (CmEq/CmNeq/CmSrEq/...),
    // trying type-specific fast paths before falling back to the helper call.
    IR::Instr * instrPrev = instr->m_prev;
    bool needHelper = true;
    bool fNoLower = false;

    if (instr->GetSrc1()->IsFloat())
    {
        // Specialized float compare: fully handled inline, no helper needed.
        Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
        this->m_lowererMD.GenerateFastCmXxR8(instr);
    }
    else if (PHASE_OFF(Js::BranchFastPathPhase, m_func) || !m_func->DoFastPaths())
    {
        // Fast paths disabled: straight helper call.
        LowerBinaryHelperMem(instr, helper);
    }
    else if (TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, instr->IsNeq(), &fNoLower))
    {
        // typeof-compare pattern; may still need the helper as the slow path.
        if (!fNoLower)
        {
            LowerBinaryHelperMem(instr, helper);
        }
    }
    else if (instr->m_opcode == Js::OpCode::CmSrEq_A && TryGenerateFastCmSrEq(instr))
    {
        // Strict-equality fast path handled the compare completely.
    }
    else
    {
        bool hasStrFastpath = GenerateFastBrOrCmString(instr);
        if(GenerateFastCmEqLikely(instr, &needHelper, hasStrFastpath) || GenerateFastEqBoolInt(instr, &needHelper, hasStrFastpath))
        {
            if (needHelper)
            {
                LowerBinaryHelperMem(instr, helper);
            }
            else
            {
                // The fast path covered all cases; the original compare is dead.
                instr->Remove();
            }
        }
        else if (!m_lowererMD.GenerateFastCmXxTaggedInt(instr, hasStrFastpath))
        {
            LowerBinaryHelperMem(instr, helper);
        }
    }
    return instrPrev;
}
  8731. IR::Instr *
  8732. Lowerer::LowerEqualityBranch(IR::Instr* instr, IR::JnHelperMethod helper)
  8733. {
  8734. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  8735. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  8736. IR::Instr * instrPrev = instr->m_prev;
  8737. bool fNoLower = false;
  8738. const bool noFastPath = PHASE_OFF(Js::BranchFastPathPhase, m_func) || !m_func->DoFastPaths();
  8739. if (instr->GetSrc1()->IsFloat())
  8740. {
  8741. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  8742. m_lowererMD.LowerToFloat(instr);
  8743. return instrPrev;
  8744. }
  8745. if (noFastPath)
  8746. {
  8747. LowerBrCMem(instr, helper, true, false /*isHelper*/);
  8748. return instrPrev;
  8749. }
  8750. if (TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, instr->IsNeq(), &fNoLower))
  8751. {
  8752. if (!fNoLower)
  8753. {
  8754. LowerBrCMem(instr, helper, false, false /*isHelper*/);
  8755. }
  8756. return instrPrev;
  8757. }
  8758. bool done = false;
  8759. switch(instr->m_opcode)
  8760. {
  8761. case Js::OpCode::BrNeq_A:
  8762. case Js::OpCode::BrNotEq_A:
  8763. done = TryGenerateFastBrNeq(instr);
  8764. break;
  8765. case Js::OpCode::BrEq_A:
  8766. case Js::OpCode::BrNotNeq_A:
  8767. done = TryGenerateFastBrEq(instr);
  8768. break;
  8769. case Js::OpCode::BrSrEq_A:
  8770. case Js::OpCode::BrSrNotNeq_A:
  8771. done = GenerateFastBrSrEq(instr, srcReg1, srcReg2, &instrPrev, noFastPath);
  8772. break;
  8773. case Js::OpCode::BrSrNeq_A:
  8774. case Js::OpCode::BrSrNotEq_A:
  8775. done = GenerateFastBrSrNeq(instr, &instrPrev);
  8776. break;
  8777. default:
  8778. Assume(UNREACHED);
  8779. }
  8780. if (done)
  8781. {
  8782. return instrPrev;
  8783. }
  8784. bool needHelper = true;
  8785. IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  8786. bool hasStrFastPath = GenerateFastBrOrCmString(instr);
  8787. if (GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper, hasStrFastPath) || GenerateFastEqBoolInt(instr, &needHelper, hasStrFastPath))
  8788. {
  8789. if (needHelper)
  8790. {
  8791. LowerBrCMem(instr, helper, false);
  8792. }
  8793. }
  8794. else if (needHelper)
  8795. {
  8796. LowerBrCMem(instr, helper, false, hasStrFastPath);
  8797. }
  8798. if (!needHelper)
  8799. {
  8800. if (instr->AsBranchInstr()->GetTarget()->m_isLoopTop)
  8801. {
  8802. LowerBrCMem(instr, helper, false, hasStrFastPath);
  8803. }
  8804. else
  8805. {
  8806. instr->Remove();
  8807. }
  8808. }
  8809. return instrPrev;
  8810. }
IR::Instr *
Lowerer::LowerBrCMem(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool noMathFastPath, bool isHelper)
{
    // Lowers a two-source compare-and-branch into a helper call followed by
    // BrTrue/BrFalse on the helper's result. Some "Not*" helpers are rewritten
    // to their positive counterparts with the branch sense inverted.
    IR::Instr * instrPrev = instr->m_prev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;
    bool inverted = false;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() != nullptr, "Expected 2 src opnds on BrC");

    if (!noMathFastPath && !this->GenerateFastCondBranch(instr->AsBranchInstr(), &isHelper))
    {
        // The fast path lowered the branch completely; nothing more to do.
        return instrPrev;
    }

    // Push the args in reverse order.
    // The string-equality helpers take fewer arguments: no script context, and
    // the empty-string variant takes no second operand either.
    const bool loadScriptContext = !(helperMethod == IR::HelperOp_StrictEqualString || helperMethod == IR::HelperOp_StrictEqualEmptyString);
    const bool loadArg2 = !(helperMethod == IR::HelperOp_StrictEqualEmptyString);

    if (helperMethod == IR::HelperOp_NotEqual)
    {
        // Op_NotEqual() returns !Op_Equal(). It is faster to call Op_Equal() directly.
        helperMethod = IR::HelperOp_Equal;
        instr->AsBranchInstr()->Invert();
        inverted = true;
    }
    else if(helperMethod == IR::HelperOp_NotStrictEqual)
    {
        // Op_NotStrictEqual() returns !Op_StrictEqual(). It is faster to call Op_StrictEqual() directly.
        helperMethod = IR::HelperOp_StrictEqual;
        instr->AsBranchInstr()->Invert();
        inverted = true;
    }

    if (loadScriptContext)
        LoadScriptContext(instr);

    opndSrc = instr->UnlinkSrc2();
    if (loadArg2)
        m_lowererMD.LoadHelperArgument(instr, opndSrc);
    opndSrc = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call to compare the source operands.
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyMachReg, this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyMachReg, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    switch (instr->m_opcode)
    {
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrSrNotNeq_A:
        // Tell the bailout machinery that the branch sense is inverted
        // relative to the profiled condition.
        if (instr->HasBailOutInfo())
        {
            instr->GetBailOutInfo()->isInvertedBranch = true;
        }
        break;

    case Js::OpCode::BrNotGe_A:
    case Js::OpCode::BrNotGt_A:
    case Js::OpCode::BrNotLe_A:
    case Js::OpCode::BrNotLt_A:
        // The helper computes the positive relation, so branch on "false".
        inverted = true;
        break;
    }

    // Branch if the result is "true".
    instr->SetSrc1(opndDst);
    instr->m_opcode = (inverted ? Js::OpCode::BrFalse_A : Js::OpCode::BrTrue_A);
    this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, !noMathFastPath && isHelper);
    return instrPrev;
}
  8881. IR::Instr *
  8882. Lowerer::LowerBrFncApply(IR::Instr * instr, IR::JnHelperMethod helperMethod) {
  8883. IR::Instr * instrPrev = instr->m_prev;
  8884. IR::Instr * instrCall;
  8885. IR::HelperCallOpnd * opndHelper;
  8886. IR::Opnd * opndSrc;
  8887. IR::Opnd * opndDst;
  8888. StackSym * symDst;
  8889. AssertMsg(instr->GetSrc1() != nullptr, "Expected 1 src opnd on BrFncApply");
  8890. LoadScriptContext(instr);
  8891. opndSrc = instr->UnlinkSrc1();
  8892. m_lowererMD.LoadHelperArgument(instr, opndSrc);
  8893. // Generate helper call to compare the source operands.
  8894. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  8895. symDst = StackSym::New(TyMachReg, this->m_func);
  8896. opndDst = IR::RegOpnd::New(symDst, TyMachReg, this->m_func);
  8897. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  8898. instr->InsertBefore(instrCall);
  8899. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  8900. // Branch if the result is "true".
  8901. instr->SetSrc1(opndDst);
  8902. instr->m_opcode = Js::OpCode::BrTrue_A;
  8903. m_lowererMD.LowerCondBranch(instr);
  8904. return instrPrev;
  8905. }
  8906. ///----------------------------------------------------------------------------
  8907. ///
  8908. /// Lowerer::LowerBrProperty - lower branch-on-has/no-property
  8909. ///
  8910. ///----------------------------------------------------------------------------
  8911. IR::Instr *
  8912. Lowerer::LowerBrProperty(IR::Instr * instr, IR::JnHelperMethod helper)
  8913. {
  8914. IR::Instr * instrPrev;
  8915. IR::Instr * instrCall;
  8916. IR::HelperCallOpnd * opndHelper;
  8917. IR::Opnd * opndSrc;
  8918. IR::Opnd * opndDst;
  8919. opndSrc = instr->UnlinkSrc1();
  8920. AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
  8921. "Expected propertySym as src of BrProperty");
  8922. instrPrev = LoadScriptContext(instr);
  8923. this->LoadPropertySymAsArgument(instr, opndSrc);
  8924. opndHelper = IR::HelperCallOpnd::New(helper, this->m_func);
  8925. opndDst = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), TyMachReg, this->m_func);
  8926. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  8927. instr->InsertBefore(instrCall);
  8928. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  8929. // Branch on the result of the call
  8930. instr->SetSrc1(opndDst);
  8931. switch (instr->m_opcode)
  8932. {
  8933. case Js::OpCode::BrOnHasProperty:
  8934. instr->m_opcode = Js::OpCode::BrTrue_A;
  8935. break;
  8936. case Js::OpCode::BrOnNoProperty:
  8937. instr->m_opcode = Js::OpCode::BrFalse_A;
  8938. break;
  8939. default:
  8940. AssertMsg(0, "Unknown opcode on BrProperty branch");
  8941. break;
  8942. }
  8943. this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, false);
  8944. return instrPrev;
  8945. }
  8946. ///----------------------------------------------------------------------------
  8947. ///
  8948. /// Lowerer::LowerElementUndefined
  8949. ///
  8950. ///----------------------------------------------------------------------------
  8951. IR::Instr *
  8952. Lowerer::LowerElementUndefined(IR::Instr * instr, IR::JnHelperMethod helper)
  8953. {
  8954. IR::Opnd *dst = instr->UnlinkDst();
  8955. AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as dst of Ld Undefined");
  8956. // Pass the property sym to store to
  8957. this->LoadPropertySymAsArgument(instr, dst);
  8958. m_lowererMD.ChangeToHelperCall(instr, helper);
  8959. return instr;
  8960. }
  8961. IR::Instr *
  8962. Lowerer::LowerElementUndefinedMem(IR::Instr * instr, IR::JnHelperMethod helper)
  8963. {
  8964. // Pass script context
  8965. IR::Instr * instrPrev = LoadScriptContext(instr);
  8966. this->LowerElementUndefined(instr, helper);
  8967. return instrPrev;
  8968. }
  8969. IR::Instr *
  8970. Lowerer::LowerLdElemUndef(IR::Instr * instr)
  8971. {
  8972. if (this->m_func->GetJITFunctionBody()->IsEval())
  8973. {
  8974. return LowerElementUndefinedMem(instr, IR::HelperOp_LdElemUndefDynamic);
  8975. }
  8976. else
  8977. {
  8978. return LowerElementUndefined(instr, IR::HelperOp_LdElemUndef);
  8979. }
  8980. }
  8981. ///----------------------------------------------------------------------------
  8982. ///
  8983. /// Lowerer::LowerElementUndefinedScoped
  8984. ///
  8985. ///----------------------------------------------------------------------------
  8986. IR::Instr *
  8987. Lowerer::LowerElementUndefinedScoped(IR::Instr * instr, IR::JnHelperMethod helper)
  8988. {
  8989. IR::Instr * instrPrev = instr->m_prev;
  8990. // Pass the default instance
  8991. IR::Opnd *src = instr->UnlinkSrc1();
  8992. m_lowererMD.LoadHelperArgument(instr, src);
  8993. // Pass the property sym to store to
  8994. IR::Opnd * dst = instr->UnlinkDst();
  8995. AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as dst of Ld Undefined Scoped");
  8996. this->LoadPropertySymAsArgument(instr, dst);
  8997. m_lowererMD.ChangeToHelperCall(instr, helper);
  8998. return instrPrev;
  8999. }
  9000. IR::Instr *
  9001. Lowerer::LowerElementUndefinedScopedMem(IR::Instr * instr, IR::JnHelperMethod helper)
  9002. {
  9003. // Pass script context
  9004. IR::Instr * instrPrev = LoadScriptContext(instr);
  9005. this->LowerElementUndefinedScoped(instr, helper);
  9006. return instrPrev;
  9007. }
  9008. void
  9009. Lowerer::LowerStLoopBodyCount(IR::Instr* instr)
  9010. {
  9011. intptr_t header = m_func->m_workItem->GetLoopHeaderAddr();
  9012. IR::MemRefOpnd *loopBodyCounterOpnd = IR::MemRefOpnd::New((BYTE*)(header) + Js::LoopHeader::GetOffsetOfProfiledLoopCounter(), TyUint32, this->m_func);
  9013. instr->SetDst(loopBodyCounterOpnd);
  9014. instr->ReplaceSrc1(instr->GetSrc1()->AsRegOpnd()->UseWithNewType(TyUint32, this->m_func));
  9015. IR::AutoReuseOpnd autoReuse(loopBodyCounterOpnd, this->m_func);
  9016. m_lowererMD.ChangeToAssign(instr);
  9017. return;
  9018. }
  9019. #if !FLOATVAR
  9020. IR::Instr *
  9021. Lowerer::LowerStSlotBoxTemp(IR::Instr *stSlot)
  9022. {
  9023. // regVar = BoxStackNumber(src, scriptContext)
  9024. IR::RegOpnd * regSrc = stSlot->UnlinkSrc1()->AsRegOpnd();
  9025. IR::Instr * instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
  9026. IR::RegOpnd *regVar = IR::RegOpnd::New(TyVar, this->m_func);
  9027. instr->SetDst(regVar);
  9028. instr->SetSrc1(regSrc);
  9029. stSlot->InsertBefore(instr);
  9030. this->LowerUnaryHelperMem(instr, IR::HelperBoxStackNumber);
  9031. stSlot->SetSrc1(regVar);
  9032. return this->LowerStSlot(stSlot);
  9033. }
  9034. #endif
IR::Opnd *
Lowerer::CreateOpndForSlotAccess(IR::Opnd * opnd)
{
    // Builds the real memory operand for an LdSlot/StSlot access: either a
    // direct stack-sym access (stack closure case) or an indirect access
    // [slot-array owner + slot offset].
    IR::SymOpnd * symOpnd = opnd->AsSymOpnd();
    PropertySym * dstSym = symOpnd->m_sym->AsPropertySym();

    if (!m_func->IsLoopBody() &&
        m_func->DoStackFrameDisplay() &&
        (dstSym->m_stackSym == m_func->GetLocalClosureSym() || dstSym->m_stackSym == m_func->GetLocalFrameDisplaySym()))
    {
        // Stack closure syms are made to look like slot accesses for the benefit of GlobOpt, so that it can do proper
        // copy prop and implicit call bailout. But what we really want is local stack load/store.
        // Don't do this for loop body, though, since we don't have the value saved on the stack.
        IR::SymOpnd * closureSym = IR::SymOpnd::New(dstSym->m_stackSym, 0, TyMachReg, this->m_func);
        closureSym->GetStackSym()->m_isClosureSym = true;
        return closureSym;
    }

    // The property id is the slot index; scale it to a byte offset except in
    // asm.js mode, where no scaling is applied here.
    int32 offset = dstSym->m_propertyId;
    if (!m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        offset = offset * TySize[opnd->GetType()];
    }
#ifdef ASMJS_PLAT
    if (m_func->IsTJLoopBody())
    {
        // Template-JIT loop bodies rebase the offset by the total asm.js
        // frame size.
        offset = offset - m_func->GetJITFunctionBody()->GetAsmJsInfo()->GetTotalSizeInBytes();
    }
#endif
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(symOpnd->CreatePropertyOwnerOpnd(m_func),
        offset , opnd->GetType(), this->m_func);
    return indirOpnd;
}
  9066. IR::Instr *
  9067. Lowerer::LowerStSlot(IR::Instr *instr)
  9068. {
  9069. // StSlot stores the nth Var in the buffer pointed to by the property sym's stack sym.
  9070. IR::Opnd * dstOpnd = instr->UnlinkDst();
  9071. AssertMsg(dstOpnd, "Expected dst opnd on StSlot");
  9072. IR::Opnd * dstNew = this->CreateOpndForSlotAccess(dstOpnd);
  9073. dstOpnd->Free(this->m_func);
  9074. instr->SetDst(dstNew);
  9075. instr = m_lowererMD.ChangeToWriteBarrierAssign(instr, this->m_func);
  9076. return instr;
  9077. }
  9078. IR::Instr *
  9079. Lowerer::LowerStSlotChkUndecl(IR::Instr *instrStSlot)
  9080. {
  9081. Assert(instrStSlot->GetSrc2() != nullptr);
  9082. // Src2 is required only to avoid dead store false positives during GlobOpt.
  9083. instrStSlot->FreeSrc2();
  9084. IR::Opnd *dstOpnd = this->CreateOpndForSlotAccess(instrStSlot->GetDst());
  9085. IR::Instr *instr = this->LowerStSlot(instrStSlot);
  9086. this->GenUndeclChk(instr, dstOpnd);
  9087. return instr;
  9088. }
  9089. void Lowerer::LowerProfileLdSlot(IR::Opnd *const valueOpnd, Func *const ldSlotFunc, const Js::ProfileId profileId, IR::Instr *const insertBeforeInstr)
  9090. {
  9091. Assert(valueOpnd);
  9092. Assert(profileId != Js::Constants::NoProfileId);
  9093. Assert(insertBeforeInstr);
  9094. Func *const irFunc = insertBeforeInstr->m_func;
  9095. m_lowererMD.LoadHelperArgument(insertBeforeInstr, IR::Opnd::CreateProfileIdOpnd(profileId, irFunc));
  9096. m_lowererMD.LoadHelperArgument(insertBeforeInstr, CreateFunctionBodyOpnd(ldSlotFunc));
  9097. m_lowererMD.LoadHelperArgument(insertBeforeInstr, valueOpnd);
  9098. IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, irFunc);
  9099. callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfileLdSlot, irFunc));
  9100. insertBeforeInstr->InsertBefore(callInstr);
  9101. m_lowererMD.LowerCall(callInstr, 0);
  9102. }
  9103. void
  9104. Lowerer::LowerLdSlot(IR::Instr *instr)
  9105. {
  9106. IR::Opnd * srcOpnd = instr->UnlinkSrc1();
  9107. AssertMsg(srcOpnd, "Expected src opnd on LdSlot");
  9108. IR::Opnd * srcNew = this->CreateOpndForSlotAccess(srcOpnd);
  9109. srcOpnd->Free(this->m_func);
  9110. instr->SetSrc1(srcNew);
  9111. m_lowererMD.ChangeToAssign(instr);
  9112. }
  9113. IR::Instr *
  9114. Lowerer::LowerChkUndecl(IR::Instr *instr)
  9115. {
  9116. IR::Instr *instrPrev = instr->m_prev;
  9117. this->GenUndeclChk(instr, instr->GetSrc1());
  9118. instr->Remove();
  9119. return instrPrev;
  9120. }
void
Lowerer::GenUndeclChk(IR::Instr *instrInsert, IR::Opnd *opnd)
{
    // Emits, before instrInsert:
    //   if (opnd == undecl-block-var sentinel) throw JSERR_UseBeforeDeclaration
    IR::LabelInstr *labelContinue = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    InsertCompareBranch(
        opnd,
        LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndeclBlockVar),
        Js::OpCode::BrNeq_A, labelContinue, instrInsert);

    // Cold (helper) path: raise the use-before-declaration reference error.
    IR::LabelInstr *labelThrow = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    instrInsert->InsertBefore(labelThrow);
    IR::Instr *instr = IR::Instr::New(
        Js::OpCode::RuntimeReferenceError,
        IR::RegOpnd::New(TyMachReg, m_func),
        IR::IntConstOpnd::New(SCODE_CODE(JSERR_UseBeforeDeclaration), TyInt32, m_func),
        m_func);
    instrInsert->InsertBefore(instr);
    this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeReferenceError);

    instrInsert->InsertBefore(labelContinue);
}
  9140. ///----------------------------------------------------------------------------
  9141. ///
  9142. /// Lowerer::LowerStElemC
  9143. ///
  9144. ///----------------------------------------------------------------------------
  9145. IR::Instr *
  9146. Lowerer::LowerStElemC(IR::Instr * stElem)
  9147. {
  9148. IR::Instr *instrPrev = stElem->m_prev;
  9149. IR::IndirOpnd * indirOpnd = stElem->GetDst()->AsIndirOpnd();
  9150. IR::RegOpnd *indexOpnd = indirOpnd->UnlinkIndexOpnd();
  9151. Assert(!indexOpnd || indexOpnd->m_sym->IsIntConst());
  9152. IntConstType value;
  9153. if (indexOpnd)
  9154. {
  9155. value = indexOpnd->AsRegOpnd()->m_sym->GetIntConstValue();
  9156. indexOpnd->Free(this->m_func);
  9157. }
  9158. else
  9159. {
  9160. value = (IntConstType)indirOpnd->GetOffset();
  9161. }
  9162. if (stElem->IsJitProfilingInstr())
  9163. {
  9164. Assert(stElem->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
  9165. m_lowererMD.LoadHelperArgument(stElem, stElem->UnlinkSrc1());
  9166. const auto meth = stElem->m_opcode == Js::OpCode::StElemC ? IR::HelperSimpleStoreArrayHelper : IR::HelperSimpleStoreArraySegHelper;
  9167. stElem->SetSrc1(IR::HelperCallOpnd::New(meth, m_func));
  9168. m_lowererMD.LoadHelperArgument(stElem, IR::IntConstOpnd::New(value, TyUint32, m_func));
  9169. m_lowererMD.LoadHelperArgument(stElem, indirOpnd->UnlinkBaseOpnd());
  9170. stElem->UnlinkDst()->Free(m_func);
  9171. m_lowererMD.LowerCall(stElem, 0);
  9172. return instrPrev;
  9173. }
  9174. IntConstType base;
  9175. IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
  9176. const ValueType baseValueType(baseOpnd->GetValueType());
  9177. if(baseValueType.IsLikelyNativeArray())
  9178. {
  9179. Assert(stElem->m_opcode == Js::OpCode::StElemC);
  9180. IR::LabelInstr *labelBailOut = nullptr;
  9181. IR::Instr *instrBailOut = nullptr;
  9182. if (stElem->HasBailOutInfo())
  9183. {
  9184. labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  9185. instrBailOut = stElem;
  9186. stElem = IR::Instr::New(instrBailOut->m_opcode, m_func);
  9187. instrBailOut->TransferTo(stElem);
  9188. instrBailOut->InsertBefore(stElem);
  9189. IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  9190. InsertBranch(Js::OpCode::Br, labelDone, instrBailOut);
  9191. instrBailOut->InsertBefore(labelBailOut);
  9192. instrBailOut->InsertAfter(labelDone);
  9193. instrBailOut->m_opcode = Js::OpCode::BailOut;
  9194. GenerateBailOut(instrBailOut);
  9195. }
  9196. if (!baseValueType.IsObject())
  9197. {
  9198. // Likely native array: do a vtable check and bail if it fails.
  9199. Assert(labelBailOut);
  9200. GenerateArrayTest(baseOpnd, labelBailOut, labelBailOut, stElem, true);
  9201. }
  9202. if (stElem->GetSrc1()->GetType() == TyVar)
  9203. {
  9204. // Storing a non-specialized value. This may cause array conversion, which invalidates all the code
  9205. // that depends on the array check we've already done.
  9206. // Call a helper that returns the type ID of the resulting array, check it here against the one we
  9207. // expect, and bail if it fails.
  9208. Assert(labelBailOut);
  9209. // Call a helper to (try and) unbox the var and store it.
  9210. // If we had to convert the array to do the store, we'll bail.
  9211. LoadScriptContext(stElem);
  9212. m_lowererMD.LoadHelperArgument(stElem, stElem->UnlinkSrc1());
  9213. IR::IntConstOpnd * intConstIndexOpnd = IR::IntConstOpnd::New(value, TyUint32, m_func);
  9214. m_lowererMD.LoadHelperArgument(stElem, intConstIndexOpnd);
  9215. m_lowererMD.LoadHelperArgument(stElem, indirOpnd->UnlinkBaseOpnd());
  9216. IR::JnHelperMethod helperMethod;
  9217. if (baseValueType.HasIntElements())
  9218. {
  9219. helperMethod = IR::HelperScrArr_SetNativeIntElementC;
  9220. }
  9221. else
  9222. {
  9223. helperMethod = IR::HelperScrArr_SetNativeFloatElementC;
  9224. }
  9225. IR::Instr *instrInsertBranch = stElem->m_next;
  9226. IR::RegOpnd *typeIdOpnd = IR::RegOpnd::New(TyUint32, m_func);
  9227. stElem->ReplaceDst(typeIdOpnd);
  9228. m_lowererMD.ChangeToHelperCall(stElem, helperMethod);
  9229. InsertCompareBranch(
  9230. typeIdOpnd,
  9231. IR::IntConstOpnd::New(
  9232. baseValueType.HasIntElements() ?
  9233. Js::TypeIds_NativeIntArray : Js::TypeIds_NativeFloatArray, TyUint32, m_func),
  9234. Js::OpCode::BrNeq_A,
  9235. labelBailOut,
  9236. instrInsertBranch);
  9237. return instrPrev;
  9238. }
  9239. else if (baseValueType.HasIntElements() && labelBailOut)
  9240. {
  9241. Assert(stElem->GetSrc1()->GetType() == GetArrayIndirType(baseValueType));
  9242. IR::Opnd* missingElementOpnd = GetMissingItemOpnd(stElem->GetSrc1()->GetType(), m_func);
  9243. if (!stElem->GetSrc1()->IsEqual(missingElementOpnd))
  9244. {
  9245. InsertMissingItemCompareBranch(stElem->GetSrc1(), Js::OpCode::BrEq_A, labelBailOut, stElem);
  9246. }
  9247. else
  9248. {
  9249. //Its a missing value store and data flow proves that src1 is always missing value. Array cannot be an int array at the first place
  9250. //if this code was ever hit. Just bailout, this code path would be updated with the profile information next time around.
  9251. InsertBranch(Js::OpCode::Br, labelBailOut, stElem);
  9252. #if DBG
  9253. labelBailOut->m_noHelperAssert = true;
  9254. #endif
  9255. stElem->Remove();
  9256. return instrPrev;
  9257. }
  9258. }
  9259. else
  9260. {
  9261. Assert(stElem->GetSrc1()->GetType() == GetArrayIndirType(baseValueType));
  9262. }
  9263. stElem->GetDst()->SetType(stElem->GetSrc1()->GetType());
  9264. Assert(value <= Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE);
  9265. if(baseValueType.HasIntElements())
  9266. {
  9267. base = sizeof(Js::JavascriptNativeIntArray) + offsetof(Js::SparseArraySegment<int32>, elements);
  9268. }
  9269. else
  9270. {
  9271. base = sizeof(Js::JavascriptNativeFloatArray) + offsetof(Js::SparseArraySegment<double>, elements);
  9272. }
  9273. }
  9274. else if(baseValueType.IsLikelyObject() && baseValueType.GetObjectType() == ObjectType::Array)
  9275. {
  9276. Assert(stElem->m_opcode == Js::OpCode::StElemC);
  9277. Assert(value <= Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE);
  9278. base = sizeof(Js::JavascriptArray) + offsetof(Js::SparseArraySegment<Js::Var>, elements);
  9279. }
  9280. else
  9281. {
  9282. Assert(stElem->m_opcode == Js::OpCode::StElemC || stElem->m_opcode == Js::OpCode::StArrSegElemC);
  9283. Assert(indirOpnd->GetBaseOpnd()->GetType() == TyVar);
  9284. base = offsetof(Js::SparseArraySegment<Js::Var>, elements);
  9285. }
  9286. Assert(value >= 0);
  9287. // MOV [r3 + offset(element) + index], src
  9288. const BYTE indirScale =
  9289. baseValueType.IsLikelyAnyOptimizedArray() ? GetArrayIndirScale(baseValueType) : m_lowererMD.GetDefaultIndirScale();
  9290. IntConstType offset = base + (value << indirScale);
  9291. Assert(Math::FitsInDWord(offset));
  9292. indirOpnd->SetOffset((int32)offset);
  9293. m_lowererMD.ChangeToWriteBarrierAssign(stElem, this->m_func);
  9294. return instrPrev;
  9295. }
  9296. void Lowerer::LowerLdArrHead(IR::Instr *const instr)
  9297. {
  9298. IR::RegOpnd *array = instr->UnlinkSrc1()->AsRegOpnd();
  9299. const ValueType arrayValueType(array->GetValueType());
  9300. Assert(arrayValueType.IsAnyOptimizedArray());
  9301. if(arrayValueType.GetObjectType() == ObjectType::ObjectWithArray)
  9302. {
  9303. array = LoadObjectArray(array, instr);
  9304. }
  9305. // mov arrayHeadSegment, [array + offset(headSegment)]
  9306. instr->GetDst()->SetType(TyMachPtr);
  9307. instr->SetSrc1(
  9308. IR::IndirOpnd::New(
  9309. array,
  9310. GetArrayOffsetOfHeadSegment(arrayValueType),
  9311. TyMachPtr,
  9312. instr->m_func));
  9313. LowererMD::ChangeToAssign(instr);
  9314. }
  9315. // Creates the rest parameter array.
  9316. // Var JavascriptArray::OP_NewScArrayWithElements(
  9317. // uint32 elementCount,
  9318. // Var *elements,
  9319. // ScriptContext* scriptContext)
  9320. IR::Instr *Lowerer::LowerRestParameter(IR::Opnd *formalsOpnd, IR::Opnd *dstOpnd, IR::Opnd *excessOpnd, IR::Instr *instr, IR::RegOpnd *generatorArgsPtrOpnd)
  9321. {
  9322. IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, dstOpnd, instr->m_func);
  9323. instr->InsertAfter(helperCallInstr);
  9324. // Var JavascriptArray::OP_NewScArrayWithElements(
  9325. // int32 elementCount,
  9326. // Var *elements,
  9327. // ScriptContext* scriptContext)
  9328. IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScArrayWithElements;
  9329. LoadScriptContext(helperCallInstr);
  9330. BOOL isGenerator = this->m_func->GetJITFunctionBody()->IsCoroutine();
  9331. // Elements pointer = ebp + (formals count + formals offset + 1)*sizeof(Var)
  9332. IR::RegOpnd *srcOpnd = isGenerator ? generatorArgsPtrOpnd : IR::Opnd::CreateFramePointerOpnd(this->m_func);
  9333. uint16 actualOffset = isGenerator ? 0 : GetFormalParamOffset(); //4
  9334. IR::RegOpnd *argPtrOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  9335. InsertAdd(false, argPtrOpnd, srcOpnd, IR::IntConstOpnd::New((formalsOpnd->AsIntConstOpnd()->GetValue() + actualOffset) * MachPtr, TyMachPtr, this->m_func), helperCallInstr);
  9336. m_lowererMD.LoadHelperArgument(helperCallInstr, argPtrOpnd);
  9337. m_lowererMD.LoadHelperArgument(helperCallInstr, excessOpnd);
  9338. m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);
  9339. return helperCallInstr;
  9340. }
///----------------------------------------------------------------------------
///
/// Lowerer::LowerArgIn
///
/// This function checks the passed-in argument count against the index of this
/// argument and uses null for a param value if the caller didn't explicitly
/// pass anything.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerArgIn(IR::Instr *instrArgIn)
{
    IR::LabelInstr * labelDone;
    IR::LabelInstr * labelUndef;
    IR::LabelInstr * labelNormal;
    IR::LabelInstr * labelInit;
    IR::LabelInstr * labelInitNext;
    IR::BranchInstr * instrBranch;
    IR::Instr * instrArgInNext;
    IR::Instr * instrInsert;
    IR::Instr * instrPrev;
    IR::Instr * instrResume = nullptr;
    IR::Opnd * dstOpnd;
    IR::Opnd * srcOpnd;
    IR::Opnd * opndUndef;
    Js::ArgSlot argIndex;
    StackSym * symParam;
    BOOLEAN isDuplicate;
    IR::RegOpnd * generatorArgsPtrOpnd = nullptr;

    // We start with:
    // s1 = ArgIn_A param1
    // s2 = ArgIn_A param2
    // ...
    // sn = ArgIn_A paramn
    //
    // We want to end up with:
    //
    // s1 = ArgIn_A param1        -- Note that this is unconditional
    // count = (load from param area)
    // BrLt_A $start, count, n    -- Forward cbranch to the uncommon case
    // Br $Ln
    // $start:
    // sn = assign undef
    // BrGe_A $Ln-1, count, n-1
    // sn-1 = assign undef
    // ...
    // s2 = assign undef
    // Br $done
    // $Ln:
    // sn = assign paramn
    // $Ln-1:
    // sn-1 = assign paramn-1
    // ...
    // s2 = assign param2
    // $done:

    AnalysisAssert(instrArgIn);

    IR::Opnd *restDst = nullptr;
    bool hasRest = instrArgIn->m_opcode == Js::OpCode::ArgIn_Rest;
    if (hasRest)
    {
        IR::Instr *restInstr = instrArgIn;
        restDst = restInstr->UnlinkDst();
        if (m_func->GetJITFunctionBody()->HasImplicitArgIns() && m_func->argInsCount > 0)
        {
            // There are also normal ArgIns: back up to the last ArgIn_A and let
            // the common code below handle them; the rest array is created at the
            // end (see the hasRest blocks further down).
            while (instrArgIn->m_opcode != Js::OpCode::ArgIn_A)
            {
                instrArgIn = instrArgIn->m_prev;
                if (instrResume == nullptr)
                {
                    instrResume = instrArgIn;
                }
            }
            restInstr->Remove();
        }
        else
        {
            // Rest is the only ArgIn to lower: compute the excess actual count,
            // clamp it at zero, and create the rest array right here.
            Assert(instrArgIn->m_func == this->m_func);
            IR::Instr * instrCount = m_lowererMD.LoadInputParamCount(instrArgIn, -this->m_func->GetInParamsCount());
            IR::Opnd * excessOpnd = instrCount->GetDst();

            IR::LabelInstr *createRestArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

            // BrGe $createRestArray, excess, 0
            InsertCompareBranch(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), Js::OpCode::BrGe_A, createRestArrayLabel, instrArgIn);

            // MOV excess, 0 -- fewer actuals than formals: rest array is empty
            InsertMove(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), instrArgIn);

            // $createRestArray
            instrArgIn->InsertBefore(createRestArrayLabel);

            if (m_func->GetJITFunctionBody()->IsCoroutine())
            {
                generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrArgIn);
            }

            IR::IntConstOpnd * formalsOpnd = IR::IntConstOpnd::New(this->m_func->GetInParamsCount(), TyUint32, this->m_func);
            IR::Instr *prev = LowerRestParameter(formalsOpnd, restDst, excessOpnd, instrArgIn, generatorArgsPtrOpnd);
            instrArgIn->Remove();
            return prev;
        }
    }

    srcOpnd = instrArgIn->GetSrc1();
    symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();

    argIndex = symParam->GetParamSlotNum();
    if (argIndex == 1)
    {
        // The "this" argument is not source-dependent and doesn't need to be checked.
        if (m_func->GetJITFunctionBody()->IsCoroutine())
        {
            generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrArgIn);
            ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
        }
        m_lowererMD.ChangeToAssign(instrArgIn);
        return instrResume == nullptr ? instrArgIn->m_prev : instrResume;
    }

    Js::ArgSlot formalsCount = this->m_func->GetInParamsCount();
    AssertMsg(argIndex <= formalsCount, "Expect to see the ArgIn's within the range of the formals");

    // Because there may be instructions between the ArgIn's, such as saves to the frame object,
    // we find the top of the sequence of ArgIn's and insert everything there. This assumes that
    // ArgIn's use param symbols as src's and not the results of previous instructions.
    instrPrev = instrArgIn;
    Js::ArgSlot currArgInCount = 0;
    Assert(this->m_func->argInsCount > 0);
    while (currArgInCount < this->m_func->argInsCount - 1)
    {
        instrPrev = instrPrev->m_prev;
        if (instrPrev->m_opcode == Js::OpCode::ArgIn_A)
        {
            srcOpnd = instrPrev->GetSrc1();
            symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
            AssertMsg(symParam->GetParamSlotNum() < argIndex, "ArgIn's not in numerical order");
            argIndex = symParam->GetParamSlotNum();
            currArgInCount++;
        }
        else
        {
            // Make sure that this intervening non-ArgIn instruction gets lowered.
            if (instrResume == nullptr)
            {
                instrResume = instrPrev;
            }
        }
    }

    // The loading of parameters will be inserted above this instruction.
    instrInsert = instrPrev;
    AnalysisAssert(instrInsert);
    if (instrResume == nullptr)
    {
        // We found no intervening non-ArgIn's, so lowering can resume at the previous instruction.
        instrResume = instrInsert->m_prev;
    }

    // Now insert all the checks and undef-assigns.
    if (m_func->GetJITFunctionBody()->IsCoroutine())
    {
        generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrInsert);
    }

    // excessOpnd = (load from param area) - formalCounts
    IR::Instr * instrCount = this->m_lowererMD.LoadInputParamCount(instrInsert, -formalsCount, true);
    IR::Opnd * excessOpnd = instrCount->GetDst();

    // BrLt $undef -- fewer actuals than formals: fill trailing formals with undefined
    labelUndef = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/ true);
    Lowerer::InsertBranch(Js::OpCode::BrLt_A, labelUndef, instrInsert);

    // Br $Ln
    labelNormal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    labelInit = labelNormal;
    instrBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelNormal, this->m_func);
    instrInsert->InsertBefore(instrBranch);

    // Insert the labels
    instrInsert->InsertBefore(labelUndef);
    instrInsert->InsertBefore(labelNormal);

    // Adjustment for deadstore of ArgIn_A: trailing formals whose ArgIns were
    // dead-stored still count against the excess, so bump the count once per
    // missing slot and branch to $Ln when it reaches zero.
    Js::ArgSlot highestSlotNum = instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();
    Js::ArgSlot missingSlotNums = this->m_func->GetInParamsCount() - highestSlotNum;
    Assert(missingSlotNums >= 0);
    while (missingSlotNums > 0)
    {
        InsertAdd(true, excessOpnd, excessOpnd, IR::IntConstOpnd::New(1, TyMachReg, this->m_func), labelNormal);
        Lowerer::InsertBranch(Js::OpCode::BrEq_A, labelNormal, labelNormal);
        missingSlotNums--;
    }

    // MOV undefReg, undefAddress
    IR::Opnd* opndUndefAddress = this->LoadLibraryValueOpnd(labelNormal, LibraryValue::ValueUndefined);
    opndUndef = IR::RegOpnd::New(TyMachPtr, this->m_func);
    Lowerer::InsertMove(opndUndef, opndUndefAddress, labelNormal);

    // Tracks dst syms already initialized, to handle duplicate formal names.
    BVSparse<JitArenaAllocator> *formalsBv = JitAnew(this->m_alloc, BVSparse<JitArenaAllocator>, this->m_alloc);

    while (currArgInCount > 0)
    {
        dstOpnd = instrArgIn->GetDst();

        Assert(dstOpnd->IsRegOpnd());
        isDuplicate = formalsBv->TestAndSet(dstOpnd->AsRegOpnd()->m_sym->AsStackSym()->m_id);

        // Now insert the undef initialization before the "normal" label
        // sn = assign undef
        Lowerer::InsertMove(dstOpnd, opndUndef, labelNormal);

        // INC excessOpnd
        // BrEq_A $Ln-1
        currArgInCount--;
        labelInitNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

        // And insert the "normal" initialization before the "done" label
        // sn = assign paramn
        // $Ln-1:
        labelInit->InsertAfter(labelInitNext);
        labelInit = labelInitNext;

        instrArgInNext = instrArgIn->m_prev;
        instrArgIn->Unlink();

        Js::ArgSlot prevParamSlotNum = instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();

        // function foo(x, x) { use(x); }
        // This should refer to the second 'x'. Since we reverse the order here however, we need to skip
        // the initialization of the first 'x' to not override the one for the second. WOOB:1105504
        if (isDuplicate)
        {
            instrArgIn->Free();
        }
        else
        {
            ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
            labelInit->InsertBefore(instrArgIn);
            this->m_lowererMD.ChangeToAssign(instrArgIn);
        }
        instrArgIn = instrArgInNext;

        while (instrArgIn->m_opcode != Js::OpCode::ArgIn_A)
        {
            instrArgIn = instrArgIn->m_prev;
            AssertMsg(instrArgIn, "???");
        }

        // Adjustment for deadstore of ArgIn_A: skipped slot numbers between
        // consecutive ArgIns each need an increment-and-check as well.
        Js::ArgSlot currParamSlotNum = instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();
        Js::ArgSlot diffSlotsNum = prevParamSlotNum - currParamSlotNum;
        AssertMsg(diffSlotsNum > 0, "Argins are not in order?");
        while (diffSlotsNum > 0)
        {
            InsertAdd(true, excessOpnd, excessOpnd, IR::IntConstOpnd::New(1, TyMachReg, this->m_func), labelNormal);
            InsertBranch(Js::OpCode::BrEq_A, labelInitNext, labelNormal);
            diffSlotsNum--;
        }

        AssertMsg(instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum() <= formalsCount,
                  "Expect all ArgIn's to be in numerical order by param slot");
    }

    // Insert final undef and normal initializations, jumping unconditionally to the end
    // rather than checking against the decremented formals count as we did inside the loop above.

    // s2 = assign undef
    dstOpnd = instrArgIn->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    isDuplicate = formalsBv->TestAndSet(dstOpnd->AsRegOpnd()->m_sym->AsStackSym()->m_id);

    Lowerer::InsertMove(dstOpnd, opndUndef, labelNormal);

    if (hasRest)
    {
        // No excess actuals on this path, so the rest array is empty.
        InsertMove(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), labelNormal);
    }

    // Br $done
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instrBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    labelNormal->InsertBefore(instrBranch);

    // s2 = assign param2
    // $done:
    labelInit->InsertAfter(labelDone);

    if (hasRest)
    {
        // The formals count has been tainted, so restore it before lowering rest
        IR::IntConstOpnd * formalsOpnd = IR::IntConstOpnd::New(this->m_func->GetInParamsCount(), TyUint32, this->m_func);
        LowerRestParameter(formalsOpnd, restDst, excessOpnd, labelDone, generatorArgsPtrOpnd);
    }

    instrArgIn->Unlink();
    if (isDuplicate)
    {
        instrArgIn->Free();
    }
    else
    {
        ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
        labelDone->InsertBefore(instrArgIn);
        this->m_lowererMD.ChangeToAssign(instrArgIn);
    }

    JitAdelete(this->m_alloc, formalsBv);

    return instrResume;
}
  9610. void
  9611. Lowerer::ConvertArgOpndIfGeneratorFunction(IR::Instr *instrArgIn, IR::RegOpnd *generatorArgsPtrOpnd)
  9612. {
  9613. if (this->m_func->GetJITFunctionBody()->IsCoroutine())
  9614. {
  9615. // Replace stack param operand with offset into arguments array held by
  9616. // the generator object.
  9617. IR::Opnd * srcOpnd = instrArgIn->UnlinkSrc1();
  9618. StackSym * symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
  9619. Js::ArgSlot argIndex = symParam->GetParamSlotNum();
  9620. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorArgsPtrOpnd, (argIndex - 1) * MachPtr, TyMachPtr, this->m_func);
  9621. srcOpnd->Free(this->m_func);
  9622. instrArgIn->SetSrc1(indirOpnd);
  9623. }
  9624. }
  9625. IR::RegOpnd *
  9626. Lowerer::LoadGeneratorArgsPtr(IR::Instr *instrInsert)
  9627. {
  9628. IR::Instr * instr = LoadGeneratorObject(instrInsert);
  9629. IR::RegOpnd * generatorRegOpnd = instr->GetDst()->AsRegOpnd();
  9630. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorRegOpnd, Js::JavascriptGenerator::GetArgsPtrOffset(), TyMachPtr, instrInsert->m_func);
  9631. IR::RegOpnd * argsPtrOpnd = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
  9632. Lowerer::InsertMove(argsPtrOpnd, indirOpnd, instrInsert);
  9633. return argsPtrOpnd;
  9634. }
  9635. IR::Instr *
  9636. Lowerer::LoadGeneratorObject(IR::Instr * instrInsert)
  9637. {
  9638. StackSym * generatorSym = StackSym::NewImplicitParamSym(3, instrInsert->m_func);
  9639. instrInsert->m_func->SetArgOffset(generatorSym, LowererMD::GetFormalParamOffset() * MachPtr);
  9640. IR::SymOpnd * generatorSymOpnd = IR::SymOpnd::New(generatorSym, TyMachPtr, instrInsert->m_func);
  9641. IR::RegOpnd * generatorRegOpnd = IR::RegOpnd::New(TyMachPtr, instrInsert->m_func);
  9642. instrInsert->m_func->SetHasImplicitParamLoad();
  9643. return Lowerer::InsertMove(generatorRegOpnd, generatorSymOpnd, instrInsert);
  9644. }
  9645. IR::Instr *
  9646. Lowerer::LowerArgInAsmJs(IR::Instr * instr)
  9647. {
  9648. Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
  9649. Assert(instr && instr->m_opcode == Js::OpCode::ArgIn_A);
  9650. IR::Instr* instrPrev = instr->m_prev;
  9651. m_lowererMD.ChangeToAssign(instr);
  9652. return instrPrev;
  9653. }
// Generates a lowerer fast path for a small set of built-in library calls,
// guarded by a runtime check that the call target is the expected built-in.
// Returns false (leaving the call as-is) when the call doesn't qualify.
bool
Lowerer::InlineBuiltInLibraryCall(IR::Instr *callInstr)
{
    IR::Opnd *src1 = callInstr->GetSrc1();
    IR::Opnd *src2 = callInstr->GetSrc2();

    // Get the arg count by looking at the slot number of the last arg symbol.
    if (!src2->IsSymOpnd())
    {
        // No args? Not sure this is possible, but handle it.
        return false;
    }
    StackSym *argLinkSym = src2->AsSymOpnd()->m_sym->AsStackSym();
    // Subtract "this" from the arg count.
    IntConstType argCount = argLinkSym->GetArgSlotNum() - 1;

    // Find the callee's built-in index (if any).
    Js::BuiltinFunction index = Func::GetBuiltInIndex(src1);

    // Warning!
    // Don't add new built-in to following switch. Built-ins needs to be inlined in call direct way.
    // Following is only for prejit scenarios where we don't get inlining always and generate fast path in lowerer.
    // Generating fastpath here misses fixed functions and globopt optimizations.
    switch(index)
    {
    case Js::BuiltinFunction::JavascriptString_CharAt:
    case Js::BuiltinFunction::JavascriptString_CharCodeAt:
        if (argCount != 1)
        {
            return false;
        }
        if (!callInstr->GetDst())
        {
            // Optimization of Char[Code]At assumes result is used.
            return false;
        }
        break;

    case Js::BuiltinFunction::Math_Abs:
#ifdef _M_IX86
        if (!AutoSystemInfo::Data.SSE2Available())
        {
            return false;
        }
#endif
        if (argCount != 1)
        {
            return false;
        }
        if (!callInstr->GetDst())
        {
            // Optimization of Abs assumes result is used.
            return false;
        }
        break;

    case Js::BuiltinFunction::JavascriptArray_Push:
    {
        if (argCount != 1)
        {
            return false;
        }
        if (callInstr->GetDst())
        {
            // Optimization of push assumes result is unused.
            return false;
        }
        // Walk two links up the ArgOut chain to reach the array ("this") operand.
        StackSym *linkSym = callInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
        Assert(linkSym->IsSingleDef());
        linkSym = linkSym->m_instrDef->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
        Assert(linkSym->IsSingleDef());
        IR::Opnd *const arrayOpnd = linkSym->m_instrDef->GetSrc1();
        if(!arrayOpnd->IsRegOpnd())
        {
            // This should be rare, but needs to be handled.
            // By now, we've already started some of the inlining. Simply jmp to the helper.
            // The branch will get peeped later.
            return false;
        }
        if(!ShouldGenerateArrayFastPath(arrayOpnd, false, false, false) ||
            arrayOpnd->GetValueType().IsLikelyNativeArray())
        {
            // Rejecting native array for now, since we have to do a FromVar at the call site and bail out.
            return false;
        }
        break;
    }

    case Js::BuiltinFunction::JavascriptString_Replace:
    {
        if(argCount != 2)
        {
            return false;
        }
        if(!ShouldGenerateStringReplaceFastPath(callInstr, argCount))
        {
            return false;
        }
        break;
    }

    default:
        return false;
    }

    Assert(Func::IsBuiltInInlinedInLowerer(callInstr->GetSrc1()));

    // Compare the call target against the expected built-in function object;
    // mismatch falls through to the original (helper) call path.
    IR::Opnd *callTargetOpnd = callInstr->GetSrc1();
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Opnd *objRefOpnd = IR::MemRefOpnd::New((void*)this->GetObjRefForBuiltInTarget(callTargetOpnd->AsRegOpnd()), TyMachReg, this->m_func);
    InsertCompareBranch(callTargetOpnd, objRefOpnd, Js::OpCode::BrNeq_A, labelHelper, callInstr);
    callInstr->InsertBefore(labelHelper);

    // Collect the argument operands (walking the ArgOut chain backwards into
    // argsOpnd) and move each ArgOut above the helper label so only the slow
    // path executes them.
    Assert(argCount <= 2);
    IR::Opnd *argsOpnd[3];
    IR::Opnd *linkOpnd = callInstr->GetSrc2();
    while(linkOpnd->IsSymOpnd())
    {
        IR::SymOpnd * symOpnd = linkOpnd->AsSymOpnd();
        StackSym *sym = symOpnd->m_sym->AsStackSym();
        Assert(sym->m_isSingleDef);
        IR::Instr *argInstr = sym->m_instrDef;

        Assert(argCount >= 0);
        argsOpnd[argCount] = argInstr->GetSrc1();
        argCount--;

        argInstr->Unlink();
        labelHelper->InsertAfter(argInstr);

        linkOpnd = argInstr->GetSrc2();
    }
    AnalysisAssert(argCount == -1);

    // Move startcall above the helper label as well.
    Assert(linkOpnd->IsRegOpnd());
    StackSym *sym = linkOpnd->AsRegOpnd()->m_sym;
    Assert(sym->m_isSingleDef);
    IR::Instr *startCall = sym->m_instrDef;
    Assert(startCall->m_opcode == Js::OpCode::StartCall);
    startCall->Unlink();
    labelHelper->InsertAfter(startCall);

    // $doneLabel:
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    callInstr->InsertAfter(doneLabel);

    // Emit the per-built-in fast path. Each generator jumps to labelHelper on
    // a condition it can't handle and to doneLabel on success.
    bool success = true;
    switch(index)
    {
    case Js::BuiltinFunction::Math_Abs:
        this->m_lowererMD.GenerateFastAbs(callInstr->GetDst(), argsOpnd[1], callInstr, labelHelper, labelHelper, doneLabel);
        break;

    case Js::BuiltinFunction::JavascriptString_CharCodeAt:
    case Js::BuiltinFunction::JavascriptString_CharAt:
        success = GenerateFastCharAt(index, callInstr->GetDst(), argsOpnd[0], argsOpnd[1],
            callInstr, labelHelper, labelHelper, doneLabel);
        break;

    case Js::BuiltinFunction::JavascriptArray_Push:
        success = GenerateFastPush(argsOpnd[0], argsOpnd[1], callInstr, labelHelper, labelHelper, nullptr, doneLabel);
        break;

    case Js::BuiltinFunction::JavascriptString_Replace:
        success = GenerateFastReplace(argsOpnd[0], argsOpnd[1], argsOpnd[2], callInstr, labelHelper, labelHelper, doneLabel);
        break;

    default:
        Assert(UNREACHED);
    }

    // Fast path falls through here: skip over the helper-call block.
    IR::Instr *instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, doneLabel, this->m_func);
    labelHelper->InsertBefore(instr);

    return success;
}
  9809. // Perform lowerer part of inlining built-in function.
  9810. // For details, see inline.cpp.
  9811. //
  9812. // Description of changes here (note that taking care of Argouts are similar to InlineeStart):
  9813. // - Move ArgOut_A_InlineBuiltIn next to the call instr -- used by bailout processing in register allocator.
  9814. // - Remove StartCall and InlineBuiltInStart for this call.
  9815. // Before:
  9816. // StartCall fn
  9817. // d1 = BIA s1, link1
  9818. // ...
  9819. // InlineBuiltInStart fn, link0
  9820. // After:
  9821. // ...
  9822. // d1 = BIA s1, NULL
  9823. void Lowerer::LowerInlineBuiltIn(IR::Instr* builtInEndInstr)
  9824. {
  9825. Assert(builtInEndInstr->m_opcode == Js::OpCode::InlineBuiltInEnd || builtInEndInstr->m_opcode == Js::OpCode::InlineNonTrackingBuiltInEnd);
  9826. IR::Instr* startCallInstr = nullptr;
  9827. builtInEndInstr->IterateArgInstrs([&](IR::Instr* argInstr) {
  9828. startCallInstr = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
  9829. return false;
  9830. });
  9831. // Keep the startCall around as bailout refers to it. Just unlink it for now - do not delete it.
  9832. startCallInstr->Unlink();
  9833. builtInEndInstr->Remove();
  9834. }
// Returns the address (within the script context's built-in functions table)
// used to verify at runtime that a call target is the expected built-in.
intptr_t
Lowerer::GetObjRefForBuiltInTarget(IR::RegOpnd * regOpnd)
{
    intptr_t mathFns = m_func->GetScriptContextInfo()->GetBuiltinFunctionsBaseAddr();
    Js::BuiltinFunction index = regOpnd->m_sym->m_builtInIndex;
    AssertMsg(index < Js::BuiltinFunction::Count, "Invalid built-in index on a call target marked as built-in");
    // NOTE(review): 'index' is added to the base address unscaled (no * element
    // size). Presumably the table layout makes this correct -- confirm against
    // GetBuiltinFunctionsBaseAddr's definition.
    return mathFns + index;
}
// Lower NewRegEx: allocate a JavascriptRegExp object and initialize its fields
// inline. src1 is an address operand holding the regex pattern pointer.
IR::Instr *
Lowerer::LowerNewRegEx(IR::Instr * instr)
{
    IR::Opnd *src1 = instr->UnlinkSrc1();

    Assert(src1->IsAddrOpnd());

#if ENABLE_REGEX_CONFIG_OPTIONS
    if (REGEX_CONFIG_FLAG(RegexTracing))
    {
        // With regex tracing on, go through the runtime helper instead of the
        // inline allocation so the creation is observable.
        Assert(!instr->GetDst()->CanStoreTemp());
        IR::Instr * instrPrev = LoadScriptContext(instr);
        instrPrev = m_lowererMD.LoadHelperArgument(instr, src1);
        m_lowererMD.ChangeToHelperCall(instr, IR::HelperScrRegEx_OP_NewRegEx);
        return instrPrev;
    }
#endif

    IR::Instr * instrPrev = instr->m_prev;
    IR::RegOpnd * dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    IR::SymOpnd * tempObjectSymOpnd;
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(instr, dstOpnd, IR::HelperAllocMemForJavascriptRegExp, sizeof(Js::JavascriptRegExp), &tempObjectSymOpnd);

    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable and pattern init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            LoadVTableValueOpnd(this->outerMostLoopLabel, VTableValue::VtableJavascriptRegExp),
            this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, 0, LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp), instr, isZeroed);
    }
    // Initialize type, aux slots, and object array fields.
    GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfType(),
        this->LoadLibraryValueOpnd(instr, LibraryValue::ValueRegexType), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfAuxSlots(), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfObjectArray(), instr, isZeroed);
    // Store the pattern pointer (hoisted to the loop top when possible, same
    // condition as the vtable init above).
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        InsertMove(IR::SymOpnd::New(tempObjectSymOpnd->m_sym,
            tempObjectSymOpnd->m_offset + Js::JavascriptRegExp::GetOffsetOfPattern(), TyMachPtr, this->m_func),
            src1, this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfPattern(), src1, instr, isZeroed);
    }
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfSplitPattern(), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), instr, isZeroed);
    GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), 0, instr, isZeroed);

    instr->Remove();

    return instrPrev;
}
  9893. IR::Instr *
  9894. Lowerer::GenerateRuntimeError(IR::Instr * insertBeforeInstr, Js::MessageId errorCode, IR::JnHelperMethod helper /*= IR::JnHelperMethod::HelperOp_RuntimeTypeError*/)
  9895. {
  9896. IR::Instr * runtimeErrorInstr = IR::Instr::New(Js::OpCode::RuntimeTypeError, this->m_func);
  9897. runtimeErrorInstr->SetSrc1(IR::IntConstOpnd::New(errorCode, TyInt32, this->m_func, true));
  9898. insertBeforeInstr->InsertBefore(runtimeErrorInstr);
  9899. return this->LowerUnaryHelperMem(runtimeErrorInstr, helper);
  9900. }
  9901. bool Lowerer::IsNullOrUndefRegOpnd(IR::RegOpnd *opnd) const
  9902. {
  9903. StackSym *sym = opnd->m_sym;
  9904. if (!sym->IsConst() || sym->IsIntConst() || sym->IsFloatConst())
  9905. {
  9906. return false;
  9907. }
  9908. Js::Var var = sym->GetConstAddress();
  9909. return (intptr_t)var == m_func->GetScriptContextInfo()->GetNullAddr() || (intptr_t)var == m_func->GetScriptContextInfo()->GetUndefinedAddr();
  9910. }
  9911. bool Lowerer::IsConstRegOpnd(IR::RegOpnd *opnd) const
  9912. {
  9913. StackSym *sym = opnd->m_sym;
  9914. if (!sym->IsConst() || sym->IsIntConst() || sym->IsFloatConst())
  9915. {
  9916. return false;
  9917. }
  9918. const auto& vt = sym->m_instrDef->GetSrc1()->GetValueType();
  9919. return vt.IsUndefined() || vt.IsNull() || vt.IsBoolean();
  9920. }
  9921. bool
  9922. Lowerer::HasSideEffects(IR::Instr *instr)
  9923. {
  9924. if (LowererMD::IsCall(instr))
  9925. {
  9926. #ifdef _M_IX86
  9927. IR::Opnd *src1 = instr->GetSrc1();
  9928. if (src1->IsHelperCallOpnd())
  9929. {
  9930. IR::HelperCallOpnd * helper = src1->AsHelperCallOpnd();
  9931. switch(helper->m_fnHelper)
  9932. {
  9933. case IR::HelperOp_Int32ToAtomInPlace:
  9934. case IR::HelperOp_Int32ToAtom:
  9935. case IR::HelperOp_UInt32ToAtom:
  9936. return false;
  9937. }
  9938. }
  9939. #endif
  9940. return true;
  9941. }
  9942. return instr->HasAnySideEffects();
  9943. }
  9944. bool Lowerer::IsArgSaveRequired(Func *func) {
  9945. return (!func->IsTrueLeaf() || func->IsJitInDebugMode() ||
  9946. // GetHasImplicitParamLoad covers generators, asmjs,
  9947. // and other javascript functions that implicitly read from the arg stack slots
  9948. func->GetHasThrow() || func->GetHasImplicitParamLoad() || func->HasThis() || func->argInsCount > 0);
  9949. }
// Lowers an inlined Math.random() call.
// On x64, when the script context's PRNG has already been seeded, emits the
// xorshift128+ generator inline (shift constants 23/17/26), writes the new
// seed state back to the library, and maps the 64-bit result into a double in
// [0,1) by building a value in [1.0, 2.0) and subtracting 1.0. Otherwise it
// falls back to the HelperDirectMath_Random runtime helper.
// Returns the instruction preceding the (removed) original instr so lowering
// can resume from there.
IR::Instr*
Lowerer::GenerateFastInlineBuiltInMathRandom(IR::Instr* instr)
{
    AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
    IR::Instr* retInstr = instr->m_prev;
    IR::Opnd* dst = instr->GetDst();
#if defined(_M_X64)
    if (m_func->GetScriptContextInfo()->IsPRNGSeeded())
    {
        // Bit patterns used to map the random 64-bit value onto a double:
        // mExp is the IEEE-754 exponent/sign pattern of 1.0; mMant masks the
        // 52 mantissa bits. (value & mMant) | mExp yields a double in [1,2).
        const uint64 mExp = 0x3FF0000000000000;
        const uint64 mMant = 0x000FFFFFFFFFFFFF;
        IR::RegOpnd* r0 = IR::RegOpnd::New(TyUint64, m_func); // s0
        IR::RegOpnd* r1 = IR::RegOpnd::New(TyUint64, m_func); // s1
        IR::RegOpnd* r3 = IR::RegOpnd::New(TyUint64, m_func); // helper uint64 reg
        IR::RegOpnd* r4 = IR::RegOpnd::New(TyFloat64, m_func); // helper float64 reg

        // ===========================================================
        // s0 = scriptContext->GetLibrary()->GetRandSeed1();
        // s1 = scriptContext->GetLibrary()->GetRandSeed0();
        // ===========================================================
        this->InsertMove(r0,
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed1Offset(), TyUint64, instr->m_func), instr);
        this->InsertMove(r1,
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed0Offset(), TyUint64, instr->m_func), instr);

        // xorshift128+ state update; r3 is scratch for each shifted copy.
        // ===========================================================
        // s1 ^= s1 << 23;
        // ===========================================================
        this->InsertMove(r3, r1, instr);
        this->InsertShift(Js::OpCode::Shl_A, false, r3, r3, IR::IntConstOpnd::New(23, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);

        // ===========================================================
        // s1 ^= s1 >> 17;
        // ===========================================================
        this->InsertMove(r3, r1, instr);
        this->InsertShift(Js::OpCode::ShrU_A, false, r3, r3, IR::IntConstOpnd::New(17, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);

        // ===========================================================
        // s1 ^= s0;
        // ===========================================================
        this->InsertXor(r1, r1, r0, instr);

        // ===========================================================
        // s1 ^= s0 >> 26;
        // ===========================================================
        this->InsertMove(r3, r0, instr);
        this->InsertShift(Js::OpCode::ShrU_A, false, r3, r3, IR::IntConstOpnd::New(26, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);

        // Persist the updated seed pair so the next call continues the stream.
        // ===========================================================
        // scriptContext->GetLibrary()->SetRandSeed0(s0);
        // scriptContext->GetLibrary()->SetRandSeed1(s1);
        // ===========================================================
        this->InsertMove(
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed0Offset(), TyUint64, m_func), r0, instr);
        this->InsertMove(
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContextInfo()->GetLibraryAddr() + Js::JavascriptLibrary::GetRandSeed1Offset(), TyUint64, m_func), r1, instr);

        // ===========================================================
        // dst = bit_cast<float64>(((s0 + s1) & mMant) | mExp);
        // ===========================================================
        this->InsertAdd(false, r1, r1, r0, instr);
        this->InsertMove(r3, IR::IntConstOpnd::New(mMant, TyInt64, m_func, true), instr);
        this->InsertAnd(r1, r1, r3, instr);
        this->InsertMove(r3, IR::IntConstOpnd::New(mExp, TyInt64, m_func, true), instr);
        this->InsertOr(r1, r1, r3, instr);
        this->InsertMoveBitCast(dst, r1, instr);

        // Shift the [1.0, 2.0) value down into [0.0, 1.0).
        // ===================================================================
        // dst -= 1.0;
        // ===================================================================
        this->InsertMove(r4, IR::MemRefOpnd::New(m_func->GetThreadContextInfo()->GetDoubleOnePointZeroAddr(), TyFloat64, m_func, IR::AddrOpndKindDynamicDoubleRef), instr);
        this->InsertSub(false, dst, dst, r4, instr);
    }
    else
#endif
    {
        // Slow path: call the runtime's Math.random helper. The helper call
        // needs a register destination, so spill through a temp if required.
        IR::Opnd* tmpdst = dst;
        if (!dst->IsRegOpnd())
        {
            tmpdst = IR::RegOpnd::New(dst->GetType(), instr->m_func);
        }
        LoadScriptContext(instr);
        IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, tmpdst, instr->m_func);
        instr->InsertBefore(helperCallInstr);
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperDirectMath_Random);
        if (tmpdst != dst)
        {
            InsertMove(dst, tmpdst, instr);
        }
    }
    instr->Remove();
    return retInstr;
}
// Lowers CallDirect. Src2 links to an ArgOut_A_InlineSpecialized whose src1
// is the target function object and whose src2 is the real argument chain;
// that arg instruction is consumed and removed, the chain is rewired onto the
// call, any attached bailout is split out as a bail-on-implicit-call check,
// and the call is emitted as a direct call to the function object.
IR::Instr *
Lowerer::LowerCallDirect(IR::Instr * instr)
{
    // The link operand points at the ArgOut_A_InlineSpecialized def instr.
    IR::Opnd* linkOpnd = instr->UnlinkSrc2();
    StackSym *linkSym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
    IR::Instr* argInstr = linkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_InlineSpecialized);

    // Recover the callee and splice the remaining arg chain onto the call.
    IR::Opnd* funcObj = argInstr->UnlinkSrc1();
    instr->SetSrc2(argInstr->UnlinkSrc2());
    argInstr->Remove();

    if(instr->HasBailOutInfo())
    {
        // Split a post-op implicit-call bailout check around the call and
        // lower it immediately.
        IR::Instr * bailOutInstr = this->SplitBailOnImplicitCall(instr, instr->m_next, instr->m_next);
        this->LowerBailOnEqualOrNotEqual(bailOutInstr);
    }

    // Only request a return value when the call's dst is actually used.
    Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
    return this->GenerateDirectCall(instr, funcObj, (ushort)flags);
}
  10056. IR::Instr *
  10057. Lowerer::GenerateDirectCall(IR::Instr* inlineInstr, IR::Opnd* funcObj, ushort callflags)
  10058. {
  10059. int32 argCount = m_lowererMD.LowerCallArgs(inlineInstr, callflags);
  10060. m_lowererMD.LoadHelperArgument(inlineInstr, funcObj);
  10061. m_lowererMD.LowerCall(inlineInstr, (Js::ArgSlot)argCount); //to account for function object and callinfo
  10062. return inlineInstr->m_prev;
  10063. }
  10064. /*
  10065. * GenerateHelperToArrayPushFastPath
  10066. * Generates Helper Call and pushes arguments to the Push HelperCall
  10067. */
  10068. IR::Instr *
  10069. Lowerer::GenerateHelperToArrayPushFastPath(IR::Instr * instr, IR::LabelInstr * bailOutLabelHelper)
  10070. {
  10071. IR::Opnd * arrayHelperOpnd = instr->UnlinkSrc1();
  10072. IR::Opnd * elementHelperOpnd = instr->UnlinkSrc2();
  10073. IR::JnHelperMethod helperMethod;
  10074. if(elementHelperOpnd->IsInt32())
  10075. {
  10076. Assert(arrayHelperOpnd->GetValueType().IsLikelyNativeIntArray());
  10077. helperMethod = IR::HelperArray_NativeIntPush;
  10078. m_lowererMD.LoadHelperArgument(instr, elementHelperOpnd);
  10079. }
  10080. else if(elementHelperOpnd->IsFloat())
  10081. {
  10082. Assert(arrayHelperOpnd->GetValueType().IsLikelyNativeFloatArray());
  10083. helperMethod = IR::HelperArray_NativeFloatPush;
  10084. m_lowererMD.LoadDoubleHelperArgument(instr, elementHelperOpnd);
  10085. }
  10086. else
  10087. {
  10088. helperMethod = IR::HelperArray_VarPush;
  10089. m_lowererMD.LoadHelperArgument(instr, elementHelperOpnd);
  10090. }
  10091. m_lowererMD.LoadHelperArgument(instr, arrayHelperOpnd);
  10092. LoadScriptContext(instr);
  10093. return m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  10094. }
/*
 * GenerateHelperToArrayPopFastPath
 * Generates Helper Call and pushes arguments to the Pop HelperCall
 */
// Selects the pop helper from the array's value type (native pop with no dst,
// native int/float pop, or the generic var pop), loads the helper arguments,
// and converts the instruction into the helper call. For likely-native arrays
// it then emits a missing-item check on the popped value (or an unconditional
// jump when there is no dst) that routes to doneLabel/bailOutLabelHelper.
IR::Instr *
Lowerer::GenerateHelperToArrayPopFastPath(IR::Instr * instr, IR::LabelInstr * doneLabel, IR::LabelInstr * bailOutLabelHelper)
{
    IR::Opnd * arrayHelperOpnd = instr->UnlinkSrc1();
    ValueType arrayValueType = arrayHelperOpnd->GetValueType();
    IR::JnHelperMethod helperMethod;

    //Decide the helperMethod based on dst availability and nativity of the array.
    if(arrayValueType.IsLikelyNativeArray() && !instr->GetDst())
    {
        helperMethod = IR::HelperArray_NativePopWithNoDst;
    }
    else if(arrayValueType.IsLikelyNativeIntArray())
    {
        helperMethod = IR::HelperArray_NativeIntPop;
    }
    else if(arrayValueType.IsLikelyNativeFloatArray())
    {
        helperMethod = IR::HelperArray_NativeFloatPop;
    }
    else
    {
        helperMethod = IR::HelperArray_VarPop;
    }

    m_lowererMD.LoadHelperArgument(instr, arrayHelperOpnd);

    //We do not need scriptContext for HelperArray_NativePopWithNoDst call.
    if(helperMethod != IR::HelperArray_NativePopWithNoDst)
    {
        LoadScriptContext(instr);
    }

    IR::Instr * retInstr = m_lowererMD.ChangeToHelperCall(instr, helperMethod, bailOutLabelHelper);

    //We don't need missing item check for var arrays, as there it is taken care by the helper.
    if(arrayValueType.IsLikelyNativeArray())
    {
        if(retInstr->GetDst())
        {
            //Do this check only for native arrays with Dst. For Var arrays, this is taken care in the Runtime helper itself.
            InsertMissingItemCompareBranch(retInstr->GetDst(), Js::OpCode::BrNeq_A, doneLabel, bailOutLabelHelper);
        }
        else
        {
            //We need unconditional jump to doneLabel, if there is no dst in Pop instr.
            InsertBranch(Js::OpCode::Br, true, doneLabel, bailOutLabelHelper);
        }
    }
    return retInstr;
}
// Lowers a BrTrue_A/BrFalse_A that may carry bailout info. An attached
// implicit-call bailout is split off and lowered before the branch; if the
// instruction also carries shared (aux) debugger bailout bits, a
// BailForDebugger instruction reusing the same BailOutInfo is inserted and
// lowered here as well (see the inline example below). Finally, the
// conditional branch itself is lowered by the MD lowerer.
IR::Instr *
Lowerer::LowerCondBranchCheckBailOut(IR::BranchInstr * branchInstr, IR::Instr * helperCall, bool isHelper)
{
    Assert(branchInstr->m_opcode == Js::OpCode::BrTrue_A || branchInstr->m_opcode == Js::OpCode::BrFalse_A);
    if (branchInstr->HasBailOutInfo())
    {
#ifdef ENABLE_SCRIPT_DEBUGGING
        IR::BailOutKind debuggerBailOutKind = IR::BailOutInvalid;
        if (branchInstr->HasAuxBailOut())
        {
            // We have shared debugger bailout. For branches we lower it here, not in SplitBailForDebugger.
            // See SplitBailForDebugger for details.
            AssertMsg(!(branchInstr->GetBailOutKind() & IR::BailOutForDebuggerBits), "There should be no debugger bits in main bailout kind.");
            debuggerBailOutKind = branchInstr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits;
            AssertMsg((debuggerBailOutKind & ~(IR::BailOutIgnoreException | IR::BailOutForceByFlag)) == 0, "Only IR::BailOutIgnoreException|ForceByFlag supported here.");
        }
#endif
        // Split the implicit-call bailout off the branch and lower it now.
        IR::Instr * bailOutInstr = this->SplitBailOnImplicitCall(branchInstr, helperCall, branchInstr);
        IR::Instr* prevInstr = this->LowerBailOnEqualOrNotEqual(bailOutInstr, branchInstr, nullptr, nullptr, isHelper);
#ifdef ENABLE_SCRIPT_DEBUGGING
        if (debuggerBailOutKind != IR::BailOutInvalid)
        {
            // Note that by this time implicit calls bailout is already lowered.
            // What we do here is use same bailout info and lower debugger bailout which would be shared bailout.
            BailOutInfo* bailOutInfo = bailOutInstr->GetBailOutInfo();
            IR::BailOutInstr* debuggerBailoutInstr = IR::BailOutInstr::New(
                Js::OpCode::BailForDebugger, debuggerBailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
            prevInstr->InsertAfter(debuggerBailoutInstr);
            // The result of that is:
            // original helper op_* instr, then debugger bailout, then implicit calls bailout/etc with the branch instr.
            // Example:
            // s35(eax).i32    =  CALL Op_GreaterEqual.u32                  # -- original op_* helper
            // s34.i32         =  MOV s35(eax).i32                          #
            //                    BailForDebugger                           # Bailout: #0042 (BailOutIgnoreException) -- the debugger bailout
            //                    CMP [0x0003BDE0].i8, 1 (0x1).i8           # -- implicit calls check
            //                    JEQ $L10                                  #
            //$L11: [helper]                                                #
            //                    CALL SaveAllRegistersAndBranchBailOut.u32 # Bailout: #0042 (BailOutOnImplicitCalls)
            //                    JMP $L5                                   #
            //$L10: [helper]                                                #
            //                    BrFalse_A $L3, s34.i32                    #0034 -- The BrTrue/BrFalse branch (branch instr)
            //$L6: [helper]                                                 #0042
            this->LowerBailForDebugger(debuggerBailoutInstr, isHelper);
            // After lowering this we will have a check which on bailout condition will JMP to $L11.
        }
#else
        // Keep prevInstr referenced when debugger support is compiled out.
        (prevInstr);
#endif
    }
    return m_lowererMD.LowerCondBranch(branchInstr);
}
// Produces a SymOpnd through which the caller's CallInfo can be read.
// For coroutines (generators), the arguments are not on the stack: the
// callinfo is loaded off the generator object (prm1) and copied into a fresh
// stack sym. Otherwise the callinfo is simply the second implicit parameter
// slot on the stack frame.
IR::SymOpnd *
Lowerer::LoadCallInfo(IR::Instr * instrInsert)
{
    IR::SymOpnd * srcOpnd;
    Func * func = instrInsert->m_func;

    if (func->GetJITFunctionBody()->IsCoroutine())
    {
        // Generator function arguments and ArgumentsInfo are not on the stack. Instead they
        // are accessed off the generator object (which is prm1).
        IR::Instr *genLoadInstr = LoadGeneratorObject(instrInsert);
        IR::RegOpnd * generatorRegOpnd = genLoadInstr->GetDst()->AsRegOpnd();

        // Load generator->callInfo into a register, then stash it in a stack
        // sym so the returned operand has a stable home.
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorRegOpnd, Js::JavascriptGenerator::GetCallInfoOffset(), TyMachPtr, func);
        IR::Instr * instr = Lowerer::InsertMove(IR::RegOpnd::New(TyMachPtr, func), indirOpnd, instrInsert);
        StackSym * callInfoSym = StackSym::New(TyMachReg, func);
        IR::SymOpnd * callInfoSymOpnd = IR::SymOpnd::New(callInfoSym, TyMachReg, func);
        Lowerer::InsertMove(callInfoSymOpnd, instr->GetDst(), instrInsert);
        srcOpnd = IR::SymOpnd::New(callInfoSym, TyMachReg, func);
    }
    else
    {
        // Otherwise callInfo is always the "second" argument.
        // The stack looks like this:
        //
        //       script param N
        //       ...
        //       script param 1
        //       callinfo
        //       function object
        //       return addr
        // FP -> FP chain
        StackSym * srcSym = LowererMD::GetImplicitParamSlotSym(1, func);
        srcOpnd = IR::SymOpnd::New(srcSym, TyMachReg, func);
    }
    return srcOpnd;
}
// Lowers the bail-on-not-stack-args check used by the inlined apply fast
// path. If stack-args optimization is off, rejits with InlineApplyDisabled.
// For non-inlinee functions, loads the actual argument count with a fast
// stack-args LdLen and bails out unless it is below
// Js::InlineeCallInfo::MaxInlineeArgoutCount; inlinees can never exceed that
// bound, so the instruction is just removed.
IR::Instr *
Lowerer::LowerBailOnNotStackArgs(IR::Instr * instr)
{
    if (!this->m_func->GetHasStackArgs())
    {
        // Without the stack-args optimization this path cannot work; rejit.
        throw Js::RejitException(RejitReason::InlineApplyDisabled);
    }

    IR::Instr * prevInstr = instr->m_prev;

    // Bail out test

    // Label to skip Bailout and continue
    IR::LabelInstr * continueLabelInstr;
    IR::Instr *instrNext = instr->m_next;
    if (instrNext->IsLabelInstr())
    {
        // Reuse an existing label immediately after the bailout instr.
        continueLabelInstr = instrNext->AsLabelInstr();
    }
    else
    {
        continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
        instr->InsertAfter(continueLabelInstr);
    }

    if (!instr->m_func->IsInlinee())
    {
        //BailOut if the number of actuals (except "this" argument) is greater than or equal to 15.
        IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyUint32, instr->m_func);
        const IR::AutoReuseOpnd autoReuseldLenDstOpnd(ldLenDstOpnd, instr->m_func);
        IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd, instr->m_func);
        ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); //LdLen_A works only on stack arguments
        instr->InsertBefore(ldLen);
        this->GenerateFastRealStackArgumentsLdLen(ldLen);

        // count < MaxInlineeArgoutCount  ->  skip the bailout.
        this->InsertCompareBranch(ldLenDstOpnd, IR::IntConstOpnd::New(Js::InlineeCallInfo::MaxInlineeArgoutCount, TyUint32, m_func, true), Js::OpCode::BrLt_A, true, continueLabelInstr, instr);
        this->GenerateBailOut(instr, nullptr, nullptr);
    }
    else
    {
        //For Inlined functions, we are sure actuals can't exceed Js::InlineeCallInfo::MaxInlineeArgoutCount (15).
        //No need to bail out.
        instr->Remove();
    }
    return prevInstr;
}
// Lowers BailOnNotSpreadable for the inline spread fast path. When the value
// type alone already disqualifies the fast path (not a likely simple array,
// an optimized array form, an object-with-array, a type that would fail
// GenerateArrayTest, or we are in an inlinee), throws a rejit exception
// instead of emitting a bailout. Otherwise emits only the checks the value
// type cannot guarantee: an array test, a has-no-missing-values flag test,
// and a length bound check — each branching to the bailout, with the
// fall-through path skipping it.
IR::Instr *
Lowerer::LowerBailOnNotSpreadable(IR::Instr *instr)
{
    // We only avoid bailing out / throwing a rejit exception when the array operand is a simple, non-optimized, non-object array.
    IR::Instr * prevInstr = instr->m_prev;
    Func *func = instr->m_func;

    IR::Opnd *arraySrcOpnd = instr->UnlinkSrc1();
    IR::RegOpnd *arrayOpnd = GetRegOpnd(arraySrcOpnd, instr, func, TyMachPtr);

    const ValueType baseValueType(arrayOpnd->GetValueType());

    // Check if we can just throw a rejit exception based on valuetype alone instead of bailing out.
    if (!baseValueType.IsLikelyArray()
        || baseValueType.IsLikelyAnyOptimizedArray()
        || (baseValueType.IsLikelyObject() && (baseValueType.GetObjectType() == ObjectType::ObjectWithArray))

        // Validate that GenerateArrayTest will not fail.
        || !(baseValueType.IsUninitialized() || baseValueType.HasBeenObject())
        || m_func->IsInlinee())
    {
        throw Js::RejitException(RejitReason::InlineSpreadDisabled);
    }

    // Past this point, we will need to use a bailout.
    IR::LabelInstr *bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true /* isOpHelper */);

    // See if we can skip various array checks on value type alone
    if (!baseValueType.IsArray())
    {
        GenerateArrayTest(arrayOpnd, bailOutLabel, bailOutLabel, instr, false);
    }

    if (!(baseValueType.IsArray() && baseValueType.HasNoMissingValues()))
    {
        // Bail out when the HasNoMissingValues flag is clear.
        InsertTestBranch(
            IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, func),
            IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, func, true),
            Js::OpCode::BrEq_A,
            bailOutLabel,
            instr);
    }

    // Bail out when length exceeds the max inlinee arg-out count.
    IR::IndirOpnd *arrayLenPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, func);
    InsertCompareBranch(arrayLenPtrOpnd, IR::IntConstOpnd::New(Js::InlineeCallInfo::MaxInlineeArgoutCount - 1, TyUint8, func), Js::OpCode::BrGt_A, true, bailOutLabel, instr);

    // Fast path falls through past the bailout block.
    IR::LabelInstr *skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, skipBailOutLabel, instr);

    instr->InsertBefore(bailOutLabel);
    instr->InsertAfter(skipBailOutLabel);
    GenerateBailOut(instr);
    return prevInstr;
}
  10316. IR::Instr *
  10317. Lowerer::LowerBailOnNotPolymorphicInlinee(IR::Instr * instr)
  10318. {
  10319. Assert(instr->HasBailOutInfo() && (instr->GetBailOutKind() == IR::BailOutOnFailedPolymorphicInlineTypeCheck || instr->GetBailOutKind() == IR::BailOutOnPolymorphicInlineFunction));
  10320. IR::Instr* instrPrev = instr->m_prev;
  10321. this->GenerateBailOut(instr, nullptr, nullptr);
  10322. return instrPrev;
  10323. }
// Emits the compare-and-branch that jumps past a bailout when the equality
// (or inequality, per onEqual) check succeeds, reusing or creating the
// continue label after instr. For BailOutInjected (a test-only forced bailout
// comparing 0 == 0), src1 is first hoisted into a register so a valid
// compare can be generated. When not already inside a helper block, a helper
// label is inserted in front of the bailout instruction.
void
Lowerer::LowerBailoutCheckAndLabel(IR::Instr *instr, bool onEqual, bool isHelper)
{
    // Label to skip Bailout and continue
    IR::LabelInstr * continueLabelInstr;
    IR::Instr *instrNext = instr->m_next;
    if (instrNext->IsLabelInstr())
    {
        continueLabelInstr = instrNext->AsLabelInstr();
    }
    else
    {
        continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
        instr->InsertAfter(continueLabelInstr);
    }

    if(instr->GetBailOutKind() == IR::BailOutInjected)
    {
        // BailOnEqual 0, 0
        Assert(onEqual);
        Assert(instr->GetSrc1()->IsEqual(instr->GetSrc2()));
        Assert(instr->GetSrc1()->AsIntConstOpnd()->GetValue() == 0);

        // The operands cannot be equal when generating a compare (assert) but since this is for testing purposes, hoist a src.
        // Ideally, we would just create a BailOut instruction that generates a guaranteed bailout, but there seem to be issues
        // with doing this in a non-helper path. So finally, it would generate:
        //     xor s0, s0
        //     test s0, s0
        //     jnz $continue
        //   $bailout:
        //     // bailout
        //   $continue:
        instr->HoistSrc1(LowererMD::GetLoadOp(instr->GetSrc1()->GetType()));
    }

    // Branch to the continue label when the bailout condition does NOT hold.
    InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(),
        onEqual ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, continueLabelInstr, instr);

    if (!isHelper)
    {
        IR::LabelInstr * helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        instr->InsertBefore(helperLabelInstr);
    }
}
// Lowers BailOnEqual/BailOnNotEqual. First emits the skip-bailout check and
// helper label via LowerBailoutCheckAndLabel. Then, for a post-op
// BailOutOnImplicitCalls attached to a profiled field access (propSymOpnd),
// the bailout path updates the profiled FldInfo flags when the implicit call
// was an accessor, so re-jitted code knows the field may invoke getters or
// setters. Finally generates the bailout itself.
IR::Instr *
Lowerer::LowerBailOnEqualOrNotEqual(IR::Instr * instr,
    IR::BranchInstr *branchInstr,          // = nullptr
    IR::LabelInstr *labelBailOut,          // = nullptr
    IR::PropertySymOpnd * propSymOpnd,     // = nullptr
    bool isHelper)                         // = false
{
    IR::Instr * prevInstr = instr->m_prev;

    // Bail out test
    bool onEqual = instr->m_opcode == Js::OpCode::BailOnEqual;

    LowerBailoutCheckAndLabel(instr, onEqual, isHelper);

    // BailOutOnImplicitCalls is a post-op bailout. Since we look at the profile info for LdFld/StFld to decide whether the instruction may or may not call an accessor,
    // we need to update this profile information on the bailout path for BailOutOnImplicitCalls if the implicit call was an accessor call.
    if(propSymOpnd && ((instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCalls) && (propSymOpnd->m_inlineCacheIndex != -1) &&
        instr->m_func->HasProfileInfo())
    {
        //  result = AND implCallFlags, ~ImplicitCall_None
        //           TST result, ImplicitCall_Accessor
        //           JEQ $bail
        //           OR profiledFlags, ( FldInfo_FromAccessor | FldInfo_Polymorphic )
        //  $bail
        IR::Opnd * implicitCallFlags = GetImplicitCallFlagsOpnd();
        IR::Opnd * accessorImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_Accessor & ~Js::ImplicitCall_None, GetImplicitCallFlagsType(), instr->m_func, true);
        IR::Opnd * maskNoImplicitCall = IR::IntConstOpnd::New((Js::ImplicitCallFlags)~Js::ImplicitCall_None, GetImplicitCallFlagsType(), instr->m_func, true);
        IR::Opnd * fldInfoAccessor = IR::IntConstOpnd::New(Js::FldInfo_FromAccessor | Js::FldInfo_Polymorphic, GetFldInfoFlagsType(), instr->m_func, true);
        IR::LabelInstr * label = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);

        // Skip the profile update unless the accessor bit is set.
        IR::Instr * andInstr = InsertAnd(IR::RegOpnd::New(GetImplicitCallFlagsType(), instr->m_func), implicitCallFlags, maskNoImplicitCall, instr);
        InsertTestBranch(andInstr->GetDst(), accessorImplicitCall, Js::OpCode::BrEq_A, label, instr);

        // OR the accessor/polymorphic bits into the profiled FldInfo flags.
        intptr_t infoAddr = instr->m_func->GetReadOnlyProfileInfo()->GetFldInfoAddr(propSymOpnd->m_inlineCacheIndex);

        IR::Opnd * profiledFlags = IR::MemRefOpnd::New(infoAddr + Js::FldInfo::GetOffsetOfFlags(), TyInt8, instr->m_func);

        InsertOr(profiledFlags, profiledFlags, fldInfoAccessor, instr);
        instr->InsertBefore(label);
    }

    this->GenerateBailOut(instr, branchInstr, labelBailOut);
    return prevInstr;
}
  10400. void Lowerer::LowerBailOnNegative(IR::Instr *const instr)
  10401. {
  10402. Assert(instr);
  10403. Assert(instr->m_opcode == Js::OpCode::BailOnNegative);
  10404. Assert(instr->HasBailOutInfo());
  10405. Assert(!instr->GetDst());
  10406. Assert(instr->GetSrc1());
  10407. Assert(instr->GetSrc1()->GetType() == TyInt32 || instr->GetSrc1()->GetType() == TyUint32);
  10408. Assert(!instr->GetSrc2());
  10409. IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(false);
  10410. LowerOneBailOutKind(instr, instr->GetBailOutKind(), false);
  10411. Assert(!instr->HasBailOutInfo());
  10412. IR::Instr *insertBeforeInstr = instr->m_next;
  10413. Func *const func = instr->m_func;
  10414. // test src, src
  10415. // jns $skipBailOut
  10416. InsertCompareBranch(
  10417. instr->UnlinkSrc1(),
  10418. IR::IntConstOpnd::New(0, TyInt32, func, true),
  10419. Js::OpCode::BrGe_A,
  10420. skipBailOutLabel,
  10421. insertBeforeInstr);
  10422. instr->Remove();
  10423. }
  10424. IR::Instr *
  10425. Lowerer::LowerBailOnNotObject(IR::Instr *instr,
  10426. IR::BranchInstr *branchInstr /* = nullptr */,
  10427. IR::LabelInstr *labelBailOut /* = nullptr */)
  10428. {
  10429. IR::Instr *prevInstr = instr->m_prev;
  10430. IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label,
  10431. m_func);
  10432. instr->InsertAfter(continueLabelInstr);
  10433. this->m_lowererMD.GenerateObjectTest(instr->UnlinkSrc1(),
  10434. instr,
  10435. continueLabelInstr,
  10436. /* fContinueLabel = */ true);
  10437. this->GenerateBailOut(instr, branchInstr, labelBailOut);
  10438. return prevInstr;
  10439. }
  10440. IR::Instr *
  10441. Lowerer::LowerBailOnTrue(IR::Instr* instr, IR::LabelInstr* labelBailOut /*nullptr*/)
  10442. {
  10443. IR::Instr* instrPrev = instr->m_prev;
  10444. IR::LabelInstr* continueLabel = instr->GetOrCreateContinueLabel();
  10445. IR::RegOpnd * regSrc1 = IR::RegOpnd::New(instr->GetSrc1()->GetType(), this->m_func);
  10446. InsertMove(regSrc1, instr->UnlinkSrc1(), instr);
  10447. InsertTestBranch(regSrc1, regSrc1, Js::OpCode::BrEq_A, continueLabel, instr);
  10448. GenerateBailOut(instr, nullptr, labelBailOut);
  10449. return instrPrev;
  10450. }
  10451. IR::Instr *
  10452. Lowerer::LowerBailOnNotBuiltIn(IR::Instr *instr,
  10453. IR::BranchInstr *branchInstr /* = nullptr */,
  10454. IR::LabelInstr *labelBailOut /* = nullptr */)
  10455. {
  10456. Assert(instr->GetSrc2()->IsIntConstOpnd());
  10457. IR::Instr *prevInstr = instr->m_prev;
  10458. intptr_t builtInFuncs = m_func->GetScriptContextInfo()->GetBuiltinFunctionsBaseAddr();
  10459. Js::BuiltinFunction builtInIndex = instr->UnlinkSrc2()->AsIntConstOpnd()->AsInt32();
  10460. IR::Opnd *builtIn = IR::MemRefOpnd::New((void*)(builtInFuncs + builtInIndex * MachPtr), TyMachReg, instr->m_func);
  10461. #if TESTBUILTINFORNULL
  10462. IR::LabelInstr * continueAfterTestLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
  10463. InsertTestBranch(builtIn, builtIn, Js::OpCode::BrNeq_A, continueAfterTestLabel, instr);
  10464. this->m_lowererMD.GenerateDebugBreak(instr);
  10465. instr->InsertBefore(continueAfterTestLabel);
  10466. #endif
  10467. IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
  10468. instr->InsertAfter(continueLabel);
  10469. InsertCompareBranch(instr->UnlinkSrc1(), builtIn, Js::OpCode::BrEq_A, continueLabel, instr);
  10470. GenerateBailOut(instr, branchInstr, labelBailOut);
  10471. return prevInstr;
  10472. }
  10473. #ifdef ENABLE_SCRIPT_DEBUGGING
  10474. IR::Instr *
  10475. Lowerer::LowerBailForDebugger(IR::Instr* instr, bool isInsideHelper /* = false */)
  10476. {
  10477. IR::Instr * prevInstr = instr->m_prev;
  10478. IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  10479. AssertMsg(bailOutKind, "bailOutKind should not be zero at this time.");
  10480. AssertMsg(!(bailOutKind & IR::BailOutExplicit) || bailOutKind == IR::BailOutExplicit,
  10481. "BailOutExplicit cannot be combined with any other bailout flags.");
  10482. IR::LabelInstr* explicitBailOutLabel = nullptr;
  10483. if (!(bailOutKind & IR::BailOutExplicit))
  10484. {
  10485. intptr_t flags = m_func->GetScriptContextInfo()->GetDebuggingFlagsAddr();
  10486. // Check 1 (do we need to bail out?)
  10487. // JXX bailoutLabel
  10488. // Check 2 (do we need to bail out?)
  10489. // JXX bailoutLabel
  10490. // ...
  10491. // JMP continueLabel
  10492. // bailoutDocumentLabel:
  10493. // (determine if document boundary reached - if not, JMP to continueLabel)
  10494. // NOTE: THIS BLOCK IS CONDITIONALLY GENERATED BASED ON doGenerateBailOutDocumentBlock
  10495. // bailoutLabel:
  10496. // bail out
  10497. // continueLabel:
  10498. // ...
  10499. IR::LabelInstr* bailOutDocumentLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ true);
  10500. instr->InsertBefore(bailOutDocumentLabel);
  10501. IR::LabelInstr* bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ true);
  10502. instr->InsertBefore(bailOutLabel);
  10503. IR::LabelInstr* continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ isInsideHelper);
  10504. instr->InsertAfter(continueLabel);
  10505. IR::BranchInstr* continueBranchInstr = this->InsertBranch(Js::OpCode::Br, continueLabel, bailOutDocumentLabel); // JMP continueLabel.
  10506. bool doGenerateBailOutDocumentBlock = false;
  10507. const IR::BailOutKind c_forceAndIgnoreEx = IR::BailOutForceByFlag | IR::BailOutIgnoreException;
  10508. if ((bailOutKind & c_forceAndIgnoreEx) == c_forceAndIgnoreEx)
  10509. {
  10510. // It's faster to check these together in 1 check rather than 2 separate checks at run time.
  10511. // CMP [&(flags->m_forceInterpreter, flags->m_isIgnoreException)], 0
  10512. // BNE bailout
  10513. IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetForceInterpreterOffset(), TyInt16, m_func);
  10514. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt16, m_func, /*dontEncode*/ true);
  10515. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  10516. bailOutKind ^= c_forceAndIgnoreEx;
  10517. }
  10518. else
  10519. {
  10520. if (bailOutKind & IR::BailOutForceByFlag)
  10521. {
  10522. // CMP [&flags->m_forceInterpreter], 0
  10523. // BNE bailout
  10524. IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetForceInterpreterOffset(), TyInt8, m_func);
  10525. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func, /*dontEncode*/ true);
  10526. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  10527. bailOutKind ^= IR::BailOutForceByFlag;
  10528. }
  10529. if (bailOutKind & IR::BailOutIgnoreException)
  10530. {
  10531. // CMP [&flags->m_byteCodeOffsetAfterIgnoreException], DebuggingFlags::InvalidByteCodeOffset
  10532. // BNE bailout
  10533. IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetByteCodeOffsetAfterIgnoreExceptionOffset(), TyInt32, m_func);
  10534. IR::Opnd* opnd2 = IR::IntConstOpnd::New(DebuggingFlags::InvalidByteCodeOffset, TyInt32, m_func, /*dontEncode*/ true);
  10535. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  10536. bailOutKind ^= IR::BailOutIgnoreException;
  10537. }
  10538. }
  10539. if (bailOutKind & IR::BailOutBreakPointInFunction)
  10540. {
  10541. // CMP [&functionBody->m_sourceInfo.m_probeCount], 0
  10542. // BNE bailout
  10543. IR::Opnd* opnd1 = IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetProbeCountAddr(), TyInt32, m_func);
  10544. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt32, m_func, /*dontEncode*/ true);
  10545. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  10546. bailOutKind ^= IR::BailOutBreakPointInFunction;
  10547. }
  10548. // on method entry
  10549. if(bailOutKind & IR::BailOutStep)
  10550. {
  10551. // TEST STEP_BAILOUT, [&stepController->StepType]
  10552. // BNE BailoutLabel
  10553. IR::Opnd* opnd1 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugStepTypeAddr(), TyInt8, m_func);
  10554. IR::Opnd* opnd2 = IR::IntConstOpnd::New(Js::STEP_BAILOUT, TyInt8, this->m_func, /*dontEncode*/ true);
  10555. InsertTestBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  10556. // CMP STEP_DOCUMENT, [&stepController->StepType]
  10557. // BEQ BailoutDocumentLabel
  10558. opnd1 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugStepTypeAddr(), TyInt8, m_func);
  10559. opnd2 = IR::IntConstOpnd::New(Js::STEP_DOCUMENT, TyInt8, this->m_func, /*dontEncode*/ true);
  10560. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrEq_A, /*isUnsigned*/ true, bailOutDocumentLabel, continueBranchInstr);
  10561. doGenerateBailOutDocumentBlock = true;
  10562. bailOutKind ^= IR::BailOutStep;
  10563. }
  10564. // on method exit
  10565. if (bailOutKind & IR::BailOutStackFrameBase)
  10566. {
  10567. // CMP EffectiveFrameBase, [&stepController->frameAddrWhenSet]
  10568. // BA bailoutLabel
  10569. RegNum effectiveFrameBaseReg;
  10570. #ifdef _M_X64
  10571. effectiveFrameBaseReg = m_lowererMD.GetRegStackPointer();
  10572. #else
  10573. effectiveFrameBaseReg = m_lowererMD.GetRegFramePointer();
  10574. #endif
  10575. IR::Opnd* opnd1 = IR::RegOpnd::New(nullptr, effectiveFrameBaseReg, TyMachReg, m_func);
  10576. IR::Opnd* opnd2 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugFrameAddressAddr(), TyMachReg, m_func);
  10577. this->InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrGt_A, /*isUnsigned*/ true, bailOutLabel, continueBranchInstr);
  10578. // CMP STEP_DOCUMENT, [&stepController->StepType]
  10579. // BEQ BailoutDocumentLabel
  10580. opnd1 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugStepTypeAddr(), TyInt8, m_func);
  10581. opnd2 = IR::IntConstOpnd::New(Js::STEP_DOCUMENT, TyInt8, this->m_func, /*dontEncode*/ true);
  10582. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrEq_A, /*isUnsigned*/ true, bailOutDocumentLabel, continueBranchInstr);
  10583. doGenerateBailOutDocumentBlock = true;
  10584. bailOutKind ^= IR::BailOutStackFrameBase;
  10585. }
  10586. if (bailOutKind & IR::BailOutLocalValueChanged)
  10587. {
  10588. int32 hasLocalVarChangedOffset = m_func->GetHasLocalVarChangedOffset();
  10589. if (hasLocalVarChangedOffset != Js::Constants::InvalidOffset)
  10590. {
  10591. // CMP [EBP + hasLocalVarChangedStackOffset], 0
  10592. // BNE bailout
  10593. StackSym* sym = StackSym::New(TyInt8, m_func);
  10594. sym->m_offset = hasLocalVarChangedOffset;
  10595. sym->m_allocated = true;
  10596. IR::Opnd* opnd1 = IR::SymOpnd::New(sym, TyInt8, m_func);
  10597. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func);
  10598. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  10599. }
  10600. bailOutKind ^= IR::BailOutLocalValueChanged;
  10601. }
  10602. if (doGenerateBailOutDocumentBlock)
  10603. {
  10604. // GENERATE the BailoutDocumentLabel
  10605. // bailOutDocumentLabel:
  10606. // CMP CurrentScriptId, [&stepController->ScriptIdWhenSet]
  10607. // BEQ ContinueLabel
  10608. // bailOutLabel: // (fallthrough bailOutLabel)
  10609. IR::Opnd* opnd1 = IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetScriptIdAddr(), TyInt32, m_func);
  10610. IR::Opnd* opnd2 = IR::MemRefOpnd::New(m_func->GetScriptContextInfo()->GetDebugScriptIdWhenSetAddr(), TyInt32, m_func);
  10611. IR::RegOpnd* reg1 = IR::RegOpnd::New(TyInt32, m_func);
  10612. InsertMove(reg1, opnd2, bailOutLabel);
  10613. InsertCompareBranch(opnd1, reg1, Js::OpCode::BrEq_A, /*isUnsigned*/ true, continueLabel, bailOutLabel);
  10614. }
  10615. AssertMsg(bailOutKind == (IR::BailOutKind)0, "Some of the bits in BailOutKind were not processed!");
  10616. // Note: at this time the 'instr' is in between bailoutLabel and continueLabel.
  10617. }
  10618. else
  10619. {
  10620. // For explicit/unconditional bailout use label which is not a helper, otherwise we would get a helper in main code path
  10621. // which breaks helper label consistency (you can only get to helper from a conditional branch in main code), see DbCheckPostLower.
  10622. explicitBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  10623. }
  10624. this->GenerateBailOut(instr, nullptr, explicitBailOutLabel);
  10625. return prevInstr;
  10626. }
  10627. #endif
  10628. IR::Instr*
  10629. Lowerer::LowerBailOnException(IR::Instr * instr)
  10630. {
  10631. Assert(instr->HasBailOutInfo());
  10632. IR::Instr * instrPrev = instr->m_prev;
  10633. this->GenerateBailOut(instr, nullptr, nullptr);
  10634. return instrPrev;
  10635. }
  10636. IR::Instr*
  10637. Lowerer::LowerBailOnEarlyExit(IR::Instr * instr)
  10638. {
  10639. Assert(instr->HasBailOutInfo());
  10640. IR::Instr * instrPrev = instr->m_prev;
  10641. this->GenerateBailOut(instr, nullptr, nullptr);
  10642. return instrPrev;
  10643. }
// Generate BailOut Lowerer Instruction if the value is INT_MIN.
// If it's not INT_MIN, we continue without bailout.
IR::Instr *
Lowerer::LowerBailOnIntMin(IR::Instr *instr, IR::BranchInstr *branchInstr /* = nullptr */, IR::LabelInstr *labelBailOut /* = nullptr */)
{
    Assert(instr);
    Assert(instr->GetSrc1());

    // Label following the check; execution continues here when no bailout is taken.
    IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    instr->InsertAfter(continueLabelInstr);

    if(!instr->HasBailOutInfo())
    {
        // No bailout info attached: the check was elided upstream, so the instruction is dead.
        instr->Remove();
    }
    else
    {
        Assert(instr->GetBailOutKind() == IR::BailOnIntMin);
        // Note: src1 must be int32 at this point.
        if (instr->GetSrc1()->IsIntConstOpnd())
        {
            // For consts we can check the value at JIT time. Note: without this check we'll have to legalize the CMP instr.
            IR::IntConstOpnd* intConst = instr->UnlinkSrc1()->AsIntConstOpnd();
            if (intConst->GetValue() == INT_MIN)
            {
                // Constant is known to be INT_MIN: bail out unconditionally.
                this->GenerateBailOut(instr, branchInstr, labelBailOut);
                intConst->Free(instr->m_func);
            }
            else
            {
                // Constant can never be INT_MIN: neither check nor bailout is needed.
                instr->Remove();
            }
        }
        else
        {
            // CMP src1, INT_MIN
            // BNE $continue        (fall through into the bailout when equal)
            InsertCompareBranch(instr->UnlinkSrc1(), IR::IntConstOpnd::New(INT_MIN, TyInt32, this->m_func), Js::OpCode::BrNeq_A, continueLabelInstr, instr);
            this->GenerateBailOut(instr, branchInstr, labelBailOut);
        }
    }
    return continueLabelInstr;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerBailOnNotString
///     Generate BailOut Lowerer Instruction if not a String
///
///----------------------------------------------------------------------------
void Lowerer::LowerBailOnNotString(IR::Instr *instr)
{
    if (!instr->GetSrc1()->GetValueType().IsString())
    {
        /*Creating a MOV instruction*/
        // Keep the original move semantics (dst <- src1) on a separate instruction
        // so 'instr' itself can become the bailout.
        IR::Instr * movInstr = IR::Instr::New(instr->m_opcode, instr->UnlinkDst(), instr->UnlinkSrc1(), instr->m_func);
        instr->InsertBefore(movInstr);

        // continueLabel: taken when the string test passes; helperLabel: the bailout path.
        IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::LabelInstr *helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        instr->InsertAfter(continueLabelInstr);

        // Pass nullptr when src1 is not a reg opnd — presumably GenerateStringTest
        // tolerates a null srcReg; TODO confirm against its definition.
        IR::RegOpnd *srcReg = movInstr->GetSrc1()->IsRegOpnd() ? movInstr->GetSrc1()->AsRegOpnd() : nullptr;
        this->GenerateStringTest(srcReg, instr, helperLabelInstr, continueLabelInstr);
        this->GenerateBailOut(instr, nullptr, helperLabelInstr);
    }
    else
    {
        // Statically known to be a string: the runtime check and bailout are unnecessary.
        instr->ClearBailOutInfo();
    }
}
// Lowers exactly one bail-out kind out of 'instr': a separate BailOut instruction for
// 'bailOutKindToLower' is emitted after 'instr', while any remaining kinds stay on
// 'instr' (sharing a common bailout point when necessary). The caller is expected to
// generate the code that decides whether to branch into the new bailout.
// Note: 'isInHelperBlock' is currently not referenced in this body.
void Lowerer::LowerOneBailOutKind(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKindToLower,
    const bool isInHelperBlock,
    const bool preserveBailOutKindInInstr)
{
    Assert(instr);
    Assert(bailOutKindToLower);
    // A kind from the "bits" region must be a single bit (power of two).
    Assert(!(bailOutKindToLower & IR::BailOutKindBits) || !(bailOutKindToLower & bailOutKindToLower - 1u));

    Func *const func = instr->m_func;

    // Split bailouts other than the one being lowered here
    BailOutInfo *const bailOutInfo = instr->GetBailOutInfo();
    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    Assert(
        bailOutKindToLower & IR::BailOutKindBits
            ? bailOutKind & bailOutKindToLower
            : (bailOutKind & ~IR::BailOutKindBits) == bailOutKindToLower);
    if(!preserveBailOutKindInInstr)
    {
        bailOutKind -= bailOutKindToLower;
    }
    if(bailOutKind)
    {
        // Other bailout kinds remain on 'instr'.
        if(bailOutInfo->bailOutInstr == instr)
        {
            // Create a shared bailout point for the split bailout checks
            IR::Instr *const sharedBail = instr->ShareBailOut();
            Assert(sharedBail->GetBailOutInfo() == bailOutInfo);
            GenerateBailOut(sharedBail);
        }
        instr->SetBailOutKind(bailOutKind);
    }
    else
    {
        // No kinds left on 'instr'; detach its bailout info — ownership effectively
        // moves to the new BailOut instruction created below.
        instr->UnlinkBailOutInfo();
        if(bailOutInfo->bailOutInstr == instr)
        {
            bailOutInfo->bailOutInstr = nullptr;
        }
    }

    IR::Instr *const insertBeforeInstr = instr->m_next;

    // (Bail out with the requested bail out kind)
    IR::BailOutInstr *const bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOut, bailOutKindToLower, bailOutInfo, func);
    bailOutInstr->SetByteCodeOffset(instr);
    insertBeforeInstr->InsertBefore(bailOutInstr);
    GenerateBailOut(bailOutInstr);

    // The caller is expected to generate code to decide whether to bail out
}
// Splits a BailOnNotArray instruction that additionally carries the missing-value check
// into two bailout instructions: 'instr' keeps only the array-type check, and a new
// instruction is appended for IR::BailOutOnMissingValue. Both are returned through the
// ref parameters; *bailOnMissingValueRef stays nullptr when no split is needed.
void Lowerer::SplitBailOnNotArray(
    IR::Instr *const instr,
    IR::Instr * *const bailOnNotArrayRef,
    IR::Instr * *const bailOnMissingValueRef)
{
    Assert(instr);
    Assert(!instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsRegOpnd());
    Assert(!instr->GetSrc2());
    Assert(bailOnNotArrayRef);
    Assert(bailOnMissingValueRef);

    IR::Instr *&bailOnNotArray = *bailOnNotArrayRef;
    IR::Instr *&bailOnMissingValue = *bailOnMissingValueRef;
    bailOnNotArray = instr;
    bailOnMissingValue = nullptr;

    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    if(bailOutKind == IR::BailOutOnNotArray ||
        bailOutKind == IR::BailOutOnNotNativeArray)
    {
        // Only the array-type check is present; nothing to split.
        return;
    }

    // Split array checks
    BailOutInfo *const bailOutInfo = instr->GetBailOutInfo();
    if(bailOutInfo->bailOutInstr == instr)
    {
        // Create a shared bailout point for the split bailout checks
        IR::Instr *const sharedBail = instr->ShareBailOut();
        Assert(sharedBail->GetBailOutInfo() == bailOutInfo);
        LowerBailTarget(sharedBail);
    }

    // Strip the missing-value bit; what remains must be one of the array-type checks.
    bailOutKind -= IR::BailOutOnMissingValue;
    Assert(bailOutKind == IR::BailOutOnNotArray ||
        bailOutKind == IR::BailOutOnNotNativeArray);
    instr->SetBailOutKind(bailOutKind);

    Func *const func = bailOutInfo->bailOutFunc;
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // Split missing value checks
    bailOnMissingValue = IR::BailOutInstr::New(Js::OpCode::BailOnNotArray, IR::BailOutOnMissingValue, bailOutInfo, func);
    bailOnMissingValue->SetByteCodeOffset(instr);
    insertBeforeInstr->InsertBefore(bailOnMissingValue);
}
// Lowers BailOnNotArray: emits the array-type test on src1, bailing out when it fails.
// Returns the reg opnd holding the tested array so follow-up checks can reuse it.
IR::RegOpnd *Lowerer::LowerBailOnNotArray(IR::Instr *const instr)
{
    Assert(instr);
    Assert(!instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsRegOpnd());
    Assert(!instr->GetSrc2());

    Func *const func = instr->m_func;

    // Label to jump to (or fall through to) when bailing out
    const auto bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true /* isOpHelper */);
    instr->InsertBefore(bailOutLabel);

    // Label to jump to when not bailing out
    const auto skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    instr->InsertAfter(skipBailOutLabel);

    // Do the array tests and jump to bailOutLabel if it's not an array. Fall through if it is an array.
    IR::RegOpnd *const arrayOpnd =
        GenerateArrayTest(instr->UnlinkSrc1()->AsRegOpnd(), bailOutLabel, bailOutLabel, bailOutLabel, true);

    // Skip bail-out when it is an array
    InsertBranch(Js::OpCode::Br, skipBailOutLabel, bailOutLabel);

    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    GenerateBailOut(instr);

    return arrayOpnd;
}
// Lowers the missing-value portion of a split BailOnNotArray: bails out unless the
// array's flags have HasNoMissingValues set. 'arrayOpnd' is the array operand produced
// by the preceding array test (see LowerBailOnNotArray).
void Lowerer::LowerBailOnMissingValue(IR::Instr *const instr, IR::RegOpnd *const arrayOpnd)
{
    Assert(instr);
    Assert(!instr->GetDst());
    Assert(!instr->GetSrc1());
    Assert(!instr->GetSrc2());
    Assert(arrayOpnd);
    Assert(arrayOpnd->GetValueType().IsArrayOrObjectWithArray());

    Func *const func = instr->m_func;

    // Label to jump to when not bailing out
    const auto skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    instr->InsertAfter(skipBailOutLabel);

    // Skip bail-out when the array has no missing values
    //
    //     test [array + offsetOf(objectArrayOrFlags)], Js::DynamicObjectFlags::HasNoMissingValues
    //     jnz $skipBailOut
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, func);
    // The flag must survive truncation to the uint8 that is loaded below.
    CompileAssert(
        static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
        Js::DynamicObjectFlags::HasNoMissingValues);
    InsertTestBranch(
        IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, func),
        IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, func, true),
        Js::OpCode::BrNeq_A,
        skipBailOutLabel,
        instr);

    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    GenerateBailOut(instr);
}
// Lowers IR::BailOutOnInvalidatedArrayHeadSegment for a store-element (or mem op) that
// may call a helper: records the head segment and its length before the helper call,
// then after the call asks a JIT helper whether either changed, bailing out if so.
void Lowerer::LowerBailOnInvalidatedArrayHeadSegment(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
        // Generate checks for whether the head segment or the head segment length changed during the helper call

        if(!(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentSym()))
        {
            // Record the array head segment before the helper call
            headSegmentBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayHeadSegmentForArrayOrObjectWithArray(base)
        }
        if(!(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentLengthSym()))
        {
            // Record the array head segment length before the helper call
            if(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentSym())
            {
                mov headSegmentLengthBeforeHelperCall, [headSegmentBeforeHelperCall + offsetOf(length)]
            }
            else
            {
                headSegmentLengthBeforeHelperCall =
                    Js::JavascriptArray::Jit_GetArrayHeadSegmentLength(headSegmentBeforeHelperCall)
            }
        }

    helperCall:
        (Helper call and other bailout checks)

        // If the array has a different head segment or head segment length after the helper call, then this store needs to bail
        // out
        invalidatedHeadSegment =
            JavascriptArray::Jit_OperationInvalidatedArrayHeadSegment(
                headSegmentBeforeHelperCall,
                headSegmentLengthBeforeHelperCall,
                base)
        test invalidatedHeadSegment, invalidatedHeadSegment
        jz $skipBailOut
        (Bail out with IR::BailOutOnInvalidatedArrayHeadSegment)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;
    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArrayOrObjectWithArray());
    const bool isArrayOrObjectWithArray = baseValueType.IsArrayOrObjectWithArray();
    IR::ArrayRegOpnd *const arrayOpnd = baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr;

    IR::RegOpnd *headSegmentBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentBeforeHelperCallOpnd;
    if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentSym())
    {
        // The head segment is already available in a sym; reuse it.
        headSegmentBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->HeadSegmentSym(), TyMachPtr, func);
        autoReuseHeadSegmentBeforeHelperCallOpnd.Initialize(headSegmentBeforeHelperCallOpnd, func);
    }
    else
    {
        // Record the array head segment before the helper call
        // headSegmentBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayHeadSegmentForArrayOrObjectWithArray(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        headSegmentBeforeHelperCallOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, func), TyMachPtr, func);
        autoReuseHeadSegmentBeforeHelperCallOpnd.Initialize(headSegmentBeforeHelperCallOpnd, func);
        callInstr->SetDst(headSegmentBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayHeadSegmentForArrayOrObjectWithArray);
    }

    IR::RegOpnd *headSegmentLengthBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentLengthBeforeHelperCallOpnd;
    if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentLengthSym())
    {
        // The head segment length is already available in a sym; reuse it.
        headSegmentLengthBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->HeadSegmentLengthSym(), TyUint32, func);
        autoReuseHeadSegmentLengthBeforeHelperCallOpnd.Initialize(headSegmentLengthBeforeHelperCallOpnd, func);
    }
    else
    {
        headSegmentLengthBeforeHelperCallOpnd = IR::RegOpnd::New(StackSym::New(TyUint32, func), TyUint32, func);
        autoReuseHeadSegmentLengthBeforeHelperCallOpnd.Initialize(headSegmentLengthBeforeHelperCallOpnd, func);
        if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentSym())
        {
            // Record the array head segment length before the helper call
            // mov headSegmentLengthBeforeHelperCall, [headSegmentBeforeHelperCall + offsetOf(length)]
            InsertMove(
                headSegmentLengthBeforeHelperCallOpnd,
                IR::IndirOpnd::New(
                    headSegmentBeforeHelperCallOpnd,
                    Js::SparseArraySegmentBase::GetOffsetOfLength(),
                    TyUint32,
                    func),
                instr);
        }
        else
        {
            // Record the array head segment length before the helper call
            // headSegmentLengthBeforeHelperCall =
            //     Js::JavascriptArray::Jit_GetArrayHeadSegmentLength(headSegmentBeforeHelperCall)
            m_lowererMD.LoadHelperArgument(instr, headSegmentBeforeHelperCallOpnd);
            IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
            callInstr->SetDst(headSegmentLengthBeforeHelperCallOpnd);
            instr->InsertBefore(callInstr);
            m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayHeadSegmentLength);
        }
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    // Split this kind off 'instr' and emit its bailout target after 'instr'.
    LowerOneBailOutKind(instr, IR::BailOutOnInvalidatedArrayHeadSegment, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array has a different head segment or head segment length after the helper call, then this store needs to bail out
    // invalidatedHeadSegment =
    //     JavascriptArray::Jit_OperationInvalidatedArrayHeadSegment(
    //         headSegmentBeforeHelperCall,
    //         headSegmentLengthBeforeHelperCall,
    //         base)
    // Arguments are loaded in reverse order.
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, headSegmentLengthBeforeHelperCallOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, headSegmentBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const invalidatedHeadSegmentOpnd = IR::RegOpnd::New(TyUint8, func);
    const IR::AutoReuseOpnd autoReuseInvalidatedHeadSegmentOpnd(invalidatedHeadSegmentOpnd, func);
    callInstr->SetDst(invalidatedHeadSegmentOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationInvalidatedArrayHeadSegment);

    // test invalidatedHeadSegment, invalidatedHeadSegment
    // jz $skipBailOut
    InsertTestBranch(
        invalidatedHeadSegmentOpnd,
        invalidatedHeadSegmentOpnd,
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    // (Bail out with IR::BailOutOnInvalidatedArrayHeadSegment)
    // $skipBailOut:
}
// Lowers IR::BailOutOnInvalidatedArrayLength: records the array length before the
// helper call, then after the call asks a JIT helper whether it changed, bailing
// out if so.
void Lowerer::LowerBailOnInvalidatedArrayLength(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
        // Generate checks for whether the length changed during the helper call

        if(!(arrayOpnd && arrayOpnd.LengthSym() && arrayOpnd.LengthSym() != arrayOpnd.HeadSegmentLengthSym()))
        {
            // Record the array length before the helper call
            lengthBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayLength(base)
        }

    helperCall:
        (Helper call and other bailout checks)

        // If the array has a different length after the helper call, then this store needs to bail out
        invalidatedLength = JavascriptArray::Jit_OperationInvalidatedArrayLength(lengthBeforeHelperCall, base)
        test invalidatedLength, invalidatedLength
        jz $skipBailOut
        (Bail out with IR::BailOutOnInvalidatedArrayLength)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;
    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArray());
    IR::ArrayRegOpnd *const arrayOpnd = baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr;

    IR::RegOpnd *lengthBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseLengthBeforeHelperCallOpnd;
    if(arrayOpnd && arrayOpnd->LengthSym() && arrayOpnd->LengthSym() != arrayOpnd->HeadSegmentLengthSym())
    {
        // A distinct length sym is already available; reuse it.
        lengthBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->LengthSym(), arrayOpnd->LengthSym()->GetType(), func);
        autoReuseLengthBeforeHelperCallOpnd.Initialize(lengthBeforeHelperCallOpnd, func);
    }
    else
    {
        // Record the array length before the helper call
        // lengthBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayLength(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        lengthBeforeHelperCallOpnd = IR::RegOpnd::New(TyUint32, func);
        autoReuseLengthBeforeHelperCallOpnd.Initialize(lengthBeforeHelperCallOpnd, func);
        callInstr->SetDst(lengthBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayLength);
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    // Split this kind off 'instr' and emit its bailout target after 'instr'.
    LowerOneBailOutKind(instr, IR::BailOutOnInvalidatedArrayLength, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array has a different length after the helper call, then this store needs to bail out
    // invalidatedLength = JavascriptArray::Jit_OperationInvalidatedArrayLength(lengthBeforeHelperCall, base)
    // Arguments are loaded in reverse order.
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, lengthBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const invalidatedLengthOpnd = IR::RegOpnd::New(TyUint8, func);
    const IR::AutoReuseOpnd autoReuseInvalidatedLengthOpnd(invalidatedLengthOpnd, func);
    callInstr->SetDst(invalidatedLengthOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationInvalidatedArrayLength);

    // test invalidatedLength, invalidatedLength
    // jz $skipBailOut
    InsertTestBranch(
        invalidatedLengthOpnd,
        invalidatedLengthOpnd,
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    // (Bail out with IR::BailOutOnInvalidatedArrayLength)
    // $skipBailOut:
}
// Lowers IR::BailOutOnMissingValue for a store that may call a helper: captures the
// array's flags before the helper call (or uses a compile-time constant when the value
// type proves no missing values), then after the call asks a JIT helper whether this
// operation created the array's first missing value, bailing out if so.
void Lowerer::LowerBailOnCreatedMissingValue(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
        // Generate checks for whether the first missing value was created during the helper call

        if(!(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
        {
            // Record whether the array has missing values before the helper call
            arrayFlagsBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayFlagsForArrayOrObjectWithArray(base)
        }

    helperCall:
        (Helper call and other bailout checks)

        // If the array had no missing values before the helper call, and the array has missing values after the helper
        // call, then this store created the first missing value in the array and needs to bail out
        if(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())
            (arrayFlagsBeforeHelperCall = Js::DynamicObjectFlags::HasNoMissingValues)
        createdFirstMissingValue = JavascriptArray::Jit_OperationCreatedFirstMissingValue(arrayFlagsBeforeHelperCall, base)
        test createdFirstMissingValue, createdFirstMissingValue
        jz $skipBailOut
        (Bail out with IR::BailOutOnMissingValue)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;
    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArrayOrObjectWithArray());

    IR::Opnd *arrayFlagsBeforeHelperCallOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayFlagsBeforeHelperCallOpnd;
    // Array flags are pointer-sized; pick the IR type matching uintptr_t.
    const IRType arrayFlagsType = sizeof(uintptr_t) == sizeof(uint32) ? TyUint32 : TyUint64;
    if(!(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
    {
        // Record whether the array has missing values before the helper call
        // arrayFlagsBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayFlagsForArrayOrObjectWithArray(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        arrayFlagsBeforeHelperCallOpnd = IR::RegOpnd::New(arrayFlagsType, func);
        autoReuseArrayFlagsBeforeHelperCallOpnd.Initialize(arrayFlagsBeforeHelperCallOpnd, func);
        callInstr->SetDst(arrayFlagsBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayFlagsForArrayOrObjectWithArray);
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    // Split this kind off 'instr' and emit its bailout target after 'instr'.
    LowerOneBailOutKind(instr, IR::BailOutOnMissingValue, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array had no missing values before the helper call, and the array has missing values after the helper
    // call, then this store created the first missing value in the array and needs to bail out
    if(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())
    {
        // (arrayFlagsBeforeHelperCall = Js::DynamicObjectFlags::HasNoMissingValues)
        // The value type proves the "before" flags, so use a constant operand instead of a call.
        Assert(!arrayFlagsBeforeHelperCallOpnd);
        arrayFlagsBeforeHelperCallOpnd =
            arrayFlagsType == TyUint32
                ? static_cast<IR::Opnd *>(
                    IR::IntConstOpnd::New(
                        static_cast<uintptr_t>(Js::DynamicObjectFlags::HasNoMissingValues),
                        arrayFlagsType,
                        func,
                        true))
                : IR::AddrOpnd::New(
                    reinterpret_cast<void *>(Js::DynamicObjectFlags::HasNoMissingValues),
                    IR::AddrOpndKindConstantVar,
                    func,
                    true);
        autoReuseArrayFlagsBeforeHelperCallOpnd.Initialize(arrayFlagsBeforeHelperCallOpnd, func);
    }
    else
    {
        Assert(arrayFlagsBeforeHelperCallOpnd);
    }

    // createdFirstMissingValue = JavascriptArray::Jit_OperationCreatedFirstMissingValue(arrayFlagsBeforeHelperCall, base)
    // Arguments are loaded in reverse order.
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayFlagsBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const createdFirstMissingValueOpnd = IR::RegOpnd::New(TyUint8, func);
    IR::AutoReuseOpnd autoReuseCreatedFirstMissingValueOpnd(createdFirstMissingValueOpnd, func);
    callInstr->SetDst(createdFirstMissingValueOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationCreatedFirstMissingValue);

    // test createdFirstMissingValue, createdFirstMissingValue
    // jz $skipBailOut
    InsertCompareBranch(
        createdFirstMissingValueOpnd,
        IR::IntConstOpnd::New(0, createdFirstMissingValueOpnd->GetType(), func, true),
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    // (Bail out with IR::BailOutOnMissingValue)
    // $skipBailOut:
}
// Returns an operand that yields the current function object at 'insertBeforeInstr'.
// For inlinees this is the inlinee's function-object slot; otherwise it is loaded from
// the implicit parameter area. For coroutines, an extra indirection fetches the real
// JavascriptGeneratorFunction from the GeneratorVirtualScriptFunction.
IR::Opnd*
Lowerer::GetFuncObjectOpnd(IR::Instr* insertBeforeInstr)
{
    Func * func = insertBeforeInstr->m_func;
    IR::Opnd *paramOpnd = nullptr;
    if (func->IsInlinee())
    {
        paramOpnd = func->GetInlineeFunctionObjectSlotOpnd();
    }
    else
    {
#if defined(_M_ARM32_OR_ARM64)
        StackSym * paramSym = this->m_lowererMD.GetImplicitParamSlotSym(0);
#else
        // x86/x64: the function object is read from [frame + 2 * MachPtr] — presumably
        // past the saved frame pointer and return address; TODO confirm frame layout.
        StackSym *paramSym = StackSym::New(TyMachReg, this->m_func);
        this->m_func->SetArgOffset(paramSym, 2 * MachPtr);
        this->m_func->SetHasImplicitParamLoad();
#endif
        paramOpnd = IR::SymOpnd::New(paramSym, TyMachReg, this->m_func);
    }

    if (func->GetJITFunctionBody()->IsCoroutine())
    {
        // the function object for generator calls is a GeneratorVirtualScriptFunction object
        // and we need to return the real JavascriptGeneratorFunction object so grab it before
        // assigning to the dst
        Assert(!func->IsInlinee());
        IR::RegOpnd *tmpOpnd = IR::RegOpnd::New(TyMachReg, func);
        Lowerer::InsertMove(tmpOpnd, paramOpnd, insertBeforeInstr);
        paramOpnd = IR::IndirOpnd::New(tmpOpnd, Js::GeneratorVirtualScriptFunction::GetRealFunctionOffset(), TyMachPtr, func);
    }

    return paramOpnd;
}
  11176. ///----------------------------------------------------------------------------
  11177. ///
  11178. /// Lowerer::LoadFuncExpression
  11179. ///
  11180. /// Load the function expression to src1 from [ebp + 8]
  11181. ///
  11182. ///----------------------------------------------------------------------------
  11183. IR::Instr *
  11184. Lowerer::LoadFuncExpression(IR::Instr *instrFuncExpr)
  11185. {
  11186. ASSERT_INLINEE_FUNC(instrFuncExpr);
  11187. IR::Opnd *paramOpnd = GetFuncObjectOpnd(instrFuncExpr);
  11188. // mov dst, param
  11189. instrFuncExpr->SetSrc1(paramOpnd);
  11190. LowererMD::ChangeToAssign(instrFuncExpr);
  11191. return instrFuncExpr;
  11192. }
// Lowers BoundCheck / UnsignedBoundCheck pseudo-instructions of the form
//     left <= right + offset   (src1 <= src2 + dst)
// into a compare + conditional branch to the continue label, falling through
// into the bailout path when the check fails. The instruction itself is
// removed; the bailout target is materialized via LowerOneBailOutKind.
void Lowerer::LowerBoundCheck(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::BoundCheck || instr->m_opcode == Js::OpCode::UnsignedBoundCheck);

#if DBG
    if(instr->m_opcode == Js::OpCode::UnsignedBoundCheck)
    {
        // UnsignedBoundCheck is currently only supported for the pattern:
        //     UnsignedBoundCheck s1 <= s2 + c, where c == 0 || c == -1
        Assert(instr->GetSrc1()->IsRegOpnd());
        Assert(instr->GetSrc1()->IsInt32());
        Assert(instr->GetSrc2());
        Assert(!instr->GetSrc2()->IsIntConstOpnd());
        if(instr->GetDst())
        {
            const int32 c = instr->GetDst()->AsIntConstOpnd()->AsInt32();
            Assert(c == 0 || c == -1);
        }
    }
#endif

    const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    Assert(
        bailOutKind == IR::BailOutOnArrayAccessHelperCall ||
        bailOutKind == IR::BailOutOnInvalidatedArrayHeadSegment ||
        bailOutKind == IR::BailOutOnFailedHoistedBoundCheck ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);

    // Label after the check that the passing path jumps to.
    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(false);
    // Generates the bailout path; after this the instr no longer carries bailout info.
    LowerOneBailOutKind(instr, bailOutKind, false);
    Assert(!instr->HasBailOutInfo());
    IR::Instr *insertBeforeInstr = instr->m_next;

#if DBG
    // Operands must be either int constants or 32-bit values; the right
    // operand may be absent (offset-only compare), the left may not.
    const auto VerifyLeftOrRightOpnd = [&](IR::Opnd *const opnd, const bool isRightOpnd)
    {
        if(!opnd)
        {
            Assert(isRightOpnd);
            return;
        }
        if(opnd->IsIntConstOpnd())
        {
            Assert(!isRightOpnd || opnd->AsIntConstOpnd()->GetValue() != 0);
            return;
        }
        Assert(opnd->GetType() == TyInt32 || opnd->GetType() == TyUint32);
    };
#endif

    // left <= right + offset (src1 <= src2 + dst)
    IR::Opnd *leftOpnd = instr->UnlinkSrc1();
    DebugOnly(VerifyLeftOrRightOpnd(leftOpnd, false));
    IR::Opnd *rightOpnd = instr->UnlinkSrc2();
    DebugOnly(VerifyLeftOrRightOpnd(rightOpnd, true));
    Assert(!leftOpnd->IsIntConstOpnd() || rightOpnd && !rightOpnd->IsIntConstOpnd());
    IR::IntConstOpnd *offsetOpnd = instr->GetDst() ? instr->UnlinkDst()->AsIntConstOpnd() : nullptr;
    Assert(!offsetOpnd || offsetOpnd->GetValue() != 0);
    const bool doUnsignedCompare = instr->m_opcode == Js::OpCode::UnsignedBoundCheck;
    instr->Remove();

    Func *const func = insertBeforeInstr->m_func;
    IntConstType offset = offsetOpnd ? offsetOpnd->GetValue() : 0;
    Js::OpCode compareOpCode = Js::OpCode::BrLe_A;
    if(leftOpnd->IsIntConstOpnd() && rightOpnd->IsRegOpnd() && offset != IntConstMin)
    {
        // Put the constants together: swap the operands, negate the offset, and invert the branch
        // (IntConstMin can't be negated, hence the guard above).
        IR::Opnd *const tempOpnd = leftOpnd;
        leftOpnd = rightOpnd;
        rightOpnd = tempOpnd;
        offset = -offset;
        compareOpCode = Js::OpCode::BrGe_A;
    }

    if(rightOpnd->IsIntConstOpnd())
    {
        // Try to aggregate right + offset into a constant offset
        // (only when the constant-fold does not overflow int32).
        IntConstType newOffset;
        if(!IntConstMath::Add(offset, rightOpnd->AsIntConstOpnd()->GetValue(), TyInt32, &newOffset))
        {
            offset = newOffset;
            rightOpnd = nullptr;
            offsetOpnd = nullptr;
        }
    }

    // Determine if the Add for (right + offset) is necessary, and the op code that will be used for the comparison
    IR::AutoReuseOpnd autoReuseAddResultOpnd;
    if(offset == -1 && compareOpCode == Js::OpCode::BrLe_A)
    {
        // left <= right - 1  ==>  left < right
        offset = 0;
        compareOpCode = Js::OpCode::BrLt_A;
    }
    else if(offset == 1 && compareOpCode == Js::OpCode::BrGe_A)
    {
        // left >= right + 1  ==>  left > right
        offset = 0;
        compareOpCode = Js::OpCode::BrGt_A;
    }
    else if(offset != 0 && rightOpnd)
    {
        // Need to Add (right + offset). If it overflows, bail out.
        IR::LabelInstr *const bailOutLabel = insertBeforeInstr->m_prev->GetOrCreateContinueLabel(true);
        insertBeforeInstr = bailOutLabel;

        //     mov  temp, right
        //     add  temp, offset
        //     jo   $bailOut
        // $bailOut: (insertBeforeInstr)
        Assert(!offsetOpnd || offsetOpnd->GetValue() == offset);
        IR::RegOpnd *const addResultOpnd = IR::RegOpnd::New(TyInt32, func);
        autoReuseAddResultOpnd.Initialize(addResultOpnd, func);
        InsertAdd(
            true,
            addResultOpnd,
            rightOpnd,
            offsetOpnd ? offsetOpnd->UseWithNewType(TyInt32, func) : IR::IntConstOpnd::New(offset, TyInt32, func),
            insertBeforeInstr);
        InsertBranch(LowererMD::MDOverflowBranchOpcode, bailOutLabel, insertBeforeInstr);

        rightOpnd = addResultOpnd;
    }

    //     cmp  left, right
    //     jl[e] $skipBailOut
    // $bailOut:
    if(!rightOpnd)
    {
        rightOpnd = IR::IntConstOpnd::New(offset, TyInt32, func);
    }
    InsertCompareBranch(leftOpnd, rightOpnd, compareOpCode, doUnsignedCompare, skipBailOutLabel, insertBeforeInstr);
}
  11314. IR::Instr *
  11315. Lowerer::LowerBailTarget(IR::Instr * instr)
  11316. {
  11317. // this is just a bailout target, just skip over it and generate a label before so other bailout can jump here.
  11318. IR::Instr * prevInstr = instr->m_prev;
  11319. IR::LabelInstr * continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  11320. instr->InsertAfter(continueLabelInstr);
  11321. IR::BranchInstr * skipInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueLabelInstr, this->m_func);
  11322. instr->InsertBefore(skipInstr);
  11323. this->GenerateBailOut(instr);
  11324. return prevInstr;
  11325. }
// Splits an instruction carrying an implicit-call bailout into:
//   1. a reset of the thread-context implicit-call flags,
//   2. (for PreOp bailouts) a store that disables implicit calls across the op,
//   3. the original operation (transferred to a fresh instr; 'instr' is
//      updated in place to point at it),
//   4. a BailOnNotEqual that compares the flags against ImplicitCall_None.
// Returns the BailOnNotEqual instruction (which retains the bailout info).
IR::Instr *
Lowerer::SplitBailOnImplicitCall(IR::Instr *& instr)
{
    Assert(instr->IsPlainInstr() || instr->IsProfiledInstr());

    const auto bailOutKind = instr->GetBailOutKind();
    Assert(BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind));

    IR::Opnd * implicitCallFlags = this->GetImplicitCallFlagsOpnd();
    const IR::AutoReuseOpnd autoReuseImplicitCallFlags(implicitCallFlags, instr->m_func);
    IR::IntConstOpnd * noImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_None, TyInt8, this->m_func, true);
    const IR::AutoReuseOpnd autoReuseNoImplicitCall(noImplicitCall, instr->m_func);

    // Reset the implicit call flag on every helper call
    Lowerer::InsertMove(implicitCallFlags, noImplicitCall, instr);

    IR::Instr *disableImplicitCallsInstr = nullptr, *enableImplicitCallsInstr = nullptr;
    if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
    {
        const auto disableImplicitCallAddress =
            m_lowererMD.GenerateMemRef(
                instr->m_func->GetThreadContextInfo()->GetDisableImplicitFlagsAddr(),
                TyInt8,
                instr);

        // Disable implicit calls since they will be called after bailing out
        disableImplicitCallsInstr =
            IR::Instr::New(
                Js::OpCode::Ld_A,
                disableImplicitCallAddress,
                IR::IntConstOpnd::New(DisableImplicitCallFlag, TyInt8, instr->m_func, true),
                instr->m_func);
        instr->InsertBefore(disableImplicitCallsInstr);

        // Create instruction for re-enabling implicit calls; it is inserted
        // after the operation, below.
        enableImplicitCallsInstr =
            IR::Instr::New(
                Js::OpCode::Ld_A,
                disableImplicitCallAddress,
                IR::IntConstOpnd::New(DisableImplicitNoFlag, TyInt8, instr->m_func, true),
                instr->m_func);
    }

    // Move the actual operation onto a fresh instruction placed before the
    // (soon-to-be) bailout check; the caller's 'instr' is rebound to it.
    IR::Instr * bailOutInstr = instr;
    instr = IR::Instr::New(instr->m_opcode, instr->m_func);
    bailOutInstr->TransferTo(instr);
    bailOutInstr->InsertBefore(instr);

    if(disableImplicitCallsInstr)
    {
        // Re-enable implicit calls
        Assert(enableImplicitCallsInstr);
        bailOutInstr->InsertBefore(enableImplicitCallsInstr);

        // Lower both instructions. Lowering an instruction may free the instruction's original operands, so do that last.
        LowererMD::ChangeToAssign(disableImplicitCallsInstr);
        LowererMD::ChangeToAssign(enableImplicitCallsInstr);
    }

    // Turn the original instr into the flags check: bail if any implicit call
    // was recorded during the operation.
    bailOutInstr->m_opcode = Js::OpCode::BailOnNotEqual;
    bailOutInstr->SetSrc1(implicitCallFlags);
    bailOutInstr->SetSrc2(noImplicitCall);
    return bailOutInstr;
}
  11380. IR::Instr *
  11381. Lowerer::SplitBailOnImplicitCall(IR::Instr * instr, IR::Instr * helperCall, IR::Instr * insertBeforeInstr)
  11382. {
  11383. IR::Opnd * implicitCallFlags = this->GetImplicitCallFlagsOpnd();
  11384. const IR::AutoReuseOpnd autoReuseImplicitCallFlags(implicitCallFlags, instr->m_func);
  11385. IR::IntConstOpnd * noImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_None, TyInt8, this->m_func, true);
  11386. const IR::AutoReuseOpnd autoReuseNoImplicitCall(noImplicitCall, instr->m_func);
  11387. // Reset the implicit call flag on every helper call
  11388. Lowerer::InsertMove(implicitCallFlags, noImplicitCall, helperCall->m_prev);
  11389. BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
  11390. if (bailOutInfo->bailOutInstr == instr)
  11391. {
  11392. bailOutInfo->bailOutInstr = nullptr;
  11393. }
  11394. IR::Instr * bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOnNotEqual, IR::BailOutOnImplicitCalls, bailOutInfo, bailOutInfo->bailOutFunc);
  11395. bailOutInstr->SetSrc1(implicitCallFlags);
  11396. bailOutInstr->SetSrc2(noImplicitCall);
  11397. insertBeforeInstr->InsertBefore(bailOutInstr);
  11398. instr->ClearBailOutInfo();
  11399. return bailOutInstr;
  11400. }
// Split out bailout for debugger into separate bailout instr out of real instr which has bailout for debugger.
// Returns the instr which needs to lower next, which would normally be last of splitted instr.
// IR on input:
// - Real instr with BailOutInfo but it's opcode is not BailForDebugger.
//   - debugger bailout is not shared. In this case we'll have debugger bailout in instr->GetBailOutKind().
//   - debugger bailout is shared. In this case we'll have debugger bailout in instr->GetAuxBailOutKind().
// IR on output:
// - Either of:
//   - real instr, then debuggerBailout -- in case we only had debugger bailout.
//   - real instr with BailOutInfo w/o debugger bailout, then debuggerBailout, then sharedBailout -- in case bailout for debugger was shared w/some other b.o.
IR::Instr* Lowerer::SplitBailForDebugger(IR::Instr* instr)
{
    Assert(m_func->IsJitInDebugMode() && instr->m_opcode != Js::OpCode::BailForDebugger);

    IR::BailOutKind debuggerBailOutKind; // Used for splitted instr.
    BailOutInfo* bailOutInfo = instr->GetBailOutInfo();
    IR::Instr* sharedBailoutInstr = nullptr;

    if (instr->GetBailOutKind() & IR::BailOutForDebuggerBits)
    {
        // debugger bailout is not shared.
        Assert(!instr->HasAuxBailOut());
        AssertMsg(!(instr->GetBailOutKind() & ~IR::BailOutForDebuggerBits), "There should only be debugger bailout bits in the instr.");

        debuggerBailOutKind = instr->GetBailOutKind() & IR::BailOutForDebuggerBits;

        // There is no non-debugger bailout in the instr, still can't clear bailout info, as we use it for the splitted instr,
        // but we need to mark the bailout as hasn't been generated yet.
        if (bailOutInfo->bailOutInstr == instr)
        {
            // null will be picked up by following BailOutInstr::New which will change it to new bailout instr.
            bailOutInfo->bailOutInstr = nullptr;
        }

        // Remove bailout info from the original instr which from now on becomes just regular instr, w/o deallocating bailout info.
        instr->ClearBailOutInfo();
    }
    else if (instr->IsBranchInstr() && instr->HasBailOutInfo() && instr->HasAuxBailOut())
    {
        // Branches with shared bailout are lowered in LowerCondBranchCheckBailOut,
        // can't do here because we need to use BranchBailOutRecord but don't know which BrTrue/BrFalse to use for it.
        debuggerBailOutKind = IR::BailOutInvalid;
    }
    else if (instr->HasAuxBailOut() && instr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits)
    {
        // debugger bailout is shared.
        AssertMsg(!(instr->GetBailOutKind() & IR::BailOutForDebuggerBits), "There should be no debugger bits in main bailout kind.");

        debuggerBailOutKind = instr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits;

        // This will insert SharedBail instr after current instr and set bailOutInfo->bailOutInstr to the shared one.
        sharedBailoutInstr = instr->ShareBailOut();

        // As we extracted aux bail out, invalidate all tracks of it in the instr.
        instr->ResetAuxBailOut();
    }
    else
    {
        AssertMsg(FALSE, "shouldn't get here");
        debuggerBailOutKind = IR::BailOutInvalid;
    }

    if (debuggerBailOutKind != IR::BailOutInvalid)
    {
        // Insert the dedicated BailForDebugger right after the (now plain) instr.
        IR::BailOutInstr* debuggerBailoutInstr = IR::BailOutInstr::New(
            Js::OpCode::BailForDebugger, debuggerBailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
        instr->InsertAfter(debuggerBailoutInstr);

        // Since we go backwards, we need to process extracted out bailout for debugger first.
        instr = sharedBailoutInstr ? sharedBailoutInstr : debuggerBailoutInstr;
    }

    return instr;
}
  11464. IR::Instr *
  11465. Lowerer::SplitBailOnResultCondition(IR::Instr *const instr) const
  11466. {
  11467. Assert(instr);
  11468. Assert(!instr->IsLowered());
  11469. Assert(
  11470. instr->GetBailOutKind() & IR::BailOutOnResultConditions ||
  11471. instr->GetBailOutKind() == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  11472. const auto nonBailOutInstr = IR::Instr::New(instr->m_opcode, instr->m_func);
  11473. instr->TransferTo(nonBailOutInstr);
  11474. instr->InsertBefore(nonBailOutInstr);
  11475. return nonBailOutInstr;
  11476. }
// Generates the bailout call for a result-condition bailout and surrounds it
// with two labels returned through the out parameters: *bailOutLabel (helper
// label immediately before the bailout) and *skipBailOutLabel (label after it
// for the non-bailing path). 'instr' is consumed: it becomes the bailout CALL.
void
Lowerer::LowerBailOnResultCondition(
    IR::Instr *const instr,
    IR::LabelInstr * *const bailOutLabel,
    IR::LabelInstr * *const skipBailOutLabel)
{
    Assert(instr);
    Assert(
        instr->GetBailOutKind() & IR::BailOutOnResultConditions ||
        instr->GetBailOutKind() == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(skipBailOutLabel);

    // Label to jump to (or fall through to) when bailing out. The actual bailout label
    // (bailOutInfo->bailOutInstr->AsLabelInstr()) may be shared, and code may be added to restore values before the jump to the
    // actual bailout label in the cloned bailout case, so always create a new bailout label for this particular path.
    *bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true /* isOpHelper */);
    instr->InsertBefore(*bailOutLabel);

    // Label to jump to when not bailing out
    *skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    instr->InsertAfter(*skipBailOutLabel);

    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    GenerateBailOut(instr);
}
// If a bailing instruction's destination aliases one of its sources, writing
// the result could destroy a byte-code value needed to re-execute the op after
// bailout. In that case the dst is sunk into a temporary that is copied back
// to the real dst only on the non-bailout path (after skipBailOutLabel).
void
Lowerer::PreserveSourcesForBailOnResultCondition(IR::Instr *const instr, IR::LabelInstr *const skipBailOutLabel) const
{
    Assert(instr);
    Assert(!instr->IsLowered());
    Assert(!instr->HasBailOutInfo());

    // Since this instruction may bail out, writing to the destination cannot overwrite one of the sources, or we may lose one
    // of the sources needed to redo the equivalent byte code instruction. Determine if the sources need to be preserved.

    const auto dst = instr->GetDst();
    Assert(dst);
    const auto dstStackSym = dst->GetStackSym();
    if(!dstStackSym || !dstStackSym->HasByteCodeRegSlot())
    {
        // We only need to ensure that a byte-code source is not being overwritten
        return;
    }

    switch(instr->m_opcode)
    {
        // The sources of these instructions don't need restoring, or will be restored in the bailout path
        case Js::OpCode::Neg_I4:
            // In case of overflow or zero, the result is the same as the operand
        case Js::OpCode::Add_I4:
        case Js::OpCode::Sub_I4:
            // In case of overflow, there is always enough information to restore the operands
            return;
    }

    Assert(instr->GetSrc1());
    if(!dst->IsEqual(instr->GetSrc1()) && !(instr->GetSrc2() && dst->IsEqual(instr->GetSrc2())))
    {
        // The destination is different from the sources
        return;
    }

    // The destination is the same as one of the sources and the original sources cannot be restored after the instruction, so
    // use a temporary destination for the result and move it back to the original destination after deciding not to bail out
    LowererMD::ChangeToAssign(instr->SinkDst(Js::OpCode::Ld_I4, RegNOREG, skipBailOutLabel));
}
// Dispatches the machine-dependent lowering of an int operation whose result
// condition (overflow / negative-zero, per bailOutKind) triggers a bailout.
// Expects the bailout label to immediately follow the instruction (layout set
// up by LowerBailOnResultCondition / SplitBailOnResultCondition).
void
Lowerer::LowerInstrWithBailOnResultCondition(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKind,
    IR::LabelInstr *const bailOutLabel,
    IR::LabelInstr *const skipBailOutLabel) const
{
    Assert(instr);
    Assert(!instr->IsLowered());
    Assert(!instr->HasBailOutInfo());
    Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
    Assert(bailOutLabel);
    Assert(instr->m_next == bailOutLabel);
    Assert(skipBailOutLabel);

    // Preserve sources that are overwritten by the instruction if needed
    PreserveSourcesForBailOnResultCondition(instr, skipBailOutLabel);

    // Lower the instruction
    switch(instr->m_opcode)
    {
        case Js::OpCode::Neg_I4:
            LowererMD::LowerInt4NegWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        case Js::OpCode::Add_I4:
            LowererMD::LowerInt4AddWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        case Js::OpCode::Sub_I4:
            LowererMD::LowerInt4SubWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        case Js::OpCode::Mul_I4:
            LowererMD::LowerInt4MulWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        case Js::OpCode::Rem_I4:
            m_lowererMD.LowerInt4RemWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
            break;

        default:
            Assert(false); // not implemented
            __assume(false);
    }
}
  11576. void
  11577. Lowerer::GenerateObjectTestAndTypeLoad(IR::Instr *instrLdSt, IR::RegOpnd *opndBase, IR::RegOpnd *opndType, IR::LabelInstr *labelHelper)
  11578. {
  11579. IR::IndirOpnd *opndIndir;
  11580. if (!opndBase->IsNotTaggedValue())
  11581. {
  11582. m_lowererMD.GenerateObjectTest(opndBase, instrLdSt, labelHelper);
  11583. }
  11584. opndIndir = IR::IndirOpnd::New(opndBase, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  11585. InsertMove(opndType, opndIndir, instrLdSt);
  11586. }
// Turns a bailout instruction into the actual bailout sequence and returns the
// label that starts it.
// Three cases:
//  - Cloned instr: the bailout was already generated elsewhere; just jump to it.
//  - Shared bailout info whose bailout was already generated
//    (bailOutInfo->bailOutInstr is now a label): patch the shared bailout
//    record (kind, cache index, function body) at runtime and jump to it.
//  - First/only use: create the bailout record, convert 'instr' into the CALL
//    to the save-registers-and-bail-out helper, and add the jump to the epilog.
// 'branchInstr' is non-null for conditional-branch bailouts, where the branch
// condition and both byte-code targets are captured in a BranchBailOutRecord.
IR::LabelInstr *
Lowerer::GenerateBailOut(IR::Instr * instr, IR::BranchInstr * branchInstr, IR::LabelInstr *bailOutLabel, IR::LabelInstr * collectRuntimeStatsLabel)
{
    BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
    IR::Instr * bailOutInstr = bailOutInfo->bailOutInstr;
    if (instr->IsCloned())
    {
        Assert(bailOutInstr != instr);

        // jump to the cloned bail out label
        IR::LabelInstr * bailOutLabelInstr = bailOutInstr->AsLabelInstr();
        IR::BranchInstr * bailOutBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, bailOutLabelInstr, this->m_func);
        instr->InsertBefore(bailOutBranch);
        instr->Remove();
        return bailOutLabel;
    }

    // Add helper label to trigger layout.
    if (!collectRuntimeStatsLabel)
    {
        collectRuntimeStatsLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    }
    Assert(!collectRuntimeStatsLabel->IsLinked());
    instr->InsertBefore(collectRuntimeStatsLabel);

    if (bailOutInstr != instr)
    {
        // this bailOutInfo is shared, just jump to the bailout target

        // Before jumping, write this site's bailout kind into the shared
        // bailout record. Under OOP JIT the record lives in NativeCodeData and
        // is addressed via the NativeCodeData sym; in-proc it is a direct memref.
        IR::Opnd * indexOpndForBailOutKind = nullptr;
        int bailOutRecordOffset = 0;
        if (this->m_func->IsOOPJIT())
        {
            bailOutRecordOffset = NativeCodeData::GetDataTotalOffset(bailOutInfo->bailOutRecord);

            indexOpndForBailOutKind = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), (int)(bailOutRecordOffset + BailOutRecord::GetOffsetOfBailOutKind()), TyUint32,
#if DBG
                NativeCodeData::GetDataDescription(bailOutInfo->bailOutRecord, this->m_func->m_alloc),
#endif
                m_func, true);

            this->addToLiveOnBackEdgeSyms->Set(m_func->GetTopFunc()->GetNativeCodeDataSym()->m_id);
        }
        else
        {
            indexOpndForBailOutKind =
                IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + BailOutRecord::GetOffsetOfBailOutKind(), TyUint32, this->m_func, IR::AddrOpndKindDynamicBailOutKindRef);
        }
        InsertMove(
            indexOpndForBailOutKind, IR::IntConstOpnd::New(instr->GetBailOutKind(), indexOpndForBailOutKind->GetType(), this->m_func), instr, false);

        // No point in doing this for BailOutFailedEquivalentTypeCheck or BailOutFailedEquivalentFixedFieldTypeCheck,
        // because the respective inline cache is already polymorphic, anyway.
        if (instr->GetBailOutKind() == IR::BailOutFailedTypeCheck || instr->GetBailOutKind() == IR::BailOutFailedFixedFieldTypeCheck)
        {
            // We have a type check bailout that shares a bailout record with other instructions.
            // Generate code to write the cache index into the bailout record before we jump to the call site.

            Assert(bailOutInfo->polymorphicCacheIndex != (uint)-1);
            Assert(bailOutInfo->bailOutRecord);

            IR::Opnd * indexOpnd = nullptr;
            if (this->m_func->IsOOPJIT())
            {
                indexOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), (int)(bailOutRecordOffset + BailOutRecord::GetOffsetOfPolymorphicCacheIndex()), TyUint32, m_func);
            }
            else
            {
                indexOpnd = IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + BailOutRecord::GetOffsetOfPolymorphicCacheIndex(), TyUint32, this->m_func);
            }
            InsertMove(
                indexOpnd, IR::IntConstOpnd::New(bailOutInfo->polymorphicCacheIndex, TyUint32, this->m_func), instr, false);
        }

        if (bailOutInfo->bailOutRecord->IsShared())
        {
            // A shared record serves multiple functions; record which function
            // body this bailout site belongs to.
            IR::Opnd *functionBodyOpnd;
            if (this->m_func->IsOOPJIT())
            {
                functionBodyOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(m_func->GetTopFunc()->GetNativeCodeDataSym(), TyVar, m_func), (int)(bailOutRecordOffset + SharedBailOutRecord::GetOffsetOfFunctionBody()), TyMachPtr, m_func);
            }
            else
            {
                functionBodyOpnd = IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + SharedBailOutRecord::GetOffsetOfFunctionBody(), TyMachPtr, this->m_func);
            }
            InsertMove(
                functionBodyOpnd, CreateFunctionBodyOpnd(instr->m_func), instr, false);
        }

        // GenerateBailOut should have replaced this as a label as we should have already lowered
        // the main bailOutInstr.
        IR::LabelInstr * bailOutTargetLabel = bailOutInstr->AsLabelInstr();
#if DBG
        if (bailOutTargetLabel->m_noHelperAssert)
        {
            collectRuntimeStatsLabel->m_noHelperAssert = true;
        }
#endif

        Assert(bailOutLabel == nullptr || bailOutLabel == bailOutTargetLabel);

        IR::BranchInstr * newBranchInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, bailOutTargetLabel, this->m_func);
        instr->InsertAfter(newBranchInstr);
        instr->Remove();
        return collectRuntimeStatsLabel ? collectRuntimeStatsLabel : bailOutLabel;
    }

    // The bailout hasn't been generated yet.
    Assert(!bailOutInstr->IsLabelInstr());

    // capture the condition for this bailout
    if (bailOutLabel == nullptr)
    {
        // Create a label and place it in the bailout info so that shared bailout point can jump to this one
        if (instr->m_prev->IsLabelInstr())
        {
            bailOutLabel = instr->m_prev->AsLabelInstr();
            Assert(bailOutLabel->isOpHelper);
        }
        else
        {
            bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            instr->InsertBefore(bailOutLabel);
        }
    }
    else
    {
        instr->InsertBefore(bailOutLabel);
    }

#if DBG
    const IR::BailOutKind bailOutKind = bailOutInstr->GetBailOutKind();
    if (bailOutInstr->m_opcode == Js::OpCode::BailOnNoSimdTypeSpec ||
        bailOutInstr->m_opcode == Js::OpCode::BailOnNoProfile ||
        bailOutInstr->m_opcode == Js::OpCode::BailOnException ||
        bailOutInstr->m_opcode == Js::OpCode::Yield ||
        bailOutKind & (IR::BailOutConventionalTypedArrayAccessOnly |
            IR::BailOutConventionalNativeArrayAccessOnly |
            IR::BailOutOnArrayAccessHelperCall))
    {
        bailOutLabel->m_noHelperAssert = true;
    }
#endif

    // Later shared-bailout sites jump to this label (see the branch above).
    bailOutInfo->bailOutInstr = bailOutLabel;
    bailOutLabel->m_hasNonBranchRef = true;

    // Create the bail out record
    Assert(bailOutInfo->bailOutRecord == nullptr);
    BailOutRecord * bailOutRecord;
    IR::JnHelperMethod helperMethod;
    if (branchInstr != nullptr)
    {
        // Conditional-branch bailout: record both byte-code continuation
        // offsets (taken/not-taken) and the condition, so the interpreter can
        // resume on the correct path.
        Assert(branchInstr->GetSrc2() == nullptr);
        Assert(branchInstr->GetDst() == nullptr);

        IR::LabelInstr * targetLabel = branchInstr->GetTarget();
        Assert(targetLabel->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset);

        uint32 trueOffset;
        uint32 falseOffset;
        IR::Opnd *condOpnd = branchInstr->GetSrc1();
        bool invertTarget = (branchInstr->m_opcode == Js::OpCode::BrFalse_A);

        if (bailOutInfo->isInvertedBranch)
        {
            // Flip the condition
            IR::Instr *subInstr = IR::Instr::New(Js::OpCode::Sub_I4, condOpnd, condOpnd, IR::IntConstOpnd::New(1, TyMachReg, instr->m_func), instr->m_func);
            instr->InsertBefore(subInstr);
            this->m_lowererMD.EmitInt4Instr(subInstr);

            // We should really do a DEC/NEG for a full 2's complement flip from 0/1 to 1/0,
            // but DEC is sufficient to flip from 0/1 to -1/0, which is false/true to true/false...
            //instr->InsertBefore(IR::Instr::New(Js::OpCode::Neg_I4, condOpnd, condOpnd, instr->m_func));

            invertTarget = invertTarget ? false : true;
        }

        if (!invertTarget)
        {
            trueOffset = targetLabel->GetByteCodeOffset();
            falseOffset = bailOutInfo->bailOutOffset;
        }
        else
        {
            falseOffset = targetLabel->GetByteCodeOffset();
            trueOffset = bailOutInfo->bailOutOffset;
        }

        bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
            BranchBailOutRecord, trueOffset, falseOffset, branchInstr->GetByteCodeReg(), instr->GetBailOutKind(), bailOutInfo->bailOutFunc);

        helperMethod = IR::HelperSaveAllRegistersAndBranchBailOut;
#ifdef _M_IX86
        if(!AutoSystemInfo::Data.SSE2Available())
        {
            helperMethod = IR::HelperSaveAllRegistersNoSse2AndBranchBailOut;
        }
#endif

        // Save the condition. The register allocator will generate arguments.
        bailOutInfo->branchConditionOpnd = branchInstr->GetSrc1()->Copy(branchInstr->m_func);
    }
    else
    {
        if (bailOutInstr->GetBailOutKind() == IR::BailOutShared)
        {
            bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
                SharedBailOutRecord, bailOutInfo->bailOutOffset, bailOutInfo->polymorphicCacheIndex, instr->GetBailOutKind(), bailOutInfo->bailOutFunc);
            if (bailOutInfo->isLoopTopBailOutInfo)
            {
                bailOutRecord->SetType(BailOutRecord::BailoutRecordType::SharedForLoopTop);
            }
        }
        else
        {
            bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
                BailOutRecord, bailOutInfo->bailOutOffset, bailOutInfo->polymorphicCacheIndex, instr->GetBailOutKind(), bailOutInfo->bailOutFunc);
        }

        helperMethod = IR::HelperSaveAllRegistersAndBailOut;
#ifdef _M_IX86
        if(!AutoSystemInfo::Data.SSE2Available())
        {
            helperMethod = IR::HelperSaveAllRegistersNoSse2AndBailOut;
        }
#endif
    }

    // Save the bailout record. The register allocator will generate arguments.
    bailOutInfo->bailOutRecord = bailOutRecord;
#if ENABLE_DEBUG_CONFIG_OPTIONS
    bailOutRecord->bailOutOpcode = bailOutInfo->bailOutOpcode;
#endif

    if (instr->m_opcode == Js::OpCode::BailOnNotStackArgs && instr->GetSrc1())
    {
        // src1 on BailOnNotStackArgs is helping CSE
        instr->FreeSrc1();
    }

    if (instr->GetSrc2() != nullptr)
    {
        // Ideally we should never be in this situation, but in case we reached a
        // condition where src2 was not freed, free it here.
        instr->FreeSrc2();
    }

    // Call the bail out wrapper
    instr->m_opcode = Js::OpCode::Call;
    if(instr->GetDst())
    {
        // To facilitate register allocation, don't assign a destination. The result will anyway go into the return register,
        // but the register allocator does not need to kill that register for the call.
        instr->FreeDst();
    }
    instr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, this->m_func));
    m_lowererMD.LowerCall(instr, 0);

    if (bailOutInstr->GetBailOutKind() != IR::BailOutForGeneratorYield)
    {
        // Defer introducing the JMP to epilog until LowerPrologEpilog phase for Yield bailouts so
        // that Yield does not appear to have flow out of its containing block for the RegAlloc phase.
        // Yield is an unconditional bailout but we want to simulate the flow as if the Yield were
        // just like a call.
        GenerateJumpToEpilogForBailOut(bailOutInfo, instr);
    }

    return collectRuntimeStatsLabel ? collectRuntimeStatsLabel : bailOutLabel;
}
  11823. void
  11824. Lowerer::GenerateJumpToEpilogForBailOut(BailOutInfo * bailOutInfo, IR::Instr *instr)
  11825. {
  11826. IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->m_prev;
  11827. // JMP to the epilog
  11828. IR::LabelInstr * exitTargetInstr;
  11829. if (exitPrevInstr->IsLabelInstr())
  11830. {
  11831. exitTargetInstr = exitPrevInstr->AsLabelInstr();
  11832. }
  11833. else
  11834. {
  11835. exitTargetInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  11836. exitPrevInstr->InsertAfter(exitTargetInstr);
  11837. }
  11838. exitTargetInstr = m_lowererMD.GetBailOutStackRestoreLabel(bailOutInfo, exitTargetInstr);
  11839. IR::Instr * instrAfter = instr->m_next;
  11840. IR::BranchInstr * exitInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, exitTargetInstr, this->m_func);
  11841. instrAfter->InsertBefore(exitInstr);
  11842. }
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateFastCondBranch
///
/// Emits an inline tagged-int compare-and-branch for a conditional branch.
/// Returns true when the caller still needs to generate the normal helper
/// call sequence (at the $helper label it inserts), false when the branch was
/// lowered entirely inline and the helper can be skipped.
///
///----------------------------------------------------------------------------
bool
Lowerer::GenerateFastCondBranch(IR::BranchInstr * instrBranch, bool *pIsHelper)
{
    // The idea is to do an inline compare if we can prove that both sources
    // are tagged ints
    //
    // Given:
    //
    // Brxx_A $L, src1, src2
    //
    // Generate:
    //
    // (If not Int31's, goto $helper)
    // Jxx $L, src1, src2
    // JMP $fallthru
    // $helper:
    // (caller will generate normal helper call sequence)
    // $fallthru:
    IR::LabelInstr * labelHelper = nullptr;
    IR::LabelInstr * labelFallThru;
    IR::BranchInstr * instr;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;

    opndSrc1 = instrBranch->GetSrc1();
    opndSrc2 = instrBranch->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "BrC expects 2 src operands");

    // Not tagged ints? A source known not to be an int can never take the
    // inline path; leave the branch alone for the helper.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    // Tagged ints? If both sources are provably tagged ints, no runtime type
    // check (and no helper) is needed.
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    if (!isTaggedInts)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        this->m_lowererMD.GenerateSmIntPairTest(instrBranch, opndSrc1, opndSrc2, labelHelper);
    }

    // Jxx $L, src1, src2
    // Compare the raw 32-bit views of the tagged values.
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
    instr = IR::BranchInstr::New(instrBranch->m_opcode, instrBranch->GetTarget(), opndSrc1, opndSrc2, this->m_func);
    instrBranch->InsertBefore(instr);
    this->m_lowererMD.LowerCondBranch(instr);

    if (isTaggedInts)
    {
        // Inline path always taken; the original branch is dead.
        instrBranch->Remove();
        // Skip lowering call to helper
        return false;
    }

    // JMP $fallthru
    // Reuse the next label as the fall-through target if one exists.
    IR::Instr *instrNext = instrBranch->GetNextRealInstrOrLabel();
    if (instrNext->IsLabelInstr())
    {
        labelFallThru = instrNext->AsLabelInstr();
    }
    else
    {
        labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /**pIsHelper*/FALSE);
        instrBranch->InsertAfter(labelFallThru);
    }
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallThru, this->m_func);
    instrBranch->InsertBefore(instr);

    // $helper:
    // (caller will generate normal helper call sequence)
    // $fallthru:
    AssertMsg(labelHelper, "Should not be NULL");
    instrBranch->InsertBefore(labelHelper);
    *pIsHelper = true;
    return true;
}
IR::Instr *
Lowerer::LowerInlineeStart(IR::Instr * inlineeStartInstr)
{
    // Lowers an InlineeStart: turns its ArgOut chain into MOVs and materializes
    // the inline frame's meta args. Returns the instruction preceding the
    // original InlineeStart, where lowering should resume.
    IR::Opnd *linkOpnd = inlineeStartInstr->GetSrc2();
    if (!linkOpnd)
    {
        // No arg link: the inline arguments were optimized away.
        Assert(inlineeStartInstr->m_func->m_hasInlineArgsOpt);
        return inlineeStartInstr->m_prev;
    }

    AssertMsg(inlineeStartInstr->m_func->firstActualStackOffset != -1, "This should have been already done in backward pass");

    IR::Instr *startCall;
    // Free the argOut links and lower them to MOVs
    inlineeStartInstr->IterateArgInstrs([&](IR::Instr* argInstr){
        Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A || argInstr->m_opcode == Js::OpCode::ArgOut_A_Inline);
        // NOTE(review): startCall is written from the arg chain but never read
        // afterwards in this function.
        startCall = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
        argInstr->FreeSrc2();
#pragma prefast(suppress:6235, "Non-Zero Constant in Condition")
        if (!PHASE_ON(Js::EliminateArgoutForInlineePhase, this->m_func) || inlineeStartInstr->m_func->GetJITFunctionBody()->HasOrParentHasArguments())
        {
            m_lowererMD.ChangeToAssign(argInstr);
        }
        else
        {
            // Keep the argout virtual; it is treated like a built-in's argout.
            argInstr->m_opcode = Js::OpCode::ArgOut_A_InlineBuiltIn;
        }
        return false;
    });

    IR::Instr *argInsertInstr = inlineeStartInstr;
    uint i = 0;
    inlineeStartInstr->IterateMetaArgs( [&] (IR::Instr* metaArg)
    {
        if(i == 0)
        {
            // Null out the next inline frame's arg-count slot before the first
            // meta arg is stored.
            Lowerer::InsertMove(metaArg->m_func->GetNextInlineeFrameArgCountSlotOpnd(),
                IR::AddrOpnd::NewNull(metaArg->m_func),
                argInsertInstr);
        }
        if (i == Js::Constants::InlineeMetaArgIndex_FunctionObject)
        {
            // The function-object meta arg takes the InlineeStart's src1.
            metaArg->SetSrc1(inlineeStartInstr->GetSrc1());
        }
        metaArg->Unlink();
        argInsertInstr->InsertBefore(metaArg);
        IR::Instr* prev = metaArg->m_prev;
        m_lowererMD.ChangeToAssign(metaArg);
        if (i == Js::Constants::InlineeMetaArgIndex_Argc)
        {
#if defined(_M_IX86) || defined(_M_X64)
            Assert(metaArg == prev->m_next);
#else //defined(_M_ARM)
            Assert(prev->m_next->m_opcode == Js::OpCode::LDIMM);
#endif
            // The argc constant must stay unencoded; mark the defining
            // instruction as the inlinee entry for later phases.
            metaArg = prev->m_next;
            Assert(metaArg->GetSrc1()->AsIntConstOpnd()->m_dontEncode == true);
            metaArg->isInlineeEntryInstr = true;
            LowererMD::Legalize(metaArg);
        }
        argInsertInstr = metaArg;
        i++;
        return false;
    });

    IR::Instr* prev = inlineeStartInstr->m_prev;
    if (inlineeStartInstr->m_func->m_hasInlineArgsOpt)
    {
        // Keep the InlineeStart marker (operand-free) when the arguments stack
        // was optimized; drop it entirely otherwise.
        inlineeStartInstr->FreeSrc1();
        inlineeStartInstr->FreeSrc2();
        inlineeStartInstr->FreeDst();
    }
    else
    {
        inlineeStartInstr->Remove();
    }
    return prev;
}
  12004. void
  12005. Lowerer::LowerInlineeEnd(IR::Instr *instr)
  12006. {
  12007. Assert(instr->m_func->IsInlinee());
  12008. Assert(m_func->IsTopFunc());
  12009. // No need to emit code if the function wasn't marked as having implicit calls or bailout. Dead-Store should have removed inline overhead.
  12010. if (instr->m_func->GetHasImplicitCalls() || PHASE_OFF(Js::DeadStorePhase, this->m_func))
  12011. {
  12012. Lowerer::InsertMove(instr->m_func->GetInlineeArgCountSlotOpnd(),
  12013. IR::IntConstOpnd::New(0, TyMachReg, instr->m_func),
  12014. instr);
  12015. }
  12016. // Keep InlineeEnd around as it is used by register allocator, if we have optimized the arguments stack
  12017. if (instr->m_func->m_hasInlineArgsOpt)
  12018. {
  12019. instr->FreeSrc1();
  12020. }
  12021. else
  12022. {
  12023. instr->Remove();
  12024. }
  12025. }
  12026. IR::Instr *
  12027. Lowerer::LoadFloatFromNonReg(IR::Opnd * opndSrc, IR::Opnd * opndDst, IR::Instr * instrInsert)
  12028. {
  12029. double value;
  12030. if (opndSrc->IsAddrOpnd())
  12031. {
  12032. Js::Var var = opndSrc->AsAddrOpnd()->m_address;
  12033. if (Js::TaggedInt::Is(var))
  12034. {
  12035. value = Js::TaggedInt::ToDouble(var);
  12036. }
  12037. else
  12038. {
  12039. value = Js::JavascriptNumber::GetValue(var);
  12040. }
  12041. }
  12042. else if (opndSrc->IsIntConstOpnd())
  12043. {
  12044. if (opndSrc->IsUInt32())
  12045. {
  12046. value = (double)(uint32)opndSrc->AsIntConstOpnd()->GetValue();
  12047. }
  12048. else
  12049. {
  12050. value = (double)opndSrc->AsIntConstOpnd()->GetValue();
  12051. }
  12052. }
  12053. else if (opndSrc->IsFloatConstOpnd())
  12054. {
  12055. value = (double)opndSrc->AsFloatConstOpnd()->m_value;
  12056. }
  12057. else if (opndSrc->IsFloat32ConstOpnd())
  12058. {
  12059. float floatValue = opndSrc->AsFloat32ConstOpnd()->m_value;
  12060. return LowererMD::LoadFloatValue(opndDst, floatValue, instrInsert);
  12061. }
  12062. else
  12063. {
  12064. AssertMsg(0, "Unexpected opnd type");
  12065. value = 0;
  12066. }
  12067. return LowererMD::LoadFloatValue(opndDst, value, instrInsert);
  12068. }
void
Lowerer::LoadInt32FromUntaggedVar(IR::Instr *const instrLoad)
{
    // Emit a helper call that extracts a nonzero int32 value from an untagged
    // var. On a nonzero result control jumps to the continue label past
    // 'instrLoad'; on zero it falls through to 'instrLoad', where the caller
    // generates the remaining (slow-path) code.
    Assert(instrLoad);
    Assert(instrLoad->GetDst());
    Assert(instrLoad->GetDst()->IsRegOpnd());
    Assert(instrLoad->GetDst()->IsInt32());
    Assert(instrLoad->GetSrc1());
    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetSrc1()->IsVar());
    Assert(!instrLoad->GetSrc2());

    // push src
    // int32Value = call JavascriptNumber::GetNonzeroInt32Value_NoChecks
    // test int32Value, int32Value
    // jne $done
    // (fall through to 'instrLoad'; caller will generate code here)
    // $done:
    // (rest of program)
    Func *const func = instrLoad->m_func;
    IR::LabelInstr *const doneLabel = instrLoad->GetOrCreateContinueLabel();

    // push src
    // int32Value = call JavascriptNumber::GetNonzeroInt32Value_NoChecks
    // The call result is written directly to instrLoad's destination symbol.
    StackSym *const int32ValueSym = instrLoad->GetDst()->AsRegOpnd()->m_sym;
    IR::Instr *const instr =
        IR::Instr::New(
            Js::OpCode::Call,
            IR::RegOpnd::New(int32ValueSym, TyInt32, func),
            instrLoad->GetSrc1()->AsRegOpnd(),
            func);
    instrLoad->InsertBefore(instr);
    LowerUnaryHelper(instr, IR::HelperGetNonzeroInt32Value_NoTaggedIntCheck);

    // test int32Value, int32Value
    // jne $done
    InsertCompareBranch(
        IR::RegOpnd::New(int32ValueSym, TyInt32, func),
        IR::IntConstOpnd::New(0, TyInt32, func, true),
        Js::OpCode::BrNeq_A,
        doneLabel,
        instrLoad);
}
bool
Lowerer::GetValueFromIndirOpnd(IR::IndirOpnd *indirOpnd, IR::Opnd **pValueOpnd, IntConstType *pValue)
{
    // Extract a constant index from an indir operand, when one is available.
    // Returns false when the index is a known negative constant (no fast path
    // possible). Otherwise returns true; *pValueOpnd is an IntConstOpnd for a
    // constant index, or nullptr (with *pValue == 0) when the index is a
    // non-constant register.
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    IR::Opnd* valueOpnd = nullptr;
    IntConstType value = 0;
    if (!indexOpnd)
    {
        // No index register: the indir's offset itself is the constant index.
        value = (IntConstType)indirOpnd->GetOffset();
        if (value < 0)
        {
            // Can't do fast path for negative index
            return false;
        }
        valueOpnd = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
    }
    else if (indexOpnd->m_sym->IsIntConst())
    {
        value = indexOpnd->AsRegOpnd()->m_sym->GetIntConstValue();
        if (value < 0)
        {
            // Can't do fast path for negative index
            return false;
        }
        valueOpnd = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
    }
    *pValueOpnd = valueOpnd;
    *pValue = value;
    return true;
}
void
Lowerer::GenerateFastBrOnObject(IR::Instr *instr)
{
    // Inline lowering of BrOnObject_A: branch to the target when src1 holds a
    // non-primitive object (type id greater than the last primitive type id);
    // otherwise fall through. The original instruction is removed — no helper
    // call is needed.
    Assert(instr->m_opcode == Js::OpCode::BrOnObject_A);
    IR::RegOpnd *object = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
    IR::LabelInstr *done = instr->GetOrCreateContinueLabel();
    IR::LabelInstr *target = instr->AsBranchInstr()->GetTarget();
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    IR::IntConstOpnd *typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, instr->m_func);

    if (!object)
    {
        // Source is not already a register; copy it into one so it can be tested.
        object = IR::RegOpnd::New(TyVar, m_func);
        Lowerer::InsertMove(object, instr->GetSrc1(), instr);
    }

    // TEST object, 1
    // JNE $done
    // MOV typeRegOpnd, [object + offset(Type)]
    // CMP [typeRegOpnd + offset(TypeId)], TypeIds_LastJavascriptPrimitiveType
    // JGT $target
    // $done:
    m_lowererMD.GenerateObjectTest(object, instr, done);
    InsertMove(typeRegOpnd,
        IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
        instr);
    InsertCompareBranch(
        IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
        typeIdOpnd, Js::OpCode::BrGt_A, target, instr);
    instr->Remove();
}
void Lowerer::GenerateObjectHeaderInliningTest(IR::RegOpnd *baseOpnd, IR::LabelInstr * target,IR::Instr *insertBeforeInstr)
{
    // Branch to 'target' when the object's type handler reports the
    // object-header-inlined slot layout (its offsetOfInlineSlots equals the
    // header-inlined slot offset).
    Assert(baseOpnd);
    Assert(target);
    AssertMsg(
        baseOpnd->GetValueType().IsLikelyObject() &&
        baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray,
        "Why are we here, when the object is already known not to have an ObjArray");
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;

    // mov type, [base + offsetOf(type)]
    IR::RegOpnd *const opnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(
        opnd,
        IR::IndirOpnd::New(
            baseOpnd,
            Js::DynamicObject::GetOffsetOfType(),
            opnd->GetType(),
            func),
        insertBeforeInstr);

    // mov typeHandler, [type + offsetOf(typeHandler)]
    // ('opnd' is reused to hold the type handler.)
    InsertMove(
        opnd,
        IR::IndirOpnd::New(
            opnd,
            Js::DynamicType::GetOffsetOfTypeHandler(),
            opnd->GetType(),
            func),
        insertBeforeInstr);

    IR::IndirOpnd * offsetOfInlineSlotOpnd = IR::IndirOpnd::New(opnd,Js::DynamicTypeHandler::GetOffsetOfOffsetOfInlineSlots(), TyInt16, func);
    IR::IntConstOpnd * objHeaderInlinedSlotOffset = IR::IntConstOpnd::New(Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots(), TyInt16, func);

    // CMP [typeHandler + offsetOf(offsetOfInlineSlots)], objHeaderInlinedSlotOffset
    InsertCompareBranch(
        offsetOfInlineSlotOpnd,
        objHeaderInlinedSlotOffset,
        Js::OpCode::BrEq_A,
        target,
        insertBeforeInstr);
}
void Lowerer::GenerateObjectTypeTest(IR::RegOpnd *srcReg, IR::Instr *instrInsert, IR::LabelInstr *labelHelper)
{
    // Jump to labelHelper unless srcReg holds an object whose vtable is the
    // plain DynamicObject vtable. A tagged-value test is emitted first when
    // srcReg is not already known to be untagged.
    Assert(srcReg);
    if (!srcReg->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(srcReg, instrInsert, labelHelper);
    }

    // CMP [srcReg], Js::DynamicObject::`vtable'
    // JNE $helper
    IR::BranchInstr *branchInstr = InsertCompareBranch(
        IR::IndirOpnd::New(srcReg, 0, TyMachPtr, m_func),
        LoadVTableValueOpnd(instrInsert, VTableValue::VtableDynamicObject),
        Js::OpCode::BrNeq_A,
        labelHelper,
        instrInsert);
    // NOTE(review): presumably a speculative-execution mitigation that poisons
    // srcReg when the type check fails — confirm against InsertObjectPoison.
    InsertObjectPoison(srcReg, branchInstr, instrInsert, false);
}
// Vtable value per ObjectType (indexed by ObjectType; see GetArrayVtableAddress).
// Non-array object types map to VtableInvalid.
const VTableValue Lowerer::VtableAddresses[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ VTableValue::VtableInvalid,
    /* ObjectType::Object */ VTableValue::VtableInvalid,
    /* ObjectType::RegExp */ VTableValue::VtableInvalid,
    /* ObjectType::ObjectWithArray */ VTableValue::VtableJavascriptArray,
    /* ObjectType::Array */ VTableValue::VtableJavascriptArray,
    /* ObjectType::Int8Array */ VTableValue::VtableInt8Array,
    /* ObjectType::Uint8Array */ VTableValue::VtableUint8Array,
    /* ObjectType::Uint8ClampedArray */ VTableValue::VtableUint8ClampedArray,
    /* ObjectType::Int16Array */ VTableValue::VtableInt16Array,
    /* ObjectType::Uint16Array */ VTableValue::VtableUint16Array,
    /* ObjectType::Int32Array */ VTableValue::VtableInt32Array,
    /* ObjectType::Uint32Array */ VTableValue::VtableUint32Array,
    /* ObjectType::Float32Array */ VTableValue::VtableFloat32Array,
    /* ObjectType::Float64Array */ VTableValue::VtableFloat64Array,
    /* ObjectType::Int8VirtualArray */ VTableValue::VtableInt8VirtualArray,
    /* ObjectType::Uint8VirtualArray */ VTableValue::VtableUint8VirtualArray,
    /* ObjectType::Uint8ClampedVirtualArray */ VTableValue::VtableUint8ClampedVirtualArray,
    /* ObjectType::Int16VirtualArray */ VTableValue::VtableInt16VirtualArray,
    /* ObjectType::Uint16VirtualArray */ VTableValue::VtableUint16VirtualArray,
    /* ObjectType::Int32VirtualArray */ VTableValue::VtableInt32VirtualArray,
    /* ObjectType::Uint32VirtualArray */ VTableValue::VtableUint32VirtualArray,
    /* ObjectType::Float32VirtualArray */ VTableValue::VtableFloat32VirtualArray,
    /* ObjectType::Float64VirtualArray */ VTableValue::VtableFloat64VirtualArray,
    /* ObjectType::Int8MixedArray */ VTableValue::VtableInt8Array,
    /* ObjectType::Uint8MixedArray */ VTableValue::VtableUint8Array,
    /* ObjectType::Uint8ClampedMixedArray */ VTableValue::VtableUint8ClampedArray,
    /* ObjectType::Int16MixedArray */ VTableValue::VtableInt16Array,
    /* ObjectType::Uint16MixedArray */ VTableValue::VtableUint16Array,
    /* ObjectType::Int32MixedArray */ VTableValue::VtableInt32Array,
    /* ObjectType::Uint32MixedArray */ VTableValue::VtableUint32Array,
    /* ObjectType::Float32MixedArray */ VTableValue::VtableFloat32Array,
    /* ObjectType::Float64MixedArray */ VTableValue::VtableFloat64Array,
    /* ObjectType::Int64Array */ VTableValue::VtableInt64Array,
    /* ObjectType::Uint64Array */ VTableValue::VtableUint64Array,
    /* ObjectType::BoolArray */ VTableValue::VtableBoolArray,
    /* ObjectType::CharArray */ VTableValue::VtableCharArray
};
// Offset of the head segment (JS arrays) or buffer (typed arrays) per
// ObjectType; (uint32)-1 for non-array object types. See GetArrayOffsetOfHeadSegment.
const uint32 Lowerer::OffsetsOfHeadSegment[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ static_cast<uint32>(-1),
    /* ObjectType::Object */ static_cast<uint32>(-1),
    /* ObjectType::RegExp */ static_cast<uint32>(-1),
    /* ObjectType::ObjectWithArray */ Js::JavascriptArray::GetOffsetOfHead(),
    /* ObjectType::Array */ Js::JavascriptArray::GetOffsetOfHead(),
    /* ObjectType::Int8Array */ Js::Int8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8Array */ Js::Uint8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedArray */ Js::Uint8ClampedArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16Array */ Js::Int16Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint16Array */ Js::Uint16Array::GetOffsetOfBuffer(),
    /* ObjectType::Int32Array */ Js::Int32Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint32Array */ Js::Uint32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float32Array */ Js::Float32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float64Array */ Js::Float64Array::GetOffsetOfBuffer(),
    /* ObjectType::Int8VirtualArray */ Js::Int8VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint8VirtualArray */ Js::Uint8VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedVirtualArray */ Js::Uint8ClampedVirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16VirtualArray */ Js::Int16VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint16VirtualArray */ Js::Uint16VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int32VirtualArray */ Js::Int32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint32VirtualArray */ Js::Uint32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Float32VirtualArray */ Js::Float32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Float64VirtualArray */ Js::Float64VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int8MixedArray */ Js::Int8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8MixedArray */ Js::Uint8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedMixedArray */ Js::Uint8ClampedArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16MixedArray */ Js::Int16Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint16MixedArray */ Js::Uint16Array::GetOffsetOfBuffer(),
    /* ObjectType::Int32MixedArray */ Js::Int32Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint32MixedArray */ Js::Uint32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float32MixedArray */ Js::Float32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float64MixedArray */ Js::Float64Array::GetOffsetOfBuffer(),
    /* ObjectType::Int64Array */ Js::Int64Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint64Array */ Js::Uint64Array::GetOffsetOfBuffer(),
    /* ObjectType::BoolArray */ Js::BoolArray::GetOffsetOfBuffer(),
    /* ObjectType::CharArray */ Js::CharArray::GetOffsetOfBuffer()
};
// Offset of the length field per ObjectType; (uint32)-1 for non-array object
// types. See GetArrayOffsetOfLength.
const uint32 Lowerer::OffsetsOfLength[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ static_cast<uint32>(-1),
    /* ObjectType::Object */ static_cast<uint32>(-1),
    /* ObjectType::RegExp */ static_cast<uint32>(-1),
    /* ObjectType::ObjectWithArray */ Js::JavascriptArray::GetOffsetOfLength(),
    /* ObjectType::Array */ Js::JavascriptArray::GetOffsetOfLength(),
    /* ObjectType::Int8Array */ Js::Int8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8Array */ Js::Uint8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedArray */ Js::Uint8ClampedArray::GetOffsetOfLength(),
    /* ObjectType::Int16Array */ Js::Int16Array::GetOffsetOfLength(),
    /* ObjectType::Uint16Array */ Js::Uint16Array::GetOffsetOfLength(),
    /* ObjectType::Int32Array */ Js::Int32Array::GetOffsetOfLength(),
    /* ObjectType::Uint32Array */ Js::Uint32Array::GetOffsetOfLength(),
    /* ObjectType::Float32Array */ Js::Float32Array::GetOffsetOfLength(),
    /* ObjectType::Float64Array */ Js::Float64Array::GetOffsetOfLength(),
    /* ObjectType::Int8VirtualArray */ Js::Int8VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint8VirtualArray */ Js::Uint8VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedVirtualArray */ Js::Uint8ClampedVirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int16VirtualArray */ Js::Int16VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint16VirtualArray */ Js::Uint16VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int32VirtualArray */ Js::Int32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint32VirtualArray */ Js::Uint32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Float32VirtualArray */ Js::Float32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Float64VirtualArray */ Js::Float64VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int8MixedArray */ Js::Int8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8MixedArray */ Js::Uint8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedMixedArray */ Js::Uint8ClampedArray::GetOffsetOfLength(),
    /* ObjectType::Int16MixedArray */ Js::Int16Array::GetOffsetOfLength(),
    /* ObjectType::Uint16MixedArray */ Js::Uint16Array::GetOffsetOfLength(),
    /* ObjectType::Int32MixedArray */ Js::Int32Array::GetOffsetOfLength(),
    /* ObjectType::Uint32MixedArray */ Js::Uint32Array::GetOffsetOfLength(),
    /* ObjectType::Float32MixedArray */ Js::Float32Array::GetOffsetOfLength(),
    /* ObjectType::Float64MixedArray */ Js::Float64Array::GetOffsetOfLength(),
    /* ObjectType::Int64Array */ Js::Int64Array::GetOffsetOfLength(),
    /* ObjectType::Uint64Array */ Js::Uint64Array::GetOffsetOfLength(),
    /* ObjectType::BoolArray */ Js::BoolArray::GetOffsetOfLength(),
    /* ObjectType::CharArray */ Js::CharArray::GetOffsetOfLength()
};
// Element IR type per ObjectType, used when indexing into the array's storage;
// TyIllegal for non-array object types. See GetArrayIndirType.
const IRType Lowerer::IndirTypes[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ TyIllegal,
    /* ObjectType::Object */ TyIllegal,
    /* ObjectType::RegExp */ TyIllegal,
    /* ObjectType::ObjectWithArray */ TyVar,
    /* ObjectType::Array */ TyVar,
    /* ObjectType::Int8Array */ TyInt8,
    /* ObjectType::Uint8Array */ TyUint8,
    /* ObjectType::Uint8ClampedArray */ TyUint8,
    /* ObjectType::Int16Array */ TyInt16,
    /* ObjectType::Uint16Array */ TyUint16,
    /* ObjectType::Int32Array */ TyInt32,
    /* ObjectType::Uint32Array */ TyUint32,
    /* ObjectType::Float32Array */ TyFloat32,
    /* ObjectType::Float64Array */ TyFloat64,
    /* ObjectType::Int8VirtualArray */ TyInt8,
    /* ObjectType::Uint8VirtualArray */ TyUint8,
    /* ObjectType::Uint8ClampedVirtualArray */ TyUint8,
    /* ObjectType::Int16VirtualArray */ TyInt16,
    /* ObjectType::Uint16VirtualArray */ TyUint16,
    /* ObjectType::Int32VirtualArray */ TyInt32,
    /* ObjectType::Uint32VirtualArray */ TyUint32,
    /* ObjectType::Float32VirtualArray */ TyFloat32,
    /* ObjectType::Float64VirtualArray */ TyFloat64,
    /* ObjectType::Int8MixedArray */ TyInt8,
    /* ObjectType::Uint8MixedArray */ TyUint8,
    /* ObjectType::Uint8ClampedMixedArray */ TyUint8,
    /* ObjectType::Int16MixedArray */ TyInt16,
    /* ObjectType::Uint16MixedArray */ TyUint16,
    /* ObjectType::Int32MixedArray */ TyInt32,
    /* ObjectType::Uint32MixedArray */ TyUint32,
    /* ObjectType::Float32MixedArray */ TyFloat32,
    /* ObjectType::Float64MixedArray */ TyFloat64,
    /* ObjectType::Int64Array */ TyInt64,
    /* ObjectType::Uint64Array */ TyUint64,
    /* ObjectType::BoolArray */ TyUint8,
    /* ObjectType::CharArray */ TyUint16
};
// Index scale (log2 of element size) per ObjectType; (BYTE)-1 for non-array
// object types. See GetArrayIndirScale.
const BYTE Lowerer::IndirScales[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */ static_cast<BYTE>(-1),
    /* ObjectType::Object */ static_cast<BYTE>(-1),
    /* ObjectType::RegExp */ static_cast<BYTE>(-1),
    /* ObjectType::ObjectWithArray */ LowererMD::GetDefaultIndirScale(),
    /* ObjectType::Array */ LowererMD::GetDefaultIndirScale(),
    /* ObjectType::Int8Array */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8Array */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16Array */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16Array */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32Array */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32Array */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32Array */ 2, // log2(sizeof(float))
    /* ObjectType::Float64Array */ 3, // log2(sizeof(double))
    /* ObjectType::Int8VirtualArray */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8VirtualArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedVirtualArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16VirtualArray */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16VirtualArray */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32VirtualArray */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32VirtualArray */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32VirtualArray */ 2, // log2(sizeof(float))
    /* ObjectType::Float64VirtualArray */ 3, // log2(sizeof(double))
    /* ObjectType::Int8MixedArray */ 0, // log2(sizeof(int8))
    /* ObjectType::Uint8MixedArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedMixedArray */ 0, // log2(sizeof(uint8))
    /* ObjectType::Int16MixedArray */ 1, // log2(sizeof(int16))
    /* ObjectType::Uint16MixedArray */ 1, // log2(sizeof(uint16))
    /* ObjectType::Int32MixedArray */ 2, // log2(sizeof(int32))
    /* ObjectType::Uint32MixedArray */ 2, // log2(sizeof(uint32))
    /* ObjectType::Float32MixedArray */ 2, // log2(sizeof(float))
    /* ObjectType::Float64MixedArray */ 3, // log2(sizeof(double))
    /* ObjectType::Int64Array */ 3, // log2(sizeof(int64))
    /* ObjectType::Uint64Array */ 3, // log2(sizeof(uint64))
    /* ObjectType::BoolArray */ 0, // log2(sizeof(bool))
    /* ObjectType::CharArray */ 1 // log2(sizeof(char16))
};
  12419. VTableValue Lowerer::GetArrayVtableAddress(const ValueType valueType, bool getVirtual)
  12420. {
  12421. Assert(valueType.IsLikelyAnyOptimizedArray());
  12422. if(valueType.IsLikelyArrayOrObjectWithArray())
  12423. {
  12424. if(valueType.HasIntElements())
  12425. {
  12426. return VTableValue::VtableNativeIntArray;
  12427. }
  12428. else if(valueType.HasFloatElements())
  12429. {
  12430. return VTableValue::VtableNativeFloatArray;
  12431. }
  12432. }
  12433. if (getVirtual && valueType.IsLikelyMixedTypedArrayType())
  12434. {
  12435. return VtableAddresses[static_cast<ValueType::TSize>(valueType.GetMixedToVirtualTypedArrayObjectType())];
  12436. }
  12437. return VtableAddresses[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  12438. }
uint32 Lowerer::GetArrayOffsetOfHeadSegment(const ValueType valueType)
{
    // Offset of the head segment (or typed-array buffer) for the given array type.
    Assert(valueType.IsLikelyAnyOptimizedArray());
    return OffsetsOfHeadSegment[static_cast<ValueType::TSize>(valueType.GetObjectType())];
}
uint32 Lowerer::GetArrayOffsetOfLength(const ValueType valueType)
{
    // Offset of the length field for the given array type.
    Assert(valueType.IsLikelyAnyOptimizedArray());
    return OffsetsOfLength[static_cast<ValueType::TSize>(valueType.GetObjectType())];
}
  12449. IRType Lowerer::GetArrayIndirType(const ValueType valueType)
  12450. {
  12451. Assert(valueType.IsLikelyAnyOptimizedArray());
  12452. if(valueType.IsLikelyArrayOrObjectWithArray())
  12453. {
  12454. if(valueType.HasIntElements())
  12455. {
  12456. return TyInt32;
  12457. }
  12458. else if(valueType.HasFloatElements())
  12459. {
  12460. return TyFloat64;
  12461. }
  12462. }
  12463. return IndirTypes[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  12464. }
  12465. BYTE Lowerer::GetArrayIndirScale(const ValueType valueType)
  12466. {
  12467. Assert(valueType.IsLikelyAnyOptimizedArray());
  12468. if(valueType.IsLikelyArrayOrObjectWithArray())
  12469. {
  12470. if(valueType.HasIntElements())
  12471. {
  12472. return 2; // log2(sizeof(int32))
  12473. }
  12474. else if(valueType.HasFloatElements())
  12475. {
  12476. return 3; // log2(sizeof(double))
  12477. }
  12478. }
  12479. return IndirScales[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  12480. }
  12481. int Lowerer::SimdGetElementCountFromBytes(ValueType arrValueType, uint8 dataWidth)
  12482. {
  12483. Assert(dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);
  12484. Assert(arrValueType.IsTypedArray());
  12485. BYTE bpe = 1 << Lowerer::GetArrayIndirScale(arrValueType);
  12486. // round up
  12487. return (int)::ceil(((float)dataWidth) / bpe);
  12488. }
  12489. bool Lowerer::ShouldGenerateArrayFastPath(
  12490. const IR::Opnd *const arrayOpnd,
  12491. const bool supportsObjectsWithArrays,
  12492. const bool supportsTypedArrays,
  12493. const bool requiresSse2ForFloatArrays) const
  12494. {
  12495. Assert(arrayOpnd);
  12496. const ValueType arrayValueType(arrayOpnd->GetValueType());
  12497. if(arrayValueType.IsUninitialized())
  12498. {
  12499. // Don't have info about the value type, better to generate the fast path anyway
  12500. return true;
  12501. }
  12502. if (!arrayValueType.IsLikelyObject())
  12503. {
  12504. if (!arrayValueType.HasBeenObject() || arrayValueType.IsLikelyString())
  12505. {
  12506. return false;
  12507. }
  12508. //We have seen at least once there is an object in the code path. Generate fastpath hoping it to be array.
  12509. //Its nice if we can get all the attributes set but valueType is only 16 bits. Consider expanding the same.
  12510. return true;
  12511. }
  12512. if( (!supportsObjectsWithArrays && arrayValueType.GetObjectType() == ObjectType::ObjectWithArray) ||
  12513. (!supportsTypedArrays && arrayValueType.IsLikelyTypedArray()) )
  12514. {
  12515. // The fast path likely would not hit
  12516. return false;
  12517. }
  12518. if(arrayValueType.GetObjectType() == ObjectType::UninitializedObject)
  12519. {
  12520. // Don't have info about the object type, better to generate the fast path anyway
  12521. return true;
  12522. }
  12523. #ifdef _M_IX86
  12524. if(requiresSse2ForFloatArrays &&
  12525. (
  12526. arrayValueType.GetObjectType() == ObjectType::Float32Array ||
  12527. arrayValueType.GetObjectType() == ObjectType::Float64Array
  12528. ) &&
  12529. !AutoSystemInfo::Data.SSE2Available())
  12530. {
  12531. // Fast paths for float arrays rely on SSE2
  12532. return false;
  12533. }
  12534. #endif
  12535. return !arrayValueType.IsLikelyAnyUnOptimizedArray();
  12536. }
  12537. IR::RegOpnd *Lowerer::LoadObjectArray(IR::RegOpnd *const baseOpnd, IR::Instr *const insertBeforeInstr)
  12538. {
  12539. Assert(baseOpnd);
  12540. Assert(
  12541. baseOpnd->GetValueType().IsLikelyObject() &&
  12542. baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray);
  12543. Assert(insertBeforeInstr);
  12544. Func *const func = insertBeforeInstr->m_func;
  12545. // mov array, [base + offsetOf(objectArrayOrFlags)]
  12546. IR::RegOpnd *const arrayOpnd =
  12547. baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd()->CopyAsRegOpnd(func) : baseOpnd->Copy(func)->AsRegOpnd();
  12548. arrayOpnd->m_sym = StackSym::New(TyVar, func);
  12549. arrayOpnd->SetValueType(arrayOpnd->GetValueType().ToArray());
  12550. const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, func, false /* autoDelete */);
  12551. InsertMove(
  12552. arrayOpnd,
  12553. IR::IndirOpnd::New(
  12554. baseOpnd,
  12555. Js::DynamicObject::GetOffsetOfObjectArray(),
  12556. arrayOpnd->GetType(),
  12557. func),
  12558. insertBeforeInstr);
  12559. return arrayOpnd;
  12560. }
  12561. void
  12562. Lowerer::GenerateIsEnabledArraySetElementFastPathCheck(
  12563. IR::LabelInstr * isDisabledLabel,
  12564. IR::Instr * const insertBeforeInstr)
  12565. {
  12566. InsertCompareBranch(
  12567. this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable),
  12568. LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableInvalid),
  12569. Js::OpCode::BrEq_A,
  12570. isDisabledLabel,
  12571. insertBeforeInstr);
  12572. }
// Emits runtime checks that baseOpnd is the expected (likely) array kind,
// branching to isNotObjectLabel / isNotArrayLabel on failure. Returns a new
// operand referencing the checked array, with its value type upgraded to a
// definite object.
//
// forceFloat: expect a native float array; a native int array is converted
//             in place via a helper call and then bails out.
// isStore:    compare against the optimization-override vtables so the store
//             fast path can be invalidated at runtime.
// allowDefiniteArray: permits calling this even when the value type is
//             already a definite optimized array.
IR::RegOpnd *Lowerer::GenerateArrayTest(
    IR::RegOpnd *const baseOpnd,
    IR::LabelInstr *const isNotObjectLabel,
    IR::LabelInstr *const isNotArrayLabel,
    IR::Instr *const insertBeforeInstr,
    const bool forceFloat,
    const bool isStore,
    const bool allowDefiniteArray)
{
    Assert(baseOpnd);
    const ValueType baseValueType(baseOpnd->GetValueType());
    // Shouldn't request to do an array test when it's already known to be an array, or if it's unlikely to be an array
    Assert(!baseValueType.IsAnyOptimizedArray() || allowDefiniteArray || baseValueType.IsNativeArray());
    Assert(baseValueType.IsUninitialized() || baseValueType.HasBeenObject());
    Assert(isNotObjectLabel);
    Assert(isNotArrayLabel);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    IR::RegOpnd *arrayOpnd;
    IR::AutoReuseOpnd autoReuseArrayOpnd;
    if(baseValueType.IsLikelyObject() && baseValueType.GetObjectType() == ObjectType::ObjectWithArray)
    {
        // Only DynamicObject is allowed (DynamicObject vtable is ensured) because some object types have special handling for
        // index properties - arguments object, string object, external object, etc.
        // JavascriptArray::Jit_TryGetArrayForObjectWithArray as well.
        GenerateObjectTypeTest(baseOpnd, insertBeforeInstr, isNotObjectLabel);
        GenerateObjectHeaderInliningTest(baseOpnd, isNotArrayLabel, insertBeforeInstr);
        arrayOpnd = LoadObjectArray(baseOpnd, insertBeforeInstr);
        autoReuseArrayOpnd.Initialize(arrayOpnd, func, false /* autoDelete */);
        // The loaded slot may be null, or hold flags (low bit set) instead of
        // an array pointer; reject both cases.
        // test array, array
        // je $isNotArrayLabel
        // test array, 1
        // jne $isNotArrayLabel
        InsertTestBranch(
            arrayOpnd,
            arrayOpnd,
            Js::OpCode::BrEq_A,
            isNotArrayLabel,
            insertBeforeInstr);
        InsertTestBranch(
            arrayOpnd,
            IR::IntConstOpnd::New(1, TyUint8, func, true),
            Js::OpCode::BrNeq_A,
            isNotArrayLabel,
            insertBeforeInstr);
    }
    else
    {
        // Reject tagged (non-object) values first, unless already known untagged.
        if(!baseOpnd->IsNotTaggedValue())
        {
            m_lowererMD.GenerateObjectTest(baseOpnd, insertBeforeInstr, isNotObjectLabel);
        }
        arrayOpnd = baseOpnd->Copy(func)->AsRegOpnd();
        if(!baseValueType.IsLikelyAnyOptimizedArray())
        {
            // No array profile info: assume a plain JavascriptArray that may
            // have missing values.
            arrayOpnd->SetValueType(
                ValueType::GetObject(ObjectType::Array)
                    .ToLikely()
                    .SetHasNoMissingValues(false)
                    .SetArrayTypeId(Js::TypeIds_Array));
        }
        autoReuseArrayOpnd.Initialize(arrayOpnd, func, false /* autoDelete */);
    }
    // Pick the vtable(s) to compare against, based on the profiled type.
    VTableValue vtableAddress = baseValueType.IsLikelyAnyOptimizedArray()
        ? GetArrayVtableAddress(baseValueType)
        : VTableValue::VtableJavascriptArray;
    VTableValue virtualVtableAddress = VTableValue::VtableInvalid;
    if (baseValueType.IsLikelyMixedTypedArrayType())
    {
        // Mixed typed-array types also accept the "virtual" variant's vtable.
        virtualVtableAddress = GetArrayVtableAddress(baseValueType, true);
    }
    IR::Opnd * vtableOpnd;
    IR::Opnd * vtableVirtualOpnd = nullptr;
    if (isStore &&
        (vtableAddress == VTableValue::VtableJavascriptArray ||
         baseValueType.IsLikelyNativeArray()))
    {
        // For stores, load the expected vtable from the optimization overrides
        // so the runtime can invalidate the fast path by swapping the value.
        vtableOpnd = IR::RegOpnd::New(TyMachPtr, func);
        if (baseValueType.IsLikelyNativeArray())
        {
            if (baseValueType.HasIntElements())
            {
                InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesIntArraySetElementFastPathVtable), insertBeforeInstr);
            }
            else
            {
                Assert(baseValueType.HasFloatElements());
                InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesFloatArraySetElementFastPathVtable), insertBeforeInstr);
            }
        }
        else
        {
            InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable), insertBeforeInstr);
        }
    }
    else
    {
        vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, vtableAddress);
    }
    // cmp [array], vtableAddress
    // jne $isNotArrayLabel
    if (forceFloat && baseValueType.IsLikelyNativeFloatArray())
    {
        // We expect a native float array. If we get native int instead, convert it on the spot and bail out afterward.
        const auto goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::BranchInstr* branchInstr = InsertCompareBranch(
            IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
            vtableOpnd,
            Js::OpCode::BrEq_A,
            goodArrayLabel,
            insertBeforeInstr);
        InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr, isStore);
        IR::LabelInstr *notFloatArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        insertBeforeInstr->InsertBefore(notFloatArrayLabel);
        // Not a float array: check whether it is a native int array.
        if (isStore)
        {
            vtableOpnd = IR::RegOpnd::New(TyMachPtr, func);
            InsertMove(vtableOpnd, IR::MemRefOpnd::New(
                func->GetScriptContextInfo()->GetIntArraySetElementFastPathVtableAddr(),
                TyMachPtr, func), insertBeforeInstr);
        }
        else
        {
            vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableJavascriptNativeIntArray);
        }
        branchInstr = InsertCompareBranch(
            IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
            vtableOpnd,
            Js::OpCode::BrNeq_A,
            isNotArrayLabel,
            insertBeforeInstr);
        InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr, isStore);
        // Convert the int array to a float array via the runtime helper.
        m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayOpnd);
        IR::Instr *helperInstr = IR::Instr::New(Js::OpCode::Call, m_func);
        insertBeforeInstr->InsertBefore(helperInstr);
        m_lowererMD.ChangeToHelperCall(helperInstr, IR::HelperIntArr_ToNativeFloatArray);
        // Branch to the (bailout) label, because converting the array may have made our array checks unsafe.
        InsertBranch(Js::OpCode::Br, isNotArrayLabel, insertBeforeInstr);
        insertBeforeInstr->InsertBefore(goodArrayLabel);
    }
    else
    {
        IR::LabelInstr* goodArrayLabel = nullptr;
        if (baseValueType.IsLikelyMixedTypedArrayType())
        {
            // Accept either the regular or the virtual typed-array vtable.
            goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableOpnd,
                Js::OpCode::BrEq_A,
                goodArrayLabel,
                insertBeforeInstr);
            Assert(virtualVtableAddress);
            vtableVirtualOpnd = LoadVTableValueOpnd(insertBeforeInstr, virtualVtableAddress);
            Assert(vtableVirtualOpnd);
            IR::BranchInstr* branchInstr = InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableVirtualOpnd,
                Js::OpCode::BrNeq_A,
                isNotArrayLabel,
                insertBeforeInstr);
            InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr, isStore);
            insertBeforeInstr->InsertBefore(goodArrayLabel);
        }
        else
        {
            IR::BranchInstr *branchInstr = InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableOpnd,
                Js::OpCode::BrNeq_A,
                isNotArrayLabel,
                insertBeforeInstr);
            InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr, isStore);
        }
    }
    // The vtable check passed, so the value is now a definite object of the
    // checked kind. Missing-values info is only kept for definite arrays.
    ValueType arrayValueType(arrayOpnd->GetValueType());
    if(arrayValueType.IsLikelyArrayOrObjectWithArray() && !arrayValueType.IsObject())
    {
        arrayValueType = arrayValueType.SetHasNoMissingValues(false);
    }
    arrayValueType = arrayValueType.ToDefiniteObject();
    arrayOpnd->SetValueType(arrayValueType);
    return arrayOpnd;
}
  12757. ///----------------------------------------------------------------------------
  12758. ///
  12759. /// Lowerer::HoistIndirOffset
  12760. ///
  12761. /// Replace the offset of the given indir with a new symbol, which becomes the indir index.
  12762. /// Assign the new symbol by creating an assignment from the constant offset.
  12763. ///
  12764. ///----------------------------------------------------------------------------
  12765. IR::Instr *Lowerer::HoistIndirOffset(IR::Instr* instr, IR::IndirOpnd *indirOpnd, RegNum regNum)
  12766. {
  12767. int32 offset = indirOpnd->GetOffset();
  12768. if (indirOpnd->GetIndexOpnd())
  12769. {
  12770. Assert(indirOpnd->GetBaseOpnd());
  12771. return Lowerer::HoistIndirOffsetAsAdd(instr, indirOpnd, indirOpnd->GetBaseOpnd(), offset, regNum);
  12772. }
  12773. IR::IntConstOpnd *offsetOpnd = IR::IntConstOpnd::New(offset, TyInt32, instr->m_func);
  12774. IR::RegOpnd *indexOpnd = IR::RegOpnd::New(StackSym::New(TyMachReg, instr->m_func), regNum, TyMachReg, instr->m_func);
  12775. #if defined(DBG) && defined(_M_ARM)
  12776. if (regNum == SCRATCH_REG)
  12777. {
  12778. AssertMsg(indirOpnd->GetBaseOpnd()->GetReg()!= SCRATCH_REG, "Why both are SCRATCH_REG");
  12779. if (instr->GetSrc1() && instr->GetSrc1()->IsRegOpnd())
  12780. {
  12781. Assert(instr->GetSrc1()->AsRegOpnd()->GetReg() != SCRATCH_REG);
  12782. }
  12783. if (instr->GetSrc2() && instr->GetSrc2()->IsRegOpnd())
  12784. {
  12785. Assert(instr->GetSrc2()->AsRegOpnd()->GetReg() != SCRATCH_REG);
  12786. }
  12787. if (instr->GetDst() && instr->GetDst()->IsRegOpnd())
  12788. {
  12789. Assert(instr->GetDst()->AsRegOpnd()->GetReg() != SCRATCH_REG);
  12790. }
  12791. }
  12792. #endif
  12793. // Clear the offset and add a new reg as the index.
  12794. indirOpnd->SetOffset(0);
  12795. indirOpnd->SetIndexOpnd(indexOpnd);
  12796. IR::Instr *instrAssign = Lowerer::InsertMove(indexOpnd, offsetOpnd, instr);
  12797. indexOpnd->m_sym->SetIsIntConst(offset);
  12798. return instrAssign;
  12799. }
  12800. IR::Instr *Lowerer::HoistIndirOffsetAsAdd(IR::Instr* instr, IR::IndirOpnd *orgOpnd, IR::Opnd *baseOpnd, int offset, RegNum regNum)
  12801. {
  12802. IR::RegOpnd *newBaseOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, instr->m_func), regNum, TyMachPtr, instr->m_func);
  12803. IR::IntConstOpnd *src2 = IR::IntConstOpnd::New(offset, TyInt32, instr->m_func);
  12804. IR::Instr * instrAdd = IR::Instr::New(Js::OpCode::Add_A, newBaseOpnd, baseOpnd, src2, instr->m_func);
  12805. LowererMD::ChangeToAdd(instrAdd, false);
  12806. instr->InsertBefore(instrAdd);
  12807. orgOpnd->ReplaceBaseOpnd(newBaseOpnd);
  12808. orgOpnd->SetOffset(0);
  12809. return instrAdd;
  12810. }
  12811. IR::Instr *Lowerer::HoistIndirIndexOpndAsAdd(IR::Instr* instr, IR::IndirOpnd *orgOpnd, IR::Opnd *baseOpnd, IR::Opnd *indexOpnd, RegNum regNum)
  12812. {
  12813. IR::RegOpnd *newBaseOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, instr->m_func), regNum, TyMachPtr, instr->m_func);
  12814. IR::Instr * instrAdd = IR::Instr::New(Js::OpCode::Add_A, newBaseOpnd, baseOpnd, indexOpnd->UseWithNewType(TyMachPtr, instr->m_func), instr->m_func);
  12815. LowererMD::ChangeToAdd(instrAdd, false);
  12816. instr->InsertBefore(instrAdd);
  12817. orgOpnd->ReplaceBaseOpnd(newBaseOpnd);
  12818. orgOpnd->SetIndexOpnd(nullptr);
  12819. return instrAdd;
  12820. }
  12821. ///----------------------------------------------------------------------------
  12822. ///
  12823. /// Lowerer::HoistSymOffset
  12824. ///
  12825. /// Replace the given sym with an indir using the given base and offset.
  12826. /// (This is used, for instance, to hoist a sym offset that is too large to encode.)
  12827. ///
  12828. ///----------------------------------------------------------------------------
  12829. IR::Instr *Lowerer::HoistSymOffset(IR::Instr *instr, IR::SymOpnd *symOpnd, RegNum baseReg, uint32 offset, RegNum regNum)
  12830. {
  12831. IR::RegOpnd *baseOpnd = IR::RegOpnd::New(nullptr, baseReg, TyMachPtr, instr->m_func);
  12832. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(baseOpnd, offset, symOpnd->GetType(), instr->m_func);
  12833. if (symOpnd == instr->GetDst())
  12834. {
  12835. instr->ReplaceDst(indirOpnd);
  12836. }
  12837. else
  12838. {
  12839. instr->ReplaceSrc(symOpnd, indirOpnd);
  12840. }
  12841. return Lowerer::HoistIndirOffset(instr, indirOpnd, regNum);
  12842. }
  12843. IR::Instr *Lowerer::HoistSymOffsetAsAdd(IR::Instr* instr, IR::SymOpnd *orgOpnd, IR::Opnd *baseOpnd, int offset, RegNum regNum)
  12844. {
  12845. IR::IndirOpnd *newIndirOpnd = IR::IndirOpnd::New(baseOpnd->AsRegOpnd(), 0, TyMachPtr, instr->m_func);
  12846. instr->Replace(orgOpnd, newIndirOpnd); // Replace SymOpnd with IndirOpnd
  12847. return Lowerer::HoistIndirOffsetAsAdd(instr, newIndirOpnd, baseOpnd, offset, regNum);
  12848. }
  12849. IR::LabelInstr *Lowerer::InsertLabel(const bool isHelper, IR::Instr *const insertBeforeInstr)
  12850. {
  12851. Assert(insertBeforeInstr);
  12852. Func *const func = insertBeforeInstr->m_func;
  12853. IR::LabelInstr *const instr = IR::LabelInstr::New(Js::OpCode::Label, func, isHelper);
  12854. insertBeforeInstr->InsertBefore(instr);
  12855. return instr;
  12856. }
// Convenience wrapper: InsertMove with the software write barrier requested
// (generateWriteBarrier = true).
IR::Instr *Lowerer::InsertMoveWithBarrier(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr)
{
    return Lowerer::InsertMove(dst, src, insertBeforeInstr, true);
}
// Inserts a move of src into dst before insertBeforeInstr and returns the
// inserted (lowered) instruction. Handles float-constant materialization and
// narrowing moves; optionally emits a write barrier for the store.
IR::Instr *Lowerer::InsertMove(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr, bool generateWriteBarrier)
{
    Assert(dst);
    Assert(src);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    // A float destination with a constant source is materialized through a
    // dedicated load path rather than a plain move.
    if(dst->IsFloat() && src->IsConstOpnd())
    {
        return LoadFloatFromNonReg(src, dst, insertBeforeInstr);
    }
    // Narrowing move: shrink the source to the destination's type first.
    if(TySize[dst->GetType()] < TySize[src->GetType()])
    {
#if _M_IX86
        if (IRType_IsInt64(src->GetType()))
        {
            // On x86, if we are trying to move an int64 to a smaller type
            // Insert a move of the low bits into dst
            return InsertMove(dst, func->FindOrCreateInt64Pair(src).low, insertBeforeInstr, generateWriteBarrier);
        }
        else
#endif
        {
            src = src->UseWithNewType(dst->GetType(), func);
        }
    }
    IR::Instr * instr = IR::Instr::New(Js::OpCode::Ld_A, dst, src, func);
    insertBeforeInstr->InsertBefore(instr);
    if (generateWriteBarrier)
    {
        // May replace the instruction; return the one that performs the store.
        instr = LowererMD::ChangeToWriteBarrierAssign(instr, func);
    }
    else
    {
        LowererMD::ChangeToAssignNoBarrierCheck(instr);
    }
    return instr;
}
// Convenience overload: signed branch (isUnsigned = false).
IR::BranchInstr *Lowerer::InsertBranch(
    const Js::OpCode opCode,
    IR::LabelInstr *const target,
    IR::Instr *const insertBeforeInstr)
{
    return InsertBranch(opCode, false /* isUnsigned */, target, insertBeforeInstr);
}
  12905. IR::BranchInstr *Lowerer::InsertBranch(
  12906. const Js::OpCode opCode,
  12907. const bool isUnsigned,
  12908. IR::LabelInstr *const target,
  12909. IR::Instr *const insertBeforeInstr)
  12910. {
  12911. Assert(target);
  12912. Assert(insertBeforeInstr);
  12913. Func *const func = insertBeforeInstr->m_func;
  12914. IR::BranchInstr *const instr = IR::BranchInstr::New(opCode, target, func);
  12915. if(!instr->IsLowered())
  12916. {
  12917. if(opCode == Js::OpCode::Br)
  12918. {
  12919. instr->m_opcode = LowererMD::MDUncondBranchOpcode;
  12920. }
  12921. else if(isUnsigned)
  12922. {
  12923. instr->m_opcode = LowererMD::MDUnsignedBranchOpcode(opCode);
  12924. }
  12925. else
  12926. {
  12927. instr->m_opcode = LowererMD::MDBranchOpcode(opCode);
  12928. }
  12929. }
  12930. insertBeforeInstr->InsertBefore(instr);
  12931. return instr;
  12932. }
  12933. IR::Instr *Lowerer::InsertCompare(IR::Opnd *const src1, IR::Opnd *const src2, IR::Instr *const insertBeforeInstr)
  12934. {
  12935. Assert(src1);
  12936. Assert(!src1->IsFloat64()); // not implemented
  12937. Assert(src2);
  12938. Assert(!src2->IsFloat64()); // not implemented
  12939. Assert(!src1->IsEqual(src2));
  12940. Assert(insertBeforeInstr);
  12941. Func *const func = insertBeforeInstr->m_func;
  12942. IR::Instr *const instr = IR::Instr::New(Js::OpCode::CMP, func);
  12943. instr->SetSrc1(src1);
  12944. instr->SetSrc2(src2);
  12945. insertBeforeInstr->InsertBefore(instr);
  12946. LowererMD::Legalize(instr);
  12947. return instr;
  12948. }
// Convenience overload: signed compare-and-branch (isUnsigned = false).
IR::BranchInstr *Lowerer::InsertCompareBranch(
    IR::Opnd *const compareSrc1,
    IR::Opnd *const compareSrc2,
    Js::OpCode branchOpCode,
    IR::LabelInstr *const target,
    IR::Instr *const insertBeforeInstr,
    const bool ignoreNaN)
{
    return InsertCompareBranch(compareSrc1, compareSrc2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr, ignoreNaN);
}
// Emits a compare of the two sources followed by a conditional branch to
// target, choosing the cheapest encoding: float and (x86) int64 compares go
// through dedicated lowering paths; integer compares may swap operands to
// fold a constant, and compares against zero prefer Test over Cmp.
IR::BranchInstr *Lowerer::InsertCompareBranch(
    IR::Opnd *compareSrc1,
    IR::Opnd *compareSrc2,
    Js::OpCode branchOpCode,
    const bool isUnsigned,
    IR::LabelInstr *const target,
    IR::Instr *const insertBeforeInstr,
    const bool ignoreNaN)
{
    Assert(compareSrc1);
    Assert(compareSrc2);
    Func *const func = insertBeforeInstr->m_func;
    // Float compares lower through the MD float-branch path (handles NaN per ignoreNaN).
    if(compareSrc1->IsFloat())
    {
        Assert(compareSrc2->IsFloat());
        Assert(!isUnsigned);
        IR::BranchInstr *const instr = IR::BranchInstr::New(branchOpCode, target, compareSrc1, compareSrc2, func);
        insertBeforeInstr->InsertBefore(instr);
        return LowererMD::LowerFloatCondBranch(instr, ignoreNaN);
    }
#ifdef _M_IX86
    // On x86, int64 compare-branches are emitted via the int64 instruction pairing path.
    else if (compareSrc1->IsInt64())
    {
        Assert(compareSrc2->IsInt64());
        IR::BranchInstr *const instr = IR::BranchInstr::New(branchOpCode, target, compareSrc1, compareSrc2, func);
        insertBeforeInstr->InsertBefore(instr);
        m_lowererMD.EmitInt64Instr(instr);
        return instr;
    }
#endif
    // For relational opcodes, precompute the opcode to use if the sources are
    // swapped (a >= b  <=>  b <= a, etc.).
    Js::OpCode swapSrcsBranchOpCode;
    switch(branchOpCode)
    {
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrNeq_A:
            swapSrcsBranchOpCode = branchOpCode;
            goto Common_BrEqNeqGeGtLeLt;
        case Js::OpCode::BrGe_A:
            swapSrcsBranchOpCode = Js::OpCode::BrLe_A;
            goto Common_BrEqNeqGeGtLeLt;
        case Js::OpCode::BrGt_A:
            swapSrcsBranchOpCode = Js::OpCode::BrLt_A;
            goto Common_BrEqNeqGeGtLeLt;
        case Js::OpCode::BrLe_A:
            swapSrcsBranchOpCode = Js::OpCode::BrGe_A;
            goto Common_BrEqNeqGeGtLeLt;
        case Js::OpCode::BrLt_A:
            swapSrcsBranchOpCode = Js::OpCode::BrGt_A;
            // fall through
        Common_BrEqNeqGeGtLeLt:
            // Check if src1 is a constant and src2 is not, and facilitate folding the constant into the Cmp instruction
            if( (
                    compareSrc1->IsIntConstOpnd() ||
                    (
                        compareSrc1->IsAddrOpnd() &&
                        Math::FitsInDWord(reinterpret_cast<size_t>(compareSrc1->AsAddrOpnd()->m_address))
                    )
                ) &&
                !compareSrc2->IsIntConstOpnd() &&
                !compareSrc2->IsAddrOpnd())
            {
                // Swap the sources and branch
                IR::Opnd *const tempSrc = compareSrc1;
                compareSrc1 = compareSrc2;
                compareSrc2 = tempSrc;
                branchOpCode = swapSrcsBranchOpCode;
            }
            // Check for compare with zero, to prefer using Test instead of Cmp
            // (only reg vs. zero with Eq/Neq/Ge/Lt qualifies).
            if( !compareSrc1->IsRegOpnd() ||
                !(
                    (compareSrc2->IsIntConstOpnd() && compareSrc2->AsIntConstOpnd()->GetValue() == 0) ||
                    (compareSrc2->IsAddrOpnd() && !compareSrc2->AsAddrOpnd()->m_address)
                ) ||
                branchOpCode == Js::OpCode::BrGt_A || branchOpCode == Js::OpCode::BrLe_A)
            {
                goto Default;
            }
            if(branchOpCode == Js::OpCode::BrGe_A || branchOpCode == Js::OpCode::BrLt_A)
            {
                // Unsigned >=0 / <0 against zero don't map to a sign test; use Cmp.
                if(isUnsigned)
                {
                    goto Default;
                }
                // Map the signed zero-compare to the MD sign-based branch opcode.
                branchOpCode = LowererMD::MDCompareWithZeroBranchOpcode(branchOpCode);
            }
            // The zero constant is folded away by the Test encoding; free it if unused.
            if(!compareSrc2->IsInUse())
            {
                compareSrc2->Free(func);
            }
            InsertTest(compareSrc1, compareSrc1, insertBeforeInstr);
            break;
        default:
        Default:
            InsertCompare(compareSrc1, compareSrc2, insertBeforeInstr);
            break;
    }
    return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
}
  13057. IR::Instr *Lowerer::InsertTest(IR::Opnd *const src1, IR::Opnd *const src2, IR::Instr *const insertBeforeInstr)
  13058. {
  13059. Assert(src1);
  13060. Assert(!src1->IsFloat64()); // not implemented
  13061. Assert(src2);
  13062. Assert(!src2->IsFloat64()); // not implemented
  13063. #if !TARGET_64
  13064. Assert(!src1->IsInt64()); // not implemented
  13065. Assert(!src2->IsInt64()); // not implemented
  13066. #endif
  13067. Assert(insertBeforeInstr);
  13068. Func *const func = insertBeforeInstr->m_func;
  13069. IR::Instr *const instr = IR::Instr::New(LowererMD::MDTestOpcode, func);
  13070. instr->SetSrc1(src1);
  13071. instr->SetSrc2(src2);
  13072. insertBeforeInstr->InsertBefore(instr);
  13073. LowererMD::Legalize(instr);
  13074. return instr;
  13075. }
// Convenience overload: signed test-and-branch (isUnsigned = false).
IR::BranchInstr *Lowerer::InsertTestBranch(
    IR::Opnd *const testSrc1,
    IR::Opnd *const testSrc2,
    const Js::OpCode branchOpCode,
    IR::LabelInstr *const target,
    IR::Instr *const insertBeforeInstr)
{
    return InsertTestBranch(testSrc1, testSrc2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr);
}
  13085. IR::BranchInstr *Lowerer::InsertTestBranch(
  13086. IR::Opnd *const testSrc1,
  13087. IR::Opnd *const testSrc2,
  13088. const Js::OpCode branchOpCode,
  13089. const bool isUnsigned,
  13090. IR::LabelInstr *const target,
  13091. IR::Instr *const insertBeforeInstr)
  13092. {
  13093. InsertTest(testSrc1, testSrc2, insertBeforeInstr);
  13094. return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
  13095. }
  13096. /* Inserts add with an overflow check, if we overflow throw OOM
  13097. * add dst, src
  13098. * jno $continueLabel
  13099. * overflow code
  13100. * $continueLabel : fall through
  13101. */
  13102. void Lowerer::InsertAddWithOverflowCheck(
  13103. const bool needFlags,
  13104. IR::Opnd *const dst,
  13105. IR::Opnd *src1,
  13106. IR::Opnd *src2,
  13107. IR::Instr *const insertBeforeInstr,
  13108. IR::Instr **const onOverflowInsertBeforeInstrRef)
  13109. {
  13110. Func * func = insertBeforeInstr->m_func;
  13111. InsertAdd(needFlags, dst, src1, src2, insertBeforeInstr);
  13112. IR::LabelInstr *const continueLabel = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  13113. InsertBranch(LowererMD::MDNotOverflowBranchOpcode, continueLabel, insertBeforeInstr);
  13114. *onOverflowInsertBeforeInstrRef = continueLabel;
  13115. }
  13116. IR::Instr *Lowerer::InsertAdd(
  13117. const bool needFlags,
  13118. IR::Opnd *const dst,
  13119. IR::Opnd *src1,
  13120. IR::Opnd *src2,
  13121. IR::Instr *const insertBeforeInstr)
  13122. {
  13123. Assert(dst);
  13124. Assert(src1);
  13125. Assert(src2);
  13126. Assert(insertBeforeInstr);
  13127. Func *const func = insertBeforeInstr->m_func;
  13128. if(src2->IsIntConstOpnd())
  13129. {
  13130. IR::IntConstOpnd *const intConstOpnd = src2->AsIntConstOpnd();
  13131. const IntConstType value = intConstOpnd->GetValue();
  13132. if(value < 0 && value != IntConstMin)
  13133. {
  13134. // Change (s1 = s1 + -5) into (s1 = s1 - 5)
  13135. IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
  13136. newSrc2->SetValue(-value);
  13137. return InsertSub(needFlags, dst, src1, newSrc2, insertBeforeInstr);
  13138. }
  13139. }
  13140. else if(src1->IsIntConstOpnd())
  13141. {
  13142. IR::IntConstOpnd *const intConstOpnd = src1->AsIntConstOpnd();
  13143. const IntConstType value = intConstOpnd->GetValue();
  13144. if(value < 0 && value != IntConstMin)
  13145. {
  13146. // Change (s1 = -5 + s1) into (s1 = s1 - 5)
  13147. IR::Opnd *const newSrc1 = src2;
  13148. IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
  13149. newSrc2->SetValue(-value);
  13150. return InsertSub(needFlags, dst, newSrc1, newSrc2, insertBeforeInstr);
  13151. }
  13152. }
  13153. IR::Instr *const instr = IR::Instr::New(Js::OpCode::Add_A, dst, src1, src2, func);
  13154. insertBeforeInstr->InsertBefore(instr);
  13155. LowererMD::ChangeToAdd(instr, needFlags);
  13156. LowererMD::Legalize(instr);
  13157. return instr;
  13158. }
  13159. IR::Instr *Lowerer::InsertSub(
  13160. const bool needFlags,
  13161. IR::Opnd *const dst,
  13162. IR::Opnd *src1,
  13163. IR::Opnd *src2,
  13164. IR::Instr *const insertBeforeInstr)
  13165. {
  13166. Assert(dst);
  13167. Assert(src1);
  13168. Assert(src2);
  13169. Assert(insertBeforeInstr);
  13170. Func *const func = insertBeforeInstr->m_func;
  13171. if(src2->IsIntConstOpnd())
  13172. {
  13173. IR::IntConstOpnd *const intConstOpnd = src2->AsIntConstOpnd();
  13174. const IntConstType value = intConstOpnd->GetValue();
  13175. if(value < 0 && value != IntConstMin)
  13176. {
  13177. // Change (s1 = s1 - -5) into (s1 = s1 + 5)
  13178. IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
  13179. newSrc2->SetValue(-value);
  13180. return InsertAdd(needFlags, dst, src1, newSrc2, insertBeforeInstr);
  13181. }
  13182. }
  13183. IR::Instr *const instr = IR::Instr::New(Js::OpCode::Sub_A, dst, src1, src2, func);
  13184. insertBeforeInstr->InsertBefore(instr);
  13185. LowererMD::ChangeToSub(instr, needFlags);
  13186. LowererMD::Legalize(instr);
  13187. return instr;
  13188. }
  13189. IR::Instr *Lowerer::InsertLea(IR::RegOpnd *const dst, IR::Opnd *const src, IR::Instr *const insertBeforeInstr)
  13190. {
  13191. Assert(dst);
  13192. Assert(src);
  13193. Assert(src->IsIndirOpnd() || src->IsSymOpnd());
  13194. Assert(insertBeforeInstr);
  13195. Func *const func = insertBeforeInstr->m_func;
  13196. IR::Instr *const instr = IR::Instr::New(LowererMD::MDLea, dst, src, func);
  13197. insertBeforeInstr->InsertBefore(instr);
  13198. return ChangeToLea(instr);
  13199. }
// Converts an instruction of the form "regDst = indir/sym" into the machine
// LEA opcode and legalizes it for the target. The instruction must have a
// register destination, an indir or sym first source, and no second source.
IR::Instr *
Lowerer::ChangeToLea(IR::Instr * instr)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsRegOpnd());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsIndirOpnd() || instr->GetSrc1()->IsSymOpnd());
    Assert(!instr->GetSrc2());
    instr->m_opcode = LowererMD::MDLea;
    LowererMD::Legalize(instr);
    return instr;
}
  13213. #if _M_X64
  13214. IR::Instr *Lowerer::InsertMoveBitCast(
  13215. IR::Opnd *const dst,
  13216. IR::Opnd *const src1,
  13217. IR::Instr *const insertBeforeInstr)
  13218. {
  13219. Assert(dst);
  13220. Assert(dst->GetType() == TyFloat64);
  13221. Assert(src1);
  13222. Assert(src1->GetType() == TyUint64);
  13223. Assert(insertBeforeInstr);
  13224. Func *const func = insertBeforeInstr->m_func;
  13225. IR::Instr *const instr = IR::Instr::New(LowererMD::MDMovUint64ToFloat64Opcode, dst, src1, func);
  13226. insertBeforeInstr->InsertBefore(instr);
  13227. LowererMD::Legalize(instr);
  13228. return instr;
  13229. }
  13230. #endif
  13231. IR::Instr *Lowerer::InsertXor(
  13232. IR::Opnd *const dst,
  13233. IR::Opnd *const src1,
  13234. IR::Opnd *const src2,
  13235. IR::Instr *const insertBeforeInstr)
  13236. {
  13237. Assert(dst);
  13238. Assert(src1);
  13239. Assert(src2);
  13240. Assert(insertBeforeInstr);
  13241. Func *const func = insertBeforeInstr->m_func;
  13242. IR::Instr *const instr = IR::Instr::New(LowererMD::MDXorOpcode, dst, src1, src2, func);
  13243. insertBeforeInstr->InsertBefore(instr);
  13244. LowererMD::Legalize(instr);
  13245. return instr;
  13246. }
  13247. IR::Instr *Lowerer::InsertAnd(
  13248. IR::Opnd *const dst,
  13249. IR::Opnd *const src1,
  13250. IR::Opnd *const src2,
  13251. IR::Instr *const insertBeforeInstr)
  13252. {
  13253. Assert(dst);
  13254. Assert(src1);
  13255. Assert(src2);
  13256. Assert(insertBeforeInstr);
  13257. Func *const func = insertBeforeInstr->m_func;
  13258. IR::Instr *const instr = IR::Instr::New(Js::OpCode::AND, dst, src1, src2, func);
  13259. insertBeforeInstr->InsertBefore(instr);
  13260. LowererMD::Legalize(instr);
  13261. return instr;
  13262. }
  13263. IR::Instr *Lowerer::InsertOr(
  13264. IR::Opnd *const dst,
  13265. IR::Opnd *const src1,
  13266. IR::Opnd *const src2,
  13267. IR::Instr *const insertBeforeInstr)
  13268. {
  13269. Assert(dst);
  13270. Assert(src1);
  13271. Assert(src2);
  13272. Assert(insertBeforeInstr);
  13273. Func *const func = insertBeforeInstr->m_func;
  13274. IR::Instr *const instr = IR::Instr::New(LowererMD::MDOrOpcode, dst, src1, src2, func);
  13275. insertBeforeInstr->InsertBefore(instr);
  13276. LowererMD::Legalize(instr);
  13277. return instr;
  13278. }
  13279. IR::Instr *Lowerer::InsertShift(
  13280. const Js::OpCode opCode,
  13281. const bool needFlags,
  13282. IR::Opnd *const dst,
  13283. IR::Opnd *const src1,
  13284. IR::Opnd *const src2,
  13285. IR::Instr *const insertBeforeInstr)
  13286. {
  13287. Assert(dst);
  13288. Assert(!dst->IsFloat64()); // not implemented
  13289. Assert(src1);
  13290. Assert(!src1->IsFloat64()); // not implemented
  13291. Assert(src2);
  13292. Assert(!src2->IsFloat64()); // not implemented
  13293. Assert(insertBeforeInstr);
  13294. Func *const func = insertBeforeInstr->m_func;
  13295. IR::Instr *const instr = IR::Instr::New(opCode, dst, src1, src2, func);
  13296. insertBeforeInstr->InsertBefore(instr);
  13297. LowererMD::ChangeToShift(instr, needFlags);
  13298. LowererMD::Legalize(instr);
  13299. return instr;
  13300. }
  13301. IR::Instr *Lowerer::InsertShiftBranch(
  13302. const Js::OpCode shiftOpCode,
  13303. IR::Opnd *const dst,
  13304. IR::Opnd *const src1,
  13305. IR::Opnd *const src2,
  13306. const Js::OpCode branchOpCode,
  13307. IR::LabelInstr *const target,
  13308. IR::Instr *const insertBeforeInstr)
  13309. {
  13310. return InsertShiftBranch(shiftOpCode, dst, src1, src2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr);
  13311. }
  13312. IR::Instr *Lowerer::InsertShiftBranch(
  13313. const Js::OpCode shiftOpCode,
  13314. IR::Opnd *const dst,
  13315. IR::Opnd *const src1,
  13316. IR::Opnd *const src2,
  13317. const Js::OpCode branchOpCode,
  13318. const bool isUnsigned,
  13319. IR::LabelInstr *const target,
  13320. IR::Instr *const insertBeforeInstr)
  13321. {
  13322. InsertShift(shiftOpCode, true /* needFlags */, dst, src1, src2, insertBeforeInstr);
  13323. return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
  13324. }
  13325. IR::Instr *Lowerer::InsertConvertFloat32ToFloat64(
  13326. IR::Opnd *const dst,
  13327. IR::Opnd *const src,
  13328. IR::Instr *const insertBeforeInstr)
  13329. {
  13330. Assert(dst);
  13331. Assert(dst->IsFloat64());
  13332. Assert(src);
  13333. Assert(src->IsFloat32());
  13334. Assert(insertBeforeInstr);
  13335. Func *const func = insertBeforeInstr->m_func;
  13336. IR::Instr *const instr = IR::Instr::New(LowererMD::MDConvertFloat32ToFloat64Opcode, dst, src, func);
  13337. insertBeforeInstr->InsertBefore(instr);
  13338. LowererMD::Legalize(instr);
  13339. return instr;
  13340. }
  13341. IR::Instr *Lowerer::InsertConvertFloat64ToFloat32(
  13342. IR::Opnd *const dst,
  13343. IR::Opnd *const src,
  13344. IR::Instr *const insertBeforeInstr)
  13345. {
  13346. Assert(dst);
  13347. Assert(dst->IsFloat32());
  13348. Assert(src);
  13349. Assert(src->IsFloat64());
  13350. Assert(insertBeforeInstr);
  13351. Func *const func = insertBeforeInstr->m_func;
  13352. IR::Instr *const instr = IR::Instr::New(LowererMD::MDConvertFloat64ToFloat32Opcode, dst, src, func);
  13353. insertBeforeInstr->InsertBefore(instr);
  13354. LowererMD::Legalize(instr);
  13355. return instr;
  13356. }
// Emits "dst = src - 1" for unsigned 32-bit values, clamping at zero instead
// of wrapping on underflow (i.e. dst = (src == 0) ? 0 : src - 1).
// If 'onOverflowInsertBeforeInstrRef' is provided, it receives an insertion
// point reached only on the underflow path, so the caller can emit extra
// code that runs when the decrement underflowed.
void Lowerer::InsertDecUInt32PreventOverflow(
    IR::Opnd *const dst,
    IR::Opnd *const src,
    IR::Instr *const insertBeforeInstr,
    IR::Instr * *const onOverflowInsertBeforeInstrRef)
{
    Assert(dst);
    Assert(dst->GetType() == TyUint32);
    Assert(src);
    Assert(src->GetType() == TyUint32);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;

    // Generate:
    //     subs temp, src, 1
    //     bcs $overflow
    //     mov dst, temp
    //     b $continue
    // $overflow:
    //     mov dst, 0
    // $continue:

    // The overflow label is inserted first; everything below is inserted
    // *before* it, so those instructions precede the label in program order.
    IR::LabelInstr *const overflowLabel = Lowerer::InsertLabel(false, insertBeforeInstr);

    //     subs temp, src, 1
    // Flag-setting subtract into a fresh temp so 'dst' is untouched if we underflow.
    IR::RegOpnd *const tempOpnd = IR::RegOpnd::New(StackSym::New(TyUint32, func), TyUint32, func);
    const IR::AutoReuseOpnd autoReuseTempOpnd(tempOpnd, func);
    Lowerer::InsertSub(true, tempOpnd, src, IR::IntConstOpnd::New(1, TyUint32, func, true), overflowLabel);

    //     bcs $overflow
    // Unsigned less-than ("below") detects the borrow from the subtract.
    Lowerer::InsertBranch(Js::OpCode::BrLt_A, true, overflowLabel, overflowLabel);

    //     mov dst, temp
    Lowerer::InsertMove(dst, tempOpnd, overflowLabel);

    const bool dstEqualsSrc = dst->IsEqual(src);
    if(!dstEqualsSrc || onOverflowInsertBeforeInstrRef)
    {
        //     b $continue
        // $overflow:
        //     mov dst, 0
        // $continue:
        IR::LabelInstr *const continueLabel = Lowerer::InsertLabel(false, insertBeforeInstr);
        Lowerer::InsertBranch(Js::OpCode::Br, continueLabel, overflowLabel);
        if(!dstEqualsSrc)
        {
            // Overflow path: clamp the result to zero.
            Lowerer::InsertMove(dst, IR::IntConstOpnd::New(0, TyUint32, func, true), continueLabel);
        }
        if(onOverflowInsertBeforeInstrRef)
        {
            // Caller-emitted overflow code goes just before $continue.
            *onOverflowInsertBeforeInstrRef = continueLabel;
        }
    }
    else
    {
        // $overflow:
        // dst aliases src, and underflow only happens when src == 0, so dst
        // already holds the clamped value — no store needed.
    }
}
// Emits a branch to 'target' taken when the float64 'src' is (zero or NaN)
// — or, when 'branchOnZeroOrNan' is false, when it is (non-zero and not NaN).
// 'fallthroughLabel' (optional) names the label control reaches when the
// branch is not taken; on ARM it is used as the target of the extra
// unordered-check branch.
void Lowerer::InsertFloatCheckForZeroOrNanBranch(
    IR::Opnd *const src,
    const bool branchOnZeroOrNan,
    IR::LabelInstr *const target,
    IR::LabelInstr *const fallthroughLabel,
    IR::Instr *const insertBeforeInstr)
{
    Assert(src);
    Assert(src->IsFloat64());
    Assert(target);
    Assert(!fallthroughLabel || fallthroughLabel != target);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;

    // Compare src against the process-wide double 0.0 constant.
    IR::BranchInstr *const branchOnEqualOrNotEqual =
        InsertCompareBranch(
            src,
            IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyFloat64, func),
            branchOnZeroOrNan ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A,
            target,
            insertBeforeInstr,
            true /* ignoreNaN */);

    // x86/x64
    // When NaN is ignored, on x86 and x64, JE branches when equal or unordered since an unordered result sets the zero
    // flag, and JNE branches when not equal and not unordered. By comparing with zero, JE will branch when src is zero or
    // NaN, and JNE will branch when src is not zero and not NaN.
    //
    // ARM
    // When NaN is ignored, BEQ branches when equal and not unordered, and BNE branches when not equal or unordered. So,
    // when comparing src with zero, an unordered check needs to be added before the BEQ/BNE.
    branchOnEqualOrNotEqual; // satisfy the compiler (only referenced in the ARM-only block below)
#ifdef _M_ARM32_OR_ARM64
    // BVS branches when the compare was unordered (NaN): NaN goes to 'target'
    // when branching on zero-or-NaN, otherwise it skips past the branch.
    InsertBranch(
        Js::OpCode::BVS,
        branchOnZeroOrNan
            ? target
            : fallthroughLabel ? fallthroughLabel : insertBeforeInstr->m_prev->GetOrCreateContinueLabel(),
        branchOnEqualOrNotEqual);
#endif
}
// Shared fast-path emitter for indexed element access.
// Dispatches on the profiled value type of the index operand:
//   - likely-string index -> PropertyString inline-cache lookup
//   - likely-symbol index -> JavascriptSymbol inline-cache lookup
//   - otherwise           -> integer-indexed array fast path
// Returns the indir operand to load/store through, or null when no fast path
// could be generated (the caller then uses the helper call path).
// All optional out-params are initialized up front so they are well-defined
// on every early return.
IR::IndirOpnd*
Lowerer::GenerateFastElemICommon(
    _In_ IR::Instr* elemInstr,
    _In_ bool isStore,
    _In_ IR::IndirOpnd* indirOpnd,
    _In_ IR::LabelInstr* labelHelper,
    _In_ IR::LabelInstr* labelCantUseArray,
    _In_opt_ IR::LabelInstr* labelFallthrough,
    _Out_ bool* pIsTypedArrayElement,
    _Out_ bool* pIsStringIndex,
    _Out_opt_ bool* emitBailoutRef,
    _Outptr_opt_result_maybenull_ IR::Opnd** maskOpnd,
    _Outptr_opt_result_maybenull_ IR::LabelInstr** pLabelSegmentLengthIncreased, // = nullptr
    _In_ bool checkArrayLengthOverflow, // = true
    _In_ bool forceGenerateFastPath, // = false
    _In_ bool returnLength, // = false
    _In_opt_ IR::LabelInstr* bailOutLabelInstr, // = nullptr
    _Out_opt_ bool* indirOpndOverflowed, // = nullptr
    _In_ Js::FldInfoFlags flags) // = Js::FldInfo_NoInfo
{
    // Default all out-params before any early return.
    *pIsTypedArrayElement = false;
    *pIsStringIndex = false;
    if(pLabelSegmentLengthIncreased)
    {
        *pLabelSegmentLengthIncreased = nullptr;
    }
    if (maskOpnd)
    {
        *maskOpnd = nullptr;
    }
    if (indirOpndOverflowed)
    {
        *indirOpndOverflowed = false;
    }
    if (emitBailoutRef)
    {
        *emitBailoutRef = false;
    }
    IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
    AssertMsg(baseOpnd, "This shouldn't be NULL");
    // Caution: If making changes to the conditions under which we don't emit the typical array checks, make sure
    // the code in GlobOpt::ShouldAssumeIndirOpndHasNonNegativeIntIndex is updated accordingly. We don't want the
    // global optimizer to type specialize instructions, for which the lowerer is forced to emit unconditional
    // bailouts.
    if (baseOpnd->IsTaggedInt())
    {
        // A tagged-int base can never be an object; no fast path possible.
        return NULL;
    }
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    if (indexOpnd)
    {
        // Only generate the string/symbol cache fast path when the profile
        // says the access hits a normal location (local/proto/add-property)
        // and normal slots, and the base is not likely a typed array.
        const bool normalLocation = (flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromProto | Js::FldInfo_FromLocalWithoutProperty)) != 0;
        const bool normalSlots = (flags & (Js::FldInfo_FromAuxSlots | Js::FldInfo_FromInlineSlots)) != 0;
        const bool generateFastpath = !baseOpnd->GetValueType().IsLikelyOptimizedTypedArray() && normalLocation && normalSlots && flags != Js::FldInfo_NoInfo;
        if (indexOpnd->GetValueType().IsLikelyString())
        {
            if (generateFastpath)
            {
                // If profile data says that it's a typed array - do not generate the property string fast path as the src. could be a temp and that would cause a bug.
                *pIsTypedArrayElement = false;
                *pIsStringIndex = true;
                return GenerateFastElemIStringIndexCommon(elemInstr, isStore, indirOpnd, labelHelper, flags);
            }
            else
            {
                // There's no point in generating the int index fast path if we know the index has a string value.
                return nullptr;
            }
        }
        else if (indexOpnd->GetValueType().IsLikelySymbol())
        {
            if (generateFastpath)
            {
                // If profile data says that it's a typed array - do not generate the symbol fast path as the src. could be a temp and that would cause a bug.
                return GenerateFastElemISymbolIndexCommon(elemInstr, isStore, indirOpnd, labelHelper, flags);
            }
            else
            {
                // There's no point in generating the int index fast path if we know the index has a symbol value.
                return nullptr;
            }
        }
    }
    // Integer (or unknown) index: fall through to the array fast path.
    return
        GenerateFastElemIIntIndexCommon(
            elemInstr,
            isStore,
            indirOpnd,
            labelHelper,
            labelCantUseArray,
            labelFallthrough,
            pIsTypedArrayElement,
            emitBailoutRef,
            pLabelSegmentLengthIncreased,
            checkArrayLengthOverflow,
            maskOpnd,
            false,
            returnLength,
            bailOutLabelInstr,
            indirOpndOverflowed);
}
// Computes, at runtime, the address of the polymorphic inline-cache entry for
// a given object type and leaves it in 'inlineCacheOpnd'. The entry index is
// derived from the type pointer: (type >> PolymorphicInlineCacheShift) masked
// by (cache size - 1), then scaled by sizeof(Js::InlineCache).
void
Lowerer::GenerateDynamicLoadPolymorphicInlineCacheSlot(IR::Instr * instrInsert, IR::RegOpnd * inlineCacheOpnd, IR::Opnd * objectTypeOpnd)
{
    // Generates:
    // MOV r1, objectTypeOpnd
    // SHR r1, PolymorphicInlineCacheShift
    // MOVZX r2, inlineCacheOpnd->size
    // DEC r2
    // AND r1, r2
    // SHL r1, Math::Log2(sizeof(Js::InlineCache))
    // MOV inlineCacheOpnd, inlineCacheOpnd->inlineCaches
    // LEA inlineCacheOpnd, [inlineCacheOpnd + r1]
    IntConstType rightShiftAmount = PolymorphicInlineCacheShift;
    IntConstType leftShiftAmount = Math::Log2(sizeof(Js::InlineCache));
    // Sanity check on the shift constants used below.
    Assert(rightShiftAmount > leftShiftAmount);
    IR::RegOpnd * opndOffset = IR::RegOpnd::New(TyMachPtr, m_func);
    // opndOffset = type >> PolymorphicInlineCacheShift
    InsertShift(Js::OpCode::ShrU_A, false, opndOffset, objectTypeOpnd, IR::IntConstOpnd::New(rightShiftAmount, TyUint8, m_func, true), instrInsert);
    IR::RegOpnd * cacheIndexOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    // cacheIndexOpnd = cache->size - 1 (size is a power of two, so this is the index mask)
    InsertMove(cacheIndexOpnd, IR::IndirOpnd::New(inlineCacheOpnd, Js::PolymorphicInlineCache::GetOffsetOfSize(), TyUint16, m_func), instrInsert);
    InsertSub(false, cacheIndexOpnd, cacheIndexOpnd, IR::IntConstOpnd::New(1, TyMachPtr, m_func), instrInsert);
    // opndOffset = (index & mask) * sizeof(Js::InlineCache)
    InsertAnd(opndOffset, opndOffset, cacheIndexOpnd, instrInsert);
    InsertShift(Js::OpCode::Shl_A, false, opndOffset, opndOffset, IR::IntConstOpnd::New(leftShiftAmount, TyUint8, m_func), instrInsert);
    // inlineCacheOpnd = &cache->inlineCaches[index]
    InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCacheOpnd, Js::PolymorphicInlineCache::GetOffsetOfInlineCaches(), TyMachPtr, m_func), instrInsert);
    InsertLea(inlineCacheOpnd, IR::IndirOpnd::New(inlineCacheOpnd, opndOffset, TyMachPtr, m_func), instrInsert);
}
  13574. // Test that the operand is a PropertyString, or bail to helper
  13575. void
  13576. Lowerer::GeneratePropertyStringTest(IR::RegOpnd *srcReg, IR::Instr *instrInsert, IR::LabelInstr *labelHelper, bool isStore)
  13577. {
  13578. // Generates:
  13579. // StringTest(srcReg, $helper) ; verify index is string type
  13580. // CMP srcReg, PropertyString::`vtable' ; verify index is property string
  13581. // JNE $helper
  13582. GenerateStringTest(srcReg, instrInsert, labelHelper);
  13583. IR::LabelInstr * notPropStrLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  13584. IR::LabelInstr * propStrLoadedLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  13585. IR::BranchInstr *branchInstr = InsertCompareBranch(
  13586. IR::IndirOpnd::New(srcReg, 0, TyMachPtr, m_func),
  13587. LoadVTableValueOpnd(instrInsert, VTableValue::VtablePropertyString),
  13588. Js::OpCode::BrNeq_A, notPropStrLabel, instrInsert);
  13589. InsertObjectPoison(srcReg, branchInstr, instrInsert, isStore);
  13590. InsertBranch(Js::OpCode::Br, propStrLoadedLabel, instrInsert);
  13591. InsertBranch(Js::OpCode::Br, propStrLoadedLabel, instrInsert);
  13592. instrInsert->InsertBefore(notPropStrLabel);
  13593. branchInstr = InsertCompareBranch(
  13594. IR::IndirOpnd::New(srcReg, 0, TyMachPtr, m_func),
  13595. LoadVTableValueOpnd(instrInsert, VTableValue::VtableLiteralStringWithPropertyStringPtr),
  13596. Js::OpCode::BrNeq_A, labelHelper, instrInsert);
  13597. InsertObjectPoison(srcReg, branchInstr, instrInsert, isStore);
  13598. IR::IndirOpnd * propStrOpnd = IR::IndirOpnd::New(srcReg, Js::LiteralStringWithPropertyStringPtr::GetOffsetOfPropertyString(), TyMachPtr, m_func);
  13599. InsertCompareBranch(propStrOpnd, IR::IntConstOpnd::New(NULL, TyMachPtr, m_func), Js::OpCode::BrNeq_A, labelHelper, instrInsert);
  13600. // We don't really own srcReg, but it is fine to update it to be the PropertyString, since that is better to have anyway
  13601. InsertMove(srcReg, propStrOpnd, instrInsert);
  13602. instrInsert->InsertBefore(propStrLoadedLabel);
  13603. }
  13604. IR::IndirOpnd*
  13605. Lowerer::GenerateFastElemIStringIndexCommon(
  13606. _In_ IR::Instr* elemInstr,
  13607. _In_ bool isStore,
  13608. _In_ IR::IndirOpnd* indirOpnd,
  13609. _In_ IR::LabelInstr* labelHelper,
  13610. _In_ Js::FldInfoFlags flags)
  13611. {
  13612. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  13613. IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
  13614. Assert(baseOpnd != nullptr);
  13615. Assert(indexOpnd->GetValueType().IsLikelyString());
  13616. // Generates:
  13617. // PropertyStringTest(indexOpnd, $helper) ; verify index is string type
  13618. // FastElemISymbolOrStringIndexCommon(indexOpnd, baseOpnd, $helper) ; shared code with JavascriptSymbol
  13619. GeneratePropertyStringTest(indexOpnd, elemInstr, labelHelper, isStore);
  13620. const uint32 inlineCacheOffset = isStore ? Js::PropertyString::GetOffsetOfStElemInlineCache() : Js::PropertyString::GetOffsetOfLdElemInlineCache();
  13621. const uint32 hitRateOffset = Js::PropertyString::GetOffsetOfHitRate();
  13622. return GenerateFastElemISymbolOrStringIndexCommon(elemInstr, indexOpnd, baseOpnd, inlineCacheOffset, hitRateOffset, labelHelper, flags);
  13623. }
  13624. IR::IndirOpnd*
  13625. Lowerer::GenerateFastElemISymbolIndexCommon(
  13626. _In_ IR::Instr* elemInstr,
  13627. _In_ bool isStore,
  13628. _In_ IR::IndirOpnd* indirOpnd,
  13629. _In_ IR::LabelInstr* labelHelper,
  13630. _In_ Js::FldInfoFlags flags)
  13631. {
  13632. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  13633. IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
  13634. Assert(baseOpnd != nullptr);
  13635. Assert(indexOpnd->GetValueType().IsLikelySymbol());
  13636. // Generates:
  13637. // SymbolTest(indexOpnd, $helper) ; verify index is symbol type
  13638. // FastElemISymbolOrStringIndexCommon(indexOpnd, baseOpnd, $helper) ; shared code with PropertyString
  13639. GenerateSymbolTest(indexOpnd, elemInstr, labelHelper);
  13640. const uint32 inlineCacheOffset = isStore ? Js::JavascriptSymbol::GetOffsetOfStElemInlineCache() : Js::JavascriptSymbol::GetOffsetOfLdElemInlineCache();
  13641. const uint32 hitRateOffset = Js::JavascriptSymbol::GetOffsetOfHitRate();
  13642. return GenerateFastElemISymbolOrStringIndexCommon(elemInstr, indexOpnd, baseOpnd, inlineCacheOffset, hitRateOffset, labelHelper, flags);
  13643. }
  13644. void
  13645. Lowerer::GenerateFastIsInSymbolOrStringIndex(IR::Instr * instrInsert, IR::RegOpnd *indexOpnd, IR::RegOpnd *baseOpnd, IR::Opnd *dest, uint32 inlineCacheOffset, const uint32 hitRateOffset, IR::LabelInstr * labelHelper, IR::LabelInstr * labelDone)
  13646. {
  13647. // Try to look up the property in the cache, or bail to helper
  13648. GenerateLookUpInIndexCache(instrInsert, indexOpnd, baseOpnd, nullptr /*opndSlotArray*/, nullptr /*opndSlotIndex*/, inlineCacheOffset, hitRateOffset, labelHelper);
  13649. // MOV dest, true
  13650. InsertMove(dest, LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueTrue), instrInsert);
  13651. // JMP labelDone
  13652. InsertBranch(Js::OpCode::Br, labelDone, instrInsert);
  13653. }
  13654. IR::IndirOpnd*
  13655. Lowerer::GenerateFastElemISymbolOrStringIndexCommon(
  13656. _In_ IR::Instr* instrInsert,
  13657. _In_ IR::RegOpnd* indexOpnd,
  13658. _In_ IR::RegOpnd* baseOpnd,
  13659. _In_ const uint32 inlineCacheOffset,
  13660. _In_ const uint32 hitRateOffset,
  13661. _In_ IR::LabelInstr* labelHelper,
  13662. _In_ Js::FldInfoFlags flags)
  13663. {
  13664. // Try to look up the property in the cache, or bail to helper
  13665. IR::RegOpnd * opndSlotArray = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
  13666. IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
  13667. GenerateLookUpInIndexCache(instrInsert, indexOpnd, baseOpnd, opndSlotArray, opndSlotIndex, inlineCacheOffset, hitRateOffset, labelHelper, flags);
  13668. // return [opndSlotArray + opndSlotIndex * PtrSize]
  13669. return IR::IndirOpnd::New(opndSlotArray, opndSlotIndex, m_lowererMD.GetDefaultIndirScale(), TyMachReg, instrInsert->m_func);
  13670. }
// Look up a value from the polymorphic inline cache on a PropertyString or Symbol. Offsets are relative to indexOpnd.
// Checks local and/or proto caches based on profile data. If the property is not found, jump to the helper.
// opndSlotArray is optional; if provided, it will receive the base address of the slot array that contains the property.
// opndSlotIndex is optional; if provided, it will receive the index of the match within the slot array.
void
Lowerer::GenerateLookUpInIndexCache(
    _In_ IR::Instr* instrInsert,
    _In_ IR::RegOpnd* indexOpnd,
    _In_ IR::RegOpnd* baseOpnd,
    _In_opt_ IR::RegOpnd* opndSlotArray,
    _In_opt_ IR::RegOpnd* opndSlotIndex,
    _In_ const uint32 inlineCacheOffset,
    _In_ const uint32 hitRateOffset,
    _In_ IR::LabelInstr* labelHelper,
    _In_ Js::FldInfoFlags flags) // = Js::FldInfo_NoInfo
{
    // Generates:
    // MOV inlineCacheOpnd, index->inlineCache
    // GenerateObjectTest(baseOpnd, $helper) ; verify base is an object
    // MOV objectTypeOpnd, baseOpnd->type
    // GenerateDynamicLoadPolymorphicInlineCacheSlot(inlineCacheOpnd, objectTypeOpnd) ; loads inline cache for given type
    // if (checkLocalInlineSlots)
    //     GenerateLookUpInIndexCacheHelper<CheckLocal, CheckInlineSlot> // checks local inline slots, goes to next on failure
    // if (checkLocalAuxSlots)
    //     GenerateLookUpInIndexCacheHelper<CheckLocal, CheckAuxSlot> // checks local aux slots, goes to next on failure
    // if (fromProto && fromInlineSlots)
    //     GenerateLookUpInIndexCacheHelper<CheckProto, CheckInlineSlot> // checks proto inline slots, goes to next on failure
    // if (fromProto && fromAuxSlots)
    //     GenerateLookUpInIndexCacheHelper<CheckProto, CheckAuxSlot> // checks proto aux slots, goes to next on failure
    // if (doAdd && fromInlineSlots)
    //     GenerateLookUpInIndexCacheHelper<CheckLocal, CheckInlineSlot, DoAdd> // checks typeWithoutProperty inline slots, goes to next on failure
    // if (doAdd && fromAuxSlots)
    //     GenerateLookUpInIndexCacheHelper<CheckLocal, CheckAuxSlot, DoAdd> // checks typeWithoutProperty aux slots, goes to helper on failure
    // $slotIndexLoadedLabel
    // INC indexOpnd->hitRate

    // Decode the profile flags into which cache layouts must be probed.
    const bool fromInlineSlots = (flags & Js::FldInfo_FromInlineSlots) == Js::FldInfo_FromInlineSlots;
    const bool fromAuxSlots = (flags & Js::FldInfo_FromAuxSlots) == Js::FldInfo_FromAuxSlots;
    const bool fromLocal = (flags & Js::FldInfo_FromLocal) == Js::FldInfo_FromLocal;
    const bool fromProto = (flags & Js::FldInfo_FromProto) == Js::FldInfo_FromProto;
    const bool doAdd = (flags & Js::FldInfo_FromLocalWithoutProperty) == Js::FldInfo_FromLocalWithoutProperty;
    // With no profile info, conservatively probe the local slot checks.
    const bool checkLocalInlineSlots = flags == Js::FldInfo_NoInfo || (fromInlineSlots && fromLocal);
    const bool checkLocalAuxSlots = flags == Js::FldInfo_NoInfo || (fromAuxSlots && fromLocal);
    // Base must be a real object; tagged values go to the helper.
    m_lowererMD.GenerateObjectTest(baseOpnd, instrInsert, labelHelper);
    IR::RegOpnd * objectTypeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(objectTypeOpnd, IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, m_func), instrInsert);
    IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(indexOpnd, inlineCacheOffset, TyMachPtr, m_func), instrInsert);
    // Select the polymorphic cache entry for this object's type.
    GenerateDynamicLoadPolymorphicInlineCacheSlot(instrInsert, inlineCacheOpnd, objectTypeOpnd);
    IR::LabelInstr* slotIndexLoadedLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    // Each helper invocation chains to the next via 'nextLabel'; the final
    // invocation's miss-branch is patched below to go to labelHelper instead.
    IR::BranchInstr* branchToPatch = nullptr;
    IR::LabelInstr* nextLabel = nullptr;
    IR::RegOpnd* taggedTypeOpnd = nullptr;
    if (checkLocalInlineSlots)
    {
        GenerateLookUpInIndexCacheHelper<true /* CheckLocal */, true /* CheckInlineSlot */, false /* DoAdd */>(
            instrInsert,
            baseOpnd,
            opndSlotArray,
            opndSlotIndex,
            objectTypeOpnd,
            inlineCacheOpnd,
            slotIndexLoadedLabel,
            labelHelper,
            &nextLabel,
            &branchToPatch,
            &taggedTypeOpnd);
    }
    if (checkLocalAuxSlots)
    {
        GenerateLookUpInIndexCacheHelper<true /* CheckLocal */, false /* CheckInlineSlot */, false /* DoAdd */>(
            instrInsert,
            baseOpnd,
            opndSlotArray,
            opndSlotIndex,
            objectTypeOpnd,
            inlineCacheOpnd,
            slotIndexLoadedLabel,
            labelHelper,
            &nextLabel,
            &branchToPatch,
            &taggedTypeOpnd);
    }
    if (fromProto)
    {
        if (fromInlineSlots)
        {
            GenerateLookUpInIndexCacheHelper<false /* CheckLocal */, true /* CheckInlineSlot */, false /* DoAdd */>(
                instrInsert,
                baseOpnd,
                opndSlotArray,
                opndSlotIndex,
                objectTypeOpnd,
                inlineCacheOpnd,
                slotIndexLoadedLabel,
                labelHelper,
                &nextLabel,
                &branchToPatch,
                &taggedTypeOpnd);
        }
        if (fromAuxSlots)
        {
            GenerateLookUpInIndexCacheHelper<false /* CheckLocal */, false /* CheckInlineSlot */, false /* DoAdd */>(
                instrInsert,
                baseOpnd,
                opndSlotArray,
                opndSlotIndex,
                objectTypeOpnd,
                inlineCacheOpnd,
                slotIndexLoadedLabel,
                labelHelper,
                &nextLabel,
                &branchToPatch,
                &taggedTypeOpnd);
        }
    }
    if (doAdd)
    {
        // Property-add requires somewhere to store the new value.
        Assert(opndSlotArray);
        if (fromInlineSlots)
        {
            GenerateLookUpInIndexCacheHelper<true /* CheckLocal */, true /* CheckInlineSlot */, true /* DoAdd */>(
                instrInsert,
                baseOpnd,
                opndSlotArray,
                opndSlotIndex,
                objectTypeOpnd,
                inlineCacheOpnd,
                slotIndexLoadedLabel,
                labelHelper,
                &nextLabel,
                &branchToPatch,
                &taggedTypeOpnd);
        }
        if (fromAuxSlots)
        {
            GenerateLookUpInIndexCacheHelper<true /* CheckLocal */, false /* CheckInlineSlot */, true /* DoAdd */>(
                instrInsert,
                baseOpnd,
                opndSlotArray,
                opndSlotIndex,
                objectTypeOpnd,
                inlineCacheOpnd,
                slotIndexLoadedLabel,
                labelHelper,
                &nextLabel,
                &branchToPatch,
                &taggedTypeOpnd);
        }
    }
    // At least one check was emitted; retarget the last check's miss-branch
    // from its (now unused) chain label to the helper, and drop that label.
    Assert(branchToPatch);
    Assert(nextLabel);
    Assert(nextLabel->labelRefs.Count() == 1 && nextLabel->labelRefs.Head() == branchToPatch);
    branchToPatch->SetTarget(labelHelper);
    nextLabel->Remove();
    instrInsert->InsertBefore(slotIndexLoadedLabel);
    // Bump the index's hit-rate counter on every successful cache hit.
    IR::IndirOpnd * hitRateOpnd = IR::IndirOpnd::New(indexOpnd, hitRateOffset, TyInt32, m_func);
    IR::IntConstOpnd * incOpnd = IR::IntConstOpnd::New(1, TyInt32, m_func);
    // overflow check: not needed here, we don't allocate anything with hitrate
    InsertAdd(false, hitRateOpnd, hitRateOpnd, incOpnd, instrInsert);
}
// Emits one probe of an inline-cache entry, parameterized at compile time:
//   CheckLocal      - probe the local (u.local) part of the cache; otherwise
//                     probe the proto (u.proto) part.
//   CheckInlineSlot - property lives in inline slots (compare the raw type);
//                     otherwise in aux slots (compare the tagged type).
//   DoAdd           - property-add probe (type-without-property); on a hit the
//                     object's type is updated from the cache.
// On a hit, optionally loads the slot array base into opndSlotArray and the
// slot index into opndSlotIndex, then jumps to doneLabel. On a miss, branches
// to a freshly created *nextLabel (returned via out-param) which the caller
// chains to the next probe or patches to the helper; *branchToPatch receives
// that miss-branch. *taggedTypeOpnd caches the tagged type across probes so
// it is computed at most once.
template <bool CheckLocal, bool CheckInlineSlot, bool DoAdd>
void
Lowerer::GenerateLookUpInIndexCacheHelper(
    _In_ IR::Instr* insertInstr,
    _In_ IR::RegOpnd* baseOpnd,
    _In_opt_ IR::RegOpnd* opndSlotArray,
    _In_opt_ IR::RegOpnd* opndSlotIndex,
    _In_ IR::RegOpnd* objectTypeOpnd,
    _In_ IR::RegOpnd* inlineCacheOpnd,
    _In_ IR::LabelInstr* doneLabel,
    _In_ IR::LabelInstr* helperLabel,
    _Outptr_ IR::LabelInstr** nextLabel,
    _Outptr_ IR::BranchInstr** branchToPatch,
    _Inout_ IR::RegOpnd** taggedTypeOpnd)
{
    // Adds are always against the local cache.
    CompileAssert(!DoAdd || CheckLocal);
    AnalysisAssert(!opndSlotArray || opndSlotIndex);
    *nextLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    // Aux-slot entries are keyed on the tagged type; compute it lazily and
    // reuse it across probes via *taggedTypeOpnd.
    IR::RegOpnd* typeOpnd = nullptr;
    if (CheckInlineSlot)
    {
        typeOpnd = objectTypeOpnd;
    }
    else
    {
        if (*taggedTypeOpnd == nullptr)
        {
            *taggedTypeOpnd = IR::RegOpnd::New(TyMachReg, m_func);
            m_lowererMD.GenerateLoadTaggedType(insertInstr, objectTypeOpnd, *taggedTypeOpnd);
        }
        typeOpnd = *taggedTypeOpnd;
    }
    // 'objectOpnd' ends up pointing at the object that actually holds the
    // property: the base itself (local) or the cached prototype (proto).
    IR::RegOpnd* objectOpnd = nullptr;
    if (CheckLocal)
    {
        *branchToPatch = GenerateLocalInlineCacheCheck(insertInstr, typeOpnd, inlineCacheOpnd, *nextLabel, DoAdd);
        if (DoAdd)
        {
            if (!CheckInlineSlot)
            {
                // Adding into aux slots may require growing the aux-slot array;
                // bail to the helper if the cache says an adjustment is needed.
                GenerateAuxSlotAdjustmentRequiredCheck(insertInstr, inlineCacheOpnd, helperLabel);
            }
            // The add transitions the object to the cache's new type.
            GenerateSetObjectTypeFromInlineCache(insertInstr, baseOpnd, inlineCacheOpnd, !CheckInlineSlot);
        }
        objectOpnd = baseOpnd;
    }
    else
    {
        *branchToPatch = GenerateProtoInlineCacheCheck(insertInstr, typeOpnd, inlineCacheOpnd, *nextLabel);
        // Load the prototype object recorded in the cache entry.
        IR::RegOpnd* protoOpnd = IR::RegOpnd::New(TyMachReg, m_func);
        int32 protoObjOffset = (int32)offsetof(Js::InlineCache, u.proto.prototypeObject);
        IR::IndirOpnd* protoIndir = IR::IndirOpnd::New(inlineCacheOpnd, protoObjOffset, TyMachReg, m_func);
        InsertMove(protoOpnd, protoIndir, insertInstr);
        objectOpnd = protoOpnd;
    }
    if (opndSlotArray)
    {
        if (CheckInlineSlot)
        {
            // Inline slots live directly in the object, so the object pointer
            // itself is the slot array base.
            InsertMove(opndSlotArray, objectOpnd, insertInstr);
        }
        else
        {
            IR::IndirOpnd* auxIndir = IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, m_func);
            InsertMove(opndSlotArray, auxIndir, insertInstr);
        }
        // Load the slot index from the matching half of the cache entry.
        size_t slotIndexOffset = CheckLocal ? offsetof(Js::InlineCache, u.local.slotIndex) : offsetof(Js::InlineCache, u.proto.slotIndex);
        IR::IndirOpnd* slotOffsetIndir = IR::IndirOpnd::New(inlineCacheOpnd, (int32)slotIndexOffset, TyUint16, m_func);
        InsertMove(opndSlotIndex, slotOffsetIndir, insertInstr);
    }
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);
    insertInstr->InsertBefore(*nextLabel);
}
  13905. IR::IndirOpnd *
  13906. Lowerer::GenerateFastElemIIntIndexCommon(
  13907. IR::Instr * instr,
  13908. bool isStore,
  13909. IR::IndirOpnd * indirOpnd,
  13910. IR::LabelInstr * labelHelper,
  13911. IR::LabelInstr * labelCantUseArray,
  13912. IR::LabelInstr *labelFallthrough,
  13913. bool * pIsTypedArrayElement,
  13914. bool *emitBailoutRef,
  13915. IR::LabelInstr **pLabelSegmentLengthIncreased,
  13916. bool checkArrayLengthOverflow /*= true*/,
  13917. IR::Opnd** maskOpnd,
  13918. bool forceGenerateFastPath /* = false */,
  13919. bool returnLength,
  13920. IR::LabelInstr *bailOutLabelInstr /* = nullptr*/,
  13921. bool * indirOpndOverflowed /* = nullptr */)
  13922. {
  13923. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  13924. IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
  13925. Assert(!baseOpnd->IsTaggedInt() || (indexOpnd && indexOpnd->IsNotInt()));
  13926. if (indirOpndOverflowed != nullptr)
  13927. {
  13928. *indirOpndOverflowed = false;
  13929. }
  13930. BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
  13931. IRType indirType = TyVar;
  13932. const ValueType baseValueType(baseOpnd->GetValueType());
  13933. // TEST base, AtomTag -- check base not tagged int
  13934. // JNE $helper
  13935. // if (base.GetValueType() != Array) {
  13936. // CMP [base], JavascriptArray::`vtable'
  13937. // JNE $helper
  13938. // }
  13939. // TEST index, 1 -- index tagged int
  13940. // JEQ $helper
  13941. // if (inputIndex is not int const) {
  13942. // MOV index, inputIndex
  13943. // SAR index, Js::VarTag_Shift -- remove atom tag
  13944. // JS $helper -- exclude negative index
  13945. // }
  13946. // MOV headSegment, [base + offset(head)]
  13947. // CMP [headSegment + offset(length)], index -- bounds check
  13948. // if (opcode == StElemI_A) {
  13949. // JA $done (for typedarray, JA $toNumberHelper)
  13950. // CMP [headSegment + offset(size)], index -- chunk has room?
  13951. // JBE $helper
  13952. // if (index is not int const) {
  13953. // LEA newLength, [index + 1]
  13954. // } else {
  13955. // newLength = index + 1
  13956. // }
  13957. // if(BailOutOnInvalidatedArrayLength) {
  13958. // CMP [base + offset(length)], newlength
  13959. // JB $helper
  13960. // }
  13961. // MOV [headSegment + offset(length)], newLength -- update length on chunk
  13962. // CMP [base + offset(length)], newLength
  13963. // JAE $done
  13964. // MOV [base + offset(length)], newLength -- update length on array
  13965. // if(length to be returned){
  13966. // SHL newLength, AtomTag
  13967. // INC newLength
  13968. // MOV dst, newLength
  13969. // }
  13970. // JMP $done
  13971. //
  13972. // $toNumberHelper: Call HelperOp_ConvNumber_Full
  13973. // JMP $done
  13974. // $done
  13975. // } else {
  13976. // JBE $helper
  13977. // }
  13978. // return [headSegment + offset(elements) + index]
  13979. // Caution: If making changes to the conditions under which we don't emit the typical array checks, make sure
  13980. // the code in GlobOpt::ShouldAssumeIndirOpndHasNonNegativeIntIndex is updated accordingly. We don't want the
  13981. // global optimizer to type specialize instructions, for which the lowerer is forced to emit unconditional
  13982. // bailouts.
  13983. bool isIndexNotInt = false;
  13984. IntConstType value = 0;
  13985. IR::Opnd * indexValueOpnd = nullptr;
  13986. bool invertBoundCheckComparison = false;
  13987. bool checkIndexConstOverflowed = false;
  13988. if (indirOpnd->TryGetIntConstIndexValue(true, &value, &isIndexNotInt))
  13989. {
  13990. if (value >= 0)
  13991. {
  13992. indexValueOpnd = IR::IntConstOpnd::New(value, TyUint32, this->m_func);
  13993. invertBoundCheckComparison = true; // facilitate folding the constant index into the compare instruction
  13994. checkIndexConstOverflowed = true;
  13995. }
  13996. else
  13997. {
  13998. // If the index is a negative int constant we go directly to helper.
  13999. Assert(!forceGenerateFastPath);
  14000. return nullptr;
  14001. }
  14002. }
  14003. else if (isIndexNotInt)
  14004. {
  14005. // If we know the index is not an int we go directly to helper.
  14006. Assert(!forceGenerateFastPath);
  14007. return nullptr;
  14008. }
  14009. //At this point indexValueOpnd is either NULL or contains the valueOpnd
  14010. if(!forceGenerateFastPath && !ShouldGenerateArrayFastPath(baseOpnd, true, true, true))
  14011. {
  14012. return nullptr;
  14013. }
  14014. if(baseValueType.IsLikelyAnyOptimizedArray())
  14015. {
  14016. indirScale = GetArrayIndirScale(baseValueType);
  14017. indirType = GetArrayIndirType(baseValueType);
  14018. }
  14019. if (checkIndexConstOverflowed && (static_cast<uint64>(value) << indirScale) > INT32_MAX &&
  14020. indirOpndOverflowed != nullptr)
  14021. {
  14022. *indirOpndOverflowed = true;
  14023. return nullptr;
  14024. }
  14025. IRType elementType = TyIllegal;
  14026. IR::Opnd * element = nullptr;
  14027. if(instr->m_opcode == Js::OpCode::InlineArrayPush)
  14028. {
  14029. element = instr->GetSrc2();
  14030. elementType = element->GetType();
  14031. }
  14032. else if(isStore && instr->GetSrc1())
  14033. {
  14034. element = instr->GetSrc1();
  14035. elementType = element->GetType();
  14036. }
  14037. Assert(isStore || (element == nullptr && elementType == TyIllegal));
  14038. if (isStore && baseValueType.IsLikelyNativeArray() && indirType != elementType)
  14039. {
  14040. // We're trying to write a value of the wrong type, which should force a conversion of the array.
  14041. // Go to the helper for that.
  14042. return nullptr;
  14043. }
  14044. IR::RegOpnd *arrayOpnd = baseOpnd;
  14045. IR::RegOpnd *headSegmentOpnd = nullptr;
  14046. IR::Opnd *headSegmentLengthOpnd = nullptr;
  14047. IR::AutoReuseOpnd autoReuseHeadSegmentOpnd, autoReuseHeadSegmentLengthOpnd;
  14048. bool indexIsNonnegative = indexValueOpnd || indexOpnd->GetType() == TyUint32 || !checkArrayLengthOverflow;
  14049. bool indexIsLessThanHeadSegmentLength = false;
  14050. if(!baseValueType.IsAnyOptimizedArray())
  14051. {
  14052. arrayOpnd = GenerateArrayTest(baseOpnd, labelCantUseArray, labelCantUseArray, instr, true, isStore);
  14053. }
  14054. else
  14055. {
  14056. if(arrayOpnd->IsArrayRegOpnd())
  14057. {
  14058. IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
  14059. if(arrayRegOpnd->HeadSegmentSym())
  14060. {
  14061. headSegmentOpnd = IR::RegOpnd::New(arrayRegOpnd->HeadSegmentSym(), TyMachPtr, m_func);
  14062. DebugOnly(headSegmentOpnd->FreezeSymValue());
  14063. autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
  14064. }
  14065. if(arrayRegOpnd->HeadSegmentLengthSym())
  14066. {
  14067. headSegmentLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->HeadSegmentLengthSym(), TyUint32, m_func);
  14068. // This value can change over the course of this function
  14069. //DebugOnly(headSegmentLengthOpnd->AsRegOpnd()->FreezeSymValue());
  14070. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  14071. }
  14072. if (arrayRegOpnd->EliminatedLowerBoundCheck())
  14073. {
  14074. indexIsNonnegative = true;
  14075. }
  14076. if(arrayRegOpnd->EliminatedUpperBoundCheck())
  14077. {
  14078. indexIsLessThanHeadSegmentLength = true;
  14079. }
  14080. }
  14081. }
  14082. IR::AutoReuseOpnd autoReuseArrayOpnd;
  14083. if(arrayOpnd->GetValueType().GetObjectType() != ObjectType::ObjectWithArray)
  14084. {
  14085. autoReuseArrayOpnd.Initialize(arrayOpnd, m_func);
  14086. }
  14087. const auto EnsureObjectArrayLoaded = [&]()
  14088. {
  14089. if(arrayOpnd->GetValueType().GetObjectType() != ObjectType::ObjectWithArray)
  14090. {
  14091. return;
  14092. }
  14093. arrayOpnd = LoadObjectArray(arrayOpnd, instr);
  14094. autoReuseArrayOpnd.Initialize(arrayOpnd, m_func);
  14095. };
  14096. const bool doUpperBoundCheck = checkArrayLengthOverflow && !indexIsLessThanHeadSegmentLength;
  14097. if(!indexValueOpnd)
  14098. {
  14099. indexValueOpnd =
  14100. m_lowererMD.LoadNonnegativeIndex(
  14101. indexOpnd,
  14102. (
  14103. indexIsNonnegative
  14104. #if !INT32VAR
  14105. ||
  14106. // On 32-bit platforms, skip the negative check since for now, the unsigned upper bound check covers it
  14107. doUpperBoundCheck
  14108. #endif
  14109. ),
  14110. labelCantUseArray,
  14111. labelHelper,
  14112. instr);
  14113. }
  14114. const IR::AutoReuseOpnd autoReuseIndexValueOpnd(indexValueOpnd, m_func);
  14115. if (baseValueType.IsLikelyTypedArray())
  14116. {
  14117. *pIsTypedArrayElement = true;
  14118. if(doUpperBoundCheck)
  14119. {
  14120. if(!headSegmentLengthOpnd)
  14121. {
  14122. // (headSegmentLength = [base + offset(length)])
  14123. int lengthOffset;
  14124. lengthOffset = Js::Float64Array::GetOffsetOfLength();
  14125. headSegmentLengthOpnd = IR::IndirOpnd::New(arrayOpnd, lengthOffset, TyUint32, m_func);
  14126. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  14127. }
  14128. // CMP index, headSegmentLength -- upper bound check
  14129. if(!invertBoundCheckComparison)
  14130. {
  14131. InsertCompare(indexValueOpnd, headSegmentLengthOpnd, instr);
  14132. }
  14133. else
  14134. {
  14135. InsertCompare(headSegmentLengthOpnd, indexValueOpnd, instr);
  14136. }
  14137. }
  14138. }
  14139. else
  14140. {
  14141. *pIsTypedArrayElement = false;
  14142. if (isStore &&
  14143. baseValueType.IsLikelyNativeIntArray() &&
  14144. (!element->IsIntConstOpnd() || Js::SparseArraySegment<int32>::GetMissingItem() == element->AsIntConstOpnd()->AsInt32()))
  14145. {
  14146. Assert(instr->m_opcode != Js::OpCode::InlineArrayPush || bailOutLabelInstr);
  14147. // Check for a write of the MissingItem value.
  14148. InsertMissingItemCompareBranch(
  14149. element,
  14150. Js::OpCode::BrEq_A,
  14151. instr->m_opcode == Js::OpCode::InlineArrayPush ? bailOutLabelInstr : labelCantUseArray,
  14152. instr);
  14153. }
  14154. if(!headSegmentOpnd)
  14155. {
  14156. EnsureObjectArrayLoaded();
  14157. // MOV headSegment, [base + offset(head)]
  14158. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, this->m_func);
  14159. headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  14160. autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
  14161. InsertMove(headSegmentOpnd, indirOpnd, instr);
  14162. }
  14163. if(doUpperBoundCheck)
  14164. {
  14165. if(!headSegmentLengthOpnd)
  14166. {
  14167. // (headSegmentLength = [headSegment + offset(length)])
  14168. headSegmentLengthOpnd =
  14169. IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
  14170. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  14171. }
  14172. // CMP index, headSegmentLength -- upper bound check
  14173. if(!invertBoundCheckComparison)
  14174. {
  14175. InsertCompare(indexValueOpnd, headSegmentLengthOpnd, instr);
  14176. }
  14177. else
  14178. {
  14179. InsertCompare(headSegmentLengthOpnd, indexValueOpnd, instr);
  14180. }
  14181. }
  14182. }
  14183. const IR::BailOutKind bailOutKind = instr->HasBailOutInfo() ? instr->GetBailOutKind() : IR::BailOutInvalid;
  14184. const bool needBailOutOnInvalidLength = !!(bailOutKind & (IR::BailOutOnInvalidatedArrayHeadSegment));
  14185. const bool needBailOutToHelper = !!(bailOutKind & (IR::BailOutOnArrayAccessHelperCall));
  14186. const bool needBailOutOnSegmentLengthCompare = needBailOutToHelper || needBailOutOnInvalidLength;
  14187. bool usingSegmentLengthIncreasedLabel = false;
  14188. if(indexIsLessThanHeadSegmentLength || needBailOutOnSegmentLengthCompare)
  14189. {
  14190. if (needBailOutOnSegmentLengthCompare)
  14191. {
  14192. // The bailout must be pre-op because it will not have completed the operation
  14193. Assert(instr->GetBailOutInfo()->bailOutOffset == instr->GetByteCodeOffset());
  14194. // Verify other bailouts these can be combined with
  14195. Assert(
  14196. !(
  14197. bailOutKind &
  14198. IR::BailOutKindBits &
  14199. ~(
  14200. IR::BailOutOnArrayAccessHelperCall |
  14201. IR::BailOutOnInvalidatedArrayHeadSegment |
  14202. IR::BailOutOnInvalidatedArrayLength |
  14203. IR::BailOutConventionalNativeArrayAccessOnly |
  14204. IR::BailOutOnMissingValue |
  14205. (bailOutKind & IR::BailOutOnArrayAccessHelperCall ? IR::BailOutInvalid : IR::BailOutConvertedNativeArray)
  14206. )
  14207. )
  14208. );
  14209. if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
  14210. {
  14211. // Omit the helper call and generate a bailout instead
  14212. Assert(emitBailoutRef);
  14213. *emitBailoutRef = true;
  14214. }
  14215. }
  14216. if (indexIsLessThanHeadSegmentLength)
  14217. {
  14218. Assert(!(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment));
  14219. }
  14220. else
  14221. {
  14222. IR::LabelInstr *bailOutLabel;
  14223. if (needBailOutOnInvalidLength)
  14224. {
  14225. Assert(isStore);
  14226. // Lower a separate (but shared) bailout for this case, and preserve the bailout kind in the instruction if the
  14227. // helper call is going to be generated, because the bailout kind needs to be lowered again and differently in the
  14228. // helper call path.
  14229. //
  14230. // Generate:
  14231. // (instr)
  14232. // jmp $continue
  14233. // $bailOut:
  14234. // Bail out with IR::BailOutOnInvalidatedArrayHeadSegment
  14235. // $continue:
  14236. LowerOneBailOutKind(
  14237. instr,
  14238. IR::BailOutOnInvalidatedArrayHeadSegment,
  14239. false,
  14240. !(bailOutKind & IR::BailOutOnArrayAccessHelperCall));
  14241. bailOutLabel = instr->GetOrCreateContinueLabel(true);
  14242. InsertBranch(Js::OpCode::Br, labelFallthrough, bailOutLabel);
  14243. }
  14244. else
  14245. {
  14246. Assert(needBailOutToHelper);
  14247. bailOutLabel = labelHelper;
  14248. }
  14249. // Bail out if the index is outside the head segment bounds
  14250. // jae $bailOut
  14251. Assert(checkArrayLengthOverflow);
  14252. InsertBranch(
  14253. !invertBoundCheckComparison ? Js::OpCode::BrGe_A : Js::OpCode::BrLe_A,
  14254. true /* isUnsigned */,
  14255. bailOutLabel,
  14256. instr);
  14257. }
  14258. }
  14259. else if (isStore && !baseValueType.IsLikelyTypedArray()) // #if (opcode == StElemI_A)
  14260. {
  14261. IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  14262. LABELNAME(labelDone);
  14263. IR::LabelInstr *labelSegmentLengthIncreased = nullptr;
  14264. const bool isPush = instr->m_opcode != Js::OpCode::StElemI_A && instr->m_opcode != Js::OpCode::StElemI_A_Strict;
  14265. // Put the head segment size check and length updates in a helper block since they're not the common path for StElem.
  14266. // For push, that is the common path so keep it in a non-helper block.
  14267. const bool isInHelperBlock = !isPush;
  14268. if(checkArrayLengthOverflow)
  14269. {
  14270. if(pLabelSegmentLengthIncreased &&
  14271. !(
  14272. (baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()) ||
  14273. ((instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict) &&
  14274. instr->IsProfiledInstr() && !instr->AsProfiledInstr()->u.stElemInfo->LikelyFillsMissingValue())
  14275. ))
  14276. {
  14277. // For arrays that are not guaranteed to have no missing values, before storing to an element where
  14278. // (index < length), the element value needs to be checked to see if it's a missing value, and if so, fall back
  14279. // to the helper. This is done to keep the missing value tracking precise in arrays. So, create a separate label
  14280. // for the case where the length was increased (index >= length), and pass it back to GenerateFastStElemI, which
  14281. // will fill in the rest.
  14282. labelSegmentLengthIncreased = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelperBlock);
  14283. LABELNAME(labelSegmentLengthIncreased);
  14284. *pLabelSegmentLengthIncreased = labelSegmentLengthIncreased;
  14285. // Since this is effectively a separate exit point, we need to do the spectre mitigations in this place as well.
  14286. usingSegmentLengthIncreasedLabel = true;
  14287. }
  14288. else
  14289. {
  14290. labelSegmentLengthIncreased = labelDone;
  14291. }
  14292. // JB $done
  14293. InsertBranch(
  14294. !invertBoundCheckComparison ? Js::OpCode::BrLt_A : Js::OpCode::BrGt_A,
  14295. true /* isUnsigned */,
  14296. labelDone,
  14297. instr);
  14298. }
  14299. if(isInHelperBlock)
  14300. {
  14301. InsertLabel(true /* isHelper */, instr);
  14302. }
  14303. EnsureObjectArrayLoaded();
  14304. do // while(false);
  14305. {
  14306. if(checkArrayLengthOverflow)
  14307. {
  14308. if(instr->HasBailOutInfo() && instr->GetBailOutKind() & IR::BailOutOnMissingValue)
  14309. {
  14310. // Need to bail out if this store would create a missing value. The store would cause a missing value to be
  14311. // created if (index > length && index < size). If (index >= size) we would go to helper anyway, and the bailout
  14312. // handling for this is done after the helper call, so just go to helper if (index > length).
  14313. //
  14314. // jne $helper // branch for (cmp index, headSegmentLength)
  14315. InsertBranch(Js::OpCode::BrNeq_A, labelHelper, instr);
  14316. }
  14317. else
  14318. {
  14319. // If (index < size) we will not call the helper, so the array flags must be updated to reflect that it no
  14320. // longer has no missing values.
  14321. //
  14322. // jne indexGreaterThanLength // branch for (cmp index, headSegmentLength)
  14323. // cmp index, [headSegment + offset(size)]
  14324. // jae $helper
  14325. // jmp indexLessThanSize
  14326. // indexGreaterThanLength:
  14327. // cmp index, [headSegment + offset(size)]
  14328. // jae $helper
  14329. // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
  14330. // indexLessThanSize:
  14331. // if(!index->IsConstOpnd()) {
  14332. // sub temp, index, [headSegment + offset(size)]
  14333. // sar temp, 31
  14334. // and index, temp
  14335. // }
  14336. IR::LabelInstr *const indexGreaterThanLengthLabel = InsertLabel(true /* isHelper */, instr);
  14337. LABELNAME(indexGreaterThanLengthLabel);
  14338. IR::LabelInstr *const indexLessThanSizeLabel = InsertLabel(isInHelperBlock, instr);
  14339. LABELNAME(indexLessThanSizeLabel);
  14340. // jne indexGreaterThanLength // branch for (cmp index, headSegmentLength)
  14341. InsertBranch(Js::OpCode::BrNeq_A, indexGreaterThanLengthLabel, indexGreaterThanLengthLabel);
  14342. // cmp index, [headSegment + offset(size)]
  14343. // jae $helper
  14344. // jmp indexLessThanSize
  14345. // indexGreaterThanLength:
  14346. InsertCompareBranch(
  14347. indexValueOpnd,
  14348. IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
  14349. Js::OpCode::BrGe_A,
  14350. true /* isUnsigned */,
  14351. labelHelper,
  14352. indexGreaterThanLengthLabel);
  14353. InsertBranch(Js::OpCode::Br, indexLessThanSizeLabel, indexGreaterThanLengthLabel);
  14354. // indexGreaterThanLength:
  14355. // cmp index, [headSegment + offset(size)]
  14356. // jae $helper
  14357. // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
  14358. // indexLessThanSize:
  14359. InsertCompareBranch(
  14360. indexValueOpnd,
  14361. IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
  14362. Js::OpCode::BrGe_A,
  14363. true /* isUnsigned */,
  14364. labelHelper,
  14365. indexLessThanSizeLabel);
  14366. CompileAssert(
  14367. static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
  14368. Js::DynamicObjectFlags::HasNoMissingValues);
  14369. InsertAnd(
  14370. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  14371. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  14372. IR::IntConstOpnd::New(
  14373. static_cast<uint8>(~Js::DynamicObjectFlags::HasNoMissingValues),
  14374. TyUint8,
  14375. m_func,
  14376. true),
  14377. indexLessThanSizeLabel);
  14378. // In speculative cases, we want to avoid a write to an array setting the length to something huge, which
  14379. // would then allow subsequent reads to hit arbitrary memory (in the speculative path). This is done with
  14380. // a mask generated from the difference between the index and the size. Since we should have already gone
  14381. // to the helper in any case where this would execute, it's a functional no-op.
  14382. // indexLessThanSize:
  14383. // In speculative cases, we want to avoid a write to an array setting the length to something huge, which
  14384. // would then allow subsequent reads to hit arbitrary memory (in the speculative path). This is done with
  14385. // a mask generated from the difference between the index and the size. Since we should have already gone
  14386. // to the helper in any case where this would execute, it's a functional no-op.
  14387. // if(!index->IsConstOpnd()) {
  14388. // sub temp, index, [headSegment + offset(size)]
  14389. // sar temp, 31
  14390. // and index, temp
  14391. // }
  14392. if (!indexValueOpnd->IsConstOpnd()
  14393. && (baseValueType.IsLikelyTypedArray()
  14394. ? CONFIG_FLAG_RELEASE(PoisonTypedArrayStore)
  14395. : ((indirType == TyVar && CONFIG_FLAG_RELEASE(PoisonVarArrayStore))
  14396. || (IRType_IsNativeInt(indirType) && CONFIG_FLAG_RELEASE(PoisonIntArrayStore))
  14397. || (IRType_IsFloat(indirType) && CONFIG_FLAG_RELEASE(PoisonFloatArrayStore)))
  14398. )
  14399. )
  14400. {
  14401. IR::RegOpnd* temp = IR::RegOpnd::New(TyUint32, m_func);
  14402. InsertSub(
  14403. false,
  14404. temp,
  14405. indexValueOpnd,
  14406. IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
  14407. instr);
  14408. InsertShift(Js::OpCode::Shr_A, false, temp, temp, IR::IntConstOpnd::New(31, TyInt8, m_func), instr);
  14409. InsertAnd(indexValueOpnd, indexValueOpnd, temp, instr);
  14410. }
  14411. break;
  14412. }
  14413. }
  14414. // CMP index, [headSegment + offset(size)]
  14415. // JAE $helper
  14416. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, this->m_func);
  14417. InsertCompareBranch(indexValueOpnd, indirOpnd, Js::OpCode::BrGe_A, true /* isUnsigned */, labelHelper, instr);
  14418. } while(false);
  14419. if(isPush)
  14420. {
  14421. IR::LabelInstr *const updateLengthLabel = InsertLabel(isInHelperBlock, instr);
  14422. LABELNAME(updateLengthLabel);
  14423. if(!doUpperBoundCheck && !headSegmentLengthOpnd)
  14424. {
  14425. // (headSegmentLength = [headSegment + offset(length)])
  14426. headSegmentLengthOpnd =
  14427. IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
  14428. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  14429. }
  14430. // For push, it is guaranteed that (index >= length). We already know that (index < size), but we need to check if
  14431. // (index > length) because in that case a missing value will be created and the missing value tracking in the array
  14432. // needs to be updated.
  14433. //
  14434. // cmp index, headSegmentLength
  14435. // je $updateLength
  14436. // and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
  14437. // updateLength:
  14438. InsertCompareBranch(
  14439. indexValueOpnd,
  14440. headSegmentLengthOpnd,
  14441. Js::OpCode::BrEq_A,
  14442. updateLengthLabel,
  14443. updateLengthLabel);
  14444. CompileAssert(
  14445. static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
  14446. Js::DynamicObjectFlags::HasNoMissingValues);
  14447. InsertAnd(
  14448. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  14449. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
  14450. IR::IntConstOpnd::New(
  14451. static_cast<uint8>(~Js::DynamicObjectFlags::HasNoMissingValues),
  14452. TyUint8,
  14453. m_func,
  14454. true),
  14455. updateLengthLabel);
  14456. }
  14457. if (baseValueType.IsArrayOrObjectWithArray())
  14458. {
  14459. // We didn't emit an array check, but if we are going to grow the array
  14460. // We need to go to helper if there is an ES5 array/objectarray used as prototype
  14461. GenerateIsEnabledArraySetElementFastPathCheck(labelHelper, instr);
  14462. }
  14463. IR::Opnd *newLengthOpnd;
  14464. IR::AutoReuseOpnd autoReuseNewLengthOpnd;
  14465. if (indexValueOpnd->IsRegOpnd())
  14466. {
  14467. // LEA newLength, [index + 1]
  14468. newLengthOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
  14469. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  14470. InsertAdd(false /* needFlags */, newLengthOpnd, indexValueOpnd, IR::IntConstOpnd::New(1, TyUint32, m_func), instr);
  14471. }
  14472. else
  14473. {
  14474. newLengthOpnd = IR::IntConstOpnd::New(value + 1, TyUint32, this->m_func);
  14475. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  14476. }
  14477. // This is a common enough case that we want to go through this path instead of the simpler one, since doing it this way is faster for preallocated but un-filled arrays.
  14478. if (!!(bailOutKind & IR::BailOutOnInvalidatedArrayLength))
  14479. {
  14480. // If we'd increase the array length, go to the helper
  14481. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  14482. InsertCompareBranch(
  14483. newLengthOpnd,
  14484. indirOpnd,
  14485. Js::OpCode::BrGt_A,
  14486. true,
  14487. labelHelper,
  14488. instr);
  14489. }
  14490. // MOV [headSegment + offset(length)], newLength
  14491. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, length), TyUint32, this->m_func);
  14492. InsertMove(indirOpnd, newLengthOpnd, instr);
  14493. // We've changed the head segment length, so we may need to change the head segment length opnd
  14494. if (headSegmentLengthOpnd != nullptr && !headSegmentLengthOpnd->IsIndirOpnd())
  14495. {
  14496. InsertMove(headSegmentLengthOpnd, newLengthOpnd, instr);
  14497. }
  14498. if (checkArrayLengthOverflow)
  14499. {
  14500. // CMP newLength, [base + offset(length)]
  14501. // JBE $segmentLengthIncreased
  14502. Assert(labelSegmentLengthIncreased);
  14503. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  14504. InsertCompareBranch(
  14505. newLengthOpnd,
  14506. indirOpnd,
  14507. Js::OpCode::BrLe_A,
  14508. true /* isUnsigned */,
  14509. labelSegmentLengthIncreased,
  14510. instr);
  14511. if(!isInHelperBlock)
  14512. {
  14513. InsertLabel(true /* isHelper */, instr);
  14514. }
  14515. }
  14516. // MOV [base + offset(length)], newLength
  14517. indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  14518. InsertMove(indirOpnd, newLengthOpnd, instr);
  14519. if(returnLength)
  14520. {
  14521. if(newLengthOpnd->GetSize() != MachPtr)
  14522. {
  14523. newLengthOpnd = newLengthOpnd->UseWithNewType(TyMachPtr, m_func)->AsRegOpnd();
  14524. }
  14525. // SHL newLength, AtomTag
  14526. // INC newLength
  14527. this->m_lowererMD.GenerateInt32ToVarConversion(newLengthOpnd, instr);
  14528. // MOV dst, newLength
  14529. InsertMove(instr->GetDst(), newLengthOpnd, instr);
  14530. }
  14531. // Calling code assumes that indirOpnd is initialized before labelSegmentLengthIncreased is reached
  14532. if(labelSegmentLengthIncreased && labelSegmentLengthIncreased != labelDone)
  14533. {
  14534. // labelSegmentLengthIncreased:
  14535. instr->InsertBefore(labelSegmentLengthIncreased);
  14536. }
  14537. // $done
  14538. instr->InsertBefore(labelDone);
  14539. }
  14540. else // #else
  14541. {
  14542. if (checkArrayLengthOverflow)
  14543. {
  14544. if (*pIsTypedArrayElement && isStore)
  14545. {
  14546. IR::LabelInstr *labelInlineSet = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  14547. LABELNAME(labelInlineSet);
  14548. //For positive index beyond length or negative index its essentially nop for typed array store
  14549. InsertBranch(
  14550. !invertBoundCheckComparison ? Js::OpCode::BrLt_A : Js::OpCode::BrGt_A,
  14551. true /* isUnsigned */,
  14552. labelInlineSet,
  14553. instr);
  14554. // For typed array, call ToNumber before we fallThrough.
  14555. if (instr->GetSrc1()->GetType() == TyVar && !instr->GetSrc1()->GetValueType().IsPrimitive())
  14556. {
  14557. // Enter an ophelper block
  14558. IR::LabelInstr * opHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  14559. LABELNAME(opHelper);
  14560. instr->InsertBefore(opHelper);
  14561. IR::Instr *toNumberInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
  14562. toNumberInstr->SetSrc1(instr->GetSrc1());
  14563. instr->InsertBefore(toNumberInstr);
  14564. if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
  14565. {
  14566. // Bail out if this conversion triggers implicit calls.
  14567. toNumberInstr = this->AddBailoutToHelperCallInstr(toNumberInstr, instr->GetBailOutInfo(), bailOutKind, instr);
  14568. }
  14569. LowerUnaryHelperMem(toNumberInstr, IR::HelperOp_ConvNumber_Full);
  14570. }
  14571. InsertBranch(Js::OpCode::Br, labelFallthrough, instr); //Jump to fallThrough
  14572. instr->InsertBefore(labelInlineSet);
  14573. }
  14574. else
  14575. {
  14576. // JAE $helper
  14577. InsertBranch(
  14578. !invertBoundCheckComparison ? Js::OpCode::BrGe_A : Js::OpCode::BrLe_A,
  14579. true /* isUnsigned */,
  14580. labelHelper,
  14581. instr);
  14582. }
  14583. }
  14584. EnsureObjectArrayLoaded();
  14585. if (instr->m_opcode == Js::OpCode::InlineArrayPop)
  14586. {
  14587. Assert(!baseValueType.IsLikelyTypedArray());
  14588. Assert(bailOutLabelInstr);
  14589. if (indexValueOpnd->IsIntConstOpnd())
  14590. {
  14591. // indirOpnd = [headSegment + index + offset(elements)]
  14592. IntConstType offset = offsetof(Js::SparseArraySegment<Js::Var>, elements) + (value << indirScale);
  14593. // TODO: Assert(Math::FitsInDWord(offset));
  14594. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
  14595. }
  14596. else
  14597. {
  14598. // indirOpnd = [headSegment + offset(elements) + (index << scale)]
  14599. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
  14600. indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
  14601. }
  14602. IR::Opnd * tmpDst = nullptr;
  14603. IR::Opnd * dst = instr->GetDst();
  14604. // Pop might not have a dst, if not don't worry about returning the last element. But we still have to
  14605. // worry about gaps, because these force us to access the prototype chain, which may have side-effects.
  14606. if (dst || !baseValueType.HasNoMissingValues())
  14607. {
  14608. if (!dst)
  14609. {
  14610. dst = IR::RegOpnd::New(indirType, this->m_func);
  14611. }
  14612. else if (dst->AsRegOpnd()->m_sym == arrayOpnd->m_sym)
  14613. {
  14614. tmpDst = IR::RegOpnd::New(TyVar, this->m_func);
  14615. dst = tmpDst;
  14616. }
  14617. // Use a mask to prevent arbitrary speculative reads
  14618. // If you think this code looks highly similar to the code later in this function,
  14619. // you'd be right. Unfortunately, I wasn't able to find a way to reduce duplication
  14620. // here without significantly complicating the code structure.
  14621. if (!headSegmentLengthOpnd)
  14622. {
  14623. headSegmentLengthOpnd =
  14624. IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
  14625. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  14626. }
  14627. IR::RegOpnd* localMaskOpnd = nullptr;
  14628. #if TARGET_64
  14629. IR::Opnd* lengthOpnd = nullptr;
  14630. AnalysisAssert(headSegmentLengthOpnd != nullptr);
  14631. lengthOpnd = IR::RegOpnd::New(headSegmentLengthOpnd->GetType(), m_func);
  14632. {
  14633. IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, lengthOpnd, headSegmentLengthOpnd, m_func);
  14634. instr->InsertBefore(instrMov);
  14635. LowererMD::Legalize(instrMov);
  14636. }
  14637. if (lengthOpnd->GetSize() != MachPtr)
  14638. {
  14639. lengthOpnd = lengthOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
  14640. }
  14641. // MOV r1, [opnd + offset(type)]
  14642. IR::RegOpnd* indexValueRegOpnd = IR::RegOpnd::New(indexValueOpnd->GetType(), m_func);
  14643. {
  14644. IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, indexValueRegOpnd, indexValueOpnd, m_func);
  14645. instr->InsertBefore(instrMov);
  14646. LowererMD::Legalize(instrMov);
  14647. }
  14648. if (indexValueRegOpnd->GetSize() != MachPtr)
  14649. {
  14650. indexValueRegOpnd = indexValueRegOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
  14651. }
  14652. localMaskOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
  14653. InsertSub(false, localMaskOpnd, indexValueRegOpnd, lengthOpnd, instr);
  14654. InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(63, TyInt8, m_func), instr);
  14655. #else
  14656. localMaskOpnd = IR::RegOpnd::New(TyInt32, m_func);
  14657. InsertSub(false, localMaskOpnd, indexValueOpnd, headSegmentLengthOpnd, instr);
  14658. InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), instr);
  14659. #endif
  14660. // for pop we always do the masking before the load in cases where we load a value
  14661. IR::RegOpnd* loadAddr = IR::RegOpnd::New(TyMachPtr, m_func);
  14662. #if _M_ARM32_OR_ARM64
  14663. if (indirOpnd->GetIndexOpnd() != nullptr && indirOpnd->GetScale() > 0)
  14664. {
  14665. // We don't support encoding for LEA with scale on ARM/ARM64, so do the scale calculation as a separate instruction
  14666. IR::RegOpnd* fullIndexOpnd = IR::RegOpnd::New(indirOpnd->GetIndexOpnd()->GetType(), m_func);
  14667. InsertShift(Js::OpCode::Shl_A, false, fullIndexOpnd, indirOpnd->GetIndexOpnd(), IR::IntConstOpnd::New(indirOpnd->GetScale(), TyInt8, m_func), instr);
  14668. IR::IndirOpnd* newIndir = IR::IndirOpnd::New(indirOpnd->GetBaseOpnd(), fullIndexOpnd, indirType, m_func);
  14669. if (indirOpnd->GetOffset() != 0)
  14670. {
  14671. newIndir->SetOffset(indirOpnd->GetOffset());
  14672. }
  14673. indirOpnd = newIndir;
  14674. }
  14675. #endif
  14676. IR::AutoReuseOpnd reuseIndir(indirOpnd, m_func);
  14677. InsertLea(loadAddr, indirOpnd, instr);
  14678. InsertAnd(loadAddr, loadAddr, localMaskOpnd, instr);
  14679. indirOpnd = IR::IndirOpnd::New(loadAddr, 0, indirType, m_func);
  14680. // MOV dst, [head + offset]
  14681. InsertMove(dst, indirOpnd, instr);
  14682. //If the array has missing values, check for one
  14683. if (!baseValueType.HasNoMissingValues())
  14684. {
  14685. InsertMissingItemCompareBranch(
  14686. dst,
  14687. Js::OpCode::BrEq_A,
  14688. bailOutLabelInstr,
  14689. instr);
  14690. }
  14691. }
  14692. // MOV [head + offset], missing
  14693. InsertMove(indirOpnd, GetMissingItemOpndForAssignment(indirType, m_func), instr);
  14694. IR::Opnd *newLengthOpnd;
  14695. IR::AutoReuseOpnd autoReuseNewLengthOpnd;
  14696. if (indexValueOpnd->IsRegOpnd())
  14697. {
  14698. // LEA newLength, [index]
  14699. newLengthOpnd = indexValueOpnd;
  14700. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  14701. }
  14702. else
  14703. {
  14704. newLengthOpnd = IR::IntConstOpnd::New(value, TyUint32, this->m_func);
  14705. autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
  14706. }
  14707. //update segment length and array length
  14708. // MOV [headSegment + offset(length)], newLength
  14709. IR::IndirOpnd *lengthIndirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, length), TyUint32, this->m_func);
  14710. InsertMove(lengthIndirOpnd, newLengthOpnd, instr);
  14711. // We've changed the head segment length, so we may need to change the head segment length opnd
  14712. if (headSegmentLengthOpnd != nullptr && !headSegmentLengthOpnd->IsIndirOpnd())
  14713. {
  14714. InsertMove(headSegmentLengthOpnd, newLengthOpnd, instr);
  14715. }
  14716. // MOV [base + offset(length)], newLength
  14717. lengthIndirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
  14718. InsertMove(lengthIndirOpnd, newLengthOpnd, instr);
  14719. if (tmpDst)
  14720. {
  14721. // The array opnd and the destination is the same, need to move the value in the tmp dst
  14722. // to the actual dst
  14723. InsertMove(instr->GetDst(), tmpDst, instr);
  14724. }
  14725. return indirOpnd;
  14726. }
  14727. } // #endif
  14728. // Should we poison the load of the address to/from which the store/load happens?
  14729. bool shouldPoisonLoad = maskOpnd != nullptr
  14730. && (
  14731. (!isStore && (!instr->IsSafeToSpeculate()) &&
  14732. (baseValueType.IsLikelyTypedArray()
  14733. ? CONFIG_FLAG_RELEASE(PoisonTypedArrayLoad)
  14734. : ((indirType == TyVar && CONFIG_FLAG_RELEASE(PoisonVarArrayLoad))
  14735. || (IRType_IsNativeInt(indirType) && CONFIG_FLAG_RELEASE(PoisonIntArrayLoad))
  14736. || (IRType_IsFloat(indirType) && CONFIG_FLAG_RELEASE(PoisonFloatArrayLoad)))
  14737. )
  14738. )
  14739. ||
  14740. (isStore &&
  14741. (baseValueType.IsLikelyTypedArray()
  14742. ? CONFIG_FLAG_RELEASE(PoisonTypedArrayStore)
  14743. : ((indirType == TyVar && CONFIG_FLAG_RELEASE(PoisonVarArrayStore))
  14744. || (IRType_IsNativeInt(indirType) && CONFIG_FLAG_RELEASE(PoisonIntArrayStore))
  14745. || (IRType_IsFloat(indirType) && CONFIG_FLAG_RELEASE(PoisonFloatArrayStore)))
  14746. )
  14747. )
  14748. )
  14749. ;
  14750. // We have two exit paths for this function in the store case when we might grow the head
  14751. // segment, due to tracking for missing elements. This unfortunately means that we need a
  14752. // copy of the poisoning code on the other exit path, since the determination of the path
  14753. // and the use of the path determination to decide whether we found the missing value are
  14754. // things that have to happen on opposite sides of the poisoning.
  14755. IR::Instr* insertForSegmentLengthIncreased = nullptr;
  14756. if (shouldPoisonLoad && usingSegmentLengthIncreasedLabel)
  14757. {
  14758. insertForSegmentLengthIncreased = (*pLabelSegmentLengthIncreased)->m_next;
  14759. }
  14760. #if TARGET_32
  14761. if (shouldPoisonLoad)
  14762. {
  14763. // Prevent index from being negative, which would break the poisoning
  14764. if (indexValueOpnd->IsIntConstOpnd())
  14765. {
  14766. indexValueOpnd = IR::IntConstOpnd::New(value & INT32_MAX, TyUint32, m_func);
  14767. }
  14768. else
  14769. {
  14770. IR::RegOpnd* newIndexValueOpnd = IR::RegOpnd::New(TyUint32, m_func);
  14771. InsertAnd(newIndexValueOpnd, indexValueOpnd, IR::IntConstOpnd::New(INT32_MAX, TyUint32, m_func), instr);
  14772. if(insertForSegmentLengthIncreased != nullptr)
  14773. {
  14774. InsertAnd(newIndexValueOpnd, indexValueOpnd, IR::IntConstOpnd::New(INT32_MAX, TyUint32, m_func), insertForSegmentLengthIncreased);
  14775. }
  14776. indexValueOpnd = newIndexValueOpnd;
  14777. }
  14778. }
  14779. #endif
  14780. if (baseValueType.IsLikelyTypedArray())
  14781. {
  14782. if(!headSegmentOpnd)
  14783. {
  14784. // MOV headSegment, [base + offset(arrayBuffer)]
  14785. int bufferOffset;
  14786. bufferOffset = Js::Float64Array::GetOffsetOfBuffer();
  14787. indirOpnd = IR::IndirOpnd::New(arrayOpnd, bufferOffset, TyMachPtr, this->m_func);
  14788. headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  14789. autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
  14790. IR::AutoReuseOpnd reuseIndir(indirOpnd, m_func);
  14791. InsertMove(headSegmentOpnd, indirOpnd, instr);
  14792. if(insertForSegmentLengthIncreased != nullptr)
  14793. {
  14794. InsertMove(headSegmentOpnd, indirOpnd, insertForSegmentLengthIncreased);
  14795. }
  14796. }
  14797. // indirOpnd = [headSegment + index]
  14798. if (indexValueOpnd->IsIntConstOpnd())
  14799. {
  14800. IntConstType offset = (value << indirScale);
  14801. // TODO: Assert(Math::FitsInDWord(offset));
  14802. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
  14803. }
  14804. else
  14805. {
  14806. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
  14807. }
  14808. }
  14809. else if (indexValueOpnd->IsIntConstOpnd())
  14810. {
  14811. // indirOpnd = [headSegment + index + offset(elements)]
  14812. IntConstType offset = offsetof(Js::SparseArraySegment<Js::Var>, elements) + (value << indirScale);
  14813. // TODO: Assert(Math::FitsInDWord(offset));
  14814. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
  14815. }
  14816. else
  14817. {
  14818. // indirOpnd = [headSegment + offset(elements) + (index << scale)]
  14819. indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
  14820. indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
  14821. }
  14822. if (shouldPoisonLoad)
  14823. {
  14824. // Use a mask to prevent arbitrary speculative reads
  14825. if (!headSegmentLengthOpnd
  14826. #if ENABLE_FAST_ARRAYBUFFER
  14827. && !baseValueType.IsLikelyOptimizedVirtualTypedArray()
  14828. #endif
  14829. )
  14830. {
  14831. if (baseValueType.IsLikelyTypedArray())
  14832. {
  14833. int lengthOffset;
  14834. lengthOffset = GetArrayOffsetOfLength(baseValueType);
  14835. headSegmentLengthOpnd = IR::IndirOpnd::New(arrayOpnd, lengthOffset, TyUint32, m_func);
  14836. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  14837. }
  14838. else
  14839. {
  14840. headSegmentLengthOpnd =
  14841. IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
  14842. autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
  14843. }
  14844. }
  14845. IR::RegOpnd* localMaskOpnd = nullptr;
  14846. #if TARGET_64
  14847. IR::Opnd* lengthOpnd = nullptr;
  14848. #if ENABLE_FAST_ARRAYBUFFER
  14849. if (baseValueType.IsLikelyOptimizedVirtualTypedArray())
  14850. {
  14851. lengthOpnd = IR::IntConstOpnd::New(MAX_ASMJS_ARRAYBUFFER_LENGTH >> indirScale, TyMachReg, m_func);
  14852. }
  14853. else
  14854. #endif
  14855. {
  14856. AnalysisAssert(headSegmentLengthOpnd != nullptr);
  14857. lengthOpnd = IR::RegOpnd::New(headSegmentLengthOpnd->GetType(), m_func);
  14858. IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, lengthOpnd, headSegmentLengthOpnd, m_func);
  14859. instr->InsertBefore(instrMov);
  14860. LowererMD::Legalize(instrMov);
  14861. if (insertForSegmentLengthIncreased != nullptr)
  14862. {
  14863. IR::Instr * instrMov2 = IR::Instr::New(Js::OpCode::MOV_TRUNC, lengthOpnd, headSegmentLengthOpnd, m_func);
  14864. insertForSegmentLengthIncreased->InsertBefore(instrMov2);
  14865. LowererMD::Legalize(instrMov2);
  14866. }
  14867. if (lengthOpnd->GetSize() != MachPtr)
  14868. {
  14869. lengthOpnd = lengthOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
  14870. }
  14871. }
  14872. // MOV r1, [opnd + offset(type)]
  14873. IR::RegOpnd* indexValueRegOpnd = IR::RegOpnd::New(indexValueOpnd->GetType(), m_func);
  14874. IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, indexValueRegOpnd, indexValueOpnd, m_func);
  14875. instr->InsertBefore(instrMov);
  14876. LowererMD::Legalize(instrMov);
  14877. if (insertForSegmentLengthIncreased != nullptr)
  14878. {
  14879. IR::Instr * instrMov2 = IR::Instr::New(Js::OpCode::MOV_TRUNC, indexValueRegOpnd, indexValueOpnd, m_func);
  14880. insertForSegmentLengthIncreased->InsertBefore(instrMov2);
  14881. LowererMD::Legalize(instrMov2);
  14882. }
  14883. if (indexValueRegOpnd->GetSize() != MachPtr)
  14884. {
  14885. indexValueRegOpnd = indexValueRegOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
  14886. }
  14887. localMaskOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
  14888. InsertSub(false, localMaskOpnd, indexValueRegOpnd, lengthOpnd, instr);
  14889. InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(63, TyInt8, m_func), instr);
  14890. if (insertForSegmentLengthIncreased != nullptr)
  14891. {
  14892. InsertSub(false, localMaskOpnd, indexValueRegOpnd, lengthOpnd, insertForSegmentLengthIncreased);
  14893. InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(63, TyInt8, m_func), insertForSegmentLengthIncreased);
  14894. }
  14895. #else
  14896. localMaskOpnd = IR::RegOpnd::New(TyInt32, m_func);
  14897. InsertSub(false, localMaskOpnd, indexValueOpnd, headSegmentLengthOpnd, instr);
  14898. InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), instr);
  14899. if (insertForSegmentLengthIncreased != nullptr)
  14900. {
  14901. InsertSub(false, localMaskOpnd, indexValueOpnd, headSegmentLengthOpnd, insertForSegmentLengthIncreased);
  14902. InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), insertForSegmentLengthIncreased);
  14903. }
  14904. #endif
  14905. if ((IRType_IsNativeInt(indirType) || indirType == TyVar) && !isStore)
  14906. {
  14907. *maskOpnd = localMaskOpnd;
  14908. }
  14909. else
  14910. {
  14911. // for float values, do the poisoning before the load to avoid needing slow floating point conversions
  14912. IR::RegOpnd* loadAddr = IR::RegOpnd::New(TyMachPtr, m_func);
  14913. #if _M_ARM32_OR_ARM64
  14914. if (indirOpnd->GetIndexOpnd() != nullptr && indirOpnd->GetScale() > 0)
  14915. {
  14916. // We don't support encoding for LEA with scale on ARM/ARM64, so do the scale calculation as a separate instruction
  14917. IR::RegOpnd* fullIndexOpnd = IR::RegOpnd::New(indirOpnd->GetIndexOpnd()->GetType(), m_func);
  14918. InsertShift(Js::OpCode::Shl_A, false, fullIndexOpnd, indirOpnd->GetIndexOpnd(), IR::IntConstOpnd::New(indirOpnd->GetScale(), TyInt8, m_func), instr);
  14919. IR::IndirOpnd* newIndir = IR::IndirOpnd::New(indirOpnd->GetBaseOpnd(), fullIndexOpnd, indirType, m_func);
  14920. if (insertForSegmentLengthIncreased != nullptr)
  14921. {
  14922. InsertShift(Js::OpCode::Shl_A, false, fullIndexOpnd, indirOpnd->GetIndexOpnd(), IR::IntConstOpnd::New(indirOpnd->GetScale(), TyInt8, m_func), insertForSegmentLengthIncreased);
  14923. }
  14924. if (indirOpnd->GetOffset() != 0)
  14925. {
  14926. newIndir->SetOffset(indirOpnd->GetOffset());
  14927. }
  14928. indirOpnd = newIndir;
  14929. }
  14930. #endif
  14931. IR::AutoReuseOpnd reuseIndir(indirOpnd, m_func);
  14932. InsertLea(loadAddr, indirOpnd, instr);
  14933. InsertAnd(loadAddr, loadAddr, localMaskOpnd, instr);
  14934. if (insertForSegmentLengthIncreased != nullptr)
  14935. {
  14936. InsertLea(loadAddr, indirOpnd, insertForSegmentLengthIncreased);
  14937. InsertAnd(loadAddr, loadAddr, localMaskOpnd, insertForSegmentLengthIncreased);
  14938. // We want to export a segmentLengthIncreasedLabel to the caller that is after the poisoning
  14939. // code, since that's also the code that generates indirOpnd in this case.
  14940. IR::LabelInstr* exportedSegmentLengthIncreasedLabel = IR::LabelInstr::New(Js::OpCode::Label, insertForSegmentLengthIncreased->m_func, (*pLabelSegmentLengthIncreased)->isOpHelper);
  14941. LABELNAME(exportedSegmentLengthIncreasedLabel);
  14942. insertForSegmentLengthIncreased->InsertBefore(exportedSegmentLengthIncreasedLabel);
  14943. *pLabelSegmentLengthIncreased = exportedSegmentLengthIncreasedLabel;
  14944. }
  14945. indirOpnd = IR::IndirOpnd::New(loadAddr, 0, indirType, m_func);
  14946. }
  14947. }
  14948. return indirOpnd;
  14949. }
  14950. IR::BranchInstr*
  14951. Lowerer::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr)
  14952. {
  14953. IR::Opnd* missingItemOpnd = GetMissingItemOpndForCompare(compareSrc->GetType(), m_func);
  14954. if (compareSrc->IsFloat64())
  14955. {
  14956. Assert(compareSrc->IsRegOpnd() || compareSrc->IsIndirOpnd());
  14957. return m_lowererMD.InsertMissingItemCompareBranch(compareSrc, missingItemOpnd, opcode, target, insertBeforeInstr);
  14958. }
  14959. else
  14960. {
  14961. Assert(compareSrc->IsInt32() || compareSrc->IsVar());
  14962. return InsertCompareBranch(missingItemOpnd, compareSrc, opcode, target, insertBeforeInstr, true);
  14963. }
  14964. }
  14965. IR::RegOpnd *
  14966. Lowerer::GenerateUntagVar(IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateTagCheck)
  14967. {
  14968. if (!opnd->IsVar())
  14969. {
  14970. AssertMsg(opnd->GetSize() == 4, "This should be 32-bit wide");
  14971. return opnd;
  14972. }
  14973. AssertMsg(!opnd->IsNotInt(), "An opnd we know is not an int should not try to untag it as it will always fail");
  14974. if (opnd->m_sym->IsIntConst())
  14975. {
  14976. int32 constValue = opnd->m_sym->GetIntConstValue();
  14977. IR::IntConstOpnd* constOpnd = IR::IntConstOpnd::New(constValue, TyInt32, this->m_func);
  14978. IR::RegOpnd* regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  14979. InsertMove(regOpnd, constOpnd, insertBeforeInstr);
  14980. return regOpnd;
  14981. }
  14982. return m_lowererMD.GenerateUntagVar(opnd, labelFail, insertBeforeInstr, generateTagCheck && !opnd->IsTaggedInt());
  14983. }
// Lowerer::GenerateNotZeroTest
//
// Emits a test of opndSrc against itself and branches to isZeroLabel when the
// operand is zero, i.e.:
//   TEST opndSrc, opndSrc
//   JEQ  isZeroLabel
// Instructions are inserted before insertBeforeInstr.
void
Lowerer::GenerateNotZeroTest( IR::Opnd * opndSrc, IR::LabelInstr * isZeroLabel, IR::Instr * insertBeforeInstr)
{
    InsertTestBranch(opndSrc, opndSrc, Js::OpCode::BrEq_A, isZeroLabel, insertBeforeInstr);
}
// Lowerer::GenerateFastStringLdElem
//
// Attempt to emit an inline fast path for a string-indexed LdElem: check the
// base is a string and the index is an in-range int, load the character from
// the string buffer, and fetch the corresponding single-character string from
// the char-string cache. Any case the fast path cannot handle (wrong type,
// out-of-range index, null buffer, character outside the cache range, or a
// cache miss) branches to labelHelper. On the success path the result is
// stored to the instruction's dst and control branches to labelFallThru.
// Returns true if a fast path was emitted, false if none was generated
// (caller must then go straight to the helper).
bool
Lowerer::GenerateFastStringLdElem(IR::Instr * ldElem, IR::LabelInstr * labelHelper, IR::LabelInstr * labelFallThru)
{
    IR::IndirOpnd * indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
    IR::RegOpnd * baseOpnd = indirOpnd->GetBaseOpnd();

    // don't generate the fast path if the instance is not likely string
    if (!baseOpnd->GetValueType().IsLikelyString())
    {
        return false;
    }
    Assert(!baseOpnd->IsTaggedInt());

    IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
    // Don't generate the fast path if the index operand is not likely int
    if (indexOpnd && !indexOpnd->GetValueType().IsLikelyInt())
    {
        return false;
    }

    // Make sure the instance is a string
    Assert(!indexOpnd || !indexOpnd->IsNotInt());
    GenerateStringTest(baseOpnd, ldElem, labelHelper);

    IR::Opnd * index32CmpOpnd;
    IR::RegOpnd * bufferOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseBufferOpnd(bufferOpnd, m_func);
    IR::IndirOpnd * charIndirOpnd;
    if (indexOpnd)
    {
        // Untag the var and generate the indir into the string buffer
        // (scale 1 because the index is scaled for TyUint16 chars below).
        IR::RegOpnd * index32Opnd = GenerateUntagVar(indexOpnd, labelHelper, ldElem);
        charIndirOpnd = IR::IndirOpnd::New(bufferOpnd, index32Opnd, 1, TyUint16, this->m_func);
        index32CmpOpnd = index32Opnd;
    }
    else
    {
        // Just use the offset to indirect into the string buffer
        charIndirOpnd = IR::IndirOpnd::New(bufferOpnd, indirOpnd->GetOffset() * sizeof(char16), TyUint16, this->m_func);
        index32CmpOpnd = IR::IntConstOpnd::New((uint32)indirOpnd->GetOffset(), TyUint32, this->m_func);
    }

    // Check if the index is in range of the string length
    //  CMP [baseOpnd + offset(length)], indexOpnd     -- string length
    //  JBE $helper    -- unsigned compare, and string length are at most INT_MAX - 1
    //                 -- so that even if we have a negative index, this will fail
    IR::RegOpnd* lengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(lengthOpnd, IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func), ldElem);
    InsertCompareBranch(lengthOpnd, index32CmpOpnd, Js::OpCode::BrLe_A, true, labelHelper, ldElem);

    // Load the string buffer and make sure it is not null
    //  MOV bufferOpnd, [baseOpnd + offset(m_pszValue)]
    //  TEST bufferOpnd, bufferOpnd
    //  JEQ $labelHelper
    indirOpnd = IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func);
    InsertMove(bufferOpnd, indirOpnd, ldElem);
    GenerateNotZeroTest(bufferOpnd, labelHelper, ldElem);

    IR::RegOpnd* maskOpnd = nullptr;
    if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
    {
        // Mask off the sign before loading so that poisoning will work for negative indices
        if (index32CmpOpnd->IsIntConstOpnd())
        {
            charIndirOpnd->SetOffset((index32CmpOpnd->AsIntConstOpnd()->AsUint32() & INT32_MAX) * sizeof(char16));
        }
        else
        {
            InsertAnd(index32CmpOpnd, index32CmpOpnd, IR::IntConstOpnd::New(INT32_MAX, TyInt32, m_func), ldElem);
        }

        // All bits in mask will be 1 for a valid index or 0 for an OOB index
        // (index - length is negative iff index < length; shifting the sign
        // bit right by 31 arithmetically fills the register with it).
        maskOpnd = IR::RegOpnd::New(TyInt32, m_func);
        InsertSub(false, maskOpnd, index32CmpOpnd, lengthOpnd, ldElem);
        InsertShift(Js::OpCode::Shr_A, false, maskOpnd, maskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), ldElem);
    }

    // Load the character and check if it is 7-bit ASCII (which we have the cache for)
    //  MOV charOpnd, [bufferOpnd + index32Opnd]
    //  CMP charOpnd, 0x80
    //  JAE $helper
    IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
    const IR::AutoReuseOpnd autoReuseCharOpnd(charOpnd, m_func);
    InsertMove(charOpnd, charIndirOpnd, ldElem);
    if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
    {
        // Zero the loaded character on a speculatively out-of-bounds load.
        InsertAnd(charOpnd, charOpnd, maskOpnd, ldElem);
    }
    InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint16, this->m_func),
        Js::OpCode::BrGe_A, true, labelHelper, ldElem);

    // Load the string from the cache
    //  MOV charStringCache, <charStringCache, address>
    //  MOV stringOpnd, [charStringCache + charOpnd * 4]
    IR::RegOpnd * cacheOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseCacheOpnd(cacheOpnd, m_func);
    Assert(Js::JavascriptLibrary::GetCharStringCacheAOffset() == Js::JavascriptLibrary::GetCharStringCacheOffset());
    InsertMove(cacheOpnd, this->LoadLibraryValueOpnd(ldElem, LibraryValue::ValueCharStringCache), ldElem);

    // Check if we have created the string or not
    //  TEST stringOpnd, stringOpnd
    //  JE $helper
    IR::RegOpnd * stringOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseStringOpnd(stringOpnd, m_func);
    InsertMove(stringOpnd, IR::IndirOpnd::New(cacheOpnd, charOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyVar, this->m_func), ldElem);
    GenerateNotZeroTest(stringOpnd, labelHelper, ldElem);

    InsertMove(ldElem->GetDst(), stringOpnd, ldElem);
    InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
    return true;
}
  15088. bool
  15089. Lowerer::GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef)
  15090. {
  15091. Assert(instrIsInHelperBlockRef);
  15092. bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
  15093. instrIsInHelperBlock = false;
  15094. IR::LabelInstr * labelHelper;
  15095. IR::LabelInstr * labelFallThru;
  15096. IR::LabelInstr * labelBailOut = nullptr;
  15097. IR::LabelInstr * labelMissingNative = nullptr;
  15098. IR::Opnd *src1 = ldElem->GetSrc1();
  15099. AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd on LdElementI");
  15100. IR::IndirOpnd * indirOpnd = src1->AsIndirOpnd();
  15101. // From FastElemICommon:
  15102. // TEST base, AtomTag -- check base not tagged int
  15103. // JNE $helper
  15104. // MOV r1, [base + offset(type)] -- check base isArray
  15105. // CMP [r1 + offset(typeId)], TypeIds_Array
  15106. // JNE $helper
  15107. // TEST index, 1 -- index tagged int
  15108. // JEQ $helper
  15109. // MOV r2, index
// SAR r2, Js::VarTag_Shift -- remove atom tag
  15111. // JS $helper -- exclude negative index
  15112. // MOV r4, [base + offset(head)]
  15113. // CMP r2, [r4 + offset(length)] -- bounds check
  15114. // JAE $helper
  15115. // MOV r3, [r4 + offset(elements)]
  15116. // Generated here:
  15117. // MOV dst, [r3 + r2]
  15118. // TEST dst, dst
  15119. // JNE $fallthrough
  15120. if(ldElem->m_opcode == Js::OpCode::LdMethodElem && indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyOptimizedTypedArray())
  15121. {
  15122. // Typed arrays don't return objects, so it's not worth generating a fast path for LdMethodElem. Calling the helper also
  15123. // generates a better error message. Skip the fast path and just generate a helper call.
  15124. return true;
  15125. }
  15126. labelFallThru = ldElem->GetOrCreateContinueLabel();
  15127. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  15128. // If we know for sure (based on flow graph) we're loading from the arguments object, then ignore the (path-based) profile info.
  15129. bool isNativeArrayLoad = !ldElem->DoStackArgsOpt() && indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyNativeArray();
  15130. bool needMissingValueCheck = true;
  15131. bool emittedFastPath = false;
  15132. bool emitBailout = false;
  15133. if (ldElem->DoStackArgsOpt())
  15134. {
  15135. emittedFastPath = GenerateFastArgumentsLdElemI(ldElem, labelFallThru);
  15136. emitBailout = true;
  15137. }
  15138. else if (GenerateFastStringLdElem(ldElem, labelHelper, labelFallThru))
  15139. {
  15140. emittedFastPath = true;
  15141. }
  15142. else
  15143. {
  15144. IR::LabelInstr * labelCantUseArray = labelHelper;
  15145. if (isNativeArrayLoad)
  15146. {
  15147. if (ldElem->GetDst()->GetType() == TyVar)
  15148. {
  15149. // Skip the fast path and just generate a helper call
  15150. return true;
  15151. }
  15152. // Specialized native array lowering for LdElem requires that it is profiled. When not profiled, GlobOpt should not
  15153. // have specialized it.
  15154. Assert(ldElem->IsProfiledInstr());
  15155. labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  15156. labelCantUseArray = labelBailOut;
  15157. }
  15158. Js::FldInfoFlags flags = Js::FldInfo_NoInfo;
  15159. if (ldElem->IsProfiledInstr())
  15160. {
  15161. flags = ldElem->AsProfiledInstr()->u.ldElemInfo->flags;
  15162. }
  15163. bool isTypedArrayElement, isStringIndex, indirOpndOverflowed = false;
  15164. IR::Opnd* maskOpnd = nullptr;
  15165. indirOpnd =
  15166. GenerateFastElemICommon(
  15167. ldElem,
  15168. false,
  15169. src1->AsIndirOpnd(),
  15170. labelHelper,
  15171. labelCantUseArray,
  15172. labelFallThru,
  15173. &isTypedArrayElement,
  15174. &isStringIndex,
  15175. &emitBailout,
  15176. &maskOpnd,
  15177. nullptr, /* pLabelSegmentLengthIncreased */
  15178. true, /* checkArrayLengthOverflow */
  15179. false, /* forceGenerateFastPath */
  15180. false, /* returnLength */
  15181. nullptr, /* bailOutLabelInstr */
  15182. &indirOpndOverflowed,
  15183. flags);
  15184. IR::Opnd *dst = ldElem->GetDst();
  15185. IRType dstType = dst->AsRegOpnd()->GetType();
  15186. // The index is negative or not int.
  15187. if (indirOpnd == nullptr)
  15188. {
  15189. // could have bailout kind BailOutOnArrayAccessHelperCall if indirOpnd overflows
  15190. Assert(!(ldElem->HasBailOutInfo() && ldElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall) || indirOpndOverflowed);
  15191. // don't check fast path without bailout because it might not be TypedArray
  15192. if (indirOpndOverflowed && ldElem->HasBailOutInfo())
  15193. {
  15194. bool bailoutForOpndOverflow = false;
  15195. const IR::BailOutKind bailOutKind = ldElem->GetBailOutKind();
  15196. // return undefined for typed array if load dest is var, bailout otherwise
  15197. if ((bailOutKind & ~IR::BailOutKindBits) == IR::BailOutConventionalTypedArrayAccessOnly)
  15198. {
  15199. if (dst->IsVar())
  15200. {
  15201. // returns undefined in case of indirOpnd overflow which is consistent with behavior of interpreter
  15202. IR::Opnd * undefinedOpnd = this->LoadLibraryValueOpnd(ldElem, LibraryValue::ValueUndefined);
  15203. InsertMove(dst, undefinedOpnd, ldElem);
  15204. ldElem->FreeSrc1();
  15205. ldElem->FreeDst();
  15206. ldElem->Remove();
  15207. emittedFastPath = true;
  15208. }
  15209. else
  15210. {
  15211. bailoutForOpndOverflow = true;
  15212. }
  15213. }
  15214. if (bailoutForOpndOverflow || (bailOutKind & (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)))
  15215. {
  15216. IR::Opnd * constOpnd = nullptr;
  15217. if (dst->IsFloat())
  15218. {
  15219. constOpnd = IR::FloatConstOpnd::New(Js::JavascriptNumber::NaN, TyFloat64, m_func);
  15220. }
  15221. else
  15222. {
  15223. constOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func, true);
  15224. }
  15225. InsertMove(dst, constOpnd, ldElem);
  15226. ldElem->FreeSrc1();
  15227. ldElem->FreeDst();
  15228. GenerateBailOut(ldElem, nullptr, nullptr);
  15229. emittedFastPath = true;
  15230. }
  15231. return !emittedFastPath;
  15232. }
  15233. // The global optimizer should never type specialize a LdElem for which the index is not int or an integer constant
  15234. // with a negative value. This would force an unconditional bail out on the main code path.
  15235. else if (dst->IsVar())
  15236. {
  15237. if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
  15238. {
  15239. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  15240. Output::Print(_u("Typed Array Lowering: function: %s (%s): instr %s, not specialized by glob opt due to negative or not likely int index.\n"),
  15241. this->m_func->GetJITFunctionBody()->GetDisplayName(),
  15242. this->m_func->GetDebugNumberSet(debugStringBuffer),
  15243. Js::OpCodeUtil::GetOpCodeName(ldElem->m_opcode));
  15244. Output::Flush();
  15245. }
  15246. // We must be dealing with some unconventional index value. Don't emit fast path, but go directly to helper.
  15247. emittedFastPath = false;
  15248. return true;
  15249. }
  15250. else
  15251. {
  15252. AssertMsg(false, "Global optimizer shouldn't have specialized this instruction.");
  15253. Assert(dst->IsRegOpnd());
  15254. // If global optimizer failed to notice the unconventional index and type specialized the dst,
  15255. // there is nothing to do but bail out. This could happen if global optimizer's information based
  15256. // on value tracking fails to recognize a non-integer index or a constant int index that is negative.
  15257. // The bailout below ensures that we behave correctly in retail builds even under
  15258. // these (unlikely) conditions. To satisfy the downstream code we must populate the type specialized operand
  15259. // with some made up values, even though we will unconditionally bail out here and the values will never be
  15260. // used.
  15261. IR::IntConstOpnd *constOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func, true);
  15262. InsertMove(dst, constOpnd, ldElem);
  15263. ldElem->FreeSrc1();
  15264. ldElem->FreeDst();
  15265. GenerateBailOut(ldElem, nullptr, nullptr);
  15266. return false;
  15267. }
  15268. }
  15269. const IR::AutoReuseOpnd autoReuseIndirOpnd(indirOpnd, m_func);
  15270. const ValueType baseValueType(src1->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
  15271. if ((ldElem->HasBailOutInfo() &&
  15272. ldElem->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset &&
  15273. ldElem->GetBailOutInfo()->bailOutOffset <= ldElem->GetByteCodeOffset() &&
  15274. dst->IsEqual(src1->AsIndirOpnd()->GetBaseOpnd())) ||
  15275. (src1->AsIndirOpnd()->GetIndexOpnd() && dst->IsEqual(src1->AsIndirOpnd()->GetIndexOpnd())))
  15276. {
  15277. // This is a pre-op bailout where the dst is the same as one of the srcs. The dst may be trashed before bailing out,
  15278. // but since the operation will be processed again in the interpreter, src values need to be kept intact. Use a
  15279. // temporary dst until after the operation is complete.
  15280. IR::Instr *instrSink = ldElem->SinkDst(Js::OpCode::Ld_A);
  15281. // The sink instruction needs to be on the fall-through path
  15282. instrSink->Unlink();
  15283. labelFallThru->InsertAfter(instrSink);
  15284. LowererMD::ChangeToAssign(instrSink);
  15285. dst = ldElem->GetDst();
  15286. }
  15287. if (isTypedArrayElement)
  15288. {
  15289. // For typedArrays, convert the loaded element to the appropriate type
  15290. IR::RegOpnd *reg;
  15291. IR::AutoReuseOpnd autoReuseReg;
  15292. Assert(dst->IsRegOpnd());
  15293. if(indirOpnd->IsFloat())
  15294. {
  15295. AssertMsg((dstType == TyFloat64) || (dstType == TyVar), "For Float32Array LdElemI's dst should be specialized to TyFloat64 or not at all.");
  15296. if(indirOpnd->IsFloat32())
  15297. {
  15298. // MOVSS reg32.f32, indirOpnd.f32
  15299. IR::RegOpnd *reg32 = IR::RegOpnd::New(TyFloat32, this->m_func);
  15300. const IR::AutoReuseOpnd autoReuseReg32(reg32, m_func);
  15301. InsertMove(reg32, indirOpnd, ldElem);
  15302. // CVTPS2PD dst/reg.f64, reg32.f64
  15303. reg = dstType == TyFloat64 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyFloat64, this->m_func);
  15304. autoReuseReg.Initialize(reg, m_func);
  15305. InsertConvertFloat32ToFloat64(reg, reg32, ldElem);
  15306. }
  15307. else
  15308. {
  15309. Assert(indirOpnd->IsFloat64());
  15310. // MOVSD dst/reg.f64, indirOpnd.f64
  15311. reg = dstType == TyFloat64 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyFloat64, this->m_func);
  15312. autoReuseReg.Initialize(reg, m_func);
  15313. InsertMove(reg, indirOpnd, ldElem);
  15314. }
  15315. if (dstType != TyFloat64)
  15316. {
  15317. // Convert reg.f64 to var
  15318. m_lowererMD.SaveDoubleToVar(dst->AsRegOpnd(), reg, ldElem, ldElem);
  15319. }
  15320. #if FLOATVAR
  15321. // For NaNs, go to the helper to guarantee we don't have an illegal NaN
  15322. // TODO(magardn): move this to MD code.
  15323. #if _M_X64
  15324. // UCOMISD reg, reg
  15325. {
  15326. IR::Instr *const instr = IR::Instr::New(Js::OpCode::UCOMISD, this->m_func);
  15327. instr->SetSrc1(reg);
  15328. instr->SetSrc2(reg);
  15329. ldElem->InsertBefore(instr);
  15330. }
  15331. // JP $helper
  15332. {
  15333. IR::Instr *const instr = IR::BranchInstr::New(Js::OpCode::JP, labelHelper, this->m_func);
  15334. ldElem->InsertBefore(instr);
  15335. }
  15336. #elif _M_ARM64
  15337. // FCMP reg, reg
  15338. {
  15339. IR::Instr *const instr = IR::Instr::New(Js::OpCode::FCMP, this->m_func);
  15340. instr->SetSrc1(reg);
  15341. instr->SetSrc2(reg);
  15342. ldElem->InsertBefore(instr);
  15343. }
  15344. // BVS $helper
  15345. {
  15346. IR::Instr *const instr = IR::BranchInstr::New(Js::OpCode::BVS, labelHelper, this->m_func);
  15347. ldElem->InsertBefore(instr);
  15348. }
  15349. #endif
  15350. #endif
  15351. if(dstType == TyFloat64)
  15352. {
  15353. emitBailout = true;
  15354. }
  15355. }
  15356. else
  15357. {
  15358. AssertMsg((dstType == TyInt32) || (dstType == TyVar), "For Int/UintArray LdElemI's dst should be specialized to TyInt32 or not at all.");
  15359. reg = dstType == TyInt32 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyInt32, this->m_func);
  15360. autoReuseReg.Initialize(reg, m_func);
  15361. // Int32 and Uint32 arrays could overflow an int31, but the others can't
  15362. if (indirOpnd->GetType() != TyUint32
  15363. #if !INT32VAR
  15364. && indirOpnd->GetType() != TyInt32
  15365. #endif
  15366. )
  15367. {
  15368. reg->SetValueType(ValueType::GetTaggedInt()); // Fits as a tagged-int
  15369. }
  15370. // MOV/MOVZX/MOVSX dst/reg.int32, IndirOpnd.type
  15371. IR::Instr* instrMov = InsertMove(reg, indirOpnd, ldElem);
  15372. if (maskOpnd)
  15373. {
  15374. #if TARGET_64
  15375. if (maskOpnd->GetSize() != reg->GetType())
  15376. {
  15377. maskOpnd = maskOpnd->UseWithNewType(reg->GetType(), m_func)->AsRegOpnd();
  15378. }
  15379. #endif
  15380. instrMov = InsertAnd(reg, reg, maskOpnd, ldElem);
  15381. }
  15382. if (dstType == TyInt32)
  15383. {
  15384. instrMov->dstIsTempNumber = ldElem->dstIsTempNumber;
  15385. instrMov->dstIsTempNumberTransferred = ldElem->dstIsTempNumberTransferred;
  15386. if (indirOpnd->GetType() == TyUint32)
  15387. {
  15388. // TEST dst, dst
  15389. // JSB $helper (bailout)
  15390. InsertCompareBranch(
  15391. reg,
  15392. IR::IntConstOpnd::New(0, TyUint32, this->m_func, /* dontEncode = */ true),
  15393. Js::OpCode::BrLt_A,
  15394. labelHelper,
  15395. ldElem);
  15396. }
  15397. emitBailout = true;
  15398. }
  15399. else
  15400. {
  15401. // MOV dst, reg
  15402. IR::Instr *const instr = IR::Instr::New(Js::OpCode::ToVar, dst, reg, this->m_func);
  15403. instr->dstIsTempNumber = ldElem->dstIsTempNumber;
  15404. instr->dstIsTempNumberTransferred = ldElem->dstIsTempNumberTransferred;
  15405. ldElem->InsertBefore(instr);
  15406. // Convert dst to var
  15407. m_lowererMD.EmitLoadVar(instr, /* isFromUint32 = */ (indirOpnd->GetType() == TyUint32));
  15408. }
  15409. }
  15410. // JMP $fallthrough
  15411. InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
  15412. emittedFastPath = true;
  15413. if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
  15414. {
  15415. char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
  15416. baseValueType.ToString(baseValueTypeStr);
  15417. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  15418. Output::Print(_u("Typed Array Lowering: function: %s (%s), instr: %s, base value type: %S, %s."),
  15419. this->m_func->GetJITFunctionBody()->GetDisplayName(),
  15420. this->m_func->GetDebugNumberSet(debugStringBuffer),
  15421. Js::OpCodeUtil::GetOpCodeName(ldElem->m_opcode),
  15422. baseValueTypeStr,
  15423. (!dst->IsVar() ? _u("specialized") : _u("not specialized")));
  15424. Output::Print(_u("\n"));
  15425. Output::Flush();
  15426. }
  15427. }
  15428. else
  15429. {
  15430. // MOV dst, indirOpnd
  15431. InsertMove(dst, indirOpnd, ldElem);
  15432. if (maskOpnd)
  15433. {
  15434. #if TARGET_64
  15435. if (maskOpnd->GetSize() != dst->GetType())
  15436. {
  15437. maskOpnd = maskOpnd->UseWithNewType(dst->GetType(), m_func)->AsRegOpnd();
  15438. }
  15439. #endif
  15440. InsertAnd(dst, dst, maskOpnd, ldElem);
  15441. }
  15442. // The string index fast path does not operate on index properties (we don't get a PropertyString in that case), so
  15443. // we don't need to do any further checks in that case
  15444. // For LdMethodElem, if the loaded value is a tagged number, the error message generated by the helper call is
  15445. // better than if we were to just try to call the number. Also, the call arguments need to be evaluated before
  15446. // throwing the error, so just test whether it's an object and jump to helper if it's not.
  15447. const bool needObjectTest = !isStringIndex && !isNativeArrayLoad && ldElem->m_opcode == Js::OpCode::LdMethodElem;
  15448. needMissingValueCheck =
  15449. !isStringIndex && !(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues());
  15450. if(needMissingValueCheck)
  15451. {
  15452. // TEST dst, dst
  15453. // JEQ $helper | JNE $fallthrough
  15454. InsertMissingItemCompareBranch(
  15455. dst,
  15456. needObjectTest ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A,
  15457. needObjectTest ? labelHelper : labelFallThru,
  15458. ldElem);
  15459. if (isNativeArrayLoad)
  15460. {
  15461. Assert(!needObjectTest);
  15462. Assert(labelHelper != labelBailOut);
  15463. if(ldElem->AsProfiledInstr()->u.ldElemInfo->GetElementType().HasBeenUndefined())
  15464. {
  15465. // We're going to bail out trying to load "missing value" into a type-spec'd opnd.
  15466. // Branch to a point where we'll convert the array so that we don't keep bailing here.
  15467. // (Gappy arrays are not well-suited to nativeness.)
  15468. labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  15469. InsertBranch(Js::OpCode::Br, labelMissingNative, ldElem);
  15470. }
  15471. else
  15472. {
  15473. // If the value has not been profiled to be undefined at some point, jump directly to bail out
  15474. InsertBranch(Js::OpCode::Br, labelBailOut, ldElem);
  15475. }
  15476. }
  15477. }
  15478. if(needObjectTest)
  15479. {
  15480. // GenerateObjectTest(dst)
  15481. // JIsObject $fallthrough
  15482. m_lowererMD.GenerateObjectTest(dst, ldElem, labelFallThru, true);
  15483. }
  15484. else if(!needMissingValueCheck)
  15485. {
  15486. // JMP $fallthrough
  15487. InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
  15488. }
  15489. emittedFastPath = true;
  15490. }
  15491. }
  15492. // $helper:
  15493. // bailout or caller generated helper call
  15494. // $fallthru:
  15495. if (!emittedFastPath)
  15496. {
  15497. labelHelper->isOpHelper = false;
  15498. }
  15499. ldElem->InsertBefore(labelHelper);
  15500. instrIsInHelperBlock = true;
  15501. if (isNativeArrayLoad)
  15502. {
  15503. Assert(ldElem->HasBailOutInfo());
  15504. Assert(labelHelper != labelBailOut);
  15505. // Transform the original instr:
  15506. //
  15507. // $helper:
  15508. // dst = LdElemI_A src (BailOut)
  15509. // $fallthrough:
  15510. //
  15511. // to:
  15512. //
  15513. // b $fallthru <--- we get here if we loaded a valid element directly
  15514. // $helper:
  15515. // dst = LdElemI_A src
  15516. // cmp dst, MissingItem
  15517. // bne $fallthrough
  15518. // $bailout:
  15519. // BailOut
  15520. // $fallthrough:
  15521. LowerOneBailOutKind(ldElem, IR::BailOutConventionalNativeArrayAccessOnly, instrIsInHelperBlock);
  15522. IR::Instr *const insertBeforeInstr = ldElem->m_next;
  15523. // Do missing value check on value returned from helper so that we don't have to check the index against
  15524. // array length. (We already checked it above against the segment length.)
  15525. bool hasBeenUndefined = ldElem->AsProfiledInstr()->u.ldElemInfo->GetElementType().HasBeenUndefined();
  15526. if (hasBeenUndefined)
  15527. {
  15528. if(!emitBailout)
  15529. {
  15530. if (labelMissingNative == nullptr)
  15531. {
  15532. labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  15533. }
  15534. InsertMissingItemCompareBranch(ldElem->GetDst(), Js::OpCode::BrEq_A, labelMissingNative, insertBeforeInstr);
  15535. }
  15536. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  15537. if(labelMissingNative)
  15538. {
  15539. // We're going to bail out on a load from a gap, but convert the array to Var first, so we don't just
  15540. // bail here over and over. Gappy arrays are not well suited to nativeness.
  15541. // NOTE: only emit this call if the profile tells us that this has happened before ("hasBeenUndefined").
  15542. // Emitting this in Navier-Stokes brutalizes the score.
  15543. insertBeforeInstr->InsertBefore(labelMissingNative);
  15544. IR::JnHelperMethod helperMethod;
  15545. indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
  15546. if (indirOpnd->GetBaseOpnd()->GetValueType().HasIntElements())
  15547. {
  15548. helperMethod = IR::HelperIntArr_ToVarArray;
  15549. }
  15550. else
  15551. {
  15552. Assert(indirOpnd->GetBaseOpnd()->GetValueType().HasFloatElements());
  15553. helperMethod = IR::HelperFloatArr_ToVarArray;
  15554. }
  15555. m_lowererMD.LoadHelperArgument(insertBeforeInstr, indirOpnd->GetBaseOpnd());
  15556. IR::Instr *instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
  15557. instrHelper->SetSrc1(IR::HelperCallOpnd::New(helperMethod, m_func));
  15558. insertBeforeInstr->InsertBefore(instrHelper);
  15559. m_lowererMD.LowerCall(instrHelper, 0);
  15560. }
  15561. }
  15562. else
  15563. {
  15564. if(!emitBailout)
  15565. {
  15566. InsertMissingItemCompareBranch(ldElem->GetDst(), Js::OpCode::BrEq_A, labelBailOut, insertBeforeInstr);
  15567. }
  15568. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  15569. }
  15570. insertBeforeInstr->InsertBefore(labelBailOut);
  15571. }
  15572. if (emitBailout)
  15573. {
  15574. ldElem->UnlinkSrc1();
  15575. ldElem->UnlinkDst();
  15576. GenerateBailOut(ldElem, nullptr, nullptr);
  15577. }
  15578. return !emitBailout;
  15579. }
  15580. IR::Opnd *
  15581. Lowerer::GetMissingItemOpnd(IRType type, Func *func)
  15582. {
  15583. if (type == TyVar)
  15584. {
  15585. return IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstantAddress, func, true);
  15586. }
  15587. if (type == TyInt32)
  15588. {
  15589. return IR::IntConstOpnd::New(Js::JavascriptNativeIntArray::MissingItem, TyInt32, func, true);
  15590. }
  15591. AssertMsg(false, "Only expecting TyVar and TyInt32 in Lowerer::GetMissingItemOpnd");
  15592. __assume(false);
  15593. }
  15594. IR::Opnd*
  15595. Lowerer::GetMissingItemOpndForAssignment(IRType type, Func *func)
  15596. {
  15597. switch (type)
  15598. {
  15599. case TyVar:
  15600. case TyInt32:
  15601. return GetMissingItemOpnd(type, func);
  15602. case TyFloat64:
  15603. return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetNativeFloatArrayMissingItemAddr(), TyFloat64, func);
  15604. default:
  15605. AnalysisAssertMsg(false, "Unexpected type in Lowerer::GetMissingItemOpndForAssignment");
  15606. __assume(false);
  15607. }
  15608. }
  15609. IR::Opnd *
  15610. Lowerer::GetMissingItemOpndForCompare(IRType type, Func *func)
  15611. {
  15612. switch (type)
  15613. {
  15614. case TyVar:
  15615. case TyInt32:
  15616. return GetMissingItemOpnd(type, func);
  15617. case TyFloat64:
  15618. #if TARGET_64
  15619. return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetNativeFloatArrayMissingItemAddr(), TyUint64, func);
  15620. #else
  15621. return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetNativeFloatArrayMissingItemAddr(), TyUint32, func);
  15622. #endif
  15623. default:
  15624. AnalysisAssertMsg(false, "Unexpected type in Lowerer::GetMissingItemOpndForCompare");
  15625. __assume(false);
  15626. }
  15627. }
  15628. bool
  15629. Lowerer::GenerateFastStElemI(IR::Instr *& stElem, bool *instrIsInHelperBlockRef)
  15630. {
  15631. Assert(instrIsInHelperBlockRef);
  15632. bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
  15633. instrIsInHelperBlock = false;
  15634. IR::LabelInstr * labelHelper;
  15635. IR::LabelInstr * labelSegmentLengthIncreased;
  15636. IR::LabelInstr * labelFallThru;
  15637. IR::LabelInstr * labelBailOut = nullptr;
  15638. IR::Opnd *dst = stElem->GetDst();
  15639. IR::IndirOpnd * indirOpnd = dst->AsIndirOpnd();
  15640. AssertMsg(dst->IsIndirOpnd(), "Expected indirOpnd on StElementI");
  15641. // From FastElemICommon:
  15642. // TEST base, AtomTag -- check base not tagged int
  15643. // JNE $helper
  15644. // MOV r1, [base + offset(type)] -- check base isArray
  15645. // CMP [r1 + offset(typeId)], TypeIds_Array
  15646. // JNE $helper
  15647. // TEST index, 1 -- index tagged int
  15648. // JEQ $helper
  15649. // MOV r2, index
  15650. // SAR r2, Js::VarTag_Shift -- remove atom tag
  15651. // JS $helper -- exclude negative index
  15652. // MOV r4, [base + offset(head)]
  15653. // CMP r2, [r4 + offset(length)] -- bounds check
  15654. // JB $done
  15655. // CMP r2, [r4 + offset(size)] -- chunk has room?
  15656. // JAE $helper
  15657. // LEA r5, [r2 + 1]
  15658. // MOV [r4 + offset(length)], r5 -- update length on chunk
  15659. // CMP r5, [base + offset(length)]
  15660. // JBE $done
  15661. // MOV [base + offset(length)], r5 -- update length on array
  15662. // $done
  15663. // LEA r3, [r4 + offset(elements)]
  15664. // Generated here.
  15665. // MOV [r3 + r2], src
  15666. labelFallThru = stElem->GetOrCreateContinueLabel();
  15667. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  15668. bool emitBailout = false;
  15669. bool isNativeArrayStore = indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyNativeArray();
  15670. IR::LabelInstr * labelCantUseArray = labelHelper;
  15671. if (isNativeArrayStore)
  15672. {
  15673. if (stElem->GetSrc1()->GetType() != GetArrayIndirType(indirOpnd->GetBaseOpnd()->GetValueType()))
  15674. {
  15675. // Skip the fast path and just generate a helper call
  15676. return true;
  15677. }
  15678. if(stElem->HasBailOutInfo())
  15679. {
  15680. const IR::BailOutKind bailOutKind = stElem->GetBailOutKind();
  15681. if (bailOutKind & IR::BailOutConventionalNativeArrayAccessOnly)
  15682. {
  15683. labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  15684. labelCantUseArray = labelBailOut;
  15685. }
  15686. }
  15687. }
  15688. Js::FldInfoFlags flags = Js::FldInfo_NoInfo;
  15689. if (stElem->IsProfiledInstr())
  15690. {
  15691. flags = stElem->AsProfiledInstr()->u.stElemInfo->flags;
  15692. }
  15693. bool isTypedArrayElement, isStringIndex, indirOpndOverflowed = false;
  15694. IR::Opnd* maskOpnd = nullptr;
  15695. indirOpnd =
  15696. GenerateFastElemICommon(
  15697. stElem,
  15698. true,
  15699. indirOpnd,
  15700. labelHelper,
  15701. labelCantUseArray,
  15702. labelFallThru,
  15703. &isTypedArrayElement,
  15704. &isStringIndex,
  15705. &emitBailout,
  15706. &maskOpnd,
  15707. &labelSegmentLengthIncreased,
  15708. true, /* checkArrayLengthOverflow */
  15709. false, /* forceGenerateFastPath */
  15710. false, /* returnLength */
  15711. nullptr, /* bailOutLabelInstr */
  15712. &indirOpndOverflowed,
  15713. flags);
  15714. IR::Opnd *src = stElem->GetSrc1();
  15715. const IR::AutoReuseOpnd autoReuseSrc(src, m_func);
  15716. // The index is negative or not int.
  15717. if (indirOpnd == nullptr)
  15718. {
  15719. Assert(!(stElem->HasBailOutInfo() && stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall) || indirOpndOverflowed);
  15720. if (indirOpndOverflowed && stElem->HasBailOutInfo())
  15721. {
  15722. bool emittedFastPath = false;
  15723. const IR::BailOutKind bailOutKind = stElem->GetBailOutKind();
  15724. // ignore StElemI in case of indirOpnd overflow only for typed array which is consistent with behavior of interpreter
  15725. if ((bailOutKind & ~IR::BailOutKindBits) == IR::BailOutConventionalTypedArrayAccessOnly)
  15726. {
  15727. stElem->FreeSrc1();
  15728. stElem->FreeDst();
  15729. stElem->Remove();
  15730. emittedFastPath = true;
  15731. }
  15732. if (!emittedFastPath && (bailOutKind & (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)))
  15733. {
  15734. stElem->FreeSrc1();
  15735. stElem->FreeDst();
  15736. GenerateBailOut(stElem, nullptr, nullptr);
  15737. emittedFastPath = true;
  15738. }
  15739. return !emittedFastPath;
  15740. }
  15741. // The global optimizer should never type specialize a StElem for which we know the index is not int or is a negative
  15742. // int constant. This would result in an unconditional bailout on the main code path.
  15743. else if (src->IsVar())
  15744. {
  15745. if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
  15746. {
  15747. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  15748. Output::Print(_u("Typed Array Lowering: function: %s (%s): instr %s, not specialized by glob opt due to negative or not likely int index.\n"),
  15749. this->m_func->GetJITFunctionBody()->GetDisplayName(),
  15750. this->m_func->GetDebugNumberSet(debugStringBuffer),
  15751. Js::OpCodeUtil::GetOpCodeName(stElem->m_opcode));
  15752. Output::Flush();
  15753. }
  15754. // We must be dealing with some atypical index value. Don't emit fast path, but go directly to helper.
  15755. return true;
  15756. }
  15757. else
  15758. {
  15759. // If global optimizer failed to notice the unconventional index and type specialized the src,
  15760. // there is nothing to do but bail out. We should never hit this code path, unless the global optimizer's conditions
  15761. // for not specializing the instruction don't match the lowerer's conditions for not emitting the array checks (see above).
  15762. // This could happen if global optimizer's information based on value tracking fails to recognize a non-integer index or
  15763. // a constant int index that is negative. The bailout below ensures that we behave correctly in retail builds even under
  15764. // these (unlikely) conditions.
  15765. AssertMsg(false, "Global optimizer shouldn't have specialized this instruction.");
  15766. stElem->FreeSrc1();
  15767. stElem->FreeDst();
  15768. GenerateBailOut(stElem, nullptr, nullptr);
  15769. return false;
  15770. }
  15771. }
  15772. const IR::AutoReuseOpnd autoReuseIndirOpnd(indirOpnd, m_func);
  15773. const ValueType baseValueType(dst->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
  15774. if (isTypedArrayElement)
  15775. {
  15776. if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
  15777. {
  15778. char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
  15779. baseValueType.ToString(baseValueTypeStr);
  15780. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  15781. Output::Print(_u("Typed Array Lowering: function: %s (%s), instr: %s, base value type: %S, %s."),
  15782. this->m_func->GetJITFunctionBody()->GetDisplayName(),
  15783. this->m_func->GetDebugNumberSet(debugStringBuffer),
  15784. Js::OpCodeUtil::GetOpCodeName(stElem->m_opcode),
  15785. baseValueTypeStr,
  15786. (!src->IsVar() ? _u("specialized") : _u("not specialized")));
  15787. Output::Print(_u("\n"));
  15788. Output::Flush();
  15789. }
  15790. ObjectType objectType = baseValueType.GetObjectType();
  15791. if(indirOpnd->IsFloat())
  15792. {
  15793. if (src->GetType() == TyFloat64)
  15794. {
  15795. IR::RegOpnd *const regSrc = src->AsRegOpnd();
  15796. if (indirOpnd->IsFloat32())
  15797. {
  15798. // CVTSD2SS reg.f32, regSrc.f64 -- Convert regSrc from f64 to f32
  15799. IR::RegOpnd *const reg = IR::RegOpnd::New(TyFloat32, this->m_func);
  15800. const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
  15801. InsertConvertFloat64ToFloat32(reg, regSrc, stElem);
  15802. // MOVSS indirOpnd, reg
  15803. InsertMove(indirOpnd, reg, stElem, false);
  15804. }
  15805. else
  15806. {
  15807. // MOVSD indirOpnd, regSrc
  15808. InsertMove(indirOpnd, regSrc, stElem, false);
  15809. }
  15810. emitBailout = true;
  15811. }
  15812. else
  15813. {
  15814. Assert(src->GetType() == TyVar);
  15815. // MOV reg, src
  15816. IR::RegOpnd *const reg = IR::RegOpnd::New(TyVar, this->m_func);
  15817. const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
  15818. InsertMove(reg, src, stElem);
  15819. // Convert to float, and assign to indirOpnd
  15820. if (baseValueType.IsLikelyOptimizedVirtualTypedArray())
  15821. {
  15822. IR::RegOpnd* dstReg = IR::RegOpnd::New(indirOpnd->GetType(), this->m_func);
  15823. m_lowererMD.EmitLoadFloat(dstReg, reg, stElem, stElem, labelHelper);
  15824. InsertMove(indirOpnd, dstReg, stElem);
  15825. }
  15826. else
  15827. {
  15828. m_lowererMD.EmitLoadFloat(indirOpnd, reg, stElem, stElem, labelHelper);
  15829. }
  15830. }
  15831. }
  15832. else if (objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedVirtualArray || objectType == ObjectType::Uint8ClampedMixedArray)
  15833. {
  15834. Assert(indirOpnd->GetType() == TyUint8);
  15835. IR::RegOpnd *regSrc;
  15836. IR::AutoReuseOpnd autoReuseRegSrc;
  15837. if(src->IsRegOpnd())
  15838. {
  15839. regSrc = src->AsRegOpnd();
  15840. }
  15841. else
  15842. {
  15843. regSrc = IR::RegOpnd::New(StackSym::New(src->GetType(), m_func), src->GetType(), m_func);
  15844. autoReuseRegSrc.Initialize(regSrc, m_func);
  15845. InsertMove(regSrc, src, stElem);
  15846. }
  15847. IR::Opnd *bitMaskOpnd;
  15848. IRType srcType = regSrc->GetType();
  15849. if ((srcType == TyFloat64) || (srcType == TyInt32))
  15850. {
  15851. // if (srcType == TyInt32) {
  15852. // TEST regSrc, ~255
  15853. // JE $storeValue
  15854. // JSB $handleNegative
  15855. // MOV indirOpnd, 255
  15856. // JMP $fallThru
  15857. // $handleNegative [isHelper = false]
  15858. // MOV indirOpnd, 0
  15859. // JMP $fallThru
  15860. // $storeValue
  15861. // MOV indirOpnd, regSrc
  15862. // }
  15863. // else {
  15864. // MOVSD regTmp, regSrc
  15865. // ADDSD regTmp, 0.5
  15866. // CVTTSD2SI regOpnd, regTmp
  15867. // TEST regOpnd, ~255
  15868. // JE $storeValue
  15869. // $handleOutOfBounds [isHelper = true]
  15870. // COMISD regSrc, [&FloatZero]
  15871. // JB $handleNegative
  15872. // MOV regOpnd, 255
  15873. // JMP $storeValue
  15874. // $handleNegative [isHelper = true]
  15875. // MOV regOpnd, 0
  15876. // $storeValue
  15877. // MOV indirOpnd, regOpnd
  15878. // }
  15879. // $fallThru
  15880. IR::RegOpnd *regOpnd;
  15881. IR::AutoReuseOpnd autoReuseRegOpnd;
  15882. if (srcType == TyInt32)
  15883. {
  15884. // When srcType == TyInt32 we will never call the helper and we will never
  15885. // modify the regOpnd. Therefore, it's okay to use regSrc directly, and it
  15886. // reduces register pressure.
  15887. regOpnd = regSrc;
  15888. }
  15889. else
  15890. {
  15891. #ifdef _M_IX86
  15892. AssertMsg(AutoSystemInfo::Data.SSE2Available(), "GlobOpt shouldn't have specialized Uint8ClampedArray StElem to float64 if SSE2 is unavailable.");
  15893. #endif
  15894. regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  15895. autoReuseRegOpnd.Initialize(regOpnd, m_func);
  15896. Assert(objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedVirtualArray || objectType == ObjectType::Uint8ClampedMixedArray);
  15897. // Uint8ClampedArray follows IEEE 754 rounding rules for ties which round up
  15898. // odd integers and round down even integers. Both ties result in the nearest
  15899. // even integer value.
  15900. //
  15901. // CVTSD2SI regOpnd, regSrc
  15902. LowererMD::InsertConvertFloat64ToInt32(RoundModeHalfToEven, regOpnd, regSrc, stElem);
  15903. }
  15904. IR::LabelInstr *labelStoreValue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  15905. #ifndef _M_ARM
  15906. // TEST regOpnd, ~255
  15907. // JE $storeValue
  15908. bitMaskOpnd = IR::IntConstOpnd::New(~255, TyInt32, this->m_func, true);
  15909. InsertTestBranch(regOpnd, bitMaskOpnd, Js::OpCode::BrEq_A, labelStoreValue, stElem);
  15910. #else // ARM
  15911. // Special case for ARM, a shift may be better
  15912. //
  15913. // ASRS tempReg, src, 8
  15914. // BEQ $inlineSet
  15915. InsertShiftBranch(
  15916. Js::OpCode::Shr_A,
  15917. IR::RegOpnd::New(TyInt32, this->m_func),
  15918. regOpnd,
  15919. IR::IntConstOpnd::New(8, TyInt8, this->m_func),
  15920. Js::OpCode::BrEq_A,
  15921. labelStoreValue,
  15922. stElem);
  15923. #endif
  15924. IR::LabelInstr *labelHandleNegative = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, srcType == TyFloat64);
  15925. if (srcType == TyInt32)
  15926. {
  15927. // JSB $handleNegativeOrOverflow
  15928. InsertBranch(
  15929. LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A),
  15930. labelHandleNegative,
  15931. stElem);
  15932. // MOV IndirOpnd.u8, 255
  15933. InsertMove(indirOpnd, IR::IntConstOpnd::New(255, TyUint8, this->m_func, true), stElem);
  15934. // JMP $fallThru
  15935. InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
  15936. // $handleNegative [isHelper = false]
  15937. stElem->InsertBefore(labelHandleNegative);
  15938. // MOV IndirOpnd.u8, 0
  15939. InsertMove(indirOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func, true), stElem);
  15940. // JMP $fallThru
  15941. InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
  15942. }
  15943. else
  15944. {
  15945. Assert(regOpnd != regSrc);
  15946. // This label is just to ensure the following code is moved to the helper block.
  15947. // $handleOutOfBounds [isHelper = true]
  15948. IR::LabelInstr *labelHandleOutOfBounds = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  15949. stElem->InsertBefore(labelHandleOutOfBounds);
  15950. // COMISD regSrc, FloatZero
  15951. // JB labelHandleNegative
  15952. IR::MemRefOpnd * zeroOpnd = IR::MemRefOpnd::New(this->m_func->GetThreadContextInfo()->GetDoubleZeroAddr(), TyMachDouble, this->m_func);
  15953. InsertCompareBranch(regSrc, zeroOpnd, Js::OpCode::BrNotGe_A, labelHandleNegative, stElem);
  15954. // MOV regOpnd, 255
  15955. InsertMove(regOpnd, IR::IntConstOpnd::New(255, TyUint8, this->m_func, true), stElem);
  15956. // JMP $storeValue
  15957. InsertBranch(Js::OpCode::Br, labelStoreValue, stElem);
  15958. // $handleNegative [isHelper = true]
  15959. stElem->InsertBefore(labelHandleNegative);
  15960. // MOV regOpnd, 0
  15961. InsertMove(regOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func, true), stElem);
  15962. }
  15963. // $storeValue
  15964. stElem->InsertBefore(labelStoreValue);
  15965. // MOV IndirOpnd.u8, regOpnd.u8
  15966. InsertMove(indirOpnd, regOpnd, stElem);
  15967. emitBailout = true;
  15968. }
  15969. else
  15970. {
  15971. Assert(srcType == TyVar);
  15972. #if INT32VAR
  15973. bitMaskOpnd = IR::AddrOpnd::New((Js::Var)~(INT_PTR)(Js::TaggedInt::ToVarUnchecked(255)), IR::AddrOpndKindConstantVar, this->m_func, true);
  15974. #else
  15975. bitMaskOpnd = IR::IntConstOpnd::New(~(INT_PTR)(Js::TaggedInt::ToVarUnchecked(255)), TyMachReg, this->m_func, true);
  15976. #endif
  15977. // Note: We are assuming that if no bits other than ~(TaggedInt(255)) are 1, that we have a tagged
  15978. // int value between 0 - 255.
  15979. // #if INT32VAR
  15980. // This works for pointers because tagged int bit can't be on, and first 64k are not valid addresses
  15981. // This works for floats because a valid float would have one of the upper 13 bits on.
  15982. // #else
  15983. // Any pointer is larger than 512 because first 64k memory is reserved by the OS
  15984. // #endif
  15985. IR::LabelInstr *labelInlineSet = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  15986. #ifndef _M_ARM
  15987. // TEST src, ~(TaggedInt(255)) -- Check for tagged int >= 255 and <= 0
  15988. // JEQ $inlineSet
  15989. InsertTestBranch(regSrc, bitMaskOpnd, Js::OpCode::BrEq_A, labelInlineSet, stElem);
  15990. #else // ARM
  15991. // Special case for ARM, a shift may be better
  15992. //
  15993. // ASRS tempReg, src, 8
  15994. // BEQ $inlineSet
  15995. InsertShiftBranch(
  15996. Js::OpCode::Shr_A,
  15997. IR::RegOpnd::New(TyInt32, this->m_func),
  15998. regSrc,
  15999. IR::IntConstOpnd::New(8, TyInt8, this->m_func),
  16000. Js::OpCode::BrEq_A,
  16001. labelInlineSet,
  16002. stElem);
  16003. #endif
  16004. // Uint8ClampedArray::DirectSetItem(array, index, value);
  16005. // Inserting a helper call. Make sure it observes the main instructions's requirements regarding implicit calls.
  16006. if (!instrIsInHelperBlock)
  16007. {
  16008. stElem->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, m_func, true));
  16009. }
  16010. if (stElem->HasBailOutInfo() && (stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall))
  16011. {
  16012. // Bail out instead of doing the helper call.
  16013. Assert(labelHelper);
  16014. this->InsertBranch(Js::OpCode::Br, labelHelper, stElem);
  16015. }
  16016. else
  16017. {
  16018. IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
  16019. stElem->InsertBefore(instr);
  16020. if (stElem->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(stElem->GetBailOutKind()))
  16021. {
  16022. // Bail out if this helper triggers implicit calls.
  16023. instr = this->AddBailoutToHelperCallInstr(instr, stElem->GetBailOutInfo(), stElem->GetBailOutKind(), stElem);
  16024. }
  16025. m_lowererMD.LoadHelperArgument(instr, regSrc);
  16026. IR::Opnd *indexOpnd = indirOpnd->GetIndexOpnd();
  16027. if (indexOpnd == nullptr)
  16028. {
  16029. if (indirOpnd->GetOffset() == 0)
  16030. {
  16031. // There are two ways that we can get an indirOpnd with no index and 0 offset.
  16032. // The first is that we're storing to element 0 in the array by constant offset.
  16033. // The second is that we got a pointer back that has spectre masking, so it's going
  16034. // to not have the appropriate index into the array. In that case, we need to regen
  16035. // the index.
  16036. // The plan is
  16037. // 1. get the backing buffer pointer
  16038. // 2. subtract that from the indexOpnd to get the numeric index
  16039. // This is unfortunately slightly worse perf for constant writes of vars to index 0
  16040. // of Uint8ClampedArrays, but that's hopefully uncommon enough that the impact will
  16041. // be minimal
  16042. // MOV backingBufferOpnd, [base + offset(arrayBuffer)]
  16043. // SUB indexOpnd, backingBufferOpnd
  16044. int bufferOffset = GetArrayOffsetOfHeadSegment(baseValueType);
  16045. IR::IndirOpnd* arrayBufferOpnd = IR::IndirOpnd::New(stElem->GetDst()->AsIndirOpnd()->GetBaseOpnd(), bufferOffset, TyMachPtr, this->m_func);
  16046. IR::RegOpnd* backingBufferOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  16047. InsertMove(backingBufferOpnd, arrayBufferOpnd, instr);
  16048. IR::RegOpnd* tempIndexOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  16049. InsertSub(false, tempIndexOpnd, indirOpnd->GetBaseOpnd(), backingBufferOpnd, instr);
  16050. indexOpnd = tempIndexOpnd->UseWithNewType(TyInt32, this->m_func);
  16051. }
  16052. else
  16053. {
  16054. indexOpnd = IR::IntConstOpnd::New(indirOpnd->GetOffset(), TyInt32, this->m_func);
  16055. }
  16056. }
  16057. else
  16058. {
  16059. Assert(indirOpnd->GetOffset() == 0);
  16060. }
  16061. m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  16062. m_lowererMD.LoadHelperArgument(instr, stElem->GetDst()->AsIndirOpnd()->GetBaseOpnd());
  16063. Assert(objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedMixedArray || objectType == ObjectType::Uint8ClampedVirtualArray);
  16064. m_lowererMD.ChangeToHelperCall(instr, IR::JnHelperMethod::HelperUint8ClampedArraySetItem);
  16065. // JMP $fallThrough
  16066. InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
  16067. }
  16068. //$inlineSet
  16069. stElem->InsertBefore(labelInlineSet);
  16070. IR::RegOpnd *regOpnd;
  16071. IR::AutoReuseOpnd autoReuseRegOpnd;
  16072. #if INT32VAR
  16073. regOpnd = regSrc;
  16074. #else
  16075. // MOV r1, src
  16076. // SAR r1, 1
  16077. regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  16078. autoReuseRegOpnd.Initialize(regOpnd, m_func);
  16079. InsertShift(
  16080. Js::OpCode::Shr_A,
  16081. false /* needFlags */,
  16082. regOpnd,
  16083. regSrc,
  16084. IR::IntConstOpnd::New(1, TyInt8, this->m_func),
  16085. stElem);
  16086. #endif
  16087. // MOV IndirOpnd.u8, reg.u8
  16088. InsertMove(indirOpnd, regOpnd, stElem);
  16089. }
  16090. }
  16091. else
  16092. {
  16093. if (src->IsInt32())
  16094. {
  16095. // MOV indirOpnd, src
  16096. InsertMove(indirOpnd, src, stElem);
  16097. emitBailout = true;
  16098. }
  16099. else if (src->IsFloat64())
  16100. {
  16101. AssertMsg(indirOpnd->GetType() == TyUint32, "Only StElemI to Uint32Array could be specialized to float64.");
  16102. #ifdef _M_IX86
  16103. AssertMsg(AutoSystemInfo::Data.SSE2Available(), "GloOpt shouldn't have specialized Uint32Array StElemI to float64 if SSE2 is unavailable.");
  16104. #endif
  16105. bool bailOutOnHelperCall = stElem->HasBailOutInfo() ? !!(stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall) : false;
  16106. if (bailOutOnHelperCall)
  16107. {
  16108. if(!GlobOpt::DoEliminateArrayAccessHelperCall(this->m_func))
  16109. {
  16110. // Array access helper call removal is already off for some reason. Prevent trying to rejit again
  16111. // because it won't help and the same thing will happen again. Just abort jitting this function.
  16112. if(PHASE_TRACE(Js::BailOutPhase, this->m_func))
  16113. {
  16114. Output::Print(_u(" Aborting JIT because EliminateArrayAccessHelperCall is already off\n"));
  16115. Output::Flush();
  16116. }
  16117. throw Js::OperationAbortedException();
  16118. }
  16119. throw Js::RejitException(RejitReason::ArrayAccessHelperCallEliminationDisabled);
  16120. }
  16121. IR::RegOpnd *const reg = IR::RegOpnd::New(TyInt32, this->m_func);
  16122. const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
  16123. m_lowererMD.EmitFloatToInt(reg, src, stElem, stElem, labelHelper);
  16124. // MOV indirOpnd, reg
  16125. InsertMove(indirOpnd, reg, stElem);
  16126. emitBailout = true;
  16127. }
  16128. else
  16129. {
  16130. Assert(src->IsVar());
  16131. if(src->IsAddrOpnd())
  16132. {
  16133. IR::AddrOpnd *const addrSrc = src->AsAddrOpnd();
  16134. Assert(addrSrc->IsVar());
  16135. Assert(Js::TaggedInt::Is(addrSrc->m_address));
  16136. // MOV indirOpnd, intValue
  16137. InsertMove(
  16138. indirOpnd,
  16139. IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(addrSrc->m_address), TyInt32, m_func),
  16140. stElem);
  16141. }
  16142. else
  16143. {
  16144. IR::RegOpnd *const regSrc = src->AsRegOpnd();
  16145. // FromVar reg, Src
  16146. IR::RegOpnd *const reg = IR::RegOpnd::New(TyInt32, this->m_func);
  16147. const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
  16148. IR::Instr * instr = IR::Instr::New(Js::OpCode::FromVar, reg, regSrc, stElem->m_func);
  16149. stElem->InsertBefore(instr);
  16150. // Convert reg to int32
  16151. // Note: ToUint32 is implemented as (uint32)ToInt32()
  16152. IR::BailOutKind bailOutKind = stElem->HasBailOutInfo() ? stElem->GetBailOutKind() : IR::BailOutInvalid;
  16153. if (BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind))
  16154. {
  16155. instr = this->AddBailoutToHelperCallInstr(instr, stElem->GetBailOutInfo(), bailOutKind, stElem);
  16156. }
  16157. bool bailOutOnHelperCall = !!(bailOutKind & IR::BailOutOnArrayAccessHelperCall);
  16158. m_lowererMD.EmitLoadInt32(instr, true /*conversionFromObjectAllowed*/, bailOutOnHelperCall, labelHelper);
  16159. // MOV indirOpnd, reg
  16160. InsertMove(indirOpnd, reg, stElem);
  16161. }
  16162. }
  16163. }
  16164. }
  16165. else
  16166. {
  16167. if(labelSegmentLengthIncreased)
  16168. {
  16169. IR::Instr *const insertBeforeInstr = labelSegmentLengthIncreased->m_next;
  16170. // We might be changing the array to have missing values here, or we might be
  16171. // changing it to extend it; in either case, we're not going to make it _not_
  16172. // have missing values after this operation, so just write and fallthrough.
  16173. // labelSegmentLengthIncreased:
  16174. // mov [segment + index], src
  16175. // jmp $fallThru
  16176. InsertMove(indirOpnd, src, insertBeforeInstr);
  16177. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  16178. }
  16179. if (!(isStringIndex || (baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())))
  16180. {
  16181. if(!stElem->IsProfiledInstr() || stElem->AsProfiledInstr()->u.stElemInfo->LikelyFillsMissingValue())
  16182. {
  16183. // Check whether the store is filling a missing value. If so, fall back to the helper so that it can check whether
  16184. // this store is filling the last missing value in the array. This is necessary to keep the missing value tracking
  16185. // in arrays precise. The check is omitted when profile data says that the store is likely to create missing values.
  16186. //
  16187. // cmp [segment + index], Js::SparseArraySegment::MissingValue
  16188. // je $helper
  16189. InsertMissingItemCompareBranch(
  16190. indirOpnd,
  16191. Js::OpCode::BrEq_A,
  16192. labelHelper,
  16193. stElem);
  16194. }
  16195. else
  16196. {
  16197. GenerateIsEnabledArraySetElementFastPathCheck(labelHelper, stElem);
  16198. }
  16199. }
  16200. // MOV [r3 + r2], src
  16201. InsertMoveWithBarrier(indirOpnd, src, stElem);
  16202. }
  16203. // JMP $fallThru
  16204. InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
  16205. // $helper:
  16206. // bailout or caller generated helper call
  16207. // $fallThru:
  16208. stElem->InsertBefore(labelHelper);
  16209. instrIsInHelperBlock = true;
  16210. if (isNativeArrayStore && !isStringIndex)
  16211. {
  16212. Assert(stElem->HasBailOutInfo());
  16213. Assert(labelHelper != labelBailOut);
  16214. // Transform the original instr:
  16215. //
  16216. // $helper:
  16217. // dst = LdElemI_A src (BailOut)
  16218. // $fallthrough:
  16219. //
  16220. // to:
  16221. //
  16222. // $helper:
  16223. // dst = LdElemI_A src
  16224. // b $fallthrough
  16225. // $bailout:
  16226. // BailOut
  16227. // $fallthrough:
  16228. LowerOneBailOutKind(stElem, IR::BailOutConventionalNativeArrayAccessOnly, instrIsInHelperBlock);
  16229. IR::Instr *const insertBeforeInstr = stElem->m_next;
  16230. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  16231. insertBeforeInstr->InsertBefore(labelBailOut);
  16232. }
  16233. if (emitBailout)
  16234. {
  16235. stElem->FreeSrc1();
  16236. stElem->FreeDst();
  16237. GenerateBailOut(stElem, nullptr, nullptr);
  16238. }
  16239. return !emitBailout;
  16240. }
bool
Lowerer::GenerateFastLdLen(IR::Instr *ldLen, bool *instrIsInHelperBlockRef)
{
    // Lowers LdLen ("x.length") with inline fast paths for optimized arrays
    // and/or strings, chosen from the profiled value type of src1.
    //
    // Params:
    //   ldLen                   - the LdLen instruction (src1 = object, dst = length).
    //   instrIsInHelperBlockRef - out-param; set to true once emission switches
    //                             into the $helper block, so the caller knows the
    //                             remaining code it emits is helper-path code.
    // Returns:
    //   true  - fast path emitted (or skipped); caller must still generate the
    //           helper call after $helper.
    //   false - the instruction was fully handled here (stack-args fast path, or
    //           the BailOutOnIrregularLength case which bails out instead of
    //           making a helper call); caller emits nothing.
    Assert(instrIsInHelperBlockRef);
    bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
    instrIsInHelperBlock = false;

    // Shape of the emitted fast path (array + string case, non-INT32VAR):
    //      TEST src, AtomTag                           -- check src not tagged int
    //      JNE $helper
    //      CMP [src], JavascriptArray::`vtable'        -- check base isArray
    //      JNE $string
    //      MOV length, [src + offset(length)]          -- load array length
    //      JMP $toVar
    // $string:
    //      CMP [src + offset(type)], static_string_type -- check src isString
    //      JNE $helper
    //      MOV length, [src + offset(length)]          -- load string length
    // $toVar:
    //      TEST length, 0xC0000000                     -- test for overflow of SHL, or negative
    //      JNE $helper
    //      SHL length, Js::VarTag_Shift                -- restore the var tag on the result
    //      INC length
    //      MOV dst, length
    //      JMP $fallthru
    // $helper:
    //      CALL GetProperty(src, length_property_id, scriptContext)
    // $fallthru:
    IR::Opnd * opnd = ldLen->GetSrc1();
    IR::RegOpnd * dst = ldLen->GetDst()->AsRegOpnd();
    const ValueType srcValueType(opnd->GetValueType());
    // Created eagerly; inserted at the very end (or in the irregular-length
    // bailout path). Marked as a helper label.
    IR::LabelInstr *const labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if (ldLen->DoStackArgsOpt())
    {
        // "arguments.length" optimization: the dedicated stack-args path fully
        // replaces the instruction, so no helper call is needed.
        GenerateFastArgumentsLdLen(ldLen, ldLen->GetOrCreateContinueLabel());
        ldLen->Remove();
        return false;
    }
    else
    {
        const bool arrayFastPath = ShouldGenerateArrayFastPath(opnd, false, true, false);
        // HasBeenString instead of IsLikelyString because it could be a merge between StringObject and String, and this
        // information about whether it's a StringObject or some other object is not available in the profile data
        const bool stringFastPath = srcValueType.IsUninitialized() || srcValueType.HasBeenString();
        if(!(arrayFastPath || stringFastPath))
        {
            // No usable profile info: caller generates only the helper call.
            return true;
        }
        IR::RegOpnd * src;
        if (opnd->IsRegOpnd())
        {
            src = opnd->AsRegOpnd();
        }
        else
        {
            // LdLen has a PropertySymOpnd until globopt where the decision whether to convert it to LdFld is made. If globopt is skipped, the opnd will
            // still be a PropertySymOpnd here. In that case, do the conversion here.
            IR::SymOpnd * symOpnd = opnd->AsSymOpnd();
            PropertySym * propertySym = symOpnd->m_sym->AsPropertySym();
            src = IR::RegOpnd::New(propertySym->m_stackSym, IRType::TyVar, this->m_func);
            ldLen->ReplaceSrc1(src);
            opnd = src;
        }
        // Typed/optimized arrays keep their length at a type-specific offset;
        // fall back to JavascriptArray's offset otherwise.
        const int32 arrayOffsetOfLength =
            srcValueType.IsLikelyAnyOptimizedArray()
                ? GetArrayOffsetOfLength(srcValueType)
                : Js::JavascriptArray::GetOffsetOfLength();
        IR::LabelInstr *labelString = nullptr;
        IR::RegOpnd *arrayOpnd = src;
        IR::RegOpnd *arrayLengthOpnd = nullptr;
        IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
        if(arrayFastPath)
        {
            if(!srcValueType.IsAnyOptimizedArray())
            {
                if(stringFastPath)
                {
                    // If we don't have info about the src value type or its object type, the array and string fast paths are
                    // generated; a failed array test falls through to $string rather than $helper.
                    labelString = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                }
                arrayOpnd = GenerateArrayTest(src, labelHelper, stringFastPath ? labelString : labelHelper, ldLen, false);
            }
            else if(src->IsArrayRegOpnd())
            {
                IR::ArrayRegOpnd *const arrayRegOpnd = src->AsArrayRegOpnd();
                if(arrayRegOpnd->LengthSym())
                {
                    // The length is already available in a hoisted sym; reuse it
                    // instead of reloading from the object.
                    arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), TyUint32, m_func);
                    DebugOnly(arrayLengthOpnd->FreezeSymValue());
                    autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
                }
            }
        }
        const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);
        IR::RegOpnd *lengthOpnd = nullptr;
        IR::AutoReuseOpnd autoReuseLengthOpnd;
        // Lazily allocates the uint32 register that both the array and string
        // paths write their length into.
        const auto EnsureLengthOpnd = [&]()
        {
            if(lengthOpnd)
            {
                return;
            }
            lengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
            autoReuseLengthOpnd.Initialize(lengthOpnd, m_func);
        };
        if(arrayFastPath)
        {
            if(arrayLengthOpnd)
            {
                lengthOpnd = arrayLengthOpnd;
                autoReuseLengthOpnd.Initialize(lengthOpnd, m_func);
                Assert(!stringFastPath);
            }
            else
            {
                // MOV length, [array + offset(length)] -- Load array length
                EnsureLengthOpnd();
                IR::IndirOpnd *const indirOpnd = IR::IndirOpnd::New(arrayOpnd, arrayOffsetOfLength, TyUint32, this->m_func);
                InsertMove(lengthOpnd, indirOpnd, ldLen);
            }
        }
        if(stringFastPath)
        {
            IR::LabelInstr *labelToVar = nullptr;
            if(arrayFastPath)
            {
                // JMP $toVar -- skip the string path when the array path succeeded
                labelToVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                InsertBranch(Js::OpCode::Br, labelToVar, ldLen);
                // $string:
                ldLen->InsertBefore(labelString);
            }
            // CMP [src + offset(type)], static_stringtype -- check src isString
            // JNE $helper
            GenerateStringTest(src, ldLen, labelHelper, nullptr, !arrayFastPath);
            // MOV length, [src + offset(length)] -- Load string length
            EnsureLengthOpnd();
            IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(src, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func);
            InsertMove(lengthOpnd, indirOpnd, ldLen);
            if(arrayFastPath)
            {
                // $toVar:
                ldLen->InsertBefore(labelToVar);
            }
        }
        Assert(lengthOpnd);
        if(ldLen->HasBailOutInfo() && (ldLen->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnIrregularLength)
        {
            // Int-specialized dst: deliver the raw int32 length, and bail out
            // (rather than calling a helper) if it doesn't fit.
            Assert(ldLen->GetBailOutKind() == IR::BailOutOnIrregularLength);
            Assert(dst->IsInt32());
            // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative
            //     test length, length
            //     js $helper
            //     mov dst, length
            //     jmp $fallthrough
            InsertCompareBranch(
                lengthOpnd,
                IR::IntConstOpnd::New(0, lengthOpnd->GetType(), m_func, true),
                Js::OpCode::BrLt_A,
                labelHelper,
                ldLen);
            InsertMove(dst, lengthOpnd, ldLen);
            InsertBranch(Js::OpCode::Br, ldLen->GetOrCreateContinueLabel(), ldLen);
            // $helper:
            // (Bail out with IR::BailOutOnIrregularLength)
            ldLen->InsertBefore(labelHelper);
            instrIsInHelperBlock = true;
            ldLen->FreeDst();
            ldLen->FreeSrc1();
            GenerateBailOut(ldLen);
            return false;
        }
#if INT32VAR
        // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative
        //     test length, length
        //     js $helper
        InsertCompareBranch(
            lengthOpnd,
            IR::IntConstOpnd::New(0, lengthOpnd->GetType(), m_func, true),
            Js::OpCode::BrLt_A,
            labelHelper,
            ldLen);
#else
        // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative.
        // Additionally, verify that the signed value's width is not greater than 31 bits, since it needs to be tagged.
        //     test length, 0xC0000000
        //     jne $helper
        InsertTestBranch(
            lengthOpnd,
            IR::IntConstOpnd::New(0xC0000000, TyUint32, this->m_func, true),
            Js::OpCode::BrNeq_A,
            labelHelper,
            ldLen);
#endif
#if INT32VAR
        // Tag the 32-bit length into a var:
        //     dst_32 = MOV length
        //     dst_64 = OR dst_64, Js::AtomTag_IntPtr
        Assert(dst->GetType() == TyVar);
        IR::Opnd *dst32 = dst->Copy(this->m_func);
        dst32->SetType(TyInt32);
        // This will clear the top bits.
        InsertMove(dst32, lengthOpnd, ldLen);
        m_lowererMD.GenerateInt32ToVarConversion(dst, ldLen);
#else
        // dst = SHL length, Js::VarTag_Shift -- restore the var tag on the result
        InsertShift(
            Js::OpCode::Shl_A,
            false /* needFlags */,
            dst,
            lengthOpnd,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func),
            ldLen);
        // dst = ADD dst, AtomTag
        InsertAdd(
            false /* needFlags */,
            dst,
            dst,
            IR::IntConstOpnd::New(Js::AtomTag_Int32, TyUint32, m_func, true),
            ldLen);
#endif
        // JMP $fallthrough
        InsertBranch(Js::OpCode::Br, ldLen->GetOrCreateContinueLabel(), ldLen);
    }
    // $helper:
    // (caller generates helper call)
    ldLen->InsertBefore(labelHelper);
    instrIsInHelperBlock = true;
    return true; // fast path was generated, helper call will be in a helper block
}
void
Lowerer::GenerateFastInlineStringCodePointAt(IR::Instr* lastInstr, Func* func, IR::Opnd *strLength, IR::Opnd *srcIndex, IR::RegOpnd *lowerChar, IR::RegOpnd *strPtr)
{
    // Extends an already-emitted charCodeAt fast path into codePointAt:
    // if the char at srcIndex is a high surrogate and the next char is a low
    // surrogate, combine them into the full code point in lowerChar; otherwise
    // leave lowerChar untouched and jump to $charCodeAt (the plain path).
    //
    //// Required state (established by the caller):
    //   strLength - UInt32 string length
    //   srcIndex  - TyVar (tagged int) if not an AddrOpnd constant
    //   lowerChar - TyMachReg; holds the char code at srcIndex on entry
    //   strPtr    - register holding the character buffer pointer
    //
    //// Emitted sequence:
    //   CMP strLength, srcIndex + 1              -- need one more char for the pair
    //   JBE $charCodeAt
    //   CMP lowerChar, 0xDC00                    -- keep only 0xD800..0xDBFF (high surrogate)
    //   JGE $charCodeAt
    //   CMP lowerChar, 0xD7FF
    //   JLE $charCodeAt
    //   upperChar = MOVZX [strPtr + (srcIndex + 1) * 2]
    //   CMP upperChar, 0xE000                    -- keep only 0xDC00..0xDFFF (low surrogate)
    //   JGE $charCodeAt
    //   CMP upperChar, 0xDBFF
    //   JLE $charCodeAt
    //   lowerChar = SUB lowerChar, 0xD800
    //   lowerChar = SHL lowerChar, 10
    //   lowerChar = ADD lowerChar, upperChar
    //   lowerChar = ADD lowerChar, 0x2400        -- 0x10000 - 0xDC00
    // $charCodeAt:
    //
    // Asserts
    // Arm should change to Uint32 for the strLength
    Assert(strLength->GetType() == TyUint32 || strLength->GetType() == TyMachReg);
    Assert(srcIndex->GetType() == TyVar || srcIndex->IsAddrOpnd());
    Assert(lowerChar->GetType() == TyMachReg || lowerChar->GetType() == TyUint32);
    Assert(strPtr->IsRegOpnd());
    IR::RegOpnd *tempReg = IR::RegOpnd::New(TyMachReg, func);
    IR::LabelInstr *labelCharCodeAt = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::IndirOpnd *tempIndirOpnd;
    if (srcIndex->IsAddrOpnd())
    {
        // Constant index: fold index+1 and the bounds check at JIT time.
        uint32 length = Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address) + 1U;
        InsertCompareBranch(strLength, IR::IntConstOpnd::New(length, TyUint32, func), Js::OpCode::BrLe_A, true, labelCharCodeAt, lastInstr);
        // Address of the (index+1)-th char16.
        tempIndirOpnd = IR::IndirOpnd::New(strPtr, (length) * sizeof(char16), TyUint16, func);
    }
    else
    {
        InsertMove(tempReg, srcIndex, lastInstr);
#if INT32VAR
        // Untag: on INT32VAR targets the int lives in the low 32 bits of the
        // var, so a 32-bit self-move clears the tag in the upper bits.
        IR::Opnd * reg32Bit = tempReg->UseWithNewType(TyInt32, func);
        InsertMove(tempReg, reg32Bit, lastInstr);
        tempReg = reg32Bit->AsRegOpnd();
#else
        // Untag by shifting out the var tag bit.
        InsertShift(Js::OpCode::Shr_A, false, tempReg, tempReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, func), lastInstr);
#endif
        // tempReg = index + 1; bail to $charCodeAt if it's not within the string.
        InsertAdd(false, tempReg, tempReg, IR::IntConstOpnd::New(1, TyInt32, func), lastInstr);
        InsertCompareBranch(strLength, tempReg, Js::OpCode::BrLe_A, true, labelCharCodeAt, lastInstr);
        if(tempReg->GetSize() != MachPtr)
        {
            // Widen the index for pointer arithmetic.
            tempReg = tempReg->UseWithNewType(TyMachPtr, func)->AsRegOpnd();
        }
        // [strPtr + tempReg * 2] -- scale 1 means << 1 (sizeof(char16)).
        tempIndirOpnd = IR::IndirOpnd::New(strPtr, tempReg, 1, TyUint16, func);
    }
    // By this point, we have added instructions before labelCharCodeAt to check for extra length required for the surrogate pair
    // The branching for that is already handled, all we have to do now is to check for correct values.
    // Validate lowerChar is in range [D800, DBFF] (high surrogate); otherwise just get a charCodeAt
    InsertCompareBranch(lowerChar, IR::IntConstOpnd::New(0xDC00, TyUint32, func), Js::OpCode::BrGe_A, labelCharCodeAt, lastInstr);
    InsertCompareBranch(lowerChar, IR::IntConstOpnd::New(0xD7FF, TyUint32, func), Js::OpCode::BrLe_A, labelCharCodeAt, lastInstr);
    // upperChar = MOVZX [strPtr + (index + 1) * 2] -- this is the value of the upper surrogate pair char
    IR::RegOpnd *upperChar = IR::RegOpnd::New(TyInt32, func);
    InsertMove(upperChar, tempIndirOpnd, lastInstr);
    // Validate upperChar is in range [DC00, DFFF] (low surrogate); otherwise just get a charCodeAt
    InsertCompareBranch(upperChar, IR::IntConstOpnd::New(0xE000, TyUint32, func), Js::OpCode::BrGe_A, labelCharCodeAt, lastInstr);
    InsertCompareBranch(upperChar, IR::IntConstOpnd::New(0xDBFF, TyUint32, func), Js::OpCode::BrLe_A, labelCharCodeAt, lastInstr);
    // codePoint = ((lower - 0xD800) << 10) + upper - 0xDC00 + 0x10000 -- 0x10000 - 0xDC00 = 0x2400
    // lowerChar = SUB lowerChar - 0xD800
    // lowerChar = SHL lowerChar, 10
    // lowerChar = ADD lowerChar + upperChar
    // lowerChar = ADD lowerChar + 0x2400
    InsertSub(false, lowerChar, lowerChar, IR::IntConstOpnd::New(0xD800, TyUint32, func), lastInstr);
    InsertShift(Js::OpCode::Shl_A, false, lowerChar, lowerChar, IR::IntConstOpnd::New(10, TyUint32, func), lastInstr);
    InsertAdd(false, lowerChar, lowerChar, upperChar, lastInstr);
    InsertAdd(false, lowerChar, lowerChar, IR::IntConstOpnd::New(0x2400, TyUint32, func), lastInstr);
    lastInstr->InsertBefore(labelCharCodeAt);
}
  16552. bool
  16553. Lowerer::GenerateFastInlineStringFromCodePoint(IR::Instr* instr)
  16554. {
  16555. Assert(instr->m_opcode == Js::OpCode::CallDirect);
  16556. // ArgOut sequence
  16557. // s8.var = StartCall 2 (0x2).i32 #000c
  16558. // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, String object
  16559. // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to FromCharCode
  16560. // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
  16561. // s0[LikelyTaggedInt].var = CallDirect String_FromCodePoint.u32, arg1(s11)<0>.u32 #001c
  16562. IR::Opnd * linkOpnd = instr->GetSrc2();
  16563. IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
  16564. linkOpnd = tmpInstr->GetSrc2();
  16565. #if DBG
  16566. IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
  16567. Assert(argCount == 2);
  16568. #endif
  16569. IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
  16570. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
  16571. IR::Opnd *src1 = argInstr->GetSrc1();
  16572. if (src1->GetValueType().IsLikelyInt())
  16573. {
  16574. //Trying to generate this code
  16575. // MOV resultOpnd, dst
  16576. // MOV fromCharCodeIntArgOpnd, src1
  16577. // SAR fromCharCodeIntArgOpnd, Js::VarTag_Shift
  16578. // JAE $Helper
  16579. // CMP fromCharCodeIntArgOpnd, Js::ScriptContext::CharStringCacheSize
  16580. //
  16581. // JAE $labelWCharStringCheck <
  16582. // MOV resultOpnd, GetCharStringCache[fromCharCodeIntArgOpnd]
  16583. // TST resultOpnd, resultOpnd //Check for null
  16584. // JEQ $helper
  16585. // JMP $Done
  16586. //
  16587. //$labelWCharStringCheck:
  16588. // resultOpnd = Call HelperGetStringForCharW
  16589. // JMP $Done
  16590. //$helper:
  16591. IR::RegOpnd * resultOpnd = nullptr;
  16592. if (!instr->GetDst()->IsRegOpnd() || instr->GetDst()->IsEqual(src1))
  16593. {
  16594. resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
  16595. }
  16596. else
  16597. {
  16598. resultOpnd = instr->GetDst()->AsRegOpnd();
  16599. }
  16600. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  16601. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  16602. IR::RegOpnd * fromCodePointIntArgOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
  16603. IR::AutoReuseOpnd autoReuseFromCodePointIntArgOpnd(fromCodePointIntArgOpnd, instr->m_func);
  16604. InsertMove(fromCodePointIntArgOpnd, src1, instr);
  16605. //Check for tagged int and get the untagged version.
  16606. fromCodePointIntArgOpnd = GenerateUntagVar(fromCodePointIntArgOpnd, labelHelper, instr);
  16607. GenerateGetSingleCharString(fromCodePointIntArgOpnd, resultOpnd, labelHelper, doneLabel, instr, true);
  16608. instr->InsertBefore(labelHelper);
  16609. instr->InsertAfter(doneLabel);
  16610. RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
  16611. }
  16612. return true;
  16613. }
  16614. bool
  16615. Lowerer::GenerateFastInlineStringFromCharCode(IR::Instr* instr)
  16616. {
  16617. Assert(instr->m_opcode == Js::OpCode::CallDirect);
  16618. // ArgOut sequence
  16619. // s8.var = StartCall 2 (0x2).i32 #000c
  16620. // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, String object
  16621. // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to FromCharCode
  16622. // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
  16623. // s0[LikelyTaggedInt].var = CallDirect String_FromCharCode.u32, arg1(s11)<0>.u32 #001c
  16624. IR::Opnd * linkOpnd = instr->GetSrc2();
  16625. IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
  16626. linkOpnd = tmpInstr->GetSrc2();
  16627. #if DBG
  16628. IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
  16629. Assert(argCount == 2);
  16630. #endif
  16631. IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
  16632. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
  16633. IR::Opnd *src1 = argInstr->GetSrc1();
  16634. if (src1->GetValueType().IsLikelyInt())
  16635. {
  16636. //Trying to generate this code
  16637. // MOV resultOpnd, dst
  16638. // MOV fromCharCodeIntArgOpnd, src1
  16639. // SAR fromCharCodeIntArgOpnd, Js::VarTag_Shift
  16640. // JAE $Helper
  16641. // CMP fromCharCodeIntArgOpnd, Js::ScriptContext::CharStringCacheSize
  16642. //
  16643. // JAE $labelWCharStringCheck <
  16644. // MOV resultOpnd, GetCharStringCache[fromCharCodeIntArgOpnd]
  16645. // TST resultOpnd, resultOpnd //Check for null
  16646. // JEQ $helper
  16647. // JMP $Done
  16648. //
  16649. //$labelWCharStringCheck:
  16650. // resultOpnd = Call HelperGetStringForCharW
  16651. // JMP $Done
  16652. //$helper:
  16653. IR::RegOpnd * resultOpnd = nullptr;
  16654. if (!instr->GetDst()->IsRegOpnd() || instr->GetDst()->IsEqual(src1))
  16655. {
  16656. resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
  16657. }
  16658. else
  16659. {
  16660. resultOpnd = instr->GetDst()->AsRegOpnd();
  16661. }
  16662. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  16663. IR::RegOpnd * fromCharCodeIntArgOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
  16664. IR::AutoReuseOpnd autoReuseFromCharCodeIntArgOpnd(fromCharCodeIntArgOpnd, instr->m_func);
  16665. InsertMove(fromCharCodeIntArgOpnd, src1, instr);
  16666. //Check for tagged int and get the untagged version.
  16667. fromCharCodeIntArgOpnd = GenerateUntagVar(fromCharCodeIntArgOpnd, labelHelper, instr);
  16668. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  16669. GenerateGetSingleCharString(fromCharCodeIntArgOpnd, resultOpnd, labelHelper, doneLabel, instr, false);
  16670. instr->InsertBefore(labelHelper);
  16671. instr->InsertAfter(doneLabel);
  16672. RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
  16673. }
  16674. return true;
  16675. }
  16676. void
  16677. Lowerer::GenerateGetSingleCharString(IR::RegOpnd * charCodeOpnd, IR::Opnd * resultOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * doneLabel, IR::Instr * instr, bool isCodePoint)
  16678. {
  16679. // MOV cacheReg, CharStringCache
  16680. // CMP charCodeOpnd, Js::ScriptContext::CharStringCacheSize
  16681. // JAE $labelWCharStringCheck <
  16682. // MOV resultOpnd, cacheReg[charCodeOpnd]
  16683. // TST resultOpnd, resultOpnd //Check for null
  16684. // JEQ $helper
  16685. // JMP $Done
  16686. //
  16687. //$labelWCharStringCheck:
  16688. // Arg1 = charCodeOpnd
  16689. // Arg0 = cacheReg
  16690. // resultOpnd = Call HelperGetStringForCharW/CodePoint
  16691. // JMP $Done
  16692. //$helper:
  16693. IR::LabelInstr *labelWCharStringCheck = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  16694. //Try to load from in CharStringCacheA
  16695. IR::RegOpnd *cacheRegOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
  16696. IR::AutoReuseOpnd autoReuseCacheRegOpnd(cacheRegOpnd, instr->m_func);
  16697. Assert(Js::JavascriptLibrary::GetCharStringCacheAOffset() == Js::JavascriptLibrary::GetCharStringCacheOffset());
  16698. InsertMove(cacheRegOpnd, this->LoadLibraryValueOpnd(instr, LibraryValue::ValueCharStringCache), instr);
  16699. InsertCompareBranch(charCodeOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint32, this->m_func), Js::OpCode::BrGe_A, true, labelWCharStringCheck, instr);
  16700. InsertMove(resultOpnd, IR::IndirOpnd::New(cacheRegOpnd, charCodeOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyVar, instr->m_func), instr);
  16701. InsertTestBranch(resultOpnd, resultOpnd, Js::OpCode::BrEq_A, labelHelper, instr);
  16702. InsertMove(instr->GetDst(), resultOpnd, instr);
  16703. InsertBranch(Js::OpCode::Br, doneLabel, instr);
  16704. instr->InsertBefore(labelWCharStringCheck);
  16705. IR::JnHelperMethod helperMethod;
  16706. if (isCodePoint)
  16707. {
  16708. helperMethod = IR::HelperGetStringForCharCodePoint;
  16709. }
  16710. else
  16711. {
  16712. InsertMove(charCodeOpnd, charCodeOpnd->UseWithNewType(TyUint16, instr->m_func), instr);
  16713. helperMethod = IR::HelperGetStringForChar;
  16714. }
  16715. //Try to load from in CharStringCacheW or CharStringCacheCodePoint, this is a helper call.
  16716. this->m_lowererMD.LoadHelperArgument(instr, charCodeOpnd);
  16717. this->m_lowererMD.LoadHelperArgument(instr, cacheRegOpnd);
  16718. IR::Instr* helperCallInstr = IR::Instr::New(Js::OpCode::Call, resultOpnd, IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
  16719. instr->InsertBefore(helperCallInstr);
  16720. this->m_lowererMD.LowerCall(helperCallInstr, 0);
  16721. InsertMove(instr->GetDst(), resultOpnd, instr);
  16722. InsertBranch(Js::OpCode::Br, doneLabel, instr);
  16723. }
// Fast path for the global parseInt() CallDirect: when the single argument is
// already a tagged integer, parseInt is an identity operation, so the argument
// is copied straight to the dst and the helper call is skipped. The CallDirect
// itself is relocated onto the helper path as the fallback.
// Returns true unconditionally.
bool
Lowerer::GenerateFastInlineGlobalObjectParseInt(IR::Instr *instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    // ArgOut sequence
    // s8.var          = StartCall 2 (0x2).i32                              #000c
    // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var                            #0014 //Implicit this, global object
    // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var                  #0018 //First argument to parseInt
    // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
    // s0[LikelyTaggedInt].var = CallDirect GlobalObject_ParseInt.u32, arg1(s11)<0>.u32 #001c

    // Walk the ArgOut chain from the CallDirect's src2 down to the ArgOut that
    // defines the first real user argument (the value passed to parseInt).
    IR::Opnd * linkOpnd = instr->GetSrc2();
    IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    linkOpnd = tmpInstr->GetSrc2();

#if DBG
    IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
    Assert(argCount == 2);
#endif

    IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
    IR::Opnd *parseIntArgOpnd = argInstr->GetSrc1();

    // Only emit the fast path when profile data says the argument is likely a
    // number; otherwise leave the plain helper call in place.
    if (parseIntArgOpnd->GetValueType().IsLikelyNumber())
    {
        //If likely int check for tagged int and set the dst
        IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

        // Runtime tag check unless the operand is provably a tagged int at JIT
        // time; non-ints fall back to the helper.
        if (!parseIntArgOpnd->IsTaggedInt())
        {
            this->m_lowererMD.GenerateSmIntTest(parseIntArgOpnd, instr, labelHelper);
        }
        // parseInt of a tagged int is the value itself.
        if (instr->GetDst())
        {
            this->InsertMove(instr->GetDst(), parseIntArgOpnd, instr);
        }
        InsertBranch(Js::OpCode::Br, doneLabel, instr);

        instr->InsertBefore(labelHelper);
        instr->InsertAfter(doneLabel);

        // Move the CallDirect (and its specialized ArgOut) onto the helper path.
        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
    }
    return true;
}
// Lowers InlineArrayPop: emits an inline fast path that pops the last element
// directly off the array when ShouldGenerateArrayFastPath allows it, with a
// helper call as fallback. Likely-native (int/float) arrays additionally get a
// bail-out path, because the runtime helper only handles Var arrays.
void
Lowerer::GenerateFastInlineArrayPop(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::InlineArrayPop);

    IR::Opnd *arrayOpnd = instr->GetSrc1();

    IR::LabelInstr *bailOutLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    bool isLikelyNativeArray = arrayOpnd->GetValueType().IsLikelyNativeArray();

    if (ShouldGenerateArrayFastPath(arrayOpnd, false, false, false))
    {
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

        if(isLikelyNativeArray)
        {
            //We bailOut on cases like length == 0, Array Test failing cases (Runtime helper cannot handle these cases)
            GenerateFastPop(arrayOpnd, instr, labelHelper, doneLabel, bailOutLabelHelper);
        }
        else
        {
            //We jump to helper on cases like length == 0, Array Test failing cases
            GenerateFastPop(arrayOpnd, instr, labelHelper, doneLabel, labelHelper);
        }
        instr->InsertBefore(labelHelper);

        ///JMP to $doneLabel
        InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    }
    else
    {
        //We assume here that the array will be a Var array. - Runtime Helper calls assume this.
        Assert(!isLikelyNativeArray);
    }

    instr->InsertAfter(doneLabel);

    if(isLikelyNativeArray)
    {
        //Lower IR::BailOutConventionalNativeArrayAccessOnly here.
        LowerOneBailOutKind(instr, IR::BailOutConventionalNativeArrayAccessOnly, false, false);
        instr->InsertAfter(bailOutLabelHelper);
    }

    // Emit the helper call (and, for native arrays, the bail-out tail) that
    // backs the fast path.
    GenerateHelperToArrayPopFastPath(instr, doneLabel, bailOutLabelHelper);
}
// Fast path for Array.isArray (CallDirect): loads the argument's typeId and
// answers true when it falls in [TypeIds_ArrayFirst, TypeIds_ArrayLastWithES5],
// false for any other typeId except Proxy/HostDispatch, which must go to the
// helper (they can wrap an array). Tagged values are immediately "not array".
void
Lowerer::GenerateFastInlineIsArray(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    IR::Opnd * dst = instr->GetDst();
    Assert(dst);

    //CallDirect src2
    IR::Opnd * linkOpnd = instr->GetSrc2();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    IR::Opnd * argsOpnd[2] = { 0 };
    bool result = instr->FetchOperands(argsOpnd, 2);
    Assert(result);
    AnalysisAssert(argsOpnd[1]);

    // All fast-path code is inserted before the helper label; the CallDirect
    // itself becomes the helper path.
    IR::LabelInstr *helperLabel = InsertLabel(true, instr);
    IR::Instr * insertInstr = helperLabel;
    IR::LabelInstr *doneLabel = InsertLabel(false, instr->m_next);

    ValueType valueType = argsOpnd[1]->GetValueType();
    IR::RegOpnd * src = GetRegOpnd(argsOpnd[1], insertInstr, m_func, argsOpnd[1]->GetType());

    // Labels are marked as helper blocks when the profile says the value is
    // likely an array (so the "not array" outcome is the cold path).
    IR::LabelInstr *checkNotArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, valueType.IsLikelyArray());
    IR::LabelInstr *notArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, valueType.IsLikelyArray());

    // Tagged (int/float) values are never arrays.
    if (!src->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(src, insertInstr, notArrayLabel);
    }

    // MOV typeOpnd, [opnd + offset(type)]
    IR::RegOpnd *typeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    const IR::AutoReuseOpnd autoReuseTypeOpnd(typeOpnd, m_func);
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, m_func);
    InsertMove(typeOpnd, indirOpnd, insertInstr);

    // MOV typeIdOpnd, [typeOpnd + offset(typeId)]
    IR::RegOpnd *typeIdOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    const IR::AutoReuseOpnd autoReuseTypeIdOpnd(typeIdOpnd, m_func);
    indirOpnd = IR::IndirOpnd::New(typeOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func);
    InsertMove(typeIdOpnd, indirOpnd, insertInstr);

    // CMP typeIdOpnd, TypeIds_ArrayFirst
    // JLT $notArray
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_ArrayFirst, TyInt32, m_func),
        Js::OpCode::BrLt_A,
        checkNotArrayLabel,
        insertInstr);

    // CMP typeIdOpnd, TypeIds_ArrayLastWithES5
    // JGT $notArray
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_ArrayLastWithES5, TyInt32, m_func),
        Js::OpCode::BrGt_A,
        notArrayLabel,
        insertInstr);

    // In the array typeId range:
    // MOV dst, True
    InsertMove(dst, LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);

    // JMP $done
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    // $checkNotArray:
    // typeId was below the array range; Proxy and HostDispatch may still wrap
    // an array, so those two go to the helper (the runtime call) instead of
    // returning false.
    insertInstr->InsertBefore(checkNotArrayLabel);

    // CMP typeIdOpnd, TypeIds_Proxy
    // JEQ $helperLabel
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
        Js::OpCode::BrEq_A,
        helperLabel,
        insertInstr);
    // The below-range check only needs to consider Proxy/HostDispatch because
    // both typeIds sort before TypeIds_ArrayFirst.
    CompileAssert(Js::TypeIds_Proxy < Js::TypeIds_ArrayFirst);

    // CMP typeIdOpnd, TypeIds_HostDispatch
    // JEQ $helperLabel
    InsertCompareBranch(
        typeIdOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
        Js::OpCode::BrEq_A,
        helperLabel,
        insertInstr);
    CompileAssert(Js::TypeIds_HostDispatch < Js::TypeIds_ArrayFirst);

    // $notObjectLabel:
    insertInstr->InsertBefore(notArrayLabel);

    // MOV dst, False
    InsertMove(dst, LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), insertInstr);

    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    // Move the CallDirect (and its specialized ArgOut) onto the helper path.
    RelocateCallDirectToHelperPath(tmpInstr, helperLabel);
}
// Fast path for Object.prototype.hasOwnProperty (CallDirect). Requires the
// property operand to be a PropertyString so its LdElem inline cache can be
// probed against the object's type: a cache hit on the local (inline or aux)
// slots proves the property is an own property and yields true without a call.
// When the property name comes from a for-in enumerator (BrOnEmpty /
// BrOnNotEmpty def), a second fast path answers from the enumerator state on a
// cache miss; otherwise a miss goes to the helper.
void
Lowerer::GenerateFastInlineHasOwnProperty(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    //CallDirect src2
    IR::Opnd * linkOpnd = instr->GetSrc2();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    IR::Opnd * argsOpnd[2] = { 0 };
    bool result = instr->FetchOperands(argsOpnd, 2);
    Assert(result);
    AnalysisAssert(argsOpnd[0] && argsOpnd[1]);

    // Bail out of the fast path entirely when the operand types rule it out or
    // the operands aren't in registers.
    if (argsOpnd[1]->GetValueType().IsNotString()
        || argsOpnd[0]->GetValueType().IsNotObject()
        || !argsOpnd[0]->IsRegOpnd()
        || !argsOpnd[1]->IsRegOpnd())
    {
        return;
    }

    IR::RegOpnd * thisObj = argsOpnd[0]->AsRegOpnd();
    IR::RegOpnd * propOpnd = argsOpnd[1]->AsRegOpnd();

    // fast path case where hasOwnProperty is being called using a property name loaded via a for-in loop
    bool generateForInFastpath = propOpnd->GetValueType().IsString()
        && propOpnd->m_sym->m_isSingleDef
        && (propOpnd->m_sym->m_instrDef->m_opcode == Js::OpCode::BrOnEmpty
            || propOpnd->m_sym->m_instrDef->m_opcode == Js::OpCode::BrOnNotEmpty);

    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    IR::LabelInstr * labelHelper = InsertLabel(true, instr);
    // With the for-in fast path, a cache miss gets its own label (the for-in
    // checks run there); without it, a miss goes straight to the helper.
    IR::LabelInstr * cacheMissLabel = generateForInFastpath ? IR::LabelInstr::New(Js::OpCode::Label, m_func, true) : labelHelper;
    IR::Instr * insertInstr = labelHelper;

    // GenerateObjectTest(propOpnd, $labelHelper)
    // CMP indexOpnd, PropertyString::`vtable'
    // JNE $helper
    // GenerateObjectTest(thisObj, $labelHelper)
    // MOV inlineCacheOpnd, propOpnd->lsElemInlineCache
    // MOV objectTypeOpnd, thisObj->type
    // GenerateDynamicLoadPolymorphicInlineCacheSlot(inlineCacheOpnd, objectTypeOpnd) ; loads inline cache for given type
    // GenerateLocalInlineCacheCheck(objectTypeOpnd, inlineCacheOpnd, $notInlineSlotsLabel) ; check for type in inline slots, jump to $notInlineSlotsLabel on failure
    // MOV dst, ValueTrue
    // JMP $done
    // $notInlineSlotsLabel:
    // GenerateLoadTaggedType(objectTypeOpnd, opndTaggedType)
    // GenerateLocalInlineCacheCheck(opndTaggedType, inlineCacheOpnd, $cacheMissLabel) ; check for type in aux slot, jump to $cacheMissLabel on failure
    // MOV dst, ValueTrue
    // JMP $done

    m_lowererMD.GenerateObjectTest(propOpnd, insertInstr, labelHelper);
    // Only PropertyString carries the LdElem inline cache this path relies on.
    InsertCompareBranch(IR::IndirOpnd::New(propOpnd, 0, TyMachPtr, m_func), LoadVTableValueOpnd(insertInstr, VTableValue::VtablePropertyString), Js::OpCode::BrNeq_A, labelHelper, insertInstr);

    m_lowererMD.GenerateObjectTest(thisObj, insertInstr, labelHelper);

    IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(propOpnd, Js::PropertyString::GetOffsetOfLdElemInlineCache(), TyMachPtr, m_func), insertInstr);

    IR::RegOpnd * objectTypeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(objectTypeOpnd, IR::IndirOpnd::New(thisObj, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, m_func), insertInstr);

    GenerateDynamicLoadPolymorphicInlineCacheSlot(insertInstr, inlineCacheOpnd, objectTypeOpnd);

    // Inline-slot cache hit => own property => true.
    IR::LabelInstr * notInlineSlotsLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    GenerateLocalInlineCacheCheck(insertInstr, objectTypeOpnd, inlineCacheOpnd, notInlineSlotsLabel);
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    // Aux-slot cache hit => own property => true.
    insertInstr->InsertBefore(notInlineSlotsLabel);
    IR::RegOpnd * opndTaggedType = IR::RegOpnd::New(TyMachReg, m_func);
    m_lowererMD.GenerateLoadTaggedType(insertInstr, objectTypeOpnd, opndTaggedType);
    GenerateLocalInlineCacheCheck(insertInstr, opndTaggedType, inlineCacheOpnd, cacheMissLabel);
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    if (!generateForInFastpath)
    {
        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
        return;
    }

    insertInstr->InsertBefore(cacheMissLabel);

    // CMP forInEnumeratorOpnd->canUseJitFastPath, 0
    // JEQ $labelHelper
    // MOV cachedDataTypeOpnd, forInEnumeratorOpnd->enumeratorInitialType
    // CMP thisObj->type, cachedDataTypeOpnd
    // JNE $labelHelper
    // CMP forInEnumeratorOpnd->enumeratingPrototype, 0
    // JNE $falseLabel
    // MOV dst, True
    // JMP $doneLabel
    // $falseLabel: [helper]
    // MOV dst, False
    // JMP $doneLabel
    // $labelHelper: [helper]
    // CallDirect code
    // ...
    // $doneLabel:

    // The BrOnEmpty/BrOnNotEmpty def's src1 is the for-in enumerator.
    IR::Opnd * forInEnumeratorOpnd = argsOpnd[1]->AsRegOpnd()->m_sym->m_instrDef->GetSrc1();

    // go to helper if we can't use JIT fastpath
    IR::Opnd * canUseJitFastPathOpnd = GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfCanUseJitFastPath(), TyInt8);
    InsertCompareBranch(canUseJitFastPathOpnd, IR::IntConstOpnd::New(0, TyInt8, m_func), Js::OpCode::BrEq_A, labelHelper, insertInstr);

    // go to helper if initial type is not same as the object we are querying
    IR::RegOpnd * cachedDataTypeOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(cachedDataTypeOpnd, GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialType(), TyMachPtr), insertInstr);
    InsertCompareBranch(cachedDataTypeOpnd, IR::IndirOpnd::New(thisObj, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, m_func), Js::OpCode::BrNeq_A, labelHelper, insertInstr);

    // if we haven't yet gone to helper, then we can check if we are enumerating the prototype to know if property is an own property
    IR::LabelInstr *falseLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::Opnd * enumeratingPrototype = GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratingPrototype(), TyInt8);
    InsertCompareBranch(enumeratingPrototype, IR::IntConstOpnd::New(0, TyInt8, m_func), Js::OpCode::BrNeq_A, falseLabel, insertInstr);

    // assume true is the main path
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    // load false on helper path
    insertInstr->InsertBefore(falseLabel);
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), insertInstr);
    InsertBranch(Js::OpCode::Br, doneLabel, insertInstr);

    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
  16991. bool
  16992. Lowerer::ShouldGenerateStringReplaceFastPath(IR::Instr * callInstr, IntConstType argCount)
  16993. {
  16994. // a.replace(b,c)
  16995. // We want to emit the fast path if 'a' and 'c' are strings and 'b' is a regex
  16996. //
  16997. // argout sequence:
  16998. // arg1(s12)<0>.var = ArgOut_A s2.var, s11.var #0014 <---- a
  16999. // arg2(s13)<4>.var = ArgOut_A s3.var, arg1(s12)<0>.var #0018 <---- b
  17000. // arg3(s14)<8>.var = ArgOut_A s4.var, arg2(s13)<4>.var #001c <---- c
  17001. // s0[LikelyString].var = CallI s5[ffunc].var, arg3(s14)<8>.var #0020
  17002. IR::Opnd *linkOpnd = callInstr->GetSrc2();
  17003. Assert(argCount == 2);
  17004. while(linkOpnd->IsSymOpnd())
  17005. {
  17006. IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
  17007. StackSym *sym = src2->m_sym->AsStackSym();
  17008. Assert(sym->m_isSingleDef);
  17009. IR::Instr *argInstr = sym->m_instrDef;
  17010. Assert(argCount >= 0);
  17011. // check to see if 'a' and 'c' are likely strings
  17012. if((argCount == 2 || argCount == 0) && (!argInstr->GetSrc1()->GetValueType().IsLikelyString()))
  17013. {
  17014. return false;
  17015. }
  17016. // we want 'b' to be regex. Don't generate fastpath if it is a tagged int
  17017. if((argCount == 1) && (argInstr->GetSrc1()->IsTaggedInt()))
  17018. {
  17019. return false;
  17020. }
  17021. argCount--;
  17022. linkOpnd = argInstr->GetSrc2();
  17023. }
  17024. return true;
  17025. }
// a.replace(b,c)
// We want to emit the fast path if 'a' and 'c' are strings and 'b' is a regex
//
// strOpnd --> a
// src1 --> b
// src2 --> c
//
// Emits runtime type checks (string tests for 'a'/'c', JavascriptRegExp
// vtable check for 'b') that jump to labelHelper on failure, then emits a
// direct call to the RegExp replace helper. Returns true unconditionally.
bool
Lowerer::GenerateFastReplace(IR::Opnd* strOpnd, IR::Opnd* src1, IR::Opnd* src2, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
    IR::Opnd * callDst = callInstr->GetDst();

    Assert(strOpnd->GetValueType().IsLikelyString() && src2->GetValueType().IsLikelyString());

    // Runtime string test for 'a' unless its type is already proven.
    if(!strOpnd->GetValueType().IsString())
    {
        strOpnd = GetRegOpnd(strOpnd, insertInstr, m_func, TyVar);
        this->GenerateStringTest(strOpnd->AsRegOpnd(), insertInstr, labelHelper);
    }

    // A tagged value cannot be a regex object.
    if(!src1->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(src1, insertInstr, labelHelper);
    }

    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptRegExp);

    // cmp [regex], vtableAddress
    // jne $labelHelper
    src1 = GetRegOpnd(src1, insertInstr, m_func, TyVar);
    InsertCompareBranch(
        IR::IndirOpnd::New(src1->AsRegOpnd(), 0, TyMachPtr, insertInstr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertInstr);

    // Runtime string test for 'c' unless its type is already proven.
    if(!src2->GetValueType().IsString())
    {
        src2 = GetRegOpnd(src2, insertInstr, m_func, TyVar);
        this->GenerateStringTest(src2->AsRegOpnd(), insertInstr, labelHelper);
    }

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, insertInstr->m_func);
    if (callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    insertInstr->InsertBefore(helperCallInstr);

    // The replace helper may execute script (e.g. getters/replace callbacks),
    // so carry over any implicit-call bailout from the original instruction.
    if (insertInstr->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(insertInstr->GetBailOutKind()))
    {
        helperCallInstr = AddBailoutToHelperCallInstr(helperCallInstr, insertInstr->GetBailOutInfo(), insertInstr->GetBailOutKind(), insertInstr);
    }

    //scriptContext, pRegEx, pThis, pReplace (to be pushed in reverse order)

    // pReplace, pThis, pRegEx
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, src2);
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, strOpnd);
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, src1);

    // script context
    LoadScriptContext(helperCallInstr);

    // Pick the helper variant based on whether the call result is used.
    if(callDst)
    {
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperRegExp_ReplaceStringResultUsed);
    }
    else
    {
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperRegExp_ReplaceStringResultNotUsed);
    }

    return true;
}
  17088. ///----
// a.split(b,c (optional) )
// We want to emit the fast path when
//    1. c is not present, and
//    2. 'a' is a string and 'b' is a regex.
//
// a.match(b)
// We want to emit the fast path when 'a' is a string and 'b' is a regex.
//
// Emits runtime type checks that fall back to the original CallDirect on the
// helper path, then calls the appropriate RegExp split/match helper directly.
// When the dst is a temp object, stack-allocates the result array.
void
Lowerer::GenerateFastInlineStringSplitMatch(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    IR::Opnd * callDst = instr->GetDst();

    //helperCallOpnd
    IR::Opnd * src1 = instr->GetSrc1();

    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = instr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    IR::Opnd * argsOpnd[2];
    if(!instr->FetchOperands(argsOpnd, 2))
    {
        return;
    }

    // Need 'a' likely a string and 'b' possibly a regex (a tagged int never is).
    if(!argsOpnd[0]->GetValueType().IsLikelyString() || argsOpnd[1]->IsTaggedInt())
    {
        return;
    }

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    // Runtime string test for 'a' unless proven.
    if(!argsOpnd[0]->GetValueType().IsString())
    {
        argsOpnd[0] = GetRegOpnd(argsOpnd[0], instr, m_func, TyVar);
        this->GenerateStringTest(argsOpnd[0]->AsRegOpnd(), instr, labelHelper);
    }

    // A tagged value cannot be a regex object.
    if(!argsOpnd[1]->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(argsOpnd[1], instr, labelHelper);
    }

    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);

    // cmp [regex], vtableAddress
    // jne $labelHelper
    argsOpnd[1] = GetRegOpnd(argsOpnd[1], instr, m_func, TyVar);
    InsertCompareBranch(
        IR::IndirOpnd::New(argsOpnd[1]->AsRegOpnd(), 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        instr);

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    if (callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    instr->InsertBefore(helperCallInstr);

    // The helper may execute script, so carry over any implicit-call bailout.
    if (instr->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(instr->GetBailOutKind()))
    {
        helperCallInstr = AddBailoutToHelperCallInstr(helperCallInstr, instr->GetBailOutInfo(), instr->GetBailOutKind(), instr);
    }

    // [stackAllocationPointer, ]scriptcontext, regexp, input[, limit] (to be pushed in reverse order)

    if(src1->AsHelperCallOpnd()->m_fnHelper == IR::JnHelperMethod::HelperString_Split)
    {
        //limit
        //As we are optimizing only for two operands, make limit UINT_MAX
        IR::Opnd* limit = IR::IntConstOpnd::New(UINT_MAX, TyUint32, instr->m_func);
        this->m_lowererMD.LoadHelperArgument(helperCallInstr, limit);
    }

    //input, regexp
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, argsOpnd[0]);
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, argsOpnd[1]);

    // script context
    LoadScriptContext(helperCallInstr);

    IR::JnHelperMethod helperMethod = IR::JnHelperMethod::HelperInvalid;
    IR::AutoReuseOpnd autoReuseStackAllocationOpnd;
    if(callDst && instr->dstIsTempObject)
    {
        // Result doesn't escape: use the MayBeTemp helper variant and pass a
        // stack-allocated array for it to fill in.
        switch(src1->AsHelperCallOpnd()->m_fnHelper)
        {
            case IR::JnHelperMethod::HelperString_Split:
                helperMethod = IR::JnHelperMethod::HelperRegExp_SplitResultUsedAndMayBeTemp;
                break;

            case IR::JnHelperMethod::HelperString_Match:
                helperMethod = IR::JnHelperMethod::HelperRegExp_MatchResultUsedAndMayBeTemp;
                break;

            default:
                Assert(false);
                __assume(false);
        }

        // Allocate some space on the stack for the result array
        IR::RegOpnd *const stackAllocationOpnd = IR::RegOpnd::New(TyVar, m_func);
        autoReuseStackAllocationOpnd.Initialize(stackAllocationOpnd, m_func);
        stackAllocationOpnd->SetValueType(callDst->GetValueType());
        GenerateMarkTempAlloc(stackAllocationOpnd, Js::JavascriptArray::StackAllocationSize, helperCallInstr);
        m_lowererMD.LoadHelperArgument(helperCallInstr, stackAllocationOpnd);
    }
    else
    {
        // Pick the helper variant based on whether the result is used.
        switch(src1->AsHelperCallOpnd()->m_fnHelper)
        {
            case IR::JnHelperMethod::HelperString_Split:
                helperMethod =
                    callDst
                        ? IR::JnHelperMethod::HelperRegExp_SplitResultUsed
                        : IR::JnHelperMethod::HelperRegExp_SplitResultNotUsed;
                break;

            case IR::JnHelperMethod::HelperString_Match:
                helperMethod =
                    callDst
                        ? IR::JnHelperMethod::HelperRegExp_MatchResultUsed
                        : IR::JnHelperMethod::HelperRegExp_MatchResultNotUsed;
                break;

            default:
                Assert(false);
                __assume(false);
        }
    }
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);
    instr->InsertBefore(labelHelper);
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    // Move the CallDirect (and its specialized ArgOut) onto the helper path.
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
// a.exec(b)
// We want to emit the fast path when 'a' is a regex and 'b' is a string
//
// Emits type checks (string test for 'b', JavascriptRegExp vtable check for
// 'a') that fall back to the CallDirect on failure, then calls the RegExp exec
// helper directly. Additionally, when the ExecBOIFastPath phase is on, a
// begin-of-input literal regex (BOILiteral2 tag, global flag set) is given an
// even faster "no match" path that avoids the helper entirely: compare the
// first DWORD of the input against the literal and, on mismatch, reset
// lastIndex and produce null.
void
Lowerer::GenerateFastInlineRegExpExec(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    IR::Opnd * callDst = instr->GetDst();

    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = instr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    IR::Opnd * argsOpnd[2];
    if (!instr->FetchOperands(argsOpnd, 2))
    {
        return;
    }

    // Need 'b' likely a string and 'a' possibly a regex (a tagged int never is).
    IR::Opnd *opndString = argsOpnd[1];
    if(!opndString->GetValueType().IsLikelyString() || argsOpnd[0]->IsTaggedInt())
    {
        return;
    }

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    // Runtime string test for 'b' unless proven.
    if(!opndString->GetValueType().IsString())
    {
        opndString = GetRegOpnd(opndString, instr, m_func, TyVar);
        this->GenerateStringTest(opndString->AsRegOpnd(), instr, labelHelper);
    }

    // A tagged value cannot be a regex object.
    IR::Opnd *opndRegex = argsOpnd[0];
    if(!opndRegex->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(opndRegex, instr, labelHelper);
    }

    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);

    // cmp [regex], vtableAddress
    // jne $labelHelper
    opndRegex = GetRegOpnd(opndRegex, instr, m_func, TyVar);
    InsertCompareBranch(
        IR::IndirOpnd::New(opndRegex->AsRegOpnd(), 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        instr);

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    if (!PHASE_OFF(Js::ExecBOIFastPathPhase, m_func))
    {
        // Load pattern from regex operand
        IR::RegOpnd *opndPattern = IR::RegOpnd::New(TyMachPtr, m_func);
        Lowerer::InsertMove(
            opndPattern,
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfPattern(), TyMachPtr, m_func),
            instr);

        // Load program from pattern
        IR::RegOpnd *opndProgram = IR::RegOpnd::New(TyMachPtr, m_func);
        Lowerer::InsertMove(
            opndProgram,
            IR::IndirOpnd::New(opndPattern, offsetof(UnifiedRegex::RegexPattern, rep) + offsetof(UnifiedRegex::RegexPattern::UnifiedRep, program), TyMachPtr, m_func),
            instr);

        IR::LabelInstr *labelFastHelper = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // We want the program's tag to be BOILiteral2Tag
        InsertCompareBranch(
            IR::IndirOpnd::New(opndProgram, (int32)UnifiedRegex::Program::GetOffsetOfTag(), TyUint8, m_func),
            IR::IntConstOpnd::New((IntConstType)UnifiedRegex::Program::GetBOILiteral2Tag(), TyUint8, m_func),
            Js::OpCode::BrNeq_A,
            labelFastHelper,
            instr);

        // Test the program's flags for "global"
        InsertTestBranch(
            IR::IndirOpnd::New(opndProgram, offsetof(UnifiedRegex::Program, flags), TyUint8, m_func),
            IR::IntConstOpnd::New(UnifiedRegex::GlobalRegexFlag, TyUint8, m_func),
            Js::OpCode::BrNeq_A,
            labelFastHelper,
            instr);

        IR::LabelInstr *labelNoMatch = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // If string length < 2...
        InsertCompareBranch(
            IR::IndirOpnd::New(opndString->AsRegOpnd(), offsetof(Js::JavascriptString, m_charLength), TyUint32, m_func),
            IR::IntConstOpnd::New(2, TyUint32, m_func),
            Js::OpCode::BrLt_A,
            labelNoMatch,
            instr);

        // ...or the DWORD doesn't match the pattern...
        IR::RegOpnd *opndBuffer = IR::RegOpnd::New(TyMachReg, m_func);
        Lowerer::InsertMove(
            opndBuffer,
            IR::IndirOpnd::New(opndString->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, m_func),
            instr);

        // The string's buffer pointer may be null (not yet flattened); call
        // String_GetSz to materialize it in that case.
        IR::LabelInstr *labelGotString = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        InsertTestBranch(opndBuffer, opndBuffer, Js::OpCode::BrNeq_A, labelGotString, instr);

        m_lowererMD.LoadHelperArgument(instr, opndString);
        IR::Instr *instrCall = IR::Instr::New(Js::OpCode::Call, opndBuffer, IR::HelperCallOpnd::New(IR::HelperString_GetSz, m_func), m_func);
        instr->InsertBefore(instrCall);
        m_lowererMD.LowerCall(instrCall, 0);

        instr->InsertBefore(labelGotString);

        // Compare the first two characters (one DWORD) of the input against
        // the program's begin-of-input 2-char literal.
        IR::RegOpnd *opndBufferDWORD = IR::RegOpnd::New(TyUint32, m_func);
        Lowerer::InsertMove(
            opndBufferDWORD,
            IR::IndirOpnd::New(opndBuffer, 0, TyUint32, m_func),
            instr);

        InsertCompareBranch(
            IR::IndirOpnd::New(opndProgram, (int32)(UnifiedRegex::Program::GetOffsetOfRep() + UnifiedRegex::Program::GetOffsetOfBOILiteral2Literal()), TyUint32, m_func),
            opndBufferDWORD,
            Js::OpCode::BrEq_A,
            labelFastHelper,
            instr);

        // ...then set the last index to 0...
        instr->InsertBefore(labelNoMatch);

        Lowerer::InsertMove(
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), TyVar, m_func),
            IR::AddrOpnd::NewNull(m_func),
            instr);

        Lowerer::InsertMove(
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), TyUint32, m_func),
            IR::IntConstOpnd::New(0, TyUint32, m_func),
            instr);

        // ...and set the dst to null...
        if (callDst)
        {
            Lowerer::InsertMove(
                callDst,
                LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
                instr);
        }

        // ...and we're done.
        this->InsertBranch(Js::OpCode::Br, doneLabel, instr);

        instr->InsertBefore(labelFastHelper);
    }

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    if (callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    instr->InsertBefore(helperCallInstr);

    // The exec helper may execute script, so carry over any implicit-call bailout.
    if (instr->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(instr->GetBailOutKind()))
    {
        helperCallInstr = AddBailoutToHelperCallInstr(helperCallInstr, instr->GetBailOutInfo(), instr->GetBailOutKind(), instr);
    }

    // [stackAllocationPointer, ]scriptcontext, regexp, string (to be pushed in reverse order)

    //string, regexp
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, opndString);
    this->m_lowererMD.LoadHelperArgument(helperCallInstr, opndRegex);

    // script context
    LoadScriptContext(helperCallInstr);

    IR::JnHelperMethod helperMethod;
    IR::AutoReuseOpnd autoReuseStackAllocationOpnd;
    if (callDst)
    {
        if (instr->dstIsTempObject)
        {
            // Result doesn't escape: use the MayBeTemp helper variant and pass
            // a stack-allocated array for it to fill in.
            helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultUsedAndMayBeTemp;

            // Allocate some space on the stack for the result array
            IR::RegOpnd *const stackAllocationOpnd = IR::RegOpnd::New(TyVar, m_func);
            autoReuseStackAllocationOpnd.Initialize(stackAllocationOpnd, m_func);
            stackAllocationOpnd->SetValueType(callDst->GetValueType());
            GenerateMarkTempAlloc(stackAllocationOpnd, Js::JavascriptArray::StackAllocationSize, helperCallInstr);
            m_lowererMD.LoadHelperArgument(helperCallInstr, stackAllocationOpnd);
        }
        else
        {
            helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultUsed;
        }
    }
    else
    {
        helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultNotUsed;
    }
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);

    instr->InsertAfter(doneLabel);
    instr->InsertBefore(labelHelper);
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    // Move the CallDirect (and its specialized ArgOut) onto the helper path.
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
// Generate a fast path for the "in" operator that check quickly if we have an array or not and if the index of the data is contained in the array's length.
void Lowerer::GenerateFastArrayIsIn(IR::Instr * instr)
{
    // operator "foo in bar"
    IR::Opnd* src1 = instr->GetSrc1(); // foo
    IR::Opnd* src2 = instr->GetSrc2(); // bar

    // Only emit the fast path when the index is likely an int and the base is likely
    // an array with no missing values; otherwise leave only the generic helper path.
    if (
        !src1->GetValueType().IsLikelyInt() ||
        // Do not do a fast path if we know for sure we don't have an int
        src1->IsNotInt() ||
        !src2->GetValueType().IsLikelyArray() ||
        !src2->GetValueType().HasNoMissingValues())
    {
        return;
    }

    IR::LabelInstr* helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::LabelInstr* isArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    // Untag the int index; jumps to helperLabel when src1 is not a tagged int.
    IR::RegOpnd* src1Untagged = GenerateUntagVar(src1->AsRegOpnd(), helperLabel, instr);

    IR::RegOpnd* src2RegOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(src2RegOpnd, src2, instr);

    IR::AutoReuseOpnd autoReuseArrayOpnd;
    // Reject non-object (tagged) values before dereferencing the vtable.
    m_lowererMD.GenerateObjectTest(src2RegOpnd, instr, helperLabel);
    IR::RegOpnd* arrayOpnd = src2RegOpnd->Copy(instr->m_func)->AsRegOpnd();
    autoReuseArrayOpnd.Initialize(arrayOpnd, instr->m_func, false /* autoDelete */);

    // Accept exactly JavascriptArray, NativeIntArray, or NativeFloatArray by vtable
    // identity ([arrayOpnd + 0] is the vtable pointer); anything else -> helper.
    IR::Opnd* vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptArray);
    InsertCompareBranch(
        IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrEq_A,
        isArrayLabel,
        instr);
    vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableNativeIntArray);
    InsertCompareBranch(
        IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrEq_A,
        isArrayLabel,
        instr);
    vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableNativeFloatArray);
    InsertCompareBranch(
        IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        helperLabel,
        instr);
    instr->InsertBefore(isArrayLabel);

    // The fast path depends on "no missing values": TEST the flags byte and go to
    // the helper when the HasNoMissingValues bit is clear (TEST result zero -> BrEq).
    InsertTestBranch(
        IR::IndirOpnd::New(src2RegOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
        IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, m_func, true),
        Js::OpCode::BrEq_A,
        helperLabel,
        instr);

    // Load the head segment and its length; the fast path only answers for indices
    // that fall inside the head segment.
    IR::AutoReuseOpnd autoReuseHeadSegmentOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentLengthOpnd;
    IR::IndirOpnd* indirOpnd = IR::IndirOpnd::New(src2RegOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, this->m_func);
    IR::RegOpnd* headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
    InsertMove(headSegmentOpnd, indirOpnd, instr);
    IR::Opnd* headSegmentLengthOpnd = IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
    autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);

    // index >= headSegmentLength -> helper.
    InsertCompareBranch(
        src1Untagged,
        headSegmentLengthOpnd,
        Js::OpCode::BrGe_A,
        helperLabel,
        instr);
    // index < 0 -> helper.
    InsertCompareBranch(
        src1Untagged,
        IR::IntConstOpnd::New(0, src1Untagged->GetType(), this->m_func),
        Js::OpCode::BrLt_A,
        helperLabel,
        instr);

    // In range with no missing values: the element is guaranteed present, so the
    // result of "in" is true.
    InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
  17455. // Generate a fast path for the "in" operator to use the cache where the key may be a PropertyString or Symbol.
  17456. void Lowerer::GenerateFastObjectIsIn(IR::Instr * instr)
  17457. {
  17458. IR::RegOpnd* baseOpnd = GetRegOpnd(instr->GetSrc2(), instr, m_func, TyVar);
  17459. IR::RegOpnd* indexOpnd = GetRegOpnd(instr->GetSrc1(), instr, m_func, TyVar);
  17460. bool likelyStringIndex = indexOpnd->GetValueType().IsLikelyString();
  17461. bool likelySymbolIndex = indexOpnd->GetValueType().IsLikelySymbol();
  17462. if (!baseOpnd->GetValueType().IsLikelyObject() || !(likelyStringIndex || likelySymbolIndex))
  17463. {
  17464. return;
  17465. }
  17466. IR::LabelInstr* helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  17467. IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  17468. if (likelyStringIndex)
  17469. {
  17470. GeneratePropertyStringTest(indexOpnd, instr, helperLabel, false /*isStore*/);
  17471. const uint32 inlineCacheOffset = Js::PropertyString::GetOffsetOfLdElemInlineCache();
  17472. const uint32 hitRateOffset = Js::PropertyString::GetOffsetOfHitRate();
  17473. GenerateFastIsInSymbolOrStringIndex(instr, indexOpnd, baseOpnd, instr->GetDst(), inlineCacheOffset, hitRateOffset, helperLabel, doneLabel);
  17474. }
  17475. else
  17476. {
  17477. Assert(likelySymbolIndex);
  17478. GenerateSymbolTest(indexOpnd, instr, helperLabel);
  17479. const uint32 inlineCacheOffset = Js::JavascriptSymbol::GetOffsetOfLdElemInlineCache();
  17480. const uint32 hitRateOffset = Js::JavascriptSymbol::GetOffsetOfHitRate();
  17481. GenerateFastIsInSymbolOrStringIndex(instr, indexOpnd, baseOpnd, instr->GetDst(), inlineCacheOffset, hitRateOffset, helperLabel, doneLabel);
  17482. }
  17483. instr->InsertBefore(helperLabel);
  17484. instr->InsertAfter(doneLabel);
  17485. }
  17486. // Given an operand, either cast it or move it to a register
  17487. IR::RegOpnd * Lowerer::GetRegOpnd(IR::Opnd* opnd, IR::Instr* insertInstr, Func* func, IRType type)
  17488. {
  17489. if (opnd->IsRegOpnd())
  17490. {
  17491. return opnd->AsRegOpnd();
  17492. }
  17493. IR::RegOpnd *regOpnd = IR::RegOpnd::New(type, func);
  17494. InsertMove(regOpnd, opnd, insertInstr);
  17495. return regOpnd;
  17496. }
  17497. template <bool Saturate>
  17498. void Lowerer::GenerateTruncWithCheck(_In_ IR::Instr* instr)
  17499. {
  17500. Assert(instr->GetSrc1()->IsFloat());
  17501. if (instr->GetDst()->IsInt32() || instr->GetDst()->IsUInt32())
  17502. {
  17503. m_lowererMD.GenerateTruncWithCheck<Saturate>(instr);
  17504. }
  17505. else
  17506. {
  17507. Assert(instr->GetDst()->IsInt64());
  17508. LoadScriptContext(instr);
  17509. if (instr->GetSrc1()->IsFloat32())
  17510. {
  17511. m_lowererMD.LoadFloatHelperArgument(instr, instr->GetSrc1());
  17512. }
  17513. else
  17514. {
  17515. m_lowererMD.LoadDoubleHelperArgument(instr, instr->GetSrc1());
  17516. }
  17517. IR::JnHelperMethod helper;
  17518. if (Saturate)
  17519. {
  17520. IR::JnHelperMethod helperList[2][2] = { IR::HelperF32ToI64Sat, IR::HelperF32ToU64Sat, IR::HelperF64ToI64Sat ,IR::HelperF64ToU64Sat };
  17521. helper = helperList[instr->GetSrc1()->GetType() != TyFloat32][instr->GetDst()->GetType() == TyUint64];
  17522. }
  17523. else
  17524. {
  17525. IR::JnHelperMethod helperList[2][2] = { IR::HelperF32ToI64, IR::HelperF32ToU64, IR::HelperF64ToI64 ,IR::HelperF64ToU64 };
  17526. helper = helperList[instr->GetSrc1()->GetType() != TyFloat32][instr->GetDst()->GetType() == TyUint64];
  17527. }
  17528. instr->UnlinkSrc1();
  17529. this->m_lowererMD.ChangeToHelperCall(instr, helper);
  17530. }
  17531. }
// Move the StartCall/ArgOut chain that feeds a CallDirect so it executes only on
// the helper path (after labelHelper); the fast path that was generated does not
// need the outgoing call arguments.
void
Lowerer::RelocateCallDirectToHelperPath(IR::Instr* argoutInlineSpecialized, IR::LabelInstr* labelHelper)
{
    IR::Opnd *linkOpnd = argoutInlineSpecialized->GetSrc2(); //ArgOut_A_InlineSpecialized src2; link to actual argouts.
    argoutInlineSpecialized->Unlink();
    labelHelper->InsertAfter(argoutInlineSpecialized);

    // Walk the ArgOut chain backwards through the single-def sym links. Because
    // every moved instruction is inserted immediately AFTER the label, this reversed
    // walk re-establishes the original program order under the label
    // (StartCall, ArgOuts, then the specialized ArgOut).
    while(linkOpnd->IsSymOpnd())
    {
        IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
        StackSym *sym = src2->m_sym->AsStackSym();
        Assert(sym->m_isSingleDef);
        IR::Instr *argInstr = sym->m_instrDef;
        Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
        argInstr->Unlink();
        labelHelper->InsertAfter(argInstr);
        linkOpnd = argInstr->GetSrc2();
    }

    // Move startcall
    // The chain terminates in a RegOpnd whose single def is the StartCall.
    Assert(linkOpnd->IsRegOpnd());
    StackSym *sym = linkOpnd->AsRegOpnd()->m_sym;
    Assert(sym->m_isSingleDef);
    IR::Instr *startCall = sym->m_instrDef;
    Assert(startCall->m_opcode == Js::OpCode::StartCall);
    startCall->Unlink();
    labelHelper->InsertAfter(startCall);
}
  17558. bool
  17559. Lowerer::GenerateFastInlineStringCharCodeAt(IR::Instr * instr, Js::BuiltinFunction index)
  17560. {
  17561. Assert(instr->m_opcode == Js::OpCode::CallDirect);
  17562. //CallDirect src2
  17563. IR::Opnd * linkOpnd = instr->GetSrc2();
  17564. //ArgOut_A_InlineSpecialized
  17565. IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
  17566. IR::Opnd * argsOpnd[2] = {0};
  17567. bool result = instr->FetchOperands(argsOpnd, 2);
  17568. Assert(result);
  17569. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  17570. instr->InsertAfter(doneLabel);
  17571. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  17572. bool success = GenerateFastCharAt(index, instr->GetDst(), argsOpnd[0], argsOpnd[1],
  17573. instr, instr, labelHelper, doneLabel);
  17574. instr->InsertBefore(labelHelper);
  17575. if (!success)
  17576. {
  17577. return false;
  17578. }
  17579. InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
  17580. RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
  17581. return true;
  17582. }
  17583. void
  17584. Lowerer::GenerateCtz(IR::Instr* instr)
  17585. {
  17586. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsInt64());
  17587. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsInt64());
  17588. m_lowererMD.GenerateCtz(instr);
  17589. }
  17590. void
  17591. Lowerer::GeneratePopCnt(IR::Instr* instr)
  17592. {
  17593. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32() || instr->GetSrc1()->IsInt64());
  17594. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsUInt32() || instr->GetDst()->IsInt64());
  17595. m_lowererMD.GeneratePopCnt(instr);
  17596. }
  17597. void
  17598. Lowerer::GenerateFastInlineMathClz(IR::Instr* instr)
  17599. {
  17600. Assert(instr->GetDst()->IsInt32() || instr->GetDst()->IsInt64());
  17601. Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsInt64());
  17602. m_lowererMD.GenerateClz(instr);
  17603. }
  17604. void
  17605. Lowerer::GenerateFastInlineMathImul(IR::Instr* instr)
  17606. {
  17607. IR::Opnd* src1 = instr->GetSrc1();
  17608. IR::Opnd* src2 = instr->GetSrc2();
  17609. IR::Opnd* dst = instr->GetDst();
  17610. Assert(dst->IsInt32());
  17611. Assert(src1->IsInt32());
  17612. Assert(src2->IsInt32());
  17613. IR::Instr* imul = IR::Instr::New(LowererMD::MDImulOpcode, dst, src1, src2, instr->m_func);
  17614. instr->InsertBefore(imul);
  17615. LowererMD::Legalize(imul);
  17616. instr->Remove();
  17617. }
  17618. void
  17619. Lowerer::LowerReinterpretPrimitive(IR::Instr* instr)
  17620. {
  17621. Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
  17622. IR::Opnd* src1 = instr->GetSrc1();
  17623. IR::Opnd* dst = instr->GetDst();
  17624. Assert(dst->GetSize() == src1->GetSize());
  17625. Assert((dst->IsFloat32() && src1->IsInt32()) ||
  17626. (dst->IsInt32() && src1->IsFloat32()) ||
  17627. (dst->IsInt64() && src1->IsFloat64()) ||
  17628. (dst->IsFloat64() && src1->IsInt64()) );
  17629. m_lowererMD.EmitReinterpretPrimitive(dst, src1, instr);
  17630. instr->Remove();
  17631. }
  17632. void
  17633. Lowerer::GenerateFastInlineMathFround(IR::Instr* instr)
  17634. {
  17635. IR::Opnd* src1 = instr->GetSrc1();
  17636. IR::Opnd* dst = instr->GetDst();
  17637. Assert(dst->IsFloat());
  17638. Assert(src1->IsFloat());
  17639. // This function is supposed to convert a float to the closest float32 representation.
  17640. // However, it is a bit loose about types, which the ARM64 encoder takes issue with.
  17641. #ifdef _M_ARM64
  17642. LowererMD::GenerateFastInlineMathFround(instr);
  17643. #else
  17644. IR::Instr* fcvt64to32 = IR::Instr::New(LowererMD::MDConvertFloat64ToFloat32Opcode, dst, src1, instr->m_func);
  17645. instr->InsertBefore(fcvt64to32);
  17646. LowererMD::Legalize(fcvt64to32);
  17647. if (dst->IsFloat64())
  17648. {
  17649. IR::Instr* fcvt32to64 = IR::Instr::New(LowererMD::MDConvertFloat32ToFloat64Opcode, dst, dst, instr->m_func);
  17650. instr->InsertBefore(fcvt32to64);
  17651. LowererMD::Legalize(fcvt32to64);
  17652. }
  17653. instr->Remove();
  17654. #endif
  17655. return;
  17656. }
  17657. bool
  17658. Lowerer::GenerateFastInlineStringReplace(IR::Instr * instr)
  17659. {
  17660. Assert(instr->m_opcode == Js::OpCode::CallDirect);
  17661. //CallDirect src2
  17662. IR::Opnd * linkOpnd = instr->GetSrc2();
  17663. //ArgOut_A_InlineSpecialized
  17664. IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
  17665. IR::Opnd * argsOpnd[3] = {0};
  17666. bool result = instr->FetchOperands(argsOpnd, 3);
  17667. Assert(result);
  17668. AnalysisAssert(argsOpnd[0] && argsOpnd[1] && argsOpnd[2]);
  17669. if (!argsOpnd[0]->GetValueType().IsLikelyString()
  17670. || argsOpnd[1]->GetValueType().IsNotObject()
  17671. || !argsOpnd[2]->GetValueType().IsLikelyString())
  17672. {
  17673. return false;
  17674. }
  17675. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  17676. instr->InsertAfter(doneLabel);
  17677. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  17678. bool success = this->GenerateFastReplace(argsOpnd[0], argsOpnd[1], argsOpnd[2],
  17679. instr, instr, labelHelper, doneLabel);
  17680. instr->InsertBefore(labelHelper);
  17681. if (!success)
  17682. {
  17683. return false;
  17684. }
  17685. InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
  17686. RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
  17687. return true;
  17688. }
#ifdef ENABLE_DOM_FAST_PATH
/*
Lower the DOMFastPathGetter opcode
We have inliner generated bytecode:
(dst)helpArg1: ExtendArg_A (src1)thisObject (src2)null
(dst)helpArg2: ExtendArg_A (src1)funcObject (src2)helpArg1
method: DOMFastPathGetter (src1)HelperCall (src2)helpArg2
We'll convert it to a JavascriptFunction entry method call:
CALL Helper funcObject CallInfo(CallFlags_Value, 3) thisObj
*/
void
Lowerer::LowerFastInlineDOMFastPathGetter(IR::Instr* instr)
{
    // src1 carries the helper to call; src2 links to the ExtendArg_A chain.
    IR::Opnd* helperOpnd = instr->UnlinkSrc1();
    Assert(helperOpnd->IsHelperCallOpnd());
    IR::Opnd *linkOpnd = instr->UnlinkSrc2();
    Assert(linkOpnd->IsRegOpnd());

    // First link in the chain: the function object.
    IR::Instr* prevInstr = linkOpnd->AsRegOpnd()->m_sym->m_instrDef;
    Assert(prevInstr->m_opcode == Js::OpCode::ExtendArg_A);
    IR::Opnd* funcObj = prevInstr->GetSrc1();
    Assert(funcObj->IsRegOpnd());

    // If the Extended_arg was CSE's across a loop or hoisted out of a loop,
    // adding a new reference down here might cause funcObj to now be liveOnBackEdge.
    // Use the addToLiveOnBackEdgeSyms bit vector to add it to a loop if we encounter one.
    // We'll clear it once we reach the Extended arg.
    this->addToLiveOnBackEdgeSyms->Set(funcObj->AsRegOpnd()->m_sym->m_id);

    // Second (terminal) link: the 'this' object. Its src2 must be null.
    Assert(prevInstr->GetSrc2() != nullptr);
    prevInstr = prevInstr->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;
    Assert(prevInstr->m_opcode == Js::OpCode::ExtendArg_A);
    IR::Opnd* thisObj = prevInstr->GetSrc1();
    Assert(prevInstr->GetSrc2() == nullptr);
    Assert(thisObj->IsRegOpnd());
    this->addToLiveOnBackEdgeSyms->Set(thisObj->AsRegOpnd()->m_sym->m_id);

    // Push helper arguments in reverse order: this, callInfo, funcObj.
    // NOTE(review): the CallInfo is built with count 1 while the header comment
    // above says CallInfo(CallFlags_Value, 3) — confirm which count convention
    // MakeCallInfoConst expects here.
    const auto info = Lowerer::MakeCallInfoConst(Js::CallFlags_Value, 1, m_func);
    m_lowererMD.LoadHelperArgument(instr, thisObj);
    m_lowererMD.LoadHelperArgument(instr, info);
    m_lowererMD.LoadHelperArgument(instr, funcObj);
    instr->m_opcode = Js::OpCode::Call;
    IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperOpnd->AsHelperCallOpnd()->m_fnHelper, 3, m_func);
    instr->SetSrc1(helperCallOpnd);
    m_lowererMD.LowerCall(instr, 3); // we have funcobj, callInfo, and this.
}
#endif
  17732. void
  17733. Lowerer::GenerateFastInlineArrayPush(IR::Instr * instr)
  17734. {
  17735. Assert(instr->m_opcode == Js::OpCode::InlineArrayPush);
  17736. IR::Opnd * baseOpnd = instr->GetSrc1();
  17737. IR::Opnd * srcOpnd = instr->GetSrc2();
  17738. bool returnLength = false;
  17739. if(instr->GetDst())
  17740. {
  17741. returnLength = true;
  17742. }
  17743. IR::LabelInstr * bailOutLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  17744. IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  17745. instr->InsertAfter(doneLabel);
  17746. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  17747. //Don't Generate fast path according to ShouldGenerateArrayFastPath()
  17748. //AND, Don't Generate fast path if the array is LikelyNative and the element is not specialized
  17749. if(ShouldGenerateArrayFastPath(baseOpnd, false, false, false) &&
  17750. !(baseOpnd->GetValueType().IsLikelyNativeArray() && srcOpnd->IsVar()))
  17751. {
  17752. GenerateFastPush(baseOpnd, srcOpnd, instr, instr, labelHelper, doneLabel, bailOutLabelHelper, returnLength);
  17753. instr->InsertBefore(labelHelper);
  17754. InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
  17755. }
  17756. if(baseOpnd->GetValueType().IsLikelyNativeArray())
  17757. {
  17758. //Lower IR::BailOutConventionalNativeArrayAccessOnly here.
  17759. LowerOneBailOutKind(instr, IR::BailOutConventionalNativeArrayAccessOnly, false, false);
  17760. instr->InsertAfter(bailOutLabelHelper);
  17761. InsertBranch(Js::OpCode::Br, doneLabel, bailOutLabelHelper);
  17762. }
  17763. GenerateHelperToArrayPushFastPath(instr, bailOutLabelHelper);
  17764. }
// Emit the fast path for Array.prototype.pop: verify the base is a supported
// array, load its length, bail out when empty, decrement the length, and let
// GenerateFastElemICommon emit the element access for index length-1.
bool Lowerer::GenerateFastPop(IR::Opnd *baseOpndParam, IR::Instr *callInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel, IR::LabelInstr * bailOutLabelHelper)
{
    Assert(ShouldGenerateArrayFastPath(baseOpndParam, false, false, false));

    // TEST baseOpnd, AtomTag -- check baseOpnd not tagged int
    // JNE $helper
    // CMP [baseOpnd], JavascriptArray::`vtable' -- check baseOpnd isArray
    // JNE $helper
    // MOV r2, [baseOpnd + offset(length)] -- Load array length
    IR::RegOpnd * baseOpnd = baseOpndParam->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
    ValueType arrValueType(baseOpndParam->GetValueType());
    IR::RegOpnd *arrayOpnd = baseOpnd;
    IR::RegOpnd *arrayLengthOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
    if(!arrValueType.IsAnyOptimizedArray())
    {
        // Not statically proven to be an array: emit the runtime array test.
        // Note that a failed test goes to bailOutLabelHelper, not labelHelper.
        arrayOpnd = GenerateArrayTest(baseOpnd, bailOutLabelHelper, bailOutLabelHelper, callInstr, false, true);
        // After the test it is a definite object/array, but pop can create missing values.
        arrValueType = arrayOpnd->GetValueType().ToDefiniteObject().SetHasNoMissingValues(false);
    }
    else if(arrayOpnd->IsArrayRegOpnd())
    {
        // Reuse the length sym already tracked on the ArrayRegOpnd, if present.
        IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
        if(arrayRegOpnd->LengthSym())
        {
            arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), arrayRegOpnd->LengthSym()->GetType(), m_func);
            DebugOnly(arrayLengthOpnd->FreezeSymValue());
            autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        }
    }
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);

    // Copy the length into a mutable register: the tracked length sym is frozen
    // (see FreezeSymValue above) and we need to decrement below.
    IR::AutoReuseOpnd autoReuseMutableArrayLengthOpnd;
    {
        IR::RegOpnd *const mutableArrayLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
        autoReuseMutableArrayLengthOpnd.Initialize(mutableArrayLengthOpnd, m_func);
        if(arrayLengthOpnd)
        {
            // mov mutableArrayLength, arrayLength
            InsertMove(mutableArrayLengthOpnd, arrayLengthOpnd, callInstr);
        }
        else
        {
            // MOV mutableArrayLength, [array + offset(length)] -- Load array length
            // We know this index is safe since, so mark it as UInt32 to avoid unnecessary conversion/checks
            InsertMove(
                mutableArrayLengthOpnd,
                IR::IndirOpnd::New(
                    arrayOpnd,
                    Js::JavascriptArray::GetOffsetOfLength(),
                    mutableArrayLengthOpnd->GetType(),
                    this->m_func),
                callInstr);
        }
        arrayLengthOpnd = mutableArrayLengthOpnd;
    }

    // Empty array: nothing to pop, bail out.
    InsertCompareBranch(arrayLengthOpnd, IR::IntConstOpnd::New(0, TyUint32, this->m_func), Js::OpCode::BrEq_A, true, bailOutLabelHelper, callInstr);
    // length - 1 is the index of the element being popped.
    InsertSub(false, arrayLengthOpnd, arrayLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func),callInstr);
    IR::IndirOpnd *arrayRef = IR::IndirOpnd::New(arrayOpnd, arrayLengthOpnd, TyVar, this->m_func);
    arrayRef->GetBaseOpnd()->SetValueType(arrValueType);

    //Array length is going to overflow, hence don't check for Array.length and Segment.length overflow.
    bool isTypedArrayElement, isStringIndex;
    IR::IndirOpnd *const indirOpnd =
        GenerateFastElemICommon(
            callInstr,
            false,       // presumably "isStore" — GenerateFastPush passes true here; confirm
            arrayRef,
            labelHelper,
            labelHelper,
            nullptr,
            &isTypedArrayElement,
            &isStringIndex,
            nullptr,
            nullptr,
            nullptr /*pLabelSegmentLengthIncreased*/,
            true /*checkArrayLengthOverflow*/,
            true /* forceGenerateFastPath */,
            false/* = returnLength */,
            bailOutLabelHelper /* = bailOutLabelInstr*/);
    Assert(!isTypedArrayElement);
    Assert(indirOpnd);
    return true;
}
// Emit the fast path for Array.prototype.push of a single element: verify the
// base is a supported array, load its length, let GenerateFastElemICommon grow
// the array/segment bookkeeping, and store the element at the old length.
bool Lowerer::GenerateFastPush(IR::Opnd *baseOpndParam, IR::Opnd *src, IR::Instr *callInstr,
    IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel, IR::LabelInstr * bailOutLabelHelper, bool returnLength)
{
    Assert(ShouldGenerateArrayFastPath(baseOpndParam, false, false, false));

    // TEST baseOpnd, AtomTag -- check baseOpnd not tagged int
    // JNE $helper
    // CMP [baseOpnd], JavascriptArray::`vtable' -- check baseOpnd isArray
    // JNE $helper
    // MOV r2, [baseOpnd + offset(length)] -- Load array length
    IR::RegOpnd * baseOpnd = baseOpndParam->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
    ValueType arrValueType(baseOpndParam->GetValueType());
    IR::RegOpnd *arrayOpnd = baseOpnd;
    IR::RegOpnd *arrayLengthOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
    if(!arrValueType.IsAnyOptimizedArray())
    {
        // Not statically proven to be an array: emit the runtime array test
        // (failure goes to labelHelper, unlike pop which bails out).
        arrayOpnd = GenerateArrayTest(baseOpnd, labelHelper, labelHelper, insertInstr, false, true);
        // After the test it is a definite object/array; don't assume no missing values.
        arrValueType = arrayOpnd->GetValueType().ToDefiniteObject().SetHasNoMissingValues(false);
    }
    else if(arrayOpnd->IsArrayRegOpnd())
    {
        // Reuse the length sym already tracked on the ArrayRegOpnd, if present.
        IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
        if(arrayRegOpnd->LengthSym())
        {
            arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), arrayRegOpnd->LengthSym()->GetType(), m_func);
            DebugOnly(arrayLengthOpnd->FreezeSymValue());
            autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        }
    }
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);
    if(!arrayLengthOpnd)
    {
        // MOV arrayLength, [array + offset(length)] -- Load array length
        // We know this index is safe since, so mark it as UInt32 to avoid unnecessary conversion/checks
        arrayLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
        autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
        InsertMove(
            arrayLengthOpnd,
            IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), arrayLengthOpnd->GetType(), this->m_func),
            insertInstr);
    }

    // The store target is [array + length], i.e. one past the current last element.
    IR::IndirOpnd *arrayRef = IR::IndirOpnd::New(arrayOpnd, arrayLengthOpnd, TyVar, this->m_func);
    arrayRef->GetBaseOpnd()->SetValueType(arrValueType);

    if (returnLength && src->IsEqual(insertInstr->GetDst()))
    {
        //If the dst is same as the src, then dst is going to be overridden by GenerateFastElemICommon in process of updating the length.
        //Save it in a temp register.
        IR::RegOpnd *opnd = IR::RegOpnd::New(src->GetType(), this->m_func);
        InsertMove(opnd, src, insertInstr);
        src = opnd;
    }

    //Array length is going to overflow, hence don't check for Array.length and Segment.length overflow.
    bool isTypedArrayElement, isStringIndex;
    IR::IndirOpnd *const indirOpnd =
        GenerateFastElemICommon(
            insertInstr,
            true,        // presumably "isStore" — GenerateFastPop passes false here; confirm
            arrayRef,
            labelHelper,
            labelHelper,
            nullptr,
            &isTypedArrayElement,
            &isStringIndex,
            nullptr,
            nullptr,
            nullptr /*pLabelSegmentLengthIncreased*/,
            false /*checkArrayLengthOverflow*/,
            true /* forceGenerateFastPath */,
            returnLength,
            bailOutLabelHelper);
    Assert(!isTypedArrayElement);
    Assert(indirOpnd);

    // MOV [r3 + r2], src -- store the pushed element (with write barrier).
    InsertMoveWithBarrier(indirOpnd, src, insertInstr);
    return true;
}
  17923. bool
  17924. Lowerer::GenerateFastCharAt(Js::BuiltinFunction index, IR::Opnd *dst, IR::Opnd *srcStr, IR::Opnd *srcIndex, IR::Instr *callInstr,
  17925. IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
  17926. {
  17927. // if regSrcStr is not object, JMP $helper
  17928. // CMP [regSrcStr + offset(type)] , static string type -- check base string type
  17929. // JNE $helper
  17930. // MOV r1, [regSrcStr + offset(m_pszValue)]
  17931. // TEST r1, r1
  17932. // JEQ $helper
  17933. // MOV r2, srcIndex
  17934. // If r2 is not int, JMP $helper
  17935. // Convert r2 to int
  17936. // CMP [regSrcStr + offsetof(length)], r2
  17937. // JBE $helper
  17938. // MOVZX r2, [r1 + r2 * 2]
  17939. // if (charAt)
  17940. // PUSH r1
  17941. // PUSH scriptContext
  17942. // CALL GetStringFromChar
  17943. // MOV dst, EAX
  17944. // else (charCodeAt)
  17945. // if (codePointAt)
  17946. // Lowerer.GenerateFastCodePointAt -- Common inline functions
  17947. // Convert r2 to Var
  17948. // MOV dst, r2
  17949. bool isInt = false;
  17950. bool isNotTaggedValue = false;
  17951. if (srcStr->IsRegOpnd())
  17952. {
  17953. if (srcStr->AsRegOpnd()->IsTaggedInt())
  17954. {
  17955. isInt = true;
  17956. }
  17957. else if (srcStr->AsRegOpnd()->IsNotTaggedValue())
  17958. {
  17959. isNotTaggedValue = true;
  17960. }
  17961. }
  17962. IR::RegOpnd *regSrcStr = GetRegOpnd(srcStr, insertInstr, m_func, TyVar);
  17963. if (!isNotTaggedValue)
  17964. {
  17965. if (!isInt)
  17966. {
  17967. m_lowererMD.GenerateObjectTest(regSrcStr, insertInstr, labelHelper);
  17968. }
  17969. else
  17970. {
  17971. // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
  17972. IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
  17973. insertInstr->InsertBefore(fakeBr);
  17974. InsertBranch(Js::OpCode::Br, labelHelper, insertInstr);
  17975. }
  17976. }
  17977. // Bail out if index a constant and is less than zero.
  17978. if (srcIndex->IsAddrOpnd() && Js::TaggedInt::ToInt32(srcIndex->AsAddrOpnd()->m_address) < 0)
  17979. {
  17980. labelHelper->isOpHelper = false;
  17981. InsertBranch(Js::OpCode::Br, labelHelper, insertInstr);
  17982. return false;
  17983. }
  17984. GenerateStringTest(regSrcStr, insertInstr, labelHelper, nullptr, false);
  17985. // r1 contains the value of the char16* pointer inside JavascriptString.
  17986. // MOV r1, [regSrcStr + offset(m_pszValue)]
  17987. IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
  17988. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(regSrcStr->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, this->m_func);
  17989. InsertMove(r1, indirOpnd, insertInstr);
  17990. // TEST r1, r1 -- Null pointer test
  17991. // JEQ $helper
  17992. InsertTestBranch(r1, r1, Js::OpCode::BrEq_A, labelHelper, insertInstr);
  17993. IR::RegOpnd *strLength = IR::RegOpnd::New(TyUint32, m_func);
  17994. InsertMove(strLength, IR::IndirOpnd::New(regSrcStr, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func), insertInstr);
  17995. IR::Opnd* indexOpnd = nullptr;
  17996. if (srcIndex->IsAddrOpnd())
  17997. {
  17998. uint32 indexValue = Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address);
  17999. // CMP [regSrcStr + offsetof(length)], index
  18000. // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
  18001. // JBE $helper
  18002. InsertCompareBranch(strLength, IR::IntConstOpnd::New(indexValue, TyUint32, m_func), Js::OpCode::BrLe_A, true, labelHelper, insertInstr);
  18003. // Mask off the sign so that poisoning will work for negative indices
  18004. #if TARGET_32
  18005. uint32 maskedIndex = CONFIG_FLAG_RELEASE(PoisonStringLoad) ? (indexValue & INT32_MAX) : indexValue;
  18006. #else
  18007. uint32 maskedIndex = indexValue;
  18008. #endif
  18009. indirOpnd = IR::IndirOpnd::New(r1, maskedIndex * sizeof(char16), TyUint16, this->m_func);
  18010. indexOpnd = IR::IntConstOpnd::New(maskedIndex, TyMachPtr, m_func);
  18011. }
  18012. else
  18013. {
  18014. IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
  18015. // MOV r2, srcIndex
  18016. InsertMove(r2, srcIndex, insertInstr);
  18017. r2 = GenerateUntagVar(r2, labelHelper, insertInstr);
  18018. // CMP [regSrcStr + offsetof(length)], r2
  18019. // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
  18020. // JBE $helper
  18021. InsertCompareBranch(strLength, r2, Js::OpCode::BrLe_A, true, labelHelper, insertInstr);
  18022. #if TARGET_32
  18023. if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
  18024. {
  18025. // Mask off the sign so that poisoning will work for negative indices
  18026. InsertAnd(r2, r2, IR::IntConstOpnd::New(INT32_MAX, TyInt32, m_func), insertInstr);
  18027. }
  18028. #endif
  18029. if (r2->GetSize() != MachPtr)
  18030. {
  18031. r2 = r2->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
  18032. }
  18033. indexOpnd = r2;
  18034. indirOpnd = IR::IndirOpnd::New(r1, r2, 1, TyUint16, this->m_func);
  18035. }
  18036. IR::RegOpnd* maskOpnd = nullptr;
  18037. if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
  18038. {
  18039. maskOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
  18040. if (strLength->GetSize() != MachPtr)
  18041. {
  18042. strLength = strLength->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
  18043. }
  18044. InsertSub(false, maskOpnd, indexOpnd, strLength, insertInstr);
  18045. InsertShift(Js::OpCode::Shr_A, false, maskOpnd, maskOpnd, IR::IntConstOpnd::New(MachRegInt * 8 - 1, TyInt8, m_func), insertInstr);
  18046. if (maskOpnd->GetSize() != TyUint32)
  18047. {
  18048. maskOpnd = maskOpnd->UseWithNewType(TyUint32, this->m_func)->AsRegOpnd();
  18049. }
  18050. }
  18051. // MOVZX charReg, [r1 + r2 * 2] -- this is the value of the char
  18052. IR::RegOpnd *charReg = IR::RegOpnd::New(TyUint32, this->m_func);
  18053. InsertMove(charReg, indirOpnd, insertInstr);
  18054. if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
  18055. {
  18056. InsertAnd(charReg, charReg, maskOpnd, insertInstr);
  18057. }
  18058. if (index == Js::BuiltinFunction::JavascriptString_CharAt)
  18059. {
  18060. IR::Opnd *resultOpnd;
  18061. if (dst->IsEqual(srcStr))
  18062. {
  18063. resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
  18064. }
  18065. else
  18066. {
  18067. resultOpnd = dst;
  18068. }
  18069. GenerateGetSingleCharString(charReg, resultOpnd, labelHelper, doneLabel, insertInstr, false);
  18070. }
  18071. else
  18072. {
  18073. Assert(index == Js::BuiltinFunction::JavascriptString_CharCodeAt || index == Js::BuiltinFunction::JavascriptString_CodePointAt);
  18074. if (index == Js::BuiltinFunction::JavascriptString_CodePointAt)
  18075. {
  18076. GenerateFastInlineStringCodePointAt(insertInstr, this->m_func, strLength, srcIndex, charReg, r1);
  18077. }
  18078. if (charReg->GetSize() != MachPtr)
  18079. {
  18080. charReg = charReg->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
  18081. }
  18082. m_lowererMD.GenerateInt32ToVarConversion(charReg, insertInstr);
  18083. // MOV dst, charReg
  18084. InsertMove(dst, charReg, insertInstr);
  18085. }
  18086. return true;
  18087. }
// Emits ArgOut_A_Dynamic instructions for a CallIDynamic that sits inside an
// inlinee, sourcing the arguments from the inlinee's argv slots on the stack.
// The inlinee's actual count is known statically, so a simple unrolled sequence
// (last argument first) is emitted rather than a runtime loop.
// Returns an IntConstOpnd holding the inlinee's full actual count (including
// 'this'), which the caller uses for callInfo generation.
IR::Opnd*
Lowerer::GenerateArgOutForInlineeStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
{
    Assert(callInstr->m_func->IsInlinee());
    Func *func = callInstr->m_func;
    uint32 actualCount = func->actualCount - 1; // don't count this pointer
    Assert(actualCount < Js::InlineeCallInfo::MaxInlineeArgoutCount);

    // Compute the address of the first real argument slot (skipping 'this').
    const auto firstRealArgStackSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
    this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr); //Start after this pointer
    IR::SymOpnd *firstArg = IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseFirstArg(firstArg, func);

    IR::RegOpnd* argInOpnd = IR::RegOpnd::New(TyMachReg, func);
    const IR::AutoReuseOpnd autoReuseArgInOpnd(argInOpnd, func);
    InsertLea(argInOpnd, firstArg, callInstr);

    IR::IndirOpnd *argIndirOpnd = nullptr;
    IR::Instr* argout = nullptr;

#if defined(_M_IX86)
    // Maintain alignment
    if ((actualCount & 1) == 0)
    {
        IR::Instr *alignPush = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
        alignPush->SetSrc1(IR::IntConstOpnd::New(1, TyInt32, this->m_func));
        callInstr->InsertBefore(alignPush);
    }
#endif

    // Emit arguments from last to first; each loads [argInOpnd + (i-1)*MachPtr].
    for(uint i = actualCount; i > 0; i--)
    {
        argIndirOpnd = IR::IndirOpnd::New(argInOpnd, (i - 1) * MachPtr, TyMachReg, func);
        argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
        argout->SetSrc1(argIndirOpnd);
        callInstr->InsertBefore(argout);
        // i represents ith arguments from actuals, which is i + 3 counting this, callInfo and function object
        this->m_lowererMD.LoadDynamicArgument(argout, i + 3);
    }
    return IR::IntConstOpnd::New(func->actualCount, TyMachReg, func);
}
// For AMD64 and ARM only.
// Emits a runtime loop that walks a spread array's elements from the highest
// index downward, generating one ArgOut_A_Dynamic per element, then emits the
// final element's ArgOut at the fixed 4th argument register slot.
// indexOpnd enters holding the element count and is decremented in the loop;
// arrayElementsStartOpnd points at the first element of the array segment.
void
Lowerer::LowerInlineSpreadArgOutLoopUsingRegisters(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
{
    Func *const func = callInstr->m_func;

    // With exactly one element the loop is skipped entirely.
    IR::LabelInstr *oneArgLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertCompareBranch(indexOpnd, IR::IntConstOpnd::New(1, TyUint8, func), Js::OpCode::BrEq_A, true, oneArgLabel, callInstr);

    IR::LabelInstr *startLoopLabel = InsertLoopTopLabel(callInstr);
    Loop * loop = startLoopLabel->GetLoop();
    // Both the index and the element base pointer are carried around the back edge.
    loop->regAlloc.liveOnBackEdgeSyms->Set(indexOpnd->m_sym->m_id);
    loop->regAlloc.liveOnBackEdgeSyms->Set(arrayElementsStartOpnd->m_sym->m_id);

    InsertSub(false, indexOpnd, indexOpnd, IR::IntConstOpnd::New(1, TyInt8, func), callInstr);

    IR::IndirOpnd *elemPtrOpnd = IR::IndirOpnd::New(arrayElementsStartOpnd, indexOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyMachPtr, func);

    // Generate argout for n+2 arg (skipping function object + this)
    IR::Instr *argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);

    // X64 requires a reg opnd
    IR::RegOpnd *elemRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    Lowerer::InsertMove(elemRegOpnd, elemPtrOpnd, callInstr);
    argout->SetSrc1(elemRegOpnd);
    argout->SetSrc2(indexOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgumentUsingLength(argout);

    // Loop back while the index has not reached 1.
    InsertCompareBranch(indexOpnd, IR::IntConstOpnd::New(1, TyUint8, func), Js::OpCode::BrNeq_A, true, startLoopLabel, callInstr);

    // Emit final argument into register 4 on AMD64 and ARM
    callInstr->InsertBefore(oneArgLabel);
    argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    // NOTE(review): elemPtrOpnd is reused here with whatever value indexOpnd
    // holds at the join — verify against LoadDynamicArgument's lowering.
    argout->SetSrc1(elemPtrOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgument(argout, 4); //4 to denote this is 4th register after this, callinfo & function object
}
// Lowers a CallIDynamicSpread (a call whose arguments come from a spread
// array, e.g. f(...arr)). Walks the ArgOut chain backwards from the call to
// find the spread array, 'this', and the StartCall; removes the chain; emits
// code to load the array length and expand the elements into ArgOuts; then
// re-lowers the call as a CallIDynamic.
IR::Instr *
Lowerer::LowerCallIDynamicSpread(IR::Instr *callInstr, ushort callFlags)
{
    Assert(callInstr->m_opcode == Js::OpCode::CallIDynamicSpread);

    IR::Instr * insertBeforeInstrForCFG = nullptr;

    Func *const func = callInstr->m_func;

    // Spread inside an inlinee is not supported on this path; force a rejit.
    if (func->IsInlinee())
    {
        throw Js::RejitException(RejitReason::InlineSpreadDisabled);
    }

    // callInstr.src2 links to the ArgOut_A_SpreadArg carrying the array.
    IR::Instr *spreadArrayInstr = callInstr;
    IR::SymOpnd *argLinkOpnd = spreadArrayInstr->UnlinkSrc2()->AsSymOpnd();
    StackSym *argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    spreadArrayInstr = argLinkSym->m_instrDef;
    Assert(spreadArrayInstr->m_opcode == Js::OpCode::ArgOut_A_SpreadArg);
    IR::Opnd *arraySrcOpnd = spreadArrayInstr->UnlinkSrc1();
    IR::RegOpnd *arrayOpnd = GetRegOpnd(arraySrcOpnd, spreadArrayInstr, func, TyMachPtr);

    argLinkOpnd = spreadArrayInstr->UnlinkSrc2()->AsSymOpnd();

    // Walk the arg chain and find the start call
    argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // Nothing to be done for the function object, emit as normal
    IR::Instr *thisInstr = argLinkSym->m_instrDef;
    IR::RegOpnd *thisOpnd = thisInstr->UnlinkSrc2()->AsRegOpnd();
    argLinkSym = thisOpnd->m_sym->AsStackSym();
    thisInstr->Unlink();
    thisInstr->FreeDst();

    // Remove the array ArgOut instr and StartCall, they are no longer needed
    spreadArrayInstr->Unlink();
    spreadArrayInstr->FreeDst();

    IR::Instr *startCallInstr = argLinkSym->m_instrDef;
    Assert(startCallInstr->m_opcode == Js::OpCode::StartCall);
    insertBeforeInstrForCFG = startCallInstr->GetNextRealInstr();
    startCallInstr->Remove();

    // argsLength = array.length
    IR::RegOpnd *argsLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    IR::IndirOpnd *arrayLengthPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, func);
    Lowerer::InsertMove(argsLengthOpnd, arrayLengthPtrOpnd, callInstr);

    // Don't bother expanding args if there are zero
    IR::LabelInstr *zeroArgsLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertCompareBranch(argsLengthOpnd, IR::IntConstOpnd::New(0, TyInt8, func), Js::OpCode::BrEq_A, true, zeroArgsLabel, callInstr);

    IR::RegOpnd *indexOpnd = IR::RegOpnd::New(TyUint32, func);
    Lowerer::InsertMove(indexOpnd, argsLengthOpnd, callInstr);

    // Get the array head offset and length
    IR::IndirOpnd *arrayHeadPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, func);
    IR::RegOpnd *arrayElementsStartOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertAdd(false, arrayElementsStartOpnd, arrayHeadPtrOpnd, IR::IntConstOpnd::New(offsetof(Js::SparseArraySegment<Js::Var>, elements), TyUint8, func), callInstr);

    // MD-specific expansion of the elements into ArgOuts (register loop or stack).
    this->m_lowererMD.LowerInlineSpreadArgOutLoop(callInstr, indexOpnd, arrayElementsStartOpnd);

    // Resume if we have zero args
    callInstr->InsertBefore(zeroArgsLabel);

    // Lower call
    callInstr->m_opcode = Js::OpCode::CallIDynamic;
    callInstr = m_lowererMD.LowerCallIDynamic(callInstr, thisInstr, argsLengthOpnd, callFlags, insertBeforeInstrForCFG);

    return callInstr;
}
// Lowers a CallIDynamic (a call forwarding the caller's stack arguments, as in
// f.apply(this, arguments)). Walks the ArgOut chain backwards: the 'this'
// ArgOut_A_Dynamic, the ArgOut_A_FromStackArgs (expanded into per-argument
// ArgOuts), and finally the StartCall, removing each as it goes. Requires the
// stack-args optimization; rejits otherwise.
IR::Instr *
Lowerer::LowerCallIDynamic(IR::Instr * callInstr, ushort callFlags)
{
    if (!this->m_func->GetHasStackArgs())
    {
        throw Js::RejitException(RejitReason::InlineApplyDisabled);
    }

    IR::Instr * insertBeforeInstrForCFG = nullptr;

    // Lower args and look for StartCall
    IR::Instr * argInstr = callInstr;
    IR::SymOpnd * argLinkOpnd = argInstr->UnlinkSrc2()->AsSymOpnd();
    StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // First link: the ArgOut carrying 'this'; detach it and hand it to the MD
    // lowering at the end.
    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
    IR::Instr* saveThisArgOutInstr = argInstr;
    saveThisArgOutInstr->Unlink();
    saveThisArgOutInstr->FreeDst();

    argLinkOpnd = argInstr->UnlinkSrc2()->AsSymOpnd();
    argLinkSym = argLinkOpnd->m_sym->AsStackSym();
    AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
    argLinkOpnd->Free(this->m_func);

    // Second link: the stack-args ArgOut; expand it into real ArgOuts and get
    // back the runtime argument count.
    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
    IR::Opnd* argsLength = m_lowererMD.GenerateArgOutForStackArgs(callInstr, argInstr);

    IR::RegOpnd* startCallDstOpnd = argInstr->UnlinkSrc2()->AsRegOpnd();
    argLinkSym = startCallDstOpnd->m_sym->AsStackSym();
    startCallDstOpnd->Free(this->m_func);
    argInstr->Remove();// Remove ArgOut_A_FromStackArgs

    argInstr = argLinkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::StartCall);
    insertBeforeInstrForCFG = argInstr->GetNextRealInstr();
    argInstr->Remove(); //Remove start call

    return m_lowererMD.LowerCallIDynamic(callInstr, saveThisArgOutInstr, argsLength, callFlags, insertBeforeInstrForCFG);
}
//This is only for x64 & ARM.
// Expands an ArgOut_A_FromStackArgs into per-argument ArgOuts for a top-level
// (non-inlinee) function: a runtime loop pushes the arguments that go on the
// stack, and an unrolled branch ladder handles the ones that go in registers.
// Returns a RegOpnd holding the (tagged) actual count for callInfo generation.
IR::Opnd*
Lowerer::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
{
    // For architectures where we only pass 4 parameters in registers, the
    // generated code looks something like this:
    //     s25.var       =  LdLen_A          s4.var
    //     s26.var       =  Ld_A             s25.var
    //                      BrEq_I4          $L3, s25.var,0  // If we have no further arguments to pass, don't pass them
    // $L2:
    //                      BrEq_I4          $L4, s25.var,1  // Loop through the rest of the arguments, putting them on the stack
    //     s25.var       =  SUB_I4           s25.var, 0x1
    //     s10.var       =  LdElemI_A        [s4.var+s25.var].var
    //                      ArgOut_A_Dynamic s10.var, s25.var
    //                      Br $L2
    // $L4:
    //     s25.var       =  LdImm 0 // set s25 to 0, since it'll be 1 on the way into this block
    //     s10.var       =  LdElemI_A        [s4.var + 0 * MachReg].var // The last one has to be put into argslot 4, since this is likely a register, not a stack location.
    //                      ArgOut_A_Dynamic s10.var, 4
    // $L3:
    //
    // Generalizing this for more register-passed parameters gives us code
    // something like this:
    //     s25.var       =  LdLen_A          s4.var
    //     s26.var       =  Ld_A             s25.var
    //                      BrLe_I4          $L3, s25.var,0  // If we have no further arguments to pass, don't pass them
    // $L2:
    //                      BrLe_I4          $L4, s25.var,INT_REG_COUNT-3 // Loop through the rest of the arguments up to the number passed in registers, putting them on the stack
    //     s25.var       =  SUB_I4           s25.var, 0x1
    //     s10.var       =  LdElemI_A        [s4.var+s25.var].var
    //                      ArgOut_A_Dynamic s10.var, s25.var
    //                      Br $L2
    // $L4:
    // foreach of the remaining ones, N going down from (the number we can pass in regs -1) to 1 (0 omitted as we know that it'll be at least one register argument):
    //                      BrEq_I4          $L__N, s25.var, N
    // end foreach
    // foreach of the remaining ones, N going down from (the number we can pass in regs -1) to 0:
    // $L__N:
    //     s10.var       =  LdElemI_A        [s4.var + N * MachReg].var // The last one has to be put into argslot 4, since this is likely a register, not a stack location.
    //                      ArgOut_A_Dynamic s10.var, N+3
    // end foreach
    // $L3:

#if defined(_M_IX86)
    // We get a compilation error on x86 due to assigning a negative to a uint
    // TODO: don't even define this function on x86 - we Assert(false) anyway there.
    // Alternatively, don't define when INT_ARG_REG_COUNT - 4 < 0
    AssertOrFailFast(false);
    return nullptr;
#else

    Assert(stackArgsInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
    Assert(callInstr->m_opcode == Js::OpCode::CallIDynamic);

    this->m_lowererMD.GenerateFunctionObjectTest(callInstr, callInstr->GetSrc1()->AsRegOpnd(), false);

    // Inlinees know their actual count statically; take the unrolled path.
    if (callInstr->m_func->IsInlinee())
    {
        return this->GenerateArgOutForInlineeStackArgs(callInstr, stackArgsInstr);
    }
    Func *func = callInstr->m_func;
    IR::RegOpnd* stackArgs = stackArgsInstr->GetSrc1()->AsRegOpnd();

    IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyMachReg, func);
    const IR::AutoReuseOpnd autoReuseLdLenDstOpnd(ldLenDstOpnd, func);
    IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd ,stackArgs, func);
    ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); /*LdLen_A works only on stack arguments*/
    callInstr->InsertBefore(ldLen);
    GenerateFastRealStackArgumentsLdLen(ldLen);

    // Preserve the length in a separate register; it is this function's return value.
    IR::Instr* saveLenInstr = IR::Instr::New(Js::OpCode::MOV, IR::RegOpnd::New(TyMachReg, func), ldLenDstOpnd, func);
    saveLenInstr->GetDst()->SetValueType(ValueType::GetTaggedInt());
    callInstr->InsertBefore(saveLenInstr);

    // Skip everything when there are no arguments beyond 'this'.
    IR::LabelInstr* doneArgs = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::Instr* branchDoneArgs = IR::BranchInstr::New(Js::OpCode::BrEq_I4, doneArgs, ldLenDstOpnd, IR::IntConstOpnd::New(0, TyInt8, func),func);
    callInstr->InsertBefore(branchDoneArgs);
    this->m_lowererMD.EmitInt4Instr(branchDoneArgs);

    // Runtime loop: arguments that don't fit in registers, highest index first.
    IR::LabelInstr* startLoop = InsertLoopTopLabel(callInstr);
    Loop * loop = startLoop->GetLoop();
    IR::LabelInstr* endLoop = IR::LabelInstr::New(Js::OpCode::Label, func);

    IR::Instr* branchOutOfLoop = IR::BranchInstr::New(Js::OpCode::BrLe_I4, endLoop, ldLenDstOpnd, IR::IntConstOpnd::New(INT_ARG_REG_COUNT - 3, TyInt8, func),func);
    callInstr->InsertBefore(branchOutOfLoop);
    this->m_lowererMD.EmitInt4Instr(branchOutOfLoop);

    IR::Instr* subInstr = IR::Instr::New(Js::OpCode::Sub_I4, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyMachReg, func),func);
    callInstr->InsertBefore(subInstr);
    this->m_lowererMD.EmitInt4Instr(subInstr);

    IR::IndirOpnd *nthArgument = IR::IndirOpnd::New(stackArgs, ldLenDstOpnd, TyMachReg, func);
    IR::RegOpnd* ldElemDstOpnd = IR::RegOpnd::New(TyMachReg,func);
    const IR::AutoReuseOpnd autoReuseldElemDstOpnd(ldElemDstOpnd, func);
    IR::Instr* ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
    callInstr->InsertBefore(ldElem);
    GenerateFastStackArgumentsLdElemI(ldElem);

    IR::Instr* argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
    argout->SetSrc1(ldElemDstOpnd);
    argout->SetSrc2(ldLenDstOpnd);
    callInstr->InsertBefore(argout);
    this->m_lowererMD.LoadDynamicArgumentUsingLength(argout);

    IR::BranchInstr *tailBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, startLoop, func);
    callInstr->InsertBefore(tailBranch);
    callInstr->InsertBefore(endLoop);
    loop->regAlloc.liveOnBackEdgeSyms->Set(ldLenDstOpnd->m_sym->m_id);

    // Note: This loop iteratively adds instructions in two locations; in the block
    // of branches that jump to the "load elements to argOuts" instructions, and in
    // the block of load elements to argOuts instructions themselves.

    // 4 to denote this is 4th register after this, callinfo & function object
    // INT_ARG_REG_COUNT is the number of parameters passed in int regs
    uint current_reg_pass = INT_ARG_REG_COUNT - 4;
    do
    {
        // If we're on this pass we know we have to do at least one of these, so skip
        // the branch if we're on the last one.
        if (current_reg_pass != INT_ARG_REG_COUNT - 4)
        {
            IR::LabelInstr* loadBlockLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
            IR::Instr* branchToBlock = IR::BranchInstr::New(Js::OpCode::BrEq_I4, loadBlockLabel, ldLenDstOpnd, IR::IntConstOpnd::New(current_reg_pass + 1, TyInt8, func), func);
            endLoop->InsertAfter(branchToBlock);
            callInstr->InsertBefore(loadBlockLabel);
        }
        // TODO: We can further optimize this with a GenerateFastStackArgumentsLdElemI that can
        // handle us passing along constant argument references and encode them into the offset
        // instead of having to use an IndirOpnd; this would allow us to save a few bytes here,
        // and reduce register pressure a hair

        // stemp.var = LdImm current_reg_pass
        IR::RegOpnd* localTemp = IR::RegOpnd::New(TyInt32, func);
        // We need to make it a tagged int because GenerateFastStackArgumentsLdElemI asserts if
        // it is not.
        localTemp->SetValueType(ValueType::GetTaggedInt());
        const IR::AutoReuseOpnd autoReuseldElemDstOpnd3(localTemp, func);
        this->InsertMove(localTemp, IR::IntConstOpnd::New(current_reg_pass, TyInt8, func, true), callInstr);

        // sTemp = LdElem_I [s4.var + current_reg_pass (aka stemp.var) ]
        nthArgument = IR::IndirOpnd::New(stackArgs, localTemp, TyMachReg, func);
        ldElemDstOpnd = IR::RegOpnd::New(TyMachReg, func);
        const IR::AutoReuseOpnd autoReuseldElemDstOpnd2(ldElemDstOpnd, func);
        ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
        callInstr->InsertBefore(ldElem);
        GenerateFastStackArgumentsLdElemI(ldElem);

        argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
        argout->SetSrc1(ldElemDstOpnd);
        callInstr->InsertBefore(argout);
        this->m_lowererMD.LoadDynamicArgument(argout, current_reg_pass + 4);
    }
    while (current_reg_pass-- != 0);

    callInstr->InsertBefore(doneArgs);

    /*return the length which will be used for callInfo generations & stack allocation*/
    return saveLenInstr->GetDst()->AsRegOpnd();
#endif
}
  18388. void
  18389. Lowerer::GenerateLoadStackArgumentByIndex(IR::Opnd *dst, IR::RegOpnd *indexOpnd, IR::Instr *instr, int32 offset, Func *func)
  18390. {
  18391. // Load argument set dst = [ebp + index].
  18392. IR::RegOpnd *ebpOpnd = IR::Opnd::CreateFramePointerOpnd(func);
  18393. IR::IndirOpnd *argIndirOpnd = nullptr;
  18394. // The stack looks like this:
  18395. // [new.target or FrameDisplay] <== EBP + formalParamOffset (4) + callInfo.Count
  18396. // arguments[n] <== EBP + formalParamOffset (4) + n
  18397. // ...
  18398. // arguments[1] <== EBP + formalParamOffset (4) + 2
  18399. // arguments[0] <== EBP + formalParamOffset (4) + 1
  18400. // this or new.target <== EBP + formalParamOffset (4)
  18401. // callinfo
  18402. // function object
  18403. // return addr
  18404. // EBP-> EBP chain
  18405. //actual arguments offset is LowererMD::GetFormalParamOffset() + 1 (this)
  18406. int32 actualOffset = GetFormalParamOffset() + offset;
  18407. Assert(GetFormalParamOffset() == 4);
  18408. const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
  18409. argIndirOpnd = IR::IndirOpnd::New(ebpOpnd, indexOpnd, indirScale, TyMachReg, this->m_func);
  18410. argIndirOpnd->SetOffset(actualOffset << indirScale);
  18411. Lowerer::InsertMove(dst, argIndirOpnd, instr);
  18412. }
//This function assumes there is stackargs bailout and index is always in the range.
// Replaces an LdElemI_A on the arguments object with a direct load from the
// actual-argument slots (frame slots for a top-level function, inlinee argv
// slots for an inlinee). The LdElemI_A instruction is removed.
bool
Lowerer::GenerateFastStackArgumentsLdElemI(IR::Instr* ldElem)
{
    // MOV dst, ebp [(valueOpnd + 5) *4] // 5 for the stack layout
    //
    IR::IndirOpnd *indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
    // Now load the index and check if it is an integer.
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    Assert (indexOpnd && indexOpnd->IsTaggedInt());

    if(ldElem->m_func->IsInlinee())
    {
        // Inlinee: arguments live in the inlinee's argv slots.
        IR::IndirOpnd *argIndirOpnd = GetArgsIndirOpndForInlinee(ldElem, indexOpnd);
        Lowerer::InsertMove(ldElem->GetDst(), argIndirOpnd, ldElem);
    }
    else
    {
        GenerateLoadStackArgumentByIndex(ldElem->GetDst(), indexOpnd, ldElem, indirOpnd->GetOffset() + 1, m_func); // +1 to offset 'this'
    }

    // The original instruction is fully replaced by the inserted move.
    ldElem->Remove();
    return false;
}
// Builds an indirect operand addressing the inlinee's actual argument at
// 'valueOpnd' (constant or register index), based at the inlinee argv slots.
// The base is advanced by one MachPtr so index 0 is the first argument after
// 'this'.
IR::IndirOpnd*
Lowerer::GetArgsIndirOpndForInlinee(IR::Instr* ldElem, IR::Opnd* valueOpnd)
{
    Assert(ldElem->m_func->IsInlinee());
    IR::IndirOpnd* argIndirOpnd = nullptr;

    // Address of argument after 'this'
    const auto firstRealArgStackSym = ldElem->m_func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
    this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr); //Start after this pointer
    IR::SymOpnd *firstArg = IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, ldElem->m_func);
    const IR::AutoReuseOpnd autoReuseFirstArg(firstArg, m_func);

    IR::RegOpnd *const baseOpnd = IR::RegOpnd::New(TyMachReg, ldElem->m_func);
    const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
    InsertLea(baseOpnd, firstArg, ldElem);

    if (valueOpnd->IsIntConstOpnd())
    {
        // Constant index: fold it into the displacement.
        IntConstType offset = valueOpnd->AsIntConstOpnd()->GetValue() * MachPtr;
        // TODO: Assert(Math::FitsInDWord(offset));
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, ldElem->m_func);
    }
    else
    {
        // Register index: scale by the machine pointer size.
        Assert(valueOpnd->IsRegOpnd());
        const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, valueOpnd->AsRegOpnd(), indirScale, TyMachReg, ldElem->m_func);
    }
    return argIndirOpnd;
}
// Builds an indirect operand addressing a top-level (non-inlinee) function's
// actual argument at 'valueOpnd' (constant or register index). For normal
// functions the base is the frame pointer; for coroutines the arguments come
// from the generator's args pointer instead.
IR::IndirOpnd*
Lowerer::GetArgsIndirOpndForTopFunction(IR::Instr* ldElem, IR::Opnd* valueOpnd)
{
    // Load argument set dst = [ebp + index] (or grab from the generator object if m_func is a generator function).
    IR::RegOpnd *baseOpnd = m_func->GetJITFunctionBody()->IsCoroutine() ? LoadGeneratorArgsPtr(ldElem) : IR::Opnd::CreateFramePointerOpnd(m_func);
    IR::IndirOpnd* argIndirOpnd = nullptr;

    // The stack looks like this:
    //     ...
    //     arguments[1]
    //     arguments[0]
    //     this
    //     callinfo
    //     function object
    //     return addr
    //     EBP-> EBP chain

    //actual arguments offset is LowererMD::GetFormalParamOffset() + 1 (this)
    uint16 actualOffset = m_func->GetJITFunctionBody()->IsCoroutine() ? 1 : GetFormalParamOffset() + 1; //5
    Assert(actualOffset == 5 || m_func->GetJITFunctionBody()->IsGenerator());

    if (valueOpnd->IsIntConstOpnd())
    {
        // Constant index: fold index plus the fixed slot offset into the displacement.
        IntConstType offset = (valueOpnd->AsIntConstOpnd()->GetValue() + actualOffset) * MachPtr;
        // TODO: Assert(Math::FitsInDWord(offset));
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, this->m_func);
    }
    else
    {
        const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
        argIndirOpnd = IR::IndirOpnd::New(baseOpnd->AsRegOpnd(), valueOpnd->AsRegOpnd(), indirScale, TyMachReg, this->m_func);
        // Need to offset valueOpnd by 5. Instead of changing valueOpnd, we can just add an offset to the indir. Changing
        // valueOpnd requires creation of a temp sym (if it's not already a temp) so that the value of the sym that
        // valueOpnd represents is not changed.
        argIndirOpnd->SetOffset(actualOffset << indirScale);
    }
    return argIndirOpnd;
}
  18497. void
  18498. Lowerer::GenerateCheckForArgumentsLength(IR::Instr* ldElem, IR::LabelInstr* labelCreateHeapArgs, IR::Opnd* actualParamOpnd, IR::Opnd* valueOpnd, Js::OpCode opcode)
  18499. {
  18500. // Check if index < nr_actuals.
  18501. InsertCompare(actualParamOpnd, valueOpnd, ldElem);
  18502. // Jump to helper if index >= nr_actuals.
  18503. // Do an unsigned check here so that a negative index will also fail.
  18504. // (GenerateLdValueFromCheckedIndexOpnd does not guarantee positive index on x86.)
  18505. InsertBranch(opcode, true, labelCreateHeapArgs, ldElem);
  18506. }
// Emits the fast path for arguments[i]: bounds-check the index against the
// actual count and load directly from the stack (or inlinee argv) slots,
// falling back to $labelCreateHeapArgs (heap-arguments creation / bailout)
// when the index is out of range or not an int. Throws a rejit exception when
// no fast path can be emitted at all.
bool
Lowerer::GenerateFastArgumentsLdElemI(IR::Instr* ldElem, IR::LabelInstr *labelFallThru)
{
    //  ---GenerateSmIntTest
    //  ---GenerateLdValueFromCheckedIndexOpnd
    //  ---LoadInputParamCount
    //  CMP actualParamOpnd, valueOpnd //Compare between the actual count & the index count (say i in arguments[i])
    //  JLE $labelCreateHeapArgs
    //  MOV dst, ebp [(valueOpnd + 5) *4]  // 5 for the stack layout
    //  JMP $fallthrough
    //
    //labelCreateHeapArgs:
    //  ---Bail out to create Heap Arguments object

    Assert(ldElem->DoStackArgsOpt());

    IR::IndirOpnd *indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
    bool isInlinee = ldElem->m_func->IsInlinee();
    Func *func = ldElem->m_func;

    IR::LabelInstr *labelCreateHeapArgs = IR::LabelInstr::New(Js::OpCode::Label, func, true);

    // Now load the index and check if it is an integer.
    bool emittedFastPath = false;
    bool isNotInt = false;
    IntConstType value = 0;
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    IR::Opnd *valueOpnd = nullptr;
    IR::Opnd *actualParamOpnd = nullptr;

    bool hasIntConstIndex = indirOpnd->TryGetIntConstIndexValue(true, &value, &isNotInt);

    if (isNotInt || (isInlinee && hasIntConstIndex && value >= (ldElem->m_func->actualCount - 1)))
    {
        //Outside the range of actuals, skip
    }
    else if (labelFallThru != nullptr && !(hasIntConstIndex && value < 0)) //if index is not a negative int constant
    {
        if (isInlinee)
        {
            // Inlinee actual count is known at JIT time.
            actualParamOpnd = IR::IntConstOpnd::New(ldElem->m_func->actualCount - 1, TyInt32, func);
        }
        else
        {
            // Load actuals count, LoadHeapArguments will reuse the generated instructions here
            IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldElem, -1 /* don't include 'this' while counting actuals. */);
            actualParamOpnd = loadInputParamCountInstr->GetDst()->UseWithNewType(TyInt32,this->m_func);
        }

        if (hasIntConstIndex)
        {
            //Constant index
            valueOpnd = IR::IntConstOpnd::New(value, TyInt32, func);
        }
        else
        {
            //Load valueOpnd from the index
            valueOpnd =
                m_lowererMD.LoadNonnegativeIndex(
                    indexOpnd,
                    (
#if INT32VAR
                        indexOpnd->GetType() == TyUint32
#else
                        // On 32-bit platforms, skip the negative check since for now, the unsigned upper bound check covers it
                        true
#endif
                    ),
                    labelCreateHeapArgs,
                    labelCreateHeapArgs,
                    ldElem);
        }

        if (isInlinee)
        {
            if (!hasIntConstIndex)
            {
                //Runtime check if to make sure length is within the arguments.length range.
                // Note the swapped operand order and opposite branch condition vs. the
                // non-inlinee case below.
                GenerateCheckForArgumentsLength(ldElem, labelCreateHeapArgs, valueOpnd, actualParamOpnd, Js::OpCode::BrGe_A);
            }
        }
        else
        {
            GenerateCheckForArgumentsLength(ldElem, labelCreateHeapArgs, actualParamOpnd, valueOpnd, Js::OpCode::BrLe_A);
        }

        IR::Opnd *argIndirOpnd = nullptr;
        if (isInlinee)
        {
            argIndirOpnd = GetArgsIndirOpndForInlinee(ldElem, valueOpnd);
        }
        else
        {
            argIndirOpnd = GetArgsIndirOpndForTopFunction(ldElem, valueOpnd);
        }

        Lowerer::InsertMove(ldElem->GetDst(), argIndirOpnd, ldElem);

        // JMP $done
        InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);

        // $labelCreateHeapArgs:
        ldElem->InsertBefore(labelCreateHeapArgs);
        emittedFastPath = true;
    }

    if (!emittedFastPath)
    {
        throw Js::RejitException(RejitReason::DisableStackArgOpt);
    }

    return emittedFastPath;
}
  18606. bool
  18607. Lowerer::GenerateFastRealStackArgumentsLdLen(IR::Instr *ldLen)
  18608. {
  18609. if(ldLen->m_func->IsInlinee())
  18610. {
  18611. //Get the length of the arguments
  18612. Lowerer::InsertMove(ldLen->GetDst(),
  18613. IR::IntConstOpnd::New(ldLen->m_func->actualCount - 1, TyUint32, ldLen->m_func),
  18614. ldLen);
  18615. }
  18616. else
  18617. {
  18618. IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldLen, -1);
  18619. IR::RegOpnd *actualCountOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
  18620. Lowerer::InsertMove(ldLen->GetDst(), actualCountOpnd, ldLen);
  18621. }
  18622. ldLen->Remove();
  18623. return false;
  18624. }
bool
// Lowers a load of arguments.length as a *tagged* Var (fast path for the
// stack-arguments optimization). For an inlinee the value is a compile-time
// tagged-int constant; otherwise the runtime count is loaded and converted
// from int32 to a tagged Var before being stored to the destination.
// Returns true to indicate the fast path was emitted; the original
// instruction is left in place for the caller to finish lowering.
// NOTE(review): labelFallThru is not referenced in this body — confirm
// whether callers rely on it for a path not emitted here.
Lowerer::GenerateFastArgumentsLdLen(IR::Instr *ldLen, IR::LabelInstr* labelFallThru)
{
    // TEST argslot, argslot //Test if the arguments slot is zero
    // JNE $helper
    // actualCountOpnd <-LoadInputParamCount fastpath
    // SHL actualCountOpnd, actualCountOpnd, 1 // Left shift for tagging
    // INC actualCountOpnd // Tagging
    // MOV dst, actualCountOpnd
    // JMP $fallthrough
    //$helper:
    Assert(ldLen->DoStackArgsOpt());

    if(ldLen->m_func->IsInlinee())
    {
        //Get the length of the arguments
        // Inlinee: emit the already-tagged constant directly.
        Lowerer::InsertMove(ldLen->GetDst(),
            IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(ldLen->m_func->actualCount - 1), IR::AddrOpndKindConstantVar, ldLen->m_func), // -1 to exclude this pointer
            ldLen);
    }
    else
    {
        // Top-level function: load the runtime count (-1 excludes "this"),
        // then tag it in place before moving it to the destination.
        IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldLen, -1);
        IR::RegOpnd *actualCountOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
        this->m_lowererMD.GenerateInt32ToVarConversion(actualCountOpnd, ldLen);
        Lowerer::InsertMove(ldLen->GetDst(), actualCountOpnd, ldLen);
    }
    return true;
}
  18653. IR::RegOpnd*
  18654. Lowerer::GenerateFunctionTypeFromFixedFunctionObject(IR::Instr *insertInstrPt, IR::Opnd* functionObjOpnd)
  18655. {
  18656. IR::RegOpnd * functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  18657. IR::Opnd *functionTypeOpnd = nullptr;
  18658. if(functionObjOpnd->IsAddrOpnd())
  18659. {
  18660. IR::AddrOpnd* functionObjAddrOpnd = functionObjOpnd->AsAddrOpnd();
  18661. // functionTypeRegOpnd = MOV [fixed function address + type offset]
  18662. functionObjAddrOpnd->m_address;
  18663. functionTypeOpnd = IR::MemRefOpnd::New((void *)((intptr_t)functionObjAddrOpnd->m_address + Js::RecyclableObject::GetOffsetOfType()), TyMachPtr, this->m_func,
  18664. IR::AddrOpndKindDynamicObjectTypeRef);
  18665. }
  18666. else
  18667. {
  18668. functionTypeOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, this->m_func);
  18669. }
  18670. Lowerer::InsertMove(functionTypeRegOpnd, functionTypeOpnd, insertInstrPt);
  18671. return functionTypeRegOpnd;
  18672. }
void
// Final lowering pass: delegates machine-dependent final lowering, then
// pins the function's start/end labels so that the start label precedes the
// prolog and the end label follows the epilog.
Lowerer::FinalLower()
{
    this->m_lowererMD.FinalLower();

    // ensure that the StartLabel and EndLabel are inserted
    // before the prolog and after the epilog respectively
    IR::LabelInstr * startLabel = m_func->GetFuncStartLabel();
    if (startLabel != nullptr)
    {
        // Immediately after the head instruction => before the prolog.
        m_func->m_headInstr->InsertAfter(startLabel);
    }

    IR::LabelInstr * endLabel = m_func->GetFuncEndLabel();
    if (endLabel != nullptr)
    {
        // Before the last real instruction preceding the tail => after the epilog body.
        m_func->m_tailInstr->GetPrevRealInstr()->InsertBefore(endLabel);
    }
}
void
// Post-layout patch-up of bailouts that occur inside exception-handling
// (try/catch/finally) regions. See the numbered steps below; regions and
// return thunks are described in Region.h.
Lowerer::EHBailoutPatchUp()
{
    Assert(this->m_func->isPostLayout);

    // 1. Insert return thunks for all the regions.
    // 2. Set the hasBailedOut bit to true on all bailout paths in EH regions.
    // 3. Insert code after every bailout in a try or catch region to save the return value on the stack, and jump to the return thunk (See Region.h) of that region.
    // 4. Insert code right before the epilog, to restore the return value (saved in 2.) from a bailout into eax.

    IR::LabelInstr * restoreReturnValueFromBailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Find (or create) the epilog label immediately before the exit instruction.
    IR::LabelInstr * epilogLabel;
    IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->GetPrevRealInstrOrLabel();
    if (exitPrevInstr->IsLabelInstr())
    {
        epilogLabel = exitPrevInstr->AsLabelInstr();
    }
    else
    {
        epilogLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        this->m_func->m_exitInstr->InsertBefore(epilogLabel);
    }

    IR::Instr * tmpInstr = nullptr;
    bool restoreReturnFromBailoutEmitted = false;
    FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, this->m_func)
    {
        // Track the current EH region as we cross region-boundary labels.
        if (instr->IsLabelInstr())
        {
            this->currentRegion = instr->AsLabelInstr()->GetRegion();
        }

        // Consider (radua): Assert(this->currentRegion) here?
        if (this->currentRegion)
        {
            RegionType currentRegionType = this->currentRegion->GetType();
            if (currentRegionType == RegionTypeTry || currentRegionType == RegionTypeCatch || currentRegionType == RegionTypeFinally)
            {
                // Non-excepting finallys nested (through other non-excepting
                // finallys) directly under the root region need no patch-up.
                if (this->currentRegion->IsNonExceptingFinally())
                {
                    Region * parent = this->currentRegion->GetParent();
                    while (parent->IsNonExceptingFinally())
                    {
                        parent = parent->GetParent();
                    }
                    if (parent->GetType() == RegionTypeRoot)
                    {
                        continue;
                    }
                }
                this->InsertReturnThunkForRegion(this->currentRegion, restoreReturnValueFromBailoutLabel);
                if (instr->HasBailOutInfo())
                {
                    if (instr->GetBailOutInfo()->bailOutFunc == this->m_func)
                    {
                        // We dont set this bit for inlined code, if there was a bailout in the inlined code,
                        // and an exception was thrown, we want the caller's handler to handle the exception accordingly.
                        // TODO : Revisit when we start inlining functions with try-catch/try-finally
                        this->SetHasBailedOut(instr);
                    }
                    tmpInstr = this->EmitEHBailoutStackRestore(instr);
                    this->EmitSaveEHBailoutReturnValueAndJumpToRetThunk(tmpInstr);
                    // Emit the shared restore sequence (step 4) only once.
                    if (!restoreReturnFromBailoutEmitted)
                    {
                        this->EmitRestoreReturnValueFromEHBailout(restoreReturnValueFromBailoutLabel, epilogLabel);
                        restoreReturnFromBailoutEmitted = true;
                    }
                }
            }
        }
    }
    NEXT_INSTR_IN_FUNC_EDITING
}
bool
// Emits the inline-cache fast paths for a field load (LdFld and friends).
// Which cache shapes (local/proto, inline/aux slots) are emitted is driven
// by the instruction's profile data; see the block comment below for the
// generated code. Returns false: the original instruction is not lowered
// here — the caller inserts the helper label and the helper call.
// NOTE(review): helperMethod / polymorphicHelperMethod / labelBailOut are
// not referenced in this body; presumably the caller uses them when emitting
// the helper path — confirm against call sites.
Lowerer::GenerateFastLdFld(IR::Instr * const instrLdFld, IR::JnHelperMethod helperMethod, IR::JnHelperMethod polymorphicHelperMethod,
    IR::LabelInstr ** labelBailOut, IR::RegOpnd* typeOpnd, bool* pIsHelper, IR::LabelInstr** pLabelHelper)
{
    // Generates:
    //
    // r1 = object->type
    // if (r1 is taggedInt) goto helper
    // Load inline cache
    // if monomorphic
    //     r2 = address of the monomorphic inline cache
    // if polymorphic
    //     r2 = address of the polymorphic inline cache array
    //     r3 = (type >> PIC shift amount) & (PIC size - 1)
    //     r2 = r2 + r3
    // Try load property using proto cache (if protoFirst)
    // Try load property using local cache
    // Try loading property using proto cache (if !protoFirst)
    // Try loading property using flags cache
    //
    // Loading property using local cache:
    // if (r1 == r2->u.local.type)
    //     result = load inline slot r2->u.local.slotIndex from r1
    //     goto fallthru
    // if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.local.type)
    //     result = load aux slot r2->u.local.slotIndex from r1
    //     goto fallthru
    //
    // Loading property using proto cache:
    // if (r1 == r2->u.proto.type)
    //     r3 = r2->u.proto.prototypeObject
    //     result = load inline slot r2->u.proto.slotIndex from r3
    //     goto fallthru
    // if (r1 | InlineCacheAuxSlotTypeTag) == r2.u.proto.type)
    //     r3 = r2->u.proto.prototypeObject
    //     result = load aux slot r2->u.proto.slotIndex from r3
    //     goto fallthru
    //
    // Loading property using flags cache:
    // if (r2->u.accessor.flags & (Js::InlineCacheGetterFlag | Js::InlineCacheSetterFlag) == 0)
    //     if (r1 == r2->u.accessor.type)
    //         result = load inline slot r2->u.accessor.slotIndex from r1
    //         goto fallthru
    //     if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.accessor.type)
    //         result = load aux slot r2->u.accessor.slotIndex from r1
    //         goto fallthru
    //
    // Loading an inline slot:
    // result = [r1 + slotIndex * sizeof(Var)]
    //
    // Loading an aux slot:
    // slotArray = r1->auxSlots
    // result = [slotArray + slotIndex * sizeof(Var)]
    //
    // We only emit the code block for a type of cache (local/proto/flags) if the profile data
    // indicates that type of cache was used to load the property in the past.
    // We don't emit the type check with aux slot tag if the profile data indicates that we didn't
    // load the property from an aux slot before.
    // We don't emit the type check without an aux slot tag if the profile data indicates that we didn't
    // load the property from an inline slot before.
    IR::Opnd * opndSrc = instrLdFld->GetSrc1();
    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(), "Expected PropertySym as src of LdFld");
    Assert(!instrLdFld->DoStackArgsOpt());

    IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();

    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Field load: %s, property ID: %d, func: %s, cache ID: %d, cloned cache: false\n"),
        Js::OpCodeUtil::GetOpCodeName(instrLdFld->m_opcode),
        propertySym->m_propertyId,
        this->m_func->GetJITFunctionBody()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex);

    Assert(pIsHelper != nullptr);
    bool& isHelper = *pIsHelper;

    Assert(pLabelHelper != nullptr);
    IR::LabelInstr*& labelHelper = *pLabelHelper;

    // Default cache-shape selection; method loads try the proto cache first.
    bool doLocal = true;
    bool doProto = instrLdFld->m_opcode == Js::OpCode::LdMethodFld
        || instrLdFld->m_opcode == Js::OpCode::LdRootMethodFld
        || instrLdFld->m_opcode == Js::OpCode::ScopedLdMethodFld;
    bool doProtoFirst = doProto;
    bool doInlineSlots = true;
    bool doAuxSlots = true;

    // Narrow the emitted fast paths using the instruction's field profile.
    if (!PHASE_OFF(Js::ProfileBasedFldFastPathPhase, this->m_func) && instrLdFld->IsProfiledInstr())
    {
        IR::ProfiledInstr * profiledInstrLdFld = instrLdFld->AsProfiledInstr();
        if (profiledInstrLdFld->u.FldInfo().flags != Js::FldInfo_NoInfo)
        {
            doProto = !!(profiledInstrLdFld->u.FldInfo().flags & Js::FldInfo_FromProto);
            doLocal = !!(profiledInstrLdFld->u.FldInfo().flags & Js::FldInfo_FromLocal);

            if ((profiledInstrLdFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromInlineSlots)
            {
                // If the inline slots flag is set and the aux slots flag is not, only generate the inline slots check
                doAuxSlots = false;
            }
            else if ((profiledInstrLdFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromAuxSlots)
            {
                // If the aux slots flag is set and the inline slots flag is not, only generate the aux slots check
                doInlineSlots = false;
            }
        }
        else if (!profiledInstrLdFld->u.FldInfo().valueType.IsUninitialized())
        {
            // We have value type info about the field but no flags. This means we shouldn't generate any
            // fast paths for this field load.
            doLocal = false;
            doProto = false;
        }
    }

    if (!doLocal && !doProto)
    {
        return false;
    }

    IR::LabelInstr * labelFallThru = instrLdFld->GetOrCreateContinueLabel();

    if (labelHelper == nullptr)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
    }

    IR::RegOpnd * opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    bool usePolymorphicInlineCache = !!propertySymOpnd->m_runtimePolymorphicInlineCache;

    // r2 = inline cache pointer (mono: the cache itself; poly: cache array base).
    IR::RegOpnd * opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    if (usePolymorphicInlineCache)
    {
        Lowerer::InsertMove(opndInlineCache, IR::AddrOpnd::New(propertySymOpnd->m_runtimePolymorphicInlineCache->GetInlineCachesAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func, true), instrLdFld);
    }
    else
    {
        Lowerer::InsertMove(opndInlineCache, this->LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd, isHelper), instrLdFld);
    }

    // r1 = object->type (with tagged-int guard) unless the caller already loaded it.
    if (typeOpnd == nullptr)
    {
        typeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
        GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, typeOpnd, labelHelper);
    }

    if (usePolymorphicInlineCache)
    {
        LowererMD::GenerateLoadPolymorphicInlineCacheSlot(instrLdFld, opndInlineCache, typeOpnd, propertySymOpnd->m_runtimePolymorphicInlineCache->GetSize());
    }

    // Each emitted check chain falls through to labelNext on miss; the miss
    // branch of the LAST chain is re-targeted to the helper below, and the
    // final (then unreachable) labelNext is removed.
    IR::LabelInstr * labelNext = nullptr;
    IR::Opnd * opndDst = instrLdFld->GetDst();
    IR::RegOpnd * opndTaggedType = nullptr;
    IR::BranchInstr * labelNextBranchToPatch = nullptr;

    if (doProto && doProtoFirst)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = GenerateProtoInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                // Lazily compute (type | InlineCacheAuxSlotTypeTag), shared by all aux-slot checks.
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = GenerateProtoInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }
    if (doLocal)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = GenerateLocalInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            GenerateLdFldFromLocalInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = GenerateLocalInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            GenerateLdFldFromLocalInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }
    if (doProto && !doProtoFirst)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = GenerateProtoInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = GenerateProtoInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }

    // Re-target the final miss branch straight to the helper and drop the
    // now-unreferenced trailing label.
    Assert(labelNextBranchToPatch);
    labelNextBranchToPatch->SetTarget(labelHelper);
    labelNext->Remove();

    // $helper:
    //     dst = CALL Helper(inlineCache, base, field, scriptContext)
    // $fallthru:
    isHelper = true;

    // Return false to indicate the original instruction was not lowered. Caller will insert the helper label.
    return false;
}
void
// Emits a guard that bails to the helper when adding a property via the
// inline cache would require growing (adjusting) the object's aux slot
// array. The cache's rawUInt16 field packs a layout selector in its low
// CacheLayoutSelectorBitCount bits; the bits above the selector drive the
// decision.
Lowerer::GenerateAuxSlotAdjustmentRequiredCheck(
    IR::Instr * instrToInsertBefore,
    IR::RegOpnd * opndInlineCache,
    IR::LabelInstr * labelHelper)
{
    // regSlotCap = MOV [&(inlineCache->u.local.rawUInt16)] // sized to 16 bits
    IR::RegOpnd * regSlotCap = IR::RegOpnd::New(TyMachReg, instrToInsertBefore->m_func);
    IR::IndirOpnd * memSlotCap = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.rawUInt16), TyUint16, instrToInsertBefore->m_func);
    InsertMove(regSlotCap, memSlotCap, instrToInsertBefore);

    IR::IntConstOpnd * constSelectorBitCount = IR::IntConstOpnd::New(Js::InlineCache::CacheLayoutSelectorBitCount, TyUint16, instrToInsertBefore->m_func, /* dontEncode = */ true);

#if _M_ARM64
    // ARM64: single test-bit-and-branch on bit CacheLayoutSelectorBitCount.
    // NOTE(review): TBZ branches when the tested bit is CLEAR, while the
    // non-ARM64 path below branches when the shifted value is NON-zero —
    // confirm the intended polarity/equivalence against the InlineCache layout.
    IR::Instr * testBranch = InsertBranch(Js::OpCode::TBZ, labelHelper, instrToInsertBefore);
    testBranch->SetSrc1(regSlotCap);
    testBranch->SetSrc2(constSelectorBitCount);
#else
    // SAR regSlotCap, Js::InlineCache::CacheLayoutSelectorBitCount
    // Shift out the selector bits; branch to helper if anything remains.
    InsertShiftBranch(Js::OpCode::Shr_A, regSlotCap, regSlotCap, constSelectorBitCount, Js::OpCode::BrNeq_A, true, labelHelper, instrToInsertBefore);
#endif
}
  18998. void
  18999. Lowerer::GenerateSetObjectTypeFromInlineCache(
  19000. IR::Instr * instrToInsertBefore,
  19001. IR::RegOpnd * opndBase,
  19002. IR::RegOpnd * opndInlineCache,
  19003. bool isTypeTagged)
  19004. {
  19005. // regNewType = MOV [&(inlineCache->u.local.type)]
  19006. IR::RegOpnd * regNewType = IR::RegOpnd::New(TyMachReg, instrToInsertBefore->m_func);
  19007. IR::IndirOpnd * memNewType = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.type), TyMachReg, instrToInsertBefore->m_func);
  19008. InsertMove(regNewType, memNewType, instrToInsertBefore);
  19009. // AND regNewType, ~InlineCacheAuxSlotTypeTag
  19010. if (isTypeTagged)
  19011. {
  19012. // On 64-bit platforms IntConstOpnd isn't big enough to hold TyMachReg values.
  19013. IR::IntConstOpnd * constTypeTagComplement = IR::IntConstOpnd::New(~InlineCacheAuxSlotTypeTag, TyMachReg, instrToInsertBefore->m_func, /* dontEncode = */ true);
  19014. InsertAnd(regNewType, regNewType, constTypeTagComplement, instrToInsertBefore);
  19015. }
  19016. // MOV base->type, regNewType
  19017. IR::IndirOpnd * memObjType = IR::IndirOpnd::New(opndBase, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrToInsertBefore->m_func);
  19018. InsertMove(memObjType, regNewType, instrToInsertBefore);
  19019. }
bool
// Emits the inline-cache fast paths for a field store (StFld and friends):
// a "store" path (type matches, property exists) and/or an "add" path
// (type-evolution cache hit adds the property and installs the new type),
// each over inline and/or aux slots as indicated by the profile data.
// Returns false: the original instruction is not lowered here — the caller
// inserts the helper label and the helper call.
// NOTE(review): helperMethod / polymorphicHelperMethod / labelBailOut /
// withPutFlags / flags are not referenced in this body; presumably consumed
// by the caller's helper-path emission — confirm against call sites.
Lowerer::GenerateFastStFld(IR::Instr * const instrStFld, IR::JnHelperMethod helperMethod, IR::JnHelperMethod polymorphicHelperMethod, IR::LabelInstr ** labelBailOut, IR::RegOpnd* typeOpnd,
    bool* pIsHelper, IR::LabelInstr** pLabelHelper, bool withPutFlags, Js::PropertyOperationFlags flags)
{
    // Generates:
    //
    // r1 = object->type
    // if (r1 is taggedInt) goto helper
    // Load inline cache
    // if monomorphic
    //     r2 = address of the monomorphic inline cache
    // if polymorphic
    //     r2 = address of the polymorphic inline cache array
    //     r3 = (type >> PIC shift amount) & (PIC size - 1)
    //     r2 = r2 + r3
    // Try store property using local cache
    //
    // Loading property using local cache:
    // if (r1 == r2->u.local.type)
    //     store value to inline slot r2->u.local.slotIndex on r1
    //     goto fallthru
    // if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.local.type)
    //     store value to aux slot r2->u.local.slotIndex on r1
    //     goto fallthru
    //
    // Storing to an inline slot:
    // [r1 + slotIndex * sizeof(Var)] = value
    //
    // Storing to an aux slot:
    // slotArray = r1->auxSlots
    // [slotArray + slotIndex * sizeof(Var)] = value
    //
    // We don't emit the type check with aux slot tag if the profile data indicates that we didn't
    // store the property to an aux slot before.
    // We don't emit the type check without an aux slot tag if the profile data indicates that we didn't
    // store the property to an inline slot before.
    IR::Opnd * opndSrc = instrStFld->GetSrc1();
    IR::Opnd * opndDst = instrStFld->GetDst();
    AssertMsg(opndDst->IsSymOpnd() && opndDst->AsSymOpnd()->IsPropertySymOpnd() && opndDst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected PropertySym as dst of StFld");

    IR::PropertySymOpnd * propertySymOpnd = opndDst->AsPropertySymOpnd();
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();

    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        _u("Field store: %s, property ID: %u, func: %s, cache ID: %d, cloned cache: false\n"),
        Js::OpCodeUtil::GetOpCodeName(instrStFld->m_opcode),
        propertySym->m_propertyId,
        this->m_func->GetJITFunctionBody()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex);

    Assert(pIsHelper != nullptr);
    bool& isHelper = *pIsHelper;

    Assert(pLabelHelper != nullptr);
    IR::LabelInstr*& labelHelper = *pLabelHelper;

    // Default: emit only the store path; the add path is opt-in via profile.
    bool doStore = true;
    bool doAdd = false;
    bool doInlineSlots = true;
    bool doAuxSlots = true;
    if (!PHASE_OFF(Js::ProfileBasedFldFastPathPhase, this->m_func) && instrStFld->IsProfiledInstr())
    {
        IR::ProfiledInstr * profiledInstrStFld = instrStFld->AsProfiledInstr();
        if (profiledInstrStFld->u.FldInfo().flags != Js::FldInfo_NoInfo)
        {
            // No local-store history at all => no fast path here.
            if (!(profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty)))
            {
                return false;
            }

            if (!PHASE_OFF(Js::AddFldFastPathPhase, this->m_func))
            {
                // We always try to do the store field fast path, unless the profile specifically says we never set, but always add a property here.
                if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty)) == Js::FldInfo_FromLocalWithoutProperty)
                {
                    doStore = false;
                }

                // On the other hand, we only emit the add field fast path, if the profile explicitly says we do add properties here.
                if (!!(profiledInstrStFld->u.FldInfo().flags & Js::FldInfo_FromLocalWithoutProperty))
                {
                    doAdd = true;
                }
            }
            else
            {
#if ENABLE_DEBUG_CONFIG_OPTIONS
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
#endif
                PHASE_PRINT_TRACE(Js::AddFldFastPathPhase, this->m_func,
                    _u("AddFldFastPath: function: %s(%s) property ID: %u no fast path, because the phase is off.\n"),
                    this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
                    propertySym->m_propertyId);
            }

            if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromInlineSlots)
            {
                // If the inline slots flag is set and the aux slots flag is not, only generate the inline slots check
                doAuxSlots = false;
            }
            else if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromAuxSlots)
            {
                // If the aux slots flag is set and the inline slots flag is not, only generate the aux slots check
                doInlineSlots = false;
            }
        }
        else if (!profiledInstrStFld->u.FldInfo().valueType.IsUninitialized())
        {
            // We have value type info about the field but no flags. This means we shouldn't generate any
            // fast paths for this field store.
            return false;
        }
    }

    Assert(doStore || doAdd);

    if (labelHelper == nullptr)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    }

    IR::LabelInstr * labelFallThru = instrStFld->GetOrCreateContinueLabel();
    IR::RegOpnd * opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    bool usePolymorphicInlineCache = !!propertySymOpnd->m_runtimePolymorphicInlineCache;

    if (doAdd)
    {
#if ENABLE_DEBUG_CONFIG_OPTIONS
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
#endif
        PHASE_PRINT_TRACE(Js::AddFldFastPathPhase, this->m_func,
            _u("AddFldFastPath: function: %s(%s) property ID: %d %s fast path for %s.\n"),
            this->m_func->GetJITFunctionBody()->GetDisplayName(), this->m_func->GetDebugNumberSet(debugStringBuffer),
            propertySym->m_propertyId,
            usePolymorphicInlineCache ? _u("poly") : _u("mono"), doStore ? _u("store and add") : _u("add only"));
    }

    // r2 = inline cache pointer (mono: the cache itself; poly: cache array base).
    IR::RegOpnd * opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    if (usePolymorphicInlineCache)
    {
        Lowerer::InsertMove(opndInlineCache, IR::AddrOpnd::New(propertySymOpnd->m_runtimePolymorphicInlineCache->GetInlineCachesAddr(), IR::AddrOpndKindDynamicInlineCache, this->m_func, true), instrStFld);
    }
    else
    {
        Lowerer::InsertMove(opndInlineCache, this->LoadRuntimeInlineCacheOpnd(instrStFld, propertySymOpnd, isHelper), instrStFld);
    }

    // r1 = object->type (with tagged-int guard) unless the caller already loaded it.
    if (typeOpnd == nullptr)
    {
        typeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
        GenerateObjectTestAndTypeLoad(instrStFld, opndBase, typeOpnd, labelHelper);
    }

    if (usePolymorphicInlineCache)
    {
        LowererMD::GenerateLoadPolymorphicInlineCacheSlot(instrStFld, opndInlineCache, typeOpnd, propertySymOpnd->m_runtimePolymorphicInlineCache->GetSize());
    }

    // Each check chain falls through to labelNext on miss; the miss branch of
    // the LAST chain is re-targeted to the helper below, and the final
    // (then unreachable) labelNext is removed.
    IR::LabelInstr * labelNext = nullptr;
    IR::RegOpnd * opndTaggedType = nullptr;
    IR::BranchInstr * lastBranchToNext = nullptr;

    if (doStore)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            lastBranchToNext = GenerateLocalInlineCacheCheck(instrStFld, typeOpnd, opndInlineCache, labelNext);
            this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, true);
            instrStFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                // Lazily compute (type | InlineCacheAuxSlotTypeTag), shared by all aux-slot checks.
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrStFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            lastBranchToNext = GenerateLocalInlineCacheCheck(instrStFld, opndTaggedType, opndInlineCache, labelNext);
            this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, false);
            instrStFld->InsertBefore(labelNext);
        }
    }

    if (doAdd)
    {
        if (doInlineSlots)
        {
            // Add path: on a type-evolution cache hit, install the new type
            // from the cache, then store the value.
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            lastBranchToNext = GenerateLocalInlineCacheCheck(instrStFld, typeOpnd, opndInlineCache, labelNext, true);
            GenerateSetObjectTypeFromInlineCache(instrStFld, opndBase, opndInlineCache, false);
            this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, true);
            instrStFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrStFld, typeOpnd, opndTaggedType);
            }
            // NOTE(review): unlike every other labelNext above, this one is
            // created without the isHelper argument. It appears inert because
            // the last labelNext is removed below, but confirm the omission
            // is intentional.
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            lastBranchToNext = GenerateLocalInlineCacheCheck(instrStFld, opndTaggedType, opndInlineCache, labelNext, true);
            // Adding to an aux slot may require growing the slot array — bail
            // to the helper if so, before committing the new type.
            GenerateAuxSlotAdjustmentRequiredCheck(instrStFld, opndInlineCache, labelHelper);
            GenerateSetObjectTypeFromInlineCache(instrStFld, opndBase, opndInlineCache, true);
            this->GetLowererMD()->GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, false);
            instrStFld->InsertBefore(labelNext);
        }
    }

    // Re-target the final miss branch straight to the helper and drop the
    // now-unreferenced trailing label.
    Assert(lastBranchToNext);
    lastBranchToNext->SetTarget(labelHelper);
    labelNext->Remove();

    // $helper:
    //     CALL Helper(inlineCache, base, field, src, scriptContext)
    // $fallthru:
    isHelper = true;

    // Return false to indicate the original instruction was not lowered. Caller will insert the helper label.
    return false;
}
// Fast path for storing to a custom (non-cached) property: currently only
// RegExp "lastIndex". If applicable, emits inline stores of the value and
// the NotCachedValue sentinel directly into the RegExp object and jumps to
// the continue label, returning true. Returns false (emitting nothing) when
// the instruction/property/value-type/bailout conditions don't allow it;
// *labelHelperRef is set if object/vtable guards were emitted.
bool Lowerer::GenerateFastStFldForCustomProperty(IR::Instr *const instr, IR::LabelInstr * *const labelHelperRef)
{
    Assert(instr);
    Assert(labelHelperRef);
    Assert(!*labelHelperRef);

    // Only plain/strict field stores qualify.
    switch(instr->m_opcode)
    {
        case Js::OpCode::StFld:
        case Js::OpCode::StFldStrict:
            break;

        default:
            return false;
    }

    IR::SymOpnd *const symOpnd = instr->GetDst()->AsSymOpnd();
    PropertySym *const propertySym = symOpnd->m_sym->AsPropertySym();
    if(propertySym->m_propertyId != Js::PropertyIds::lastIndex || !symOpnd->IsPropertySymOpnd())
    {
        return false;
    }

    const ValueType objectValueType(symOpnd->GetPropertyOwnerValueType());
    if(!objectValueType.IsLikelyRegExp())
    {
        return false;
    }

    if(instr->HasBailOutInfo())
    {
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if(!BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind) || bailOutKind & IR::BailOutKindBits)
        {
            // Other bailout kinds will likely need bailout checks that would not be generated here. In particular, if a type
            // check is necessary here to guard against downstream property accesses on the same object, the type check will
            // fail and cause a bailout if the object is a RegExp object since the "lastIndex" property accesses are not cached.
            return false;
        }
    }

    Func *const func = instr->m_func;

    IR::RegOpnd *const objectOpnd = symOpnd->CreatePropertyOwnerOpnd(func);
    const IR::AutoReuseOpnd autoReuseObjectOpnd(objectOpnd, func);
    IR::LabelInstr *labelHelper = nullptr;

    // Guard 1: object must not be a tagged value (needed unless already proven).
    if(!objectOpnd->IsNotTaggedValue())
    {
        // test object, 1
        // jnz $helper
        if(!labelHelper)
        {
            *labelHelperRef = labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        m_lowererMD.GenerateObjectTest(objectOpnd, instr, labelHelper);
    }

    // Guard 2: vtable check proves it's really a JavascriptRegExp (skipped if
    // the value type is already a definite object of the right kind).
    if(!objectValueType.IsObject())
    {
        // cmp [object], Js::JavascriptRegExp::vtable
        // jne $helper
        if(!labelHelper)
        {
            *labelHelperRef = labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        InsertCompareBranch(
            IR::IndirOpnd::New(objectOpnd, 0, TyMachPtr, func),
            LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp),
            Js::OpCode::BrNeq_A,
            labelHelper,
            instr);
        objectOpnd->SetValueType(objectValueType.ToDefiniteObject());
    }

    // Store the new lastIndex Var and invalidate the cached numeric form.
    // mov [object + offset(lastIndexVar)], src
    // mov [object + offset(lastIndexOrFlag)], Js::JavascriptRegExp::NotCachedValue
    // jmp $done
    InsertMove(
        IR::IndirOpnd::New(objectOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), TyVar, func),
        instr->GetSrc1(),
        instr);
    InsertMove(
        IR::IndirOpnd::New(objectOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), TyUint32, func),
        IR::IntConstOpnd::New(Js::JavascriptRegExp::NotCachedValue, TyUint32, func, true),
        instr);
    InsertBranch(Js::OpCode::Br, instr->GetOrCreateContinueLabel(), instr);
    return true;
}
  19303. IR::RegOpnd *
  19304. Lowerer::GenerateIsBuiltinRecyclableObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool checkObjectAndDynamicObject, IR::LabelInstr *labelContinue, bool isInHelper)
  19305. {
  19306. // CMP [srcReg], Js::DynamicObject::`vtable'
  19307. // JEQ $fallThough
  19308. // MOV r1, [src1 + offset(type)] -- get the type id
  19309. // MOV r1, [r1 + offset(typeId)]
  19310. // ADD r1, ~TypeIds_LastStaticType -- if (typeId > TypeIds_LastStaticType && typeId <= TypeIds_LastBuiltinDynamicObject)
  19311. // CMP r1, (TypeIds_LastBuiltinDynamicObject - TypeIds_LastStaticType - 1)
  19312. // JA $helper
  19313. //fallThrough:
  19314. IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
  19315. if (checkObjectAndDynamicObject)
  19316. {
  19317. if (!regOpnd->IsNotTaggedValue())
  19318. {
  19319. m_lowererMD.GenerateObjectTest(regOpnd, insertInstr, labelHelper);
  19320. }
  19321. GenerateIsDynamicObject(regOpnd, insertInstr, labelFallthrough, true);
  19322. }
  19323. IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  19324. IR::RegOpnd * typeIdRegOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  19325. IR::IndirOpnd *indirOpnd;
  19326. // MOV typeRegOpnd, [src1 + offset(type)]
  19327. indirOpnd = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  19328. InsertMove(typeRegOpnd, indirOpnd, insertInstr);
  19329. // MOV typeIdRegOpnd, [typeRegOpnd + offset(typeId)]
  19330. indirOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
  19331. InsertMove(typeIdRegOpnd, indirOpnd, insertInstr);
  19332. // ADD typeIdRegOpnd, ~TypeIds_LastStaticType
  19333. InsertAdd(false, typeIdRegOpnd, typeIdRegOpnd,
  19334. IR::IntConstOpnd::New(~Js::TypeIds_LastStaticType, TyInt32, this->m_func, true), insertInstr);
  19335. // CMP typeIdRegOpnd, (TypeIds_LastBuiltinDynamicObject - TypeIds_LastStaticType - 1)
  19336. InsertCompare(
  19337. typeIdRegOpnd,
  19338. IR::IntConstOpnd::New(Js::TypeIds_LastBuiltinDynamicObject - Js::TypeIds_LastStaticType - 1, TyInt32, this->m_func),
  19339. insertInstr);
  19340. if (labelContinue)
  19341. {
  19342. // On success, go to continuation label.
  19343. InsertBranch(Js::OpCode::BrLe_A, true, labelContinue, insertInstr);
  19344. }
  19345. else
  19346. {
  19347. // On failure, go to helper.
  19348. InsertBranch(Js::OpCode::BrGt_A, true, labelHelper, insertInstr);
  19349. }
  19350. // $fallThrough
  19351. insertInstr->InsertBefore(labelFallthrough);
  19352. return typeRegOpnd;
  19353. }
  19354. void Lowerer::GenerateIsDynamicObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool fContinueLabel)
  19355. {
  19356. // CMP [srcReg], Js::DynamicObject::`vtable'
  19357. InsertCompare(
  19358. IR::IndirOpnd::New(regOpnd, 0, TyMachPtr, m_func),
  19359. LoadVTableValueOpnd(insertInstr, VTableValue::VtableDynamicObject),
  19360. insertInstr);
  19361. if (fContinueLabel)
  19362. {
  19363. // JEQ $fallThough
  19364. Lowerer::InsertBranch(Js::OpCode::BrEq_A, labelHelper, insertInstr);
  19365. }
  19366. else
  19367. {
  19368. // JNE $helper
  19369. Lowerer::InsertBranch(Js::OpCode::BrNeq_A, labelHelper, insertInstr);
  19370. }
  19371. }
  19372. void Lowerer::GenerateIsRecyclableObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool checkObjectAndDynamicObject)
  19373. {
  19374. // CMP [srcReg], Js::DynamicObject::`vtable'
  19375. // JEQ $fallThough
  19376. // MOV r1, [src1 + offset(type)] -- get the type id
  19377. // MOV r1, [r1 + offset(typeId)]
  19378. // ADD r1, ~TypeIds_LastJavascriptPrimitiveType -- if (typeId > TypeIds_LastJavascriptPrimitiveType && typeId <= TypeIds_LastTrueJavascriptObjectType)
  19379. // CMP r1, (TypeIds_LastTrueJavascriptObjectType - TypeIds_LastJavascriptPrimitiveType - 1)
  19380. // JA $helper
  19381. //fallThrough:
  19382. IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  19383. if (checkObjectAndDynamicObject)
  19384. {
  19385. if (!regOpnd->IsNotTaggedValue())
  19386. {
  19387. m_lowererMD.GenerateObjectTest(regOpnd, insertInstr, labelHelper);
  19388. }
  19389. this->GenerateIsDynamicObject(regOpnd, insertInstr, labelFallthrough, true);
  19390. }
  19391. IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  19392. IR::RegOpnd * typeIdRegOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  19393. // MOV r1, [src1 + offset(type)]
  19394. InsertMove(typeRegOpnd, IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func), insertInstr);
  19395. // MOV r1, [r1 + offset(typeId)]
  19396. InsertMove(typeIdRegOpnd, IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func), insertInstr);
  19397. // ADD r1, ~TypeIds_LastJavascriptPrimitiveType
  19398. InsertAdd(false, typeIdRegOpnd, typeIdRegOpnd, IR::IntConstOpnd::New(~Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, this->m_func, true), insertInstr);
  19399. // CMP r1, (TypeIds_LastTrueJavascriptObjectType - TypeIds_LastJavascriptPrimitiveType - 1)
  19400. InsertCompare(
  19401. typeIdRegOpnd,
  19402. IR::IntConstOpnd::New(Js::TypeIds_LastTrueJavascriptObjectType - Js::TypeIds_LastJavascriptPrimitiveType - 1, TyInt32, this->m_func),
  19403. insertInstr);
  19404. // JA $helper
  19405. InsertBranch(Js::OpCode::BrGe_A, true, labelHelper, insertInstr);
  19406. // $fallThrough
  19407. insertInstr->InsertBefore(labelFallthrough);
  19408. }
  19409. bool
  19410. Lowerer::GenerateLdThisCheck(IR::Instr * instr)
  19411. {
  19412. //
  19413. // If not a recyclable object, jump to $helper
  19414. // MOV dst, src1 -- return the object itself
  19415. // JMP $fallthrough
  19416. // $helper:
  19417. // (caller generates helper call)
  19418. // $fallthrough:
  19419. //
  19420. IR::RegOpnd * src1 = instr->GetSrc1()->AsRegOpnd();
  19421. IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  19422. IR::LabelInstr * fallthrough = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  19423. GenerateIsRecyclableObject(src1, instr, helper);
  19424. // MOV dst, src1
  19425. if (instr->GetDst() && !instr->GetDst()->IsEqual(src1))
  19426. {
  19427. InsertMove(instr->GetDst(), src1, instr);
  19428. }
  19429. // JMP $fallthrough
  19430. InsertBranch(Js::OpCode::Br, fallthrough, instr);
  19431. // $helper:
  19432. // (caller generates helper call)
  19433. // $fallthrough:
  19434. instr->InsertBefore(helper);
  19435. instr->InsertAfter(fallthrough);
  19436. return true;
  19437. }
  19438. //
  19439. // TEST src, Js::AtomTag
  19440. // JNE $done
  19441. // MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
  19442. // CMP [typeReg + offsetof(Type::typeid)], TypeIds_ActivationObject
  19443. // JEQ $helper
  19444. // $done:
  19445. // MOV dst, src
  19446. // JMP $fallthru
  19447. // helper:
  19448. // MOV dst, undefined
  19449. // $fallthru:
  19450. bool
  19451. Lowerer::GenerateLdThisStrict(IR::Instr* instr)
  19452. {
  19453. IR::RegOpnd * src1 = instr->GetSrc1()->AsRegOpnd();
  19454. IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
  19455. IR::LabelInstr * done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  19456. IR::LabelInstr * fallthru = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  19457. IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*helper*/true);
  19458. bool assign = instr->GetDst() && !instr->GetDst()->IsEqual(src1);
  19459. if (!src1->IsNotTaggedValue())
  19460. {
  19461. // TEST src1, Js::AtomTag
  19462. // JNE $done
  19463. this->m_lowererMD.GenerateObjectTest(src1, instr, assign ? done : fallthru);
  19464. }
  19465. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(src1, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  19466. Lowerer::InsertMove(typeReg, indirOpnd, instr);
  19467. IR::IndirOpnd * typeID = IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
  19468. IR::Opnd * activationObject = IR::IntConstOpnd::New(Js::TypeIds_ActivationObject, TyMachReg, this->m_func);
  19469. Lowerer::InsertCompare(typeID, activationObject, instr);
  19470. // JEQ $helper
  19471. Lowerer::InsertBranch(Js::OpCode::BrEq_A, helper, instr);
  19472. if (assign)
  19473. {
  19474. // $done:
  19475. instr->InsertBefore(done);
  19476. // MOV dst, src
  19477. Lowerer::InsertMove(instr->GetDst(), src1, instr);
  19478. }
  19479. // JMP $fallthru
  19480. Lowerer::InsertBranch(Js::OpCode::Br, fallthru, instr);
  19481. instr->InsertBefore(helper);
  19482. if (instr->GetDst())
  19483. {
  19484. // MOV dst, undefined
  19485. Lowerer::InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined), instr);
  19486. }
  19487. // $fallthru:
  19488. instr->InsertAfter(fallthru);
  19489. return true;
  19490. }
  19491. // given object instanceof function, functionReg is a register with function,
  19492. // objectReg is a register with instance and inlineCache is an InstIsInlineCache.
  19493. // We want to generate:
  19494. //
  19495. // fallback on helper (will patch the inline cache) if function does not match the cache
  19496. // MOV dst, Js::false
  19497. // CMP functionReg, [&(inlineCache->function)]
  19498. // JNE helper
  19499. //
  19500. // fallback if object is a tagged int
  19501. // TEST objectReg, Js::AtomTag
  19502. // JNE done
  19503. //
// load the object's type, then return false if object is a primitive
// MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
// CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
// JLE done
// fallback if object's type is not the cached type
// CMP typeReg, [&(inlineCache->type)]
// JNE helper
  19511. // use the cached result and fallthrough
  19512. // MOV dst, [&(inlineCache->result)]
  19513. // JMP done
  19514. //
  19515. //
  19516. // $helper
  19517. // $done
  19518. bool
  19519. Lowerer::GenerateFastIsInst(IR::Instr * instr)
  19520. {
  19521. IR::LabelInstr * helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  19522. IR::LabelInstr * done = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  19523. IR::RegOpnd * typeReg = IR::RegOpnd::New(TyMachReg, this->m_func);
  19524. IR::Opnd * objectSrc;
  19525. IR::Opnd * functionSrc;
  19526. intptr_t inlineCache;
  19527. IR::Instr * instrArg;
  19528. // We are going to use the extra ArgOut_A instructions to lower the helper call later,
  19529. // so we leave them alone here and clean them up then.
  19530. inlineCache = instr->m_func->GetJITFunctionBody()->GetIsInstInlineCache(instr->GetSrc1()->AsIntConstOpnd()->AsUint32());
  19531. Assert(instr->GetSrc2()->AsRegOpnd()->m_sym->m_isSingleDef);
  19532. instrArg = instr->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;
  19533. objectSrc = instrArg->GetSrc1();
  19534. Assert(instrArg->GetSrc2()->AsRegOpnd()->m_sym->m_isSingleDef);
  19535. instrArg = instrArg->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;
  19536. functionSrc = instrArg->GetSrc1();
  19537. Assert(instrArg->GetSrc2() == nullptr);
  19538. // MOV dst, Js::false
  19539. InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
  19540. IR::RegOpnd * functionReg = GetRegOpnd(functionSrc, instr, m_func, TyMachReg);
  19541. // CMP functionReg, [&(inlineCache->function)]
  19542. {
  19543. IR::Opnd* cacheFunction = IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfFunction(), TyMachReg, m_func, IR::AddrOpndKindDynamicIsInstInlineCacheFunctionRef);
  19544. InsertCompare(functionReg, cacheFunction, instr);
  19545. }
  19546. // JNE helper
  19547. InsertBranch(Js::OpCode::BrNeq_A, helper, instr);
  19548. IR::RegOpnd * objectReg = GetRegOpnd(objectSrc, instr, m_func, TyMachReg);
  19549. // TEST objectReg, Js::AtomTag
  19550. // JNE done
  19551. m_lowererMD.GenerateObjectTest(objectReg, instr, done);
  19552. // MOV typeReg, objectSrc + offsetof(RecyclableObject::type)
  19553. InsertMove(typeReg, IR::IndirOpnd::New(objectReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func), instr);
  19554. // CMP [typeReg + offsetof(Type::typeid)], TypeIds_LastJavascriptPrimitiveType
  19555. {
  19556. IR::IndirOpnd * typeId = IR::IndirOpnd::New(typeReg, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func);
  19557. IR::IntConstOpnd * lastPrimitive = IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastJavascriptPrimitiveType, TyInt32, m_func);
  19558. InsertCompare(typeId, lastPrimitive, instr);
  19559. }
  19560. // JLE done
  19561. InsertBranch(Js::OpCode::BrLe_A, done, instr);
  19562. // CMP typeReg, [&(inlineCache->type]
  19563. {
  19564. IR::Opnd * cacheType = IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfType(), TyMachReg, m_func, IR::AddrOpndKindDynamicIsInstInlineCacheTypeRef);
  19565. InsertCompare(typeReg, cacheType, instr);
  19566. }
  19567. // JNE helper
  19568. InsertBranch(Js::OpCode::BrNeq_A, helper, instr);
  19569. // MOV dst, [&(inlineCache->result)]
  19570. {
  19571. IR::Opnd * cacheResult = IR::MemRefOpnd::New(inlineCache + Js::IsInstInlineCache::OffsetOfResult(), TyMachReg, m_func, IR::AddrOpndKindDynamicIsInstInlineCacheResultRef);
  19572. InsertMove(instr->GetDst(), cacheResult, instr);
  19573. }
  19574. // JMP done
  19575. InsertBranch(Js::OpCode::Br, done, instr);
  19576. // LABEL helper
  19577. instr->InsertBefore(helper);
  19578. instr->InsertAfter(done);
  19579. return true;
  19580. }
  19581. void Lowerer::GenerateBooleanNegate(IR::Instr * instr, IR::Opnd * srcBool, IR::Opnd * dst)
  19582. {
  19583. // dst = src
  19584. // dst = dst ^ (true ^ false) (= !src)
  19585. Lowerer::InsertMove(dst, srcBool, instr);
  19586. ScriptContextInfo* sci = instr->m_func->GetScriptContextInfo();
  19587. IR::AddrOpnd* xorval = IR::AddrOpnd::New(sci->GetTrueAddr() ^ sci->GetFalseAddr(), IR::AddrOpndKindDynamicMisc, instr->m_func, true);
  19588. InsertXor(dst, dst, xorval, instr);
  19589. }
  19590. bool Lowerer::GenerateJSBooleanTest(IR::RegOpnd * regSrc, IR::Instr * insertInstr, IR::LabelInstr * labelTarget, bool fContinueLabel)
  19591. {
  19592. if (regSrc->GetValueType().IsBoolean())
  19593. {
  19594. if (fContinueLabel)
  19595. {
  19596. // JMP $labelTarget
  19597. InsertBranch(Js::OpCode::Br, labelTarget, insertInstr);
  19598. #if DBG
  19599. if (labelTarget->isOpHelper)
  19600. {
  19601. labelTarget->m_noHelperAssert = true;
  19602. }
  19603. #endif
  19604. }
  19605. return false;
  19606. }
  19607. IR::IndirOpnd * vtablePtrOpnd = IR::IndirOpnd::New(regSrc, 0, TyMachPtr, this->m_func);
  19608. IR::Opnd * jsBooleanVTable = LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptBoolean);
  19609. InsertCompare(vtablePtrOpnd, jsBooleanVTable, insertInstr);
  19610. if (fContinueLabel)
  19611. {
  19612. // JEQ $labelTarget
  19613. InsertBranch(Js::OpCode::BrEq_A, labelTarget, insertInstr);
  19614. // $helper
  19615. InsertLabel(true, insertInstr);
  19616. }
  19617. else
  19618. {
  19619. // JNE $labelTarget
  19620. InsertBranch(Js::OpCode::BrNeq_A, labelTarget, insertInstr);
  19621. }
  19622. return true;
  19623. }
  19624. bool Lowerer::GenerateFastEqBoolInt(IR::Instr * instr, bool *pNeedHelper, bool isInHelper)
  19625. {
  19626. Assert(instr);
  19627. // There's a total of 8 modes for this function, based on these inferred flags
  19628. bool isBranchNotCompare = instr->IsBranchInstr();
  19629. bool isStrict = false;
  19630. bool isNegOp = false;
  19631. switch (instr->m_opcode)
  19632. {
  19633. case Js::OpCode::BrSrEq_A:
  19634. case Js::OpCode::BrSrNotNeq_A:
  19635. case Js::OpCode::BrSrNeq_A:
  19636. case Js::OpCode::BrSrNotEq_A:
  19637. case Js::OpCode::CmSrEq_A:
  19638. case Js::OpCode::CmSrNeq_A:
  19639. isStrict = true;
  19640. break;
  19641. default:
  19642. break;
  19643. }
  19644. switch (instr->m_opcode)
  19645. {
  19646. case Js::OpCode::BrSrEq_A:
  19647. case Js::OpCode::BrSrNotNeq_A:
  19648. case Js::OpCode::CmSrEq_A:
  19649. case Js::OpCode::BrEq_A:
  19650. case Js::OpCode::BrNotNeq_A:
  19651. case Js::OpCode::CmEq_A:
  19652. isNegOp = false;
  19653. break;
  19654. case Js::OpCode::BrSrNeq_A:
  19655. case Js::OpCode::BrSrNotEq_A:
  19656. case Js::OpCode::CmSrNeq_A:
  19657. case Js::OpCode::BrNeq_A:
  19658. case Js::OpCode::BrNotEq_A:
  19659. case Js::OpCode::CmNeq_A:
  19660. isNegOp = true;
  19661. break;
  19662. default:
  19663. // This opcode is not one of the ones that should be handled here.
  19664. return false;
  19665. break;
  19666. }
  19667. IR::Opnd *src1 = instr->GetSrc1();
  19668. IR::Opnd *src2 = instr->GetSrc2();
  19669. // The instrucions given to this _should_ all be 2-arg.
  19670. Assert(src1 && src2);
  19671. if (!(src1 && src2))
  19672. {
  19673. return false;
  19674. }
  19675. // If it's a branch instruction, we'll want these to be defined
  19676. //IR::BranchInstr *instrBranch = nullptr;
  19677. IR::LabelInstr *targetInstr = nullptr;
  19678. IR::LabelInstr *labelFallthrough = nullptr;
  19679. if (isBranchNotCompare)
  19680. {
  19681. IR::BranchInstr * instrBranch = instr->AsBranchInstr();
  19682. targetInstr = instrBranch->GetTarget();
  19683. labelFallthrough = instrBranch->GetOrCreateContinueLabel(isInHelper);
  19684. }
  19685. // Assume we need the helper until we can show otherwise.
  19686. *pNeedHelper = true;
  19687. // If we don't know the final types well enough at JIT time, a helper block to set
  19688. // the inputs to the correct types will be needed.
  19689. IR::LabelInstr *labelHelper = nullptr;
  19690. // If we're doing a compare and can handle it early, then we want to skip the helper
  19691. IR::LabelInstr *labelDone = instr->GetOrCreateContinueLabel(isInHelper);
  19692. // Normallize for orderings
  19693. IR::Opnd *srcBool = nullptr;
  19694. IR::Opnd *srcInt = nullptr;
  19695. if (src1->GetValueType().IsLikelyBoolean() && src2->GetValueType().IsLikelyTaggedInt())
  19696. {
  19697. srcBool = src1;
  19698. srcInt = src2;
  19699. }
  19700. else if (src1->GetValueType().IsLikelyTaggedInt() && src2->GetValueType().IsLikelyBoolean())
  19701. {
  19702. srcInt = src1;
  19703. srcBool = src2;
  19704. }
  19705. else
  19706. {
  19707. return false;
  19708. }
  19709. // If either instruction is constant, we can simplify the check. If both are constant, we can eliminate it
  19710. bool srcIntConst = false;
  19711. bool srcIntConstVal = false;
  19712. // If we're comparing with a number that is not 0 or 1, then the two are inequal by default
  19713. bool srcIntIsBoolable = false;
  19714. bool srcBoolConst = false;
  19715. bool srcBoolConstVal = false;
  19716. if (srcInt->IsIntConstOpnd())
  19717. {
  19718. IR::IntConstOpnd * constSrcInt = srcInt->AsIntConstOpnd();
  19719. IntConstType constIntVal = constSrcInt->GetValue();
  19720. srcIntConst = true;
  19721. if (constIntVal == 0)
  19722. {
  19723. srcIntConstVal = false;
  19724. srcIntIsBoolable = true;
  19725. }
  19726. else if (constIntVal == 1)
  19727. {
  19728. srcIntConstVal = true;
  19729. srcIntIsBoolable = true;
  19730. }
  19731. }
  19732. else if (srcInt->IsAddrOpnd())
  19733. {
  19734. IR::AddrOpnd * addrSrcInt = srcInt->AsAddrOpnd();
  19735. if (!(addrSrcInt && addrSrcInt->IsVar() && Js::TaggedInt::Is(addrSrcInt->m_address)))
  19736. {
  19737. return false;
  19738. }
  19739. int32 constIntVal = Js::TaggedInt::ToInt32(addrSrcInt->m_address);
  19740. srcIntConst = true;
  19741. if (constIntVal == 0)
  19742. {
  19743. srcIntConstVal = false;
  19744. srcIntIsBoolable = true;
  19745. }
  19746. else if (constIntVal == 1)
  19747. {
  19748. srcIntConstVal = true;
  19749. srcIntIsBoolable = true;
  19750. }
  19751. }
  19752. else if (srcInt->IsConstOpnd())
  19753. {
  19754. // Not handled yet
  19755. return false;
  19756. }
  19757. if (srcBool->IsIntConstOpnd())
  19758. {
  19759. IR::IntConstOpnd * constSrcBool = srcBool->AsIntConstOpnd();
  19760. IntConstType constIntVal = constSrcBool->GetValue();
  19761. srcBoolConst = true;
  19762. srcBoolConstVal = constIntVal != 0;
  19763. }
  19764. else if (srcBool->IsAddrOpnd())
  19765. {
  19766. IR::AddrOpnd * addrSrcBool = srcInt->AsAddrOpnd();
  19767. if (!(addrSrcBool && addrSrcBool->IsVar() && Js::TaggedInt::Is(addrSrcBool->m_address)))
  19768. {
  19769. return false;
  19770. }
  19771. int32 value = Js::TaggedInt::ToInt32(addrSrcBool->m_address);
  19772. srcBoolConst = true;
  19773. srcBoolConstVal = value != 0;
  19774. }
  19775. else if (srcBool->IsConstOpnd())
  19776. {
  19777. // Not handled yet
  19778. return false;
  19779. }
  19780. // Do these checks here, since that way we avoid emitting instructions before exiting earlier
  19781. if (srcInt->GetValueType().IsTaggedInt() && srcBool->GetValueType().IsBoolean()) {
  19782. // ok, we know the types, so no helper needed
  19783. *pNeedHelper = false;
  19784. }
  19785. else
  19786. {
  19787. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  19788. // check the types and jump to the helper if incorrect
  19789. if (!srcInt->IsConstOpnd() && !srcInt->GetValueType().IsTaggedInt())
  19790. {
  19791. this->m_lowererMD.GenerateSmIntTest(srcInt->AsRegOpnd(), instr, labelHelper);
  19792. }
  19793. if (!srcBool->IsConstOpnd() && !srcBool->GetValueType().IsBoolean())
  19794. {
  19795. if (!srcBool->GetValueType().IsObject())
  19796. {
  19797. this->m_lowererMD.GenerateObjectTest(srcBool->AsRegOpnd(), instr, labelHelper, false);
  19798. }
  19799. GenerateJSBooleanTest(srcBool->AsRegOpnd(), instr, labelHelper, false);
  19800. }
  19801. }
  19802. // At this point, we know both which operand is an integer and which is a boolean,
  19803. // whether either operand is constant, and what the constant true/false values are
  19804. // for any constant operands. This should allow us to emit some decent code.
  19805. LibraryValue equalResultValue = !isNegOp ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
  19806. LibraryValue inequalResultValue = !isNegOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;
  19807. IR::LabelInstr *equalResultTarget = !isNegOp ? targetInstr : labelFallthrough;
  19808. IR::LabelInstr *inequalResultTarget = !isNegOp ? labelFallthrough : targetInstr;
  19809. // For the Sr instructions, we now know that the types are different, so we can immediately
  19810. // decide what the result will be.
  19811. if (isStrict)
  19812. {
  19813. if (isBranchNotCompare)
  19814. {
  19815. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, inequalResultTarget, this->m_func));
  19816. #if DBG
  19817. // Since we're not making a non-helper path to one of the branches, we need to tell
  19818. // DbCheckPostLower that we are going to have a non-helper label without non-helper
  19819. // branches.
  19820. // Note: this following line isn't good practice in general
  19821. equalResultTarget->m_noHelperAssert = true;
  19822. #endif
  19823. }
  19824. else
  19825. {
  19826. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, inequalResultValue), instr);
  19827. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  19828. }
  19829. }
  19830. // Now that we've checked the types, we can lower some instructions to quickly do the check
  19831. // in the case that it's not a type-strict strict equality/inequality check.
  19832. else if (srcIntConst && srcBoolConst)
  19833. {
  19834. // If both arguments are constant, we can statically determine the result.
  19835. bool sameVal = srcIntConstVal == srcBoolConstVal;
  19836. if (isBranchNotCompare)
  19837. {
  19838. // For constant branches, branch to the target
  19839. Assert(instr);
  19840. IR::LabelInstr * target = sameVal && srcIntIsBoolable ? equalResultTarget : inequalResultTarget;
  19841. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, target, this->m_func));
  19842. #if DBG
  19843. // Since we're not making a non-helper path to one of the branches, we need to tell
  19844. // DbCheckPostLower that we are going to have a non-helper label without non-helper
  19845. // branches.
  19846. // Note: this following line isn't good practice in general
  19847. (sameVal && srcIntIsBoolable ? inequalResultTarget : equalResultTarget)->m_noHelperAssert = true;
  19848. #endif
  19849. }
  19850. else
  19851. {
  19852. // For constant compares, load the constant result
  19853. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, sameVal && srcIntIsBoolable ? equalResultValue : inequalResultValue), instr);
  19854. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  19855. }
  19856. }
  19857. else if (!srcIntConst && !srcBoolConst)
  19858. {
  19859. // If neither is constant, we can still do a bit better than loading the helper
  19860. IR::LabelInstr * firstFalse = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  19861. IR::LabelInstr * forceInequal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  19862. // We branch based on the zero-ness of the integer argument to two checks against the boolean argument
  19863. this->m_lowererMD.GenerateTaggedZeroTest(srcInt->AsRegOpnd(), instr, firstFalse);
  19864. // If it's not zero, then it's either 1, in which case it's true, or it's something else, in which
  19865. // case the two will compare as inequal
  19866. InsertCompareBranch(
  19867. IR::IntConstOpnd::New((((IntConstType)1) << Js::VarTag_Shift) + Js::AtomTag, IRType::TyVar, this->m_func, true),
  19868. srcInt->AsRegOpnd(),
  19869. Js::OpCode::BrNeq_A,
  19870. isBranchNotCompare ? inequalResultTarget : forceInequal, // in the case of branching, we can go straight to the inequal target; for compares, we need to load the value
  19871. instr,
  19872. true);
  19873. if (isBranchNotCompare)
  19874. {
  19875. // if the int evaluates to 1 (true)
  19876. InsertCompareBranch(
  19877. srcBool,
  19878. LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue),
  19879. instr->m_opcode,
  19880. targetInstr,
  19881. instr);
  19882. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallthrough, this->m_func));
  19883. // if the int evaluates to 0 (false)
  19884. instr->InsertBefore(firstFalse);
  19885. InsertCompareBranch(
  19886. srcBool,
  19887. LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
  19888. instr->m_opcode,
  19889. targetInstr,
  19890. instr);
  19891. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallthrough, this->m_func));
  19892. }
  19893. else
  19894. {
  19895. // the int resolves to 1 (true)
  19896. // Load either the bool or its complement into the dst reg, depending on the opcode
  19897. if (isNegOp)
  19898. {
  19899. GenerateBooleanNegate(instr, srcBool, instr->GetDst());
  19900. }
  19901. else
  19902. {
  19903. this->InsertMove(instr->GetDst(), srcBool, instr);
  19904. }
  19905. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  19906. // the int resolves to 0 (false)
  19907. // Handle the complement case
  19908. instr->InsertBefore(firstFalse);
  19909. if (!isNegOp)
  19910. {
  19911. GenerateBooleanNegate(instr, srcBool, instr->GetDst());
  19912. }
  19913. else
  19914. {
  19915. this->InsertMove(instr->GetDst(), srcBool, instr);
  19916. }
  19917. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  19918. // the int resolves to something other than 0 or 1 (inequal to a bool)
  19919. instr->InsertBefore(forceInequal);
  19920. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, inequalResultValue), instr);
  19921. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  19922. }
  19923. }
  19924. else if (srcIntConst)
  19925. {
  19926. if (isBranchNotCompare)
  19927. {
  19928. if (srcIntIsBoolable)
  19929. {
  19930. LibraryValue intval = srcIntConstVal ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
  19931. InsertCompareBranch(
  19932. srcBool,
  19933. LoadLibraryValueOpnd(instr, intval),
  19934. instr->m_opcode,
  19935. targetInstr,
  19936. instr);
  19937. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallthrough, this->m_func));
  19938. }
  19939. else
  19940. {
  19941. // Since a constant int that isn't 0 or 1 will always be inequal to bools, just jump to the inequal result
  19942. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, inequalResultTarget, this->m_func));
  19943. #if DBG
  19944. // Since we're not making a non-helper path to one of the branches, we need to tell
  19945. // DbCheckPostLower that we are going to have a non-helper label without non-helper
  19946. // branches.
  19947. // Note: this following line isn't good practice in general
  19948. equalResultTarget->m_noHelperAssert = true;
  19949. #endif
  19950. }
  19951. }
  19952. else
  19953. {
  19954. if (srcIntIsBoolable)
  19955. {
  19956. bool directPassthrough = isNegOp != srcIntConstVal;
  19957. if (directPassthrough)
  19958. {
  19959. // If this case is hit, the result value is the same as the value in srcBool
  19960. this->InsertMove(instr->GetDst(), srcBool, instr);
  19961. }
  19962. else
  19963. {
  19964. // Otherwise, the result value is the negation of the value in srcBool
  19965. GenerateBooleanNegate(instr, srcBool, instr->GetDst());
  19966. }
  19967. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  19968. }
  19969. else
  19970. {
  19971. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, inequalResultValue), instr);
  19972. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  19973. }
  19974. }
  19975. }
  19976. else if (srcBoolConst)
  19977. {
  19978. if (isBranchNotCompare)
  19979. {
  19980. this->m_lowererMD.GenerateTaggedZeroTest(srcInt->AsRegOpnd(), instr, srcBoolConstVal ? inequalResultTarget : equalResultTarget);
  19981. if (srcBoolConstVal)
  19982. {
  19983. // If it's not zero, then it's either 1, in which case it's true, or it's something else, in which
  19984. // case we have an issue.
  19985. InsertCompareBranch(IR::IntConstOpnd::New((((IntConstType)1) << Js::VarTag_Shift) + Js::AtomTag, IRType::TyVar, this->m_func), srcInt->AsRegOpnd(), Js::OpCode::BrNeq_A, inequalResultTarget, instr, true);
  19986. }
  19987. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, srcBoolConstVal ? equalResultTarget : inequalResultTarget, this->m_func));
  19988. }
  19989. else
  19990. {
  19991. IR::LabelInstr* isNonZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  19992. IR::LabelInstr* isZero = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  19993. this->m_lowererMD.GenerateTaggedZeroTest(srcInt->AsRegOpnd(), instr, isZero);
  19994. if (srcBoolConstVal)
  19995. {
  19996. // If it's not zero, then it's either 1, in which case it's true, or it's something else, in which
  19997. // case we have an issue.
  19998. InsertCompareBranch(IR::IntConstOpnd::New((((IntConstType)1) << Js::VarTag_Shift) + Js::AtomTag, IRType::TyVar, this->m_func), srcInt->AsRegOpnd(), Js::OpCode::BrNeq_A, isZero, instr, true);
  19999. }
  20000. instr->InsertBefore(isNonZero);
  20001. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, srcBoolConstVal ? equalResultValue : inequalResultValue), instr);
  20002. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  20003. instr->InsertBefore(isZero);
  20004. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, !srcBoolConstVal ? equalResultValue : inequalResultValue), instr);
  20005. instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func));
  20006. }
  20007. }
  20008. if (*pNeedHelper)
  20009. {
  20010. instr->InsertBefore(labelHelper);
  20011. }
  20012. return true;
  20013. }
  20014. bool Lowerer::GenerateFastBrEqLikely(IR::BranchInstr * instrBranch, bool *pNeedHelper, bool isInHelper)
  20015. {
  20016. IR::Opnd *src1 = instrBranch->GetSrc1();
  20017. IR::Opnd *src2 = instrBranch->GetSrc2();
  20018. IR::LabelInstr *targetInstr = instrBranch->GetTarget();
  20019. IR::LabelInstr *labelEqualLikely = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
  20020. IR::LabelInstr *labelTrue = instrBranch->GetOrCreateContinueLabel(isInHelper);
  20021. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  20022. *pNeedHelper = true;
  20023. if (!this->GenerateFastBooleanAndObjectEqLikely(instrBranch, src1, src2, labelHelper, labelEqualLikely, pNeedHelper, isInHelper))
  20024. {
  20025. return false;
  20026. }
  20027. instrBranch->InsertBefore(labelEqualLikely);
  20028. IR::BranchInstr *newBranch = IR::BranchInstr::New(instrBranch->m_opcode, targetInstr, src1, src2, this->m_func);
  20029. instrBranch->InsertBefore(newBranch);
  20030. this->m_lowererMD.LowerCondBranch(newBranch);
  20031. newBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelTrue, this->m_func);
  20032. instrBranch->InsertBefore(newBranch);
  20033. instrBranch->InsertBefore(labelHelper);
  20034. return true;
  20035. }
// Emits the shared type-guard prologue for the equality fast paths
// (GenerateFastBrEqLikely / GenerateFastCmEqLikely). Supported operand shapes:
//   - both operands likely boolean,
//   - both operands have been objects,
//   - both operands likely symbols.
// Emitted control flow: jump to labelEqualLikely when a direct comparison of
// the operands is sufficient, to labelHelper otherwise. Returns false (and
// emits nothing) when the operands fit none of the shapes. *pNeedHelper is set
// to true on entry and cleared only when the guards prove the helper path is
// unreachable.
bool Lowerer::GenerateFastBooleanAndObjectEqLikely(IR::Instr * instr, IR::Opnd *src1, IR::Opnd *src2, IR::LabelInstr * labelHelper, IR::LabelInstr * labelEqualLikely, bool *pNeedHelper, bool isInHelper)
{
    *pNeedHelper = true;

    if (!src1 || !src2)
    {
        return false;
    }

    // Strict (===/!==) variants never coerce their operands, which lets some
    // of the guards below be relaxed.
    bool isStrictCompare = false;
    bool isStrictMode = this->m_func->GetJITFunctionBody()->IsStrictMode();

    switch (instr->m_opcode)
    {
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmSrNeq_A:
        isStrictCompare = true;
        break;
    }

    if (src1->GetValueType().IsLikelyBoolean() && src2->GetValueType().IsLikelyBoolean())
    {
        //
        // Booleans
        //
        if (isStrictCompare)
        {
            if (!src1->GetValueType().IsBoolean() && !src2->GetValueType().IsBoolean())
            {
                // Neither side is a proven boolean: only src2 is guarded here.
                // NOTE(review): presumably for a strict compare a boolean only
                // equals another boolean, so proving src2 is a JavascriptBoolean
                // makes the direct compare at labelEqualLikely decisive —
                // confirm against GenerateJSBooleanTest's contract.
                this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
                if (GenerateJSBooleanTest(src2->AsRegOpnd(), instr, labelEqualLikely, true))
                {
                    instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func));
                }
            }
            else
            {
                // At least one side is statically known to be boolean: the
                // strict compare can be emitted directly with no helper path.
                *pNeedHelper = false;
            }
        }
        else
        {
            // Loose compare: both sides must be proven booleans at runtime;
            // anything else may require coercion, so bail to the helper.
            this->m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instr, labelHelper, false);
            GenerateJSBooleanTest(src1->AsRegOpnd(), instr, labelHelper, false);
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
            if (GenerateJSBooleanTest(src2->AsRegOpnd(), instr, labelEqualLikely, true))
            {
                instr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func));
            }
        }
    }
    else if (src1->GetValueType().HasBeenObject() && src2->GetValueType().HasBeenObject())
    {
        //
        // Objects
        //
        IR::LabelInstr *labelTypeIdCheck = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);

        if (!isStrictCompare)
        {
            // If not strictBr, verify both sides are dynamic objects
            this->m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instr, labelHelper, false);
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
            GenerateIsDynamicObject(src1->AsRegOpnd(), instr, labelTypeIdCheck, false);
        }
        else
        {
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instr, labelHelper, false);
        }
        GenerateIsDynamicObject(src2->AsRegOpnd(), instr, labelEqualLikely, true);

        // $labelTypeIdCheck: the dynamic-object vtable check above did not
        // match; fall back to a TypeId-based classification.
        instr->InsertBefore(labelTypeIdCheck);

        if (isStrictMode)
        {
            // In strict-mode script the TypeId fast test is not attempted;
            // branch straight to the helper.
            labelTypeIdCheck->isOpHelper = true;
            IR::BranchInstr *branchToHelper = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func);
            instr->InsertBefore(branchToHelper);
        }
        else
        {
            // Try the external-object fast test first; otherwise accept only
            // builtin recyclable objects on the fast path.
            if (!ExternalLowerer::TryGenerateFastExternalEqTest(src1, src2, instr, labelHelper, labelEqualLikely, this, isStrictCompare, isInHelper))
            {
                if (!isStrictCompare)
                {
                    GenerateIsBuiltinRecyclableObject(src1->AsRegOpnd(), instr, labelHelper, false /*checkObjectAndDynamicObject*/, nullptr /*labelContinue*/, isInHelper);
                }
                GenerateIsBuiltinRecyclableObject(src2->AsRegOpnd(), instr, labelHelper, false /*checkObjectAndDynamicObject*/, nullptr /*labelContinue*/, isInHelper);
            }
        }
    }
    else if (src1->GetValueType().IsLikelySymbol() && src2->GetValueType().IsLikelySymbol())
    {
        // Symbols: verify both sides are symbols, then the direct comparison
        // at labelEqualLikely decides equality.
        this->GenerateSymbolTest(src1->AsRegOpnd(), instr, labelHelper, nullptr, true);
        this->GenerateSymbolTest(src2->AsRegOpnd(), instr, labelHelper, nullptr, true);
    }
    else
    {
        return false;
    }

    return true;
}
  20135. bool Lowerer::GenerateFastCmEqLikely(IR::Instr * instr, bool *pNeedHelper, bool isInHelper)
  20136. {
  20137. *pNeedHelper = false;
  20138. Assert(instr->m_opcode == Js::OpCode::CmSrEq_A ||
  20139. instr->m_opcode == Js::OpCode::CmSrNeq_A ||
  20140. instr->m_opcode == Js::OpCode::CmEq_A ||
  20141. instr->m_opcode == Js::OpCode::CmNeq_A);
  20142. bool isNegOp = false;
  20143. bool isStrict = false;
  20144. switch (instr->m_opcode)
  20145. {
  20146. case Js::OpCode::CmSrEq_A:
  20147. isStrict = true;
  20148. break;
  20149. case Js::OpCode::CmSrNeq_A:
  20150. isStrict = true;
  20151. case Js::OpCode::CmNeq_A:
  20152. isNegOp = true;
  20153. break;
  20154. }
  20155. IR::Opnd *src1 = instr->GetSrc1();
  20156. IR::Opnd *src2 = instr->GetSrc2();
  20157. IR::LabelInstr *labelEqualLikely = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
  20158. IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
  20159. IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  20160. if (!this->GenerateFastBooleanAndObjectEqLikely(instr, src1, src2, labelHelper, labelEqualLikely, pNeedHelper, isInHelper))
  20161. {
  20162. return false;
  20163. }
  20164. instr->InsertBefore(labelEqualLikely);
  20165. // $labelEqualLikely
  20166. //
  20167. // Will only come here for
  20168. // if src2 is dynamic object(matches Js::DynamicObject::`vtable'), for non strict cm both src1 and src2 should be dynamic object
  20169. // or if src2 is builtin recyclableobject(typeId > TypeIds_LastStaticType && typeId <= TypeIds_LastBuiltinDynamicObject)
  20170. // or if CustomExternalType with no operations usage flags
  20171. //
  20172. // src1->IsEqual(src2)
  20173. // MOV DST SUCCESS
  20174. // JMP $DONE
  20175. // CMP src1, src2
  20176. // MOV DST SUCCESS
  20177. // JEQ $DONE
  20178. // MOV DST FAILURE
  20179. // JMP $DONE
  20180. LibraryValue successValueType = !isNegOp ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
  20181. LibraryValue failureValueType = !isNegOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;
  20182. if (src1->IsEqual(src2))
  20183. {
  20184. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, successValueType), instr);
  20185. instr->InsertBefore(IR::BranchInstr::New(this->m_lowererMD.MDUncondBranchOpcode, labelDone, this->m_func));
  20186. }
  20187. else
  20188. {
  20189. IR::LabelInstr *cmEqual = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelper);
  20190. this->InsertCompareBranch(src1, src2, isStrict ? Js::OpCode::BrSrEq_A : Js::OpCode::BrEq_A, cmEqual, instr);
  20191. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, failureValueType), instr);
  20192. instr->InsertBefore(IR::BranchInstr::New(this->m_lowererMD.MDUncondBranchOpcode, labelDone, this->m_func));
  20193. instr->InsertBefore(cmEqual);
  20194. Lowerer::InsertMove(instr->GetDst(), this->LoadLibraryValueOpnd(instr, successValueType), instr);
  20195. instr->InsertBefore(IR::BranchInstr::New(this->m_lowererMD.MDUncondBranchOpcode, labelDone, this->m_func));
  20196. }
  20197. instr->InsertBefore(labelHelper);
  20198. instr->InsertAfter(labelDone);
  20199. return true;
  20200. }
// Fast path for equality branches/compares when both operands have carried a
// string tag: dispatches on the opcode to wire up success/failure labels, then
// emits the inline string comparison via GenerateFastStringCheck. For Cm*
// forms the true/false result is materialized into dst. Returns false (and
// emits nothing) when the operands cannot be on the string fast path.
bool
Lowerer::GenerateFastBrOrCmString(IR::Instr* instr)
{
    IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
    IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;

    // Bail out unless both operands are registers that may hold strings.
    if (!srcReg1 ||
        !srcReg2 ||
        srcReg1->IsTaggedInt() ||
        srcReg2->IsTaggedInt() ||
        !srcReg1->GetValueType().HasHadStringTag() ||
        !srcReg2->GetValueType().HasHadStringTag())
    {
        return false;
    }

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *labelBranchFail = nullptr;
    IR::LabelInstr *labelBranchSuccess = nullptr;

    bool isEqual = false;
    bool isStrict = false;
    bool isBranch = true;
    bool isCmNegOp = false;

    switch (instr->m_opcode)
    {
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
        isStrict = true;
        // fall-through: strict variants share the equality-branch setup
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
        // Equality branch: the taken edge is "equal", the fallthrough block
        // (inserted after the branch) is "not equal".
        labelBranchFail = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        labelBranchSuccess = instr->AsBranchInstr()->GetTarget();
        instr->InsertAfter(labelBranchFail);
        isEqual = true;
        break;

    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
        isStrict = true;
        // fall-through
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrNotEq_A:
        // Inequality branch: the taken edge is "not equal" (failure of the
        // string-equality check), fallthrough is "equal".
        labelBranchSuccess = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        labelBranchFail = instr->AsBranchInstr()->GetTarget();
        instr->InsertAfter(labelBranchSuccess);
        isEqual = false;
        break;

    case Js::OpCode::CmSrEq_A:
        isStrict = true;
        // fall-through
    case Js::OpCode::CmEq_A:
        isEqual = true;
        isBranch = false;
        break;

    case Js::OpCode::CmSrNeq_A:
        isStrict = true;
        // fall-through
    case Js::OpCode::CmNeq_A:
        isEqual = false;
        isBranch = false;
        isCmNegOp = true;
        break;

    default:
        Assume(UNREACHED);
    }

    // Compare (Cm*) forms materialize a boolean result, so they need their
    // own success/failure join points (created below, inserted later).
    if (!isBranch)
    {
        labelBranchSuccess = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        labelBranchFail = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    }

    GenerateFastStringCheck(instr, srcReg1, srcReg2, isEqual, isStrict, labelHelper, labelBranchSuccess, labelBranchFail);

    IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    if (!isBranch)
    {
        // Load true/false into dst (swapped for the negated compares) and
        // jump past the helper block.
        const LibraryValue successValueType = !isCmNegOp ? LibraryValue::ValueTrue : LibraryValue::ValueFalse;
        const LibraryValue failureValueType = !isCmNegOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue;

        instr->InsertBefore(labelBranchSuccess);
        InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, successValueType), instr);
        InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        instr->InsertBefore(labelBranchFail);
        InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, failureValueType), instr);
        InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
    }

    instr->InsertBefore(labelHelper);
    instr->InsertAfter(labelFallthrough);

#if DBG
    // The fast-path for strings assumes the case where 2 strings are equal is rare, and marks that path as 'helper'.
    // This breaks the helper label dbchecks as it can result in non-helper blocks be reachable only from helper blocks.
    // Use m_isHelperToNonHelperBranch and m_noHelperAssert to fix this.
    IR::Instr *blockEndInstr;

    if (isEqual)
    {
        blockEndInstr = labelHelper->GetNextBranchOrLabel();
    }
    else
    {
        blockEndInstr = instr->GetNextBranchOrLabel();
    }

    if (blockEndInstr->IsBranchInstr())
    {
        blockEndInstr->AsBranchInstr()->m_isHelperToNonHelperBranch = true;
    }
    labelFallthrough->m_noHelperAssert = true;
#endif

    return true;
}
// Emits the inline string-equality check shared by the Br* and Cm* string
// fast paths. On the emitted path control transfers to labelBranchSuccess
// when the strings compare equal, to labelBranchFail when they are provably
// unequal, and to labelHelper when the inline check cannot decide (src1 not a
// string, non-strict compare with non-string src2, or either string not yet
// flattened). All instructions are inserted before 'instr'. Always returns
// true.
bool
Lowerer::GenerateFastStringCheck(IR::Instr *instr, IR::RegOpnd *srcReg1, IR::RegOpnd *srcReg2, bool isEqual, bool isStrict, IR::LabelInstr *labelHelper, IR::LabelInstr *labelBranchSuccess, IR::LabelInstr *labelBranchFail)
{
    Assert(instr->m_opcode == Js::OpCode::BrSrEq_A ||
        instr->m_opcode == Js::OpCode::BrSrNeq_A ||
        instr->m_opcode == Js::OpCode::BrEq_A ||
        instr->m_opcode == Js::OpCode::BrNeq_A ||
        instr->m_opcode == Js::OpCode::BrSrNotEq_A ||
        instr->m_opcode == Js::OpCode::BrSrNotNeq_A ||
        instr->m_opcode == Js::OpCode::BrNotEq_A ||
        instr->m_opcode == Js::OpCode::BrNotNeq_A ||
        instr->m_opcode == Js::OpCode::CmEq_A ||
        instr->m_opcode == Js::OpCode::CmNeq_A ||
        instr->m_opcode == Js::OpCode::CmSrEq_A ||
        instr->m_opcode == Js::OpCode::CmSrNeq_A);

    // Outline of the emitted fast path:
    //
    // if src1 is not a string
    //   generate object test, if not object jump to $helper
    //   compare type against string type, if not string jump to $helper
    //
    // if strict, generate the same string test for src2 and jump to $failure on
    //   mismatch (a string can only strictly equal a string)
    // else (non-strict) generate the string test for src2 and jump to $helper
    //   on mismatch (coercion may still make them equal)
    //
    // if src1 and src2 are the same object pointer goto $success
    //
    // compare charLength of src1 and src2; if not equal goto $failure
    //
    // if src1 or src2 is not a flat string (null m_pszValue) jump to $helper
    //
    // if the first characters of src1 and src2 differ goto $failure
    //
    // eax = wmemcmp(src1 buffer, src2 buffer, charLength)
    //   (wmemcmp counts wchar_t units, so charLength is passed as-is)
    //
    // test eax; if zero jump to $success else to $failure
    //
    // Generated sequence:
    // GenerateObjectTest(src1);
    // CMP srcReg1, srcReg2
    // JEQ $success
    // MOV s1, [srcReg1 + offset(Type)]
    // CMP type, static_string_type
    // JNE $helper
    // GenerateObjectTest(src2);
    // MOV s2, [srcReg2 + offset(Type)]
    // CMP type, static_string_type
    // JNE $fail ; if src1 is string but not src2, src1 !== src2 if isStrict
    // MOV s3, [srcReg1,offset(m_charLength)]
    // CMP [srcReg2,offset(m_charLength)], s3
    // JNE $fail <--- length check done
    // MOV s4, [srcReg1,offset(m_pszValue)]
    // CMP s4, 0
    // JEQ $helper
    // MOV s5, [srcReg2,offset(m_pszValue)]
    // CMP s5, 0
    // JEQ $helper
    // MOV s6,[s4]
    // CMP [s5], s6 -First character comparison
    // JNE $fail
    // eax = wmemcmp(src1String, src2String, length)
    // TEST eax, eax
    // JEQ $success
    // JMP $fail
    IR::Instr* instrInsert = instr;

    GenerateStringTest(srcReg1, instrInsert, labelHelper);

    if (srcReg1->IsEqual(srcReg2))
    {
        // Same operand on both sides: trivially equal.
        InsertBranch(Js::OpCode::Br, labelBranchSuccess, instrInsert);
#if DBG
        if (instr->IsBranchInstr())
        {
            // we might have other cases on helper path which will generate branch to the target
            instr->AsBranchInstr()->GetTarget()->m_noHelperAssert = true;
        }
#endif
        return true;
    }

    // CMP srcReg1, srcReg2 - Ptr comparison
    // JEQ $branchSuccess
    InsertCompareBranch(srcReg1, srcReg2, Js::OpCode::BrEq_A, labelBranchSuccess, instrInsert);

    if (isStrict)
    {
        // Strict: src1 is known to be a string here, so a non-string src2
        // can never be strictly equal — fail directly.
        GenerateStringTest(srcReg2, instrInsert, labelBranchFail);
    }
    else
    {
        // Non-strict: a non-string src2 may still coerce to an equal value;
        // defer to the helper.
        GenerateStringTest(srcReg2, instrInsert, labelHelper);
    }

    // MOV s3, [srcReg1,offset(m_charLength)]
    // CMP [srcReg2,offset(m_charLength)], s3
    // JNE $branchfail
    IR::RegOpnd * src1LengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(src1LengthOpnd, IR::IndirOpnd::New(srcReg1, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func), instrInsert);
    InsertCompareBranch(IR::IndirOpnd::New(srcReg2, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func), src1LengthOpnd, Js::OpCode::BrNeq_A, labelBranchFail, instrInsert);

    // MOV s4, [src1,offset(m_pszValue)]
    // CMP s4, 0
    // JEQ $helper -- a null m_pszValue means the string is not flattened yet
    // MOV s5, [src2,offset(m_pszValue)]
    // CMP s5, 0
    // JEQ $helper
    IR::RegOpnd * src1FlatString = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(src1FlatString, IR::IndirOpnd::New(srcReg1, Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, m_func), instrInsert);
    InsertCompareBranch(src1FlatString, IR::IntConstOpnd::New(0, TyUint32, m_func), Js::OpCode::BrEq_A, labelHelper, instrInsert);

    IR::RegOpnd * src2FlatString = IR::RegOpnd::New(TyMachPtr, m_func);
    InsertMove(src2FlatString, IR::IndirOpnd::New(srcReg2, Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, m_func), instrInsert);
    InsertCompareBranch(src2FlatString, IR::IntConstOpnd::New(0, TyUint32, m_func), Js::OpCode::BrEq_A, labelHelper, instrInsert);

    // MOV s6,[s4]
    // CMP [s5], s6 - first character comparison as a cheap early-out
    // JNE $branchfail
    IR::RegOpnd * src1FirstChar = IR::RegOpnd::New(TyUint16, m_func);
    InsertMove(src1FirstChar, IR::IndirOpnd::New(src1FlatString, 0, TyUint16, m_func), instrInsert);
    InsertCompareBranch(IR::IndirOpnd::New(src2FlatString, 0, TyUint16, m_func), src1FirstChar, Js::OpCode::BrNeq_A, labelBranchFail, instrInsert);

    // eax = wmemcmp(src1String, src2String, length)
    // Arguments are loaded last-first: count, then src1 buffer, then src2 buffer.
    m_lowererMD.LoadHelperArgument(instr, src1LengthOpnd);
    m_lowererMD.LoadHelperArgument(instr, src1FlatString);
    m_lowererMD.LoadHelperArgument(instr, src2FlatString);

    IR::RegOpnd *dstOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
    IR::Instr *instrCall = IR::Instr::New(Js::OpCode::Call, dstOpnd, IR::HelperCallOpnd::New(IR::HelperWMemCmp, m_func), m_func);
    instr->InsertBefore(instrCall);
    m_lowererMD.LowerCall(instrCall, 3);

    // TEST eax, eax
    // JEQ $success
    InsertTestBranch(dstOpnd, dstOpnd, Js::OpCode::BrEq_A, labelBranchSuccess, instrInsert);
    // JMP $fail
    InsertBranch(Js::OpCode::Br, labelBranchFail, instrInsert);

    return true;
}
  20439. bool Lowerer::GenerateFastBrBool(IR::BranchInstr *const instr)
  20440. {
  20441. Assert(instr);
  20442. Assert(instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A);
  20443. Func *const func = instr->m_func;
  20444. if(!instr->GetSrc1()->IsRegOpnd())
  20445. {
  20446. LowererMD::ChangeToAssign(instr->HoistSrc1(Js::OpCode::Ld_A));
  20447. }
  20448. IR::RegOpnd *const src = instr->GetSrc1()->Copy(func)->AsRegOpnd();
  20449. const IR::AutoReuseOpnd autoReuseSrc(src, func);
  20450. const ValueType srcOriginalValueType(src->GetValueType());
  20451. ValueType srcValueType(srcOriginalValueType);
  20452. IR::LabelInstr *const labelTarget = instr->GetTarget();
  20453. IR::LabelInstr *const labelFallthrough = instr->GetOrCreateContinueLabel();
  20454. if(labelTarget == labelFallthrough)
  20455. {
  20456. // Nothing to do
  20457. instr->Remove();
  20458. return false;
  20459. }
  20460. const bool branchOnFalse = instr->m_opcode == Js::OpCode::BrFalse_A;
  20461. IR::LabelInstr *const labelFalse = branchOnFalse ? labelTarget : labelFallthrough;
  20462. IR::LabelInstr *const labelTrue = branchOnFalse ? labelFallthrough : labelTarget;
  20463. const Js::OpCode compareWithFalseBranchToTargetOpCode = branchOnFalse ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A;
  20464. IR::LabelInstr *lastLabelBeforeHelper = nullptr;
  20465. /// Typespec'd float
  20466. if (instr->GetSrc1()->GetType() == TyFloat64)
  20467. {
  20468. InsertFloatCheckForZeroOrNanBranch(instr->GetSrc1(), branchOnFalse, labelTarget, labelFallthrough, instr);
  20469. Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
  20470. instr->Remove();
  20471. return false;
  20472. }
  20473. ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  20474. // Null fast path
  20475. if (srcValueType.HasBeenNull() || srcOriginalValueType.IsUninitialized())
  20476. {
  20477. if(srcValueType.IsNull())
  20478. {
  20479. // jmp $false
  20480. InsertBranch(Js::OpCode::Br, labelFalse, instr);
  20481. // Skip lowering call to helper
  20482. Assert(instr->m_prev->IsBranchInstr());
  20483. instr->Remove();
  20484. return false;
  20485. }
  20486. // cmp src, null
  20487. // je $false
  20488. InsertCompareBranch(
  20489. src,
  20490. LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
  20491. Js::OpCode::BrEq_A,
  20492. labelFalse,
  20493. instr);
  20494. src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Null));
  20495. }
  20496. ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  20497. // Undefined fast path
  20498. if(srcValueType.HasBeenUndefined() || srcOriginalValueType.IsUninitialized())
  20499. {
  20500. if(srcValueType.IsUndefined())
  20501. {
  20502. // jmp $false
  20503. InsertBranch(Js::OpCode::Br, labelFalse, instr);
  20504. // Skip lowering call to helper
  20505. Assert(instr->m_prev->IsBranchInstr());
  20506. instr->Remove();
  20507. return false;
  20508. }
  20509. // cmp src, undefined
  20510. // je $false
  20511. InsertCompareBranch(
  20512. src,
  20513. LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
  20514. Js::OpCode::BrEq_A,
  20515. labelFalse,
  20516. instr);
  20517. src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Undefined));
  20518. }
  20519. ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  20520. // Tagged int fast path
  20521. const bool isNotInt = src->IsNotInt();
  20522. bool checkedForTaggedInt = isNotInt;
  20523. if( (
  20524. srcValueType.HasBeenInt() ||
  20525. srcValueType.HasBeenUnknownNumber() ||
  20526. srcOriginalValueType.IsUninitialized()
  20527. ) && !isNotInt)
  20528. {
  20529. checkedForTaggedInt = true;
  20530. IR::LabelInstr *notTaggedIntLabel = nullptr;
  20531. if(!src->IsTaggedInt())
  20532. {
  20533. // test src, 1
  20534. // jz $notTaggedInt
  20535. notTaggedIntLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  20536. m_lowererMD.GenerateSmIntTest(src, instr, notTaggedIntLabel);
  20537. }
  20538. // cmp src, tag(0)
  20539. // je/jne $target
  20540. m_lowererMD.GenerateTaggedZeroTest(src, instr);
  20541. Lowerer::InsertBranch(compareWithFalseBranchToTargetOpCode, labelTarget, instr);
  20542. if(src->IsTaggedInt())
  20543. {
  20544. // Skip lowering call to helper
  20545. Assert(instr->m_prev->IsBranchInstr());
  20546. instr->Remove();
  20547. return false;
  20548. }
  20549. // jmp $fallthrough
  20550. Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
  20551. // $notTaggedInt:
  20552. if(notTaggedIntLabel)
  20553. {
  20554. instr->InsertBefore(notTaggedIntLabel);
  20555. lastLabelBeforeHelper = notTaggedIntLabel;
  20556. }
  20557. }
  20558. ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  20559. // Float fast path
  20560. bool generateFloatTest = srcValueType.IsLikelyFloat();
  20561. #ifdef _M_IX86
  20562. if (!AutoSystemInfo::Data.SSE2Available())
  20563. {
  20564. generateFloatTest = false;
  20565. }
  20566. #endif
  20567. bool checkedForTaggedFloat =
  20568. #if FLOATVAR
  20569. srcValueType.IsNotNumber();
  20570. #else
  20571. true; // there are no tagged floats, indicate that it has been checked
  20572. #endif
  20573. if (generateFloatTest)
  20574. {
  20575. // if(srcValueType.IsFloat()) // skip tagged int check?
  20576. //
  20577. // ValueType::IsFloat() does not guarantee that the storage is not in a tagged int.
  20578. // The tagged int check is necessary. It does, however, guarantee that as long as the value is not
  20579. // stored in a tagged int, that it is definitely stored in a JavascriptNumber/TaggedFloat.
  20580. IR::LabelInstr *const notFloatLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  20581. if(!checkedForTaggedInt)
  20582. {
  20583. checkedForTaggedInt = true;
  20584. m_lowererMD.GenerateSmIntTest(src, instr, notFloatLabel, nullptr, true);
  20585. }
  20586. // cmp [src], JavascriptNumber::vtable
  20587. // jne $notFloat
  20588. #if FLOATVAR
  20589. checkedForTaggedFloat = true;
  20590. IR::RegOpnd *const floatOpnd = m_lowererMD.CheckFloatAndUntag(src, instr, notFloatLabel);
  20591. #else
  20592. m_lowererMD.GenerateFloatTest(src, instr, notFloatLabel);
  20593. IR::IndirOpnd *const floatOpnd = IR::IndirOpnd::New(src, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, func);
  20594. #endif
  20595. // cmp src, 0.0
  20596. // jp $false
  20597. // je/jne $target
  20598. // jmp $fallthrough
  20599. InsertFloatCheckForZeroOrNanBranch(floatOpnd, branchOnFalse, labelTarget, labelFallthrough, instr);
  20600. Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
  20601. // $notFloat:
  20602. instr->InsertBefore(notFloatLabel);
  20603. lastLabelBeforeHelper = notFloatLabel;
  20604. src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::AnyNumber));
  20605. }
  20606. IR::LabelInstr *labelHelper = nullptr;
  20607. bool _didObjectTest = checkedForTaggedInt && checkedForTaggedFloat;
  20608. const auto EnsureObjectTest = [&]()
  20609. {
  20610. if(_didObjectTest)
  20611. {
  20612. return;
  20613. }
  20614. if(!labelHelper)
  20615. {
  20616. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  20617. }
  20618. m_lowererMD.GenerateObjectTest(src, instr, labelHelper);
  20619. _didObjectTest = true;
  20620. };
  20621. ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  20622. // Boolean fast path
  20623. if (srcValueType.HasBeenBoolean() || srcOriginalValueType.IsUninitialized())
  20624. {
  20625. IR::LabelInstr *notBooleanLabel = nullptr;
  20626. if (!srcValueType.IsBoolean())
  20627. {
  20628. EnsureObjectTest();
  20629. // cmp [src], JavascriptBoolean::vtable
  20630. // jne $notBoolean
  20631. notBooleanLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  20632. InsertCompareBranch(
  20633. IR::IndirOpnd::New(src, 0, TyMachPtr, func),
  20634. LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptBoolean),
  20635. Js::OpCode::BrNeq_A,
  20636. notBooleanLabel,
  20637. instr);
  20638. }
  20639. // cmp src, false
  20640. // je/jne $target
  20641. InsertCompareBranch(
  20642. src,
  20643. LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
  20644. compareWithFalseBranchToTargetOpCode,
  20645. labelTarget,
  20646. instr);
  20647. if (srcValueType.IsBoolean())
  20648. {
  20649. // Skip lowering call to helper
  20650. Assert(!labelHelper);
  20651. Assert(instr->m_prev->IsBranchInstr());
  20652. instr->Remove();
  20653. return false;
  20654. }
  20655. // jmp $fallthrough
  20656. Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
  20657. if (notBooleanLabel)
  20658. {
  20659. instr->InsertBefore(notBooleanLabel);
  20660. lastLabelBeforeHelper = notBooleanLabel;
  20661. }
  20662. src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Boolean));
  20663. }
  20664. ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  20665. // String fast path
  20666. if(srcValueType.HasBeenString())
  20667. {
  20668. IR::LabelInstr *notStringLabel = nullptr;
  20669. if(!srcValueType.IsString())
  20670. {
  20671. EnsureObjectTest();
  20672. notStringLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  20673. GenerateStringTest(src, instr, notStringLabel, nullptr, false);
  20674. }
  20675. // cmp [src + offset(length)], 0
  20676. // jeq/jne $target
  20677. InsertCompareBranch(
  20678. IR::IndirOpnd::New(src, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func),
  20679. IR::IntConstOpnd::New(0, TyUint32, func, true),
  20680. compareWithFalseBranchToTargetOpCode,
  20681. labelTarget,
  20682. instr);
  20683. if(srcValueType.IsString())
  20684. {
  20685. // Skip lowering call to helper
  20686. Assert(!labelHelper);
  20687. Assert(instr->m_prev->IsBranchInstr());
  20688. instr->Remove();
  20689. return false;
  20690. }
  20691. // jmp $fallthrough
  20692. Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
  20693. if(notStringLabel)
  20694. {
  20695. instr->InsertBefore(notStringLabel);
  20696. lastLabelBeforeHelper = notStringLabel;
  20697. }
  20698. src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::String));
  20699. }
  20700. ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  20701. // Object fast path
  20702. if (srcValueType.IsLikelyObject())
  20703. {
  20704. if(srcValueType.IsObject())
  20705. {
  20706. if(srcValueType.GetObjectType() > ObjectType::Object)
  20707. {
  20708. // Specific object types that are tracked are equivalent to 'true'
  20709. // jmp $true
  20710. InsertBranch(Js::OpCode::Br, labelTrue, instr);
  20711. // Skip lowering call to helper
  20712. Assert(!labelHelper);
  20713. Assert(instr->m_prev->IsBranchInstr());
  20714. instr->Remove();
  20715. return false;
  20716. }
  20717. }
  20718. else
  20719. {
  20720. EnsureObjectTest();
  20721. }
  20722. // mov srcType, [src + offset(type)] -- load type
  20723. IR::RegOpnd *const srcType = IR::RegOpnd::New(TyMachPtr, func);
  20724. const IR::AutoReuseOpnd autoReuseR1(srcType, func);
  20725. InsertMove(srcType, IR::IndirOpnd::New(src, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func), instr);
  20726. // test [srcType + offset(flags)], TypeFlagMask_IsFalsy -- check if falsy
  20727. // jnz $false
  20728. InsertTestBranch(
  20729. IR::IndirOpnd::New(srcType, Js::Type::GetOffsetOfFlags(), TyUint8, func),
  20730. IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyUint8, func),
  20731. Js::OpCode::BrNeq_A,
  20732. labelFalse,
  20733. instr);
  20734. // cmp [srcType + offset(typeId)], TypeIds_LastJavascriptPrimitiveType -- check base TypeIds_LastJavascriptPrimitiveType
  20735. // ja $true
  20736. InsertCompareBranch(
  20737. IR::IndirOpnd::New(srcType, Js::Type::GetOffsetOfTypeId(), TyInt32, func),
  20738. IR::IntConstOpnd::New(Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, func),
  20739. Js::OpCode::BrGt_A,
  20740. true /* isUnsigned */,
  20741. labelTrue,
  20742. instr);
  20743. if(!labelHelper)
  20744. {
  20745. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  20746. }
  20747. lastLabelBeforeHelper = nullptr;
  20748. }
  20749. ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  20750. // Helper call
  20751. // $helper:
  20752. if(lastLabelBeforeHelper)
  20753. {
  20754. Assert(instr->m_prev == lastLabelBeforeHelper);
  20755. lastLabelBeforeHelper->isOpHelper = true;
  20756. }
  20757. if (labelHelper)
  20758. {
  20759. Assert(labelHelper->isOpHelper);
  20760. instr->InsertBefore(labelHelper);
  20761. }
  20762. // call JavascriptConversion::ToBoolean
  20763. IR::RegOpnd *const toBoolDst = IR::RegOpnd::New(TyInt32, func);
  20764. const IR::AutoReuseOpnd autoReuseToBoolDst(toBoolDst, func);
  20765. IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, toBoolDst, instr->GetSrc1(), func);
  20766. instr->InsertBefore(callInstr);
  20767. LowerUnaryHelperMem(callInstr, IR::HelperConv_ToBoolean);
  20768. // test eax, eax
  20769. InsertTest(toBoolDst, toBoolDst, instr);
  20770. // je/jne $target
  20771. Assert(instr->IsBranchInstr());
  20772. instr->FreeSrc1();
  20773. instr->m_opcode = LowererMD::MDBranchOpcode(compareWithFalseBranchToTargetOpCode);
  20774. Assert(instr->AsBranchInstr()->GetTarget() == labelTarget);
  20775. // Skip lowering another call to helper
  20776. return false;
  20777. }
  20778. // Helper method used in LowerMD by all platforms.
  20779. // Creates HelperCallOpnd or DiagHelperCallOpnd, based on helperMethod and state.
  20780. // static
  20781. IR::HelperCallOpnd*
  20782. Lowerer::CreateHelperCallOpnd(IR::JnHelperMethod helperMethod, int helperArgCount, Func* func)
  20783. {
  20784. Assert(func);
  20785. IR::HelperCallOpnd* helperCallOpnd;
  20786. if (CONFIG_FLAG(EnableContinueAfterExceptionWrappersForHelpers) &&
  20787. func->IsJitInDebugMode() &&
  20788. HelperMethodAttributes::CanThrow(helperMethod))
  20789. {
  20790. // Create DiagHelperCallOpnd to indicate that it's needed to wrap original helper with try-catch wrapper,
  20791. // so that we can ignore exception and bailout to next stmt in debugger.
  20792. // For details, see: Lib\Runtime\Debug\DiagHelperMethodWrapper.{h,cpp}.
  20793. helperCallOpnd = IR::DiagHelperCallOpnd::New(helperMethod, func, helperArgCount);
  20794. }
  20795. else
  20796. {
  20797. helperCallOpnd = IR::HelperCallOpnd::New(helperMethod, func);
  20798. }
  20799. return helperCallOpnd;
  20800. }
// Attempts to fast-path a (Br/Cm)(Sr)(N(ot))eq_A whose operand(s) come from a Typeof.
// Two patterns are recognized:
//   1) One source is the dst of a preceding Typeof and the other is a single-def
//      string constant naming a standard typeof result ("undefined", "object",
//      "boolean", "number", "string", "function") -> lower to inline typeId checks
//      via GenerateFastBrTypeOf / GenerateFastCmTypeOf.
//   2) Both sources are single-def Typeof results -> compare the two result Vars
//      directly (presumably typeof yields a shared string per type so pointer
//      equality suffices -- that is what this lowering relies on; verify in runtime).
// Returns true when a pattern was matched and lowered. *prev receives the instruction
// lowering should resume from; *pfNoLower tells the caller whether the generic helper
// lowering can be skipped.
bool
Lowerer::TryGenerateFastBrOrCmTypeOf(IR::Instr *instr, IR::Instr **prev, bool isNeqOp, bool *pfNoLower)
{
    Assert(prev);
    // Only equality-style branches/compares (strict or loose, plus the Not* forms) are expected.
    Assert(instr->m_opcode == Js::OpCode::BrSrEq_A ||
        instr->m_opcode == Js::OpCode::BrSrNeq_A ||
        instr->m_opcode == Js::OpCode::BrSrNotEq_A ||
        instr->m_opcode == Js::OpCode::BrSrNotNeq_A ||
        instr->m_opcode == Js::OpCode::CmSrEq_A ||
        instr->m_opcode == Js::OpCode::CmSrNeq_A ||
        instr->m_opcode == Js::OpCode::BrEq_A ||
        instr->m_opcode == Js::OpCode::BrNeq_A ||
        instr->m_opcode == Js::OpCode::BrNotEq_A ||
        instr->m_opcode == Js::OpCode::BrNotNeq_A ||
        instr->m_opcode == Js::OpCode::CmEq_A ||
        instr->m_opcode == Js::OpCode::CmNeq_A);

    //
    // instr         - (Br/Cm)(Sr)(N(ot))eq_A
    // instr->m_prev - typeOf
    //
    IR::Instr *instrLd = instr->GetPrevRealInstrOrLabel();
    bool skippedLoads = false;

    // Skip intermediate Ld_A which might be inserted by flow graph peeps.
    // Only flow-graph-peep temporaries are safe to look through; anything else
    // (or a load carrying bailout info) defeats the fast path.
    while (instrLd && instrLd->m_opcode == Js::OpCode::Ld_A )
    {
        if (!(instrLd->GetDst()->IsRegOpnd() && instrLd->GetDst()->AsRegOpnd()->m_fgPeepTmp))
        {
            return false;
        }
        if (instrLd->HasBailOutInfo())
        {
            return false;
        }
        instrLd = instrLd->GetPrevRealInstrOrLabel();
        skippedLoads = true;
    }

    IR::Instr *typeOf = instrLd;

    IR::RegOpnd *instrSrc1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
    IR::RegOpnd *instrSrc2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
    if (typeOf && (typeOf->m_opcode == Js::OpCode::Typeof))
    {
        IR::RegOpnd *typeOfDst = typeOf->GetDst()->IsRegOpnd() ? typeOf->GetDst()->AsRegOpnd() : nullptr;

        if (typeOfDst && instrSrc1 && instrSrc2)
        {
            // Determine which source is the Typeof result and which is the string constant.
            IR::RegOpnd *typeOpnd = nullptr;
            IR::RegOpnd *idOpnd = nullptr;
            if (instrSrc1->m_sym == typeOfDst->m_sym)
            {
                typeOpnd = instrSrc1;
                idOpnd = instrSrc2;
            }
            else if (instrSrc2->m_sym == typeOfDst->m_sym)
            {
                typeOpnd = instrSrc2;
                idOpnd = instrSrc1;
            }
            else
            {
                // Neither source turned out to be the typeOpnd
                return false;
            }

            // The Typeof result must be a temp dying here; otherwise its string value
            // is observable elsewhere and cannot be elided.
            if (!typeOpnd->m_isTempLastUse)
            {
                return false;
            }

            // The other operand must be a known single-def string constant.
            if (!(idOpnd->m_sym->m_isSingleDef && idOpnd->m_sym->m_isStrConst))
            {
                return false;
            }

            // The second argument to [Cm|Br]TypeOf is the typeid.
            IR::IntConstOpnd *typeIdOpnd = nullptr;

            Assert(idOpnd->m_sym->m_isSingleDef);
            Assert(idOpnd->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd());

            // We can't optimize non-javascript type strings.
            JITJavascriptString *typeNameJsString = JITJavascriptString::FromVar(idOpnd->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_localAddress);
            const char16 *typeName = typeNameJsString->GetString();

            Js::InternalString typeNameString(typeName, typeNameJsString->GetLength());
            // Map the type-name literal to the corresponding TypeId; any other literal
            // (e.g. "symbol", or a non-type string) falls out of the fast path.
            if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::UndefinedTypeNameString))
            {
                typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Undefined, TyInt32, instr->m_func);
            }
            else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::ObjectTypeNameString))
            {
                typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Object, TyInt32, instr->m_func);
            }
            else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::BooleanTypeNameString))
            {
                typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Boolean, TyInt32, instr->m_func);
            }
            else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::NumberTypeNameString))
            {
                typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Number, TyInt32, instr->m_func);
            }
            else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::StringTypeNameString))
            {
                typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_String, TyInt32, instr->m_func);
            }
            else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::FunctionTypeNameString))
            {
                typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, instr->m_func);
            }
            else
            {
                return false;
            }

            if (skippedLoads)
            {
                // Validate none of the dsts of the skipped Ld_A instructions overlaps
                // with the typeof src or dst; only then is it safe to move the Typeof
                // down next to the compare/branch.
                IR::Opnd* typeOfSrc = typeOf->GetSrc1();
                instrLd = typeOf->GetNextRealInstr();
                while (instrLd != instr)
                {
                    if (instrLd->GetDst()->IsEqual(typeOfDst) || instrLd->GetDst()->IsEqual(typeOfSrc))
                    {
                        return false;
                    }
                    instrLd = instrLd->GetNextRealInstr();
                }
                typeOf->Unlink();
                instr->InsertBefore(typeOf);
            }

            // The first argument to [Cm|Br]TypeOf is the first arg to the TypeOf instruction.
            IR::Opnd *objectOpnd = typeOf->GetSrc1();
            Assert(objectOpnd->IsRegOpnd());

            // Now emit this instruction and remove the ldstr and typeOf.
            *prev = typeOf->m_prev;
            *pfNoLower = false;
            if (instr->IsBranchInstr())
            {
                GenerateFastBrTypeOf(instr, objectOpnd->AsRegOpnd(), typeIdOpnd, typeOf, pfNoLower, isNeqOp);
            }
            else
            {
                GenerateFastCmTypeOf(instr, objectOpnd->AsRegOpnd(), typeIdOpnd, typeOf, pfNoLower, isNeqOp);
            }

            return true;
        }
    }

    // Pattern 2: `typeof o1 ==/!= typeof o2` -- both sources are single-def Typeof
    // results, so compare the result Vars directly.
    if (instrSrc1 && instrSrc1->GetStackSym()->IsSingleDef() && instrSrc2 && instrSrc2->GetStackSym()->IsSingleDef() &&
        instrSrc1->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::Typeof &&
        instrSrc2->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::Typeof)
    {
        *pfNoLower = true;
        if (instr->IsBranchInstr())
        {
            InsertCompareBranch(instrSrc1, instrSrc2, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, instr->AsBranchInstr()->GetTarget(), instr);
            instr->Remove();
        }
        else
        {
            if (instrSrc1->IsEqual(instrSrc2))
            {
                // Identical operand: the comparison is statically decided.
                InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, isNeqOp ? LibraryValue::ValueFalse : LibraryValue::ValueTrue), instr);
            }
            else
            {
                // t1 = typeof o1
                // t2 = typeof o2
                // dst = t1 == t2
                //      MOV dst, true
                //      CMP t1, t2
                // x86, amd64
                //      CMOVNE dst, false
                // arm
                //      BEQ $done
                //      MOV dst, false
                // $done

                // If dst aliases one of the sources, hoist that source into a fresh
                // register first so "MOV dst, true" does not clobber it.
                if (instr->GetDst()->IsEqual(instrSrc1))
                {
                    IR::Instr* hoistInstr = m_lowererMD.ChangeToAssign(instr->HoistSrc1(Js::OpCode::Ld_A));
                    instrSrc1 = hoistInstr->GetDst()->AsRegOpnd();
                }
                if (instr->GetDst()->IsEqual(instrSrc2))
                {
                    IR::Instr* hoistInstr = m_lowererMD.ChangeToAssign(instr->HoistSrc2(Js::OpCode::Ld_A));
                    instrSrc2 = hoistInstr->GetDst()->AsRegOpnd();
                }
                InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueTrue), instr);
#if defined(_M_ARM32_OR_ARM64)
                IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
                InsertCompareBranch(instrSrc1, instrSrc2, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, doneLabel, instr);
                InsertMove(instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
                instr->InsertBefore(doneLabel);
#else
                InsertCompare(instrSrc1, instrSrc2, instr);
                LowererMD::InsertCmovCC(isNeqOp ? Js::OpCode::CMOVE : Js::OpCode::CMOVNE, instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
#endif
            }
            instr->Remove();
        }
        return true;
    }

    return false;
}
  20995. void
  20996. Lowerer::GenerateFalsyObjectTest(IR::Instr * insertInstr, IR::RegOpnd * typeOpnd, IR::LabelInstr * falsyLabel)
  20997. {
  20998. IR::Opnd *flagsOpnd = IR::IndirOpnd::New(typeOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
  20999. InsertTestBranch(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), Js::OpCode::BrNeq_A, falsyLabel, insertInstr);
  21000. }
  21001. void
  21002. Lowerer::GenerateFalsyObjectTest(IR::Instr *insertInstr, IR::RegOpnd *typeOpnd, Js::TypeId typeIdToCheck, IR::LabelInstr* target, IR::LabelInstr* done, bool isNeqOp)
  21003. {
  21004. if (!this->m_func->GetThreadContextInfo()->CanBeFalsy(typeIdToCheck) && typeIdToCheck != Js::TypeIds_Undefined)
  21005. {
  21006. // Don't need the check for falsy, the typeId we are looking for doesn't care
  21007. return;
  21008. }
  21009. IR::Opnd *flagsOpnd = IR::IndirOpnd::New(typeOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
  21010. InsertTest(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), insertInstr);
  21011. if (typeIdToCheck == Js::TypeIds_Undefined)
  21012. {
  21013. //Falsy object returns true for undefined ((typeof falsyObj) == "undefined")
  21014. InsertBranch( Js::OpCode::BrNeq_A, true, isNeqOp ? done : target, insertInstr);
  21015. }
  21016. else
  21017. {
  21018. //Falsy object returns false for all other types ((typeof falsyObj) != "function")
  21019. InsertBranch( Js::OpCode::BrNeq_A, true, isNeqOp? target : done , insertInstr);
  21020. }
  21021. }
  21022. ///----------------------------------------------------------------------------
  21023. ///
  21024. /// LowererMD::GenerateFastBrTypeOf
  21025. ///
  21026. ///----------------------------------------------------------------------------
  21027. void
  21028. Lowerer::GenerateFastBrTypeOf(IR::Instr *branch, IR::RegOpnd *object, IR::IntConstOpnd *typeIdOpnd, IR::Instr *typeOf, bool *pfNoLower, bool isNeqOp)
  21029. {
  21030. Js::TypeId typeId = static_cast<Js::TypeId>(typeIdOpnd->GetValue());
  21031. IR::LabelInstr *target = branch->AsBranchInstr()->GetTarget();
  21032. IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
  21033. IR::LabelInstr *helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  21034. IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
  21035. switch(branch->m_opcode)
  21036. {
  21037. case Js::OpCode::BrSrNeq_A:
  21038. case Js::OpCode::BrNeq_A:
  21039. case Js::OpCode::BrSrNotEq_A:
  21040. case Js::OpCode::BrNotEq_A:
  21041. case Js::OpCode::BrSrEq_A:
  21042. case Js::OpCode::BrEq_A:
  21043. case Js::OpCode::BrSrNotNeq_A:
  21044. case Js::OpCode::BrNotNeq_A:
  21045. break;
  21046. default:
  21047. Assert(UNREACHED);
  21048. __assume(UNREACHED);
  21049. }
  21050. // JNE/BNE (typeId == Js::TypeIds_Number) ? $target : $done
  21051. IR::LabelInstr *label = (typeId == Js::TypeIds_Number) ? target : done;
  21052. if (isNeqOp)
  21053. label = (label == target) ? done : target;
  21054. m_lowererMD.GenerateObjectTest(object, branch, label);
  21055. // MOV typeRegOpnd, [object + offset(Type)]
  21056. InsertMove(typeRegOpnd,
  21057. IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
  21058. branch);
  21059. GenerateFalsyObjectTest(branch, typeRegOpnd, typeId, target, done, isNeqOp);
  21060. // MOV objTypeId, [typeRegOpnd + offset(TypeId)]
  21061. IR::RegOpnd* objTypeIdOpnd = IR::RegOpnd::New(TyInt32, m_func);
  21062. InsertMove(objTypeIdOpnd,
  21063. IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
  21064. branch);
  21065. // CMP objTypeId, typeId
  21066. // JEQ/JGE $done
  21067. if (typeId == Js::TypeIds_Object)
  21068. {
  21069. InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrGe_A, isNeqOp ? done : target, branch);
  21070. }
  21071. else if (typeId == Js::TypeIds_Function)
  21072. {
  21073. InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrEq_A, isNeqOp ? done : target, branch);
  21074. }
  21075. else if (typeId == Js::TypeIds_Number)
  21076. {
  21077. //Check for the typeIds between TypeIds_FirstNumberType <= typeIds <= TypeIds_LastNumberType
  21078. InsertSub(false, objTypeIdOpnd, objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_FirstNumberType, TyInt32, branch->m_func),branch);
  21079. InsertCompare(objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_LastNumberType - Js::TypeIds_FirstNumberType, TyInt32, branch->m_func), branch);
  21080. InsertBranch(isNeqOp ? Js::OpCode::BrGt_A : Js::OpCode::BrLe_A, true, target, branch);
  21081. }
  21082. else
  21083. {
  21084. InsertCompare(objTypeIdOpnd, typeIdOpnd, branch);
  21085. InsertBranch(isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, target, branch);
  21086. }
  21087. // This could be 'null' which, for historical reasons, has a TypeId < TypeIds_Object but
  21088. // is still a Javascript "object."
  21089. if (typeId == Js::TypeIds_Object)
  21090. {
  21091. // CMP object, 0xXXXXXXXX
  21092. // JEQ isNeqOp ? $done : $target
  21093. InsertCompareBranch(object,
  21094. LoadLibraryValueOpnd(branch, LibraryValue::ValueNull),
  21095. Js::OpCode::BrEq_A,
  21096. isNeqOp ? done : target,
  21097. branch);
  21098. }
  21099. branch->InsertAfter(done); // Get this label first
  21100. // "object" or "function" may come from HostDispatch. Needs helper if that's the case.
  21101. if (typeId == Js::TypeIds_Object || typeId == Js::TypeIds_Function)
  21102. {
  21103. // CMP objTypeId, TypeIds_Proxy. typeof proxy could be 'object' or 'function' depends on the target
  21104. // JNE isNeqOp ? $target : $done
  21105. InsertCompareBranch(objTypeIdOpnd,
  21106. IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
  21107. Js::OpCode::BrEq_A,
  21108. helper,
  21109. branch);
  21110. // CMP objTypeId, TypeIds_HostDispatch
  21111. // JNE isNeqOp ? $target : $done
  21112. InsertCompareBranch(objTypeIdOpnd,
  21113. IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
  21114. Js::OpCode::BrNeq_A,
  21115. isNeqOp ? target : done,
  21116. branch);
  21117. // Now emit Typeof and lower it like we would've for the helper call.
  21118. {
  21119. branch->InsertBefore(helper);
  21120. typeOf->Unlink();
  21121. branch->InsertBefore(typeOf);
  21122. if (branch->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(branch->GetBailOutKind()) &&
  21123. (!typeOf->HasBailOutInfo() || !BailOutInfo::IsBailOutOnImplicitCalls(typeOf->GetBailOutKind())))
  21124. {
  21125. typeOf = AddBailoutToHelperCallInstr(typeOf, branch->GetBailOutInfo(), branch->GetBailOutKind(), branch);
  21126. }
  21127. LowerUnaryHelperMem(typeOf, IR::HelperOp_Typeof);
  21128. }
  21129. }
  21130. else // Other primitive types don't need helper
  21131. {
  21132. typeOf->Remove();
  21133. branch->Remove();
  21134. *pfNoLower = true;
  21135. }
  21136. // $done:
  21137. }
  21138. ///----------------------------------------------------------------------------
  21139. ///
  21140. /// LowererMD::GenerateFastCmTypeOf
  21141. ///
  21142. ///----------------------------------------------------------------------------
  21143. void
  21144. Lowerer::GenerateFastCmTypeOf(IR::Instr *compare, IR::RegOpnd *object, IR::IntConstOpnd *typeIdOpnd, IR::Instr *typeOf, bool *pfNoLower, bool isNeqOp)
  21145. {
  21146. Assert(compare->m_opcode == Js::OpCode::CmSrEq_A ||
  21147. compare->m_opcode == Js::OpCode::CmEq_A ||
  21148. compare->m_opcode == Js::OpCode::CmSrNeq_A ||
  21149. compare->m_opcode == Js::OpCode::CmNeq_A);
  21150. Js::TypeId typeId = static_cast<Js::TypeId>(typeIdOpnd->GetValue());
  21151. IR::LabelInstr *movFalse = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
  21152. IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
  21153. IR::LabelInstr *helper= IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  21154. IR::RegOpnd *dst = compare->GetDst()->IsRegOpnd() ? compare->GetDst()->AsRegOpnd() : nullptr;
  21155. IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
  21156. Assert(dst);
  21157. if (dst->IsEqual(object))
  21158. {
  21159. //dst same as the src of typeof. As we need to move true to dst first we need to save the src to a new opnd
  21160. IR::RegOpnd *newObject = IR::RegOpnd::New(object->GetType(), m_func);
  21161. InsertMove(newObject, object, compare); //Save src
  21162. object = newObject;
  21163. }
  21164. // mov dst, 'true'
  21165. InsertMove(dst,
  21166. LoadLibraryValueOpnd(compare, LibraryValue::ValueTrue),
  21167. compare);
  21168. // TEST object, 1
  21169. // JNE (typeId == Js::TypeIds_Number) ? $done : $movFalse
  21170. IR::LabelInstr *target = (typeId == Js::TypeIds_Number) ? done : movFalse;
  21171. if (isNeqOp)
  21172. {
  21173. target = (target == done) ? movFalse : done;
  21174. }
  21175. m_lowererMD.GenerateObjectTest(object, compare, target);
  21176. // MOV typeRegOpnd, [object + offset(Type)]
  21177. InsertMove(typeRegOpnd,
  21178. IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
  21179. compare);
  21180. GenerateFalsyObjectTest(compare, typeRegOpnd, typeId, done, movFalse, isNeqOp);
  21181. // MOV objTypeId, [typeRegOpnd + offset(TypeId)]
  21182. IR::RegOpnd* objTypeIdOpnd = IR::RegOpnd::New(TyInt32, m_func);
  21183. InsertMove(objTypeIdOpnd,
  21184. IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
  21185. compare);
  21186. // CMP objTypeId, typeId
  21187. // JEQ/JGE $done
  21188. if (typeId == Js::TypeIds_Object)
  21189. {
  21190. InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrGe_A, isNeqOp ? movFalse : done, compare);
  21191. }
  21192. else if (typeId == Js::TypeIds_Function)
  21193. {
  21194. InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrEq_A, isNeqOp ? movFalse : done, compare);
  21195. }
  21196. else if (typeId == Js::TypeIds_Number)
  21197. {
  21198. //Check for the typeIds between TypeIds_FirstNumberType <= typeIds <= TypeIds_LastNumberType
  21199. InsertCompareBranch(objTypeIdOpnd,
  21200. IR::IntConstOpnd::New(Js::TypeIds_LastNumberType, TyInt32, compare->m_func),
  21201. Js::OpCode::BrGt_A,
  21202. isNeqOp ? done : movFalse,
  21203. compare);
  21204. InsertCompareBranch(objTypeIdOpnd,
  21205. IR::IntConstOpnd::New(Js::TypeIds_FirstNumberType, TyInt32, compare->m_func),
  21206. isNeqOp? Js::OpCode::BrLt_A : Js::OpCode::BrGe_A,
  21207. done,
  21208. compare);
  21209. }
  21210. else
  21211. {
  21212. InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, done, compare);
  21213. }
  21214. // This could be 'null' which, for historical reasons, has a TypeId < TypeIds_Object but
  21215. // is still a Javascript "object."
  21216. if (typeId == Js::TypeIds_Object)
  21217. {
  21218. // CMP object, 0xXXXXXXXX
  21219. // JEQ isNeqOp ? $movFalse : $done
  21220. InsertCompareBranch(object,
  21221. LoadLibraryValueOpnd(compare, LibraryValue::ValueNull),
  21222. Js::OpCode::BrEq_A,
  21223. isNeqOp ? movFalse : done,
  21224. compare);
  21225. }
  21226. compare->InsertAfter(done); // Get this label first
  21227. // "object" or "function" may come from HostDispatch. Needs helper if that's the case.
  21228. if (typeId == Js::TypeIds_Object || typeId == Js::TypeIds_Function)
  21229. {
  21230. // CMP objTypeId, TypeIds_Proxy
  21231. // JNE isNeqOp ? $done : $movFalse
  21232. InsertCompareBranch(objTypeIdOpnd,
  21233. IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
  21234. Js::OpCode::BrEq_A,
  21235. helper,
  21236. compare);
  21237. // CMP objTypeId, TypeIds_HostDispatch
  21238. // JNE isNeqOp ? $done : $movFalse
  21239. InsertCompareBranch(objTypeIdOpnd,
  21240. IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
  21241. Js::OpCode::BrNeq_A,
  21242. isNeqOp ? done : movFalse,
  21243. compare);
  21244. // Now emit Typeof like we would've for the helper call.
  21245. {
  21246. compare->InsertBefore(helper);
  21247. typeOf->Unlink();
  21248. compare->InsertBefore(typeOf);
  21249. if (compare->HasBailOutInfo() && BailOutInfo::IsBailOutOnImplicitCalls(compare->GetBailOutKind()) &&
  21250. (!typeOf->HasBailOutInfo() || !BailOutInfo::IsBailOutOnImplicitCalls(typeOf->GetBailOutKind())))
  21251. {
  21252. typeOf = AddBailoutToHelperCallInstr(typeOf, compare->GetBailOutInfo(), compare->GetBailOutKind(), compare);
  21253. }
  21254. LowerUnaryHelperMem(typeOf, IR::HelperOp_Typeof);
  21255. }
  21256. // JMP/B $done
  21257. InsertBranch(Js::OpCode::Br, done, done);
  21258. }
  21259. else // Other primitive types don't need helper
  21260. {
  21261. typeOf->Remove();
  21262. dst = compare->UnlinkDst()->AsRegOpnd();
  21263. compare->Remove();
  21264. *pfNoLower = true;
  21265. }
  21266. // $movFalse: (insert before $done)
  21267. done->InsertBefore(movFalse);
  21268. // MOV dst, 'false'
  21269. InsertMove(dst, LoadLibraryValueOpnd(done, LibraryValue::ValueFalse), done);
  21270. // $done:
  21271. }
  21272. void
  21273. Lowerer::GenerateCheckForCallFlagNew(IR::Instr* instrInsert)
  21274. {
  21275. Func *func = instrInsert->m_func;
  21276. IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  21277. Assert(!func->IsInlinee());
  21278. // MOV s1, [ebp + 4] // s1 = call info
  21279. // AND s2, s1, Js::CallFlags_New // s2 = s1 & Js::CallFlags_New
  21280. // CMP s2, 0
  21281. // JNE $Done
  21282. // CALL RuntimeTypeError
  21283. // $Done
  21284. IR::SymOpnd* callInfoOpnd = Lowerer::LoadCallInfo(instrInsert);
  21285. Assert(Js::CallInfo::ksizeofCount == 24);
  21286. IR::RegOpnd* isNewFlagSetRegOpnd = IR::RegOpnd::New(TyMachReg, func);
  21287. InsertAnd(isNewFlagSetRegOpnd, callInfoOpnd, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_New << Js::CallInfo::ksizeofCount, TyMachReg, func, true), instrInsert);
  21288. InsertTestBranch(isNewFlagSetRegOpnd, isNewFlagSetRegOpnd, Js::OpCode::BrNeq_A, labelDone, instrInsert);
  21289. IR::Instr *throwInstr = IR::Instr::New(
  21290. Js::OpCode::RuntimeTypeError,
  21291. IR::RegOpnd::New(TyMachReg, m_func),
  21292. IR::IntConstOpnd::New(SCODE_CODE(JSERR_ClassConstructorCannotBeCalledWithoutNew), TyInt32, m_func),
  21293. m_func);
  21294. instrInsert->InsertBefore(throwInstr);
  21295. this->LowerUnaryHelperMem(throwInstr, IR::HelperOp_RuntimeTypeError);
  21296. instrInsert->InsertBefore(labelDone);
  21297. instrInsert->Remove();
  21298. }
// Emits an inline "is constructor" check over `instanceRegOpnd`: unwraps proxy
// targets in a loop, then branches to labelReturnTrue when the value is a function
// whose FunctionInfo attributes do not include ErrorOnNew, and to labelReturnFalse
// otherwise. Control always leaves through one of the two external labels.
// NOTE: `instanceRegOpnd` is overwritten (proxy unwrap) while walking the chain.
void
Lowerer::GenerateJavascriptOperatorsIsConstructorGotoElse(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelReturnTrue, IR::LabelInstr *labelReturnFalse)
{
    //  $ProxyLoop:
    //  // if (!VarIs<RecyclableObject>(instance)) { goto $ReturnFalse }; // omitted: VarIs<RecyclableObject>(instance) always true
    //  MOV s0, instance->type
    //  MOV s1, s0->typeId
    //  CMP s1, TypeIds_Proxy
    //  JNE $NotProxy
    //
    //  MOV instance, instance->target
    //  JMP $ProxyLoop
    //
    //  $NotProxy:
    //  CMP s1, TypeIds_Function
    //  JNE $ReturnFalse   // external
    //
    //  MOV s0, instance->functionInfo
    //  MOV s1, s0->attributes
    //  TEST s1, ErrorOnNew
    //  JNE $ReturnFalse   // external
    //
    //  JMP $ReturnTrue    // external

    Func *func = instrInsert->m_func;

    IR::LabelInstr *labelProxyLoop = InsertLoopTopLabel(instrInsert);
    IR::LabelInstr *labelNotProxy = IR::LabelInstr::New(Js::OpCode::Label, func, false);

    IR::RegOpnd *indir0RegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd *indir1RegOpnd = IR::RegOpnd::New(TyUint32, func);

    // `instance` is redefined on the back edge (proxy unwrap), so keep it live there.
    Loop * loop = labelProxyLoop->GetLoop();
    loop->regAlloc.liveOnBackEdgeSyms->Set(instanceRegOpnd->m_sym->m_id);

    // s0 = instance->type; s1 = s0->typeId; if (s1 != TypeIds_Proxy) goto $NotProxy
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func);
    Lowerer::InsertMove(indir0RegOpnd, indirOpnd, instrInsert);
    indirOpnd = IR::IndirOpnd::New(indir0RegOpnd, Js::Type::GetOffsetOfTypeId(), TyUint32, func);
    Lowerer::InsertMove(indir1RegOpnd, indirOpnd, instrInsert);
    InsertCompareBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyUint32, func, true), Js::OpCode::BrNeq_A, labelNotProxy, instrInsert);

    // Proxy: replace instance with its target and re-test.
    indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::JavascriptProxy::GetOffsetOfTarget(), TyMachPtr, func);
    Lowerer::InsertMove(instanceRegOpnd, indirOpnd, instrInsert);
    InsertBranch(Js::OpCode::Br, labelProxyLoop, instrInsert);

    // Not a proxy: must be a function to be a constructor.
    instrInsert->InsertBefore(labelNotProxy);
    InsertCompareBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::TypeIds_Function, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnFalse, instrInsert);

    // s0 = instance->functionInfo; s1 = s0->attributes; reject if ErrorOnNew is set.
    indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::JavascriptFunction::GetOffsetOfFunctionInfo(), TyMachPtr, func);
    Lowerer::InsertMove(indir0RegOpnd, indirOpnd, instrInsert);
    indirOpnd = IR::IndirOpnd::New(indir0RegOpnd, Js::FunctionInfo::GetAttributesOffset(), TyUint32, func);
    Lowerer::InsertMove(indir1RegOpnd, indirOpnd, instrInsert);
    InsertTestBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::FunctionInfo::Attributes::ErrorOnNew, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnFalse, instrInsert);

    InsertBranch(Js::OpCode::Br, labelReturnTrue, instrInsert);
}
  21346. void
  21347. Lowerer::GenerateRecyclableObjectGetPrototypeNullptrGoto(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelReturnNullptr)
  21348. {
  21349. // MOV instance, instance->type
  21350. // MOV flags, instance->flags
  21351. // TEST flags, TypeFlagMask_HasSpecialPrototype
  21352. // JNE $ReturnNullptr // external, bypassing nullptr check
  21353. // MOV instance, instance->prototype
  21354. Func *func = instrInsert->m_func;
  21355. IR::RegOpnd *flagsRegOpnd = IR::RegOpnd::New(TyUint32, func);
  21356. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func);
  21357. Lowerer::InsertMove(instanceRegOpnd, indirOpnd, instrInsert);
  21358. indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::Type::GetOffsetOfFlags(), TyUint32, func);
  21359. Lowerer::InsertMove(flagsRegOpnd, indirOpnd, instrInsert);
  21360. InsertTestBranch(flagsRegOpnd, IR::IntConstOpnd::New(TypeFlagMask_HasSpecialPrototype, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnNullptr, instrInsert);
  21361. indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::Type::GetOffsetOfPrototype(), TyMachPtr, func);
  21362. Lowerer::InsertMove(instanceRegOpnd, indirOpnd, instrInsert);
  21363. }
  21364. void
  21365. Lowerer::GenerateRecyclableObjectIsElse(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelFalse)
  21366. {
  21367. Func *func = instrInsert->m_func;
  21368. #if INT32VAR
  21369. InsertTestBranch(instanceRegOpnd, IR::AddrOpnd::New((Js::Var)0xffff000000000000, IR::AddrOpndKindConstantVar, func, true), Js::OpCode::BrNeq_A, labelFalse, instrInsert);
  21370. #else
  21371. InsertTestBranch(instanceRegOpnd, IR::IntConstOpnd::New(Js::AtomTag, TyUint32, func, true), Js::OpCode::BrNeq_A, labelFalse, instrInsert);
  21372. #endif
  21373. }
void
Lowerer::GenerateLdHomeObj(IR::Instr* instr)
{
    // Lowers LdHomeObj: load the home object stored on the current function
    // object into dst, leaving dst = undefined when there is none.
    //
    // MOV dst, undefined
    // MOV instance, functionObject // functionObject through stack params or src1
    // CMP [instance], VtableStackScriptFunction
    // JE $Done
    // MOV instance, instance->homeObj
    // TEST instance, instance
    // JZ $Done
    // MOV dst, instance
    // $Done:

    Func *func = instr->m_func;

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *labelInlineFunc = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *testLabel = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *scriptFuncLabel = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::Opnd *opndUndefAddress = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
    IR::RegOpnd *instanceRegOpnd = IR::RegOpnd::New(TyMachPtr, func);

    IR::Opnd *dstOpnd = instr->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    // dst defaults to undefined; only overwritten if a home object is found.
    Lowerer::InsertMove(dstOpnd, opndUndefAddress, instr);

    IR::Opnd * functionObjOpnd = nullptr;
    m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
    Lowerer::InsertMove(instanceRegOpnd, functionObjOpnd, instr);

    // A stack-allocated (unboxed) script function carries no home object: done.
    IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(instr, VTableValue::VtableStackScriptFunction);
    IR::BranchInstr* branchInstr = InsertCompareBranch(IR::IndirOpnd::New(instanceRegOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
        Js::OpCode::BrEq_A, true, labelDone, instr);
    InsertObjectPoison(instanceRegOpnd, branchInstr, instr, false);

    if (func->GetJITFunctionBody()->HasHomeObj())
    {
        // The home-object field sits at a different offset depending on the
        // concrete function class, so dispatch on the vtable.

        // Is this an function with inline cache and home obj??
        IR::Opnd * vtableAddressInlineFuncHomObjOpnd = this->LoadVTableValueOpnd(instr, VTableValue::VtableScriptFunctionWithInlineCacheAndHomeObj);
        IR::BranchInstr* inlineFuncHomObjOpndBr = InsertCompareBranch(IR::IndirOpnd::New(instanceRegOpnd, 0, TyMachPtr, func), vtableAddressInlineFuncHomObjOpnd, Js::OpCode::BrNeq_A, labelInlineFunc, instr);
        InsertObjectPoison(instanceRegOpnd, inlineFuncHomObjOpndBr, instr, false);
        IR::IndirOpnd *indirInlineFuncHomeObjOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::FunctionWithHomeObj<Js::ScriptFunctionWithInlineCache>::GetOffsetOfHomeObj(), TyMachPtr, func);
        Lowerer::InsertMove(instanceRegOpnd, indirInlineFuncHomeObjOpnd, instr);
        InsertBranch(Js::OpCode::Br, testLabel, instr);

        instr->InsertBefore(labelInlineFunc);
        // Is this a function with inline cache, home obj and computed name??
        IR::Opnd * vtableAddressInlineFuncHomObjCompNameOpnd = this->LoadVTableValueOpnd(instr, VTableValue::VtableScriptFunctionWithInlineCacheHomeObjAndComputedName);
        IR::BranchInstr* inlineFuncHomObjCompNameBr = InsertCompareBranch(IR::IndirOpnd::New(instanceRegOpnd, 0, TyMachPtr, func), vtableAddressInlineFuncHomObjCompNameOpnd, Js::OpCode::BrNeq_A, scriptFuncLabel, instr);
        InsertObjectPoison(instanceRegOpnd, inlineFuncHomObjCompNameBr, instr, false);
        IR::IndirOpnd *indirInlineFuncHomeObjCompNameOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::FunctionWithComputedName<Js::FunctionWithHomeObj<Js::ScriptFunctionWithInlineCache>>::GetOffsetOfHomeObj(), TyMachPtr, func);
        Lowerer::InsertMove(instanceRegOpnd, indirInlineFuncHomeObjCompNameOpnd, instr);
        InsertBranch(Js::OpCode::Br, testLabel, instr);

        // Fallback: a plain ScriptFunctionWithHomeObj.
        instr->InsertBefore(scriptFuncLabel);
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::ScriptFunctionWithHomeObj::GetOffsetOfHomeObj(), TyMachPtr, func);
        Lowerer::InsertMove(instanceRegOpnd, indirOpnd, instr);
    }
    else
    {
        // Even if the function does not have home object in eval cases we still have the LdHomeObj opcode
        InsertBranch(Js::OpCode::Br, labelDone, instr);
    }

    // A null home-object slot leaves dst as undefined.
    instr->InsertBefore(testLabel);
    InsertTestBranch(instanceRegOpnd, instanceRegOpnd, Js::OpCode::BrEq_A, labelDone, instr);
    Lowerer::InsertMove(dstOpnd, instanceRegOpnd, instr);

    instr->InsertBefore(labelDone);
    instr->Remove();
}
void
Lowerer::GenerateLdHomeObjProto(IR::Instr* instr)
{
    // Lowers LdHomeObjProto: dst = prototype of the home object (src1), with
    // null/undefined home objects raising JSERR_BadSuperReference. Non-object
    // inputs and a nullptr prototype fall through with dst = undefined.
    //
    // MOV dst, undefined
    // MOV instance, src1 // homeObj
    // TEST instance, instance
    // JZ $Done
    //
    // if (!VarIs<RecyclableObject>(instance)) goto $Done
    // MOV type, [instance+Offset(type)]
    // MOV typeId, [type+Offset(typeId)]
    // CMP typeId, TypeIds_Null
    // JEQ $Err
    // CMP typeId, TypeIds_Undefined
    // JNE $NoErr
    //
    // $Err:
    // ThrowRuntimeReferenceError(JSERR_BadSuperReference);
    //
    // $NoErr:
    // instance = ((RecyclableObject*)instance)->GetPrototype();
    // if (instance == nullptr) goto $Done;
    //
    // if (!VarIs<RecyclableObject>(instance)) goto $Done
    //
    // MOV dst, instance
    // $Done:

    Func *func = instr->m_func;
    IR::Opnd *src1Opnd = instr->UnlinkSrc1();

    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *labelErr = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *labelNoErr = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::Opnd *opndUndefAddress = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
    IR::RegOpnd *instanceRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd *typeIdRegOpnd = IR::RegOpnd::New(TyUint32, func);

    IR::Opnd *dstOpnd = instr->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    // dst defaults to undefined on all early-out paths.
    Lowerer::InsertMove(dstOpnd, opndUndefAddress, instr);
    Lowerer::InsertMove(instanceRegOpnd, src1Opnd, instr);
    InsertTestBranch(instanceRegOpnd, instanceRegOpnd, Js::OpCode::BrEq_A, labelDone, instr);

    // Bail out for tagged (non-object) values.
    this->GenerateRecyclableObjectIsElse(instr, instanceRegOpnd, labelDone);

    // Load the type id and reject null/undefined home objects with an error.
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func);
    Lowerer::InsertMove(typeRegOpnd, indirOpnd, instr);
    indirOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyUint32, func);
    Lowerer::InsertMove(typeIdRegOpnd, indirOpnd, instr);
    InsertCompareBranch(typeIdRegOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Null, TyUint32, func, true), Js::OpCode::BrEq_A, labelErr, instr);
    InsertCompareBranch(typeIdRegOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Undefined, TyUint32, func, true), Js::OpCode::BrNeq_A, labelNoErr, instr);

    instr->InsertBefore(labelErr);
    this->GenerateRuntimeError(instr, JSERR_BadSuperReference, IR::HelperOp_RuntimeReferenceError);

    instr->InsertBefore(labelNoErr);
    // instanceRegOpnd now becomes the prototype (or we jump to $Done on nullptr).
    this->GenerateRecyclableObjectGetPrototypeNullptrGoto(instr, instanceRegOpnd, labelDone);
    this->GenerateRecyclableObjectIsElse(instr, instanceRegOpnd, labelDone);
    Lowerer::InsertMove(dstOpnd, instanceRegOpnd, instr);

    instr->InsertBefore(labelDone);
    instr->Remove();
}
  21492. void
  21493. Lowerer::GenerateLdFuncObj(IR::Instr* instr)
  21494. {
  21495. // MOV dst, functionObject // functionObject through stack params or src1
  21496. IR::Opnd *dstOpnd = instr->GetDst();
  21497. IR::Opnd *functionObjOpnd = nullptr;
  21498. m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  21499. Lowerer::InsertMove(dstOpnd, functionObjOpnd, instr);
  21500. instr->Remove();
  21501. }
void
Lowerer::GenerateLdFuncObjProto(IR::Instr* instr)
{
    // Lowers LdFuncObjProto: dst = prototype of the function object (src1).
    // Throws JSERR_NotAConstructor when the prototype is nullptr or not a
    // constructor (super-call validation).
    //
    // MOV instance, src1
    //
    // instance = ((RecyclableObject*)instance)->GetPrototype();
    // if (instance == nullptr) goto $ThrowTypeError;
    //
    // MOV dst, instance
    //
    // if (!JavascriptOperators::IsConstructor(instance))
    // goto $ThrowTypeError;
    // else
    // goto $Done;
    //
    // $helperLabelThrowTypeError:
    // ThrowRuntimeTypeError(JSERR_NotAConstructor);
    //
    // $Done:

    Func *func = instr->m_func;
    IR::Opnd *src1Opnd = instr->UnlinkSrc1();
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *helperLabelThrowTypeError = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::RegOpnd *instanceRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::Opnd *dstOpnd = instr->GetDst();

    Lowerer::InsertMove(instanceRegOpnd, src1Opnd, instr);
    // instanceRegOpnd becomes the prototype; nullptr jumps to the throw label.
    this->GenerateRecyclableObjectGetPrototypeNullptrGoto(instr, instanceRegOpnd, helperLabelThrowTypeError);
    // dst is written before the constructor check; on the throw path the store
    // is irrelevant since the helper raises.
    Lowerer::InsertMove(dstOpnd, instanceRegOpnd, instr);
    this->GenerateJavascriptOperatorsIsConstructorGotoElse(instr, instanceRegOpnd, labelDone, helperLabelThrowTypeError);

    instr->InsertBefore(helperLabelThrowTypeError);
    this->GenerateRuntimeError(instr, JSERR_NotAConstructor, IR::HelperOp_RuntimeTypeError);

    instr->InsertBefore(labelDone);
    instr->Remove();
}
void
Lowerer::GenerateLoadNewTarget(IR::Instr* instrInsert)
{
    // Lowers the load of 'new.target': read it from the call info flags and,
    // when present, from the extra trailing argument; otherwise the function
    // object itself (plain 'new') or undefined (normal call).
    Func *func = instrInsert->m_func;

    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr * labelLoadArgNewTarget = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::Opnd* opndUndefAddress = this->LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndefined);

    Assert(!func->IsInlinee());

    // Coroutines (generators/async) always see undefined here.
    if (func->GetJITFunctionBody()->IsCoroutine())
    {
        instrInsert->SetSrc1(opndUndefAddress);
        LowererMD::ChangeToAssign(instrInsert);
        return;
    }

    // MOV dst, undefined                       // dst = undefined
    // MOV s1, callInfo                         // s1 = callInfo
    // TEST s1, Js::CallFlags_NewTarget << 24   // if (callInfo.Flags & Js::CallFlags_NewTarget)
    // JNE $LoadLastArgument                    // goto $LoadLastArgument
    // TEST s1, Js::CallFlags_New << 24         // if (!(callInfo.Flags & Js::CallFlags_New))
    // JE $Done                                 // goto $Done
    // MOV dst, functionObject                  // dst = functionObject
    // JMP $Done                                // goto $Done
    // $LoadLastArgument
    // AND s1, s1, (0x00FFFFFF)                 // s2 = callInfo.Count == arguments.length + 2
    // MOV dst, [ebp + (s1 - 1) * sizeof(Var) + formalParamOffset * sizeof(Var) ] // points to new.target
    // $Done

    IR::Opnd *dstOpnd = instrInsert->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    Lowerer::InsertMove(dstOpnd, opndUndefAddress, instrInsert);

    IR::SymOpnd *callInfoOpnd = Lowerer::LoadCallInfo(instrInsert);
    // The flags live in the bits above the 24-bit argument count.
    Assert(Js::CallInfo::ksizeofCount == 24);

    IR::RegOpnd *s1 = IR::RegOpnd::New(TyUint32, func);
    Lowerer::InsertMove(s1, callInfoOpnd, instrInsert);
    InsertTestBranch(s1, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_NewTarget << Js::CallInfo::ksizeofCount, TyUint32, func, true), Js::OpCode::BrNeq_A, labelLoadArgNewTarget, instrInsert);
    InsertTestBranch(s1, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_New << Js::CallInfo::ksizeofCount, TyUint32, func, true), Js::OpCode::BrEq_A, labelDone, instrInsert);

    // CallFlags_New without an explicit new.target: new.target == the callee.
    // NOTE(review): the AND opcode here is a placeholder; LoadFuncExpression
    // fills in the actual function-object load — confirm against its impl.
    IR::Instr* loadFuncInstr = IR::Instr::New(Js::OpCode::AND, func);
    loadFuncInstr->SetDst(instrInsert->GetDst());
    LoadFuncExpression(loadFuncInstr);
    instrInsert->InsertBefore(loadFuncInstr);
    InsertBranch(Js::OpCode::Br, labelDone, instrInsert);

    instrInsert->InsertBefore(labelLoadArgNewTarget);
    // Mask off the flag bits to leave the argument count.
    InsertAnd(s1, s1, IR::IntConstOpnd::New(0x00FFFFFF, TyUint32, func, true), instrInsert); // callInfo.Count

    // [formalOffset (4) + callInfo.Count] points to 'new.target' - see diagram in GenerateLoadStackArgumentByIndex()
    GenerateLoadStackArgumentByIndex(dstOpnd, s1, instrInsert, 0, m_func);

    instrInsert->InsertBefore(labelDone);
    instrInsert->Remove();
}
void
Lowerer::GenerateGetCurrentFunctionObject(IR::Instr * instr)
{
    // After 'instr' has produced the current function object in its dst,
    // replace a stack-allocated StackScriptFunction with its boxed (heap)
    // counterpart when one exists. Code is emitted AFTER 'instr' (note the
    // insertion point is instr->m_next).
    Func * func = this->m_func;
    IR::Instr * insertBeforeInstr = instr->m_next;
    IR::RegOpnd * functionObjectOpnd = instr->GetDst()->AsRegOpnd();

    // Not a stack function? Nothing to do.
    IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableStackScriptFunction);
    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::BranchInstr *branchInstr = InsertCompareBranch(IR::IndirOpnd::New(functionObjectOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
        Js::OpCode::BrNeq_A, true, labelDone, insertBeforeInstr);
    InsertObjectPoison(functionObjectOpnd, branchInstr, insertBeforeInstr, false);

    // Load the boxed pointer; if none was created yet, keep the stack function.
    IR::RegOpnd * boxedFunctionObjectOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(boxedFunctionObjectOpnd, IR::IndirOpnd::New(functionObjectOpnd,
        Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func), insertBeforeInstr);
    InsertTestBranch(boxedFunctionObjectOpnd, boxedFunctionObjectOpnd, Js::OpCode::BrEq_A, true, labelDone, insertBeforeInstr);
    InsertMove(functionObjectOpnd, boxedFunctionObjectOpnd, insertBeforeInstr);

    insertBeforeInstr->InsertBefore(labelDone);
}
IR::Opnd *
Lowerer::GetInlineCacheFromFuncObjectForRuntimeUse(IR::Instr * instr, IR::PropertySymOpnd * propSymOpnd, bool isHelper)
{
    // Produces an operand holding the inline cache to hand to a runtime helper:
    // prefer the function object's own cache array (when it has inline caches),
    // otherwise fall back to the jit-time cache baked into the PropertySymOpnd.
    //
    // MOV s1, [ebp + 8]                    //s1 = function object
    // MOV s2, [s1 + offset(hasInlineCaches)]
    // TEST s2, s2
    // JE $L1
    // MOV s3, [s1 + offset(m_inlineCaches)] //s3 = inlineCaches from function object
    // MOV s4, [s3 + index*scale]            //s4 = inlineCaches[index]
    // JMP $L2
    // $L1
    // MOV s3, propSym->m_runtimeCache
    // $L2

    byte indirScale = this->m_lowererMD.GetDefaultIndirScale();

    IR::RegOpnd * funcObjOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::Instr * funcObjInstr = IR::Instr::New(Js::OpCode::Ld_A, funcObjOpnd, instr->m_func);
    instr->InsertBefore(funcObjInstr);
    LoadFuncExpression(funcObjInstr);

    IR::RegOpnd * funcObjHasInlineCachesOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    this->InsertMove(funcObjHasInlineCachesOpnd, IR::IndirOpnd::New(funcObjOpnd, Js::ScriptFunction::GetOffsetOfHasInlineCaches(), TyUint8, instr->m_func), instr);

    IR::LabelInstr * inlineCachesNullLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, isHelper);
    InsertTestBranch(funcObjHasInlineCachesOpnd, funcObjHasInlineCachesOpnd, Js::OpCode::BrEq_A, inlineCachesNullLabel, instr);

    IR::RegOpnd * inlineCachesOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    Lowerer::InsertMove(inlineCachesOpnd, IR::IndirOpnd::New(funcObjOpnd, Js::ScriptFunctionWithInlineCache::GetOffsetOfInlineCaches(), TyMachPtr, instr->m_func), instr);

    IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    IR::RegOpnd * indexOpnd = IR::RegOpnd::New(TyMachReg, instr->m_func);
    int inlineCacheOffset;
    // Int32Math::Mul appears to return false when the multiply does not
    // overflow (ChakraCore convention); constant offset is the fast path.
    if (!Int32Math::Mul(sizeof(Js::InlineCache *), propSymOpnd->m_inlineCacheIndex, &inlineCacheOffset))
    {
        Lowerer::InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCachesOpnd, inlineCacheOffset, TyMachPtr, instr->m_func), instr);
    }
    else
    {
        // Offset too large for a constant displacement: use scaled indexing.
        Lowerer::InsertMove(indexOpnd, IR::IntConstOpnd::New(propSymOpnd->m_inlineCacheIndex, TyUint32, instr->m_func), instr);
        Lowerer::InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCachesOpnd, indexOpnd, indirScale, TyMachPtr, instr->m_func), instr);
    }

    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, isHelper);
    InsertBranch(LowererMD::MDUncondBranchOpcode, continueLabel, instr);

    // Fallback path ($L1): load the cache from the PropertySymOpnd. The label
    // instrs are spliced around the move that InsertMove just emitted.
    IR::Instr * ldCacheFromPropSymOpndInstr = this->InsertMove(inlineCacheOpnd, IR::AddrOpnd::New(propSymOpnd->m_runtimeInlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func), instr);
    ldCacheFromPropSymOpndInstr->InsertBefore(inlineCachesNullLabel);
    ldCacheFromPropSymOpndInstr->InsertAfter(continueLabel);

    return inlineCacheOpnd;
}
  21644. IR::Instr *
  21645. Lowerer::LowerInitClass(IR::Instr * instr)
  21646. {
  21647. // scriptContext
  21648. IR::Instr * prevInstr = LoadScriptContext(instr);
  21649. // extends
  21650. if (instr->GetSrc2() != nullptr)
  21651. {
  21652. IR::Opnd * extendsOpnd = instr->UnlinkSrc2();
  21653. m_lowererMD.LoadHelperArgument(instr, extendsOpnd);
  21654. }
  21655. else
  21656. {
  21657. IR::AddrOpnd* extendsOpnd = IR::AddrOpnd::NewNull(this->m_func);
  21658. m_lowererMD.LoadHelperArgument(instr, extendsOpnd);
  21659. }
  21660. // constructor
  21661. IR::Opnd * ctorOpnd = instr->UnlinkSrc1();
  21662. m_lowererMD.LoadHelperArgument(instr, ctorOpnd);
  21663. // call
  21664. m_lowererMD.ChangeToHelperCall(instr, IR::HelperOP_InitClass);
  21665. return prevInstr;
  21666. }
void
Lowerer::LowerNewConcatStrMulti(IR::Instr * instr)
{
    // Allocates and initializes an empty Js::ConcatStringMulti with 'count'
    // slots. Fields are initialized with the Recycler* variants, which elide
    // zero-stores because recycler memory is already zeroed.
    IR::IntConstOpnd * countOpnd = instr->UnlinkSrc1()->AsIntConstOpnd();
    IR::RegOpnd * dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    uint8 count = (uint8)countOpnd->GetValue();

    Assert(dstOpnd->GetValueType().IsString());

    GenerateRecyclerAlloc(IR::HelperAllocMemForConcatStringMulti, Js::ConcatStringMulti::GetAllocSize(count), dstOpnd, instr);
    // vtable, type, flattened-string pointer (null), char length (0), slot count.
    GenerateRecyclerMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, VTableValue::VtableConcatStringMulti), instr);
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfType(),
        this->LoadLibraryValueOpnd(instr, LibraryValue::ValueStringTypeStatic), instr);
    GenerateRecyclerMemInitNull(dstOpnd, Js::ConcatStringMulti::GetOffsetOfpszValue(), instr);
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), 0, instr);
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfSlotCount(), countOpnd->AsUint32(), instr);

    instr->Remove();
}
void
Lowerer::LowerNewConcatStrMultiBE(IR::Instr * instr)
{
    // Lower
    // t1 = SetConcatStrMultiBE s1
    // t2 = SetConcatStrMultiBE s2, t1
    // t3 = SetConcatStrMultiBE s3, t2
    // s = NewConcatStrMultiBE 3, t3
    // to
    // s = new concat string
    // s+0 = s1
    // s+1 = s2
    // s+2 = s3
    //
    // Walks the SetConcatStrMultiItemBE def-chain backwards through src2 links,
    // emitting a slot store per item (slots filled from the last index down),
    // then lowers the allocation itself.
    Assert(instr->GetSrc1()->IsConstOpnd());
    Assert(instr->GetDst()->IsRegOpnd());

    IR::RegOpnd * newString = instr->GetDst()->AsRegOpnd();
    IR::Opnd * newConcatItemOpnd = nullptr;
    // Slot index of the last item; decremented as we walk the chain.
    uint index = instr->GetSrc1()->AsIntConstOpnd()->AsUint32() - 1;
    IR::Instr * concatItemInstr = nullptr;
    IR::Opnd * linkOpnd = instr->GetSrc2();

    while (linkOpnd)
    {
        Assert(linkOpnd->IsRegOpnd());
        concatItemInstr = linkOpnd->GetStackSym()->GetInstrDef();
        Assert(concatItemInstr->m_opcode == Js::OpCode::SetConcatStrMultiItemBE);

        IR::Opnd * concatItemOpnd = concatItemInstr->GetSrc1();
        Assert(concatItemOpnd->IsRegOpnd());

        // If one of the concat items is equal to the dst of the concat expressions (s = s + a + b),
        // hoist the load of that item to before the setting of the new string to the dst.
        if (concatItemOpnd->IsEqual(newString))
        {
            if (!newConcatItemOpnd)
            {
                // Hoist once and reuse the hoisted copy for every occurrence.
                IR::Instr * hoistSrcInstr = concatItemInstr->HoistSrc1(Js::OpCode::Ld_A);
                newConcatItemOpnd = hoistSrcInstr->GetDst();
            }
            concatItemOpnd = newConcatItemOpnd;
        }
        else
        {
            // If only some of the SetConcatStrMultiItemBE instructions were CSE'd and the rest, along with the NewConcatStrMultiBE
            // instruction, were in a loop, the strings on the CSE'd Set*BE instructions will become live on back edge. Add them to
            // addToLiveOnBackEdgeSyms here and clear when we reach the Set*BE instruction.
            // Note that we are doing this only for string opnds which are not the same as the dst of the concat expression. Reasoning
            // behind this is that if a loop has a concat expression with one of its sources same as the dst, the Set*BE instruction
            // for the dst wouldn't have been CSE'd as the dst's value is changing in the loop and the backward pass should have set the
            // symbol as live on backedge.
            this->addToLiveOnBackEdgeSyms->Set(concatItemOpnd->GetStackSym()->m_id);
        }

        // Emit the slot store AFTER 'instr' so it runs once the string exists.
        IR::Instr * newConcatItemInstr = IR::Instr::New(Js::OpCode::SetConcatStrMultiItem,
            IR::IndirOpnd::New(newString, index, TyVar, instr->m_func),
            concatItemOpnd,
            instr->m_func);
        instr->InsertAfter(newConcatItemInstr);
        this->LowerSetConcatStrMultiItem(newConcatItemInstr);

        linkOpnd = concatItemInstr->GetSrc2();
        index--;
    }

    // 'index' is unsigned: after consuming slot 0 it wraps, so comparing
    // against -1 (implicitly converted) verifies every slot was filled.
    Assert(index == -1);

    this->LowerNewConcatStrMulti(instr);
}
void
Lowerer::LowerSetConcatStrMultiItem(IR::Instr * instr)
{
    // Lowers a single slot store into a ConcatStringMulti: accumulates the
    // item's character length into the concat string's charLength (with an
    // overflow check that raises OOM) and stores the item pointer into its slot.
    Func * func = this->m_func;

    IR::IndirOpnd * dstOpnd = instr->GetDst()->AsIndirOpnd();
    IR::RegOpnd * concatStrOpnd = dstOpnd->GetBaseOpnd();
    IR::RegOpnd * srcOpnd = instr->UnlinkSrc1()->AsRegOpnd();

    Assert(concatStrOpnd->GetValueType().IsString());
    Assert(srcOpnd->GetValueType().IsString());
    // Compound strings are cloned so later mutation can't affect this slot.
    srcOpnd = GenerateGetImmutableOrScriptUnreferencedString(srcOpnd, instr, IR::HelperOp_CompoundStringCloneForConcat);
    instr->SetSrc1(srcOpnd);

    IR::IndirOpnd * dstLength = IR::IndirOpnd::New(concatStrOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), TyUint32, func);
    IR::Opnd * srcLength;

    if (srcOpnd->m_sym->m_isStrConst)
    {
        // Known string constant: fold its length at jit time.
        srcLength = IR::IntConstOpnd::New(JITJavascriptString::FromVar(srcOpnd->m_sym->GetConstAddress(true))->GetLength(), TyUint32, func);
    }
    else
    {
        srcLength = IR::RegOpnd::New(TyUint32, func);
        InsertMove(srcLength, IR::IndirOpnd::New(srcOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), TyUint32, func), instr);
    }

    // dstLength += srcLength, branching to an OOM helper call on uint32 overflow.
    IR::Instr *onOverflowInsertBeforeInstr;
    InsertAddWithOverflowCheck(false, dstLength, dstLength, srcLength, instr, &onOverflowInsertBeforeInstr);
    IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperOp_OutOfMemoryError, func));
    instr->InsertBefore(onOverflowInsertBeforeInstr);
    onOverflowInsertBeforeInstr->InsertBefore(callInstr);
    this->m_lowererMD.LowerCall(callInstr, 0);

    // Turn the logical slot index into a byte offset into the slots array.
    dstOpnd->SetOffset(dstOpnd->GetOffset() * sizeof(Js::JavascriptString *) + Js::ConcatStringMulti::GetOffsetOfSlots());
    LowererMD::ChangeToWriteBarrierAssign(instr, func);
}
IR::RegOpnd *
Lowerer::GenerateGetImmutableOrScriptUnreferencedString(IR::RegOpnd * strOpnd, IR::Instr * insertBeforeInstr, IR::JnHelperMethod helperMethod, bool reloadDst)
{
    // Returns an operand guaranteed to hold a string safe to keep a reference
    // to: compound strings (still mutable) are run through 'helperMethod';
    // everything else passes through unchanged. When reloadDst is true the
    // result goes into a fresh register instead of overwriting strOpnd.

    // String constants are already immutable.
    if (strOpnd->m_sym->m_isStrConst)
    {
        return strOpnd;
    }

    Func * const func = this->m_func;
    IR::RegOpnd *dstOpnd = reloadDst == true ? IR::RegOpnd::New(TyVar, func) : strOpnd;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

    // Tagged values can't be compound strings; skip the vtable check.
    if (!strOpnd->IsNotTaggedValue())
    {
        this->m_lowererMD.GenerateObjectTest(strOpnd, insertBeforeInstr, doneLabel);
    }

    // CMP [strOpnd], Js::CompoundString::`vtable'
    // JEQ $helper
    InsertCompareBranch(
        IR::IndirOpnd::New(strOpnd, 0, TyMachPtr, func),
        this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
        Js::OpCode::BrEq_A,
        helperLabel,
        insertBeforeInstr);

    if (reloadDst)
    {
        InsertMove(dstOpnd, strOpnd, insertBeforeInstr);
    }

    InsertBranch(Js::OpCode::Br, doneLabel, insertBeforeInstr);

    // $helper: dst = helperMethod(strOpnd)
    insertBeforeInstr->InsertBefore(helperLabel);
    this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, strOpnd);
    IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, dstOpnd, func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
    insertBeforeInstr->InsertBefore(callInstr);
    this->m_lowererMD.LowerCall(callInstr, 0);

    insertBeforeInstr->InsertBefore(doneLabel);

    return dstOpnd;
}
void
Lowerer::LowerConvStrCommon(IR::JnHelperMethod helper, IR::Instr * instr)
{
    // Shared lowering for the string-conversion opcodes: emit an inline
    // already-a-string fast path (dst = src1), then convert 'instr' itself
    // into a call to 'helper' for the slow path.
    IR::RegOpnd * src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();

    // Fast path only when src1 might be a string.
    if (!src1Opnd->GetValueType().IsNotString())
    {
        IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        this->GenerateStringTest(src1Opnd, instr, helperLabel);
        InsertMove(instr->GetDst(), src1Opnd, instr);
        InsertBranch(Js::OpCode::Br, doneLabel, instr);
        // doneLabel goes AFTER instr so the helper call's result also reaches it.
        instr->InsertBefore(helperLabel);
        instr->InsertAfter(doneLabel);
    }

    // Optional second helper argument (pushed first), then scriptContext, then src1.
    if (instr->GetSrc2())
    {
        this->m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
    }
    this->LoadScriptContext(instr);
    this->m_lowererMD.LoadHelperArgument(instr, src1Opnd);
    this->m_lowererMD.ChangeToHelperCall(instr, helper);
}
  21835. void
  21836. Lowerer::LowerConvStr(IR::Instr * instr)
  21837. {
  21838. LowerConvStrCommon(IR::HelperOp_ConvString, instr);
  21839. }
  21840. void
  21841. Lowerer::LowerCoerseStr(IR::Instr* instr)
  21842. {
  21843. LowerConvStrCommon(IR::HelperOp_CoerseString, instr);
  21844. }
///----------------------------------------------------------------------------
///
/// Lowerer::LowerCoerseStrOrRegex - This method is used for String.Replace(arg1, arg2)
/// where arg1 is regex or string
/// if arg1 is not regex, then do String.Replace(CoerseStr(arg1), arg2);
///
/// CoerseStrOrRegex arg1
///
/// if (value == regex) goto :done
/// else
///helper:
/// ConvStr value
///done:
///----------------------------------------------------------------------------
void
Lowerer::LowerCoerseStrOrRegex(IR::Instr* instr)
{
    IR::RegOpnd * src1Opnd = instr->GetSrc1()->AsRegOpnd();
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    // if (value == regex) goto :done
    // Tagged values can't be regex objects; send them to the helper path.
    if (!src1Opnd->IsNotTaggedValue())
    {
        this->m_lowererMD.GenerateObjectTest(src1Opnd, instr, helperLabel);
    }

    // Fast path: a JavascriptRegExp (identified by vtable) passes through as-is.
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    InsertCompareBranch(IR::IndirOpnd::New(src1Opnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);

    InsertMove(instr->GetDst(), src1Opnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);

    // helper: ConvStr value
    // 'instr' now sits between helperLabel and doneLabel; LowerConvStr turns it
    // into the string-conversion helper call for the non-regex case.
    LowerConvStr(instr);
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerCoerseRegex - This method is used for String.Match(arg1)
/// if arg1 is regex, then pass CreateRegEx(arg1) to String.Match
///
///----------------------------------------------------------------------------
void
Lowerer::LowerCoerseRegex(IR::Instr* instr)
{
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd * src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();

    // Tagged values can't be regex objects; send them to the helper path.
    if (!src1Opnd->IsNotTaggedValue())
    {
        this->m_lowererMD.GenerateObjectTest(src1Opnd, instr, helperLabel);
    }

    // Fast path: an actual JavascriptRegExp (identified by vtable) passes through.
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    InsertCompareBranch(IR::IndirOpnd::New(src1Opnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);

    InsertMove(instr->GetDst(), src1Opnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);

    // Slow path: Op_CoerseRegex(src1, options = null, scriptContext).
    this->LoadScriptContext(instr);
    this->m_lowererMD.LoadHelperArgument(instr, IR::AddrOpnd::NewNull(instr->m_func)); // option
    this->m_lowererMD.LoadHelperArgument(instr, src1Opnd); // regex
    this->m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_CoerseRegex);
}
  21908. void
  21909. Lowerer::LowerConvPrimStr(IR::Instr * instr)
  21910. {
  21911. LowerConvStrCommon(IR::HelperOp_ConvPrimitiveString, instr);
  21912. }
  21913. void
  21914. Lowerer::GenerateRecyclerAlloc(IR::JnHelperMethod allocHelper, size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, bool inOpHelper)
  21915. {
  21916. size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);
  21917. this->GenerateRecyclerAllocAligned(allocHelper, alignedSize, newObjDst, insertionPointInstr, inOpHelper);
  21918. }
  21919. void
  21920. Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, int32 value, IR::Instr * insertBeforeInstr, bool isZeroed)
  21921. {
  21922. IRType type = TyInt32;
  21923. if (isZeroed)
  21924. {
  21925. if (value == 0)
  21926. {
  21927. // Recycler memory are zero initialized
  21928. return;
  21929. }
  21930. if (value > 0 && value <= USHORT_MAX)
  21931. {
  21932. // Recycler memory are zero initialized, so we can just initialize the 8 or 16 bits of value
  21933. type = (value <= UCHAR_MAX)? TyUint8 : TyUint16;
  21934. }
  21935. }
  21936. Func * func = this->m_func;
  21937. InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
  21938. }
  21939. void
  21940. Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, uint32 value, IR::Instr * insertBeforeInstr, bool isZeroed)
  21941. {
  21942. IRType type = TyUint32;
  21943. if (isZeroed)
  21944. {
  21945. if (value == 0)
  21946. {
  21947. // Recycler memory are zero initialized
  21948. return;
  21949. }
  21950. if (value <= USHORT_MAX)
  21951. {
  21952. // Recycler memory are zero initialized, so we can just initialize the 8 or 16 bits of value
  21953. type = (value <= UCHAR_MAX)? TyUint8 : TyUint16;
  21954. }
  21955. }
  21956. Func * func = this->m_func;
  21957. InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
  21958. }
  21959. void
  21960. Lowerer::GenerateMemInitNull(IR::RegOpnd * opnd, int32 offset, IR::Instr * insertBeforeInstr, bool isZeroed)
  21961. {
  21962. if (isZeroed)
  21963. {
  21964. return;
  21965. }
  21966. GenerateMemInit(opnd, offset, IR::AddrOpnd::NewNull(m_func), insertBeforeInstr);
  21967. }
  21968. void
  21969. Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, IR::Opnd * value, IR::Instr * insertBeforeInstr, bool isZeroed)
  21970. {
  21971. IRType type = value->GetType();
  21972. Func * func = this->m_func;
  21973. InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), value, insertBeforeInstr);
  21974. }
  21975. void
  21976. Lowerer::GenerateMemInit(IR::RegOpnd * opnd, IR::RegOpnd * offset, IR::Opnd * value, IR::Instr * insertBeforeInstr, bool isZeroed)
  21977. {
  21978. IRType type = value->GetType();
  21979. Func * func = this->m_func;
  21980. InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), value, insertBeforeInstr);
  21981. }
  21982. void
  21983. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, int32 value, IR::Instr * insertBeforeInstr)
  21984. {
  21985. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  21986. }
  21987. void
  21988. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, uint32 value, IR::Instr * insertBeforeInstr)
  21989. {
  21990. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  21991. }
  21992. void
  21993. Lowerer::GenerateRecyclerMemInitNull(IR::RegOpnd * opnd, int32 offset, IR::Instr * insertBeforeInstr)
  21994. {
  21995. GenerateMemInitNull(opnd, offset, insertBeforeInstr, true);
  21996. }
  21997. void
  21998. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, IR::Opnd * value, IR::Instr * insertBeforeInstr)
  21999. {
  22000. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  22001. }
  22002. void
  22003. Lowerer::GenerateMemCopy(IR::Opnd * dst, IR::Opnd * src, uint32 size, IR::Instr * insertBeforeInstr)
  22004. {
  22005. Func * func = this->m_func;
  22006. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, IR::IntConstOpnd::New(size, TyUint32, func));
  22007. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, src);
  22008. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, dst);
  22009. IR::Instr * memcpyInstr = IR::Instr::New(Js::OpCode::Call, func);
  22010. memcpyInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperMemCpy, func));
  22011. insertBeforeInstr->InsertBefore(memcpyInstr);
  22012. m_lowererMD.LowerCall(memcpyInstr, 3);
  22013. }
  22014. bool
  22015. Lowerer::GenerateSimplifiedInt4Rem(
  22016. IR::Instr *const remInstr,
  22017. IR::LabelInstr *const skipBailOutLabel) const
  22018. {
  22019. Assert(remInstr);
  22020. Assert(remInstr->m_opcode == Js::OpCode::Rem_I4 || remInstr->m_opcode == Js::OpCode::RemU_I4);
  22021. auto *dst = remInstr->GetDst(), *src1 = remInstr->GetSrc1(), *src2 = remInstr->GetSrc2();
  22022. Assert(src1 && src2);
  22023. Assert(dst->IsRegOpnd());
  22024. bool isModByPowerOf2 = (remInstr->HasBailOutInfo() && remInstr->GetBailOutKind() == IR::BailOnModByPowerOf2);
  22025. if (PHASE_OFF(Js::Phase::MathFastPathPhase, remInstr->m_func->GetTopFunc()) && !isModByPowerOf2)
  22026. return false;
  22027. if (!(src2->IsIntConstOpnd() && Math::IsPow2(src2->AsIntConstOpnd()->AsInt32())) && !isModByPowerOf2)
  22028. {
  22029. return false;
  22030. }
  22031. // We have:
  22032. // s3 = s1 % s2 , where s2 = +2^i
  22033. //
  22034. // Generate:
  22035. // test s1, s1
  22036. // js $slowPathLabel
  22037. // s3 = and s1, 0x00..fff (2^i - 1)
  22038. // jmp $doneLabel
  22039. // $slowPathLabel:
  22040. // (Slow path)
  22041. // (Neg zero check)
  22042. // (Bailout code)
  22043. // $doneLabel:
  22044. IR::LabelInstr *doneLabel = skipBailOutLabel, *slowPathLabel;
  22045. if (!doneLabel)
  22046. {
  22047. doneLabel = IR::LabelInstr::New(Js::OpCode::Label, remInstr->m_func);
  22048. remInstr->InsertAfter(doneLabel);
  22049. }
  22050. slowPathLabel = IR::LabelInstr::New(Js::OpCode::Label, remInstr->m_func, isModByPowerOf2);
  22051. remInstr->InsertBefore(slowPathLabel);
  22052. // test s1, s1
  22053. InsertTest(src1, src1, slowPathLabel);
  22054. // jsb $slowPathLabel
  22055. InsertBranch(LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), slowPathLabel, slowPathLabel);
  22056. // s3 = and s1, 0x00..fff (2^i - 1)
  22057. IR::Opnd* maskOpnd;
  22058. if(isModByPowerOf2)
  22059. {
  22060. Assert(isModByPowerOf2);
  22061. maskOpnd = IR::RegOpnd::New(TyInt32, remInstr->m_func);
  22062. // mov maskOpnd, s2
  22063. InsertMove(maskOpnd, src2, slowPathLabel);
  22064. // dec maskOpnd
  22065. InsertSub(/*needFlags*/ true, maskOpnd, maskOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func, /*dontEncode*/true), slowPathLabel);
  22066. // maskOpnd < 0 goto $slowPath
  22067. InsertBranch(LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), slowPathLabel, slowPathLabel);
  22068. // TEST src2, maskOpnd
  22069. InsertTestBranch(src2, maskOpnd, Js::OpCode::BrNeq_A, slowPathLabel, slowPathLabel);
  22070. }
  22071. else
  22072. {
  22073. Assert(src2->IsIntConstOpnd());
  22074. int32 mask = src2->AsIntConstOpnd()->AsInt32() - 1;
  22075. maskOpnd = IR::IntConstOpnd::New(mask, TyInt32, remInstr->m_func);
  22076. }
  22077. // dst = src1 & maskOpnd
  22078. InsertAnd(dst, src1, maskOpnd, slowPathLabel);
  22079. // jmp $doneLabel
  22080. InsertBranch(Js::OpCode::Br, doneLabel, slowPathLabel);
  22081. return true;
  22082. }
#if DBG
// Debug-only IR verifier hook: returns true when 'opcode' may legitimately
// still appear in the instruction stream after the Lower phase.  Machine-
// dependent opcodes are always valid; a small whitelist of machine-
// independent opcodes may survive, some only until a later phase (peeps,
// final lower) removes them — for those, validity depends on the phase
// flags recorded on 'func'.
bool
Lowerer::ValidOpcodeAfterLower(IR::Instr* instr, Func * func)
{
    Js::OpCode opcode = instr->m_opcode;
    if (opcode > Js::OpCode::MDStart)
    {
        // Machine-dependent opcode: produced by lowering, always valid here.
        return true;
    }
    switch (opcode)
    {
    // Pseudo-ops and markers that legitimately persist through all later phases.
    case Js::OpCode::Ret:
    case Js::OpCode::Label:
    case Js::OpCode::StatementBoundary:
    case Js::OpCode::DeletedNonHelperBranch:
    case Js::OpCode::FunctionEntry:
    case Js::OpCode::FunctionExit:
    case Js::OpCode::TryCatch:
    case Js::OpCode::TryFinally:
    case Js::OpCode::Catch:
    case Js::OpCode::GeneratorResumeJumpTable:
    case Js::OpCode::Break:
#ifdef _M_X64
    case Js::OpCode::PrologStart:
    case Js::OpCode::PrologEnd:
#endif
#ifdef _M_IX86
    case Js::OpCode::BailOutStackRestore:
#endif
        return true;
    case Js::OpCode::RestoreOutParam:
        // Only introduced after register allocation.
        Assert(func->isPostRegAlloc);
        return true;
    // These may be removed by peep
    case Js::OpCode::StartCall:
    case Js::OpCode::LoweredStartCall:
    case Js::OpCode::Nop:
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
        return func && !func->isPostPeeps;
    case Js::OpCode::InlineeStart:
    case Js::OpCode::InlineeEnd:
        // Kept only while the inline-args optimization still needs them.
        return instr->m_func->m_hasInlineArgsOpt;
#ifdef _M_X64
    case Js::OpCode::LdArgSize:
    case Js::OpCode::LdSpillSize:
        // x64-only pseudo-ops resolved during final lower.
        return func && !func->isPostFinalLower;
#endif
    case Js::OpCode::Leave:
        Assert(!func->IsLoopBodyInTry());
        Assert(func->HasTry() && func->DoOptimizeTry());
        return func && !func->isPostFinalLower; //Lowered in FinalLower phase
    };
    // Any other machine-independent opcode should have been lowered away.
    return false;
}
#endif
  22138. void Lowerer::LowerProfiledBeginSwitch(IR::JitProfilingInstr* instr)
  22139. {
  22140. Assert(instr->isBeginSwitch);
  22141. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  22142. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
  22143. m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
  22144. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleProfiledSwitch, m_func));
  22145. m_lowererMD.LowerCall(instr, 0);
  22146. }
  22147. void Lowerer::LowerProfiledBinaryOp(IR::JitProfilingInstr* instr, IR::JnHelperMethod meth)
  22148. {
  22149. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
  22150. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  22151. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
  22152. m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
  22153. instr->SetSrc1(IR::HelperCallOpnd::New(meth, m_func));
  22154. m_lowererMD.LowerCall(instr, 0);
  22155. }
  22156. void Lowerer::GenerateNullOutGeneratorFrame(IR::Instr* insertInstr)
  22157. {
  22158. // null out frame pointer on generator object to signal completion to JavascriptGenerator::CallGenerator
  22159. // s = MOV prm1
  22160. // s[offset of JavascriptGenerator::frame] = MOV nullptr
  22161. StackSym *symSrc = StackSym::NewImplicitParamSym(3, m_func);
  22162. m_func->SetArgOffset(symSrc, LowererMD::GetFormalParamOffset() * MachPtr);
  22163. IR::SymOpnd *srcOpnd = IR::SymOpnd::New(symSrc, TyMachPtr, m_func);
  22164. IR::RegOpnd *dstOpnd = IR::RegOpnd::New(TyMachReg, m_func);
  22165. InsertMove(dstOpnd, srcOpnd, insertInstr);
  22166. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(dstOpnd, Js::JavascriptGenerator::GetFrameOffset(), TyMachPtr, m_func);
  22167. IR::AddrOpnd *addrOpnd = IR::AddrOpnd::NewNull(m_func);
  22168. InsertMove(indirOpnd, addrOpnd, insertInstr);
  22169. }
  22170. void Lowerer::LowerFunctionExit(IR::Instr* funcExit)
  22171. {
  22172. if (m_func->GetJITFunctionBody()->IsCoroutine())
  22173. {
  22174. GenerateNullOutGeneratorFrame(funcExit->m_prev);
  22175. }
  22176. if (!m_func->DoSimpleJitDynamicProfile())
  22177. {
  22178. return;
  22179. }
  22180. IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
  22181. callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleCleanImplicitCallFlags, m_func));
  22182. funcExit->m_prev->InsertBefore(callInstr);
  22183. m_lowererMD.LoadHelperArgument(callInstr, CreateFunctionBodyOpnd(funcExit->m_func));
  22184. m_lowererMD.LowerCall(callInstr, 0);
  22185. }
  22186. void Lowerer::LowerFunctionEntry(IR::Instr* funcEntry)
  22187. {
  22188. Assert(funcEntry->m_opcode == Js::OpCode::FunctionEntry);
  22189. //Don't do a body call increment for loops or asm.js
  22190. if (m_func->IsLoopBody() || m_func->GetJITFunctionBody()->IsAsmJsMode())
  22191. {
  22192. return;
  22193. }
  22194. IR::Instr *const insertBeforeInstr = this->m_func->GetFunctionEntryInsertionPoint();
  22195. LowerFunctionBodyCallCountChange(insertBeforeInstr);
  22196. if (m_func->DoSimpleJitDynamicProfile())
  22197. {
  22198. // Only generate the argument profiling if the function expects to have some arguments to profile and only if
  22199. // it has implicit ArgIns (the latter is a restriction imposed by the Interpreter, so it is mirrored in SimpleJit)
  22200. if (m_func->GetJITFunctionBody()->GetInParamsCount() > 1 && m_func->GetJITFunctionBody()->HasImplicitArgIns())
  22201. {
  22202. // Call out to the argument profiling helper
  22203. IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
  22204. callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleProfileParameters, m_func));
  22205. insertBeforeInstr->InsertBefore(callInstr);
  22206. m_lowererMD.LoadHelperArgument(callInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
  22207. m_lowererMD.LowerCall(callInstr, 0);
  22208. }
  22209. // Clear existing ImplicitCallFlags
  22210. const auto starFlag = GetImplicitCallFlagsOpnd();
  22211. this->InsertMove(starFlag, CreateClearImplicitCallFlagsOpnd(), insertBeforeInstr);
  22212. }
  22213. }
// Emits the per-call body-count update at function entry.
// Full JIT: increments the call counter.  SimpleJit: decrements it with
// overflow (underflow) protection, and on reaching zero calls
// TransitionFromSimpleJit to trigger promotion to the full JIT.
// No-op when SimpleJit is active but the FullJit phase is disabled.
void Lowerer::LowerFunctionBodyCallCountChange(IR::Instr *const insertBeforeInstr)
{
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    const bool isSimpleJit = func->IsSimpleJit();
    if ((isSimpleJit && PHASE_OFF(Js::FullJitPhase, m_func)))
    {
        // Never transitioning to full JIT, so the countdown is pointless.
        return;
    }
    // mov countAddress, <countAddress>
    IR::RegOpnd *const countAddressOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, func), TyMachPtr, func);
    // NOTE: the AutoReuseOpnd guards keep these operands reusable for the
    // instructions inserted below; their scope must span all insertions.
    const IR::AutoReuseOpnd autoReuseCountAddressOpnd(countAddressOpnd, func);
    InsertMove(
        countAddressOpnd,
        IR::AddrOpnd::New((Js::Var)func->GetWorkItem()->GetCallsCountAddress(), IR::AddrOpndKindDynamicMisc, func, true),
        insertBeforeInstr);
    // [countAddress] is the 32-bit call counter itself.
    IR::IndirOpnd *const countOpnd = IR::IndirOpnd::New(countAddressOpnd, 0, TyUint32, func);
    const IR::AutoReuseOpnd autoReuseCountOpnd(countOpnd, func);
    if(!isSimpleJit)
    {
        // Full JIT: simple increment, no transition logic needed.
        InsertAdd(false, countOpnd, countOpnd, IR::IntConstOpnd::New(1, TyUint32, func), insertBeforeInstr);
        return;
    }
    // SimpleJit: count down without wrapping below zero; the helper returns
    // the insertion point for the code that runs when the count is exhausted.
    IR::Instr *onOverflowInsertBeforeInstr;
    InsertDecUInt32PreventOverflow(
        countOpnd,
        countOpnd,
        insertBeforeInstr,
        &onOverflowInsertBeforeInstr);
    // ($overflow:)
    //     TransitionFromSimpleJit(framePointer)
    m_lowererMD.LoadHelperArgument(onOverflowInsertBeforeInstr, IR::Opnd::CreateFramePointerOpnd(func));
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperTransitionFromSimpleJit, func));
    onOverflowInsertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.LowerCall(callInstr, 0);
}
  22251. IR::Opnd*
  22252. Lowerer::GetImplicitCallFlagsOpnd()
  22253. {
  22254. return GetImplicitCallFlagsOpnd(m_func);
  22255. }
  22256. IR::Opnd*
  22257. Lowerer::GetImplicitCallFlagsOpnd(Func * func)
  22258. {
  22259. return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetImplicitCallFlagsAddr(), GetImplicitCallFlagsType(), func);
  22260. }
  22261. IR::Opnd*
  22262. Lowerer::CreateClearImplicitCallFlagsOpnd()
  22263. {
  22264. return IR::IntConstOpnd::New(Js::ImplicitCall_None, GetImplicitCallFlagsType(), m_func);
  22265. }
  22266. void
  22267. Lowerer::GenerateFlagInlineCacheCheckForGetterSetter(
  22268. IR::Instr * insertBeforeInstr,
  22269. IR::RegOpnd * opndInlineCache,
  22270. IR::LabelInstr * labelNext)
  22271. {
  22272. uint accessorFlagMask;
  22273. if (PHASE_OFF(Js::InlineGettersPhase, insertBeforeInstr->m_func))
  22274. {
  22275. accessorFlagMask = Js::InlineCache::GetSetterFlagMask();
  22276. }
  22277. else if (PHASE_OFF(Js::InlineSettersPhase, insertBeforeInstr->m_func))
  22278. {
  22279. accessorFlagMask = Js::InlineCache::GetGetterFlagMask();
  22280. }
  22281. else
  22282. {
  22283. accessorFlagMask = Js::InlineCache::GetGetterSetterFlagMask();
  22284. }
  22285. // Generate:
  22286. //
  22287. // TEST [&(inlineCache->u.accessor.flags)], Js::InlineCacheGetterFlag | Js::InlineCacheSetterFlag
  22288. // JEQ $next
  22289. IR::Opnd * flagsOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.rawUInt16), TyInt8, insertBeforeInstr->m_func);
  22290. IR::Opnd * accessorOpnd = IR::IntConstOpnd::New(accessorFlagMask, TyInt8, this->m_func);
  22291. InsertTestBranch(flagsOpnd, accessorOpnd, Js::OpCode::BrEq_A, labelNext, insertBeforeInstr);
  22292. }
  22293. IR::BranchInstr *
  22294. Lowerer::GenerateLocalInlineCacheCheck(
  22295. IR::Instr * instrLdSt,
  22296. IR::RegOpnd * opndType,
  22297. IR::RegOpnd * inlineCache,
  22298. IR::LabelInstr * labelNext,
  22299. bool checkTypeWithoutProperty)
  22300. {
  22301. // Generate:
  22302. //
  22303. // CMP s1, [&(inlineCache->u.local.type/typeWithoutProperty)]
  22304. // JNE $next
  22305. IR::Opnd* typeOpnd;
  22306. if (checkTypeWithoutProperty)
  22307. {
  22308. typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.typeWithoutProperty), TyMachReg, instrLdSt->m_func);
  22309. }
  22310. else
  22311. {
  22312. typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.local.type), TyMachReg, instrLdSt->m_func);
  22313. }
  22314. InsertCompare(opndType, typeOpnd, instrLdSt);
  22315. return InsertBranch(Js::OpCode::BrNeq_A, labelNext, instrLdSt);
  22316. }
  22317. IR::BranchInstr *
  22318. Lowerer::GenerateProtoInlineCacheCheck(
  22319. IR::Instr * instrLdSt,
  22320. IR::RegOpnd * opndType,
  22321. IR::RegOpnd * inlineCache,
  22322. IR::LabelInstr * labelNext)
  22323. {
  22324. // Generate:
  22325. //
  22326. // CMP s1, [&(inlineCache->u.proto.type)]
  22327. // JNE $next
  22328. IR::Opnd* typeOpnd = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.type), TyMachReg, instrLdSt->m_func);
  22329. InsertCompare(opndType, typeOpnd, instrLdSt);
  22330. return InsertBranch(Js::OpCode::BrNeq_A, labelNext, instrLdSt);
  22331. }
  22332. void
  22333. Lowerer::GenerateFlagInlineCacheCheck(
  22334. IR::Instr * instrLdSt,
  22335. IR::RegOpnd * opndType,
  22336. IR::RegOpnd * opndInlineCache,
  22337. IR::LabelInstr * labelNext)
  22338. {
  22339. // Generate:
  22340. //
  22341. // CMP s1, [&(inlineCache->u.accessor.type)]
  22342. // JNE $next
  22343. IR::Opnd* typeOpnd = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.type), TyMachReg, instrLdSt->m_func);
  22344. // CMP s1, [&(inlineCache->u.flag.type)]
  22345. InsertCompareBranch(opndType, typeOpnd, Js::OpCode::BrNeq_A, labelNext, instrLdSt);
  22346. }
  22347. void
  22348. Lowerer::GenerateLdFldFromLocalInlineCache(
  22349. IR::Instr * instrLdFld,
  22350. IR::RegOpnd * opndBase,
  22351. IR::Opnd * opndDst,
  22352. IR::RegOpnd * opndInlineCache,
  22353. IR::LabelInstr * labelFallThru,
  22354. bool isInlineSlot)
  22355. {
  22356. // Generate:
  22357. //
  22358. // s1 = MOV base->slots -- load the slot array
  22359. // s2 = MOVZXw [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
  22360. // dst = MOV [s1 + s2 * Scale] -- load the value directly from the slot
  22361. // JMP $fallthru
  22362. IR::IndirOpnd * opndIndir = nullptr;
  22363. IR::RegOpnd * opndSlotArray = nullptr;
  22364. if (!isInlineSlot)
  22365. {
  22366. opndSlotArray = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  22367. opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
  22368. InsertMove(opndSlotArray, opndIndir, instrLdFld);
  22369. }
  22370. // s2 = MOVZXw [&(inlineCache->u.local.slotIndex)] -- load the cached slot index
  22371. IR::RegOpnd * opndReg2 = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  22372. opndIndir = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.slotIndex), TyUint16, instrLdFld->m_func);
  22373. InsertMove(opndReg2, opndIndir, instrLdFld);
  22374. if (isInlineSlot)
  22375. {
  22376. // dst = MOV [base + s2 * Scale] -- load the value directly from the slot
  22377. opndIndir = IR::IndirOpnd::New(opndBase, opndReg2, LowererMD::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
  22378. InsertMove(opndDst, opndIndir, instrLdFld);
  22379. }
  22380. else
  22381. {
  22382. // dst = MOV [s1 + s2 * Scale] -- load the value directly from the slot
  22383. opndIndir = IR::IndirOpnd::New(opndSlotArray, opndReg2, LowererMD::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
  22384. InsertMove(opndDst, opndIndir, instrLdFld);
  22385. }
  22386. // JMP $fallthru
  22387. InsertBranch(Js::OpCode::Br, labelFallThru, instrLdFld);
  22388. }
  22389. void
  22390. Lowerer::GenerateLdFldFromProtoInlineCache(
  22391. IR::Instr * instrLdFld,
  22392. IR::RegOpnd * opndBase,
  22393. IR::Opnd * opndDst,
  22394. IR::RegOpnd * inlineCache,
  22395. IR::LabelInstr * labelFallThru,
  22396. bool isInlineSlot)
  22397. {
  22398. // Generate:
  22399. //
  22400. // s1 = MOV [&(inlineCache->u.proto.prototypeObject)] -- load the cached prototype object
  22401. // s1 = MOV [&s1->slots] -- load the slot array
  22402. // s2 = MOVZXW [&(inlineCache->u.proto.slotIndex)] -- load the cached slot index
  22403. // dst = MOV [s1 + s2*4]
  22404. // JMP $fallthru
  22405. IR::IndirOpnd * opndIndir = nullptr;
  22406. IR::RegOpnd * opndProtoSlots = nullptr;
  22407. // s1 = MOV [&(inlineCache->u.proto.prototypeObject)] -- load the cached prototype object
  22408. IR::RegOpnd * opndProto = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  22409. opndIndir = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.prototypeObject), TyMachReg, instrLdFld->m_func);
  22410. InsertMove(opndProto, opndIndir, instrLdFld);
  22411. if (!isInlineSlot)
  22412. {
  22413. // s1 = MOV [&s1->slots] -- load the slot array
  22414. opndProtoSlots = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  22415. opndIndir = IR::IndirOpnd::New(opndProto, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, instrLdFld->m_func);
  22416. InsertMove(opndProtoSlots, opndIndir, instrLdFld);
  22417. }
  22418. // s2 = MOVZXW [&(inlineCache->u.proto.slotIndex)] -- load the cached slot index
  22419. IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, instrLdFld->m_func);
  22420. opndIndir = IR::IndirOpnd::New(inlineCache, (int32)offsetof(Js::InlineCache, u.proto.slotIndex), TyUint16, instrLdFld->m_func);
  22421. InsertMove(opndSlotIndex, opndIndir, instrLdFld);
  22422. if (isInlineSlot)
  22423. {
  22424. // dst = MOV [s1 + s2*4]
  22425. opndIndir = IR::IndirOpnd::New(opndProto, opndSlotIndex, LowererMD::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
  22426. InsertMove(opndDst, opndIndir, instrLdFld);
  22427. }
  22428. else
  22429. {
  22430. // dst = MOV [s1 + s2*4]
  22431. opndIndir = IR::IndirOpnd::New(opndProtoSlots, opndSlotIndex, LowererMD::GetDefaultIndirScale(), TyMachReg, instrLdFld->m_func);
  22432. InsertMove(opndDst, opndIndir, instrLdFld);
  22433. }
  22434. // JMP $fallthru
  22435. InsertBranch(Js::OpCode::Br, labelFallThru, instrLdFld);
  22436. }
  22437. void
  22438. Lowerer::GenerateLdFldFromFlagInlineCache(
  22439. IR::Instr * insertBeforeInstr,
  22440. IR::RegOpnd * opndBase,
  22441. IR::Opnd * opndDst,
  22442. IR::RegOpnd * opndInlineCache,
  22443. IR::LabelInstr * labelFallThru,
  22444. bool isInlineSlot)
  22445. {
  22446. // Generate:
  22447. //
  22448. // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
  22449. // s1 = MOV [&s1->slots] -- load the slot array
  22450. // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
  22451. // dst = MOV [s1 + s2 * 4]
  22452. // JMP $fallthru
  22453. IR::IndirOpnd * opndIndir = nullptr;
  22454. IR::RegOpnd * opndObjSlots = nullptr;
  22455. // s1 = MOV [&(inlineCache->u.accessor.object)] -- load the cached prototype object
  22456. IR::RegOpnd * opndObject = IR::RegOpnd::New(TyMachReg, this->m_func);
  22457. opndIndir = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.object), TyMachReg, this->m_func);
  22458. InsertMove(opndObject, opndIndir, insertBeforeInstr);
  22459. if (!isInlineSlot)
  22460. {
  22461. // s1 = MOV [&s1->slots] -- load the slot array
  22462. opndObjSlots = IR::RegOpnd::New(TyMachReg, this->m_func);
  22463. opndIndir = IR::IndirOpnd::New(opndObject, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
  22464. InsertMove(opndObjSlots, opndIndir, insertBeforeInstr);
  22465. }
  22466. // s2 = MOVZXW [&(inlineCache->u.accessor.slotIndex)] -- load the cached slot index
  22467. IR::RegOpnd * opndSlotIndex = IR::RegOpnd::New(TyMachReg, this->m_func);
  22468. opndIndir = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.accessor.slotIndex), TyUint16, this->m_func);
  22469. InsertMove(opndSlotIndex, opndIndir, insertBeforeInstr);
  22470. if (isInlineSlot)
  22471. {
  22472. // dst = MOV [s1 + s2 * 4]
  22473. opndIndir = IR::IndirOpnd::New(opndObject, opndSlotIndex, this->m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
  22474. InsertMove(opndDst, opndIndir, insertBeforeInstr);
  22475. }
  22476. else
  22477. {
  22478. // dst = MOV [s1 + s2 * 4]
  22479. opndIndir = IR::IndirOpnd::New(opndObjSlots, opndSlotIndex, this->m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
  22480. InsertMove(opndDst, opndIndir, insertBeforeInstr);
  22481. }
  22482. // JMP $fallthru
  22483. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  22484. }
  22485. void
  22486. Lowerer::LowerSpreadArrayLiteral(IR::Instr *instr)
  22487. {
  22488. LoadScriptContext(instr);
  22489. IR::Opnd *src2Opnd = instr->UnlinkSrc2();
  22490. m_lowererMD.LoadHelperArgument(instr, src2Opnd);
  22491. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  22492. m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  22493. this->m_lowererMD.ChangeToHelperCall(instr, IR::HelperSpreadArrayLiteral);
  22494. }
// Lowers a call that has spread arguments into a call to the spread helper
// (or, when setupProfiledVersion is set, the profiled new-object-array-spread
// helper).  Detaches the LdSpreadIndices pseudo-instruction from the argument
// chain, lowers the regular args, then pushes the helper-specific extra args.
// Returns the lowered call instruction.
IR::Instr *
Lowerer::LowerSpreadCall(IR::Instr *instr, Js::CallFlags callFlags, bool setupProfiledVersion)
{
    // Get the target function object, and emit function object test.
    IR::RegOpnd * functionObjOpnd = instr->UnlinkSrc1()->AsRegOpnd();
    functionObjOpnd->m_isCallArg = true;
    if (!(callFlags & Js::CallFlags_New) && !setupProfiledVersion)
    {
        IR::LabelInstr* continueAfterExLabel = InsertContinueAfterExceptionLabelForDebugger(m_func, instr, false);
        this->m_lowererMD.GenerateFunctionObjectTest(instr, functionObjOpnd, false, continueAfterExLabel);
    }
    IR::Instr *spreadIndicesInstr;
    spreadIndicesInstr = GetLdSpreadIndicesInstr(instr);
    Assert(spreadIndicesInstr->m_opcode == Js::OpCode::LdSpreadIndices);
    // Get AuxArray
    IR::Opnd *spreadIndicesOpnd = spreadIndicesInstr->UnlinkSrc1();
    // Remove LdSpreadIndices from the argument chain
    instr->ReplaceSrc2(spreadIndicesInstr->UnlinkSrc2());
    // Emit the normal args
    if (!(callFlags & Js::CallFlags_New))
    {
        // Record whether the call's result is observed so the callee can skip
        // producing a return value when it isn't.
        callFlags = (Js::CallFlags)(callFlags | (instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed));
    }
    // Profiled helper call requires three more parameters, ArrayProfileId, profileId, and the frame pointer.
    // This is just following the convention of HelperProfiledNewScObjArray call.
    const unsigned short extraArgsCount = setupProfiledVersion ? 5 : 2; // function object and AuxArray
    int32 argCount = this->m_lowererMD.LowerCallArgs(instr, (ushort)callFlags, extraArgsCount);
    // Emit our extra (first) args for the Spread helper in reverse order
    if (setupProfiledVersion)
    {
        IR::JitProfilingInstr* jitInstr = (IR::JitProfilingInstr*)instr;
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(jitInstr->arrayProfileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(jitInstr->profileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
    }
    m_lowererMD.LoadHelperArgument(instr, functionObjOpnd);
    m_lowererMD.LoadHelperArgument(instr, spreadIndicesOpnd);
    // Change the call target to our helper
    IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(setupProfiledVersion ? IR::HelperProfiledNewScObjArraySpread : IR::HelperSpreadCall, this->m_func);
    instr->SetSrc1(helperOpnd);
    return this->m_lowererMD.LowerCall(instr, (Js::ArgSlot)argCount);
}
// Lowers an asm.js/wasm integer div/rem by wrapping the machine divide with
// the guards the spec requires: a divide-by-zero check (asm.js produces 0;
// wasm traps earlier via TrapIfZero, so the check is skipped there) and, for
// signed operations, an INT_MIN / -1 check (div yields INT_MIN, rem yields 0,
// instead of faulting in the hardware IDIV).
void
Lowerer::LowerDivI4Common(IR::Instr * instr)
{
    Assert(instr);
    Assert((instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::Div_I4) ||
        (instr->m_opcode == Js::OpCode::RemU_I4 || instr->m_opcode == Js::OpCode::DivU_I4));
    Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
    const bool isRem = instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::RemU_I4;
    // MIN_INT/-1 path is only needed for signed operations
    //
    //       TEST src2, src2
    //       JEQ $div0
    //       CMP src1, MIN_INT
    //       JEQ $minInt
    //       JMP $div
    // $div0: [helper]
    //       MOV dst, 0
    //       JMP $done
    // $minInt: [helper]
    //       CMP src2, -1
    //       JNE $div
    //       dst = MOV src1 / 0
    //       JMP $done
    // $div:
    //       dst = IDIV src2, src1
    // $done:
    IR::LabelInstr * div0Label = InsertLabel(true, instr);
    IR::LabelInstr * divLabel = InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();
    bool isWasm = m_func->GetJITFunctionBody()->IsWasmFunction();
    // Wasm div-by-zero traps are emitted separately; only rem reaches here.
    Assert(!isWasm || isRem);
    if (!isWasm)
    {
        // asm.js semantics: x / 0 == 0 and x % 0 == 0.
        InsertTestBranch(src2, src2, Js::OpCode::BrEq_A, div0Label, div0Label);
        InsertMove(dst, IR::IntConstOpnd::NewFromType(0, dst->GetType(), m_func), divLabel);
        InsertBranch(Js::OpCode::Br, doneLabel, divLabel);
    }
    if (instr->GetSrc1()->IsSigned())
    {
        IR::LabelInstr * minIntLabel = nullptr;
        // we need to check for INT_MIN/-1 if divisor is either -1 or variable, and dividend is either INT_MIN or variable
        int64 intMin = IRType_IsInt64(src1->GetType()) ? LONGLONG_MIN : INT_MIN;
        bool needsMinOverNeg1Check = !(src2->IsImmediateOpnd() && src2->GetImmediateValue(m_func) != -1);
        if (src1->IsImmediateOpnd())
        {
            if (needsMinOverNeg1Check && src1->GetImmediateValue(m_func) == intMin)
            {
                // Dividend is statically INT_MIN: go straight to the -1 check.
                minIntLabel = InsertLabel(true, divLabel);
                InsertBranch(Js::OpCode::Br, minIntLabel, div0Label);
            }
            else
            {
                // Dividend can never be INT_MIN, so no overflow case exists.
                needsMinOverNeg1Check = false;
            }
        }
        else if(needsMinOverNeg1Check)
        {
            // Dividend is dynamic: compare it against INT_MIN at runtime.
            minIntLabel = InsertLabel(true, divLabel);
            InsertCompareBranch(src1, IR::IntConstOpnd::NewFromType(intMin, src1->GetType(), m_func), Js::OpCode::BrEq_A, minIntLabel, div0Label);
        }
        if (needsMinOverNeg1Check)
        {
            Assert(minIntLabel);
            Assert(!src2->IsImmediateOpnd() || src2->GetImmediateValue(m_func) == -1);
            if (!src2->IsImmediateOpnd())
            {
                InsertCompareBranch(src2, IR::IntConstOpnd::NewFromType(-1, src2->GetType(), m_func), Js::OpCode::BrNeq_A, divLabel, divLabel);
            }
            // INT_MIN / -1 => INT_MIN (src1); INT_MIN % -1 => 0.
            InsertMove(dst, !isRem ? src1 : IR::IntConstOpnd::NewFromType(0, dst->GetType(), m_func), divLabel);
            InsertBranch(Js::OpCode::Br, doneLabel, divLabel);
        }
    }
    InsertBranch(Js::OpCode::Br, divLabel, div0Label);
    // Emit the actual machine divide between $div and $done.
    m_lowererMD.EmitInt4Instr(instr);
}
  22614. void
  22615. Lowerer::LowerRemI4(IR::Instr * instr)
  22616. {
  22617. Assert(instr);
  22618. Assert(instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::RemU_I4);
  22619. //Generate fast path for const divisors
  22620. if (m_lowererMD.GenerateFastDivAndRem(instr))
  22621. {
  22622. return;
  22623. }
  22624. if (m_func->GetJITFunctionBody()->IsAsmJsMode())
  22625. {
  22626. LowerDivI4Common(instr);
  22627. }
  22628. else
  22629. {
  22630. m_lowererMD.EmitInt4Instr(instr);
  22631. }
  22632. }
  22633. void
  22634. Lowerer::LowerTrapIfZero(IR::Instr * const instr)
  22635. {
  22636. Assert(instr);
  22637. Assert(instr->m_opcode == Js::OpCode::TrapIfZero);
  22638. Assert(instr->GetSrc1());
  22639. Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
  22640. IR::Opnd * src1 = instr->GetSrc1();
  22641. if (src1->IsImmediateOpnd())
  22642. {
  22643. if (src1->GetImmediateValue(m_func) == 0)
  22644. {
  22645. GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_DivideByZero), TyInt32, m_func), instr);
  22646. }
  22647. }
  22648. else
  22649. {
  22650. IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
  22651. InsertCompareBranch(src1, IR::IntConstOpnd::NewFromType(0, src1->GetType(), m_func), Js::OpCode::BrNeq_A, doneLabel, doneLabel);
  22652. InsertLabel(true, doneLabel);
  22653. GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_DivideByZero), TyInt32, m_func), doneLabel);
  22654. }
  22655. LowererMD::ChangeToAssign(instr);
  22656. }
  22657. IR::Instr*
  22658. Lowerer::LowerTrapIfUnalignedAccess(IR::Instr * const instr)
  22659. {
  22660. IR::Opnd* dst = instr->UnlinkDst();
  22661. IR::Opnd* src1 = instr->UnlinkSrc1();
  22662. IR::Opnd* src2 = instr->GetSrc2();
  22663. Assert(instr);
  22664. Assert(instr->m_opcode == Js::OpCode::TrapIfUnalignedAccess);
  22665. Assert(src1 && !src1->IsVar());
  22666. Assert(src2 && src2->IsImmediateOpnd());
  22667. Assert(src2->GetSize() > 1);
  22668. uint32 mask = src2->GetSize() - 1;
  22669. uint32 cmpValue = (uint32)src2->GetImmediateValue(m_func);
  22670. InsertMove(dst, src1, instr);
  22671. IR::IntConstOpnd* maskOpnd = IR::IntConstOpnd::New(mask, src1->GetType(), m_func);
  22672. IR::RegOpnd* maskedOpnd = IR::RegOpnd::New(src1->GetType(), m_func);
  22673. IR::Instr* maskInstr = IR::Instr::New(Js::OpCode::And_I4, maskedOpnd, src1, maskOpnd, m_func);
  22674. instr->InsertBefore(maskInstr);
  22675. IR::IntConstOpnd* cmpOpnd = IR::IntConstOpnd::New(cmpValue, maskedOpnd->GetType(), m_func, true);
  22676. IR::LabelInstr* alignedLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  22677. IR::Instr* branch = IR::BranchInstr::New(Js::OpCode::BrEq_I4, alignedLabel, maskedOpnd, cmpOpnd, m_func);
  22678. instr->InsertBefore(branch);
  22679. InsertLabel(true, instr);
  22680. GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(WASMERR_UnalignedAtomicAccess), TyInt32, m_func), instr);
  22681. instr->InsertBefore(alignedLabel);
  22682. instr->Remove();
  22683. // The check and branch are not fully lowered yet, let them go in the lower loop.
  22684. return branch;
  22685. }
void
Lowerer::LowerTrapIfMinIntOverNegOne(IR::Instr * const instr)
{
    // Wasm signed division overflows only for MIN_INT / -1. Emit checks that
    // skip the throw unless src1 == MIN_INT and src2 == -1; an immediate
    // operand lets the corresponding check be resolved at JIT time.
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::TrapIfMinIntOverNegOne);
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc2());
    Assert(m_func->GetJITFunctionBody()->IsWasmFunction());
    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->UnlinkSrc2();
    // Width-appropriate minimum for the dividend's type.
    int64 intMin = src1->IsInt64() ? LONGLONG_MIN : INT_MIN;
    if (src1->IsImmediateOpnd())
    {
        if (src1->GetImmediateValue(m_func) != intMin)
        {
            // Const value not min int, will not trap
            doneLabel->Remove();
            src2->Free(m_func);
            LowererMD::ChangeToAssign(instr);
            return;
        }
        // Is min int, no need to emit the runtime check
    }
    else
    {
        InsertCompareBranch(src1, IR::IntConstOpnd::NewFromType(intMin, src1->GetType(), m_func), Js::OpCode::BrNeq_A, doneLabel, doneLabel);
    }
    if (src2->IsImmediateOpnd())
    {
        if (src2->GetImmediateValue(m_func) != -1)
        {
            // Const value not -1, will not trap
            doneLabel->Remove();
            src2->Free(m_func);
            LowererMD::ChangeToAssign(instr);
            return;
        }
        // Is -1, no need to emit the runtime check
        src2->Free(m_func);
    }
    else
    {
        InsertCompareBranch(src2, IR::IntConstOpnd::NewFromType(-1, src2->GetType(), m_func), Js::OpCode::BrNeq_A, doneLabel, doneLabel);
    }
    // Neither check ruled the trap out: raise the overflow error, then the
    // trap opcode itself becomes a plain assignment.
    InsertLabel(true, doneLabel);
    GenerateThrow(IR::IntConstOpnd::NewFromType(SCODE_CODE(VBSERR_Overflow), TyInt32, m_func), doneLabel);
    LowererMD::ChangeToAssign(instr);
}
  22735. void
  22736. Lowerer::GenerateThrow(IR::Opnd* errorCode, IR::Instr * instr)
  22737. {
  22738. IR::Instr *throwInstr = IR::Instr::New(Js::OpCode::RuntimeTypeError, IR::RegOpnd::New(TyMachReg, m_func), errorCode, m_func);
  22739. instr->InsertBefore(throwInstr);
  22740. const bool isWasm = m_func->GetJITFunctionBody() && m_func->GetJITFunctionBody()->IsWasmFunction();
  22741. LowerUnaryHelperMem(throwInstr, isWasm ? IR::HelperOp_WebAssemblyRuntimeError : IR::HelperOp_RuntimeTypeError);
  22742. }
  22743. void
  22744. Lowerer::LowerDivI4(IR::Instr * instr)
  22745. {
  22746. Assert(instr);
  22747. Assert(instr->m_opcode == Js::OpCode::Div_I4 || instr->m_opcode == Js::OpCode::DivU_I4);
  22748. #ifdef _M_IX86
  22749. if (
  22750. instr->GetDst() && instr->GetDst()->IsInt64() ||
  22751. instr->GetSrc1() && instr->GetSrc1()->IsInt64() ||
  22752. instr->GetSrc2() && instr->GetSrc2()->IsInt64()
  22753. )
  22754. {
  22755. m_lowererMD.EmitInt64Instr(instr);
  22756. return;
  22757. }
  22758. #endif
  22759. Assert(instr->GetSrc2());
  22760. if (m_func->GetJITFunctionBody()->IsWasmFunction())
  22761. {
  22762. if (!m_lowererMD.GenerateFastDivAndRem(instr))
  22763. {
  22764. m_lowererMD.EmitInt4Instr(instr);
  22765. }
  22766. return;
  22767. }
  22768. if (m_func->GetJITFunctionBody()->IsAsmJsMode())
  22769. {
  22770. if (!m_lowererMD.GenerateFastDivAndRem(instr))
  22771. {
  22772. LowerDivI4Common(instr);
  22773. }
  22774. return;
  22775. }
  22776. if(!instr->HasBailOutInfo())
  22777. {
  22778. if (!m_lowererMD.GenerateFastDivAndRem(instr))
  22779. {
  22780. m_lowererMD.EmitInt4Instr(instr);
  22781. }
  22782. return;
  22783. }
  22784. Assert(!(instr->GetBailOutKind() & ~(IR::BailOnDivResultNotInt | IR::BailOutOnNegativeZero | IR::BailOutOnDivByZero | IR::BailOutOnDivOfMinInt)));
  22785. IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  22786. // Split out and generate the bailout instruction
  22787. const auto nonBailOutInstr = IR::Instr::New(instr->m_opcode, instr->m_func);
  22788. instr->TransferTo(nonBailOutInstr);
  22789. instr->InsertBefore(nonBailOutInstr);
  22790. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
  22791. instr->InsertAfter(doneLabel);
  22792. // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
  22793. // ordering instructions anymore.
  22794. IR::LabelInstr * bailOutLabel = GenerateBailOut(instr);
  22795. IR::Opnd * denominatorOpnd = nonBailOutInstr->GetSrc2();
  22796. IR::Opnd * nominatorOpnd = nonBailOutInstr->GetSrc1();
  22797. bool isFastDiv = false;
  22798. if (bailOutKind & IR::BailOutOnDivOfMinInt)
  22799. {
  22800. // Bailout if numerator is MIN_INT (could also check for denominator being -1
  22801. // before bailing out, but does not seem worth the extra code..)
  22802. InsertCompareBranch(nominatorOpnd, IR::IntConstOpnd::New(INT32_MIN, TyInt32, this->m_func, true), Js::OpCode::BrEq_A, bailOutLabel, nonBailOutInstr);
  22803. }
  22804. if (denominatorOpnd->IsIntConstOpnd() && Math::IsPow2(denominatorOpnd->AsIntConstOpnd()->AsInt32()))
  22805. {
  22806. Assert((bailOutKind & (IR::BailOutOnNegativeZero | IR::BailOutOnDivByZero)) == 0);
  22807. if (Math::IsPow2(denominatorOpnd->AsIntConstOpnd()->AsInt32()))
  22808. {
  22809. int pow2 = denominatorOpnd->AsIntConstOpnd()->AsInt32();
  22810. InsertTestBranch(nominatorOpnd, IR::IntConstOpnd::New(pow2 - 1, TyInt32, this->m_func),
  22811. Js::OpCode::BrNeq_A, bailOutLabel, nonBailOutInstr);
  22812. nonBailOutInstr->m_opcode = Js::OpCode::Shr_A;
  22813. nonBailOutInstr->ReplaceSrc2(IR::IntConstOpnd::New(Math::Log2(pow2), TyInt32, this->m_func));
  22814. LowererMD::ChangeToShift(nonBailOutInstr, false);
  22815. LowererMD::Legalize(nonBailOutInstr);
  22816. isFastDiv = true;
  22817. }
  22818. else
  22819. {
  22820. isFastDiv = m_lowererMD.GenerateFastDivAndRem(nonBailOutInstr, bailOutLabel);
  22821. }
  22822. }
  22823. if (!isFastDiv)
  22824. {
  22825. if (bailOutKind & IR::BailOutOnDivByZero)
  22826. {
  22827. // Bailout if denominator is 0
  22828. InsertTestBranch(denominatorOpnd, denominatorOpnd, Js::OpCode::BrEq_A, bailOutLabel, nonBailOutInstr);
  22829. }
  22830. // Lower the div and bailout if there is a reminder (machine specific)
  22831. IR::Instr * insertBeforeInstr = m_lowererMD.LowerDivI4AndBailOnReminder(nonBailOutInstr, bailOutLabel);
  22832. IR::Opnd * resultOpnd = nonBailOutInstr->GetDst();
  22833. if (bailOutKind & IR::BailOutOnNegativeZero)
  22834. {
  22835. // TEST result, result
  22836. // JNE skipNegDenominatorCheckLabel // Result not 0
  22837. // TEST denominator, denominator
  22838. // JNSB/BMI bailout // bail if negative
  22839. // skipNegDenominatorCheckLabel:
  22840. IR::LabelInstr * skipNegDenominatorCheckLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  22841. // Skip negative denominator check if the result is not 0
  22842. InsertTestBranch(resultOpnd, resultOpnd, Js::OpCode::BrNeq_A, skipNegDenominatorCheckLabel, insertBeforeInstr);
  22843. IR::LabelInstr * negDenominatorCheckLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  22844. insertBeforeInstr->InsertBefore(negDenominatorCheckLabel);
  22845. // Jump to done if the denominator is not negative
  22846. InsertTestBranch(denominatorOpnd, denominatorOpnd,
  22847. LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), bailOutLabel, insertBeforeInstr);
  22848. insertBeforeInstr->InsertBefore(skipNegDenominatorCheckLabel);
  22849. }
  22850. }
  22851. // We are all fine, jump around the bailout to done
  22852. InsertBranch(Js::OpCode::Br, doneLabel, bailOutLabel);
  22853. }
  22854. void
  22855. Lowerer::LowerRemR8(IR::Instr * instr)
  22856. {
  22857. Assert(instr);
  22858. Assert(instr->m_opcode == Js::OpCode::Rem_A);
  22859. Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
  22860. m_lowererMD.LoadDoubleHelperArgument(instr, instr->UnlinkSrc2());
  22861. m_lowererMD.LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
  22862. instr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperOp_Rem_Double, m_func));
  22863. m_lowererMD.LowerCall(instr, 0);
  22864. }
void
Lowerer::LowerNewScopeSlots(IR::Instr * instr, bool doStackSlots)
{
    // Allocates a scope-slot array of `count` Vars (src1 is the constant count,
    // including the header slots) and fills slots [FirstSlotIndex, count) with
    // undefined. When doStackSlots is true the storage was already allocated on
    // the stack (see the stack-frame-display lowering), so the recycler
    // allocation and the local-closure publish are skipped.
    Func * func = m_func;
    if (PHASE_OFF(Js::NewScopeSlotFastPathPhase, func))
    {
        // Fast path disabled: defer entirely to the runtime helper.
        this->LowerUnaryHelperMemWithFunctionInfo(instr, IR::HelperOP_NewScopeSlots);
        return;
    }
    uint const count = instr->GetSrc1()->AsIntConstOpnd()->AsUint32();
    uint const allocSize = count * sizeof(Js::Var);
    // Number of real (non-header) slots that need the undefined fill.
    uint const actualSlotCount = count - Js::ScopeSlots::FirstSlotIndex;
    IR::RegOpnd * dst = instr->UnlinkDst()->AsRegOpnd();
    // dst = RecyclerAlloc(allocSize)
    // dst[EncodedSlotCountSlotIndex] = min(actualSlotCount, MaxEncodedSlotCount);
    // dst[ScopeMetadataSlotIndex] = FunctionBody;
    // mov undefinedOpnd, undefined
    // dst[FirstSlotIndex..count] = undefinedOpnd;
    // Note: stack allocation of both scope slots and frame display are done together
    // in lowering of NewStackFrameDisplay
    if (!doStackSlots)
    {
        GenerateRecyclerAlloc(IR::HelperAllocMemForVarArray, allocSize, dst, instr);
    }
    m_lowererMD.GenerateMemInit(dst, Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
        (size_t)min<uint>(actualSlotCount, Js::ScopeSlots::MaxEncodedSlotCount), instr, !doStackSlots);
    IR::Opnd * functionInfoOpnd = this->LoadFunctionInfoOpnd(instr);
    GenerateMemInit(dst, Js::ScopeSlots::ScopeMetadataSlotIndex * sizeof(Js::Var),
        functionInfoOpnd, instr, !doStackSlots);
    IR::Opnd * undefinedOpnd = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
    const IR::AutoReuseOpnd autoReuseUndefinedOpnd(undefinedOpnd, func);
    // avoid using a register for the undefined pointer if we are going to assign 1 or 2
    if (actualSlotCount > 2)
    {
        undefinedOpnd = GetRegOpnd(undefinedOpnd, instr, func, TyVar);
    }
    int const loopUnrollCount = 8;
    if (actualSlotCount <= loopUnrollCount * 2)
    {
        // Just generate all the assignment in straight line code
        //  mov [dst + Js::FirstSlotIndex], undefinedOpnd
        //  ...
        //  mov [dst + count - 1], undefinedOpnd
        for (unsigned int i = Js::ScopeSlots::FirstSlotIndex; i < count; i++)
        {
            GenerateMemInit(dst, sizeof(Js::Var) * i, undefinedOpnd, instr, !doStackSlots);
        }
    }
    else
    {
        // Just generate all the assignment in loop of loopUnrollCount and the rest as straight line code
        //
        //      lea currOpnd, [dst + sizeof(Var) * (loopAssignCount + Js::ScopeSlots::FirstSlotIndex - loopUnrollCount)];
        //      mov [currOpnd + loopUnrollCount + leftOverAssignCount - 1] , undefinedOpnd
        //      mov [currOpnd + loopUnrollCount + leftOverAssignCount - 2] , undefinedOpnd
        //      ...
        //      mov [currOpnd + loopUnrollCount], undefinedOpnd
        // $LoopTop:
        //      mov [currOpnd + loopUnrollCount - 1], undefinedOpnd
        //      mov [currOpnd + loopUnrollCount - 2], undefinedOpnd
        //      ...
        //      mov [currOpnd], undefinedOpnd
        //      lea currOpnd, [currOpnd - loopUnrollCount]
        //      cmp dst, currOpnd
        //      jlt $Looptop
        uint nLoop = actualSlotCount / loopUnrollCount;
        uint loopAssignCount = nLoop * loopUnrollCount;
        uint leftOverAssignCount = actualSlotCount - loopAssignCount; // The left over assignments
        IR::RegOpnd * currOpnd = IR::RegOpnd::New(TyMachPtr, func);
        const IR::AutoReuseOpnd autoReuseCurrOpnd(currOpnd, m_func);
        InsertLea(
            currOpnd,
            IR::IndirOpnd::New(
                dst,
                sizeof(Js::Var) * (loopAssignCount + Js::ScopeSlots::FirstSlotIndex - loopUnrollCount),
                TyMachPtr,
                func),
            instr);
        // Peel the leftover (count % unroll) assignments off as straight-line
        // stores at the highest offsets.
        for (unsigned int i = 0; i < leftOverAssignCount; i++)
        {
            GenerateMemInit(currOpnd, sizeof(Js::Var) * (loopUnrollCount + leftOverAssignCount - i - 1), undefinedOpnd, instr, !doStackSlots);
        }
        IR::LabelInstr * loopTop = InsertLoopTopLabel(instr);
        Loop * loop = loopTop->GetLoop();
        // Unrolled loop body: loopUnrollCount stores per iteration, walking down.
        for (unsigned int i = 0; i < loopUnrollCount; i++)
        {
            GenerateMemInit(currOpnd, sizeof(Js::Var) * (loopUnrollCount - i - 1), undefinedOpnd, instr, !doStackSlots);
        }
        InsertLea(currOpnd, IR::IndirOpnd::New(currOpnd, -((int)sizeof(Js::Var) * loopUnrollCount), TyMachPtr, func), instr);
        InsertCompareBranch(dst, currOpnd, Js::OpCode::BrLt_A, true, loopTop, instr);
        // These regs flow around the back edge; keep them live for the register allocator.
        loop->regAlloc.liveOnBackEdgeSyms->Set(currOpnd->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(dst->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(undefinedOpnd->AsRegOpnd()->m_sym->m_id);
    }
    if (!doStackSlots)
    {
        // Publish the new slot array as the function's local closure.
        InsertMove(IR::RegOpnd::New(instr->m_func->GetLocalClosureSym(), TyMachPtr, func), dst, instr);
    }
    instr->Remove();
}
  22965. void Lowerer::LowerLdInnerFrameDisplay(IR::Instr *instr)
  22966. {
  22967. bool isStrict = instr->m_func->GetJITFunctionBody()->IsStrictMode();
  22968. if (isStrict)
  22969. {
  22970. if (instr->GetSrc2())
  22971. {
  22972. this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdStrictInnerFrameDisplay);
  22973. }
  22974. else
  22975. {
  22976. #if DBG
  22977. instr->m_opcode = Js::OpCode::LdInnerFrameDisplayNoParent;
  22978. #endif
  22979. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdStrictInnerFrameDisplayNoParent);
  22980. }
  22981. }
  22982. else
  22983. {
  22984. if (instr->GetSrc2())
  22985. {
  22986. this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdInnerFrameDisplay);
  22987. }
  22988. else
  22989. {
  22990. #if DBG
  22991. instr->m_opcode = Js::OpCode::LdInnerFrameDisplayNoParent;
  22992. #endif
  22993. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdInnerFrameDisplayNoParent);
  22994. }
  22995. }
  22996. }
void Lowerer::LowerLdFrameDisplay(IR::Instr *instr, bool doStackFrameDisplay)
{
    // Builds a new frame display of length envDepth + 1: the parent
    // environment's scopes (src2) copied up one slot, with the current frame's
    // scope (src1) placed in slot 0. Falls back to the runtime helpers when the
    // environment depth is unknown or the fast path is disabled. When
    // doStackFrameDisplay is true, both the frame display and the scope slot
    // array are stack-allocated at function entry (with a heap fallback if
    // stack functions were disabled after jitting).
    bool isStrict = instr->m_func->GetJITFunctionBody()->IsStrictMode();
    uint16 envDepth = instr->m_func->GetJITFunctionBody()->GetEnvDepth();
    Func *func = this->m_func;

    // envDepth of -1 indicates unknown depth (eval expression or HTML event handler).
    // We could still fast-path these by generating a loop over the (dynamically loaded) scope chain length,
    // but I doubt it's worth it.
    // If the dst opnd is a byte code temp, that indicates we're prepending a block scope or some such and
    // shouldn't attempt to do this.
    if (envDepth == (uint16)-1 ||
        (!doStackFrameDisplay && (instr->isNonFastPathFrameDisplay || instr->GetDst()->AsRegOpnd()->m_sym->IsTempReg(instr->m_func))) ||
        PHASE_OFF(Js::FrameDisplayFastPathPhase, func))
    {
        // Helper fallback, selected by strict mode and presence of a parent env.
        if (isStrict)
        {
            if (instr->GetSrc2())
            {
                this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdStrictFrameDisplay);
            }
            else
            {
#if DBG
                instr->m_opcode = Js::OpCode::LdFrameDisplayNoParent;
#endif
                this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdStrictFrameDisplayNoParent);
            }
        }
        else
        {
            if (instr->GetSrc2())
            {
                this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdFrameDisplay);
            }
            else
            {
#if DBG
                instr->m_opcode = Js::OpCode::LdFrameDisplayNoParent;
#endif
                this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdFrameDisplayNoParent);
            }
        }
        return;
    }

    uint16 frameDispLength = envDepth + 1;
    Assert(frameDispLength > 0);

    IR::RegOpnd *dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    IR::RegOpnd *currentFrameOpnd = instr->UnlinkSrc1()->AsRegOpnd();

    uint allocSize = sizeof(Js::FrameDisplay) + (frameDispLength * sizeof(Js::Var));
    if (doStackFrameDisplay)
    {
        IR::Instr *insertInstr = func->GetFunctionEntryInsertionPoint();

        // Initialize stack pointers for scope slots and frame display together at the top of the function
        // (in case we bail out before executing the instructions).
        IR::LabelInstr *labelNoStackFunc = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // Check whether stack functions have been disabled since we jitted.
        // If they have, then we must allocate closure memory on the heap.
        InsertTestBranch(IR::MemRefOpnd::New(m_func->GetJITFunctionBody()->GetFlagsAddr(), TyInt8, m_func),
            IR::IntConstOpnd::New(Js::FunctionBody::Flags_StackNestedFunc, TyInt8, m_func, true),
            Js::OpCode::BrEq_A, labelNoStackFunc, insertInstr);

        // allocSize is greater than TyMachPtr and hence changing the initial size to TyMisc
        StackSym * stackSym = StackSym::New(TyMisc, instr->m_func);
        m_func->StackAllocate(stackSym, allocSize);
        InsertLea(dstOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertInstr);
        uint scopeSlotAllocSize =
            (m_func->GetJITFunctionBody()->GetScopeSlotArraySize() + Js::ScopeSlots::FirstSlotIndex) * sizeof(Js::Var);
        stackSym = StackSym::New(TyMisc, instr->m_func);
        m_func->StackAllocate(stackSym, scopeSlotAllocSize);
        InsertLea(currentFrameOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertInstr);
        InsertBranch(Js::OpCode::Br, labelDone, insertInstr);

        insertInstr->InsertBefore(labelNoStackFunc);
        // Heap path: recycler-allocate both the frame display and the scope slots.
        GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, insertInstr, true);
        GenerateRecyclerAlloc(IR::HelperAllocMemForVarArray, scopeSlotAllocSize, currentFrameOpnd, insertInstr, true);
        insertInstr->InsertBefore(labelDone);
        // Record the pointers in the function's frame-display/closure stack slots.
        InsertMove(IR::SymOpnd::New(m_func->GetLocalFrameDisplaySym(), 0, TyMachReg, m_func), dstOpnd, insertInstr);
        InsertMove(IR::SymOpnd::New(m_func->GetLocalClosureSym(), 0, TyMachReg, m_func), currentFrameOpnd, insertInstr);
    }
    else
    {
        GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, instr);
    }

    // Copy contents of environment
    // Work back to front to leave the head element(s) in cache
    if (envDepth > 0)
    {
        IR::RegOpnd *envOpnd = instr->UnlinkSrc2()->AsRegOpnd();
        for (uint16 i = envDepth; i >= 1; i--)
        {
            // Parent scope (i - 1) moves to slot i of the new display.
            IR::Opnd *scopeOpnd = IR::RegOpnd::New(TyMachReg, func);
            IR::Opnd *envLoadOpnd =
                IR::IndirOpnd::New(envOpnd, Js::FrameDisplay::GetOffsetOfScopes() + ((i - 1) * sizeof(Js::Var)), TyMachReg, func);
            InsertMove(scopeOpnd, envLoadOpnd, instr);
            IR::Opnd *dstStoreOpnd =
                IR::IndirOpnd::New(dstOpnd, Js::FrameDisplay::GetOffsetOfScopes() + (i * sizeof(Js::Var)), TyMachReg, func);
            InsertMove(dstStoreOpnd, scopeOpnd, instr);
        }
    }

    // Assign current element.
    InsertMove(
        IR::IndirOpnd::New(dstOpnd, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, func),
        currentFrameOpnd,
        instr);

    // Combine tag, strict mode flag, and length
    // (the FrameDisplay header fields are packed into one machine-word store,
    // each field shifted to its byte offset within the struct)
    uintptr_t bits = 1 |
        (isStrict << (Js::FrameDisplay::GetOffsetOfStrictMode() * 8)) |
        (frameDispLength << (Js::FrameDisplay::GetOffsetOfLength() * 8));

    InsertMove(
        IR::IndirOpnd::New(dstOpnd, 0, TyMachReg, func),
        IR::IntConstOpnd::New(bits, TyMachReg, func, true),
        instr);

    instr->Remove();
}
  23110. IR::AddrOpnd *Lowerer::CreateFunctionBodyOpnd(Func *const func) const
  23111. {
  23112. return IR::AddrOpnd::New(func->GetJITFunctionBody()->GetAddr(), IR::AddrOpndKindDynamicFunctionBody, m_func, true);
  23113. }
  23114. IR::AddrOpnd *Lowerer::CreateFunctionBodyOpnd(Js::FunctionBody *const functionBody) const
  23115. {
  23116. // TODO: OOP JIT, CreateFunctionBodyOpnd
  23117. Assert(!m_func->IsOOPJIT());
  23118. return IR::AddrOpnd::New(functionBody, IR::AddrOpndKindDynamicFunctionBody, m_func, true);
  23119. }
  23120. bool
  23121. Lowerer::GenerateRecyclerOrMarkTempAlloc(IR::Instr * instr, IR::RegOpnd * dstOpnd, IR::JnHelperMethod allocHelper, size_t allocSize, IR::SymOpnd ** tempObjectSymOpnd)
  23122. {
  23123. if (instr->dstIsTempObject)
  23124. {
  23125. *tempObjectSymOpnd = GenerateMarkTempAlloc(dstOpnd, allocSize, instr);
  23126. return false;
  23127. }
  23128. this->GenerateRecyclerAlloc(allocHelper, allocSize, dstOpnd, instr);
  23129. *tempObjectSymOpnd = nullptr;
  23130. return true;
  23131. }
IR::SymOpnd *
Lowerer::GenerateMarkTempAlloc(IR::RegOpnd *const dstOpnd, const size_t allocSize, IR::Instr *const insertBeforeInstr)
{
    // Stack-allocates allocSize bytes for a mark-temp object, preceded by one
    // extra pointer-sized slot for the boxed instance. Returns a SymOpnd
    // addressing the object portion (sizeof(void*) past the start of the
    // allocation); dstOpnd receives its address via LEA.
    Assert(dstOpnd);
    Assert(allocSize != 0);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    // Allocate stack space for the reg exp instance, and a slot for the boxed value
    StackSym *const tempObjectSym = StackSym::New(TyMisc, func);
    m_func->StackAllocate(tempObjectSym, (int)(allocSize + sizeof(void *)));
    IR::SymOpnd * tempObjectOpnd = IR::SymOpnd::New(tempObjectSym, sizeof(void *), TyVar, func);
    InsertLea(dstOpnd, tempObjectOpnd, insertBeforeInstr);

    // Initialize the boxed instance slot
    if (this->outerMostLoopLabel == nullptr)
    {
        // Not inside a loop: null the slot in place (slot sits one pointer
        // before the object, hence the negative offset).
        GenerateMemInit(dstOpnd, -(int)sizeof(void *), IR::AddrOpnd::NewNull(func), insertBeforeInstr, false);
    }
    else if (!PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func))
    {
        // Inside a loop: hoist the null-init out to the outermost loop label so
        // it executes once rather than per-iteration.
        InsertMove(IR::SymOpnd::New(tempObjectSym, TyMachPtr, func), IR::AddrOpnd::NewNull(func), this->outerMostLoopLabel, false);
    }
    return tempObjectOpnd;
}
  23155. void Lowerer::LowerBrFncCachedScopeEq(IR::Instr *instr)
  23156. {
  23157. Assert(instr->m_opcode == Js::OpCode::BrFncCachedScopeEq || instr->m_opcode == Js::OpCode::BrFncCachedScopeNeq);
  23158. Js::OpCode opcode = (instr->m_opcode == Js::OpCode::BrFncCachedScopeEq ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A);
  23159. IR::RegOpnd *src1Reg = instr->UnlinkSrc1()->AsRegOpnd();
  23160. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1Reg, Js::ScriptFunction::GetOffsetOfCachedScopeObj(), TyMachReg, this->m_func);
  23161. this->InsertCompareBranch(indirOpnd, instr->UnlinkSrc2(), opcode, false, instr->AsBranchInstr()->GetTarget(), instr->m_next);
  23162. instr->Remove();
  23163. }
  23164. IR::Instr* Lowerer::InsertLoweredRegionStartMarker(IR::Instr* instrToInsertBefore)
  23165. {
  23166. AssertMsg(instrToInsertBefore->m_prev != nullptr, "Can't insert lowered region start marker as the first instr in the func.");
  23167. IR::LabelInstr* startMarkerLabel = IR::LabelInstr::New(Js::OpCode::Label, instrToInsertBefore->m_func);
  23168. instrToInsertBefore->InsertBefore(startMarkerLabel);
  23169. return startMarkerLabel;
  23170. }
  23171. IR::Instr* Lowerer::RemoveLoweredRegionStartMarker(IR::Instr* startMarkerInstr)
  23172. {
  23173. AssertMsg(startMarkerInstr->m_prev != nullptr, "Lowered region start marker became the first instruction in the func after lowering?");
  23174. IR::Instr* prevInstr = startMarkerInstr->m_prev;
  23175. startMarkerInstr->Remove();
  23176. return prevInstr;
  23177. }
  23178. IR::Instr* Lowerer::GetLdSpreadIndicesInstr(IR::Instr *instr)
  23179. {
  23180. IR::Opnd *src2 = instr->GetSrc2();
  23181. if (!src2->IsSymOpnd())
  23182. {
  23183. return nullptr;
  23184. }
  23185. IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
  23186. StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
  23187. Assert(argLinkSym->IsSingleDef());
  23188. return argLinkSym->m_instrDef;
  23189. }
  23190. bool Lowerer::IsSpreadCall(IR::Instr *instr)
  23191. {
  23192. IR::Instr *lastInstr = GetLdSpreadIndicesInstr(instr);
  23193. return lastInstr && lastInstr->m_opcode == Js::OpCode::LdSpreadIndices;
  23194. }
  23195. // When under debugger, generate a new label to be used as safe place to jump after ignore exception,
  23196. // insert it after insertAfterInstr, and return the label inserted.
  23197. // Returns nullptr/NoOP for non-debugger code path.
  23198. //static
  23199. IR::LabelInstr* Lowerer::InsertContinueAfterExceptionLabelForDebugger(Func* func, IR::Instr* insertAfterInstr, bool isHelper)
  23200. {
  23201. Assert(func);
  23202. Assert(insertAfterInstr);
  23203. IR::LabelInstr* continueAfterExLabel = nullptr;
  23204. if (func->IsJitInDebugMode())
  23205. {
  23206. continueAfterExLabel = IR::LabelInstr::New(Js::OpCode::Label, func, isHelper);
  23207. insertAfterInstr->InsertAfter(continueAfterExLabel);
  23208. }
  23209. return continueAfterExLabel;
  23210. }
void Lowerer::GenerateSingleCharStrJumpTableLookup(IR::Instr * instr)
{
    // Lowers a multi-branch over single-character strings into a jump-table
    // lookup: strings whose length != 1 go to the default target; otherwise the
    // first character (rebased by m_baseCaseValue) indexes the jump table, with
    // an unsigned range check funneling out-of-range chars to default.
    IR::MultiBranchInstr * multiBrInstr = instr->AsBranchInstr()->AsMultiBrInstr();
    Func * func = instr->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

    // MOV strLengthOpnd, str->length
    IR::RegOpnd * strLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(strLengthOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func), instr);

    // CMP strLengthOpnd, 1
    // JNE defaultLabel
    IR::LabelInstr * defaultLabelInstr = (IR::LabelInstr *)multiBrInstr->GetBranchJumpTable()->defaultTarget;
    InsertCompareBranch(strLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);

    // MOV strBuffer, str->psz
    IR::RegOpnd * strBufferOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(strBufferOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, func), instr);

    // TST strBuffer, strBuffer
    // JNE $continue                 ; non-null buffer: skip the materializing call
    InsertTestBranch(strBufferOpnd, strBufferOpnd, Js::OpCode::BrNeq_A, continueLabel, instr);

    // $helper:
    //      PUSH str
    //      CALL JavascriptString::GetSzHelper
    //      MOV strBuffer, eax
    // $continue:
    instr->InsertBefore(helperLabel);
    m_lowererMD.LoadHelperArgument(instr, instr->GetSrc1());
    IR::Instr * instrCall = IR::Instr::New(Js::OpCode::Call, strBufferOpnd, IR::HelperCallOpnd::New(IR::HelperString_GetSz, func), func);
    instr->InsertBefore(instrCall);
    m_lowererMD.LowerCall(instrCall, 0);
    instr->InsertBefore(continueLabel);

    // MOV charOpnd, [strBuffer]     ; 16-bit load of the single character
    IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(charOpnd, IR::IndirOpnd::New(strBufferOpnd, 0, TyUint16, func), instr);
    if (multiBrInstr->m_baseCaseValue != 0)
    {
        // SUB charOpnd, baseIndex   ; rebase so the jump table starts at 0
        InsertSub(false, charOpnd, charOpnd, IR::IntConstOpnd::New(multiBrInstr->m_baseCaseValue, TyUint32, func), instr);
    }

    // CMP charOpnd, lastCaseIndex - baseCaseIndex
    // JA defaultLabel               ; unsigned compare also catches chars below base
    InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(multiBrInstr->m_lastCaseValue - multiBrInstr->m_baseCaseValue, TyUint32, func),
        Js::OpCode::BrGt_A, true, defaultLabelInstr, instr);
    instr->UnlinkSrc1();
    LowerJumpTableMultiBranch(multiBrInstr, charOpnd);
}
void Lowerer::GenerateSwitchStringLookup(IR::Instr * instr)
{
    // Emits a length-based pre-filter in front of the string-switch dictionary
    // lookup so input strings whose length matches no case string branch
    // straight to the default target.
    /* Collect information about string length in all the case*/
    charcount_t minLength = UINT_MAX;
    charcount_t maxLength = 0;
    BVUnit32 bvLength;  // bit i set => some case string has length i (only i < 32 recorded)
    instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary()->dictionary.Map([&](JITJavascriptString * str, void *)
    {
        charcount_t len = str->GetLength();
        minLength = min(minLength, str->GetLength());
        maxLength = max(maxLength, str->GetLength());
        if (len < 32)
        {
            bvLength.Set(len);
        }
    });

    Func * func = instr->m_func;
    // Load the input string's length once for all filters below.
    IR::RegOpnd * strLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(strLengthOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func), instr);
    IR::LabelInstr * defaultLabelInstr = (IR::LabelInstr *)instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary()->defaultTarget;
    if (minLength == maxLength)
    {
        // Generate single length filter
        InsertCompareBranch(strLengthOpnd, IR::IntConstOpnd::New(minLength, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
    }
    else if (maxLength < 32)
    {
        // Generate bit filter

        // Jump to default label if the bit is not on for the length % 32
        IR::IntConstOpnd * lenBitMaskOpnd = IR::IntConstOpnd::New(bvLength.GetWord(), TyUint32, func);
        InsertBitTestBranch(lenBitMaskOpnd, strLengthOpnd, false, defaultLabelInstr, instr);
        // Jump to default label if the length is >= 32 (any bit above the low 5 set)
        InsertTestBranch(strLengthOpnd, IR::IntConstOpnd::New(UINT32_MAX ^ 31, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
    }
    else
    {
        // CONSIDER: Generate range filter
    }
    this->LowerMultiBr(instr, IR::HelperOp_SwitchStringLookUp);
}
  23296. IR::Instr *
  23297. Lowerer::LowerGetCachedFunc(IR::Instr *instr)
  23298. {
  23299. // src1 is an ActivationObjectEx, and we want to get the function object identified by the index (src2)
  23300. // dst = MOV (src1)->GetFuncCacheEntry(src2)->func
  23301. //
  23302. // => [src1 + (offsetof(src1, cache) + (src2 * sizeof(FuncCacheEntry)) + offsetof(FuncCacheEntry, func))]
  23303. IR::IntConstOpnd *src2Opnd = instr->UnlinkSrc2()->AsIntConstOpnd();
  23304. IR::RegOpnd *src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
  23305. IR::Instr *instrPrev = instr->m_prev;
  23306. instr->SetSrc1(IR::IndirOpnd::New(src1Opnd, int32((src2Opnd->GetValue() * sizeof(Js::FuncCacheEntry)) + Js::ActivationObjectEx::GetOffsetOfCache() + offsetof(Js::FuncCacheEntry, func)), TyVar, this->m_func));
  23307. this->m_lowererMD.ChangeToAssign(instr);
  23308. src2Opnd->Free(this->m_func);
  23309. return instrPrev;
  23310. }
IR::Instr *
Lowerer::LowerCommitScope(IR::Instr *instrCommit)
{
    // Lower CommitScope: set the activation object's commit flag, then store
    // "undefined" into each formal var slot of its aux slot array.
    IR::Instr *instrPrev = instrCommit->m_prev;
    IR::RegOpnd *baseOpnd = instrCommit->UnlinkSrc1()->AsRegOpnd();
    IR::Opnd *opnd;
    IR::Instr * insertInstr = instrCommit->m_next;

    // Write undef to all the local var slots.
    // Reuse instrCommit itself as the "commitFlag = 1" store.
    opnd = IR::IndirOpnd::New(baseOpnd, Js::ActivationObjectEx::GetOffsetOfCommitFlag(), TyInt8, this->m_func);
    instrCommit->SetDst(opnd);
    instrCommit->SetSrc1(IR::IntConstOpnd::New(1, TyInt8, this->m_func));
    LowererMD::ChangeToAssign(instrCommit);

    const Js::PropertyIdArray *propIds = instrCommit->m_func->GetJITFunctionBody()->GetFormalsPropIdArray();
    uint firstVarSlot = (uint)Js::ActivationObjectEx::GetFirstVarSlot(propIds);
    if (firstVarSlot < propIds->count)
    {
        // Instead of re-using the address of "undefined" for each store, put the address in a register and re-use that.
        IR::RegOpnd *undefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        InsertMove(undefOpnd, LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueUndefined), insertInstr);
        IR::RegOpnd *slotBaseOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        // Load a pointer to the aux slots. We assume that all ActivationObject's have only aux slots.
        opnd = IR::IndirOpnd::New(baseOpnd, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        InsertMove(slotBaseOpnd, opnd, insertInstr);
        for (uint i = firstVarSlot; i < propIds->count; i++)
        {
            // slots[i] = undefined; the slot offset is i scaled by the machine pointer size.
            opnd = IR::IndirOpnd::New(slotBaseOpnd, i << this->m_lowererMD.GetDefaultIndirScale(), TyMachReg, this->m_func);
            InsertMove(opnd, undefOpnd, insertInstr);
        }
    }
    return instrPrev;
}
  23342. IR::Instr *
  23343. Lowerer::LowerTry(IR::Instr* instr, bool tryCatch)
  23344. {
  23345. if (this->m_func->hasBailout)
  23346. {
  23347. this->EnsureBailoutReturnValueSym();
  23348. }
  23349. this->EnsureHasBailedOutSym();
  23350. IR::SymOpnd * hasBailedOutOpnd = IR::SymOpnd::New(this->m_func->m_hasBailedOutSym, TyUint32, this->m_func);
  23351. IR::Instr * setInstr = IR::Instr::New(LowererMD::GetStoreOp(TyUint32), hasBailedOutOpnd, IR::IntConstOpnd::New(0, TyUint32, this->m_func), this->m_func);
  23352. instr->InsertBefore(setInstr);
  23353. LowererMD::Legalize(setInstr);
  23354. return m_lowererMD.LowerTry(instr, tryCatch ? IR::HelperOp_TryCatch : ((this->m_func->DoOptimizeTry() || (this->m_func->IsSimpleJit() && this->m_func->hasBailout))? IR::HelperOp_TryFinally : IR::HelperOp_TryFinallyNoOpt));
  23355. }
  23356. IR::Instr *
  23357. Lowerer::LowerCatch(IR::Instr * instr)
  23358. {
  23359. // t1 = catch => t2 = catch
  23360. // => t1 = t2
  23361. IR::Opnd *catchObj = instr->UnlinkDst();
  23362. IR::RegOpnd *catchParamReg = IR::RegOpnd::New(TyMachPtr, this->m_func);
  23363. catchParamReg->SetReg(CATCH_OBJ_REG);
  23364. instr->SetDst(catchParamReg);
  23365. IR::Instr * mov = IR::Instr::New(Js::OpCode::Ld_A, catchObj, catchParamReg, this->m_func);
  23366. this->m_lowererMD.ChangeToAssign(mov);
  23367. instr->InsertAfter(mov);
  23368. return instr->m_prev;
  23369. }
  23370. IR::Instr *
  23371. Lowerer::LowerLeave(IR::Instr * leaveInstr, IR::LabelInstr * targetInstr, bool fromFinalLower, bool isOrphanedLeave)
  23372. {
  23373. if (isOrphanedLeave)
  23374. {
  23375. Assert(this->m_func->IsLoopBodyInTry());
  23376. leaveInstr->m_opcode = LowererMD::MDUncondBranchOpcode;
  23377. return leaveInstr->m_prev;
  23378. }
  23379. IR::Instr * instrPrev = leaveInstr->m_prev;
  23380. IR::LabelOpnd *labelOpnd = IR::LabelOpnd::New(targetInstr, this->m_func);
  23381. m_lowererMD.LowerEHRegionReturn(leaveInstr, labelOpnd);
  23382. if (fromFinalLower)
  23383. {
  23384. instrPrev = leaveInstr->m_prev;
  23385. }
  23386. leaveInstr->Remove();
  23387. return instrPrev;
  23388. }
  23389. void
  23390. Lowerer::EnsureBailoutReturnValueSym()
  23391. {
  23392. if (this->m_func->m_bailoutReturnValueSym == nullptr)
  23393. {
  23394. this->m_func->m_bailoutReturnValueSym = StackSym::New(TyVar, this->m_func);
  23395. this->m_func->StackAllocate(this->m_func->m_bailoutReturnValueSym, sizeof(Js::Var));
  23396. }
  23397. }
  23398. void
  23399. Lowerer::EnsureHasBailedOutSym()
  23400. {
  23401. if (this->m_func->m_hasBailedOutSym == nullptr)
  23402. {
  23403. this->m_func->m_hasBailedOutSym = StackSym::New(TyUint32, this->m_func);
  23404. this->m_func->StackAllocate(this->m_func->m_hasBailedOutSym, MachRegInt);
  23405. }
  23406. }
void
Lowerer::InsertReturnThunkForRegion(Region* region, IR::LabelInstr* restoreLabel)
{
    // Emit (once per region) the region's bailout return thunk after the function
    // exit: a stub that forwards control to the enclosing region's thunk, or to
    // restoreLabel when the continuation is the root region.
    Assert(this->m_func->isPostLayout);
    Assert(region->GetType() == RegionTypeTry || region->GetType() == RegionTypeCatch || region->GetType() == RegionTypeFinally);
    if (!region->returnThunkEmitted)
    {
        this->m_func->m_exitInstr->InsertAfter(region->GetBailoutReturnThunkLabel());
        bool newLastInstrInserted = false;
        IR::Instr * insertBeforeInstr = region->GetBailoutReturnThunkLabel()->m_next;
        if (insertBeforeInstr == nullptr)
        {
            // The thunk label is now the tail; insert a placeholder Nop so that
            // LowerEHRegionReturn has an instruction to insert before.
            Assert(this->m_func->m_exitInstr == this->m_func->m_tailInstr);
            insertBeforeInstr = IR::Instr::New(Js::OpCode::Nop, this->m_func);
            newLastInstrInserted = true;
            region->GetBailoutReturnThunkLabel()->InsertAfter(insertBeforeInstr);
            this->m_func->m_tailInstr = insertBeforeInstr;
        }
        IR::LabelOpnd * continuationAddr;
        // We insert return thunk to the region's parent return thunk label.
        // For non-exception finallys, we do not need a return thunk,
        // because we are not calling non-exception finallys from within amd64_callWithFakeFrame.
        // But a non-exception finally may be within other EH regions that need a return thunk.
        if (region->IsNonExceptingFinally())
        {
            // Skip over non-excepting-finally ancestors to find the region whose
            // thunk (or restoreLabel at the root) is the real continuation.
            Assert(region->GetParent()->GetType() != RegionTypeRoot);
            Region *ancestor = region->GetParent()->GetFirstAncestorOfNonExceptingFinallyParent();
            Assert(ancestor && !ancestor->IsNonExceptingFinally());
            if (ancestor->GetType() != RegionTypeRoot)
            {
                continuationAddr = IR::LabelOpnd::New(ancestor->GetBailoutReturnThunkLabel(), this->m_func);
            }
            else
            {
                continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
            }
        }
        else if (region->GetParent()->IsNonExceptingFinally())
        {
            // Parent is a non-excepting finally (which has no thunk of its own);
            // chain to the nearest ancestor that does.
            Region *ancestor = region->GetFirstAncestorOfNonExceptingFinally();
            if (ancestor && ancestor->GetType() != RegionTypeRoot)
            {
                continuationAddr = IR::LabelOpnd::New(ancestor->GetBailoutReturnThunkLabel(), this->m_func);
            }
            else
            {
                continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
            }
        }
        else if (region->GetParent()->GetType() != RegionTypeRoot)
        {
            continuationAddr = IR::LabelOpnd::New(region->GetParent()->GetBailoutReturnThunkLabel(), this->m_func);
        }
        else
        {
            continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
        }
        // Materialize the thunk body: a region return to the chosen continuation.
        IR::Instr * lastInstr = m_lowererMD.LowerEHRegionReturn(insertBeforeInstr, continuationAddr);
        if (newLastInstrInserted)
        {
            // Drop the placeholder Nop and make the thunk's last instruction the new tail.
            Assert(this->m_func->m_tailInstr == insertBeforeInstr);
            insertBeforeInstr->Remove();
            this->m_func->m_tailInstr = lastInstr;
        }
        region->returnThunkEmitted = true;
    }
}
  23474. void
  23475. Lowerer::SetHasBailedOut(IR::Instr * bailoutInstr)
  23476. {
  23477. Assert(this->m_func->isPostLayout);
  23478. IR::SymOpnd * hasBailedOutOpnd = IR::SymOpnd::New(this->m_func->m_hasBailedOutSym, TyUint32, this->m_func);
  23479. IR::Instr * setInstr = IR::Instr::New(LowererMD::GetStoreOp(TyUint32), hasBailedOutOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), this->m_func);
  23480. bailoutInstr->InsertBefore(setInstr);
  23481. LowererMD::Legalize(setInstr);
  23482. }
IR::Instr*
Lowerer::EmitEHBailoutStackRestore(IR::Instr * bailoutInstr)
{
    // After an EH bailout, pop any out params that in-flight calls had pushed.
    // Only needed on x86, where out params are pushed on the machine stack;
    // returns the last instruction emitted (or bailoutInstr if nothing was).
    Assert(this->m_func->isPostLayout);
#ifdef _M_IX86
    BailOutInfo * bailoutInfo = bailoutInstr->GetBailOutInfo();
    uint totalLiveArgCount = 0;
    if (bailoutInfo->startCallCount != 0)
    {
        uint totalStackToBeRestored = 0;
        uint stackAlignmentAdjustment = 0;
        for (uint i = 0; i < bailoutInfo->startCallCount; i++)
        {
            // Orphaned calls pushed nothing; otherwise count that call's out params.
            uint startCallLiveArgCount = bailoutInfo->startCallInfo[i].isOrphanedCall ? 0 : bailoutInfo->GetStartCallOutParamCount(i);
            // Each call whose arg area wasn't a multiple of the stack alignment
            // contributed one extra padding slot.
            if ((Math::Align<int32>(startCallLiveArgCount * MachPtr, MachStackAlignment) - (startCallLiveArgCount * MachPtr)) != 0)
            {
                stackAlignmentAdjustment++;
            }
            totalLiveArgCount += startCallLiveArgCount;
        }
        totalStackToBeRestored = (totalLiveArgCount + stackAlignmentAdjustment) * MachPtr;
        // esp = esp + totalStackToBeRestored, emitted as LEA esp, [esp + n].
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegStackPointer(), TyMachReg, this->m_func);
        IR::Opnd * opnd = IR::IndirOpnd::New(espOpnd, totalStackToBeRestored, TyMachReg, this->m_func);
        IR::Instr * stackRestoreInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);
        bailoutInstr->InsertAfter(stackRestoreInstr);
        return stackRestoreInstr;
    }
#endif
    return bailoutInstr;
}
  23513. void
  23514. Lowerer::EmitSaveEHBailoutReturnValueAndJumpToRetThunk(IR::Instr * insertAfterInstr)
  23515. {
  23516. Assert(this->m_func->isPostLayout);
  23517. // After the CALL SaveAllRegistersAndBailout instruction, emit
  23518. //
  23519. // MOV bailoutReturnValueSym, eax
  23520. // JMP $currentRegion->bailoutReturnThunkLabel
  23521. IR::SymOpnd * bailoutReturnValueSymOpnd = IR::SymOpnd::New(this->m_func->m_bailoutReturnValueSym, TyVar, this->m_func);
  23522. IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegReturn(TyMachReg), TyMachReg, this->m_func);
  23523. IR::Instr * movInstr = IR::Instr::New(LowererMD::GetStoreOp(TyVar), bailoutReturnValueSymOpnd, eaxOpnd, this->m_func);
  23524. insertAfterInstr->InsertAfter(movInstr);
  23525. LowererMD::Legalize(movInstr);
  23526. IR::BranchInstr * jumpInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, this->currentRegion->GetBailoutReturnThunkLabel(), this->m_func);
  23527. movInstr->InsertAfter(jumpInstr);
  23528. }
void
Lowerer::EmitRestoreReturnValueFromEHBailout(IR::LabelInstr * restoreLabel, IR::LabelInstr * epilogLabel)
{
    // Emit, just ahead of the epilog:
    //
    // JMP $epilog                       ; normal path skips the restore
    // $restore:
    // MOV eax, bailoutReturnValueSym    ; EH-bailout path reloads the saved return value
    // $epilog:
    Assert(this->m_func->isPostLayout);
    IR::SymOpnd * bailoutReturnValueSymOpnd = IR::SymOpnd::New(this->m_func->m_bailoutReturnValueSym, TyVar, this->m_func);
    IR::RegOpnd * eaxOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    IR::Instr * movInstr = IR::Instr::New(LowererMD::GetLoadOp(TyVar), eaxOpnd, bailoutReturnValueSymOpnd, this->m_func);
    // Order matters: place $restore then the MOV before $epilog, then the JMP
    // before $restore so the fallthrough path jumps over the restore.
    epilogLabel->InsertBefore(restoreLabel);
    epilogLabel->InsertBefore(movInstr);
    LowererMD::Legalize(movInstr);
    restoreLabel->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, epilogLabel, this->m_func));
}
void
Lowerer::InsertBitTestBranch(IR::Opnd * bitMaskOpnd, IR::Opnd * bitIndex, bool jumpIfBitOn, IR::LabelInstr * targetLabel, IR::Instr * insertBeforeInstr)
{
    // Branch to targetLabel based on bit `bitIndex` of `bitMaskOpnd`:
    // taken when the bit is set if jumpIfBitOn, when clear otherwise.
    // Emitted per-architecture before insertBeforeInstr.
#if defined(_M_IX86) || defined(_M_AMD64)
    // Generate bit test and branch
    // BT bitMaskOpnd, bitIndex
    // JB/JAE targetLabel
    Func * func = this->m_func;
    IR::Instr * instr = IR::Instr::New(Js::OpCode::BT, func);
    instr->SetSrc1(bitMaskOpnd);
    instr->SetSrc2(bitIndex);
    insertBeforeInstr->InsertBefore(instr);
    // BT's first operand must be a register or memory; hoist immediates into a register.
    if (!(bitMaskOpnd->IsRegOpnd() || bitMaskOpnd->IsIndirOpnd() || bitMaskOpnd->IsMemRefOpnd()))
    {
        instr->HoistSrc1(Js::OpCode::MOV);
    }
    InsertBranch(jumpIfBitOn ? Js::OpCode::JB : Js::OpCode::JAE, targetLabel, insertBeforeInstr);
#elif defined(_M_ARM)
    // ARM doesn't have a bit test instruction, so generate
    // MOV r1, 1
    // SHL r1, bitIndex
    // TEST bitMaskOpnd, r1
    // BEQ/BNEQ targetLabel
    Func * func = this->m_func;
    IR::RegOpnd * lenBitOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(lenBitOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    InsertShift(Js::OpCode::Shl_I4, false, lenBitOpnd, lenBitOpnd, bitIndex, insertBeforeInstr);
    InsertTestBranch(lenBitOpnd, bitMaskOpnd, jumpIfBitOn ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, targetLabel, insertBeforeInstr);
#elif defined(_M_ARM64)
    if (bitIndex->IsImmediateOpnd())
    {
        // TBZ/TBNZ bitMaskOpnd, bitIndex, targetLabel
        IR::Instr* branchInstr = InsertBranch(jumpIfBitOn ? Js::OpCode::TBNZ : Js::OpCode::TBZ, targetLabel, insertBeforeInstr);
        branchInstr->SetSrc1(bitMaskOpnd);
        branchInstr->SetSrc2(bitIndex);
    }
    else
    {
        // TBZ/TBNZ require an immediate for the bit to test, so shift the mask to place the bit we want to test at bit zero, and then test bit zero.
        Func * func = this->m_func;
        IR::RegOpnd * maskOpnd = IR::RegOpnd::New(TyUint32, func);
        InsertShift(Js::OpCode::Shr_I4, false, maskOpnd, bitMaskOpnd, bitIndex, insertBeforeInstr);
        IR::Instr* branchInstr = InsertBranch(jumpIfBitOn ? Js::OpCode::TBNZ : Js::OpCode::TBZ, targetLabel, insertBeforeInstr);
        branchInstr->SetSrc1(maskOpnd);
        branchInstr->SetSrc2(IR::IntConstOpnd::New(0, TyUint32, this->m_func));
    }
#else
    AssertMsg(false, "Not implemented");
#endif
}
  23595. //
  23596. // Generates an object test and then a string test with the static string type
  23597. //
  23598. void
  23599. Lowerer::GenerateStringTest(IR::RegOpnd *srcReg, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr * continueLabel, bool generateObjectCheck)
  23600. {
  23601. Assert(srcReg);
  23602. if (!srcReg->GetValueType().IsString())
  23603. {
  23604. if (generateObjectCheck && !srcReg->IsNotTaggedValue())
  23605. {
  23606. this->m_lowererMD.GenerateObjectTest(srcReg, insertInstr, labelHelper);
  23607. }
  23608. // CMP [regSrcStr + offset(type)] , static string type -- check base string type
  23609. // BrEq/BrNeq labelHelper.
  23610. IR::IndirOpnd * src1 = IR::IndirOpnd::New(srcReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
  23611. IR::Opnd * src2 = this->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueStringTypeStatic);
  23612. IR::BranchInstr* branchInstr = nullptr;
  23613. if (continueLabel)
  23614. {
  23615. branchInstr = InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, continueLabel, insertInstr);
  23616. }
  23617. else
  23618. {
  23619. branchInstr = InsertCompareBranch(src1, src2, Js::OpCode::BrNeq_A, labelHelper, insertInstr);
  23620. }
  23621. InsertObjectPoison(srcReg, branchInstr, insertInstr, false);
  23622. }
  23623. }
  23624. //
  23625. // Generates an object test and then a symbol test with the static symbol type
  23626. //
  23627. void
  23628. Lowerer::GenerateSymbolTest(IR::RegOpnd *srcReg, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr * continueLabel, bool generateObjectCheck)
  23629. {
  23630. Assert(srcReg);
  23631. if (!srcReg->GetValueType().IsSymbol())
  23632. {
  23633. if (generateObjectCheck && !srcReg->IsNotTaggedValue())
  23634. {
  23635. this->m_lowererMD.GenerateObjectTest(srcReg, insertInstr, labelHelper);
  23636. }
  23637. // CMP [regSrcStr + offset(type)] , static symbol type -- check base symbol type
  23638. // BrEq/BrNeq labelHelper.
  23639. IR::IndirOpnd * src1 = IR::IndirOpnd::New(srcReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
  23640. IR::Opnd * src2 = this->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueSymbolTypeStatic);
  23641. if (continueLabel)
  23642. {
  23643. InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, continueLabel, insertInstr);
  23644. }
  23645. else
  23646. {
  23647. InsertCompareBranch(src1, src2, Js::OpCode::BrNeq_A, labelHelper, insertInstr);
  23648. }
  23649. }
  23650. }
void
Lowerer::LowerConvNum(IR::Instr *instrLoad, bool noMathFastPath)
{
    // Lower Conv_Num. Fast path: a tagged int converts to itself, so copy src
    // to dst and only fall into the ConvNumber helper when src isn't tagged.
    if (PHASE_OFF(Js::OtherFastPathPhase, this->m_func) || noMathFastPath || !instrLoad->GetSrc1()->IsRegOpnd())
    {
        // Fast path disabled or src not in a register: helper call only.
        this->LowerUnaryHelperMemWithTemp2(instrLoad, IR_HELPER_OP_FULL_OR_INPLACE(ConvNumber));
        return;
    }

    // MOV dst, src1
    // TEST src1, 1
    // JNE $done
    // call ToNumber
    //$done:
    bool isInt = false;
    bool isNotInt = false;
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelDone = NULL;
    IR::Instr *instr;
    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }
    if (!isNotInt)
    {
        // src may be a tagged int: emit the copy, and — unless it's provably an
        // int — the tag test that skips the helper.
        // MOV dst, src1
        instr = Lowerer::InsertMove(instrLoad->GetDst(), src1, instrLoad);
        if (!isInt)
        {
            labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            bool didTest = m_lowererMD.GenerateObjectTest(src1, instrLoad, labelDone);
            if (didTest)
            {
                // This label is needed only to mark the helper block
                IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
                instrLoad->InsertBefore(labelHelper);
            }
        }
    }
    if (!isInt)
    {
        if (labelDone)
        {
            instrLoad->InsertAfter(labelDone);
        }
        // Not provably a tagged int: lower the remaining case to the helper call.
        this->LowerUnaryHelperMemWithTemp2(instrLoad, IR_HELPER_OP_FULL_OR_INPLACE(ConvNumber));
    }
    else
    {
        // Provably a tagged int: the MOV above is the entire conversion.
        instrLoad->Remove();
    }
}
IR::Opnd *
Lowerer::LoadSlotArrayWithCachedLocalType(IR::Instr * instrInsert, IR::PropertySymOpnd *propertySymOpnd)
{
    // Return an opnd addressing the slot array for an inline-cached local field
    // access: the aux slot array when the cache says aux slots are used
    // (loading it here unless an already-loaded aux-slot-ptr sym can be reused),
    // otherwise the object itself, whose header holds the inline slots.
    IR::RegOpnd *opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    if (propertySymOpnd->UsesAuxSlot())
    {
        // If we use the auxiliary slot array, load it and return it
        IR::RegOpnd * opndSlotArray;
        if (propertySymOpnd->IsAuxSlotPtrSymAvailable() || propertySymOpnd->ProducesAuxSlotPtr())
        {
            // We want to reload and/or reuse the shared aux slot ptr sym
            StackSym * auxSlotPtrSym = propertySymOpnd->GetAuxSlotPtrSym();
            Assert(auxSlotPtrSym != nullptr);
            opndSlotArray = IR::RegOpnd::New(auxSlotPtrSym, TyMachReg, this->m_func);
            opndSlotArray->SetIsJITOptimizedReg(true);
            if (!propertySymOpnd->ProducesAuxSlotPtr())
            {
                // No need to reload
                return opndSlotArray;
            }
        }
        else
        {
            opndSlotArray = IR::RegOpnd::New(TyMachReg, this->m_func);
        }
        // slotArray = [base + offset(auxSlots)]
        IR::Opnd *opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
        Lowerer::InsertMove(opndSlotArray, opndIndir, instrInsert);
        return opndSlotArray;
    }
    else
    {
        // If we use inline slot return the address to the object header
        return opndBase;
    }
}
  23741. IR::Opnd *
  23742. Lowerer::LoadSlotArrayWithCachedProtoType(IR::Instr * instrInsert, IR::PropertySymOpnd *propertySymOpnd)
  23743. {
  23744. // Get the prototype object from the cache
  23745. intptr_t prototypeObject = propertySymOpnd->GetProtoObject();
  23746. Assert(prototypeObject != 0);
  23747. if (propertySymOpnd->UsesAuxSlot())
  23748. {
  23749. // If we use the auxiliary slot array, load it from the prototype object and return it
  23750. IR::RegOpnd *opndSlotArray = IR::RegOpnd::New(TyMachReg, this->m_func);
  23751. IR::Opnd *opnd = IR::MemRefOpnd::New((char*)prototypeObject + Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func, IR::AddrOpndKindDynamicAuxSlotArrayRef);
  23752. Lowerer::InsertMove(opndSlotArray, opnd, instrInsert);
  23753. return opndSlotArray;
  23754. }
  23755. else
  23756. {
  23757. // If we use inline slot return the address of the prototype object
  23758. return IR::MemRefOpnd::New(prototypeObject, TyMachReg, this->m_func);
  23759. }
  23760. }
  23761. IR::Instr *
  23762. Lowerer::LowerLdAsmJsEnv(IR::Instr * instr)
  23763. {
  23764. Assert(m_func->GetJITFunctionBody()->IsAsmJsMode());
  23765. IR::Opnd * functionObjOpnd;
  23766. IR::Instr * instrPrev = this->m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  23767. Assert(!instr->GetSrc1());
  23768. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::AsmJsScriptFunction::GetOffsetOfModuleMemory(), TyMachPtr, m_func);
  23769. instr->SetSrc1(indirOpnd);
  23770. LowererMD::ChangeToAssign(instr);
  23771. return instrPrev;
  23772. }
  23773. IR::Instr *
  23774. Lowerer::LowerLdNativeCodeData(IR::Instr * instr)
  23775. {
  23776. Assert(!instr->GetSrc1());
  23777. Assert(m_func->IsTopFunc());
  23778. IR::Instr * instrPrev = instr->m_prev;
  23779. instr->SetSrc1(IR::MemRefOpnd::New((void*)m_func->GetWorkItem()->GetWorkItemData()->nativeDataAddr, TyMachPtr, m_func, IR::AddrOpndKindDynamicNativeCodeDataRef));
  23780. LowererMD::ChangeToAssign(instr);
  23781. return instrPrev;
  23782. }
  23783. IR::Instr *
  23784. Lowerer::LowerLdEnv(IR::Instr * instr)
  23785. {
  23786. IR::Opnd * src1 = instr->GetSrc1();
  23787. IR::Opnd * functionObjOpnd;
  23788. IR::Instr * instrPrev = this->m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  23789. Assert(!instr->GetSrc1());
  23790. if (src1 == nullptr || functionObjOpnd->IsRegOpnd())
  23791. {
  23792. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
  23793. Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, m_func);
  23794. instr->SetSrc1(indirOpnd);
  23795. }
  23796. else
  23797. {
  23798. Assert(functionObjOpnd->IsAddrOpnd());
  23799. IR::AddrOpnd* functionObjAddrOpnd = functionObjOpnd->AsAddrOpnd();
  23800. IR::MemRefOpnd* functionEnvMemRefOpnd = IR::MemRefOpnd::New((void *)((intptr_t)functionObjAddrOpnd->m_address + Js::ScriptFunction::GetOffsetOfEnvironment()),
  23801. TyMachPtr, this->m_func, IR::AddrOpndKindDynamicFunctionEnvironmentRef);
  23802. instr->SetSrc1(functionEnvMemRefOpnd);
  23803. }
  23804. LowererMD::ChangeToAssign(instr);
  23805. return instrPrev;
  23806. }
  23807. IR::Instr *
  23808. Lowerer::LowerLdSuper(IR::Instr *instr, IR::JnHelperMethod helperOpCode)
  23809. {
  23810. IR::Opnd * functionObjOpnd;
  23811. IR::Instr * instrPrev = m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
  23812. LoadScriptContext(instr);
  23813. m_lowererMD.LoadHelperArgument(instr, functionObjOpnd);
  23814. m_lowererMD.ChangeToHelperCall(instr, helperOpCode);
  23815. return instrPrev;
  23816. }
IR::Instr *
Lowerer::LowerFrameDisplayCheck(IR::Instr * instr)
{
    // Lower FrameDisplayCheck: emit runtime checks (from the FrameDisplayCheckRecord
    // in src2) that the frame display has enough scopes and that each checked slot
    // array is large enough; any failure jumps to a fatal-error helper call.
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *insertInstr = instr->m_next;
    IR::AddrOpnd *addrOpnd = instr->UnlinkSrc2()->AsAddrOpnd();
    FrameDisplayCheckRecord *record = (FrameDisplayCheckRecord*)addrOpnd->m_address;
    IR::LabelInstr *errorLabel = nullptr;
    IR::LabelInstr *continueLabel = nullptr;
    IR::RegOpnd *envOpnd = instr->GetDst()->AsRegOpnd();
    uint32 frameDisplayOffset = Js::FrameDisplay::GetOffsetOfScopes()/sizeof(Js::Var);
    if (record->slotId != (uint32)-1 && record->slotId > frameDisplayOffset)
    {
        // Check that the frame display has enough scopes in it to satisfy the code.
        errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(envOpnd,
                                                       Js::FrameDisplay::GetOffsetOfLength(),
                                                       TyUint16, m_func, true);
        IR::IntConstOpnd *slotIdOpnd = IR::IntConstOpnd::New(record->slotId - frameDisplayOffset, TyUint16, m_func);
        // length <= requiredSlot (unsigned) => error
        InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrLe_A, true, errorLabel, insertInstr);
    }
    if (record->table)
    {
        // Check the size of each of the slot arrays in the scope chain.
        FOREACH_HASHTABLE_ENTRY(uint32, bucket, record->table)
        {
            uint32 slotId = bucket.element;
            if (slotId != (uint32)-1 && slotId > Js::ScopeSlots::FirstSlotIndex)
            {
                // Lazily create the shared error/continue labels on first use.
                if (errorLabel == nullptr)
                {
                    errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
                    continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
                }
                // slotArray = env[bucket.value]
                IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(envOpnd,
                                                               bucket.value * sizeof(Js::Var),
                                                               TyVar, m_func, true);
                IR::RegOpnd * slotArrayOpnd = IR::RegOpnd::New(TyVar, m_func);
                InsertMove(slotArrayOpnd, indirOpnd, insertInstr);
                // Compare the slot array's encoded slot count against the slot we need.
                indirOpnd = IR::IndirOpnd::New(slotArrayOpnd,
                                               Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
                                               TyVar, m_func, true);
                IR::IntConstOpnd * slotIdOpnd = IR::IntConstOpnd::New(slotId - Js::ScopeSlots::FirstSlotIndex,
                                                                     TyUint32, m_func);
                InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrLe_A, true, errorLabel, insertInstr);
            }
        }
        NEXT_HASHTABLE_ENTRY;
    }
    if (errorLabel)
    {
        // Shared failure path: fatal internal error helper, skipped on success.
        InsertBranch(Js::OpCode::Br, continueLabel, insertInstr);
        insertInstr->InsertBefore(errorLabel);
        IR::Instr * instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
        insertInstr->InsertBefore(instrHelper);
        m_lowererMD.ChangeToHelperCall(instrHelper, IR::HelperOp_FatalInternalError);
        insertInstr->InsertBefore(continueLabel);
    }
    // The original instr degenerates to dst = src1.
    m_lowererMD.ChangeToAssign(instr);
    return instrPrev;
}
IR::Instr *
Lowerer::LowerSlotArrayCheck(IR::Instr * instr)
{
    // Lower SlotArrayCheck: verify at runtime that the slot array (dst) has at
    // least src2 slots; on failure, call the fatal-internal-error helper.
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *insertInstr = instr->m_next;
    IR::RegOpnd *slotArrayOpnd = instr->GetDst()->AsRegOpnd();
    StackSym *stackSym = slotArrayOpnd->m_sym;
    IR::IntConstOpnd *slotIdOpnd = instr->UnlinkSrc2()->AsIntConstOpnd();
    uint32 slotId = (uint32)slotIdOpnd->GetValue();
    Assert(slotId != (uint32)-1 && slotId >= Js::ScopeSlots::FirstSlotIndex);
    if (slotId > Js::ScopeSlots::FirstSlotIndex)
    {
        if (m_func->DoStackFrameDisplay() && stackSym->m_id == m_func->GetLocalClosureSym()->m_id)
        {
            // The pointer we loaded points to the reserved/known address where the slot array can be boxed.
            // Deref to get the real value.
            IR::IndirOpnd * srcOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(stackSym, TyVar, m_func), 0, TyVar, m_func);
            IR::RegOpnd * dstOpnd = IR::RegOpnd::New(TyVar, m_func);
            InsertMove(dstOpnd, srcOpnd, insertInstr);
            stackSym = dstOpnd->m_sym;
        }
        IR::LabelInstr *errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr *continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        // Compare the slot array's encoded slot count against the required slot.
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(stackSym, TyVar, m_func),
                                                       Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
                                                       TyVar, m_func, true);
        slotIdOpnd->SetValue(slotId - Js::ScopeSlots::FirstSlotIndex);
        // count > requiredSlot (unsigned) => OK; fall through to the error path otherwise.
        InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrGt_A, true, continueLabel, insertInstr);
        insertInstr->InsertBefore(errorLabel);
        IR::Instr * instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
        insertInstr->InsertBefore(instrHelper);
        m_lowererMD.ChangeToHelperCall(instrHelper, IR::HelperOp_FatalInternalError);
        insertInstr->InsertBefore(continueLabel);
    }
    // The original instr degenerates to dst = src1.
    m_lowererMD.ChangeToAssign(instr);
    return instrPrev;
}
IR::RegOpnd *
Lowerer::LoadIndexFromLikelyFloat(
    IR::RegOpnd *indexOpnd,
    const bool skipNegativeCheck,
    IR::LabelInstr *const notIntLabel,
    IR::LabelInstr *const negativeLabel,
    IR::Instr *const insertBeforeInstr)
{
    // Convert a profiled-likely-float index to an int32 register:
    // first try the tagged-int fast path (unless the index is known not-int),
    // then untag/convert the float and verify the conversion was exact by
    // converting back and comparing. Jumps to notIntLabel on a non-integral
    // float and (unless skipped) to negativeLabel on a negative index.
#ifdef _M_IX86
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif
    Func *func = insertBeforeInstr->m_func;
    IR::LabelInstr * fallThrough = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::RegOpnd *int32IndexOpnd = nullptr;
    // If we know for sure that it's not an int, do not check to see if it's a tagged int
    if (indexOpnd->IsNotInt())
    {
        int32IndexOpnd = IR::RegOpnd::New(TyInt32, func);
    }
    else
    {
        IR::LabelInstr * convertToUint = IR::LabelInstr::New(Js::OpCode::Label, func);
        // First generate test for tagged int even though profile data says likely float. Indices are usually int and we need a fast path before we try to convert float to int
        // mov intIndex, index
        // sar intIndex, 1
        // jae convertToInt
        int32IndexOpnd = GenerateUntagVar(indexOpnd, convertToUint, insertBeforeInstr, !indexOpnd->IsTaggedInt());
        if (!skipNegativeCheck)
        {
            // test index, index
            // js $notTaggedIntOrNegative
            InsertTestBranch(int32IndexOpnd, int32IndexOpnd, LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), negativeLabel, insertBeforeInstr);
        }
        InsertBranch(Js::OpCode::Br, fallThrough, insertBeforeInstr);
        insertBeforeInstr->InsertBefore(convertToUint);
    }

    // try to convert float to int in a fast path
#if FLOATVAR
    IR::RegOpnd* floatIndexOpnd = m_lowererMD.CheckFloatAndUntag(indexOpnd, insertBeforeInstr, notIntLabel);
#else
    // Boxed doubles: verify it's a JavascriptNumber, then read the value field.
    m_lowererMD.GenerateFloatTest(indexOpnd, insertBeforeInstr, notIntLabel);
    IR::IndirOpnd * floatIndexOpnd = IR::IndirOpnd::New(indexOpnd, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
#endif
    IR::LabelInstr * doneConvUint32 = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * helperConvUint32 = IR::LabelInstr::New(Js::OpCode::Label, func, true /*helper*/);
    m_lowererMD.ConvertFloatToInt32(int32IndexOpnd, floatIndexOpnd, helperConvUint32, doneConvUint32, insertBeforeInstr);

    // helper path
    insertBeforeInstr->InsertBefore(helperConvUint32);
    m_lowererMD.LoadDoubleHelperArgument(insertBeforeInstr, floatIndexOpnd);
    IR::Instr * helperCall = IR::Instr::New(Js::OpCode::Call, int32IndexOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(helperCall);
#if DBG
    // This call to Conv_ToUint32Core wont be reentrant as we would only call it for floats
    this->ClearAndSaveImplicitCallCheckOnHelperCallCheckState();
#endif
    m_lowererMD.ChangeToHelperCall(helperCall, IR::HelperConv_ToUInt32Core);
#if DBG
    this->RestoreImplicitCallCheckOnHelperCallCheckState();
#endif

    // main path
    insertBeforeInstr->InsertBefore(doneConvUint32);

    //Convert uint32 to back to float for comparison that conversion was indeed successful
    IR::RegOpnd *floatOpndFromUint32 = IR::RegOpnd::New(TyFloat64, func);
    m_lowererMD.EmitUIntToFloat(floatOpndFromUint32, int32IndexOpnd->UseWithNewType(TyUint32, this->m_func), insertBeforeInstr);

    // compare with float from the original indexOpnd, we need floatIndex == (float64)(uint32)floatIndex
    InsertCompareBranch(floatOpndFromUint32, floatIndexOpnd, Js::OpCode::BrNeq_A, notIntLabel, insertBeforeInstr, false);
    insertBeforeInstr->InsertBefore(fallThrough);
    return int32IndexOpnd;
}
  23986. void
  23987. Lowerer::AllocStackForInObjectEnumeratorArray()
  23988. {
  23989. Func * func = this->m_func;
  23990. Assert(func->IsTopFunc());
  23991. if (func->m_forInLoopMaxDepth)
  23992. {
  23993. func->m_forInEnumeratorArrayOffset = func->StackAllocate(sizeof(Js::ForInObjectEnumerator) * this->m_func->m_forInLoopMaxDepth);
  23994. }
  23995. }
// Produce a register holding the address of the ForInObjectEnumerator to use.
// For a stack enumerator (sym opnd), this also performs the deferred stack
// allocation: the sym's m_offset is re-purposed — it holds the for-in loop
// nesting level until this point, and is rewritten here to the real offset
// into the enumerator array reserved by AllocStackForInObjectEnumeratorArray.
IR::RegOpnd *
Lowerer::GenerateForInEnumeratorLoad(IR::Opnd * forInEnumeratorOpnd, IR::Instr * insertBeforeInstr)
{
    Func * func = insertBeforeInstr->m_func;
    if (forInEnumeratorOpnd->IsSymOpnd())
    {
        StackSym * stackSym = forInEnumeratorOpnd->AsSymOpnd()->GetStackSym();
        Assert(!stackSym->m_allocated);
        // m_offset currently encodes the loop nesting level, not a byte offset.
        uint forInLoopLevel = stackSym->m_offset;
        Assert(func->m_forInLoopBaseDepth + forInLoopLevel < this->m_func->m_forInLoopMaxDepth);
        // Rewrite to the byte offset of this level's slot in the enumerator
        // array; m_forInLoopBaseDepth accounts for the depth contributed by
        // the code this function was inlined into.
        stackSym->m_offset = this->m_func->m_forInEnumeratorArrayOffset + ((func->m_forInLoopBaseDepth + forInLoopLevel) * sizeof(Js::ForInObjectEnumerator));
        stackSym->m_allocated = true;
    }
    else
    {
        Assert(forInEnumeratorOpnd->IsIndirOpnd());
        // [base + 0]: the base register already holds the enumerator's address,
        // so no LEA is needed.
        if (forInEnumeratorOpnd->AsIndirOpnd()->GetOffset() == 0)
        {
            return forInEnumeratorOpnd->AsIndirOpnd()->GetBaseOpnd();
        }
    }
    // Materialize the enumerator address with an LEA into a fresh register.
    IR::RegOpnd * forInEnumeratorRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertLea(forInEnumeratorRegOpnd, forInEnumeratorOpnd, insertBeforeInstr);
    return forInEnumeratorRegOpnd;
}
// Branch to hasObjectArrayLabel when objectOpnd (a DynamicObject, with its
// type already loaded in typeOpnd) appears to have an object array; otherwise
// fall through.
//
// CMP [objectOpnd + offset(objectArray)], nullptr
// JEQ $noObjectArrayLabel
// TEST [objectOpnd + offset(objectArray)], ObjectArrayFlagsTag (used as flags)
// JNE $noObjectArrayLabel        (slot holds flags, not an array pointer)
// MOV typeHandlerOpnd, [typeOpnd + offset(typeHandler)]
// CMP typeHandler->OffsetOfInlineSlots, Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots()
// JNE $hasObjectArrayLabel
// $$noObjectArrayLabel: (fall thru)
void
Lowerer::GenerateHasObjectArrayCheck(IR::RegOpnd * objectOpnd, IR::RegOpnd * typeOpnd, IR::LabelInstr * hasObjectArrayLabel, IR::Instr * insertBeforeInstr)
{
    Func * func = this->m_func;
    IR::LabelInstr * noObjectArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::IndirOpnd * objectArrayOpnd = IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfObjectArray(), TyMachPtr, func);
    // Null slot => no object array.
    InsertCompareBranch(objectArrayOpnd, IR::AddrOpnd::NewNull(func), Js::OpCode::BrEq_A, noObjectArrayLabel, insertBeforeInstr);
    // If the tag bit is set, the slot is being used to store DynamicObjectFlags
    // rather than an object array pointer.
    InsertTestBranch(objectArrayOpnd, IR::IntConstOpnd::New((uint32)Js::DynamicObjectFlags::ObjectArrayFlagsTag, TyUint8, func),
        Js::OpCode::BrNeq_A, noObjectArrayLabel, insertBeforeInstr);
    // The slot holds a non-null, untagged pointer. For object-header-inlined
    // types the storage overlaps inline slots, so only report an object array
    // when the type handler's inline slots do NOT start in the object header.
    IR::RegOpnd * typeHandlerOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(typeHandlerOpnd, IR::IndirOpnd::New(typeOpnd, Js::DynamicType::GetOffsetOfTypeHandler(), TyMachPtr, func), insertBeforeInstr);
    InsertCompareBranch(IR::IndirOpnd::New(typeHandlerOpnd, Js::DynamicTypeHandler::GetOffsetOfOffsetOfInlineSlots(), TyUint16, func),
        IR::IntConstOpnd::New(Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots(), TyUint16, func),
        Js::OpCode::BrNeq_A, hasObjectArrayLabel, insertBeforeInstr);
    insertBeforeInstr->InsertBefore(noObjectArrayLabel);
}
// Emit the fast path for initializing a ForInObjectEnumerator using the
// profiled EnumeratorCache. Guards (tagged/object/type/cache/prototype-chain
// checks) branch to $helper, which falls through into the slow helper call
// emitted by LowerInitForInEnumerator; on success the enumerator fields are
// initialized inline and control jumps over the helper call to $done.
void
Lowerer::GenerateInitForInEnumeratorFastPath(IR::Instr * instr, Js::EnumeratorCache * forInCache)
{
    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::RegOpnd * objectOpnd = instr->GetSrc1()->AsRegOpnd();
    // Tagged check and object check
    m_lowererMD.GenerateObjectTest(objectOpnd, instr, helperLabel);
    GenerateIsDynamicObject(objectOpnd, instr, helperLabel);
    // Type check with cache
    //
    // MOV typeOpnd, [objectOpnd + offset(type)]
    // CMP [&forInCache->type], typeOpnd
    // JNE $helper
    IR::RegOpnd * typeOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(typeOpnd, IR::IndirOpnd::New(objectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, func), instr);
    InsertCompareBranch(IR::MemRefOpnd::New(&forInCache->type, TyMachPtr, func, IR::AddrOpndKindForInCacheType), typeOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);
    // Check forInCacheData->EnumNonEnumerable == false
    //
    // MOV forInCacheDataOpnd, [&forInCache->data]
    // CMP forInCacheDataOpnd->enumNonEnumerable, 0
    // JNE $helper
    IR::RegOpnd * forInCacheDataOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(forInCacheDataOpnd, IR::MemRefOpnd::New(&forInCache->data, TyMachPtr, func, IR::AddrOpndKindForInCacheData), instr);
    InsertCompareBranch(IR::IndirOpnd::New(forInCacheDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataEnumNonEnumerable(), TyUint8, func),
        IR::IntConstOpnd::New(0, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
    // Check has object array (an object array would require the slow path)
    GenerateHasObjectArrayCheck(objectOpnd, typeOpnd, helperLabel, instr);
    // Check first prototype with enumerable properties
    //
    // MOV prototypeObjectOpnd, [type + offset(prototype)]
    // MOV prototypeTypeOpnd, [prototypeObjectOpnd + offset(type)]
    // CMP [prototypeTypeOpnd + offset(typeId)], TypeIds_Null
    // JEQ $noPrototypeWithEnumerablePropertiesLabel
    //
    // $checkFirstPrototypeLoopTopLabel:
    // CMP [prototypeTypeOpnd + offset(typeId)], TypeIds_LastStaticType
    // JLE $helper
    // CMP [prototypeTypeOpnd + offset(hasNoEnumerableProperties)], 0
    // JEQ $helper
    // <hasObjectArrayCheck prototypeObjectOpnd, prototypeTypeOpnd>
    //
    // MOV prototypeObjectOpnd, [prototypeTypeOpnd + offset(prototype)] (load next prototype)
    //
    // MOV prototypeTypeOpnd, [prototypeObjectOpnd + offset(type)] (tail dup TypeIds_Null check)
    // CMP [prototypeTypeOpnd + offset(typeId)], TypeIds_Null
    // JNE $checkFirstPrototypeLoopTopLabel
    //
    // $noPrototypeWithEnumerablePropertiesLabel:
    //
    IR::LabelInstr * noPrototypeWithEnumerablePropertiesLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::RegOpnd * prototypeObjectOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd * prototypeTypeOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::IndirOpnd * prototypeTypeIdOpnd = IR::IndirOpnd::New(prototypeTypeOpnd, Js::DynamicType::GetOffsetOfTypeId(), TyUint32, func);
    InsertMove(prototypeObjectOpnd, IR::IndirOpnd::New(typeOpnd, Js::DynamicType::GetOffsetOfPrototype(), TyMachPtr, func), instr);
    InsertMove(prototypeTypeOpnd, IR::IndirOpnd::New(prototypeObjectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, func), instr);
    InsertCompareBranch(prototypeTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Null, TyUint32, func), Js::OpCode::BrEq_A, noPrototypeWithEnumerablePropertiesLabel, instr);
    IR::LabelInstr * checkFirstPrototypeLoopTopLabel = InsertLoopTopLabel(instr);
    Loop * loop = checkFirstPrototypeLoopTopLabel->GetLoop();
    // Both prototype registers are redefined each iteration and read on the
    // back edge, so the register allocator must keep them live across it.
    loop->regAlloc.liveOnBackEdgeSyms->Set(prototypeObjectOpnd->m_sym->m_id);
    loop->regAlloc.liveOnBackEdgeSyms->Set(prototypeTypeOpnd->m_sym->m_id);
    InsertCompareBranch(prototypeTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_LastStaticType, TyUint32, func), Js::OpCode::BrLe_A, helperLabel, instr);
    // No need to do EnsureObjectReady. Defer init type may not have this bit set, so we will go to helper and call EnsureObjectReady then
    InsertCompareBranch(IR::IndirOpnd::New(prototypeTypeOpnd, Js::DynamicType::GetOffsetOfHasNoEnumerableProperties(), TyUint8, func),
        IR::IntConstOpnd::New(0, TyUint8, func), Js::OpCode::BrEq_A, helperLabel, instr);
    GenerateHasObjectArrayCheck(prototypeObjectOpnd, prototypeTypeOpnd, helperLabel, instr);
    InsertMove(prototypeObjectOpnd, IR::IndirOpnd::New(prototypeTypeOpnd, Js::DynamicType::GetOffsetOfPrototype(), TyMachPtr, func), instr);
    // Tail dup the TypeIds_Null check
    InsertMove(prototypeTypeOpnd, IR::IndirOpnd::New(prototypeObjectOpnd, Js::DynamicObject::GetOffsetOfType(), TyMachPtr, func), instr);
    InsertCompareBranch(prototypeTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeId::TypeIds_Null, TyUint32, func), Js::OpCode::BrNeq_A, checkFirstPrototypeLoopTopLabel, instr);
    instr->InsertBefore(noPrototypeWithEnumerablePropertiesLabel);
    // Initialize DynamicObjectPropertyEnumerator fields
    IR::Opnd * forInEnumeratorOpnd = instr->GetSrc2();
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorScriptContext(), TyMachPtr),
        LoadScriptContextOpnd(instr), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObject(), TyMachPtr),
        objectOpnd, instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialType(), TyMachPtr),
        typeOpnd, instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorObjectIndex(), TyInt32),
        IR::IntConstOpnd::New(Js::Constants::NoBigSlot, TyInt32, func), instr);
    IR::RegOpnd * initialPropertyCountOpnd = IR::RegOpnd::New(TyInt32, func);
    InsertMove(initialPropertyCountOpnd,
        IR::IndirOpnd::New(forInCacheDataOpnd, Js::DynamicObjectPropertyEnumerator::GetOffsetOfCachedDataPropertyCount(), TyInt32, func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorInitialPropertyCount(), TyInt32),
        initialPropertyCountOpnd, instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorEnumeratedCount(), TyInt32),
        IR::IntConstOpnd::New(0, TyInt32, func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorFlags(), TyUint8),
        IR::IntConstOpnd::New((uint8)(Js::EnumeratorFlags::UseCache | Js::EnumeratorFlags::SnapShotSemantics), TyUint8, func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorCachedData(), TyMachPtr),
        forInCacheDataOpnd, instr);
    // Initialize rest of the JavascriptStaticEnumerator fields
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorCurrentEnumerator(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorPrefixEnumerator(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfEnumeratorArrayEnumerator(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);
    // Initialize rest of the ForInObjectEnumerator fields
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfShadowData(), TyMachPtr),
        IR::AddrOpnd::NewNull(func), instr);
    // Initialize can UseJitFastPath = true and enumeratingPrototype = false at the same time
    // (both states are packed into one uint16, written with the single constant 1).
    InsertMove(GetForInEnumeratorFieldOpnd(forInEnumeratorOpnd, Js::ForInObjectEnumerator::GetOffsetOfStates(), TyUint16),
        IR::IntConstOpnd::New(1, TyUint16, func, true), instr);
    // Fast path done: jump over the slow helper call that follows $helper.
    IR::LabelInstr* doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
// Lower InitForInEnumerator: optionally emit the cache-based fast path
// (full JIT with profile data only), then lower the remaining slow path to
// the Op_OP_InitForInEnumerator helper call.
void
Lowerer::LowerInitForInEnumerator(IR::Instr * instr)
{
    Js::EnumeratorCache * forInCache = nullptr;
    Func * func = instr->m_func;
    if (instr->IsProfiledInstr())
    {
        uint profileId = instr->AsProfiledInstr()->u.profileId;
        forInCache = instr->m_func->GetJITFunctionBody()->GetForInCache(profileId);
        Assert(forInCache != nullptr);
        // Fast path only for full JIT (SimpleJit keeps collecting profile info).
        if (!func->IsSimpleJit()
#if ENABLE_TTD
            && (func->IsOOPJIT() || !func->GetScriptContext()->GetThreadContext()->IsRuntimeInTTDMode())
            //TODO: We will need to enable OOPJIT info to exclude this if we have a TTD Runtime
#endif
            )
        {
            GenerateInitForInEnumeratorFastPath(instr, forInCache);
        }
    }
    IR::RegOpnd * forInEnumeratorRegOpnd = GenerateForInEnumeratorLoad(instr->UnlinkSrc2(), instr);
    instr->SetSrc2(forInEnumeratorRegOpnd);
    // Pass the cache address to the helper; NOTE(review): for non-profiled
    // instrs forInCache is still nullptr here — presumably the helper accepts
    // a null cache. Confirm against Op_OP_InitForInEnumerator.
    m_lowererMD.LoadHelperArgument(instr, IR::AddrOpnd::New(forInCache, IR::AddrOpndKindForInCache, func));
    this->LowerBinaryHelperMem(instr, IR::HelperOp_OP_InitForInEnumerator);
}
  24180. IR::LabelInstr *
  24181. Lowerer::InsertLoopTopLabel(IR::Instr * insertBeforeInstr)
  24182. {
  24183. Func * func = this->m_func;
  24184. IR::LabelInstr * loopTopLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  24185. loopTopLabel->m_isLoopTop = true;
  24186. Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, func);
  24187. loopTopLabel->SetLoop(loop);
  24188. loop->SetLoopTopInstr(loopTopLabel);
  24189. loop->regAlloc.liveOnBackEdgeSyms = AllocatorNew(JitArenaAllocator, func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
  24190. insertBeforeInstr->InsertBefore(loopTopLabel);
  24191. return loopTopLabel;
  24192. }
  24193. IR::Instr *
  24194. Lowerer::AddBailoutToHelperCallInstr(IR::Instr * helperCallInstr, BailOutInfo * bailoutInfo, IR::BailOutKind bailoutKind, IR::Instr * primaryBailoutInstr)
  24195. {
  24196. helperCallInstr = helperCallInstr->ConvertToBailOutInstr(bailoutInfo, bailoutKind);
  24197. if (bailoutInfo->bailOutInstr == primaryBailoutInstr)
  24198. {
  24199. IR::Instr * instrShare = primaryBailoutInstr->ShareBailOut();
  24200. LowerBailTarget(instrShare);
  24201. }
  24202. return helperCallInstr;
  24203. }
  24204. void
  24205. Lowerer::GenerateAuxSlotPtrLoad(IR::PropertySymOpnd *propertySymOpnd, IR::Instr * instrInsert)
  24206. {
  24207. StackSym * auxSlotPtrSym = propertySymOpnd->GetAuxSlotPtrSym();
  24208. Assert(auxSlotPtrSym);
  24209. Func * func = instrInsert->m_func;
  24210. IR::Opnd *opndIndir = IR::IndirOpnd::New(propertySymOpnd->CreatePropertyOwnerOpnd(func), Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, func);
  24211. IR::RegOpnd *regOpnd = IR::RegOpnd::New(auxSlotPtrSym, TyMachReg, func);
  24212. regOpnd->SetIsJITOptimizedReg(true);
  24213. InsertMove(regOpnd, opndIndir, instrInsert);
  24214. }
// Insert instr before insertBeforeInstr and immediately legalize it for the
// target machine (fix up operand forms the encoder cannot accept directly).
void
Lowerer::InsertAndLegalize(IR::Instr * instr, IR::Instr* insertBeforeInstr)
{
    insertBeforeInstr->InsertBefore(instr);
    LowererMD::Legalize(instr);
}
  24221. #if DBG
// DBG-only: walk every instruction in [instrStart, instrLast] and run the
// legalizer in verify mode, asserting each instruction is already in a
// machine-legal form (no rewriting is expected at this point).
void
Lowerer::LegalizeVerifyRange(IR::Instr * instrStart, IR::Instr * instrLast)
{
    FOREACH_INSTR_IN_RANGE(verifyLegalizeInstr, instrStart, instrLast)
    {
        LowererMD::Legalize<true>(verifyLegalizeInstr);
    }
    NEXT_INSTR_IN_RANGE;
}
// DBG-only consistency check run when a helper call is emitted: verifies that
// the helper's reentrancy attributes agree with the lowerer's current state
// (helper calls allowed, implicit-call bailout attached when required) and
// with the implicit-call attributes of the opcode being lowered. Asserts on
// any mismatch; does not modify the IR.
void
Lowerer::ReconcileWithLowererStateOnHelperCall(IR::Instr * callInstr, IR::JnHelperMethod helperMethod)
{
    AssertMsg((this->helperCallCheckState & HelperCallCheckState_NoHelperCalls) == 0, "Emitting an helper call when we didn't allow helper calls");
    if (HelperMethodAttributes::CanBeReentrant(helperMethod))
    {
        // Reentrant helpers can trigger implicit calls back into script, so the
        // call site must carry a BailOutOnImplicitCalls when the current state
        // demands it.
        if (this->helperCallCheckState & HelperCallCheckState_ImplicitCallsBailout)
        {
            if (!callInstr->HasBailOutInfo() ||
                !BailOutInfo::IsBailOutOnImplicitCalls(callInstr->GetBailOutKind()))
            {
                Output::Print(_u("HelperMethod : %s\n"), IR::GetMethodName(helperMethod));
                AssertMsg(false, "Helper call doesn't have BailOutOnImplicitCalls when it should");
            }
        }
        // The opcode being lowered must itself be flagged as capable of
        // implicit calls if it emits a reentrant helper.
        if (!OpCodeAttr::HasImplicitCall(m_currentInstrOpCode) && !OpCodeAttr::OpndHasImplicitCall(m_currentInstrOpCode)
            // Special case: FromVar may emit reentrant helpers even though its
            // opcode attributes say it has no implicit calls.
            && m_currentInstrOpCode != Js::OpCode::FromVar
            )
        {
            Output::Print(_u("HelperMethod : %s, OpCode: %s"), IR::GetMethodName(helperMethod), Js::OpCodeUtil::GetOpCodeName(m_currentInstrOpCode));
            callInstr->DumpByteCodeOffset();
            Output::Print(_u("\n"));
            AssertMsg(false, "OpCode and Helper implicit call attribute mismatch");
        }
    }
}
  24258. void
  24259. Lowerer::ClearAndSaveImplicitCallCheckOnHelperCallCheckState()
  24260. {
  24261. this->oldHelperCallCheckState = this->helperCallCheckState;
  24262. this->helperCallCheckState = HelperCallCheckState(this->helperCallCheckState & ~HelperCallCheckState_ImplicitCallsBailout);
  24263. }
  24264. void
  24265. Lowerer::RestoreImplicitCallCheckOnHelperCallCheckState()
  24266. {
  24267. if (this->oldHelperCallCheckState & HelperCallCheckState_ImplicitCallsBailout)
  24268. {
  24269. this->helperCallCheckState = HelperCallCheckState(this->helperCallCheckState | HelperCallCheckState_ImplicitCallsBailout);
  24270. this->oldHelperCallCheckState = HelperCallCheckState_None;
  24271. }
  24272. }
// DBG-only lowering of CheckLowerIntBound:
//     CMP src1, src2
//     JGE $continue
//     CALL IntRangeCheckFailure   (no return expected)
//   $continue:
// Returns the instruction preceding the original check so lowering can resume
// from there.
IR::Instr*
Lowerer::LowerCheckLowerIntBound(IR::Instr * instr)
{
    IR::Instr * instrPrev = instr->m_prev;
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, false /*isOpHelper*/);
    Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32());
    // In-range (src1 >= lower bound) skips the failure call.
    InsertCompareBranch(instr->GetSrc1(), instr->GetSrc2(), Js::OpCode::BrGe_A, continueLabel, instr);
    IR::Instr* helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    instr->InsertBefore(helperCallInstr);
    m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::HelperIntRangeCheckFailure);
    instr->InsertAfter(continueLabel);
    instr->Remove();
    return instrPrev;
}
// DBG-only lowering of CheckUpperIntBound. When the immediately preceding
// instruction is the matching CheckLowerIntBound, both checks are fused so
// they share a single failure call:
//     CMP src1, upperBound      ; JGT $helper
//     CMP lowerSrc, lowerBound  ; JGE $continue
//   $helper:
//     CALL IntRangeCheckFailure
//   $continue:
// Otherwise only the upper-bound check is emitted (src1 <= bound continues).
// Returns the instruction preceding the consumed check(s).
IR::Instr*
Lowerer::LowerCheckUpperIntBound(IR::Instr * instr)
{
    bool lowerBoundCheckPresent = instr->m_prev->m_opcode == Js::OpCode::CheckLowerIntBound;
    // Step back over both instructions when fusing, so lowering resumes cleanly.
    IR::Instr * instrPrev = lowerBoundCheckPresent ? instr->m_prev->m_prev : instr->m_prev;
    IR::Instr * lowerBoundCheckInstr = lowerBoundCheckPresent ? instr->m_prev : nullptr;
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, false /*isOpHelper*/);
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true /*isOpHelper*/);
    Assert(instr->GetSrc1()->IsInt32() || instr->GetSrc1()->IsUInt32());
    if (lowerBoundCheckInstr)
    {
        // Upper-bound violation jumps to the shared failure path...
        InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(), Js::OpCode::BrGt_A, helperLabel, instr);
        Assert(lowerBoundCheckInstr->GetSrc1()->IsInt32() || lowerBoundCheckInstr->GetSrc1()->IsUInt32());
        // ...while satisfying the lower bound skips it; failing falls through to $helper.
        InsertCompareBranch(lowerBoundCheckInstr->UnlinkSrc1(), lowerBoundCheckInstr->UnlinkSrc2(), Js::OpCode::BrGe_A, continueLabel, instr);
    }
    else
    {
        InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(), Js::OpCode::BrLe_A, continueLabel, instr);
    }
    instr->InsertBefore(helperLabel);
    IR::Instr* helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    instr->InsertBefore(helperCallInstr);
    m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::HelperIntRangeCheckFailure);
    instr->InsertAfter(continueLabel);
    instr->Remove();
    // The lower-bound check was folded into the sequence above; remove it.
    if (lowerBoundCheckInstr)
    {
        lowerBoundCheckInstr->Remove();
    }
    return instrPrev;
}
  24318. #endif