Lower.cpp 885 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211
54221542315424154251542615427154281542915430154311543215433154341543515436154371543815439154401544115442154431544415445154461544715448154491545015451154521545315454154551545615457154581545915460154611546215463154641546515466154671546815469154701547115472154731547415475154761547715478154791548015481154821548315484154851548615487154881548915490154911549215493154941549515496154971549815499155001550115502155031550415505155061550715508155091551015511155121551315514155151551615517155181551915520155211552215523155241552515526155271552815529155301553115532155331553415535155361553715538155391554015541155421554315544155451554615547155481554915550155511555215553155541555515556155571555815559155601556115562155631556415565155661556715568155691557015571155721557315574155751557615577155781557915580155811558215583155841558515586155871558815589155901559115592155931559415595155961559715598155991560015601156021560315604156051560615607156081560915610156111561215613156141561515616156171561815619156201562115622156231562415625156261562715628156291563015631156321563315634156351563615637156381563915640156411564215643156441564515646156471564815649156501565115652156531565415655156561565715658156591566015661156621566315664156651566615667156681566915670156711567215673156741567515676156771567815679156801568115682156831568415685156861568715688156891569015691156921569315694156951569615697156981569915700157011570215703157041570515706157071570815709157101571115712157131571415715157161571715718157191572015721157221572315724157251572615727157281572915730157311573215733157341573515736157371573815739157401574115742157431574415745157461574715748157491575015751157521575315754157551575615757157581575915760157611576215763157641576515766157671576815769157701577115772157731577415775157761577715778157791578015781157821578315784157851578615787157881578915790157911579215793157941579515796157971579815799158001580115802158031580415805158061580715808158091581015811158121581315814158151581615817158181581915820158211
58221582315824158251582615827158281582915830158311583215833158341583515836158371583815839158401584115842158431584415845158461584715848158491585015851158521585315854158551585615857158581585915860158611586215863158641586515866158671586815869158701587115872158731587415875158761587715878158791588015881158821588315884158851588615887158881588915890158911589215893158941589515896158971589815899159001590115902159031590415905159061590715908159091591015911159121591315914159151591615917159181591915920159211592215923159241592515926159271592815929159301593115932159331593415935159361593715938159391594015941159421594315944159451594615947159481594915950159511595215953159541595515956159571595815959159601596115962159631596415965159661596715968159691597015971159721597315974159751597615977159781597915980159811598215983159841598515986159871598815989159901599115992159931599415995159961599715998159991600016001160021600316004160051600616007160081600916010160111601216013160141601516016160171601816019160201602116022160231602416025160261602716028160291603016031160321603316034160351603616037160381603916040160411604216043160441604516046160471604816049160501605116052160531605416055160561605716058160591606016061160621606316064160651606616067160681606916070160711607216073160741607516076160771607816079160801608116082160831608416085160861608716088160891609016091160921609316094160951609616097160981609916100161011610216103161041610516106161071610816109161101611116112161131611416115161161611716118161191612016121161221612316124161251612616127161281612916130161311613216133161341613516136161371613816139161401614116142161431614416145161461614716148161491615016151161521615316154161551615616157161581615916160161611616216163161641616516166161671616816169161701617116172161731617416175161761617716178161791618016181161821618316184161851618616187161881618916190161911619216193161941619516196161971619816199162001620116202162031620416205162061620716208162091621016211162121621316214162151621616217162181621916220162211
62221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211
66221662316624166251662616627166281662916630166311663216633166341663516636166371663816639166401664116642166431664416645166461664716648166491665016651166521665316654166551665616657166581665916660166611666216663166641666516666166671666816669166701667116672166731667416675166761667716678166791668016681166821668316684166851668616687166881668916690166911669216693166941669516696166971669816699167001670116702167031670416705167061670716708167091671016711167121671316714167151671616717167181671916720167211672216723167241672516726167271672816729167301673116732167331673416735167361673716738167391674016741167421674316744167451674616747167481674916750167511675216753167541675516756167571675816759167601676116762167631676416765167661676716768167691677016771167721677316774167751677616777167781677916780167811678216783167841678516786167871678816789167901679116792167931679416795167961679716798167991680016801168021680316804168051680616807168081680916810168111681216813168141681516816168171681816819168201682116822168231682416825168261682716828168291683016831168321683316834168351683616837168381683916840168411684216843168441684516846168471684816849168501685116852168531685416855168561685716858168591686016861168621686316864168651686616867168681686916870168711687216873168741687516876168771687816879168801688116882168831688416885168861688716888168891689016891168921689316894168951689616897168981689916900169011690216903169041690516906169071690816909169101691116912169131691416915169161691716918169191692016921169221692316924169251692616927169281692916930169311693216933169341693516936169371693816939169401694116942169431694416945169461694716948169491695016951169521695316954169551695616957169581695916960169611696216963169641696516966169671696816969169701697116972169731697416975169761697716978169791698016981169821698316984169851698616987169881698916990169911699216993169941699516996169971699816999170001700117002170031700417005170061700717008170091701017011170121701317014170151701617017170181701917020170211
70221702317024170251702617027170281702917030170311703217033170341703517036170371703817039170401704117042170431704417045170461704717048170491705017051170521705317054170551705617057170581705917060170611706217063170641706517066170671706817069170701707117072170731707417075170761707717078170791708017081170821708317084170851708617087170881708917090170911709217093170941709517096170971709817099171001710117102171031710417105171061710717108171091711017111171121711317114171151711617117171181711917120171211712217123171241712517126171271712817129171301713117132171331713417135171361713717138171391714017141171421714317144171451714617147171481714917150171511715217153171541715517156171571715817159171601716117162171631716417165171661716717168171691717017171171721717317174171751717617177171781717917180171811718217183171841718517186171871718817189171901719117192171931719417195171961719717198171991720017201172021720317204172051720617207172081720917210172111721217213172141721517216172171721817219172201722117222172231722417225172261722717228172291723017231172321723317234172351723617237172381723917240172411724217243172441724517246172471724817249172501725117252172531725417255172561725717258172591726017261172621726317264172651726617267172681726917270172711727217273172741727517276172771727817279172801728117282172831728417285172861728717288172891729017291172921729317294172951729617297172981729917300173011730217303173041730517306173071730817309173101731117312173131731417315173161731717318173191732017321173221732317324173251732617327173281732917330173311733217333173341733517336173371733817339173401734117342173431734417345173461734717348173491735017351173521735317354173551735617357173581735917360173611736217363173641736517366173671736817369173701737117372173731737417375173761737717378173791738017381173821738317384173851738617387173881738917390173911739217393173941739517396173971739817399174001740117402174031740417405174061740717408174091741017411174121741317414174151741617417174181741917420174211
74221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211
78221782317824178251782617827178281782917830178311783217833178341783517836178371783817839178401784117842178431784417845178461784717848178491785017851178521785317854178551785617857178581785917860178611786217863178641786517866178671786817869178701787117872178731787417875178761787717878178791788017881178821788317884178851788617887178881788917890178911789217893178941789517896178971789817899179001790117902179031790417905179061790717908179091791017911179121791317914179151791617917179181791917920179211792217923179241792517926179271792817929179301793117932179331793417935179361793717938179391794017941179421794317944179451794617947179481794917950179511795217953179541795517956179571795817959179601796117962179631796417965179661796717968179691797017971179721797317974179751797617977179781797917980179811798217983179841798517986179871798817989179901799117992179931799417995179961799717998179991800018001180021800318004180051800618007180081800918010180111801218013180141801518016180171801818019180201802118022180231802418025180261802718028180291803018031180321803318034180351803618037180381803918040180411804218043180441804518046180471804818049180501805118052180531805418055180561805718058180591806018061180621806318064180651806618067180681806918070180711807218073180741807518076180771807818079180801808118082180831808418085180861808718088180891809018091180921809318094180951809618097180981809918100181011810218103181041810518106181071810818109181101811118112181131811418115181161811718118181191812018121181221812318124181251812618127181281812918130181311813218133181341813518136181371813818139181401814118142181431814418145181461814718148181491815018151181521815318154181551815618157181581815918160181611816218163181641816518166181671816818169181701817118172181731817418175181761817718178181791818018181181821818318184181851818618187181881818918190181911819218193181941819518196181971819818199182001820118202182031820418205182061820718208182091821018211182121821318214182151821618217182181821918220182211
82221822318224182251822618227182281822918230182311823218233182341823518236182371823818239182401824118242182431824418245182461824718248182491825018251182521825318254182551825618257182581825918260182611826218263182641826518266182671826818269182701827118272182731827418275182761827718278182791828018281182821828318284182851828618287182881828918290182911829218293182941829518296182971829818299183001830118302183031830418305183061830718308183091831018311183121831318314183151831618317183181831918320183211832218323183241832518326183271832818329183301833118332183331833418335183361833718338183391834018341183421834318344183451834618347183481834918350183511835218353183541835518356183571835818359183601836118362183631836418365183661836718368183691837018371183721837318374183751837618377183781837918380183811838218383183841838518386183871838818389183901839118392183931839418395183961839718398183991840018401184021840318404184051840618407184081840918410184111841218413184141841518416184171841818419184201842118422184231842418425184261842718428184291843018431184321843318434184351843618437184381843918440184411844218443184441844518446184471844818449184501845118452184531845418455184561845718458184591846018461184621846318464184651846618467184681846918470184711847218473184741847518476184771847818479184801848118482184831848418485184861848718488184891849018491184921849318494184951849618497184981849918500185011850218503185041850518506185071850818509185101851118512185131851418515185161851718518185191852018521185221852318524185251852618527185281852918530185311853218533185341853518536185371853818539185401854118542185431854418545185461854718548185491855018551185521855318554185551855618557185581855918560185611856218563185641856518566185671856818569185701857118572185731857418575185761857718578185791858018581185821858318584185851858618587185881858918590185911859218593185941859518596185971859818599186001860118602186031860418605186061860718608186091861018611186121861318614186151861618617186181861918620186211
86221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211
90221902319024190251902619027190281902919030190311903219033190341903519036190371903819039190401904119042190431904419045190461904719048190491905019051190521905319054190551905619057190581905919060190611906219063190641906519066190671906819069190701907119072190731907419075190761907719078190791908019081190821908319084190851908619087190881908919090190911909219093190941909519096190971909819099191001910119102191031910419105191061910719108191091911019111191121911319114191151911619117191181911919120191211912219123191241912519126191271912819129191301913119132191331913419135191361913719138191391914019141191421914319144191451914619147191481914919150191511915219153191541915519156191571915819159191601916119162191631916419165191661916719168191691917019171191721917319174191751917619177191781917919180191811918219183191841918519186191871918819189191901919119192191931919419195191961919719198191991920019201192021920319204192051920619207192081920919210192111921219213192141921519216192171921819219192201922119222192231922419225192261922719228192291923019231192321923319234192351923619237192381923919240192411924219243192441924519246192471924819249192501925119252192531925419255192561925719258192591926019261192621926319264192651926619267192681926919270192711927219273192741927519276192771927819279192801928119282192831928419285192861928719288192891929019291192921929319294192951929619297192981929919300193011930219303193041930519306193071930819309193101931119312193131931419315193161931719318193191932019321193221932319324193251932619327193281932919330193311933219333193341933519336193371933819339193401934119342193431934419345193461934719348193491935019351193521935319354193551935619357193581935919360193611936219363193641936519366193671936819369193701937119372193731937419375193761937719378193791938019381193821938319384193851938619387193881938919390193911939219393193941939519396193971939819399194001940119402194031940419405194061940719408194091941019411194121941319414194151941619417194181941919420194211
94221942319424194251942619427194281942919430194311943219433194341943519436194371943819439194401944119442194431944419445194461944719448194491945019451194521945319454194551945619457194581945919460194611946219463194641946519466194671946819469194701947119472194731947419475194761947719478194791948019481194821948319484194851948619487194881948919490194911949219493194941949519496194971949819499195001950119502195031950419505195061950719508195091951019511195121951319514195151951619517195181951919520195211952219523195241952519526195271952819529195301953119532195331953419535195361953719538195391954019541195421954319544195451954619547195481954919550195511955219553195541955519556195571955819559195601956119562195631956419565195661956719568195691957019571195721957319574195751957619577195781957919580195811958219583195841958519586195871958819589195901959119592195931959419595195961959719598195991960019601196021960319604196051960619607196081960919610196111961219613196141961519616196171961819619196201962119622196231962419625196261962719628196291963019631196321963319634196351963619637196381963919640196411964219643196441964519646196471964819649196501965119652196531965419655196561965719658196591966019661196621966319664196651966619667196681966919670196711967219673196741967519676196771967819679196801968119682196831968419685196861968719688196891969019691196921969319694196951969619697196981969919700197011970219703197041970519706197071970819709197101971119712197131971419715197161971719718197191972019721197221972319724197251972619727197281972919730197311973219733197341973519736197371973819739197401974119742197431974419745197461974719748197491975019751197521975319754197551975619757197581975919760197611976219763197641976519766197671976819769197701977119772197731977419775197761977719778197791978019781197821978319784197851978619787197881978919790197911979219793197941979519796197971979819799198001980119802198031980419805198061980719808198091981019811198121981319814198151981619817198181981919820198211
98221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212
02222022320224202252022620227202282022920230202312023220233202342023520236202372023820239202402024120242202432024420245202462024720248202492025020251202522025320254202552025620257202582025920260202612026220263202642026520266202672026820269202702027120272202732027420275202762027720278202792028020281202822028320284202852028620287202882028920290202912029220293202942029520296202972029820299203002030120302203032030420305203062030720308203092031020311203122031320314203152031620317203182031920320203212032220323203242032520326203272032820329203302033120332203332033420335203362033720338203392034020341203422034320344203452034620347203482034920350203512035220353203542035520356203572035820359203602036120362203632036420365203662036720368203692037020371203722037320374203752037620377203782037920380203812038220383203842038520386203872038820389203902039120392203932039420395203962039720398203992040020401204022040320404204052040620407204082040920410204112041220413204142041520416204172041820419204202042120422204232042420425204262042720428204292043020431204322043320434204352043620437204382043920440204412044220443204442044520446204472044820449204502045120452204532045420455204562045720458204592046020461204622046320464204652046620467204682046920470204712047220473204742047520476204772047820479204802048120482204832048420485204862048720488204892049020491204922049320494204952049620497204982049920500205012050220503205042050520506205072050820509205102051120512205132051420515205162051720518205192052020521205222052320524205252052620527205282052920530205312053220533205342053520536205372053820539205402054120542205432054420545205462054720548205492055020551205522055320554205552055620557205582055920560205612056220563205642056520566205672056820569205702057120572205732057420575205762057720578205792058020581205822058320584205852058620587205882058920590205912059220593205942059520596205972059820599206002060120602206032060420605206062060720608206092061020611206122061320614206152061620617206182061920620206212
06222062320624206252062620627206282062920630206312063220633206342063520636206372063820639206402064120642206432064420645206462064720648206492065020651206522065320654206552065620657206582065920660206612066220663206642066520666206672066820669206702067120672206732067420675206762067720678206792068020681206822068320684206852068620687206882068920690206912069220693206942069520696206972069820699207002070120702207032070420705207062070720708207092071020711207122071320714207152071620717207182071920720207212072220723207242072520726207272072820729207302073120732207332073420735207362073720738207392074020741207422074320744207452074620747207482074920750207512075220753207542075520756207572075820759207602076120762207632076420765207662076720768207692077020771207722077320774207752077620777207782077920780207812078220783207842078520786207872078820789207902079120792207932079420795207962079720798207992080020801208022080320804208052080620807208082080920810208112081220813208142081520816208172081820819208202082120822208232082420825208262082720828208292083020831208322083320834208352083620837208382083920840208412084220843208442084520846208472084820849208502085120852208532085420855208562085720858208592086020861208622086320864208652086620867208682086920870208712087220873208742087520876208772087820879208802088120882208832088420885208862088720888208892089020891208922089320894208952089620897208982089920900209012090220903209042090520906209072090820909209102091120912209132091420915209162091720918209192092020921209222092320924209252092620927209282092920930209312093220933209342093520936209372093820939209402094120942209432094420945209462094720948209492095020951209522095320954209552095620957209582095920960209612096220963209642096520966209672096820969209702097120972209732097420975209762097720978209792098020981209822098320984209852098620987209882098920990209912099220993209942099520996209972099820999210002100121002210032100421005210062100721008210092101021011210122101321014210152101621017210182101921020210212
10222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182141921420214212
14222142321424214252142621427214282142921430214312143221433214342143521436214372143821439214402144121442214432144421445214462144721448214492145021451214522145321454214552145621457214582145921460214612146221463214642146521466214672146821469214702147121472214732147421475214762147721478214792148021481214822148321484214852148621487214882148921490214912149221493214942149521496214972149821499215002150121502215032150421505215062150721508215092151021511215122151321514215152151621517215182151921520215212152221523215242152521526215272152821529215302153121532215332153421535215362153721538215392154021541215422154321544215452154621547215482154921550215512155221553215542155521556215572155821559215602156121562215632156421565215662156721568215692157021571215722157321574215752157621577215782157921580215812158221583215842158521586215872158821589215902159121592215932159421595215962159721598215992160021601216022160321604216052160621607216082160921610216112161221613216142161521616216172161821619216202162121622216232162421625216262162721628216292163021631216322163321634216352163621637216382163921640216412164221643216442164521646216472164821649216502165121652216532165421655216562165721658216592166021661216622166321664216652166621667216682166921670216712167221673216742167521676216772167821679216802168121682216832168421685216862168721688216892169021691216922169321694216952169621697216982169921700217012170221703217042170521706217072170821709217102171121712217132171421715217162171721718217192172021721217222172321724217252172621727217282172921730217312173221733217342173521736217372173821739217402174121742217432174421745217462174721748217492175021751217522175321754217552175621757217582175921760217612176221763217642176521766217672176821769217702177121772217732177421775217762177721778217792178021781217822178321784217852178621787217882178921790217912179221793217942179521796217972179821799218002180121802218032180421805218062180721808218092181021811218122181321814218152181621817218182181921820218212
18222182321824218252182621827218282182921830218312183221833218342183521836218372183821839218402184121842218432184421845218462184721848218492185021851218522185321854218552185621857218582185921860218612186221863218642186521866218672186821869218702187121872218732187421875218762187721878218792188021881218822188321884218852188621887218882188921890218912189221893218942189521896218972189821899219002190121902219032190421905219062190721908219092191021911219122191321914219152191621917219182191921920219212192221923219242192521926219272192821929219302193121932219332193421935219362193721938219392194021941219422194321944219452194621947219482194921950219512195221953219542195521956219572195821959219602196121962219632196421965219662196721968219692197021971219722197321974219752197621977219782197921980219812198221983219842198521986219872198821989219902199121992219932199421995219962199721998219992200022001220022200322004220052200622007220082200922010220112201222013220142201522016220172201822019220202202122022220232202422025220262202722028220292203022031220322203322034220352203622037220382203922040220412204222043220442204522046220472204822049220502205122052220532205422055220562205722058220592206022061220622206322064220652206622067220682206922070220712207222073220742207522076220772207822079220802208122082220832208422085220862208722088220892209022091220922209322094220952209622097220982209922100221012210222103221042210522106221072210822109221102211122112221132211422115221162211722118221192212022121221222212322124221252212622127221282212922130221312213222133221342213522136221372213822139221402214122142221432214422145221462214722148221492215022151221522215322154221552215622157221582215922160221612216222163221642216522166221672216822169221702217122172221732217422175221762217722178221792218022181221822218322184221852218622187221882218922190221912219222193221942219522196221972219822199222002220122202222032220422205222062220722208222092221022211222122221322214222152221622217222182221922220222212
222222223222242222522226222272222822229222302223122232222332223422235222362223722238222392224022241222422224322244222452224622247222482224922250222512225222253222542225522256222572225822259222602226122262222632226422265222662226722268222692227022271222722227322274222752227622277222782227922280222812228222283222842228522286222872228822289222902229122292222932229422295222962229722298222992230022301223022230322304223052230622307223082230922310223112231222313223142231522316223172231822319223202232122322223232232422325223262232722328223292233022331223322233322334223352233622337223382233922340223412234222343223442234522346223472234822349223502235122352223532235422355223562235722358223592236022361223622236322364223652236622367223682236922370223712237222373223742237522376223772237822379223802238122382223832238422385223862238722388223892239022391223922239322394223952239622397223982239922400224012240222403224042240522406224072240822409224102241122412224132241422415224162241722418224192242022421224222242322424224252242622427224282242922430224312243222433224342243522436224372243822439224402244122442224432244422445224462244722448224492245022451224522245322454224552245622457224582245922460224612246222463224642246522466224672246822469224702247122472224732247422475224762247722478224792248022481
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "Debug/DebuggingFlags.h"
  7. #include "Debug/DiagProbe.h"
  8. #include "Debug/DebugManager.h"
  9. // Parser includes
  10. #include "RegexCommon.h"
  11. #include "RegexPattern.h"
  12. #include "ExternalLowerer.h"
  13. #include "ExternalLowerer.h"
  14. ///----------------------------------------------------------------------------
  15. ///
  16. /// Lowerer::Lower
  17. ///
  18. /// Lowerer's main entrypoint. Lowers this function.
  19. ///
  20. ///----------------------------------------------------------------------------
  21. void
  22. Lowerer::Lower()
  23. {
  24. this->m_func->StopMaintainByteCodeOffset();
  25. NoRecoverMemoryJitArenaAllocator localAlloc(L"BE-Lower", this->m_func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
  26. this->m_alloc = &localAlloc;
  27. BVSparse<JitArenaAllocator> localInitializedTempSym(&localAlloc);
  28. this->initializedTempSym = &localInitializedTempSym;
  29. BVSparse<JitArenaAllocator> localAddToLiveOnBackEdgeSyms(&localAlloc);
  30. this->addToLiveOnBackEdgeSyms = &localAddToLiveOnBackEdgeSyms;
  31. Assert(this->m_func->GetCloneMap() == nullptr);
  32. m_lowererMD.Init(this);
  33. bool defaultDoFastPath = this->m_func->DoFastPaths();
  34. bool loopFastPath = this->m_func->DoLoopFastPaths();
  35. if (!loopFastPath || !defaultDoFastPath
  36. #ifdef INLINE_CACHE_STATS
  37. || PHASE_STATS1(Js::PolymorphicInlineCachePhase)
  38. #endif
  39. )
  40. {
  41. //arguments[] access is similar to array fast path hence disable when array fastpath is disabled.
  42. //loopFastPath is always true except explicitly disabled
  43. //defaultDoFastPath can be false when we the source code size is huge
  44. m_func->SetHasStackArgs(false);
  45. }
  46. if (m_func->HasAnyStackNestedFunc())
  47. {
  48. EnsureStackFunctionListStackSym();
  49. }
  50. if (m_func->DoStackFrameDisplay() && !m_func->IsLoopBody())
  51. {
  52. AllocStackClosure();
  53. }
  54. if (m_func->IsJitInDebugMode())
  55. {
  56. // Initialize metadata of local var slots.
  57. // Too late to wait until Register Allocator, as we need the offset when lowerering bailout for debugger.
  58. int32 hasLocalVarChangedOffset = m_func->GetHasLocalVarChangedOffset();
  59. if (hasLocalVarChangedOffset != Js::Constants::InvalidOffset)
  60. {
  61. // MOV [EBP + m_func->GetHasLocalVarChangedOffset()], 0
  62. StackSym* sym = StackSym::New(TyInt8, m_func);
  63. sym->m_offset = hasLocalVarChangedOffset;
  64. sym->m_allocated = true;
  65. IR::Opnd* opnd1 = IR::SymOpnd::New(sym, TyInt8, m_func);
  66. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func);
  67. LowererMD::CreateAssign(opnd1, opnd2, m_func->GetFunctionEntryInsertionPoint());
  68. #ifdef DBG
  69. // Pre-fill all local slots with a pattern. This will help identify non-initialized/garbage var values.
  70. // Note that in the beginning of the function in bytecode we should initialize all locals to undefined.
  71. uint32 localSlotCount = m_func->GetJnFunction()->GetEndNonTempLocalIndex() - m_func->GetJnFunction()->GetFirstNonTempLocalIndex();
  72. for (uint i = 0; i < localSlotCount; ++i)
  73. {
  74. int offset = m_func->GetLocalVarSlotOffset(i);
  75. IRType opnd1Type;
  76. opnd2;
  77. uint32 slotSize = Func::GetDiagLocalSlotSize();
  78. switch (slotSize)
  79. {
  80. case 4:
  81. opnd1Type = TyInt32;
  82. opnd2 = IR::IntConstOpnd::New(Func::c_debugFillPattern4, opnd1Type, m_func);
  83. break;
  84. case 8:
  85. opnd1Type = TyInt64;
  86. opnd2 = IR::AddrOpnd::New((Js::Var)Func::c_debugFillPattern8, IR::AddrOpndKindConstant, m_func);
  87. break;
  88. default:
  89. AssertMsg(FALSE, "Unsupported slot size!");
  90. opnd1Type = TyIllegal;
  91. opnd2 = nullptr;
  92. }
  93. sym = StackSym::New(opnd1Type, m_func);
  94. sym->m_offset = offset;
  95. sym->m_allocated = true;
  96. opnd1 = IR::SymOpnd::New(sym, TyInt32, m_func);
  97. LowererMD::CreateAssign(opnd1, opnd2, m_func->GetFunctionEntryInsertionPoint());
  98. }
  99. #endif
  100. }
  101. Assert(!m_func->HasAnyStackNestedFunc());
  102. }
  103. this->LowerRange(m_func->m_headInstr, m_func->m_tailInstr, defaultDoFastPath, loopFastPath);
  104. this->m_func->ClearCloneMap();
  105. if (m_func->HasAnyStackNestedFunc())
  106. {
  107. EnsureZeroLastStackFunctionNext();
  108. }
  109. if (!m_func->IsSimpleJit())
  110. {
  111. Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();
  112. Assert(entryPointInfo->GetJitTransferData() != nullptr && !entryPointInfo->GetJitTransferData()->GetIsReady());
  113. }
  114. this->initializedTempSym = nullptr;
  115. this->m_alloc = nullptr;
  116. this->m_func->DisableConstandAddressLoadHoist();
  117. }
  118. void
  119. Lowerer::LowerRange(IR::Instr *instrStart, IR::Instr *instrEnd, bool defaultDoFastPath, bool defaultDoLoopFastPath)
  120. {
  121. bool noMathFastPath;
  122. bool noFieldFastPath;
  123. bool fNoLower = false;
  124. noFieldFastPath = !defaultDoFastPath;
  125. noMathFastPath = !defaultDoFastPath;
  126. #if DBG_DUMP
  127. wchar_t * globOptInstrString = nullptr;
  128. #endif
  129. FOREACH_INSTR_BACKWARD_EDITING_IN_RANGE(instr, instrPrev, instrEnd, instrStart)
  130. {
  131. // Try to peep this`
  132. instr = this->PreLowerPeepInstr(instr, &instrPrev);
  133. #if DBG
  134. IR::Instr * verifyLegalizeInstrNext = instr->m_next;
  135. #endif
  136. // If we have debugger bailout as part of real instr (not separate BailForDebugger instr),
  137. // extract/split out BailOutForDebugger into separate instr, if needed.
  138. // The instr can have just debugger bailout, or debugger bailout + other shared bailout.
  139. // Note that by the time we get here, we should not have aux-only bailout (in globopt we promote it to normal bailout).
  140. if (m_func->IsJitInDebugMode() && instr->HasBailOutInfo() &&
  141. ((instr->GetBailOutKind() & IR::BailOutForDebuggerBits) && instr->m_opcode != Js::OpCode::BailForDebugger ||
  142. instr->HasAuxBailOut()))
  143. {
  144. instr = this->SplitBailForDebugger(instr); // Change instr, as returned is the one we need to lower next.
  145. instrPrev = instr->m_prev; // Change just in case if instr got changed.
  146. }
  147. #if DBG_DUMP
  148. if (!instr->IsLowered() && !instr->IsLabelInstr()
  149. && (CONFIG_FLAG(ForcePostLowerGlobOptInstrString) ||
  150. PHASE_DUMP(Js::LowererPhase, m_func) ||
  151. PHASE_DUMP(Js::LinearScanPhase, m_func) ||
  152. PHASE_DUMP(Js::RegAllocPhase, m_func) ||
  153. PHASE_DUMP(Js::PeepsPhase, m_func) ||
  154. PHASE_DUMP(Js::LayoutPhase, m_func) ||
  155. PHASE_DUMP(Js::EmitterPhase, m_func) ||
  156. PHASE_DUMP(Js::EncoderPhase, m_func) ||
  157. PHASE_DUMP(Js::BackEndPhase, m_func)))
  158. {
  159. if(instr->m_next && instr->m_next->m_opcode != Js::OpCode::StatementBoundary && !instr->m_next->IsLabelInstr())
  160. {
  161. instr->m_next->globOptInstrString = globOptInstrString;
  162. }
  163. globOptInstrString = instr->DumpString();
  164. }
  165. #endif
  166. IR::Opnd *src1;
  167. IR::RegOpnd *srcReg1;
  168. IR::RegOpnd *srcReg2;
  169. if (instr->IsBranchInstr() && !instr->AsBranchInstr()->IsMultiBranch() && instr->AsBranchInstr()->GetTarget()->m_isLoopTop)
  170. {
  171. Loop * loop = instr->AsBranchInstr()->GetTarget()->GetLoop();
  172. if (this->outerMostLoopLabel == nullptr && !loop->isProcessed)
  173. {
  174. while (loop && loop->GetLoopTopInstr()) // some loops are optimized away so that they are not loops anymore.
  175. // They do, however, stay in the loop graph but don't have loop top labels assigned to them
  176. {
  177. this->outerMostLoopLabel = loop->GetLoopTopInstr();
  178. Assert(this->outerMostLoopLabel->m_isLoopTop);
  179. // landing pad must fall through to the loop
  180. Assert(this->outerMostLoopLabel->m_prev->HasFallThrough());
  181. loop = loop->parent;
  182. }
  183. this->initializedTempSym->ClearAll();
  184. }
  185. noFieldFastPath = !defaultDoLoopFastPath;
  186. noMathFastPath = !defaultDoLoopFastPath;
  187. }
  188. #ifdef INLINE_CACHE_STATS
  189. if(PHASE_STATS1(Js::PolymorphicInlineCachePhase))
  190. {
  191. // Always use the slow path, so we can track property accesses
  192. noFieldFastPath = true;
  193. }
  194. #endif
  195. switch(instr->m_opcode)
  196. {
  197. case Js::OpCode::LdHandlerScope:
  198. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdHandlerScope);
  199. break;
  200. case Js::OpCode::InitSetFld:
  201. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitSetter, IR::HelperOP_InitSetter, false);
  202. break;
  203. case Js::OpCode::InitGetFld:
  204. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitGetter, IR::HelperOP_InitGetter, false);
  205. break;
  206. case Js::OpCode::InitProto:
  207. instrPrev = this->LowerStFld(instr, IR::HelperOP_InitProto, IR::HelperOP_InitProto, false);
  208. break;
  209. case Js::OpCode::LdArgCnt:
  210. this->LoadArgumentCount(instr);
  211. break;
  212. case Js::OpCode::LdStackArgPtr:
  213. this->LoadStackArgPtr(instr);
  214. break;
  215. case Js::OpCode::LdHeapArguments:
  216. case Js::OpCode::LdLetHeapArguments:
  217. instrPrev = m_lowererMD.LoadHeapArguments(instr);
  218. break;
  219. case Js::OpCode::LdArgumentsFromStack:
  220. instrPrev = this->LoadArgumentsFromStack(instr);
  221. break;
  222. case Js::OpCode::LdHeapArgsCached:
  223. case Js::OpCode::LdLetHeapArgsCached:
  224. m_lowererMD.LoadHeapArgsCached(instr);
  225. break;
  226. case Js::OpCode::InvalCachedScope:
  227. this->LowerBinaryHelper(instr, IR::HelperOP_InvalidateCachedScope);
  228. break;
  229. case Js::OpCode::NewScopeObject:
  230. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewScopeObject);
  231. break;
  232. case Js::OpCode::NewStackScopeSlots:
  233. this->LowerNewScopeSlots(instr, m_func->DoStackScopeSlots());
  234. break;
  235. case Js::OpCode::NewScopeSlots:
  236. this->LowerNewScopeSlots(instr, false);
  237. break;
  238. case Js::OpCode::InitLocalClosure:
  239. // Real initialization of the stack pointers happens on entry to the function, so this instruction
  240. // (which exists to provide a def in the IR) can go away.
  241. instr->Remove();
  242. break;
  243. case Js::OpCode::NewScopeSlotsWithoutPropIds:
  244. this->LowerBinaryHelperMemWithFuncBody(instr, IR::HelperOP_NewScopeSlotsWithoutPropIds);
  245. break;
  246. case Js::OpCode::NewBlockScope:
  247. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewBlockScope);
  248. break;
  249. case Js::OpCode::NewPseudoScope:
  250. m_lowererMD.ChangeToHelperCallMem(instr, IR::HelperOP_NewPseudoScope);
  251. break;
  252. case Js::OpCode::CloneInnerScopeSlots:
  253. this->LowerUnaryHelperMem(instr, IR::HelperOP_CloneInnerScopeSlots);
  254. break;
  255. case Js::OpCode::CloneBlockScope:
  256. this->LowerUnaryHelperMem(instr, IR::HelperOP_CloneBlockScope);
  257. break;
  258. case Js::OpCode::GetCachedFunc:
  259. m_lowererMD.LowerGetCachedFunc(instr);
  260. break;
  261. case Js::OpCode::BrFncCachedScopeEq:
  262. case Js::OpCode::BrFncCachedScopeNeq:
  263. this->LowerBrFncCachedScopeEq(instr);
  264. break;
  265. case Js::OpCode::CommitScope:
  266. m_lowererMD.LowerCommitScope(instr);
  267. break;
  268. case Js::OpCode::LdFldForTypeOf:
  269. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValueForTypeOf, IR::HelperOp_PatchGetValuePolymorphicForTypeOf,
  270. IR::HelperOp_PatchGetValueForTypeOf, IR::HelperOp_PatchGetValuePolymorphicForTypeOf);
  271. break;
  272. case Js::OpCode::LdFld:
  273. case Js::OpCode::LdFldForCallApplyTarget:
  274. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValue, IR::HelperOp_PatchGetValuePolymorphic,
  275. IR::HelperOp_PatchGetValue, IR::HelperOp_PatchGetValuePolymorphic);
  276. break;
  277. case Js::OpCode::LdSuperFld:
  278. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetValueWithThisPtr, IR::HelperOp_PatchGetValuePolymorphicWithThisPtr,
  279. IR::HelperOp_PatchGetValueWithThisPtr, IR::HelperOp_PatchGetValuePolymorphicWithThisPtr);
  280. break;
  281. case Js::OpCode::LdRootFld:
  282. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootValue, IR::HelperOp_PatchGetRootValuePolymorphic,
  283. IR::HelperOp_PatchGetRootValue, IR::HelperOp_PatchGetRootValuePolymorphic);
  284. break;
  285. case Js::OpCode::LdRootFldForTypeOf:
  286. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootValueForTypeOf, IR::HelperOp_PatchGetRootValuePolymorphicForTypeOf,
  287. IR::HelperOp_PatchGetRootValueForTypeOf, IR::HelperOp_PatchGetRootValuePolymorphicForTypeOf);
  288. break;
  289. case Js::OpCode::LdMethodFldPolyInlineMiss:
  290. instrPrev = LowerLdFld(instr, IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic, true, nullptr, true);
  291. break;
  292. case Js::OpCode::LdMethodFld:
  293. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic,
  294. IR::HelperOp_PatchGetMethod, IR::HelperOp_PatchGetMethodPolymorphic);
  295. break;
  296. case Js::OpCode::LdRootMethodFld:
  297. instrPrev = GenerateCompleteLdFld<true>(instr, !noFieldFastPath, IR::HelperOp_PatchGetRootMethod, IR::HelperOp_PatchGetRootMethodPolymorphic,
  298. IR::HelperOp_PatchGetRootMethod, IR::HelperOp_PatchGetRootMethodPolymorphic);
  299. break;
  300. case Js::OpCode::ScopedLdMethodFld:
  301. // "Scoped" in ScopedLdMethodFld is a bit of a misnomer because it doesn't look through a scope chain.
  302. // Instead the op is to allow for either a LdRootMethodFld or LdMethodFld depending on whether the
  303. // object is the root object or not.
  304. instrPrev = GenerateCompleteLdFld<false>(instr, !noFieldFastPath, IR::HelperOp_ScopedGetMethod, IR::HelperOp_ScopedGetMethodPolymorphic,
  305. IR::HelperOp_ScopedGetMethod, IR::HelperOp_ScopedGetMethodPolymorphic);
  306. break;
  307. case Js::OpCode::LdMethodFromFlags:
  308. {
  309. Assert(instr->HasBailOutInfo());
  310. bool success = m_lowererMD.GenerateFastLdMethodFromFlags(instr);
  311. AssertMsg(success, "Not expected to generate helper block here");
  312. break;
  313. }
  314. case Js::OpCode::CheckFixedFld:
  315. AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instr->m_func->GetJnFunction()) || !PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func->GetJnFunction()), "CheckFixedFld with fixed prop(Data|Method) phase disabled?");
  316. this->GenerateCheckFixedFld(instr);
  317. break;
  318. case Js::OpCode::CheckPropertyGuardAndLoadType:
  319. instrPrev = this->GeneratePropertyGuardCheckBailoutAndLoadType(instr);
  320. break;
  321. case Js::OpCode::CheckObjType:
  322. this->GenerateCheckObjType(instr);
  323. break;
  324. case Js::OpCode::AdjustObjType:
  325. this->LowerAdjustObjType(instr);
  326. break;
  327. case Js::OpCode::DeleteFld:
  328. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteProperty, false, false);
  329. break;
  330. case Js::OpCode::DeleteRootFld:
  331. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteRootProperty, false, false);
  332. break;
  333. case Js::OpCode::DeleteFldStrict:
  334. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteProperty, false, true);
  335. break;
  336. case Js::OpCode::DeleteRootFldStrict:
  337. instrPrev = this->LowerDelFld(instr, IR::HelperOp_DeleteRootProperty, false, true);
  338. break;
  339. case Js::OpCode::ScopedLdFldForTypeOf:
  340. if (!noFieldFastPath)
  341. {
  342. m_lowererMD.GenerateFastScopedLdFld(instr);
  343. }
  344. instrPrev = this->LowerScopedLdFld(instr, IR::HelperOp_PatchGetPropertyForTypeOfScoped, true);
  345. break;
  346. case Js::OpCode::ScopedLdFld:
  347. if (!noFieldFastPath)
  348. {
  349. m_lowererMD.GenerateFastScopedLdFld(instr);
  350. }
  351. instrPrev = this->LowerScopedLdFld(instr, IR::HelperOp_PatchGetPropertyScoped, true);
  352. break;
  353. case Js::OpCode::ScopedLdInst:
  354. instrPrev = this->LowerScopedLdInst(instr, IR::HelperOp_GetInstanceScoped);
  355. break;
  356. case Js::OpCode::ScopedDeleteFld:
  357. instrPrev = this->LowerScopedDelFld(instr, IR::HelperOp_DeletePropertyScoped, false, false);
  358. break;
  359. case Js::OpCode::ScopedDeleteFldStrict:
  360. instrPrev = this->LowerScopedDelFld(instr, IR::HelperOp_DeletePropertyScoped, false, true);
  361. break;
  362. case Js::OpCode::NewScFunc:
  363. instrPrev = this->LowerNewScFunc(instr);
  364. break;
  365. case Js::OpCode::NewScGenFunc:
  366. instrPrev = this->LowerNewScGenFunc(instr);
  367. break;
  368. case Js::OpCode::StFld:
  369. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueNoLocalFastPath, IR::HelperOp_PatchPutValueNoLocalFastPathPolymorphic,
  370. IR::HelperOp_PatchPutValue, IR::HelperOp_PatchPutValuePolymorphic, true, Js::PropertyOperation_None);
  371. break;
  372. case Js::OpCode::StSuperFld:
  373. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueWithThisPtrNoLocalFastPath, IR::HelperOp_PatchPutValueWithThisPtrNoLocalFastPathPolymorphic,
  374. IR::HelperOp_PatchPutValueWithThisPtr, IR::HelperOp_PatchPutValueWithThisPtrPolymorphic, true, Js::PropertyOperation_None);
  375. break;
  376. case Js::OpCode::StRootFld:
  377. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPathPolymorphic,
  378. IR::HelperOp_PatchPutRootValue, IR::HelperOp_PatchPutRootValuePolymorphic, true, Js::PropertyOperation_Root);
  379. break;
  380. case Js::OpCode::StFldStrict:
  381. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutValueNoLocalFastPath, IR::HelperOp_PatchPutValueNoLocalFastPathPolymorphic,
  382. IR::HelperOp_PatchPutValue, IR::HelperOp_PatchPutValuePolymorphic, true, Js::PropertyOperation_StrictMode);
  383. break;
  384. case Js::OpCode::StRootFldStrict:
  385. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPath, IR::HelperOp_PatchPutRootValueNoLocalFastPathPolymorphic,
  386. IR::HelperOp_PatchPutRootValue, IR::HelperOp_PatchPutRootValuePolymorphic, true, Js::PropertyOperation_StrictModeRoot);
  387. break;
  388. case Js::OpCode::InitFld:
  389. case Js::OpCode::InitRootFld:
  390. instrPrev = GenerateCompleteStFld(instr, !noFieldFastPath, IR::HelperOp_PatchInitValue, IR::HelperOp_PatchInitValuePolymorphic,
  391. IR::HelperOp_PatchInitValue, IR::HelperOp_PatchInitValuePolymorphic, false, Js::PropertyOperation_None);
  392. break;
  393. case Js::OpCode::ScopedInitFunc:
  394. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_InitFuncScoped, false);
  395. break;
  396. case Js::OpCode::ScopedStFld:
  397. case Js::OpCode::ScopedStFldStrict:
  398. if (!noFieldFastPath)
  399. {
  400. m_lowererMD.GenerateFastScopedStFld(instr);
  401. }
  402. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_PatchSetPropertyScoped, true, true,
  403. instr->m_opcode == Js::OpCode::ScopedStFld ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode);
  404. break;
  405. case Js::OpCode::ConsoleScopedStFld:
  406. {
  407. if (!noFieldFastPath)
  408. {
  409. m_lowererMD.GenerateFastScopedStFld(instr);
  410. }
  411. Js::PropertyOperationFlags flags = static_cast<Js::PropertyOperationFlags>(Js::PropertyOperation_None | Js::PropertyOperation_AllowUndeclInConsoleScope);
  412. instrPrev = this->LowerScopedStFld(instr, IR::HelperOp_ConsolePatchSetPropertyScoped, true, true, flags);
  413. break;
  414. }
  415. case Js::OpCode::LdStr:
  416. m_lowererMD.ChangeToAssign(instr);
  417. break;
  418. case Js::OpCode::CloneStr:
  419. {
  420. GenerateGetImmutableOrScriptUnreferencedString(instr->GetSrc1()->AsRegOpnd(), instr, IR::HelperOp_CompoundStringCloneForAppending, false);
  421. instr->Remove();
  422. break;
  423. }
  424. case Js::OpCode::NewScObjArray:
  425. instrPrev = this->LowerNewScObjArray(instr);
  426. break;
  427. case Js::OpCode::NewScObject:
  428. case Js::OpCode::NewScObjectSpread:
  429. case Js::OpCode::NewScObjArraySpread:
  430. instrPrev = this->LowerNewScObject(instr, true, true);
  431. break;
  432. case Js::OpCode::NewScObjectNoCtor:
  433. instrPrev = this->LowerNewScObject(instr, false, true);
  434. break;
  435. case Js::OpCode::NewScObjectNoCtorFull:
  436. instrPrev = this->LowerNewScObject(instr, false, true, true);
  437. break;
  438. case Js::OpCode::GetNewScObject:
  439. instrPrev = this->LowerGetNewScObject(instr);
  440. break;
  441. case Js::OpCode::UpdateNewScObjectCache:
  442. instrPrev = instr->m_prev;
  443. this->LowerUpdateNewScObjectCache(instr, instr->GetSrc2(), instr->GetSrc1(), true /* isCtorFunction */);
  444. instr->Remove();
  445. break;
  446. case Js::OpCode::NewScObjectSimple:
  447. this->LowerNewScObjectSimple(instr);
  448. break;
  449. case Js::OpCode::NewScObjectLiteral:
  450. this->LowerNewScObjectLiteral(instr);
  451. break;
  452. case Js::OpCode::LdPropIds:
  453. m_lowererMD.ChangeToAssign(instr);
  454. break;
  455. case Js::OpCode::StArrSegItem_A:
  456. instrPrev = this->LowerArraySegmentVars(instr);
  457. break;
  458. case Js::OpCode::InlineMathAcos:
  459. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Acos);
  460. break;
  461. case Js::OpCode::InlineMathAsin:
  462. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Asin);
  463. break;
  464. case Js::OpCode::InlineMathAtan:
  465. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Atan);
  466. break;
  467. case Js::OpCode::InlineMathAtan2:
  468. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Atan2);
  469. break;
  470. case Js::OpCode::InlineMathCos:
  471. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Cos);
  472. break;
  473. case Js::OpCode::InlineMathExp:
  474. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Exp);
  475. break;
  476. case Js::OpCode::InlineMathLog:
  477. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Log);
  478. break;
  479. case Js::OpCode::InlineMathPow:
  480. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Pow);
  481. break;
  482. case Js::OpCode::InlineMathSin:
  483. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Sin);
  484. break;
  485. case Js::OpCode::InlineMathSqrt:
  486. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  487. break;
  488. case Js::OpCode::InlineMathTan:
  489. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Tan);
  490. break;
  491. case Js::OpCode::InlineMathFloor:
  492. #if defined(ASMJS_PLAT) && (defined(_M_X64) || defined(_M_IX86))
  493. if (!AutoSystemInfo::Data.SSE4_1Available() && instr->m_func->GetJnFunction()->GetIsAsmjsMode())
  494. {
  495. m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_FloorFlt, IR::HelperDirectMath_FloorDb);
  496. break;
  497. }
  498. #endif
  499. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  500. break;
  501. case Js::OpCode::InlineMathCeil:
  502. #if defined(ASMJS_PLAT) && (defined(_M_X64) || defined(_M_IX86))
  503. if (!AutoSystemInfo::Data.SSE4_1Available() && instr->m_func->GetJnFunction()->GetIsAsmjsMode())
  504. {
  505. m_lowererMD.HelperCallForAsmMathBuiltin(instr, IR::HelperDirectMath_CeilFlt, IR::HelperDirectMath_CeilDb);
  506. break;
  507. }
  508. #endif
  509. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  510. break;
  511. case Js::OpCode::InlineMathRound:
  512. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  513. break;
  514. case Js::OpCode::InlineMathAbs:
  515. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  516. break;
  517. case Js::OpCode::InlineMathImul:
  518. GenerateFastInlineMathImul(instr);
  519. break;
  520. case Js::OpCode::InlineMathClz32:
  521. GenerateFastInlineMathClz32(instr);
  522. break;
  523. case Js::OpCode::InlineMathFround:
  524. GenerateFastInlineMathFround(instr);
  525. break;
  526. case Js::OpCode::InlineMathMin:
  527. case Js::OpCode::InlineMathMax:
  528. m_lowererMD.GenerateFastInlineBuiltInCall(instr, (IR::JnHelperMethod)0);
  529. break;
  530. case Js::OpCode::InlineMathRandom:
  531. this->GenerateFastInlineBuiltInMathRandom(instr);
  532. break;
  533. #ifdef ENABLE_DOM_FAST_PATH
  534. case Js::OpCode::DOMFastPathGetter:
  535. this->LowerFastInlineDOMFastPathGetter(instr);
  536. break;
  537. #endif
  538. case Js::OpCode::InlineArrayPush:
  539. this->GenerateFastInlineArrayPush(instr);
  540. break;
  541. case Js::OpCode::InlineArrayPop:
  542. this->GenerateFastInlineArrayPop(instr);
  543. break;
  544. //Now retrieve the function object from the ArgOut_A_InlineSpecialized instruction opcode to push it on the stack after all the other arguments have been pushed.
  545. //The lowering of the direct call to helper is handled by GenerateDirectCall (architecture specific).
  546. case Js::OpCode::CallDirect:
  547. {
  548. IR::Opnd * src1 = instr->GetSrc1();
  549. Assert(src1->IsHelperCallOpnd());
  550. switch (src1->AsHelperCallOpnd()->m_fnHelper)
  551. {
  552. case IR::JnHelperMethod::HelperString_Split:
  553. case IR::JnHelperMethod::HelperString_Match:
  554. GenerateFastInlineStringSplitMatch(instr);
  555. break;
  556. case IR::JnHelperMethod::HelperRegExp_Exec:
  557. GenerateFastInlineRegExpExec(instr);
  558. break;
  559. case IR::JnHelperMethod::HelperGlobalObject_ParseInt:
  560. GenerateFastInlineGlobalObjectParseInt(instr);
  561. break;
  562. case IR::JnHelperMethod::HelperString_FromCharCode:
  563. GenerateFastInlineStringFromCharCode(instr);
  564. break;
  565. case IR::JnHelperMethod::HelperString_FromCodePoint:
  566. GenerateFastInlineStringFromCodePoint(instr);
  567. break;
  568. case IR::JnHelperMethod::HelperString_CharAt:
  569. GenerateFastInlineStringCharCodeAt(instr, Js::BuiltinFunction::String_CharAt);
  570. break;
  571. case IR::JnHelperMethod::HelperString_CharCodeAt:
  572. GenerateFastInlineStringCharCodeAt(instr, Js::BuiltinFunction::String_CharCodeAt);
  573. break;
  574. case IR::JnHelperMethod::HelperString_Replace:
  575. GenerateFastInlineStringReplace(instr);
  576. break;
  577. }
  578. instrPrev = LowerCallDirect(instr);
  579. break;
  580. }
  581. case Js::OpCode::CallIDynamic:
  582. {
  583. Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
  584. instrPrev = this->LowerCallIDynamic(instr, (ushort)flags);
  585. break;
  586. }
  587. case Js::OpCode::CallIDynamicSpread:
  588. {
  589. Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
  590. instrPrev = this->LowerCallIDynamicSpread(instr, (ushort)flags);
  591. break;
  592. }
  593. case Js::OpCode::CallI:
  594. case Js::OpCode::CallINew:
  595. case Js::OpCode::CallIFixed:
  596. case Js::OpCode::CallINewTargetNew:
  597. {
  598. Js::CallFlags flags = Js::CallFlags_None;
  599. if (instr->isCtorCall)
  600. {
  601. flags = Js::CallFlags_New;
  602. }
  603. else
  604. {
  605. if (instr->m_opcode == Js::OpCode::CallINew)
  606. {
  607. flags = Js::CallFlags_New;
  608. }
  609. else if (instr->m_opcode == Js::OpCode::CallINewTargetNew)
  610. {
  611. flags = (Js::CallFlags) (Js::CallFlags_New | Js::CallFlags_ExtraArg | Js::CallFlags_NewTarget);
  612. }
  613. if (instr->GetDst())
  614. {
  615. flags = (Js::CallFlags) (flags | Js::CallFlags_Value);
  616. }
  617. else
  618. {
  619. flags = (Js::CallFlags) (flags | Js::CallFlags_NotUsed);
  620. }
  621. }
  622. if (!PHASE_OFF(Js::CallFastPathPhase, this->m_func) && !noMathFastPath)
  623. {
  624. // We shouldn't have turned this instruction into a fixed method call if we're calling one of the
  625. // built-ins we still inline in the lowerer.
  626. Assert(instr->m_opcode != Js::OpCode::CallIFixed || !Func::IsBuiltInInlinedInLowerer(instr->GetSrc1()));
  627. // Disable InlineBuiltInLibraryCall as it does not work well with 2nd chance reg alloc
  628. // and may invalidate live on back edge data by introducing refs across loops. See Winblue Bug: 577641
  629. //// Callee may still be a library built-in; if so, generate it inline.
  630. //if (this->InlineBuiltInLibraryCall(instr))
  631. //{
  632. // m_lowererMD.LowerCallI(instr, (ushort)flags, true /*isHelper*/);
  633. //}
  634. //else
  635. //{
  636. m_lowererMD.LowerCallI(instr, (ushort)flags);
  637. //}
  638. }
  639. else
  640. {
  641. m_lowererMD.LowerCallI(instr, (ushort)flags);
  642. }
  643. break;
  644. }
  645. case Js::OpCode::AsmJsCallI:
  646. m_lowererMD.LowerAsmJsCallI(instr);
  647. break;
  648. case Js::OpCode::AsmJsCallE:
  649. m_lowererMD.LowerAsmJsCallE(instr);
  650. break;
  651. case Js::OpCode::CallIEval:
  652. {
  653. Js::CallFlags flags = (Js::CallFlags)(Js::CallFlags_ExtraArg | (instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed));
  654. if (IsSpreadCall(instr))
  655. {
  656. instrPrev = LowerSpreadCall(instr, flags);
  657. }
  658. else
  659. {
  660. m_lowererMD.LowerCallI(instr, (ushort)flags);
  661. }
  662. #ifdef PERF_HINT
  663. if (PHASE_TRACE1(Js::PerfHintPhase))
  664. {
  665. WritePerfHint(PerfHints::CallsEval, this->m_func->GetJnFunction(), instr->GetByteCodeOffset());
  666. }
  667. #endif
  668. break;
  669. }
  670. case Js::OpCode::CallIPut:
  671. m_lowererMD.LowerCallPut(instr);
  672. break;
  673. case Js::OpCode::CallHelper:
  674. instrPrev = m_lowererMD.LowerCallHelper(instr);
  675. break;
  676. case Js::OpCode::Ret:
  677. if (instr->m_next->m_opcode != Js::OpCode::FunctionExit)
  678. {
  679. // If this RET isn't at the end of the function, insert a branch to
  680. // the epilog.
  681. IR::Instr *exitPrev = m_func->m_exitInstr->m_prev;
  682. if (!exitPrev->IsLabelInstr())
  683. {
  684. exitPrev = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  685. m_func->m_exitInstr->InsertBefore(exitPrev);
  686. }
  687. IR::BranchInstr *exitBr = IR::BranchInstr::New(Js::OpCode::Br,
  688. exitPrev->AsLabelInstr(), m_func);
  689. instr->InsertAfter(exitBr);
  690. m_lowererMD.LowerUncondBranch(exitBr);
  691. }
  692. m_lowererMD.LowerRet(instr);
  693. break;
  694. case Js::OpCode::LdArgumentsFromFrame:
  695. this->LoadArgumentsFromFrame(instr);
  696. break;
  697. case Js::OpCode::LdC_A_I4:
  698. src1 = instr->UnlinkSrc1();
  699. AssertMsg(src1->IsIntConstOpnd(), "Source of LdC_A_I4 should be an IntConst...");
  700. instrPrev = this->LowerLoadVar(instr,
  701. IR::AddrOpnd::NewFromNumber(static_cast<int32>(src1->AsIntConstOpnd()->GetValue()), this->m_func));
  702. src1->Free(this->m_func);
  703. break;
  704. case Js::OpCode::LdC_A_R8:
  705. src1 = instr->UnlinkSrc1();
  706. AssertMsg(src1->IsFloatConstOpnd(), "Source of LdC_A_R8 should be a FloatConst...");
  707. instrPrev = this->LowerLoadVar(instr, src1->AsFloatConstOpnd()->GetAddrOpnd(this->m_func));
  708. src1->Free(this->m_func);
  709. break;
  710. case Js::OpCode::LdC_F8_R8:
  711. src1 = instr->UnlinkSrc1();
  712. AssertMsg(src1->IsFloatConstOpnd(), "Source of LdC_F8_R8 should be a FloatConst...");
  713. instrPrev = m_lowererMD.LoadFloatValue(instr->UnlinkDst()->AsRegOpnd(), src1->AsFloatConstOpnd()->m_value, instr);
  714. src1->Free(this->m_func);
  715. instr->Remove();
  716. break;
  717. case Js::OpCode::NewRegEx:
  718. instrPrev = this->LowerNewRegEx(instr);
  719. break;
  720. case Js::OpCode::Conv_Obj:
  721. this->LowerUnaryHelperMem(instr, IR::HelperOp_ConvObject);
  722. break;
  723. case Js::OpCode::NewWithObject:
  724. this->LowerUnaryHelperMem(instr, IR::HelperOp_NewWithObject);
  725. break;
  726. case Js::OpCode::LdCustomSpreadIteratorList:
  727. this->LowerUnaryHelperMem(instr, IR::HelperOp_ToSpreadedFunctionArgument);
  728. break;
  729. case Js::OpCode::Conv_Num:
  730. this->LowerConvNum(instr, noMathFastPath);
  731. break;
  732. case Js::OpCode::Incr_A:
  733. if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  734. {
  735. this->LowerUnaryHelperMem(instr, IR::HelperOp_Increment);
  736. }
  737. else
  738. {
  739. instr->SetSrc2(IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(1), IR::AddrOpndKindConstantVar, this->m_func));
  740. m_lowererMD.GenerateFastAdd(instr);
  741. instr->FreeSrc2();
  742. this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Increment));
  743. }
  744. break;
  745. case Js::OpCode::Decr_A:
  746. if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  747. {
  748. this->LowerUnaryHelperMem(instr, IR::HelperOp_Decrement);
  749. }
  750. else
  751. {
  752. instr->SetSrc2(IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(1), IR::AddrOpndKindConstantVar, this->m_func));
  753. m_lowererMD.GenerateFastSub(instr);
  754. instr->FreeSrc2();
  755. this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Decrement));
  756. }
  757. break;
  758. case Js::OpCode::Neg_A:
  759. if (instr->GetDst()->IsFloat())
  760. {
  761. Assert(instr->GetSrc1()->IsFloat());
  762. m_lowererMD.LowerToFloat(instr);
  763. }
  764. else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  765. {
  766. this->LowerUnaryHelperMem(instr, IR::HelperOp_Negate);
  767. }
  768. else if (m_lowererMD.GenerateFastNeg(instr))
  769. {
  770. this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Negate));
  771. }
  772. break;
  773. case Js::OpCode::Not_A:
  774. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
  775. {
  776. this->LowerUnaryHelperMem(instr, IR::HelperOp_Not);
  777. }
  778. else if (m_lowererMD.GenerateFastNot(instr))
  779. {
  780. this->LowerUnaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Not));
  781. }
  782. break;
  783. case Js::OpCode::BrEq_I4:
  784. case Js::OpCode::BrNeq_I4:
  785. case Js::OpCode::BrGt_I4:
  786. case Js::OpCode::BrGe_I4:
  787. case Js::OpCode::BrLt_I4:
  788. case Js::OpCode::BrLe_I4:
  789. case Js::OpCode::BrUnGt_I4:
  790. case Js::OpCode::BrUnGe_I4:
  791. case Js::OpCode::BrUnLt_I4:
  792. case Js::OpCode::BrUnLe_I4:
  793. {
  794. // See calls to MarkOneFltTmpSym under BrSrEq. This is to handle the case
  795. // where a branch is type-specialized and uses the result of a float pref op,
  796. // which must then be saved to var at the def.
  797. StackSym *sym = instr->GetSrc1()->GetStackSym();
  798. if (sym)
  799. {
  800. sym = sym->GetVarEquivSym(nullptr);
  801. }
  802. sym = instr->GetSrc2()->GetStackSym();
  803. if (sym)
  804. {
  805. sym = sym->GetVarEquivSym(nullptr);
  806. }
  807. }
  808. // FALLTHROUGH
  809. case Js::OpCode::Neg_I4:
  810. case Js::OpCode::Not_I4:
  811. case Js::OpCode::Add_I4:
  812. case Js::OpCode::Sub_I4:
  813. case Js::OpCode::Mul_I4:
  814. case Js::OpCode::Rem_I4:
  815. case Js::OpCode::Or_I4:
  816. case Js::OpCode::Xor_I4:
  817. case Js::OpCode::And_I4:
  818. case Js::OpCode::Shl_I4:
  819. case Js::OpCode::Shr_I4:
  820. case Js::OpCode::ShrU_I4:
  821. case Js::OpCode::BrTrue_I4:
  822. case Js::OpCode::BrFalse_I4:
  823. if(instr->HasBailOutInfo())
  824. {
  825. const auto bailOutKind = instr->GetBailOutKind();
  826. if(bailOutKind & IR::BailOutOnResultConditions ||
  827. bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck)
  828. {
  829. const auto nonBailOutInstr = SplitBailOnResultCondition(instr);
  830. IR::LabelInstr *bailOutLabel, *skipBailOutLabel;
  831. LowerBailOnResultCondition(instr, &bailOutLabel, &skipBailOutLabel);
  832. LowerInstrWithBailOnResultCondition(nonBailOutInstr, bailOutKind, bailOutLabel, skipBailOutLabel);
  833. }
  834. else if(bailOutKind == IR::BailOnModByPowerOf2)
  835. {
  836. Assert(instr->m_opcode == Js::OpCode::Rem_I4);
  837. bool fastPath = GenerateSimplifiedInt4Rem(instr);
  838. Assert(fastPath);
  839. instr->FreeSrc1();
  840. instr->FreeSrc2();
  841. this->GenerateBailOut(instr);
  842. }
  843. }
  844. else
  845. {
  846. if (instr->m_opcode == Js::OpCode::Rem_I4)
  847. {
  848. // fast path
  849. this->GenerateSimplifiedInt4Rem(instr);
  850. // slow path
  851. this->LowerRemI4(instr);
  852. }
  853. #if defined(_M_IX86) || defined(_M_X64)
  854. else if (instr->m_opcode == Js::OpCode::Mul_I4)
  855. {
  856. if (!LowererMD::GenerateSimplifiedInt4Mul(instr))
  857. {
  858. m_lowererMD.EmitInt4Instr(instr);
  859. }
  860. }
  861. #endif
  862. else
  863. {
  864. m_lowererMD.EmitInt4Instr(instr);
  865. }
  866. }
  867. break;
  868. case Js::OpCode::Div_I4:
  869. this->LowerDivI4(instr);
  870. break;
  871. case Js::OpCode::Add_Ptr:
  872. m_lowererMD.EmitPtrInstr(instr);
  873. break;
  874. case Js::OpCode::Typeof:
  875. this->LowerUnaryHelperMem(instr, IR::HelperOp_Typeof);
  876. break;
  877. case Js::OpCode::TypeofElem:
  878. this->LowerLdElemI(instr, IR::HelperOp_TypeofElem, false);
  879. break;
  880. case Js::OpCode::LdLen_A:
  881. {
  882. bool fastPath = !noMathFastPath;
  883. if(!fastPath && instr->HasBailOutInfo())
  884. {
  885. // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
  886. // bailouts require the fast path.
  887. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  888. if(bailOutKind & IR::BailOutKindBits)
  889. {
  890. fastPath = true;
  891. }
  892. else
  893. {
  894. const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
  895. fastPath =
  896. bailOutKindMinusBits &&
  897. bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
  898. bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
  899. }
  900. }
  901. bool instrIsInHelperBlock;
  902. if(!fastPath)
  903. {
  904. LowerLdLen(instr, false);
  905. }
  906. else if(GenerateFastLdLen(instr, &instrIsInHelperBlock))
  907. {
  908. Assert(
  909. !instr->HasBailOutInfo() ||
  910. (instr->GetBailOutKind() & ~IR::BailOutKindBits) != IR::BailOutOnIrregularLength);
  911. LowerLdLen(instr, instrIsInHelperBlock);
  912. }
  913. break;
  914. }
  915. case Js::OpCode::LdThis:
  916. {
  917. if (noFieldFastPath || !m_lowererMD.GenerateLdThisCheck(instr))
  918. {
  919. IR::JnHelperMethod meth;
  920. if (instr->IsJitProfilingInstr())
  921. {
  922. Assert(instr->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
  923. m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
  924. meth = IR::HelperSimpleProfiledLdThis;
  925. this->LowerBinaryHelper(instr, meth);
  926. }
  927. else
  928. {
  929. meth = IR::HelperLdThisNoFastPath;
  930. this->LowerBinaryHelperMem(instr, meth);
  931. }
  932. }
  933. else
  934. {
  935. this->LowerBinaryHelperMem(instr, IR::HelperLdThis);
  936. }
  937. break;
  938. }
  939. case Js::OpCode::StrictLdThis:
  940. if (noFieldFastPath)
  941. {
  942. IR::JnHelperMethod meth;
  943. if (instr->IsJitProfilingInstr())
  944. {
  945. Assert(instr->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
  946. m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
  947. meth = IR::HelperSimpleProfiledStrictLdThis;
  948. this->LowerUnaryHelper(instr, meth);
  949. }
  950. else
  951. {
  952. meth = IR::HelperStrictLdThis;
  953. this->LowerUnaryHelperMem(instr, meth);
  954. }
  955. }
  956. else
  957. {
  958. m_lowererMD.GenerateLdThisStrict(instr);
  959. instr->Remove();
  960. }
  961. break;
  962. case Js::OpCode::CheckThis:
  963. m_lowererMD.GenerateLdThisCheck(instr);
  964. instr->FreeSrc1();
  965. this->GenerateBailOut(instr);
  966. break;
  967. case Js::OpCode::StrictCheckThis:
  968. m_lowererMD.GenerateLdThisStrict(instr);
  969. instr->FreeSrc1();
  970. this->GenerateBailOut(instr);
  971. break;
  972. case Js::OpCode::NewScArray:
  973. instrPrev = this->LowerNewScArray(instr);
  974. break;
  975. case Js::OpCode::NewScArrayWithMissingValues:
  976. this->LowerUnaryHelperMem(instr, IR::HelperScrArr_OP_NewScArrayWithMissingValues);
  977. break;
  978. case Js::OpCode::NewScIntArray:
  979. instrPrev = this->LowerNewScIntArray(instr);
  980. break;
  981. case Js::OpCode::NewScFltArray:
  982. instrPrev = this->LowerNewScFltArray(instr);
  983. break;
  984. case Js::OpCode::GetForInEnumerator:
  985. this->LowerUnaryHelperMem(instr, IR::HelperOp_OP_GetForInEnumerator);
  986. break;
  987. case Js::OpCode::ReleaseForInEnumerator:
  988. this->LowerUnaryHelperMem(instr, IR::HelperOp_OP_ReleaseForInEnumerator);
  989. break;
  990. case Js::OpCode::Add_A:
  991. if (instr->GetDst()->IsFloat())
  992. {
  993. Assert(instr->GetSrc1()->IsFloat());
  994. Assert(instr->GetSrc2()->IsFloat());
  995. // we don't want to mix float32 and float64
  996. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  997. Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
  998. m_lowererMD.LowerToFloat(instr);
  999. }
  1000. else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  1001. {
  1002. this->LowerBinaryHelperMem(instr, IR::HelperOp_Add);
  1003. }
  1004. else if (m_lowererMD.TryGenerateFastMulAdd(instr, &instrPrev))
  1005. {
  1006. }
  1007. else
  1008. {
  1009. m_lowererMD.GenerateFastAdd(instr);
  1010. this->LowerBinaryHelperMemWithTemp3(instr, IR_HELPER_OP_FULL_OR_INPLACE(Add), IR::HelperOp_AddLeftDead);
  1011. }
  1012. break;
  1013. case Js::OpCode::Div_A:
  1014. {
  1015. if (instr->IsJitProfilingInstr()) {
  1016. LowerProfiledBinaryOp(instr->AsJitProfilingInstr(), IR::HelperSimpleProfiledDivide);
  1017. }
  1018. else if (instr->GetDst()->IsFloat())
  1019. {
  1020. Assert(instr->GetSrc1()->IsFloat());
  1021. Assert(instr->GetSrc2()->IsFloat());
  1022. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  1023. Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
  1024. m_lowererMD.LowerToFloat(instr);
  1025. }
  1026. else
  1027. {
  1028. if (!PHASE_OFF(Js::MathFastPathPhase, this->m_func) && !noMathFastPath)
  1029. {
  1030. IR::AddrOpnd *src2 = instr->GetSrc2()->IsAddrOpnd() ? instr->GetSrc2()->AsAddrOpnd() : nullptr;
  1031. if (src2 && src2->IsVar() && Js::TaggedInt::Is(src2->m_address))
  1032. {
  1033. int32 value = Js::TaggedInt::ToInt32(src2->m_address);
  1034. if (Math::IsPow2(value))
  1035. {
  1036. m_lowererMD.GenerateFastDivByPow2(instr);
  1037. }
  1038. }
  1039. }
  1040. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Divide));
  1041. }
  1042. break;
  1043. }
  1044. case Js::OpCode::Expo_A:
  1045. {
  1046. if (instr->GetDst()->IsFloat())
  1047. {
  1048. Assert(instr->GetSrc1()->IsFloat());
  1049. Assert(instr->GetSrc2()->IsFloat());
  1050. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  1051. Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
  1052. m_lowererMD.GenerateFastInlineBuiltInCall(instr, IR::HelperDirectMath_Pow);
  1053. }
  1054. else
  1055. {
  1056. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Exponentiation));
  1057. }
  1058. break;
  1059. }
  1060. case Js::OpCode::Mul_A:
  1061. if (instr->GetDst()->IsFloat())
  1062. {
  1063. Assert(instr->GetSrc1()->IsFloat());
  1064. Assert(instr->GetSrc2()->IsFloat());
  1065. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  1066. Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
  1067. m_lowererMD.LowerToFloat(instr);
  1068. }
  1069. else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  1070. {
  1071. this->LowerBinaryHelperMem(instr, IR::HelperOp_Multiply);
  1072. }
  1073. else if (m_lowererMD.GenerateFastMul(instr))
  1074. {
  1075. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Multiply));
  1076. }
  1077. break;
  1078. case Js::OpCode::Rem_A:
  1079. if (instr->GetDst()->IsFloat64())
  1080. {
  1081. this->LowerRemR8(instr);
  1082. }
  1083. else if (instr->IsJitProfilingInstr())
  1084. {
  1085. this->LowerProfiledBinaryOp(instr->AsJitProfilingInstr(), IR::HelperSimpleProfiledRemainder);
  1086. }
  1087. else
  1088. {
  1089. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Modulus));
  1090. }
  1091. break;
  1092. case Js::OpCode::Sub_A:
  1093. if (instr->GetDst()->IsFloat())
  1094. {
  1095. Assert(instr->GetSrc1()->IsFloat());
  1096. Assert(instr->GetSrc2()->IsFloat());
  1097. Assert(instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
  1098. Assert(instr->GetDst()->GetType() == instr->GetSrc2()->GetType());
  1099. m_lowererMD.LowerToFloat(instr);
  1100. }
  1101. else if (PHASE_OFF(Js::MathFastPathPhase, this->m_func) || noMathFastPath)
  1102. {
  1103. this->LowerBinaryHelperMem(instr, IR::HelperOp_Subtract);
  1104. }
  1105. else if (m_lowererMD.TryGenerateFastMulAdd(instr, &instrPrev))
  1106. {
  1107. }
  1108. else
  1109. {
  1110. m_lowererMD.GenerateFastSub(instr);
  1111. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Subtract));
  1112. }
  1113. break;
  1114. case Js::OpCode::And_A:
  1115. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
  1116. {
  1117. this->LowerBinaryHelperMem(instr, IR::HelperOp_And);
  1118. }
  1119. else if (m_lowererMD.GenerateFastAnd(instr))
  1120. {
  1121. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(And));
  1122. }
  1123. break;
  1124. case Js::OpCode::Or_A:
  1125. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath)
  1126. {
  1127. this->LowerBinaryHelperMem(instr, IR::HelperOp_Or);
  1128. }
  1129. else if (m_lowererMD.GenerateFastOr(instr))
  1130. {
  1131. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Or));
  1132. }
  1133. break;
  1134. case Js::OpCode::Xor_A:
  1135. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastXor(instr))
  1136. {
  1137. this->LowerBinaryHelperMemWithTemp2(instr, IR_HELPER_OP_FULL_OR_INPLACE(Xor));
  1138. }
  1139. break;
  1140. case Js::OpCode::Shl_A:
  1141. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftLeft(instr))
  1142. {
  1143. this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftLeft);
  1144. }
  1145. break;
  1146. case Js::OpCode::Shr_A:
  1147. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftRight(instr))
  1148. {
  1149. this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftRight);
  1150. }
  1151. break;
  1152. case Js::OpCode::ShrU_A:
  1153. if (PHASE_OFF(Js::BitopsFastPathPhase, this->m_func) || noMathFastPath || m_lowererMD.GenerateFastShiftRight(instr))
  1154. {
  1155. this->LowerBinaryHelperMem(instr, IR::HelperOp_ShiftRightU);
  1156. }
  1157. break;
  1158. case Js::OpCode::CmEq_A:
  1159. if (instr->GetSrc1()->IsFloat())
  1160. {
  1161. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1162. this->m_lowererMD.GenerateFastCmXxR8(instr);
  1163. }
  1164. else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
  1165. {
  1166. if (!fNoLower)
  1167. {
  1168. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmEq_A);
  1169. }
  1170. }
  1171. else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
  1172. {
  1173. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmEq_A);
  1174. }
  1175. break;
  1176. case Js::OpCode::CmNeq_A:
  1177. if (instr->GetSrc1()->IsFloat())
  1178. {
  1179. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1180. this->m_lowererMD.GenerateFastCmXxR8(instr);
  1181. }
  1182. else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
  1183. {
  1184. if (!fNoLower)
  1185. {
  1186. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmNeq_A);
  1187. }
  1188. }
  1189. else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
  1190. {
  1191. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmNeq_A);
  1192. }
  1193. break;
  1194. case Js::OpCode::CmSrEq_A:
  1195. if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
  1196. {
  1197. if (!fNoLower)
  1198. {
  1199. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_A);
  1200. }
  1201. }
  1202. else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastCmSrEq(instr))
  1203. {
  1204. }
  1205. else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
  1206. {
  1207. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_A);
  1208. }
  1209. break;
  1210. case Js::OpCode::CmSrNeq_A:
  1211. if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
  1212. {
  1213. if (!fNoLower)
  1214. {
  1215. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrNeq_A);
  1216. }
  1217. }
  1218. else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
  1219. {
  1220. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrNeq_A);
  1221. }
  1222. break;
  1223. case Js::OpCode::CmGt_A:
  1224. if (instr->GetSrc1()->IsFloat())
  1225. {
  1226. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1227. this->m_lowererMD.GenerateFastCmXxR8(instr);
  1228. }
  1229. else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
  1230. {
  1231. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmGt_A);
  1232. }
  1233. break;
  1234. case Js::OpCode::CmGe_A:
  1235. if (instr->GetSrc1()->IsFloat())
  1236. {
  1237. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1238. this->m_lowererMD.GenerateFastCmXxR8(instr);
  1239. }
  1240. else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
  1241. {
  1242. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmGe_A);
  1243. }
  1244. break;
  1245. case Js::OpCode::CmLt_A:
  1246. if (instr->GetSrc1()->IsFloat())
  1247. {
  1248. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1249. this->m_lowererMD.GenerateFastCmXxR8(instr);
  1250. }
  1251. else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
  1252. {
  1253. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmLt_A);
  1254. }
  1255. break;
  1256. case Js::OpCode::CmLe_A:
  1257. if (instr->GetSrc1()->IsFloat())
  1258. {
  1259. Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
  1260. this->m_lowererMD.GenerateFastCmXxR8(instr);
  1261. }
  1262. else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath || !m_lowererMD.GenerateFastCmXxTaggedInt(instr))
  1263. {
  1264. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmLe_A);
  1265. }
  1266. break;
  1267. case Js::OpCode::CmEq_I4:
  1268. case Js::OpCode::CmNeq_I4:
  1269. case Js::OpCode::CmGe_I4:
  1270. case Js::OpCode::CmGt_I4:
  1271. case Js::OpCode::CmLe_I4:
  1272. case Js::OpCode::CmLt_I4:
  1273. case Js::OpCode::CmUnGe_I4:
  1274. case Js::OpCode::CmUnGt_I4:
  1275. case Js::OpCode::CmUnLe_I4:
  1276. case Js::OpCode::CmUnLt_I4:
  1277. this->m_lowererMD.GenerateFastCmXxI4(instr);
  1278. break;
  1279. case Js::OpCode::Conv_Bool:
  1280. instrPrev = this->m_lowererMD.GenerateConvBool(instr);
  1281. break;
  1282. case Js::OpCode::IsInst:
  1283. m_lowererMD.GenerateFastIsInst(instr);
  1284. instrPrev = this->LowerIsInst(instr, IR::HelperScrObj_OP_IsInst);
  1285. break;
  1286. case Js::OpCode::IsIn:
  1287. this->LowerBinaryHelperMem(instr, IR::HelperOp_IsIn);
  1288. break;
  1289. case Js::OpCode::LdInt8ArrViewElem:
  1290. case Js::OpCode::LdUInt8ArrViewElem:
  1291. case Js::OpCode::LdInt16ArrViewElem:
  1292. case Js::OpCode::LdUInt16ArrViewElem:
  1293. case Js::OpCode::LdInt32ArrViewElem:
  1294. case Js::OpCode::LdUInt32ArrViewElem:
  1295. case Js::OpCode::LdFloat32ArrViewElem:
  1296. case Js::OpCode::LdFloat64ArrViewElem:
  1297. instrPrev = LowerLdArrViewElem(instr);
  1298. break;
  1299. case Js::OpCode::StInt8ArrViewElem:
  1300. case Js::OpCode::StUInt8ArrViewElem:
  1301. case Js::OpCode::StInt16ArrViewElem:
  1302. case Js::OpCode::StUInt16ArrViewElem:
  1303. case Js::OpCode::StInt32ArrViewElem:
  1304. case Js::OpCode::StUInt32ArrViewElem:
  1305. case Js::OpCode::StFloat32ArrViewElem:
  1306. case Js::OpCode::StFloat64ArrViewElem:
  1307. instrPrev = LowerStArrViewElem(instr);
  1308. break;
  1309. case Js::OpCode::Memset:
  1310. case Js::OpCode::Memcopy:
  1311. {
  1312. instrPrev = LowerMemOp(instr);
  1313. break;
  1314. }
  1315. case Js::OpCode::ArrayDetachedCheck:
  1316. instrPrev = LowerArrayDetachedCheck(instr);
  1317. break;
  1318. case Js::OpCode::StElemI_A:
  1319. case Js::OpCode::StElemI_A_Strict:
  1320. {
  1321. // Note: under debugger (Fast F12) don't let GenerateFastStElemI which calls into ToNumber_Helper
  1322. // which takes double, and currently our helper wrapper doesn't support double.
  1323. bool fastPath = !noMathFastPath && !m_func->IsJitInDebugMode();
  1324. if(!fastPath && instr->HasBailOutInfo())
  1325. {
  1326. // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
  1327. // bailouts require the fast path.
  1328. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  1329. const IR::BailOutKind bailOutKindBits = bailOutKind & IR::BailOutKindBits;
  1330. if(bailOutKindBits & ~(IR::BailOutOnMissingValue | IR::BailOutConvertedNativeArray))
  1331. {
  1332. fastPath = true;
  1333. }
  1334. else
  1335. {
  1336. const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
  1337. fastPath =
  1338. bailOutKindMinusBits &&
  1339. bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
  1340. bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
  1341. }
  1342. }
  1343. IR::Opnd * opnd = instr->GetDst();
  1344. IR::Opnd * baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
  1345. ValueType profiledBaseValueType = baseOpnd->AsRegOpnd()->GetValueType();
  1346. if (profiledBaseValueType.IsUninitialized() && baseOpnd->AsRegOpnd()->m_sym->IsSingleDef())
  1347. {
  1348. baseOpnd->SetValueType(baseOpnd->FindProfiledValueType());
  1349. }
  1350. bool instrIsInHelperBlock;
  1351. if (!fastPath)
  1352. {
  1353. this->LowerStElemI(
  1354. instr,
  1355. instr->m_opcode == Js::OpCode::StElemI_A ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode,
  1356. false);
  1357. }
  1358. else if (GenerateFastStElemI(instr, &instrIsInHelperBlock))
  1359. {
  1360. #if DBG
  1361. if(instr->HasBailOutInfo())
  1362. {
  1363. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  1364. Assert(
  1365. (bailOutKind & ~IR::BailOutKindBits) != IR::BailOutConventionalTypedArrayAccessOnly &&
  1366. !(
  1367. bailOutKind &
  1368. (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)
  1369. ));
  1370. }
  1371. #endif
  1372. this->LowerStElemI(
  1373. instr,
  1374. instr->m_opcode == Js::OpCode::StElemI_A ? Js::PropertyOperation_None : Js::PropertyOperation_StrictMode,
  1375. instrIsInHelperBlock);
  1376. }
  1377. break;
  1378. }
  1379. case Js::OpCode::LdElemI_A:
  1380. case Js::OpCode::LdMethodElem:
    {
        // Continuation of the LdElemI_A / LdMethodElem lowering (the case label is above this chunk).
        // Decide whether to emit the inline element-load fast path: it is off when math fast paths
        // are globally disabled, and LdMethodElem additionally requires the base to be likely an object.
        bool fastPath =
            !noMathFastPath &&
            (
                instr->m_opcode != Js::OpCode::LdMethodElem ||
                instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyObject()
            );
        if(!fastPath && instr->HasBailOutInfo())
        {
            // Some bailouts are generated around the helper call, and will work even if the fast path is disabled. Other
            // bailouts require the fast path.
            const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if(bailOutKind & IR::BailOutKindBits)
            {
                fastPath = true;
            }
            else
            {
                // NOTE(review): the branch above already established no auxiliary bits are set, so
                // bailOutKindMinusBits equals bailOutKind here; the mask is kept for clarity/safety.
                const IR::BailOutKind bailOutKindMinusBits = bailOutKind & ~IR::BailOutKindBits;
                fastPath =
                    bailOutKindMinusBits &&
                    bailOutKindMinusBits != IR::BailOutOnImplicitCalls &&
                    bailOutKindMinusBits != IR::BailOutOnImplicitCallsPreOp;
            }
        }
        IR::Opnd * opnd = instr->GetSrc1();
        IR::Opnd * baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
        ValueType profiledBaseValueType = baseOpnd->AsRegOpnd()->GetValueType();
        // If the base has no value type yet and is single-def, backfill it from profile data.
        if (profiledBaseValueType.IsUninitialized() && baseOpnd->AsRegOpnd()->m_sym->IsSingleDef())
        {
            baseOpnd->SetValueType(baseOpnd->FindProfiledValueType());
        }
        bool instrIsInHelperBlock;
        if (!fastPath)
        {
            // Helper-only lowering: no inline fast path requested.
            this->LowerLdElemI(
                instr,
                instr->m_opcode == Js::OpCode::LdElemI_A ? IR::HelperOp_GetElementI : IR::HelperOp_GetMethodElement,
                false);
        }
        else if (GenerateFastLdElemI(instr, &instrIsInHelperBlock))
        {
            // Fast path emitted; the helper call remains as the slow path.
#if DBG
            if(instr->HasBailOutInfo())
            {
                // These bailout kinds are incompatible with keeping the helper-call slow path.
                const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
                Assert(
                    (bailOutKind & ~IR::BailOutKindBits) != IR::BailOutConventionalTypedArrayAccessOnly &&
                    !(
                        bailOutKind &
                        (IR::BailOutConventionalNativeArrayAccessOnly | IR::BailOutOnArrayAccessHelperCall)
                    ));
            }
#endif
            this->LowerLdElemI(
                instr,
                instr->m_opcode == Js::OpCode::LdElemI_A ? IR::HelperOp_GetElementI : IR::HelperOp_GetMethodElement,
                instrIsInHelperBlock);
        }
        break;
    }
    // Element accessor initialization and deletion: all of these lower to helper calls.
    case Js::OpCode::InitSetElemI:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitElemSetter);
        break;
    case Js::OpCode::InitGetElemI:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitElemGetter);
        break;
    case Js::OpCode::InitComputedProperty:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOP_InitComputedProperty);
        break;
    case Js::OpCode::Delete_A:
        this->LowerUnaryHelperMem(instr, IR::HelperOp_Delete);
        break;
    case Js::OpCode::DeleteElemI_A:
        this->LowerDeleteElemI(instr, false);   // non-strict delete
        break;
    case Js::OpCode::DeleteElemIStrict_A:
        this->LowerDeleteElemI(instr, true);    // strict-mode delete
        break;
    case Js::OpCode::BytecodeArgOutCapture:
        // Capture of an outgoing argument for the bytecode's benefit: just a machine assign.
        m_lowererMD.ChangeToAssign(instr);
        break;
    case Js::OpCode::UnwrapWithObj:
        this->LowerUnaryHelper(instr, IR::HelperOp_UnwrapWithObj);
        break;
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
    case Js::OpCode::InitConst:
        // A profiled "begin switch" load gets special profiling lowering; otherwise a plain assign.
        if (instr->IsJitProfilingInstr() && instr->AsJitProfilingInstr()->isBeginSwitch) {
            LowerProfiledBeginSwitch(instr->AsJitProfilingInstr());
            break;
        }
        m_lowererMD.ChangeToAssign(instr);
        if(instr->HasBailOutInfo())
        {
            IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if(bailOutKind == IR::BailOutExpectingString)
            {
                this->LowerBailOnNotString(instr);
            }
            else
            {
                // Should not reach here as there are only 1 BailOutKind (BailOutExpectingString) currently associated with the Load Instr
                Assert(false);
            }
        }
        break;
    case Js::OpCode::LdIndir:
        // LdIndir must have the shape reg = [indir] with no src2; verify, then emit as an assign.
        Assert(instr->GetDst());
        Assert(instr->GetDst()->IsRegOpnd());
        Assert(instr->GetSrc1());
        Assert(instr->GetSrc1()->IsIndirOpnd());
        Assert(!instr->GetSrc2());
        m_lowererMD.ChangeToAssign(instr);
        break;
    case Js::OpCode::FromVar:
        // Unbox a Var into a machine-typed value; dispatch on the destination type.
        Assert(instr->GetSrc1()->GetType() == TyVar);
        if (instr->GetDst()->GetType() == TyInt32)
        {
            // EmitLoadInt32's return value selects the path below: when true, the instruction
            // becomes a pure bailout rather than a helper call.
            if(m_lowererMD.EmitLoadInt32(instr, !(instr->HasBailOutInfo() && (instr->GetBailOutKind() == IR::BailOutOnNotPrimitive))))
            {
                // Bail out instead of calling a helper
                Assert(instr->GetBailOutKind() == IR::BailOutIntOnly || instr->GetBailOutKind() == IR::BailOutExpectingInteger);
                Assert(!instr->GetSrc1()->GetValueType().IsInt()); // when we know it's an int, it should not have bailout info, to avoid generating a bailout path that will never be taken
                instr->UnlinkSrc1();
                instr->UnlinkDst();
                GenerateBailOut(instr);
            }
        }
        else if (instr->GetDst()->IsFloat())
        {
            if (m_func->GetJnFunction()->GetIsAsmJsFunction())
            {
                // asm.js path: load the float directly, no number check needed.
                m_lowererMD.EmitLoadFloat(instr->GetDst(), instr->GetSrc1(), instr);
                instr->Remove();
            }
            else
            {
                m_lowererMD.EmitLoadFloatFromNumber(instr->GetDst(), instr->GetSrc1(), instr);
            }
        }
        // Support on IA only
#if defined(_M_IX86) || defined(_M_X64)
        else if (instr->GetDst()->IsSimd128())
        {
            // SIMD_JS
            m_lowererMD.GenerateCheckedSimdLoad(instr);
        }
#endif
        else
        {
            Assert(UNREACHED);
        }
        break;
    case Js::OpCode::ArgOut_A:
        // I don't know if this can happen in asm.js mode, but if it can, we might want to handle differently
        Assert(!m_func->GetJnFunction()->GetIsAsmjsMode());
        // fall-through
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_Dynamic:
    {
        // ArgOut/StartCall are normally lowered by the lowering of the associated call instr.
        // If the call becomes unreachable, we could end up with an orphan ArgOut or StartCall.
        // Change the ArgOut into a store to the stack for bailouts
        instr->FreeSrc2();
        StackSym *argSym = instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
        argSym->m_offset = this->m_func->StackAllocate(sizeof(Js::Var));
        argSym->m_allocated = true;
        argSym->m_isOrphanedArg = true;
        this->m_lowererMD.ChangeToAssign(instr);
    }
    break;
    case Js::OpCode::LoweredStartCall:
    case Js::OpCode::StartCall:
        // ArgOut/StartCall are normally lowered by the lowering of the associated call instr.
        // If the call becomes unreachable, we could end up with an orphan ArgOut or StartCall.
        // We'll just delete these StartCalls during peeps.
        break;
    case Js::OpCode::ToVar:
        // Box a machine-typed value into a Var; dispatch on the source type.
        Assert(instr->GetDst()->GetType() == TyVar);
        if (instr->GetSrc1()->GetType() == TyInt32)
        {
            m_lowererMD.EmitLoadVar(instr);
        }
        else if (instr->GetSrc1()->GetType() == TyFloat64)
        {
            Assert(instr->GetSrc1()->IsRegOpnd());
            m_lowererMD.SaveDoubleToVar(
                instr->GetDst()->AsRegOpnd(),
                instr->GetSrc1()->AsRegOpnd(), instr, instr);
            instr->Remove();
        }
#if defined(_M_IX86) || defined(_M_X64)
        else if (IRType_IsSimd128(instr->GetSrc1()->GetType()))
        {
            // SIMD values are only supported on IA.
            m_lowererMD.GenerateSimdStore(instr);
        }
#endif
        else
        {
            Assert(UNREACHED);
        }
        break;
    case Js::OpCode::Conv_Prim:
        // Primitive numeric conversion between machine types; dispatch on dst/src types.
        if (instr->GetDst()->IsFloat())
        {
            if (instr->GetSrc1()->IsIntConstOpnd())
            {
                // Constant source: materialize the float directly.
                LoadFloatFromNonReg(instr->UnlinkSrc1(), instr->UnlinkDst(), instr);
            }
            else if (instr->GetSrc1()->IsInt32())
            {
                m_lowererMD.EmitIntToFloat(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else if (instr->GetSrc1()->IsUInt32())
            {
                Assert(instr->GetDst()->IsFloat64());
                m_lowererMD.EmitUIntToFloat(instr->GetDst(), instr->GetSrc1(), instr);
            }
            else
            {
                // Only remaining legal combination: float32 -> float64 widening.
                Assert(instr->GetDst()->IsFloat64());
                Assert(instr->GetSrc1()->IsFloat32());
                m_lowererMD.EmitFloat32ToFloat64(instr->GetDst(), instr->GetSrc1(), instr);
            }
        }
        else
        {
            Assert(instr->GetDst()->IsInt32());
            Assert(instr->GetSrc1()->IsFloat());
            m_lowererMD.EmitFloatToInt(instr->GetDst(), instr->GetSrc1(), instr);
        }
        instr->Remove();
        break;
    case Js::OpCode::FunctionExit:
        LowerFunctionExit(instr);
        // The rest of Epilog generation happens after reg allocation
        break;
    case Js::OpCode::FunctionEntry:
        LowerFunctionEntry(instr);
        // The rest of Prolog generation happens after reg allocation
        break;
    case Js::OpCode::ArgIn_Rest:
    case Js::OpCode::ArgIn_A:
        // asm.js functions (other than loop bodies) use their own ArgIn lowering.
        if (m_func->GetJnFunction()->GetIsAsmjsMode() && !m_func->IsLoopBody())
        {
            instrPrev = LowerArgInAsmJs(instr);
        }
        else
        {
            instrPrev = LowerArgIn(instr);
        }
        break;
    case Js::OpCode::Label:
        if (instr->AsLabelInstr()->m_isLoopTop)
        {
            if (this->outerMostLoopLabel == instr)
            {
                // Leaving the outermost loop: restore fast-path settings to their defaults.
                noFieldFastPath = !defaultDoFastPath;
                noMathFastPath = !defaultDoFastPath;
                this->outerMostLoopLabel = nullptr;
                instr->AsLabelInstr()->GetLoop()->isProcessed = true;
            }
            // Keep constant-address syms (and any accumulated extras) live across the back edge.
            this->m_func->MarkConstantAddressSyms(instr->AsLabelInstr()->GetLoop()->regAlloc.liveOnBackEdgeSyms);
            instr->AsLabelInstr()->GetLoop()->regAlloc.liveOnBackEdgeSyms->Or(this->addToLiveOnBackEdgeSyms);
        }
        break;
    case Js::OpCode::Br:
        m_lowererMD.LowerUncondBranch(instr);
        break;
    case Js::OpCode::BrFncEqApply:
        LowerBrFncApply(instr,IR::HelperOp_OP_BrFncEqApply);
        break;
    case Js::OpCode::BrFncNeqApply:
        LowerBrFncApply(instr,IR::HelperOp_OP_BrFncNeqApply);
        break;
    case Js::OpCode::BrHasSideEffects:
    case Js::OpCode::BrNotHasSideEffects:
        m_lowererMD.GenerateFastBrS(instr->AsBranchInstr());
        break;
    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrTrue_A:
        if (instr->GetSrc1()->IsFloat())
        {
            GenerateFastBrBool(instr->AsBranchInstr());
        }
        else if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) ||
            noMathFastPath ||
            GenerateFastBrBool(instr->AsBranchInstr()))
        {
            // Fall back to the ToBoolean helper when fast paths are disabled, or when
            // GenerateFastBrBool indicates the helper path is still required.
            this->LowerBrBMem(instr, IR::HelperConv_ToBoolean);
        }
        break;
    case Js::OpCode::BrOnObject_A:
        if (PHASE_OFF(Js::BranchFastPathPhase, this->m_func) || noMathFastPath)
        {
            this->LowerBrOnObject(instr, IR::HelperOp_IsObject);
        }
        else
        {
            GenerateFastBrOnObject(instr);
        }
        break;
    case Js::OpCode::BrOnClassConstructor:
        this->LowerBrOnClassConstructor(instr, IR::HelperOp_IsClassConstructor);
        break;
    case Js::OpCode::BrAddr_A:
    case Js::OpCode::BrNotAddr_A:
    case Js::OpCode::BrNotNull_A:
        // Raw pointer comparisons lower directly to machine conditional branches.
        m_lowererMD.LowerCondBranch(instr);
        break;
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
        // Loose equality branch. Try, in order: typeof fast path, constant/equal-src fast path,
        // string fast path / likely-equal fast path; otherwise the Op_Equal helper.
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            bool needHelper = true;
            if (this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
            {
                if (!fNoLower)
                {
                    this->LowerBrCMem(instr, IR::HelperOp_Equal, false, false /*isHelper*/);
                }
            }
            else if (this->TryGenerateFastBrEq(instr))
            {
                // Fully handled by the fast path; nothing further to lower.
            }
            else if (m_lowererMD.GenerateFastBrString(instr->AsBranchInstr()) || this->GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper))
            {
                if (needHelper)
                {
                    this->LowerBrCMem(instr, IR::HelperOp_Equal, false);
                }
            }
            else
            {
                if (needHelper)
                {
                    this->LowerBrCMem(instr, IR::HelperOp_Equal, false, false /*isHelper*/);
                }
            }
            if (!needHelper)
            {
                // The fast path proved the helper unreachable; drop the original instr.
                instr->Remove();
            }
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_Equal, true, false /*isHelper*/);
        }
        break;
    // Relational branches: float sources lower to machine float compares; otherwise call the
    // comparison helper, with the third argument requesting the no-fast-path variant.
    case Js::OpCode::BrGe_A:
    case Js::OpCode::BrNotGe_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            this->LowerBrCMem(instr, IR::HelperOp_GreaterEqual, false, false /*isHelper*/);
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_GreaterEqual, true, false /*isHelper*/);
        }
        break;
    case Js::OpCode::BrGt_A:
    case Js::OpCode::BrNotGt_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            this->LowerBrCMem(instr, IR::HelperOp_Greater, false, false /*isHelper*/);
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_Greater, true, false /*isHelper*/);
        }
        break;
    case Js::OpCode::BrLt_A:
    case Js::OpCode::BrNotLt_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            this->LowerBrCMem(instr, IR::HelperOp_Less, false, false /*isHelper*/);
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_Less, true, false /*isHelper*/);
        }
        break;
    case Js::OpCode::BrLe_A:
    case Js::OpCode::BrNotLe_A:
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            this->LowerBrCMem(instr, IR::HelperOp_LessEqual, false, false /*isHelper*/);
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_LessEqual, true, false /*isHelper*/);
        }
        break;
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrNotEq_A:
        // Loose inequality branch; mirrors the BrEq_A lowering with Op_NotEqual.
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            bool needHelper = true;
            if (this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
            {
                if (!fNoLower)
                {
                    this->LowerBrCMem(instr, IR::HelperOp_NotEqual, false, false /*isHelper*/);
                }
            }
            else if (this->TryGenerateFastBrNeq(instr))
            {
                // Fully handled by the fast path; nothing further to lower.
            }
            else if (m_lowererMD.GenerateFastBrString(instr->AsBranchInstr()) || this->GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper))
            {
                // NOTE(review): unlike BrEq_A, this path does not check needHelper before
                // lowering the helper call — confirm that is intentional.
                this->LowerBrCMem(instr, IR::HelperOp_NotEqual, false);
            }
            else
            {
                this->LowerBrCMem(instr, IR::HelperOp_NotEqual, false, false /*isHelper*/);
            }
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_NotEqual, true, false /*isHelper*/);
        }
        break;
    case Js::OpCode::MultiBr:
    {
        // Switch-statement dispatch; the lowering depends on the jump-table kind built earlier.
        IR::MultiBranchInstr * multiBranchInstr = instr->AsBranchInstr()->AsMultiBrInstr();
        switch (multiBranchInstr->m_kind)
        {
        case IR::MultiBranchInstr::StrDictionary:
            this->GenerateSwitchStringLookup(instr);
            break;
        case IR::MultiBranchInstr::SingleCharStrJumpTable:
            this->GenerateSingleCharStrJumpTableLookup(instr);
            // Track total jump-table footprint for switch statements in this function.
            m_func->m_totalJumpTableSizeInBytesForSwitchStatements += (multiBranchInstr->GetBranchJumpTable()->tableSize * sizeof(void*));
            break;
        case IR::MultiBranchInstr::IntJumpTable:
            this->LowerMultiBr(instr);
            m_func->m_totalJumpTableSizeInBytesForSwitchStatements += (multiBranchInstr->GetBranchJumpTable()->tableSize * sizeof(void*));
            break;
        default:
            Assert(false);
        }
        break;
    }
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    {
        // Strict equality branch. srcReg1/srcReg2 are locals declared earlier in this function.
        srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
        srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
        {
            if (!fNoLower)
            {
                this->LowerBrCMem(instr, IR::HelperOp_StrictEqual, false, false /*isHelper*/);
            }
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath && this->GenerateFastBrSrEq(instr, srcReg1, srcReg2, &instrPrev, noMathFastPath))
        {
            // Fully handled by the strict-equality fast path.
        }
        else
        {
            bool needHelper = true;
            if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
            {
                if (m_lowererMD.GenerateFastBrString(instr->AsBranchInstr()) || this->GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper))
                {
                    if (needHelper)
                    {
                        this->LowerBrCMem(instr, IR::HelperOp_StrictEqual, false);
                    }
                }
                else
                {
                    if (needHelper)
                    {
                        this->LowerBrCMem(instr, IR::HelperOp_StrictEqual, false, false /*isHelper*/);
                    }
                }
                if (!needHelper)
                {
                    // Fast path proved the helper unreachable; drop the original instr.
                    instr->Remove();
                }
            }
            else
            {
                this->LowerBrCMem(instr, IR::HelperOp_StrictEqual, true, false /*isHelper*/);
            }
        }
        break;
    }
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
        // Strict inequality branch; mirrors BrSrEq_A with Op_NotStrictEqual.
        if (instr->GetSrc1()->IsFloat())
        {
            Assert(instr->GetSrc1()->GetType() == instr->GetSrc2()->GetType());
            m_lowererMD.LowerToFloat(instr);
        }
        else if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func) && !noMathFastPath)
        {
            bool needHelper = true;
            if (this->TryGenerateFastBrOrCmTypeOf(instr, &instrPrev, &fNoLower))
            {
                if (!fNoLower)
                {
                    this->LowerBrCMem(instr, IR::HelperOp_NotStrictEqual, false, false /*isHelper*/);
                }
            }
            else if (this->GenerateFastBrSrNeq(instr, &instrPrev))
            {
                // Fully handled by the fast path.
            }
            else if (m_lowererMD.GenerateFastBrString(instr->AsBranchInstr()) || this->GenerateFastBrEqLikely(instr->AsBranchInstr(), &needHelper))
            {
                if (needHelper)
                {
                    this->LowerBrCMem(instr, IR::HelperOp_NotStrictEqual, false);
                }
            }
            else
            {
                if (needHelper)
                {
                    this->LowerBrCMem(instr, IR::HelperOp_NotStrictEqual, false, false /*isHelper*/);
                }
            }
            if (!needHelper)
            {
                instr->Remove();
            }
        }
        else
        {
            this->LowerBrCMem(instr, IR::HelperOp_NotStrictEqual, true, false /*isHelper*/);
        }
        break;
    case Js::OpCode::BrOnEmpty:
    case Js::OpCode::BrOnNotEmpty:
        // For-in enumerator branch: optionally emit the fast path before the helper.
        if (!PHASE_OFF(Js::BranchFastPathPhase, this->m_func))
        {
            m_lowererMD.GenerateFastBrBReturn(instr);
            this->LowerBrBReturn(instr, IR::HelperOp_OP_BrOnEmpty, true);
        }
        else
        {
            this->LowerBrBReturn(instr, IR::HelperOp_OP_BrOnEmpty, false);
        }
        break;
    case Js::OpCode::BrOnHasProperty:
    case Js::OpCode::BrOnNoProperty:
        this->LowerBrProperty(instr, IR::HelperOp_HasProperty);
        break;
    case Js::OpCode::BrOnException:
        // Only expected when globopt is off; the branch is dead at this point.
        Assert(!this->m_func->DoGlobOpt());
        instr->Remove();
        break;
    case Js::OpCode::BrOnNoException:
        // Becomes an unconditional branch in the machine IR.
        instr->m_opcode = LowererMD::MDUncondBranchOpcode;
        break;
    case Js::OpCode::StSlot:
        this->LowerStSlot(instr);
        break;
    case Js::OpCode::StSlotChkUndecl:
        this->LowerStSlotChkUndecl(instr);
        break;
    case Js::OpCode::ProfiledLoopStart:
    {
        // Simple-JIT dynamic profiling: at loop entry, save/clear implicit-call flags and
        // (when full JIT is still possible) fetch the scheduled loop-body entry point.
        Assert(m_func->DoSimpleJitDynamicProfile());
        Assert(instr->IsJitProfilingInstr());
        // Check for the helper instr from IRBuilding (it won't be there if there are no LoopEnds due to an infinite loop)
        auto prev = instr->m_prev;
        if (prev->IsJitProfilingInstr() && prev->AsJitProfilingInstr()->isLoopHelper)
        {
            // Replace the placeholder helper instr with: save current flags, then clear them.
            auto saveOpnd = prev->UnlinkDst();
            instrPrev = prev->m_prev;
            prev->Remove();
            const auto starFlag = GetImplicitCallFlagsOpnd();
            IR::AutoReuseOpnd a(starFlag, m_func);
            this->InsertMove(saveOpnd, starFlag, instr);
            this->InsertMove(starFlag, CreateClearImplicitCallFlagsOpnd(), instr);
        }
        else
        {
#if DBG
            // Double check that we indeed do not have a LoopEnd that is part of the same loop for the rest of the function
            auto cur = instr;
            auto loopNumber = instr->AsJitProfilingInstr()->loopNumber;
            while (cur)
            {
                Assert(cur->m_opcode != Js::OpCode::ProfiledLoopEnd || cur->IsJitProfilingInstr() && cur->AsJitProfilingInstr()->loopNumber != loopNumber);
                cur = cur->m_next;
            }
#endif
        }
        // If we turned off fulljit, there's no reason to do this.
        if (!m_func->GetJnFunction()->DoFullJit())
        {
            instr->Remove();
        }
        else
        {
            // Lower to: dst = HelperSimpleGetScheduledEntryPoint(framePtr, loopNumber)
            Assert(instr->GetDst());
            instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleGetScheduledEntryPoint, m_func));
            m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateUint32Opnd(instr->AsJitProfilingInstr()->loopNumber, m_func));
            m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
            this->m_lowererMD.LowerCall(instr, 0);
        }
        break;
    }
    case Js::OpCode::ProfiledLoopBodyStart:
    {
        // Simple-JIT dynamic profiling: record implicit-call flags at loop-body entry and
        // compute "dobailout" — whether to bail out to a jitted loop body.
        Assert(m_func->DoSimpleJitDynamicProfile());
        const auto loopNum = instr->AsJitProfilingInstr()->loopNumber;
        Assert(loopNum < m_func->GetJnFunction()->GetLoopCount());
        auto entryPointOpnd = instr->UnlinkSrc1();
        auto dobailout = instr->UnlinkDst();
        const auto dobailoutType = TyUint8;
        Assert(dobailout->GetType() == TyUint8 && sizeof(decltype(Js::SimpleJitHelpers::IsLoopCodeGenDone(nullptr))) == 1);
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(0, TyUint32, m_func)); // zero indicates that we do not want to add flags back in
        m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(loopNum, TyUint32, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
        instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleRecordLoopImplicitCallFlags, m_func));
        m_lowererMD.LowerCall(instr, 0);
        // Outline of JITed code:
        //
        // LoopStart:
        //     entryPoint = GetScheduledEntryPoint(framePtr, loopNum)
        // LoopBodyStart:
        //     uint8 dobailout;
        //     if (entryPoint) {
        //         dobailout = IsLoopCodeGenDone(entryPoint)
        //     } else {
        //         dobailout = ++interpretCount >= threshold
        //     }
        //     // already exists from IRBuilding:
        //     if (dobailout) {
        //         Bailout
        //     }
        if (!m_func->GetJnFunction()->DoFullJit() || !m_func->GetJnFunction()->DoJITLoopBody())
        {
            // If we're not doing fulljit, we've turned off JitLoopBodies, or if we don't have loop headers allocated (the function has a Try, etc)
            // just move false to dobailout
            this->InsertMove(dobailout, IR::IntConstOpnd::New(0, dobailoutType, m_func, true), instr->m_next);
        }
        else if (m_func->GetJnFunction()->ForceJITLoopBody())
        {
            // If we're forcing jit loop bodies, move true to dobailout
            this->InsertMove(dobailout, IR::IntConstOpnd::New(1, dobailoutType, m_func, true), instr->m_next);
        }
        else
        {
            // Put in the labels
            auto entryPointIsNull = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            auto checkDoBailout = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            instr->InsertAfter(checkDoBailout);
            instr->InsertAfter(entryPointIsNull);
            this->InsertCompareBranch(entryPointOpnd, IR::AddrOpnd::New(nullptr, IR::AddrOpndKindDynamicMisc, m_func), Js::OpCode::BrEq_A, false, entryPointIsNull, instr->m_next);
            // If the entry point is not null
            auto isCodeGenDone = IR::Instr::New(Js::OpCode::Call, dobailout, IR::HelperCallOpnd::New(IR::HelperSimpleIsLoopCodeGenDone, m_func), m_func);
            entryPointIsNull->InsertBefore(isCodeGenDone);
            m_lowererMD.LoadHelperArgument(isCodeGenDone, entryPointOpnd);
            m_lowererMD.LowerCall(isCodeGenDone, 0);
            this->InsertBranch(LowererMD::MDUncondBranchOpcode, true, checkDoBailout, entryPointIsNull);
            // If the entry point is null: bump the interpret count and compare to the threshold.
            auto head = m_func->GetJnFunction()->GetLoopHeaderWithLock(loopNum);
            Assert(head);
            static_assert(sizeof(head->interpretCount) == 4, "Change the type in the following line");
            const auto type = TyUint32;
            auto countReg = IR::RegOpnd::New(type, m_func);
            auto countAddr = IR::MemRefOpnd::New(&head->interpretCount, type, m_func);
            IR::AutoReuseOpnd a(countReg, m_func), b(countAddr, m_func);
            this->InsertAdd(false, countReg, countAddr, IR::IntConstOpnd::New(1, type, m_func, true), checkDoBailout);
            this->InsertMove(countAddr, countReg, checkDoBailout);
            this->InsertMove(dobailout, IR::IntConstOpnd::New(0, dobailoutType, m_func, true), checkDoBailout);
            // GetLoopInterpretCount() is a dynamic quantity. It's computed at simple-JIT time here, but that's okay
            // because there would have been sufficient iterations in interpreted mode to get a reasonable value.
            const auto threshold = instr->m_func->GetJnFunction()->GetLoopInterpretCount(head);
            this->InsertCompareBranch(countReg, IR::IntConstOpnd::New(threshold, type, m_func), Js::OpCode::BrLt_A, checkDoBailout, checkDoBailout);
            this->InsertMove(dobailout, IR::IntConstOpnd::New(1, dobailoutType, m_func, true), checkDoBailout);
            // fallthrough
            // Label checkDoBailout (inserted above)
        }
    }
    break;
    case Js::OpCode::ProfiledLoopEnd:
    {
        // Simple-JIT dynamic profiling: at loop exit, record (and restore) the implicit-call
        // flags that were saved at ProfiledLoopStart.
        Assert(m_func->DoSimpleJitDynamicProfile());
        // This is set up in IRBuilding
        Assert(instr->GetSrc1());
        IR::Opnd* savedFlags = instr->UnlinkSrc1();
        m_lowererMD.LoadHelperArgument(instr, savedFlags);
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateUint32Opnd(instr->AsJitProfilingInstr()->loopNumber, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
        instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleRecordLoopImplicitCallFlags, m_func));
        m_lowererMD.LowerCall(instr, 0);
    }
    break;
    // Loop-body helper counters (only valid when jitting a loop body).
    case Js::OpCode::InitLoopBodyCount:
        Assert(this->m_func->IsLoopBody());
        instr->SetSrc1(IR::IntConstOpnd::New(0, TyUint32, this->m_func));
        this->m_lowererMD.ChangeToAssign(instr);
        break;
    case Js::OpCode::StLoopBodyCount:
        Assert(this->m_func->IsLoopBody());
        this->LowerStLoopBodyCount(instr);
        break;
    case Js::OpCode::IncrLoopBodyCount:
        Assert(this->m_func->IsLoopBody());
        // count = count + 1, as a machine int add.
        instr->m_opcode = Js::OpCode::Add_I4;
        instr->SetSrc2(IR::IntConstOpnd::New(1, TyUint32, this->m_func));
        this->m_lowererMD.EmitInt4Instr(instr);
        break;
#if !FLOATVAR
    case Js::OpCode::StSlotBoxTemp:
        this->LowerStSlotBoxTemp(instr);
        break;
#endif
    case Js::OpCode::LdSlot:
    case Js::OpCode::LdSlotArr:
    {
        // Slot load; when the instr carries a profile id, emit a profiling call after the load.
        Js::ProfileId profileId;
        IR::Instr *profileBeforeInstr;
        if(instr->IsJitProfilingInstr())
        {
            profileId = instr->AsJitProfilingInstr()->profileId;
            Assert(profileId != Js::Constants::NoProfileId);
            // Remember the insertion point now; LowerLdSlot may change instr's neighbors.
            profileBeforeInstr = instr->m_next;
        }
        else
        {
            profileId = Js::Constants::NoProfileId;
            profileBeforeInstr = nullptr;
        }
        this->LowerLdSlot(instr);
        if(profileId != Js::Constants::NoProfileId)
        {
            LowerProfileLdSlot(instr->GetDst(), instr->m_func, profileId, profileBeforeInstr);
        }
        break;
    }
    case Js::OpCode::LdAsmJsSlot:
        this->LowerLdSlot(instr);
        break;
    case Js::OpCode::StAsmJsSlot:
        this->LowerStSlot(instr);
        break;
    case Js::OpCode::ChkUndecl:
        instrPrev = this->LowerChkUndecl(instr);
        break;
    case Js::OpCode::LdArrHead:
        this->LowerLdArrHead(instr);
        break;
    case Js::OpCode::StElemC:
    case Js::OpCode::StArrSegElemC:
        this->LowerStElemC(instr);
        break;
    case Js::OpCode::LdEnv:
        instrPrev = this->LowerLdEnv(instr);
        break;
    case Js::OpCode::LdAsmJsEnv:
        instrPrev = this->LowerLdAsmJsEnv(instr);
        break;
    case Js::OpCode::LdElemUndef:
        this->LowerLdElemUndef(instr);
        break;
    case Js::OpCode::LdElemUndefScoped:
        this->LowerElementUndefinedScopedMem(instr, IR::HelperOp_LdElemUndefScoped);
        break;
    case Js::OpCode::EnsureNoRootFld:
        this->LowerElementUndefined(instr, IR::HelperOp_EnsureNoRootProperty);
        break;
    case Js::OpCode::EnsureNoRootRedeclFld:
        this->LowerElementUndefined(instr, IR::HelperOp_EnsureNoRootRedeclProperty);
        break;
    case Js::OpCode::ScopedEnsureNoRedeclFld:
        this->LowerElementUndefinedScoped(instr, IR::HelperOp_EnsureNoRedeclPropertyScoped);
        break;
    case Js::OpCode::LdFuncExpr:
        // src = function Expression
        m_lowererMD.LoadFuncExpression(instr);
        this->GenerateGetCurrentFunctionObject(instr);
        break;
    case Js::OpCode::LdNewTarget:
        this->GenerateLoadNewTarget(instr);
        break;
    case Js::OpCode::ChkNewCallFlag:
        this->GenerateCheckForCallFlagNew(instr);
        break;
    case Js::OpCode::StFuncExpr:
        // object.propid = src
        LowerStFld(instr, IR::HelperOp_StFunctionExpression, IR::HelperOp_StFunctionExpression, false);
        break;
    // let/const/class-member field initialization: each lowers to a StFld with its own helper.
    case Js::OpCode::InitLetFld:
    case Js::OpCode::InitRootLetFld:
        LowerStFld(instr, IR::HelperOp_InitLetFld, IR::HelperOp_InitLetFld, false);
        break;
    case Js::OpCode::InitConstFld:
    case Js::OpCode::InitRootConstFld:
        LowerStFld(instr, IR::HelperOp_InitConstFld, IR::HelperOp_InitConstFld, false);
        break;
    case Js::OpCode::InitUndeclRootLetFld:
        LowerElementUndefined(instr, IR::HelperOp_InitUndeclRootLetFld);
        break;
    case Js::OpCode::InitUndeclRootConstFld:
        LowerElementUndefined(instr, IR::HelperOp_InitUndeclRootConstFld);
        break;
    case Js::OpCode::InitUndeclConsoleLetFld:
        LowerElementUndefined(instr, IR::HelperOp_InitUndeclConsoleLetFld);
        break;
    case Js::OpCode::InitUndeclConsoleConstFld:
        LowerElementUndefined(instr, IR::HelperOp_InitUndeclConsoleConstFld);
        break;
    case Js::OpCode::InitClassMember:
        LowerStFld(instr, IR::HelperOp_InitClassMember, IR::HelperOp_InitClassMember, false);
        break;
    case Js::OpCode::InitClassMemberComputedName:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberComputedName);
        break;
    case Js::OpCode::InitClassMemberGetComputedName:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberGetComputedName);
        break;
    case Js::OpCode::InitClassMemberSetComputedName:
        instrPrev = this->LowerStElemI(instr, Js::PropertyOperation_None, false, IR::HelperOp_InitClassMemberSetComputedName);
        break;
    case Js::OpCode::InitClassMemberGet:
        instrPrev = this->LowerStFld(instr, IR::HelperOp_InitClassMemberGet, IR::HelperOp_InitClassMemberGet, false);
        break;
    case Js::OpCode::InitClassMemberSet:
        instrPrev = this->LowerStFld(instr, IR::HelperOp_InitClassMemberSet, IR::HelperOp_InitClassMemberSet, false);
        break;
    case Js::OpCode::NewStackFrameDisplay:
        this->LowerLdFrameDisplay(instr, m_func->DoStackFrameDisplay());
        break;
    case Js::OpCode::LdFrameDisplay:
        this->LowerLdFrameDisplay(instr, false);
        break;
    case Js::OpCode::LdInnerFrameDisplay:
        this->LowerLdInnerFrameDisplay(instr);
        break;
  2253. case Js::OpCode::Throw:
  2254. case Js::OpCode::InlineThrow:
  2255. case Js::OpCode::EHThrow:
  2256. this->LowerUnaryHelperMem(instr, IR::HelperOp_Throw);
  2257. break;
  2258. case Js::OpCode::TryCatch:
  2259. instrPrev = this->LowerTry(instr, true /*try-catch*/);
  2260. break;
  2261. case Js::OpCode::TryFinally:
  2262. instrPrev = this->LowerTry(instr, false /*try-finally*/);
  2263. break;
  2264. case Js::OpCode::Catch:
  2265. instrPrev = m_lowererMD.LowerCatch(instr);
  2266. break;
  2267. case Js::OpCode::LeaveNull:
  2268. instrPrev = m_lowererMD.LowerLeaveNull(instr);
  2269. break;
  2270. case Js::OpCode::Leave:
  2271. if (this->m_func->HasTry() && this->m_func->DoOptimizeTryCatch())
  2272. {
  2273. // Required in Register Allocator to mark region boundaries
  2274. break;
  2275. }
  2276. instrPrev = m_lowererMD.LowerLeave(instr, instr->AsBranchInstr()->GetTarget(), false /*fromFinalLower*/, instr->AsBranchInstr()->m_isOrphanedLeave);
  2277. break;
  2278. case Js::OpCode::BailOnException:
  2279. instrPrev = this->LowerBailOnException(instr);
  2280. break;
  2281. case Js::OpCode::RuntimeTypeError:
  2282. case Js::OpCode::InlineRuntimeTypeError:
  2283. this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeTypeError);
  2284. break;
  2285. case Js::OpCode::RuntimeReferenceError:
  2286. case Js::OpCode::InlineRuntimeReferenceError:
  2287. this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeReferenceError);
  2288. break;
  2289. case Js::OpCode::Break:
  2290. // Inline breakpoint: for now do nothing.
  2291. break;
  2292. case Js::OpCode::Nop:
  2293. // This may need support for debugging the JIT, but for now just remove the instruction.
  2294. instr->Remove();
  2295. break;
  2296. case Js::OpCode::Unused:
  2297. // Currently Unused is used with ScopedLdInst to keep the second dst alive, but we don't need to lower it.
  2298. instr->Remove();
  2299. break;
  2300. case Js::OpCode::StatementBoundary:
  2301. // This instruction is merely to help convey source info through the IR
  2302. // and eventually generate the nativeOffset maps.
  2303. break;
  2304. case Js::OpCode::BailOnNotPolymorphicInlinee:
  2305. instrPrev = LowerBailOnNotPolymorphicInlinee(instr);
  2306. break;
  2307. case Js::OpCode::BailOnNoSimdTypeSpec:
  2308. case Js::OpCode::BailOnNoProfile:
  2309. this->GenerateBailOut(instr, nullptr, nullptr);
  2310. break;
  2311. case Js::OpCode::BailOnNotSpreadable:
  2312. instrPrev = this->LowerBailOnNotSpreadable(instr);
  2313. break;
  2314. case Js::OpCode::BailOnNotStackArgs:
  2315. instrPrev = this->LowerBailOnNotStackArgs(instr);
  2316. break;
  2317. case Js::OpCode::BailOnEqual:
  2318. case Js::OpCode::BailOnNotEqual:
  2319. instrPrev = this->LowerBailOnEqualOrNotEqual(instr);
  2320. break;
  2321. case Js::OpCode::BailOnNegative:
  2322. LowerBailOnNegative(instr);
  2323. break;
  2324. case Js::OpCode::BailForDebugger:
  2325. instrPrev = this->LowerBailForDebugger(instr);
  2326. break;
  2327. case Js::OpCode::BailOnNotObject:
  2328. instrPrev = this->LowerBailOnNotObject(instr);
  2329. break;
  2330. case Js::OpCode::BailOnNotBuiltIn:
  2331. instrPrev = this->LowerBailOnNotBuiltIn(instr);
  2332. break;
  2333. case Js::OpCode::BailOnNotArray:
  2334. {
  2335. IR::Instr *bailOnNotArray, *bailOnMissingValue;
  2336. SplitBailOnNotArray(instr, &bailOnNotArray, &bailOnMissingValue);
  2337. IR::RegOpnd *const arrayOpnd = LowerBailOnNotArray(bailOnNotArray);
  2338. if(bailOnMissingValue)
  2339. {
  2340. LowerBailOnMissingValue(bailOnMissingValue, arrayOpnd);
  2341. }
  2342. break;
  2343. }
  2344. case Js::OpCode::BoundCheck:
  2345. case Js::OpCode::UnsignedBoundCheck:
  2346. LowerBoundCheck(instr);
  2347. break;
  2348. case Js::OpCode::BailTarget:
  2349. instrPrev = this->LowerBailTarget(instr);
  2350. break;
  2351. case Js::OpCode::InlineeStart:
  2352. this->LowerInlineeStart(instr);
  2353. break;
  2354. case Js::OpCode::EndCallForPolymorphicInlinee:
  2355. instr->Remove();
  2356. break;
  2357. case Js::OpCode::InlineeEnd:
  2358. this->LowerInlineeEnd(instr);
  2359. break;
  2360. case Js::OpCode::InlineBuiltInEnd:
  2361. case Js::OpCode::InlineNonTrackingBuiltInEnd:
  2362. this->LowerInlineBuiltIn(instr);
  2363. break;
  2364. case Js::OpCode::ExtendArg_A:
  2365. if (instr->GetSrc1()->IsRegOpnd())
  2366. {
  2367. IR::RegOpnd *src1 = instr->GetSrc1()->AsRegOpnd();
  2368. this->addToLiveOnBackEdgeSyms->Clear(src1->m_sym->m_id);
  2369. }
  2370. instr->Remove();
  2371. break;
  2372. case Js::OpCode::InlineBuiltInStart:
  2373. case Js::OpCode::BytecodeArgOutUse:
  2374. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  2375. instr->Remove();
  2376. break;
  2377. case Js::OpCode::DeadBrEqual:
  2378. this->LowerBinaryHelperMem(instr, IR::HelperOp_Equal);
  2379. break;
  2380. case Js::OpCode::DeadBrSrEqual:
  2381. this->LowerBinaryHelperMem(instr, IR::HelperOp_StrictEqual);
  2382. break;
  2383. case Js::OpCode::DeadBrRelational:
  2384. this->LowerBinaryHelperMem(instr, IR::HelperOp_Greater);
  2385. break;
  2386. case Js::OpCode::DeadBrOnHasProperty:
  2387. this->LowerUnaryHelperMem(instr, IR::HelperOp_HasProperty);
  2388. break;
  2389. case Js::OpCode::DeletedNonHelperBranch:
  2390. break;
  2391. case Js::OpCode::InitClass:
  2392. instrPrev = this->LowerInitClass(instr);
  2393. break;
  2394. case Js::OpCode::NewConcatStrMulti:
  2395. this->LowerNewConcatStrMulti(instr);
  2396. break;
  2397. case Js::OpCode::NewConcatStrMultiBE:
  2398. this->LowerNewConcatStrMultiBE(instr);
  2399. break;
  2400. case Js::OpCode::SetConcatStrMultiItem:
  2401. this->LowerSetConcatStrMultiItem(instr);
  2402. break;
  2403. case Js::OpCode::SetConcatStrMultiItemBE:
  2404. Assert(instr->GetSrc1()->IsRegOpnd());
  2405. this->addToLiveOnBackEdgeSyms->Clear(instr->GetSrc1()->GetStackSym()->m_id);
  2406. // code corresponding to it should already have been generated while lowering NewConcatStrMultiBE
  2407. instr->Remove();
  2408. break;
  2409. case Js::OpCode::Conv_Str:
  2410. this->LowerConvStr(instr);
  2411. break;
  2412. case Js::OpCode::Coerse_Str:
  2413. this->LowerCoerseStr(instr);
  2414. break;
  2415. case Js::OpCode::Coerse_StrOrRegex:
  2416. this->LowerCoerseStrOrRegex(instr);
  2417. break;
  2418. case Js::OpCode::Coerse_Regex:
  2419. this->LowerCoerseRegex(instr);
  2420. break;
  2421. case Js::OpCode::Conv_PrimStr:
  2422. this->LowerConvPrimStr(instr);
  2423. break;
  2424. case Js::OpCode::ObjectFreeze:
  2425. this->LowerUnaryHelper(instr, IR::HelperOP_Freeze);
  2426. break;
  2427. case Js::OpCode::ClearAttributes:
  2428. this->LowerBinaryHelper(instr, IR::HelperOP_ClearAttributes);
  2429. break;
  2430. case Js::OpCode::SpreadArrayLiteral:
  2431. this->LowerSpreadArrayLiteral(instr);
  2432. break;
  2433. case Js::OpCode::CallIExtended:
  2434. {
  2435. // Currently, the only use for CallIExtended is a call that uses spread.
  2436. Assert(IsSpreadCall(instr));
  2437. instrPrev = this->LowerSpreadCall(instr, Js::CallFlags_None);
  2438. break;
  2439. }
  2440. case Js::OpCode::CallIExtendedNew:
  2441. {
  2442. // Currently, the only use for CallIExtended is a call that uses spread.
  2443. Assert(IsSpreadCall(instr));
  2444. instrPrev = this->LowerSpreadCall(instr, Js::CallFlags_New);
  2445. break;
  2446. }
  2447. case Js::OpCode::CallIExtendedNewTargetNew:
  2448. {
  2449. // Currently, the only use for CallIExtended is a call that uses spread.
  2450. Assert(IsSpreadCall(instr));
  2451. instrPrev = this->LowerSpreadCall(instr, (Js::CallFlags)(Js::CallFlags_New | Js::CallFlags_ExtraArg | Js::CallFlags_NewTarget));
  2452. break;
  2453. }
  2454. case Js::OpCode::LdSpreadIndices:
  2455. instr->Remove();
  2456. break;
  2457. case Js::OpCode::LdSuper:
  2458. this->GenerateLdSuper(instr);
  2459. break;
  2460. case Js::OpCode::LdSuperCtor:
  2461. this->GenerateLdSuperCtor(instr);
  2462. break;
  2463. case Js::OpCode::ScopedLdSuper:
  2464. instrPrev = m_lowererMD.LowerLdSuper(instr, IR::HelperScopedLdSuper);
  2465. break;
  2466. case Js::OpCode::ScopedLdSuperCtor:
  2467. instrPrev = m_lowererMD.LowerLdSuper(instr, IR::HelperScopedLdSuperCtor);
  2468. break;
  2469. case Js::OpCode::SetHomeObj:
  2470. {
  2471. this->GenerateSetHomeObj(instr);
  2472. break;
  2473. }
  2474. case Js::OpCode::SetComputedNameVar:
  2475. {
  2476. IR::Opnd *src2Opnd = instr->UnlinkSrc2();
  2477. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  2478. m_lowererMD.LoadHelperArgument(instr, src2Opnd);
  2479. m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  2480. m_lowererMD.ChangeToHelperCall(instr, IR::HelperSetComputedNameVar);
  2481. break;
  2482. }
  2483. case Js::OpCode::InlineeMetaArg:
  2484. {
  2485. m_lowererMD.ChangeToAssign(instr);
  2486. break;
  2487. }
  2488. case Js::OpCode::Yield:
  2489. {
  2490. instr->FreeSrc1(); // Source is not actually used by the backend other than to calculate lifetime
  2491. IR::Opnd* dstOpnd = instr->UnlinkDst();
  2492. // prm2 is the ResumeYieldData pointer per calling convention established in JavascriptGenerator::CallGenerator
  2493. // This is the value the bytecode expects to be in the dst register of the Yield opcode after resumption.
  2494. // Load it here after the bail-in.
  2495. StackSym *resumeYieldDataSym = StackSym::NewParamSlotSym(2, m_func);
  2496. m_func->SetArgOffset(resumeYieldDataSym, (LowererMD::GetFormalParamOffset() + 1) * MachPtr);
  2497. IR::SymOpnd * resumeYieldDataOpnd = IR::SymOpnd::New(resumeYieldDataSym, TyMachPtr, m_func);
  2498. AssertMsg(instr->m_next->IsLabelInstr(), "Expect the resume label to immediately follow Yield instruction");
  2499. m_lowererMD.CreateAssign(dstOpnd, resumeYieldDataOpnd, instr->m_next->m_next);
  2500. GenerateBailOut(instr);
  2501. break;
  2502. }
  2503. case Js::OpCode::ResumeYield:
  2504. case Js::OpCode::ResumeYieldStar:
  2505. {
  2506. IR::Opnd *srcOpnd1 = instr->UnlinkSrc1();
  2507. IR::Opnd *srcOpnd2 = instr->m_opcode == Js::OpCode::ResumeYieldStar ? instr->UnlinkSrc2() : IR::AddrOpnd::NewNull(m_func);
  2508. m_lowererMD.LoadHelperArgument(instr, srcOpnd2);
  2509. m_lowererMD.LoadHelperArgument(instr, srcOpnd1);
  2510. m_lowererMD.ChangeToHelperCall(instr, IR::HelperResumeYield);
  2511. break;
  2512. }
  2513. case Js::OpCode::GeneratorResumeJumpTable:
  2514. {
  2515. // Lowered in LowerPrologEpilog so that the jumps introduced are not considered to be part of the flow for the RegAlloc phase.
  2516. // Introduce a BailOutNoSave label if there were yield points that were elided due to optimizations. They could still be hit
  2517. // if an active generator object had been paused at such a yield point when the function body was JITed. So safe guard such a
  2518. // case by having the native code simply jump back to the interpreter for such yield points.
  2519. IR::LabelInstr *bailOutNoSaveLabel = nullptr;
  2520. m_func->MapUntilYieldOffsetResumeLabels([this, &bailOutNoSaveLabel](int, const YieldOffsetResumeLabel& yorl)
  2521. {
  2522. if (yorl.Second() == nullptr)
  2523. {
  2524. if (bailOutNoSaveLabel == nullptr)
  2525. {
  2526. bailOutNoSaveLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  2527. }
  2528. return true;
  2529. }
  2530. return false;
  2531. });
  2532. // Insert the bailoutnosave label somewhere along with a call to BailOutNoSave helper
  2533. if (bailOutNoSaveLabel != nullptr)
  2534. {
  2535. IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->m_prev;
  2536. IR::LabelInstr * exitTargetInstr;
  2537. if (exitPrevInstr->IsLabelInstr())
  2538. {
  2539. exitTargetInstr = exitPrevInstr->AsLabelInstr();
  2540. exitPrevInstr = exitPrevInstr->m_prev;
  2541. }
  2542. else
  2543. {
  2544. exitTargetInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  2545. exitPrevInstr->InsertAfter(exitTargetInstr);
  2546. }
  2547. bailOutNoSaveLabel->m_hasNonBranchRef = true;
  2548. bailOutNoSaveLabel->isOpHelper = true;
  2549. IR::Instr* bailOutCall = IR::Instr::New(Js::OpCode::Call, m_func);
  2550. exitPrevInstr->InsertAfter(bailOutCall);
  2551. exitPrevInstr->InsertAfter(bailOutNoSaveLabel);
  2552. exitPrevInstr->InsertAfter(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, exitTargetInstr, m_func));
  2553. IR::RegOpnd * frameRegOpnd = IR::RegOpnd::New(nullptr, LowererMD::GetRegFramePointer(), TyMachPtr, m_func);
  2554. m_lowererMD.LoadHelperArgument(bailOutCall, frameRegOpnd);
  2555. m_lowererMD.ChangeToHelperCall(bailOutCall, IR::HelperNoSaveRegistersBailOutForElidedYield);
  2556. m_func->m_bailOutNoSaveLabel = bailOutNoSaveLabel;
  2557. }
  2558. break;
  2559. }
  2560. case Js::OpCode::AsyncSpawn:
  2561. this->LowerBinaryHelperMem(instr, IR::HelperAsyncSpawn);
  2562. break;
  2563. case Js::OpCode::FrameDisplayCheck:
  2564. instrPrev = this->LowerFrameDisplayCheck(instr);
  2565. break;
  2566. case Js::OpCode::SlotArrayCheck:
  2567. instrPrev = this->LowerSlotArrayCheck(instr);
  2568. break;
  2569. default:
  2570. #if defined(_M_IX86) || defined(_M_X64)
  2571. if (IsSimd128Opcode(instr->m_opcode))
  2572. {
  2573. instrPrev = m_lowererMD.Simd128Instruction(instr);
  2574. break;
  2575. }
  2576. #endif
  2577. AssertMsg(instr->IsLowered(), "Unknown opcode");
  2578. if(!instr->IsLowered())
  2579. {
  2580. Fatal();
  2581. }
  2582. break;
  2583. }
  2584. #if DBG
  2585. LegalizeVerifyRange(instrPrev ? instrPrev->m_next : instrStart,
  2586. verifyLegalizeInstrNext ? verifyLegalizeInstrNext->m_prev : nullptr);
  2587. #endif
  2588. } NEXT_INSTR_BACKWARD_EDITING_IN_RANGE;
  2589. Assert(this->outerMostLoopLabel == nullptr);
  2590. }
  2591. IR::Instr *
  2592. Lowerer::LoadFunctionBody(IR::Instr * instr)
  2593. {
  2594. return m_lowererMD.LoadHelperArgument(instr, LoadFunctionBodyOpnd(instr));
  2595. }
  2596. IR::Instr *
  2597. Lowerer::LoadScriptContext(IR::Instr * instr)
  2598. {
  2599. return m_lowererMD.LoadHelperArgument(instr, LoadScriptContextOpnd(instr));
  2600. }
  2601. IR::Opnd *
  2602. Lowerer::LoadFunctionBodyOpnd(IR::Instr * instr)
  2603. {
  2604. return IR::AddrOpnd::New(instr->m_func->GetJnFunction(), IR::AddrOpndKindDynamicFunctionBody, instr->m_func);
  2605. }
  2606. IR::Opnd *
  2607. Lowerer::LoadScriptContextOpnd(IR::Instr * instr)
  2608. {
  2609. return IR::AddrOpnd::New(this->m_func->GetScriptContext(), IR::AddrOpndKindDynamicScriptContext, this->m_func);
  2610. }
  2611. IR::Opnd *
  2612. Lowerer::LoadScriptContextValueOpnd(IR::Instr * instr, ScriptContextValue valueType)
  2613. {
  2614. Js::ScriptContext *scriptContext = instr->m_func->GetScriptContext();
  2615. switch (valueType)
  2616. {
  2617. case ScriptContextValue::ScriptContextNumberAllocator:
  2618. return IR::AddrOpnd::New(scriptContext->GetNumberAllocator(), IR::AddrOpndKindDynamicMisc, instr->m_func);
  2619. case ScriptContextValue::ScriptContextRecycler:
  2620. return IR::AddrOpnd::New(scriptContext->GetRecycler(), IR::AddrOpndKindDynamicMisc, instr->m_func);
  2621. default:
  2622. Assert(false);
  2623. return nullptr;
  2624. }
  2625. }
  2626. IR::Opnd *
  2627. Lowerer::LoadLibraryValueOpnd(IR::Instr * instr, LibraryValue valueType, RegNum regNum)
  2628. {
  2629. Js::ScriptContext *scriptContext = instr->m_func->GetScriptContext();
  2630. switch (valueType)
  2631. {
  2632. case LibraryValue::ValueEmptyString:
  2633. return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetEmptyString(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  2634. case LibraryValue::ValueUndeclBlockVar:
  2635. return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetUndeclBlockVar(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  2636. case LibraryValue::ValueUndefined:
  2637. return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetUndefined(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  2638. case LibraryValue::ValueNull:
  2639. return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetNull(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  2640. case LibraryValue::ValueTrue:
  2641. return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetTrue(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  2642. case LibraryValue::ValueFalse:
  2643. return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetFalse(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  2644. case LibraryValue::ValueNegativeZero:
  2645. return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetNegativeZero(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  2646. case LibraryValue::ValueNumberTypeStatic:
  2647. return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetNumberTypeStatic(), IR::AddrOpndKindDynamicType, instr->m_func, true);
  2648. case LibraryValue::ValueStringTypeStatic:
  2649. return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetStringTypeStatic(), IR::AddrOpndKindDynamicType, instr->m_func, true);
  2650. case LibraryValue::ValueObjectType:
  2651. return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetObjectType(), IR::AddrOpndKindDynamicType, instr->m_func);
  2652. case LibraryValue::ValueObjectHeaderInlinedType:
  2653. return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetObjectHeaderInlinedType(), IR::AddrOpndKindDynamicType, instr->m_func);
  2654. case LibraryValue::ValueRegexType:
  2655. return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetRegexType(), IR::AddrOpndKindDynamicType, instr->m_func);
  2656. case LibraryValue::ValueArrayConstructor:
  2657. return IR::AddrOpnd::New(scriptContext->GetLibrary()->GetArrayConstructor(), IR::AddrOpndKindDynamicVar, instr->m_func);
  2658. case LibraryValue::ValueJavascriptArrayType:
  2659. return IR::AddrOpnd::New(Js::JavascriptArray::GetInitialType(scriptContext), IR::AddrOpndKindDynamicType, instr->m_func);
  2660. case LibraryValue::ValueNativeIntArrayType:
  2661. return IR::AddrOpnd::New(Js::JavascriptNativeIntArray::GetInitialType(scriptContext), IR::AddrOpndKindDynamicType, instr->m_func);
  2662. case LibraryValue::ValueNativeFloatArrayType:
  2663. return IR::AddrOpnd::New(Js::JavascriptNativeFloatArray::GetInitialType(scriptContext), IR::AddrOpndKindDynamicType, instr->m_func);
  2664. case LibraryValue::ValueConstructorCacheDefaultInstance:
  2665. return IR::AddrOpnd::New(&Js::ConstructorCache::DefaultInstance, IR::AddrOpndKindDynamicMisc, instr->m_func);
  2666. case LibraryValue::ValueAbsDoubleCst:
  2667. return IR::MemRefOpnd::New((void*)&Js::JavascriptNumber::AbsDoubleCst, TyMachDouble, instr->m_func, IR::AddrOpndKindDynamicDoubleRef);
  2668. case LibraryValue::ValueCharStringCache:
  2669. return IR::AddrOpnd::New((Js::Var)&scriptContext->GetLibrary()->GetCharStringCache(), IR::AddrOpndKindDynamicCharStringCache, instr->m_func);
  2670. default:
  2671. Assert(false);
  2672. return nullptr;
  2673. }
  2674. }
  2675. IR::Opnd *
  2676. Lowerer::LoadVTableValueOpnd(IR::Instr * instr, VTableValue vtableType)
  2677. {
  2678. return IR::AddrOpnd::New((Js::Var)instr->m_func->GetScriptContext()->GetLibrary()->GetVTableAddresses()[vtableType], IR::AddrOpndKindDynamicVtable, this->m_func);
  2679. }
  2680. IR::Opnd *
  2681. Lowerer::LoadOptimizationOverridesValueOpnd(IR::Instr *instr, OptimizationOverridesValue valueType)
  2682. {
  2683. Js::ScriptContext *scriptContext = instr->m_func->GetScriptContext();
  2684. switch (valueType)
  2685. {
  2686. case OptimizationOverridesValue::OptimizationOverridesSideEffects:
  2687. return IR::MemRefOpnd::New(scriptContext->optimizationOverrides.GetAddressOfSideEffects(), TyInt32, instr->m_func);
  2688. case OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable:
  2689. return IR::MemRefOpnd::New(scriptContext->optimizationOverrides.GetAddressOfArraySetElementFastPathVtable(), TyMachPtr, instr->m_func);
  2690. case OptimizationOverridesValue::OptimizationOverridesIntArraySetElementFastPathVtable:
  2691. return IR::MemRefOpnd::New(scriptContext->optimizationOverrides.GetAddressOfIntArraySetElementFastPathVtable(), TyMachPtr, instr->m_func);
  2692. case OptimizationOverridesValue::OptimizationOverridesFloatArraySetElementFastPathVtable:
  2693. return IR::MemRefOpnd::New(scriptContext->optimizationOverrides.GetAddressOfFloatArraySetElementFastPathVtable(), TyMachPtr, instr->m_func);
  2694. default:
  2695. Assert(false);
  2696. return nullptr;
  2697. }
  2698. }
  2699. IR::Opnd *
  2700. Lowerer::LoadNumberAllocatorValueOpnd(IR::Instr *instr, NumberAllocatorValue valueType)
  2701. {
  2702. Js::ScriptContext *scriptContext = instr->m_func->GetScriptContext();
  2703. bool allowNativeCodeBumpAllocation = scriptContext->GetNumberAllocator()->AllowNativeCodeBumpAllocation();
  2704. switch (valueType)
  2705. {
  2706. case NumberAllocatorValue::NumberAllocatorEndAddress:
  2707. return IR::MemRefOpnd::New(((char *)scriptContext->GetNumberAllocator()) + Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset(), TyMachPtr, instr->m_func);
  2708. case NumberAllocatorValue::NumberAllocatorFreeObjectList:
  2709. return IR::MemRefOpnd::New(
  2710. ((char *)scriptContext->GetNumberAllocator()) +
  2711. (allowNativeCodeBumpAllocation ? Js::RecyclerJavascriptNumberAllocator::GetFreeObjectListOffset() : Js::RecyclerJavascriptNumberAllocator::GetEndAddressOffset()),
  2712. TyMachPtr, instr->m_func);
  2713. default:
  2714. Assert(false);
  2715. return nullptr;
  2716. }
  2717. }
  2718. IR::Opnd *
  2719. Lowerer::LoadIsInstInlineCacheOpnd(IR::Instr * instr, uint inlineCacheIndex)
  2720. {
  2721. Js::IsInstInlineCache * inlineCache = instr->m_func->GetJnFunction()->GetIsInstInlineCache(inlineCacheIndex);
  2722. return IR::AddrOpnd::New(inlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func);
  2723. }
  2724. IR::Opnd *
  2725. Lowerer::LoadRuntimeInlineCacheOpnd(IR::Instr * instr, IR::PropertySymOpnd * propertySymOpnd, bool isHelper)
  2726. {
  2727. Assert(propertySymOpnd->m_runtimeInlineCache != nullptr);
  2728. IR::Opnd * inlineCacheOpnd = nullptr;
  2729. if (instr->m_func->GetJnFunction()->GetInlineCachesOnFunctionObject() && !instr->m_func->IsInlinee())
  2730. {
  2731. inlineCacheOpnd = this->GetInlineCacheFromFuncObjectForRuntimeUse(instr, propertySymOpnd, isHelper);
  2732. }
  2733. else
  2734. {
  2735. Js::InlineCache * inlineCache = propertySymOpnd->m_runtimeInlineCache;
  2736. inlineCacheOpnd = IR::AddrOpnd::New(inlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func, /* dontEncode */ true);
  2737. }
  2738. return inlineCacheOpnd;
  2739. }
  2740. bool
  2741. Lowerer::TryGenerateFastCmSrEq(IR::Instr * instr)
  2742. {
  2743. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2744. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2745. if (srcReg2 && IsConstRegOpnd(srcReg2))
  2746. {
  2747. return m_lowererMD.GenerateFastCmSrEqConst(instr);
  2748. }
  2749. else if (srcReg1 && IsConstRegOpnd(srcReg1))
  2750. {
  2751. instr->SwapOpnds();
  2752. return m_lowererMD.GenerateFastCmSrEqConst(instr);
  2753. }
  2754. else if (srcReg2 && (srcReg2->m_sym->m_isStrConst))
  2755. {
  2756. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_String);
  2757. return true;
  2758. }
  2759. else if (srcReg1 && (srcReg1->m_sym->m_isStrConst))
  2760. {
  2761. instr->SwapOpnds();
  2762. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_String);
  2763. return true;
  2764. }
  2765. else if (srcReg2 && (srcReg2->m_sym->m_isStrEmpty))
  2766. {
  2767. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_EmptyString);
  2768. return true;
  2769. }
  2770. else if (srcReg1 && (srcReg1->m_sym->m_isStrEmpty))
  2771. {
  2772. instr->SwapOpnds();
  2773. this->LowerBinaryHelperMem(instr, IR::HelperOP_CmSrEq_EmptyString);
  2774. return true;
  2775. }
  2776. return false;
  2777. }
  2778. bool
  2779. Lowerer::GenerateFastBrSrEq(IR::Instr * instr, IR::RegOpnd * srcReg1, IR::RegOpnd * srcReg2, IR::Instr ** pInstrPrev, bool noMathFastPath)
  2780. {
  2781. if (srcReg2 && IsConstRegOpnd(srcReg2))
  2782. {
  2783. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg2->m_sym->GetConstOpnd(), true);
  2784. instr->Remove();
  2785. return true;
  2786. }
  2787. else if (srcReg1 && IsConstRegOpnd(srcReg1))
  2788. {
  2789. instr->SwapOpnds();
  2790. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg1->m_sym->GetConstOpnd(), true);
  2791. instr->Remove();
  2792. return true;
  2793. }
  2794. else if (srcReg2 && (srcReg2->m_sym->m_isStrConst))
  2795. {
  2796. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualString, noMathFastPath, false);
  2797. return true;
  2798. }
  2799. else if (srcReg1 && (srcReg1->m_sym->m_isStrConst))
  2800. {
  2801. instr->SwapOpnds();
  2802. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualString, noMathFastPath, false);
  2803. return true;
  2804. }
  2805. else if (srcReg2 && (srcReg2->m_sym->m_isStrEmpty))
  2806. {
  2807. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualEmptyString, noMathFastPath, false);
  2808. return true;
  2809. }
  2810. else if (srcReg1 && (srcReg1->m_sym->m_isStrEmpty))
  2811. {
  2812. instr->SwapOpnds();
  2813. this->LowerBrCMem(instr, IR::HelperOp_StrictEqualEmptyString, noMathFastPath, false);
  2814. return true;
  2815. }
  2816. return false;
  2817. }
  2818. ///----------------------------------------------------------------------------
  2819. ///
  2820. /// Lowerer::GenerateFastBrConst
  2821. ///
  2822. ///----------------------------------------------------------------------------
  2823. IR::BranchInstr *
  2824. Lowerer::GenerateFastBrConst(IR::BranchInstr *branchInstr, IR::Opnd * constOpnd, bool isEqual)
  2825. {
  2826. Assert(constOpnd->IsAddrOpnd() || constOpnd->IsIntConstOpnd());
  2827. //
  2828. // Given:
  2829. // BrSrEq_A $L1, s1, s2
  2830. // where s2 is either 'null', 'undefined', 'true' or 'false'
  2831. //
  2832. // Generate:
  2833. //
  2834. // CMP s1, s2
  2835. // JEQ/JNE $L1
  2836. //
  2837. Assert(this->IsConstRegOpnd(branchInstr->GetSrc2()->AsRegOpnd()));
  2838. IR::Opnd *opnd = branchInstr->GetSrc1();
  2839. if (!opnd->IsRegOpnd())
  2840. {
  2841. IR::RegOpnd *lhsReg = IR::RegOpnd::New(TyVar, m_func);
  2842. LowererMD::CreateAssign(lhsReg, opnd, branchInstr);
  2843. opnd = lhsReg;
  2844. }
  2845. Assert(opnd->IsRegOpnd());
  2846. IR::BranchInstr *newBranch;
  2847. newBranch = InsertCompareBranch(opnd, constOpnd, isEqual ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A, branchInstr->GetTarget(), branchInstr);
  2848. return newBranch;
  2849. }
  2850. bool
  2851. Lowerer::TryGenerateFastBrEq(IR::Instr * instr)
  2852. {
  2853. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2854. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2855. bool isConst = false;
  2856. if (srcReg1 && this->IsNullOrUndefRegOpnd(srcReg1))
  2857. {
  2858. instr->SwapOpnds();
  2859. isConst = true;
  2860. }
  2861. // Fast path for == null or == undefined
  2862. // if (src == null || src == undefined)
  2863. if (isConst || srcReg2 && this->IsNullOrUndefRegOpnd(srcReg2))
  2864. {
  2865. IR::BranchInstr *newBranch;
  2866. newBranch = this->GenerateFastBrConst(instr->AsBranchInstr(),
  2867. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
  2868. true);
  2869. this->GenerateFastBrConst(instr->AsBranchInstr(),
  2870. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
  2871. true);
  2872. instr->Remove();
  2873. return true;
  2874. }
  2875. return false;
  2876. }
  2877. bool
  2878. Lowerer::TryGenerateFastBrNeq(IR::Instr * instr)
  2879. {
  2880. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2881. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2882. bool isConst = false;
  2883. if (srcReg1 && this->IsNullOrUndefRegOpnd(srcReg1))
  2884. {
  2885. instr->SwapOpnds();
  2886. isConst = true;
  2887. }
  2888. // Fast path for != null or != undefined
  2889. // if (src != null && src != undefined)
  2890. //
  2891. // That is:
  2892. // if (src == NULL) goto labelEq
  2893. // if (src != undef) goto target
  2894. // labelEq:
  2895. if (isConst || (srcReg2 && this->IsNullOrUndefRegOpnd(srcReg2)))
  2896. {
  2897. IR::LabelInstr *labelEq = instr->GetOrCreateContinueLabel();
  2898. IR::BranchInstr *newBranch;
  2899. newBranch = this->GenerateFastBrConst(instr->AsBranchInstr(),
  2900. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
  2901. true);
  2902. newBranch->AsBranchInstr()->SetTarget(labelEq);
  2903. this->GenerateFastBrConst(instr->AsBranchInstr(),
  2904. this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
  2905. false);
  2906. instr->Remove();
  2907. return true;
  2908. }
  2909. return false;
  2910. }
  2911. bool
  2912. Lowerer::GenerateFastBrSrNeq(IR::Instr * instr, IR::Instr ** pInstrPrev)
  2913. {
  2914. IR::RegOpnd *srcReg1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
  2915. IR::RegOpnd *srcReg2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
  2916. if (srcReg2 && IsConstRegOpnd(srcReg2))
  2917. {
  2918. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg2->m_sym->GetConstOpnd(), false);
  2919. instr->Remove();
  2920. return true;
  2921. }
  2922. else if (srcReg1 && IsConstRegOpnd(srcReg1))
  2923. {
  2924. instr->SwapOpnds();
  2925. this->GenerateFastBrConst(instr->AsBranchInstr(), srcReg1->m_sym->GetConstOpnd(), false);
  2926. instr->Remove();
  2927. return true;
  2928. }
  2929. return false;
  2930. }
// Emits inline allocation and initialization of a Js::DynamicObject into newObjDst.
// inlineSlotCount slots are allocated contiguously with the object header; if
// slotCount exceeds it, the overflow goes into a separately allocated aux-slot
// array. typeSrc supplies the value stored into the object's type field.
// NOTE(review): the GenerateMemInit* calls take isZeroed so they can skip
// redundant stores when the allocation returned zeroed memory — presumably why
// the field-init order below matters; confirm before reordering.
void
Lowerer::GenerateDynamicObjectAlloc(IR::Instr * newObjInstr, uint inlineSlotCount, uint slotCount, IR::RegOpnd * newObjDst, IR::Opnd * typeSrc)
{
    // Header allocation covers the object plus its inline slots.
    size_t headerAllocSize = sizeof(Js::DynamicObject) + inlineSlotCount * sizeof(Js::Var);
    IR::SymOpnd * tempObjectSymOpnd;
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(newObjInstr, newObjDst, IR::HelperAllocMemForScObject, headerAllocSize, &tempObjectSymOpnd);
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            LoadVTableValueOpnd(this->outerMostLoopLabel, VTableValue::VtableDynamicObject), this->outerMostLoopLabel, false);
    }
    else
    {
        // MOV [newObjDst + offset(vtable)], DynamicObject::vtable
        GenerateMemInit(newObjDst, 0, LoadVTableValueOpnd(newObjInstr, VTableValue::VtableDynamicObject), newObjInstr, isZeroed);
    }
    // MOV [newObjDst + offset(type)], newObjectType
    GenerateMemInit(newObjDst, Js::DynamicObject::GetOffsetOfType(), typeSrc, newObjInstr, isZeroed);
    // CALL JavascriptOperators::AllocMemForVarArray((slotCount - inlineSlotCount) * sizeof(Js::Var))
    if (slotCount > inlineSlotCount)
    {
        // Overflow slots don't fit inline: allocate an aux-slot array and store
        // its address in the object's auxSlots field.
        size_t auxSlotsAllocSize = (slotCount - inlineSlotCount) * sizeof(Js::Var);
        IR::RegOpnd* auxSlots = IR::RegOpnd::New(TyMachPtr, m_func);
        GenerateRecyclerAllocAligned(IR::HelperAllocMemForVarArray, auxSlotsAllocSize, auxSlots, newObjInstr);
        GenerateMemInit(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), auxSlots, newObjInstr, isZeroed);
        IR::IndirOpnd* newObjAuxSlots = IR::IndirOpnd::New(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachPtr, m_func);
        this->m_lowererMD.CreateAssign(newObjAuxSlots, auxSlots, newObjInstr);
    }
    else
    {
        // Everything fits inline: null out the aux-slot pointer.
        GenerateMemInitNull(newObjDst, Js::DynamicObject::GetOffsetOfAuxSlots(), newObjInstr, isZeroed);
    }
    // New objects start with no object array.
    GenerateMemInitNull(newObjDst, Js::DynamicObject::GetOffsetOfObjectArray(), newObjInstr, isZeroed);
}
  2966. void
  2967. Lowerer::LowerNewScObjectSimple(IR::Instr * instr)
  2968. {
  2969. GenerateDynamicObjectAlloc(
  2970. instr,
  2971. 0,
  2972. 0,
  2973. instr->UnlinkDst()->AsRegOpnd(),
  2974. LoadLibraryValueOpnd(
  2975. instr,
  2976. Js::FunctionBody::DoObjectHeaderInliningForEmptyObjects()
  2977. ? LibraryValue::ValueObjectHeaderInlinedType
  2978. : LibraryValue::ValueObjectType));
  2979. instr->Remove();
  2980. }
void
Lowerer::LowerNewScObjectLiteral(IR::Instr *newObjInstr)
{
    // Lowers NewScObjectLiteral: allocates a dynamic object for an object
    // literal. When the cached per-literal type already exists and is shared,
    // its address is embedded directly; otherwise run-time checks load it and
    // fall back to a helper that ensures the type.
    Func * func = m_func;

    // src2 is the index of this literal's cached type slot in the function body.
    IR::IntConstOpnd * literalObjectIdOpnd = newObjInstr->UnlinkSrc2()->AsIntConstOpnd();
    Js::DynamicType ** literalTypeRef = newObjInstr->m_func->GetJnFunction()->GetObjectLiteralTypeRefWithLock(literalObjectIdOpnd->AsUint32());
    Js::DynamicType * literalType = *literalTypeRef;

    IR::LabelInstr * helperLabel = nullptr;
    IR::LabelInstr * allocLabel = nullptr;
    IR::Opnd * literalTypeRefOpnd;
    IR::Opnd * literalTypeOpnd;
    IR::Opnd * propertyArrayOpnd;

    // src1 identifies the property-id array describing the literal's shape.
    IR::IntConstOpnd * propertyArrayIdOpnd = newObjInstr->UnlinkSrc1()->AsIntConstOpnd();
    const Js::PropertyIdArray * propIds = Js::ByteCodeReader::ReadPropertyIdArrayWithLock(propertyArrayIdOpnd->AsUint32(), newObjInstr->m_func->GetJnFunction());
    Js::ScriptContext *const scriptContext = newObjInstr->m_func->GetJnFunction()->GetScriptContext();
    uint inlineSlotCapacity = Js::JavascriptOperators::GetLiteralInlineSlotCapacity(propIds, scriptContext);
    uint slotCapacity = Js::JavascriptOperators::GetLiteralSlotCapacity(propIds, scriptContext);
    IR::RegOpnd * dstOpnd;

    literalTypeRefOpnd = IR::AddrOpnd::New(literalTypeRef, IR::AddrOpndKindDynamicMisc, this->m_func);
    propertyArrayOpnd = IR::AddrOpnd::New((Js::Var)propIds, IR::AddrOpndKindDynamicMisc, this->m_func);

    if (literalType == nullptr || !literalType->GetIsShared())
    {
        // Type not created/shared at JIT time: load it at run time, and branch
        // to the helper when it is still null or not yet marked shared.
        helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        allocLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

        literalTypeOpnd = IR::RegOpnd::New(TyMachPtr, func);
        InsertMove(literalTypeOpnd, IR::MemRefOpnd::New(literalTypeRef, TyMachPtr, func), newObjInstr);
        InsertTestBranch(literalTypeOpnd, literalTypeOpnd,
            Js::OpCode::BrEq_A, helperLabel, newObjInstr);
        InsertTestBranch(IR::IndirOpnd::New(literalTypeOpnd->AsRegOpnd(), Js::DynamicType::GetOffsetOfIsShared(), TyInt8, func),
            IR::IntConstOpnd::New(1, TyInt8, func, true), Js::OpCode::BrEq_A, helperLabel, newObjInstr);
        dstOpnd = newObjInstr->GetDst()->AsRegOpnd();
    }
    else
    {
        // Shared type is known at JIT time; embed its address directly.
        literalTypeOpnd = IR::AddrOpnd::New(literalType, IR::AddrOpndKindDynamicType, func);
        dstOpnd = newObjInstr->UnlinkDst()->AsRegOpnd();
        Assert(inlineSlotCapacity == literalType->GetTypeHandler()->GetInlineSlotCapacity());
        Assert(slotCapacity == (uint)literalType->GetTypeHandler()->GetSlotCapacity());
    }

    if (helperLabel)
    {
        InsertBranch(Js::OpCode::Br, allocLabel, newObjInstr);
        // Slow path to ensure the type is there
        newObjInstr->InsertBefore(helperLabel);
        IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperEnsureObjectLiteralType, func);
        m_lowererMD.LoadHelperArgument(newObjInstr, literalTypeRefOpnd);
        m_lowererMD.LoadHelperArgument(newObjInstr, propertyArrayOpnd);
        LoadScriptContext(newObjInstr);
        IR::Instr * ensureTypeInstr = IR::Instr::New(Js::OpCode::Call, literalTypeOpnd, opndHelper, func);
        newObjInstr->InsertBefore(ensureTypeInstr);
        m_lowererMD.LowerCall(ensureTypeInstr, 0);
        newObjInstr->InsertBefore(allocLabel);
    }
    else
    {
        Assert(allocLabel == nullptr);
    }

    // For the next call:
    //     inlineSlotCapacity == Number of slots to allocate beyond the DynamicObject header
    //     slotCapacity - inlineSlotCapacity == Number of aux slots to allocate
    if(Js::FunctionBody::DoObjectHeaderInliningForObjectLiteral(propIds, scriptContext))
    {
        Assert(inlineSlotCapacity >= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity());
        Assert(inlineSlotCapacity == slotCapacity);
        // Exclude the header-inlinable slots from the counts passed to the allocator.
        slotCapacity = inlineSlotCapacity -= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity();
    }

    GenerateDynamicObjectAlloc(
        newObjInstr,
        inlineSlotCapacity,
        slotCapacity,
        dstOpnd,
        literalTypeOpnd);

    newObjInstr->Remove();
}
  3055. IR::Instr*
  3056. Lowerer::LowerProfiledNewScArray(IR::JitProfilingInstr* arrInstr)
  3057. {
  3058. IR::Instr *instrPrev = arrInstr->m_prev;
  3059. /*
  3060. JavascriptArray *ProfilingHelpers::ProfiledNewScArray(
  3061. const uint length,
  3062. FunctionBody *const functionBody,
  3063. const ProfileId profileId)
  3064. */
  3065. m_lowererMD.LoadHelperArgument(arrInstr, IR::Opnd::CreateProfileIdOpnd(arrInstr->profileId, m_func));
  3066. m_lowererMD.LoadHelperArgument(arrInstr, CreateFunctionBodyOpnd(arrInstr->m_func));
  3067. m_lowererMD.LoadHelperArgument(arrInstr, arrInstr->UnlinkSrc1());
  3068. arrInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledNewScArray, m_func));
  3069. m_lowererMD.LowerCall(arrInstr, 0);
  3070. return instrPrev;
  3071. }
IR::Instr *
Lowerer::LowerNewScArray(IR::Instr *arrInstr)
{
    // Lowers NewScArray ('new Array(length)') to a helper call, emitting a
    // profile-guided inline fast path first when profile data is available.
    // Returns the instruction preceding the lowered region.
    if (arrInstr->IsJitProfilingInstr())
    {
        return LowerProfiledNewScArray(arrInstr->AsJitProfilingInstr());
    }

    IR::Instr *instrPrev = arrInstr->m_prev;
    IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScArray;

    if (arrInstr->IsProfiledInstr() && arrInstr->m_func->HasProfileInfo())
    {
        RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
        Assert(weakFuncRef);

        Js::ProfileId profileId = static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
        Js::FunctionBody *functionBody = arrInstr->m_func->GetJnFunction();
        Js::DynamicProfileInfo *profileInfo = functionBody->GetAnyDynamicProfileInfo();
        Js::ArrayCallSiteInfo *arrayInfo = profileInfo->GetArrayCallSiteInfo(functionBody, profileId);

        // src1 is the constant length used for the inline fast path.
        Assert(arrInstr->GetSrc1()->IsConstOpnd());
        GenerateProfiledNewScArrayFastPath(arrInstr, arrayInfo, weakFuncRef, arrInstr->GetSrc1()->AsIntConstOpnd()->AsUint32());

        // Likely-native arrays use the profiled helper, which takes the
        // array-call-site info and function-body weak ref as extra arguments.
        if (arrInstr->GetDst() && arrInstr->GetDst()->GetValueType().IsLikelyNativeArray())
        {
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
            helperMethod = IR::HelperScrArr_ProfiledNewScArray;
        }
    }

    LoadScriptContext(arrInstr);
    IR::Opnd *src1Opnd = arrInstr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(arrInstr, src1Opnd);
    m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
    return instrPrev;
}
  3104. template <typename ArrayType>
  3105. BOOL Lowerer::IsSmallObject(uint32 length)
  3106. {
  3107. if (ArrayType::HasInlineHeadSegment(length))
  3108. return true;
  3109. uint32 alignedHeadSegmentSize = Js::SparseArraySegment<typename ArrayType::TElement>::GetAlignedSize(length);
  3110. size_t allocSize = sizeof(Js::SparseArraySegment<typename ArrayType::TElement>) + alignedHeadSegmentSize * sizeof(typename ArrayType::TElement);
  3111. return HeapInfo::IsSmallObject(HeapInfo::GetAlignedSizeNoCheck(allocSize));
  3112. }
  3113. void
  3114. Lowerer::GenerateProfiledNewScArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, RecyclerWeakReference<Js::FunctionBody> * weakFuncRef, uint32 length)
  3115. {
  3116. if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
  3117. {
  3118. return;
  3119. }
  3120. Func * func = this->m_func;
  3121. IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  3122. uint32 size = length;
  3123. bool isZeroed;
  3124. IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
  3125. IR::RegOpnd *headOpnd;
  3126. uint32 i = length;
  3127. if (instr->GetDst() && instr->GetDst()->GetValueType().IsLikelyNativeIntArray())
  3128. {
  3129. if (!IsSmallObject<Js::JavascriptNativeIntArray>(length))
  3130. {
  3131. return;
  3132. }
  3133. GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, helperLabel);
  3134. Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
  3135. headOpnd = GenerateArrayAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isZeroed);
  3136. const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
  3137. GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
  3138. for (; i < size; i++)
  3139. {
  3140. GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
  3141. Js::JavascriptNativeIntArray::MissingItem, instr, isZeroed);
  3142. }
  3143. }
  3144. else if (instr->GetDst() && instr->GetDst()->GetValueType().IsLikelyNativeFloatArray())
  3145. {
  3146. if (!IsSmallObject<Js::JavascriptNativeFloatArray>(length))
  3147. {
  3148. return;
  3149. }
  3150. GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, helperLabel);
  3151. Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
  3152. headOpnd = GenerateArrayAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isZeroed);
  3153. const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
  3154. GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
  3155. // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
  3156. uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
  3157. uint const missingItemCount = size * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
  3158. i = i * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
  3159. for (; i < missingItemCount; i++)
  3160. {
  3161. GenerateMemInit(
  3162. headOpnd, offsetStart + i * sizeof(Js::JavascriptArray::MissingItem),
  3163. IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true),
  3164. instr, isZeroed);
  3165. }
  3166. }
  3167. else
  3168. {
  3169. if (!IsSmallObject<Js::JavascriptArray>(length))
  3170. {
  3171. return;
  3172. }
  3173. uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
  3174. headOpnd = GenerateArrayAlloc<Js::JavascriptArray>(instr, &size, arrayInfo, &isZeroed);
  3175. const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
  3176. for (; i < size; i++)
  3177. {
  3178. GenerateMemInit(
  3179. headOpnd, offsetStart + i * sizeof(Js::Var),
  3180. IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true),
  3181. instr, isZeroed);
  3182. }
  3183. }
  3184. // Skip pass the helper call
  3185. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  3186. InsertBranch(Js::OpCode::Br, doneLabel, instr);
  3187. instr->InsertBefore(helperLabel);
  3188. instr->InsertAfter(doneLabel);
  3189. }
  3190. void
  3191. Lowerer::GenerateArrayInfoIsNativeIntArrayTest(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, IR::LabelInstr * helperLabel)
  3192. {
  3193. Func * func = this->m_func;
  3194. InsertTestBranch(IR::MemRefOpnd::New(((char *)arrayInfo) + Js::ArrayCallSiteInfo::GetOffsetOfBits(), TyUint8, func),
  3195. IR::IntConstOpnd::New(Js::ArrayCallSiteInfo::NotNativeIntBit, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
  3196. }
  3197. void
  3198. Lowerer::GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, IR::LabelInstr * helperLabel)
  3199. {
  3200. Func * func = this->m_func;
  3201. InsertCompareBranch(IR::MemRefOpnd::New(((char *)arrayInfo) + Js::ArrayCallSiteInfo::GetOffsetOfBits(), TyUint8, func),
  3202. IR::IntConstOpnd::New(Js::ArrayCallSiteInfo::NotNativeIntBit, TyUint8, func), Js::OpCode::BrNeq_A, helperLabel, instr);
  3203. }
  3204. template <typename ArrayType>
  3205. static IR::JnHelperMethod GetArrayAllocMemHelper();
  3206. template <>
  3207. static IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptArray>()
  3208. {
  3209. return IR::HelperAllocMemForJavascriptArray;
  3210. }
  3211. template <>
  3212. static IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptNativeIntArray>()
  3213. {
  3214. return IR::HelperAllocMemForJavascriptNativeIntArray;
  3215. }
  3216. template <>
  3217. static IR::JnHelperMethod GetArrayAllocMemHelper<Js::JavascriptNativeFloatArray>()
  3218. {
  3219. return IR::HelperAllocMemForJavascriptNativeFloatArray;
  3220. }
template <typename ArrayType>
IR::RegOpnd *
Lowerer::GenerateArrayAlloc(IR::Instr *instr, uint32 * psize, Js::ArrayCallSiteInfo * arrayInfo, bool * pIsHeadSegmentZeroed)
{
    // Emits inline allocation of an ArrayType object plus its head segment,
    // initializing the object header fields and the segment header.
    // On input *psize is the element count; on output it is the aligned
    // head-segment size. Returns a register holding the head-segment pointer.
    // *pIsHeadSegmentZeroed reports whether the segment memory is known to
    // be zero-initialized (so element stores can be skipped when storing 0).
    Func * func = this->m_func;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();

    // Generate code as in JavascriptArray::NewLiteral
    uint32 count = *psize;
    uint alignedHeadSegmentSize;
    size_t arrayAllocSize;

    IR::RegOpnd * headOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func, false);
    IR::Instr * leaHeadInstr = nullptr;
    bool isHeadSegmentZeroed = false;
    if (ArrayType::HasInlineHeadSegment(count))
    {
        // Head segment is carved out of the same allocation as the array
        // object; its address is just dstOpnd + sizeof(ArrayType) (LEA below).
        uint32 allocCount = count == 0 ? Js::SparseArraySegmentBase::SMALL_CHUNK_SIZE : count;
        arrayAllocSize = Js::JavascriptArray::DetermineAllocationSize<ArrayType, 0>(allocCount, nullptr, &alignedHeadSegmentSize);
        leaHeadInstr = IR::Instr::New(Js::OpCode::LEA, headOpnd,
            IR::IndirOpnd::New(dstOpnd, sizeof(ArrayType), TyMachPtr, func), func);
        isHeadSegmentZeroed = true;
    }
    else
    {
        // Need to allocate the head segment first so that if it throws,
        // we doesn't have the memory assigned to dstOpnd yet
        // Even if the instruction is marked as dstIsTempObject, we still should not allocate
        // that big of a chunk on the stack.
        alignedHeadSegmentSize = Js::SparseArraySegment<typename ArrayType::TElement>::GetAlignedSize(count);
        GenerateRecyclerAlloc(
            IR::HelperAllocMemForSparseArraySegmentBase,
            sizeof(Js::SparseArraySegment<typename ArrayType::TElement>) +
            alignedHeadSegmentSize * sizeof(typename ArrayType::TElement),
            headOpnd,
            instr);
        arrayAllocSize = sizeof(ArrayType);
    }
    *psize = alignedHeadSegmentSize;

    IR::SymOpnd * tempObjectSymOpnd;
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(instr, dstOpnd,
        GetArrayAllocMemHelper<ArrayType>(), arrayAllocSize, &tempObjectSymOpnd);
    // Segment is only known-zeroed when it is inline AND the object
    // allocation itself was zeroed.
    isHeadSegmentZeroed = isHeadSegmentZeroed & isZeroed;
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
            this->LoadVTableValueOpnd(this->outerMostLoopLabel, ArrayType::VtableHelper()),
            this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, ArrayType::VtableHelper()), instr, isZeroed);
    }
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfType(), this->LoadLibraryValueOpnd(instr, ArrayType::InitialTypeHelper()), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, ArrayType::GetOffsetOfAuxSlots(), instr, isZeroed);

    // Emit the flags and call site index together
    Js::ProfileId arrayCallSiteIndex = (Js::ProfileId)instr->AsProfiledInstr()->u.profileId;
#if DBG
    if (instr->AsProfiledInstr()->u.profileId < Js::Constants::NoProfileId)
    {
        // Sanity check: arrayInfo really is the entry for this call site.
        Js::FunctionBody * functionBody = instr->m_func->GetJnFunction();
        Assert((uint32)(arrayInfo - functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, 0)) == arrayCallSiteIndex);
    }
    else
    {
        Assert(arrayInfo == nullptr);
    }
#endif

    // The same at this:
    //  GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), (uint16)Js::DynamicObjectFlags::InitialArrayValue, instr, isZeroed);
    //  GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayCallSiteIndex(), arrayCallSiteIndex, instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfArrayFlags(), (uint)Js::DynamicObjectFlags::InitialArrayValue | ((uint)arrayCallSiteIndex << 16), instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLength(), count, instr, isZeroed);

    // For the inline-segment case the LEA computing the head pointer is
    // inserted only now, after the object allocation succeeded.
    if (leaHeadInstr != nullptr)
    {
        instr->InsertBefore(leaHeadInstr);
        LowererMD::ChangeToLea(leaHeadInstr);
    }

    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfHead(), headOpnd, instr, isZeroed);
    GenerateMemInit(dstOpnd, ArrayType::GetOffsetOfLastUsedSegmentOrSegmentMap(), headOpnd, instr, isZeroed);

    // Initialize segment head
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLeft(), 0, instr, isHeadSegmentZeroed);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), count, instr, isHeadSegmentZeroed);
    GenerateMemInit(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfSize(), alignedHeadSegmentSize, instr, isHeadSegmentZeroed);
    GenerateMemInitNull(headOpnd, Js::SparseArraySegmentBase::GetOffsetOfNext(), instr, isHeadSegmentZeroed);

    *pIsHeadSegmentZeroed = isHeadSegmentZeroed;
    return headOpnd;
}
void
Lowerer::GenerateProfiledNewScObjArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, RecyclerWeakReference<Js::FunctionBody> * weakFuncRef, uint32 length)
{
    // Emits a profile-guided inline fast path for a constructed array
    // ('new Array(...)' via NewScObjArray). Unlike the NewScArray fast path,
    // the flavor is chosen from arrayInfo directly, and the whole head
    // segment [0, size) is filled with the missing-item value.
    // Falls through to the helper at helperLabel on profile-check failure.
    if (PHASE_OFF(Js::ArrayCtorFastPathPhase, m_func))
    {
        return;
    }
    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    uint32 size = length;       // updated by GenerateArrayAlloc to the aligned head-segment size
    bool isZeroed;
    IR::RegOpnd *dstOpnd = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *headOpnd;
    // NOTE(review): unlike GenerateProfiledNewScArrayFastPath, headOpnd is
    // not wrapped in an AutoReuseOpnd in any branch here — confirm whether
    // that is intentional.
    if (arrayInfo && arrayInfo->IsNativeIntArray())
    {
        GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, helperLabel);
        Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isZeroed);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
                Js::JavascriptNativeIntArray::MissingItem, instr, isZeroed);
        }
    }
    else if (arrayInfo && arrayInfo->IsNativeFloatArray())
    {
        GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, helperLabel);
        Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
        headOpnd = GenerateArrayAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isZeroed);
        GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isZeroed);
        // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
        // Store missing items in MissingItem-sized units over the double-sized segment.
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        uint const missingItemCount = size * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
        for (uint i = 0; i < missingItemCount; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::JavascriptArray::MissingItem),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true),
                instr, isZeroed);
        }
    }
    else
    {
        uint const offsetStart = sizeof(Js::SparseArraySegmentBase);
        headOpnd = GenerateArrayAlloc<Js::JavascriptArray>(instr, &size, arrayInfo, &isZeroed);
        for (uint i = 0; i < size; i++)
        {
            GenerateMemInit(
                headOpnd, offsetStart + i * sizeof(Js::Var),
                IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true),
                instr, isZeroed);
        }
    }

    // Skip pass the helper call
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
  3369. void
  3370. Lowerer::GenerateProfiledNewScIntArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, RecyclerWeakReference<Js::FunctionBody> * weakFuncRef)
  3371. {
  3372. // Helper will deal with ForceES5ARray
  3373. if (PHASE_OFF(Js::ArrayLiteralFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
  3374. {
  3375. return;
  3376. }
  3377. if (!arrayInfo->IsNativeIntArray())
  3378. {
  3379. return;
  3380. }
  3381. Func * func = this->m_func;
  3382. IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  3383. GenerateArrayInfoIsNativeIntArrayTest(instr, arrayInfo, helperLabel);
  3384. IR::AddrOpnd * elementsOpnd = instr->GetSrc1()->AsAddrOpnd();
  3385. Js::AuxArray<int32> * ints = (Js::AuxArray<int32> *)elementsOpnd->m_address;
  3386. uint32 size = ints->count;
  3387. // Generate code as in JavascriptArray::NewLiteral
  3388. bool isHeadSegmentZeroed;
  3389. IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
  3390. Assert(Js::JavascriptNativeIntArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeIntArray::GetOffsetOfArrayCallSiteIndex());
  3391. IR::RegOpnd * headOpnd = GenerateArrayAlloc<Js::JavascriptNativeIntArray>(instr, &size, arrayInfo, &isHeadSegmentZeroed);
  3392. const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
  3393. GenerateMemInit(dstOpnd, Js::JavascriptNativeIntArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicMisc, m_func), instr, isHeadSegmentZeroed);
  3394. // Initialize the elements
  3395. uint i = 0;
  3396. if (ints->count > 16)
  3397. {
  3398. // Do memcpy if > 16
  3399. IR::RegOpnd * dstElementsOpnd = IR::RegOpnd::New(TyMachPtr, func);
  3400. const IR::AutoReuseOpnd autoReuseDstElementsOpnd(dstElementsOpnd, func);
  3401. IR::Opnd * srcOpnd = IR::AddrOpnd::New(ints->elements, IR::AddrOpndKindDynamicMisc, func);
  3402. InsertLea(dstElementsOpnd, IR::IndirOpnd::New(headOpnd, sizeof(Js::SparseArraySegmentBase), TyMachPtr, func), instr);
  3403. GenerateMemCopy(dstElementsOpnd, srcOpnd, ints->count * sizeof(int32), instr);
  3404. i = ints->count;
  3405. }
  3406. else
  3407. {
  3408. for (; i < ints->count; i++)
  3409. {
  3410. GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
  3411. ints->elements[i], instr, isHeadSegmentZeroed);
  3412. }
  3413. }
  3414. Assert(i == ints->count);
  3415. for (; i < size; i++)
  3416. {
  3417. GenerateMemInit(headOpnd, sizeof(Js::SparseArraySegmentBase) + i * sizeof(int32),
  3418. Js::JavascriptNativeIntArray::MissingItem, instr, isHeadSegmentZeroed);
  3419. }
  3420. // Skip pass the helper call
  3421. IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  3422. InsertBranch(Js::OpCode::Br, doneLabel, instr);
  3423. instr->InsertBefore(helperLabel);
  3424. instr->InsertAfter(doneLabel);
  3425. }
void
Lowerer::GenerateProfiledNewScFloatArrayFastPath(IR::Instr *instr, Js::ArrayCallSiteInfo * arrayInfo, RecyclerWeakReference<Js::FunctionBody> * weakFuncRef)
{
    // Emits a profile-guided inline fast path for a float-array literal:
    // allocates a JavascriptNativeFloatArray inline, memcpy's the literal's
    // doubles from the aux data, and fills the remainder of the head segment
    // with MissingItem. Falls through to the helper at helperLabel on
    // profile-check failure.
    if (PHASE_OFF(Js::ArrayLiteralFastPathPhase, m_func) || CONFIG_FLAG(ForceES5Array))
    {
        return;
    }
    if (!arrayInfo->IsNativeFloatArray())
    {
        return;
    }
    Func * func = this->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);

    // If the array info hasn't mark as not int array yet, go to the helper and mark it.
    // It really is just for assert purpose in JavascriptNativeFloatArray::ToVarArray
    GenerateArrayInfoIsNativeFloatAndNotIntArrayTest(instr, arrayInfo, helperLabel);

    // src1 is the address of the literal's double aux array (count + elements).
    IR::AddrOpnd * elementsOpnd = instr->GetSrc1()->AsAddrOpnd();
    Js::AuxArray<double> * doubles = (Js::AuxArray<double> *)elementsOpnd->m_address;
    uint32 size = doubles->count;

    // Generate code as in JavascriptArray::NewLiteral
    bool isHeadSegmentZeroed;
    IR::RegOpnd * dstOpnd = instr->GetDst()->AsRegOpnd();
    Assert(Js::JavascriptNativeFloatArray::GetOffsetOfArrayFlags() + sizeof(uint16) == Js::JavascriptNativeFloatArray::GetOffsetOfArrayCallSiteIndex());
    IR::RegOpnd * headOpnd = GenerateArrayAlloc<Js::JavascriptNativeFloatArray>(instr, &size, arrayInfo, &isHeadSegmentZeroed);
    const IR::AutoReuseOpnd autoReuseHeadOpnd(headOpnd, func);
    GenerateMemInit(dstOpnd, Js::JavascriptNativeFloatArray::GetOffsetOfWeakFuncRef(), IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func), instr, isHeadSegmentZeroed);

    // Initialize the elements
    IR::RegOpnd * dstElementsOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseDstElementsOpnd(dstElementsOpnd, func);
    IR::Opnd * srcOpnd = IR::AddrOpnd::New(doubles->elements, IR::AddrOpndKindDynamicMisc, func);
    InsertLea(dstElementsOpnd, IR::IndirOpnd::New(headOpnd, sizeof(Js::SparseArraySegmentBase), TyMachPtr, func), instr);
    GenerateMemCopy(dstElementsOpnd, srcOpnd, doubles->count * sizeof(double), instr);

    // Js::JavascriptArray::MissingItem is a Var, so it may be 32-bit or 64 bit.
    // 'missingItem' is the COUNT of MissingItem-sized stores needed to cover
    // the uninitialized tail of the segment (size - count doubles).
    uint const offsetStart = sizeof(Js::SparseArraySegmentBase) + doubles->count * sizeof(double);
    uint const missingItem = (size - doubles->count) * sizeof(double) / sizeof(Js::JavascriptArray::MissingItem);
    for (uint i = 0; i < missingItem; i++)
    {
        GenerateMemInit(headOpnd, offsetStart + i * sizeof(Js::JavascriptArray::MissingItem),
            IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, m_func, true), instr, isHeadSegmentZeroed);
    }

    // Skip pass the helper call
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);
}
IR::Instr *
Lowerer::LowerNewScIntArray(IR::Instr *arrInstr)
{
    // Lowers NewScIntArray (int-array literal) to a helper call, emitting the
    // inline fast path first when profile data allows it. Returns the
    // instruction preceding the lowered region.
    IR::Instr *instrPrev = arrInstr->m_prev;
    IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScIntArray;

    if ((arrInstr->IsJitProfilingInstr() || arrInstr->IsProfiledInstr()) && arrInstr->m_func->HasProfileInfo())
    {
        RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
        if (weakFuncRef)
        {
            Js::FunctionBody *functionBody = arrInstr->m_func->GetJnFunction();

            // Technically a load of the same memory address either way.
            Js::ProfileId profileId =
                arrInstr->IsJitProfilingInstr()
                    ? arrInstr->AsJitProfilingInstr()->profileId
                    : static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);

            Js::ArrayCallSiteInfo *arrayInfo =
                functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, profileId);

            // Only do fast-path if it isn't a JitProfiling instr and not copy-on-access array
            if (arrInstr->IsProfiledInstr()
                && (PHASE_OFF1(Js::Phase::CopyOnAccessArrayPhase) || arrayInfo->isNotCopyOnAccessArray) && !PHASE_FORCE1(Js::Phase::CopyOnAccessArrayPhase))
            {
                GenerateProfiledNewScIntArrayFastPath(arrInstr, arrayInfo, weakFuncRef);
            }

            // Profiled helper takes the weak ref and call-site info as extra args.
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
            m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
            helperMethod = IR::HelperScrArr_ProfiledNewScIntArray;
        }
    }

    LoadScriptContext(arrInstr);
    IR::Opnd *elementsOpnd = arrInstr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(arrInstr, elementsOpnd);
    m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
    return instrPrev;
}
  3507. IR::Instr *
  3508. Lowerer::LowerNewScFltArray(IR::Instr *arrInstr)
  3509. {
  3510. IR::Instr *instrPrev = arrInstr->m_prev;
  3511. IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScFltArray;
  3512. if ((arrInstr->IsJitProfilingInstr() || arrInstr->IsProfiledInstr()) && arrInstr->m_func->HasProfileInfo())
  3513. {
  3514. RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = arrInstr->m_func->GetWeakFuncRef();
  3515. if (weakFuncRef)
  3516. {
  3517. Js::ProfileId profileId =
  3518. arrInstr->IsJitProfilingInstr()
  3519. ? arrInstr->AsJitProfilingInstr()->profileId
  3520. : static_cast<Js::ProfileId>(arrInstr->AsProfiledInstr()->u.profileId);
  3521. Js::FunctionBody *functionBody = arrInstr->m_func->GetJnFunction();
  3522. Js::ArrayCallSiteInfo *arrayInfo =
  3523. functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, profileId);
  3524. // Only do fast-path if it isn't a JitProfiling instr
  3525. if (arrInstr->IsProfiledInstr()) {
  3526. GenerateProfiledNewScFloatArrayFastPath(arrInstr, arrayInfo, weakFuncRef);
  3527. }
  3528. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, m_func));
  3529. m_lowererMD.LoadHelperArgument(arrInstr, IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, m_func));
  3530. helperMethod = IR::HelperScrArr_ProfiledNewScFltArray;
  3531. }
  3532. }
  3533. LoadScriptContext(arrInstr);
  3534. IR::Opnd *elementsOpnd = arrInstr->UnlinkSrc1();
  3535. m_lowererMD.LoadHelperArgument(arrInstr, elementsOpnd);
  3536. m_lowererMD.ChangeToHelperCall(arrInstr, helperMethod);
  3537. return instrPrev;
  3538. }
  3539. IR::Instr *
  3540. Lowerer::LowerArraySegmentVars(IR::Instr *arrayInstr)
  3541. {
  3542. IR::Instr * instrPrev;
  3543. IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperArraySegmentVars, m_func);
  3544. instrPrev = m_lowererMD.LoadHelperArgument(arrayInstr, arrayInstr->UnlinkSrc2());
  3545. m_lowererMD.LoadHelperArgument(arrayInstr, arrayInstr->UnlinkSrc1());
  3546. arrayInstr->m_opcode = Js::OpCode::Call;
  3547. arrayInstr->SetSrc1(opndHelper);
  3548. m_lowererMD.LowerCall(arrayInstr, 0);
  3549. return instrPrev;
  3550. }
// Lowers a profiled `new Array(...)` (or the spread variant) in simple-jit.
// Emits a call to HelperProfiledNewScObjArray, which receives the function,
// frame pointer, and both profile ids so the runtime can profile the call and
// the array allocation. Returns the first instruction of the lowered region.
// NOTE(review): hasArgs is not consulted in this body — presumably the arg
// chain is always present here; confirm against callers.
IR::Instr* Lowerer::LowerProfiledNewArray(IR::JitProfilingInstr* instr, bool hasArgs)
{
    // Use the special helper which checks whether Array has been overwritten by the user and if
    // it hasn't, possibly allocates a native array

    // Insert a temporary label before the instruction we're about to lower, so that we can return
    // the first instruction above that needs to be lowered after we're done - regardless of argument
    // list, StartCall, etc.
    IR::Instr* startMarkerInstr = InsertLoweredRegionStartMarker(instr);

    Assert(instr->isNewArray);
    Assert(instr->arrayProfileId != Js::Constants::NoProfileId);
    Assert(instr->profileId != Js::Constants::NoProfileId);

    bool isSpreadCall = instr->m_opcode == Js::OpCode::NewScObjectSpread || instr->m_opcode == Js::OpCode::NewScObjArraySpread;

    // The implicit first ("this") argument is a null var; for spread calls it
    // sits one slot further because of the extra spread-indices argument.
    m_lowererMD.LoadNewScObjFirstArg(instr, IR::AddrOpnd::New(nullptr, IR::AddrOpndKindConstantVar, m_func, true), isSpreadCall ? 1 : 0);

    if (isSpreadCall)
    {
        this->LowerSpreadCall(instr, Js::CallFlags_New, true);
    }
    else
    {
        // Lower the ArgOut chain first, then push the helper's extra
        // arguments (in reverse order: function, frame pointer, profile ids).
        const int32 argCount = m_lowererMD.LowerCallArgs(instr, Js::CallFlags_New, 4);
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->arrayProfileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
        m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
        instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledNewScObjArray, m_func));
        m_lowererMD.LowerCall(instr, static_cast<Js::ArgSlot>(argCount));
    }

    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerNewScObject
///
/// Machine independent lowering of a NewScObject (JS `new` operator) instr.
///
///----------------------------------------------------------------------------
// Lowers a NewScObject* instruction (the JS `new` operator): allocates the
// default new object (or skips it / bails out, depending on the fixed ctor
// cache), optionally calls the constructor, and selects the correct result.
//
//   callCtor - the instruction still contains the constructor call (it has not
//              been split into NewScObjectNoCtor + CallI).
//   hasArgs  - an ArgOut chain is attached to the instruction.
//   isBaseClassConstructorNewScObject - selects the *Full no-ctor helpers.
//
// Returns the first instruction above the lowered region.
IR::Instr *
Lowerer::LowerNewScObject(IR::Instr *newObjInstr, bool callCtor, bool hasArgs, bool isBaseClassConstructorNewScObject)
{
    // Profiled `new Array(...)` in simple-jit takes a dedicated path.
    if (newObjInstr->IsJitProfilingInstr() && newObjInstr->AsJitProfilingInstr()->isNewArray)
    {
        Assert(callCtor);
        return LowerProfiledNewArray(newObjInstr->AsJitProfilingInstr(), hasArgs);
    }

    bool isSpreadCall = newObjInstr->m_opcode == Js::OpCode::NewScObjectSpread ||
        newObjInstr->m_opcode == Js::OpCode::NewScObjArraySpread;

    Func* func = newObjInstr->m_func;

    // Insert a temporary label before the instruction we're about to lower, so that we can return
    // the first instruction above that needs to be lowered after we're done - regardless of argument
    // list, StartCall, etc.
    IR::Instr* startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

    IR::Opnd *ctorOpnd = newObjInstr->GetSrc1();
    IR::RegOpnd *newObjDst = newObjInstr->GetDst()->AsRegOpnd();

    Assert(!callCtor || !hasArgs || (newObjInstr->GetSrc2() != nullptr /*&& newObjInstr->GetSrc2()->IsSymOpnd()*/));

    bool skipNewScObj = false;
    bool returnNewScObj = false;
    bool emitBailOut = false;

    // If we haven't yet split NewScObject into NewScObjectNoCtor and CallI, we will need a temporary register
    // to hold the result of the object allocation.
    IR::RegOpnd* createObjDst = callCtor ? IR::RegOpnd::New(TyVar, func) : newObjDst;
    IR::LabelInstr* helperOrBailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, /* isOpHelper = */ true);
    IR::LabelInstr* callCtorLabel = IR::LabelInstr::New(Js::OpCode::Label, func, /* isOpHelper = */ false);

    // Try to emit the fast allocation and construction path.
    bool usedFixedCtorCache = TryLowerNewScObjectWithFixedCtorCache(newObjInstr, createObjDst, helperOrBailoutLabel, callCtorLabel, skipNewScObj, returnNewScObj, emitBailOut);

    AssertMsg(!skipNewScObj || callCtor, "What will we return if we skip the default new object and don't call the ctor?");
    Assert(!skipNewScObj || !returnNewScObj);
    Assert(usedFixedCtorCache || !skipNewScObj);
    Assert(!usedFixedCtorCache || newObjInstr->HasFixedFunctionAddressTarget());
    Assert(!skipNewScObj || !emitBailOut);

#if DBG
    if (usedFixedCtorCache)
    {
        Js::JavascriptFunction* ctor = newObjInstr->GetFixedFunction();
        Js::FunctionInfo* ctorInfo = ctor->GetFunctionInfo();
        Assert((ctorInfo->GetAttributes() & Js::FunctionInfo::Attributes::ErrorOnNew) == 0);
        Assert(!!(ctorInfo->GetAttributes() & Js::FunctionInfo::Attributes::SkipDefaultNewObject) == skipNewScObj);
    }
#endif

    // An attached-but-empty ArgOut chain counts as "no args".
    IR::Instr* startCallInstr = nullptr;
    if (callCtor && hasArgs)
    {
        hasArgs = !newObjInstr->HasEmptyArgOutChain(&startCallInstr);
    }

    // If we're not skipping the default new object, let's emit bailout or a call to NewScObject* helper
    IR::JnHelperMethod newScHelper = IR::HelperInvalid;
    IR::Instr *newScObjCall = nullptr;
    if (!skipNewScObj)
    {
        // If we emitted the fast path, this block is a helper block.
        if (usedFixedCtorCache)
        {
            newObjInstr->InsertBefore(helperOrBailoutLabel);
        }

        if (emitBailOut)
        {
            // Split the instruction: the clone keeps the original opcode and
            // operands; the original becomes the BailOut carrying the bailout info.
            IR::Instr* bailOutInstr = newObjInstr;
            newObjInstr = IR::Instr::New(newObjInstr->m_opcode, func);
            bailOutInstr->TransferTo(newObjInstr);
            bailOutInstr->m_opcode = Js::OpCode::BailOut;
            bailOutInstr->InsertAfter(newObjInstr);
            GenerateBailOut(bailOutInstr);
        }
        else
        {
            Assert(!newObjDst->CanStoreTemp());
            // createObjDst = NewScObject...(ctorOpnd)
            // Helper choice depends on whether we still call the ctor here,
            // whether there are args, and the base-class-constructor flavor.
            newScHelper = !callCtor ?
                (isBaseClassConstructorNewScObject ?
                    (hasArgs ? IR::HelperNewScObjectNoCtorFull : IR::HelperNewScObjectNoArgNoCtorFull) :
                    (hasArgs ? IR::HelperNewScObjectNoCtor : IR::HelperNewScObjectNoArgNoCtor)) :
                (hasArgs || usedFixedCtorCache ? IR::HelperNewScObjectNoCtor : IR::HelperNewScObjectNoArg);

            LoadScriptContext(newObjInstr);
            m_lowererMD.LoadHelperArgument(newObjInstr, newObjInstr->GetSrc1());

            newScObjCall = IR::Instr::New(Js::OpCode::Call, createObjDst, IR::HelperCallOpnd::New(newScHelper, func), func);
            newObjInstr->InsertBefore(newScObjCall);
            m_lowererMD.LowerCall(newScObjCall, 0);
        }
    }

    // If we call HelperNewScObjectNoArg directly, we won't be calling the constructor from here, because the helper will do it.
    // We could probably avoid this complexity by converting NewScObjectNoArg to NewScObject in the IRBuilder, once we have dedicated
    // code paths for new Object() and new Array().
    callCtor &= hasArgs || usedFixedCtorCache;
    AssertMsg(!skipNewScObj || callCtor, "What will we return if we skip the default new object and don't call the ctor?");

    newObjInstr->InsertBefore(callCtorLabel);

    if (callCtor && usedFixedCtorCache)
    {
        IR::JnHelperMethod ctorHelper = IR::JnHelperMethodCount;

        // If we have no arguments (i.e. the argument chain is empty), we can recognize a couple of common special cases, such
        // as new Object() or new Array(), for which we have optimized helpers.
        Js::JavascriptFunction* ctor = newObjInstr->GetFixedFunction();
        Js::FunctionInfo* ctorInfo = ctor->GetFunctionInfo();
        if (!hasArgs && (ctorInfo == &Js::JavascriptObject::EntryInfo::NewInstance || ctorInfo == &Js::JavascriptArray::EntryInfo::NewInstance))
        {
            if (ctorInfo == &Js::JavascriptObject::EntryInfo::NewInstance)
            {
                Assert(skipNewScObj);
                ctorHelper = IR::HelperNewJavascriptObjectNoArg;
                callCtor = false;
            }
            else if (ctorInfo == &Js::JavascriptArray::EntryInfo::NewInstance)
            {
                Assert(skipNewScObj);
                ctorHelper = IR::HelperNewJavascriptArrayNoArg;
                callCtor = false;
            }

            if (!callCtor)
            {
                // newObjDst = HelperNewJavascript{Object,Array}NoArg(scriptContext)
                LoadScriptContext(newObjInstr);

                IR::Instr *ctorCall = IR::Instr::New(Js::OpCode::Call, newObjDst, IR::HelperCallOpnd::New(ctorHelper, func), func);
                newObjInstr->InsertBefore(ctorCall);
                m_lowererMD.LowerCall(ctorCall, 0);
            }
        }
    }

    IR::AutoReuseOpnd autoReuseSavedCtorOpnd;
    if (callCtor)
    {
        // Load the first argument, which is either the object just created or null. Spread has an extra argument.
        IR::Instr * argInstr = this->m_lowererMD.LoadNewScObjFirstArg(newObjInstr, createObjDst, isSpreadCall ? 1 : 0);

        IR::Instr * insertAfterCtorInstr = newObjInstr->m_next;

        if (skipNewScObj)
        {
            // Since we skipped the default new object, we must be returning whatever the constructor returns
            // (which better be an Object), so let's just use newObjDst directly.
            // newObjDst = newObjInstr->m_src1(createObjDst, ...)
            Assert(newObjInstr->GetDst() == newObjDst);
            if (isSpreadCall)
            {
                newObjInstr = this->LowerSpreadCall(newObjInstr, Js::CallFlags_New);
            }
            else
            {
                newObjInstr = this->m_lowererMD.LowerCallI(newObjInstr, Js::CallFlags_New, false, argInstr);
            }
        }
        else
        {
            // We may need to return the default new object or whatever the constructor returns. Let's stash
            // away the constructor's return in a temporary operand, and do the right check, if necessary.
            // ctorResultObjOpnd = newObjInstr->m_src1(createObjDst, ...)
            IR::RegOpnd *ctorResultObjOpnd = IR::RegOpnd::New(TyVar, func);
            newObjInstr->UnlinkDst();
            newObjInstr->SetDst(ctorResultObjOpnd);

            if (isSpreadCall)
            {
                newObjInstr = this->LowerSpreadCall(newObjInstr, Js::CallFlags_New);
            }
            else
            {
                newObjInstr = this->m_lowererMD.LowerCallI(newObjInstr, Js::CallFlags_New, false, argInstr);
            }

            if (returnNewScObj)
            {
                // Ctor has no explicit return value: the allocated object is the result.
                // MOV newObjDst, createObjDst
                this->m_lowererMD.CreateAssign(newObjDst, createObjDst, insertAfterCtorInstr);
            }
            else
            {
                // Pick the ctor's return value if it is an object, else the allocated object.
                LowerGetNewScObjectCommon(ctorResultObjOpnd, ctorResultObjOpnd, createObjDst, insertAfterCtorInstr);
                this->m_lowererMD.CreateAssign(newObjDst, ctorResultObjOpnd, insertAfterCtorInstr);
            }
        }

        // We don't ever need to update the constructor cache, if we hard coded it. Caches requiring update after constructor
        // don't get cloned, and those that don't require update will never need one anymore.
        if (!usedFixedCtorCache)
        {
            LowerUpdateNewScObjectCache(insertAfterCtorInstr, newObjDst, ctorOpnd, false /* isCtorFunction */);
        }
    }
    else
    {
        if (newObjInstr->IsJitProfilingInstr())
        {
            Assert(m_func->IsSimpleJit());
            Assert(!Js::FunctionBody::IsNewSimpleJit());

            // This path skipped calling the Ctor, which skips calling LowerCallI with newObjInstr, meaning that the call will not be profiled.
            // So we insert it manually here.

            if(newScHelper == IR::HelperNewScObjectNoArg &&
                newObjDst &&
                ctorOpnd->IsRegOpnd() &&
                newObjDst->AsRegOpnd()->m_sym == ctorOpnd->AsRegOpnd()->m_sym)
            {
                Assert(newObjInstr->m_func->IsSimpleJit());
                Assert(createObjDst != newObjDst);

                // The function object sym is going to be overwritten, so save it in a temp for profiling
                IR::RegOpnd *const savedCtorOpnd = IR::RegOpnd::New(ctorOpnd->GetType(), newObjInstr->m_func);
                autoReuseSavedCtorOpnd.Initialize(savedCtorOpnd, newObjInstr->m_func);
                Lowerer::InsertMove(savedCtorOpnd, ctorOpnd, newObjInstr);
                ctorOpnd = savedCtorOpnd;
            }

            // It is a constructor (CallFlags_New) and therefore a single argument (this) would have been given.
            const auto info = Lowerer::MakeCallInfoConst(Js::CallFlags_New, 1, func);

            Assert(newScObjCall);
            IR::JitProfilingInstr *const newObjJitProfilingInstr = newObjInstr->AsJitProfilingInstr();
            GenerateCallProfiling(
                newObjJitProfilingInstr->profileId,
                newObjJitProfilingInstr->inlineCacheIndex,
                createObjDst,
                ctorOpnd,
                info,
                false,
                newScObjCall,
                newObjInstr);
        }

        // MOV newObjDst, createObjDst
        if (!skipNewScObj && createObjDst != newObjDst)
        {
            this->m_lowererMD.CreateAssign(newObjDst, createObjDst, newObjInstr);
        }
        newObjInstr->Remove();
    }

    // Return the first instruction above the region we've just lowered.
    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
// Emits, after `insertAfter`, a call to one of the SimpleProfile* helpers to
// profile a call that was just lowered (simple-jit with dynamic profiling only).
//
//   profileId / inlineCacheIndex - identify the call site being profiled.
//   retval          - the call's result operand; null means "no result"
//                     (replaced with a null AddrOpnd, or nothing is emitted
//                     when returnTypeOnly is also set).
//   calleeFunctionObjOpnd - the function object that was called.
//   callInfo        - CallInfo operand describing flags/arg count.
//   returnTypeOnly  - profile only the return type (HelperSimpleProfileReturnTypeCall).
//   callInstr       - the already-lowered machine call (for placing the flag save).
//   insertAfter     - the profiling call is inserted after this instruction.
//
// Returns the last instruction emitted (or `insertAfter` when nothing is).
IR::Instr*
Lowerer::GenerateCallProfiling(Js::ProfileId profileId, Js::InlineCacheIndex inlineCacheIndex, IR::Opnd* retval, IR::Opnd*calleeFunctionObjOpnd, IR::Opnd* callInfo, bool returnTypeOnly, IR::Instr*callInstr,IR::Instr*insertAfter)
{
    // This should only ever happen in profiling simplejit
    Assert(m_func->DoSimpleJitDynamicProfile());

    // Make sure they gave us the correct call instruction
#if defined(_M_IX86) || defined(_M_X64)
    Assert(callInstr->m_opcode == Js::OpCode::CALL);
#elif defined(_M_ARM)
    Assert(callInstr->m_opcode == Js::OpCode::BLX);
#endif
    Func*const func = insertAfter->m_func;

    {
        // First, we should save the implicit call flags
        const auto starFlag = GetImplicitCallFlagsOpnd();
        const auto saveOpnd = IR::RegOpnd::New(starFlag->GetType(), func);

        IR::AutoReuseOpnd a(starFlag, func), b(saveOpnd, func);

        //Save the flags (before call) and restore them (after the call)
        this->InsertMove(saveOpnd, starFlag, callInstr);
        // Note: On arm this is slightly inefficient because it forces a reload of the memory location to a reg (whereas x86 can load straight from hard-coded memory into a reg)
        //    But it works and making it not reload the memory location would force more refactoring.
        this->InsertMove(starFlag, saveOpnd, insertAfter->m_next);
    }

    // Profile a call that just happened: push some extra info on the stack and call the helper
    if (!retval)
    {
        if (returnTypeOnly)
        {
            // If we are only supposed to profile the return type but don't use the return value, we might
            //    as well do nothing!
            return insertAfter;
        }
        retval = IR::AddrOpnd::NewNull(func);
    }

    IR::Instr* profileCall = IR::Instr::New(Js::OpCode::Call, func);

    // Select the helper; the inline cache index argument is only pushed for
    // the plain HelperSimpleProfileCall variant.
    bool needInlineCacheIndex;
    IR::JnHelperMethod helperMethod;
    if (returnTypeOnly)
    {
        needInlineCacheIndex = false;
        helperMethod = IR::HelperSimpleProfileReturnTypeCall;
    }
    else if(inlineCacheIndex == Js::Constants::NoInlineCacheIndex)
    {
        needInlineCacheIndex = false;
        helperMethod = IR::HelperSimpleProfileCall_DefaultInlineCacheIndex;
    }
    else
    {
        needInlineCacheIndex = true;
        helperMethod = IR::HelperSimpleProfileCall;
    }
    profileCall->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));

    insertAfter->InsertAfter(profileCall);

    // Helper arguments, loaded in reverse of the helper's parameter order.
    m_lowererMD.LoadHelperArgument(profileCall, callInfo);
    m_lowererMD.LoadHelperArgument(profileCall, calleeFunctionObjOpnd);
    m_lowererMD.LoadHelperArgument(profileCall, retval);
    if(needInlineCacheIndex)
    {
        m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateInlineCacheIndexOpnd(inlineCacheIndex, func));
    }
    m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateProfileIdOpnd(profileId, func));

    // Push the frame pointer so that the profiling call can grab the stack layout
    m_lowererMD.LoadHelperArgument(profileCall, IR::Opnd::CreateFramePointerOpnd(func));

    // No args: the helper is stdcall
    return m_lowererMD.LowerCall(profileCall, 0);
}
  3872. bool Lowerer::TryLowerNewScObjectWithFixedCtorCache(IR::Instr* newObjInstr, IR::RegOpnd* newObjDst,
  3873. IR::LabelInstr* helperOrBailoutLabel, IR::LabelInstr* callCtorLabel, bool& skipNewScObj, bool& returnNewScObj, bool& emitBailOut)
  3874. {
  3875. skipNewScObj = false;
  3876. returnNewScObj = false;
  3877. AssertMsg(!PHASE_OFF(Js::ObjTypeSpecNewObjPhase, this->m_func) || !newObjInstr->HasBailOutInfo(),
  3878. "Why do we have bailout on NewScObject when ObjTypeSpecNewObj is off?");
  3879. if (PHASE_OFF(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()) && PHASE_OFF(Js::ObjTypeSpecNewObjPhase, this->m_func))
  3880. {
  3881. return false;
  3882. }
  3883. Js::JitTimeConstructorCache* ctorCache;
  3884. if (newObjInstr->HasBailOutInfo())
  3885. {
  3886. Assert(newObjInstr->IsNewScObjectInstr());
  3887. Assert(newObjInstr->IsProfiledInstr());
  3888. Assert(newObjInstr->GetBailOutKind() == IR::BailOutFailedCtorGuardCheck);
  3889. emitBailOut = true;
  3890. ctorCache = newObjInstr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId));
  3891. Assert(ctorCache != nullptr);
  3892. Assert(!ctorCache->skipNewScObject);
  3893. Assert(!ctorCache->typeIsFinal || ctorCache->ctorHasNoExplicitReturnValue);
  3894. LinkCtorCacheToGuardedProperties(ctorCache);
  3895. }
  3896. else
  3897. {
  3898. if (newObjInstr->m_opcode == Js::OpCode::NewScObjArray || newObjInstr->m_opcode == Js::OpCode::NewScObjArraySpread)
  3899. {
  3900. // These instr's carry a profile that indexes the array call site info, not the ctor cache.
  3901. return false;
  3902. }
  3903. ctorCache = newObjInstr->IsProfiledInstr() ? newObjInstr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId)) : nullptr;
  3904. if (ctorCache == nullptr)
  3905. {
  3906. if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()))
  3907. {
  3908. Js::FunctionBody* callerFunctionBody = newObjInstr->m_func->GetJnFunction();
  3909. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  3910. Output::Print(L"FixedNewObj: function %s (%s): lowering non-fixed new script object for %s, because %s.\n",
  3911. callerFunctionBody->GetDisplayName(), callerFunctionBody->GetDebugNumberSet(debugStringBuffer), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
  3912. newObjInstr->IsProfiledInstr() ? L"constructor cache hasn't been cloned" : L"instruction is not profiled");
  3913. Output::Flush();
  3914. }
  3915. return false;
  3916. }
  3917. }
  3918. Assert(ctorCache != nullptr);
  3919. // We should only have cloned if the script contexts match.
  3920. Assert(newObjInstr->m_func->GetScriptContext() == ctorCache->scriptContext);
  3921. // Built-in constructors don't need a default new object. Since we know which constructor we're calling, we can skip creating a default
  3922. // object and call a specialized helper (or even constructor, directly) avoiding the checks in generic NewScObjectCommon.
  3923. if (ctorCache->skipNewScObject)
  3924. {
  3925. if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()))
  3926. {
  3927. Js::FunctionBody* callerFunctionBody = newObjInstr->m_func->GetJnFunction();
  3928. const Js::JavascriptFunction* ctor = ctorCache->constructor;
  3929. Js::FunctionBody* ctorBody = ctor->GetFunctionInfo()->HasBody() ? ctor->GetFunctionInfo()->GetFunctionBody() : nullptr;
  3930. const wchar_t* ctorName = ctorBody != nullptr ? ctorBody->GetDisplayName() : L"<unknown>";
  3931. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  3932. wchar_t debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  3933. Output::Print(L"FixedNewObj: function %s (%s): lowering skipped new script object for %s with %s ctor <unknown> (%s %s).\n",
  3934. callerFunctionBody->GetDisplayName(), callerFunctionBody->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
  3935. newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? L"inlined" : L"called",
  3936. ctorName, ctorBody ? ctorBody->GetDebugNumberSet(debugStringBuffer) : L"(null)");
  3937. Output::Flush();
  3938. }
  3939. // All built-in constructors share a special singleton cache that is never checked and never invalidated. It cannot be used
  3940. // as a guard to protect any property operations downstream from the constructor. If this ever becomes a performance issue,
  3941. // we could have a dedicated cache for each built-in constructor, populate it and invalidate it as any other constructor cache.
  3942. AssertMsg(!emitBailOut, "Can't bail out on constructor cache guard for built-in constructors.");
  3943. skipNewScObj = true;
  3944. IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
  3945. this->m_lowererMD.CreateAssign(newObjDst, zeroOpnd, newObjInstr);
  3946. return true;
  3947. }
  3948. AssertMsg(ctorCache->type != nullptr, "Why did we hard-code a mismatched, invalidated or polymorphic constructor cache?");
  3949. if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()) || PHASE_TESTTRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()))
  3950. {
  3951. Js::FunctionBody* callerFunctionBody = newObjInstr->m_func->GetJnFunction();
  3952. const Js::JavascriptFunction* constructor = ctorCache->constructor;
  3953. Js::FunctionBody* constructorBody = constructor->GetFunctionInfo()->HasBody() ? constructor->GetFunctionInfo()->GetFunctionBody() : nullptr;
  3954. const wchar_t* constructorName = constructorBody != nullptr ? constructorBody->GetDisplayName() : L"<unknown>";
  3955. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  3956. wchar_t debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  3957. if (PHASE_TRACE(Js::FixedNewObjPhase, newObjInstr->m_func->GetJnFunction()))
  3958. {
  3959. Output::Print(L"FixedNewObj: function %s (%s): lowering fixed new script object for %s with %s ctor <unknown> (%s %s): type = %p, slots = %d, inlined slots = %d.\n",
  3960. callerFunctionBody->GetDisplayName(), callerFunctionBody->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
  3961. newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? L"inlined" : L"called",
  3962. constructorName, constructorBody ? constructorBody->GetDebugNumberSet(debugStringBuffer) : L"(null)",
  3963. ctorCache->type, ctorCache->slotCount, ctorCache->inlineSlotCount);
  3964. }
  3965. else
  3966. {
  3967. Output::Print(L"FixedNewObj: function %s (%s): lowering fixed new script object for %s with %s ctor <unknown> (%s %s): slots = %d, inlined slots = %d.\n",
  3968. callerFunctionBody->GetDisplayName(), callerFunctionBody->GetDebugNumberSet(debugStringBuffer2), Js::OpCodeUtil::GetOpCodeName(newObjInstr->m_opcode),
  3969. newObjInstr->m_opcode == Js::OpCode::NewScObjectNoCtor ? L"inlined" : L"called",
  3970. constructorName, debugStringBuffer, ctorCache->slotCount, ctorCache->inlineSlotCount);
  3971. }
  3972. Output::Flush();
  3973. }
  3974. // If the constructor has no return statements, we can safely return the object that was created here.
  3975. // No need to check what the constructor returned - it must be undefined.
  3976. returnNewScObj = ctorCache->ctorHasNoExplicitReturnValue;
  3977. Assert(Js::ConstructorCache::GetSizeOfGuardValue() == static_cast<size_t>(TySize[TyMachPtr]));
  3978. IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New(const_cast<void*>(ctorCache->runtimeCache->GetAddressOfGuardValue()), TyMachReg, this->m_func,
  3979. IR::AddrOpndKindDynamicGuardValueRef);
  3980. IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
  3981. InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, helperOrBailoutLabel, newObjInstr);
  3982. // If we are calling new on a class constructor, the contract is that we pass new.target as the 'this' argument.
  3983. // function is the constructor on which we called new - which is new.target.
  3984. Js::JavascriptFunction* ctor = newObjInstr->GetFixedFunction();
  3985. Js::FunctionInfo* functionInfo = Js::JavascriptOperators::GetConstructorFunctionInfo(ctor, this->m_func->GetScriptContext());
  3986. Assert(functionInfo);
  3987. if (functionInfo->IsClassConstructor())
  3988. {
  3989. // MOV newObjDst, function
  3990. this->m_lowererMD.CreateAssign(newObjDst, newObjInstr->GetSrc1(), newObjInstr);
  3991. }
  3992. else
  3993. {
  3994. const Js::DynamicType* newObjectType = ctorCache->type;
  3995. Assert(newObjectType->GetIsShared());
  3996. IR::AddrOpnd* typeSrc = IR::AddrOpnd::New(const_cast<void *>(reinterpret_cast<const void *>(newObjectType)), IR::AddrOpndKindDynamicType, m_func);
  3997. // For the next call:
  3998. // inlineSlotSize == Number of slots to allocate beyond the DynamicObject header
  3999. // slotSize - inlineSlotSize == Number of aux slots to allocate
  4000. int inlineSlotSize = ctorCache->inlineSlotCount;
  4001. int slotSize = ctorCache->slotCount;
  4002. if (newObjectType->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler())
  4003. {
  4004. Assert(inlineSlotSize >= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity());
  4005. Assert(inlineSlotSize == slotSize);
  4006. slotSize = inlineSlotSize -= Js::DynamicTypeHandler::GetObjectHeaderInlinableSlotCapacity();
  4007. }
  4008. GenerateDynamicObjectAlloc(newObjInstr, inlineSlotSize, slotSize, newObjDst, typeSrc);
  4009. }
  4010. // JMP $callCtor
  4011. IR::BranchInstr *callCtorBranch = IR::BranchInstr::New(Js::OpCode::Br, callCtorLabel, m_func);
  4012. newObjInstr->InsertBefore(callCtorBranch);
  4013. this->m_lowererMD.LowerUncondBranch(callCtorBranch);
  4014. return true;
  4015. }
  4016. void
  4017. Lowerer::GenerateRecyclerAllocAligned(IR::JnHelperMethod allocHelper, size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, bool inOpHelper)
  4018. {
  4019. IR::LabelInstr * allocDoneLabel = nullptr;
  4020. if (!PHASE_OFF(Js::JitAllocNewObjPhase, insertionPointInstr->m_func->GetJnFunction()) && HeapInfo::IsSmallObject(allocSize))
  4021. {
  4022. IR::LabelInstr * allocHelperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4023. allocDoneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, inOpHelper);
  4024. this->m_lowererMD.GenerateFastRecyclerAlloc(allocSize, newObjDst, insertionPointInstr, allocHelperLabel, allocDoneLabel);
  4025. // $allocHelper:
  4026. insertionPointInstr->InsertBefore(allocHelperLabel);
  4027. }
  4028. // call JavascriptOperators::AllocMemForScObject(allocSize, scriptContext->GetRecycler())
  4029. this->m_lowererMD.LoadHelperArgument(insertionPointInstr, this->LoadScriptContextValueOpnd(insertionPointInstr, ScriptContextValue::ScriptContextRecycler));
  4030. this->m_lowererMD.LoadHelperArgument(insertionPointInstr, IR::IntConstOpnd::New((int32)allocSize, TyUint32, m_func, true));
  4031. IR::Instr *newObjCall = IR::Instr::New(Js::OpCode::Call, newObjDst, IR::HelperCallOpnd::New(allocHelper, m_func), m_func);
  4032. insertionPointInstr->InsertBefore(newObjCall);
  4033. this->m_lowererMD.LowerCall(newObjCall, 0);
  4034. if (allocDoneLabel != nullptr)
  4035. {
  4036. // $allocDone:
  4037. insertionPointInstr->InsertBefore(allocDoneLabel);
  4038. }
  4039. }
  4040. IR::Instr *
  4041. Lowerer::LowerGetNewScObject(IR::Instr *instr)
  4042. {
  4043. Assert(instr);
  4044. Assert(instr->m_opcode == Js::OpCode::GetNewScObject);
  4045. Assert(instr->GetDst());
  4046. Assert(instr->GetSrc1());
  4047. Assert(instr->GetSrc2());
  4048. const auto instrPrev = instr->m_prev;
  4049. Assert(instrPrev);
  4050. LowerGetNewScObjectCommon(
  4051. instr->GetDst()->AsRegOpnd(),
  4052. instr->GetSrc1()->AsRegOpnd(),
  4053. instr->GetSrc2()->AsRegOpnd(),
  4054. instr);
  4055. instr->Remove();
  4056. return instrPrev;
  4057. }
// Emits the code that selects the result of a `new` expression: if the
// constructor returned a JS object, that object is the result; otherwise the
// originally allocated object (newObjOpnd) is. All code is inserted before
// insertBeforeInstr. resultObjOpnd may alias either input operand; redundant
// moves are elided.
void
Lowerer::LowerGetNewScObjectCommon(
    IR::RegOpnd *const resultObjOpnd,
    IR::RegOpnd *const constructorReturnOpnd,
    IR::RegOpnd *const newObjOpnd,
    IR::Instr *insertBeforeInstr)
{
    Assert(resultObjOpnd);
    Assert(constructorReturnOpnd);
    Assert(newObjOpnd);
    Assert(insertBeforeInstr);

    // (newObjOpnd == 'this' value passed to constructor)
    //
    // if (!IsJsObject(constructorReturnOpnd))
    //     goto notObjectLabel
    // newObjOpnd = constructorReturnOpnd
    // notObjectLabel:
    // resultObjOpnd = newObjOpnd

    if(!constructorReturnOpnd->IsEqual(newObjOpnd))
    {
        // Need to check whether the constructor returned an object

        IR::LabelInstr *notObjectLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        Assert(insertBeforeInstr->m_prev);

        IR::LabelInstr *const doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        insertBeforeInstr->InsertBefore(doneLabel);
        // Insert the rest of the emitted code above doneLabel.
        insertBeforeInstr = doneLabel;

#if defined(_M_ARM32_OR_ARM64)
        // On ARM the is-object test is done through the Op_IsObject helper
        // (returns a TyInt32 truth value) instead of an inline type check.
        m_lowererMD.LoadHelperArgument(insertBeforeInstr, constructorReturnOpnd);
        IR::Opnd * targetOpnd = IR::RegOpnd::New(StackSym::New(TyInt32,m_func), TyInt32, m_func);
        IR::Instr * callIsObjectInstr = IR::Instr::New(Js::OpCode::Call, targetOpnd, m_func);
        insertBeforeInstr->InsertBefore(callIsObjectInstr);
        this->m_lowererMD.ChangeToHelperCall(callIsObjectInstr, IR::HelperOp_IsObject);
        InsertTestBranch( targetOpnd, targetOpnd, Js::OpCode::BrEq_A, notObjectLabel,insertBeforeInstr);
#else
        m_lowererMD.GenerateIsJsObjectTest(constructorReturnOpnd, insertBeforeInstr, notObjectLabel);
#endif

        // Value returned by constructor is an object (use constructorReturnOpnd)
        if(!resultObjOpnd->IsEqual(constructorReturnOpnd))
        {
            this->m_lowererMD.CreateAssign(resultObjOpnd, constructorReturnOpnd, insertBeforeInstr);
        }
        insertBeforeInstr->InsertBefore(
            m_lowererMD.LowerUncondBranch(IR::BranchInstr::New(Js::OpCode::Br, doneLabel, m_func)));

        // Value returned by constructor is not an object (use newObjOpnd)
        insertBeforeInstr->InsertBefore(notObjectLabel);
    }
    if(!resultObjOpnd->IsEqual(newObjOpnd))
    {
        this->m_lowererMD.CreateAssign(resultObjOpnd, newObjOpnd, insertBeforeInstr);
    }

    // fall through to insertBeforeInstr or doneLabel
}
  4110. ///----------------------------------------------------------------------------
  4111. ///
  4112. /// Lowerer::LowerUpdateNewScObjectCache
  4113. ///
  4114. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerUpdateNewScObjectCache(IR::Instr * insertInstr, IR::Opnd *dst, IR::Opnd *src1, const bool isCtorFunction)
{
    // Emits, before 'insertInstr', an inline sequence that calls the
    // UpdateNewScObjectCache helper only when the constructor's cache is
    // flagged 'updateAfterCtor'. 'src1' is the constructor, 'dst' the newly
    // constructed object. Returns 'insertInstr'.
    //
    // if (!isCtorFunction)
    // {
    //    MOV r1, [src1 + offset(type)]       -- check base TypeIds_Function
    //    CMP [r1 + offset(typeId)], TypeIds_Function
    // }
    // JNE $fallThru
    // MOV r2, [src1 + offset(constructorCache)]
    // MOV r3, [r2 + offset(updateAfterCtor)]
    // TEST r3, r3                            -- check if updateAfterCtor is 0
    // JEQ $fallThru
    // CALL UpdateNewScObjectCache(src1, dst, scriptContext)
    // $fallThru:
    IR::LabelInstr *labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    // The checks below dereference src1, so it must live in a register.
    if (!src1->IsRegOpnd())
    {
        IR::RegOpnd *srcRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        LowererMD::CreateAssign(srcRegOpnd, src1, insertInstr);
        src1 = srcRegOpnd;
    }

    // Check if constructor is a function if we don't already know it.
    if (!isCtorFunction)
    {
        // MOV r1, [src1 + offset(type)]      -- check base TypeIds_Function
        IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        LowererMD::CreateAssign(r1, indirOpnd, insertInstr);

        // CMP [r1 + offset(typeId)], TypeIds_Function
        // JNE $fallThru
        indirOpnd = IR::IndirOpnd::New(r1, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
        IR::IntConstOpnd *intOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, this->m_func, true);
        InsertCompareBranch(indirOpnd, intOpnd, Js::OpCode::BrNeq_A, labelFallThru, insertInstr);
    }

    // Every function has a constructor cache, even if only the default blank one.
    // r2 = MOV JavascriptFunction->constructorCache
    IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
    IR::IndirOpnd *opndIndir = IR::IndirOpnd::New(src1->AsRegOpnd(), Js::JavascriptFunction::GetOffsetOfConstructorCache(), TyMachReg, this->m_func);
    IR::Instr *instr = LowererMD::CreateAssign(r2, opndIndir, insertInstr);

    // r3 = constructorCache->updateAfterCtor
    IR::RegOpnd *r3 = IR::RegOpnd::New(TyInt8, this->m_func);
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(r2, Js::ConstructorCache::GetOffsetOfUpdateAfterCtor(), TyUint8, this->m_func);
    instr = LowererMD::CreateAssign(r3, indirOpnd, insertInstr);

    // TEST r3, r3                            -- check if updateAfterCtor is 0
    // JEQ $fallThru
    InsertTestBranch(r3, r3, Js::OpCode::BrEq_A, labelFallThru, insertInstr);

    // r2 = UpdateNewScObjectCache(src1, dst, scriptContext)
    // Note: helper arguments are pushed in reverse order of the C signature.
    insertInstr->InsertBefore(IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true)); // helper label for uncommon path
    IR::HelperCallOpnd * opndHelper = IR::HelperCallOpnd::New(IR::HelperUpdateNewScObjectCache, m_func);
    LoadScriptContext(insertInstr);
    m_lowererMD.LoadHelperArgument(insertInstr, dst);
    m_lowererMD.LoadHelperArgument(insertInstr, src1);
    instr = IR::Instr::New(Js::OpCode::Call, m_func);
    instr->SetSrc1(opndHelper);
    insertInstr->InsertBefore(instr);
    m_lowererMD.LowerCall(instr, 0);

    // $fallThru:
    insertInstr->InsertBefore(labelFallThru);
    return insertInstr;
}
IR::Instr *
Lowerer::LowerNewScObjArray(IR::Instr *newObjInstr)
{
    // Lowers a profiled 'new Array(...)' with arguments. Emits an optional
    // fast path for small constant lengths, then a call to the profiled
    // ScrArr_ProfiledNewInstance helper, followed by a post-call check that
    // tags native arrays with their call-site index and weak function ref.
    // Returns the first instruction of the lowered region.
    IR::Instr* startCallInstr;
    if (newObjInstr->HasEmptyArgOutChain(&startCallInstr))
    {
        // No actual arguments: delegate to the no-arg variant.
        newObjInstr->FreeSrc2();
        return LowerNewScObjArrayNoArg(newObjInstr);
    }

    IR::Instr* startMarkerInstr = nullptr;
    IR::Opnd *targetOpnd = newObjInstr->GetSrc1();
    Func *func = newObjInstr->m_func;

    if (!targetOpnd->IsAddrOpnd())
    {
        if (!newObjInstr->HasBailOutInfo())
        {
            return this->LowerNewScObject(newObjInstr, true, true);
        }

        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

        // For whatever reason, we couldn't do a fixed function check on the call target.
        // Generate a runtime check on the target.
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray);
        IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, func);
        InsertCompareBranch(
            targetOpnd,
            LoadLibraryValueOpnd(newObjInstr, LibraryValue::ValueArrayConstructor),
            Js::OpCode::BrEq_A,
            true,
            labelSkipBailOut,
            newObjInstr);

        // Split the instruction: the original becomes the bailout, and a fresh
        // profiled clone (inserted after, behind labelSkipBailOut) carries on.
        IR::ProfiledInstr *instrNew = IR::ProfiledInstr::New(newObjInstr->m_opcode, newObjInstr->UnlinkDst(), newObjInstr->UnlinkSrc1(), newObjInstr->UnlinkSrc2(), func);
        instrNew->u.profileId = newObjInstr->AsProfiledInstr()->u.profileId;
        newObjInstr->InsertAfter(instrNew);
        newObjInstr->m_opcode = Js::OpCode::BailOut;
        GenerateBailOut(newObjInstr);
        instrNew->InsertBefore(labelSkipBailOut);
        newObjInstr = instrNew;
    }
    else
    {
        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
    }

    RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = nullptr;
    Js::ArrayCallSiteInfo *arrayInfo = nullptr;
    Assert(newObjInstr->IsProfiledInstr());

    IR::RegOpnd *resultObjOpnd = newObjInstr->GetDst()->AsRegOpnd();
    IR::Instr * insertInstr = newObjInstr->m_next;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId);
    // We may not have profileId if we converted a NewScObject to NewScObjArray
    if (profileId != Js::Constants::NoProfileId)
    {
        Js::FunctionBody *functionBody = func->GetJnFunction();
        arrayInfo = functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, profileId);
        Assert(arrayInfo);
        weakFuncRef = func->GetWeakFuncRef();
        Assert(weakFuncRef);
    }
    // NOTE(review): if profileId is NoProfileId, arrayInfo/weakFuncRef stay null
    // and are passed through to the fast path and the tagging stores below.

    IR::Opnd *opndSrc1 = newObjInstr->UnlinkSrc1();
    if (opndSrc1->IsImmediateOpnd())
    {
        // Small constant lengths (0..8) get an inline allocation fast path.
        intptr_t length = opndSrc1->GetImmediateValue();
        if (length >= 0 && length <= 8)
        {
            GenerateProfiledNewScObjArrayFastPath(newObjInstr, arrayInfo, weakFuncRef, (uint32)length);
        }
    }

    // Pass the array call-site info as the implicit first argument, then call
    // the profiled helper directly on the (known) Array constructor.
    IR::Opnd *profileOpnd = IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, func);
    this->m_lowererMD.LoadNewScObjFirstArg(newObjInstr, profileOpnd);

    IR::JnHelperMethod helperMethod = IR::HelperScrArr_ProfiledNewInstance;

    newObjInstr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
    newObjInstr = GenerateDirectCall(newObjInstr, targetOpnd, Js::CallFlags_New);

    // If the result is a (non-native) JavascriptArray, skip the native-array tagging.
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertCompareBranch(
        IR::IndirOpnd::New(resultObjOpnd, 0, TyMachPtr, func),
        LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptArray),
        Js::OpCode::BrEq_A,
        true,
        labelDone,
        insertInstr);

    // We know we have a native array, so store the weak ref and call site index.
    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(resultObjOpnd, Js::JavascriptNativeArray::GetOffsetOfArrayCallSiteIndex(), TyUint16, func),
        IR::Opnd::CreateProfileIdOpnd(profileId, func),
        insertInstr);

    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(resultObjOpnd, Js::JavascriptNativeArray::GetOffsetOfWeakFuncRef(), TyMachReg, func),
        IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, func),
        insertInstr);

    insertInstr->InsertBefore(labelDone);

    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
IR::Instr *
Lowerer::LowerNewScObjArrayNoArg(IR::Instr *newObjInstr)
{
    // Lowers a profiled 'new Array()' with no arguments: emits a length-0
    // allocation fast path and a fallback call to the
    // ScrArr_ProfiledNewInstanceNoArg helper. Returns the first instruction of
    // the lowered region.
    IR::Opnd *targetOpnd = newObjInstr->GetSrc1();
    Func *func = newObjInstr->m_func;

    IR::Instr* startMarkerInstr = nullptr;

    if (!targetOpnd->IsAddrOpnd())
    {
        if (!newObjInstr->HasBailOutInfo())
        {
            return this->LowerNewScObject(newObjInstr, true, false);
        }

        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);

        // For whatever reason, we couldn't do a fixed function check on the call target.
        // Generate a runtime check on the target.
        Assert(newObjInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray);
        IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, func);
        InsertCompareBranch(
            targetOpnd,
            LoadLibraryValueOpnd(newObjInstr, LibraryValue::ValueArrayConstructor),
            Js::OpCode::BrEq_A,
            true,
            labelSkipBailOut,
            newObjInstr);

        // Split the instruction: the original becomes the bailout, a profiled
        // clone (behind labelSkipBailOut) carries on with the lowering below.
        IR::ProfiledInstr *instrNew = IR::ProfiledInstr::New(newObjInstr->m_opcode, newObjInstr->UnlinkDst(), newObjInstr->UnlinkSrc1(), func);
        instrNew->u.profileId = newObjInstr->AsProfiledInstr()->u.profileId;
        newObjInstr->InsertAfter(instrNew);
        newObjInstr->m_opcode = Js::OpCode::BailOut;
        GenerateBailOut(newObjInstr);
        instrNew->InsertBefore(labelSkipBailOut);
        newObjInstr = instrNew;
    }
    else
    {
        // Insert a temporary label before the instruction we're about to lower, so that we can return
        // the first instruction above that needs to be lowered after we're done - regardless of argument
        // list, StartCall, etc.
        startMarkerInstr = InsertLoweredRegionStartMarker(newObjInstr);
    }

    Assert(newObjInstr->IsProfiledInstr());

    RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = nullptr;
    Js::ArrayCallSiteInfo *arrayInfo = nullptr;
    Js::ProfileId profileId = static_cast<Js::ProfileId>(newObjInstr->AsProfiledInstr()->u.profileId);
    if (profileId != Js::Constants::NoProfileId)
    {
        Js::FunctionBody *functionBody = func->GetJnFunction();
        arrayInfo = functionBody->GetAnyDynamicProfileInfo()->GetArrayCallSiteInfo(functionBody, profileId);
        Assert(arrayInfo);
        weakFuncRef = func->GetWeakFuncRef();
        Assert(weakFuncRef);
    }
    // NOTE(review): when profileId is NoProfileId, arrayInfo/weakFuncRef remain
    // null and are passed as null addresses to the fast path and the helper.

    // Inline fast path for a zero-length array allocation.
    GenerateProfiledNewScObjArrayFastPath(newObjInstr, arrayInfo, weakFuncRef, 0);

    // Helper arguments are pushed in reverse order:
    // ScrArr_ProfiledNewInstanceNoArg(function, scriptContext, arrayInfo, weakFuncRef)
    m_lowererMD.LoadHelperArgument(newObjInstr, IR::AddrOpnd::New(weakFuncRef, IR::AddrOpndKindDynamicFunctionBodyWeakRef, func));
    m_lowererMD.LoadHelperArgument(newObjInstr, IR::AddrOpnd::New(arrayInfo, IR::AddrOpndKindDynamicArrayCallSiteInfo, func));
    LoadScriptContext(newObjInstr);
    m_lowererMD.LoadHelperArgument(newObjInstr, targetOpnd);

    newObjInstr->UnlinkSrc1();
    newObjInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperScrArr_ProfiledNewInstanceNoArg, func));
    m_lowererMD.LowerCall(newObjInstr, 0);

    return RemoveLoweredRegionStartMarker(startMarkerInstr);
}
  4337. ///----------------------------------------------------------------------------
  4338. ///
  4339. /// Lowerer::LowerPrologEpilog
  4340. ///
  4341. ///----------------------------------------------------------------------------
  4342. void
  4343. Lowerer::LowerPrologEpilog()
  4344. {
  4345. if (m_func->GetJnFunction()->IsGenerator())
  4346. {
  4347. LowerGeneratorResumeJumpTable();
  4348. }
  4349. IR::Instr * instr;
  4350. instr = m_func->m_headInstr;
  4351. AssertMsg(instr->IsEntryInstr(), "First instr isn't an EntryInstr...");
  4352. m_lowererMD.LowerEntryInstr(instr->AsEntryInstr());
  4353. instr = m_func->m_exitInstr;
  4354. AssertMsg(instr->IsExitInstr(), "Last instr isn't an ExitInstr...");
  4355. m_lowererMD.LowerExitInstr(instr->AsExitInstr());
  4356. }
  4357. void
  4358. Lowerer::LowerPrologEpilogAsmJs()
  4359. {
  4360. IR::Instr * instr;
  4361. instr = m_func->m_headInstr;
  4362. AssertMsg(instr->IsEntryInstr(), "First instr isn't an EntryInstr...");
  4363. m_lowererMD.LowerEntryInstrAsmJs(instr->AsEntryInstr());
  4364. instr = m_func->m_exitInstr;
  4365. AssertMsg(instr->IsExitInstr(), "Last instr isn't an ExitInstr...");
  4366. m_lowererMD.LowerExitInstrAsmJs(instr->AsExitInstr());
  4367. }
void
Lowerer::LowerGeneratorResumeJumpTable()
{
    // Replaces the GeneratorResumeJumpTable pseudo-instruction with a chain of
    // compare-and-branch instructions, one per recorded yield offset, that
    // dispatch the resuming generator to the right resume label.
    Assert(m_func->GetJnFunction()->IsGenerator());

    IR::Instr * jumpTableInstr = m_func->m_headInstr;
    AssertMsg(jumpTableInstr->IsEntryInstr(), "First instr isn't an EntryInstr...");

    // Hope to do away with this linked list scan by moving this lowering to a post-prolog-epilog/pre-encoder phase that is common to all architectures (currently such phase is only available on amd64/arm)
    while (jumpTableInstr->m_opcode != Js::OpCode::GeneratorResumeJumpTable)
    {
        jumpTableInstr = jumpTableInstr->m_next;
    }

    // srcOpnd holds the bytecode offset at which the generator yielded.
    IR::Opnd * srcOpnd = jumpTableInstr->UnlinkSrc1();

    m_func->MapYieldOffsetResumeLabels([&](int i, const YieldOffsetResumeLabel& yorl)
    {
        uint32 offset = yorl.First();
        IR::LabelInstr * label = yorl.Second();

        if (label != nullptr && label->m_hasNonBranchRef)
        {
            // Also fix up the bailout at the label with the jump to epilog that was not emitted in GenerateBailOut()
            Assert(label->m_prev->HasBailOutInfo());
            GenerateJumpToEpilogForBailOut(label->m_prev->GetBailOutInfo(), label->m_prev);
        }
        else if (label == nullptr)
        {
            // No resume label recorded for this offset: fall back to the
            // shared no-save bailout label.
            label = m_func->m_bailOutNoSaveLabel;
        }

        // For each offset label pair, insert a compare of the offset and branch if equal to the label
        InsertCompareBranch(srcOpnd, IR::IntConstOpnd::New(offset, TyUint32, m_func), Js::OpCode::BrSrEq_A, label, jumpTableInstr);
    });

    // The pseudo-instruction itself is no longer needed.
    jumpTableInstr->Remove();
}
  4399. void
  4400. Lowerer::DoInterruptProbes()
  4401. {
  4402. this->m_func->SetHasInstrNumber(true);
  4403. uint instrCount = 1;
  4404. FOREACH_INSTR_IN_FUNC(instr, this->m_func)
  4405. {
  4406. instr->SetNumber(instrCount++);
  4407. if (instr->IsLabelInstr())
  4408. {
  4409. IR::LabelInstr *labelInstr = instr->AsLabelInstr();
  4410. if (labelInstr->m_isLoopTop)
  4411. {
  4412. // For every loop top label, insert the following:
  4413. // cmp sp, ThreadContext::stackLimitForCurrentThread
  4414. // bgt $continue
  4415. // $helper:
  4416. // call JavascriptOperators::ScriptAbort
  4417. // b $exit
  4418. // $continue:
  4419. IR::LabelInstr *newLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4420. labelInstr->InsertAfter(newLabel);
  4421. this->InsertOneLoopProbe(newLabel, newLabel);
  4422. }
  4423. }
  4424. }
  4425. NEXT_INSTR_IN_FUNC;
  4426. }
  4427. // Insert an interrupt probe at each loop back branch. (Currently uncalled, since we're inserting
  4428. // probes at loop tops instead of back edges, but kept around because it may prove useful.)
uint
Lowerer::DoLoopProbeAndNumber(IR::BranchInstr *branchInstr)
{
    // Inserts an interrupt probe at a loop back-branch, numbers the inserted
    // instructions, and returns the next available instruction number.
    IR::LabelInstr *labelInstr = branchInstr->GetTarget();
    if (labelInstr == nullptr || labelInstr->GetNumber() == 0)
    {
        // Forward branch (possibly an indirect jump after try-catch-finally); nothing to do.
        return branchInstr->GetNumber() + 1;
    }
    Assert(labelInstr->m_isLoopTop);

    // Insert a stack probe at this branch. Number all the instructions we insert
    // and return the next instruction number.
    uint number = branchInstr->GetNumber();
    // Remember the neighbors so we can walk the newly inserted range afterward.
    IR::Instr *instrPrev = branchInstr->m_prev;
    IR::Instr *instrNext = branchInstr->m_next;
    if (branchInstr->IsUnconditional())
    {
        // B $loop   ==>
        //             cmp [], 0
        //             beq $loop
        //          $helper:
        //             call abort
        //             b $exit
        this->InsertOneLoopProbe(branchInstr, labelInstr);
        branchInstr->Remove();
    }
    else
    {
        // Bcc $loop ==>
        //             Binv $notloop
        //             cmp [], 0
        //             beq $loop
        //          $helper:
        //             call abort
        //             b $exit
        //          $notloop:
        IR::LabelInstr *loopExitLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        branchInstr->SetTarget(loopExitLabel);
        LowererMD::InvertBranch(branchInstr);
        branchInstr->InsertAfter(loopExitLabel);
        this->InsertOneLoopProbe(loopExitLabel, labelInstr);
    }

    // Number everything that was inserted between the original neighbors.
    FOREACH_INSTR_IN_RANGE(instr, instrPrev->m_next, instrNext->m_prev)
    {
        instr->SetNumber(number++);
    }
    NEXT_INSTR_IN_RANGE;

    return number;
}
  4478. void
  4479. Lowerer::InsertOneLoopProbe(IR::Instr *insertInstr, IR::LabelInstr *loopLabel)
  4480. {
  4481. // Insert one interrupt probe at the given instruction. Probe the stack and call the abort helper
  4482. // directly if the probe fails.
  4483. IR::Opnd *memRefOpnd = IR::MemRefOpnd::New(
  4484. this->m_func->GetScriptContext()->GetThreadContext()->GetAddressOfStackLimitForCurrentThread(),
  4485. TyMachReg, this->m_func);
  4486. IR::RegOpnd *regStackPointer = IR::RegOpnd::New(
  4487. NULL, this->m_lowererMD.GetRegStackPointer(), TyMachReg, this->m_func);
  4488. InsertCompareBranch(regStackPointer, memRefOpnd, Js::OpCode::BrGt_A, loopLabel, insertInstr);
  4489. IR::LabelInstr *helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4490. insertInstr->InsertBefore(helperLabel);
  4491. IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(IR::HelperScriptAbort, this->m_func);
  4492. IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
  4493. instr->SetSrc1(helperOpnd);
  4494. insertInstr->InsertBefore(instr);
  4495. this->m_lowererMD.LowerCall(instr, 0);
  4496. // Jump to the exit after the helper call. This instruction will never be reached, but the jump
  4497. // indicates that nothing is live after the call (to avoid useless spills in code that will
  4498. // be executed).
  4499. instr = this->m_func->m_exitInstr->GetPrevRealInstrOrLabel();
  4500. if (instr->IsLabelInstr())
  4501. {
  4502. helperLabel = instr->AsLabelInstr();
  4503. }
  4504. else
  4505. {
  4506. helperLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4507. this->m_func->m_exitInstr->InsertBefore(helperLabel);
  4508. }
  4509. instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, helperLabel, this->m_func);
  4510. insertInstr->InsertBefore(instr);
  4511. }
  4512. ///----------------------------------------------------------------------------
  4513. ///
  4514. /// Lowerer::LoadPropertySymAsArgument
  4515. ///
  4516. /// Generate code to pass a fieldSym as argument to a helper.
  4517. ///----------------------------------------------------------------------------
  4518. IR::Instr *
  4519. Lowerer::LoadPropertySymAsArgument(IR::Instr *instr, IR::Opnd *fieldSrc)
  4520. {
  4521. IR::Instr * instrPrev;
  4522. AssertMsg(fieldSrc->IsSymOpnd() && fieldSrc->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as src of LdFld");
  4523. IR::SymOpnd *symOpnd = fieldSrc->AsSymOpnd();
  4524. PropertySym * fieldSym = symOpnd->m_sym->AsPropertySym();
  4525. IR::IntConstOpnd * indexOpnd = IR::IntConstOpnd::New(fieldSym->m_propertyId, TyInt32, m_func, /*dontEncode*/true);
  4526. instrPrev = m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  4527. IR::RegOpnd * instanceOpnd = symOpnd->CreatePropertyOwnerOpnd(m_func);
  4528. m_lowererMD.LoadHelperArgument(instr, instanceOpnd);
  4529. return instrPrev;
  4530. }
  4531. ///----------------------------------------------------------------------------
  4532. ///
  4533. /// Lowerer::LoadFunctionBodyAsArgument
  4534. ///
  4535. /// Special case: the "property ID" is a key into the ScriptContext's FunctionBody map
  4536. ///----------------------------------------------------------------------------
  4537. IR::Instr *
  4538. Lowerer::LoadFunctionBodyAsArgument(IR::Instr *instr, IR::IntConstOpnd * functionBodySlotOpnd, IR::RegOpnd * envOpnd)
  4539. {
  4540. IR::Instr * instrPrev;
  4541. // We need to pass in the function reference, we can't embed the pointer to the function proxy here.
  4542. // The function proxy may be deferred parsed/serialize, and may 'progress' to a real function body after it is undeferred
  4543. // At which point the deferred function proxy may be collect.
  4544. // Just pass it the address where we will find the function proxy/body
  4545. Js::FunctionProxyPtrPtr proxyRef = instr->m_func->GetJnFunction()->GetNestedFuncReference((uint)functionBodySlotOpnd->GetValue());
  4546. AssertMsg(proxyRef, "Expected FunctionProxy for index of NewScFunc or NewScGenFunc opnd");
  4547. AssertMsg(*proxyRef, "Expected FunctionProxy for index of NewScFunc or NewScGenFunc opnd");
  4548. IR::AddrOpnd * indexOpnd = IR::AddrOpnd::New((Js::Var)proxyRef, IR::AddrOpndKindDynamicMisc, m_func);
  4549. instrPrev = m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  4550. m_lowererMD.LoadHelperArgument(instr, envOpnd);
  4551. return instrPrev;
  4552. }
IR::Instr *
Lowerer::LowerProfiledLdFld(IR::JitProfilingInstr *ldFldInstr)
{
    // Lowers a field-load under dynamic JIT profiling: loads the profiling
    // helper's arguments and converts the instruction into a call to the
    // opcode-specific profiling helper. Returns the instruction preceding the
    // lowered region.
    const auto instrPrev = ldFldInstr->m_prev;

    auto src = ldFldInstr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");

    IR::JnHelperMethod helper;
    switch (ldFldInstr->m_opcode)
    {
    // All plain field loads share the ldFldCommon argument setup below; only
    // the helper differs.
    case Js::OpCode::LdFld:
        helper = IR::HelperProfiledLdFld;
        goto ldFldCommon;
    case Js::OpCode::LdRootFld:
        helper = IR::HelperProfiledLdRootFld;
        goto ldFldCommon;
    case Js::OpCode::LdMethodFld:
        helper = IR::HelperProfiledLdMethodFld;
        goto ldFldCommon;
    case Js::OpCode::LdRootMethodFld:
        helper = IR::HelperProfiledLdRootMethodFld;
        goto ldFldCommon;
    case Js::OpCode::LdFldForCallApplyTarget:
        helper = IR::HelperProfiledLdFld_CallApplyTarget;
        goto ldFldCommon;
    case Js::OpCode::LdFldForTypeOf:
        helper = IR::HelperProfiledLdFldForTypeOf;
        goto ldFldCommon;
    case Js::OpCode::LdRootFldForTypeOf:
        helper = IR::HelperProfiledLdRootFldForTypeOf;
        goto ldFldCommon;

    ldFldCommon:
    {
        Assert(ldFldInstr->profileId == Js::Constants::NoProfileId);

        /*
            Var ProfilingHelpers::ProfiledLdFld_Jit(
                const Var instance,
                const PropertyId propertyId,
                const InlineCacheIndex inlineCacheIndex,
                void *const framePointer)
        */
        // Arguments are loaded in reverse order of the signature above.
        m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
        m_lowererMD.LoadHelperArgument(
            ldFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
        LoadPropertySymAsArgument(ldFldInstr, src);
        break;
    }

    case Js::OpCode::LdSuperFld:
    {
        Assert(ldFldInstr->profileId == Js::Constants::NoProfileId);
        IR::Opnd * src2 = nullptr;

        /*
            Var ProfilingHelpers::ProfiledLdSuperFld_Jit(
                const Var instance,
                const PropertyId propertyId,
                const InlineCacheIndex inlineCacheIndex,
                void *const framePointer,
                const Var thisInstance)
        */
        // Arguments are loaded in reverse order of the signature above;
        // src2 is the 'this' instance for the super load.
        src2 = ldFldInstr->UnlinkSrc2();
        m_lowererMD.LoadHelperArgument(ldFldInstr, src2);
        m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
        m_lowererMD.LoadHelperArgument(
            ldFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
        LoadPropertySymAsArgument(ldFldInstr, src);
        helper = IR::HelperProfiledLdSuperFld;
        break;
    }

    case Js::OpCode::LdLen_A:
        // If we want to profile this call, then push some extra args and call the profiling version
        m_lowererMD.LoadHelperArgument(ldFldInstr, IR::Opnd::CreateProfileIdOpnd(ldFldInstr->profileId, m_func));
        m_lowererMD.LoadHelperArgument(ldFldInstr, src->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func));
        m_lowererMD.LoadHelperArgument(ldFldInstr, CreateFunctionBodyOpnd(ldFldInstr->m_func));
        helper = IR::HelperSimpleProfiledLdLen;
        break;

    default:
        Assert(false);
        __assume(false);
    }

    // Turn the original instruction into the helper call itself.
    ldFldInstr->SetSrc1(IR::HelperCallOpnd::New(helper, m_func));
    m_lowererMD.LowerCall(ldFldInstr, 0);

    return instrPrev;
}
  4637. ///----------------------------------------------------------------------------
  4638. ///
  4639. /// Lowerer::LowerLdFld
  4640. ///
  4641. /// Lower an instruction (LdFld, ScopedLdFld) that takes a property
  4642. /// reference as a source and puts a result in a register.
  4643. ///
  4644. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerLdFld(
    IR::Instr * ldFldInstr,
    IR::JnHelperMethod helperMethod,
    IR::JnHelperMethod polymorphicHelperMethod,
    bool useInlineCache,
    IR::LabelInstr *labelBailOut,
    bool isHelper)
{
    // Lowers a field load (LdFld, ScopedLdFld, LdSuperFld, ...) into a call to
    // 'helperMethod' (or 'polymorphicHelperMethod' when a runtime polymorphic
    // inline cache is present). Returns the instruction preceding the lowered
    // region.
    if (ldFldInstr->IsJitProfilingInstr())
    {
        // If we want to profile then do something completely different
        return this->LowerProfiledLdFld(ldFldInstr->AsJitProfilingInstr());
    }

    IR::Opnd *src;
    IR::Instr *instrPrev = ldFldInstr->m_prev;

    src = ldFldInstr->UnlinkSrc1();

    if (ldFldInstr->m_opcode == Js::OpCode::LdSuperFld)
    {
        // LdSuperFld carries the 'this' instance in src2; it becomes the last
        // helper argument (arguments are pushed in reverse order).
        IR::Opnd * src2 = nullptr;
        src2 = ldFldInstr->UnlinkSrc2();
        m_lowererMD.LoadHelperArgument(ldFldInstr, src2);
    }

    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");

    if (useInlineCache)
    {
        IR::Opnd * inlineCacheOpnd;
        AssertMsg(src->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
        if (src->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache && polymorphicHelperMethod != helperMethod)
        {
            // A runtime polymorphic inline cache exists: switch to the
            // polymorphic helper and pass that cache instead.
            Js::PolymorphicInlineCache * polymorphicInlineCache = src->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache;
            helperMethod = polymorphicHelperMethod;
            inlineCacheOpnd = IR::AddrOpnd::New(polymorphicInlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func);
        }
        else
        {
            // Need to load runtime inline cache opnd first before loading any helper argument
            // because LoadRuntimeInlineCacheOpnd may create labels marked as helper,
            // and cause op helper register push/pop save in x86, messing up with any helper arguments that is already pushed
            inlineCacheOpnd = this->LoadRuntimeInlineCacheOpnd(ldFldInstr, src->AsPropertySymOpnd(), isHelper);
        }
        // Push (in reverse order): instance+propertyId, cache index, cache, function body.
        this->LoadPropertySymAsArgument(ldFldInstr, src);
        this->m_lowererMD.LoadHelperArgument(
            ldFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
        this->m_lowererMD.LoadHelperArgument(ldFldInstr, inlineCacheOpnd);
        this->m_lowererMD.LoadHelperArgument(ldFldInstr, LoadFunctionBodyOpnd(ldFldInstr));
    }
    else
    {
        // Non-cached path: the helper only needs the script context plus the
        // instance/propertyId pair.
        LoadScriptContext(ldFldInstr);
        this->LoadPropertySymAsArgument(ldFldInstr, src);
    }

    // Do we need to reload the type and slot array after the helper returns?
    // (We do if there's a propertySymOpnd downstream that needs it, i.e., the type is not dead.)
    IR::RegOpnd *opndBase = src->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func);
    m_lowererMD.ChangeToHelperCall(ldFldInstr, helperMethod, labelBailOut, opndBase, src->AsSymOpnd()->IsPropertySymOpnd() ? src->AsSymOpnd()->AsPropertySymOpnd() : nullptr, isHelper);

    return instrPrev;
}
  4704. bool
  4705. Lowerer::GenerateLdFldWithCachedType(IR::Instr * instrLdFld, bool* continueAsHelperOut, IR::LabelInstr** labelHelperOut, IR::RegOpnd** typeOpndOut)
  4706. {
  4707. IR::Instr *instr;
  4708. IR::Opnd *opnd;
  4709. IR::LabelInstr *labelObjCheckFailed = nullptr;
  4710. IR::LabelInstr *labelTypeCheckFailed = nullptr;
  4711. IR::LabelInstr *labelDone = nullptr;
  4712. Assert(continueAsHelperOut != nullptr);
  4713. *continueAsHelperOut = false;
  4714. Assert(labelHelperOut != nullptr);
  4715. *labelHelperOut = nullptr;
  4716. Assert(typeOpndOut != nullptr);
  4717. *typeOpndOut = nullptr;
  4718. Assert(instrLdFld->GetSrc1()->IsSymOpnd());
  4719. if (!instrLdFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
  4720. {
  4721. return false;
  4722. }
  4723. IR::PropertySymOpnd *propertySymOpnd = instrLdFld->GetSrc1()->AsPropertySymOpnd();
  4724. if (!propertySymOpnd->IsTypeCheckSeqCandidate())
  4725. {
  4726. return false;
  4727. }
  4728. AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
  4729. if (!propertySymOpnd->IsTypeCheckSeqParticipant() && !propertySymOpnd->NeedsLocalTypeCheck())
  4730. {
  4731. return false;
  4732. }
  4733. Assert(!propertySymOpnd->NeedsTypeCheckAndBailOut() || (instrLdFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrLdFld->GetBailOutKind())));
  4734. // In the backwards pass we only add guarded property operations to instructions that are not already
  4735. // protected by an upstream type check.
  4736. Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);
  4737. PHASE_PRINT_TESTTRACE(
  4738. Js::ObjTypeSpecPhase,
  4739. this->m_func,
  4740. L"Field load: %s, property: %s, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n",
  4741. Js::OpCodeUtil::GetOpCodeName(instrLdFld->m_opcode),
  4742. this->m_func->GetScriptContext()->GetPropertyNameLocked(
  4743. propertySymOpnd->m_sym->AsPropertySym()->m_propertyId)->GetBuffer(),
  4744. this->m_func->GetJnFunction()->GetDisplayName(),
  4745. propertySymOpnd->m_inlineCacheIndex,
  4746. propertySymOpnd->GetCacheLayoutString(),
  4747. propertySymOpnd->IsTypeChecked() ? L"true" : L"false");
  4748. if (propertySymOpnd->HasFinalType() && !propertySymOpnd->IsLoadedFromProto())
  4749. {
  4750. propertySymOpnd->UpdateSlotForFinalType();
  4751. }
  4752. // TODO (ObjTypeSpec): If ((PropertySym*)propertySymOpnd->m_sym)->m_stackSym->m_isIntConst consider emitting a direct
  4753. // jump to helper or bailout. If we have a type check bailout, we could even abort compilation.
  4754. bool hasTypeCheckBailout = instrLdFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrLdFld->GetBailOutKind());
  4755. // If the hard-coded type is not available here, do a type check, and branch to the helper if the check fails.
  4756. // In the prototype case, we have to check the type even if it was checked upstream, to cover the case where
  4757. // the property has been added locally. Note that this is not necessary if the proto chain has been checked,
  4758. // because then we know there's been no store of the property since the type was checked.
  4759. bool emitPrimaryTypeCheck = propertySymOpnd->NeedsPrimaryTypeCheck();
  4760. bool emitLocalTypeCheck = propertySymOpnd->NeedsLocalTypeCheck();
  4761. bool emitLoadFromProtoTypeCheck = propertySymOpnd->NeedsLoadFromProtoTypeCheck();
  4762. if (emitPrimaryTypeCheck || emitLocalTypeCheck || emitLoadFromProtoTypeCheck)
  4763. {
  4764. if (emitLoadFromProtoTypeCheck)
  4765. {
  4766. propertySymOpnd->EnsureGuardedPropOps(this->m_func->m_alloc);
  4767. propertySymOpnd->SetGuardedPropOp(propertySymOpnd->GetObjTypeSpecFldId());
  4768. }
  4769. labelTypeCheckFailed = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4770. labelObjCheckFailed = hasTypeCheckBailout ? labelTypeCheckFailed : IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  4771. *typeOpndOut = this->GenerateCachedTypeCheck(instrLdFld, propertySymOpnd, labelObjCheckFailed, labelTypeCheckFailed);
  4772. }
  4773. IR::Opnd *opndSlotArray;
  4774. if (propertySymOpnd->IsLoadedFromProto())
  4775. {
  4776. opndSlotArray = this->LoadSlotArrayWithCachedProtoType(instrLdFld, propertySymOpnd);
  4777. }
  4778. else
  4779. {
  4780. opndSlotArray = this->LoadSlotArrayWithCachedLocalType(instrLdFld, propertySymOpnd);
  4781. }
  4782. // Load the value from the slot, getting the slot ID from the cache.
  4783. uint16 index = propertySymOpnd->GetSlotIndex();
  4784. Assert(index != -1);
  4785. if (opndSlotArray->IsRegOpnd())
  4786. {
  4787. opnd = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, this->m_func);
  4788. }
  4789. else
  4790. {
  4791. Assert(opndSlotArray->IsMemRefOpnd());
  4792. opnd = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, this->m_func, IR::AddrOpndKindDynamicPropertySlotRef);
  4793. }
  4794. Lowerer::InsertMove(instrLdFld->GetDst(), opnd, instrLdFld);
  4795. // We eliminate the helper, or the type check succeeds, or we bail out before the operation.
  4796. // Either delete the original instruction or replace it with a bailout.
  4797. if (!emitPrimaryTypeCheck && !emitLocalTypeCheck && !emitLoadFromProtoTypeCheck)
  4798. {
  4799. Assert(labelTypeCheckFailed == nullptr);
  4800. AssertMsg(!instrLdFld->HasBailOutInfo(), "Why does a direct field load have bailout?");
  4801. instrLdFld->Remove();
  4802. return true;
  4803. }
  4804. // Otherwise, branch around the bailout or helper.
  4805. labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  4806. instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
  4807. instrLdFld->InsertBefore(instr);
  4808. // Insert the bailout or helper label here.
  4809. instrLdFld->InsertBefore(labelTypeCheckFailed);
  4810. instrLdFld->InsertAfter(labelDone);
  4811. if (hasTypeCheckBailout)
  4812. {
  4813. AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !propertySymOpnd->IsTypeDead(),
  4814. "Why does a field load have a type check bailout, if its type is dead?");
  4815. // Convert the original instruction to a bailout.
  4816. if (instrLdFld->GetBailOutInfo()->bailOutInstr != instrLdFld)
  4817. {
  4818. // Set the cache index in the bailout info so that the bailout code will write it into the
  4819. // bailout record at runtime.
  4820. instrLdFld->GetBailOutInfo()->polymorphicCacheIndex = propertySymOpnd->m_inlineCacheIndex;
  4821. }
  4822. instrLdFld->FreeDst();
  4823. instrLdFld->FreeSrc1();
  4824. instrLdFld->m_opcode = Js::OpCode::BailOut;
  4825. this->GenerateBailOut(instrLdFld);
  4826. return true;
  4827. }
  4828. else
  4829. {
  4830. *continueAsHelperOut = true;
  4831. Assert(labelObjCheckFailed != nullptr && labelObjCheckFailed != labelTypeCheckFailed);
  4832. *labelHelperOut = labelObjCheckFailed;
  4833. return false;
  4834. }
  4835. }
// Lowers a field load (LdFld; root-object load when isRoot is true) end to end by
// trying, in order:
//   1. the object-type-specialized load against a cached type (GenerateLdFldWithCachedType),
//   2. (root loads only) the direct load of a non-configurable root field,
//   3. the inline-cache fast path (GenerateFastLdFld), when emitFastPath is set,
//   4. the generic helper call (LowerLdFld), using the "AfterFastPath" helper pair when a
//      fast path was emitted before it and the "WithoutFastPath" pair otherwise.
// Returns the instruction preceding the lowered sequence so the caller can resume iteration.
template<bool isRoot>
IR::Instr* Lowerer::GenerateCompleteLdFld(IR::Instr* instr, bool emitFastPath, IR::JnHelperMethod monoHelperAfterFastPath, IR::JnHelperMethod polyHelperAfterFastPath,
    IR::JnHelperMethod monoHelperWithoutFastPath, IR::JnHelperMethod polyHelperWithoutFastPath)
{
    if(instr->CallsAccessor() && instr->HasBailOutInfo())
    {
        // An accessor-calling load must not carry an implicit-call bailout kind
        // (strip the auxiliary bits before comparing the base kind).
        IR::BailOutKind kindMinusBits = instr->GetBailOutKind() & ~IR::BailOutKindBits;
        Assert(kindMinusBits != IR::BailOutOnImplicitCalls && kindMinusBits != IR::BailOutOnImplicitCallsPreOp);
    }
    IR::Instr* prevInstr = instr->m_prev;
    IR::LabelInstr* labelHelper = nullptr;
    IR::LabelInstr* labelBailOut = nullptr;
    bool isHelper = false;
    IR::RegOpnd* typeOpnd = nullptr;
    if (isRoot)
    {
        // Don't do the fast path here if emitFastPath is false, even if we can.
        if (emitFastPath && (this->GenerateLdFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd) || this->GenerateNonConfigurableLdRootFld(instr)))
        {
            // The load was fully lowered (the original instr was consumed); nothing left to do.
            Assert(labelHelper == nullptr);
            return prevInstr;
        }
    }
    else
    {
        if (this->GenerateLdFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd))
        {
            Assert(labelHelper == nullptr);
            return prevInstr;
        }
    }
    if (emitFastPath)
    {
        // GenerateFastLdFld returns false when a helper-call fallback is still required;
        // in that case place the pending helper label (if any) before the helper call.
        if (!GenerateFastLdFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, &labelBailOut, typeOpnd, &isHelper, &labelHelper))
        {
            if (labelHelper != nullptr)
            {
                labelHelper->isOpHelper = isHelper;
                instr->InsertBefore(labelHelper);
            }
            prevInstr = LowerLdFld(instr, monoHelperAfterFastPath, polyHelperAfterFastPath, true, labelBailOut, isHelper);
        }
    }
    else
    {
        // No fast path requested: go straight to the helper call, still honoring any
        // helper label produced by the cached-type attempt above.
        if (labelHelper != nullptr)
        {
            labelHelper->isOpHelper = isHelper;
            instr->InsertBefore(labelHelper);
        }
        prevInstr = LowerLdFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, true, labelBailOut, isHelper);
    }
    return prevInstr;
}
// Lowers a CheckFixedFld instruction: emits whatever type check and/or property-guard
// check is needed to validate the fixed field, then converts the original instruction
// into a BailOut taken when the check fails. Returns true when the instruction was
// fully lowered (always, unless it was removed as redundant).
bool
Lowerer::GenerateCheckFixedFld(IR::Instr * instrChkFld)
{
    IR::Instr *instr;
    IR::LabelInstr *labelBailOut = nullptr;
    IR::LabelInstr *labelDone = nullptr;
    AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instrChkFld->m_func->GetJnFunction()) ||
        !PHASE_OFF(Js::UseFixedDataPropsPhase, instrChkFld->m_func->GetJnFunction()), "Lowering a check fixed field with fixed data/method phase disabled?");
    Assert(instrChkFld->GetSrc1()->IsSymOpnd() && instrChkFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
    IR::PropertySymOpnd *propertySymOpnd = instrChkFld->GetSrc1()->AsPropertySymOpnd();
    AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
    Assert(propertySymOpnd->MayNeedTypeCheckProtection());
    // In the backwards pass we only add guarded property operations to instructions that are not already
    // protected by an upstream type check.
    Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);
    // For the non-configurable properties on the global object we do not need a type check. Otherwise,
    // we need a type check and bailout here unless this operation is part of the type check sequence and
    // is protected by a type check upstream.
    bool emitPrimaryTypeCheck = propertySymOpnd->NeedsPrimaryTypeCheck();
    // In addition, we may also need a local type check in case the property comes from the prototype and
    // it may have been overwritten on the instance after the primary type check upstream. If the property
    // comes from the instance, we must still protect against its value changing after the type check, but
    // for this a cheaper guard check is sufficient (see below).
    bool emitFixedFieldTypeCheck = propertySymOpnd->NeedsCheckFixedFieldTypeCheck() &&
        (!propertySymOpnd->IsTypeChecked() || propertySymOpnd->IsLoadedFromProto());
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
    uint inlineCacheIndex = propertySymOpnd->m_inlineCacheIndex;
    OUTPUT_TRACE_FUNC(
        Js::ObjTypeSpecPhase,
        this->m_func,
        L"Fixed field check: %s, property: %s, cache ID: %u, cloned cache: true, layout: %s, redundant check: %s count of props: %u \n",
        Js::OpCodeUtil::GetOpCodeName(instrChkFld->m_opcode),
        this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(),
        inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(), propertySymOpnd->IsTypeChecked() ? L"true" : L"false",
        propertySymOpnd->GetGuardedPropOps() ? propertySymOpnd->GetGuardedPropOps()->Count() : 0);
    if (emitPrimaryTypeCheck || emitFixedFieldTypeCheck)
    {
        labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        if(emitFixedFieldTypeCheck && propertySymOpnd->IsRootObjectNonConfigurableFieldLoad())
        {
            AssertMsg(!propertySymOpnd->GetGuardedPropOps() || propertySymOpnd->GetGuardedPropOps()->IsEmpty(), "This property Guard is used only for one property");
            //We need only cheaper Guard check, if the property belongs to the GlobalObject.
            GenerateFixedFieldGuardCheck(instrChkFld, propertySymOpnd, labelBailOut);
        }
        else
        {
            if (emitFixedFieldTypeCheck)
            {
                // Record this operation as guarded by the type check we are about to emit.
                propertySymOpnd->EnsureGuardedPropOps(this->m_func->m_alloc);
                propertySymOpnd->SetGuardedPropOp(propertySymOpnd->GetObjTypeSpecFldId());
            }
            this->GenerateCachedTypeCheck(instrChkFld, propertySymOpnd, labelBailOut, labelBailOut);
        }
    }
    // We may still need this guard if we didn't emit the write protect type check above. This situation arises if we have
    // a fixed field from the instance (not proto) and a property of the same name has been written somewhere between the
    // primary type check and here. Note that we don't need a type check, because we know the fixed field exists on the
    // object even if it has been written since primary type check, but we need to verify the fixed value didn't get overwritten.
    if (!emitPrimaryTypeCheck && !emitFixedFieldTypeCheck && !propertySymOpnd->IsWriteGuardChecked())
    {
        if (!PHASE_OFF(Js::FixedFieldGuardCheckPhase, this->m_func))
        {
            Assert(labelBailOut == nullptr);
            labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            GenerateFixedFieldGuardCheck(instrChkFld, propertySymOpnd, labelBailOut);
        }
    }
    // Note that a type handler holds only a weak reference to the singleton instance it represents, so
    // it is possible that the instance gets collected before the type and handler do. Hence, the upstream
    // type check may succeed, even as the original instance no longer exists. However, this would happen
    // only if another instance reached the same type (otherwise we wouldn't ever pass the type check
    // upstream). In that case we would have invalidated all fixed fields on that type, and so the type
    // check (or property guard check, if necessary) above would fail. All in all, we would never attempt
    // to access a fixed field from an instance that has been collected.
    if (!emitPrimaryTypeCheck && !emitFixedFieldTypeCheck && propertySymOpnd->IsWriteGuardChecked())
    {
        // Fully protected upstream: the check is redundant, so drop the instruction entirely.
        Assert(labelBailOut == nullptr);
        AssertMsg(!instrChkFld->HasBailOutInfo(), "Why does a direct fixed field check have bailout?");
        instrChkFld->Remove();
        return true;
    }
    // Success path: jump over the bailout block that follows.
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrChkFld->InsertBefore(instr);
    // Insert the helper label here.
    instrChkFld->InsertBefore(labelBailOut);
    instrChkFld->InsertAfter(labelDone);
    // Convert the original instruction to a bailout.
    Assert(instrChkFld->HasBailOutInfo());
    if (instrChkFld->GetBailOutInfo()->bailOutInstr != instrChkFld)
    {
        // Set the cache index in the bailout info so that the bailout code will write it into the
        // bailout record at runtime.
        instrChkFld->GetBailOutInfo()->polymorphicCacheIndex = inlineCacheIndex;
    }
    instrChkFld->FreeSrc1();
    instrChkFld->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(instrChkFld);
    return true;
}
// Lowers a CheckObjType instruction: emits a cached-type check on the object and
// converts the original instruction into a BailOut taken when the check fails.
void
Lowerer::GenerateCheckObjType(IR::Instr * instrChkObjType)
{
    Assert(instrChkObjType->GetSrc1()->IsSymOpnd() && instrChkObjType->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd());
    IR::PropertySymOpnd *propertySymOpnd = instrChkObjType->GetSrc1()->AsPropertySymOpnd();
    // Why do we have an explicit type check if the cached type has been checked upstream? The dead store pass should have
    // removed this instruction.
    Assert(propertySymOpnd->IsTypeCheckSeqCandidate() && !propertySymOpnd->IsTypeChecked());
    // Why do we have an explicit type check on a non-configurable root field load?
    Assert(!propertySymOpnd->IsRootObjectNonConfigurableFieldLoad());
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
    uint inlineCacheIndex = propertySymOpnd->m_inlineCacheIndex;
    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        L"Object type check: %s, property: %s, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n",
        Js::OpCodeUtil::GetOpCodeName(instrChkObjType->m_opcode),
        this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(),
        this->m_func->GetJnFunction()->GetDisplayName(),
        inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(), L"false");
    // Emit the type check; on mismatch control falls to labelBailOut below.
    IR::LabelInstr* labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    this->GenerateCachedTypeCheck(instrChkObjType, propertySymOpnd, labelBailOut, labelBailOut);
    // Success path: branch over the bailout block.
    IR::LabelInstr* labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::Instr* instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrChkObjType->InsertBefore(instr);
    // Insert the bailout label here.
    instrChkObjType->InsertBefore(labelBailOut);
    instrChkObjType->InsertAfter(labelDone);
    // Convert the original instruction to a bailout.
    Assert(instrChkObjType->HasBailOutInfo());
    if (instrChkObjType->GetBailOutInfo()->bailOutInstr != instrChkObjType)
    {
        // Set the cache index in the bailout info so that the bailout code will write it into the
        // bailout record at runtime.
        instrChkObjType->GetBailOutInfo()->polymorphicCacheIndex = inlineCacheIndex;
    }
    instrChkObjType->FreeSrc1();
    instrChkObjType->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(instrChkObjType);
}
  5030. void
  5031. Lowerer::LowerAdjustObjType(IR::Instr * instrAdjustObjType)
  5032. {
  5033. IR::AddrOpnd *finalTypeOpnd = instrAdjustObjType->UnlinkDst()->AsAddrOpnd();
  5034. IR::AddrOpnd *initialTypeOpnd = instrAdjustObjType->UnlinkSrc2()->AsAddrOpnd();
  5035. IR::RegOpnd *baseOpnd = instrAdjustObjType->UnlinkSrc1()->AsRegOpnd();
  5036. this->GenerateAdjustBaseSlots(
  5037. instrAdjustObjType, baseOpnd, (Js::Type*)initialTypeOpnd->m_address, (Js::Type*)finalTypeOpnd->m_address);
  5038. this->m_func->PinTypeRef(finalTypeOpnd->m_address);
  5039. IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrAdjustObjType->m_func);
  5040. this->m_lowererMD.CreateAssign(opnd, finalTypeOpnd, instrAdjustObjType);
  5041. initialTypeOpnd->Free(instrAdjustObjType->m_func);
  5042. instrAdjustObjType->Remove();
  5043. }
  5044. bool
  5045. Lowerer::GenerateNonConfigurableLdRootFld(IR::Instr * instrLdFld)
  5046. {
  5047. if (!instrLdFld->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
  5048. {
  5049. return false;
  5050. }
  5051. IR::PropertySymOpnd *propertySymOpnd = instrLdFld->GetSrc1()->AsPropertySymOpnd();
  5052. if (!propertySymOpnd->IsRootObjectNonConfigurableFieldLoad())
  5053. {
  5054. return false;
  5055. }
  5056. Assert(!PHASE_OFF(Js::RootObjectFldFastPathPhase, this->m_func->GetJnFunction()));
  5057. Assert(!instrLdFld->HasBailOutInfo());
  5058. IR::Opnd * srcOpnd;
  5059. Js::RootObjectBase * rootObject = this->m_func->GetJnFunction()->GetRootObject();
  5060. if (propertySymOpnd->UsesAuxSlot())
  5061. {
  5062. IR::RegOpnd * auxSlotOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  5063. this->InsertMove(auxSlotOpnd, IR::MemRefOpnd::New((byte *)rootObject + Js::DynamicObject::GetOffsetOfAuxSlots(),
  5064. TyMachPtr, this->m_func), instrLdFld);
  5065. srcOpnd = IR::IndirOpnd::New(auxSlotOpnd, propertySymOpnd->GetSlotIndex() * sizeof(Js::Var *),
  5066. TyVar, this->m_func);
  5067. }
  5068. else
  5069. {
  5070. srcOpnd = IR::MemRefOpnd::New((Js::Var *)rootObject + propertySymOpnd->GetSlotIndex(),
  5071. TyVar, this->m_func);
  5072. }
  5073. instrLdFld->ReplaceSrc1(srcOpnd);
  5074. instrLdFld->m_opcode = Js::OpCode::Ld_A;
  5075. LowererMD::ChangeToAssign(instrLdFld);
  5076. return true;
  5077. }
  5078. IR::Instr *
  5079. Lowerer::LowerDelFld(IR::Instr *delFldInstr, IR::JnHelperMethod helperMethod, bool useInlineCache, bool strictMode)
  5080. {
  5081. IR::Instr *instrPrev;
  5082. Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
  5083. if (strictMode)
  5084. {
  5085. propertyOperationFlag = Js::PropertyOperation_StrictMode;
  5086. }
  5087. instrPrev = m_lowererMD.LoadHelperArgument(delFldInstr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
  5088. LowerLdFld(delFldInstr, helperMethod, helperMethod, useInlineCache);
  5089. return instrPrev;
  5090. }
// Lowers an IsInst (instanceof) instruction into a helper call. The function and
// instance operands arrive through a chain of argument-definition instructions
// reached via src2 (each arg's sym is single-def; its def instr links to the next
// via its own src2). Helper arguments are pushed in reverse order:
// inline cache, script context, function, and the instance last.
IR::Instr *
Lowerer::LowerIsInst(IR::Instr * isInstInstr, IR::JnHelperMethod helperMethod)
{
    IR::Instr * instrPrev;
    IR::Instr * instrArg;
    IR::RegOpnd * argOpnd;
    // inlineCache
    instrPrev = m_lowererMD.LoadHelperArgument(isInstInstr, LoadIsInstInlineCacheOpnd(isInstInstr, isInstInstr->GetSrc1()->AsIntConstOpnd()->AsUint32()));
    isInstInstr->FreeSrc1();
    // Walk to the first argument-definition instruction through src2.
    argOpnd = isInstInstr->UnlinkSrc2()->AsRegOpnd();
    Assert(argOpnd->m_sym->m_isSingleDef);
    instrArg = argOpnd->m_sym->m_instrDef;
    argOpnd->Free(m_func);
    // scriptContext
    LoadScriptContext(isInstInstr);
    // instance goes last, so remember it now
    IR::Opnd * instanceOpnd = instrArg->UnlinkSrc1();
    // Advance to the next argument-definition instruction and dispose of the current one.
    argOpnd = instrArg->UnlinkSrc2()->AsRegOpnd();
    Assert(argOpnd->m_sym->m_isSingleDef);
    instrArg->Remove();
    instrArg = argOpnd->m_sym->m_instrDef;
    argOpnd->Free(m_func);
    // function
    IR::Opnd *opnd = instrArg->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(isInstInstr, opnd);
    Assert(instrArg->GetSrc2() == NULL);   // end of the argument chain
    instrArg->Remove();
    // instance
    m_lowererMD.LoadHelperArgument(isInstInstr, instanceOpnd);
    m_lowererMD.ChangeToHelperCall(isInstInstr, helperMethod);
    return instrPrev;
}
// Initializes a stack-allocated StackScriptFunction at function entry and links it
// into the per-frame list of stack functions (threaded through nextStackFunctionOpnd).
// The environment is initialized to NullFrameDisplay; the real environment is stored
// later when the function object is actually created.
void
Lowerer::GenerateStackScriptFunctionInit(StackSym * stackSym, Js::FunctionProxyPtrPtr nestedProxy)
{
    Func * func = this->m_func;
    Assert(func->HasAnyStackNestedFunc());
    Assert(nextStackFunctionOpnd);
    IR::Instr * insertBeforeInstr = func->GetFunctionEntryInsertionPoint();
    // Materialize the address of the stack slot that holds the function object.
    IR::RegOpnd * addressOpnd = IR::RegOpnd::New(TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseAddressOpnd(addressOpnd, func);
    InsertLea(addressOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertBeforeInstr);
    // Currently we don't initialize the environment until we actually allocate the function;
    // we also walk the list of stack functions when we need to box them, so initialize the
    // environment to NullFrameDisplay for now.
    GenerateStackScriptFunctionInit(addressOpnd, nestedProxy,
        IR::AddrOpnd::New((Js::Var)&Js::NullFrameDisplay, IR::AddrOpndKindDynamicMisc, func), insertBeforeInstr);
    // Establish the next link: store this function's address into the current tail slot,
    // then make the slot just past this object the new tail of the list.
    InsertMove(nextStackFunctionOpnd, addressOpnd, insertBeforeInstr);
    this->nextStackFunctionOpnd = IR::SymOpnd::New(stackSym, sizeof(Js::StackScriptFunction), TyMachPtr, func);
}
// Emits code (before insertBeforeInstr) that initializes all fields of a ScriptFunction
// object held in regOpnd: vtable, type, aux slots, object array, constructor cache,
// function info, environment, cached scope object, and the hasInlineCaches flag.
// The type operand is resolved in one of three ways depending on the nested function's
// state: loaded at runtime from a deferred proxy, taken as a compile-time address when
// the deferred prototype type already exists, or loaded from the function body; when it
// may be null at runtime, a helper call ensures it is created.
// isZeroed indicates the memory is known pre-zeroed, letting the MemInit helpers skip
// redundant null stores.
void
Lowerer::GenerateScriptFunctionInit(IR::RegOpnd * regOpnd, IR::Opnd * vtableAddressOpnd,
    Js::FunctionProxyPtrPtr nestedProxy, IR::Opnd * envOpnd, IR::Instr * insertBeforeInstr, bool isZeroed)
{
    Func * func = this->m_func;
    IR::Opnd * functionProxyOpnd;
    Js::FunctionProxy * functionProxy = *nestedProxy;
    IR::Opnd * typeOpnd = nullptr;
    bool doCheckTypeOpnd = true;
    if (functionProxy->IsDeferred())
    {
        // Deferred-parsed function: the proxy pointer may change, so load it (and its
        // deferred prototype type) at runtime through the proxy reference slot.
        functionProxyOpnd = IR::RegOpnd::New(TyMachPtr, func);
        InsertMove(functionProxyOpnd, IR::MemRefOpnd::New((Js::FunctionProxy**) nestedProxy, TyMachPtr, func), insertBeforeInstr);
        typeOpnd = IR::RegOpnd::New(TyMachPtr, func);
        InsertMove(typeOpnd, IR::IndirOpnd::New(functionProxyOpnd->AsRegOpnd(), Js::FunctionProxy::GetOffsetOfDeferredPrototypeType(),
            TyMachPtr, func), insertBeforeInstr);
    }
    else
    {
        Js::FunctionBody * functionBody = functionProxy->GetFunctionBody();
        functionProxyOpnd = CreateFunctionBodyOpnd(functionBody);
        Js::ScriptFunctionType * type = functionProxy->GetDeferredPrototypeType();
        if (type != nullptr)
        {
            // Type already exists at JIT time: embed its address directly; no null check needed.
            typeOpnd = IR::AddrOpnd::New(type, IR::AddrOpndKindDynamicType, func);
            doCheckTypeOpnd = false;
        }
        else
        {
            // Type not created yet: load it from the function body at runtime (may be null).
            typeOpnd = IR::RegOpnd::New(TyMachPtr, func);
            InsertMove(typeOpnd,
                IR::MemRefOpnd::New(((byte *)functionBody) + Js::FunctionProxy::GetOffsetOfDeferredPrototypeType(), TyMachPtr, func),
                insertBeforeInstr);
        }
    }
    if (doCheckTypeOpnd)
    {
        // If the loaded type is null, call the helper to create the deferred prototype type.
        IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        InsertTestBranch(typeOpnd, typeOpnd, Js::OpCode::BrEq_A, labelHelper, insertBeforeInstr);
        IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
        InsertBranch(Js::OpCode::Br, labelDone, insertBeforeInstr);
        insertBeforeInstr->InsertBefore(labelHelper);
        m_lowererMD.LoadHelperArgument(insertBeforeInstr, functionProxyOpnd);
        IR::Instr * callHelperInstr = IR::Instr::New(Js::OpCode::Call, typeOpnd,
            IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperEnsureFunctionProxyDeferredPrototypeType, func), func);
        insertBeforeInstr->InsertBefore(callHelperInstr);
        m_lowererMD.LowerCall(callHelperInstr, 0);
        insertBeforeInstr->InsertBefore(labelDone);
    }
    // Initialize the object's fields (offset 0 is the vtable).
    GenerateMemInit(regOpnd, 0, vtableAddressOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfType(), typeOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfAuxSlots(), insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfObjectArray(), insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfConstructorCache(),
        LoadLibraryValueOpnd(insertBeforeInstr, LibraryValue::ValueConstructorCacheDefaultInstance),
        insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfFunctionInfo(), functionProxyOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInit(regOpnd, Js::ScriptFunction::GetOffsetOfEnvironment(), envOpnd, insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfCachedScopeObj(), insertBeforeInstr, isZeroed);
    GenerateMemInitNull(regOpnd, Js::ScriptFunction::GetOffsetOfHasInlineCaches(), insertBeforeInstr, isZeroed);
}
  5202. void
  5203. Lowerer::GenerateStackScriptFunctionInit(IR::RegOpnd * regOpnd, Js::FunctionProxyPtrPtr nestedProxy, IR::Opnd * envOpnd, IR::Instr * insertBeforeInstr)
  5204. {
  5205. Func * func = this->m_func;
  5206. GenerateScriptFunctionInit(regOpnd,
  5207. LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableStackScriptFunction),
  5208. nestedProxy, envOpnd, insertBeforeInstr);
  5209. InsertMove(IR::IndirOpnd::New(regOpnd, Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func),
  5210. IR::AddrOpnd::NewNull(func), insertBeforeInstr);
  5211. }
// Sets up nextStackFunctionOpnd, the operand that addresses the head/next slot of the
// per-frame list of stack-allocated script functions. On x86/x64 a dedicated stack
// slot is allocated for it; on other targets a fixed frame-relative location
// (Js::Constants::StackNestedFuncList off the frame register) is used instead.
void
Lowerer::EnsureStackFunctionListStackSym()
{
    Func * func = this->m_func;
    Assert(func->HasAnyStackNestedFunc());
#if defined(_M_IX86) || defined(_M_X64)
    // Must run before any other stack allocation in this func (only the argument
    // slot, if present, may already be accounted for).
    Assert(func->m_localStackHeight == (func->HasArgumentSlot()? MachArgsSlotOffset : 0));
    StackSym * stackFunctionListStackSym = StackSym::New(TyMachPtr, func);
    func->StackAllocate(stackFunctionListStackSym, sizeof(Js::ScriptFunction *));
    nextStackFunctionOpnd = IR::SymOpnd::New(stackFunctionListStackSym, TyMachPtr, func);
#else
    Assert(func->m_localStackHeight == 0);
    // Address the list slot at a fixed negative offset from the frame register.
    nextStackFunctionOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(NULL, FRAME_REG, TyMachReg, func),
        -(int32)(Js::Constants::StackNestedFuncList * sizeof(Js::Var)), TyMachPtr, func);
#endif
}
  5228. void
  5229. Lowerer::AllocStackClosure()
  5230. {
  5231. m_func->StackAllocate(m_func->GetLocalFrameDisplaySym(), sizeof(Js::Var));
  5232. m_func->StackAllocate(m_func->GetLocalClosureSym(), sizeof(Js::Var));
  5233. }
  5234. void
  5235. Lowerer::EnsureZeroLastStackFunctionNext()
  5236. {
  5237. Assert(nextStackFunctionOpnd != nullptr);
  5238. Func * func = this->m_func;
  5239. IR::Instr * insertBeforeInstr = func->GetFunctionEntryInsertionPoint();
  5240. InsertMove(nextStackFunctionOpnd, IR::AddrOpnd::NewNull(func), insertBeforeInstr);
  5241. }
// Emits the stack-allocation fast path for NewScFunc: allocates a StackScriptFunction
// (plus its list-link slot) on the stack, initializes it at function entry, and at the
// NewScFunc site emits a runtime check of the function body's Flags_StackNestedFunc
// bit. When the bit is set, the environment is stored and the dst is set to the stack
// object's address, branching over the (helper) slow path that follows; otherwise
// control falls through to labelNoStackFunc, where the caller emits the helper call.
// Returns the LEA that produces the dst on the fast path.
IR::Instr *
Lowerer::GenerateNewStackScFunc(IR::Instr * newScFuncInstr)
{
    Assert(newScFuncInstr->m_func->DoStackNestedFunc());
    Func * func = newScFuncInstr->m_func;
    uint index = newScFuncInstr->GetSrc1()->AsIntConstOpnd()->AsUint32();
    Assert(index < func->GetJnFunction()->GetNestedCount());
    Js::FunctionProxyPtrPtr nestedProxy = func->GetJnFunction()->GetNestedFuncReference(index);
    // the stackAllocate call below for this sym is passing a size that is not represented by any IRType, hence TyMisc for the constructor
    StackSym * stackSym = StackSym::New(TyMisc, func);
    // ScriptFunction and its next pointer
    this->m_func->StackAllocate(stackSym, sizeof(Js::StackScriptFunction) + sizeof(Js::StackScriptFunction *));
    IR::Opnd * envOpnd = newScFuncInstr->GetSrc2();
    // One-time initialization of the stack object, emitted at function entry.
    GenerateStackScriptFunctionInit(stackSym, nestedProxy);
    IR::LabelInstr * labelNoStackFunc = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    // If the Flags_StackNestedFunc bit is clear, take the no-stack-function path.
    InsertTestBranch(IR::MemRefOpnd::New(func->GetJnFunction()->GetAddressOfFlags(), TyInt8, func),
        IR::IntConstOpnd::New(Js::FunctionBody::Flags_StackNestedFunc, TyInt8, func, true),
        Js::OpCode::BrEq_A, labelNoStackFunc, newScFuncInstr);
    // Fast path: store the environment into the stack object and return its address.
    InsertMove(IR::SymOpnd::New(stackSym, Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, func),
        envOpnd,
        newScFuncInstr);
    IR::Instr * lea =
        InsertLea(newScFuncInstr->GetDst()->AsRegOpnd(), IR::SymOpnd::New(stackSym, TyMachPtr, func), newScFuncInstr);
    InsertBranch(Js::OpCode::Br, labelDone, newScFuncInstr);
    newScFuncInstr->InsertBefore(labelNoStackFunc);
    newScFuncInstr->InsertAfter(labelDone);
    return lea;
}
  5271. IR::Instr *
  5272. Lowerer::LowerNewScFunc(IR::Instr * newScFuncInstr)
  5273. {
  5274. IR::Instr *stackNewScFuncInstr = nullptr;
  5275. if (newScFuncInstr->m_func->DoStackNestedFunc())
  5276. {
  5277. stackNewScFuncInstr = GenerateNewStackScFunc(newScFuncInstr);
  5278. }
  5279. IR::IntConstOpnd * functionBodySlotOpnd = newScFuncInstr->UnlinkSrc1()->AsIntConstOpnd();
  5280. IR::RegOpnd * envOpnd = newScFuncInstr->UnlinkSrc2()->AsRegOpnd();
  5281. IR::Instr * instrPrev = this->LoadFunctionBodyAsArgument(newScFuncInstr, functionBodySlotOpnd, envOpnd);
  5282. m_lowererMD.ChangeToHelperCall(newScFuncInstr, IR::HelperScrFunc_OP_NewScFunc );
  5283. return stackNewScFuncInstr == nullptr? instrPrev : stackNewScFuncInstr;
  5284. }
  5285. IR::Instr *
  5286. Lowerer::LowerNewScGenFunc(IR::Instr * newScFuncInstr)
  5287. {
  5288. IR::IntConstOpnd * functionBodySlotOpnd = newScFuncInstr->UnlinkSrc1()->AsIntConstOpnd();
  5289. IR::RegOpnd * envOpnd = newScFuncInstr->UnlinkSrc2()->AsRegOpnd();
  5290. IR::Instr * instrPrev = this->LoadFunctionBodyAsArgument(newScFuncInstr, functionBodySlotOpnd, envOpnd);
  5291. m_lowererMD.ChangeToHelperCall(newScFuncInstr, IR::HelperScrFunc_OP_NewScGenFunc );
  5292. return instrPrev;
  5293. }
  5294. ///----------------------------------------------------------------------------
  5295. ///
  5296. /// Lowerer::LowerScopedLdFld
  5297. ///
/// Lower a load instruction that takes an additional instance to use as
/// a default if the scope chain provided doesn't contain the property.
  5300. ///
  5301. ///----------------------------------------------------------------------------
// Lowers a scoped field load into a helper call. Helper arguments are pushed in
// reverse order: script context (only when no inline cache is used), the default
// instance (src2), the property sym (src1), and — when using an inline cache —
// the cache index, the runtime inline cache, and the function body.
IR::Instr *
Lowerer::LowerScopedLdFld(IR::Instr * ldFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache)
{
    IR::Opnd *src;
    IR::Instr *instrPrev = ldFldInstr->m_prev;
    if(!withInlineCache)
    {
        // Without an inline cache the helper takes the script context as its last argument.
        LoadScriptContext(ldFldInstr);
    }
    // Default instance (used when the scope chain doesn't contain the property).
    src = ldFldInstr->UnlinkSrc2();
    AssertMsg(src->IsRegOpnd(), "Expected reg opnd as src2");
    instrPrev = m_lowererMD.LoadHelperArgument(ldFldInstr, src);
    src = ldFldInstr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
    this->LoadPropertySymAsArgument(ldFldInstr, src);
    if (withInlineCache)
    {
        AssertMsg(src->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
        m_lowererMD.LoadHelperArgument(
            ldFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(src->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
        // Not using the polymorphic inline cache because the fast path only uses the monomorphic inline cache
        this->m_lowererMD.LoadHelperArgument(ldFldInstr, this->LoadRuntimeInlineCacheOpnd(ldFldInstr, src->AsPropertySymOpnd()));
        m_lowererMD.LoadHelperArgument(ldFldInstr, LoadFunctionBodyOpnd(ldFldInstr));
    }
    m_lowererMD.ChangeToHelperCall(ldFldInstr, helperMethod);
    return instrPrev;
}
  5330. ///----------------------------------------------------------------------------
  5331. ///
  5332. /// Lowerer::LowerScopedLdInst
  5333. ///
/// Lower a load instruction that takes an additional instance to use as
/// a default if the scope chain provided doesn't contain the property.
  5336. ///
  5337. ///----------------------------------------------------------------------------
// Lowers a scoped load-instance instruction into a helper call. Arguments are pushed
// in reverse order: script context, the address of an output stack slot the helper
// writes its result into, the root object's address, and the property sym (src1).
// After the call, the value the helper stored in the stack slot is copied into the
// destination register.
IR::Instr *
Lowerer::LowerScopedLdInst(IR::Instr *instr, IR::JnHelperMethod helperMethod)
{
    IR::Opnd *src;
    IR::Instr *instrPrev;
    // last argument is the scriptContext
    instrPrev = LoadScriptContext(instr);
    src = instr->UnlinkSrc2();
    AssertMsg(src->IsRegOpnd(), "Expected Reg opnd as src2");
    // __out Var*. The StackSym is allocated in the IR builder; here we insert a LEA
    // to pass the slot's address to the helper.
    StackSym* dstSym = src->GetStackSym();
    IR::Instr *load = this->m_lowererMD.LoadStackAddress(dstSym);
    instr->InsertBefore(load);
    IR::Opnd* tempOpnd = load->GetDst();
    m_lowererMD.LoadHelperArgument(instr, tempOpnd);
    // now the 3rd-last argument is the root object of the function; add an addrOpnd to
    // pass in the address of the rootObject.
    IR::Opnd * srcOpnd;
    Js::RootObjectBase * rootObject = this->m_func->GetJnFunction()->GetRootObject();
    srcOpnd = IR::AddrOpnd::New(rootObject, IR::AddrOpndKindDynamicVar, instr->m_func, true);
    instrPrev = m_lowererMD.LoadHelperArgument(instr, srcOpnd);
    // no change: the property field was built by the IR builder.
    src = instr->UnlinkSrc1();
    AssertMsg(src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as src");
    this->LoadPropertySymAsArgument(instr, src);
    instrPrev = m_lowererMD.ChangeToHelperCall(instr, helperMethod);
    // Copy the helper's out-value from the stack slot into the destination register.
    IR::RegOpnd* regOpnd = IR::RegOpnd::New(dstSym, TyVar, this->m_func);
    IR::SymOpnd*symOpnd = IR::SymOpnd::New(dstSym, TyVar, this->m_func);
    this->m_lowererMD.CreateAssign(regOpnd, symOpnd, instrPrev);
    return instrPrev;
}
  5369. IR::Instr *
  5370. Lowerer::LowerScopedDelFld(IR::Instr * delFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache, bool strictMode)
  5371. {
  5372. IR::Instr *instrPrev;
  5373. Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
  5374. if (strictMode)
  5375. {
  5376. propertyOperationFlag = Js::PropertyOperation_StrictMode;
  5377. }
  5378. instrPrev = m_lowererMD.LoadHelperArgument(delFldInstr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
  5379. LowerScopedLdFld(delFldInstr, helperMethod, withInlineCache);
  5380. return instrPrev;
  5381. }
///----------------------------------------------------------------------------
///
/// Lowerer::LowerProfiledStFld
///
/// Lower a field store into a call to one of the ProfilingHelpers below,
/// which performs the store and records dynamic profile data. Arguments are
/// pushed in reverse order of the helper signatures quoted in the comment.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerProfiledStFld(IR::JitProfilingInstr *stFldInstr, Js::PropertyOperationFlags flags)
{
    Assert(stFldInstr->profileId == Js::Constants::NoProfileId);
    IR::Instr *const instrPrev = stFldInstr->m_prev;
    /*
    void ProfilingHelpers::ProfiledInitFld_Jit(
    const Var instance,
    const PropertyId propertyId,
    const InlineCacheIndex inlineCacheIndex,
    const Var value,
    void *const framePointer)
    void ProfilingHelpers::ProfiledStFld_Jit(
    const Var instance,
    const PropertyId propertyId,
    const InlineCacheIndex inlineCacheIndex,
    const Var value,
    void *const framePointer)
    void ProfilingHelpers::ProfiledStSuperFld_Jit(
    const Var instance,
    const PropertyId propertyId,
    const InlineCacheIndex inlineCacheIndex,
    const Var value,
    void *const framePointer,
    const Var thisInstance)
    {
    */
    // Last helper parameter first: the frame pointer (and, for StSuperFld,
    // the trailing thisInstance from src2 before it).
    m_lowererMD.LoadHelperArgument(stFldInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
    if (stFldInstr->m_opcode == Js::OpCode::StSuperFld)
    {
        m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc2());
    }
    // src1 is the value being stored.
    m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc1());
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");
    m_lowererMD.LoadHelperArgument(
        stFldInstr,
        IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
    // Supplies the remaining instance/propertyId arguments (see signatures above).
    LoadPropertySymAsArgument(stFldInstr, dst);
    // Pick the profiling helper from the opcode and the root/strict-mode flags.
    IR::JnHelperMethod helper;
    switch (stFldInstr->m_opcode)
    {
    case Js::OpCode::InitFld:
    case Js::OpCode::InitRootFld:
        helper = IR::HelperProfiledInitFld;
        break;
    case Js::OpCode::StSuperFld:
        helper = IR::HelperProfiledStSuperFld;
        break;
    default:
        helper =
            flags & Js::PropertyOperation_Root
                ? flags & Js::PropertyOperation_StrictMode ? IR::HelperProfiledStRootFld_Strict : IR::HelperProfiledStRootFld
                : flags & Js::PropertyOperation_StrictMode ? IR::HelperProfiledStFld_Strict : IR::HelperProfiledStFld;
        break;
    }
    stFldInstr->SetSrc1(IR::HelperCallOpnd::New(helper, m_func));
    m_lowererMD.LowerCall(stFldInstr, 0);
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerStFld
///
/// Lower a field store to a helper call. If the operand carries a runtime
/// polymorphic inline cache (and a distinct polymorphic helper was supplied),
/// the polymorphic helper/cache pair is used; otherwise the monomorphic
/// runtime cache is loaded. Helper arguments are pushed in reverse order.
/// Returns the instruction preceding the lowered sequence.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerStFld(
    IR::Instr * stFldInstr,
    IR::JnHelperMethod helperMethod,
    IR::JnHelperMethod polymorphicHelperMethod,
    bool withInlineCache,
    IR::LabelInstr *labelBailOut,
    bool isHelper,
    bool withPutFlags,
    Js::PropertyOperationFlags flags)
{
    if (stFldInstr->IsJitProfilingInstr())
    {
        // If we want to profile then do something completely different
        return this->LowerProfiledStFld(stFldInstr->AsJitProfilingInstr(), flags);
    }
    IR::Instr *instrPrev = stFldInstr->m_prev;
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");
    IR::Opnd * inlineCacheOpnd = nullptr;
    if (withInlineCache)
    {
        AssertMsg(dst->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");
        if (dst->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache && polymorphicHelperMethod != helperMethod)
        {
            // Switch to the polymorphic helper and pass the polymorphic cache.
            Js::PolymorphicInlineCache * polymorphicInlineCache = dst->AsPropertySymOpnd()->m_runtimePolymorphicInlineCache;
            helperMethod = polymorphicHelperMethod;
            inlineCacheOpnd = IR::AddrOpnd::New(polymorphicInlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func);
        }
        else
        {
            // Need to load runtime inline cache opnd first before loading any helper argument
            // because LoadRuntimeInlineCacheOpnd may create labels marked as helper
            // and cause op helper register push/pop save in x86, messing up with any helper arguments that is already pushed
            inlineCacheOpnd = this->LoadRuntimeInlineCacheOpnd(stFldInstr, dst->AsPropertySymOpnd(), isHelper);
        }
    }
    if (withPutFlags)
    {
        m_lowererMD.LoadHelperArgument(stFldInstr,
            IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    }
    IR::Opnd *src = stFldInstr->UnlinkSrc1();
    if (stFldInstr->m_opcode == Js::OpCode::StSuperFld)
    {
        // src2 is the 'this' instance for a super property store.
        m_lowererMD.LoadHelperArgument(stFldInstr, stFldInstr->UnlinkSrc2());
    }
    m_lowererMD.LoadHelperArgument(stFldInstr, src);
    this->LoadPropertySymAsArgument(stFldInstr, dst);
    if (withInlineCache)
    {
        Assert(inlineCacheOpnd != nullptr);
        this->m_lowererMD.LoadHelperArgument(
            stFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));
        this->m_lowererMD.LoadHelperArgument(stFldInstr, inlineCacheOpnd);
        this->m_lowererMD.LoadHelperArgument(stFldInstr, LoadFunctionBodyOpnd(stFldInstr));
    }
    IR::RegOpnd *opndBase = dst->AsSymOpnd()->CreatePropertyOwnerOpnd(m_func);
    m_lowererMD.ChangeToHelperCall(stFldInstr, helperMethod, labelBailOut, opndBase, dst->AsSymOpnd()->IsPropertySymOpnd() ? dst->AsSymOpnd()->AsPropertySymOpnd() : nullptr, isHelper);
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateCompleteStFld
///
/// Generate the full store-field sequence: try the custom-property fast path,
/// then the cached-type (object type spec) path, then the generic inline fast
/// path, falling back to the appropriate mono/poly helper call in each case.
/// Returns the instruction preceding the emitted sequence.
///
///----------------------------------------------------------------------------
IR::Instr* Lowerer::GenerateCompleteStFld(IR::Instr* instr, bool emitFastPath, IR::JnHelperMethod monoHelperAfterFastPath, IR::JnHelperMethod polyHelperAfterFastPath,
    IR::JnHelperMethod monoHelperWithoutFastPath, IR::JnHelperMethod polyHelperWithoutFastPath, bool withPutFlags, Js::PropertyOperationFlags flags)
{
    if(instr->CallsAccessor() && instr->HasBailOutInfo())
    {
        // A store that may invoke a setter can't bail out on implicit calls;
        // sanity-check the bailout kind (ignoring the auxiliary bits).
        IR::BailOutKind kindMinusBits = instr->GetBailOutKind() & ~IR::BailOutKindBits;
        Assert(kindMinusBits != IR::BailOutOnImplicitCalls && kindMinusBits != IR::BailOutOnImplicitCallsPreOp);
    }
    IR::Instr* prevInstr = instr->m_prev;
    IR::LabelInstr* labelBailOut = nullptr;
    IR::LabelInstr* labelHelper = nullptr;
    bool isHelper = false;
    IR::RegOpnd* typeOpnd = nullptr;
    if(emitFastPath && GenerateFastStFldForCustomProperty(instr, &labelHelper))
    {
        // Fast path for a known custom property; if a helper label was produced
        // the slow path still needs the full helper call behind it.
        if(labelHelper)
        {
            Assert(labelHelper->isOpHelper);
            instr->InsertBefore(labelHelper);
            prevInstr = this->LowerStFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
        }
        else
        {
            // Fast path fully handled the store; the original instr is dead.
            instr->Remove();
            return prevInstr;
        }
    }
    else if (this->GenerateStFldWithCachedType(instr, &isHelper, &labelHelper, &typeOpnd))
    {
        // Cached-type path eliminated the store (or converted it to a bailout).
        Assert(labelHelper == nullptr);
        return prevInstr;
    }
    else if (emitFastPath)
    {
        // Generic inline-cache fast path; on failure fall back to the
        // "after fast path" helpers behind the helper label.
        if (!GenerateFastStFld(instr, monoHelperWithoutFastPath, polyHelperWithoutFastPath, &labelBailOut, typeOpnd, &isHelper, &labelHelper, withPutFlags, flags))
        {
            if (labelHelper != nullptr)
            {
                labelHelper->isOpHelper = isHelper;
                instr->InsertBefore(labelHelper);
            }
            prevInstr = this->LowerStFld(instr, monoHelperAfterFastPath, polyHelperAfterFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
        }
    }
    else
    {
        // No fast path requested: go straight to the helper call (labelHelper
        // may have been produced by GenerateStFldWithCachedType above).
        if (labelHelper != nullptr)
        {
            labelHelper->isOpHelper = isHelper;
            instr->InsertBefore(labelHelper);
        }
        prevInstr = this->LowerStFld(instr, monoHelperWithoutFastPath, monoHelperWithoutFastPath, true, labelBailOut, isHelper, withPutFlags, flags);
    }
    return prevInstr;
}
  5564. void
  5565. Lowerer::GenerateDirectFieldStore(IR::Instr* instrStFld, IR::PropertySymOpnd* propertySymOpnd)
  5566. {
  5567. Func* func = instrStFld->m_func;
  5568. IR::Opnd *opndSlotArray = this->LoadSlotArrayWithCachedLocalType(instrStFld, propertySymOpnd);
  5569. // Store the value to the slot, getting the slot index from the cache.
  5570. uint16 index = propertySymOpnd->GetSlotIndex();
  5571. Assert(index != -1);
  5572. #ifdef RECYCLER_RECYCLER_WRITE_BARRIER_JIT
  5573. if (opndSlotArray->IsRegOpnd())
  5574. {
  5575. IR::IndirOpnd * opndDst = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, func);
  5576. LowererMD::GenerateWriteBarrierAssign(opndDst, instrStFld->GetSrc1(), instrStFld);
  5577. }
  5578. else
  5579. {
  5580. Assert(opndSlotArray->IsMemRefOpnd());
  5581. IR::MemRefOpnd * opndDst = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, func);
  5582. LowererMD::GenerateWriteBarrierAssign(opndDst, instrStFld->GetSrc1(), instrStFld);
  5583. }
  5584. #else
  5585. IR::Opnd *opnd;
  5586. if (opndSlotArray->IsRegOpnd())
  5587. {
  5588. opnd = IR::IndirOpnd::New(opndSlotArray->AsRegOpnd(), index * sizeof(Js::Var), TyMachReg, func);
  5589. }
  5590. else
  5591. {
  5592. opnd = IR::MemRefOpnd::New((char*)opndSlotArray->AsMemRefOpnd()->GetMemLoc() + (index * sizeof(Js::Var)), TyMachReg, func);
  5593. }
  5594. this->m_lowererMD.CreateAssign(opnd, instrStFld->GetSrc1(), instrStFld);
  5595. #endif
  5596. }
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateStFldWithCachedType
///
/// Object-type-spec store: emit a type check (if not already protected by an
/// upstream one) followed by a direct slot store, with fallbacks for the
/// add-property transition and bailout/helper paths. Returns true if the
/// store was fully lowered here (eliminated or converted to a bailout);
/// returns false with *labelHelperOut / *continueAsHelperOut set when the
/// caller must still emit a helper call behind the returned label.
///
///----------------------------------------------------------------------------
bool
Lowerer::GenerateStFldWithCachedType(IR::Instr *instrStFld, bool* continueAsHelperOut, IR::LabelInstr** labelHelperOut, IR::RegOpnd** typeOpndOut)
{
    IR::Instr *instr;
    IR::RegOpnd *typeOpnd = nullptr;
    IR::LabelInstr* labelObjCheckFailed  = nullptr;
    IR::LabelInstr *labelTypeCheckFailed = nullptr;
    IR::LabelInstr *labelBothTypeChecksFailed = nullptr;
    IR::LabelInstr *labelDone = nullptr;
    // Initialize all out-params pessimistically before any early return.
    Assert(continueAsHelperOut != nullptr);
    *continueAsHelperOut = false;
    Assert(labelHelperOut != nullptr);
    *labelHelperOut = nullptr;
    Assert(typeOpndOut != nullptr);
    *typeOpndOut = nullptr;
    Assert(instrStFld->GetDst()->IsSymOpnd());
    if (!instrStFld->GetDst()->AsSymOpnd()->IsPropertySymOpnd() || !instrStFld->GetDst()->AsPropertySymOpnd()->IsTypeCheckSeqCandidate())
    {
        return false;
    }
    IR::PropertySymOpnd *propertySymOpnd = instrStFld->GetDst()->AsPropertySymOpnd();
    // If we have any object type spec info, we better not believe this is a load from prototype, since this is a store
    // and we never share inline caches between loads and stores.
    Assert(!propertySymOpnd->HasObjTypeSpecFldInfo() || !propertySymOpnd->IsLoadedFromProto());
    AssertMsg(propertySymOpnd->TypeCheckSeqBitsSetOnlyIfCandidate(), "Property sym operand optimized despite not being a candidate?");
    if (!propertySymOpnd->IsTypeCheckSeqCandidate())
    {
        return false;
    }
    if (!propertySymOpnd->IsTypeCheckSeqParticipant() && !propertySymOpnd->NeedsLocalTypeCheck())
    {
        return false;
    }
    Assert(!propertySymOpnd->NeedsTypeCheckAndBailOut() || (instrStFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrStFld->GetBailOutKind())));
    // In the backwards pass we only add guarded property operations to instructions that are not already
    // protected by an upstream type check.
    Assert(!propertySymOpnd->IsTypeCheckProtected() || propertySymOpnd->GetGuardedPropOps() == nullptr);
    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        L"Field store: %s, property: %s, func: %s, cache ID: %d, cloned cache: true, layout: %s, redundant check: %s\n",
        Js::OpCodeUtil::GetOpCodeName(instrStFld->m_opcode),
        this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySymOpnd->m_sym->AsPropertySym()->m_propertyId)->GetBuffer(),
        this->m_func->GetJnFunction()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex, propertySymOpnd->GetCacheLayoutString(),
        propertySymOpnd->IsTypeChecked() ? L"true" : L"false");
    if (propertySymOpnd->HasFinalType() && !propertySymOpnd->IsLoadedFromProto())
    {
        propertySymOpnd->UpdateSlotForFinalType();
    }
    Func* func = instrStFld->m_func;
    // TODO (ObjTypeSpec): If ((PropertySym*)propertySymOpnd->m_sym)->m_stackSym->m_isIntConst consider emitting a direct
    // jump to helper or bailout. If we have a type check bailout, we could even abort compilation.
    bool hasTypeCheckBailout = instrStFld->HasBailOutInfo() && IR::IsTypeCheckBailOutKind(instrStFld->GetBailOutKind());
    // If the type hasn't been checked upstream, see if it makes sense to check it here.
    bool isTypeChecked = propertySymOpnd->IsTypeChecked();
    if (!isTypeChecked)
    {
        // If the initial type has been checked, we can do a hard coded type transition without any type checks
        // (see GenerateStFldWithCachedFinalType), which is always worth doing, even if the type is not needed
        // downstream. We're not introducing any additional bailouts.
        if (propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType() && !propertySymOpnd->IsTypeDead())
        {
            // We have a final type in hand, so we can JIT (most of) the type transition work.
            return this->GenerateStFldWithCachedFinalType(instrStFld, propertySymOpnd);
        }
        if (propertySymOpnd->HasTypeMismatch())
        {
            // So we have a type mismatch, which happens when the type (and the type without property if ObjTypeSpecStore
            // is on) on this instruction didn't match the live type value according to the flow. We must have hit some
            // stale inline cache (perhaps inlined from a different function, or on a code path not taken for a while).
            // Either way, we know exactly what type the object must have at this point (fully determined by flow), but
            // we don't know whether that type already has the property we're storing here. All in all, we know exactly
            // what shape the object will have after this operation, but we're not sure what label (type) to give this
            // shape. Thus we can simply let the fast path do its thing based on the live inline cache. The downstream
            // instructions relying only on this shape (loads and stores) are safe, and those that need the next type
            // (i.e. adds) will do the same thing as this instruction.
            return false;
        }
        // If we're still here then we must need a primary type check on this instruction to protect
        // a sequence of field operations downstream, or a local type check for an isolated field store.
        Assert(propertySymOpnd->NeedsPrimaryTypeCheck() || propertySymOpnd->NeedsLocalTypeCheck());
        labelTypeCheckFailed = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        labelBothTypeChecksFailed = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        // With a type check bailout, a failed object check goes straight to the bailout;
        // otherwise it gets its own label so the caller can continue as a (non-bailing) helper.
        labelObjCheckFailed = hasTypeCheckBailout ? labelBothTypeChecksFailed : IR::LabelInstr::New(Js::OpCode::Label, func, true);
        typeOpnd = this->GenerateCachedTypeCheck(instrStFld, propertySymOpnd, labelObjCheckFailed, labelBothTypeChecksFailed, labelTypeCheckFailed);
        *typeOpndOut = typeOpnd;
    }
    // Either we are protected by a type check upstream or we just emitted a type check above,
    // now it's time to store the field value.
    GenerateDirectFieldStore(instrStFld, propertySymOpnd);
    // If we are protected by a type check upstream, we don't need a bailout or helper here, delete the instruction
    // and return "true" to indicate that we succeeded in eliminating it.
    if (isTypeChecked)
    {
        Assert(labelTypeCheckFailed == nullptr && labelBothTypeChecksFailed == nullptr);
        AssertMsg(!instrStFld->HasBailOutInfo(), "Why does a direct field store have bailout?");
        instrStFld->Remove();
        return true;
    }
    // Otherwise, branch around the helper on successful type check.
    labelDone = IR::LabelInstr::New(Js::OpCode::Label, func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, func);
    instrStFld->InsertBefore(instr);
    // On failed type check, try the type without property if we've got one.
    instrStFld->InsertBefore(labelTypeCheckFailed);
    // Caution, this is one of the dusty corners of the JIT. We only get here if this is an isolated StFld which adds a property, or
    // ObjTypeSpecStore is off. In the former case no downstream operations depend on the final type produced here, and we can fall
    // back on live cache and helper if the type doesn't match. In the latter we may have a cache with type transition, which must
    // produce a value for the type after transition, because that type is consumed downstream. Thus, if the object's type doesn't
    // match either the type with or the type without the property we're storing, we must bail out here.
    bool emitAddProperty = propertySymOpnd->IsMono() && propertySymOpnd->HasInitialType();
    if (emitAddProperty)
    {
        GenerateCachedTypeWithoutPropertyCheck(instrStFld, propertySymOpnd, typeOpnd, labelBothTypeChecksFailed);
        GenerateFieldStoreWithTypeChange(instrStFld, propertySymOpnd, propertySymOpnd->GetInitialType(), propertySymOpnd->GetType());
        instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, func);
        instrStFld->InsertBefore(instr);
    }
    instrStFld->InsertBefore(labelBothTypeChecksFailed);
    instrStFld->InsertAfter(labelDone);
    if (hasTypeCheckBailout)
    {
        AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !propertySymOpnd->IsTypeDead(),
            "Why does a field store have a type check bailout, if its type is dead?");
        if (instrStFld->GetBailOutInfo()->bailOutInstr != instrStFld)
        {
            // Set the cache index in the bailout info so that the generated code will write it into the
            // bailout record at runtime.
            instrStFld->GetBailOutInfo()->polymorphicCacheIndex = propertySymOpnd->m_inlineCacheIndex;
        }
        else
        {
            Assert(instrStFld->GetBailOutInfo()->polymorphicCacheIndex == propertySymOpnd->m_inlineCacheIndex);
        }
        // Convert the original store into the bailout itself.
        instrStFld->m_opcode = Js::OpCode::BailOut;
        instrStFld->FreeSrc1();
        instrStFld->FreeDst();
        this->GenerateBailOut(instrStFld);
        return true;
    }
    else
    {
        // No bailout: the caller must lower the original store as a helper call
        // behind the failed-object-check label.
        *continueAsHelperOut = true;
        Assert(labelObjCheckFailed != nullptr && labelObjCheckFailed != labelBothTypeChecksFailed);
        *labelHelperOut = labelObjCheckFailed;
        return false;
    }
}
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateCachedTypeCheck
///
/// Emit an object test plus a type check against the expected (or guarded)
/// type. Emits either a direct compare against a type address, an indirect
/// compare through a property guard's value, or a full equivalent-type check
/// via the HelperCheckIfTypeIsEquivalent helper. Returns the register holding
/// the object's loaded type so callers can reuse it.
///
///----------------------------------------------------------------------------
IR::RegOpnd *
Lowerer::GenerateCachedTypeCheck(IR::Instr *instrChk, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr* labelObjCheckFailed, IR::LabelInstr *labelTypeCheckFailed, IR::LabelInstr *labelSecondChance)
{
    Assert(propertySymOpnd->MayNeedTypeCheckProtection());
    Func* func = instrChk->m_func;
    IR::RegOpnd *regOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(func);
    regOpnd->SetValueType(propertySymOpnd->GetPropertyOwnerValueType());
    // Only emit the tagged-value (object) test if the value type doesn't
    // already prove the operand is an object.
    if (!regOpnd->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(regOpnd, instrChk, labelObjCheckFailed);
    }
    IR::Opnd *expectedTypeOpnd;
    bool emitDirectCheck = true;
    // Note: don't attempt equivalent type check if we're doing a final type optimization or if we have a monomorphic
    // cache and no type check bailout. In the latter case, we can wind up doing expensive failed equivalence checks
    // repeatedly and never rejit.
    bool doEquivTypeCheck =
        propertySymOpnd->HasEquivalentTypeSet() &&
        !(propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType()) &&
        !propertySymOpnd->MustDoMonoCheck() &&
        (propertySymOpnd->IsPoly() || instrChk->HasTypeCheckBailOut());
    Assert(doEquivTypeCheck || !instrChk->HasEquivalentTypeCheckBailOut());
    Js::Type* type = propertySymOpnd->MustDoMonoCheck() ? propertySymOpnd->GetMonoGuardType() :
        doEquivTypeCheck ? propertySymOpnd->GetFirstEquivalentType() : propertySymOpnd->GetType();
    // A guard (equivalent or single-type) gives an indirect check through the
    // guard's mutable value; no guard means a direct compare against the type.
    Js::PropertyGuard* typeCheckGuard = doEquivTypeCheck ?
        (Js::PropertyGuard*)CreateEquivalentTypeGuardAndLinkToGuardedProperties(type, propertySymOpnd) :
        (Js::PropertyGuard*)CreateTypePropertyGuardForGuardedProperties(type, propertySymOpnd);
    if (typeCheckGuard == nullptr)
    {
        Assert(type != nullptr);
        expectedTypeOpnd = IR::AddrOpnd::New(type, IR::AddrOpndKindDynamicType, func, true);
    }
    else
    {
        Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
        expectedTypeOpnd = IR::MemRefOpnd::New((void*)(typeCheckGuard->GetAddressOfValue()), TyMachPtr, func, IR::AddrOpndKindDynamicGuardValueRef);
        emitDirectCheck = false;
    }
    if (PHASE_VERBOSE_TRACE(Js::ObjTypeSpecPhase, this->m_func))
    {
        OUTPUT_VERBOSE_TRACE_FUNC(Js::ObjTypeSpecPhase, this->m_func, L"Emitted %s type check for type 0x%p",
            emitDirectCheck ? L"direct" : propertySymOpnd->IsPoly() ? L"equivalent" : L"indirect", type);
#if DBG
        if (propertySymOpnd->GetGuardedPropOps() != nullptr)
        {
            Output::Print(L" guarding operations:\n ");
            propertySymOpnd->GetGuardedPropOps()->Dump();
        }
        else
        {
            Output::Print(L"\n");
        }
#else
        Output::Print(L"\n");
#endif
        Output::Flush();
    }
    // Load the object's type: through memory for a known-constant object,
    // otherwise indirectly off the object register.
    IR::RegOpnd* typeOpnd = IR::RegOpnd::New(TyMachReg, func);
    IR::Opnd *sourceType;
    if (regOpnd->m_sym->IsConst() && !regOpnd->m_sym->IsIntConst() && !regOpnd->m_sym->IsFloatConst())
    {
        sourceType = IR::MemRefOpnd::New((BYTE*)regOpnd->m_sym->GetConstAddress() +
            Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func, IR::AddrOpndKindDynamicObjectTypeRef);
    }
    else
    {
        sourceType = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, func);
    }
    m_lowererMD.CreateAssign(typeOpnd, sourceType, instrChk);
    if (doEquivTypeCheck)
    {
        // TODO (ObjTypeSpec): For isolated equivalent type checks it would be good to emit a check if the cache is still valid, and
        // if not go straight to live polymorphic cache. This way we wouldn't have to bail out and re-JIT, and also wouldn't continue
        // to try the equivalent type cache, miss it and do the slow comparison. This may be as easy as sticking a null on the main
        // type in the equivalent type cache.
        // Fast path: if the type matches the guard value directly, skip the
        // (expensive) equivalence helper call entirely.
        IR::LabelInstr* labelCheckEquivalentType = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelCheckEquivalentType, instrChk);
        IR::LabelInstr *labelTypeCheckSucceeded = IR::LabelInstr::New(Js::OpCode::Label, func, false);
        InsertBranch(Js::OpCode::Br, labelTypeCheckSucceeded, instrChk);
        instrChk->InsertBefore(labelCheckEquivalentType);
        this->m_lowererMD.LoadHelperArgument(instrChk, IR::AddrOpnd::New((Js::Var)typeCheckGuard, IR::AddrOpndKindDynamicTypeCheckGuard, func, true));
        this->m_lowererMD.LoadHelperArgument(instrChk, typeOpnd);
        IR::RegOpnd* equivalentTypeCheckResultOpnd = IR::RegOpnd::New(TyUint8, func);
        IR::HelperCallOpnd* equivalentTypeCheckHelperCallOpnd = IR::HelperCallOpnd::New(IR::HelperCheckIfTypeIsEquivalent, func);
        IR::Instr* equivalentTypeCheckCallInstr = IR::Instr::New(Js::OpCode::Call, equivalentTypeCheckResultOpnd, equivalentTypeCheckHelperCallOpnd, func);
        instrChk->InsertBefore(equivalentTypeCheckCallInstr);
        this->m_lowererMD.LowerCall(equivalentTypeCheckCallInstr, 0);
        InsertTestBranch(equivalentTypeCheckResultOpnd, equivalentTypeCheckResultOpnd, Js::OpCode::BrEq_A, labelTypeCheckFailed, instrChk);
        // TODO (ObjTypeSpec): Consider emitting a shared bailout to which a specific bailout kind is written at runtime. This would allow us to distinguish
        // between non-equivalent type and other cases, such as invalidated guard (due to fixed field overwrite, perhaps) or too much thrashing on the
        // equivalent type cache. We could determine bailout kind based on the value returned by the helper. In the case of cache thrashing we could just
        // turn off the whole optimization for a given function.
        instrChk->InsertBefore(labelTypeCheckSucceeded);
    }
    else
    {
        InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelSecondChance != nullptr ? labelSecondChance : labelTypeCheckFailed, instrChk);
    }
    // Don't pin the type for polymorphic operations. The code can successfully execute even if this type is no longer referenced by any objects,
    // as long as there are other objects with types equivalent on the properties referenced by this code. The type is kept alive until entry point
    // installation by the JIT transfer data, and after that by the equivalent type cache, so it will stay alive unless or until it gets evicted
    // from the cache.
    if (!doEquivTypeCheck)
    {
        PinTypeRef(type, type, instrChk, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
    }
    return typeOpnd;
}
  5854. void
  5855. Lowerer::PinTypeRef(Js::Type* type, void* typeRef, IR::Instr* instr, Js::PropertyId propertyId)
  5856. {
  5857. this->m_func->PinTypeRef(typeRef);
  5858. if (PHASE_TRACE(Js::TracePinnedTypesPhase, this->m_func))
  5859. {
  5860. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  5861. Output::Print(L"PinnedTypes: function %s(%s) instr %s property %s(#%u) pinned %s reference 0x%p to type 0x%p.\n",
  5862. this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  5863. Js::OpCodeUtil::GetOpCodeName(instr->m_opcode), GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId,
  5864. typeRef == type ? L"strong" : L"weak", typeRef, type);
  5865. Output::Flush();
  5866. }
  5867. }
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateCachedTypeWithoutPropertyCheck
///
/// Emit a check that the object's current type equals the cached initial type
/// (the type *before* the property being stored was added). Used on the
/// add-property path of a cached-type store. If no type register is supplied,
/// the type is loaded from the object here.
///
///----------------------------------------------------------------------------
void
Lowerer::GenerateCachedTypeWithoutPropertyCheck(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, IR::Opnd *typeOpnd, IR::LabelInstr *labelTypeCheckFailed)
{
    Assert(propertySymOpnd->IsMonoObjTypeSpecCandidate());
    Assert(propertySymOpnd->HasInitialType());
    Js::Type* typeWithoutProperty = propertySymOpnd->GetInitialType();
    // We should never add properties to objects of static types.
    Assert(Js::DynamicType::Is(typeWithoutProperty->GetTypeId()));
    if (typeOpnd == nullptr)
    {
        // No opnd holding the type was passed in, so we have to load the type here.
        IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
        if (!baseOpnd->IsNotTaggedValue())
        {
            m_lowererMD.GenerateObjectTest(baseOpnd, instrInsert, labelTypeCheckFailed);
        }
        IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
        typeOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
        m_lowererMD.CreateAssign(typeOpnd, opnd, instrInsert);
    }
    // With a shared property guard, compare indirectly through the guard's
    // value (which can be invalidated); otherwise compare the type address.
    Js::JitTypePropertyGuard* typePropertyGuard = CreateTypePropertyGuardForGuardedProperties(typeWithoutProperty, propertySymOpnd);
    IR::Opnd *expectedTypeOpnd;
    if (typePropertyGuard)
    {
        bool emitDirectCheck = true;
        Assert(typePropertyGuard != nullptr);
        Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
        expectedTypeOpnd = IR::MemRefOpnd::New((void*)(typePropertyGuard->GetAddressOfValue()), TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
        emitDirectCheck = false;
        OUTPUT_VERBOSE_TRACE_FUNC(Js::ObjTypeSpecPhase, this->m_func, L"Emitted %s type check for type 0x%p.\n",
            emitDirectCheck ? L"direct" : L"indirect", typeWithoutProperty);
    }
    else
    {
        expectedTypeOpnd = IR::AddrOpnd::New(typeWithoutProperty, IR::AddrOpndKindDynamicType, m_func, true);
    }
    InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelTypeCheckFailed, instrInsert);
    // Technically, it should be enough to pin the final type, because it should keep all of its predecessors alive, but
    // just to be extra cautious, let's pin the initial type as well.
    PinTypeRef(typeWithoutProperty, typeWithoutProperty, instrInsert, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
}
  5909. void
  5910. Lowerer::GenerateFixedFieldGuardCheck(IR::Instr *insertPointInstr, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
  5911. {
  5912. GeneratePropertyGuardCheck(insertPointInstr, propertySymOpnd, labelBailOut);
  5913. }
///----------------------------------------------------------------------------
///
/// Lowerer::CreateTypePropertyGuardForGuardedProperties
///
/// Create (or reuse) a single-type property guard for this type and link it
/// to each property guarded by the operand, so that a shape change to any of
/// those properties invalidates the guard. Returns nullptr when the entry
/// point has no shared property guards, or when lazy fixed-type bailout is
/// used for every property (callers then emit a direct type compare instead).
///
///----------------------------------------------------------------------------
Js::JitTypePropertyGuard*
Lowerer::CreateTypePropertyGuardForGuardedProperties(Js::Type* type, IR::PropertySymOpnd* propertySymOpnd)
{
    // We should always have a list of guarded properties.
    Assert(propertySymOpnd->GetGuardedPropOps() != nullptr);
    Js::JitTypePropertyGuard* guard = nullptr;
    Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();
    if (entryPointInfo->HasSharedPropertyGuards())
    {
        // Consider (ObjTypeSpec): Because we allocate these guards from the JIT thread we can't share guards for the same type across multiple functions.
        // This leads to proliferation of property guards on the thread context. The alternative would be to pre-allocate shared (by value) guards
        // from the thread context during work item creation. We would create too many of them (because some types aren't actually used as guards),
        // but we could share a guard for a given type between functions. This may ultimately be better.
        LinkGuardToGuardedProperties(entryPointInfo, propertySymOpnd->GetGuardedPropOps(), [this, type, &guard](Js::PropertyId propertyId)
        {
            if (DoLazyFixedTypeBailout(this->m_func))
            {
                // Lazy bailout: record the property instead of creating a guard.
                this->m_func->lazyBailoutProperties.Item(propertyId);
            }
            else
            {
                // Lazily create the per-type guard on the first guarded property.
                if (guard == nullptr)
                {
                    guard = this->m_func->GetOrCreateSingleTypeGuard(type);
                }
                if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
                {
                    wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    wchar_t workItemName[256];
                    this->m_func->m_workItem->GetDisplayName(workItemName, _countof(workItemName));
                    Output::Print(L"ObjTypeSpec: function %s(%s) registered guard 0x%p with value 0x%p for property %s (%u).\n",
                        workItemName, this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
                        guard, guard->GetValue(), this->GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId);
                    Output::Flush();
                }
                // Invalidation path: a change to this property id kills the guard.
                this->m_func->EnsurePropertyGuardsByPropertyId();
                this->m_func->LinkGuardToPropertyId(propertyId, guard);
            }
        });
    }
    return guard;
}
  5956. Js::JitEquivalentTypeGuard*
  5957. Lowerer::CreateEquivalentTypeGuardAndLinkToGuardedProperties(Js::Type* type, IR::PropertySymOpnd* propertySymOpnd)
  5958. {
  5959. // We should always have a list of guarded properties.
  5960. Assert(propertySymOpnd->HasObjTypeSpecFldInfo() && propertySymOpnd->HasEquivalentTypeSet() && propertySymOpnd->GetGuardedPropOps());
  5961. Js::JitEquivalentTypeGuard* guard = this->m_func->CreateEquivalentTypeGuard(type, propertySymOpnd->GetObjTypeSpecFldId());
  5962. Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();
  5963. if (entryPointInfo->HasSharedPropertyGuards())
  5964. {
  5965. LinkGuardToGuardedProperties(entryPointInfo, propertySymOpnd->GetGuardedPropOps(), [=](Js::PropertyId propertyId)
  5966. {
  5967. if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
  5968. {
  5969. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  5970. Output::Print(L"ObjTypeSpec: function %s(%s) registered equivalent type spec guard 0x%p with value 0x%p for property %s (%u).\n",
  5971. this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  5972. guard, guard->GetValue(), GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId);
  5973. Output::Flush();
  5974. }
  5975. this->m_func->EnsurePropertyGuardsByPropertyId();
  5976. this->m_func->LinkGuardToPropertyId(propertyId, guard);
  5977. });
  5978. }
  5979. Assert(guard->GetCache() != nullptr);
  5980. Js::EquivalentTypeCache* cache = guard->GetCache();
  5981. // TODO (ObjTypeSpec): If we delayed populating the types until encoder, we could bulk allocate all equivalent type caches
  5982. // in one block from the heap. This would allow us to not allocate them from the native code data allocator and free them
  5983. // when no longer needed. However, we would need to store the global property operation ID in the guard, so we can look up
  5984. // the info in the encoder. Perhaps we could overload the cache pointer to be the ID until encoder.
  5985. // Copy types from the type set to the guard's cache
  5986. Js::EquivalentTypeSet* typeSet = propertySymOpnd->GetEquivalentTypeSet();
  5987. uint16 cachedTypeCount = typeSet->GetCount() < EQUIVALENT_TYPE_CACHE_SIZE ? typeSet->GetCount() : EQUIVALENT_TYPE_CACHE_SIZE;
  5988. for (uint16 ti = 0; ti < cachedTypeCount; ti++)
  5989. {
  5990. cache->types[ti] = typeSet->GetType(ti);
  5991. }
  5992. // Populate property ID and slot index arrays on the guard's cache. We iterate over the
  5993. // bit vector of property operations protected by this guard, but some property operations
  5994. // may be referring to the same property ID (but not share the same cache). We skip
  5995. // redundant entries by maintaining a hash set of property IDs we've already encountered.
  5996. auto propOps = propertySymOpnd->GetGuardedPropOps();
  5997. uint propOpCount = propOps->Count();
  5998. bool isTypeStatic = Js::StaticType::Is(type->GetTypeId());
  5999. JsUtil::BaseDictionary<Js::PropertyId, Js::EquivalentPropertyEntry*, JitArenaAllocator> propIds(this->m_alloc, propOpCount);
  6000. Js::EquivalentPropertyEntry* properties = AnewArray(this->m_alloc, Js::EquivalentPropertyEntry, propOpCount);
  6001. uint propIdCount = 0;
  6002. FOREACH_BITSET_IN_SPARSEBV(propOpId, propOps)
  6003. {
  6004. Js::ObjTypeSpecFldInfo* propOpInfo = this->m_func->GetGlobalObjTypeSpecFldInfo(propOpId);
  6005. Js::PropertyId propertyId = propOpInfo->GetPropertyId();
  6006. Js::PropertyIndex propOpIndex = Js::Constants::NoSlot;
  6007. bool hasFixedValue = propOpInfo->HasFixedValue();
  6008. if (hasFixedValue)
  6009. {
  6010. cache->SetHasFixedValue();
  6011. }
  6012. bool isLoadedFromProto = propOpInfo->IsLoadedFromProto();
  6013. if (isLoadedFromProto)
  6014. {
  6015. cache->SetIsLoadedFromProto();
  6016. }
  6017. else
  6018. {
  6019. propOpIndex = propOpInfo->GetSlotIndex();
  6020. }
  6021. bool propOpUsesAuxSlot = propOpInfo->UsesAuxSlot();
  6022. AssertMsg(!isTypeStatic || !propOpInfo->IsBeingStored(), "Why are we storing a field to an object of static type?");
  6023. Js::EquivalentPropertyEntry* entry;
  6024. if (propIds.TryGetValue(propertyId, &entry))
  6025. {
  6026. if (propOpIndex == entry->slotIndex && propOpUsesAuxSlot == entry->isAuxSlot)
  6027. {
  6028. entry->mustBeWritable |= propOpInfo->IsBeingStored();
  6029. }
  6030. else
  6031. {
  6032. // Due to inline cache sharing we have the same property accessed using different caches
  6033. // with inconsistent info. This means a guaranteed bailout on the equivalent type check.
  6034. // We'll just let it happen and turn off the optimization for this function. We could avoid
  6035. // this problem by tracking property information on the value type in glob opt.
  6036. if (PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->m_func))
  6037. {
  6038. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  6039. Js::FunctionBody* topFunctionBody = this->m_func->GetJnFunction();
  6040. Js::ScriptContext* scriptContext = topFunctionBody->GetScriptContext();
  6041. Output::Print(L"EquivObjTypeSpec: top function %s (%s): duplicate property clash on %s(#%d) \n",
  6042. topFunctionBody->GetDisplayName(), topFunctionBody->GetDebugNumberSet(debugStringBuffer), propertyId, scriptContext->GetPropertyNameLocked(propertyId)->GetBuffer());
  6043. Output::Flush();
  6044. }
  6045. Assert(propIdCount < propOpCount);
  6046. __analysis_assume(propIdCount < propOpCount);
  6047. entry = &properties[propIdCount++];
  6048. entry->propertyId = propertyId;
  6049. entry->slotIndex = propOpIndex;
  6050. entry->isAuxSlot = propOpUsesAuxSlot;
  6051. entry->mustBeWritable = propOpInfo->IsBeingStored();
  6052. }
  6053. }
  6054. else
  6055. {
  6056. Assert(propIdCount < propOpCount);
  6057. __analysis_assume(propIdCount < propOpCount);
  6058. entry = &properties[propIdCount++];
  6059. entry->propertyId = propertyId;
  6060. entry->slotIndex = propOpIndex;
  6061. entry->isAuxSlot = propOpUsesAuxSlot;
  6062. entry->mustBeWritable = propOpInfo->IsBeingStored();
  6063. propIds.AddNew(propertyId, entry);
  6064. }
  6065. }
  6066. NEXT_BITSET_IN_SPARSEBV;
  6067. cache->record.propertyCount = propIdCount;
  6068. cache->record.properties = NativeCodeDataNewArray(this->m_func->GetNativeCodeDataAllocator(), Js::EquivalentPropertyEntry, propIdCount);
  6069. memcpy(cache->record.properties, properties, propIdCount * sizeof(Js::EquivalentPropertyEntry));
  6070. return guard;
  6071. }
// Links this constructor cache to every property it guards on the entry point, so that
// invalidating any of those properties also invalidates the cache. Returns true if at
// least one property was linked, false if there was nothing to guard.
bool
Lowerer::LinkCtorCacheToGuardedProperties(Js::JitTimeConstructorCache* ctorCache)
{
    // We do not always have guarded properties. If the constructor is empty and the subsequent code doesn't load or store any of
    // the constructed object's properties, or if all inline caches are empty then this ctor cache doesn't guard any properties.
    if (ctorCache->GetGuardedPropOps() == nullptr)
    {
        return false;
    }

    bool linked = false;
    Js::EntryPointInfo* entryPointInfo = this->m_func->m_workItem->GetEntryPoint();

    if (entryPointInfo->HasSharedPropertyGuards())
    {
        linked = LinkGuardToGuardedProperties(entryPointInfo, ctorCache->GetGuardedPropOps(), [=](Js::PropertyId propertyId)
        {
            if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
            {
                wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(L"ObjTypeSpec: function %s(%s) registered ctor cache 0x%p with value 0x%p for property %s (%u).\n",
                    this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
                    ctorCache->runtimeCache, ctorCache->type, GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId);
                Output::Flush();
            }

            // Register the cache under this property ID so invalidation can find it.
            this->m_func->EnsureCtorCachesByPropertyId();
            this->m_func->LinkCtorCacheToPropertyId(propertyId, ctorCache);
        });
    }
    return linked;
}
  6101. template<typename LinkFunc>
  6102. bool
  6103. Lowerer::LinkGuardToGuardedProperties(Js::EntryPointInfo* entryPointInfo, const BVSparse<JitArenaAllocator>* guardedPropOps, LinkFunc link)
  6104. {
  6105. Assert(entryPointInfo != nullptr);
  6106. Assert(entryPointInfo->HasSharedPropertyGuards());
  6107. Assert(guardedPropOps != nullptr);
  6108. bool linked = false;
  6109. // For every entry in the bit vector, register the guard for the corresponding property ID.
  6110. FOREACH_BITSET_IN_SPARSEBV(propertyOpId, guardedPropOps)
  6111. {
  6112. Js::ObjTypeSpecFldInfo* propertyOpInfo = this->m_func->GetGlobalObjTypeSpecFldInfo(propertyOpId);
  6113. Js::PropertyId propertyId = propertyOpInfo->GetPropertyId();
  6114. // It's okay for an equivalent type check to be registered as a guard against a property becoming read-only. This transpires if, there is
  6115. // a different monomorphic type check upstream, which guarantees the actual type of the object needed for the hard-coded type transition,
  6116. // but it is later followed by a sequence of polymorphic inline caches, which do not have that type in the type set. At the beginning of
  6117. // that sequence we'll emit an equivalent type check to verify that the actual type has relevant properties on appropriate slots. Then in
  6118. // the dead store pass we'll walk upwards and encounter this check first, thus we'll drop the guarded properties accumulated thus far
  6119. // (including the one being added) on that check.
  6120. // AssertMsg(!propertyOpInfo->IsBeingAdded() || !isEquivalentTypeGuard, "Why do we have an equivalent type check protecting a property add?");
  6121. if (propertyOpInfo->IsBeingAdded() || propertyOpInfo->IsLoadedFromProto() || propertyOpInfo->HasFixedValue())
  6122. {
  6123. // Equivalent object type spec only supports fixed fields on prototypes. This is to simplify the slow type equivalence check.
  6124. // See JavascriptOperators::CheckIfTypeIsEquivalent.
  6125. Assert(!propertyOpInfo->IsPoly() || (!propertyOpInfo->HasFixedValue() || propertyOpInfo->IsLoadedFromProto() || propertyOpInfo->UsesAccessor()));
  6126. if (entryPointInfo->HasSharedPropertyGuard(propertyId))
  6127. {
  6128. link(propertyId);
  6129. linked = true;
  6130. }
  6131. else
  6132. {
  6133. #if TRUE
  6134. AssertMsg(false, "Did we fail to create a shared property guard for a guarded property?");
  6135. #else
  6136. if (PHASE_VERBOSE_TRACE(Js::ObjTypeSpecPhase, this->m_func) || PHASE_TRACE(Js::TracePropertyGuardsPhase, this->m_func))
  6137. {
  6138. if (!this->m_func->m_workItem->GetEntryPoint()->HasSharedPropertyGuard(propertyId))
  6139. {
  6140. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  6141. Output::Print(L"ObjTypeStore: function %s(%s): no shared property guard for property % (%u).\n",
  6142. this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  6143. GetScriptContext()->GetPropertyNameLocked(propertyId)->GetBuffer(), propertyId);
  6144. Output::Flush();
  6145. }
  6146. }
  6147. #endif
  6148. }
  6149. }
  6150. }
  6151. NEXT_BITSET_IN_SPARSEBV;
  6152. return linked;
  6153. }
  6154. void
  6155. Lowerer::GeneratePropertyGuardCheck(IR::Instr *insertPointInstr, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
  6156. {
  6157. Js::PropertyGuard* guard = propertySymOpnd->GetPropertyGuard();
  6158. Assert(guard != nullptr);
  6159. if (!DoLazyFixedDataBailout(this->m_func))
  6160. {
  6161. Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
  6162. IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
  6163. IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New((void*)guard->GetAddressOfValue(), TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
  6164. InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, labelBailOut, insertPointInstr);
  6165. }
  6166. else
  6167. {
  6168. this->m_func->lazyBailoutProperties.Item(propertySymOpnd->GetPropertyId());
  6169. }
  6170. }
// Emits a property guard check followed by a type load of the property owner:
//   - if the guard is invalidated, bail out (the original instruction becomes BailOut);
//   - if the owner is not an object, load the static number type into dst;
//   - otherwise load the object's actual type into dst.
// Returns the instruction preceding the original, so lowering can resume there.
IR::Instr*
Lowerer::GeneratePropertyGuardCheckBailoutAndLoadType(IR::Instr *insertInstr)
{
    IR::Instr* instrPrev = insertInstr->m_prev;

    // Static number type used when the property owner turns out not to be an object.
    IR::Opnd* numberTypeOpnd = IR::AddrOpnd::New(insertInstr->m_func->GetScriptContext()->GetLibrary()->GetNumberTypeStatic(), IR::AddrOpndKindDynamicType, insertInstr->m_func);
    IR::PropertySymOpnd* propertySymOpnd = insertInstr->GetSrc1()->AsPropertySymOpnd();

    IR::LabelInstr* labelBailout = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func, true);
    IR::LabelInstr* labelContinue = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func);
    IR::LabelInstr* loadNumberTypeLabel = IR::LabelInstr::New(Js::OpCode::Label, insertInstr->m_func, true);

    // Branch to labelBailout if the guard value has been zeroed (property invalidated).
    GeneratePropertyGuardCheck(insertInstr, propertySymOpnd, labelBailout);

    IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    // Branches to loadNumberTypeLabel when the base is not an object; otherwise loads its type into dst.
    GenerateObjectTestAndTypeLoad(insertInstr, baseOpnd, insertInstr->GetDst()->AsRegOpnd(), loadNumberTypeLabel);
    insertInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelContinue, this->m_func));

    // Non-object path: dst = static number type.
    insertInstr->InsertBefore(loadNumberTypeLabel);
    this->m_lowererMD.CreateAssign(insertInstr->GetDst(), numberTypeOpnd, insertInstr);
    insertInstr->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelContinue, this->m_func));

    // Failure path: the original instruction is converted into the bailout.
    insertInstr->InsertBefore(labelBailout);
    insertInstr->InsertAfter(labelContinue);
    insertInstr->FreeSrc1();
    insertInstr->m_opcode = Js::OpCode::BailOut;
    this->GenerateBailOut(insertInstr);

    return instrPrev;
}
// Emits a check that the prototype chain contains only writable data properties, so a
// new property can safely be added. Fast path: test the flag bit on the prototype's
// type. Slow path: call a helper; if it reports a non-writable property, bail out.
void
Lowerer::GenerateNonWritablePropertyCheck(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, IR::LabelInstr *labelBailOut)
{
    IR::Opnd *opnd;
    IR::Instr *instr;

    // Generate a check for non-writable properties, on the model of the work done by PatchPutValue etc.
    // Inline the check on the bit in the prototype object's type. If that check fails, call the helper.
    // If the helper finds a non-writable property, bail out, as we're counting on being able to add the property.
    Js::Type *typeWithoutProperty = propertySymOpnd->GetInitialType();
    Assert(typeWithoutProperty);
    Js::RecyclableObject *protoObject = typeWithoutProperty->GetPrototype();
    Assert(protoObject);

    // s1 = MOV [proto->type].ptr
    IR::RegOpnd *typeOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    opnd = IR::MemRefOpnd::New((char*)protoObject + Js::RecyclableObject::GetOffsetOfType(), TyMachReg,
        this->m_func, IR::AddrOpndKindDynamicObjectTypeRef);
    m_lowererMD.CreateAssign(typeOpnd, opnd, instrInsert);

    // TEST [s1->areThisAndPrototypesEnsuredToHaveOnlyWritableDataProperties].u8, 1
    // JNE $continue
    IR::LabelInstr *labelContinue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    opnd = IR::IndirOpnd::New(typeOpnd, (int32)Js::Type::OffsetOfWritablePropertiesFlag(), TyUint8, this->m_func);
    InsertTestBranch(opnd, IR::IntConstOpnd::New(1, TyUint8, this->m_func), Js::OpCode::BrNeq_A, labelContinue, instrInsert);

    // $Lhelper:
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    instrInsert->InsertBefore(labelHelper);

    // s2 = CALL DoProtoCheck, prototype
    opnd = IR::AddrOpnd::New(protoObject, IR::AddrOpndKindDynamicVar, this->m_func, true);
    m_lowererMD.LoadHelperArgument(instrInsert, opnd);

    opnd = IR::HelperCallOpnd::New(IR::HelperCheckProtoHasNonWritable, this->m_func);
    instr = IR::Instr::New(Js::OpCode::Call, IR::RegOpnd::New(TyUint8, this->m_func), opnd, this->m_func);
    instrInsert->InsertBefore(instr);
    opnd = instr->GetDst();
    m_lowererMD.LowerCall(instr, 0);

    // Helper result of 0 means a non-writable property was found: bail out.
    InsertTestBranch(opnd, opnd, Js::OpCode::BrEq_A, labelBailOut, instrInsert);

    // $Lcontinue:
    instrInsert->InsertBefore(labelContinue);
}
  6231. void
  6232. Lowerer::GenerateAdjustSlots(IR::Instr *instrInsert, IR::PropertySymOpnd *propertySymOpnd, Js::Type* initialType, Js::Type* finalType)
  6233. {
  6234. IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  6235. bool adjusted = this->GenerateAdjustBaseSlots(instrInsert, baseOpnd, initialType, finalType);
  6236. if (!adjusted)
  6237. {
  6238. baseOpnd->Free(m_func);
  6239. }
  6240. }
// Possibly allocates new aux-slot capacity on the base object to accommodate a type
// transition from initialType to finalType. Returns true if the AdjustSlots helper
// call was emitted (in which case baseOpnd is consumed as a helper argument), false
// if the existing capacity already suffices.
bool
Lowerer::GenerateAdjustBaseSlots(IR::Instr *instrInsert, IR::RegOpnd *baseOpnd, Js::Type* initialType, Js::Type* finalType)
{
    // Possibly allocate new slot capacity to accommodate a type transition.
    Js::DynamicType *oldType = static_cast<Js::DynamicType*>(initialType);
    Assert(oldType);
    Js::DynamicType *newType = static_cast<Js::DynamicType*>(finalType);
    Assert(newType);

    AssertMsg(Js::DynamicObject::IsTypeHandlerCompatibleForObjectHeaderInlining(oldType->GetTypeHandler(), newType->GetTypeHandler()),
        "Incompatible typeHandler transition?");

    int oldCount = oldType->GetTypeHandler()->GetSlotCapacity();
    int newCount = newType->GetTypeHandler()->GetSlotCapacity();
    Js::PropertyIndex inlineSlotCapacity = oldType->GetTypeHandler()->GetInlineSlotCapacity();
    Js::PropertyIndex newInlineSlotCapacity = newType->GetTypeHandler()->GetInlineSlotCapacity();

    if (oldCount >= newCount || newCount <= inlineSlotCapacity)
    {
        // Already have enough slot capacity. Do nothing.
        return false;
    }

    // Call AdjustSlots using the new counts. Because AdjustSlots uses the "no dispose" flavor of alloc,
    // no implicit calls are possible, and we don't need an implicit call check and bailout.

    // CALL AdjustSlots, instance, newInlineSlotCapacity, newAuxSlotCapacity
    // Helper arguments are pushed in reverse order (3rd, 2nd, 1st).

    //3rd Param
    Assert(newCount > newInlineSlotCapacity);
    const int newAuxSlotCapacity = newCount - newInlineSlotCapacity;
    m_lowererMD.LoadHelperArgument(instrInsert, IR::IntConstOpnd::New(newAuxSlotCapacity, TyInt32, this->m_func));

    //2nd Param
    m_lowererMD.LoadHelperArgument(instrInsert, IR::IntConstOpnd::New(newInlineSlotCapacity, TyUint16, this->m_func));

    //1st Param (instance)
    m_lowererMD.LoadHelperArgument(instrInsert, baseOpnd);

    //CALL HelperAdjustSlots
    IR::Opnd *opnd = IR::HelperCallOpnd::New(IR::HelperAdjustSlots, this->m_func);
    IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    instr->SetSrc1(opnd);
    instrInsert->InsertBefore(instr);
    m_lowererMD.LowerCall(instr, 0);
    return true;
}
// Emits a field store that also performs the object's type transition: grows aux
// slots if necessary, pins the final type so the GC keeps it alive, writes the new
// type pointer into the object, and then performs the direct field store.
void
Lowerer::GenerateFieldStoreWithTypeChange(IR::Instr * instrStFld, IR::PropertySymOpnd *propertySymOpnd, Js::Type* initialType, Js::Type* finalType)
{
    // Adjust instance slots, if necessary.
    this->GenerateAdjustSlots(instrStFld, propertySymOpnd, initialType, finalType);

    // We should never add properties to objects of static types.
    Assert(Js::DynamicType::Is(finalType->GetTypeId()));

    // Let's pin the final type to be sure its alive when we try to do the type transition.
    PinTypeRef(finalType, finalType, instrStFld, propertySymOpnd->m_sym->AsPropertySym()->m_propertyId);
    IR::Opnd *finalTypeOpnd = IR::AddrOpnd::New(finalType, IR::AddrOpndKindDynamicType, instrStFld->m_func, true);

    // Set the new type.
    IR::RegOpnd *baseOpnd = propertySymOpnd->CreatePropertyOwnerOpnd(instrStFld->m_func);
    IR::Opnd *opnd = IR::IndirOpnd::New(baseOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrStFld->m_func);
    this->m_lowererMD.CreateAssign(opnd, finalTypeOpnd, instrStFld);

    // Now do the store.
    GenerateDirectFieldStore(instrStFld, propertySymOpnd);
}
// Lowers an add-property store whose final type is cached: checks the initial type,
// performs the type transition plus the store, and routes type-check failure to a
// bailout. Treats a sequence of add-property stores as a single type transition.
// Always returns true (the store was fully lowered here).
bool
Lowerer::GenerateStFldWithCachedFinalType(IR::Instr * instrStFld, IR::PropertySymOpnd *propertySymOpnd)
{
    // This function tries to treat a sequence of add-property stores as a single type transition.
    Assert(propertySymOpnd == instrStFld->GetDst()->AsPropertySymOpnd());
    Assert(propertySymOpnd->IsMonoObjTypeSpecCandidate());
    Assert(propertySymOpnd->HasFinalType());
    Assert(propertySymOpnd->HasInitialType());

    IR::Instr *instr;
    IR::LabelInstr *labelBailOut = nullptr;

    AssertMsg(!propertySymOpnd->IsTypeChecked(), "Why are we doing a type transition when we have the type we want?");

    // If the initial type must be checked here, do it.
    Assert(instrStFld->HasBailOutInfo());
    labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    GenerateCachedTypeWithoutPropertyCheck(instrStFld, propertySymOpnd, nullptr/*typeOpnd*/, labelBailOut);

    // Do the type transition.
    GenerateFieldStoreWithTypeChange(instrStFld, propertySymOpnd, propertySymOpnd->GetInitialType(), propertySymOpnd->GetFinalType());
    instrStFld->FreeSrc1();
    instrStFld->FreeDst();

    // Insert the bailout and let the main path branch around it.
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelDone, this->m_func);
    instrStFld->InsertBefore(instr);

    // NOTE(review): the Assert above requires HasBailOutInfo(), so the else branch below
    // looks unreachable in checked builds; presumably kept for robustness — confirm.
    if (instrStFld->HasBailOutInfo())
    {
        Assert(labelBailOut != nullptr);
        instrStFld->InsertBefore(labelBailOut);
        instrStFld->InsertAfter(labelDone);
        instrStFld->m_opcode = Js::OpCode::BailOut;
        this->GenerateBailOut(instrStFld);
    }
    else
    {
        instrStFld->InsertAfter(labelDone);
        instrStFld->Remove();
    }

    return true;
}
  6334. ///----------------------------------------------------------------------------
  6335. ///
  6336. /// Lowerer::LowerScopedStFld
  6337. ///
  6338. ///----------------------------------------------------------------------------
// Lowers a scoped field store to a helper call. Helper arguments are pushed in
// reverse order: [flags,] [script context (only without inline cache),] default
// instance, value, property sym, and — when using the inline cache — cache index,
// runtime inline cache, and function body. Returns the instruction preceding the
// store so lowering can resume there.
IR::Instr *
Lowerer::LowerScopedStFld(IR::Instr * stFldInstr, IR::JnHelperMethod helperMethod, bool withInlineCache,
    bool withPropertyOperationFlags, Js::PropertyOperationFlags flags)
{
    IR::Instr *instrPrev = stFldInstr->m_prev;

    if (withPropertyOperationFlags)
    {
        m_lowererMD.LoadHelperArgument(stFldInstr,
            IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    }

    if(!withInlineCache)
    {
        LoadScriptContext(stFldInstr);
    }

    // Pass the default instance
    IR::Opnd *src = stFldInstr->UnlinkSrc2();
    m_lowererMD.LoadHelperArgument(stFldInstr, src);

    // Pass the value to store
    src = stFldInstr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(stFldInstr, src);

    // Pass the property sym to store to
    IR::Opnd *dst = stFldInstr->UnlinkDst();
    AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected property sym as dst of field store");
    this->LoadPropertySymAsArgument(stFldInstr, dst);

    if (withInlineCache)
    {
        AssertMsg(dst->AsSymOpnd()->IsPropertySymOpnd(), "Need property sym operand to find the inline cache");

        m_lowererMD.LoadHelperArgument(
            stFldInstr,
            IR::Opnd::CreateInlineCacheIndexOpnd(dst->AsPropertySymOpnd()->m_inlineCacheIndex, m_func));

        // Not using the polymorphic inline cache because the fast path only uses the monomorphic inline cache
        this->m_lowererMD.LoadHelperArgument(stFldInstr, this->LoadRuntimeInlineCacheOpnd(stFldInstr, dst->AsPropertySymOpnd()));

        m_lowererMD.LoadHelperArgument(stFldInstr, LoadFunctionBodyOpnd(stFldInstr));
    }

    // Convert the store instruction itself into the helper call.
    m_lowererMD.ChangeToHelperCall(stFldInstr, helperMethod);

    return instrPrev;
}
  6376. ///----------------------------------------------------------------------------
  6377. ///
  6378. /// Lowerer::LowerLoadVar
  6379. ///
  6380. ///----------------------------------------------------------------------------
// Lowers a var load by attaching the given operand as the source and converting the
// instruction into a plain machine assign.
IR::Instr *
Lowerer::LowerLoadVar(IR::Instr *instr, IR::Opnd *opnd)
{
    instr->SetSrc1(opnd);
    return m_lowererMD.ChangeToAssign(instr);
}
// Passes the address of a stack temp (for a helper that writes a temp-number result)
// as a helper argument. Returns the address-load instruction that was inserted.
IR::Instr *
Lowerer::LoadHelperTemp(IR::Instr * instr, IR::Instr * instrInsert)
{
    IR::Opnd *tempOpnd;
    IR::Opnd *dst = instr->GetDst();
    AssertMsg(dst != nullptr, "Always expect a dst for these.");
    AssertMsg(instr->dstIsTempNumber, "Should only be loading temps here");
    Assert(dst->IsRegOpnd());

    // Get the stack sym that will hold the temp number for this dst.
    StackSym * tempNumberSym = this->GetTempNumberSym(dst, instr->dstIsTempNumberTransferred);
    IR::Instr *load = this->m_lowererMD.LoadStackAddress(tempNumberSym);
    instrInsert->InsertBefore(load);
    tempOpnd = load->GetDst();
    m_lowererMD.LoadHelperArgument(instrInsert, tempOpnd);
    return load;
}
// Lowers a "load argument count" instruction. For inlinees the count is a JIT-time
// constant; for generators it is read from the call info; otherwise it is loaded
// from the frame by the machine-dependent lowerer.
void
Lowerer::LoadArgumentCount(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(!instr->GetSrc1());
    Assert(!instr->GetSrc2());

    if(instr->m_func->IsInlinee())
    {
        // Argument count including 'this'
        instr->SetSrc1(IR::IntConstOpnd::New(instr->m_func->actualCount, TyUint32, instr->m_func, true));
        LowererMD::ChangeToAssign(instr);
    }
    else if (instr->m_func->GetJnFunction()->IsGenerator())
    {
        // Generators read the count out of the call info slot.
        IR::SymOpnd* symOpnd = LoadCallInfo(instr);
        instr->SetSrc1(symOpnd);
        LowererMD::ChangeToAssign(instr);
    }
    else
    {
        m_lowererMD.LoadArgumentCount(instr);
    }
}
// Lowers a "load pointer to stack args" instruction. For inlinees, computes the
// address of the first argument after 'this' from the inlinee's argv slot; otherwise
// defers to the machine-dependent lowerer.
void
Lowerer::LoadStackArgPtr(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(!instr->GetSrc1());
    Assert(!instr->GetSrc2());

    if(instr->m_func->IsInlinee())
    {
        // Address of argument after 'this'
        const auto firstRealArgStackSym = instr->m_func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
        // Skip 'this' by advancing one machine pointer into the argv area.
        this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr);
        instr->SetSrc1(IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, instr->m_func));
        LowererMD::ChangeToLea(instr);
    }
    else
    {
        m_lowererMD.LoadStackArgPtr(instr);
    }
}
// Lowers a "load arguments object from frame" instruction. For inlinees the
// arguments object lives in the inlinee meta-arg slot; otherwise it is loaded from
// the frame by the machine-dependent lowerer.
void
Lowerer::LoadArgumentsFromFrame(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->GetDst());
    Assert(!instr->GetSrc1());
    Assert(!instr->GetSrc2());

    if(instr->m_func->IsInlinee())
    {
        // Use the inline object meta arg slot for the arguments object
        instr->SetSrc1(instr->m_func->GetInlineeArgumentsObjectSlotOpnd());
        LowererMD::ChangeToAssign(instr);
    }
    else
    {
        m_lowererMD.LoadArgumentsFromFrame(instr);
    }
}
  6464. IR::Instr *
  6465. Lowerer::LowerUnaryHelper(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::Opnd* opndBailoutArg)
  6466. {
  6467. IR::Instr *instrPrev;
  6468. IR::Opnd *src1 = instr->UnlinkSrc1();
  6469. instrPrev = m_lowererMD.LoadHelperArgument(instr, src1);
  6470. m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, opndBailoutArg);
  6471. return instrPrev;
  6472. }
  6473. // helper takes memory context as second argument
  6474. IR::Instr *
  6475. Lowerer::LowerUnaryHelperMem(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::Opnd* opndBailoutArg)
  6476. {
  6477. IR::Instr *instrPrev;
  6478. instrPrev = LoadScriptContext(instr);
  6479. return this->LowerUnaryHelper(instr, helperMethod, opndBailoutArg);
  6480. }
// Lowers a unary helper call whose helper additionally takes the function body
// (pushed first, so it becomes the last helper argument), then the script context.
IR::Instr *
Lowerer::LowerUnaryHelperMemWithFuncBody(IR::Instr *instr, IR::JnHelperMethod helperMethod)
{
    m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionBodyOpnd(instr));
    return this->LowerUnaryHelperMem(instr, helperMethod);
}
// Lowers a binary helper call whose helper additionally takes the function body
// (pushed first, so it becomes the last helper argument), then the script context.
IR::Instr *
Lowerer::LowerBinaryHelperMemWithFuncBody(IR::Instr *instr, IR::JnHelperMethod helperMethod)
{
    AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Int2, "Expected a binary instruction...");

    m_lowererMD.LoadHelperArgument(instr, this->LoadFunctionBodyOpnd(instr));
    return this->LowerBinaryHelperMem(instr, helperMethod);
}
// Lowers a unary helper call that takes a temp-number slot argument: passes the
// address of a stack temp when the destination is a temp number, or 0 otherwise.
// Returns the first instruction emitted.
IR::Instr *
Lowerer::LowerUnaryHelperMemWithTemp(IR::Instr *instr, IR::JnHelperMethod helperMethod)
{
    AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2, "Expected a unary instruction...");

    IR::Instr * instrFirst;
    IR::Opnd * tempOpnd;

    if (instr->dstIsTempNumber)
    {
        instrFirst = this->LoadHelperTemp(instr, instr);
    }
    else
    {
        // No temp needed: pass 0 as the temp-slot argument.
        tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
        instrFirst = m_lowererMD.LoadHelperArgument(instr, tempOpnd);
    }

    this->LowerUnaryHelperMem(instr, helperMethod);

    return instrFirst;
}
  6512. IR::Instr *
  6513. Lowerer::LowerUnaryHelperMemWithTemp2(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::JnHelperMethod helperMethodWithTemp)
  6514. {
  6515. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2, "Expected a unary instruction...");
  6516. if (instr->dstIsTempNumber)
  6517. {
  6518. IR::Instr * instrFirst = this->LoadHelperTemp(instr, instr);
  6519. this->LowerUnaryHelperMem(instr, helperMethodWithTemp);
  6520. return instrFirst;
  6521. }
  6522. return this->LowerUnaryHelperMem(instr, helperMethod);
  6523. }
// Lowers a unary helper call that takes a pointer to a shared stack bool as an extra
// argument. Optionally forwards the bool operand for use by the bailout sequence.
IR::Instr *
Lowerer::LowerUnaryHelperMemWithBoolReference(IR::Instr *instr, IR::JnHelperMethod helperMethod, bool useBoolForBailout)
{
    // Lazily allocate one shared stack slot for bool out-params on this function.
    if (!this->m_func->tempSymBool)
    {
        this->m_func->tempSymBool = StackSym::New(TyUint8, this->m_func);
        this->m_func->StackAllocate(this->m_func->tempSymBool, TySize[TyUint8]);
    }
    IR::SymOpnd * boolOpnd = IR::SymOpnd::New(this->m_func->tempSymBool, TyUint8, this->m_func);
    IR::RegOpnd * boolRefOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    // Pass the address of the bool slot to the helper.
    InsertLea(boolRefOpnd, boolOpnd, instr);
    m_lowererMD.LoadHelperArgument(instr, boolRefOpnd);

    return this->LowerUnaryHelperMem(instr, helperMethod, useBoolForBailout ? boolOpnd : nullptr);
}
  6538. ///----------------------------------------------------------------------------
  6539. ///
  6540. /// Lowerer::LowerBinaryHelper
  6541. ///
  6542. ///----------------------------------------------------------------------------
  6543. IR::Instr *
  6544. Lowerer::LowerBinaryHelper(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6545. {
  6546. // The only case where this would still be null when we return is when
  6547. // helperMethod == HelperOP_CmSrEq_EmptyString; in which case we ignore
  6548. // instrPrev.
  6549. IR::Instr *instrPrev = nullptr;
  6550. AssertMsg((Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Unsigned1 && !instr->GetDst()) ||
  6551. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3 ||
  6552. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2Int1 ||
  6553. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Int2 ||
  6554. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::ElementU ||
  6555. instr->m_opcode == Js::OpCode::InvalCachedScope, "Expected a binary instruction...");
  6556. IR::Opnd *src2 = instr->UnlinkSrc2();
  6557. if (helperMethod != IR::HelperOP_CmSrEq_EmptyString)
  6558. instrPrev = m_lowererMD.LoadHelperArgument(instr, src2);
  6559. IR::Opnd *src1 = instr->UnlinkSrc1();
  6560. m_lowererMD.LoadHelperArgument(instr, src1);
  6561. m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  6562. return instrPrev;
  6563. }
  6564. // helper takes memory context as third argument
  6565. IR::Instr *
  6566. Lowerer::LowerBinaryHelperMem(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6567. {
  6568. IR::Instr *instrPrev;
  6569. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3 ||
  6570. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg2Int1 ||
  6571. Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg1Int2, "Expected a binary instruction...");
  6572. instrPrev = LoadScriptContext(instr);
  6573. return this->LowerBinaryHelper(instr, helperMethod);
  6574. }
  6575. IR::Instr *
  6576. Lowerer::LowerBinaryHelperMemWithTemp(IR::Instr *instr, IR::JnHelperMethod helperMethod)
  6577. {
  6578. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
  6579. IR::Instr * instrFirst;
  6580. IR::Opnd * tempOpnd;
  6581. if (instr->dstIsTempNumber)
  6582. {
  6583. instrFirst = this->LoadHelperTemp(instr, instr);
  6584. }
  6585. else
  6586. {
  6587. tempOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func);
  6588. instrFirst = m_lowererMD.LoadHelperArgument(instr, tempOpnd);
  6589. }
  6590. this->LowerBinaryHelperMem(instr, helperMethod);
  6591. return instrFirst;
  6592. }
  6593. IR::Instr *
  6594. Lowerer::LowerBinaryHelperMemWithTemp2(
  6595. IR::Instr *instr,
  6596. IR::JnHelperMethod helperMethod,
  6597. IR::JnHelperMethod helperMethodWithTemp
  6598. )
  6599. {
  6600. AssertMsg(Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::Reg3, "Expected a binary instruction...");
  6601. if (instr->dstIsTempNumber && instr->GetDst() && instr->GetDst()->GetValueType().HasBeenNumber())
  6602. {
  6603. IR::Instr * instrFirst = this->LoadHelperTemp(instr, instr);
  6604. this->LowerBinaryHelperMem(instr, helperMethodWithTemp);
  6605. return instrFirst;
  6606. }
  6607. return this->LowerBinaryHelperMem(instr, helperMethod);
  6608. }
// Fast path for "left = left + right" where the left string is dead after the add.
// When left is a CompoundString that owns its last block and right is a finalized
// single-character string with room available, append right's character directly
// into left's last block buffer; every guard branches to $helper, which falls back
// to the HelperOp_AddLeftDead lowering.
IR::Instr *
Lowerer::LowerAddLeftDeadForString(IR::Instr *instr)
{
    IR::Opnd * opndLeft;
    IR::Opnd * opndRight;
    opndLeft = instr->GetSrc1();
    opndRight = instr->GetSrc2();
    Assert(opndLeft && opndRight);
    bool generateFastPath = this->m_func->DoFastPaths();
    // The fast path needs reg operands that are likely strings, dst aliasing the
    // left operand (in-place append), and left distinct from right.
    if (!generateFastPath
        || !opndLeft->IsRegOpnd()
        || !opndRight->IsRegOpnd()
        || !instr->GetDst()->IsRegOpnd()
        || !opndLeft->GetValueType().IsLikelyString()
        || !opndRight->GetValueType().IsLikelyString()
        || !opndLeft->IsEqual(instr->GetDst()->AsRegOpnd())
        || opndLeft->IsEqual(opndRight))
    {
        return this->LowerBinaryHelperMemWithTemp(instr, IR::HelperOp_AddLeftDead);
    }
    IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr * labelFallThrough = instr->GetOrCreateContinueLabel(false);
    // All fast-path instructions are inserted before $helper; instr itself remains
    // after the label and becomes the helper call at the bottom of this function.
    IR::LabelInstr *insertBeforeInstr = labelHelper;
    instr->InsertBefore(labelHelper);
    // left must be an object pointer, not a tagged value.
    if (!opndLeft->IsNotTaggedValue())
    {
        this->m_lowererMD.GenerateObjectTest(opndLeft->AsRegOpnd(), insertBeforeInstr, labelHelper);
    }
    // left's vtable must be exactly CompoundString.
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), 0, TyMachPtr, m_func),
        this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertBeforeInstr);
    // right must be a string.
    GenerateStringTest(opndRight->AsRegOpnd(), insertBeforeInstr, labelHelper);
    // left->m_charLength <= JavascriptString::MaxCharLength
    IR::IndirOpnd *indirLeftCharLengthOpnd = IR::IndirOpnd::New(opndLeft->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, m_func);
    IR::RegOpnd *regLeftCharLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
    InsertMove(regLeftCharLengthOpnd, indirLeftCharLengthOpnd, insertBeforeInstr);
    InsertCompareBranch(
        regLeftCharLengthOpnd,
        IR::IntConstOpnd::New(Js::JavascriptString::MaxCharLength, TyUint32, m_func),
        Js::OpCode::BrGt_A,
        labelHelper,
        insertBeforeInstr);
    // left->m_pszValue == NULL (!left->IsFinalized())
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func),
        IR::AddrOpnd::NewNull(m_func),
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertBeforeInstr);
    // right->m_pszValue != NULL (right->IsFinalized())
    InsertCompareBranch(
        IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func),
        IR::AddrOpnd::NewNull(m_func),
        Js::OpCode::BrEq_A,
        labelHelper,
        insertBeforeInstr);
    // if ownsLastBlock != 0 — left may write into its last block in place
    InsertCompareBranch(
        IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfOwnsLastBlock(), TyUint8, m_func),
        IR::IntConstOpnd::New(0, TyUint8, m_func),
        Js::OpCode::BrEq_A,
        labelHelper,
        insertBeforeInstr);
    // if right->m_charLength == 1 — only single-character appends are fast-pathed
    InsertCompareBranch(IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_charLength), TyUint32, m_func),
        IR::IntConstOpnd::New(1, TyUint32, m_func),
        Js::OpCode::BrNeq_A, labelHelper, insertBeforeInstr);
    // if left->m_directCharLength == -1
    InsertCompareBranch(IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfDirectCharLength(), TyUint32, m_func),
        IR::IntConstOpnd::New(UINT32_MAX, TyUint32, m_func),
        Js::OpCode::BrNeq_A, labelHelper, insertBeforeInstr);
    // if lastBlockInfo.charLength < lastBlockInfo.charCapacity — room for one more char
    IR::IndirOpnd *indirCharLength = IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo()+ (int32)Js::CompoundString::GetOffsetOfLastBlockInfoCharLength(), TyMachPtr, m_func);
    IR::RegOpnd *charLengthOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
    InsertMove(charLengthOpnd, indirCharLength, insertBeforeInstr);
    InsertCompareBranch(charLengthOpnd, IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoCharCapacity(), TyMachPtr, m_func), Js::OpCode::BrGe_A, labelHelper, insertBeforeInstr);
    // load c = right->m_pszValue[0]
    IR::RegOpnd *pszValue0Opnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::IndirOpnd *indirRightPszOpnd = IR::IndirOpnd::New(opndRight->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func);
    InsertMove(pszValue0Opnd, indirRightPszOpnd, insertBeforeInstr);
    IR::RegOpnd *charResultOpnd = IR::RegOpnd::New(TyUint16, this->m_func);
    InsertMove(charResultOpnd, IR::IndirOpnd::New(pszValue0Opnd, 0, TyUint16, this->m_func), insertBeforeInstr);
    // lastBlockInfo.buffer[blockCharLength] = c;  (index scaled by sizeof(wchar_t))
    IR::RegOpnd *baseOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    InsertMove(baseOpnd, IR::IndirOpnd::New(opndLeft->AsRegOpnd(), (int32)Js::CompoundString::GetOffsetOfLastBlockInfo() + (int32)Js::CompoundString::GetOffsetOfLastBlockInfoBuffer(), TyMachPtr, m_func), insertBeforeInstr);
    IR::IndirOpnd *indirBufferToStore = IR::IndirOpnd::New(baseOpnd, charLengthOpnd, (byte)Math::Log2(sizeof(wchar_t)), TyUint16, m_func);
    InsertMove(indirBufferToStore, charResultOpnd, insertBeforeInstr);
    // left->m_charLength++
    InsertAdd(false, indirLeftCharLengthOpnd, regLeftCharLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    // lastBlockInfo.charLength++
    InsertAdd(false, indirCharLength, indirCharLength, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    // Fast path done — skip over the helper-call lowering.
    InsertBranch(Js::OpCode::Br, labelFallThrough, insertBeforeInstr);
    // $helper: lower the original instruction to the left-dead add helper.
    return this->LowerBinaryHelperMemWithTemp(instr, IR::HelperOp_AddLeftDead);
}
  6706. IR::Instr *
  6707. Lowerer::LowerBinaryHelperMemWithTemp3(IR::Instr *instr, IR::JnHelperMethod helperMethod, IR::JnHelperMethod helperMethodWithTemp, IR::JnHelperMethod helperMethodLeftDead)
  6708. {
  6709. IR::Opnd *src1 = instr->GetSrc1();
  6710. if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_isTempLastUse && !src1->GetValueType().IsNotString())
  6711. {
  6712. Assert(helperMethodLeftDead == IR::HelperOp_AddLeftDead);
  6713. return LowerAddLeftDeadForString(instr);
  6714. }
  6715. else
  6716. {
  6717. return this->LowerBinaryHelperMemWithTemp2(instr, helperMethod, helperMethodWithTemp);
  6718. }
  6719. }
  6720. StackSym *
  6721. Lowerer::GetTempNumberSym(IR::Opnd * opnd, bool isTempTransferred)
  6722. {
  6723. AssertMsg(opnd->IsRegOpnd(), "Expected regOpnd");
  6724. if (isTempTransferred)
  6725. {
  6726. StackSym * tempNumberSym = StackSym::New(TyMisc, m_func);
  6727. this->m_func->StackAllocate(tempNumberSym, sizeof(Js::JavascriptNumber));
  6728. return tempNumberSym;
  6729. }
  6730. StackSym * stackSym = opnd->AsRegOpnd()->m_sym;
  6731. StackSym * tempNumberSym = stackSym->m_tempNumberSym;
  6732. if (tempNumberSym == nullptr)
  6733. {
  6734. tempNumberSym = StackSym::New(TyMisc, m_func);
  6735. this->m_func->StackAllocate(tempNumberSym, sizeof(Js::JavascriptNumber));
  6736. stackSym->m_tempNumberSym = tempNumberSym;
  6737. }
  6738. return tempNumberSym;
  6739. }
  6740. void Lowerer::LowerProfiledLdElemI(IR::JitProfilingInstr *const instr)
  6741. {
  6742. Assert(instr);
  6743. /*
  6744. Var ProfilingHelpers::ProfiledLdElem(
  6745. const Var base,
  6746. const Var varIndex,
  6747. FunctionBody *const functionBody,
  6748. const ProfileId profileId)
  6749. */
  6750. Func *const func = instr->m_func;
  6751. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, func));
  6752. m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(func));
  6753. IR::IndirOpnd *const indir = instr->UnlinkSrc1()->AsIndirOpnd();
  6754. IR::Opnd *const indexOpnd = indir->UnlinkIndexOpnd();
  6755. Assert(indexOpnd || indir->GetOffset() >= 0 && !Js::TaggedInt::IsOverflow(indir->GetOffset()));
  6756. m_lowererMD.LoadHelperArgument(
  6757. instr,
  6758. indexOpnd
  6759. ? static_cast<IR::Opnd *>(indexOpnd)
  6760. : IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(indir->GetOffset()), IR::AddrOpndKindDynamicVar, func));
  6761. m_lowererMD.LoadHelperArgument(instr, indir->UnlinkBaseOpnd());
  6762. indir->Free(func);
  6763. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfiledLdElem, func));
  6764. m_lowererMD.LowerCall(instr, 0);
  6765. }
  6766. void Lowerer::LowerProfiledStElemI(IR::JitProfilingInstr *const instr, const Js::PropertyOperationFlags flags)
  6767. {
  6768. Assert(instr);
  6769. /*
  6770. void ProfilingHelpers::ProfiledStElem(
  6771. const Var base,
  6772. const Var varIndex,
  6773. const Var value,
  6774. FunctionBody *const functionBody,
  6775. const ProfileId profileId,
  6776. const PropertyOperationFlags flags)
  6777. */
  6778. Func *const func = instr->m_func;
  6779. IR::JnHelperMethod helper;
  6780. if(flags == Js::PropertyOperation_None)
  6781. {
  6782. helper = IR::HelperProfiledStElem_DefaultFlags;
  6783. }
  6784. else
  6785. {
  6786. helper = IR::HelperProfiledStElem;
  6787. m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New(flags, TyInt32, func, true));
  6788. }
  6789. m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, func));
  6790. m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(func));
  6791. m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
  6792. IR::IndirOpnd *const indir = instr->UnlinkDst()->AsIndirOpnd();
  6793. IR::Opnd *const indexOpnd = indir->UnlinkIndexOpnd();
  6794. Assert(indexOpnd || indir->GetOffset() >= 0 && !Js::TaggedInt::IsOverflow(indir->GetOffset()));
  6795. m_lowererMD.LoadHelperArgument(
  6796. instr,
  6797. indexOpnd
  6798. ? static_cast<IR::Opnd *>(indexOpnd)
  6799. : IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(indir->GetOffset()), IR::AddrOpndKindDynamicVar, func));
  6800. m_lowererMD.LoadHelperArgument(instr, indir->UnlinkBaseOpnd());
  6801. indir->Free(func);
  6802. instr->SetSrc1(IR::HelperCallOpnd::New(helper, func));
  6803. m_lowererMD.LowerCall(instr, 0);
  6804. }
///----------------------------------------------------------------------------
///
/// Lowerer::LowerStElemI
///
///     Lowers a store-element instruction into a helper call, after handling
///     profiling, stack-number boxing (non-FLOATVAR builds), and the element
///     bailout kinds attached to the instruction. The helper is chosen from
///     the index operand's type and the source value's type.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerStElemI(IR::Instr * instr, Js::PropertyOperationFlags flags, bool isHelper, IR::JnHelperMethod helperMethod)
{
    IR::Instr *instrPrev = instr->m_prev;
    // Profiled stores go through the dedicated profiling helper instead.
    if (instr->IsJitProfilingInstr())
    {
        Assert(!isHelper);
        LowerProfiledStElemI(instr->AsJitProfilingInstr(), flags);
        return instrPrev;
    }
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *newDst = nullptr;
    IRType srcType = src1->GetType();
    AssertMsg(dst->IsIndirOpnd(), "Expected indirOpnd on StElementI");
#if !FLOATVAR
    if (dst->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyOptimizedTypedArray() && src1->IsRegOpnd())
    {
        // We allow the source of typedArray StElem to be marked as temp, since we just need the value,
        // however if the array turns out to be a non-typed array, or the index isn't valid (the value is then stored as a property)
        // the temp needs to be boxed if it is a float. The BoxStackNumber helper will box JavascriptNumbers
        // which are on the stack.
        // regVar = BoxStackNumber(src1, scriptContext)
        IR::Instr *newInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
        IR::RegOpnd *regVar = IR::RegOpnd::New(TyVar, this->m_func);
        newInstr->SetDst(regVar);
        newInstr->SetSrc1(src1);
        instr->InsertBefore(newInstr);
        LowerUnaryHelperMem(newInstr, IR::HelperBoxStackNumber);
        // MOV src1, regVar
        newInstr = IR::Instr::New(Js::OpCode::Ld_A, src1, regVar, this->m_func);
        instr->InsertBefore(m_lowererMD.ChangeToAssign(newInstr));
    }
#endif
    // Peel off each element-store bailout kind attached to the instruction; each
    // handler clears its bit so the remaining kind (if any) must match.
    if(instr->HasBailOutInfo())
    {
        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
        {
            Assert(!(bailOutKind & IR::BailOutOnMissingValue));
            LowerBailOnInvalidatedArrayHeadSegment(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayHeadSegment;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        else if(bailOutKind & IR::BailOutOnMissingValue)
        {
            LowerBailOnCreatedMissingValue(instr, isHelper);
            bailOutKind ^= IR::BailOutOnMissingValue;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if(bailOutKind & IR::BailOutOnInvalidatedArrayLength)
        {
            LowerBailOnInvalidatedArrayLength(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayLength;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if(bailOutKind & IR::BailOutConvertedNativeArray)
        {
            // The helper's return value (placed in newDst below) is tested after
            // the call: zero skips the converted-native-array bailout.
            IR::LabelInstr *labelSkipBailOut = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
            instr->InsertAfter(labelSkipBailOut);
            LowerOneBailOutKind(instr, IR::BailOutConvertedNativeArray, isHelper);
            newDst = IR::RegOpnd::New(TyMachReg, m_func);
            InsertTestBranch(newDst, newDst, Js::OpCode::BrEq_A, labelSkipBailOut, instr->m_next);
        }
    }
    instr->UnlinkDst();
    instr->UnlinkSrc1();
    IR::Opnd *indexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();
    Assert(
        helperMethod == IR::HelperOP_InitElemGetter ||
        helperMethod == IR::HelperOP_InitElemSetter ||
        helperMethod == IR::HelperOP_InitComputedProperty ||
        helperMethod == IR::HelperOp_SetElementI ||
        helperMethod == IR::HelperOp_InitClassMemberComputedName ||
        helperMethod == IR::HelperOp_InitClassMemberGetComputedName ||
        helperMethod == IR::HelperOp_InitClassMemberSetComputedName
        );
    // Pick the helper variant from the index type (Int32/UInt32/Var) crossed with
    // the source value type (Var/Int32/Float64).
    if (indexOpnd && indexOpnd->GetType() != TyVar)
    {
        if (indexOpnd->GetType() == TyInt32)
        {
            helperMethod =
                srcType == TyVar ? IR::HelperOp_SetElementI_Int32 :
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI_Int32 :
                IR::HelperOp_SetNativeFloatElementI_Int32;
        }
        else if (indexOpnd->GetType() == TyUint32)
        {
            helperMethod =
                srcType == TyVar ? IR::HelperOp_SetElementI_UInt32 :
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI_UInt32 :
                IR::HelperOp_SetNativeFloatElementI_UInt32;
        }
        else
        {
            Assert(FALSE);
        }
    }
    else
    {
        if (indexOpnd == nullptr)
        {
            // No index; the offset identifies the element.
            IntConstType offset = (IntConstType)dst->AsIndirOpnd()->GetOffset();
            indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
        }
        if (srcType != TyVar)
        {
            helperMethod =
                srcType == TyInt32 ? IR::HelperOp_SetNativeIntElementI : IR::HelperOp_SetNativeFloatElementI;
        }
    }
    // Float64 values go through the double-argument path; all other types are
    // passed as a regular helper argument below.
    if (srcType == TyFloat64)
    {
        m_lowererMD.LoadDoubleHelperArgument(instr, src1);
    }
    m_lowererMD.LoadHelperArgument(instr,
        IR::IntConstOpnd::New(static_cast<IntConstType>(flags), IRType::TyInt32, m_func, true));
    LoadScriptContext(instr);
    if (srcType != TyFloat64)
    {
        m_lowererMD.LoadHelperArgument(instr, src1);
    }
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    IR::Opnd *baseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    dst->Free(this->m_func);
    // newDst (if set above) receives the helper's result for the bailout test.
    if (newDst)
    {
        instr->SetDst(newDst);
    }
    m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, nullptr, nullptr, isHelper);
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerLdElemI
///
///     Lowers a load-element instruction into a helper call. The helper variant
///     is selected from the index operand's type, the destination's type, and
///     (outside loop bodies) the base array's profiled element kind.
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerLdElemI(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool isHelper)
{
    IR::Instr *instrPrev = instr->m_prev;
    // Profiled loads go through the dedicated profiling helper instead.
    if(instr->IsJitProfilingInstr())
    {
        Assert(helperMethod == IR::HelperOp_GetElementI);
        Assert(!isHelper);
        LowerProfiledLdElemI(instr->AsJitProfilingInstr());
        return instrPrev;
    }
    if (!isHelper && instr->DoStackArgsOpt(this->m_func))
    {
        IR::LabelInstr * labelLdElem = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
        // Pass in null for labelFallThru to only generate the LdHeapArgument call
        GenerateFastArgumentsLdElemI(instr, labelLdElem, nullptr);
        instr->InsertBefore(labelLdElem);
    }
    IR::Opnd *src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd");
    IR::IndirOpnd *indirOpnd = src1->AsIndirOpnd();
    bool loadScriptContext = true;
    IRType dstType = instr->GetDst()->GetType();
    IR::Opnd *indexOpnd = indirOpnd->UnlinkIndexOpnd();
    // Select the helper variant from the index type (Int32/UInt32) crossed with
    // the destination type (Var/Int32/Float64).
    if (indexOpnd && indexOpnd->GetType() != TyVar)
    {
        Assert(indexOpnd->GetType() == TyUint32 || indexOpnd->GetType() == TyInt32);
        switch (helperMethod)
        {
        case IR::HelperOp_GetElementI:
            if (indexOpnd->GetType() == TyUint32)
            {
                helperMethod =
                    dstType == TyVar ? IR::HelperOp_GetElementI_UInt32 :
                    dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI_UInt32 :
                    IR::HelperOp_GetNativeFloatElementI_UInt32;
            }
            else
            {
                helperMethod =
                    dstType == TyVar ? IR::HelperOp_GetElementI_Int32 :
                    dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI_Int32 :
                    IR::HelperOp_GetNativeFloatElementI_Int32;
            }
            break;
        case IR::HelperOp_GetMethodElement:
            Assert(dstType == TyVar);
            helperMethod = indexOpnd->GetType() == TyUint32?
                IR::HelperOp_GetMethodElement_UInt32 : IR::HelperOp_GetMethodElement_Int32;
            break;
        case IR::HelperOp_TypeofElem:
            Assert(dstType == TyVar);
            helperMethod = indexOpnd->GetType() == TyUint32?
                IR::HelperOp_TypeofElem_UInt32 : IR::HelperOp_TypeofElem_Int32;
            break;
        default:
            Assert(false);
        }
    }
    else
    {
        if (indexOpnd == nullptr)
        {
            // No index; the offset identifies the element.
            IntConstType offset = (IntConstType)src1->AsIndirOpnd()->GetOffset();
            indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
        }
        if (dstType != TyVar)
        {
            // Native-element helpers don't take a script context argument.
            loadScriptContext = false;
            helperMethod =
                dstType == TyInt32 ? IR::HelperOp_GetNativeIntElementI : IR::HelperOp_GetNativeFloatElementI;
        }
    }
    // Jitted loop bodies have volatile information about values created outside the loop, so don't update array creation site
    // profile data from jitted loop bodies
    if(!m_func->IsLoopBody())
    {
        const ValueType baseValueType(indirOpnd->GetBaseOpnd()->GetValueType());
        if( baseValueType.IsLikelyObject() &&
            baseValueType.GetObjectType() == ObjectType::Array &&
            !baseValueType.HasIntElements())
        {
            // Use the "expecting" helper variants that correspond to the profiled
            // element kind (float vs var) of the base array.
            switch(helperMethod)
            {
            case IR::HelperOp_GetElementI:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_ExpectingVarArray;
                break;
            case IR::HelperOp_GetElementI_UInt32:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_UInt32_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_UInt32_ExpectingVarArray;
                break;
            case IR::HelperOp_GetElementI_Int32:
                helperMethod =
                    baseValueType.HasFloatElements()
                        ? IR::HelperOp_GetElementI_Int32_ExpectingNativeFloatArray
                        : IR::HelperOp_GetElementI_Int32_ExpectingVarArray;
                break;
            }
        }
    }
    if (loadScriptContext)
    {
        LoadScriptContext(instr);
    }
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);
    IR::Opnd *baseOpnd = indirOpnd->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);
    src1->Free(this->m_func);
    m_lowererMD.ChangeToHelperCall(instr, helperMethod, nullptr, nullptr, nullptr, isHelper);
    return instrPrev;
}
  7066. void Lowerer::LowerLdLen(IR::Instr *const instr, const bool isHelper)
  7067. {
  7068. Assert(instr);
  7069. Assert(instr->m_opcode == Js::OpCode::LdLen_A);
  7070. // LdLen has persisted to this point for the sake of pre-lower opts.
  7071. // Turn it into a LdFld of the "length" property.
  7072. // This is normally a load of the internal "length" of an Array, so it probably doesn't benefit
  7073. // from inline caching.
  7074. // Changing the opcode to LdFld is done in LowerLdFld and needs to remain that way to take into
  7075. // account ProfiledLdLen_A
  7076. IR::RegOpnd * baseOpnd = instr->UnlinkSrc1()->AsRegOpnd();
  7077. PropertySym* fieldSym = PropertySym::FindOrCreate(baseOpnd->m_sym->m_id, Js::PropertyIds::length, (uint32)-1, (uint)-1, PropertyKindData, m_func);
  7078. baseOpnd->Free(this->m_func);
  7079. instr->SetSrc1(IR::SymOpnd::New(fieldSym, TyVar, m_func));
  7080. LowerLdFld(instr, IR::HelperOp_GetProperty, IR::HelperOp_GetProperty, false, nullptr, isHelper);
  7081. }
// Lowers an asm.js typed-array-view load. If the access has a dynamic index, or a
// constant offset at or above 0x1000000, a bounds-checked helper sequence is emitted
// (see LowerAsmJsLdElemHelper); otherwise the load is emitted directly.
IR::Instr *
Lowerer::LowerLdArrViewElem(IR::Instr * instr)
{
    Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdInt8ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdUInt8ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdInt16ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdUInt16ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdInt32ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdUInt32ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdFloat32ArrViewElem ||
        instr->m_opcode == Js::OpCode::LdFloat64ArrViewElem);
    IR::Instr * instrPrev = instr->m_prev;
    IR::RegOpnd * indexOpnd = instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd();
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();
    IR::Instr * done;
    // Note the uint32 cast: a negative constant offset becomes a large unsigned
    // value and therefore also takes the bounds-checked path.
    if (indexOpnd || (uint32)src1->AsIndirOpnd()->GetOffset() >= 0x1000000)
    {
        // CMP indexOpnd, src2(arrSize)
        // JA $helper
        // JMP $load
        // $helper:
        // MOV dst, 0
        // JMP $done
        // $load:
        // MOV dst, src1([arrayBuffer + indexOpnd])
        // $done:
        Assert(!dst->IsFloat32() || src1->IsFloat32());
        Assert(!dst->IsFloat64() || src1->IsFloat64());
        done = m_lowererMD.LowerAsmJsLdElemHelper(instr);
    }
    else
    {
        // any access below 0x1000000 is safe
        instr->UnlinkDst();
        instr->UnlinkSrc1();
        if (src2)
        {
            instr->FreeSrc2();
        }
        done = instr;
    }
    // Emit the actual load before the $done point, then drop the original instr.
    InsertMove(dst, src1, done);
    instr->Remove();
    return instrPrev;
}
  7131. IR::Instr *
  7132. Lowerer::LowerMemset(IR::Instr * instr, IR::RegOpnd * helperRet)
  7133. {
  7134. IR::Opnd * dst = instr->UnlinkDst();
  7135. IR::Opnd * src1 = instr->UnlinkSrc1();
  7136. Assert(dst->IsIndirOpnd());
  7137. IR::Opnd *baseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
  7138. IR::Opnd *indexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();
  7139. IR::Opnd *sizeOpnd = instr->UnlinkSrc2();
  7140. Assert(baseOpnd);
  7141. Assert(sizeOpnd);
  7142. Assert(indexOpnd);
  7143. IR::JnHelperMethod helperMethod = IR::HelperOp_Memset;
  7144. IR::Instr *instrPrev = nullptr;
  7145. if (src1->IsRegOpnd() && !src1->IsVar())
  7146. {
  7147. IR::RegOpnd* varOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
  7148. instrPrev = IR::Instr::New(Js::OpCode::ToVar, varOpnd, src1, instr->m_func);
  7149. instr->InsertBefore(instrPrev);
  7150. src1 = varOpnd;
  7151. }
  7152. instr->SetDst(helperRet);
  7153. LoadScriptContext(instr);
  7154. m_lowererMD.LoadHelperArgument(instr, sizeOpnd);
  7155. m_lowererMD.LoadHelperArgument(instr, src1);
  7156. m_lowererMD.LoadHelperArgument(instr, indexOpnd);
  7157. m_lowererMD.LoadHelperArgument(instr, baseOpnd);
  7158. m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  7159. dst->Free(m_func);
  7160. return instrPrev;
  7161. }
  7162. IR::Instr *
  7163. Lowerer::LowerMemcopy(IR::Instr * instr, IR::RegOpnd * helperRet)
  7164. {
  7165. IR::Opnd * dst = instr->UnlinkDst();
  7166. IR::Opnd * src = instr->UnlinkSrc1();
  7167. Assert(dst->IsIndirOpnd());
  7168. Assert(src->IsIndirOpnd());
  7169. IR::Opnd *dstBaseOpnd = dst->AsIndirOpnd()->UnlinkBaseOpnd();
  7170. IR::Opnd *dstIndexOpnd = dst->AsIndirOpnd()->UnlinkIndexOpnd();
  7171. IR::Opnd *srcBaseOpnd = src->AsIndirOpnd()->UnlinkBaseOpnd();
  7172. IR::Opnd *srcIndexOpnd = src->AsIndirOpnd()->UnlinkIndexOpnd();
  7173. IR::Opnd *sizeOpnd = instr->UnlinkSrc2();
  7174. Assert(sizeOpnd);
  7175. Assert(dstBaseOpnd);
  7176. Assert(dstIndexOpnd);
  7177. Assert(srcBaseOpnd);
  7178. Assert(srcIndexOpnd);
  7179. IR::JnHelperMethod helperMethod = IR::HelperOp_Memcopy;
  7180. instr->SetDst(helperRet);
  7181. LoadScriptContext(instr);
  7182. m_lowererMD.LoadHelperArgument(instr, sizeOpnd);
  7183. m_lowererMD.LoadHelperArgument(instr, srcIndexOpnd);
  7184. m_lowererMD.LoadHelperArgument(instr, srcBaseOpnd);
  7185. m_lowererMD.LoadHelperArgument(instr, dstIndexOpnd);
  7186. m_lowererMD.LoadHelperArgument(instr, dstBaseOpnd);
  7187. m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  7188. dst->Free(m_func);
  7189. src->Free(m_func);
  7190. return nullptr;
  7191. }
// Lowers a Memset/Memcopy instruction: peels off its array bailout kinds, emits the
// BailOutOnMemOpError check on the helper's return value, then dispatches to
// LowerMemset / LowerMemcopy for the actual helper call.
IR::Instr *
Lowerer::LowerMemOp(IR::Instr * instr)
{
    Assert(instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    IR::Instr *instrPrev = instr->m_prev;
    // The helpers write their status into helperRet; it is tested below for the
    // BailOutOnMemOpError bailout.
    IR::RegOpnd* helperRet = IR::RegOpnd::New(TyInt8, instr->m_func);
    const bool isHelper = false;
    AssertMsg(instr->HasBailOutInfo(), "Expected bailOut on MemOp instruction");
    if (instr->HasBailOutInfo())
    {
        // Peel off each bailout kind; each handler clears its bit so the remaining
        // kind (if any) must match.
        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if (bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
        {
            Assert(!(bailOutKind & IR::BailOutOnMissingValue));
            LowerBailOnInvalidatedArrayHeadSegment(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayHeadSegment;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        else if (bailOutKind & IR::BailOutOnMissingValue)
        {
            LowerBailOnCreatedMissingValue(instr, isHelper);
            bailOutKind ^= IR::BailOutOnMissingValue;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        if (bailOutKind & IR::BailOutOnInvalidatedArrayLength)
        {
            LowerBailOnInvalidatedArrayLength(instr, isHelper);
            bailOutKind ^= IR::BailOutOnInvalidatedArrayLength;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        AssertMsg(bailOutKind & IR::BailOutOnMemOpError, "Expected BailOutOnMemOpError on MemOp instruction");
        if (bailOutKind & IR::BailOutOnMemOpError)
        {
            // Insert or get continue label
            IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isHelper);
            Func *const func = instr->m_func;
            LowerOneBailOutKind(instr, IR::BailOutOnMemOpError, isHelper);
            IR::Instr *const insertBeforeInstr = instr->m_next;
            // cmp helperRet, 0
            // jnz $skipBailOut   ; helperRet != 0 means the mem op succeeded
            InsertCompareBranch(
                helperRet,
                IR::IntConstOpnd::New(0, TyInt8, func),
                Js::OpCode::BrNeq_A,
                skipBailOutLabel,
                insertBeforeInstr);
            // (Bail out with IR::BailOutOnMemOpError)
            // $skipBailOut:
            bailOutKind ^= IR::BailOutOnMemOpError;
            Assert(!bailOutKind || instr->GetBailOutKind() == bailOutKind);
        }
        instr->ClearBailOutInfo();
    }
    // Dispatch to the specific lowering; if it inserted instructions before the
    // call setup, return the earliest of those instead of the original m_prev.
    IR::Instr* newInstrPrev = nullptr;
    if (instr->m_opcode == Js::OpCode::Memset)
    {
        newInstrPrev = LowerMemset(instr, helperRet);
    }
    else if (instr->m_opcode == Js::OpCode::Memcopy)
    {
        newInstrPrev = LowerMemcopy(instr, helperRet);
    }
    if (newInstrPrev != nullptr)
    {
        instrPrev = newInstrPrev;
    }
    return instrPrev;
}
IR::Instr *
Lowerer::LowerStArrViewElem(IR::Instr * instr)
{
    // Lowers an asm.js typed-array-view store (St*ArrViewElem).
    // dst is an indir describing the array element, src1 is the value to
    // store, and src2 (when present) is the array size used for the bounds check.
    Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StInt8ArrViewElem ||
           instr->m_opcode == Js::OpCode::StUInt8ArrViewElem ||
           instr->m_opcode == Js::OpCode::StInt16ArrViewElem ||
           instr->m_opcode == Js::OpCode::StUInt16ArrViewElem ||
           instr->m_opcode == Js::OpCode::StInt32ArrViewElem ||
           instr->m_opcode == Js::OpCode::StUInt32ArrViewElem ||
           instr->m_opcode == Js::OpCode::StFloat32ArrViewElem ||
           instr->m_opcode == Js::OpCode::StFloat64ArrViewElem);
    IR::Instr * instrPrev = instr->m_prev;
    IR::Opnd * dst = instr->GetDst();
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * src2 = instr->GetSrc2();

    // type of dst is the type of array
    IR::RegOpnd * indexOpnd = dst->AsIndirOpnd()->GetIndexOpnd();
    Assert(!dst->IsFloat32() || src1->IsFloat32());
    Assert(!dst->IsFloat64() || src1->IsFloat64());

    IR::Instr * done;
    if (indexOpnd || (uint32)dst->AsIndirOpnd()->GetOffset() >= 0x1000000)
    {
        // Dynamic index, or a constant offset too large to be provably in
        // bounds: emit the bounds-checked store sequence.
        // CMP indexOpnd, src2(arrSize)
        // JA $helper
        // JMP $store
        // $helper:
        // JMP $done
        // $store:
        // MOV dst([arrayBuffer + indexOpnd]), src1
        // $done:
        done = m_lowererMD.LowerAsmJsStElemHelper(instr);
    }
    else
    {
        // any constant access below 0x1000000 is safe, as that is the min heap size
        instr->UnlinkDst();
        instr->UnlinkSrc1();
        done = instr;
        if (src2)
        {
            instr->FreeSrc2();
        }
    }
    // Emit the actual store before the 'done' point, then drop the original
    // (now fully consumed) instruction.
    InsertMove(dst, src1, done);
    instr->Remove();
    return instrPrev;
}
  7309. IR::Instr *
  7310. Lowerer::LowerArrayDetachedCheck(IR::Instr * instr)
  7311. {
  7312. // TEST isDetached, isDetached
  7313. // JE Done
  7314. // Helper:
  7315. // CALL Js::Throw::OutOfMemory
  7316. // Done:
  7317. Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
  7318. IR::Instr * instrPrev = instr->m_prev;
  7319. IR::Opnd * isDetachedOpnd = instr->UnlinkSrc1();
  7320. Assert(isDetachedOpnd->IsIndirOpnd() || isDetachedOpnd->IsMemRefOpnd());
  7321. IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);
  7322. IR::LabelInstr * helperLabel = InsertLabel(true, instr);
  7323. InsertTestBranch(isDetachedOpnd, isDetachedOpnd, Js::OpCode::BrNotNeq_A, doneLabel, helperLabel);
  7324. m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_OutOfMemoryError);
  7325. return instrPrev;
  7326. }
  7327. ///----------------------------------------------------------------------------
  7328. ///
  7329. /// Lowerer::LowerDeleteElemI
  7330. ///
  7331. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerDeleteElemI(IR::Instr * instr, bool strictMode)
{
    // Lowers DeleteElemI to a call to Op_DeleteElementI (or its Int32/UInt32
    // variants when the index operand is already type-specialized).
    // Helper args are pushed in reverse order:
    //   (base, index, scriptContext, propertyOperationFlags)
    IR::Instr *instrPrev;
    IR::Opnd *src1 = instr->UnlinkSrc1();
    AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd on DeleteElementI");

    // Strict mode is communicated to the helper via the operation flags.
    Js::PropertyOperationFlags propertyOperationFlag = Js::PropertyOperation_None;
    if (strictMode)
    {
        propertyOperationFlag = Js::PropertyOperation_StrictMode;
    }

    instrPrev = instr->m_prev;
    IR::JnHelperMethod helperMethod = IR::HelperOp_DeleteElementI;
    IR::Opnd *indexOpnd = src1->AsIndirOpnd()->UnlinkIndexOpnd();
    if (indexOpnd)
    {
        // Pick the helper variant matching the index's specialized type.
        if (indexOpnd->GetType() == TyInt32)
        {
            helperMethod = IR::HelperOp_DeleteElementI_Int32;
        }
        else if (indexOpnd->GetType() == TyUint32)
        {
            helperMethod = IR::HelperOp_DeleteElementI_UInt32;
        }
        else
        {
            Assert(indexOpnd->GetType() == TyVar);
        }
    }
    else
    {
        // No index; the offset identifies the element.
        IntConstType offset = (IntConstType)src1->AsIndirOpnd()->GetOffset();
        indexOpnd = IR::AddrOpnd::NewFromNumber(offset, m_func);
    }

    m_lowererMD.LoadHelperArgument(instr, IR::IntConstOpnd::New((IntConstType)propertyOperationFlag, TyInt32, m_func, true));
    LoadScriptContext(instr);
    m_lowererMD.LoadHelperArgument(instr, indexOpnd);

    IR::Opnd *baseOpnd = src1->AsIndirOpnd()->UnlinkBaseOpnd();
    m_lowererMD.LoadHelperArgument(instr, baseOpnd);

    // Base and index have been unlinked; the indir shell is no longer needed.
    src1->Free(this->m_func);

    m_lowererMD.ChangeToHelperCall(instr, helperMethod);
    return instrPrev;
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerBrBReturn - lower 1-operand (boolean) conditional branch
///
///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerBrBReturn(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool isHelper)
{
    // Lowers BrOnEmpty/BrOnNotEmpty: call 'helperMethod' on src1 and branch
    // on the boolean result (BrTrue for BrOnNotEmpty, BrFalse for BrOnEmpty).
    // Returns the instruction preceding the lowered sequence.
    IR::Instr * instrPrev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
    Assert(instr->m_opcode == Js::OpCode::BrOnEmpty || instr->m_opcode == Js::OpCode::BrOnNotEmpty);

    opndSrc = instr->UnlinkSrc1();
    instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call to convert the unknown operand to boolean
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    opndDst = instr->UnlinkDst();
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Branch on the result of the call
    instr->m_opcode = (instr->m_opcode == Js::OpCode::BrOnNotEmpty ? Js::OpCode::BrTrue_A : Js::OpCode::BrFalse_A);
    instr->SetSrc1(opndDst);

    IR::Instr *loweredInstr;
    loweredInstr = this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, isHelper);

#if DBG
    if (isHelper)
    {
        // Tag the branch leaving the helper block so debug-only checks can
        // recognize the helper-to-non-helper transition.
        if (!loweredInstr->IsBranchInstr())
        {
            loweredInstr = loweredInstr->GetNextBranchOrLabel();
        }
        if (loweredInstr->IsBranchInstr())
        {
            loweredInstr->AsBranchInstr()->m_isHelperToNonHelperBranch = true;
        }
    }
#endif
    return instrPrev;
}
  7419. ///----------------------------------------------------------------------------
  7420. ///
  7421. /// Lowerer::LowerMultiBr
  7422. /// - Lowers the instruction for dictionary look up(string case arms)
  7423. ///
  7424. ///----------------------------------------------------------------------------
IR::Instr* Lowerer::LowerMultiBr(IR::Instr * instr, IR::JnHelperMethod helperMethod)
{
    // Lowers a string-keyed multi-branch (switch with string case arms) into
    // a dictionary-lookup helper call followed by an indirect branch to the
    // address the helper returns.
    IR::Instr * instrPrev = instr->m_prev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnd on BrB");

    // Push the args in reverse order.
    // The end and start labels for the function are used to guarantee
    // that the dictionary jump destinations haven't been tampered with, so we
    // will always jump to some location within this function
    IR::LabelOpnd * endFuncOpnd = IR::LabelOpnd::New(m_func->EnsureFuncEndLabel(), m_func);
    m_lowererMD.LoadHelperArgument(instr, endFuncOpnd);
    IR::LabelOpnd * startFuncOpnd = IR::LabelOpnd::New(m_func->EnsureFuncStartLabel(), m_func);
    m_lowererMD.LoadHelperArgument(instr, startFuncOpnd);

    // Load the address of the dictionary pair - Js::StringDictionaryWrapper
    IR::AddrOpnd* nativestringDictionaryOpnd = IR::AddrOpnd::New(instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary(), IR::AddrOpndKindDynamicMisc, this->m_func);
    m_lowererMD.LoadHelperArgument(instr, nativestringDictionaryOpnd);

    // Load the String passed in the Switch expression for look up - JavascriptString
    opndSrc = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call for dictionary lookup.
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyMachPtr, this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyMachPtr, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Indirect-branch to the target address produced by the lookup.
    instr->SetSrc1(instrCall->GetDst());
    m_lowererMD.LowerMultiBranch(instr);
    return instrPrev;
}
void
Lowerer::LowerJumpTableMultiBranch(IR::MultiBranchInstr * multiBrInstr, IR::RegOpnd * indexOpnd)
{
    // Lowers a jump-table multi-branch: load the jump table's base address,
    // fetch the target at [base + index * scale], and branch to it.
    Func * func = this->m_func;
    IR::Opnd * opndDst = IR::RegOpnd::New(TyMachPtr, func);

    // Move the native address of the jump table to a register
    IR::LabelInstr * nativeJumpTableLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    nativeJumpTableLabel->m_isDataLabel = true;   // the label marks data (the table), not code
    IR::LabelOpnd * nativeJumpTable = IR::LabelOpnd::New(nativeJumpTableLabel, m_func);
    IR::RegOpnd * nativeJumpTableReg = IR::RegOpnd::New(TyMachPtr, func);
    m_lowererMD.CreateAssign(nativeJumpTableReg, nativeJumpTable, multiBrInstr);

    // Hand the label to the branch's jump-table wrapper so the table can be
    // emitted at that label later.
    BranchJumpTableWrapper * branchJumpTable = multiBrInstr->GetBranchJumpTable();
    AssertMsg(branchJumpTable->labelInstr == nullptr, "Should not be already assigned");
    branchJumpTable->labelInstr = nativeJumpTableLabel;

    // Indirect addressing @ target location in the jump table.
    // MOV eax, [nativeJumpTableReg + (offset * indirScale)]
    BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
    IR::Opnd * opndSrc = IR::IndirOpnd::New(nativeJumpTableReg, indexOpnd, indirScale, TyMachReg, this->m_func);
    IR::Instr * indirInstr = m_lowererMD.CreateAssign(opndDst, opndSrc, multiBrInstr);

    // MultiBr eax
    multiBrInstr->SetSrc1(indirInstr->GetDst());

    // Jump to the address at the target location in the jump table
    m_lowererMD.LowerMultiBranch(multiBrInstr);
}
  7483. ///----------------------------------------------------------------------------
  7484. ///
  7485. /// Lowerer::LowerMultiBr
  7486. /// - Lowers the instruction for jump table(consecutive integer case arms)
  7487. ///
  7488. ///----------------------------------------------------------------------------
  7489. IR::Instr* Lowerer::LowerMultiBr(IR::Instr * instr)
  7490. {
  7491. IR::Instr * instrPrev = instr->m_prev;
  7492. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnd on BrB");
  7493. AssertMsg(instr->IsBranchInstr() && instr->AsBranchInstr()->IsMultiBranch(), "Bad Instruction Lowering Call to LowerMultiBr()");
  7494. IR::MultiBranchInstr * multiBrInstr = instr->AsBranchInstr()->AsMultiBrInstr();
  7495. IR::RegOpnd * offset = instr->UnlinkSrc1()->AsRegOpnd();
  7496. LowerJumpTableMultiBranch(multiBrInstr, offset);
  7497. return instrPrev;
  7498. }
  7499. IR::Instr* Lowerer::LowerBrBMem(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  7500. {
  7501. IR::Instr * instrPrev;
  7502. IR::Instr * instrCall;
  7503. IR::HelperCallOpnd * opndHelper;
  7504. IR::Opnd * opndSrc;
  7505. IR::Opnd * opndDst;
  7506. StackSym * symDst;
  7507. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
  7508. instrPrev = LoadScriptContext(instr);
  7509. opndSrc = instr->UnlinkSrc1();
  7510. m_lowererMD.LoadHelperArgument(instr, opndSrc);
  7511. // Generate helper call to convert the unknown operand to boolean
  7512. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  7513. symDst = StackSym::New(TyVar, this->m_func);
  7514. opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
  7515. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  7516. instr->InsertBefore(instrCall);
  7517. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  7518. // Branch on the result of the call
  7519. instr->SetSrc1(opndDst);
  7520. m_lowererMD.LowerCondBranch(instr);
  7521. return instrPrev;
  7522. }
  7523. IR::Instr* Lowerer::LowerBrOnObject(IR::Instr * instr, IR::JnHelperMethod helperMethod)
  7524. {
  7525. IR::Instr * instrPrev;
  7526. IR::Instr * instrCall;
  7527. IR::HelperCallOpnd * opndHelper;
  7528. IR::Opnd * opndSrc;
  7529. IR::Opnd * opndDst;
  7530. StackSym * symDst;
  7531. AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");
  7532. opndSrc = instr->UnlinkSrc1();
  7533. instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);
  7534. // Generate helper call to check if the operand's type is object
  7535. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  7536. symDst = StackSym::New(TyVar, this->m_func);
  7537. opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
  7538. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  7539. instr->InsertBefore(instrCall);
  7540. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  7541. // Branch on the result of the call
  7542. instr->SetSrc1(opndDst);
  7543. m_lowererMD.LowerCondBranch(instr);
  7544. return instrPrev;
  7545. }
IR::Instr * Lowerer::LowerBrOnClassConstructor(IR::Instr * instr, IR::JnHelperMethod helperMethod)
{
    // Lowers BrOnClassConstructor: call 'helperMethod' on src1 and branch on
    // the result. Same lowering shape as LowerBrOnObject.
    IR::Instr * instrPrev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr, "Expected 1 src opnds on BrB");

    opndSrc = instr->UnlinkSrc1();
    instrPrev = m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call to check if the operand's type is object
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyVar, this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyVar, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Branch on the result of the call
    instr->SetSrc1(opndDst);
    m_lowererMD.LowerCondBranch(instr);
    return instrPrev;
}
IR::Instr *
Lowerer::LowerBrCMem(IR::Instr * instr, IR::JnHelperMethod helperMethod, bool noMathFastPath, bool isHelper)
{
    // Lowers a 2-operand comparison branch to a comparison helper call
    // followed by BrTrue/BrFalse on the helper's result. Negated helpers
    // (Op_NotEqual / Op_NotStrictEqual) are mapped to their positive
    // counterparts with the branch inverted instead of calling the slower
    // negated helper.
    IR::Instr * instrPrev = instr->m_prev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;
    StackSym * symDst;
    bool inverted = false;

    AssertMsg(instr->GetSrc1() != nullptr && instr->GetSrc2() != nullptr, "Expected 2 src opnds on BrC");

    // Try the inline fast path first; if it handled the branch entirely,
    // there is nothing more to lower here.
    if (!noMathFastPath && !this->GenerateFastCondBranch(instr->AsBranchInstr(), &isHelper))
    {
        return instrPrev;
    }

    // Push the args in reverse order.
    // The string strict-equality helpers take no script context, and the
    // empty-string variant takes no second argument.
    const bool loadScriptContext = !(helperMethod == IR::HelperOp_StrictEqualString || helperMethod == IR::HelperOp_StrictEqualEmptyString);
    const bool loadArg2 = !(helperMethod == IR::HelperOp_StrictEqualEmptyString);

    if (helperMethod == IR::HelperOp_NotEqual)
    {
        // Op_NotEqual() returns !Op_Equal(). It is faster to call Op_Equal() directly.
        helperMethod = IR::HelperOp_Equal;
        instr->AsBranchInstr()->Invert();
        inverted = true;
    }
    else if (helperMethod == IR::HelperOp_NotStrictEqual)
    {
        // Op_NotStrictEqual() returns !Op_StrictEqual(). It is faster to call Op_StrictEqual() directly.
        helperMethod = IR::HelperOp_StrictEqual;
        instr->AsBranchInstr()->Invert();
        inverted = true;
    }

    if (loadScriptContext)
        LoadScriptContext(instr);

    opndSrc = instr->UnlinkSrc2();
    if (loadArg2)
        m_lowererMD.LoadHelperArgument(instr, opndSrc);

    opndSrc = instr->UnlinkSrc1();
    m_lowererMD.LoadHelperArgument(instr, opndSrc);

    // Generate helper call to compare the source operands.
    opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
    symDst = StackSym::New(TyMachReg, this->m_func);
    opndDst = IR::RegOpnd::New(symDst, TyMachReg, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    switch (instr->m_opcode)
    {
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrSrNotNeq_A:
        // Record the inversion so bailout handling can compensate.
        if (instr->HasBailOutInfo())
        {
            instr->GetBailOutInfo()->isInvertedBranch = true;
        }
        break;

    case Js::OpCode::BrNotGe_A:
    case Js::OpCode::BrNotGt_A:
    case Js::OpCode::BrNotLe_A:
    case Js::OpCode::BrNotLt_A:
        // The negated relational opcodes branch when the helper returns false.
        inverted = true;
        break;
    }

    // Branch if the result is "true".
    instr->SetSrc1(opndDst);
    instr->m_opcode = (inverted ? Js::OpCode::BrFalse_A : Js::OpCode::BrTrue_A);
    this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, !noMathFastPath && isHelper);
    return instrPrev;
}
  7639. IR::Instr *
  7640. Lowerer::LowerBrFncApply(IR::Instr * instr, IR::JnHelperMethod helperMethod) {
  7641. IR::Instr * instrPrev = instr->m_prev;
  7642. IR::Instr * instrCall;
  7643. IR::HelperCallOpnd * opndHelper;
  7644. IR::Opnd * opndSrc;
  7645. IR::Opnd * opndDst;
  7646. StackSym * symDst;
  7647. AssertMsg(instr->GetSrc1() != nullptr, "Expected 1 src opnd on BrFncApply");
  7648. LoadScriptContext(instr);
  7649. opndSrc = instr->UnlinkSrc1();
  7650. m_lowererMD.LoadHelperArgument(instr, opndSrc);
  7651. // Generate helper call to compare the source operands.
  7652. opndHelper = IR::HelperCallOpnd::New(helperMethod, this->m_func);
  7653. symDst = StackSym::New(TyMachReg, this->m_func);
  7654. opndDst = IR::RegOpnd::New(symDst, TyMachReg, this->m_func);
  7655. instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
  7656. instr->InsertBefore(instrCall);
  7657. instrCall = m_lowererMD.LowerCall(instrCall, 0);
  7658. // Branch if the result is "true".
  7659. instr->SetSrc1(opndDst);
  7660. instr->m_opcode = Js::OpCode::BrTrue_A;
  7661. m_lowererMD.LowerCondBranch(instr);
  7662. return instrPrev;
  7663. }
  7664. ///----------------------------------------------------------------------------
  7665. ///
  7666. /// Lowerer::LowerBrProperty - lower branch-on-has/no-property
  7667. ///
  7668. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerBrProperty(IR::Instr * instr, IR::JnHelperMethod helper)
{
    // Lowers BrOnHasProperty/BrOnNoProperty: call 'helper' with the property
    // sym (plus script context), then branch on the boolean result.
    IR::Instr * instrPrev;
    IR::Instr * instrCall;
    IR::HelperCallOpnd * opndHelper;
    IR::Opnd * opndSrc;
    IR::Opnd * opndDst;

    opndSrc = instr->UnlinkSrc1();
    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(),
        "Expected propertySym as src of BrProperty");

    // Args in reverse order: scriptContext, then the property operands.
    instrPrev = LoadScriptContext(instr);
    this->LoadPropertySymAsArgument(instr, opndSrc);

    opndHelper = IR::HelperCallOpnd::New(helper, this->m_func);
    opndDst = IR::RegOpnd::New(StackSym::New(TyMachReg, this->m_func), TyMachReg, this->m_func);
    instrCall = IR::Instr::New(Js::OpCode::Call, opndDst, opndHelper, this->m_func);
    instr->InsertBefore(instrCall);
    instrCall = m_lowererMD.LowerCall(instrCall, 0);

    // Branch on the result of the call
    instr->SetSrc1(opndDst);
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrOnHasProperty:
        instr->m_opcode = Js::OpCode::BrTrue_A;
        break;
    case Js::OpCode::BrOnNoProperty:
        instr->m_opcode = Js::OpCode::BrFalse_A;
        break;
    default:
        AssertMsg(0, "Unknown opcode on BrProperty branch");
        break;
    }
    this->LowerCondBranchCheckBailOut(instr->AsBranchInstr(), instrCall, false);
    return instrPrev;
}
  7704. ///----------------------------------------------------------------------------
  7705. ///
  7706. /// Lowerer::LowerElementUndefined
  7707. ///
  7708. ///----------------------------------------------------------------------------
  7709. IR::Instr *
  7710. Lowerer::LowerElementUndefined(IR::Instr * instr, IR::JnHelperMethod helper)
  7711. {
  7712. IR::Opnd *dst = instr->UnlinkDst();
  7713. AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as dst of Ld Undefined");
  7714. // Pass the property sym to store to
  7715. this->LoadPropertySymAsArgument(instr, dst);
  7716. m_lowererMD.ChangeToHelperCall(instr, helper);
  7717. return instr;
  7718. }
  7719. IR::Instr *
  7720. Lowerer::LowerElementUndefinedMem(IR::Instr * instr, IR::JnHelperMethod helper)
  7721. {
  7722. // Pass script context
  7723. IR::Instr * instrPrev = LoadScriptContext(instr);
  7724. this->LowerElementUndefined(instr, helper);
  7725. return instrPrev;
  7726. }
  7727. IR::Instr *
  7728. Lowerer::LowerLdElemUndef(IR::Instr * instr)
  7729. {
  7730. if (this->m_func->GetJnFunction()->IsEval())
  7731. {
  7732. return LowerElementUndefinedMem(instr, IR::HelperOp_LdElemUndefDynamic);
  7733. }
  7734. else
  7735. {
  7736. return LowerElementUndefined(instr, IR::HelperOp_LdElemUndef);
  7737. }
  7738. }
  7739. ///----------------------------------------------------------------------------
  7740. ///
  7741. /// Lowerer::LowerElementUndefinedScoped
  7742. ///
  7743. ///----------------------------------------------------------------------------
  7744. IR::Instr *
  7745. Lowerer::LowerElementUndefinedScoped(IR::Instr * instr, IR::JnHelperMethod helper)
  7746. {
  7747. IR::Instr * instrPrev = instr->m_prev;
  7748. // Pass the default instance
  7749. IR::Opnd *src = instr->UnlinkSrc1();
  7750. m_lowererMD.LoadHelperArgument(instr, src);
  7751. // Pass the property sym to store to
  7752. IR::Opnd * dst = instr->UnlinkDst();
  7753. AssertMsg(dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected fieldSym as dst of Ld Undefined Scoped");
  7754. this->LoadPropertySymAsArgument(instr, dst);
  7755. m_lowererMD.ChangeToHelperCall(instr, helper);
  7756. return instrPrev;
  7757. }
  7758. IR::Instr *
  7759. Lowerer::LowerElementUndefinedScopedMem(IR::Instr * instr, IR::JnHelperMethod helper)
  7760. {
  7761. // Pass script context
  7762. IR::Instr * instrPrev = LoadScriptContext(instr);
  7763. this->LowerElementUndefinedScoped(instr, helper);
  7764. return instrPrev;
  7765. }
  7766. void
  7767. Lowerer::LowerStLoopBodyCount(IR::Instr* instr)
  7768. {
  7769. Js::LoopHeader *header = ((JsLoopBodyCodeGen*)m_func->m_workItem)->loopHeader;
  7770. IR::MemRefOpnd *loopBodyCounterOpnd = IR::MemRefOpnd::New((BYTE*)(header) + header->GetOffsetOfProfiledLoopCounter(), TyUint32, this->m_func);
  7771. instr->SetDst(loopBodyCounterOpnd);
  7772. instr->ReplaceSrc1(instr->GetSrc1()->AsRegOpnd()->UseWithNewType(TyUint32, this->m_func));
  7773. IR::AutoReuseOpnd(loopBodyCounterOpnd, this->m_func);
  7774. m_lowererMD.ChangeToAssign(instr);
  7775. return;
  7776. }
#if !FLOATVAR
IR::Instr *
Lowerer::LowerStSlotBoxTemp(IR::Instr *stSlot)
{
    // regVar = BoxStackNumber(src, scriptContext)
    // Boxes a possibly stack-allocated number via the helper before the slot
    // store, then lowers the store itself through LowerStSlot.
    IR::RegOpnd * regSrc = stSlot->UnlinkSrc1()->AsRegOpnd();
    IR::Instr * instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
    IR::RegOpnd *regVar = IR::RegOpnd::New(TyVar, this->m_func);
    instr->SetDst(regVar);
    instr->SetSrc1(regSrc);
    stSlot->InsertBefore(instr);
    this->LowerUnaryHelperMem(instr, IR::HelperBoxStackNumber);
    // The store now consumes the boxed value.
    stSlot->SetSrc1(regVar);
    return this->LowerStSlot(stSlot);
}
#endif
IR::Opnd *
Lowerer::CreateOpndForSlotAccess(IR::Opnd * opnd)
{
    // Converts a symbolic slot access (SymOpnd over a PropertySym whose
    // property id is the slot index) into a concrete operand: either a direct
    // stack sym access (stack closure case) or [owner + slotIndex * size].
    IR::SymOpnd * symOpnd = opnd->AsSymOpnd();
    PropertySym * dstSym = symOpnd->m_sym->AsPropertySym();

    if (!m_func->IsLoopBody() &&
        m_func->DoStackFrameDisplay() &&
        (dstSym->m_stackSym == m_func->GetLocalClosureSym() || dstSym->m_stackSym == m_func->GetLocalFrameDisplaySym()))
    {
        // Stack closure syms are made to look like slot accesses for the benefit of GlobOpt, so that it can do proper
        // copy prop and implicit call bailout. But what we really want is local stack load/store.
        // Don't do this for loop body, though, since we don't have the value saved on the stack.
        return IR::SymOpnd::New(dstSym->m_stackSym, 0, TyMachReg, this->m_func);
    }

    // The property id doubles as the slot index.
    int32 offset = dstSym->m_propertyId;
    if (!m_func->GetJnFunction()->GetIsAsmJsFunction())
    {
        // Non-asm.js: scale the slot index by the operand size to get a byte offset.
        offset = offset * TySize[opnd->GetType()];
    }
    if (m_func->IsTJLoopBody())
    {
        // TJ loop bodies bias the offset back by the function's total frame size.
        offset = offset - m_func->GetJnFunction()->GetAsmJsFunctionInfoWithLock()->GetTotalSizeinBytes();
    }
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(symOpnd->CreatePropertyOwnerOpnd(m_func),
        offset, opnd->GetType(), this->m_func);
    return indirOpnd;
}
  7820. IR::Instr *
  7821. Lowerer::LowerStSlot(IR::Instr *instr)
  7822. {
  7823. // StSlot stores the nth Var in the buffer pointed to by the property sym's stack sym.
  7824. IR::Opnd * dstOpnd = instr->UnlinkDst();
  7825. AssertMsg(dstOpnd, "Expected dst opnd on StSlot");
  7826. IR::Opnd * dstNew = this->CreateOpndForSlotAccess(dstOpnd);
  7827. dstOpnd->Free(this->m_func);
  7828. instr->SetDst(dstNew);
  7829. m_lowererMD.ChangeToWriteBarrierAssign(instr);
  7830. return instr;
  7831. }
IR::Instr *
Lowerer::LowerStSlotChkUndecl(IR::Instr *instrStSlot)
{
    // Like LowerStSlot, but additionally emits an undecl (use-before-
    // declaration) check sequence against the slot via GenUndeclChk.
    Assert(instrStSlot->GetSrc2() != nullptr);

    // Src2 is required only to avoid dead store false positives during GlobOpt.
    instrStSlot->FreeSrc2();

    // Create the slot operand for the check BEFORE LowerStSlot consumes and
    // frees the original dst operand.
    IR::Opnd *dstOpnd = this->CreateOpndForSlotAccess(instrStSlot->GetDst());
    IR::Instr *instr = this->LowerStSlot(instrStSlot);
    this->GenUndeclChk(instr, dstOpnd);
    return instr;
}
  7843. void Lowerer::LowerProfileLdSlot(IR::Opnd *const valueOpnd, Func *const ldSlotFunc, const Js::ProfileId profileId, IR::Instr *const insertBeforeInstr)
  7844. {
  7845. Assert(valueOpnd);
  7846. Assert(profileId != Js::Constants::NoProfileId);
  7847. Assert(insertBeforeInstr);
  7848. Func *const irFunc = insertBeforeInstr->m_func;
  7849. m_lowererMD.LoadHelperArgument(insertBeforeInstr, IR::Opnd::CreateProfileIdOpnd(profileId, irFunc));
  7850. m_lowererMD.LoadHelperArgument(insertBeforeInstr, CreateFunctionBodyOpnd(ldSlotFunc));
  7851. m_lowererMD.LoadHelperArgument(insertBeforeInstr, valueOpnd);
  7852. IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, irFunc);
  7853. callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperProfileLdSlot, irFunc));
  7854. insertBeforeInstr->InsertBefore(callInstr);
  7855. m_lowererMD.LowerCall(callInstr, 0);
  7856. }
  7857. IR::Instr *
  7858. Lowerer::LowerLdSlot(IR::Instr *instr)
  7859. {
  7860. IR::Opnd * srcOpnd = instr->UnlinkSrc1();
  7861. AssertMsg(srcOpnd, "Expected src opnd on LdSlot");
  7862. IR::Opnd * srcNew = this->CreateOpndForSlotAccess(srcOpnd);
  7863. srcOpnd->Free(this->m_func);
  7864. instr->SetSrc1(srcNew);
  7865. m_lowererMD.ChangeToAssign(instr);
  7866. return instr;
  7867. }
  7868. IR::Instr *
  7869. Lowerer::LowerChkUndecl(IR::Instr *instr)
  7870. {
  7871. IR::Instr *instrPrev = instr->m_prev;
  7872. this->GenUndeclChk(instr, instr->GetSrc1());
  7873. instr->Remove();
  7874. return instrPrev;
  7875. }
void
Lowerer::GenUndeclChk(IR::Instr *instrInsert, IR::Opnd *opnd)
{
    // Emits, before 'instrInsert':
    //   CMP opnd, UndeclBlockVar
    //   JNE $continue
    // $throw: (helper block)
    //   call Op_RuntimeReferenceError(JSERR_UseBeforeDeclaration)
    // $continue:
    IR::LabelInstr *labelContinue = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    InsertCompareBranch(
        opnd,
        LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndeclBlockVar),
        Js::OpCode::BrNeq_A, labelContinue, instrInsert);

    // The throw path lives in a helper (cold) block.
    IR::LabelInstr *labelThrow = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    instrInsert->InsertBefore(labelThrow);

    IR::Instr *instr = IR::Instr::New(
        Js::OpCode::RuntimeReferenceError,
        IR::RegOpnd::New(TyMachReg, m_func),
        IR::IntConstOpnd::New(SCODE_CODE(JSERR_UseBeforeDeclaration), TyInt32, m_func),
        m_func);
    instrInsert->InsertBefore(instr);
    this->LowerUnaryHelperMem(instr, IR::HelperOp_RuntimeReferenceError);

    instrInsert->InsertBefore(labelContinue);
}
  7895. ///----------------------------------------------------------------------------
  7896. ///
  7897. /// Lowerer::LowerStElemC
  7898. ///
  7899. ///----------------------------------------------------------------------------
IR::Instr *
Lowerer::LowerStElemC(IR::Instr * stElem)
{
    // Lowers StElemC / StArrSegElemC: a store to an array (or array segment)
    // element at a compile-time-constant index. The constant index is folded
    // into the dst indir's byte offset and the store becomes a write-barrier
    // assign. Likely-native arrays may additionally need an array-type test
    // and/or a helper call with a bailout on array conversion.
    // Returns the instruction at which lowering should resume (the one
    // preceding the original stElem).
    IR::Instr *instrPrev = stElem->m_prev;
    IR::IndirOpnd * indirOpnd = stElem->GetDst()->AsIndirOpnd();
    IR::RegOpnd *indexOpnd = indirOpnd->UnlinkIndexOpnd();
    Assert(!indexOpnd || indexOpnd->m_sym->IsIntConst());

    // The constant element index: either carried by an int-const index
    // operand, or already folded into the indir's offset field.
    IntConstType value;
    if (indexOpnd)
    {
        value = indexOpnd->AsRegOpnd()->m_sym->GetIntConstValue();
        indexOpnd->Free(this->m_func);
    }
    else
    {
        value = (IntConstType)indirOpnd->GetOffset();
    }

    if (stElem->IsJitProfilingInstr())
    {
        // Profiling build: replace the store entirely with a call to the
        // simple store helper, passing (base, index, value).
        Assert(stElem->AsJitProfilingInstr()->profileId == Js::Constants::NoProfileId);
        m_lowererMD.LoadHelperArgument(stElem, stElem->UnlinkSrc1());
        const auto meth = stElem->m_opcode == Js::OpCode::StElemC ? IR::HelperSimpleStoreArrayHelper : IR::HelperSimpleStoreArraySegHelper;
        stElem->SetSrc1(IR::HelperCallOpnd::New(meth, m_func));
        m_lowererMD.LoadHelperArgument(stElem, IR::IntConstOpnd::New(value, TyUint32, m_func));
        m_lowererMD.LoadHelperArgument(stElem, indirOpnd->UnlinkBaseOpnd());
        // The helper performs the store; the original dst indir is no longer needed.
        stElem->UnlinkDst()->Free(m_func);
        m_lowererMD.LowerCall(stElem, 0);
        return instrPrev;
    }

    // Byte offset of the first element relative to the base operand; depends
    // on whether base points at the array object or directly at a segment.
    IntConstType base;
    IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    if(baseValueType.IsLikelyNativeArray())
    {
        Assert(stElem->m_opcode == Js::OpCode::StElemC);
        IR::LabelInstr *labelBailOut = nullptr;
        IR::Instr *instrBailOut = nullptr;
        if (stElem->HasBailOutInfo())
        {
            // Split the bailout off the store: clone the store into a new
            // instruction before the original, turn the original into a
            // BailOut reached only via $labelBailOut, and branch around it
            // on the success path.
            labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            instrBailOut = stElem;
            stElem = IR::Instr::New(instrBailOut->m_opcode, m_func);
            instrBailOut->TransferTo(stElem);
            instrBailOut->InsertBefore(stElem);
            IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            InsertBranch(Js::OpCode::Br, labelDone, instrBailOut);
            instrBailOut->InsertBefore(labelBailOut);
            instrBailOut->InsertAfter(labelDone);
            instrBailOut->m_opcode = Js::OpCode::BailOut;
            GenerateBailOut(instrBailOut);
        }
        if (!baseValueType.IsObject())
        {
            // Likely native array: do a vtable check and bail if it fails.
            Assert(labelBailOut);
            GenerateArrayTest(baseOpnd, labelBailOut, labelBailOut, stElem, true);
        }
        if (stElem->GetSrc1()->GetType() == TyVar)
        {
            // Storing a non-specialized value. This may cause array conversion, which invalidates all the code
            // that depends on the array check we've already done.
            // Call a helper that returns the type ID of the resulting array, check it here against the one we
            // expect, and bail if it fails.
            Assert(labelBailOut);
            // Call a helper to (try and) unbox the var and store it.
            // If we had to convert the array to do the store, we'll bail.
            LoadScriptContext(stElem);
            m_lowererMD.LoadHelperArgument(stElem, stElem->UnlinkSrc1());
            IR::Opnd *indexOpnd = IR::IntConstOpnd::New(value, TyUint32, m_func);
            m_lowererMD.LoadHelperArgument(stElem, indexOpnd);
            m_lowererMD.LoadHelperArgument(stElem, indirOpnd->UnlinkBaseOpnd());
            IR::JnHelperMethod helperMethod;
            if (baseValueType.HasIntElements())
            {
                helperMethod = IR::HelperScrArr_SetNativeIntElementC;
            }
            else
            {
                helperMethod = IR::HelperScrArr_SetNativeFloatElementC;
            }
            // The helper's return value (the array's post-store type id) replaces
            // the store's dst; compare it against the expected native-array type
            // id right after the call and bail on mismatch.
            IR::Instr *instrInsertBranch = stElem->m_next;
            IR::RegOpnd *typeIdOpnd = IR::RegOpnd::New(TyUint32, m_func);
            stElem->ReplaceDst(typeIdOpnd);
            m_lowererMD.ChangeToHelperCall(stElem, helperMethod);
            InsertCompareBranch(
                typeIdOpnd,
                IR::IntConstOpnd::New(
                    baseValueType.HasIntElements() ?
                    Js::TypeIds_NativeIntArray : Js::TypeIds_NativeFloatArray, TyUint32, m_func),
                Js::OpCode::BrNeq_A,
                labelBailOut,
                instrInsertBranch);
            return instrPrev;
        }
        else if (baseValueType.HasIntElements() && labelBailOut)
        {
            // Int arrays use a sentinel "missing item" value; storing it would
            // corrupt the array, so bail out if src1 equals the sentinel.
            Assert(stElem->GetSrc1()->GetType() == GetArrayIndirType(baseValueType));
            IR::Opnd* missingElementOpnd = GetMissingItemOpnd(stElem->GetSrc1()->GetType(), m_func);
            if (!stElem->GetSrc1()->IsEqual(missingElementOpnd))
            {
                InsertCompareBranch(stElem->GetSrc1(), missingElementOpnd , Js::OpCode::BrEq_A, labelBailOut, stElem, true);
            }
            else
            {
                //Its a missing value store and data flow proves that src1 is always missing value. Array cannot be an int array at the first place
                //if this code was ever hit. Just bailout, this code path would be updated with the profile information next time around.
                InsertBranch(Js::OpCode::Br, labelBailOut, stElem);
#if DBG
                labelBailOut->m_noHelperAssert = true;
#endif
                stElem->Remove();
                return instrPrev;
            }
        }
        else
        {
            Assert(stElem->GetSrc1()->GetType() == GetArrayIndirType(baseValueType));
        }
        // Store directly into the inlined head segment of the native array.
        stElem->GetDst()->SetType(stElem->GetSrc1()->GetType());
        Assert(value <= Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE);
        if(baseValueType.HasIntElements())
        {
            base = sizeof(Js::JavascriptNativeIntArray) + offsetof(Js::SparseArraySegment<int32>, elements);
        }
        else
        {
            base = sizeof(Js::JavascriptNativeFloatArray) + offsetof(Js::SparseArraySegment<double>, elements);
        }
    }
    else if(baseValueType.IsLikelyObject() && baseValueType.GetObjectType() == ObjectType::Array)
    {
        // Var array: store into the inlined head segment right after the object.
        Assert(stElem->m_opcode == Js::OpCode::StElemC);
        Assert(value <= Js::SparseArraySegmentBase::INLINE_CHUNK_SIZE);
        base = sizeof(Js::JavascriptArray) + offsetof(Js::SparseArraySegment<Js::Var>, elements);
    }
    else
    {
        // Base points directly at a segment (StArrSegElemC), or an untyped var.
        Assert(stElem->m_opcode == Js::OpCode::StElemC || stElem->m_opcode == Js::OpCode::StArrSegElemC);
        Assert(indirOpnd->GetBaseOpnd()->GetType() == TyVar);
        base = offsetof(Js::SparseArraySegment<Js::Var>, elements);
    }

    Assert(value >= 0);
    // MOV [r3 + offset(element) + index], src
    const BYTE indirScale =
        baseValueType.IsLikelyAnyOptimizedArray() ? GetArrayIndirScale(baseValueType) : m_lowererMD.GetDefaultIndirScale();
    IntConstType offset = base + (value << indirScale);
    Assert(Math::FitsInDWord(offset));
    indirOpnd->SetOffset((int32)offset);
    m_lowererMD.ChangeToWriteBarrierAssign(stElem);
    return instrPrev;
}
  8051. void Lowerer::LowerLdArrHead(IR::Instr *const instr)
  8052. {
  8053. IR::RegOpnd *array = instr->UnlinkSrc1()->AsRegOpnd();
  8054. const ValueType arrayValueType(array->GetValueType());
  8055. Assert(arrayValueType.IsAnyOptimizedArray());
  8056. if(arrayValueType.GetObjectType() == ObjectType::ObjectWithArray)
  8057. {
  8058. array = LoadObjectArray(array, instr);
  8059. }
  8060. // mov arrayHeadSegment, [array + offset(headSegment)]
  8061. instr->GetDst()->SetType(TyMachPtr);
  8062. instr->SetSrc1(
  8063. IR::IndirOpnd::New(
  8064. array,
  8065. GetArrayOffsetOfHeadSegment(arrayValueType),
  8066. TyMachPtr,
  8067. instr->m_func));
  8068. LowererMD::ChangeToAssign(instr);
  8069. }
  8070. // Creates the rest parameter array.
  8071. // Var JavascriptArray::OP_NewScArrayWithElements(
  8072. // uint32 elementCount,
  8073. // Var *elements,
  8074. // ScriptContext* scriptContext)
  8075. IR::Instr *Lowerer::LowerRestParameter(IR::Opnd *formalsOpnd, IR::Opnd *dstOpnd, IR::Opnd *excessOpnd, IR::Instr *instr, IR::RegOpnd *generatorArgsPtrOpnd)
  8076. {
  8077. IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, dstOpnd, instr->m_func);
  8078. instr->InsertAfter(helperCallInstr);
  8079. // Var JavascriptArray::OP_NewScArrayWithElements(
  8080. // int32 elementCount,
  8081. // Var *elements,
  8082. // ScriptContext* scriptContext)
  8083. IR::JnHelperMethod helperMethod = IR::HelperScrArr_OP_NewScArrayWithElements;
  8084. LoadScriptContext(helperCallInstr);
  8085. BOOL isGenerator = this->m_func->GetJnFunction()->IsGenerator();
  8086. // Elements pointer = ebp + (formals count + formals offset + 1)*sizeof(Var)
  8087. IR::RegOpnd *srcOpnd = isGenerator ? generatorArgsPtrOpnd : IR::Opnd::CreateFramePointerOpnd(this->m_func);
  8088. uint16 actualOffset = isGenerator ? 0 : GetFormalParamOffset(); //4
  8089. IR::RegOpnd *argPtrOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  8090. InsertAdd(false, argPtrOpnd, srcOpnd, IR::IntConstOpnd::New((formalsOpnd->AsIntConstOpnd()->GetValue() + actualOffset) * MachPtr, TyUint32, this->m_func), helperCallInstr);
  8091. m_lowererMD.LoadHelperArgument(helperCallInstr, argPtrOpnd);
  8092. m_lowererMD.LoadHelperArgument(helperCallInstr, excessOpnd);
  8093. m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);
  8094. return helperCallInstr;
  8095. }
  8096. ///----------------------------------------------------------------------------
  8097. ///
  8098. /// Lowerer::LowerArgIn
  8099. ///
  8100. /// This function checks the passed-in argument count against the index of this
  8101. /// argument and uses null for a param value if the caller didn't explicitly
  8102. /// pass anything.
  8103. ///
  8104. ///----------------------------------------------------------------------------
// Lowers the ArgIn_A sequence (and optional trailing ArgIn_Rest) for a
// function: emits a check of the actual argument count against the formal
// count, assigning 'undefined' to any formal the caller did not pass.
// Returns the instruction at which lowering should resume.
IR::Instr *
Lowerer::LowerArgIn(IR::Instr *instrArgIn)
{
    IR::LabelInstr *   labelDone;
    IR::LabelInstr *   labelUndef;
    IR::LabelInstr *   labelNormal;
    IR::LabelInstr *   labelInit;
    IR::LabelInstr *   labelInitNext;
    IR::BranchInstr *  instrBranch;
    IR::Instr *        instrArgInNext;
    IR::Instr *        instrInsert;
    IR::Instr *        instrPrev;
    IR::Instr *        instrResume = nullptr;
    IR::Opnd *         dstOpnd;
    IR::Opnd *         srcOpnd;
    IR::Opnd *         opndUndef;
    Js::ArgSlot        argIndex;
    StackSym *         symParam;
    BOOLEAN            isDuplicate;
    IR::RegOpnd *      generatorArgsPtrOpnd = nullptr;

    // We start with:
    // s1 = ArgIn_A param1
    // s2 = ArgIn_A param2
    // ...
    // sn = ArgIn_A paramn
    //
    // We want to end up with:
    //
    // s1 = ArgIn_A param1            -- Note that this is unconditional
    // count = (load from param area)
    // BrLt_A $start, count, n        -- Forward cbranch to the uncommon case
    // Br $Ln
    // $start:
    // sn = assign undef
    // BrGe_A $Ln-1, count, n-1
    // sn-1 = assign undef
    // ...
    // s2 = assign undef
    // Br $done
    // $Ln:
    // sn = assign paramn
    // $Ln-1:
    // sn-1 = assign paramn-1
    // ...
    // s2 = assign param2
    // $done:

    IR::Opnd *restDst = nullptr;
    bool hasRest = instrArgIn->m_opcode == Js::OpCode::ArgIn_Rest;
    if (hasRest)
    {
        IR::Instr *restInstr = instrArgIn;
        restDst = restInstr->UnlinkDst();
        if (m_func->GetJnFunction()->GetHasImplicitArgIns() && m_func->GetInParamsCount() > 1)
        {
            // There are regular ArgIns too: walk back to the last ArgIn_A and
            // handle the rest array together with the shared excess count below.
            while (instrArgIn->m_opcode != Js::OpCode::ArgIn_A)
            {
                instrArgIn = instrArgIn->m_prev;
                if (instrResume == nullptr)
                {
                    instrResume = instrArgIn;
                }
            }
            restInstr->Remove();
        }
        else
        {
            // Rest is the only ArgIn: compute excess = actualCount - formalsCount,
            // clamp it at zero, and build the rest array immediately.
            IR::Instr * instrCount = m_lowererMD.LoadInputParamCount(instrArgIn, -this->m_func->GetInParamsCount());
            IR::Opnd * excessOpnd = instrCount->GetDst();

            IR::LabelInstr *createRestArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

            // BrGe $createRestArray, excess, 0
            InsertCompareBranch(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), Js::OpCode::BrGe_A, createRestArrayLabel, instrArgIn);

            // MOV excess, 0
            InsertMove(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), instrArgIn);

            // $createRestArray
            instrArgIn->InsertBefore(createRestArrayLabel);

            if (m_func->GetJnFunction()->IsGenerator())
            {
                generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrArgIn);
            }

            IR::IntConstOpnd * formalsOpnd = IR::IntConstOpnd::New(this->m_func->GetInParamsCount(), TyUint32, this->m_func);
            IR::Instr *prev = LowerRestParameter(formalsOpnd, restDst, excessOpnd, instrArgIn, generatorArgsPtrOpnd);
            instrArgIn->Remove();
            return prev;
        }
    }

    srcOpnd = instrArgIn->GetSrc1();
    symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();

    argIndex = symParam->GetParamSlotNum();
    if (argIndex == 1)
    {
        // The "this" argument is not source-dependent and doesn't need to be checked.
        if (m_func->GetJnFunction()->IsGenerator())
        {
            generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrArgIn);
            ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
        }

        m_lowererMD.ChangeToAssign(instrArgIn);
        return instrResume == nullptr ? instrArgIn->m_prev : instrResume;
    }

    Js::ArgSlot formalsCount = this->m_func->GetInParamsCount();

    AssertMsg(argIndex == formalsCount, "Expect to see the ArgIn's in numerical order");

    // Because there may be instructions between the ArgIn's, such as saves to the frame object,
    // we find the top of the sequence of ArgIn's and insert everything there. This assumes that
    // ArgIn's use param symbols as src's and not the results of previous instructions.

    instrPrev = instrArgIn;
    instrInsert = instrArgIn->m_next;

    // Walk back to the ArgIn for param slot 2 (slot 1 is "this"); any non-ArgIn
    // encountered first becomes the resume point so it still gets lowered.
    while (argIndex > 2)
    {
        instrPrev = instrPrev->m_prev;
        if (instrPrev->m_opcode == Js::OpCode::ArgIn_A)
        {
            srcOpnd = instrPrev->GetSrc1();
            symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
            AssertMsg(symParam->GetParamSlotNum() == argIndex - 1, "ArgIn's not in numerical order");
            argIndex = symParam->GetParamSlotNum();
        }
        else
        {
            // Make sure that this instruction gets lowered.
            if (instrResume == nullptr)
            {
                instrResume = instrPrev;
            }
        }
    }

    // The loading of parameters will be inserted above this instruction.
    instrInsert = instrPrev;
    if (instrResume == nullptr)
    {
        // We found no intervening non-ArgIn's, so lowering can resume at the previous instruction.
        instrResume = instrInsert->m_prev;
    }

    // Now insert all the checks and undef-assigns.

    if (m_func->GetJnFunction()->IsGenerator())
    {
        generatorArgsPtrOpnd = LoadGeneratorArgsPtr(instrInsert);
    }

    // excessOpnd = (load from param area) - formalCounts
    IR::Instr * instrCount = this->m_lowererMD.LoadInputParamCount(instrInsert, -formalsCount, true);
    IR::Opnd * excessOpnd = instrCount->GetDst();

    // BrLt $undef: fewer actuals than formals, go fill with undefined.
    labelUndef = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /*helperLabel*/ true);
    Lowerer::InsertBranch(Js::OpCode::BrLt_A, labelUndef, instrInsert);

    //      Br $Ln
    labelNormal = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    labelInit = labelNormal;
    instrBranch = IR::BranchInstr::New(Js::OpCode::Br, labelNormal, this->m_func);
    instrInsert->InsertBefore(instrBranch);
    this->m_lowererMD.LowerUncondBranch(instrBranch);

    // Insert the labels
    instrInsert->InsertBefore(labelUndef);
    instrInsert->InsertBefore(labelNormal);

    // MOV undefReg, undefAddress
    IR::Opnd* opndUndefAddress = this->LoadLibraryValueOpnd(labelNormal, LibraryValue::ValueUndefined);
    opndUndef =  IR::RegOpnd::New(TyMachPtr, this->m_func);
    LowererMD::CreateAssign(opndUndef, opndUndefAddress, labelNormal);

    // Tracks dst syms already initialized, to handle duplicate formal names.
    BVSparse<JitArenaAllocator> *formalsBv = JitAnew(this->m_func->m_alloc, BVSparse<JitArenaAllocator>, this->m_func->m_alloc);

    // Emit, for each formal from slot n down to slot 3, an undef-assign on the
    // helper path and the real param assign on the normal path.
    while (formalsCount > 2)
    {
        dstOpnd = instrArgIn->GetDst();

        Assert(dstOpnd->IsRegOpnd());
        isDuplicate = formalsBv->TestAndSet(dstOpnd->AsRegOpnd()->m_sym->AsStackSym()->m_id);

        // Now insert the undef initialization before the "normal" label

        //      sn = assign undef
        LowererMD::CreateAssign(dstOpnd, opndUndef, labelNormal);

        //      INC excessOpnd
        //      BrEq_A $Ln-1
        formalsCount--;
        InsertAdd(true, excessOpnd, excessOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func), labelNormal);
        labelInitNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        InsertBranch(Js::OpCode::BrEq_A, labelInitNext, labelNormal);

        // And insert the "normal" initialization before the "done" label

        //      sn = assign paramn
        // $Ln-1:

        labelInit->InsertAfter(labelInitNext);
        labelInit = labelInitNext;

        instrArgInNext = instrArgIn->m_prev;
        instrArgIn->Unlink();

        //      function foo(x, x)  { use(x); }
        // This should refer to the second 'x'. Since we reverse the order here however, we need to skip
        // the initialization of the first 'x' to not override the one for the second. WOOB:1105504
        if (isDuplicate)
        {
            instrArgIn->Free();
        }
        else
        {
            ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
            labelInit->InsertBefore(instrArgIn);
            this->m_lowererMD.ChangeToAssign(instrArgIn);
        }
        instrArgIn = instrArgInNext;

        while (instrArgIn->m_opcode != Js::OpCode::ArgIn_A)
        {
            instrArgIn = instrArgIn->m_prev;
            AssertMsg(instrArgIn, "???");
        }

        AssertMsg(instrArgIn->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum() == formalsCount,
                  "Expect all ArgIn's to be in numerical order by param slot");
    }

    // Insert final undef and normal initializations, jumping unconditionally to the end
    // rather than checking against the decremented formals count as we did inside the loop above.

    //      s2 = assign undef

    dstOpnd = instrArgIn->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    isDuplicate = formalsBv->TestAndSet(dstOpnd->AsRegOpnd()->m_sym->AsStackSym()->m_id);

    LowererMD::CreateAssign(dstOpnd, opndUndef, labelNormal);

    if (hasRest)
    {
        // On the undef path there were no excess actuals; the rest array is empty.
        InsertMove(excessOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func), labelNormal);
    }

    //      Br $done

    labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instrBranch = IR::BranchInstr::New(Js::OpCode::Br, labelDone, this->m_func);
    labelNormal->InsertBefore(instrBranch);
    this->m_lowererMD.LowerUncondBranch(instrBranch);

    //      s2 = assign param2
    // $done:

    labelInit->InsertAfter(labelDone);

    if (hasRest)
    {
        // The formals count has been tainted, so restore it before lowering rest
        IR::IntConstOpnd * formalsOpnd = IR::IntConstOpnd::New(this->m_func->GetInParamsCount(), TyUint32, this->m_func);
        LowerRestParameter(formalsOpnd, restDst, excessOpnd, labelDone, generatorArgsPtrOpnd);
    }

    instrArgIn->Unlink();
    if (isDuplicate)
    {
        instrArgIn->Free();
    }
    else
    {
        ConvertArgOpndIfGeneratorFunction(instrArgIn, generatorArgsPtrOpnd);
        labelDone->InsertBefore(instrArgIn);
        this->m_lowererMD.ChangeToAssign(instrArgIn);
    }

    return instrResume;
}
  8342. void
  8343. Lowerer::ConvertArgOpndIfGeneratorFunction(IR::Instr *instrArgIn, IR::RegOpnd *generatorArgsPtrOpnd)
  8344. {
  8345. if (this->m_func->GetJnFunction()->IsGenerator())
  8346. {
  8347. // Replace stack param operand with offset into arguments array held by
  8348. // the generator object.
  8349. IR::Opnd * srcOpnd = instrArgIn->UnlinkSrc1();
  8350. StackSym * symParam = srcOpnd->AsSymOpnd()->m_sym->AsStackSym();
  8351. Js::ArgSlot argIndex = symParam->GetParamSlotNum();
  8352. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorArgsPtrOpnd, (argIndex - 1) * MachPtr, TyMachPtr, this->m_func);
  8353. srcOpnd->Free(this->m_func);
  8354. instrArgIn->SetSrc1(indirOpnd);
  8355. }
  8356. }
  8357. IR::RegOpnd *
  8358. Lowerer::LoadGeneratorArgsPtr(IR::Instr *instrInsert)
  8359. {
  8360. IR::Instr * instr = LoadGeneratorObject(instrInsert);
  8361. IR::RegOpnd * generatorRegOpnd = instr->GetDst()->AsRegOpnd();
  8362. IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorRegOpnd, Js::JavascriptGenerator::GetArgsPtrOffset(), TyMachPtr, instrInsert->m_func);
  8363. IR::RegOpnd * argsPtrOpnd = IR::RegOpnd::New(TyMachReg, instrInsert->m_func);
  8364. LowererMD::CreateAssign(argsPtrOpnd, indirOpnd, instrInsert);
  8365. return argsPtrOpnd;
  8366. }
  8367. IR::Instr *
  8368. Lowerer::LoadGeneratorObject(IR::Instr * instrInsert)
  8369. {
  8370. StackSym * generatorSym = StackSym::NewParamSlotSym(1, instrInsert->m_func);
  8371. instrInsert->m_func->SetArgOffset(generatorSym, LowererMD::GetFormalParamOffset() * MachPtr);
  8372. IR::SymOpnd * generatorSymOpnd = IR::SymOpnd::New(generatorSym, TyMachPtr, instrInsert->m_func);
  8373. IR::RegOpnd * generatorRegOpnd = IR::RegOpnd::New(TyMachPtr, instrInsert->m_func);
  8374. return LowererMD::CreateAssign(generatorRegOpnd, generatorSymOpnd, instrInsert);
  8375. }
  8376. IR::Instr *
  8377. Lowerer::LowerArgInAsmJs(IR::Instr * instrArgIn)
  8378. {
  8379. Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
  8380. Js::ArgSlot argCount = m_func->GetJnFunction()->GetAsmJsFunctionInfoWithLock()->GetArgCount();
  8381. IR::Instr * instr = instrArgIn;
  8382. for (int argNum = argCount - 1; argNum >= 0; --argNum)
  8383. {
  8384. IR::Instr * instrPrev = instr->m_prev;
  8385. m_lowererMD.ChangeToAssign(instr);
  8386. instr = instrPrev;
  8387. }
  8388. return instr;
  8389. }
// Attempts to emit an inline fast path for a call to a known built-in library
// function (charAt/charCodeAt, Math.abs, Array.push, String.replace) during
// lowering. The call target's identity is checked at runtime against the
// library's built-in table; on mismatch, control falls to the original call
// (the helper path). Returns true if the fast path was generated.
bool
Lowerer::InlineBuiltInLibraryCall(IR::Instr *callInstr)
{
    IR::Opnd *src1 = callInstr->GetSrc1();
    IR::Opnd *src2 = callInstr->GetSrc2();

    // Get the arg count by looking at the slot number of the last arg symbol.
    if (!src2->IsSymOpnd())
    {
        // No args? Not sure this is possible, but handle it.
        return false;
    }

    StackSym *argLinkSym = src2->AsSymOpnd()->m_sym->AsStackSym();

    // Subtract "this" from the arg count.
    IntConstType argCount = argLinkSym->GetArgSlotNum() - 1;

    // Find the callee's built-in index (if any).
    Js::BuiltinFunction index = Func::GetBuiltInIndex(src1);

    // Warning!
    // Don't add new built-in to following switch. Built-ins needs to be inlined in call direct way.
    // Following is only for prejit scenarios where we don't get inlining always and generate fast path in lowerer.
    // Generating fastpath here misses fixed functions and globopt optimizations.
    switch(index)
    {
        case Js::BuiltinFunction::String_CharAt:
        case Js::BuiltinFunction::String_CharCodeAt:
            if (argCount != 1)
            {
                return false;
            }
            if (!callInstr->GetDst())
            {
                // Optimization of Char[Code]At assumes result is used.
                return false;
            }
            break;

        case Js::BuiltinFunction::Math_Abs:
#ifdef _M_IX86
            if (!AutoSystemInfo::Data.SSE2Available())
            {
                return false;
            }
#endif
            if (argCount != 1)
            {
                return false;
            }
            if (!callInstr->GetDst())
            {
                // Optimization of Abs assumes result is used.
                return false;
            }
            break;

        case Js::BuiltinFunction::Array_Push:
        {
            if (argCount != 1)
            {
                return false;
            }
            if (callInstr->GetDst())
            {
                // Optimization of push assumes result is unused.
                return false;
            }

            // Walk the two-link arg chain (value, then "this") back to the
            // array operand so we can decide whether a fast path applies.
            StackSym *linkSym = callInstr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
            Assert(linkSym->IsSingleDef());
            linkSym = linkSym->m_instrDef->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym();
            Assert(linkSym->IsSingleDef());
            IR::Opnd *const arrayOpnd = linkSym->m_instrDef->GetSrc1();
            if(!arrayOpnd->IsRegOpnd())
            {
                // This should be rare, but needs to be handled.
                // By now, we've already started some of the inlining. Simply jmp to the helper.
                // The branch will get peeped later.
                return false;
            }

            if(!ShouldGenerateArrayFastPath(arrayOpnd, false, false, false) ||
                arrayOpnd->GetValueType().IsLikelyNativeArray())
            {
                // Rejecting native array for now, since we have to do a FromVar at the call site and bail out.
                return false;
            }

            break;
        }

        case Js::BuiltinFunction::String_Replace:
        {
            if(argCount != 2)
            {
                return false;
            }
            if(!ShouldGenerateStringReplaceFastPath(callInstr, argCount))
            {
                return false;
            }
            break;
        }

        default:
            return false;
    }

    Assert(Func::IsBuiltInInlinedInLowerer(callInstr->GetSrc1()));

    // Runtime identity check: compare the call target against the address of
    // the library's built-in table entry; mismatch goes to the helper label
    // (i.e. the original call).
    IR::Opnd *callTargetOpnd = callInstr->GetSrc1();
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::Opnd *objRefOpnd = IR::MemRefOpnd::New((void*)this->GetObjRefForBuiltInTarget(callTargetOpnd->AsRegOpnd()), TyMachReg, this->m_func);

    InsertCompareBranch(callTargetOpnd, objRefOpnd, Js::OpCode::BrNeq_A, labelHelper, callInstr);
    callInstr->InsertBefore(labelHelper);

    Assert(argCount <= 2);

    // Collect the argument operands ([0] is "this") by walking the arg chain,
    // relocating each ArgOut above the helper label so the fast path can use them.
    IR::Opnd *argsOpnd[3];
    IR::Opnd *linkOpnd = callInstr->GetSrc2();
    while(linkOpnd->IsSymOpnd())
    {
        IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
        StackSym *sym = src2->m_sym->AsStackSym();
        Assert(sym->m_isSingleDef);
        IR::Instr *argInstr = sym->m_instrDef;

        Assert(argCount >= 0);
        argsOpnd[argCount] = argInstr->GetSrc1();
        argCount--;

        argInstr->Unlink();
        labelHelper->InsertAfter(argInstr);

        linkOpnd = argInstr->GetSrc2();
    }
    AnalysisAssert(argCount == -1);

    // Move startcall
    Assert(linkOpnd->IsRegOpnd());
    StackSym *sym = linkOpnd->AsRegOpnd()->m_sym;
    Assert(sym->m_isSingleDef);
    IR::Instr *startCall = sym->m_instrDef;
    Assert(startCall->m_opcode == Js::OpCode::StartCall);
    startCall->Unlink();
    labelHelper->InsertAfter(startCall);

    // $doneLabel:
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    callInstr->InsertAfter(doneLabel);

    // Emit the type-specific fast path; it jumps to doneLabel on success and
    // to labelHelper (the original call) when it can't handle the inputs.
    bool success = true;
    switch(index)
    {
        case Js::BuiltinFunction::Math_Abs:
            this->m_lowererMD.GenerateFastAbs(callInstr->GetDst(), argsOpnd[1], callInstr, labelHelper, labelHelper, doneLabel);
            break;

        case Js::BuiltinFunction::String_CharCodeAt:
        case Js::BuiltinFunction::String_CharAt:
            success = this->m_lowererMD.GenerateFastCharAt(index, callInstr->GetDst(), argsOpnd[0], argsOpnd[1],
                callInstr, labelHelper, labelHelper, doneLabel);
            break;

        case Js::BuiltinFunction::Array_Push:
            success = GenerateFastPush(argsOpnd[0], argsOpnd[1], callInstr, labelHelper, labelHelper, nullptr, doneLabel);
            break;

        case Js::BuiltinFunction::String_Replace:
            success = GenerateFastReplace(argsOpnd[0], argsOpnd[1], argsOpnd[2], callInstr, labelHelper, labelHelper, doneLabel);
            break;

        default:
            Assert(UNREACHED);
    }

    // Fast path falls through to doneLabel, skipping the helper-path call.
    IR::Instr *instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, doneLabel, this->m_func);
    labelHelper->InsertBefore(instr);

    return success;
}
  8545. // Perform lowerer part of inlining built-in function.
  8546. // For details, see inline.cpp.
  8547. //
  8548. // Description of changes here (note that taking care of Argouts are similar to InlineeStart):
  8549. // - Move ArgOut_A_InlineBuiltIn next to the call instr -- used by bailout processing in register allocator.
  8550. // - Remove StartCall and InlineBuiltInStart for this call.
  8551. // Before:
  8552. // StartCall fn
  8553. // d1 = BIA s1, link1
  8554. // ...
  8555. // InlineBuiltInStart fn, link0
  8556. // After:
  8557. // ...
  8558. // d1 = BIA s1, NULL
  8559. void Lowerer::LowerInlineBuiltIn(IR::Instr* builtInEndInstr)
  8560. {
  8561. Assert(builtInEndInstr->m_opcode == Js::OpCode::InlineBuiltInEnd || builtInEndInstr->m_opcode == Js::OpCode::InlineNonTrackingBuiltInEnd);
  8562. IR::Instr* startCallInstr;
  8563. builtInEndInstr->IterateArgInstrs([&](IR::Instr* argInstr) {
  8564. startCallInstr = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
  8565. return false;
  8566. });
  8567. // Keep the startCall around as bailout refers to it. Just unlink it for now - do not delete it.
  8568. startCallInstr->Unlink();
  8569. builtInEndInstr->Remove();
  8570. }
  8571. Js::JavascriptFunction **
  8572. Lowerer::GetObjRefForBuiltInTarget(IR::RegOpnd * regOpnd)
  8573. {
  8574. Js::JavascriptFunction ** mathFns =
  8575. this->m_func->GetScriptContext()->GetLibrary()->GetBuiltinFunctions();
  8576. Js::BuiltinFunction index = regOpnd->m_sym->m_builtInIndex;
  8577. AssertMsg(index < Js::BuiltinFunction::Count, "Invalid built-in index on a call target marked as built-in");
  8578. return mathFns + index;
  8579. }
// Lowers NewRegEx: allocates a JavascriptRegExp object (recycler or
// mark-temp) and initializes its fields inline, instead of calling the
// runtime constructor. src1 is the (address of the) compiled regex pattern.
// When the allocation is a hoistable mark-temp, the vtable and pattern
// initializations are hoisted to the outermost loop top since they never change.
IR::Instr *
Lowerer::LowerNewRegEx(IR::Instr * instr)
{
    IR::Opnd *src1 = instr->UnlinkSrc1();

    Assert(src1->IsAddrOpnd());

#if ENABLE_REGEX_CONFIG_OPTIONS
    if (REGEX_CONFIG_FLAG(RegexTracing))
    {
        // Tracing enabled: fall back to the runtime helper so the allocation is traced.
        Assert(!instr->GetDst()->CanStoreTemp());
        IR::Instr * instrPrev = LoadScriptContext(instr);
        instrPrev = m_lowererMD.LoadHelperArgument(instr, src1);
        m_lowererMD.ChangeToHelperCall(instr, IR::HelperScrRegEx_OP_NewRegEx);
        return instrPrev;
    }
#endif
    IR::Instr * instrPrev = instr->m_prev;
    IR::RegOpnd * dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    IR::SymOpnd * tempObjectSymOpnd;
    // isZeroed tells the MemInit helpers whether zero-stores can be skipped.
    bool isZeroed = GenerateRecyclerOrMarkTempAlloc(instr, dstOpnd, IR::HelperAllocMemForJavascriptRegExp, sizeof(Js::JavascriptRegExp), &tempObjectSymOpnd);
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Hoist the vtable and pattern init to the outer most loop top as it never changes
        InsertMove(tempObjectSymOpnd,
                   LoadVTableValueOpnd(this->outerMostLoopLabel, VTableValue::VtableJavascriptRegExp),
                   this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, 0, LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp), instr, isZeroed);
    }
    // Initialize the object's header and fields in place.
    GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfType(),
                    this->LoadLibraryValueOpnd(instr, LibraryValue::ValueRegexType), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfAuxSlots(), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfObjectArray(), instr, isZeroed);
    if (tempObjectSymOpnd && !PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func) && this->outerMostLoopLabel)
    {
        // Pattern is loop-invariant too; store it once at the outermost loop top.
        InsertMove(IR::SymOpnd::New(tempObjectSymOpnd->m_sym,
                                    tempObjectSymOpnd->m_offset + Js::JavascriptRegExp::GetOffsetOfPattern(), TyMachPtr, this->m_func),
                   src1, this->outerMostLoopLabel, false);
    }
    else
    {
        GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfPattern(), src1, instr, isZeroed);
    }
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfSplitPattern(), instr, isZeroed);
    GenerateMemInitNull(dstOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), instr, isZeroed);
    GenerateMemInit(dstOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), 0, instr, isZeroed);
    // All initialization is emitted; the original instruction is no longer needed.
    instr->Remove();
    return instrPrev;
}
  8630. IR::Instr *
  8631. Lowerer::GenerateRuntimeError(IR::Instr * insertBeforeInstr, Js::MessageId errorCode, IR::JnHelperMethod helper /*= IR::JnHelperMethod::HelperOp_RuntimeTypeError*/)
  8632. {
  8633. IR::Instr * runtimeErrorInstr = IR::Instr::New(Js::OpCode::RuntimeTypeError, this->m_func);
  8634. runtimeErrorInstr->SetSrc1(IR::IntConstOpnd::New(errorCode, TyInt32, this->m_func, true));
  8635. insertBeforeInstr->InsertBefore(runtimeErrorInstr);
  8636. return this->LowerUnaryHelperMem(runtimeErrorInstr, helper);
  8637. }
  8638. bool Lowerer::IsNullOrUndefRegOpnd(IR::RegOpnd *opnd) const
  8639. {
  8640. StackSym *sym = opnd->m_sym;
  8641. if (!sym->IsConst() || sym->IsIntConst() || sym->IsFloatConst())
  8642. {
  8643. return false;
  8644. }
  8645. Js::Var var = sym->GetConstAddress();
  8646. Js::TypeId typeId = Js::RecyclableObject::FromVar(var)->GetTypeId();
  8647. return typeId == Js::TypeIds_Null || typeId == Js::TypeIds_Undefined;
  8648. }
  8649. bool Lowerer::IsConstRegOpnd(IR::RegOpnd *opnd) const
  8650. {
  8651. StackSym *sym = opnd->m_sym;
  8652. if (!sym->IsConst() || sym->IsIntConst() || sym->IsFloatConst())
  8653. {
  8654. return false;
  8655. }
  8656. Js::Var var = sym->GetConstAddress();
  8657. Js::TypeId typeId = Js::RecyclableObject::FromVar(var)->GetTypeId();
  8658. return typeId == Js::TypeIds_Null || typeId == Js::TypeIds_Undefined || typeId == Js::TypeIds_Boolean;
  8659. }
  8660. bool
  8661. Lowerer::HasSideEffects(IR::Instr *instr)
  8662. {
  8663. if (LowererMD::IsCall(instr))
  8664. {
  8665. #ifdef _M_IX86
  8666. IR::Opnd *src1 = instr->GetSrc1();
  8667. if (src1->IsHelperCallOpnd())
  8668. {
  8669. IR::HelperCallOpnd * helper = src1->AsHelperCallOpnd();
  8670. switch(helper->m_fnHelper)
  8671. {
  8672. case IR::HelperOp_Int32ToAtomInPlace:
  8673. case IR::HelperOp_Int32ToAtom:
  8674. case IR::HelperOp_UInt32ToAtom:
  8675. return false;
  8676. }
  8677. }
  8678. #endif
  8679. return true;
  8680. }
  8681. return instr->HasAnySideEffects();
  8682. }
// Lowers an inlined Math.random() call.
// On x64, once the library's PRNG is seeded, the xorshift128+ generator is
// emitted inline: the two 64-bit seed words are loaded from the library,
// advanced, stored back, and the mixed sum is bit-cast into a double in
// [1.0, 2.0) before subtracting 1.0 to land in [0.0, 1.0). On all other
// paths the HelperDirectMath_Random runtime helper is called instead.
IR::Instr*
Lowerer::GenerateFastInlineBuiltInMathRandom(IR::Instr* instr)
{
    AssertMsg(instr->GetDst()->IsFloat(), "dst must be float.");
    IR::Instr* retInstr = instr->m_prev;
    IR::Opnd* dst = instr->GetDst();
#if defined(_M_X64)
    if (m_func->GetScriptContext()->GetLibrary()->IsPRNGSeeded())
    {
        const uint64 mExp = 0x3FF0000000000000;  // IEEE-754 exponent bits of 1.0
        const uint64 mMant = 0x000FFFFFFFFFFFFF; // IEEE-754 mantissa mask
        IR::RegOpnd* r0 = IR::RegOpnd::New(TyUint64, m_func); // s0
        IR::RegOpnd* r1 = IR::RegOpnd::New(TyUint64, m_func); // s1
        IR::RegOpnd* r3 = IR::RegOpnd::New(TyUint64, m_func); // helper uint64 reg
        IR::RegOpnd* r4 = IR::RegOpnd::New(TyFloat64, m_func); // helper float64 reg

        // ===========================================================
        // s0 = scriptContext->GetLibrary()->GetRandSeed1();
        // s1 = scriptContext->GetLibrary()->GetRandSeed0();
        // ===========================================================
        this->m_lowererMD.CreateAssign(r0,
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContext()->GetLibrary() + Js::JavascriptLibrary::GetRandSeed1Offset(), TyUint64, instr->m_func), instr);
        this->m_lowererMD.CreateAssign(r1,
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContext()->GetLibrary() + Js::JavascriptLibrary::GetRandSeed0Offset(), TyUint64, instr->m_func), instr);

        // ===========================================================
        // s1 ^= s1 << 23;
        // ===========================================================
        this->m_lowererMD.CreateAssign(r3, r1, instr);
        this->InsertShift(Js::OpCode::Shl_A, false, r3, r3, IR::IntConstOpnd::New(23, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);

        // ===========================================================
        // s1 ^= s1 >> 17;
        // ===========================================================
        this->m_lowererMD.CreateAssign(r3, r1, instr);
        this->InsertShift(Js::OpCode::ShrU_A, false, r3, r3, IR::IntConstOpnd::New(17, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);

        // ===========================================================
        // s1 ^= s0;
        // ===========================================================
        this->InsertXor(r1, r1, r0, instr);

        // ===========================================================
        // s1 ^= s0 >> 26;
        // ===========================================================
        this->m_lowererMD.CreateAssign(r3, r0, instr);
        this->InsertShift(Js::OpCode::ShrU_A, false, r3, r3, IR::IntConstOpnd::New(26, TyInt8, m_func), instr);
        this->InsertXor(r1, r1, r3, instr);

        // ===========================================================
        // scriptContext->GetLibrary()->SetRandSeed0(s0);
        // scriptContext->GetLibrary()->SetRandSeed1(s1);
        // ===========================================================
        this->m_lowererMD.CreateAssign(
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContext()->GetLibrary() + Js::JavascriptLibrary::GetRandSeed0Offset(), TyUint64, m_func), r0, instr);
        this->m_lowererMD.CreateAssign(
            IR::MemRefOpnd::New((BYTE*)m_func->GetScriptContext()->GetLibrary() + Js::JavascriptLibrary::GetRandSeed1Offset(), TyUint64, m_func), r1, instr);

        // ===========================================================
        // dst = bit_cast<float64>(((s0 + s1) & mMant) | mExp);
        // ===========================================================
        this->InsertAdd(false, r1, r1, r0, instr);
        this->m_lowererMD.CreateAssign(r3, IR::AddrOpnd::New((Js::Var)mMant, IR::AddrOpndKindConstantVar, m_func, true), instr);
        this->InsertAnd(r1, r1, r3, instr);
        this->m_lowererMD.CreateAssign(r3, IR::AddrOpnd::New((Js::Var)mExp, IR::AddrOpndKindConstantVar, m_func, true), instr);
        this->InsertOr(r1, r1, r3, instr);
        this->InsertMoveBitCast(dst, r1, instr);

        // ===================================================================
        // dst -= 1.0;
        // ===================================================================
        this->m_lowererMD.CreateAssign(r4, IR::MemRefOpnd::New((double*)&Js::JavascriptNumber::ONE_POINT_ZERO, TyFloat64, m_func, IR::AddrOpndKindDynamicDoubleRef), instr);
        this->InsertSub(false, dst, dst, r4, instr);
    }
    else
#endif
    {
        // Helper fallback: dst = HelperDirectMath_Random(scriptContext).
        // Route the result through a temp register when dst is not a RegOpnd.
        IR::Opnd* tmpdst = dst;
        if (!dst->IsRegOpnd())
        {
            tmpdst = IR::RegOpnd::New(dst->GetType(), instr->m_func);
        }
        LoadScriptContext(instr);
        IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, tmpdst, instr->m_func);
        instr->InsertBefore(helperCallInstr);
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperDirectMath_Random);
        if (tmpdst != dst)
        {
            InsertMove(dst, tmpdst, instr);
        }
    }
    instr->Remove();
    return retInstr;
}
// Lowers a CallDirect instruction. The callee function object is recovered
// from the ArgOut_A_InlineSpecialized instruction linked through src2, which
// is then spliced out of the arg chain; any implicit-call bailout attached to
// the call is split out and lowered before the call itself is emitted.
IR::Instr *
Lowerer::LowerCallDirect(IR::Instr * instr)
{
    IR::Opnd* linkOpnd = instr->UnlinkSrc2();
    StackSym *linkSym = linkOpnd->AsSymOpnd()->m_sym->AsStackSym();
    IR::Instr* argInstr = linkSym->m_instrDef;
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_InlineSpecialized);
    // The specialized arg-out carries the callee in src1 and the remainder of
    // the arg chain in src2; take both and drop the arg-out itself.
    IR::Opnd* funcObj = argInstr->UnlinkSrc1();
    instr->SetSrc2(argInstr->UnlinkSrc2());
    argInstr->Remove();
    if(instr->HasBailOutInfo())
    {
        // Post-op bailout: split an implicit-call check to run right after the
        // call, then lower it.
        IR::Instr * bailOutInstr = this->SplitBailOnImplicitCall(instr, instr->m_next, instr->m_next);
        this->LowerBailOnEqualOrNotEqual(bailOutInstr);
    }
    // Only request a return value when the call's result is actually used.
    Js::CallFlags flags = instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed;
    return this->GenerateDirectCall(instr, funcObj, (ushort)flags);
}
  8789. IR::Instr *
  8790. Lowerer::GenerateDirectCall(IR::Instr* inlineInstr, IR::Opnd* funcObj, ushort callflags)
  8791. {
  8792. int32 argCount = m_lowererMD.LowerCallArgs(inlineInstr, callflags);
  8793. m_lowererMD.LoadHelperArgument(inlineInstr, funcObj);
  8794. m_lowererMD.LowerCall(inlineInstr, (Js::ArgSlot)argCount); //to account for function object and callinfo
  8795. return inlineInstr->m_prev;
  8796. }
  8797. /*
  8798. * GenerateHelperToArrayPushFastPath
  8799. * Generates Helper Call and pushes arguments to the Push HelperCall
  8800. */
  8801. IR::Instr *
  8802. Lowerer::GenerateHelperToArrayPushFastPath(IR::Instr * instr, IR::LabelInstr * bailOutLabelHelper)
  8803. {
  8804. IR::Opnd * arrayHelperOpnd = instr->UnlinkSrc1();
  8805. IR::Opnd * elementHelperOpnd = instr->UnlinkSrc2();
  8806. IR::JnHelperMethod helperMethod;
  8807. if(elementHelperOpnd->IsInt32())
  8808. {
  8809. Assert(arrayHelperOpnd->GetValueType().IsLikelyNativeIntArray());
  8810. helperMethod = IR::HelperArray_NativeIntPush;
  8811. m_lowererMD.LoadHelperArgument(instr, elementHelperOpnd);
  8812. }
  8813. else if(elementHelperOpnd->IsFloat())
  8814. {
  8815. Assert(arrayHelperOpnd->GetValueType().IsLikelyNativeFloatArray());
  8816. helperMethod = IR::HelperArray_NativeFloatPush;
  8817. m_lowererMD.LoadDoubleHelperArgument(instr, elementHelperOpnd);
  8818. }
  8819. else
  8820. {
  8821. helperMethod = IR::HelperArray_VarPush;
  8822. m_lowererMD.LoadHelperArgument(instr, elementHelperOpnd);
  8823. }
  8824. m_lowererMD.LoadHelperArgument(instr, arrayHelperOpnd);
  8825. LoadScriptContext(instr);
  8826. return m_lowererMD.ChangeToHelperCall(instr, helperMethod);
  8827. }
  8828. /*
  8829. * GenerateHelperToArrayPopFastPath
  8830. * Generates Helper Call and pushes arguments to the Pop HelperCall
  8831. */
  8832. IR::Instr *
  8833. Lowerer::GenerateHelperToArrayPopFastPath(IR::Instr * instr, IR::LabelInstr * doneLabel, IR::LabelInstr * bailOutLabelHelper)
  8834. {
  8835. IR::Opnd * arrayHelperOpnd = instr->UnlinkSrc1();
  8836. ValueType arrayValueType = arrayHelperOpnd->GetValueType();
  8837. IR::JnHelperMethod helperMethod;
  8838. //Decide the helperMethod based on dst availability and nativity of the array.
  8839. if(arrayValueType.IsLikelyNativeArray() && !instr->GetDst())
  8840. {
  8841. helperMethod = IR::HelperArray_NativePopWithNoDst;
  8842. }
  8843. else if(arrayValueType.IsLikelyNativeIntArray())
  8844. {
  8845. helperMethod = IR::HelperArray_NativeIntPop;
  8846. }
  8847. else if(arrayValueType.IsLikelyNativeFloatArray())
  8848. {
  8849. helperMethod = IR::HelperArray_NativeFloatPop;
  8850. }
  8851. else
  8852. {
  8853. helperMethod = IR::HelperArray_VarPop;
  8854. }
  8855. m_lowererMD.LoadHelperArgument(instr, arrayHelperOpnd);
  8856. //We do not need scriptContext for HelperArray_NativePopWithNoDst call.
  8857. if(helperMethod != IR::HelperArray_NativePopWithNoDst)
  8858. {
  8859. LoadScriptContext(instr);
  8860. }
  8861. IR::Instr * retInstr = m_lowererMD.ChangeToHelperCall(instr, helperMethod, bailOutLabelHelper);
  8862. //We don't need missing item check for var arrays, as there it is taken care by the helper.
  8863. if(arrayValueType.IsLikelyNativeArray())
  8864. {
  8865. if(retInstr->GetDst())
  8866. {
  8867. //Do this check only for native arrays with Dst. For Var arrays, this is taken care in the Runtime helper itself.
  8868. InsertCompareBranch(GetMissingItemOpnd(retInstr->GetDst()->GetType(), m_func), retInstr->GetDst(), Js::OpCode::BrNeq_A, doneLabel, bailOutLabelHelper);
  8869. }
  8870. else
  8871. {
  8872. //We need unconditional jump to doneLabel, if there is no dst in Pop instr.
  8873. InsertBranch(Js::OpCode::Br, true, doneLabel, bailOutLabelHelper);
  8874. }
  8875. }
  8876. return retInstr;
  8877. }
// Lowers a BrTrue_A/BrFalse_A that may carry bailout info. When bailout info
// is present, an implicit-call bailout check is split out between the helper
// call and the branch; if a debugger bailout was shared onto the branch (aux
// bailout), it is lowered here against the same BailOutInfo, since branches
// are not handled in SplitBailForDebugger.
IR::Instr *
Lowerer::LowerCondBranchCheckBailOut(IR::BranchInstr * branchInstr, IR::Instr * helperCall, bool isHelper)
{
    Assert(branchInstr->m_opcode == Js::OpCode::BrTrue_A || branchInstr->m_opcode == Js::OpCode::BrFalse_A);
    if (branchInstr->HasBailOutInfo())
    {
        IR::BailOutKind debuggerBailOutKind = IR::BailOutInvalid;
        if (branchInstr->HasAuxBailOut())
        {
            // We have shared debugger bailout. For branches we lower it here, not in SplitBailForDebugger.
            // See SplitBailForDebugger for details.
            AssertMsg(!(branchInstr->GetBailOutKind() & IR::BailOutForDebuggerBits), "There should be no debugger bits in main bailout kind.");
            debuggerBailOutKind = branchInstr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits;
            AssertMsg((debuggerBailOutKind & ~(IR::BailOutIgnoreException | IR::BailOutForceByFlag)) == 0, "Only IR::BailOutIgnoreException|ForceByFlag supported here.");
        }
        IR::Instr * bailOutInstr = this->SplitBailOnImplicitCall(branchInstr, helperCall, branchInstr);
        IR::Instr* prevInstr = this->LowerBailOnEqualOrNotEqual(bailOutInstr, branchInstr, nullptr, nullptr, isHelper);
        if (debuggerBailOutKind != IR::BailOutInvalid)
        {
            // Note that by this time implicit calls bailout is already lowered.
            // What we do here is use same bailout info and lower debugger bailout which would be shared bailout.
            BailOutInfo* bailOutInfo = bailOutInstr->GetBailOutInfo();
            IR::BailOutInstr* debuggerBailoutInstr = IR::BailOutInstr::New(
                Js::OpCode::BailForDebugger, debuggerBailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
            prevInstr->InsertAfter(debuggerBailoutInstr);
            // The result of that is:
            // original helper op_* instr, then debugger bailout, then implicit calls bailout/etc with the branch instr.
            // Example:
            // s35(eax).i32    =  CALL  Op_GreaterEqual.u32                 # -- original op_* helper
            // s34.i32         =  MOV   s35(eax).i32                        #
            //                    BailForDebugger                           # Bailout: #0042 (BailOutIgnoreException) -- the debugger bailout
            //                    CMP   [0x0003BDE0].i8, 1 (0x1).i8         # -- implicit calls check
            //                    JEQ   $L10                                #
            //$L11: [helper]                                                #
            //                    CALL  SaveAllRegistersAndBranchBailOut.u32 # Bailout: #0042 (BailOutOnImplicitCalls)
            //                    JMP   $L5                                 #
            //$L10: [helper]                                                #
            //                    BrFalse_A $L3, s34.i32                    #0034 -- The BrTrue/BrFalse branch (branch instr)
            //$L6: [helper]                                                 #0042
            this->LowerBailForDebugger(debuggerBailoutInstr, isHelper);
            // After lowering this we will have a check which on bailout condition will JMP to $L11.
        }
    }
    return m_lowererMD.LowerCondBranch(branchInstr);
}
  8923. IR::Instr *
  8924. Lowerer::LoadArgumentsFromStack(IR::Instr * instr)
  8925. {
  8926. IR::Instr * prevInstr = instr->m_prev;
  8927. Assert(instr->GetDst()->IsRegOpnd());
  8928. if (instr->m_func->IsInlinee())
  8929. {
  8930. instr->ReplaceSrc1(instr->m_func->GetInlineeArgumentsObjectSlotOpnd());
  8931. }
  8932. else
  8933. {
  8934. instr->ReplaceSrc1(this->m_lowererMD.CreateStackArgumentsSlotOpnd());
  8935. }
  8936. this->m_lowererMD.ChangeToAssign(instr);
  8937. return prevInstr;
  8938. }
// Returns a SymOpnd through which the current frame's CallInfo can be read.
// For generator functions the CallInfo is loaded off the generator object
// (parameter 1) and spilled to a fresh stack sym so both paths hand back a
// SymOpnd; otherwise it is simply the second implicit parameter slot.
IR::SymOpnd *
Lowerer::LoadCallInfo(IR::Instr * instrInsert)
{
    IR::SymOpnd * srcOpnd;
    Func * func = instrInsert->m_func;
    if (func->GetJnFunction()->IsGenerator())
    {
        // Generator function arguments and ArgumentsInfo are not on the stack. Instead they
        // are accessed off the generator object (which is prm1).
        StackSym * generatorSym = StackSym::NewParamSlotSym(1, func);
        func->SetArgOffset(generatorSym, LowererMD::GetFormalParamOffset() * MachPtr);
        IR::SymOpnd * generatorSymOpnd = IR::SymOpnd::New(generatorSym, TyMachPtr, func);
        IR::RegOpnd * generatorRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
        LowererMD::CreateAssign(generatorRegOpnd, generatorSymOpnd, instrInsert);
        // Load generator->callInfo into a register, then store it into a
        // stack sym so the caller receives a SymOpnd as on the normal path.
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(generatorRegOpnd, Js::JavascriptGenerator::GetCallInfoOffset(), TyMachPtr, func);
        IR::Instr * instr = LowererMD::CreateAssign(IR::RegOpnd::New(TyMachPtr, func), indirOpnd, instrInsert);
        StackSym * callInfoSym = StackSym::New(TyMachReg, func);
        IR::SymOpnd * callInfoSymOpnd = IR::SymOpnd::New(callInfoSym, TyMachReg, func);
        LowererMD::CreateAssign(callInfoSymOpnd, instr->GetDst(), instrInsert);
        srcOpnd = IR::SymOpnd::New(callInfoSym, TyMachReg, func);
    }
    else
    {
        // Otherwise callInfo is always the "second" argument.
        // The stack looks like this:
        //
        //     script param N
        //     ...
        //     script param 1
        //     callinfo
        //     function object
        //     return addr
        //     FP -> FP chain
        StackSym * srcSym = LowererMD::GetImplicitParamSlotSym(1, func);
        srcOpnd = IR::SymOpnd::New(srcSym, TyMachReg, func);
    }
    return srcOpnd;
}
// Lowers BailOnNotStackArgs. Bails out when the arguments object was not
// optimized to stack arguments (src1 != src2); for non-inlined functions it
// additionally bails when the real actuals count reaches the inlinee arg-out
// limit. If stack args were never enabled for this function, throws a rejit
// exception (inline-apply cannot work) instead of emitting a bailout.
IR::Instr *
Lowerer::LowerBailOnNotStackArgs(IR::Instr * instr)
{
    if (!this->m_func->GetHasStackArgs())
    {
        throw Js::RejitException(RejitReason::InlineApplyDisabled);
    }
    IR::Instr * prevInstr = instr->m_prev;
    // Bail out test
    // Label to skip Bailout and continue
    IR::LabelInstr * continueLabelInstr;
    IR::Instr *instrNext = instr->m_next;
    if (instrNext->IsLabelInstr())
    {
        // Reuse the label that already follows the instruction.
        continueLabelInstr = instrNext->AsLabelInstr();
    }
    else
    {
        continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
        instr->InsertAfter(continueLabelInstr);
    }
    IR::LabelInstr * helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    if (!instr->m_func->IsInlinee())
    {
        //BailOut if it is not stack args or the number of actuals (except "this" argument) is greater than or equal to 15.
        IR::Opnd* stackArgs = instr->UnlinkSrc1();
        InsertCompareBranch(stackArgs, instr->UnlinkSrc2(), Js::OpCode::BrNeq_A, helperLabelInstr, instr);
        // Load the real actuals count and continue only when it is below the
        // inlinee arg-out limit; otherwise fall through into the bailout.
        IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyUint32, instr->m_func);
        IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd, stackArgs, instr->m_func);
        ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); //LdLen_A works only on stack arguments
        instr->InsertBefore(ldLen);
        this->GenerateFastRealStackArgumentsLdLen(ldLen);
        this->InsertCompareBranch(ldLenDstOpnd, IR::IntConstOpnd::New(Js::InlineeCallInfo::MaxInlineeArgoutCount, TyUint32, m_func, true), Js::OpCode::BrLt_A, true, continueLabelInstr, instr);
    }
    else
    {
        //For Inlined functions, we are sure actuals can't exceed Js::InlineeCallInfo::MaxInlineeArgoutCount (15).
        InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(), Js::OpCode::BrEq_A, continueLabelInstr, instr);
    }
    instr->InsertBefore(helperLabelInstr);
    this->GenerateBailOut(instr, nullptr, nullptr);
    return prevInstr;
}
// Lowers BailOnNotSpreadable: verifies that the spread argument is a simple,
// non-optimized JavaScript array with no missing values and few enough
// elements for the inline-spread path, bailing out otherwise. When the value
// type alone already rules out the fast path, a rejit exception is thrown
// instead of emitting any runtime checks.
IR::Instr *
Lowerer::LowerBailOnNotSpreadable(IR::Instr *instr)
{
    // We only avoid bailing out / throwing a rejit exception when the array operand is a simple, non-optimized, non-object array.
    IR::Instr * prevInstr = instr->m_prev;
    Func *func = instr->m_func;
    IR::RegOpnd *arrayOpnd = nullptr;
    IR::Opnd *arraySrcOpnd = instr->UnlinkSrc1();
    if (!arraySrcOpnd->IsRegOpnd())
    {
        // Materialize the array into a register so it can be tested below.
        arrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
        LowererMD::CreateAssign(arrayOpnd, arraySrcOpnd, instr);
    }
    else
    {
        arrayOpnd = arraySrcOpnd->AsRegOpnd();
    }
    const ValueType baseValueType(arrayOpnd->GetValueType());
    // Check if we can just throw a rejit exception based on valuetype alone instead of bailing out.
    if (!baseValueType.IsLikelyArray()
        || baseValueType.IsLikelyAnyOptimizedArray()
        || (baseValueType.IsLikelyObject() && (baseValueType.GetObjectType() == ObjectType::ObjectWithArray))
        // Validate that GenerateArrayTest will not fail.
        || !(baseValueType.IsUninitialized() || baseValueType.HasBeenObject())
        || m_func->IsInlinee())
    {
        throw Js::RejitException(RejitReason::InlineSpreadDisabled);
    }
    // Past this point, we will need to use a bailout.
    IR::LabelInstr *bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true /* isOpHelper */);
    // See if we can skip various array checks on value type alone
    if (!baseValueType.IsArray())
    {
        GenerateArrayTest(arrayOpnd, bailOutLabel, bailOutLabel, instr, false);
    }
    if (!(baseValueType.IsArray() && baseValueType.HasNoMissingValues()))
    {
        // Bail out unless the HasNoMissingValues flag is set on the array.
        InsertTestBranch(
            IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, func),
            IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, func, true),
            Js::OpCode::BrEq_A,
            bailOutLabel,
            instr);
    }
    // Bail out when the array length exceeds what inlinee arg-outs can carry.
    IR::IndirOpnd *arrayLenPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, func);
    InsertCompareBranch(arrayLenPtrOpnd, IR::IntConstOpnd::New(Js::InlineeCallInfo::MaxInlineeArgoutCount - 1, TyUint8, func), Js::OpCode::BrGt_A, true, bailOutLabel, instr);
    IR::LabelInstr *skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
    InsertBranch(Js::OpCode::Br, skipBailOutLabel, instr);
    instr->InsertBefore(bailOutLabel);
    instr->InsertAfter(skipBailOutLabel);
    GenerateBailOut(instr);
    return prevInstr;
}
  9073. IR::Instr *
  9074. Lowerer::LowerBailOnNotPolymorphicInlinee(IR::Instr * instr)
  9075. {
  9076. Assert(instr->HasBailOutInfo() && (instr->GetBailOutKind() == IR::BailOutOnFailedPolymorphicInlineTypeCheck || instr->GetBailOutKind() == IR::BailOutOnPolymorphicInlineFunction));
  9077. IR::Instr* instrPrev = instr->m_prev;
  9078. this->GenerateBailOut(instr, nullptr, nullptr);
  9079. return instrPrev;
  9080. }
// Emits the compare+branch that guards a BailOnEqual/BailOnNotEqual bailout
// and ensures a continue label follows the instruction. For the injected
// (test-only) bailout kind, one source is hoisted into a register first so a
// compare can be emitted even though both operands are the constant 0.
void
Lowerer::LowerBailoutCheckAndLabel(IR::Instr *instr, bool onEqual, bool isHelper)
{
    // Label to skip Bailout and continue
    IR::LabelInstr * continueLabelInstr;
    IR::Instr *instrNext = instr->m_next;
    if (instrNext->IsLabelInstr())
    {
        // Reuse the label that already follows the instruction.
        continueLabelInstr = instrNext->AsLabelInstr();
    }
    else
    {
        continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, isHelper);
        instr->InsertAfter(continueLabelInstr);
    }
    if(instr->GetBailOutKind() == IR::BailOutInjected)
    {
        // BailOnEqual 0, 0
        Assert(onEqual);
        Assert(instr->GetSrc1()->IsEqual(instr->GetSrc2()));
        Assert(instr->GetSrc1()->AsIntConstOpnd()->GetValue() == 0);
        // The operands cannot be equal when generating a compare (assert) but since this is for testing purposes, hoist a src.
        // Ideally, we would just create a BailOut instruction that generates a guaranteed bailout, but there seem to be issues
        // with doing this in a non-helper path. So finally, it would generate:
        //     xor s0, s0
        //     test s0, s0
        //     jnz $continue
        //   $bailout:
        //     // bailout
        //   $continue:
        instr->HoistSrc1(LowererMD::GetLoadOp(instr->GetSrc1()->GetType()));
    }
    // Branch past the bailout when the condition does NOT hold.
    InsertCompareBranch(instr->UnlinkSrc1(), instr->UnlinkSrc2(),
        onEqual ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, continueLabelInstr, instr);
    if (!isHelper)
    {
        // Mark the fall-through bailout path as a helper block.
        IR::LabelInstr * helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        instr->InsertBefore(helperLabelInstr);
    }
}
// Lowers BailOnEqual/BailOnNotEqual: emits the guarding compare+branch via
// LowerBailoutCheckAndLabel, then the bailout itself. For post-op
// implicit-call bailouts on LdFld/StFld with a valid inline cache, the
// bailout path additionally records FldInfo_FromAccessor in the dynamic
// profile when the implicit call was an accessor, so the next JIT knows.
IR::Instr *
Lowerer::LowerBailOnEqualOrNotEqual(IR::Instr * instr,
                                    IR::BranchInstr *branchInstr,      // = nullptr
                                    IR::LabelInstr *labelBailOut,      // = nullptr
                                    IR::PropertySymOpnd * propSymOpnd, // = nullptr
                                    bool isHelper)                     // = false
{
    IR::Instr * prevInstr = instr->m_prev;
    // Bail out test
    bool onEqual = instr->m_opcode == Js::OpCode::BailOnEqual;
    LowerBailoutCheckAndLabel(instr, onEqual, isHelper);
    // BailOutOnImplicitCalls is a post-op bailout. Since we look at the profile info for LdFld/StFld to decide whether the instruction may or may not call an accessor,
    // we need to update this profile information on the bailout path for BailOutOnImplicitCalls if the implicit call was an accessor call.
    if(propSymOpnd && ((instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCalls) && (propSymOpnd->m_inlineCacheIndex != -1) &&
        instr->m_func->GetJnFunction()->HasDynamicProfileInfo())
    {
        // result = AND implCallFlags, ~ImplicitCall_None
        //          TST result, ImplicitCall_Accessor
        //          JEQ $bail
        //          OR profiledFlags, FldInfoAccessor
        // $bail
        IR::Opnd * implicitCallFlags = GetImplicitCallFlagsOpnd();
        IR::Opnd * accessorImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_Accessor & ~Js::ImplicitCall_None, GetImplicitCallFlagsType(), instr->m_func, true);
        IR::Opnd * maskNoImplicitCall = IR::IntConstOpnd::New((Js::ImplicitCallFlags)~Js::ImplicitCall_None, GetImplicitCallFlagsType(), instr->m_func, true);
        IR::Opnd * fldInfoAccessor = IR::IntConstOpnd::New(Js::FldInfo_FromAccessor, GetFldInfoFlagsType(), instr->m_func, true);
        IR::LabelInstr * label = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true);
        IR::Instr * andInstr = InsertAnd(IR::RegOpnd::New(GetImplicitCallFlagsType(), instr->m_func), implicitCallFlags, maskNoImplicitCall, instr);
        InsertTestBranch(andInstr->GetDst(), accessorImplicitCall, Js::OpCode::BrEq_A, label, instr);
        // OR the accessor bit into this cache's profiled field-info flags.
        Js::FldInfo * info = instr->m_func->GetJnFunction()->GetAnyDynamicProfileInfo()->GetFldInfo(instr->m_func->GetJnFunction(), propSymOpnd->m_inlineCacheIndex);
        IR::Opnd * profiledFlags = IR::MemRefOpnd::New((char*)info + info->GetOffsetOfFlags(), TyInt8, instr->m_func);
        InsertOr(profiledFlags, profiledFlags, fldInfoAccessor, instr);
        instr->InsertBefore(label);
    }
    this->GenerateBailOut(instr, branchInstr, labelBailOut);
    return prevInstr;
}
  9157. void Lowerer::LowerBailOnNegative(IR::Instr *const instr)
  9158. {
  9159. Assert(instr);
  9160. Assert(instr->m_opcode == Js::OpCode::BailOnNegative);
  9161. Assert(instr->HasBailOutInfo());
  9162. Assert(!instr->GetDst());
  9163. Assert(instr->GetSrc1());
  9164. Assert(instr->GetSrc1()->GetType() == TyInt32 || instr->GetSrc1()->GetType() == TyUint32);
  9165. Assert(!instr->GetSrc2());
  9166. IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(false);
  9167. LowerOneBailOutKind(instr, instr->GetBailOutKind(), false);
  9168. Assert(!instr->HasBailOutInfo());
  9169. IR::Instr *insertBeforeInstr = instr->m_next;
  9170. Func *const func = instr->m_func;
  9171. // test src, src
  9172. // jns $skipBailOut
  9173. InsertCompareBranch(
  9174. instr->UnlinkSrc1(),
  9175. IR::IntConstOpnd::New(0, TyInt32, func, true),
  9176. Js::OpCode::BrGe_A,
  9177. skipBailOutLabel,
  9178. insertBeforeInstr);
  9179. instr->Remove();
  9180. }
  9181. IR::Instr *
  9182. Lowerer::LowerBailOnNotObject(IR::Instr *instr,
  9183. IR::BranchInstr *branchInstr /* = nullptr */,
  9184. IR::LabelInstr *labelBailOut /* = nullptr */)
  9185. {
  9186. IR::Instr *prevInstr = instr->m_prev;
  9187. IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label,
  9188. m_func);
  9189. instr->InsertAfter(continueLabelInstr);
  9190. this->m_lowererMD.GenerateObjectTest(instr->UnlinkSrc1(),
  9191. instr,
  9192. continueLabelInstr,
  9193. /* fContinueLabel = */ true);
  9194. this->GenerateBailOut(instr, branchInstr, labelBailOut);
  9195. return prevInstr;
  9196. }
  9197. IR::Instr *
  9198. Lowerer::LowerBailOnTrue(IR::Instr* instr, IR::LabelInstr* labelBailOut /*nullptr*/)
  9199. {
  9200. IR::Instr* instrPrev = instr->m_prev;
  9201. IR::LabelInstr* continueLabel = instr->GetOrCreateContinueLabel();
  9202. IR::RegOpnd * regSrc1 = IR::RegOpnd::New(instr->GetSrc1()->GetType(), this->m_func);
  9203. InsertMove(regSrc1, instr->UnlinkSrc1(), instr);
  9204. InsertTestBranch(regSrc1, regSrc1, Js::OpCode::BrEq_A, continueLabel, instr);
  9205. GenerateBailOut(instr, nullptr, labelBailOut);
  9206. return instrPrev;
  9207. }
// Lowers BailOnNotBuiltIn: compares src1 against the library's builtin
// function table entry whose index is the src2 constant, and bails out when
// they differ (i.e. the expected builtin was overwritten).
IR::Instr *
Lowerer::LowerBailOnNotBuiltIn(IR::Instr *instr,
                               IR::BranchInstr *branchInstr /* = nullptr */,
                               IR::LabelInstr *labelBailOut /* = nullptr */)
{
    Assert(instr->GetSrc2()->IsIntConstOpnd());
    IR::Instr *prevInstr = instr->m_prev;
    Js::JavascriptFunction ** builtInFuncs = this->m_func->GetScriptContext()->GetLibrary()->GetBuiltinFunctions();
    Js::BuiltinFunction builtInIndex = instr->UnlinkSrc2()->AsIntConstOpnd()->AsInt32();
    IR::Opnd *builtIn = IR::MemRefOpnd::New((void*)(builtInFuncs + builtInIndex), TyMachReg, instr->m_func);
#if TESTBUILTINFORNULL
    // Debug aid: break into the debugger if the table entry is null.
    IR::LabelInstr * continueAfterTestLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    InsertTestBranch(builtIn, builtIn, Js::OpCode::BrNeq_A, continueAfterTestLabel, instr);
    this->m_lowererMD.GenerateDebugBreak(instr);
    instr->InsertBefore(continueAfterTestLabel);
#endif
    // Skip the bailout when src1 still matches the builtin table entry.
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    instr->InsertAfter(continueLabel);
    InsertCompareBranch(instr->UnlinkSrc1(), builtIn, Js::OpCode::BrEq_A, continueLabel, instr);
    GenerateBailOut(instr, branchInstr, labelBailOut);
    return prevInstr;
}
  9230. IR::Instr *
  9231. Lowerer::LowerBailForDebugger(IR::Instr* instr, bool isInsideHelper /* = false */)
  9232. {
  9233. IR::Instr * prevInstr = instr->m_prev;
  9234. IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  9235. AssertMsg(bailOutKind, "bailOutKind should not be zero at this time.");
  9236. AssertMsg(!(bailOutKind & IR::BailOutExplicit) || bailOutKind == IR::BailOutExplicit,
  9237. "BailOutExplicit cannot be combined with any other bailout flags.");
  9238. IR::LabelInstr* bailOutLabel = nullptr;
  9239. if (!(bailOutKind & IR::BailOutExplicit))
  9240. {
  9241. Js::DebugManager* debugManager = this->GetScriptContext()->GetThreadContext()->GetDebugManager();
  9242. DebuggingFlags* flags = debugManager->GetDebuggingFlags();
  9243. // Check 1 (do we need to bail out?)
  9244. // JXX bailoutLabel
  9245. // Check 2 (do we need to bail out?)
  9246. // JXX bailoutLabel
  9247. // ...
  9248. // JMP continueLabel
  9249. // bailoutDocumentLabel:
  9250. // (determine if document boundary reached - if not, JMP to continueLabel)
  9251. // NOTE: THIS BLOCK IS CONDITIONALLY GENERATED BASED ON doGenerateBailOutDocumentBlock
  9252. // bailoutLabel:
  9253. // bail out
  9254. // continueLabel:
  9255. // ...
  9256. IR::LabelInstr* bailOutDocumentLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ true);
  9257. instr->InsertBefore(bailOutDocumentLabel);
  9258. IR::LabelInstr* bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ true);
  9259. instr->InsertBefore(bailOutLabel);
  9260. IR::LabelInstr* continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, /*isOpHelper*/ isInsideHelper);
  9261. instr->InsertAfter(continueLabel);
  9262. IR::BranchInstr* continueBranchInstr = this->InsertBranch(Js::OpCode::Br, continueLabel, bailOutDocumentLabel); // JMP continueLabel.
  9263. bool doGenerateBailOutDocumentBlock = false;
  9264. const IR::BailOutKind c_forceAndIgnoreEx = IR::BailOutForceByFlag | IR::BailOutIgnoreException;
  9265. if ((bailOutKind & c_forceAndIgnoreEx) == c_forceAndIgnoreEx)
  9266. {
  9267. // It's faster to check these together in 1 check rather than 2 separate checks at run time.
  9268. // CMP [&(flags->m_forceInterpreter, flags->m_isIgnoreException)], 0
  9269. // BNE bailout
  9270. IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetForceInterpreterOffset(), TyInt16, m_func);
  9271. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt16, m_func, /*dontEncode*/ true);
  9272. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  9273. bailOutKind ^= c_forceAndIgnoreEx;
  9274. }
  9275. else
  9276. {
  9277. if (bailOutKind & IR::BailOutForceByFlag)
  9278. {
  9279. // CMP [&flags->m_forceInterpreter], 0
  9280. // BNE bailout
  9281. IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + DebuggingFlags::GetForceInterpreterOffset(), TyInt8, m_func);
  9282. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func, /*dontEncode*/ true);
  9283. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  9284. bailOutKind ^= IR::BailOutForceByFlag;
  9285. }
  9286. if (bailOutKind & IR::BailOutIgnoreException)
  9287. {
  9288. // CMP [&flags->m_byteCodeOffsetAfterIgnoreException], DebuggingFlags::InvalidByteCodeOffset
  9289. // BNE bailout
  9290. IR::Opnd* opnd1 = IR::MemRefOpnd::New((BYTE*)flags + flags->GetByteCodeOffsetAfterIgnoreExceptionOffset(), TyInt32, m_func);
  9291. IR::Opnd* opnd2 = IR::IntConstOpnd::New(DebuggingFlags::InvalidByteCodeOffset, TyInt32, m_func, /*dontEncode*/ true);
  9292. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  9293. bailOutKind ^= IR::BailOutIgnoreException;
  9294. }
  9295. }
  9296. if (bailOutKind & IR::BailOutBreakPointInFunction)
  9297. {
  9298. // CMP [&functionBody->m_sourceInfo.m_probeCount], 0
  9299. // BNE bailout
  9300. Js::FunctionBody* body = m_func->GetJnFunction();
  9301. IR::Opnd* opnd1 = IR::MemRefOpnd::New(&body->GetSourceInfo()->m_probeCount, TyInt32, m_func);
  9302. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt32, m_func, /*dontEncode*/ true);
  9303. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  9304. bailOutKind ^= IR::BailOutBreakPointInFunction;
  9305. }
  9306. // on method entry
  9307. if(bailOutKind & IR::BailOutStep)
  9308. {
  9309. // TEST STEP_BAILOUT, [&stepController->StepType]
  9310. // BNE BailoutLabel
  9311. IR::Opnd* opnd1 = IR::MemRefOpnd::New((void*)(debugManager->stepController.GetAddressOfStepType()), TyInt8, m_func);
  9312. IR::Opnd* opnd2 = IR::IntConstOpnd::New(Js::STEP_BAILOUT, TyInt8, this->m_func, /*dontEncode*/ true);
  9313. InsertTestBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  9314. // CMP STEP_DOCUMENT, [&stepController->StepType]
  9315. // BEQ BailoutDocumentLabel
  9316. opnd1 = IR::MemRefOpnd::New((void*)(debugManager->stepController.GetAddressOfStepType()), TyInt8, m_func);
  9317. opnd2 = IR::IntConstOpnd::New(Js::STEP_DOCUMENT, TyInt8, this->m_func, /*dontEncode*/ true);
  9318. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrEq_A, /*isUnsigned*/ true, bailOutDocumentLabel, continueBranchInstr);
  9319. doGenerateBailOutDocumentBlock = true;
  9320. bailOutKind ^= IR::BailOutStep;
  9321. }
  9322. // on method exit
  9323. if (bailOutKind & IR::BailOutStackFrameBase)
  9324. {
  9325. // CMP EffectiveFrameBase, [&stepController->frameAddrWhenSet]
  9326. // BA bailoutLabel
  9327. RegNum effectiveFrameBaseReg;
  9328. #ifdef _M_X64
  9329. effectiveFrameBaseReg = m_lowererMD.GetRegStackPointer();
  9330. #else
  9331. effectiveFrameBaseReg = m_lowererMD.GetRegFramePointer();
  9332. #endif
  9333. IR::Opnd* opnd1 = IR::RegOpnd::New(nullptr, effectiveFrameBaseReg, TyMachReg, m_func);
  9334. IR::Opnd* opnd2 = IR::MemRefOpnd::New(debugManager->stepController.GetAddressOfFrameAddress(), TyMachReg, m_func);
  9335. this->InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrGt_A, /*isUnsigned*/ true, bailOutLabel, continueBranchInstr);
  9336. // CMP STEP_DOCUMENT, [&stepController->StepType]
  9337. // BEQ BailoutDocumentLabel
  9338. opnd1 = IR::MemRefOpnd::New((void*)(debugManager->stepController.GetAddressOfStepType()), TyInt8, m_func);
  9339. opnd2 = IR::IntConstOpnd::New(Js::STEP_DOCUMENT, TyInt8, this->m_func, /*dontEncode*/ true);
  9340. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrEq_A, /*isUnsigned*/ true, bailOutDocumentLabel, continueBranchInstr);
  9341. doGenerateBailOutDocumentBlock = true;
  9342. bailOutKind ^= IR::BailOutStackFrameBase;
  9343. }
  9344. if (bailOutKind & IR::BailOutLocalValueChanged)
  9345. {
  9346. int32 hasLocalVarChangedOffset = m_func->GetHasLocalVarChangedOffset();
  9347. if (hasLocalVarChangedOffset != Js::Constants::InvalidOffset)
  9348. {
  9349. // CMP [EBP + hasLocalVarChangedStackOffset], 0
  9350. // BNE bailout
  9351. StackSym* sym = StackSym::New(TyInt8, m_func);
  9352. sym->m_offset = hasLocalVarChangedOffset;
  9353. sym->m_allocated = true;
  9354. IR::Opnd* opnd1 = IR::SymOpnd::New(sym, TyInt8, m_func);
  9355. IR::Opnd* opnd2 = IR::IntConstOpnd::New(0, TyInt8, m_func);
  9356. InsertCompareBranch(opnd1, opnd2, Js::OpCode::BrNeq_A, bailOutLabel, continueBranchInstr);
  9357. }
  9358. bailOutKind ^= IR::BailOutLocalValueChanged;
  9359. }
  9360. if (doGenerateBailOutDocumentBlock)
  9361. {
  9362. // GENERATE the BailoutDocumentLabel
  9363. // bailOutDocumentLabel:
  9364. // CMP CurrentScriptId, [&stepController->ScriptIdWhenSet]
  9365. // BEQ ContinueLabel
  9366. // bailOutLabel: // (fallthrough bailOutLabel)
  9367. Js::FunctionBody* body = m_func->GetJnFunction();
  9368. IR::Opnd* opnd1 = IR::MemRefOpnd::New(body->GetAddressOfScriptId(), TyInt32, m_func);
  9369. IR::Opnd* opnd2 = IR::MemRefOpnd::New(debugManager->stepController.GetAddressOfScriptIdWhenSet(), TyInt32, m_func);
  9370. IR::RegOpnd* reg1 = IR::RegOpnd::New(TyInt32, m_func);
  9371. InsertMove(reg1, opnd2, bailOutLabel);
  9372. InsertCompareBranch(opnd1, reg1, Js::OpCode::BrEq_A, /*isUnsigned*/ true, continueLabel, bailOutLabel);
  9373. }
  9374. AssertMsg(bailOutKind == (IR::BailOutKind)0, "Some of the bits in BailOutKind were not processed!");
  9375. // Note: at this time the 'instr' is in between bailoutLabel and continueLabel.
  9376. }
  9377. else
  9378. {
  9379. // For explicit/unconditional bailout use label which is not a helper, otherwise we would get a helper in main code path
  9380. // which breaks helper label consistency (you can only get to helper from a conditional branch in main code), see DbCheckPostLower.
  9381. bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  9382. }
  9383. this->GenerateBailOut(instr, nullptr, bailOutLabel);
  9384. return prevInstr;
  9385. }
  9386. IR::Instr*
  9387. Lowerer::LowerBailOnException(IR::Instr * instr)
  9388. {
  9389. Assert(instr->HasBailOutInfo());
  9390. IR::Instr * instrPrev = instr->m_prev;
  9391. Assert(instrPrev->m_opcode == Js::OpCode::Catch);
  9392. this->GenerateBailOut(instr, nullptr, nullptr);
  9393. return instrPrev;
  9394. }
  9395. // Generate BailOut Lowerer Instruction if the value is INT_MIN.
  9396. // It it's not INT_MIN, we continue without bailout.
  9397. IR::Instr *
  9398. Lowerer::LowerBailOnIntMin(IR::Instr *instr, IR::BranchInstr *branchInstr /* = nullptr */, IR::LabelInstr *labelBailOut /* = nullptr */)
  9399. {
  9400. Assert(instr);
  9401. Assert(instr->GetSrc1());
  9402. IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  9403. instr->InsertAfter(continueLabelInstr);
  9404. if(!instr->HasBailOutInfo())
  9405. {
  9406. instr->Remove();
  9407. }
  9408. else
  9409. {
  9410. Assert(instr->GetBailOutKind() == IR::BailOnIntMin);
  9411. // Note: src1 must be int32 at this point.
  9412. if (instr->GetSrc1()->IsIntConstOpnd())
  9413. {
  9414. // For consts we can check the value at JIT time. Note: without this check we'll have to legalize the CMP instr.
  9415. IR::IntConstOpnd* intConst = instr->UnlinkSrc1()->AsIntConstOpnd();
  9416. if (intConst->GetValue() == INT_MIN)
  9417. {
  9418. this->GenerateBailOut(instr, branchInstr, labelBailOut);
  9419. intConst->Free(instr->m_func);
  9420. }
  9421. else
  9422. {
  9423. instr->Remove();
  9424. }
  9425. }
  9426. else
  9427. {
  9428. InsertCompareBranch(instr->UnlinkSrc1(), IR::IntConstOpnd::New(INT_MIN, TyInt32, this->m_func), Js::OpCode::BrNeq_A, continueLabelInstr, instr);
  9429. this->GenerateBailOut(instr, branchInstr, labelBailOut);
  9430. }
  9431. }
  9432. return continueLabelInstr;
  9433. }
  9434. ///----------------------------------------------------------------------------
  9435. ///
  9436. /// Lowerer::LowerBailOnNotString
  9437. /// Generate BailOut Lowerer Instruction if not a String
  9438. ///
  9439. ///----------------------------------------------------------------------------
  9440. void Lowerer::LowerBailOnNotString(IR::Instr *instr)
  9441. {
  9442. if (!instr->GetSrc1()->GetValueType().IsString())
  9443. {
  9444. /*Creating a MOV instruction*/
  9445. IR::Instr * movInstr = IR::Instr::New(instr->m_opcode, instr->UnlinkDst(), instr->UnlinkSrc1(), instr->m_func);
  9446. instr->InsertBefore(movInstr);
  9447. IR::LabelInstr *continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  9448. IR::LabelInstr *helperLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  9449. instr->InsertAfter(continueLabelInstr);
  9450. IR::RegOpnd *srcReg = movInstr->GetSrc1()->IsRegOpnd() ? movInstr->GetSrc1()->AsRegOpnd() : nullptr;
  9451. this->GenerateStringTest(srcReg, instr, helperLabelInstr, continueLabelInstr);
  9452. this->GenerateBailOut(instr, nullptr, helperLabelInstr);
  9453. }
  9454. else
  9455. {
  9456. instr->ClearBailOutInfo();
  9457. }
  9458. }
// Splits a single bail-out kind off 'instr' and emits a separate BailOut
// instruction for it immediately after 'instr'. The caller is expected to
// generate the code that decides whether control reaches the new bail-out.
//
// bailOutKindToLower         - the kind being lowered here; must either be a
//                              single auxiliary bit (BailOutKindBits) or be
//                              exactly the main kind carried by 'instr'.
// preserveBailOutKindInInstr - when true, 'instr' keeps the lowered kind in
//                              addition to the new bail-out instruction.
// NOTE(review): isInHelperBlock is not referenced anywhere in this body —
// confirm whether it is intentionally unused.
void Lowerer::LowerOneBailOutKind(
    IR::Instr *const instr,
    const IR::BailOutKind bailOutKindToLower,
    const bool isInHelperBlock,
    const bool preserveBailOutKindInInstr)
{
    Assert(instr);
    Assert(bailOutKindToLower);
    // If the kind is one of the auxiliary bits, it must be a single bit
    // (power of two: k & (k - 1) == 0).
    Assert(!(bailOutKindToLower & IR::BailOutKindBits) || !(bailOutKindToLower & bailOutKindToLower - 1u));

    Func *const func = instr->m_func;

    // Split bailouts other than the one being lowered here
    BailOutInfo *const bailOutInfo = instr->GetBailOutInfo();
    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    Assert(
        bailOutKindToLower & IR::BailOutKindBits
        ? bailOutKind & bailOutKindToLower
        : (bailOutKind & ~IR::BailOutKindBits) == bailOutKindToLower);
    if(!preserveBailOutKindInInstr)
    {
        bailOutKind -= bailOutKindToLower;
    }
    if(bailOutKind)
    {
        // 'instr' still carries other bail-out kinds after the split.
        if(bailOutInfo->bailOutInstr == instr)
        {
            // Create a shared bailout point for the split bailout checks
            IR::Instr *const sharedBail = instr->ShareBailOut();
            Assert(sharedBail->GetBailOutInfo() == bailOutInfo);
            GenerateBailOut(sharedBail);
        }
        instr->SetBailOutKind(bailOutKind);
    }
    else
    {
        // Nothing remains on 'instr'; detach its bail-out info. The info's
        // back-pointer is cleared so ownership effectively moves to the new
        // bail-out instruction created below.
        instr->UnlinkBailOutInfo();
        if(bailOutInfo->bailOutInstr == instr)
        {
            bailOutInfo->bailOutInstr = nullptr;
        }
    }

    IR::Instr *const insertBeforeInstr = instr->m_next;

    // (Bail out with the requested bail out kind)
    IR::BailOutInstr *const bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOut, bailOutKindToLower, bailOutInfo, func);
    bailOutInstr->SetByteCodeOffset(instr);
    insertBeforeInstr->InsertBefore(bailOutInstr);
    GenerateBailOut(bailOutInstr);
    // The caller is expected to generate code to decide whether to bail out
}
// Splits a combined BailOnNotArray instruction into the array-check part and,
// when the kind includes BailOutOnMissingValue, a separate bail-out
// instruction for the missing-value check.
//
// instr                 - the BailOnNotArray instruction; it is kept as the
//                         array check and returned through bailOnNotArrayRef.
// bailOnNotArrayRef     - out: receives 'instr'.
// bailOnMissingValueRef - out: receives the new missing-value bail-out
//                         instruction, or nullptr when no split was needed.
void Lowerer::SplitBailOnNotArray(
    IR::Instr *const instr,
    IR::Instr * *const bailOnNotArrayRef,
    IR::Instr * *const bailOnMissingValueRef)
{
    Assert(instr);
    Assert(!instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsRegOpnd());
    Assert(!instr->GetSrc2());
    Assert(bailOnNotArrayRef);
    Assert(bailOnMissingValueRef);

    IR::Instr *&bailOnNotArray = *bailOnNotArrayRef;
    IR::Instr *&bailOnMissingValue = *bailOnMissingValueRef;
    bailOnNotArray = instr;
    bailOnMissingValue = nullptr;

    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    if(bailOutKind == IR::BailOutOnNotArray ||
        bailOutKind == IR::BailOutOnNotNativeArray)
    {
        // Pure array check — nothing to split.
        return;
    }

    // Split array checks
    BailOutInfo *const bailOutInfo = instr->GetBailOutInfo();
    if(bailOutInfo->bailOutInstr == instr)
    {
        // Create a shared bailout point for the split bailout checks
        IR::Instr *const sharedBail = instr->ShareBailOut();
        Assert(sharedBail->GetBailOutInfo() == bailOutInfo);
        LowerBailTarget(sharedBail);
    }

    // Strip the missing-value bit; what remains must be one of the two plain
    // array-check kinds.
    bailOutKind -= IR::BailOutOnMissingValue;
    Assert(bailOutKind == IR::BailOutOnNotArray ||
        bailOutKind == IR::BailOutOnNotNativeArray);
    instr->SetBailOutKind(bailOutKind);

    Func *const func = bailOutInfo->bailOutFunc;
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // Split missing value checks
    bailOnMissingValue = IR::BailOutInstr::New(Js::OpCode::BailOnNotArray, IR::BailOutOnMissingValue, bailOutInfo, func);
    bailOnMissingValue->SetByteCodeOffset(instr);
    insertBeforeInstr->InsertBefore(bailOnMissingValue);
}
  9549. IR::RegOpnd *Lowerer::LowerBailOnNotArray(IR::Instr *const instr)
  9550. {
  9551. Assert(instr);
  9552. Assert(!instr->GetDst());
  9553. Assert(instr->GetSrc1());
  9554. Assert(instr->GetSrc1()->IsRegOpnd());
  9555. Assert(!instr->GetSrc2());
  9556. Func *const func = instr->m_func;
  9557. // Label to jump to (or fall through to) when bailing out
  9558. const auto bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true /* isOpHelper */);
  9559. instr->InsertBefore(bailOutLabel);
  9560. // Label to jump to when not bailing out
  9561. const auto skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  9562. instr->InsertAfter(skipBailOutLabel);
  9563. // Do the array tests and jump to bailOutLabel if it's not an array. Fall through if it is an array.
  9564. IR::RegOpnd *const arrayOpnd =
  9565. GenerateArrayTest(instr->UnlinkSrc1()->AsRegOpnd(), bailOutLabel, bailOutLabel, bailOutLabel, true);
  9566. // Skip bail-out when it is an array
  9567. InsertBranch(Js::OpCode::Br, skipBailOutLabel, bailOutLabel);
  9568. // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
  9569. // ordering instructions anymore.
  9570. GenerateBailOut(instr);
  9571. return arrayOpnd;
  9572. }
  9573. void Lowerer::LowerBailOnMissingValue(IR::Instr *const instr, IR::RegOpnd *const arrayOpnd)
  9574. {
  9575. Assert(instr);
  9576. Assert(!instr->GetDst());
  9577. Assert(!instr->GetSrc1());
  9578. Assert(!instr->GetSrc2());
  9579. Assert(arrayOpnd);
  9580. Assert(arrayOpnd->GetValueType().IsArrayOrObjectWithArray());
  9581. Func *const func = instr->m_func;
  9582. // Label to jump to when not bailing out
  9583. const auto skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  9584. instr->InsertAfter(skipBailOutLabel);
  9585. // Skip bail-out when the array has no missing values
  9586. //
  9587. // test [array + offsetOf(objectArrayOrFlags)], Js::DynamicObjectFlags::HasNoMissingValues
  9588. // jnz $skipBailOut
  9589. const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, func);
  9590. CompileAssert(
  9591. static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
  9592. Js::DynamicObjectFlags::HasNoMissingValues);
  9593. InsertTestBranch(
  9594. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, func),
  9595. IR::IntConstOpnd::New(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues), TyUint8, func, true),
  9596. Js::OpCode::BrNeq_A,
  9597. skipBailOutLabel,
  9598. instr);
  9599. // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
  9600. // ordering instructions anymore.
  9601. GenerateBailOut(instr);
  9602. }
// Lowers the BailOutOnInvalidatedArrayHeadSegment check around an element
// store that involves a helper call: snapshots the head segment (and its
// length) before the call, and bails out after the call if either changed.
void Lowerer::LowerBailOnInvalidatedArrayHeadSegment(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
        // Generate checks for whether the head segment or the head segment length changed during the helper call

        if(!(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentSym()))
        {
            // Record the array head segment before the helper call
            headSegmentBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayHeadSegmentForArrayOrObjectWithArray(base)
        }
        if(!(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentLengthSym()))
        {
            // Record the array head segment length before the helper call
            if(baseValueType.IsArrayOrObjectWithArray() && arrayOpnd && arrayOpnd.HeadSegmentSym())
            {
                mov headSegmentLengthBeforeHelperCall, [headSegmentBeforeHelperCall + offsetOf(length)]
            }
            else
            {
                headSegmentLengthBeforeHelperCall =
                    Js::JavascriptArray::Jit_GetArrayHeadSegmentLength(headSegmentBeforeHelperCall)
            }
        }

    helperCall:
        (Helper call and other bailout checks)

        // If the array has a different head segment or head segment length after the helper call, then this store needs to bail
        // out
        invalidatedHeadSegment =
            JavascriptArray::Jit_OperationInvalidatedArrayHeadSegment(
                headSegmentBeforeHelperCall,
                headSegmentLengthBeforeHelperCall,
                base)
        test invalidatedHeadSegment, invalidatedHeadSegment
        jz $skipBailOut
        (Bail out with IR::BailOutOnInvalidatedArrayHeadSegment)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;

    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArrayOrObjectWithArray());
    const bool isArrayOrObjectWithArray = baseValueType.IsArrayOrObjectWithArray();
    IR::ArrayRegOpnd *const arrayOpnd = baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr;

    // Obtain the head segment as of before the helper call: either reuse the
    // tracked head-segment sym, or fetch it via a helper call.
    IR::RegOpnd *headSegmentBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentBeforeHelperCallOpnd;
    if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentSym())
    {
        headSegmentBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->HeadSegmentSym(), TyMachPtr, func);
        autoReuseHeadSegmentBeforeHelperCallOpnd.Initialize(headSegmentBeforeHelperCallOpnd, func);
    }
    else
    {
        // Record the array head segment before the helper call
        //     headSegmentBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayHeadSegmentForArrayOrObjectWithArray(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        headSegmentBeforeHelperCallOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, func), TyMachPtr, func);
        autoReuseHeadSegmentBeforeHelperCallOpnd.Initialize(headSegmentBeforeHelperCallOpnd, func);
        callInstr->SetDst(headSegmentBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayHeadSegmentForArrayOrObjectWithArray);
    }

    // Obtain the head segment length as of before the helper call: reuse the
    // tracked sym, read it off the snapshotted segment, or call a helper.
    IR::RegOpnd *headSegmentLengthBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseHeadSegmentLengthBeforeHelperCallOpnd;
    if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentLengthSym())
    {
        headSegmentLengthBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->HeadSegmentLengthSym(), TyUint32, func);
        autoReuseHeadSegmentLengthBeforeHelperCallOpnd.Initialize(headSegmentLengthBeforeHelperCallOpnd, func);
    }
    else
    {
        headSegmentLengthBeforeHelperCallOpnd = IR::RegOpnd::New(StackSym::New(TyUint32, func), TyUint32, func);
        autoReuseHeadSegmentLengthBeforeHelperCallOpnd.Initialize(headSegmentLengthBeforeHelperCallOpnd, func);
        if(isArrayOrObjectWithArray && arrayOpnd && arrayOpnd->HeadSegmentSym())
        {
            // Record the array head segment length before the helper call
            //     mov headSegmentLengthBeforeHelperCall, [headSegmentBeforeHelperCall + offsetOf(length)]
            InsertMove(
                headSegmentLengthBeforeHelperCallOpnd,
                IR::IndirOpnd::New(
                    headSegmentBeforeHelperCallOpnd,
                    Js::SparseArraySegmentBase::GetOffsetOfLength(),
                    TyUint32,
                    func),
                instr);
        }
        else
        {
            // Record the array head segment length before the helper call
            //     headSegmentLengthBeforeHelperCall =
            //         Js::JavascriptArray::Jit_GetArrayHeadSegmentLength(headSegmentBeforeHelperCall)
            m_lowererMD.LoadHelperArgument(instr, headSegmentBeforeHelperCallOpnd);
            IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
            callInstr->SetDst(headSegmentLengthBeforeHelperCallOpnd);
            instr->InsertBefore(callInstr);
            m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayHeadSegmentLength);
        }
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    LowerOneBailOutKind(instr, IR::BailOutOnInvalidatedArrayHeadSegment, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array has a different head segment or head segment length after the helper call, then this store needs to bail out
    //     invalidatedHeadSegment =
    //         JavascriptArray::Jit_OperationInvalidatedArrayHeadSegment(
    //             headSegmentBeforeHelperCall,
    //             headSegmentLengthBeforeHelperCall,
    //             base)
    // Arguments are loaded in reverse of the helper's parameter order.
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, headSegmentLengthBeforeHelperCallOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, headSegmentBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const invalidatedHeadSegmentOpnd = IR::RegOpnd::New(TyUint8, func);
    const IR::AutoReuseOpnd autoReuseInvalidatedHeadSegmentOpnd(invalidatedHeadSegmentOpnd, func);
    callInstr->SetDst(invalidatedHeadSegmentOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationInvalidatedArrayHeadSegment);

    //     test invalidatedHeadSegment, invalidatedHeadSegment
    //     jz $skipBailOut
    InsertTestBranch(
        invalidatedHeadSegmentOpnd,
        invalidatedHeadSegmentOpnd,
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    //     (Bail out with IR::BailOutOnInvalidatedArrayHeadSegment)
    // $skipBailOut:
}
// Lowers the BailOutOnInvalidatedArrayLength check around an element store
// that involves a helper call: snapshots the array length before the call and
// bails out after the call if the length changed.
void Lowerer::LowerBailOnInvalidatedArrayLength(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
        // Generate checks for whether the length changed during the helper call

        if(!(arrayOpnd && arrayOpnd.LengthSym() && arrayOpnd.LengthSym() != arrayOpnd.HeadSegmentLengthSym()))
        {
            // Record the array length before the helper call
            lengthBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayLength(base)
        }

    helperCall:
        (Helper call and other bailout checks)

        // If the array has a different length after the helper call, then this store needs to bail out
        invalidatedLength = JavascriptArray::Jit_OperationInvalidatedArrayLength(lengthBeforeHelperCall, base)
        test invalidatedLength, invalidatedLength
        jz $skipBailOut
        (Bail out with IR::BailOutOnInvalidatedArrayLength)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;

    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArray());
    IR::ArrayRegOpnd *const arrayOpnd = baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd() : nullptr;

    // Obtain the length as of before the helper call: reuse the tracked length
    // sym when it is distinct from the head-segment length sym, otherwise
    // fetch it via a helper call.
    IR::RegOpnd *lengthBeforeHelperCallOpnd;
    IR::AutoReuseOpnd autoReuseLengthBeforeHelperCallOpnd;
    if(arrayOpnd && arrayOpnd->LengthSym() && arrayOpnd->LengthSym() != arrayOpnd->HeadSegmentLengthSym())
    {
        lengthBeforeHelperCallOpnd = IR::RegOpnd::New(arrayOpnd->LengthSym(), arrayOpnd->LengthSym()->GetType(), func);
        autoReuseLengthBeforeHelperCallOpnd.Initialize(lengthBeforeHelperCallOpnd, func);
    }
    else
    {
        // Record the array length before the helper call
        //     lengthBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayLength(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        lengthBeforeHelperCallOpnd = IR::RegOpnd::New(TyUint32, func);
        autoReuseLengthBeforeHelperCallOpnd.Initialize(lengthBeforeHelperCallOpnd, func);
        callInstr->SetDst(lengthBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayLength);
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    LowerOneBailOutKind(instr, IR::BailOutOnInvalidatedArrayLength, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array has a different length after the helper call, then this store needs to bail out
    //     invalidatedLength = JavascriptArray::Jit_OperationInvalidatedArrayLength(lengthBeforeHelperCall, base)
    // Arguments are loaded in reverse of the helper's parameter order.
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, lengthBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const invalidatedLengthOpnd = IR::RegOpnd::New(TyUint8, func);
    const IR::AutoReuseOpnd autoReuseInvalidatedLengthOpnd(invalidatedLengthOpnd, func);
    callInstr->SetDst(invalidatedLengthOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationInvalidatedArrayLength);

    //     test invalidatedLength, invalidatedLength
    //     jz $skipBailOut
    InsertTestBranch(
        invalidatedLengthOpnd,
        invalidatedLengthOpnd,
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    //     (Bail out with IR::BailOutOnInvalidatedArrayLength)
    // $skipBailOut:
}
// Lowers the BailOutOnMissingValue check around an element store that involves
// a helper call: snapshots the array's flags before the call and bails out
// after the call if the store created the array's first missing value.
void Lowerer::LowerBailOnCreatedMissingValue(IR::Instr *const instr, const bool isInHelperBlock)
{
    /*
        // Generate checks for whether the first missing value was created during the helper call

        if(!(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
        {
            // Record whether the array has missing values before the helper call
            arrayFlagsBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayFlagsForArrayOrObjectWithArray(base)
        }

    helperCall:
        (Helper call and other bailout checks)

        // If the array had no missing values before the helper call, and the array has missing values after the helper
        // call, then this store created the first missing value in the array and needs to bail out
        if(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())
            (arrayFlagsBeforeHelperCall = Js::DynamicObjectFlags::HasNoMissingValues)
        createdFirstMissingValue = JavascriptArray::Jit_OperationCreatedFirstMissingValue(arrayFlagsBeforeHelperCall, base)
        test createdFirstMissingValue, createdFirstMissingValue
        jz $skipBailOut
        (Bail out with IR::BailOutOnMissingValue)
    $skipBailOut:
    */

    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || instr->m_opcode == Js::OpCode::Memset || instr->m_opcode == Js::OpCode::Memcopy);
    Assert(instr->GetDst());
    Assert(instr->GetDst()->IsIndirOpnd());

    Func *const func = instr->m_func;

    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    Assert(!baseValueType.IsNotArrayOrObjectWithArray());

    IR::Opnd *arrayFlagsBeforeHelperCallOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseArrayFlagsBeforeHelperCallOpnd;
    // Flags are pointer-sized; pick the matching IR type for this target.
    const IRType arrayFlagsType = sizeof(uintptr_t) == sizeof(uint32) ? TyUint32 : TyUint64;
    if(!(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
    {
        // Record whether the array has missing values before the helper call
        //     arrayFlagsBeforeHelperCall = Js::JavascriptArray::Jit_GetArrayFlagsForArrayOrObjectWithArray(base)
        m_lowererMD.LoadHelperArgument(instr, baseOpnd);
        IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
        arrayFlagsBeforeHelperCallOpnd = IR::RegOpnd::New(arrayFlagsType, func);
        autoReuseArrayFlagsBeforeHelperCallOpnd.Initialize(arrayFlagsBeforeHelperCallOpnd, func);
        callInstr->SetDst(arrayFlagsBeforeHelperCallOpnd);
        instr->InsertBefore(callInstr);
        m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_GetArrayFlagsForArrayOrObjectWithArray);
    }

    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(isInHelperBlock);
    LowerOneBailOutKind(instr, IR::BailOutOnMissingValue, isInHelperBlock);
    IR::Instr *const insertBeforeInstr = instr->m_next;

    // If the array had no missing values before the helper call, and the array has missing values after the helper
    // call, then this store created the first missing value in the array and needs to bail out
    if(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues())
    {
        // (arrayFlagsBeforeHelperCall = Js::DynamicObjectFlags::HasNoMissingValues)
        // Statically known: materialize the flag value as a constant. A 64-bit
        // immediate is carried as an AddrOpnd; 32-bit fits an IntConstOpnd.
        Assert(!arrayFlagsBeforeHelperCallOpnd);
        arrayFlagsBeforeHelperCallOpnd =
            arrayFlagsType == TyUint32
                ? static_cast<IR::Opnd *>(
                    IR::IntConstOpnd::New(
                        static_cast<uintptr_t>(Js::DynamicObjectFlags::HasNoMissingValues),
                        arrayFlagsType,
                        func,
                        true))
                : IR::AddrOpnd::New(
                    reinterpret_cast<void *>(Js::DynamicObjectFlags::HasNoMissingValues),
                    IR::AddrOpndKindConstantVar,
                    func,
                    true);
        autoReuseArrayFlagsBeforeHelperCallOpnd.Initialize(arrayFlagsBeforeHelperCallOpnd, func);
    }
    else
    {
        // The snapshot was recorded by the helper call emitted above.
        Assert(arrayFlagsBeforeHelperCallOpnd);
    }

    //     createdFirstMissingValue = JavascriptArray::Jit_OperationCreatedFirstMissingValue(arrayFlagsBeforeHelperCall, base)
    // Arguments are loaded in reverse of the helper's parameter order.
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, baseOpnd);
    m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayFlagsBeforeHelperCallOpnd);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    IR::RegOpnd *const createdFirstMissingValueOpnd = IR::RegOpnd::New(TyUint8, func);
    IR::AutoReuseOpnd autoReuseCreatedFirstMissingValueOpnd(createdFirstMissingValueOpnd, func);
    callInstr->SetDst(createdFirstMissingValueOpnd);
    insertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.ChangeToHelperCall(callInstr, IR::HelperArray_Jit_OperationCreatedFirstMissingValue);

    //     test createdFirstMissingValue, createdFirstMissingValue
    //     jz $skipBailOut
    InsertCompareBranch(
        createdFirstMissingValueOpnd,
        IR::IntConstOpnd::New(0, createdFirstMissingValueOpnd->GetType(), func, true),
        Js::OpCode::BrEq_A,
        skipBailOutLabel,
        insertBeforeInstr);

    //     (Bail out with IR::BailOutOnMissingValue)
    // $skipBailOut:
}
// Lowers BoundCheck / UnsignedBoundCheck:
//     left <= right + offset   (src1 <= src2 + dst, dst being an optional constant offset)
// into a compare-and-branch that jumps to the continue label when the check passes and falls
// through into the bail-out otherwise. Constant operands are folded where possible, and an
// overflow-checked add is emitted only when (right + offset) must be computed at runtime.
void Lowerer::LowerBoundCheck(IR::Instr *const instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::BoundCheck || instr->m_opcode == Js::OpCode::UnsignedBoundCheck);

#if DBG
    if(instr->m_opcode == Js::OpCode::UnsignedBoundCheck)
    {
        // UnsignedBoundCheck is currently only supported for the pattern:
        // UnsignedBoundCheck s1 <= s2 + c, where c == 0 || c == -1
        Assert(instr->GetSrc1()->IsRegOpnd());
        Assert(instr->GetSrc1()->IsInt32());
        Assert(instr->GetSrc2());
        Assert(!instr->GetSrc2()->IsIntConstOpnd());
        if(instr->GetDst())
        {
            const int32 c = instr->GetDst()->AsIntConstOpnd()->AsInt32();
            Assert(c == 0 || c == -1);
        }
    }
#endif

    const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    Assert(
        bailOutKind == IR::BailOutOnArrayAccessHelperCall ||
        bailOutKind == IR::BailOutOnInvalidatedArrayHeadSegment ||
        bailOutKind == IR::BailOutOnFailedHoistedBoundCheck ||
        bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);

    // Generate the bail-out for this kind first; the compare emitted below branches to
    // skipBailOutLabel when the bound check succeeds.
    IR::LabelInstr *const skipBailOutLabel = instr->GetOrCreateContinueLabel(false);
    LowerOneBailOutKind(instr, bailOutKind, false);
    Assert(!instr->HasBailOutInfo());
    IR::Instr *insertBeforeInstr = instr->m_next;

#if DBG
    const auto VerifyLeftOrRightOpnd = [&](IR::Opnd *const opnd, const bool isRightOpnd)
    {
        if(!opnd)
        {
            // Only the right operand may be absent.
            Assert(isRightOpnd);
            return;
        }
        if(opnd->IsIntConstOpnd())
        {
            // A zero constant right operand should have been folded away upstream.
            Assert(!isRightOpnd || opnd->AsIntConstOpnd()->GetValue() != 0);
            return;
        }
        Assert(opnd->GetType() == TyInt32 || opnd->GetType() == TyUint32);
    };
#endif

    // left <= right + offset (src1 <= src2 + dst)
    IR::Opnd *leftOpnd = instr->UnlinkSrc1();
    DebugOnly(VerifyLeftOrRightOpnd(leftOpnd, false));
    IR::Opnd *rightOpnd = instr->UnlinkSrc2();
    DebugOnly(VerifyLeftOrRightOpnd(rightOpnd, true));
    Assert(!leftOpnd->IsIntConstOpnd() || rightOpnd && !rightOpnd->IsIntConstOpnd());
    IR::IntConstOpnd *offsetOpnd = instr->GetDst() ? instr->UnlinkDst()->AsIntConstOpnd() : nullptr;
    Assert(!offsetOpnd || offsetOpnd->GetValue() != 0);
    const bool doUnsignedCompare = instr->m_opcode == Js::OpCode::UnsignedBoundCheck;
    instr->Remove();

    Func *const func = insertBeforeInstr->m_func;
    IntConstType offset = offsetOpnd ? offsetOpnd->GetValue() : 0;
    Js::OpCode compareOpCode = Js::OpCode::BrLe_A;
    // Negating IntConstMin would overflow, so the swap is skipped in that case.
    if(leftOpnd->IsIntConstOpnd() && rightOpnd->IsRegOpnd() && offset != IntConstMin)
    {
        // Put the constants together: swap the operands, negate the offset, and invert the branch
        IR::Opnd *const tempOpnd = leftOpnd;
        leftOpnd = rightOpnd;
        rightOpnd = tempOpnd;
        offset = -offset;
        compareOpCode = Js::OpCode::BrGe_A;
    }

    if(rightOpnd->IsIntConstOpnd())
    {
        // Try to aggregate right + offset into a constant offset
        IntConstType newOffset;
        if(!IntConstMath::Add(offset, rightOpnd->AsIntConstOpnd()->GetValue(), &newOffset))
        {
            offset = newOffset;
            rightOpnd = nullptr;
            offsetOpnd = nullptr;
        }
    }

    // Determine if the Add for (right + offset) is necessary, and the op code that will be used for the comparison
    IR::AutoReuseOpnd autoReuseAddResultOpnd;
    if(offset == -1 && compareOpCode == Js::OpCode::BrLe_A)
    {
        // left <= right - 1  ==>  left < right
        offset = 0;
        compareOpCode = Js::OpCode::BrLt_A;
    }
    else if(offset == 1 && compareOpCode == Js::OpCode::BrGe_A)
    {
        // left >= right + 1  ==>  left > right
        offset = 0;
        compareOpCode = Js::OpCode::BrGt_A;
    }
    else if(offset != 0 && rightOpnd)
    {
        // Need to Add (right + offset). If it overflows, bail out.
        IR::LabelInstr *const bailOutLabel = insertBeforeInstr->m_prev->GetOrCreateContinueLabel(true);
        insertBeforeInstr = bailOutLabel;

        //     mov  temp, right
        //     add  temp, offset
        //     jo   $bailOut
        // $bailOut: (insertBeforeInstr)
        Assert(!offsetOpnd || offsetOpnd->GetValue() == offset);
        IR::RegOpnd *const addResultOpnd = IR::RegOpnd::New(TyMachReg, func);
        autoReuseAddResultOpnd.Initialize(addResultOpnd, func);
        InsertAdd(
            true,
            addResultOpnd,
            rightOpnd,
            offsetOpnd ? offsetOpnd : IR::IntConstOpnd::New(offset, TyMachReg, func, true),
            insertBeforeInstr);
        InsertBranch(LowererMD::MDOverflowBranchOpcode, bailOutLabel, insertBeforeInstr);

        rightOpnd = addResultOpnd;
    }

    //     cmp  left, right
    //     jl[e] $skipBailOut
    // $bailOut:
    if(!rightOpnd)
    {
        // Everything folded into a constant: compare left directly against the aggregated offset.
        rightOpnd = IR::IntConstOpnd::New(offset, TyInt32, func, true);
    }
    InsertCompareBranch(leftOpnd, rightOpnd, compareOpCode, doUnsignedCompare, skipBailOutLabel, insertBeforeInstr);
}
  10016. IR::Instr *
  10017. Lowerer::LowerBailTarget(IR::Instr * instr)
  10018. {
  10019. // this is just a bailout target, just skip over it and generate a label before so other bailout can jump here.
  10020. IR::Instr * prevInstr = instr->m_prev;
  10021. IR::LabelInstr * continueLabelInstr = IR::LabelInstr::New(Js::OpCode::Label, m_func);
  10022. instr->InsertAfter(continueLabelInstr);
  10023. IR::BranchInstr * skipInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, continueLabelInstr, this->m_func);
  10024. instr->InsertBefore(skipInstr);
  10025. this->GenerateBailOut(instr);
  10026. return prevInstr;
  10027. }
// Splits an instruction that bails out on implicit calls into the operation itself plus an
// explicit BailOnNotEqual check of the implicit-call flags.
// 'instr' (in/out): on entry, the instruction carrying the bail-out; on exit, a fresh copy of
// the original (non-bail-out) operation inserted before the check.
// Returns the BailOnNotEqual instruction that now owns the bail-out.
IR::Instr *
Lowerer::SplitBailOnImplicitCall(IR::Instr *& instr)
{
    Assert(instr->IsPlainInstr() || instr->IsProfiledInstr());

    const auto bailOutKind = instr->GetBailOutKind();
    Assert(
        BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind) ||
        bailOutKind == IR::BailOutExpectingObject);

    IR::Opnd * implicitCallFlags = this->GetImplicitCallFlagsOpnd();
    const IR::AutoReuseOpnd autoReuseImplicitCallFlags(implicitCallFlags, instr->m_func);
    IR::IntConstOpnd * noImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_None, TyInt8, this->m_func, true);
    const IR::AutoReuseOpnd autoReuseNoImplicitCall(noImplicitCall, instr->m_func);

    // Reset the implicit call flag on every helper call
    LowererMD::CreateAssign(implicitCallFlags, noImplicitCall, instr);

    IR::Instr *disableImplicitCallsInstr = nullptr, *enableImplicitCallsInstr = nullptr;
    if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
    {
        const auto disableImplicitCallAddress =
            m_lowererMD.GenerateMemRef(
                instr->m_func->GetScriptContext()->GetThreadContext()->GetAddressOfDisableImplicitFlags(),
                TyInt8,
                instr);

        // Disable implicit calls since they will be called after bailing out
        disableImplicitCallsInstr =
            IR::Instr::New(
                Js::OpCode::Ld_A,
                disableImplicitCallAddress,
                IR::IntConstOpnd::New(DisableImplicitCallFlag, TyInt8, instr->m_func, true),
                instr->m_func);
        instr->InsertBefore(disableImplicitCallsInstr);

        // Create instruction for re-enabling implicit calls (inserted later, after the operation).
        enableImplicitCallsInstr =
            IR::Instr::New(
                Js::OpCode::Ld_A,
                disableImplicitCallAddress,
                IR::IntConstOpnd::New(DisableImplicitNoFlag, TyInt8, instr->m_func, true),
                instr->m_func);
    }

    // Transfer the operation into a fresh instruction placed before the (future) check;
    // the original instruction object is reused as the check itself.
    IR::Instr * bailOutInstr = instr;
    instr = IR::Instr::New(instr->m_opcode, instr->m_func);
    bailOutInstr->TransferTo(instr);
    bailOutInstr->InsertBefore(instr);

    if(disableImplicitCallsInstr)
    {
        // Re-enable implicit calls
        Assert(enableImplicitCallsInstr);
        bailOutInstr->InsertBefore(enableImplicitCallsInstr);

        // Lower both instructions. Lowering an instruction may free the instruction's original operands, so do that last.
        LowererMD::ChangeToAssign(disableImplicitCallsInstr);
        LowererMD::ChangeToAssign(enableImplicitCallsInstr);
    }

    // The check: bail out when the flags no longer equal ImplicitCall_None after the operation.
    bailOutInstr->m_opcode = Js::OpCode::BailOnNotEqual;
    bailOutInstr->SetSrc1(implicitCallFlags);
    bailOutInstr->SetSrc2(noImplicitCall);
    return bailOutInstr;
}
  10084. IR::Instr *
  10085. Lowerer::SplitBailOnImplicitCall(IR::Instr * instr, IR::Instr * helperCall, IR::Instr * insertBeforeInstr)
  10086. {
  10087. IR::Opnd * implicitCallFlags = this->GetImplicitCallFlagsOpnd();
  10088. const IR::AutoReuseOpnd autoReuseImplicitCallFlags(implicitCallFlags, instr->m_func);
  10089. IR::IntConstOpnd * noImplicitCall = IR::IntConstOpnd::New(Js::ImplicitCall_None, TyInt8, this->m_func, true);
  10090. const IR::AutoReuseOpnd autoReuseNoImplicitCall(noImplicitCall, instr->m_func);
  10091. // Reset the implicit call flag on every helper call
  10092. LowererMD::CreateAssign(implicitCallFlags, noImplicitCall, helperCall->m_prev);
  10093. BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
  10094. if (bailOutInfo->bailOutInstr == instr)
  10095. {
  10096. bailOutInfo->bailOutInstr = nullptr;
  10097. }
  10098. IR::Instr * bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOnNotEqual, IR::BailOutOnImplicitCalls, bailOutInfo, bailOutInfo->bailOutFunc);
  10099. bailOutInstr->SetSrc1(implicitCallFlags);
  10100. bailOutInstr->SetSrc2(noImplicitCall);
  10101. insertBeforeInstr->InsertBefore(bailOutInstr);
  10102. instr->ClearBailOutInfo();
  10103. return bailOutInstr;
  10104. }
// Splits the bailout-for-debugger out of a real instr that carries it, into a separate bailout instr.
// Returns the instr that needs to be lowered next, which would normally be the last of the split instrs.
// IR on input:
// - Real instr with BailOutInfo, but its opcode is not BailForDebugger.
//   - debugger bailout is not shared. In this case the debugger bailout is in instr->GetBailOutKind().
//   - debugger bailout is shared. In this case the debugger bailout is in instr->GetAuxBailOutKind().
// IR on output:
// - Either of:
//   - real instr, then debuggerBailout -- in case we only had debugger bailout.
//   - real instr with BailOutInfo without debugger bailout, then debuggerBailout, then sharedBailout -- in case the bailout for debugger was shared with some other bailout.
// Extracts the debugger bail-out bits from 'instr' into a separate BailForDebugger instruction
// inserted after it, handling both the unshared and the shared (aux) bail-out cases.
// Returns the instruction to lower next (lowering proceeds backwards).
IR::Instr* Lowerer::SplitBailForDebugger(IR::Instr* instr)
{
    Assert(m_func->IsJitInDebugMode() && instr->m_opcode != Js::OpCode::BailForDebugger);

    IR::BailOutKind debuggerBailOutKind;    // Used for splitted instr.
    BailOutInfo* bailOutInfo = instr->GetBailOutInfo();
    IR::Instr* sharedBailoutInstr = nullptr;

    if (instr->GetBailOutKind() & IR::BailOutForDebuggerBits)
    {
        // debugger bailout is not shared.
        Assert(!instr->HasAuxBailOut());
        AssertMsg(!(instr->GetBailOutKind() & ~IR::BailOutForDebuggerBits), "There should only be debugger bailout bits in the instr.");

        debuggerBailOutKind = instr->GetBailOutKind() & IR::BailOutForDebuggerBits;

        // There is no non-debugger bailout in the instr, still can't clear bailout info, as we use it for the splitted instr,
        // but we need to mark the bailout as hasn't been generated yet.
        if (bailOutInfo->bailOutInstr == instr)
        {
            // null will be picked up by following BailOutInstr::New which will change it to new bailout instr.
            bailOutInfo->bailOutInstr = nullptr;
        }

        // Remove bailout info from the original instr which from now on becomes just regular instr, w/o deallocating bailout info.
        instr->ClearBailOutInfo();
    }
    else if (instr->IsBranchInstr() && instr->HasBailOutInfo() && instr->HasAuxBailOut())
    {
        // Branches with shared bailout are lowered in LowerCondBranchCheckBailOut,
        // can't do here because we need to use BranchBailOutRecord but don't know which BrTrue/BrFalse to use for it.
        debuggerBailOutKind = IR::BailOutInvalid;
    }
    else if (instr->HasAuxBailOut() && instr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits)
    {
        // debugger bailout is shared.
        AssertMsg(!(instr->GetBailOutKind() & IR::BailOutForDebuggerBits), "There should be no debugger bits in main bailout kind.");

        debuggerBailOutKind = instr->GetAuxBailOutKind() & IR::BailOutForDebuggerBits;

        // This will insert SharedBail instr after current instr and set bailOutInfo->bailOutInstr to the shared one.
        sharedBailoutInstr = instr->ShareBailOut();

        // As we extracted aux bail out, invalidate all tracks of it in the instr.
        instr->ResetAuxBailOut();
    }
    else
    {
        AssertMsg(FALSE, "shouldn't get here");
        debuggerBailOutKind = IR::BailOutInvalid;
    }

    if (debuggerBailOutKind != IR::BailOutInvalid)
    {
        // Emit the extracted BailForDebugger right after the (now regular) instr.
        IR::BailOutInstr* debuggerBailoutInstr = IR::BailOutInstr::New(
            Js::OpCode::BailForDebugger, debuggerBailOutKind, bailOutInfo, bailOutInfo->bailOutFunc);
        instr->InsertAfter(debuggerBailoutInstr);

        // Since we go backwards, we need to process extracted out bailout for debugger first.
        instr = sharedBailoutInstr ? sharedBailoutInstr : debuggerBailoutInstr;
    }

    return instr;
}
  10168. IR::Instr *
  10169. Lowerer::SplitBailOnResultCondition(IR::Instr *const instr) const
  10170. {
  10171. Assert(instr);
  10172. Assert(!instr->IsLowered());
  10173. Assert(
  10174. instr->GetBailOutKind() & IR::BailOutOnResultConditions ||
  10175. instr->GetBailOutKind() == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  10176. const auto nonBailOutInstr = IR::Instr::New(instr->m_opcode, instr->m_func);
  10177. instr->TransferTo(nonBailOutInstr);
  10178. instr->InsertBefore(nonBailOutInstr);
  10179. return nonBailOutInstr;
  10180. }
  10181. void
  10182. Lowerer::LowerBailOnResultCondition(
  10183. IR::Instr *const instr,
  10184. IR::LabelInstr * *const bailOutLabel,
  10185. IR::LabelInstr * *const skipBailOutLabel)
  10186. {
  10187. Assert(instr);
  10188. Assert(
  10189. instr->GetBailOutKind() & IR::BailOutOnResultConditions ||
  10190. instr->GetBailOutKind() == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  10191. Assert(bailOutLabel);
  10192. Assert(skipBailOutLabel);
  10193. // Label to jump to (or fall through to) when bailing out. The actual bailout label
  10194. // (bailOutInfo->bailOutInstr->AsLabelInstr()) may be shared, and code may be added to restore values before the jump to the
  10195. // actual bailout label in the cloned bailout case, so always create a new bailout label for this particular path.
  10196. *bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, true /* isOpHelper */);
  10197. instr->InsertBefore(*bailOutLabel);
  10198. // Label to jump to when not bailing out
  10199. *skipBailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
  10200. instr->InsertAfter(*skipBailOutLabel);
  10201. // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
  10202. // ordering instructions anymore.
  10203. GenerateBailOut(instr);
  10204. }
  10205. void
  10206. Lowerer::PreserveSourcesForBailOnResultCondition(IR::Instr *const instr, IR::LabelInstr *const skipBailOutLabel) const
  10207. {
  10208. Assert(instr);
  10209. Assert(!instr->IsLowered());
  10210. Assert(!instr->HasBailOutInfo());
  10211. // Since this instruction may bail out, writing to the destination cannot overwrite one of the sources, or we may lose one
  10212. // of the sources needed to redo the equivalent byte code instruction. Determine if the sources need to be preserved.
  10213. const auto dst = instr->GetDst();
  10214. Assert(dst);
  10215. const auto dstStackSym = dst->GetStackSym();
  10216. if(!dstStackSym || !dstStackSym->HasByteCodeRegSlot())
  10217. {
  10218. // We only need to ensure that a byte-code source is not being overwritten
  10219. return;
  10220. }
  10221. switch(instr->m_opcode)
  10222. {
  10223. // The sources of these instructions don't need restoring, or will be restored in the bailout path
  10224. case Js::OpCode::Neg_I4:
  10225. // In case of overflow or zero, the result is the same as the operand
  10226. case Js::OpCode::Add_I4:
  10227. case Js::OpCode::Sub_I4:
  10228. // In case of overflow, there is always enough information to restore the operands
  10229. return;
  10230. }
  10231. Assert(instr->GetSrc1());
  10232. if(!dst->IsEqual(instr->GetSrc1()) && !(instr->GetSrc2() && dst->IsEqual(instr->GetSrc2())))
  10233. {
  10234. // The destination is different from the sources
  10235. return;
  10236. }
  10237. // The destination is the same as one of the sources and the original sources cannot be restored after the instruction, so
  10238. // use a temporary destination for the result and move it back to the original destination after deciding not to bail out
  10239. LowererMD::ChangeToAssign(instr->SinkDst(Js::OpCode::Ld_I4, RegNOREG, skipBailOutLabel));
  10240. }
  10241. void
  10242. Lowerer::LowerInstrWithBailOnResultCondition(
  10243. IR::Instr *const instr,
  10244. const IR::BailOutKind bailOutKind,
  10245. IR::LabelInstr *const bailOutLabel,
  10246. IR::LabelInstr *const skipBailOutLabel) const
  10247. {
  10248. Assert(instr);
  10249. Assert(!instr->IsLowered());
  10250. Assert(!instr->HasBailOutInfo());
  10251. Assert(bailOutKind & IR::BailOutOnResultConditions || bailOutKind == IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck);
  10252. Assert(bailOutLabel);
  10253. Assert(instr->m_next == bailOutLabel);
  10254. Assert(skipBailOutLabel);
  10255. // Preserve sources that are overwritten by the instruction if needed
  10256. PreserveSourcesForBailOnResultCondition(instr, skipBailOutLabel);
  10257. // Lower the instruction
  10258. switch(instr->m_opcode)
  10259. {
  10260. case Js::OpCode::Neg_I4:
  10261. LowererMD::LowerInt4NegWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  10262. break;
  10263. case Js::OpCode::Add_I4:
  10264. LowererMD::LowerInt4AddWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  10265. break;
  10266. case Js::OpCode::Sub_I4:
  10267. LowererMD::LowerInt4SubWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  10268. break;
  10269. case Js::OpCode::Mul_I4:
  10270. LowererMD::LowerInt4MulWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  10271. break;
  10272. case Js::OpCode::Rem_I4:
  10273. m_lowererMD.LowerInt4RemWithBailOut(instr, bailOutKind, bailOutLabel, skipBailOutLabel);
  10274. break;
  10275. default:
  10276. Assert(false); // not implemented
  10277. __assume(false);
  10278. }
  10279. }
  10280. void
  10281. Lowerer::GenerateObjectTestAndTypeLoad(IR::Instr *instrLdSt, IR::RegOpnd *opndBase, IR::RegOpnd *opndType, IR::LabelInstr *labelHelper)
  10282. {
  10283. IR::IndirOpnd *opndIndir;
  10284. if (!opndBase->IsNotTaggedValue())
  10285. {
  10286. m_lowererMD.GenerateObjectTest(opndBase, instrLdSt, labelHelper);
  10287. }
  10288. opndIndir = IR::IndirOpnd::New(opndBase, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  10289. m_lowererMD.CreateAssign(opndType, opndIndir, instrLdSt);
  10290. }
// Generates the bail-out sequence for 'instr', which carries BailOutInfo.
// Three cases:
//   1. Cloned bail-out: jump to the already-generated cloned bail-out label.
//   2. Shared bail-out already generated elsewhere: record this site's bail-out kind (and
//      polymorphic cache index for type-check bail-outs) into the shared record, then jump to it.
//   3. First generation: create the bail-out label and BailOutRecord (BranchBailOutRecord when
//      'branchInstr' is supplied), turn 'instr' into the CALL to the save-registers bail-out
//      helper, and jump to the epilog (deferred for generator-yield bail-outs).
// Returns the label heading the bail-out path (helper label for layout, or the bail-out label).
IR::LabelInstr *
Lowerer::GenerateBailOut(IR::Instr * instr, IR::BranchInstr * branchInstr, IR::LabelInstr *bailOutLabel)
{
    BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
    IR::Instr * bailOutInstr = bailOutInfo->bailOutInstr;
    IR::LabelInstr *collectRuntimeStatsLabel = nullptr;

    if (instr->IsCloned())
    {
        Assert(bailOutInstr != instr);

        // jump to the cloned bail out label
        IR::LabelInstr * bailOutLabelInstr = bailOutInstr->AsLabelInstr();
        IR::BranchInstr * bailOutBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, bailOutLabelInstr, this->m_func);
        instr->InsertBefore(bailOutBranch);
        instr->Remove();
        return bailOutLabel;
    }

    if (bailOutInstr != instr)
    {
        // this bailOutInfo is shared, just jump to the bailout target

        // Add helper label to trigger layout.
        collectRuntimeStatsLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        instr->InsertBefore(collectRuntimeStatsLabel);

        // Record this site's bail-out kind into the shared record before jumping to it.
        IR::MemRefOpnd *pIndexOpndForBailOutKind =
            IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + BailOutRecord::GetOffsetOfBailOutKind(), TyUint32, this->m_func, IR::AddrOpndKindDynamicBailOutKindRef);
        m_lowererMD.CreateAssign(
            pIndexOpndForBailOutKind, IR::IntConstOpnd::New(instr->GetBailOutKind(), pIndexOpndForBailOutKind->GetType(), this->m_func), instr);

        // No point in doing this for BailOutFailedEquivalentTypeCheck or BailOutFailedEquivalentFixedFieldTypeCheck,
        // because the respective inline cache is already polymorphic, anyway.
        if (instr->GetBailOutKind() == IR::BailOutFailedTypeCheck || instr->GetBailOutKind() == IR::BailOutFailedFixedFieldTypeCheck)
        {
            // We have a type check bailout that shares a bailout record with other instructions.
            // Generate code to write the cache index into the bailout record before we jump to the call site.
            Assert(bailOutInfo->polymorphicCacheIndex != (uint)-1);
            Assert(bailOutInfo->bailOutRecord);
            IR::MemRefOpnd *pIndexOpnd =
                IR::MemRefOpnd::New((BYTE*)bailOutInfo->bailOutRecord + BailOutRecord::GetOffsetOfPolymorphicCacheIndex(), TyUint32, this->m_func);
            m_lowererMD.CreateAssign(
                pIndexOpnd, IR::IntConstOpnd::New(bailOutInfo->polymorphicCacheIndex, TyUint32, this->m_func), instr);
        }

        // GenerateBailOut should have replaced this as a label as we should have already lowered
        // the main bailOutInstr.
        IR::LabelInstr * bailOutTargetLabel = bailOutInstr->AsLabelInstr();
#if DBG
        if (bailOutTargetLabel->m_noHelperAssert)
        {
            collectRuntimeStatsLabel->m_noHelperAssert = true;
        }
#endif
        Assert(bailOutLabel == nullptr || bailOutLabel == bailOutTargetLabel);

        IR::BranchInstr * branchInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, bailOutTargetLabel, this->m_func);
        instr->InsertAfter(branchInstr);
        instr->Remove();
        return collectRuntimeStatsLabel ? collectRuntimeStatsLabel : bailOutLabel;
    }

    // The bailout hasn't be generated yet.
    Assert(!bailOutInstr->IsLabelInstr());

    // Add helper label to trigger layout.
    collectRuntimeStatsLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    instr->InsertBefore(collectRuntimeStatsLabel);

    // capture the condition for this bailout
    if (bailOutLabel == nullptr)
    {
        // Create a label and place it in the bailout info so that shared bailout point can jump to this one
        if (instr->m_prev->IsLabelInstr())
        {
            bailOutLabel = instr->m_prev->AsLabelInstr();
            Assert(bailOutLabel->isOpHelper);
        }
        else
        {
            bailOutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
            instr->InsertBefore(bailOutLabel);
        }
    }
    else
    {
        instr->InsertBefore(bailOutLabel);
    }

#if DBG
    if (bailOutInstr->m_opcode == Js::OpCode::BailOnNoSimdTypeSpec || bailOutInstr->m_opcode == Js::OpCode::BailOnNoProfile || bailOutInstr->m_opcode == Js::OpCode::BailOnException || bailOutInstr->m_opcode == Js::OpCode::Yield)
    {
        bailOutLabel->m_noHelperAssert = true;
    }
#endif

    // Future shared bail-outs for this info jump to this label.
    bailOutInfo->bailOutInstr = bailOutLabel;
    bailOutLabel->m_hasNonBranchRef = true;

    // Create the bail out record
    Assert(bailOutInfo->bailOutRecord == nullptr);
    BailOutRecord * bailOutRecord;
    IR::JnHelperMethod helperMethod;
    if (branchInstr != nullptr)
    {
        // Branch bail-out: record both branch-target byte-code offsets so the interpreter can
        // resume on the correct side of the branch.
        Assert(branchInstr->GetSrc2() == nullptr);
        Assert(branchInstr->GetDst() == nullptr);

        IR::LabelInstr * targetLabel = branchInstr->GetTarget();
        Assert(targetLabel->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset);

        uint32 trueOffset;
        uint32 falseOffset;
        IR::Opnd *condOpnd = branchInstr->GetSrc1();
        bool invertTarget = (branchInstr->m_opcode == Js::OpCode::BrFalse_A);

        if (bailOutInfo->isInvertedBranch)
        {
            // Flip the condition
            IR::Instr *subInstr = IR::Instr::New(Js::OpCode::Sub_I4, condOpnd, condOpnd, IR::IntConstOpnd::New(1, TyInt32, instr->m_func), instr->m_func);
            instr->InsertBefore(subInstr);
            this->m_lowererMD.EmitInt4Instr(subInstr);
            // We should really do a DEC/NEG for a full 2's complement flip from 0/1 to 1/0,
            // but DEC is sufficient to flip from 0/1 to -1/0, which is false/true to true/false...
            //instr->InsertBefore(IR::Instr::New(Js::OpCode::Neg_I4, condOpnd, condOpnd, instr->m_func));
            invertTarget = invertTarget ? false : true;
        }

        if (!invertTarget)
        {
            trueOffset = targetLabel->GetByteCodeOffset();
            falseOffset = bailOutInfo->bailOutOffset;
        }
        else
        {
            falseOffset = targetLabel->GetByteCodeOffset();
            trueOffset = bailOutInfo->bailOutOffset;
        }

        bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
            BranchBailOutRecord, trueOffset, falseOffset, branchInstr->GetByteCodeReg(), instr->GetBailOutKind(), bailOutInfo->bailOutFunc);

        helperMethod = IR::HelperSaveAllRegistersAndBranchBailOut;
#ifdef _M_IX86
        if(!AutoSystemInfo::Data.SSE2Available())
        {
            helperMethod = IR::HelperSaveAllRegistersNoSse2AndBranchBailOut;
        }
#endif

        // Save the condition. The register allocator will generate arguments.
        bailOutInfo->branchConditionOpnd = branchInstr->GetSrc1()->Copy(branchInstr->m_func);
    }
    else
    {
        bailOutRecord = NativeCodeDataNewZ(this->m_func->GetNativeCodeDataAllocator(),
            BailOutRecord, bailOutInfo->bailOutOffset, bailOutInfo->polymorphicCacheIndex, instr->GetBailOutKind(), bailOutInfo->bailOutFunc);

        helperMethod = IR::HelperSaveAllRegistersAndBailOut;
#ifdef _M_IX86
        if(!AutoSystemInfo::Data.SSE2Available())
        {
            helperMethod = IR::HelperSaveAllRegistersNoSse2AndBailOut;
        }
#endif
    }

    // Save the bailout record. The register allocator will generate arguments.
    bailOutInfo->bailOutRecord = bailOutRecord;
#if ENABLE_DEBUG_CONFIG_OPTIONS
    bailOutRecord->bailOutOpcode = bailOutInfo->bailOutOpcode;
#endif

    // Call the bail out wrapper
    instr->m_opcode = Js::OpCode::Call;
    if(instr->GetDst())
    {
        // To facilitate register allocation, don't assign a destination. The result will anyway go into the return register,
        // but the register allocator does not need to kill that register for the call.
        instr->FreeDst();
    }
    instr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, this->m_func));
    m_lowererMD.LowerCall(instr, 0);

    if (bailOutInstr->GetBailOutKind() != IR::BailOutForGeneratorYield)
    {
        // Defer introducing the JMP to epilog until LowerPrologEpilog phase for Yield bailouts so
        // that Yield does not appear to have flow out of its containing block for the RegAlloc phase.
        // Yield is an unconditional bailout but we want to simulate the flow as if the Yield were
        // just like a call.
        GenerateJumpToEpilogForBailOut(bailOutInfo, instr);
    }

    return collectRuntimeStatsLabel ? collectRuntimeStatsLabel : bailOutLabel;
}
  10461. void
  10462. Lowerer::GenerateJumpToEpilogForBailOut(BailOutInfo * bailOutInfo, IR::Instr *instr)
  10463. {
  10464. IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->m_prev;
  10465. // JMP to the epilog
  10466. IR::LabelInstr * exitTargetInstr;
  10467. if (exitPrevInstr->IsLabelInstr())
  10468. {
  10469. exitTargetInstr = exitPrevInstr->AsLabelInstr();
  10470. }
  10471. else
  10472. {
  10473. exitTargetInstr = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  10474. exitPrevInstr->InsertAfter(exitTargetInstr);
  10475. }
  10476. exitTargetInstr = m_lowererMD.GetBailOutStackRestoreLabel(bailOutInfo, exitTargetInstr);
  10477. IR::Instr * instrAfter = instr->m_next;
  10478. IR::BranchInstr * exitInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, exitTargetInstr, this->m_func);
  10479. instrAfter->InsertBefore(exitInstr);
  10480. }
  10481. ///----------------------------------------------------------------------------
  10482. ///
  10483. /// Lowerer::GenerateFastCondBranch
  10484. ///
  10485. ///----------------------------------------------------------------------------
// Generates an inline tagged-int fast path for a conditional branch.
// Returns true when the caller still needs to generate the normal helper call sequence
// (emitted after the $helper label inserted here), false when the branch was fully lowered
// inline (both sources proven tagged ints) and the original branch was removed.
// *pIsHelper is set to true when the helper label path is taken.
bool
Lowerer::GenerateFastCondBranch(IR::BranchInstr * instrBranch, bool *pIsHelper)
{
    // The idea is to do an inline compare if we can prove that both sources
    // are tagged ints
    //
    // Given:
    //
    //     Brxx_A $L, src1, src2
    //
    // Generate:
    //
    //     (If not Int31's, goto $helper)
    //     Jxx $L, src1, src2
    //     JMP $fallthru
    // $helper:
    //     (caller will generate normal helper call sequence)
    // $fallthru:

    IR::LabelInstr * labelHelper = nullptr;
    IR::LabelInstr * labelFallThru;
    IR::BranchInstr * instr;
    IR::Opnd * opndSrc1;
    IR::Opnd * opndSrc2;

    opndSrc1 = instrBranch->GetSrc1();
    opndSrc2 = instrBranch->GetSrc2();
    AssertMsg(opndSrc1 && opndSrc2, "BrC expects 2 src operands");

    // Not tagged ints? No fast path possible; caller emits the helper call.
    if (opndSrc1->IsRegOpnd() && opndSrc1->AsRegOpnd()->IsNotInt())
    {
        return true;
    }
    if (opndSrc2->IsRegOpnd() && opndSrc2->AsRegOpnd()->IsNotInt())
    {
        return true;
    }

    // Tagged ints?
    bool isTaggedInts = false;
    if (opndSrc1->IsTaggedInt())
    {
        if (opndSrc2->IsTaggedInt())
        {
            isTaggedInts = true;
        }
    }

    if (!isTaggedInts)
    {
        // Not provably tagged: emit the runtime tagged-int pair test, falling to $helper on failure.
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        this->m_lowererMD.GenerateSmIntPairTest(instrBranch, opndSrc1, opndSrc2, labelHelper);
    }

    // Jxx $L, src1, src2 -- the inline int32 compare-and-branch.
    opndSrc1 = opndSrc1->UseWithNewType(TyInt32, this->m_func);
    opndSrc2 = opndSrc2->UseWithNewType(TyInt32, this->m_func);
    instr = IR::BranchInstr::New(instrBranch->m_opcode, instrBranch->GetTarget(), opndSrc1, opndSrc2, this->m_func);
    instrBranch->InsertBefore(instr);
    this->m_lowererMD.LowerCondBranch(instr);

    if (isTaggedInts)
    {
        instrBranch->Remove();

        // Skip lowering call to helper
        return false;
    }

    // JMP $fallthru -- the fast path jumps over the helper sequence.
    IR::Instr *instrNext = instrBranch->GetNextRealInstrOrLabel();
    if (instrNext->IsLabelInstr())
    {
        labelFallThru = instrNext->AsLabelInstr();
    }
    else
    {
        labelFallThru = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, /**pIsHelper*/FALSE);
        instrBranch->InsertAfter(labelFallThru);
    }
    instr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelFallThru, this->m_func);
    instrBranch->InsertBefore(instr);

    // $helper:
    //     (caller will generate normal helper call sequence)
    // $fallthru:
    AssertMsg(labelHelper, "Should not be NULL");
    instrBranch->InsertBefore(labelHelper);

    *pIsHelper = true;
    return true;
}
// Lower InlineeStart: turn the inlinee's ArgOut chain into stores to the
// inline frame's actual slots, and materialize the frame's meta args
// (function object, argc, ...) in front of the InlineeStart instruction.
void
Lowerer::LowerInlineeStart(IR::Instr * inlineeStartInstr)
{
    IR::Opnd *linkOpnd = inlineeStartInstr->GetSrc2();
    if (!linkOpnd)
    {
        // No argout chain to walk; only legal when the inline arguments
        // stack has been optimized away.
        Assert(inlineeStartInstr->m_func->m_hasInlineArgsOpt);
        return;
    }

    AssertMsg(inlineeStartInstr->m_func->firstActualStackOffset != -1, "This should have been already done in backward pass");

    IR::Instr *startCall;

    // Free the argOut links and lower them to MOVs
    inlineeStartInstr->IterateArgInstrs([&](IR::Instr* argInstr){
        Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A || argInstr->m_opcode == Js::OpCode::ArgOut_A_Inline);
        startCall = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
        argInstr->FreeSrc2();
#pragma prefast(suppress:6235, "Non-Zero Constant in Condition")
        if (!PHASE_ON(Js::EliminateArgoutForInlineePhase, this->m_func) || inlineeStartInstr->m_func->GetJnFunction()->GetHasOrParentHasArguments())
        {
            // Store the actual into its inline-frame stack slot.
            m_lowererMD.ChangeToAssign(argInstr);
        }
        else
        {
            // Elimination phase is on and no arguments object is in play:
            // keep the argout virtual so it can be removed later.
            argInstr->m_opcode = Js::OpCode::ArgOut_A_InlineBuiltIn;
        }
        return false;
    });

    IR::Instr *argInsertInstr = inlineeStartInstr;
    uint i = 0;
    inlineeStartInstr->IterateMetaArgs( [&] (IR::Instr* metaArg)
    {
        if (i == 0)
        {
            // Before the first meta arg, null out the NEXT inline frame's
            // arg-count slot to terminate the frame chain.
            LowererMD::CreateAssign(metaArg->m_func->GetNextInlineeFrameArgCountSlotOpnd(),
                IR::AddrOpnd::NewNull(metaArg->m_func),
                argInsertInstr);
        }

        if (i == Js::Constants::InlineeMetaArgIndex_FunctionObject)
        {
            // The function-object meta arg stores InlineeStart's src1.
            metaArg->SetSrc1(inlineeStartInstr->GetSrc1());
        }
        metaArg->Unlink();
        argInsertInstr->InsertBefore(metaArg);
        IR::Instr* prev = metaArg->m_prev;
        m_lowererMD.ChangeToAssign(metaArg);
        if (i == Js::Constants::InlineeMetaArgIndex_Argc)
        {
#if defined(_M_IX86) || defined(_M_X64)
            Assert(metaArg == prev->m_next);
#else //defined(_M_ARM)
            // On ARM, ChangeToAssign expands the constant into an LDIMM.
            Assert(prev->m_next->m_opcode == Js::OpCode::LDIMM);
#endif
            metaArg = prev->m_next;
            Assert(metaArg->GetSrc1()->AsAddrOpnd()->m_dontEncode == true);
            // Flag the argc store for the encoder -- presumably so it can be
            // identified/patched as the inlinee entry marker; TODO confirm.
            metaArg->isInlineeEntryInstr = true;
            LowererMD::Legalize(metaArg);
        }
        argInsertInstr = metaArg;
        i++;
        return false;
    });

    if (inlineeStartInstr->m_func->m_hasInlineArgsOpt)
    {
        // InlineeStart itself is kept (operand-less) when the arguments
        // stack was optimized; later phases still consume it.
        inlineeStartInstr->FreeSrc1();
        inlineeStartInstr->FreeSrc2();
        inlineeStartInstr->FreeDst();
    }
    else
    {
        inlineeStartInstr->Remove();
    }
}
  10640. void
  10641. Lowerer::LowerInlineeEnd(IR::Instr *instr)
  10642. {
  10643. Assert(instr->m_func->IsInlinee());
  10644. Assert(m_func->IsTopFunc());
  10645. // No need to emit code if the function wasn't marked as having implicit calls or bailout. Dead-Store should have removed inline overhead.
  10646. if (instr->m_func->GetHasImplicitCalls() || PHASE_OFF(Js::DeadStorePhase, this->m_func))
  10647. {
  10648. LowererMD::CreateAssign(instr->m_func->GetInlineeArgCountSlotOpnd(),
  10649. IR::AddrOpnd::New(0, IR::AddrOpndKindConstantVar, instr->m_func),
  10650. instr);
  10651. }
  10652. // Keep InlineeEnd around as it is used by register allocator, if we have optimized the arguments stack
  10653. if (instr->m_func->m_hasInlineArgsOpt)
  10654. {
  10655. instr->FreeSrc1();
  10656. }
  10657. else
  10658. {
  10659. instr->Remove();
  10660. }
  10661. }
  10662. IR::Instr *
  10663. Lowerer::LoadFloatFromNonReg(IR::Opnd * opndSrc, IR::Opnd * opndDst, IR::Instr * instrInsert)
  10664. {
  10665. double value;
  10666. if (opndSrc->IsAddrOpnd())
  10667. {
  10668. Js::Var var = opndSrc->AsAddrOpnd()->m_address;
  10669. if (Js::TaggedInt::Is(var))
  10670. {
  10671. value = Js::TaggedInt::ToDouble(var);
  10672. }
  10673. else
  10674. {
  10675. value = Js::JavascriptNumber::GetValue(var);
  10676. }
  10677. }
  10678. else if (opndSrc->IsIntConstOpnd())
  10679. {
  10680. if (opndSrc->IsUInt32())
  10681. {
  10682. value = (double)(uint32)opndSrc->AsIntConstOpnd()->GetValue();
  10683. }
  10684. else
  10685. {
  10686. value = (double)opndSrc->AsIntConstOpnd()->GetValue();
  10687. }
  10688. }
  10689. else if (opndSrc->IsFloatConstOpnd())
  10690. {
  10691. value = (double)opndSrc->AsFloatConstOpnd()->m_value;
  10692. }
  10693. else
  10694. {
  10695. AssertMsg(0, "Unexpected opnd type");
  10696. value = 0;
  10697. }
  10698. return LowererMD::LoadFloatValue(opndDst, value, instrInsert);
  10699. }
// Emit the fast-path attempt to extract an int32 from an untagged Var:
// call the no-check helper; a nonzero result means success and jumps past
// the caller-generated slow path. The caller generates the fallthrough
// (slow-path) code at 'instrLoad'.
void
Lowerer::LoadInt32FromUntaggedVar(IR::Instr *const instrLoad)
{
    Assert(instrLoad);
    Assert(instrLoad->GetDst());
    Assert(instrLoad->GetDst()->IsRegOpnd());
    Assert(instrLoad->GetDst()->IsInt32());
    Assert(instrLoad->GetSrc1());
    Assert(instrLoad->GetSrc1()->IsRegOpnd());
    Assert(instrLoad->GetSrc1()->IsVar());
    Assert(!instrLoad->GetSrc2());

    //     push src
    //     int32Value = call JavascriptNumber::GetNonzeroInt32Value_NoChecks
    //     test int32Value, int32Value
    //     jne $done
    //     (fall through to 'instrLoad'; caller will generate code here)
    // $done:
    //     (rest of program)

    Func *const func = instrLoad->m_func;
    IR::LabelInstr *const doneLabel = instrLoad->GetOrCreateContinueLabel();

    //     push src
    //     int32Value = call JavascriptNumber::GetNonzeroInt32Value_NoChecks
    // The result is written directly into the load's destination sym.
    StackSym *const int32ValueSym = instrLoad->GetDst()->AsRegOpnd()->m_sym;
    IR::Instr *const instr =
        IR::Instr::New(
            Js::OpCode::Call,
            IR::RegOpnd::New(int32ValueSym, TyInt32, func),
            instrLoad->GetSrc1()->AsRegOpnd(),
            func);
    instrLoad->InsertBefore(instr);
    LowerUnaryHelper(instr, IR::HelperGetNonzeroInt32Value_NoTaggedIntCheck);

    //     test int32Value, int32Value
    //     jne $done
    // A zero return signals "not representable"; fall through to slow path.
    InsertCompareBranch(
        IR::RegOpnd::New(int32ValueSym, TyInt32, func),
        IR::IntConstOpnd::New(0, TyInt32, func, true),
        Js::OpCode::BrNeq_A,
        doneLabel,
        instrLoad);
}
  10740. bool
  10741. Lowerer::GetValueFromIndirOpnd(IR::IndirOpnd *indirOpnd, IR::Opnd **pValueOpnd, IntConstType *pValue)
  10742. {
  10743. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  10744. IR::Opnd* valueOpnd = nullptr;
  10745. IntConstType value = 0;
  10746. if (!indexOpnd)
  10747. {
  10748. value = (IntConstType)indirOpnd->GetOffset();
  10749. if (value < 0)
  10750. {
  10751. // Can't do fast path for negative index
  10752. return false;
  10753. }
  10754. valueOpnd = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
  10755. }
  10756. else if (indexOpnd->m_sym->IsIntConst())
  10757. {
  10758. value = indexOpnd->AsRegOpnd()->m_sym->GetIntConstValue();
  10759. if (value < 0)
  10760. {
  10761. // Can't do fast path for negative index
  10762. return false;
  10763. }
  10764. valueOpnd = IR::IntConstOpnd::New(value, TyInt32, this->m_func);
  10765. }
  10766. *pValueOpnd = valueOpnd;
  10767. *pValue = value;
  10768. return true;
  10769. }
// Fully lower BrOnObject_A inline: branch to the target when src1 is a
// non-tagged value whose TypeId is beyond the last primitive type id
// (i.e. an object); otherwise fall through to the continue label.
// The original branch instruction is removed.
void
Lowerer::GenerateFastBrOnObject(IR::Instr *instr)
{
    Assert(instr->m_opcode == Js::OpCode::BrOnObject_A);

    IR::RegOpnd *object = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
    IR::LabelInstr *done = instr->GetOrCreateContinueLabel();
    IR::LabelInstr *target = instr->AsBranchInstr()->GetTarget();
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    IR::IntConstOpnd *typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, instr->m_func);

    if (!object)
    {
        // Non-register source: copy it into a register first.
        object = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(object, instr->GetSrc1(), instr);
    }

    // TEST object, 1                   -- tagged values are never objects
    // JNE $done
    // MOV typeRegOpnd, [object + offset(Type)]
    // CMP [typeRegOpnd + offset(TypeId)], TypeIds_LastJavascriptPrimitiveType
    // JGT $target
    // $done:

    m_lowererMD.GenerateObjectTest(object, instr, done);

    InsertMove(typeRegOpnd,
               IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
               instr);

    InsertCompareBranch(
        IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
        typeIdOpnd, Js::OpCode::BrGt_A, target, instr);

    instr->Remove();
}
// Branch to 'target' when baseOpnd's type handler stores its inline slots in
// the object header (its offsetOfInlineSlots equals the header-inline offset);
// such objects cannot have an object array, so callers use this to bail to
// the not-an-array path.
void Lowerer::GenerateObjectHeaderInliningTest(IR::RegOpnd *baseOpnd, IR::LabelInstr * target,IR::Instr *insertBeforeInstr)
{
    Assert(baseOpnd);
    Assert(target);
    AssertMsg(
        baseOpnd->GetValueType().IsLikelyObject() &&
        baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray,
        "Why are we here, when the object is already known not to have an ObjArray");
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    // mov type, [base + offsetOf(type)]
    IR::RegOpnd *const opnd = IR::RegOpnd::New(TyMachPtr, func);
    m_lowererMD.CreateAssign(
        opnd,
        IR::IndirOpnd::New(
            baseOpnd,
            Js::DynamicObject::GetOffsetOfType(),
            opnd->GetType(),
            func),
        insertBeforeInstr);

    // mov typeHandler, [type + offsetOf(typeHandler)]
    // (the same register is reused for the type handler pointer)
    m_lowererMD.CreateAssign(
        opnd,
        IR::IndirOpnd::New(
            opnd,
            Js::DynamicType::GetOffsetOfTypeHandler(),
            opnd->GetType(),
            func),
        insertBeforeInstr);

    IR::IndirOpnd * offsetOfInlineSlotOpnd = IR::IndirOpnd::New(opnd,Js::DynamicTypeHandler::GetOffsetOfOffsetOfInlineSlots(), TyInt16, func);
    IR::IntConstOpnd * objHeaderInlinedSlotOffset = IR::IntConstOpnd::New(Js::DynamicTypeHandler::GetOffsetOfObjectHeaderInlineSlots(), TyInt16, func);

    // CMP [typeHandler + offsetOf(offsetOfInlineSlots)], objHeaderInlinedSlotOffset
    // JEQ $target   -- equal means the slots are header-inlined
    InsertCompareBranch(
        offsetOfInlineSlotOpnd,
        objHeaderInlinedSlotOffset,
        Js::OpCode::BrEq_A,
        target,
        insertBeforeInstr);
}
// Branch to labelHelper unless srcReg holds a (non-tagged) object whose
// vtable is exactly DynamicObject's. The tagged-value test is skipped when
// the operand is already known not to be a tagged value.
void Lowerer::GenerateObjectTypeTest(IR::RegOpnd *srcReg, IR::Instr *instrInsert, IR::LabelInstr *labelHelper)
{
    Assert(srcReg);
    if (!srcReg->IsNotTaggedValue())
    {
        m_lowererMD.GenerateObjectTest(srcReg, instrInsert, labelHelper);
    }

    // CMP [srcReg], Js::DynamicObject::`vtable'
    // JNE $helper
    InsertCompareBranch(
        IR::IndirOpnd::New(srcReg, 0, TyMachPtr, m_func),
        LoadVTableValueOpnd(instrInsert, VTableValue::VtableDynamicObject),
        Js::OpCode::BrNeq_A,
        labelHelper,
        instrInsert);
}
// Vtable id per ObjectType, indexed by the ObjectType enum value.
// Entry order must stay in sync with the ObjectType enum declaration.
// VtableInvalid marks object types with no array fast-path vtable.
// Note: Mixed typed-array types map to the NON-virtual vtable here; see
// GetArrayVtableAddress for the mixed-to-virtual remapping.
const VTableValue Lowerer::VtableAddresses[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */       VTableValue::VtableInvalid,
    /* ObjectType::Object */                    VTableValue::VtableInvalid,
    /* ObjectType::RegExp */                    VTableValue::VtableInvalid,
    /* ObjectType::ObjectWithArray */           VTableValue::VtableJavascriptArray,
    /* ObjectType::Array */                     VTableValue::VtableJavascriptArray,
    /* ObjectType::Int8Array */                 VTableValue::VtableInt8Array,
    /* ObjectType::Uint8Array */                VTableValue::VtableUint8Array,
    /* ObjectType::Uint8ClampedArray */         VTableValue::VtableUint8ClampedArray,
    /* ObjectType::Int16Array */                VTableValue::VtableInt16Array,
    /* ObjectType::Uint16Array */               VTableValue::VtableUint16Array,
    /* ObjectType::Int32Array */                VTableValue::VtableInt32Array,
    /* ObjectType::Uint32Array */               VTableValue::VtableUint32Array,
    /* ObjectType::Float32Array */              VTableValue::VtableFloat32Array,
    /* ObjectType::Float64Array */              VTableValue::VtableFloat64Array,
    /* ObjectType::Int8VirtualArray */          VTableValue::VtableInt8VirtualArray,
    /* ObjectType::Uint8VirtualArray */         VTableValue::VtableUint8VirtualArray,
    /* ObjectType::Uint8ClampedVirtualArray */  VTableValue::VtableUint8ClampedVirtualArray,
    /* ObjectType::Int16VirtualArray */         VTableValue::VtableInt16VirtualArray,
    /* ObjectType::Uint16VirtualArray */        VTableValue::VtableUint16VirtualArray,
    /* ObjectType::Int32VirtualArray */         VTableValue::VtableInt32VirtualArray,
    /* ObjectType::Uint32VirtualArray */        VTableValue::VtableUint32VirtualArray,
    /* ObjectType::Float32VirtualArray */       VTableValue::VtableFloat32VirtualArray,
    /* ObjectType::Float64VirtualArray */       VTableValue::VtableFloat64VirtualArray,
    /* ObjectType::Int8MixedArray */            VTableValue::VtableInt8Array,
    /* ObjectType::Uint8MixedArray */           VTableValue::VtableUint8Array,
    /* ObjectType::Uint8ClampedMixedArray */    VTableValue::VtableUint8ClampedArray,
    /* ObjectType::Int16MixedArray */           VTableValue::VtableInt16Array,
    /* ObjectType::Uint16MixedArray */          VTableValue::VtableUint16Array,
    /* ObjectType::Int32MixedArray */           VTableValue::VtableInt32Array,
    /* ObjectType::Uint32MixedArray */          VTableValue::VtableUint32Array,
    /* ObjectType::Float32MixedArray */         VTableValue::VtableFloat32Array,
    /* ObjectType::Float64MixedArray */         VTableValue::VtableFloat64Array,
    /* ObjectType::Int64Array */                VTableValue::VtableInt64Array,
    /* ObjectType::Uint64Array */               VTableValue::VtableUint64Array,
    /* ObjectType::BoolArray */                 VTableValue::VtableBoolArray,
    /* ObjectType::CharArray */                 VTableValue::VtableCharArray
};
// Offset of the head segment (JS arrays) or buffer pointer (typed arrays)
// per ObjectType, indexed by the ObjectType enum value. Entry order must
// stay in sync with the ObjectType enum. -1 marks types with no array layout.
const uint32 Lowerer::OffsetsOfHeadSegment[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */       static_cast<uint32>(-1),
    /* ObjectType::Object */                    static_cast<uint32>(-1),
    /* ObjectType::RegExp */                    static_cast<uint32>(-1),
    /* ObjectType::ObjectWithArray */           Js::JavascriptArray::GetOffsetOfHead(),
    /* ObjectType::Array */                     Js::JavascriptArray::GetOffsetOfHead(),
    /* ObjectType::Int8Array */                 Js::Int8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8Array */                Js::Uint8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedArray */         Js::Uint8ClampedArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16Array */                Js::Int16Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint16Array */               Js::Uint16Array::GetOffsetOfBuffer(),
    /* ObjectType::Int32Array */                Js::Int32Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint32Array */               Js::Uint32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float32Array */              Js::Float32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float64Array */              Js::Float64Array::GetOffsetOfBuffer(),
    /* ObjectType::Int8VirtualArray */          Js::Int8VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint8VirtualArray */         Js::Uint8VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedVirtualArray */  Js::Uint8ClampedVirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16VirtualArray */         Js::Int16VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint16VirtualArray */        Js::Uint16VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int32VirtualArray */         Js::Int32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Uint32VirtualArray */        Js::Uint32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Float32VirtualArray */       Js::Float32VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Float64VirtualArray */       Js::Float64VirtualArray::GetOffsetOfBuffer(),
    /* ObjectType::Int8MixedArray */            Js::Int8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8MixedArray */           Js::Uint8Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint8ClampedMixedArray */    Js::Uint8ClampedArray::GetOffsetOfBuffer(),
    /* ObjectType::Int16MixedArray */           Js::Int16Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint16MixedArray */          Js::Uint16Array::GetOffsetOfBuffer(),
    /* ObjectType::Int32MixedArray */           Js::Int32Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint32MixedArray */          Js::Uint32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float32MixedArray */         Js::Float32Array::GetOffsetOfBuffer(),
    /* ObjectType::Float64MixedArray */         Js::Float64Array::GetOffsetOfBuffer(),
    /* ObjectType::Int64Array */                Js::Int64Array::GetOffsetOfBuffer(),
    /* ObjectType::Uint64Array */               Js::Uint64Array::GetOffsetOfBuffer(),
    /* ObjectType::BoolArray */                 Js::BoolArray::GetOffsetOfBuffer(),
    /* ObjectType::CharArray */                 Js::CharArray::GetOffsetOfBuffer()
};
// Offset of the length field per ObjectType, indexed by the ObjectType enum
// value. Entry order must stay in sync with the ObjectType enum.
// -1 marks object types with no array layout.
const uint32 Lowerer::OffsetsOfLength[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */       static_cast<uint32>(-1),
    /* ObjectType::Object */                    static_cast<uint32>(-1),
    /* ObjectType::RegExp */                    static_cast<uint32>(-1),
    /* ObjectType::ObjectWithArray */           Js::JavascriptArray::GetOffsetOfLength(),
    /* ObjectType::Array */                     Js::JavascriptArray::GetOffsetOfLength(),
    /* ObjectType::Int8Array */                 Js::Int8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8Array */                Js::Uint8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedArray */         Js::Uint8ClampedArray::GetOffsetOfLength(),
    /* ObjectType::Int16Array */                Js::Int16Array::GetOffsetOfLength(),
    /* ObjectType::Uint16Array */               Js::Uint16Array::GetOffsetOfLength(),
    /* ObjectType::Int32Array */                Js::Int32Array::GetOffsetOfLength(),
    /* ObjectType::Uint32Array */               Js::Uint32Array::GetOffsetOfLength(),
    /* ObjectType::Float32Array */              Js::Float32Array::GetOffsetOfLength(),
    /* ObjectType::Float64Array */              Js::Float64Array::GetOffsetOfLength(),
    /* ObjectType::Int8VirtualArray */          Js::Int8VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint8VirtualArray */         Js::Uint8VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedVirtualArray */  Js::Uint8ClampedVirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int16VirtualArray */         Js::Int16VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint16VirtualArray */        Js::Uint16VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int32VirtualArray */         Js::Int32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Uint32VirtualArray */        Js::Uint32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Float32VirtualArray */       Js::Float32VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Float64VirtualArray */       Js::Float64VirtualArray::GetOffsetOfLength(),
    /* ObjectType::Int8MixedArray */            Js::Int8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8MixedArray */           Js::Uint8Array::GetOffsetOfLength(),
    /* ObjectType::Uint8ClampedMixedArray */    Js::Uint8ClampedArray::GetOffsetOfLength(),
    /* ObjectType::Int16MixedArray */           Js::Int16Array::GetOffsetOfLength(),
    /* ObjectType::Uint16MixedArray */          Js::Uint16Array::GetOffsetOfLength(),
    /* ObjectType::Int32MixedArray */           Js::Int32Array::GetOffsetOfLength(),
    /* ObjectType::Uint32MixedArray */          Js::Uint32Array::GetOffsetOfLength(),
    /* ObjectType::Float32MixedArray */         Js::Float32Array::GetOffsetOfLength(),
    /* ObjectType::Float64MixedArray */         Js::Float64Array::GetOffsetOfLength(),
    /* ObjectType::Int64Array */                Js::Int64Array::GetOffsetOfLength(),
    /* ObjectType::Uint64Array */               Js::Uint64Array::GetOffsetOfLength(),
    /* ObjectType::BoolArray */                 Js::BoolArray::GetOffsetOfLength(),
    /* ObjectType::CharArray */                 Js::CharArray::GetOffsetOfLength()
};
// Element IR type per ObjectType, indexed by the ObjectType enum value.
// Entry order must stay in sync with the ObjectType enum.
// TyIllegal marks object types with no element fast path.
const IRType Lowerer::IndirTypes[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */       TyIllegal,
    /* ObjectType::Object */                    TyIllegal,
    /* ObjectType::RegExp */                    TyIllegal,
    /* ObjectType::ObjectWithArray */           TyVar,
    /* ObjectType::Array */                     TyVar,
    /* ObjectType::Int8Array */                 TyInt8,
    /* ObjectType::Uint8Array */                TyUint8,
    /* ObjectType::Uint8ClampedArray */         TyUint8,
    /* ObjectType::Int16Array */                TyInt16,
    /* ObjectType::Uint16Array */               TyUint16,
    /* ObjectType::Int32Array */                TyInt32,
    /* ObjectType::Uint32Array */               TyUint32,
    /* ObjectType::Float32Array */              TyFloat32,
    /* ObjectType::Float64Array */              TyFloat64,
    /* ObjectType::Int8VirtualArray */          TyInt8,
    /* ObjectType::Uint8VirtualArray */         TyUint8,
    /* ObjectType::Uint8ClampedVirtualArray */  TyUint8,
    /* ObjectType::Int16VirtualArray */         TyInt16,
    /* ObjectType::Uint16VirtualArray */        TyUint16,
    /* ObjectType::Int32VirtualArray */         TyInt32,
    /* ObjectType::Uint32VirtualArray */        TyUint32,
    /* ObjectType::Float32VirtualArray */       TyFloat32,
    /* ObjectType::Float64VirtualArray */       TyFloat64,
    /* ObjectType::Int8MixedArray */            TyInt8,
    /* ObjectType::Uint8MixedArray */           TyUint8,
    /* ObjectType::Uint8ClampedMixedArray */    TyUint8,
    /* ObjectType::Int16MixedArray */           TyInt16,
    /* ObjectType::Uint16MixedArray */          TyUint16,
    /* ObjectType::Int32MixedArray */           TyInt32,
    /* ObjectType::Uint32MixedArray */          TyUint32,
    /* ObjectType::Float32MixedArray */         TyFloat32,
    /* ObjectType::Float64MixedArray */         TyFloat64,
    /* ObjectType::Int64Array */                TyInt64,
    /* ObjectType::Uint64Array */               TyUint64,
    /* ObjectType::BoolArray */                 TyUint8,
    /* ObjectType::CharArray */                 TyUint16
};
// log2(element size) per ObjectType, indexed by the ObjectType enum value.
// Entry order must stay in sync with the ObjectType enum.
// -1 marks object types with no element fast path.
const BYTE Lowerer::IndirScales[static_cast<ValueType::TSize>(ObjectType::Count)] =
{
    /* ObjectType::UninitializedObject */       static_cast<BYTE>(-1),
    /* ObjectType::Object */                    static_cast<BYTE>(-1),
    /* ObjectType::RegExp */                    static_cast<BYTE>(-1),
    /* ObjectType::ObjectWithArray */           LowererMD::GetDefaultIndirScale(),
    /* ObjectType::Array */                     LowererMD::GetDefaultIndirScale(),
    /* ObjectType::Int8Array */                 0, // log2(sizeof(int8))
    /* ObjectType::Uint8Array */                0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedArray */         0, // log2(sizeof(uint8))
    /* ObjectType::Int16Array */                1, // log2(sizeof(int16))
    /* ObjectType::Uint16Array */               1, // log2(sizeof(uint16))
    /* ObjectType::Int32Array */                2, // log2(sizeof(int32))
    /* ObjectType::Uint32Array */               2, // log2(sizeof(uint32))
    /* ObjectType::Float32Array */              2, // log2(sizeof(float))
    /* ObjectType::Float64Array */              3, // log2(sizeof(double))
    /* ObjectType::Int8VirtualArray */          0, // log2(sizeof(int8))
    /* ObjectType::Uint8VirtualArray */         0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedVirtualArray */  0, // log2(sizeof(uint8))
    /* ObjectType::Int16VirtualArray */         1, // log2(sizeof(int16))
    /* ObjectType::Uint16VirtualArray */        1, // log2(sizeof(uint16))
    /* ObjectType::Int32VirtualArray */         2, // log2(sizeof(int32))
    /* ObjectType::Uint32VirtualArray */        2, // log2(sizeof(uint32))
    /* ObjectType::Float32VirtualArray */       2, // log2(sizeof(float))
    /* ObjectType::Float64VirtualArray */       3, // log2(sizeof(double))
    /* ObjectType::Int8MixedArray */            0, // log2(sizeof(int8))
    /* ObjectType::Uint8MixedArray */           0, // log2(sizeof(uint8))
    /* ObjectType::Uint8ClampedMixedArray */    0, // log2(sizeof(uint8))
    /* ObjectType::Int16MixedArray */           1, // log2(sizeof(int16))
    /* ObjectType::Uint16MixedArray */          1, // log2(sizeof(uint16))
    /* ObjectType::Int32MixedArray */           2, // log2(sizeof(int32))
    /* ObjectType::Uint32MixedArray */          2, // log2(sizeof(uint32))
    /* ObjectType::Float32MixedArray */         2, // log2(sizeof(float))
    /* ObjectType::Float64MixedArray */         3, // log2(sizeof(double))
    /* ObjectType::Int64Array */                3, // log2(sizeof(int64))
    /* ObjectType::Uint64Array */               3, // log2(sizeof(uint64))
    /* ObjectType::BoolArray */                 0, // log2(sizeof(bool))
    /* ObjectType::CharArray */                 1  // log2(sizeof(wchar_t))
};
  11049. VTableValue Lowerer::GetArrayVtableAddress(const ValueType valueType, bool getVirtual)
  11050. {
  11051. Assert(valueType.IsLikelyAnyOptimizedArray());
  11052. if(valueType.IsLikelyArrayOrObjectWithArray())
  11053. {
  11054. if(valueType.HasIntElements())
  11055. {
  11056. return VTableValue::VtableNativeIntArray;
  11057. }
  11058. else if(valueType.HasFloatElements())
  11059. {
  11060. return VTableValue::VtableNativeFloatArray;
  11061. }
  11062. }
  11063. if (getVirtual && valueType.IsLikelyMixedTypedArrayType())
  11064. {
  11065. return VtableAddresses[static_cast<ValueType::TSize>(valueType.GetMixedToVirtualTypedArrayObjectType())];
  11066. }
  11067. return VtableAddresses[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  11068. }
  11069. uint32 Lowerer::GetArrayOffsetOfHeadSegment(const ValueType valueType)
  11070. {
  11071. Assert(valueType.IsLikelyAnyOptimizedArray());
  11072. return OffsetsOfHeadSegment[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  11073. }
  11074. uint32 Lowerer::GetArrayOffsetOfLength(const ValueType valueType)
  11075. {
  11076. Assert(valueType.IsLikelyAnyOptimizedArray());
  11077. return OffsetsOfLength[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  11078. }
  11079. IRType Lowerer::GetArrayIndirType(const ValueType valueType)
  11080. {
  11081. Assert(valueType.IsLikelyAnyOptimizedArray());
  11082. if(valueType.IsLikelyArrayOrObjectWithArray())
  11083. {
  11084. if(valueType.HasIntElements())
  11085. {
  11086. return TyInt32;
  11087. }
  11088. else if(valueType.HasFloatElements())
  11089. {
  11090. return TyFloat64;
  11091. }
  11092. }
  11093. return IndirTypes[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  11094. }
  11095. BYTE Lowerer::GetArrayIndirScale(const ValueType valueType)
  11096. {
  11097. Assert(valueType.IsLikelyAnyOptimizedArray());
  11098. if(valueType.IsLikelyArrayOrObjectWithArray())
  11099. {
  11100. if(valueType.HasIntElements())
  11101. {
  11102. return 2; // log2(sizeof(int32))
  11103. }
  11104. else if(valueType.HasFloatElements())
  11105. {
  11106. return 3; // log2(sizeof(double))
  11107. }
  11108. }
  11109. return IndirScales[static_cast<ValueType::TSize>(valueType.GetObjectType())];
  11110. }
  11111. int Lowerer::SimdGetElementCountFromBytes(ValueType arrValueType, uint8 dataWidth)
  11112. {
  11113. Assert(dataWidth == 4 || dataWidth == 8 || dataWidth == 12 || dataWidth == 16);
  11114. Assert(arrValueType.IsTypedArray());
  11115. BYTE bpe = 1 << Lowerer::GetArrayIndirScale(arrValueType);
  11116. // round up
  11117. return (int)::ceil(((float)dataWidth) / bpe);
  11118. }
// Decide whether an inline array fast path is worth generating for
// 'arrayOpnd', given which array kinds the caller's fast path supports.
// Returns true when the profile/value-type info suggests the fast path
// would likely be hit.
bool Lowerer::ShouldGenerateArrayFastPath(
    const IR::Opnd *const arrayOpnd,
    const bool supportsObjectsWithArrays,
    const bool supportsTypedArrays,
    const bool requiresSse2ForFloatArrays) const
{
    Assert(arrayOpnd);

    const ValueType arrayValueType(arrayOpnd->GetValueType());
    if(arrayValueType.IsUninitialized())
    {
        // Don't have info about the value type, better to generate the fast path anyway
        return true;
    }
    if (!arrayValueType.IsLikelyObject())
    {
        if (!arrayValueType.HasBeenObject() || arrayValueType.IsLikelyString())
        {
            return false;
        }
        //We have seen at least once there is an object in the code path. Generate fastpath hoping it to be array.
        //Its nice if we can get all the attributes set but valueType is only 16 bits. Consider expanding the same.
        return true;
    }
    // (&& binds tighter than ||: this is "(unsupported obj-with-array) OR
    // (unsupported typed array)".)
    if( !supportsObjectsWithArrays && arrayValueType.GetObjectType() == ObjectType::ObjectWithArray ||
        !supportsTypedArrays && arrayValueType.IsLikelyTypedArray())
    {
        // The fast path likely would not hit
        return false;
    }
    if(arrayValueType.GetObjectType() == ObjectType::UninitializedObject)
    {
        // Don't have info about the object type, better to generate the fast path anyway
        return true;
    }
#ifdef _M_IX86
    if(requiresSse2ForFloatArrays &&
        (
            arrayValueType.GetObjectType() == ObjectType::Float32Array ||
            arrayValueType.GetObjectType() == ObjectType::Float64Array
        ) &&
        !AutoSystemInfo::Data.SSE2Available())
    {
        // Fast paths for float arrays rely on SSE2
        return false;
    }
#endif
    return !arrayValueType.IsLikelyAnyUnOptimizedArray();
}
// Load the object's internal array ([base + objectArrayOrFlags]) into a new
// register operand. The operand is cloned from baseOpnd (preserving any
// array-reg-opnd info) but given a fresh sym and an array value type.
IR::RegOpnd *Lowerer::LoadObjectArray(IR::RegOpnd *const baseOpnd, IR::Instr *const insertBeforeInstr)
{
    Assert(baseOpnd);
    Assert(
        baseOpnd->GetValueType().IsLikelyObject() &&
        baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    // mov array, [base + offsetOf(objectArrayOrFlags)]
    IR::RegOpnd *const arrayOpnd =
        baseOpnd->IsArrayRegOpnd() ? baseOpnd->AsArrayRegOpnd()->CopyAsRegOpnd(func) : baseOpnd->Copy(func)->AsRegOpnd();
    arrayOpnd->m_sym = StackSym::New(TyVar, func);
    arrayOpnd->SetValueType(arrayOpnd->GetValueType().ToArray());
    const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, func, false /* autoDelete */);
    m_lowererMD.CreateAssign(
        arrayOpnd,
        IR::IndirOpnd::New(
            baseOpnd,
            Js::DynamicObject::GetOffsetOfObjectArray(),
            arrayOpnd->GetType(),
            func),
        insertBeforeInstr);

    return arrayOpnd;
}
// Branch to isDisabledLabel when the array set-element fast path has been
// globally disabled: the optimization-overrides vtable value is reset to
// VtableInvalid when the fast path is turned off.
void
Lowerer::GenerateIsEnabledArraySetElementFastPathCheck(
    IR::LabelInstr * isDisabledLabel,
    IR::Instr * const insertBeforeInstr)
{
    InsertCompareBranch(
        this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable),
        LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableInvalid),
        Js::OpCode::BrEq_A,
        isDisabledLabel,
        insertBeforeInstr);
}
// Emits the runtime checks that prove baseOpnd refers to an array that the
// array fast paths may operate on, branching to isNotObjectLabel /
// isNotArrayLabel on failure. Returns a reg opnd holding the array pointer
// (for ObjectWithArray bases this is the loaded object-array slot, otherwise
// a copy of the base) with its value type refined to a definite object.
//
// forceFloat - we expect a native float array; if a native int array shows up
//              instead it is converted in place via a helper and then control
//              branches to isNotArrayLabel (bailout), since the conversion may
//              invalidate earlier checks.
// isStore    - compare against the set-element fast-path vtable values held in
//              the optimization overrides rather than the plain vtables, so
//              the check also fails once the store fast path is disabled.
// allowDefiniteArray - permit calling this when the base is already known to
//              be an array (normally asserted against).
IR::RegOpnd *Lowerer::GenerateArrayTest(
    IR::RegOpnd *const baseOpnd,
    IR::LabelInstr *const isNotObjectLabel,
    IR::LabelInstr *const isNotArrayLabel,
    IR::Instr *const insertBeforeInstr,
    const bool forceFloat,
    const bool isStore,
    const bool allowDefiniteArray)
{
    Assert(baseOpnd);
    const ValueType baseValueType(baseOpnd->GetValueType());
    // Shouldn't request to do an array test when it's already known to be an array, or if it's unlikely to be an array
    Assert(!baseValueType.IsAnyOptimizedArray() || allowDefiniteArray || baseValueType.IsNativeArray());
    Assert(baseValueType.IsUninitialized() || baseValueType.HasBeenObject());
    Assert(isNotObjectLabel);
    Assert(isNotArrayLabel);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    IR::RegOpnd *arrayOpnd;
    IR::AutoReuseOpnd autoReuseArrayOpnd;
    if(baseValueType.IsLikelyObject() && baseValueType.GetObjectType() == ObjectType::ObjectWithArray)
    {
        // Only DynamicObject is allowed (DynamicObject vtable is ensured) because some object types have special handling for
        // index properties - arguments object, string object, external object, etc.
        GenerateObjectTypeTest(baseOpnd, insertBeforeInstr, isNotObjectLabel);
        GenerateObjectHeaderInliningTest(baseOpnd, isNotArrayLabel, insertBeforeInstr);
        arrayOpnd = LoadObjectArray(baseOpnd, insertBeforeInstr);
        autoReuseArrayOpnd.Initialize(arrayOpnd, func, false /* autoDelete */);
        // The loaded slot may be null (no object array yet) or hold tagged flags
        // (low bit set) instead of an array pointer; both cases are not-an-array.
        // test array, array
        // je $isNotArrayLabel
        // test array, 1
        // jne $isNotArrayLabel
        InsertTestBranch(
            arrayOpnd,
            arrayOpnd,
            Js::OpCode::BrEq_A,
            isNotArrayLabel,
            insertBeforeInstr);
        InsertTestBranch(
            arrayOpnd,
            IR::IntConstOpnd::New(1, TyUint8, func, true),
            Js::OpCode::BrNeq_A,
            isNotArrayLabel,
            insertBeforeInstr);
    }
    else
    {
        if(!baseOpnd->IsNotTaggedValue())
        {
            m_lowererMD.GenerateObjectTest(baseOpnd, insertBeforeInstr, isNotObjectLabel);
        }
        arrayOpnd = baseOpnd->Copy(func)->AsRegOpnd();
        if(!baseValueType.IsLikelyAnyOptimizedArray())
        {
            // No useful profile info: assume a likely JavascriptArray that may
            // have missing values.
            arrayOpnd->SetValueType(
                ValueType::GetObject(ObjectType::Array)
                    .ToLikely()
                    .SetHasNoMissingValues(false)
                    .SetArrayTypeId(Js::TypeIds_Array));
        }
        autoReuseArrayOpnd.Initialize(arrayOpnd, func, false /* autoDelete */);
    }
    VTableValue vtableAddress = baseValueType.IsLikelyAnyOptimizedArray()
        ? GetArrayVtableAddress(baseValueType)
        : VTableValue::VtableJavascriptArray;
    // Mixed (virtual/non-virtual) typed array types must accept either vtable.
    VTableValue virtualVtableAddress = VTableValue::VtableInvalid;
    if (baseValueType.IsLikelyMixedTypedArrayType())
    {
        virtualVtableAddress = GetArrayVtableAddress(baseValueType, true);
    }
    IR::Opnd * vtableOpnd;
    IR::Opnd * vtableVirtualOpnd = nullptr;
    if (isStore &&
        (vtableAddress == VTableValue::VtableJavascriptArray ||
        baseValueType.IsLikelyNativeArray()))
    {
        // For stores, compare against the overrides-held fast-path vtable so the
        // check fails once the set-element fast path has been invalidated.
        vtableOpnd = IR::RegOpnd::New(TyMachPtr, func);
        if (baseValueType.IsLikelyNativeArray())
        {
            if (baseValueType.HasIntElements())
            {
                InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesIntArraySetElementFastPathVtable), insertBeforeInstr);
            }
            else
            {
                Assert(baseValueType.HasFloatElements());
                InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesFloatArraySetElementFastPathVtable), insertBeforeInstr);
            }
        }
        else
        {
            InsertMove(vtableOpnd, this->LoadOptimizationOverridesValueOpnd(insertBeforeInstr, OptimizationOverridesValue::OptimizationOverridesArraySetElementFastPathVtable), insertBeforeInstr);
        }
    }
    else
    {
        vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, vtableAddress);
    }
    // cmp [array], vtableAddress
    // jne $isNotArrayLabel
    if (forceFloat && baseValueType.IsLikelyNativeFloatArray())
    {
        // We expect a native float array. If we get native int instead, convert it on the spot and bail out afterward.
        const auto goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        InsertCompareBranch(
            IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
            vtableOpnd,
            Js::OpCode::BrEq_A,
            goodArrayLabel,
            insertBeforeInstr);
        IR::LabelInstr *notFloatArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        insertBeforeInstr->InsertBefore(notFloatArrayLabel);
        // Verify it really is a native int array before attempting conversion.
        if (isStore)
        {
            vtableOpnd = IR::RegOpnd::New(TyMachPtr, func);
            InsertMove(vtableOpnd, IR::MemRefOpnd::New(
                func->GetScriptContext()->optimizationOverrides.GetAddressOfIntArraySetElementFastPathVtable(),
                TyMachPtr, func), insertBeforeInstr);
        }
        else
        {
            vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableJavascriptNativeIntArray);
        }
        InsertCompareBranch(
            IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
            vtableOpnd,
            Js::OpCode::BrNeq_A,
            isNotArrayLabel,
            insertBeforeInstr);
        m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayOpnd);
        IR::Instr *helperInstr = IR::Instr::New(Js::OpCode::Call, m_func);
        insertBeforeInstr->InsertBefore(helperInstr);
        m_lowererMD.ChangeToHelperCall(helperInstr, IR::HelperIntArr_ToNativeFloatArray);
        // Branch to the (bailout) label, because converting the array may have made our array checks unsafe.
        InsertBranch(Js::OpCode::Br, isNotArrayLabel, insertBeforeInstr);
        insertBeforeInstr->InsertBefore(goodArrayLabel);
    }
    else
    {
        IR::LabelInstr* goodArrayLabel = nullptr;
        if (baseValueType.IsLikelyMixedTypedArrayType())
        {
            // Accept either the regular or the virtual typed-array vtable.
            goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableOpnd,
                Js::OpCode::BrEq_A,
                goodArrayLabel,
                insertBeforeInstr);
            Assert(virtualVtableAddress);
            vtableVirtualOpnd = LoadVTableValueOpnd(insertBeforeInstr, virtualVtableAddress);
            Assert(vtableVirtualOpnd);
            InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableVirtualOpnd,
                Js::OpCode::BrNeq_A,
                isNotArrayLabel,
                insertBeforeInstr);
            insertBeforeInstr->InsertBefore(goodArrayLabel);
        }
        else
        {
            InsertCompareBranch(
                IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                vtableOpnd,
                Js::OpCode::BrNeq_A,
                isNotArrayLabel,
                insertBeforeInstr);
        }
    }
    // The checks above have proven the object type; refine the tracked value
    // type to a definite object (and conservatively drop the no-missing-values
    // bit for JS arrays / objects-with-arrays not already known definite).
    ValueType arrayValueType(arrayOpnd->GetValueType());
    if(arrayValueType.IsLikelyArrayOrObjectWithArray() && !arrayValueType.IsObject())
    {
        arrayValueType = arrayValueType.SetHasNoMissingValues(false);
    }
    arrayValueType = arrayValueType.ToDefiniteObject();
    arrayOpnd->SetValueType(arrayValueType);
    return arrayOpnd;
}
  11382. IR::LabelInstr *Lowerer::InsertLabel(const bool isHelper, IR::Instr *const insertBeforeInstr)
  11383. {
  11384. Assert(insertBeforeInstr);
  11385. Func *const func = insertBeforeInstr->m_func;
  11386. IR::LabelInstr *const instr = IR::LabelInstr::New(Js::OpCode::Label, func, isHelper);
  11387. insertBeforeInstr->InsertBefore(instr);
  11388. return instr;
  11389. }
  11390. IR::Instr *Lowerer::InsertMoveWithBarrier(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr)
  11391. {
  11392. return Lowerer::InsertMove(dst, src, insertBeforeInstr, true);
  11393. }
  11394. IR::Instr *Lowerer::InsertMove(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr, bool generateWriteBarrier)
  11395. {
  11396. Assert(dst);
  11397. Assert(src);
  11398. Assert(insertBeforeInstr);
  11399. Func *const func = insertBeforeInstr->m_func;
  11400. if(dst->IsFloat() && src->IsConstOpnd())
  11401. {
  11402. return LoadFloatFromNonReg(src, dst, insertBeforeInstr);
  11403. }
  11404. if(TySize[dst->GetType()] < TySize[src->GetType()])
  11405. {
  11406. src = src->UseWithNewType(dst->GetType(), func);
  11407. }
  11408. IR::Instr *const instr = IR::Instr::New(Js::OpCode::Ld_A, dst, src, func);
  11409. insertBeforeInstr->InsertBefore(instr);
  11410. if (generateWriteBarrier)
  11411. {
  11412. LowererMD::ChangeToWriteBarrierAssign(instr);
  11413. }
  11414. else
  11415. {
  11416. LowererMD::ChangeToAssign(instr);
  11417. }
  11418. return instr;
  11419. }
  11420. IR::BranchInstr *Lowerer::InsertBranch(
  11421. const Js::OpCode opCode,
  11422. IR::LabelInstr *const target,
  11423. IR::Instr *const insertBeforeInstr)
  11424. {
  11425. return InsertBranch(opCode, false /* isUnsigned */, target, insertBeforeInstr);
  11426. }
  11427. IR::BranchInstr *Lowerer::InsertBranch(
  11428. const Js::OpCode opCode,
  11429. const bool isUnsigned,
  11430. IR::LabelInstr *const target,
  11431. IR::Instr *const insertBeforeInstr)
  11432. {
  11433. Assert(target);
  11434. Assert(insertBeforeInstr);
  11435. Func *const func = insertBeforeInstr->m_func;
  11436. IR::BranchInstr *const instr = IR::BranchInstr::New(opCode, target, func);
  11437. if(!instr->IsLowered())
  11438. {
  11439. if(opCode == Js::OpCode::Br)
  11440. {
  11441. instr->m_opcode = LowererMD::MDUncondBranchOpcode;
  11442. }
  11443. else if(isUnsigned)
  11444. {
  11445. instr->m_opcode = LowererMD::MDUnsignedBranchOpcode(opCode);
  11446. }
  11447. else
  11448. {
  11449. instr->m_opcode = LowererMD::MDBranchOpcode(opCode);
  11450. }
  11451. }
  11452. insertBeforeInstr->InsertBefore(instr);
  11453. return instr;
  11454. }
  11455. IR::Instr *Lowerer::InsertCompare(IR::Opnd *const src1, IR::Opnd *const src2, IR::Instr *const insertBeforeInstr)
  11456. {
  11457. Assert(src1);
  11458. Assert(!src1->IsFloat64()); // not implemented
  11459. Assert(src2);
  11460. Assert(!src2->IsFloat64()); // not implemented
  11461. Assert(!src1->IsEqual(src2));
  11462. Assert(insertBeforeInstr);
  11463. Func *const func = insertBeforeInstr->m_func;
  11464. IR::Instr *const instr = IR::Instr::New(Js::OpCode::CMP, func);
  11465. instr->SetSrc1(src1);
  11466. instr->SetSrc2(src2);
  11467. insertBeforeInstr->InsertBefore(instr);
  11468. LowererMD::Legalize(instr);
  11469. return instr;
  11470. }
  11471. IR::BranchInstr *Lowerer::InsertCompareBranch(
  11472. IR::Opnd *const compareSrc1,
  11473. IR::Opnd *const compareSrc2,
  11474. Js::OpCode branchOpCode,
  11475. IR::LabelInstr *const target,
  11476. IR::Instr *const insertBeforeInstr,
  11477. const bool ignoreNaN)
  11478. {
  11479. return InsertCompareBranch(compareSrc1, compareSrc2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr, ignoreNaN);
  11480. }
// Emits a compare of compareSrc1 against compareSrc2 followed by a conditional
// branch to target. Performs two peepholes for integer/pointer compares:
//   1. If src1 is a foldable constant and src2 is not, the sources are swapped
//      (and the branch condition mirrored) so the constant can be folded into
//      the CMP's second operand.
//   2. A compare of a register against zero is emitted as TEST reg, reg where
//      the condition permits, freeing the now-unused zero operand.
// Float64 compares are delegated to the MD float-branch lowering; ignoreNaN
// conveys whether unordered results need special handling there.
IR::BranchInstr *Lowerer::InsertCompareBranch(
    IR::Opnd *compareSrc1,
    IR::Opnd *compareSrc2,
    Js::OpCode branchOpCode,
    const bool isUnsigned,
    IR::LabelInstr *const target,
    IR::Instr *const insertBeforeInstr,
    const bool ignoreNaN)
{
    Assert(compareSrc1);
    Assert(compareSrc2);
    Func *const func = insertBeforeInstr->m_func;
    if(compareSrc1->IsFloat64())
    {
        Assert(compareSrc2->IsFloat64());
        Assert(!isUnsigned);
        IR::BranchInstr *const instr = IR::BranchInstr::New(branchOpCode, target, compareSrc1, compareSrc2, func);
        insertBeforeInstr->InsertBefore(instr);
        return LowererMD::LowerFloatCondBranch(instr, ignoreNaN);
    }
    // swapSrcsBranchOpCode is the condition that preserves semantics when the
    // two compare sources are exchanged (Eq/Neq are symmetric; Ge<->Le, Gt<->Lt).
    Js::OpCode swapSrcsBranchOpCode;
    switch(branchOpCode)
    {
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrNeq_A:
            swapSrcsBranchOpCode = branchOpCode;
            goto Common_BrEqNeqGeGtLeLt;
        case Js::OpCode::BrGe_A:
            swapSrcsBranchOpCode = Js::OpCode::BrLe_A;
            goto Common_BrEqNeqGeGtLeLt;
        case Js::OpCode::BrGt_A:
            swapSrcsBranchOpCode = Js::OpCode::BrLt_A;
            goto Common_BrEqNeqGeGtLeLt;
        case Js::OpCode::BrLe_A:
            swapSrcsBranchOpCode = Js::OpCode::BrGe_A;
            goto Common_BrEqNeqGeGtLeLt;
        case Js::OpCode::BrLt_A:
            swapSrcsBranchOpCode = Js::OpCode::BrGt_A;
            // fall through
        Common_BrEqNeqGeGtLeLt:
            // Check if src1 is a constant and src2 is not, and facilitate folding the constant into the Cmp instruction
            if( (
                    compareSrc1->IsIntConstOpnd() ||
                    (
                        compareSrc1->IsAddrOpnd() &&
                        Math::FitsInDWord(reinterpret_cast<size_t>(compareSrc1->AsAddrOpnd()->m_address))
                    )
                ) &&
                !compareSrc2->IsIntConstOpnd() &&
                !compareSrc2->IsAddrOpnd())
            {
                // Swap the sources and branch
                IR::Opnd *const tempSrc = compareSrc1;
                compareSrc1 = compareSrc2;
                compareSrc2 = tempSrc;
                branchOpCode = swapSrcsBranchOpCode;
            }
            // Check for compare with zero, to prefer using Test instead of Cmp
            // (BrGt/BrLe on zero cannot be expressed via TEST's flags, so skip).
            if( !compareSrc1->IsRegOpnd() ||
                !(
                    compareSrc2->IsIntConstOpnd() && compareSrc2->AsIntConstOpnd()->GetValue() == 0 ||
                    compareSrc2->IsAddrOpnd() && !compareSrc2->AsAddrOpnd()->m_address
                ) ||
                branchOpCode == Js::OpCode::BrGt_A || branchOpCode == Js::OpCode::BrLe_A)
            {
                goto Default;
            }
            if(branchOpCode == Js::OpCode::BrGe_A || branchOpCode == Js::OpCode::BrLt_A)
            {
                // Signed >= 0 / < 0 reduce to sign-flag branches after TEST;
                // there is no unsigned equivalent, so fall back to CMP.
                if(isUnsigned)
                {
                    goto Default;
                }
                branchOpCode = LowererMD::MDCompareWithZeroBranchOpcode(branchOpCode);
            }
            // The zero operand is dropped from the emitted code; release it if
            // nothing else references it.
            if(!compareSrc2->IsInUse())
            {
                compareSrc2->Free(func);
            }
            InsertTest(compareSrc1, compareSrc1, insertBeforeInstr);
            break;
        default:
        Default:
            InsertCompare(compareSrc1, compareSrc2, insertBeforeInstr);
            break;
    }
    return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
}
  11569. IR::Instr *Lowerer::InsertTest(IR::Opnd *const src1, IR::Opnd *const src2, IR::Instr *const insertBeforeInstr)
  11570. {
  11571. Assert(src1);
  11572. Assert(!src1->IsFloat64()); // not implemented
  11573. Assert(src2);
  11574. Assert(!src2->IsFloat64()); // not implemented
  11575. Assert(insertBeforeInstr);
  11576. Func *const func = insertBeforeInstr->m_func;
  11577. IR::Instr *const instr = IR::Instr::New(LowererMD::MDTestOpcode, func);
  11578. instr->SetSrc1(src1);
  11579. instr->SetSrc2(src2);
  11580. insertBeforeInstr->InsertBefore(instr);
  11581. LowererMD::Legalize(instr);
  11582. return instr;
  11583. }
  11584. IR::BranchInstr *Lowerer::InsertTestBranch(
  11585. IR::Opnd *const testSrc1,
  11586. IR::Opnd *const testSrc2,
  11587. const Js::OpCode branchOpCode,
  11588. IR::LabelInstr *const target,
  11589. IR::Instr *const insertBeforeInstr)
  11590. {
  11591. return InsertTestBranch(testSrc1, testSrc2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr);
  11592. }
  11593. IR::BranchInstr *Lowerer::InsertTestBranch(
  11594. IR::Opnd *const testSrc1,
  11595. IR::Opnd *const testSrc2,
  11596. const Js::OpCode branchOpCode,
  11597. const bool isUnsigned,
  11598. IR::LabelInstr *const target,
  11599. IR::Instr *const insertBeforeInstr)
  11600. {
  11601. InsertTest(testSrc1, testSrc2, insertBeforeInstr);
  11602. return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
  11603. }
  11604. IR::Instr *Lowerer::InsertAdd(
  11605. const bool needFlags,
  11606. IR::Opnd *const dst,
  11607. IR::Opnd *src1,
  11608. IR::Opnd *src2,
  11609. IR::Instr *const insertBeforeInstr)
  11610. {
  11611. Assert(dst);
  11612. Assert(src1);
  11613. Assert(src2);
  11614. Assert(insertBeforeInstr);
  11615. Func *const func = insertBeforeInstr->m_func;
  11616. if(src2->IsIntConstOpnd())
  11617. {
  11618. IR::IntConstOpnd *const intConstOpnd = src2->AsIntConstOpnd();
  11619. const IntConstType value = intConstOpnd->GetValue();
  11620. if(value < 0 && value != IntConstMin)
  11621. {
  11622. // Change (s1 = s1 + -5) into (s1 = s1 - 5)
  11623. IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
  11624. newSrc2->SetValue(-value);
  11625. return InsertSub(needFlags, dst, src1, newSrc2, insertBeforeInstr);
  11626. }
  11627. }
  11628. else if(src1->IsIntConstOpnd())
  11629. {
  11630. IR::IntConstOpnd *const intConstOpnd = src1->AsIntConstOpnd();
  11631. const IntConstType value = intConstOpnd->GetValue();
  11632. if(value < 0 && value != IntConstMin)
  11633. {
  11634. // Change (s1 = -5 + s1) into (s1 = s1 - 5)
  11635. IR::Opnd *const newSrc1 = src2;
  11636. IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
  11637. newSrc2->SetValue(-value);
  11638. return InsertSub(needFlags, dst, newSrc1, newSrc2, insertBeforeInstr);
  11639. }
  11640. }
  11641. IR::Instr *const instr = IR::Instr::New(Js::OpCode::Add_A, dst, src1, src2, func);
  11642. insertBeforeInstr->InsertBefore(instr);
  11643. LowererMD::ChangeToAdd(instr, needFlags);
  11644. LowererMD::Legalize(instr);
  11645. return instr;
  11646. }
  11647. IR::Instr *Lowerer::InsertSub(
  11648. const bool needFlags,
  11649. IR::Opnd *const dst,
  11650. IR::Opnd *src1,
  11651. IR::Opnd *src2,
  11652. IR::Instr *const insertBeforeInstr)
  11653. {
  11654. Assert(dst);
  11655. Assert(src1);
  11656. Assert(src2);
  11657. Assert(insertBeforeInstr);
  11658. Func *const func = insertBeforeInstr->m_func;
  11659. if(src2->IsIntConstOpnd())
  11660. {
  11661. IR::IntConstOpnd *const intConstOpnd = src2->AsIntConstOpnd();
  11662. const IntConstType value = intConstOpnd->GetValue();
  11663. if(value < 0 && value != IntConstMin)
  11664. {
  11665. // Change (s1 = s1 - -5) into (s1 = s1 + 5)
  11666. IR::IntConstOpnd *const newSrc2 = intConstOpnd->CopyInternal(func);
  11667. newSrc2->SetValue(-value);
  11668. return InsertAdd(needFlags, dst, src1, newSrc2, insertBeforeInstr);
  11669. }
  11670. }
  11671. IR::Instr *const instr = IR::Instr::New(Js::OpCode::Sub_A, dst, src1, src2, func);
  11672. insertBeforeInstr->InsertBefore(instr);
  11673. LowererMD::ChangeToSub(instr, needFlags);
  11674. LowererMD::Legalize(instr);
  11675. return instr;
  11676. }
  11677. IR::Instr *Lowerer::InsertLea(IR::RegOpnd *const dst, IR::Opnd *const src, IR::Instr *const insertBeforeInstr)
  11678. {
  11679. Assert(dst);
  11680. Assert(src);
  11681. Assert(src->IsIndirOpnd() || src->IsSymOpnd());
  11682. Assert(insertBeforeInstr);
  11683. Func *const func = insertBeforeInstr->m_func;
  11684. IR::Instr *const instr = IR::Instr::New(Js::OpCode::LEA, dst, src, func);
  11685. insertBeforeInstr->InsertBefore(instr);
  11686. return LowererMD::ChangeToLea(instr);
  11687. }
  11688. #if _M_X64
  11689. IR::Instr *Lowerer::InsertMoveBitCast(
  11690. IR::Opnd *const dst,
  11691. IR::Opnd *const src1,
  11692. IR::Instr *const insertBeforeInstr)
  11693. {
  11694. Assert(dst);
  11695. Assert(dst->GetType() == TyFloat64);
  11696. Assert(src1);
  11697. Assert(src1->GetType() == TyUint64);
  11698. Assert(insertBeforeInstr);
  11699. Func *const func = insertBeforeInstr->m_func;
  11700. IR::Instr *const instr = IR::Instr::New(LowererMD::MDMovUint64ToFloat64Opcode, dst, src1, func);
  11701. insertBeforeInstr->InsertBefore(instr);
  11702. LowererMD::Legalize(instr);
  11703. return instr;
  11704. }
  11705. #endif
  11706. IR::Instr *Lowerer::InsertXor(
  11707. IR::Opnd *const dst,
  11708. IR::Opnd *const src1,
  11709. IR::Opnd *const src2,
  11710. IR::Instr *const insertBeforeInstr)
  11711. {
  11712. Assert(dst);
  11713. Assert(src1);
  11714. Assert(src2);
  11715. Assert(insertBeforeInstr);
  11716. Func *const func = insertBeforeInstr->m_func;
  11717. IR::Instr *const instr = IR::Instr::New(LowererMD::MDXorOpcode, dst, src1, src2, func);
  11718. insertBeforeInstr->InsertBefore(instr);
  11719. LowererMD::Legalize(instr);
  11720. return instr;
  11721. }
  11722. IR::Instr *Lowerer::InsertAnd(
  11723. IR::Opnd *const dst,
  11724. IR::Opnd *const src1,
  11725. IR::Opnd *const src2,
  11726. IR::Instr *const insertBeforeInstr)
  11727. {
  11728. Assert(dst);
  11729. Assert(src1);
  11730. Assert(src2);
  11731. Assert(insertBeforeInstr);
  11732. Func *const func = insertBeforeInstr->m_func;
  11733. IR::Instr *const instr = IR::Instr::New(Js::OpCode::AND, dst, src1, src2, func);
  11734. insertBeforeInstr->InsertBefore(instr);
  11735. LowererMD::Legalize(instr);
  11736. return instr;
  11737. }
  11738. IR::Instr *Lowerer::InsertOr(
  11739. IR::Opnd *const dst,
  11740. IR::Opnd *const src1,
  11741. IR::Opnd *const src2,
  11742. IR::Instr *const insertBeforeInstr)
  11743. {
  11744. Assert(dst);
  11745. Assert(src1);
  11746. Assert(src2);
  11747. Assert(insertBeforeInstr);
  11748. Func *const func = insertBeforeInstr->m_func;
  11749. IR::Instr *const instr = IR::Instr::New(LowererMD::MDOrOpcode, dst, src1, src2, func);
  11750. insertBeforeInstr->InsertBefore(instr);
  11751. LowererMD::Legalize(instr);
  11752. return instr;
  11753. }
  11754. IR::Instr *Lowerer::InsertShift(
  11755. const Js::OpCode opCode,
  11756. const bool needFlags,
  11757. IR::Opnd *const dst,
  11758. IR::Opnd *const src1,
  11759. IR::Opnd *const src2,
  11760. IR::Instr *const insertBeforeInstr)
  11761. {
  11762. Assert(dst);
  11763. Assert(!dst->IsFloat64()); // not implemented
  11764. Assert(src1);
  11765. Assert(!src1->IsFloat64()); // not implemented
  11766. Assert(src2);
  11767. Assert(!src2->IsFloat64()); // not implemented
  11768. Assert(insertBeforeInstr);
  11769. Func *const func = insertBeforeInstr->m_func;
  11770. IR::Instr *const instr = IR::Instr::New(opCode, dst, src1, src2, func);
  11771. insertBeforeInstr->InsertBefore(instr);
  11772. LowererMD::ChangeToShift(instr, needFlags);
  11773. LowererMD::Legalize(instr);
  11774. return instr;
  11775. }
  11776. IR::Instr *Lowerer::InsertShiftBranch(
  11777. const Js::OpCode shiftOpCode,
  11778. IR::Opnd *const dst,
  11779. IR::Opnd *const src1,
  11780. IR::Opnd *const src2,
  11781. const Js::OpCode branchOpCode,
  11782. IR::LabelInstr *const target,
  11783. IR::Instr *const insertBeforeInstr)
  11784. {
  11785. return InsertShiftBranch(shiftOpCode, dst, src1, src2, branchOpCode, false /* isUnsigned */, target, insertBeforeInstr);
  11786. }
  11787. IR::Instr *Lowerer::InsertShiftBranch(
  11788. const Js::OpCode shiftOpCode,
  11789. IR::Opnd *const dst,
  11790. IR::Opnd *const src1,
  11791. IR::Opnd *const src2,
  11792. const Js::OpCode branchOpCode,
  11793. const bool isUnsigned,
  11794. IR::LabelInstr *const target,
  11795. IR::Instr *const insertBeforeInstr)
  11796. {
  11797. InsertShift(shiftOpCode, true /* needFlags */, dst, src1, src2, insertBeforeInstr);
  11798. return InsertBranch(branchOpCode, isUnsigned, target, insertBeforeInstr);
  11799. }
  11800. IR::Instr *Lowerer::InsertConvertFloat32ToFloat64(
  11801. IR::Opnd *const dst,
  11802. IR::Opnd *const src,
  11803. IR::Instr *const insertBeforeInstr)
  11804. {
  11805. Assert(dst);
  11806. Assert(dst->IsFloat64());
  11807. Assert(src);
  11808. Assert(src->IsFloat32());
  11809. Assert(insertBeforeInstr);
  11810. Func *const func = insertBeforeInstr->m_func;
  11811. IR::Instr *const instr = IR::Instr::New(LowererMD::MDConvertFloat32ToFloat64Opcode, dst, src, func);
  11812. insertBeforeInstr->InsertBefore(instr);
  11813. LowererMD::Legalize(instr);
  11814. return instr;
  11815. }
  11816. IR::Instr *Lowerer::InsertConvertFloat64ToFloat32(
  11817. IR::Opnd *const dst,
  11818. IR::Opnd *const src,
  11819. IR::Instr *const insertBeforeInstr)
  11820. {
  11821. Assert(dst);
  11822. Assert(dst->IsFloat32());
  11823. Assert(src);
  11824. Assert(src->IsFloat64());
  11825. Assert(insertBeforeInstr);
  11826. Func *const func = insertBeforeInstr->m_func;
  11827. IR::Instr *const instr = IR::Instr::New(LowererMD::MDConvertFloat64ToFloat32Opcode, dst, src, func);
  11828. insertBeforeInstr->InsertBefore(instr);
  11829. LowererMD::Legalize(instr);
  11830. return instr;
  11831. }
  11832. void Lowerer::InsertIncUInt8PreventOverflow(
  11833. IR::Opnd *const dst,
  11834. IR::Opnd *const src,
  11835. IR::Instr *const insertBeforeInstr,
  11836. IR::Instr * *const onOverflowInsertBeforeInstrRef)
  11837. {
  11838. LowererMD::InsertIncUInt8PreventOverflow(dst, src, insertBeforeInstr, onOverflowInsertBeforeInstrRef);
  11839. }
  11840. void Lowerer::InsertDecUInt8PreventOverflow(
  11841. IR::Opnd *const dst,
  11842. IR::Opnd *const src,
  11843. IR::Instr *const insertBeforeInstr,
  11844. IR::Instr * *const onOverflowInsertBeforeInstrRef)
  11845. {
  11846. LowererMD::InsertDecUInt8PreventOverflow(dst, src, insertBeforeInstr, onOverflowInsertBeforeInstrRef);
  11847. }
// Compares the float64 src against +0.0 and branches to target either when the
// value is zero-or-NaN (branchOnZeroOrNan == true) or when it is neither
// (branchOnZeroOrNan == false). fallthroughLabel, when provided, names the
// label control falls through to; on ARM it is used as the target of the extra
// unordered-result branch (see comments below).
void Lowerer::InsertFloatCheckForZeroOrNanBranch(
    IR::Opnd *const src,
    const bool branchOnZeroOrNan,
    IR::LabelInstr *const target,
    IR::LabelInstr *const fallthroughLabel,
    IR::Instr *const insertBeforeInstr)
{
    Assert(src);
    Assert(src->IsFloat64());
    Assert(target);
    Assert(!fallthroughLabel || fallthroughLabel != target);
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    IR::BranchInstr *const branchOnEqualOrNotEqual =
        InsertCompareBranch(
            src,
            IR::MemRefOpnd::New((double*)&(Js::JavascriptNumber::k_Zero), TyFloat64, func),
            branchOnZeroOrNan ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A,
            target,
            insertBeforeInstr,
            true /* ignoreNaN */);
    // x86/x64
    // When NaN is ignored, on x86 and x64, JE branches when equal or unordered since an unordered result sets the zero
    // flag, and JNE branches when not equal and not unordered. By comparing with zero, JE will branch when src is zero or
    // NaN, and JNE will branch when src is not zero and not NaN.
    //
    // ARM
    // When NaN is ignored, BEQ branches when equal and not unordered, and BNE branches when not equal or unordered. So,
    // when comparing src with zero, an unordered check needs to be added before the BEQ/BNE.
    branchOnEqualOrNotEqual; // satisfy the compiler
#ifdef _M_ARM
    // BVS: branch when the compare above was unordered (i.e. src is NaN). In the
    // zero-or-NaN case NaN goes to target; otherwise NaN must skip the BNE and
    // join the fallthrough path.
    InsertBranch(
        Js::OpCode::BVS,
        branchOnZeroOrNan
            ? target
            : fallthroughLabel ? fallthroughLabel : insertBeforeInstr->m_prev->GetOrCreateContinueLabel(),
        branchOnEqualOrNotEqual);
#endif
}
// Common driver for the element-access (LdElem/StElem) fast paths.
// Dispatches on the shape of indirOpnd:
//   - tagged-int base: no fast path possible, returns NULL;
//   - string-valued index on a non-typed-array base: emits the property-string
//     index fast path (sets *pIsStringIndex);
//   - otherwise: emits the integer-index fast path via
//     GenerateFastElemIIntIndexCommon.
// Returns the indir opnd to use for the actual element access, or null when no
// fast path was generated. Output flags (*pIsTypedArrayElement,
// *pIsStringIndex, *pLabelSegmentLengthIncreased) are initialized up front so
// callers can rely on them on every return path.
IR::IndirOpnd *
Lowerer::GenerateFastElemICommon(
    IR::Instr * ldElem,
    bool isStore,
    IR::IndirOpnd * indirOpnd,
    IR::LabelInstr * labelHelper,
    IR::LabelInstr * labelCantUseArray,
    IR::LabelInstr *labelFallthrough,
    bool * pIsTypedArrayElement,
    bool * pIsStringIndex,
    bool *emitBailoutRef,
    IR::LabelInstr **pLabelSegmentLengthIncreased /*= nullptr*/,
    bool checkArrayLengthOverflow /*= true*/,
    bool forceGenerateFastPath /* = false */,
    bool returnLength,
    IR::LabelInstr *bailOutLabelInstr /* = nullptr*/)
{
    *pIsTypedArrayElement = false;
    *pIsStringIndex = false;
    if(pLabelSegmentLengthIncreased)
    {
        *pLabelSegmentLengthIncreased = nullptr;
    }
    IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();
    AssertMsg(baseOpnd, "This shouldn't be NULL");
    // Caution: If making changes to the conditions under which we don't emit the typical array checks, make sure
    // the code in GlobOpt::ShouldAssumeIndirOpndHasNonNegativeIntIndex is updated accordingly. We don't want the
    // global optimizer to type specialize instructions, for which the lowerer is forced to emit unconditional
    // bailouts.
    if (baseOpnd->IsTaggedInt())
    {
        return NULL;
    }
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    if (indexOpnd)
    {
        if (indexOpnd->GetValueType().IsString())
        {
            if (!baseOpnd->GetValueType().IsLikelyOptimizedTypedArray())
            {
                // If profile data says that it's a typed array - do not generate the property string fast path as the src. could be a temp and that would cause a bug.
                *pIsTypedArrayElement = false;
                *pIsStringIndex = true;
                return m_lowererMD.GenerateFastElemIStringIndexCommon(ldElem, isStore, indirOpnd, labelHelper);
            }
            else
            {
                // There's no point in generating the int index fast path if we know the index has a string value.
                return nullptr;
            }
        }
    }
    // NOTE(review): forceGenerateFastPath is not forwarded below — a literal
    // false is passed in its position. Looks intentional (the early-outs above
    // assert !forceGenerateFastPath in the callee), but confirm against callers.
    return
        GenerateFastElemIIntIndexCommon(
            ldElem,
            isStore,
            indirOpnd,
            labelHelper,
            labelCantUseArray,
            labelFallthrough,
            pIsTypedArrayElement,
            emitBailoutRef,
            pLabelSegmentLengthIncreased,
            checkArrayLengthOverflow,
            false,
            returnLength,
            bailOutLabelInstr);
}
// Generates the common int-index fast path for an indexed element access
// (LdElemI/StElemI/InlineArrayPush/InlineArrayPop), shared by the load and
// store fast-path generators.
//
// Emits array/index type checks, head-segment loads, and bound checks, and
// returns the IndirOpnd that addresses the element slot so the caller can
// emit the actual load/store. Returns nullptr when no fast path can be
// generated (negative constant index, known-non-int index, fast path
// disabled, or a native-array store of the wrong element type) — the caller
// then falls back to the helper call.
//
// Parameters:
//   ldElem                       - the element-access instruction being lowered
//                                  (despite the name, also used for stores and
//                                  for InlineArrayPush/Pop).
//   isStore                      - true when lowering a store.
//   indirOpnd                    - [base + index] operand of the access.
//   labelHelper                  - branch target for the slow helper-call path.
//   labelCantUseArray            - branch target when the base fails the array test.
//   labelFallthrough             - label after the whole operation (used for the
//                                  typed-array store no-op path and head-segment
//                                  invalidation bailout continue).
//   pIsTypedArrayElement         - [out] set to whether the base is likely a typed array.
//   emitBailoutRef               - [out] set to true when the helper call should be
//                                  replaced by a bailout (BailOutOnArrayAccessHelperCall).
//   pLabelSegmentLengthIncreased - [out, optional] receives a label for the
//                                  (index >= length) store case; see comment below.
//   checkArrayLengthOverflow     - emit upper-bound checks (default true).
//   forceGenerateFastPath        - assert-and-generate even if heuristics say no.
//   returnLength                 - for push, store the new length (as a tagged var)
//                                  into the instruction's dst.
//   bailOutLabelInstr            - bailout label required by InlineArrayPush/Pop.
IR::IndirOpnd *
Lowerer::GenerateFastElemIIntIndexCommon(
    IR::Instr * ldElem,
    bool isStore,
    IR::IndirOpnd * indirOpnd,
    IR::LabelInstr * labelHelper,
    IR::LabelInstr * labelCantUseArray,
    IR::LabelInstr *labelFallthrough,
    bool * pIsTypedArrayElement,
    bool *emitBailoutRef,
    IR::LabelInstr **pLabelSegmentLengthIncreased,
    bool checkArrayLengthOverflow /*= true*/,
    bool forceGenerateFastPath /* = false */,
    bool returnLength,
    IR::LabelInstr *bailOutLabelInstr /* = nullptr*/)
{
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    IR::RegOpnd *baseOpnd = indirOpnd->GetBaseOpnd();

    Assert(!baseOpnd->IsTaggedInt() || (indexOpnd && indexOpnd->IsNotInt()));

    BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
    IRType indirType = TyVar;
    const ValueType baseValueType(baseOpnd->GetValueType());

    // Shape of the generated fast path:
    //
    // TEST base, AtomTag                  -- check base not tagged int
    // JNE $helper
    // if (base.GetValueType() != Array) {
    //     CMP [base], JavascriptArray::`vtable'
    //     JNE $helper
    // }
    // TEST index, 1                       -- index tagged int
    // JEQ $helper
    // if (inputIndex is not int const) {
    //     MOV index, inputIndex
    //     SAR index, Js::VarTag_Shift     -- remove atom tag
    //     JS $helper                      -- exclude negative index
    // }
    // MOV headSegment, [base + offset(head)]
    // CMP [headSegment + offset(length)], index  -- bounds check
    // if (opcode == StElemI_A) {
    //     JA $done (for typedarray, JA $toNumberHelper)
    //     CMP [headSegment + offset(size)], index  -- chunk has room?
    //     JBE $helper
    //     if (index is not int const) {
    //         LEA newLength, [index + 1]
    //     } else {
    //         newLength = index + 1
    //     }
    //     MOV [headSegment + offset(length)], newLength  -- update length on chunk
    //     CMP [base + offset(length)], newLength
    //     JAE $done
    //     MOV [base + offset(length)], newLength         -- update length on array
    //     if (length to be returned) {
    //         SHL newLength, AtomTag
    //         INC newLength
    //         MOV dst, newLength
    //     }
    //     JMP $done
    //
    //     $toNumberHelper: Call HelperOp_ConvNumber_Full
    //     JMP $done
    //     $done
    // } else {
    //     JBE $helper
    // }
    // return [headSegment + offset(elements) + index]

    // Caution: If making changes to the conditions under which we don't emit the typical array checks, make sure
    // the code in GlobOpt::ShouldAssumeIndirOpndHasNonNegativeIntIndex is updated accordingly. We don't want the
    // global optimizer to type specialize instructions, for which the lowerer is forced to emit unconditional
    // bailouts.
    bool isIndexNotInt = false;
    IntConstType value = 0;
    IR::Opnd * indexValueOpnd = nullptr;
    bool invertBoundCheckComparison = false;

    if (indirOpnd->TryGetIntConstIndexValue(true, &value, &isIndexNotInt))
    {
        if (value >= 0)
        {
            indexValueOpnd = IR::IntConstOpnd::New(value, TyUint32, this->m_func);
            invertBoundCheckComparison = true; // facilitate folding the constant index into the compare instruction
        }
        else
        {
            // If the index is a negative int constant we go directly to helper.
            Assert(!forceGenerateFastPath);
            return nullptr;
        }
    }
    else if (isIndexNotInt)
    {
        // If we know the index is not an int we go directly to helper.
        Assert(!forceGenerateFastPath);
        return nullptr;
    }

    // At this point indexValueOpnd is either NULL or contains the valueOpnd

    if(!forceGenerateFastPath && !ShouldGenerateArrayFastPath(baseOpnd, true, true, true))
    {
        return nullptr;
    }

    if(baseValueType.IsLikelyAnyOptimizedArray())
    {
        // Element size/type follow the (likely) array kind, e.g. int32 for a
        // native int array, float64 for a native float array.
        indirScale = GetArrayIndirScale(baseValueType);
        indirType = GetArrayIndirType(baseValueType);
    }

    IRType elementType = TyIllegal;
    IR::Opnd * element = nullptr;

    if(ldElem->m_opcode == Js::OpCode::InlineArrayPush)
    {
        element = ldElem->GetSrc2();
        elementType = element->GetType();
    }
    else if(isStore && ldElem->GetSrc1())
    {
        element = ldElem->GetSrc1();
        elementType = element->GetType();
    }

    Assert(isStore || (element == nullptr && elementType == TyIllegal));

    if (isStore && baseValueType.IsLikelyNativeArray() && indirType != elementType)
    {
        // We're trying to write a value of the wrong type, which should force a conversion of the array.
        // Go to the helper for that.
        return nullptr;
    }

    IR::RegOpnd *arrayOpnd = baseOpnd;
    IR::RegOpnd *headSegmentOpnd = nullptr;
    IR::Opnd *headSegmentLengthOpnd = nullptr;
    IR::AutoReuseOpnd autoReuseHeadSegmentOpnd, autoReuseHeadSegmentLengthOpnd;
    bool indexIsNonnegative = indexValueOpnd || indexOpnd->GetType() == TyUint32 || !checkArrayLengthOverflow;
    bool indexIsLessThanHeadSegmentLength = false;
    if(!baseValueType.IsAnyOptimizedArray())
    {
        arrayOpnd = GenerateArrayTest(baseOpnd, labelCantUseArray, labelCantUseArray, ldElem, true, isStore);
    }
    else
    {
        // The array type is known; reuse any hoisted head-segment /
        // head-segment-length syms and eliminated bound checks recorded on
        // the ArrayRegOpnd by the global optimizer.
        if(arrayOpnd->IsArrayRegOpnd())
        {
            IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
            if(arrayRegOpnd->HeadSegmentSym())
            {
                headSegmentOpnd = IR::RegOpnd::New(arrayRegOpnd->HeadSegmentSym(), TyMachPtr, m_func);
                DebugOnly(headSegmentOpnd->FreezeSymValue());
                autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
            }
            if(arrayRegOpnd->HeadSegmentLengthSym())
            {
                headSegmentLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->HeadSegmentLengthSym(), TyUint32, m_func);
                DebugOnly(headSegmentLengthOpnd->AsRegOpnd()->FreezeSymValue());
                autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
            }
            if (arrayRegOpnd->EliminatedLowerBoundCheck())
            {
                indexIsNonnegative = true;
            }
            if(arrayRegOpnd->EliminatedUpperBoundCheck())
            {
                indexIsLessThanHeadSegmentLength = true;
            }
        }
    }

    IR::AutoReuseOpnd autoReuseArrayOpnd;
    if(arrayOpnd->GetValueType().GetObjectType() != ObjectType::ObjectWithArray)
    {
        autoReuseArrayOpnd.Initialize(arrayOpnd, m_func);
    }
    // For ObjectWithArray bases the element storage lives in a separate object
    // array; load it lazily only on paths that actually touch the elements.
    const auto EnsureObjectArrayLoaded = [&]()
    {
        if(arrayOpnd->GetValueType().GetObjectType() != ObjectType::ObjectWithArray)
        {
            return;
        }
        arrayOpnd = LoadObjectArray(arrayOpnd, ldElem);
        autoReuseArrayOpnd.Initialize(arrayOpnd, m_func);
    };

    const bool doUpperBoundCheck = checkArrayLengthOverflow && !indexIsLessThanHeadSegmentLength;
    if(!indexValueOpnd)
    {
        indexValueOpnd =
            m_lowererMD.LoadNonnegativeIndex(
                indexOpnd,
                (
                    indexIsNonnegative
                #if !INT32VAR
                    ||
                    // On 32-bit platforms, skip the negative check since for now, the unsigned upper bound check covers it
                    doUpperBoundCheck
                #endif
                ),
                labelCantUseArray,
                labelHelper,
                ldElem);
    }

    const IR::AutoReuseOpnd autoReuseIndexValueOpnd(indexValueOpnd, m_func);

    if (baseValueType.IsLikelyTypedArray())
    {
        *pIsTypedArrayElement = true;

        if(doUpperBoundCheck)
        {
            if(!headSegmentLengthOpnd)
            {
                // (headSegmentLength = [base + offset(length)])
                // NOTE(review): Float64Array is presumably used here only as a
                // representative typed array — the length offset appears to be
                // shared across typed-array types; confirm against the runtime.
                int lengthOffset;
                lengthOffset = Js::Float64Array::GetOffsetOfLength();
                headSegmentLengthOpnd = IR::IndirOpnd::New(arrayOpnd, lengthOffset, TyUint32, m_func);
                autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
            }

            // CMP index, headSegmentLength -- upper bound check
            // (operands swapped when the constant index was folded, so the
            // caller-side branch conditions are inverted via invertBoundCheckComparison)
            if(!invertBoundCheckComparison)
            {
                InsertCompare(indexValueOpnd, headSegmentLengthOpnd, ldElem);
            }
            else
            {
                InsertCompare(headSegmentLengthOpnd, indexValueOpnd, ldElem);
            }
        }
    }
    else
    {
        *pIsTypedArrayElement = false;

        if (isStore &&
            baseValueType.IsLikelyNativeIntArray() &&
            (!element->IsIntConstOpnd() || Js::SparseArraySegment<int32>::GetMissingItem() == element->AsIntConstOpnd()->AsInt32()))
        {
            Assert(ldElem->m_opcode != Js::OpCode::InlineArrayPush || bailOutLabelInstr);

            // Check for a write of the MissingItem value.
            InsertCompareBranch(
                element,
                GetMissingItemOpnd(elementType, m_func),
                Js::OpCode::BrEq_A,
                ldElem->m_opcode == Js::OpCode::InlineArrayPush ? bailOutLabelInstr : labelCantUseArray,
                ldElem,
                true);
        }

        if(!headSegmentOpnd)
        {
            EnsureObjectArrayLoaded();

            // MOV headSegment, [base + offset(head)]
            indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, this->m_func);
            headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
            autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
            InsertMove(headSegmentOpnd, indirOpnd, ldElem);
        }

        if(doUpperBoundCheck)
        {
            if(!headSegmentLengthOpnd)
            {
                // (headSegmentLength = [headSegment + offset(length)])
                headSegmentLengthOpnd =
                    IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
                autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
            }

            // CMP index, headSegmentLength -- upper bound check
            if(!invertBoundCheckComparison)
            {
                InsertCompare(indexValueOpnd, headSegmentLengthOpnd, ldElem);
            }
            else
            {
                InsertCompare(headSegmentLengthOpnd, indexValueOpnd, ldElem);
            }
        }
    }

    // The compare above has set the flags; the three branches below consume it
    // differently depending on bailout kinds / store vs load.
    const IR::BailOutKind bailOutKind = ldElem->HasBailOutInfo() ? ldElem->GetBailOutKind() : IR::BailOutInvalid;
    if(indexIsLessThanHeadSegmentLength ||
        bailOutKind & (IR::BailOutOnArrayAccessHelperCall | IR::BailOutOnInvalidatedArrayHeadSegment))
    {
        if(bailOutKind & (IR::BailOutOnArrayAccessHelperCall | IR::BailOutOnInvalidatedArrayHeadSegment))
        {
            // The bailout must be pre-op because it will not have completed the operation
            Assert(ldElem->GetBailOutInfo()->bailOutOffset == ldElem->GetByteCodeOffset());

            // Verify other bailouts these can be combined with
            Assert(
                !(
                    bailOutKind &
                    IR::BailOutKindBits &
                    ~(
                        IR::BailOutOnArrayAccessHelperCall |
                        IR::BailOutOnInvalidatedArrayHeadSegment |
                        IR::BailOutOnInvalidatedArrayLength |
                        IR::BailOutConventionalNativeArrayAccessOnly |
                        (bailOutKind & IR::BailOutOnArrayAccessHelperCall ? IR::BailOutInvalid : IR::BailOutConvertedNativeArray)
                    )
                ));

            if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
            {
                // Omit the helper call and generate a bailout instead
                Assert(emitBailoutRef);
                *emitBailoutRef = true;
            }
        }

        if(indexIsLessThanHeadSegmentLength)
        {
            Assert(!(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment));
        }
        else
        {
            IR::LabelInstr *bailOutLabel;
            if(bailOutKind & IR::BailOutOnInvalidatedArrayHeadSegment)
            {
                Assert(isStore);

                // Lower a separate (but shared) bailout for this case, and preserve the bailout kind in the instruction if the
                // helper call is going to be generated, because the bailout kind needs to be lowered again and differently in the
                // helper call path.
                //
                // Generate:
                //     (LdElem)
                //     jmp $continue
                //   $bailOut:
                //     Bail out with IR::BailOutOnInvalidatedArrayHeadSegment
                //   $continue:
                LowerOneBailOutKind(
                    ldElem,
                    IR::BailOutOnInvalidatedArrayHeadSegment,
                    false,
                    !(bailOutKind & IR::BailOutOnArrayAccessHelperCall));
                bailOutLabel = ldElem->GetOrCreateContinueLabel(true);
                InsertBranch(Js::OpCode::Br, labelFallthrough, bailOutLabel);
            }
            else
            {
                Assert(bailOutKind & IR::BailOutOnArrayAccessHelperCall);
                bailOutLabel = labelHelper;
            }

            // Bail out if the index is outside the head segment bounds
            //     jae $bailOut
            Assert(checkArrayLengthOverflow);
            InsertBranch(
                !invertBoundCheckComparison ? Js::OpCode::BrGe_A : Js::OpCode::BrLe_A,
                true /* isUnsigned */,
                bailOutLabel,
                ldElem);
        }
    }
    else if (isStore && !baseValueType.IsLikelyTypedArray()) // #if (opcode == StElemI_A)
    {
        IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        IR::LabelInstr *labelSegmentLengthIncreased = nullptr;

        const bool isPush = ldElem->m_opcode != Js::OpCode::StElemI_A && ldElem->m_opcode != Js::OpCode::StElemI_A_Strict;

        // Put the head segment size check and length updates in a helper block since they're not the common path for StElem.
        // For push, that is the common path so keep it in a non-helper block.
        const bool isInHelperBlock = !isPush;

        if(checkArrayLengthOverflow)
        {
            if(pLabelSegmentLengthIncreased &&
                !(
                    baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues() ||
                    (ldElem->m_opcode == Js::OpCode::StElemI_A || ldElem->m_opcode == Js::OpCode::StElemI_A_Strict) &&
                        ldElem->IsProfiledInstr() && !ldElem->AsProfiledInstr()->u.stElemInfo->LikelyFillsMissingValue()
                ))
            {
                // For arrays that are not guaranteed to have no missing values, before storing to an element where
                // (index < length), the element value needs to be checked to see if it's a missing value, and if so, fall back
                // to the helper. This is done to keep the missing value tracking precise in arrays. So, create a separate label
                // for the case where the length was increased (index >= length), and pass it back to GenerateFastStElemI, which
                // will fill in the rest.
                labelSegmentLengthIncreased = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isInHelperBlock);
                *pLabelSegmentLengthIncreased = labelSegmentLengthIncreased;
            }
            else
            {
                labelSegmentLengthIncreased = labelDone;
            }

            // JB $done
            InsertBranch(
                !invertBoundCheckComparison ? Js::OpCode::BrLt_A : Js::OpCode::BrGt_A,
                true /* isUnsigned */,
                labelDone,
                ldElem);
        }

        if(isInHelperBlock)
        {
            InsertLabel(true /* isHelper */, ldElem);
        }

        EnsureObjectArrayLoaded();

        // do/while(false) lets the missing-value path 'break' past the shared
        // size check below.
        do // while(false);
        {
            if(checkArrayLengthOverflow)
            {
                if(ldElem->HasBailOutInfo() && ldElem->GetBailOutKind() & IR::BailOutOnMissingValue)
                {
                    // Need to bail out if this store would create a missing value. The store would cause a missing value to be
                    // created if (index > length && index < size). If (index >= size) we would go to helper anyway, and the bailout
                    // handling for this is done after the helper call, so just go to helper if (index > length).
                    //
                    //     jne $helper // branch for (cmp index, headSegmentLength)
                    InsertBranch(Js::OpCode::BrNeq_A, labelHelper, ldElem);
                }
                else
                {
                    // If (index < size) we will not call the helper, so the array flags must be updated to reflect that it no
                    // longer has no missing values.
                    //
                    //     jne indexGreaterThanLength // branch for (cmp index, headSegmentLength)
                    //     cmp index, [headSegment + offset(size)]
                    //     jae $helper
                    //     jmp indexLessThanSize
                    //   indexGreaterThanLength:
                    //     cmp index, [headSegment + offset(size)]
                    //     jae $helper
                    //     and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
                    //   indexLessThanSize:
                    IR::LabelInstr *const indexGreaterThanLengthLabel = InsertLabel(true /* isHelper */, ldElem);
                    IR::LabelInstr *const indexLessThanSizeLabel = InsertLabel(isInHelperBlock, ldElem);

                    //     jne indexGreaterThanLength // branch for (cmp index, headSegmentLength)
                    //     cmp index, [headSegment + offset(size)]
                    //     jae $helper
                    //     jmp indexLessThanSize
                    //   indexGreaterThanLength:
                    InsertBranch(Js::OpCode::BrNeq_A, indexGreaterThanLengthLabel, indexGreaterThanLengthLabel);
                    InsertCompareBranch(
                        indexValueOpnd,
                        IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
                        Js::OpCode::BrGe_A,
                        true /* isUnsigned */,
                        labelHelper,
                        indexGreaterThanLengthLabel);
                    InsertBranch(Js::OpCode::Br, indexLessThanSizeLabel, indexGreaterThanLengthLabel);

                    //   indexGreaterThanLength:
                    //     cmp index, [headSegment + offset(size)]
                    //     jae $helper
                    //     and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
                    //   indexLessThanSize:
                    InsertCompareBranch(
                        indexValueOpnd,
                        IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, m_func),
                        Js::OpCode::BrGe_A,
                        true /* isUnsigned */,
                        labelHelper,
                        indexLessThanSizeLabel);
                    CompileAssert(
                        static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
                        Js::DynamicObjectFlags::HasNoMissingValues);
                    InsertAnd(
                        IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
                        IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
                        IR::IntConstOpnd::New(
                            static_cast<uint8>(~Js::DynamicObjectFlags::HasNoMissingValues),
                            TyUint8,
                            m_func,
                            true),
                        indexLessThanSizeLabel);

                    // indexLessThanSize:
                    break;
                }
            }

            // CMP index, [headSegment + offset(size)]
            // JAE $helper
            indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, size), TyUint32, this->m_func);
            InsertCompareBranch(indexValueOpnd, indirOpnd, Js::OpCode::BrGe_A, true /* isUnsigned */, labelHelper, ldElem);
        } while(false);

        if(isPush)
        {
            IR::LabelInstr *const updateLengthLabel = InsertLabel(isInHelperBlock, ldElem);

            if(!doUpperBoundCheck && !headSegmentLengthOpnd)
            {
                // (headSegmentLength = [headSegment + offset(length)])
                headSegmentLengthOpnd =
                    IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
                autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
            }

            // For push, it is guaranteed that (index >= length). We already know that (index < size), but we need to check if
            // (index > length) because in that case a missing value will be created and the missing value tracking in the array
            // needs to be updated.
            //
            //     cmp index, headSegmentLength
            //     je $updateLength
            //     and [array + offsetOf(objectArrayOrFlags)], ~Js::DynamicObjectFlags::HasNoMissingValues
            //   updateLength:
            InsertCompareBranch(
                indexValueOpnd,
                headSegmentLengthOpnd,
                Js::OpCode::BrEq_A,
                updateLengthLabel,
                updateLengthLabel);
            CompileAssert(
                static_cast<Js::DynamicObjectFlags>(static_cast<uint8>(Js::DynamicObjectFlags::HasNoMissingValues)) ==
                Js::DynamicObjectFlags::HasNoMissingValues);
            InsertAnd(
                IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
                IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfArrayFlags(), TyUint8, m_func),
                IR::IntConstOpnd::New(
                    static_cast<uint8>(~Js::DynamicObjectFlags::HasNoMissingValues),
                    TyUint8,
                    m_func,
                    true),
                updateLengthLabel);
        }

        if (baseValueType.IsArrayOrObjectWithArray())
        {
            // We didn't emit an array check, but if we are going to grow the array
            // We need to go to helper if there is an ES5 array/objectarray used as prototype
            GenerateIsEnabledArraySetElementFastPathCheck(labelHelper, ldElem);
        }

        IR::Opnd *newLengthOpnd;
        IR::AutoReuseOpnd autoReuseNewLengthOpnd;
        if (indexValueOpnd->IsRegOpnd())
        {
            // LEA newLength, [index + 1]
            newLengthOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
            autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
            InsertAdd(false /* needFlags */, newLengthOpnd, indexValueOpnd, IR::IntConstOpnd::New(1, TyUint32, m_func), ldElem);
        }
        else
        {
            newLengthOpnd = IR::IntConstOpnd::New(value + 1, TyUint32, this->m_func);
            autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
        }

        // MOV [headSegment + offset(length)], newLength
        indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, length), TyUint32, this->m_func);
        InsertMove(indirOpnd, newLengthOpnd, ldElem);

        if (checkArrayLengthOverflow)
        {
            // CMP newLength, [base + offset(length)]
            // JBE $segmentLengthIncreased
            Assert(labelSegmentLengthIncreased);
            indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
            InsertCompareBranch(
                newLengthOpnd,
                indirOpnd,
                Js::OpCode::BrLe_A,
                true /* isUnsigned */,
                labelSegmentLengthIncreased,
                ldElem);

            if(!isInHelperBlock)
            {
                InsertLabel(true /* isHelper */, ldElem);
            }
        }

        // MOV [base + offset(length)], newLength
        indirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
        InsertMove(indirOpnd, newLengthOpnd, ldElem);

        if(returnLength)
        {
            if(newLengthOpnd->GetSize() != MachPtr)
            {
                newLengthOpnd = newLengthOpnd->UseWithNewType(TyMachPtr, m_func)->AsRegOpnd();
            }

            // SHL newLength, AtomTag
            // INC newLength
            this->m_lowererMD.GenerateInt32ToVarConversion(newLengthOpnd, ldElem);

            // MOV dst, newLength
            InsertMove(ldElem->GetDst(), newLengthOpnd, ldElem);
        }

        if(labelSegmentLengthIncreased && labelSegmentLengthIncreased != labelDone)
        {
            // labelSegmentLengthIncreased:
            ldElem->InsertBefore(labelSegmentLengthIncreased);
        }

        // $done
        ldElem->InsertBefore(labelDone);
    }
    else // #else
    {
        if (checkArrayLengthOverflow)
        {
            if (*pIsTypedArrayElement && isStore)
            {
                IR::LabelInstr *labelInlineSet = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

                //For positive index beyond length or negative index its essentially nop for typed array store
                InsertBranch(
                    !invertBoundCheckComparison ? Js::OpCode::BrLt_A : Js::OpCode::BrGt_A,
                    true /* isUnsigned */,
                    labelInlineSet,
                    ldElem);

                // For typed array, call ToNumber before we fallThrough.
                if (ldElem->GetSrc1()->GetType() == TyVar && !ldElem->GetSrc1()->GetValueType().IsPrimitive())
                {
                    // ToNumber can have observable side effects, so it must run
                    // even though the store itself is skipped.
                    IR::Instr *toNumberInstr = IR::Instr::New(Js::OpCode::Call, this->m_func);
                    toNumberInstr->SetSrc1(ldElem->GetSrc1());
                    ldElem->InsertBefore(toNumberInstr);

                    LowerUnaryHelperMem(toNumberInstr, IR::HelperOp_ConvNumber_Full);
                }
                InsertBranch(Js::OpCode::Br, labelFallthrough, ldElem); //Jump to fallThrough

                ldElem->InsertBefore(labelInlineSet);
            }
            else
            {
                // JAE $helper
                InsertBranch(
                    !invertBoundCheckComparison ? Js::OpCode::BrGe_A : Js::OpCode::BrLe_A,
                    true /* isUnsigned */,
                    labelHelper,
                    ldElem);
            }
        }

        EnsureObjectArrayLoaded();

        if (ldElem->m_opcode == Js::OpCode::InlineArrayPop)
        {
            Assert(!baseValueType.IsLikelyTypedArray());
            Assert(bailOutLabelInstr);

            if (indexValueOpnd->IsIntConstOpnd())
            {
                // indirOpnd = [headSegment + index + offset(elements)]
                IntConstType offset = offsetof(Js::SparseArraySegment<Js::Var>, elements) + (value << indirScale);
                // TODO: Assert(Math::FitsInDWord(offset));
                indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
            }
            else
            {
                // indirOpnd = [headSegment + offset(elements) + (index << scale)]
                indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
                indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
            }

            IR::Opnd * tmpDst = nullptr;
            IR::Opnd * dst = ldElem->GetDst();
            //Pop might not have a dst, if not don't worry about returning the last element. But we still have to
            // worry about gaps, because these force us to access the prototype chain, which may have side-effects.
            if (dst || !baseValueType.HasNoMissingValues())
            {
                if (!dst)
                {
                    dst = IR::RegOpnd::New(indirType, this->m_func);
                }
                else if (dst->AsRegOpnd()->m_sym == arrayOpnd->m_sym)
                {
                    // dst aliases the array register; stage the popped value in
                    // a temp so the later length updates still see the array.
                    tmpDst = IR::RegOpnd::New(TyVar, this->m_func);
                    dst = tmpDst;
                }

                // MOV dst, [head + offset]
                InsertMove(dst, indirOpnd, ldElem);

                //If the array has missing values, check for one
                if (!baseValueType.HasNoMissingValues())
                {
                    InsertCompareBranch(
                        dst,
                        GetMissingItemOpnd(indirType, m_func),
                        Js::OpCode::BrEq_A,
                        bailOutLabelInstr,
                        ldElem,
                        true);
                }
            }

            // MOV [head + offset], missing
            InsertMove(indirOpnd, GetMissingItemOpnd(indirType, m_func), ldElem);

            IR::Opnd *newLengthOpnd;
            IR::AutoReuseOpnd autoReuseNewLengthOpnd;
            if (indexValueOpnd->IsRegOpnd())
            {
                // LEA newLength, [index]
                newLengthOpnd = indexValueOpnd;
                autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
            }
            else
            {
                newLengthOpnd = IR::IntConstOpnd::New(value, TyUint32, this->m_func);
                autoReuseNewLengthOpnd.Initialize(newLengthOpnd, m_func);
            }

            //update segment length and array length
            // MOV [headSegment + offset(length)], newLength
            IR::IndirOpnd *lengthIndirOpnd = IR::IndirOpnd::New(headSegmentOpnd, offsetof(Js::SparseArraySegmentBase, length), TyUint32, this->m_func);
            InsertMove(lengthIndirOpnd, newLengthOpnd, ldElem);

            // MOV [base + offset(length)], newLength
            lengthIndirOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, this->m_func);
            InsertMove(lengthIndirOpnd, newLengthOpnd, ldElem);

            if (tmpDst)
            {
                // The array opnd and the destination is the same, need to move the value in the tmp dst
                // to the actual dst
                InsertMove(ldElem->GetDst(), tmpDst, ldElem);
            }

            return indirOpnd;
        }
    } // #endif

    if (baseValueType.IsLikelyTypedArray())
    {
        if(!headSegmentOpnd)
        {
            // MOV headSegment, [base + offset(arrayBuffer)]
            int bufferOffset;
            bufferOffset = Js::Float64Array::GetOffsetOfBuffer();
            indirOpnd = IR::IndirOpnd::New(arrayOpnd, bufferOffset, TyMachPtr, this->m_func);
            headSegmentOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
            autoReuseHeadSegmentOpnd.Initialize(headSegmentOpnd, m_func);
            InsertMove(headSegmentOpnd, indirOpnd, ldElem);
        }

        // indirOpnd = [headSegment + index]
        if (indexValueOpnd->IsIntConstOpnd())
        {
            IntConstType offset = (value << indirScale);
            // TODO: Assert(Math::FitsInDWord(offset));
            indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
        }
        else
        {
            indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
        }
    }
    else if (indexValueOpnd->IsIntConstOpnd())
    {
        // indirOpnd = [headSegment + index + offset(elements)]
        IntConstType offset = offsetof(Js::SparseArraySegment<Js::Var>, elements) + (value << indirScale);
        // TODO: Assert(Math::FitsInDWord(offset));
        indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, (int32)offset, indirType, this->m_func);
    }
    else
    {
        // indirOpnd = [headSegment + offset(elements) + (index << scale)]
        indirOpnd = IR::IndirOpnd::New(headSegmentOpnd, indexValueOpnd->AsRegOpnd(), indirScale, indirType, this->m_func);
        indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
    }
    return indirOpnd;
}
  12656. void
  12657. Lowerer::GenerateTypeIdCheck(Js::TypeId typeId, IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateObjectCheck)
  12658. {
  12659. if (generateObjectCheck && !opnd->IsNotTaggedValue())
  12660. {
  12661. m_lowererMD.GenerateObjectTest(opnd, insertBeforeInstr, labelFail);
  12662. }
  12663. // MOV r1, [opnd + offset(type)]
  12664. IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
  12665. const IR::AutoReuseOpnd autoReuseR1(r1, m_func);
  12666. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(opnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
  12667. InsertMove(r1, indirOpnd, insertBeforeInstr);
  12668. // CMP [r1 + offset(typeId)], typeid -- check src isString
  12669. // JNE $fail
  12670. indirOpnd = IR::IndirOpnd::New(r1, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
  12671. InsertCompareBranch(
  12672. indirOpnd,
  12673. IR::IntConstOpnd::New(typeId, TyInt32, this->m_func),
  12674. Js::OpCode::BrNeq_A,
  12675. labelFail,
  12676. insertBeforeInstr);
  12677. }
  12678. IR::RegOpnd *
  12679. Lowerer::GenerateUntagVar(IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateTagCheck)
  12680. {
  12681. if (!opnd->IsVar())
  12682. {
  12683. AssertMsg(opnd->GetSize() == 4, "This should be 32-bit wide");
  12684. return opnd;
  12685. }
  12686. return m_lowererMD.GenerateUntagVar(opnd, labelFail, insertBeforeInstr, generateTagCheck && !opnd->IsTaggedInt());
  12687. }
// Emits a zero test on 'opndSrc' that branches to 'isZeroLabel' when the
// operand is zero, and falls through otherwise.
//
//     TEST opndSrc, opndSrc
//     JEQ  $isZeroLabel
void
Lowerer::GenerateNotZeroTest( IR::Opnd * opndSrc, IR::LabelInstr * isZeroLabel, IR::Instr * insertBeforeInstr)
{
    InsertTestBranch(opndSrc, opndSrc, Js::OpCode::BrEq_A, isZeroLabel, insertBeforeInstr);
}
// Emit the inline fast path for an indexed load (ldElem) whose base is (likely) a
// JavascriptString: verify the base really is a string, bounds-check the index against
// the string length, load the 16-bit character from the string buffer, and — when the
// character is below CharStringCacheSize (7-bit ASCII) and the per-library char-string
// cache already holds a string for it — store that cached single-character string into
// ldElem's dst and jump to labelFallThru. Any failed check jumps to labelHelper.
// Returns false (emitting nothing) when profile data says the base is not likely a
// string or the index is not likely an int; returns true when the fast path was emitted.
bool
Lowerer::GenerateFastStringLdElem(IR::Instr * ldElem, IR::LabelInstr * labelHelper, IR::LabelInstr * labelFallThru)
{
    IR::IndirOpnd * indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
    IR::RegOpnd * baseOpnd = indirOpnd->GetBaseOpnd();

    // don't generate the fast path if the instance is not likely string
    if (!baseOpnd->GetValueType().IsLikelyString())
    {
        return false;
    }
    Assert(!baseOpnd->IsTaggedInt());

    IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
    // Don't generate the fast path if the index operand is not likely int
    if (indexOpnd && !indexOpnd->GetValueType().IsLikelyInt())
    {
        return false;
    }

    // Make sure the instance is a string
    Assert(!indexOpnd || !indexOpnd->IsNotInt());
    GenerateStringTest(baseOpnd, ldElem, labelHelper);

    IR::Opnd * index32CmpOpnd;
    IR::RegOpnd * bufferOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseBufferOpnd(bufferOpnd, m_func);
    IR::IndirOpnd * charIndirOpnd;
    if (indexOpnd)
    {
        // Untag the var and generate the indir into the string buffer
        // (scale 1: index is a char offset into a 2-byte-element buffer via TyUint16)
        IR::RegOpnd * index32Opnd = GenerateUntagVar(indexOpnd, labelHelper, ldElem);
        charIndirOpnd = IR::IndirOpnd::New(bufferOpnd, index32Opnd, 1, TyUint16, this->m_func);
        index32CmpOpnd = index32Opnd;
    }
    else
    {
        // Just use the offset to indirect into the string buffer
        charIndirOpnd = IR::IndirOpnd::New(bufferOpnd, indirOpnd->GetOffset() * sizeof(wchar_t), TyUint16, this->m_func);
        index32CmpOpnd = IR::IntConstOpnd::New((uint32)indirOpnd->GetOffset(), TyUint32, this->m_func);
    }

    // Check if the index is in range of the string length
    //  CMP [baseOpnd + offset(length)], indexOpnd     -- string length
    //  JBE $helper                                    -- unsigned compare, and string length are at most INT_MAX - 1
    //                                                 -- so that even if we have a negative index, this will fail
    InsertCompareBranch(IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func)
        , index32CmpOpnd, Js::OpCode::BrLe_A, true, labelHelper, ldElem);

    // Load the string buffer and make sure it is not null
    // (a null m_pszValue means the flat buffer hasn't been materialized yet)
    //  MOV bufferOpnd, [baseOpnd + offset(m_pszValue)]
    //  TEST bufferOpnd, bufferOpnd
    //  JEQ $labelHelper
    indirOpnd = IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, this->m_func);
    InsertMove(bufferOpnd, indirOpnd, ldElem);
    GenerateNotZeroTest(bufferOpnd, labelHelper, ldElem);

    // Load the character and check if it is 7bit ASCI (which we have the cache for)
    //  MOV charOpnd, [bufferOpnd + index32Opnd]
    //  CMP charOpnd, 0x80
    //  JAE $helper
    IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
    const IR::AutoReuseOpnd autoReuseCharOpnd(charOpnd, m_func);
    InsertMove(charOpnd, charIndirOpnd, ldElem);
    InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint16, this->m_func),
        Js::OpCode::BrGe_A, true, labelHelper, ldElem);

    // Load the string from the cache
    //  MOV charStringCache, <charStringCache, address>
    //  MOV stringOpnd, [charStringCache + charOpnd * 4]
    IR::RegOpnd * cacheOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseCacheOpnd(cacheOpnd, m_func);
    // The library value points at the cache object; the A-cache array must be its first
    // field so that indexing off the cache pointer reads the cached strings directly.
    Assert(Js::JavascriptLibrary::GetCharStringCacheAOffset() == Js::JavascriptLibrary::GetCharStringCacheOffset());
    InsertMove(cacheOpnd, this->LoadLibraryValueOpnd(ldElem, LibraryValue::ValueCharStringCache), ldElem);

    // Check if we have created the string or not
    // (cache slots are lazily populated; a null slot means fall back to the helper)
    //  TEST stringOpnd, stringOpnd
    //  JE $helper
    IR::RegOpnd * stringOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    const IR::AutoReuseOpnd autoReuseStringOpnd(stringOpnd, m_func);
    InsertMove(stringOpnd, IR::IndirOpnd::New(cacheOpnd, charOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyVar, this->m_func), ldElem);
    GenerateNotZeroTest(stringOpnd, labelHelper, ldElem);

    InsertMove(ldElem->GetDst(), stringOpnd, ldElem);
    InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
    return true;
}
  12770. bool
  12771. Lowerer::GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef)
  12772. {
  12773. Assert(instrIsInHelperBlockRef);
  12774. bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
  12775. instrIsInHelperBlock = false;
  12776. IR::LabelInstr * labelHelper;
  12777. IR::LabelInstr * labelFallThru;
  12778. IR::LabelInstr * labelBailOut = nullptr;
  12779. IR::LabelInstr * labelMissingNative = nullptr;
  12780. IR::Opnd *src1 = ldElem->GetSrc1();
  12781. AssertMsg(src1->IsIndirOpnd(), "Expected indirOpnd on LdElementI");
  12782. IR::IndirOpnd * indirOpnd = src1->AsIndirOpnd();
  12783. // From FastElemICommon:
  12784. // TEST base, AtomTag -- check base not tagged int
  12785. // JNE $helper
  12786. // MOV r1, [base + offset(type)] -- check base isArray
  12787. // CMP [r1 + offset(typeId)], TypeIds_Array
  12788. // JNE $helper
  12789. // TEST index, 1 -- index tagged int
  12790. // JEQ $helper
  12791. // MOV r2, index
  12792. // SAR r2, Js::VarTag_Shift -- remote atom tag
  12793. // JS $helper -- exclude negative index
  12794. // MOV r4, [base + offset(head)]
  12795. // CMP r2, [r4 + offset(length)] -- bounds check
  12796. // JAE $helper
  12797. // MOV r3, [r4 + offset(elements)]
  12798. // Generated here:
  12799. // MOV dst, [r3 + r2]
  12800. // TEST dst, dst
  12801. // JNE $fallthrough
  12802. if(ldElem->m_opcode == Js::OpCode::LdMethodElem && indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyOptimizedTypedArray())
  12803. {
  12804. // Typed arrays don't return objects, so it's not worth generating a fast path for LdMethodElem. Calling the helper also
  12805. // generates a better error message. Skip the fast path and just generate a helper call.
  12806. return true;
  12807. }
  12808. labelFallThru = ldElem->GetOrCreateContinueLabel();
  12809. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  12810. // If we know for sure (based on flow graph) we're loading from the arguments object, then ignore the (path-based) profile info.
  12811. bool isNativeArrayLoad = !ldElem->DoStackArgsOpt(this->m_func) && indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyNativeArray();
  12812. bool needMissingValueCheck = true;
  12813. bool emittedFastPath = false;
  12814. bool emitBailout = false;
  12815. if (ldElem->DoStackArgsOpt(this->m_func))
  12816. {
  12817. emittedFastPath = GenerateFastArgumentsLdElemI(ldElem, labelHelper, labelFallThru);
  12818. }
  12819. else if (GenerateFastStringLdElem(ldElem, labelHelper, labelFallThru))
  12820. {
  12821. emittedFastPath = true;
  12822. }
  12823. else
  12824. {
  12825. IR::LabelInstr * labelCantUseArray = labelHelper;
  12826. if (isNativeArrayLoad)
  12827. {
  12828. if (ldElem->GetDst()->GetType() == TyVar)
  12829. {
  12830. // Skip the fast path and just generate a helper call
  12831. return true;
  12832. }
  12833. // Specialized native array lowering for LdElem requires that it is profiled. When not profiled, GlobOpt should not
  12834. // have specialized it.
  12835. Assert(ldElem->IsProfiledInstr());
  12836. labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  12837. labelCantUseArray = labelBailOut;
  12838. }
  12839. bool isTypedArrayElement, isStringIndex;
  12840. indirOpnd =
  12841. GenerateFastElemICommon(
  12842. ldElem,
  12843. false,
  12844. src1->AsIndirOpnd(),
  12845. labelHelper,
  12846. labelCantUseArray,
  12847. labelFallThru,
  12848. &isTypedArrayElement,
  12849. &isStringIndex,
  12850. &emitBailout);
  12851. IR::Opnd *dst = ldElem->GetDst();
  12852. IRType dstType = dst->AsRegOpnd()->GetType();
  12853. // The index is negative or not int.
  12854. if (indirOpnd == nullptr)
  12855. {
  12856. Assert(!(ldElem->HasBailOutInfo() && ldElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall));
  12857. // The global optimizer should never type specialize a LdElem for which the index is not int or an integer constant
  12858. // with a negative value. This would force an unconditional bail out on the main code path.
  12859. if (dst->IsVar())
  12860. {
  12861. if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
  12862. {
  12863. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  12864. Output::Print(L"Typed Array Lowering: function: %s (%s): instr %s, not specialized by glob opt due to negative or not likely int index.\n",
  12865. this->m_func->GetJnFunction()->GetDisplayName(),
  12866. this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  12867. Js::OpCodeUtil::GetOpCodeName(ldElem->m_opcode));
  12868. Output::Flush();
  12869. }
  12870. // We must be dealing with some unconventional index value. Don't emit fast path, but go directly to helper.
  12871. emittedFastPath = false;
  12872. return true;
  12873. }
  12874. else
  12875. {
  12876. AssertMsg(false, "Global optimizer shouldn't have specialized this instruction.");
  12877. Assert(dst->IsRegOpnd());
  12878. // If global optimizer failed to notice the unconventional index and type specialized the dst,
  12879. // there is nothing to do but bail out. This could happen if global optimizer's information based
  12880. // on value tracking fails to recognize a non-integer index or a constant int index that is negative.
  12881. // The bailout below ensures that we behave correctly in retail builds even under
  12882. // these (unlikely) conditions. To satisfy the downstream code we must populate the type specialized operand
  12883. // with some made up values, even though we will unconditionally bail out here and the values will never be
  12884. // used.
  12885. IR::IntConstOpnd *constOpnd = IR::IntConstOpnd::New(0, TyInt32, this->m_func, true);
  12886. InsertMove(dst, constOpnd, ldElem);
  12887. ldElem->UnlinkSrc1();
  12888. ldElem->UnlinkDst();
  12889. GenerateBailOut(ldElem, nullptr, nullptr);
  12890. emittedFastPath = true;
  12891. return false;
  12892. }
  12893. }
  12894. const IR::AutoReuseOpnd autoReuseIndirOpnd(indirOpnd, m_func);
  12895. const ValueType baseValueType(src1->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
  12896. if (ldElem->HasBailOutInfo() &&
  12897. ldElem->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset &&
  12898. ldElem->GetBailOutInfo()->bailOutOffset <= ldElem->GetByteCodeOffset() &&
  12899. dst->IsEqual(src1->AsIndirOpnd()->GetBaseOpnd()) ||
  12900. (src1->AsIndirOpnd()->GetIndexOpnd() && dst->IsEqual(src1->AsIndirOpnd()->GetIndexOpnd())))
  12901. {
  12902. // This is a pre-op bailout where the dst is the same as one of the srcs. The dst may be trashed before bailing out,
  12903. // but since the operation will be processed again in the interpreter, src values need to be kept intact. Use a
  12904. // temporary dst until after the operation is complete.
  12905. IR::Instr *instrSink = ldElem->SinkDst(Js::OpCode::Ld_A);
  12906. // The sink instruction needs to be on the fall-through path
  12907. instrSink->Unlink();
  12908. labelFallThru->InsertAfter(instrSink);
  12909. LowererMD::ChangeToAssign(instrSink);
  12910. dst = ldElem->GetDst();
  12911. }
  12912. if (isTypedArrayElement)
  12913. {
  12914. // For typedArrays, convert the loaded element to the appropriate type
  12915. IR::RegOpnd *reg;
  12916. IR::AutoReuseOpnd autoReuseReg;
  12917. Assert(dst->IsRegOpnd());
  12918. if(indirOpnd->IsFloat())
  12919. {
  12920. AssertMsg((dstType == TyFloat64) || (dstType == TyVar), "For Float32Array LdElemI's dst should be specialized to TyFloat64 or not at all.");
  12921. if(indirOpnd->IsFloat32())
  12922. {
  12923. // MOVSS reg32.f32, indirOpnd.f32
  12924. IR::RegOpnd *reg32 = IR::RegOpnd::New(TyFloat32, this->m_func);
  12925. const IR::AutoReuseOpnd autoReuseReg32(reg32, m_func);
  12926. InsertMove(reg32, indirOpnd, ldElem);
  12927. // CVTPS2PD dst/reg.f64, reg32.f64
  12928. reg = dstType == TyFloat64 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyFloat64, this->m_func);
  12929. autoReuseReg.Initialize(reg, m_func);
  12930. InsertConvertFloat32ToFloat64(reg, reg32, ldElem);
  12931. }
  12932. else
  12933. {
  12934. Assert(indirOpnd->IsFloat64());
  12935. // MOVSD dst/reg.f64, indirOpnd.f64
  12936. reg = dstType == TyFloat64 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyFloat64, this->m_func);
  12937. autoReuseReg.Initialize(reg, m_func);
  12938. InsertMove(reg, indirOpnd, ldElem);
  12939. }
  12940. if (dstType != TyFloat64)
  12941. {
  12942. // Convert reg.f64 to var
  12943. m_lowererMD.SaveDoubleToVar(dst->AsRegOpnd(), reg, ldElem, ldElem);
  12944. }
  12945. #if FLOATVAR
  12946. // For NaNs, go to the helper to guarantee we don't have an illegal NaN
  12947. // UCOMISD reg, reg
  12948. {
  12949. IR::Instr *const instr = IR::Instr::New(Js::OpCode::UCOMISD, this->m_func);
  12950. instr->SetSrc1(reg);
  12951. instr->SetSrc2(reg);
  12952. ldElem->InsertBefore(instr);
  12953. }
  12954. // JP $helper
  12955. {
  12956. IR::Instr *const instr = IR::BranchInstr::New(Js::OpCode::JP, labelHelper, this->m_func);
  12957. ldElem->InsertBefore(instr);
  12958. }
  12959. #endif
  12960. if(dstType == TyFloat64)
  12961. {
  12962. emitBailout = true;
  12963. }
  12964. }
  12965. else
  12966. {
  12967. AssertMsg((dstType == TyInt32) || (dstType == TyVar), "For Int/UintArray LdElemI's dst should be specialized to TyInt32 or not at all.");
  12968. reg = dstType == TyInt32 ? dst->AsRegOpnd() : IR::RegOpnd::New(TyInt32, this->m_func);
  12969. const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
  12970. // Int32 and Uint32 arrays could overflow an int31, but the others can't
  12971. if (indirOpnd->GetType() != TyUint32
  12972. #if !INT32VAR
  12973. && indirOpnd->GetType() != TyInt32
  12974. #endif
  12975. )
  12976. {
  12977. reg->SetValueType(ValueType::GetTaggedInt()); // Fits as a tagged-int
  12978. }
  12979. // MOV/MOVZX/MOVSX dst/reg.int32, IndirOpnd.type
  12980. IR::Instr *const instr = InsertMove(reg, indirOpnd, ldElem);
  12981. if (dstType == TyInt32)
  12982. {
  12983. instr->dstIsTempNumber = ldElem->dstIsTempNumber;
  12984. instr->dstIsTempNumberTransferred = ldElem->dstIsTempNumberTransferred;
  12985. if (indirOpnd->GetType() == TyUint32)
  12986. {
  12987. // TEST dst, dst
  12988. // JSB $helper (bailout)
  12989. InsertCompareBranch(
  12990. reg,
  12991. IR::IntConstOpnd::New(0, TyUint32, this->m_func, /* dontEncode = */ true),
  12992. Js::OpCode::BrLt_A,
  12993. labelHelper,
  12994. ldElem);
  12995. }
  12996. emitBailout = true;
  12997. }
  12998. else
  12999. {
  13000. // MOV dst, reg
  13001. IR::Instr *const instr = IR::Instr::New(Js::OpCode::ToVar, dst, reg, this->m_func);
  13002. instr->dstIsTempNumber = ldElem->dstIsTempNumber;
  13003. instr->dstIsTempNumberTransferred = ldElem->dstIsTempNumberTransferred;
  13004. ldElem->InsertBefore(instr);
  13005. // Convert dst to var
  13006. m_lowererMD.EmitLoadVar(instr, /* isFromUint32 = */ (indirOpnd->GetType() == TyUint32));
  13007. }
  13008. }
  13009. // JMP $fallthrough
  13010. InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
  13011. emittedFastPath = true;
  13012. if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
  13013. {
  13014. char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
  13015. baseValueType.ToString(baseValueTypeStr);
  13016. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  13017. Output::Print(L"Typed Array Lowering: function: %s (%s), instr: %s, base value type: %S, %s.",
  13018. this->m_func->GetJnFunction()->GetDisplayName(),
  13019. this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  13020. Js::OpCodeUtil::GetOpCodeName(ldElem->m_opcode),
  13021. baseValueTypeStr,
  13022. (!dst->IsVar() ? L"specialized" : L"not specialized"));
  13023. Output::Print(L"\n");
  13024. Output::Flush();
  13025. }
  13026. }
  13027. else
  13028. {
  13029. // MOV dst, indirOpnd
  13030. InsertMove(dst, indirOpnd, ldElem);
  13031. // The string index fast path does not operate on index properties (we don't get a PropertyString in that case), so
  13032. // we don't need to do any further checks in that case
  13033. // For LdMethodElem, if the loaded value is a tagged number, the error message generated by the helper call is
  13034. // better than if we were to just try to call the number. Also, the call arguments need to be evaluated before
  13035. // throwing the error, so just test whether it's an object and jump to helper if it's not.
  13036. const bool needObjectTest = !isStringIndex && !isNativeArrayLoad && ldElem->m_opcode == Js::OpCode::LdMethodElem;
  13037. needMissingValueCheck =
  13038. !isStringIndex && !(baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues());
  13039. if(needMissingValueCheck)
  13040. {
  13041. // TEST dst, dst
  13042. // JEQ $helper | JNE $fallthrough
  13043. InsertCompareBranch(
  13044. dst,
  13045. GetMissingItemOpnd(dst->GetType(), m_func),
  13046. needObjectTest ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A,
  13047. needObjectTest ? labelHelper : labelFallThru,
  13048. ldElem,
  13049. true);
  13050. if (isNativeArrayLoad)
  13051. {
  13052. Assert(!needObjectTest);
  13053. Assert(labelHelper != labelBailOut);
  13054. if(ldElem->AsProfiledInstr()->u.ldElemInfo->GetElementType().HasBeenUndefined())
  13055. {
  13056. // We're going to bail out trying to load "missing value" into a type-spec'd opnd.
  13057. // Branch to a point where we'll convert the array so that we don't keep bailing here.
  13058. // (Gappy arrays are not well-suited to nativeness.)
  13059. labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  13060. InsertBranch(Js::OpCode::Br, labelMissingNative, ldElem);
  13061. }
  13062. else
  13063. {
  13064. // If the value has not been profiled to be undefined at some point, jump directly to bail out
  13065. InsertBranch(Js::OpCode::Br, labelBailOut, ldElem);
  13066. }
  13067. }
  13068. }
  13069. if(needObjectTest)
  13070. {
  13071. // GenerateObjectTest(dst)
  13072. // JIsObject $fallthrough
  13073. m_lowererMD.GenerateObjectTest(dst, ldElem, labelFallThru, true);
  13074. }
  13075. else if(!needMissingValueCheck)
  13076. {
  13077. // JMP $fallthrough
  13078. InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
  13079. }
  13080. emittedFastPath = true;
  13081. }
  13082. }
  13083. // $helper:
  13084. // bailout or caller generated helper call
  13085. // $fallthru:
  13086. if (!emittedFastPath)
  13087. {
  13088. labelHelper->isOpHelper = false;
  13089. }
  13090. ldElem->InsertBefore(labelHelper);
  13091. instrIsInHelperBlock = true;
  13092. if (isNativeArrayLoad)
  13093. {
  13094. Assert(ldElem->HasBailOutInfo());
  13095. Assert(labelHelper != labelBailOut);
  13096. // Transform the original instr:
  13097. //
  13098. // $helper:
  13099. // dst = LdElemI_A src (BailOut)
  13100. // $fallthrough:
  13101. //
  13102. // to:
  13103. //
  13104. // b $fallthru <--- we get here if we loaded a valid element directly
  13105. // $helper:
  13106. // dst = LdElemI_A src
  13107. // cmp dst, MissingItem
  13108. // bne $fallthrough
  13109. // $bailout:
  13110. // BailOut
  13111. // $fallthrough:
  13112. LowerOneBailOutKind(ldElem, IR::BailOutConventionalNativeArrayAccessOnly, instrIsInHelperBlock);
  13113. IR::Instr *const insertBeforeInstr = ldElem->m_next;
  13114. // Do missing value check on value returned from helper so that we don't have to check the index against
  13115. // array length. (We already checked it above against the segment length.)
  13116. bool hasBeenUndefined = ldElem->AsProfiledInstr()->u.ldElemInfo->GetElementType().HasBeenUndefined();
  13117. if (hasBeenUndefined)
  13118. {
  13119. if(!emitBailout)
  13120. {
  13121. if (labelMissingNative == nullptr)
  13122. {
  13123. labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
  13124. }
  13125. InsertCompareBranch(GetMissingItemOpnd(ldElem->GetDst()->GetType(), m_func), ldElem->GetDst(), Js::OpCode::BrEq_A, labelMissingNative, insertBeforeInstr, true);
  13126. }
  13127. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  13128. if(labelMissingNative)
  13129. {
  13130. // We're going to bail out on a load from a gap, but convert the array to Var first, so we don't just
  13131. // bail here over and over. Gappy arrays are not well suited to nativeness.
  13132. // NOTE: only emit this call if the profile tells us that this has happened before ("hasBeenUndefined").
  13133. // Emitting this in Navier-Stokes brutalizes the score.
  13134. insertBeforeInstr->InsertBefore(labelMissingNative);
  13135. IR::JnHelperMethod helperMethod;
  13136. indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
  13137. if (indirOpnd->GetBaseOpnd()->GetValueType().HasIntElements())
  13138. {
  13139. helperMethod = IR::HelperIntArr_ToVarArray;
  13140. }
  13141. else
  13142. {
  13143. Assert(indirOpnd->GetBaseOpnd()->GetValueType().HasFloatElements());
  13144. helperMethod = IR::HelperFloatArr_ToVarArray;
  13145. }
  13146. m_lowererMD.LoadHelperArgument(insertBeforeInstr, indirOpnd->GetBaseOpnd());
  13147. IR::Instr *instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
  13148. instrHelper->SetSrc1(IR::HelperCallOpnd::New(helperMethod, m_func));
  13149. insertBeforeInstr->InsertBefore(instrHelper);
  13150. m_lowererMD.LowerCall(instrHelper, 0);
  13151. }
  13152. }
  13153. else
  13154. {
  13155. if(!emitBailout)
  13156. {
  13157. InsertCompareBranch(GetMissingItemOpnd(ldElem->GetDst()->GetType(), m_func), ldElem->GetDst(), Js::OpCode::BrEq_A, labelBailOut, insertBeforeInstr, true);
  13158. }
  13159. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  13160. }
  13161. insertBeforeInstr->InsertBefore(labelBailOut);
  13162. }
  13163. if (emitBailout)
  13164. {
  13165. ldElem->UnlinkSrc1();
  13166. ldElem->UnlinkDst();
  13167. GenerateBailOut(ldElem, nullptr, nullptr);
  13168. }
  13169. return !emitBailout;
  13170. }
  13171. IR::Opnd *
  13172. Lowerer::GetMissingItemOpnd(IRType type, Func *func)
  13173. {
  13174. if (type == TyVar)
  13175. {
  13176. return IR::AddrOpnd::New(Js::JavascriptArray::MissingItem, IR::AddrOpndKindConstant, func, true);
  13177. }
  13178. if (type == TyInt32)
  13179. {
  13180. return IR::IntConstOpnd::New(Js::JavascriptNativeIntArray::MissingItem, TyInt32, func, true);
  13181. }
  13182. Assert(type == TyFloat64);
  13183. return IR::MemRefOpnd::New((BYTE*)&Js::JavascriptNativeFloatArray::MissingItem, TyFloat64, func);
  13184. }
  13185. bool
  13186. Lowerer::GenerateFastStElemI(IR::Instr *& stElem, bool *instrIsInHelperBlockRef)
  13187. {
  13188. Assert(instrIsInHelperBlockRef);
  13189. bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
  13190. instrIsInHelperBlock = false;
  13191. IR::LabelInstr * labelHelper;
  13192. IR::LabelInstr * labelSegmentLengthIncreased;
  13193. IR::LabelInstr * labelFallThru;
  13194. IR::LabelInstr * labelBailOut = nullptr;
  13195. IR::Opnd *dst = stElem->GetDst();
  13196. IR::IndirOpnd * indirOpnd = dst->AsIndirOpnd();
  13197. AssertMsg(dst->IsIndirOpnd(), "Expected indirOpnd on StElementI");
  13198. // From FastElemICommon:
  13199. // TEST base, AtomTag -- check base not tagged int
  13200. // JNE $helper
  13201. // MOV r1, [base + offset(type)] -- check base isArray
  13202. // CMP [r1 + offset(typeId)], TypeIds_Array
  13203. // JNE $helper
  13204. // TEST index, 1 -- index tagged int
  13205. // JEQ $helper
  13206. // MOV r2, index
  13207. // SAR r2, Js::VarTag_Shift -- remote atom tag
  13208. // JS $helper -- exclude negative index
  13209. // MOV r4, [base + offset(head)]
  13210. // CMP r2, [r4 + offset(length)] -- bounds check
  13211. // JB $done
  13212. // CMP r2, [r4 + offset(size)] -- chunk has room?
  13213. // JAE $helper
  13214. // LEA r5, [r2 + 1]
  13215. // MOV [r4 + offset(length)], r5 -- update length on chunk
  13216. // CMP r5, [base + offset(length)]
  13217. // JBE $done
  13218. // MOV [base + offset(length)], r5 -- update length on array
  13219. // $done
  13220. // LEA r3, [r4 + offset(elements)]
  13221. // Generated here.
  13222. // MOV [r3 + r2], src
  13223. labelFallThru = stElem->GetOrCreateContinueLabel();
  13224. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  13225. bool emitBailout = false;
  13226. bool isNativeArrayStore = indirOpnd->GetBaseOpnd()->GetValueType().IsLikelyNativeArray();
  13227. IR::LabelInstr * labelCantUseArray = labelHelper;
  13228. if (isNativeArrayStore)
  13229. {
  13230. if (stElem->GetSrc1()->GetType() != GetArrayIndirType(indirOpnd->GetBaseOpnd()->GetValueType()))
  13231. {
  13232. // Skip the fast path and just generate a helper call
  13233. return true;
  13234. }
  13235. if(stElem->HasBailOutInfo())
  13236. {
  13237. const IR::BailOutKind bailOutKind = stElem->GetBailOutKind();
  13238. if (bailOutKind & IR::BailOutConventionalNativeArrayAccessOnly)
  13239. {
  13240. labelBailOut = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  13241. labelCantUseArray = labelBailOut;
  13242. }
  13243. }
  13244. }
  13245. bool isTypedArrayElement, isStringIndex;
  13246. indirOpnd =
  13247. GenerateFastElemICommon(
  13248. stElem,
  13249. true,
  13250. indirOpnd,
  13251. labelHelper,
  13252. labelCantUseArray,
  13253. labelFallThru,
  13254. &isTypedArrayElement,
  13255. &isStringIndex,
  13256. &emitBailout,
  13257. &labelSegmentLengthIncreased);
  13258. IR::Opnd *src = stElem->GetSrc1();
  13259. const IR::AutoReuseOpnd autoReuseSrc(src, m_func);
  13260. // The index is negative or not int.
  13261. if (indirOpnd == nullptr)
  13262. {
  13263. Assert(!(stElem->HasBailOutInfo() && stElem->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall));
  13264. // The global optimizer should never type specialize a StElem for which we know the index is not int or is a negative
  13265. // int constant. This would result in an unconditional bailout on the main code path.
  13266. if (src->IsVar())
  13267. {
  13268. if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
  13269. {
  13270. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  13271. Output::Print(L"Typed Array Lowering: function: %s (%s): instr %s, not specialized by glob opt due to negative or not likely int index.\n",
  13272. this->m_func->GetJnFunction()->GetDisplayName(),
  13273. this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  13274. Js::OpCodeUtil::GetOpCodeName(stElem->m_opcode));
  13275. Output::Flush();
  13276. }
  13277. // We must be dealing with some atypical index value. Don't emit fast path, but go directly to helper.
  13278. return true;
  13279. }
  13280. else
  13281. {
  13282. // If global optimizer failed to notice the unconventional index and type specialized the src,
  13283. // there is nothing to do but bail out. We should never hit this code path, unless the global optimizer's conditions
  13284. // for not specializing the instruction don't match the lowerer's conditions for not emitting the array checks (see above).
  13285. // This could happen if global optimizer's information based on value tracking fails to recognize a non-integer index or
  13286. // a constant int index that is negative. The bailout below ensures that we behave correctly in retail builds even under
  13287. // these (unlikely) conditions.
  13288. AssertMsg(false, "Global optimizer shouldn't have specialized this instruction.");
  13289. stElem->UnlinkSrc1();
  13290. stElem->UnlinkDst();
  13291. GenerateBailOut(stElem, nullptr, nullptr);
  13292. return false;
  13293. }
  13294. }
  13295. const IR::AutoReuseOpnd autoReuseIndirOpnd(indirOpnd, m_func);
  13296. const ValueType baseValueType(dst->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
  13297. if (isTypedArrayElement)
  13298. {
  13299. if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->m_func) && PHASE_TRACE(Js::LowererPhase, this->m_func))
  13300. {
  13301. char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
  13302. baseValueType.ToString(baseValueTypeStr);
  13303. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  13304. Output::Print(L"Typed Array Lowering: function: %s (%s), instr: %s, base value type: %S, %s.",
  13305. this->m_func->GetJnFunction()->GetDisplayName(),
  13306. this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  13307. Js::OpCodeUtil::GetOpCodeName(stElem->m_opcode),
  13308. baseValueTypeStr,
  13309. (!src->IsVar() ? L"specialized" : L"not specialized"));
  13310. Output::Print(L"\n");
  13311. Output::Flush();
  13312. }
  13313. ObjectType objectType = baseValueType.GetObjectType();
  13314. if(indirOpnd->IsFloat())
  13315. {
  13316. if (src->GetType() == TyFloat64)
  13317. {
  13318. IR::RegOpnd *const regSrc = src->AsRegOpnd();
  13319. if (indirOpnd->IsFloat32())
  13320. {
  13321. // CVTSD2SS reg.f32, regSrc.f64 -- Convert regSrc from f64 to f32
  13322. IR::RegOpnd *const reg = IR::RegOpnd::New(TyFloat32, this->m_func);
  13323. const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
  13324. InsertConvertFloat64ToFloat32(reg, regSrc, stElem);
  13325. // MOVSS indirOpnd, reg
  13326. InsertMove(indirOpnd, reg, stElem);
  13327. }
  13328. else
  13329. {
  13330. // MOVSD indirOpnd, regSrc
  13331. InsertMove(indirOpnd, regSrc, stElem);
  13332. }
  13333. emitBailout = true;
  13334. }
  13335. else
  13336. {
  13337. Assert(src->GetType() == TyVar);
  13338. // MOV reg, src
  13339. IR::RegOpnd *const reg = IR::RegOpnd::New(TyVar, this->m_func);
  13340. const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
  13341. InsertMove(reg, src, stElem);
  13342. // Convert to float, and assign to indirOpnd
  13343. if (baseValueType.IsLikelyOptimizedVirtualTypedArray())
  13344. {
  13345. IR::RegOpnd* dstReg = IR::RegOpnd::New(indirOpnd->GetType(), this->m_func);
  13346. m_lowererMD.EmitLoadFloat(dstReg, reg, stElem);
  13347. InsertMove(indirOpnd, dstReg, stElem);
  13348. }
  13349. else
  13350. {
  13351. m_lowererMD.EmitLoadFloat(indirOpnd, reg, stElem);
  13352. }
  13353. }
  13354. }
  13355. else if (objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedVirtualArray || objectType == ObjectType::Uint8ClampedMixedArray)
  13356. {
  13357. Assert(indirOpnd->GetType() == TyUint8);
  13358. IR::RegOpnd *regSrc;
  13359. IR::AutoReuseOpnd autoReuseRegSrc;
  13360. if(src->IsRegOpnd())
  13361. {
  13362. regSrc = src->AsRegOpnd();
  13363. }
  13364. else
  13365. {
  13366. regSrc = IR::RegOpnd::New(StackSym::New(src->GetType(), m_func), src->GetType(), m_func);
  13367. autoReuseRegSrc.Initialize(regSrc, m_func);
  13368. InsertMove(regSrc, src, stElem);
  13369. }
  13370. IR::Opnd *bitMaskOpnd;
  13371. IRType srcType = regSrc->GetType();
  13372. if ((srcType == TyFloat64) || (srcType == TyInt32))
  13373. {
  13374. // if (srcType == TyInt32) {
  13375. // TEST regSrc, ~255
  13376. // JE $storeValue
  13377. // JSB $handleNegative
  13378. // MOV indirOpnd, 255
  13379. // JMP $fallThru
  13380. // $handleNegative [isHelper = false]
  13381. // MOV indirOpnd, 0
  13382. // JMP $fallThru
  13383. // $storeValue
  13384. // MOV indirOpnd, regSrc
  13385. // }
  13386. // else {
  13387. // MOVSD regTmp, regSrc
  13388. // ADDSD regTmp, 0.5
  13389. // CVTTSD2SI regOpnd, regTmp
  13390. // TEST regOpnd, ~255
  13391. // JE $storeValue
  13392. // $handleOutOfBounds [isHelper = true]
  13393. // COMISD regSrc, [&FloatZero]
  13394. // JB $handleNegative
  13395. // MOV regOpnd, 255
  13396. // JMP $storeValue
  13397. // $handleNegative [isHelper = true]
  13398. // MOV regOpnd, 0
  13399. // $storeValue
  13400. // MOV indirOpnd, regOpnd
  13401. // }
  13402. // $fallThru
  13403. IR::RegOpnd *regOpnd;
  13404. IR::AutoReuseOpnd autoReuseRegOpnd;
  13405. if (srcType == TyInt32)
  13406. {
  13407. // When srcType == TyInt32 we will never call the helper and we will never
  13408. // modify the regOpnd. Therefore, it's okay to use regSrc directly, and it
  13409. // reduces register pressure.
  13410. regOpnd = regSrc;
  13411. }
  13412. else
  13413. {
  13414. #ifdef _M_IX86
  13415. AssertMsg(AutoSystemInfo::Data.SSE2Available(), "GlobOpt shouldn't have specialized Uint8ClampedArray StElem to float64 if SSE2 is unavailable.");
  13416. #endif
  13417. regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  13418. autoReuseRegOpnd.Initialize(regOpnd, m_func);
  13419. Assert(objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedVirtualArray || objectType == ObjectType::Uint8ClampedMixedArray);
  13420. // Uint8ClampedArray follows IEEE 754 rounding rules for ties which round up
  13421. // odd integers and round down even integers. Both ties result in the nearest
  13422. // even integer value.
  13423. //
  13424. // CVTSD2SI regOpnd, regSrc
  13425. LowererMD::InsertConvertFloat64ToInt32(RoundModeHalfToEven, regOpnd, regSrc, stElem);
  13426. }
  13427. IR::LabelInstr *labelStoreValue = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, false);
  13428. #ifndef _M_ARM
  13429. // TEST regOpnd, ~255
  13430. // JE $storeValue
  13431. bitMaskOpnd = IR::IntConstOpnd::New(~255, TyInt32, this->m_func, true);
  13432. InsertTestBranch(regOpnd, bitMaskOpnd, Js::OpCode::BrEq_A, labelStoreValue, stElem);
  13433. #else // ARM
  13434. // Special case for ARM, a shift may be better
  13435. //
  13436. // ASRS tempReg, src, 8
  13437. // BEQ $inlineSet
  13438. InsertShiftBranch(
  13439. Js::OpCode::Shr_A,
  13440. IR::RegOpnd::New(TyInt32, this->m_func),
  13441. regOpnd,
  13442. IR::IntConstOpnd::New(8, TyInt8, this->m_func),
  13443. Js::OpCode::BrEq_A,
  13444. labelStoreValue,
  13445. stElem);
  13446. #endif
  13447. IR::LabelInstr *labelHandleNegative = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, srcType == TyFloat64);
  13448. if (srcType == TyInt32)
  13449. {
  13450. // JSB $handleNegativeOrOverflow
  13451. InsertBranch(
  13452. LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A),
  13453. labelHandleNegative,
  13454. stElem);
  13455. // MOV IndirOpnd.u8, 255
  13456. InsertMove(indirOpnd, IR::IntConstOpnd::New(255, TyUint8, this->m_func, true), stElem);
  13457. // JMP $fallThru
  13458. InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
  13459. // $handleNegative [isHelper = false]
  13460. stElem->InsertBefore(labelHandleNegative);
  13461. // MOV IndirOpnd.u8, 0
  13462. InsertMove(indirOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func, true), stElem);
  13463. // JMP $fallThru
  13464. InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
  13465. }
  13466. else
  13467. {
  13468. Assert(regOpnd != regSrc);
  13469. // This label is just to ensure the following code is moved to the helper block.
  13470. // $handleOutOfBounds [isHelper = true]
  13471. IR::LabelInstr *labelHandleOutOfBounds = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  13472. stElem->InsertBefore(labelHandleOutOfBounds);
  13473. // COMISD regSrc, FloatZero
  13474. // JB labelHandleNegative
  13475. IR::MemRefOpnd * zeroOpnd = IR::MemRefOpnd::New((double*)&(Js::JavascriptNumber::k_Zero), TyMachDouble, this->m_func);
  13476. InsertCompareBranch(regSrc, zeroOpnd, Js::OpCode::BrNotGe_A, labelHandleNegative, stElem);
  13477. // MOV regOpnd, 255
  13478. InsertMove(regOpnd, IR::IntConstOpnd::New(255, TyUint8, this->m_func, true), stElem);
  13479. // JMP $storeValue
  13480. InsertBranch(Js::OpCode::Br, labelStoreValue, stElem);
  13481. // $handleNegative [isHelper = true]
  13482. stElem->InsertBefore(labelHandleNegative);
  13483. // MOV regOpnd, 0
  13484. InsertMove(regOpnd, IR::IntConstOpnd::New(0, TyUint8, this->m_func, true), stElem);
  13485. }
  13486. // $storeValue
  13487. stElem->InsertBefore(labelStoreValue);
  13488. // MOV IndirOpnd.u8, regOpnd.u8
  13489. InsertMove(indirOpnd, regOpnd, stElem);
  13490. emitBailout = true;
  13491. }
  13492. else
  13493. {
  13494. Assert(srcType == TyVar);
  13495. #if INT32VAR
  13496. bitMaskOpnd = IR::AddrOpnd::New((Js::Var)~(INT_PTR)(Js::TaggedInt::ToVarUnchecked(255)), IR::AddrOpndKindConstantVar, this->m_func, true);
  13497. #else
  13498. bitMaskOpnd = IR::IntConstOpnd::New(~(INT_PTR)(Js::TaggedInt::ToVarUnchecked(255)), TyMachReg, this->m_func, true);
  13499. #endif
  13500. // Note: We are assuming that if no bits other than ~(TaggedInt(255)) are 1, that we have a tagged
  13501. // int value between 0 - 255.
  13502. // #if INT32VAR
  13503. // This works for pointers because tagged int bit can't be on, and first 64k are not valid addresses
  13504. // This works for floats because a valid float would have one of the upper 13 bits on.
  13505. // #else
  13506. // Any pointer is larger than 512 because first 64k memory is reserved by the OS
  13507. // #endif
  13508. IR::LabelInstr *labelInlineSet = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  13509. #ifndef _M_ARM
  13510. // TEST src, ~(TaggedInt(255)) -- Check for tagged int >= 255 and <= 0
  13511. // JEQ $inlineSet
  13512. InsertTestBranch(regSrc, bitMaskOpnd, Js::OpCode::BrEq_A, labelInlineSet, stElem);
  13513. #else // ARM
  13514. // Special case for ARM, a shift may be better
  13515. //
  13516. // ASRS tempReg, src, 8
  13517. // BEQ $inlineSet
  13518. InsertShiftBranch(
  13519. Js::OpCode::Shr_A,
  13520. IR::RegOpnd::New(TyInt32, this->m_func),
  13521. regSrc,
  13522. IR::IntConstOpnd::New(8, TyInt8, this->m_func),
  13523. Js::OpCode::BrEq_A,
  13524. labelInlineSet,
  13525. stElem);
  13526. #endif
  13527. // Uint8ClampedArray::DirectSetItem(array, index, value);
  13528. m_lowererMD.LoadHelperArgument(stElem, regSrc);
  13529. IR::Opnd *indexOpnd = indirOpnd->GetIndexOpnd();
  13530. if (indexOpnd == nullptr)
  13531. {
  13532. indexOpnd = IR::IntConstOpnd::New(indirOpnd->GetOffset(), TyInt32, this->m_func);
  13533. }
  13534. else
  13535. {
  13536. Assert(indirOpnd->GetOffset() == 0);
  13537. }
  13538. m_lowererMD.LoadHelperArgument(stElem, indexOpnd);
  13539. m_lowererMD.LoadHelperArgument(stElem, stElem->GetDst()->AsIndirOpnd()->GetBaseOpnd());
  13540. IR::Instr *instr = IR::Instr::New(Js::OpCode::Call, this->m_func);
  13541. Assert(objectType == ObjectType::Uint8ClampedArray || objectType == ObjectType::Uint8ClampedMixedArray || objectType == ObjectType::Uint8ClampedVirtualArray);
  13542. instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperUint8ClampedArraySetItem, this->m_func));
  13543. stElem->InsertBefore(instr);
  13544. m_lowererMD.LowerCall(instr, 0);
  13545. // JMP $fallThrough
  13546. InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
  13547. //$inlineSet
  13548. stElem->InsertBefore(labelInlineSet);
  13549. IR::RegOpnd *regOpnd;
  13550. IR::AutoReuseOpnd autoReuseRegOpnd;
  13551. #if INT32VAR
  13552. regOpnd = regSrc;
  13553. #else
  13554. // MOV r1, src
  13555. // SAR r1, 1
  13556. regOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
  13557. autoReuseRegOpnd.Initialize(regOpnd, m_func);
  13558. InsertShift(
  13559. Js::OpCode::Shr_A,
  13560. false /* needFlags */,
  13561. regOpnd,
  13562. regSrc,
  13563. IR::IntConstOpnd::New(1, TyInt8, this->m_func),
  13564. stElem);
  13565. #endif
  13566. // MOV IndirOpnd.u8, reg.u8
  13567. InsertMove(indirOpnd, regOpnd, stElem);
  13568. }
  13569. }
  13570. else
  13571. {
  13572. if (src->IsInt32())
  13573. {
  13574. // MOV indirOpnd, src
  13575. InsertMove(indirOpnd, src, stElem);
  13576. emitBailout = true;
  13577. }
  13578. else if (src->IsFloat64())
  13579. {
  13580. AssertMsg(indirOpnd->GetType() == TyUint32, "Only StElemI to Uint32Array could be specialized to float64.");
  13581. #ifdef _M_IX86
  13582. AssertMsg(AutoSystemInfo::Data.SSE2Available(), "GloOpt shouldn't have specialized Uint32Array StElemI to float64 if SSE2 is unavailable.");
  13583. #endif
  13584. IR::RegOpnd *const reg = IR::RegOpnd::New(TyInt32, this->m_func);
  13585. const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
  13586. m_lowererMD.EmitFloatToInt(reg, src, stElem);
  13587. // MOV indirOpnd, reg
  13588. InsertMove(indirOpnd, reg, stElem);
  13589. emitBailout = true;
  13590. }
  13591. else
  13592. {
  13593. Assert(src->IsVar());
  13594. if(src->IsAddrOpnd())
  13595. {
  13596. IR::AddrOpnd *const addrSrc = src->AsAddrOpnd();
  13597. Assert(addrSrc->IsVar());
  13598. Assert(Js::TaggedInt::Is(addrSrc->m_address));
  13599. // MOV indirOpnd, intValue
  13600. InsertMove(
  13601. indirOpnd,
  13602. IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(addrSrc->m_address), TyInt32, m_func),
  13603. stElem);
  13604. }
  13605. else
  13606. {
  13607. IR::RegOpnd *const regSrc = src->AsRegOpnd();
  13608. // FromVar reg, Src
  13609. IR::RegOpnd *const reg = IR::RegOpnd::New(TyInt32, this->m_func);
  13610. const IR::AutoReuseOpnd autoReuseReg(reg, m_func);
  13611. IR::Instr *const instr = IR::Instr::New(Js::OpCode::FromVar, reg, regSrc, stElem->m_func);
  13612. stElem->InsertBefore(instr);
  13613. // Convert reg to int32
  13614. // Note: ToUint32 is implemented as (uint32)ToInt32()
  13615. m_lowererMD.EmitLoadInt32(instr, true /*conversionFromObjectAllowed*/);
  13616. // MOV indirOpnd, reg
  13617. InsertMove(indirOpnd, reg, stElem);
  13618. }
  13619. }
  13620. }
  13621. }
  13622. else
  13623. {
  13624. if(labelSegmentLengthIncreased)
  13625. {
  13626. IR::Instr *const insertBeforeInstr = labelSegmentLengthIncreased->m_next;
  13627. // labelSegmentLengthIncreased:
  13628. // mov [segment + index], src
  13629. // jmp $fallThru
  13630. InsertMove(indirOpnd, src, insertBeforeInstr);
  13631. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  13632. }
  13633. if (!(isStringIndex || baseValueType.IsArrayOrObjectWithArray() && baseValueType.HasNoMissingValues()))
  13634. {
  13635. if(!stElem->IsProfiledInstr() || stElem->AsProfiledInstr()->u.stElemInfo->LikelyFillsMissingValue())
  13636. {
  13637. // Check whether the store is filling a missing value. If so, fall back to the helper so that it can check whether
  13638. // this store is filling the last missing value in the array. This is necessary to keep the missing value tracking
  13639. // in arrays precise. The check is omitted when profile data says that the store is likely to create missing values.
  13640. //
  13641. // cmp [segment + index], Js::SparseArraySegment::MissingValue
  13642. // je $helper
  13643. InsertCompareBranch(
  13644. indirOpnd,
  13645. GetMissingItemOpnd(src->GetType(), m_func),
  13646. Js::OpCode::BrEq_A,
  13647. labelHelper,
  13648. stElem,
  13649. true);
  13650. }
  13651. else
  13652. {
  13653. GenerateIsEnabledArraySetElementFastPathCheck(labelHelper, stElem);
  13654. }
  13655. }
  13656. // MOV [r3 + r2], src
  13657. InsertMoveWithBarrier(indirOpnd, src, stElem);
  13658. }
  13659. // JMP $fallThru
  13660. InsertBranch(Js::OpCode::Br, labelFallThru, stElem);
  13661. // $helper:
  13662. // bailout or caller generated helper call
  13663. // $fallThru:
  13664. stElem->InsertBefore(labelHelper);
  13665. instrIsInHelperBlock = true;
  13666. if (isNativeArrayStore && !isStringIndex)
  13667. {
  13668. Assert(stElem->HasBailOutInfo());
  13669. Assert(labelHelper != labelBailOut);
  13670. // Transform the original instr:
  13671. //
  13672. // $helper:
  13673. // dst = LdElemI_A src (BailOut)
  13674. // $fallthrough:
  13675. //
  13676. // to:
  13677. //
  13678. // $helper:
  13679. // dst = LdElemI_A src
  13680. // b $fallthrough
  13681. // $bailout:
  13682. // BailOut
  13683. // $fallthrough:
  13684. LowerOneBailOutKind(stElem, IR::BailOutConventionalNativeArrayAccessOnly, instrIsInHelperBlock);
  13685. IR::Instr *const insertBeforeInstr = stElem->m_next;
  13686. InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr);
  13687. insertBeforeInstr->InsertBefore(labelBailOut);
  13688. }
  13689. if (emitBailout)
  13690. {
  13691. stElem->UnlinkSrc1();
  13692. stElem->UnlinkDst();
  13693. GenerateBailOut(stElem, nullptr, nullptr);
  13694. }
  13695. return !emitBailout;
  13696. }
// Emits the inline fast path for LdLen ("length" property load) when the source
// is likely an array (or optimized/typed array) and/or a string. All other cases
// branch to $helper, where the caller emits the generic GetProperty call.
//
// Params:
//   ldLen                  - the LdLen instruction being lowered (src1 = object, dst = result).
//   instrIsInHelperBlockRef - out; set to true when the code following the fast
//                             path (the helper call) lives in a helper block.
// Returns:
//   true  - fast path (or none) was generated; caller must still emit the helper call.
//   false - the instruction was fully consumed here (BailOutOnIrregularLength case:
//           the slow path becomes a bailout, no helper call is needed).
bool
Lowerer::GenerateFastLdLen(IR::Instr *ldLen, bool *instrIsInHelperBlockRef)
{
    Assert(instrIsInHelperBlockRef);
    bool &instrIsInHelperBlock = *instrIsInHelperBlockRef;
    instrIsInHelperBlock = false;

    // Overall shape of the emitted code (non-INT32VAR tagging shown):
    //
    //     TEST src, AtomTag                       -- check src not tagged int
    //     JNE $helper
    //     CMP [src], JavascriptArray::`vtable'    -- check base isArray
    //     JNE $string
    //     MOV length, [src + offset(length)]      -- load array length
    //     JMP $tovar
    // $string:
    //     CMP [src + offset(type)], static_string_type  -- check src isString
    //     JNE $helper
    //     MOV length, [src + offset(length)]      -- load string length
    // $toVar:
    //     TEST length, 0xC0000000                 -- test for overflow of SHL, or negative
    //     JNE $helper
    //     SHL length, Js::VarTag_Shift            -- restore the var tag on the result
    //     INC length
    //     MOV dst, length
    //     JMP $fallthru
    // $helper:
    //     CALL GetProperty(src, length_property_id, scriptContext)
    // $fallthru:

    IR::RegOpnd * opnd = ldLen->GetSrc1()->AsRegOpnd();
    IR::RegOpnd * dst = ldLen->GetDst()->AsRegOpnd();
    // NOTE(review): 'src' is the same operand as 'opnd'; the extra AsRegOpnd()
    // and the IsRegOpnd() assert below are redundant but harmless.
    IR::RegOpnd * src = opnd->AsRegOpnd();
    const ValueType srcValueType(src->GetValueType());

    AssertMsg(src->IsRegOpnd(), "Expected regOpnd on LdLen");

    IR::LabelInstr *const labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    if (ldLen->DoStackArgsOpt(this->m_func))
    {
        // Arguments-object optimization: the length comes from the stack args count.
        GenerateFastArgumentsLdLen(ldLen, labelHelper, ldLen->GetOrCreateContinueLabel());
    }
    else
    {
        const bool arrayFastPath = ShouldGenerateArrayFastPath(src, false, true, false);

        // HasBeenString instead of IsLikelyString because it could be a merge between StringObject and String, and this
        // information about whether it's a StringObject or some other object is not available in the profile data
        const bool stringFastPath = srcValueType.IsUninitialized() || srcValueType.HasBeenString();

        if(!(arrayFastPath || stringFastPath))
        {
            // No applicable fast path; caller emits the helper call unconditionally.
            return true;
        }

        // The length field offset differs between JavascriptArray and the
        // optimized/typed array layouts.
        const int32 arrayOffsetOfLength =
            srcValueType.IsLikelyAnyOptimizedArray()
                ? GetArrayOffsetOfLength(srcValueType)
                : Js::JavascriptArray::GetOffsetOfLength();
        IR::LabelInstr *labelString = nullptr;
        IR::RegOpnd *arrayOpnd = src;
        IR::RegOpnd *arrayLengthOpnd = nullptr;
        IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
        if(arrayFastPath)
        {
            if(!srcValueType.IsAnyOptimizedArray())
            {
                if(stringFastPath)
                {
                    // If we don't have info about the src value type or its object type, the array and string fast paths are
                    // generated
                    labelString = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                }
                // Emits the array type check; non-arrays go to $string (if the string
                // fast path is also being generated) or straight to $helper.
                arrayOpnd = GenerateArrayTest(src, labelHelper, stringFastPath ? labelString : labelHelper, ldLen, false);
            }
            else if(src->IsArrayRegOpnd())
            {
                IR::ArrayRegOpnd *const arrayRegOpnd = src->AsArrayRegOpnd();
                if(arrayRegOpnd->LengthSym())
                {
                    // The length was already hoisted into a sym; reuse it instead of reloading.
                    arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), TyUint32, m_func);
                    DebugOnly(arrayLengthOpnd->FreezeSymValue());
                    autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
                }
            }
        }
        const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);

        IR::RegOpnd *lengthOpnd = nullptr;
        IR::AutoReuseOpnd autoReuseLengthOpnd;
        // Lazily allocates the uint32 register that receives the length on
        // whichever path (array or string) executes.
        const auto EnsureLengthOpnd = [&]()
        {
            if(lengthOpnd)
            {
                return;
            }
            lengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
            autoReuseLengthOpnd.Initialize(lengthOpnd, m_func);
        };

        if(arrayFastPath)
        {
            if(arrayLengthOpnd)
            {
                lengthOpnd = arrayLengthOpnd;
                autoReuseLengthOpnd.Initialize(lengthOpnd, m_func);
                Assert(!stringFastPath);
            }
            else
            {
                // MOV length, [array + offset(length)] -- Load array length
                EnsureLengthOpnd();
                IR::IndirOpnd *const indirOpnd = IR::IndirOpnd::New(arrayOpnd, arrayOffsetOfLength, TyUint32, this->m_func);
                InsertMove(lengthOpnd, indirOpnd, ldLen);
            }
        }

        if(stringFastPath)
        {
            IR::LabelInstr *labelToVar = nullptr;
            if(arrayFastPath)
            {
                // JMP $tovar -- array path skips over the string path
                labelToVar = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
                InsertBranch(Js::OpCode::Br, labelToVar, ldLen);

                // $string:
                ldLen->InsertBefore(labelString);
            }

            // CMP [src + offset(type)], static_stringtype -- check src isString
            // JNE $helper
            GenerateStringTest(src, ldLen, labelHelper, nullptr, !arrayFastPath);

            // MOV length, [src + offset(length)] -- Load string length
            EnsureLengthOpnd();
            IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(src, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func);
            InsertMove(lengthOpnd, indirOpnd, ldLen);

            if(arrayFastPath)
            {
                // $toVar:
                ldLen->InsertBefore(labelToVar);
            }
        }

        Assert(lengthOpnd);

        if(ldLen->HasBailOutInfo() && (ldLen->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnIrregularLength)
        {
            // Int-specialized dst: bail out (instead of calling the helper) when the
            // length does not fit in a non-negative int32.
            Assert(ldLen->GetBailOutKind() == IR::BailOutOnIrregularLength);
            Assert(dst->IsInt32());

            // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative
            //     test length, length
            //     js $helper
            //     mov dst, length
            //     jmp $fallthrough
            InsertCompareBranch(
                lengthOpnd,
                IR::IntConstOpnd::New(0, lengthOpnd->GetType(), m_func, true),
                Js::OpCode::BrLt_A,
                labelHelper,
                ldLen);
            InsertMove(dst, lengthOpnd, ldLen);
            InsertBranch(Js::OpCode::Br, ldLen->GetOrCreateContinueLabel(), ldLen);

            // $helper:
            //     (Bail out with IR::BailOutOnIrregularLength)
            ldLen->InsertBefore(labelHelper);
            instrIsInHelperBlock = true;
            ldLen->FreeDst();
            ldLen->FreeSrc1();
            GenerateBailOut(ldLen);
            return false;
        }

#if INT32VAR
        // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative
        //     test length, length
        //     js $helper
        InsertCompareBranch(
            lengthOpnd,
            IR::IntConstOpnd::New(0, lengthOpnd->GetType(), m_func, true),
            Js::OpCode::BrLt_A,
            labelHelper,
            ldLen);
#else
        // Since the length is an unsigned int32, verify that when interpreted as a signed int32, it is not negative.
        // Additionally, verify that the signed value's width is not greater than 31 bits, since it needs to be tagged.
        //     test length, 0xC0000000
        //     jne $helper
        InsertTestBranch(
            lengthOpnd,
            IR::IntConstOpnd::New(0xC0000000, TyUint32, this->m_func, true),
            Js::OpCode::BrNeq_A,
            labelHelper,
            ldLen);
#endif

#if INT32VAR
        // Tag the int32 length as a var:
        //     dst_32 = MOV length
        //     dst_64 = OR dst_64, Js::AtomTag_IntPtr
        Assert(dst->GetType() == TyVar);
        IR::Opnd *dst32 = dst->Copy(this->m_func);
        dst32->SetType(TyInt32);

        // This will clear the top bits.
        InsertMove(dst32, lengthOpnd, ldLen);

        m_lowererMD.GenerateInt32ToVarConversion(dst, ldLen);
#else
        // dst = SHL length, Js::VarTag_Shift -- restore the var tag on the result
        InsertShift(
            Js::OpCode::Shl_A,
            false /* needFlags */,
            dst,
            lengthOpnd,
            IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func),
            ldLen);

        // dst = ADD dst, AtomTag
        InsertAdd(
            false /* needFlags */,
            dst,
            dst,
            IR::IntConstOpnd::New(Js::AtomTag_Int32, TyUint32, m_func, true),
            ldLen);
#endif

        // JMP $fallthrough
        InsertBranch(Js::OpCode::Br, ldLen->GetOrCreateContinueLabel(), ldLen);
    }

    // $helper:
    //     (caller generates helper call)
    ldLen->InsertBefore(labelHelper);
    instrIsInHelperBlock = true;
    return true; // fast path was generated, helper call will be in a helper block
}
// Emits the surrogate-pair handling for an inlined String.prototype.codePointAt.
// On entry, the caller has already loaded the UTF-16 code unit at srcIndex into
// lowerChar (the charCodeAt fast path). This routine checks whether that unit is
// a lead surrogate followed by a trail surrogate; if so it combines the pair
// into the full code point in lowerChar, otherwise it falls through to
// $charCodeAt leaving lowerChar unchanged.
//
// Params:
//   lastInstr - instruction before which all generated code is inserted.
//   func      - function to allocate IR objects from.
//   strLength - string length in code units (TyUint32 / TyMachReg).
//   srcIndex  - index being read; a tagged var, or an AddrOpnd for a constant index.
//   lowerChar - in/out; the code unit at srcIndex, replaced by the code point on success.
//   strPtr    - pointer to the string's character buffer.
void
Lowerer::GenerateFastInlineStringCodePointAt(IR::Instr* lastInstr, Func* func, IR::Opnd *strLength, IR::Opnd *srcIndex, IR::RegOpnd *lowerChar, IR::RegOpnd *strPtr)
{
    //// Required State:
    // strLength - UInt32
    // srcIndex - TyVar if not Address
    // lowerChar - TyMachReg
    // strPtr - Addr

    //// Instructions
    // CMP [strLength], srcIndex + 1
    // JBE charCodeAt                           -- no room for a trail surrogate
    // CMP lowerChar 0xDC00
    // JGE charCodeAt
    // CMP lowerChar 0xD7FF
    // JLE charCodeAt                           -- keep only lead surrogates [D800, DBFF]
    // upperChar = MOVZX [strPtr + srcIndex + 1]
    // CMP upperChar 0xE000
    // JGE charCodeAt
    // CMP upperChar 0xDBFF
    // JLE charCodeAt                           -- keep only trail surrogates [DC00, DFFF]
    // lowerChar = SUB lowerChar - 0xD800
    // lowerChar = SHL lowerChar, 10
    // lowerChar = ADD lowerChar + upperChar
    // lowerChar = ADD lowerChar + 0x2400
    // :charCodeAt
    // :done

    // Asserts
    // Arm should change to Uint32 for the strLength
    Assert(strLength->GetType() == TyUint32 || strLength->GetType() == TyMachReg);
    Assert(srcIndex->GetType() == TyVar || srcIndex->IsAddrOpnd());
    Assert(lowerChar->GetType() == TyMachReg || lowerChar->GetType() == TyUint32);
    Assert(strPtr->IsRegOpnd());

    IR::RegOpnd *tempReg = IR::RegOpnd::New(TyMachReg, func);
    IR::LabelInstr *labelCharCodeAt = IR::LabelInstr::New(Js::OpCode::Label, func);

    IR::IndirOpnd *tempIndirOpnd;
    if (srcIndex->IsAddrOpnd())
    {
        // Constant index: bounds check and the indir offset can be computed at JIT time.
        uint32 length = Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address) + 1U;
        InsertCompareBranch(strLength, IR::IntConstOpnd::New(length, TyUint32, func), Js::OpCode::BrLe_A, true, labelCharCodeAt, lastInstr);

        tempIndirOpnd = IR::IndirOpnd::New(strPtr, (length) * sizeof(wchar_t), TyUint16, func);
    }
    else
    {
        // Untag the var index into tempReg, then form index + 1 for the trail unit.
        InsertMove(tempReg, srcIndex, lastInstr);
#if INT32VAR
        IR::Opnd * reg32Bit = tempReg->UseWithNewType(TyInt32, func);
        InsertMove(tempReg, reg32Bit, lastInstr);
        tempReg = reg32Bit->AsRegOpnd();
#else
        InsertShift(Js::OpCode::Shr_A, false, tempReg, tempReg, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, func), lastInstr);
#endif
        InsertAdd(false, tempReg, tempReg, IR::IntConstOpnd::New(1, TyInt32, func), lastInstr);
        InsertCompareBranch(strLength, tempReg, Js::OpCode::BrLe_A, true, labelCharCodeAt, lastInstr);

        if(tempReg->GetSize() != MachPtr)
        {
            tempReg = tempReg->UseWithNewType(TyMachPtr, func)->AsRegOpnd();
        }
        // Scale of 1: tempReg already counts code units; the *2 comes from... 
        // NOTE(review): indir scale here is 1 with TyUint16 — confirm the backend
        // applies the element-size scaling for this addressing form.
        tempIndirOpnd = IR::IndirOpnd::New(strPtr, tempReg, 1, TyUint16, func);
    }

    // By this point, we have added instructions before labelCharCodeAt to check for extra length required for the surrogate pair
    // The branching for that is already handled, all we have to do now is to check for correct values.

    // Validate char is in range [D800, DBFF]; otherwise just get a charCodeAt
    InsertCompareBranch(lowerChar, IR::IntConstOpnd::New(0xDC00, TyUint32, func), Js::OpCode::BrGe_A, labelCharCodeAt, lastInstr);
    InsertCompareBranch(lowerChar, IR::IntConstOpnd::New(0xD7FF, TyUint32, func), Js::OpCode::BrLe_A, labelCharCodeAt, lastInstr);

    // upperChar = MOVZX r3, [r1 + r3 * 2] -- this is the value of the upper surrogate pair char
    IR::RegOpnd *upperChar = IR::RegOpnd::New(TyInt32, func);
    InsertMove(upperChar, tempIndirOpnd, lastInstr);

    // Validate upper is in range [DC00, DFFF]; otherwise just get a charCodeAt
    InsertCompareBranch(upperChar, IR::IntConstOpnd::New(0xE000, TyUint32, func), Js::OpCode::BrGe_A, labelCharCodeAt, lastInstr);
    InsertCompareBranch(upperChar, IR::IntConstOpnd::New(0xDBFF, TyUint32, func), Js::OpCode::BrLe_A, labelCharCodeAt, lastInstr);

    // Combine: (lower - 0xD800) << 10 + upper - 0xDC00 + 0x10000; since upper is
    // added as-is, the folded constant is 0x10000 - 0xDC00 = 0x2400.
    // lowerChar = SUB lowerChar - 0xD800
    // lowerChar = SHL lowerChar, 10
    // lowerChar = ADD lowerChar + upperChar
    // lowerChar = ADD lowerChar + 0x2400
    InsertSub(false, lowerChar, lowerChar, IR::IntConstOpnd::New(0xD800, TyUint32, func), lastInstr);
    InsertShift(Js::OpCode::Shl_A, false, lowerChar, lowerChar, IR::IntConstOpnd::New(10, TyUint32, func), lastInstr);
    InsertAdd(false, lowerChar, lowerChar, upperChar, lastInstr);
    InsertAdd(false, lowerChar, lowerChar, IR::IntConstOpnd::New(0x2400, TyUint32, func), lastInstr);

    lastInstr->InsertBefore(labelCharCodeAt);
}
// Emits a fast path for an inlined String.fromCodePoint CallDirect: when the
// single argument is likely a number, untag it and look the result up in the
// char-string cache (via GenerateGetSingleCharString), falling back to the
// original helper call otherwise. Always returns true (the CallDirect remains
// as the helper path either way).
bool
Lowerer::GenerateFastInlineStringFromCodePoint(IR::Instr* instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    // ArgOut sequence feeding the CallDirect:
    //     s8.var          = StartCall      2 (0x2).i32                          #000c
    //     arg1(s9)<0>.var = ArgOut_A       s2.var, s8.var                       #0014  // implicit this, String object
    //     arg2(s10)<4>.var = ArgOut_A      s3.var, arg1(s9)<0>.var              #0018  // first argument to FromCodePoint
    //     arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized  0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var
    //     s0[LikelyTaggedInt].var = CallDirect String_FromCodePoint.u32, arg1(s11)<0>.u32  #001c
    IR::Opnd * linkOpnd = instr->GetSrc2();
    IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    linkOpnd = tmpInstr->GetSrc2();

#if DBG
    IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
    Assert(argCount == 2);
#endif

    // Walk back to the ArgOut that carries the code point argument.
    IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
    IR::Opnd *src1 = argInstr->GetSrc1();

    if (src1->GetValueType().IsLikelyNumber())
    {
        //Trying to generate this code
        //       MOV resultOpnd, dst
        //       MOV fromCharCodeIntArgOpnd, src1
        //       SAR fromCharCodeIntArgOpnd, Js::VarTag_Shift
        //       JAE $Helper
        //       CMP fromCharCodeIntArgOpnd, Js::ScriptContext::CharStringCacheSize
        //
        //       JAE $labelWCharStringCheck <
        //       MOV resultOpnd, GetCharStringCache[fromCharCodeIntArgOpnd]
        //       TST resultOpnd, resultOpnd           //Check for null
        //       JEQ $helper
        //       JMP $Done
        //
        //$labelWCharStringCheck:
        //       resultOpnd = Call HelperGetStringForCharW
        //       JMP $Done
        //$helper:

        // Use a temp for the result when dst is unusable (not a reg, or aliases src1).
        IR::RegOpnd * resultOpnd = nullptr;
        if (!instr->GetDst()->IsRegOpnd() || instr->GetDst()->IsEqual(src1))
        {
            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        }
        else
        {
            resultOpnd = instr->GetDst()->AsRegOpnd();
        }

        IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

        IR::RegOpnd * fromCodePointIntArgOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
        IR::AutoReuseOpnd autoReuseFromCodePointIntArgOpnd(fromCodePointIntArgOpnd, instr->m_func);
        InsertMove(fromCodePointIntArgOpnd, src1, instr);

        //Check for tagged int and get the untagged version.
        fromCodePointIntArgOpnd = GenerateUntagVar(fromCodePointIntArgOpnd, labelHelper, instr);

        GenerateGetSingleCharString(fromCodePointIntArgOpnd, resultOpnd, labelHelper, doneLabel, instr, true);

        instr->InsertBefore(labelHelper);
        instr->InsertAfter(doneLabel);

        // Move the original CallDirect (and its ArgOuts) under the helper label.
        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
    }
    return true;
}
// Emits a fast path for an inlined String.fromCharCode CallDirect: when the
// single argument is likely a number, untag it and look the result up in the
// char-string cache (via GenerateGetSingleCharString with isCodePoint = false),
// falling back to the original helper call otherwise. Always returns true (the
// CallDirect remains as the helper path either way).
bool
Lowerer::GenerateFastInlineStringFromCharCode(IR::Instr* instr)
{
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    // ArgOut sequence feeding the CallDirect:
    //     s8.var          = StartCall      2 (0x2).i32                          #000c
    //     arg1(s9)<0>.var = ArgOut_A       s2.var, s8.var                       #0014  // implicit this, String object
    //     arg2(s10)<4>.var = ArgOut_A      s3.var, arg1(s9)<0>.var              #0018  // first argument to FromCharCode
    //     arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized  0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var
    //     s0[LikelyTaggedInt].var = CallDirect String_FromCharCode.u32, arg1(s11)<0>.u32  #001c
    IR::Opnd * linkOpnd = instr->GetSrc2();
    IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    linkOpnd = tmpInstr->GetSrc2();

#if DBG
    IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
    Assert(argCount == 2);
#endif

    // Walk back to the ArgOut that carries the char code argument.
    IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
    IR::Opnd *src1 = argInstr->GetSrc1();

    if (src1->GetValueType().IsLikelyNumber())
    {
        //Trying to generate this code
        //       MOV resultOpnd, dst
        //       MOV fromCharCodeIntArgOpnd, src1
        //       SAR fromCharCodeIntArgOpnd, Js::VarTag_Shift
        //       JAE $Helper
        //       CMP fromCharCodeIntArgOpnd, Js::ScriptContext::CharStringCacheSize
        //
        //       JAE $labelWCharStringCheck <
        //       MOV resultOpnd, GetCharStringCache[fromCharCodeIntArgOpnd]
        //       TST resultOpnd, resultOpnd           //Check for null
        //       JEQ $helper
        //       JMP $Done
        //
        //$labelWCharStringCheck:
        //       resultOpnd = Call HelperGetStringForCharW
        //       JMP $Done
        //$helper:

        // Use a temp for the result when dst is unusable (not a reg, or aliases src1).
        IR::RegOpnd * resultOpnd = nullptr;
        if (!instr->GetDst()->IsRegOpnd() || instr->GetDst()->IsEqual(src1))
        {
            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
        }
        else
        {
            resultOpnd = instr->GetDst()->AsRegOpnd();
        }

        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

        IR::RegOpnd * fromCharCodeIntArgOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
        IR::AutoReuseOpnd autoReuseFromCharCodeIntArgOpnd(fromCharCodeIntArgOpnd, instr->m_func);
        InsertMove(fromCharCodeIntArgOpnd, src1, instr);

        //Check for tagged int and get the untagged version.
        fromCharCodeIntArgOpnd = GenerateUntagVar(fromCharCodeIntArgOpnd, labelHelper, instr);

        IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        GenerateGetSingleCharString(fromCharCodeIntArgOpnd, resultOpnd, labelHelper, doneLabel, instr, false);

        instr->InsertBefore(labelHelper);
        instr->InsertAfter(doneLabel);

        // Move the original CallDirect (and its ArgOuts) under the helper label.
        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
    }
    return true;
}
// Shared tail of the String.fromCharCode / String.fromCodePoint fast paths.
// Looks up the single-character string for charCodeOpnd in the char-string
// cache; on a direct-array hit it stores into instr's dst and jumps to
// doneLabel, on a cache miss (null entry) it jumps to labelHelper, and for
// codes outside the direct cache it calls the GetStringForChar(CodePoint)
// helper and then jumps to doneLabel.
//
// Params:
//   charCodeOpnd - untagged char code / code point (uint).
//   resultOpnd   - register receiving the string result before it is stored to dst.
//   labelHelper  - caller's slow path (original CallDirect); jumped to on cache miss.
//   doneLabel    - label after the original instruction; jumped to on success.
//   instr        - the CallDirect being lowered; generated code is inserted before it.
//   isCodePoint  - true for fromCodePoint (full code points), false for fromCharCode
//                  (argument is truncated to uint16 first).
void
Lowerer::GenerateGetSingleCharString(IR::RegOpnd * charCodeOpnd, IR::Opnd * resultOpnd, IR::LabelInstr * labelHelper, IR::LabelInstr * doneLabel, IR::Instr * instr, bool isCodePoint)
{
    //       MOV cacheReg, CharStringCache
    //       CMP charCodeOpnd, Js::ScriptContext::CharStringCacheSize
    //       JAE $labelWCharStringCheck <
    //       MOV resultOpnd, cacheReg[charCodeOpnd]
    //       TST resultOpnd, resultOpnd           //Check for null
    //       JEQ $helper
    //       JMP $Done
    //
    //$labelWCharStringCheck:
    //       Arg1 = charCodeOpnd
    //       Arg0 = cacheReg
    //       resultOpnd = Call HelperGetStringForCharW/CodePoint
    //       JMP $Done
    //$helper:

    IR::LabelInstr *labelWCharStringCheck = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    //Try to load from in CharStringCacheA
    IR::RegOpnd *cacheRegOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
    IR::AutoReuseOpnd autoReuseCacheRegOpnd(cacheRegOpnd, instr->m_func);

    // The A-cache sits at the start of the CharStringCache, so one base pointer
    // serves both the direct load and the helper call.
    Assert(Js::JavascriptLibrary::GetCharStringCacheAOffset() == Js::JavascriptLibrary::GetCharStringCacheOffset());
    InsertMove(cacheRegOpnd, this->LoadLibraryValueOpnd(instr, LibraryValue::ValueCharStringCache), instr);

    // Unsigned compare: codes >= CharStringCacheSize take the helper-call path.
    InsertCompareBranch(charCodeOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint32, this->m_func), Js::OpCode::BrGe_A, true, labelWCharStringCheck, instr);

    InsertMove(resultOpnd, IR::IndirOpnd::New(cacheRegOpnd, charCodeOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyVar, instr->m_func), instr);

    // A null cache entry means the string hasn't been created yet; fall back.
    InsertTestBranch(resultOpnd, resultOpnd, Js::OpCode::BrEq_A, labelHelper, instr);

    InsertMove(instr->GetDst(), resultOpnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);

    instr->InsertBefore(labelWCharStringCheck);

    IR::JnHelperMethod helperMethod;
    if (isCodePoint)
    {
        helperMethod = IR::HelperGetStringForCharCodePoint;
    }
    else
    {
        // fromCharCode semantics: the argument is taken modulo 2^16.
        InsertMove(charCodeOpnd, charCodeOpnd->UseWithNewType(TyUint16, instr->m_func), instr);
        helperMethod = IR::HelperGetStringForChar;
    }

    //Try to load from in CharStringCacheW or CharStringCacheCodePoint, this is a helper call.
    this->m_lowererMD.LoadHelperArgument(instr, charCodeOpnd);
    this->m_lowererMD.LoadHelperArgument(instr, cacheRegOpnd);

    IR::Instr* helperCallInstr = IR::Instr::New(Js::OpCode::Call, resultOpnd, IR::HelperCallOpnd::New(helperMethod, this->m_func), this->m_func);
    instr->InsertBefore(helperCallInstr);
    this->m_lowererMD.LowerCall(helperCallInstr, 0);

    InsertMove(instr->GetDst(), resultOpnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
}
bool
Lowerer::GenerateFastInlineGlobalObjectParseInt(IR::Instr *instr)
{
    // Fast path for the global parseInt(x): when x is already a tagged int,
    // parseInt is the identity, so copy the argument straight to the dst and
    // branch over the helper call. Non-tagged values fall to the relocated
    // helper path. Always returns true.
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    // ArgOut sequence
    // s8.var = StartCall 2 (0x2).i32 #000c
    // arg1(s9)<0>.var = ArgOut_A s2.var, s8.var #0014 //Implicit this, global object
    // arg2(s10)<4>.var = ArgOut_A s3.var, arg1(s9)<0>.var #0018 //First argument to parseInt
    // arg1(s11)<0>.u32 = ArgOut_A_InlineSpecialized 0x012C26C0 (DynamicObject).var, arg2(s10)<4>.var #
    // s0[LikelyTaggedInt].var = CallDirect GlobalObject_ParseInt.u32, arg1(s11)<0>.u32 #001c
    IR::Opnd * linkOpnd = instr->GetSrc2();
    IR::Instr * tmpInstr = Inline::GetDefInstr(linkOpnd);// linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;
    linkOpnd = tmpInstr->GetSrc2();

#if DBG
    IntConstType argCount = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
    Assert(argCount == 2);
#endif

    // Walk back to the ArgOut_A that supplies parseInt's first explicit argument.
    IR::Instr *argInstr = Inline::GetDefInstr(linkOpnd);
    Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);
    IR::Opnd *parseIntArgOpnd = argInstr->GetSrc1();

    if (parseIntArgOpnd->GetValueType().IsLikelyNumber())
    {
        //If likely int check for tagged int and set the dst
        IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

        // Only emit the runtime tagged-int test when we can't prove it statically.
        if (!parseIntArgOpnd->IsTaggedInt())
        {
            this->m_lowererMD.GenerateSmIntTest(parseIntArgOpnd, instr, labelHelper);
        }
        if (instr->GetDst())
        {
            // parseInt of a tagged int is the value itself.
            this->m_lowererMD.CreateAssign(instr->GetDst(), parseIntArgOpnd, instr);
        }
        InsertBranch(Js::OpCode::Br, doneLabel, instr);

        instr->InsertBefore(labelHelper);
        instr->InsertAfter(doneLabel);
        RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
    }
    return true;
}
void
Lowerer::GenerateFastInlineArrayPop(IR::Instr * instr)
{
    // Lowers InlineArrayPop: emits the inline pop fast path (when profitable)
    // with a helper-call fallback, plus the bailout plumbing required for
    // likely-native arrays (the runtime helper can't handle native-array
    // failure cases, so those bail out instead).
    Assert(instr->m_opcode == Js::OpCode::InlineArrayPop);

    IR::Opnd *arrayOpnd = instr->GetSrc1();

    IR::LabelInstr *bailOutLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    bool isLikelyNativeArray = arrayOpnd->GetValueType().IsLikelyNativeArray();

    if (ShouldGenerateArrayFastPath(arrayOpnd, false, false, false))
    {
        IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
        if(isLikelyNativeArray)
        {
            //We bailOut on cases like length == 0, Array Test failing cases (Runtime helper cannot handle these cases)
            GenerateFastPop(arrayOpnd, instr, labelHelper, doneLabel, bailOutLabelHelper);
        }
        else
        {
            //We jump to helper on cases like length == 0, Array Test failing cases
            GenerateFastPop(arrayOpnd, instr, labelHelper, doneLabel, labelHelper);
        }
        instr->InsertBefore(labelHelper);

        ///JMP to $doneLabel
        InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    }
    else
    {
        //We assume here that the array will be a Var array. - Runtime Helper calls assume this.
        Assert(!isLikelyNativeArray);
    }

    instr->InsertAfter(doneLabel);
    if(isLikelyNativeArray)
    {
        //Lower IR::BailOutConventionalNativeArrayAccessOnly here.
        LowerOneBailOutKind(instr, IR::BailOutConventionalNativeArrayAccessOnly, false, false);
        instr->InsertAfter(bailOutLabelHelper);
    }

    // Emit the runtime-helper call that backs the non-fast-path cases.
    GenerateHelperToArrayPopFastPath(instr, doneLabel, bailOutLabelHelper);
}
  14244. bool
  14245. Lowerer::ShouldGenerateStringReplaceFastPath(IR::Instr * callInstr, IntConstType argCount)
  14246. {
  14247. // a.replace(b,c)
  14248. // We want to emit the fast path if 'a' and 'c' are strings and 'b' is a regex
  14249. //
  14250. // argout sequence:
  14251. // arg1(s12)<0>.var = ArgOut_A s2.var, s11.var #0014 <---- a
  14252. // arg2(s13)<4>.var = ArgOut_A s3.var, arg1(s12)<0>.var #0018 <---- b
  14253. // arg3(s14)<8>.var = ArgOut_A s4.var, arg2(s13)<4>.var #001c <---- c
  14254. // s0[LikelyString].var = CallI s5[ffunc].var, arg3(s14)<8>.var #0020
  14255. IR::Opnd *linkOpnd = callInstr->GetSrc2();
  14256. Assert(argCount == 2);
  14257. while(linkOpnd->IsSymOpnd())
  14258. {
  14259. IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
  14260. StackSym *sym = src2->m_sym->AsStackSym();
  14261. Assert(sym->m_isSingleDef);
  14262. IR::Instr *argInstr = sym->m_instrDef;
  14263. Assert(argCount >= 0);
  14264. // check to see if 'a' and 'c' are likely strings
  14265. if((argCount == 2 || argCount == 0) && (!argInstr->GetSrc1()->GetValueType().IsLikelyString()))
  14266. {
  14267. return false;
  14268. }
  14269. // we want 'b' to be regex. Don't generate fastpath if it is a tagged int
  14270. if((argCount == 1) && (argInstr->GetSrc1()->IsTaggedInt()))
  14271. {
  14272. return false;
  14273. }
  14274. argCount--;
  14275. linkOpnd = argInstr->GetSrc2();
  14276. }
  14277. return true;
  14278. }
bool
Lowerer::GenerateFastReplace(IR::Opnd* strOpnd, IR::Opnd* src1, IR::Opnd* src2, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
{
    // a.replace(b,c)
    // We want to emit the fast path if 'a' and 'c' are strings and 'b' is a regex
    //
    // strOpnd --> a
    // src1 --> b
    // src2 --> c
    //
    // Emits the runtime checks (string tests on 'a'/'c', vtable check that 'b'
    // is exactly a JavascriptRegExp) followed by a direct call to the
    // RegExp_ReplaceString helper. Any failing check jumps to labelHelper.
    // Always returns true once called.
    IR::Opnd * callDst = callInstr->GetDst();

    Assert(strOpnd->GetValueType().IsLikelyString() && src2->GetValueType().IsLikelyString());

    if(!strOpnd->GetValueType().IsString())
    {
        // 'a' is only *likely* a string: materialize it into a register and
        // emit a runtime string test.
        if(!strOpnd->IsRegOpnd())
        {
            IR::RegOpnd *strOpndReg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(strOpndReg, strOpnd, insertInstr);
            strOpnd = strOpndReg;
        }
        this->GenerateStringTest(strOpnd->AsRegOpnd(), insertInstr, labelHelper);
    }

    if(!src1->IsNotTaggedValue())
    {
        // 'b' must be an object before its vtable can be inspected.
        m_lowererMD.GenerateObjectTest(src1, insertInstr, labelHelper);
    }

    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(insertInstr, VTableValue::VtableJavascriptRegExp);

    // cmp [regex], vtableAddress
    // jne $labelHelper
    if(!src1->IsRegOpnd())
    {
        IR::RegOpnd *src1Reg = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(src1Reg, src1, insertInstr);
        src1 = src1Reg;
    }
    InsertCompareBranch(
        IR::IndirOpnd::New(src1->AsRegOpnd(), 0, TyMachPtr, insertInstr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        insertInstr);

    if(!src2->GetValueType().IsString())
    {
        // Same treatment for the replacement value 'c'.
        if(!src2->IsRegOpnd())
        {
            IR::RegOpnd *src2Reg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(src2Reg, src2, insertInstr);
            src2 = src2Reg;
        }
        this->GenerateStringTest(src2->AsRegOpnd(), insertInstr, labelHelper);
    }

    //scriptContext, pRegEx, pThis, pReplace (to be pushed in reverse order)

    // pReplace, pThis, pRegEx
    this->m_lowererMD.LoadHelperArgument(insertInstr, src2);
    this->m_lowererMD.LoadHelperArgument(insertInstr, strOpnd);
    this->m_lowererMD.LoadHelperArgument(insertInstr, src1);

    // script context
    LoadScriptContext(insertInstr);

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, insertInstr->m_func);
    if(callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    insertInstr->InsertBefore(helperCallInstr);

    // Pick the helper variant based on whether the call's result is consumed.
    if(callDst)
    {
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperRegExp_ReplaceStringResultUsed);
    }
    else
    {
        m_lowererMD.ChangeToHelperCall(helperCallInstr, IR::JnHelperMethod::HelperRegExp_ReplaceStringResultNotUsed);
    }

    return true;
}
  14352. ///----
void
Lowerer::GenerateFastInlineStringSplitMatch(IR::Instr * instr)
{
    // a.split(b,c (optional) )
    // We want to emit the fast path when
    // 1. c is not present, and
    // 2. 'a' is a string and 'b' is a regex.
    //
    // a.match(b)
    // We want to emit the fast path when 'a' is a string and 'b' is a regex.
    //
    // Emits the runtime checks (string test on 'a', vtable check on 'b'), then
    // a direct call to the matching RegExp_Split/Match helper; any failed
    // check falls to the relocated CallDirect helper path.
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    IR::Opnd * callDst = instr->GetDst();

    //helperCallOpnd
    IR::Opnd * src1 = instr->GetSrc1();

    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = instr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    IR::Opnd * argsOpnd[2];
    if(!instr->FetchOperands(argsOpnd, 2))
    {
        // Not the two-operand form; no fast path.
        return;
    }

    // argsOpnd[0] is 'a' (this string), argsOpnd[1] is 'b' (the regex).
    if(!argsOpnd[0]->GetValueType().IsLikelyString() || argsOpnd[1]->IsTaggedInt())
    {
        return;
    }

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if(!argsOpnd[0]->GetValueType().IsString())
    {
        // Only *likely* a string: emit a runtime string test.
        if(!argsOpnd[0]->IsRegOpnd())
        {
            IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(opndReg, argsOpnd[0], instr);
            argsOpnd[0] = opndReg;
        }
        this->GenerateStringTest(argsOpnd[0]->AsRegOpnd(), instr, labelHelper);
    }

    if(!argsOpnd[1]->IsNotTaggedValue())
    {
        // 'b' must be an object before its vtable can be inspected.
        m_lowererMD.GenerateObjectTest(argsOpnd[1], instr, labelHelper);
    }

    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);

    // cmp [regex], vtableAddress
    // jne $labelHelper
    if(!argsOpnd[1]->IsRegOpnd())
    {
        IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(opndReg, argsOpnd[1], instr);
        argsOpnd[1] = opndReg;
    }
    InsertCompareBranch(
        IR::IndirOpnd::New(argsOpnd[1]->AsRegOpnd(), 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        instr);

    // [stackAllocationPointer, ]scriptcontext, regexp, input[, limit] (to be pushed in reverse order)
    if(src1->AsHelperCallOpnd()->m_fnHelper == IR::JnHelperMethod::HelperString_Split)
    {
        //limit
        //As we are optimizing only for two operands, make limit UINT_MAX
        IR::Opnd* limit = IR::IntConstOpnd::New(UINT_MAX, TyUint32, instr->m_func);
        this->m_lowererMD.LoadHelperArgument(instr, limit);
    }

    //input, regexp
    this->m_lowererMD.LoadHelperArgument(instr, argsOpnd[0]);
    this->m_lowererMD.LoadHelperArgument(instr, argsOpnd[1]);

    // script context
    LoadScriptContext(instr);

    IR::JnHelperMethod helperMethod;
    IR::AutoReuseOpnd autoReuseStackAllocationOpnd;
    if(callDst && instr->dstIsTempObject)
    {
        // Result is known not to escape: let the helper build the result array
        // in a mark-temp (stack) allocation instead of the heap.
        switch(src1->AsHelperCallOpnd()->m_fnHelper)
        {
            case IR::JnHelperMethod::HelperString_Split:
                helperMethod = IR::JnHelperMethod::HelperRegExp_SplitResultUsedAndMayBeTemp;
                break;

            case IR::JnHelperMethod::HelperString_Match:
                helperMethod = IR::JnHelperMethod::HelperRegExp_MatchResultUsedAndMayBeTemp;
                break;

            default:
                Assert(false);
                __assume(false);
        }

        // Allocate some space on the stack for the result array
        IR::RegOpnd *const stackAllocationOpnd = IR::RegOpnd::New(TyVar, m_func);
        autoReuseStackAllocationOpnd.Initialize(stackAllocationOpnd, m_func);
        stackAllocationOpnd->SetValueType(callDst->GetValueType());
        GenerateMarkTempAlloc(stackAllocationOpnd, Js::JavascriptArray::StackAllocationSize, instr);
        m_lowererMD.LoadHelperArgument(instr, stackAllocationOpnd);
    }
    else
    {
        // Heap-allocating variants; split further by whether the result is used.
        switch(src1->AsHelperCallOpnd()->m_fnHelper)
        {
            case IR::JnHelperMethod::HelperString_Split:
                helperMethod =
                    callDst
                        ? IR::JnHelperMethod::HelperRegExp_SplitResultUsed
                        : IR::JnHelperMethod::HelperRegExp_SplitResultNotUsed;
                break;

            case IR::JnHelperMethod::HelperString_Match:
                helperMethod =
                    callDst
                        ? IR::JnHelperMethod::HelperRegExp_MatchResultUsed
                        : IR::JnHelperMethod::HelperRegExp_MatchResultNotUsed;
                break;

            default:
                Assert(false);
                __assume(false);
        }
    }

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    if(callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    instr->InsertBefore(helperCallInstr);
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);
    instr->InsertBefore(labelHelper);
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
void
Lowerer::GenerateFastInlineRegExpExec(IR::Instr * instr)
{
    // a.exec(b)
    // We want to emit the fast path when 'a' is a regex and 'b' is a string
    //
    // Emits string/object/vtable checks on the operands, an optional inline
    // "begin-of-input literal" early-out for global regexes, and a direct call
    // to the appropriate RegExp_Exec helper; failed checks fall to the
    // relocated CallDirect helper path.
    Assert(instr->m_opcode == Js::OpCode::CallDirect);
    IR::Opnd * callDst = instr->GetDst();

    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = instr->GetSrc2()->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    IR::Opnd * argsOpnd[2];
    if (!instr->FetchOperands(argsOpnd, 2))
    {
        return;
    }

    // argsOpnd[0] is 'a' (the regex / this), argsOpnd[1] is 'b' (the string).
    IR::Opnd *opndString = argsOpnd[1];
    if(!opndString->GetValueType().IsLikelyString() || argsOpnd[0]->IsTaggedInt())
    {
        return;
    }

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    if(!opndString->GetValueType().IsString())
    {
        // Only *likely* a string: emit a runtime string test.
        if(!opndString->IsRegOpnd())
        {
            IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
            LowererMD::CreateAssign(opndReg, opndString, instr);
            opndString = opndReg;
        }
        this->GenerateStringTest(opndString->AsRegOpnd(), instr, labelHelper);
    }

    IR::Opnd *opndRegex = argsOpnd[0];
    if(!opndRegex->IsNotTaggedValue())
    {
        // 'a' must be an object before its vtable can be inspected.
        m_lowererMD.GenerateObjectTest(opndRegex, instr, labelHelper);
    }

    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);

    // cmp [regex], vtableAddress
    // jne $labelHelper
    if(!opndRegex->IsRegOpnd())
    {
        IR::RegOpnd *opndReg = IR::RegOpnd::New(TyVar, m_func);
        LowererMD::CreateAssign(opndReg, opndRegex, instr);
        opndRegex = opndReg;
    }
    InsertCompareBranch(
        IR::IndirOpnd::New(opndRegex->AsRegOpnd(), 0, TyMachPtr, instr->m_func),
        vtableOpnd,
        Js::OpCode::BrNeq_A,
        labelHelper,
        instr);

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    if (!PHASE_OFF(Js::ExecBOIFastPathPhase, m_func))
    {
        // BOI (begin-of-input) literal fast path: for a global regex whose
        // compiled program is tagged BOILiteral2 (hence the single DWORD
        // compare below), match failure can be decided inline without entering
        // the regex engine.

        // Load pattern from regex operand
        IR::RegOpnd *opndPattern = IR::RegOpnd::New(TyMachPtr, m_func);
        LowererMD::CreateAssign(
            opndPattern,
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfPattern(), TyMachPtr, m_func),
            instr);

        // Load program from pattern
        IR::RegOpnd *opndProgram = IR::RegOpnd::New(TyMachPtr, m_func);
        LowererMD::CreateAssign(
            opndProgram,
            IR::IndirOpnd::New(opndPattern, offsetof(UnifiedRegex::RegexPattern, rep) + offsetof(UnifiedRegex::RegexPattern::UnifiedRep, program), TyMachPtr, m_func),
            instr);

        IR::LabelInstr *labelFastHelper = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // We want the program's tag to be BOILiteral2Tag
        InsertCompareBranch(
            IR::IndirOpnd::New(opndProgram, (int32)UnifiedRegex::Program::GetOffsetOfTag(), TyUint8, m_func),
            IR::IntConstOpnd::New(UnifiedRegex::Program::GetBOILiteral2Tag(), TyUint8, m_func),
            Js::OpCode::BrNeq_A,
            labelFastHelper,
            instr);

        // Test the program's flags for "global"
        InsertTestBranch(
            IR::IndirOpnd::New(opndProgram, offsetof(UnifiedRegex::Program, flags), TyUint8, m_func),
            IR::IntConstOpnd::New(UnifiedRegex::GlobalRegexFlag, TyUint8, m_func),
            Js::OpCode::BrNeq_A,
            labelFastHelper,
            instr);

        IR::LabelInstr *labelNoMatch = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // If string length < 2...
        InsertCompareBranch(
            IR::IndirOpnd::New(opndString->AsRegOpnd(), offsetof(Js::JavascriptString, m_charLength), TyUint32, m_func),
            IR::IntConstOpnd::New(2, TyUint32, m_func),
            Js::OpCode::BrLt_A,
            labelNoMatch,
            instr);

        // ...or the DWORD doesn't match the pattern...
        IR::RegOpnd *opndBuffer = IR::RegOpnd::New(TyMachReg, m_func);
        LowererMD::CreateAssign(
            opndBuffer,
            IR::IndirOpnd::New(opndString->AsRegOpnd(), offsetof(Js::JavascriptString, m_pszValue), TyMachPtr, m_func),
            instr);

        // If m_pszValue is null, call HelperString_GetSz to obtain the
        // character buffer before the DWORD compare.
        IR::LabelInstr *labelGotString = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        InsertTestBranch(opndBuffer, opndBuffer, Js::OpCode::BrNeq_A, labelGotString, instr);
        m_lowererMD.LoadHelperArgument(instr, opndString);
        IR::Instr *instrCall = IR::Instr::New(Js::OpCode::Call, opndBuffer, IR::HelperCallOpnd::New(IR::HelperString_GetSz, m_func), m_func);
        instr->InsertBefore(instrCall);
        m_lowererMD.LowerCall(instrCall, 0);

        instr->InsertBefore(labelGotString);
        IR::RegOpnd *opndBufferDWORD = IR::RegOpnd::New(TyUint32, m_func);
        LowererMD::CreateAssign(
            opndBufferDWORD,
            IR::IndirOpnd::New(opndBuffer, 0, TyUint32, m_func),
            instr);
        InsertCompareBranch(
            IR::IndirOpnd::New(opndProgram, (int32)(UnifiedRegex::Program::GetOffsetOfRep() + UnifiedRegex::Program::GetOffsetOfBOILiteral2Literal()), TyUint32, m_func),
            opndBufferDWORD,
            Js::OpCode::BrEq_A,
            labelFastHelper,
            instr);

        // ...then set the last index to 0...
        instr->InsertBefore(labelNoMatch);
        LowererMD::CreateAssign(
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), TyVar, m_func),
            IR::AddrOpnd::NewNull(m_func),
            instr);
        LowererMD::CreateAssign(
            IR::IndirOpnd::New(opndRegex->AsRegOpnd(), Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), TyUint32, m_func),
            IR::IntConstOpnd::New(0, TyUint32, m_func),
            instr);

        // ...and set the dst to null...
        if (callDst)
        {
            LowererMD::CreateAssign(
                callDst,
                LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
                instr);
        }

        // ...and we're done.
        this->InsertBranch(Js::OpCode::Br, doneLabel, instr);

        instr->InsertBefore(labelFastHelper);
    }

    // [stackAllocationPointer, ]scriptcontext, regexp, string (to be pushed in reverse order)

    //string, regexp
    this->m_lowererMD.LoadHelperArgument(instr, opndString);
    this->m_lowererMD.LoadHelperArgument(instr, opndRegex);

    // script context
    LoadScriptContext(instr);

    IR::JnHelperMethod helperMethod;
    IR::AutoReuseOpnd autoReuseStackAllocationOpnd;
    if(callDst)
    {
        if(instr->dstIsTempObject)
        {
            // Result is known not to escape: let the helper build the match
            // array in a mark-temp (stack) allocation instead of the heap.
            helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultUsedAndMayBeTemp;

            // Allocate some space on the stack for the result array
            IR::RegOpnd *const stackAllocationOpnd = IR::RegOpnd::New(TyVar, m_func);
            autoReuseStackAllocationOpnd.Initialize(stackAllocationOpnd, m_func);
            stackAllocationOpnd->SetValueType(callDst->GetValueType());
            GenerateMarkTempAlloc(stackAllocationOpnd, Js::JavascriptArray::StackAllocationSize, instr);
            m_lowererMD.LoadHelperArgument(instr, stackAllocationOpnd);
        }
        else
        {
            helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultUsed;
        }
    }
    else
    {
        helperMethod = IR::JnHelperMethod::HelperRegExp_ExecResultNotUsed;
    }

    IR::Instr * helperCallInstr = IR::Instr::New(LowererMD::MDCallOpcode, instr->m_func);
    if(callDst)
    {
        helperCallInstr->SetDst(callDst);
    }
    instr->InsertBefore(helperCallInstr);
    m_lowererMD.ChangeToHelperCall(helperCallInstr, helperMethod);

    instr->InsertAfter(doneLabel);
    instr->InsertBefore(labelHelper);
    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);
}
void
Lowerer::RelocateCallDirectToHelperPath(IR::Instr* argoutInlineSpecialized, IR::LabelInstr* labelHelper)
{
    // Moves the entire StartCall/ArgOut sequence feeding a CallDirect down into
    // the helper block (right after labelHelper), so the argument pushes only
    // execute when a fast path bails out to the helper call.
    IR::Opnd *linkOpnd = argoutInlineSpecialized->GetSrc2(); //ArgOut_A_InlineSpecialized src2; link to actual argouts.

    argoutInlineSpecialized->Unlink();
    labelHelper->InsertAfter(argoutInlineSpecialized);

    // Walk the def chain backwards (last ArgOut first). Because each
    // instruction is inserted immediately after the label, pushing the earlier
    // ones down, the relocated sequence ends up in original program order:
    // StartCall, ArgOut_A's, then the ArgOut_A_InlineSpecialized.
    while(linkOpnd->IsSymOpnd())
    {
        IR::SymOpnd *src2 = linkOpnd->AsSymOpnd();
        StackSym *sym = src2->m_sym->AsStackSym();
        Assert(sym->m_isSingleDef);
        IR::Instr *argInstr = sym->m_instrDef;
        Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A);

        argInstr->Unlink();
        labelHelper->InsertAfter(argInstr);

        linkOpnd = argInstr->GetSrc2();
    }

    // Move startcall
    Assert(linkOpnd->IsRegOpnd());
    StackSym *sym = linkOpnd->AsRegOpnd()->m_sym;
    Assert(sym->m_isSingleDef);
    IR::Instr *startCall = sym->m_instrDef;
    Assert(startCall->m_opcode == Js::OpCode::StartCall);
    startCall->Unlink();
    labelHelper->InsertAfter(startCall);
}
bool
Lowerer::GenerateFastInlineStringCharCodeAt(IR::Instr * instr, Js::BuiltinFunction index)
{
    // Fast path for String.prototype.charAt/charCodeAt (selected by 'index'):
    // delegates the machine-specific sequence to LowererMD::GenerateFastCharAt
    // and wires up the helper fallback. Returns false when the MD layer could
    // not emit a fast path (the CallDirect is then lowered normally).
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    //CallDirect src2
    IR::Opnd * linkOpnd = instr->GetSrc2();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    // argsOpnd[0] is the 'this' string, argsOpnd[1] is the index argument.
    IR::Opnd * argsOpnd[2] = {0};
    bool result = instr->FetchOperands(argsOpnd, 2);
    Assert(result);

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    bool success = this->m_lowererMD.GenerateFastCharAt(index, instr->GetDst(), argsOpnd[0], argsOpnd[1],
        instr, instr, labelHelper, doneLabel);

    // NOTE(review): doneLabel/labelHelper are inserted even on failure; we
    // return before emitting the branch and the argout relocation, so the
    // labels remain in place but the call proceeds through the normal path.
    instr->InsertBefore(labelHelper);

    if (!success)
    {
        return false;
    }

    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);

    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);

    return true;
}
  14704. void
  14705. Lowerer::GenerateFastInlineMathClz32(IR::Instr* instr)
  14706. {
  14707. Assert(instr->GetDst()->IsInt32());
  14708. Assert(instr->GetSrc1()->IsInt32());
  14709. m_lowererMD.GenerateClz(instr);
  14710. }
  14711. void
  14712. Lowerer::GenerateFastInlineMathImul(IR::Instr* instr)
  14713. {
  14714. IR::Opnd* src1 = instr->GetSrc1();
  14715. IR::Opnd* src2 = instr->GetSrc2();
  14716. IR::Opnd* dst = instr->GetDst();
  14717. Assert(dst->IsInt32());
  14718. Assert(src1->IsInt32());
  14719. Assert(src2->IsInt32());
  14720. IR::Instr* imul = IR::Instr::New(LowererMD::MDImulOpcode, dst, src1, src2, instr->m_func);
  14721. instr->InsertBefore(imul);
  14722. LowererMD::Legalize(imul);
  14723. instr->Remove();
  14724. }
  14725. void
  14726. Lowerer::GenerateFastInlineMathFround(IR::Instr* instr)
  14727. {
  14728. IR::Opnd* src1 = instr->GetSrc1();
  14729. IR::Opnd* dst = instr->GetDst();
  14730. Assert(dst->IsFloat());
  14731. Assert(src1->IsFloat());
  14732. IR::Instr* fcvt64to32 = IR::Instr::New(LowererMD::MDConvertFloat64ToFloat32Opcode, dst, src1, instr->m_func);
  14733. instr->InsertBefore(fcvt64to32);
  14734. LowererMD::Legalize(fcvt64to32);
  14735. if (dst->IsFloat64())
  14736. {
  14737. IR::Instr* fcvt32to64 = IR::Instr::New(LowererMD::MDConvertFloat32ToFloat64Opcode, dst, dst, instr->m_func);
  14738. instr->InsertBefore(fcvt32to64);
  14739. LowererMD::Legalize(fcvt32to64);
  14740. }
  14741. instr->Remove();
  14742. return;
  14743. }
bool
Lowerer::GenerateFastInlineStringReplace(IR::Instr * instr)
{
    // Fast path for a.replace(b, c): validates the profiled value types, then
    // delegates emission to GenerateFastReplace and wires up the helper
    // fallback. Returns false when no fast path was emitted.
    Assert(instr->m_opcode == Js::OpCode::CallDirect);

    //CallDirect src2
    IR::Opnd * linkOpnd = instr->GetSrc2();
    //ArgOut_A_InlineSpecialized
    IR::Instr * tmpInstr = linkOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_instrDef;

    // argsOpnd[0] is 'a' (this), argsOpnd[1] is 'b', argsOpnd[2] is 'c'.
    IR::Opnd * argsOpnd[3] = {0};
    bool result = instr->FetchOperands(argsOpnd, 3);
    Assert(result);
    AnalysisAssert(argsOpnd[0] && argsOpnd[1] && argsOpnd[2]);

    // Require likely-string 'a' and 'c', and a possibly-object 'b' (the regex).
    if (!argsOpnd[0]->GetValueType().IsLikelyString()
        || argsOpnd[1]->GetValueType().IsNotObject()
        || !argsOpnd[2]->GetValueType().IsLikelyString())
    {
        return false;
    }

    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);

    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    bool success = this->GenerateFastReplace(argsOpnd[0], argsOpnd[1], argsOpnd[2],
        instr, instr, labelHelper, doneLabel);

    instr->InsertBefore(labelHelper);

    if (!success)
    {
        return false;
    }

    InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);

    RelocateCallDirectToHelperPath(tmpInstr, labelHelper);

    return true;
}
#ifdef ENABLE_DOM_FAST_PATH
/*
Lower the DOMFastPathGetter opcode
We have inliner generated bytecode:
(dst)helpArg1: ExtendArg_A (src1)thisObject (src2)null
(dst)helpArg2: ExtendArg_A (src1)funcObject (src2)helpArg1
method: DOMFastPathGetter (src1)HelperCall (src2)helpArg2
We'll convert it to a JavascriptFunction entry method call:
CALL Helper funcObject CallInfo(CallFlags_Value, 3) thisObj
*/
void
Lowerer::LowerFastInlineDOMFastPathGetter(IR::Instr* instr)
{
    IR::Opnd* helperOpnd = instr->UnlinkSrc1();
    Assert(helperOpnd->IsHelperCallOpnd());

    IR::Opnd *linkOpnd = instr->UnlinkSrc2();
    Assert(linkOpnd->IsRegOpnd());

    // Walk the ExtendArg_A chain: the last link defines the function object...
    IR::Instr* prevInstr = linkOpnd->AsRegOpnd()->m_sym->m_instrDef;
    Assert(prevInstr->m_opcode == Js::OpCode::ExtendArg_A);
    IR::Opnd* funcObj = prevInstr->GetSrc1();
    Assert(funcObj->IsRegOpnd());

    // If the Extended_arg was CSE's across a loop or hoisted out of a loop,
    // adding a new reference down here might cause funcObj to now be liveOnBackEdge.
    // Use the addToLiveOnBackEdgeSyms bit vector to add it to a loop if we encounter one.
    // We'll clear it once we reach the Extended arg.
    this->addToLiveOnBackEdgeSyms->Set(funcObj->AsRegOpnd()->m_sym->m_id);

    // ...and its src2 link leads to the ExtendArg_A defining 'this' (whose own
    // src2 is null, marking the head of the chain).
    Assert(prevInstr->GetSrc2() != nullptr);
    prevInstr = prevInstr->GetSrc2()->AsRegOpnd()->m_sym->m_instrDef;
    Assert(prevInstr->m_opcode == Js::OpCode::ExtendArg_A);
    IR::Opnd* thisObj = prevInstr->GetSrc1();
    Assert(prevInstr->GetSrc2() == nullptr);
    Assert(thisObj->IsRegOpnd());
    this->addToLiveOnBackEdgeSyms->Set(thisObj->AsRegOpnd()->m_sym->m_id);

    const auto info = Lowerer::MakeCallInfoConst(Js::CallFlags_Value, 1, m_func);

    // Push helper arguments: this, callInfo, funcObj (loaded in reverse).
    m_lowererMD.LoadHelperArgument(instr, thisObj);
    m_lowererMD.LoadHelperArgument(instr, info);
    m_lowererMD.LoadHelperArgument(instr, funcObj);

    // Convert the DOMFastPathGetter into a plain 3-argument helper call.
    instr->m_opcode = Js::OpCode::Call;
    IR::HelperCallOpnd *helperCallOpnd = Lowerer::CreateHelperCallOpnd(helperOpnd->AsHelperCallOpnd()->m_fnHelper, 3, m_func);
    instr->SetSrc1(helperCallOpnd);

    m_lowererMD.LowerCall(instr, 3); // we have funcobj, callInfo, and this.
}
#endif
void
Lowerer::GenerateFastInlineArrayPush(IR::Instr * instr)
{
    // Lowers InlineArrayPush: emits the inline push fast path (when profitable)
    // with a helper-call fallback, plus the bailout plumbing required for
    // likely-native arrays.
    Assert(instr->m_opcode == Js::OpCode::InlineArrayPush);

    IR::Opnd * baseOpnd = instr->GetSrc1();
    IR::Opnd * srcOpnd = instr->GetSrc2();

    // A dst means the script observes push's return value (the new length).
    bool returnLength = false;
    if(instr->GetDst())
    {
        returnLength = true;
    }

    IR::LabelInstr * bailOutLabelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
    IR::LabelInstr *doneLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    instr->InsertAfter(doneLabel);
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    //Don't Generate fast path according to ShouldGenerateArrayFastPath()
    //AND, Don't Generate fast path if the array is LikelyNative and the element is not specialized
    if(ShouldGenerateArrayFastPath(baseOpnd, false, false, false) &&
        !(baseOpnd->GetValueType().IsLikelyNativeArray() && srcOpnd->IsVar()))
    {
        GenerateFastPush(baseOpnd, srcOpnd, instr, instr, labelHelper, doneLabel, bailOutLabelHelper, returnLength);

        instr->InsertBefore(labelHelper);
        InsertBranch(Js::OpCode::Br, true, doneLabel, labelHelper);
    }

    if(baseOpnd->GetValueType().IsLikelyNativeArray())
    {
        //Lower IR::BailOutConventionalNativeArrayAccessOnly here.
        LowerOneBailOutKind(instr, IR::BailOutConventionalNativeArrayAccessOnly, false, false);
        instr->InsertAfter(bailOutLabelHelper);
        InsertBranch(Js::OpCode::Br, doneLabel, bailOutLabelHelper);
    }

    // Emit the runtime-helper call that backs the non-fast-path cases.
    GenerateHelperToArrayPushFastPath(instr, bailOutLabelHelper);
}
  14852. bool Lowerer::GenerateFastPop(IR::Opnd *baseOpndParam, IR::Instr *callInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel, IR::LabelInstr * bailOutLabelHelper)
  14853. {
  14854. Assert(ShouldGenerateArrayFastPath(baseOpndParam, false, false, false));
  14855. // TEST baseOpnd, AtomTag -- check baseOpnd not tagged int
  14856. // JNE $helper
  14857. // CMP [baseOpnd], JavascriptArray::`vtable' -- check baseOpnd isArray
  14858. // JNE $helper
  14859. // MOV r2, [baseOpnd + offset(length)] -- Load array length
  14860. IR::RegOpnd * baseOpnd = baseOpndParam->AsRegOpnd();
  14861. const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
  14862. ValueType arrValueType(baseOpndParam->GetValueType());
  14863. IR::RegOpnd *arrayOpnd = baseOpnd;
  14864. IR::RegOpnd *arrayLengthOpnd = nullptr;
  14865. IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
  14866. if(!arrValueType.IsAnyOptimizedArray())
  14867. {
  14868. arrayOpnd = GenerateArrayTest(baseOpnd, bailOutLabelHelper, bailOutLabelHelper, callInstr, false, true);
  14869. arrValueType = arrayOpnd->GetValueType().ToDefiniteObject().SetHasNoMissingValues(false);
  14870. }
  14871. else if(arrayOpnd->IsArrayRegOpnd())
  14872. {
  14873. IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
  14874. if(arrayRegOpnd->LengthSym())
  14875. {
  14876. arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), arrayRegOpnd->LengthSym()->GetType(), m_func);
  14877. DebugOnly(arrayLengthOpnd->FreezeSymValue());
  14878. autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
  14879. }
  14880. }
  14881. const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);
  14882. IR::AutoReuseOpnd autoReuseMutableArrayLengthOpnd;
  14883. {
  14884. IR::RegOpnd *const mutableArrayLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
  14885. autoReuseMutableArrayLengthOpnd.Initialize(mutableArrayLengthOpnd, m_func);
  14886. if(arrayLengthOpnd)
  14887. {
  14888. // mov mutableArrayLength, arrayLength
  14889. InsertMove(mutableArrayLengthOpnd, arrayLengthOpnd, callInstr);
  14890. }
  14891. else
  14892. {
  14893. // MOV mutableArrayLength, [array + offset(length)] -- Load array length
  14894. // We know this index is safe since, so mark it as UInt32 to avoid unnecessary conversion/checks
  14895. InsertMove(
  14896. mutableArrayLengthOpnd,
  14897. IR::IndirOpnd::New(
  14898. arrayOpnd,
  14899. Js::JavascriptArray::GetOffsetOfLength(),
  14900. mutableArrayLengthOpnd->GetType(),
  14901. this->m_func),
  14902. callInstr);
  14903. }
  14904. arrayLengthOpnd = mutableArrayLengthOpnd;
  14905. }
  14906. InsertCompareBranch(arrayLengthOpnd, IR::IntConstOpnd::New(0, TyUint32, this->m_func), Js::OpCode::BrEq_A, true, bailOutLabelHelper, callInstr);
  14907. InsertSub(false, arrayLengthOpnd, arrayLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func),callInstr);
  14908. IR::IndirOpnd *arrayRef = IR::IndirOpnd::New(arrayOpnd, arrayLengthOpnd, TyVar, this->m_func);
  14909. arrayRef->GetBaseOpnd()->SetValueType(arrValueType);
  14910. //Array length is going to overflow, hence don't check for Array.length and Segment.length overflow.
  14911. bool isTypedArrayElement, isStringIndex;
  14912. IR::IndirOpnd *const indirOpnd =
  14913. GenerateFastElemICommon(
  14914. callInstr,
  14915. false,
  14916. arrayRef,
  14917. labelHelper,
  14918. labelHelper,
  14919. nullptr,
  14920. &isTypedArrayElement,
  14921. &isStringIndex,
  14922. nullptr,
  14923. nullptr /*pLabelSegmentLengthIncreased*/,
  14924. true /*checkArrayLengthOverflow*/,
  14925. true /* forceGenerateFastPath */,
  14926. false/* = returnLength */,
  14927. bailOutLabelHelper /* = bailOutLabelInstr*/);
  14928. Assert(!isTypedArrayElement);
  14929. Assert(indirOpnd);
  14930. return true;
  14931. }
  14932. bool Lowerer::GenerateFastPush(IR::Opnd *baseOpndParam, IR::Opnd *src, IR::Instr *callInstr,
  14933. IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel, IR::LabelInstr * bailOutLabelHelper, bool returnLength)
  14934. {
  14935. Assert(ShouldGenerateArrayFastPath(baseOpndParam, false, false, false));
  14936. // TEST baseOpnd, AtomTag -- check baseOpnd not tagged int
  14937. // JNE $helper
  14938. // CMP [baseOpnd], JavascriptArray::`vtable' -- check baseOpnd isArray
  14939. // JNE $helper
  14940. // MOV r2, [baseOpnd + offset(length)] -- Load array length
  14941. IR::RegOpnd * baseOpnd = baseOpndParam->AsRegOpnd();
  14942. const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
  14943. ValueType arrValueType(baseOpndParam->GetValueType());
  14944. IR::RegOpnd *arrayOpnd = baseOpnd;
  14945. IR::RegOpnd *arrayLengthOpnd = nullptr;
  14946. IR::AutoReuseOpnd autoReuseArrayLengthOpnd;
  14947. if(!arrValueType.IsAnyOptimizedArray())
  14948. {
  14949. arrayOpnd = GenerateArrayTest(baseOpnd, labelHelper, labelHelper, insertInstr, false, true);
  14950. arrValueType = arrayOpnd->GetValueType().ToDefiniteObject().SetHasNoMissingValues(false);
  14951. }
  14952. else if(arrayOpnd->IsArrayRegOpnd())
  14953. {
  14954. IR::ArrayRegOpnd *const arrayRegOpnd = arrayOpnd->AsArrayRegOpnd();
  14955. if(arrayRegOpnd->LengthSym())
  14956. {
  14957. arrayLengthOpnd = IR::RegOpnd::New(arrayRegOpnd->LengthSym(), arrayRegOpnd->LengthSym()->GetType(), m_func);
  14958. DebugOnly(arrayLengthOpnd->FreezeSymValue());
  14959. autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
  14960. }
  14961. }
  14962. const IR::AutoReuseOpnd autoReuseArrayOpnd(arrayOpnd, m_func);
  14963. if(!arrayLengthOpnd)
  14964. {
  14965. // MOV arrayLength, [array + offset(length)] -- Load array length
  14966. // We know this index is safe since, so mark it as UInt32 to avoid unnecessary conversion/checks
  14967. arrayLengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
  14968. autoReuseArrayLengthOpnd.Initialize(arrayLengthOpnd, m_func);
  14969. InsertMove(
  14970. arrayLengthOpnd,
  14971. IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), arrayLengthOpnd->GetType(), this->m_func),
  14972. insertInstr);
  14973. }
  14974. IR::IndirOpnd *arrayRef = IR::IndirOpnd::New(arrayOpnd, arrayLengthOpnd, TyVar, this->m_func);
  14975. arrayRef->GetBaseOpnd()->SetValueType(arrValueType);
  14976. if (returnLength && src->IsEqual(insertInstr->GetDst()))
  14977. {
  14978. //If the dst is same as the src, then dst is going to be overridden by GenerateFastElemICommon in process of updating the length.
  14979. //Save it in a temp register.
  14980. IR::RegOpnd *opnd = IR::RegOpnd::New(src->GetType(), this->m_func);
  14981. InsertMove(opnd, src, insertInstr);
  14982. src = opnd;
  14983. }
  14984. //Array length is going to overflow, hence don't check for Array.length and Segment.length overflow.
  14985. bool isTypedArrayElement, isStringIndex;
  14986. IR::IndirOpnd *const indirOpnd =
  14987. GenerateFastElemICommon(
  14988. insertInstr,
  14989. true,
  14990. arrayRef,
  14991. labelHelper,
  14992. labelHelper,
  14993. nullptr,
  14994. &isTypedArrayElement,
  14995. &isStringIndex,
  14996. nullptr,
  14997. nullptr /*pLabelSegmentLengthIncreased*/,
  14998. false /*checkArrayLengthOverflow*/,
  14999. true /* forceGenerateFastPath */,
  15000. returnLength,
  15001. bailOutLabelHelper);
  15002. Assert(!isTypedArrayElement);
  15003. Assert(indirOpnd);
  15004. // MOV [r3 + r2], src
  15005. InsertMoveWithBarrier(indirOpnd, src, insertInstr);
  15006. return true;
  15007. }
  15008. IR::Opnd*
  15009. Lowerer::GenerateArgOutForInlineeStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
  15010. {
  15011. Assert(callInstr->m_func->IsInlinee());
  15012. Func *func = callInstr->m_func;
  15013. uint32 actualCount = func->actualCount - 1; // don't count this pointer
  15014. Assert(actualCount < Js::InlineeCallInfo::MaxInlineeArgoutCount);
  15015. const auto firstRealArgStackSym = func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
  15016. this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr); //Start after this pointer
  15017. IR::SymOpnd *firstArg = IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, func);
  15018. const IR::AutoReuseOpnd autoReuseFirstArg(firstArg, func);
  15019. IR::RegOpnd* argInOpnd = IR::RegOpnd::New(TyMachReg, func);
  15020. const IR::AutoReuseOpnd autoReuseArgInOpnd(argInOpnd, func);
  15021. InsertLea(argInOpnd, firstArg, callInstr);
  15022. IR::IndirOpnd *argIndirOpnd = nullptr;
  15023. IR::Instr* argout = nullptr;
  15024. #if defined(_M_IX86)
  15025. // Maintain alignment
  15026. if ((actualCount & 1) == 0)
  15027. {
  15028. IR::Instr *alignPush = IR::Instr::New(Js::OpCode::PUSH, this->m_func);
  15029. alignPush->SetSrc1(IR::IntConstOpnd::New(1, TyInt32, this->m_func));
  15030. callInstr->InsertBefore(alignPush);
  15031. }
  15032. #endif
  15033. for(uint i = actualCount; i > 0; i--)
  15034. {
  15035. argIndirOpnd = IR::IndirOpnd::New(argInOpnd, (i - 1) * MachPtr, TyMachReg, func);
  15036. argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
  15037. argout->SetSrc1(argIndirOpnd);
  15038. callInstr->InsertBefore(argout);
  15039. // i represents ith arguments from actuals, with is i + 3 counting this, callInfo and function object
  15040. this->m_lowererMD.LoadDynamicArgument(argout, i + 3);
  15041. }
  15042. return IR::IntConstOpnd::New(func->actualCount, TyInt32, func);
  15043. }
  15044. // For AMD64 and ARM only.
  15045. void
  15046. Lowerer::LowerInlineSpreadArgOutLoopUsingRegisters(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd)
  15047. {
  15048. Func *const func = callInstr->m_func;
  15049. IR::LabelInstr *oneArgLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  15050. InsertCompareBranch(indexOpnd, IR::IntConstOpnd::New(1, TyUint8, func), Js::OpCode::BrEq_A, true, oneArgLabel, callInstr);
  15051. IR::LabelInstr *startLoopLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  15052. startLoopLabel->m_isLoopTop = true;
  15053. Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, this->m_func);
  15054. startLoopLabel->SetLoop(loop);
  15055. loop->SetLoopTopInstr(startLoopLabel);
  15056. loop->regAlloc.liveOnBackEdgeSyms = AllocatorNew(JitArenaAllocator, func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
  15057. loop->regAlloc.liveOnBackEdgeSyms->Set(indexOpnd->m_sym->m_id);
  15058. loop->regAlloc.liveOnBackEdgeSyms->Set(arrayElementsStartOpnd->m_sym->m_id);
  15059. callInstr->InsertBefore(startLoopLabel);
  15060. InsertSub(false, indexOpnd, indexOpnd, IR::IntConstOpnd::New(1, TyInt8, func), callInstr);
  15061. IR::IndirOpnd *elemPtrOpnd = IR::IndirOpnd::New(arrayElementsStartOpnd, indexOpnd, this->m_lowererMD.GetDefaultIndirScale(), TyMachPtr, func);
  15062. // Generate argout for n+2 arg (skipping function object + this)
  15063. IR::Instr *argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
  15064. // X64 requires a reg opnd
  15065. IR::RegOpnd *elemRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
  15066. LowererMD::CreateAssign(elemRegOpnd, elemPtrOpnd, callInstr);
  15067. argout->SetSrc1(elemRegOpnd);
  15068. argout->SetSrc2(indexOpnd);
  15069. callInstr->InsertBefore(argout);
  15070. this->m_lowererMD.LoadDynamicArgumentUsingLength(argout);
  15071. InsertCompareBranch(indexOpnd, IR::IntConstOpnd::New(1, TyUint8, func), Js::OpCode::BrNeq_A, true, startLoopLabel, callInstr);
  15072. // Emit final argument into register 4 on AMD64 and ARM
  15073. callInstr->InsertBefore(oneArgLabel);
  15074. argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
  15075. argout->SetSrc1(elemPtrOpnd);
  15076. callInstr->InsertBefore(argout);
  15077. this->m_lowererMD.LoadDynamicArgument(argout, 4); //4 to denote this is 4th register after this, callinfo & function object
  15078. }
  15079. IR::Instr *
  15080. Lowerer::LowerCallIDynamicSpread(IR::Instr *callInstr, ushort callFlags)
  15081. {
  15082. Assert(callInstr->m_opcode == Js::OpCode::CallIDynamicSpread);
  15083. IR::Instr * insertBeforeInstrForCFG = nullptr;
  15084. Func *const func = callInstr->m_func;
  15085. if (func->IsInlinee())
  15086. {
  15087. throw Js::RejitException(RejitReason::InlineSpreadDisabled);
  15088. }
  15089. IR::Instr *spreadArrayInstr = callInstr;
  15090. IR::SymOpnd *argLinkOpnd = spreadArrayInstr->UnlinkSrc2()->AsSymOpnd();
  15091. StackSym *argLinkSym = argLinkOpnd->m_sym->AsStackSym();
  15092. AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
  15093. argLinkOpnd->Free(this->m_func);
  15094. spreadArrayInstr = argLinkSym->m_instrDef;
  15095. Assert(spreadArrayInstr->m_opcode == Js::OpCode::ArgOut_A_SpreadArg);
  15096. IR::RegOpnd *arrayOpnd = nullptr;
  15097. IR::Opnd *arraySrcOpnd = spreadArrayInstr->UnlinkSrc1();
  15098. if (!arraySrcOpnd->IsRegOpnd())
  15099. {
  15100. arrayOpnd = IR::RegOpnd::New(TyMachPtr, func);
  15101. LowererMD::CreateAssign(arrayOpnd, arraySrcOpnd, spreadArrayInstr);
  15102. }
  15103. else
  15104. {
  15105. arrayOpnd = arraySrcOpnd->AsRegOpnd();
  15106. }
  15107. argLinkOpnd = spreadArrayInstr->UnlinkSrc2()->AsSymOpnd();
  15108. // Walk the arg chain and find the start call
  15109. argLinkSym = argLinkOpnd->m_sym->AsStackSym();
  15110. AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
  15111. argLinkOpnd->Free(this->m_func);
  15112. // Nothing to be done for the function object, emit as normal
  15113. IR::Instr *thisInstr = argLinkSym->m_instrDef;
  15114. IR::RegOpnd *thisOpnd = thisInstr->UnlinkSrc2()->AsRegOpnd();
  15115. argLinkSym = thisOpnd->m_sym->AsStackSym();
  15116. thisInstr->Unlink();
  15117. thisInstr->FreeDst();
  15118. // Remove the array ArgOut instr and StartCall, they are no longer needed
  15119. spreadArrayInstr->Unlink();
  15120. spreadArrayInstr->FreeDst();
  15121. IR::Instr *startCallInstr = argLinkSym->m_instrDef;
  15122. Assert(startCallInstr->m_opcode == Js::OpCode::StartCall);
  15123. insertBeforeInstrForCFG = startCallInstr->GetNextRealInstr();
  15124. startCallInstr->Remove();
  15125. IR::RegOpnd *argsLengthOpnd = IR::RegOpnd::New(TyUint32, func);
  15126. IR::IndirOpnd *arrayLengthPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfLength(), TyUint32, func);
  15127. LowererMD::CreateAssign(argsLengthOpnd, arrayLengthPtrOpnd, callInstr);
  15128. // Don't bother expanding args if there are zero
  15129. IR::LabelInstr *zeroArgsLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
  15130. InsertCompareBranch(argsLengthOpnd, IR::IntConstOpnd::New(0, TyInt8, func), Js::OpCode::BrEq_A, true, zeroArgsLabel, callInstr);
  15131. IR::RegOpnd *indexOpnd = IR::RegOpnd::New(TyUint32, func);
  15132. LowererMD::CreateAssign(indexOpnd, argsLengthOpnd, callInstr);
  15133. // Get the array head offset and length
  15134. IR::IndirOpnd *arrayHeadPtrOpnd = IR::IndirOpnd::New(arrayOpnd, Js::JavascriptArray::GetOffsetOfHead(), TyMachPtr, func);
  15135. IR::RegOpnd *arrayElementsStartOpnd = IR::RegOpnd::New(TyMachPtr, func);
  15136. InsertAdd(false, arrayElementsStartOpnd, arrayHeadPtrOpnd, IR::IntConstOpnd::New(offsetof(Js::SparseArraySegment<Js::Var>, elements), TyUint8, func), callInstr);
  15137. this->m_lowererMD.LowerInlineSpreadArgOutLoop(callInstr, indexOpnd, arrayElementsStartOpnd);
  15138. // Resume if we have zero args
  15139. callInstr->InsertBefore(zeroArgsLabel);
  15140. // Lower call
  15141. callInstr->m_opcode = Js::OpCode::CallIDynamic;
  15142. callInstr = m_lowererMD.LowerCallIDynamic(callInstr, thisInstr, argsLengthOpnd, callFlags, insertBeforeInstrForCFG);
  15143. return callInstr;
  15144. }
  15145. IR::Instr *
  15146. Lowerer::LowerCallIDynamic(IR::Instr * callInstr, ushort callFlags)
  15147. {
  15148. if (!this->m_func->GetHasStackArgs())
  15149. {
  15150. throw Js::RejitException(RejitReason::InlineApplyDisabled);
  15151. }
  15152. IR::Instr * insertBeforeInstrForCFG = nullptr;
  15153. // Lower args and look for StartCall
  15154. IR::Instr * argInstr = callInstr;
  15155. IR::SymOpnd * argLinkOpnd = argInstr->UnlinkSrc2()->AsSymOpnd();
  15156. StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
  15157. AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
  15158. argLinkOpnd->Free(this->m_func);
  15159. argInstr = argLinkSym->m_instrDef;
  15160. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
  15161. IR::Instr* saveThisArgOutInstr = argInstr;
  15162. saveThisArgOutInstr->Unlink();
  15163. saveThisArgOutInstr->FreeDst();
  15164. argLinkOpnd = argInstr->UnlinkSrc2()->AsSymOpnd();
  15165. argLinkSym = argLinkOpnd->m_sym->AsStackSym();
  15166. AssertMsg(argLinkSym->IsArgSlotSym() && argLinkSym->m_isSingleDef, "Arg tree not single def...");
  15167. argLinkOpnd->Free(this->m_func);
  15168. argInstr = argLinkSym->m_instrDef;
  15169. Assert(argInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
  15170. IR::Opnd* argsLength = m_lowererMD.GenerateArgOutForStackArgs(callInstr, argInstr);
  15171. IR::RegOpnd* startCallDstOpnd = argInstr->UnlinkSrc2()->AsRegOpnd();
  15172. argLinkSym = startCallDstOpnd->m_sym->AsStackSym();
  15173. startCallDstOpnd->Free(this->m_func);
  15174. argInstr->Remove();// Remove ArgOut_A_FromStackArgs
  15175. argInstr = argLinkSym->m_instrDef;
  15176. Assert(argInstr->m_opcode == Js::OpCode::StartCall);
  15177. insertBeforeInstrForCFG = argInstr->GetNextRealInstr();
  15178. argInstr->Remove(); //Remove start call
  15179. return m_lowererMD.LowerCallIDynamic(callInstr, saveThisArgOutInstr, argsLength, callFlags, insertBeforeInstrForCFG);
  15180. }
  15181. //This is only for x64 & ARM.
  15182. IR::Opnd*
  15183. Lowerer::GenerateArgOutForStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
  15184. {
  15185. // s25.var = LdLen_A s4.var
  15186. // s26.var = Ld_A s25.var
  15187. // BrNeq_I4 $L3, s25.var,0
  15188. // $L2:
  15189. // BrNeq_I4 $L4, s25.var,1
  15190. // s25.var = SUB_I4 s25.var, 0x1
  15191. // s10.var = LdElemI_A [s4.var+s25.var].var
  15192. // ArgOut_A_Dynamic s10.var, s25.var
  15193. // Br $L2
  15194. // $L4:
  15195. // s10.var = LdElemI_A [s4.var].var
  15196. // ArgOut_A_Dynamic s10.var, 4
  15197. // $L3
  15198. #if defined(_M_IX86)
  15199. Assert(false);
  15200. #endif
  15201. Assert(stackArgsInstr->m_opcode == Js::OpCode::ArgOut_A_FromStackArgs);
  15202. Assert(callInstr->m_opcode == Js::OpCode::CallIDynamic);
  15203. this->m_lowererMD.GenerateFunctionObjectTest(callInstr, callInstr->GetSrc1()->AsRegOpnd(), false);
  15204. if (callInstr->m_func->IsInlinee())
  15205. {
  15206. return this->GenerateArgOutForInlineeStackArgs(callInstr, stackArgsInstr);
  15207. }
  15208. Func *func = callInstr->m_func;
  15209. IR::RegOpnd* stackArgs = stackArgsInstr->GetSrc1()->AsRegOpnd();
  15210. IR::RegOpnd* ldLenDstOpnd = IR::RegOpnd::New(TyUint32, func);
  15211. IR::Instr* ldLen = IR::Instr::New(Js::OpCode::LdLen_A, ldLenDstOpnd ,stackArgs, func);
  15212. ldLenDstOpnd->SetValueType(ValueType::GetTaggedInt()); /*LdLen_A works only on stack arguments*/
  15213. callInstr->InsertBefore(ldLen);
  15214. GenerateFastRealStackArgumentsLdLen(ldLen);
  15215. IR::Instr* saveLenInstr = IR::Instr::New(Js::OpCode::MOV, IR::RegOpnd::New(TyUint32, func), ldLenDstOpnd, func);
  15216. saveLenInstr->GetDst()->SetValueType(ValueType::GetTaggedInt());
  15217. callInstr->InsertBefore(saveLenInstr);
  15218. IR::LabelInstr* doneArgs = IR::LabelInstr::New(Js::OpCode::Label, func);
  15219. IR::Instr* branchDoneArgs = IR::BranchInstr::New(Js::OpCode::BrEq_I4, doneArgs, ldLenDstOpnd, IR::IntConstOpnd::New(0, TyInt8, func),func);
  15220. callInstr->InsertBefore(branchDoneArgs);
  15221. this->m_lowererMD.EmitInt4Instr(branchDoneArgs);
  15222. IR::LabelInstr* startLoop = IR::LabelInstr::New(Js::OpCode::Label, func);
  15223. IR::LabelInstr* endLoop = IR::LabelInstr::New(Js::OpCode::Label, func);
  15224. startLoop->m_isLoopTop = true;
  15225. Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, this->m_func);
  15226. startLoop->SetLoop(loop);
  15227. loop->SetLoopTopInstr(startLoop);
  15228. loop->regAlloc.liveOnBackEdgeSyms = AllocatorNew(JitArenaAllocator, func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
  15229. callInstr->InsertBefore(startLoop);
  15230. IR::Instr* branchOutOfLoop = IR::BranchInstr::New(Js::OpCode::BrEq_I4, endLoop, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyInt8, func),func);
  15231. callInstr->InsertBefore(branchOutOfLoop);
  15232. this->m_lowererMD.EmitInt4Instr(branchOutOfLoop);
  15233. IR::Instr* subInstr = IR::Instr::New(Js::OpCode::Sub_I4, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyInt8, func),func);
  15234. callInstr->InsertBefore(subInstr);
  15235. this->m_lowererMD.EmitInt4Instr(subInstr);
  15236. IR::IndirOpnd *nthArgument = IR::IndirOpnd::New(stackArgs, ldLenDstOpnd, TyMachReg, func);
  15237. IR::RegOpnd* ldElemDstOpnd = IR::RegOpnd::New(TyMachReg,func);
  15238. IR::Instr* ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
  15239. callInstr->InsertBefore(ldElem);
  15240. GenerateFastStackArgumentsLdElemI(ldElem);
  15241. IR::Instr* argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
  15242. argout->SetSrc1(ldElemDstOpnd);
  15243. argout->SetSrc2(ldLenDstOpnd);
  15244. callInstr->InsertBefore(argout);
  15245. this->m_lowererMD.LoadDynamicArgumentUsingLength(argout);
  15246. IR::BranchInstr *tailBranch = IR::BranchInstr::New(Js::OpCode::Br, startLoop, func);
  15247. callInstr->InsertBefore(tailBranch);
  15248. callInstr->InsertBefore(endLoop);
  15249. this->m_lowererMD.LowerUncondBranch(tailBranch);
  15250. loop->regAlloc.liveOnBackEdgeSyms->Set(ldLenDstOpnd->m_sym->m_id);
  15251. subInstr = IR::Instr::New(Js::OpCode::Sub_I4, ldLenDstOpnd, ldLenDstOpnd, IR::IntConstOpnd::New(1, TyInt8, func),func);
  15252. callInstr->InsertBefore(subInstr);
  15253. this->m_lowererMD.EmitInt4Instr(subInstr);
  15254. nthArgument = IR::IndirOpnd::New(stackArgs, ldLenDstOpnd, TyMachReg, func);
  15255. ldElemDstOpnd = IR::RegOpnd::New(TyMachReg,func);
  15256. ldElem = IR::Instr::New(Js::OpCode::LdElemI_A, ldElemDstOpnd, nthArgument, func);
  15257. callInstr->InsertBefore(ldElem);
  15258. GenerateFastStackArgumentsLdElemI(ldElem);
  15259. argout = IR::Instr::New(Js::OpCode::ArgOut_A_Dynamic, func);
  15260. argout->SetSrc1(ldElemDstOpnd);
  15261. callInstr->InsertBefore(argout);
  15262. this->m_lowererMD.LoadDynamicArgument(argout, 4); //4 to denote this is 4th register after this, callinfo & function object
  15263. callInstr->InsertBefore(doneArgs);
  15264. /*return the length which will be used for callInfo generations & stack allocation*/
  15265. return saveLenInstr->GetDst()->AsRegOpnd();
  15266. }
  15267. void
  15268. Lowerer::GenerateLoadStackArgumentByIndex(IR::Opnd *dst, IR::RegOpnd *indexOpnd, IR::Instr *instr, int32 offset, Func *func)
  15269. {
  15270. // Load argument set dst = [ebp + index].
  15271. IR::RegOpnd *ebpOpnd = IR::Opnd::CreateFramePointerOpnd(func);
  15272. IR::IndirOpnd *argIndirOpnd = nullptr;
  15273. // The stack looks like this:
  15274. // [new.target or FrameDisplay] <== EBP + formalParamOffset (4) + callInfo.Count - 1
  15275. // arguments[n] <== EBP + formalParamOffset (4) + n
  15276. // ...
  15277. // arguments[1] <== EBP + formalParamOffset (4) + 2
  15278. // arguments[0] <== EBP + formalParamOffset (4) + 1
  15279. // this or new.target <== EBP + formalParamOffset (4)
  15280. // callinfo
  15281. // function object
  15282. // return addr
  15283. // EBP-> EBP chain
  15284. //actual arguments offset is LowererMD::GetFormalParamOffset() + 1 (this)
  15285. int32 actualOffset = GetFormalParamOffset() + offset;
  15286. Assert(GetFormalParamOffset() == 4);
  15287. const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
  15288. argIndirOpnd = IR::IndirOpnd::New(ebpOpnd, indexOpnd, indirScale, TyMachReg, this->m_func);
  15289. argIndirOpnd->SetOffset(actualOffset << indirScale);
  15290. LowererMD::CreateAssign(dst, argIndirOpnd, instr);
  15291. }
  15292. //This function assumes there is stackargs bailout and index is always on the range.
  15293. bool
  15294. Lowerer::GenerateFastStackArgumentsLdElemI(IR::Instr* ldElem)
  15295. {
  15296. // MOV dst, ebp [(valueOpnd + 5) *4] // 5 for the stack layout
  15297. //
  15298. IR::IndirOpnd *indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
  15299. // Now load the index and check if it is an integer.
  15300. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  15301. Assert (indexOpnd && indexOpnd->IsTaggedInt());
  15302. if(ldElem->m_func->IsInlinee())
  15303. {
  15304. IR::IndirOpnd *argIndirOpnd = GetArgsIndirOpndForInlinee(ldElem, indexOpnd);
  15305. LowererMD::CreateAssign(ldElem->GetDst(), argIndirOpnd, ldElem);
  15306. }
  15307. else
  15308. {
  15309. GenerateLoadStackArgumentByIndex(ldElem->GetDst(), indexOpnd, ldElem, indirOpnd->GetOffset() + 1, m_func); // +1 to offset 'this'
  15310. }
  15311. ldElem->Remove();
  15312. return false;
  15313. }
  15314. IR::IndirOpnd*
  15315. Lowerer::GetArgsIndirOpndForInlinee(IR::Instr* ldElem, IR::Opnd* valueOpnd)
  15316. {
  15317. Assert(ldElem->m_func->IsInlinee());
  15318. IR::IndirOpnd* argIndirOpnd = nullptr;
  15319. // Address of argument after 'this'
  15320. const auto firstRealArgStackSym = ldElem->m_func->GetInlineeArgvSlotOpnd()->m_sym->AsStackSym();
  15321. this->m_func->SetArgOffset(firstRealArgStackSym, firstRealArgStackSym->m_offset + MachPtr); //Start after this pointer
  15322. IR::SymOpnd *firstArg = IR::SymOpnd::New(firstRealArgStackSym, TyMachPtr, ldElem->m_func);
  15323. const IR::AutoReuseOpnd autoReuseFirstArg(firstArg, m_func);
  15324. IR::RegOpnd *const baseOpnd = IR::RegOpnd::New(TyMachReg, ldElem->m_func);
  15325. const IR::AutoReuseOpnd autoReuseBaseOpnd(baseOpnd, m_func);
  15326. InsertLea(baseOpnd, firstArg, ldElem);
  15327. if (valueOpnd->IsIntConstOpnd())
  15328. {
  15329. IntConstType offset = valueOpnd->AsIntConstOpnd()->GetValue() * MachPtr;
  15330. // TODO: Assert(Math::FitsInDWord(offset));
  15331. argIndirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, ldElem->m_func);
  15332. }
  15333. else
  15334. {
  15335. Assert(valueOpnd->IsRegOpnd());
  15336. const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
  15337. argIndirOpnd = IR::IndirOpnd::New(baseOpnd, valueOpnd->AsRegOpnd(), indirScale, TyMachReg, ldElem->m_func);
  15338. }
  15339. return argIndirOpnd;
  15340. }
  15341. IR::IndirOpnd*
  15342. Lowerer::GetArgsIndirOpndForTopFunction(IR::Instr* ldElem, IR::Opnd* valueOpnd)
  15343. {
  15344. // Load argument set dst = [ebp + index] (or grab from the generator object if m_func is a generator function).
  15345. IR::RegOpnd *baseOpnd = m_func->GetJnFunction()->IsGenerator() ? LoadGeneratorArgsPtr(ldElem) : IR::Opnd::CreateFramePointerOpnd(m_func);
  15346. IR::IndirOpnd* argIndirOpnd = nullptr;
  15347. // The stack looks like this:
  15348. // ...
  15349. // arguments[1]
  15350. // arguments[0]
  15351. // this
  15352. // callinfo
  15353. // function object
  15354. // return addr
  15355. // EBP-> EBP chain
  15356. //actual arguments offset is LowererMD::GetFormalParamOffset() + 1 (this)
  15357. uint16 actualOffset = m_func->GetJnFunction()->IsGenerator() ? 1 : GetFormalParamOffset() + 1; //5
  15358. Assert(actualOffset == 5 || m_func->GetJnFunction()->IsGenerator());
  15359. if (valueOpnd->IsIntConstOpnd())
  15360. {
  15361. IntConstType offset = (valueOpnd->AsIntConstOpnd()->GetValue() + actualOffset) * MachPtr;
  15362. // TODO: Assert(Math::FitsInDWord(offset));
  15363. argIndirOpnd = IR::IndirOpnd::New(baseOpnd, (int32)offset, TyMachReg, this->m_func);
  15364. }
  15365. else
  15366. {
  15367. const BYTE indirScale = this->m_lowererMD.GetDefaultIndirScale();
  15368. argIndirOpnd = IR::IndirOpnd::New(baseOpnd->AsRegOpnd(), valueOpnd->AsRegOpnd(), indirScale, TyMachReg, this->m_func);
  15369. // Need to offset valueOpnd by 5. Instead of changing valueOpnd, we can just add an offset to the indir. Changing
  15370. // valueOpnd requires creation of a temp sym (if it's not already a temp) so that the value of the sym that
  15371. // valueOpnd represents is not changed.
  15372. argIndirOpnd->SetOffset(actualOffset << indirScale);
  15373. }
  15374. return argIndirOpnd;
  15375. }
  15376. void
  15377. Lowerer::GenerateCheckForArgumentsLength(IR::Instr* ldElem, IR::LabelInstr* labelCreateHeapArgs, IR::Opnd* actualParamOpnd, IR::Opnd* valueOpnd, Js::OpCode opcode)
  15378. {
  15379. // Check if index < nr_actuals.
  15380. InsertCompare(actualParamOpnd, valueOpnd, ldElem);
  15381. // Jump to helper if index >= nr_actuals.
  15382. // Do an unsigned check here so that a negative index will also fail.
  15383. // (GenerateLdValueFromCheckedIndexOpnd does not guarantee positive index on x86.)
  15384. InsertBranch(opcode, true, labelCreateHeapArgs, ldElem);
  15385. }
  15386. bool
  15387. Lowerer::GenerateFastArgumentsLdElemI(IR::Instr* ldElem, IR::LabelInstr * labelHelper, IR::LabelInstr *labelFallThru)
  15388. {
  15389. // TEST argsSlot, argsSlot
  15390. // JNE $helper // There is an arguments object created jump to helper.
  15391. // ---GenerateSmIntTest
  15392. // ---GenerateLdValueFromCheckedIndexOpnd
  15393. // ---LoadInputParamCount
  15394. // CMP actualParamOpnd, valueOpnd //Compare between the actual count & the index count (say i in arguments[i])
  15395. // JLE $labelCreateHeapArgs
  15396. // MOV dst, ebp [(valueOpnd + 5) *4] // 5 for the stack layout
  15397. // JMP $fallthrough
  15398. //
  15399. //labelCreateHeapArgs:
  15400. // ---LoadHeapArguments
  15401. Assert(ldElem->DoStackArgsOpt(this->m_func));
  15402. IR::IndirOpnd *indirOpnd = ldElem->GetSrc1()->AsIndirOpnd();
  15403. bool isInlinee = ldElem->m_func->IsInlinee();
  15404. Func *func = ldElem->m_func;
  15405. // First check the slot on the frame to see if there is a heap arguments object.
  15406. IR::Opnd *cachedArgsObjectSlotOpnd = isInlinee? ldElem->m_func->GetInlineeArgumentsObjectSlotOpnd() : this->m_lowererMD.CreateStackArgumentsSlotOpnd() ;
  15407. // Re-use the base pointer here so that we're loading the current heap args into the reg we will pass
  15408. // to the helper if necessary.
  15409. IR::RegOpnd *argsObjRegOpnd = indirOpnd->GetBaseOpnd();
  15410. LowererMD::CreateAssign(argsObjRegOpnd, cachedArgsObjectSlotOpnd, ldElem);
  15411. InsertTest(argsObjRegOpnd, argsObjRegOpnd, ldElem);
  15412. IR::LabelInstr *labelCreateHeapArgs = IR::LabelInstr::New(Js::OpCode::Label, func, true);
  15413. InsertBranch(Js::OpCode::BrNeq_A, labelHelper, ldElem);
  15414. // Now load the index and check if it is an integer.
  15415. bool emittedFastPath = false;
  15416. bool isNotInt = false;
  15417. IntConstType value = 0;
  15418. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  15419. IR::Opnd *valueOpnd = nullptr;
  15420. IR::Opnd *actualParamOpnd = nullptr;
  15421. bool hasIntConstIndex = indirOpnd->TryGetIntConstIndexValue(true, &value, &isNotInt);
  15422. if (isInlinee && hasIntConstIndex && value >= (ldElem->m_func->actualCount - 1))
  15423. {
  15424. //Outside the range of actuals, skip
  15425. }
  15426. else if (labelFallThru != nullptr && !(hasIntConstIndex && value < 0)) //if index is not a negative int constant
  15427. {
  15428. if (isInlinee)
  15429. {
  15430. actualParamOpnd = IR::IntConstOpnd::New(ldElem->m_func->actualCount - 1, TyInt32, func);
  15431. }
  15432. else
  15433. {
  15434. // Load actuals count, LoadHeapArguments will reuse the generated instructions here
  15435. IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldElem, -1 /* don't include 'this' while counting actuals. */);
  15436. actualParamOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
  15437. }
  15438. if (hasIntConstIndex)
  15439. {
  15440. //Constant index
  15441. valueOpnd = IR::IntConstOpnd::New(value, TyInt32, func);
  15442. }
  15443. else
  15444. {
  15445. //Load valueOpnd from the index
  15446. valueOpnd =
  15447. m_lowererMD.LoadNonnegativeIndex(
  15448. indexOpnd,
  15449. (
  15450. #if INT32VAR
  15451. indexOpnd->GetType() == TyUint32
  15452. #else
  15453. // On 32-bit platforms, skip the negative check since for now, the unsigned upper bound check covers it
  15454. true
  15455. #endif
  15456. ),
  15457. labelCreateHeapArgs,
  15458. labelCreateHeapArgs,
  15459. ldElem);
  15460. }
  15461. if (isInlinee)
  15462. {
  15463. if (!hasIntConstIndex)
  15464. {
  15465. //Runtime check if to make sure length is within the arguments.length range.
  15466. GenerateCheckForArgumentsLength(ldElem, labelCreateHeapArgs, valueOpnd, actualParamOpnd, Js::OpCode::BrGe_A);
  15467. }
  15468. }
  15469. else
  15470. {
  15471. GenerateCheckForArgumentsLength(ldElem, labelCreateHeapArgs, actualParamOpnd, valueOpnd, Js::OpCode::BrLe_A);
  15472. }
  15473. IR::Opnd *argIndirOpnd = nullptr;
  15474. if (isInlinee)
  15475. {
  15476. argIndirOpnd = GetArgsIndirOpndForInlinee(ldElem, valueOpnd);
  15477. }
  15478. else
  15479. {
  15480. argIndirOpnd = GetArgsIndirOpndForTopFunction(ldElem, valueOpnd);
  15481. }
  15482. LowererMD::CreateAssign(ldElem->GetDst(), argIndirOpnd, ldElem);
  15483. // JMP $done
  15484. InsertBranch(Js::OpCode::Br, labelFallThru, ldElem);
  15485. // $labelCreateHeapArgs:
  15486. ldElem->InsertBefore(labelCreateHeapArgs);
  15487. emittedFastPath = true;
  15488. }
  15489. IR::Opnd *nullOpnd = this->LoadLibraryValueOpnd(ldElem, LibraryValue::ValueNull);
  15490. IR::Instr *instrArgs = IR::Instr::New(Js::OpCode::LdHeapArguments,
  15491. indirOpnd->GetBaseOpnd(),
  15492. nullOpnd,
  15493. nullOpnd,
  15494. func);
  15495. ldElem->InsertBefore(instrArgs);
  15496. this->m_lowererMD.LoadHeapArguments(instrArgs, true, actualParamOpnd);
  15497. return emittedFastPath;
  15498. }
  15499. bool
  15500. Lowerer::GenerateFastRealStackArgumentsLdLen(IR::Instr *ldLen)
  15501. {
  15502. if(ldLen->m_func->IsInlinee())
  15503. {
  15504. //Get the length of the arguments
  15505. LowererMD::CreateAssign(ldLen->GetDst(),
  15506. IR::IntConstOpnd::New(ldLen->m_func->actualCount - 1, TyUint32, ldLen->m_func),
  15507. ldLen);
  15508. }
  15509. else
  15510. {
  15511. IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldLen, -1);
  15512. IR::RegOpnd *actualCountOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
  15513. LowererMD::CreateAssign(ldLen->GetDst(), actualCountOpnd, ldLen);
  15514. }
  15515. ldLen->Remove();
  15516. return false;
  15517. }
  15518. bool
  15519. Lowerer::GenerateFastArgumentsLdLen(IR::Instr *ldLen, IR::LabelInstr* labelHelper, IR::LabelInstr* labelFallThru)
  15520. {
  15521. // TEST argslot, argslot //Test if the arguments slot is zero
  15522. // JNE $helper
  15523. // actualCountOpnd <-LoadInputParamCount fastpath
  15524. // SHL actualCountOpnd, actualCountOpnd, 1 // Left shift for tagging
  15525. // INC actualCountOpnd // Tagging
  15526. // MOV dst, actualCountOpnd
  15527. // JMP $fallthrough
  15528. //$helper:
  15529. Assert(ldLen->DoStackArgsOpt(this->m_func));
  15530. if(ldLen->m_func->IsInlinee())
  15531. {
  15532. IR::Opnd *cachedArgsObjectSlotOpnd = ldLen->m_func->GetInlineeArgumentsObjectSlotOpnd();
  15533. // Re-use the LdLen_A source here so that we're loading the current heap args into the reg we will pass
  15534. // to the helper if necessary.
  15535. IR::RegOpnd *argsObjectRegOpnd = ldLen->GetSrc1()->AsRegOpnd();
  15536. LowererMD::CreateAssign(argsObjectRegOpnd, cachedArgsObjectSlotOpnd, ldLen);
  15537. InsertTest(argsObjectRegOpnd, argsObjectRegOpnd, ldLen);
  15538. InsertBranch(Js::OpCode::BrNeq_A, labelHelper, ldLen);
  15539. //Get the length of the arguments
  15540. LowererMD::CreateAssign(ldLen->GetDst(),
  15541. IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(ldLen->m_func->actualCount - 1), IR::AddrOpndKindConstantVar, ldLen->m_func), // -1 to exclude this pointer
  15542. ldLen);
  15543. }
  15544. else
  15545. {
  15546. IR::Opnd *cachedArgsObjectSlotOpnd = this->m_lowererMD.CreateStackArgumentsSlotOpnd();
  15547. // Re-use the LdLen_A source here so that we're loading the current heap args into the reg we will pass
  15548. // to the helper if necessary.
  15549. IR::RegOpnd *argsObjectRegOpnd = ldLen->GetSrc1()->AsRegOpnd();
  15550. LowererMD::CreateAssign(argsObjectRegOpnd, cachedArgsObjectSlotOpnd, ldLen);
  15551. InsertTest(argsObjectRegOpnd, argsObjectRegOpnd, ldLen);
  15552. InsertBranch(Js::OpCode::BrNeq_A, labelHelper, ldLen);
  15553. IR::Instr *loadInputParamCountInstr = this->m_lowererMD.LoadInputParamCount(ldLen, -1);
  15554. IR::RegOpnd *actualCountOpnd = loadInputParamCountInstr->GetDst()->AsRegOpnd();
  15555. this->m_lowererMD.GenerateInt32ToVarConversion(actualCountOpnd, ldLen);
  15556. LowererMD::CreateAssign(ldLen->GetDst(), actualCountOpnd, ldLen);
  15557. }
  15558. InsertBranch(Js::OpCode::Br, labelFallThru, ldLen);
  15559. return true;
  15560. }
  15561. IR::RegOpnd*
  15562. Lowerer::GenerateFunctionTypeFromFixedFunctionObject(IR::Instr *insertInstrPt, IR::Opnd* functionObjOpnd)
  15563. {
  15564. IR::RegOpnd * functionTypeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
  15565. IR::Opnd *functionTypeOpnd = nullptr;
  15566. if(functionObjOpnd->IsAddrOpnd())
  15567. {
  15568. IR::AddrOpnd* functionObjAddrOpnd = functionObjOpnd->AsAddrOpnd();
  15569. // functionTypeRegOpnd = MOV [fixed function address + type offset]
  15570. functionObjAddrOpnd->m_address;
  15571. functionTypeOpnd = IR::MemRefOpnd::New((void *)((intptr_t)functionObjAddrOpnd->m_address + Js::RecyclableObject::GetOffsetOfType()), TyMachPtr, this->m_func,
  15572. IR::AddrOpndKindDynamicObjectTypeRef);
  15573. }
  15574. else
  15575. {
  15576. functionTypeOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, this->m_func);
  15577. }
  15578. LowererMD::CreateAssign(functionTypeRegOpnd, functionTypeOpnd, insertInstrPt);
  15579. return functionTypeRegOpnd;
  15580. }
  15581. void
  15582. Lowerer::FinalLower()
  15583. {
  15584. this->m_lowererMD.FinalLower();
  15585. // ensure that the StartLabel and EndLabel are inserted
  15586. // before the prolog and after the epilog respectively
  15587. IR::LabelInstr * startLabel = m_func->GetFuncStartLabel();
  15588. if (startLabel != nullptr)
  15589. {
  15590. m_func->m_headInstr->InsertAfter(startLabel);
  15591. }
  15592. IR::LabelInstr * endLabel = m_func->GetFuncEndLabel();
  15593. if (endLabel != nullptr)
  15594. {
  15595. m_func->m_tailInstr->GetPrevRealInstr()->InsertBefore(endLabel);
  15596. }
  15597. }
void
Lowerer::EHBailoutPatchUp()
{
    // Post-layout pass that patches bailouts inside exception-handling (try/catch)
    // regions:
    // 1. Insert return thunks for all the regions.
    // 2. Set the hasBailedOut bit to true on all bailout paths in EH regions.
    // 3. Insert code after every bailout in a try or catch region to save the return value on the stack, and jump to the return thunk (See Region.h) of that region.
    // 4. Insert code right before the epilog, to restore the return value (saved in 2.) from a bailout into eax.
    Assert(this->m_func->isPostLayout);

    // Shared label the saved-return-value restore sequence hangs off; all EH
    // bailout paths funnel through it on the way to the epilog.
    IR::LabelInstr * restoreReturnValueFromBailoutLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    // Find the label that starts the epilog, creating one just before the exit
    // instruction if none exists, so the restore code has a jump target.
    IR::LabelInstr * epilogLabel;
    IR::Instr * exitPrevInstr = this->m_func->m_exitInstr->GetPrevRealInstrOrLabel();
    if (exitPrevInstr->IsLabelInstr())
    {
        epilogLabel = exitPrevInstr->AsLabelInstr();
    }
    else
    {
        epilogLabel = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        this->m_func->m_exitInstr->InsertBefore(epilogLabel);
    }

    IR::Instr * tmpInstr = nullptr;
    bool restoreReturnFromBailoutEmitted = false;
    FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, this->m_func)
    {
        // Labels carry region membership; update the region we are walking through.
        if (instr->IsLabelInstr())
        {
            this->currentRegion = instr->AsLabelInstr()->GetRegion();
        }
        // Consider (radua): Assert(this->currentRegion) here?
        if (this->currentRegion)
        {
            RegionType currentRegionType = this->currentRegion->GetType();
            if (currentRegionType == RegionTypeTry || currentRegionType == RegionTypeCatch)
            {
                this->InsertReturnThunkForRegion(this->currentRegion, restoreReturnValueFromBailoutLabel);
                if (instr->HasBailOutInfo())
                {
                    // Mark the bailout, restore the stack, then save the return
                    // value and jump to the region's return thunk.
                    this->SetHasBailedOut(instr);
                    tmpInstr = this->EmitEHBailoutStackRestore(instr);
                    this->EmitSaveEHBailoutReturnValueAndJumpToRetThunk(tmpInstr);
                    // The restore-into-eax sequence is emitted only once; every
                    // subsequent bailout reuses it via the shared label.
                    if (!restoreReturnFromBailoutEmitted)
                    {
                        this->EmitRestoreReturnValueFromEHBailout(restoreReturnValueFromBailoutLabel, epilogLabel);
                        restoreReturnFromBailoutEmitted = true;
                    }
                }
            }
        }
    }
    NEXT_INSTR_IN_FUNC_EDITING
}
bool
Lowerer::GenerateFastLdFld(IR::Instr * const instrLdFld, IR::JnHelperMethod helperMethod, IR::JnHelperMethod polymorphicHelperMethod,
    IR::LabelInstr ** labelBailOut, IR::RegOpnd* typeOpnd, bool* pIsHelper, IR::LabelInstr** pLabelHelper)
{
    // Emits the inline-cache fast path(s) for a field load (LdFld and friends).
    // Returns false: the original instruction is not lowered here; the caller
    // inserts the helper label and the helper call. *pIsHelper is set to true on
    // exit, and *pLabelHelper receives the (possibly newly created) helper label.
    //
    // Generates:
    //
    // r1 = object->type
    // if (r1 is taggedInt) goto helper
    // Load inline cache
    // if monomorphic
    // r2 = address of the monomorphic inline cache
    // if polymorphic
    // r2 = address of the polymorphic inline cache array
    // r3 = (type >> PIC shift amount) & (PIC size - 1)
    // r2 = r2 + r3
    // Try load property using proto cache (if protoFirst)
    // Try load property using local cache
    // Try loading property using proto cache (if !protoFirst)
    // Try loading property using flags cache
    //
    // Loading property using local cache:
    // if (r1 == r2->u.local.type)
    // result = load inline slot r2->u.local.slotIndex from r1
    // goto fallthru
    // if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.local.type)
    // result = load aux slot r2->u.local.slotIndex from r1
    // goto fallthru
    //
    // Loading property using proto cache:
    // if (r1 == r2->u.proto.type)
    // r3 = r2->u.proto.prototypeObject
    // result = load inline slot r2->u.proto.slotIndex from r3
    // goto fallthru
    // if (r1 | InlineCacheAuxSlotTypeTag) == r2.u.proto.type)
    // r3 = r2->u.proto.prototypeObject
    // result = load aux slot r2->u.proto.slotIndex from r3
    // goto fallthru
    //
    // Loading property using flags cache:
    // if (r2->u.accessor.flags & (Js::InlineCacheGetterFlag | Js::InlineCacheSetterFlag) == 0)
    // if (r1 == r2->u.accessor.type)
    // result = load inline slot r2->u.accessor.slotIndex from r1
    // goto fallthru
    // if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.accessor.type)
    // result = load aux slot r2->u.accessor.slotIndex from r1
    // goto fallthru
    //
    // Loading an inline slot:
    // result = [r1 + slotIndex * sizeof(Var)]
    //
    // Loading an aux slot:
    // slotArray = r1->auxSlots
    // result = [slotArray + slotIndex * sizeof(Var)]
    //
    // We only emit the code block for a type of cache (local/proto/flags) if the profile data
    // indicates that type of cache was used to load the property in the past.
    // We don't emit the type check with aux slot tag if the profile data indicates that we didn't
    // load the property from an aux slot before.
    // We don't emit the type check without an aux slot tag if the profile data indicates that we didn't
    // load the property from an inline slot before.
    IR::Opnd * opndSrc = instrLdFld->GetSrc1();
    AssertMsg(opndSrc->IsSymOpnd() && opndSrc->AsSymOpnd()->IsPropertySymOpnd() && opndSrc->AsSymOpnd()->m_sym->IsPropertySym(), "Expected PropertySym as src of LdFld");
    Assert(!instrLdFld->DoStackArgsOpt(this->m_func));

    IR::PropertySymOpnd * propertySymOpnd = opndSrc->AsPropertySymOpnd();
    PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();

    PHASE_PRINT_TESTTRACE(
        Js::ObjTypeSpecPhase,
        this->m_func,
        L"Field load: %s, property: %s, func: %s, cache ID: %d, cloned cache: false\n",
        Js::OpCodeUtil::GetOpCodeName(instrLdFld->m_opcode),
        this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(),
        this->m_func->GetJnFunction()->GetDisplayName(),
        propertySymOpnd->m_inlineCacheIndex);

    Assert(pIsHelper != nullptr);
    bool& isHelper = *pIsHelper;

    Assert(pLabelHelper != nullptr);
    IR::LabelInstr*& labelHelper = *pLabelHelper;

    // Decide which cache checks to emit. Method loads go through the proto cache
    // first; everything else tries the local cache first.
    bool doLocal = true;
    bool doProto = instrLdFld->m_opcode == Js::OpCode::LdMethodFld
        || instrLdFld->m_opcode == Js::OpCode::LdRootMethodFld
        || instrLdFld->m_opcode == Js::OpCode::ScopedLdMethodFld;
    bool doProtoFirst = doProto;
    bool doInlineSlots = true;
    bool doAuxSlots = true;
    // Narrow the emitted checks using the profile data, when available.
    if (!PHASE_OFF(Js::ProfileBasedFldFastPathPhase, this->m_func) && instrLdFld->IsProfiledInstr())
    {
        IR::ProfiledInstr * profiledInstrLdFld = instrLdFld->AsProfiledInstr();
        if (profiledInstrLdFld->u.FldInfo().flags != Js::FldInfo_NoInfo)
        {
            doProto = !!(profiledInstrLdFld->u.FldInfo().flags & Js::FldInfo_FromProto);
            doLocal = !!(profiledInstrLdFld->u.FldInfo().flags & Js::FldInfo_FromLocal);

            if ((profiledInstrLdFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromInlineSlots)
            {
                // If the inline slots flag is set and the aux slots flag is not, only generate the inline slots check
                doAuxSlots = false;
            }
            else if ((profiledInstrLdFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromAuxSlots)
            {
                // If the aux slots flag is set and the inline slots flag is not, only generate the aux slots check
                doInlineSlots = false;
            }
        }
        else if (!profiledInstrLdFld->u.FldInfo().valueType.IsUninitialized())
        {
            // We have value type info about the field but no flags. This means we shouldn't generate any
            // fast paths for this field load.
            doLocal = false;
            doProto = false;
        }
    }

    if (!doLocal && !doProto)
    {
        return false;
    }

    IR::LabelInstr * labelFallThru = instrLdFld->GetOrCreateContinueLabel();

    if (labelHelper == nullptr)
    {
        labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
    }

    IR::RegOpnd * opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
    bool usePolymorphicInlineCache = !!propertySymOpnd->m_runtimePolymorphicInlineCache;

    // Load the (mono- or polymorphic) inline cache pointer into a register.
    IR::RegOpnd * opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
    if (usePolymorphicInlineCache)
    {
        LowererMD::CreateAssign(opndInlineCache, IR::AddrOpnd::New(propertySymOpnd->m_runtimePolymorphicInlineCache->GetInlineCaches(), IR::AddrOpndKindDynamicInlineCache, this->m_func, true), instrLdFld);
    }
    else
    {
        LowererMD::CreateAssign(opndInlineCache, this->LoadRuntimeInlineCacheOpnd(instrLdFld, propertySymOpnd, isHelper), instrLdFld);
    }
    // Load the object's type (with a tagged-int check) unless the caller already did.
    if (typeOpnd == nullptr)
    {
        typeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
        GenerateObjectTestAndTypeLoad(instrLdFld, opndBase, typeOpnd, labelHelper);
    }
    if (usePolymorphicInlineCache)
    {
        // Index into the polymorphic cache array by hashing the type pointer.
        LowererMD::GenerateLoadPolymorphicInlineCacheSlot(instrLdFld, opndInlineCache, typeOpnd, propertySymOpnd->m_runtimePolymorphicInlineCache->GetSize());
    }

    // Each emitted check falls through to labelNext on a cache miss; the last
    // check's branch is re-pointed at labelHelper below.
    IR::LabelInstr * labelNext = nullptr;
    IR::Opnd * opndDst = instrLdFld->GetDst();
    IR::RegOpnd * opndTaggedType = nullptr;
    IR::BranchInstr * labelNextBranchToPatch = nullptr;

    if (doProto && doProtoFirst)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            // The aux-slot checks compare against the type with the aux slot tag
            // OR'ed in; compute that tagged type once and reuse it.
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }
    if (doLocal)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateLocalInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromLocalInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateLocalInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromLocalInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }
    if (doProto && !doProtoFirst)
    {
        if (doInlineSlots)
        {
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, typeOpnd, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, true);
            instrLdFld->InsertBefore(labelNext);
        }
        if (doAuxSlots)
        {
            if (opndTaggedType == nullptr)
            {
                opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
                LowererMD::GenerateLoadTaggedType(instrLdFld, typeOpnd, opndTaggedType);
            }
            labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
            labelNextBranchToPatch = LowererMD::GenerateProtoInlineCacheCheck(instrLdFld, opndTaggedType, opndInlineCache, labelNext);
            LowererMD::GenerateLdFldFromProtoInlineCache(instrLdFld, opndBase, opndDst, opndInlineCache, labelFallThru, false);
            instrLdFld->InsertBefore(labelNext);
        }
    }

    // Re-point the final cache-miss branch at the helper and drop the now-unused
    // last labelNext.
    Assert(labelNextBranchToPatch);
    labelNextBranchToPatch->SetTarget(labelHelper);
    labelNext->Remove();

    // $helper:
    // dst = CALL Helper(inlineCache, base, field, scriptContext)
    // $fallthru:
    isHelper = true;

    // Return false to indicate the original instruction was not lowered. Caller will insert the helper label.
    return false;
}
  15868. void
  15869. Lowerer::GenerateAuxSlotAdjustmentRequiredCheck(
  15870. IR::Instr * instrToInsertBefore,
  15871. IR::RegOpnd * opndInlineCache,
  15872. IR::LabelInstr * labelHelper)
  15873. {
  15874. // regSlotCap = MOV [&(inlineCache->u.local.rawUInt16)] // sized to 16 bits
  15875. IR::RegOpnd * regSlotCap = IR::RegOpnd::New(TyMachReg, instrToInsertBefore->m_func);
  15876. IR::IndirOpnd * memSlotCap = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.rawUInt16), TyUint16, instrToInsertBefore->m_func);
  15877. InsertMove(regSlotCap, memSlotCap, instrToInsertBefore);
  15878. // SAR regSlotCap, Js::InlineCache::CacheLayoutSelectorBitCount
  15879. IR::IntConstOpnd * constSelectorBitCount = IR::IntConstOpnd::New(Js::InlineCache::CacheLayoutSelectorBitCount, TyUint16, instrToInsertBefore->m_func, /* dontEncode = */ true);
  15880. InsertShiftBranch(Js::OpCode::Shr_A, regSlotCap, regSlotCap, constSelectorBitCount, Js::OpCode::BrNeq_A, true, labelHelper, instrToInsertBefore);
  15881. }
  15882. void
  15883. Lowerer::GenerateSetObjectTypeFromInlineCache(
  15884. IR::Instr * instrToInsertBefore,
  15885. IR::RegOpnd * opndBase,
  15886. IR::RegOpnd * opndInlineCache,
  15887. bool isTypeTagged)
  15888. {
  15889. // regNewType = MOV [&(inlineCache->u.local.type)]
  15890. IR::RegOpnd * regNewType = IR::RegOpnd::New(TyMachReg, instrToInsertBefore->m_func);
  15891. IR::IndirOpnd * memNewType = IR::IndirOpnd::New(opndInlineCache, (int32)offsetof(Js::InlineCache, u.local.type), TyMachReg, instrToInsertBefore->m_func);
  15892. InsertMove(regNewType, memNewType, instrToInsertBefore);
  15893. // AND regNewType, ~InlineCacheAuxSlotTypeTag
  15894. if (isTypeTagged)
  15895. {
  15896. // On 64-bit platforms IntConstOpnd isn't big enough to hold TyMachReg values.
  15897. IR::AddrOpnd * constTypeTagComplement = IR::AddrOpnd::New((Js::Var)~InlineCacheAuxSlotTypeTag, IR::AddrOpndKindConstant, instrToInsertBefore->m_func, /* dontEncode = */ true);
  15898. InsertAnd(regNewType, regNewType, constTypeTagComplement, instrToInsertBefore);
  15899. }
  15900. // MOV base->type, regNewType
  15901. IR::IndirOpnd * memObjType = IR::IndirOpnd::New(opndBase, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, instrToInsertBefore->m_func);
  15902. InsertMove(memObjType, regNewType, instrToInsertBefore);
  15903. }
  15904. bool
  15905. Lowerer::GenerateFastStFld(IR::Instr * const instrStFld, IR::JnHelperMethod helperMethod, IR::JnHelperMethod polymorphicHelperMethod, IR::LabelInstr ** labelBailOut, IR::RegOpnd* typeOpnd,
  15906. bool* pIsHelper, IR::LabelInstr** pLabelHelper, bool withPutFlags, Js::PropertyOperationFlags flags)
  15907. {
  15908. // Generates:
  15909. //
  15910. // r1 = object->type
  15911. // if (r1 is taggedInt) goto helper
  15912. // Load inline cache
  15913. // if monomorphic
  15914. // r2 = address of the monomorphic inline cache
  15915. // if polymorphic
  15916. // r2 = address of the polymorphic inline cache array
  15917. // r3 = (type >> PIC shift amount) & (PIC size - 1)
  15918. // r2 = r2 + r3
  15919. // Try store property using local cache
  15920. //
  15921. // Loading property using local cache:
  15922. // if (r1 == r2->u.local.type)
  15923. // store value to inline slot r2->u.local.slotIndex on r1
  15924. // goto fallthru
  15925. // if ((r1 | InlineCacheAuxSlotTypeTag) == r2->u.local.type)
  15926. // store value to aux slot r2->u.local.slotIndex on r1
  15927. // goto fallthru
  15928. //
  15929. // Storing to an inline slot:
  15930. // [r1 + slotIndex * sizeof(Var)] = value
  15931. //
  15932. // Storing to an aux slot:
  15933. // slotArray = r1->auxSlots
  15934. // [slotArray + slotIndex * sizeof(Var)] = value
  15935. //
  15936. // We don't emit the type check with aux slot tag if the profile data indicates that we didn't
  15937. // store the property to an aux slot before.
  15938. // We don't emit the type check without an aux slot tag if the profile data indicates that we didn't
  15939. // store the property to an inline slot before.
  15940. IR::Opnd * opndSrc = instrStFld->GetSrc1();
  15941. IR::Opnd * opndDst = instrStFld->GetDst();
  15942. AssertMsg(opndDst->IsSymOpnd() && opndDst->AsSymOpnd()->IsPropertySymOpnd() && opndDst->AsSymOpnd()->m_sym->IsPropertySym(), "Expected PropertySym as dst of StFld");
  15943. IR::PropertySymOpnd * propertySymOpnd = opndDst->AsPropertySymOpnd();
  15944. PropertySym * propertySym = propertySymOpnd->m_sym->AsPropertySym();
  15945. PHASE_PRINT_TESTTRACE(
  15946. Js::ObjTypeSpecPhase,
  15947. this->m_func,
  15948. L"Field store: %s, property: %s, func: %s, cache ID: %d, cloned cache: false\n",
  15949. Js::OpCodeUtil::GetOpCodeName(instrStFld->m_opcode),
  15950. this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(),
  15951. this->m_func->GetJnFunction()->GetDisplayName(),
  15952. propertySymOpnd->m_inlineCacheIndex);
  15953. Assert(pIsHelper != nullptr);
  15954. bool& isHelper = *pIsHelper;
  15955. Assert(pLabelHelper != nullptr);
  15956. IR::LabelInstr*& labelHelper = *pLabelHelper;
  15957. bool doStore = true;
  15958. bool doAdd = false;
  15959. bool doInlineSlots = true;
  15960. bool doAuxSlots = true;
  15961. if (!PHASE_OFF(Js::ProfileBasedFldFastPathPhase, this->m_func) && instrStFld->IsProfiledInstr())
  15962. {
  15963. IR::ProfiledInstr * profiledInstrStFld = instrStFld->AsProfiledInstr();
  15964. if (profiledInstrStFld->u.FldInfo().flags != Js::FldInfo_NoInfo)
  15965. {
  15966. if (!(profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty)))
  15967. {
  15968. return false;
  15969. }
  15970. if (!PHASE_OFF(Js::AddFldFastPathPhase, this->m_func))
  15971. {
  15972. // We always try to do the store field fast path, unless the profile specifically says we never set, but always add a property here.
  15973. if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty)) == Js::FldInfo_FromLocalWithoutProperty)
  15974. {
  15975. doStore = false;
  15976. }
  15977. // On the other hand, we only emit the add field fast path, if the profile explicitly says we do add properties here.
  15978. if (!!(profiledInstrStFld->u.FldInfo().flags & Js::FldInfo_FromLocalWithoutProperty))
  15979. {
  15980. doAdd = true;
  15981. }
  15982. }
  15983. else
  15984. {
  15985. #if ENABLE_DEBUG_CONFIG_OPTIONS
  15986. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  15987. #endif
  15988. PHASE_PRINT_TRACE(Js::AddFldFastPathPhase, this->m_func,
  15989. L"AddFldFastPath: function: %s(%s) property: %s(#%d) no fast path, because the phase is off.\n",
  15990. this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  15991. this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(), propertySym->m_propertyId);
  15992. }
  15993. if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromInlineSlots)
  15994. {
  15995. // If the inline slots flag is set and the aux slots flag is not, only generate the inline slots check
  15996. doAuxSlots = false;
  15997. }
  15998. else if ((profiledInstrStFld->u.FldInfo().flags & (Js::FldInfo_FromInlineSlots | Js::FldInfo_FromAuxSlots)) == Js::FldInfo_FromAuxSlots)
  15999. {
  16000. // If the aux slots flag is set and the inline slots flag is not, only generate the aux slots check
  16001. doInlineSlots = false;
  16002. }
  16003. }
  16004. else if (!profiledInstrStFld->u.FldInfo().valueType.IsUninitialized())
  16005. {
  16006. // We have value type info about the field but no flags. This means we shouldn't generate any
  16007. // fast paths for this field store.
  16008. return false;
  16009. }
  16010. }
  16011. Assert(doStore || doAdd);
  16012. if (labelHelper == nullptr)
  16013. {
  16014. labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
  16015. }
  16016. IR::LabelInstr * labelFallThru = instrStFld->GetOrCreateContinueLabel();
  16017. IR::RegOpnd * opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  16018. bool usePolymorphicInlineCache = !!propertySymOpnd->m_runtimePolymorphicInlineCache;
  16019. if (doAdd)
  16020. {
  16021. #if ENABLE_DEBUG_CONFIG_OPTIONS
  16022. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  16023. #endif
  16024. PHASE_PRINT_TRACE(Js::AddFldFastPathPhase, this->m_func,
  16025. L"AddFldFastPath: function: %s(%s) property: %s(#%d) %s fast path for %s.\n",
  16026. this->m_func->GetJnFunction()->GetDisplayName(), this->m_func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  16027. this->m_func->GetScriptContext()->GetPropertyNameLocked(propertySym->m_propertyId)->GetBuffer(), propertySym->m_propertyId,
  16028. usePolymorphicInlineCache ? L"poly" : L"mono", doStore ? L"store and add" : L"add only");
  16029. }
  16030. IR::RegOpnd * opndInlineCache = IR::RegOpnd::New(TyMachPtr, this->m_func);
  16031. if (usePolymorphicInlineCache)
  16032. {
  16033. LowererMD::CreateAssign(opndInlineCache, IR::AddrOpnd::New(propertySymOpnd->m_runtimePolymorphicInlineCache->GetInlineCaches(), IR::AddrOpndKindDynamicInlineCache, this->m_func, true), instrStFld);
  16034. }
  16035. else
  16036. {
  16037. LowererMD::CreateAssign(opndInlineCache, this->LoadRuntimeInlineCacheOpnd(instrStFld, propertySymOpnd, isHelper), instrStFld);
  16038. }
  16039. if (typeOpnd == nullptr)
  16040. {
  16041. typeOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
  16042. GenerateObjectTestAndTypeLoad(instrStFld, opndBase, typeOpnd, labelHelper);
  16043. }
  16044. if (usePolymorphicInlineCache)
  16045. {
  16046. LowererMD::GenerateLoadPolymorphicInlineCacheSlot(instrStFld, opndInlineCache, typeOpnd, propertySymOpnd->m_runtimePolymorphicInlineCache->GetSize());
  16047. }
  16048. IR::LabelInstr * labelNext = nullptr;
  16049. IR::RegOpnd * opndTaggedType = nullptr;
  16050. IR::BranchInstr * lastBranchToNext = nullptr;
  16051. if (doStore)
  16052. {
  16053. if (doInlineSlots)
  16054. {
  16055. labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
  16056. lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, typeOpnd, opndInlineCache, labelNext);
  16057. LowererMD::GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, true);
  16058. instrStFld->InsertBefore(labelNext);
  16059. }
  16060. if (doAuxSlots)
  16061. {
  16062. if (opndTaggedType == nullptr)
  16063. {
  16064. opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
  16065. LowererMD::GenerateLoadTaggedType(instrStFld, typeOpnd, opndTaggedType);
  16066. }
  16067. labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
  16068. lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, opndTaggedType, opndInlineCache, labelNext);
  16069. LowererMD::GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, false);
  16070. instrStFld->InsertBefore(labelNext);
  16071. }
  16072. }
  16073. if (doAdd)
  16074. {
  16075. if (doInlineSlots)
  16076. {
  16077. labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, isHelper);
  16078. lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, typeOpnd, opndInlineCache, labelNext, true);
  16079. GenerateSetObjectTypeFromInlineCache(instrStFld, opndBase, opndInlineCache, false);
  16080. LowererMD::GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, true);
  16081. instrStFld->InsertBefore(labelNext);
  16082. }
  16083. if (doAuxSlots)
  16084. {
  16085. if (opndTaggedType == nullptr)
  16086. {
  16087. opndTaggedType = IR::RegOpnd::New(TyMachPtr, this->m_func);
  16088. LowererMD::GenerateLoadTaggedType(instrStFld, typeOpnd, opndTaggedType);
  16089. }
  16090. labelNext = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
  16091. lastBranchToNext = LowererMD::GenerateLocalInlineCacheCheck(instrStFld, opndTaggedType, opndInlineCache, labelNext, true);
  16092. GenerateAuxSlotAdjustmentRequiredCheck(instrStFld, opndInlineCache, labelHelper);
  16093. GenerateSetObjectTypeFromInlineCache(instrStFld, opndBase, opndInlineCache, true);
  16094. LowererMD::GenerateStFldFromLocalInlineCache(instrStFld, opndBase, opndSrc, opndInlineCache, labelFallThru, false);
  16095. instrStFld->InsertBefore(labelNext);
  16096. }
  16097. }
  16098. Assert(lastBranchToNext);
  16099. lastBranchToNext->SetTarget(labelHelper);
  16100. labelNext->Remove();
  16101. // $helper:
  16102. // CALL Helper(inlineCache, base, field, src, scriptContext)
  16103. // $fallthru:
  16104. isHelper = true;
  16105. // Return false to indicate the original instruction was not lowered. Caller will insert the helper label.
  16106. return false;
  16107. }
// Fast path for storing to a "custom" property that has dedicated storage on the object
// rather than an inline-cache slot. Currently this handles exactly one case: writing
// RegExp.prototype-style "lastIndex" on an object whose value type is likely a RegExp.
// On success the store is emitted inline and true is returned (the instruction is fully
// lowered); otherwise returns false and the caller falls back to the generic path.
// *labelHelperRef receives the helper label if any inline guard was emitted.
bool Lowerer::GenerateFastStFldForCustomProperty(IR::Instr *const instr, IR::LabelInstr * *const labelHelperRef)
{
    Assert(instr);
    Assert(labelHelperRef);
    Assert(!*labelHelperRef);

    // Only plain field stores (normal and strict-mode) are eligible.
    switch(instr->m_opcode)
    {
        case Js::OpCode::StFld:
        case Js::OpCode::StFldStrict:
            break;

        default:
            return false;
    }

    IR::SymOpnd *const symOpnd = instr->GetDst()->AsSymOpnd();
    PropertySym *const propertySym = symOpnd->m_sym->AsPropertySym();
    // Only the "lastIndex" property is special-cased, and we need a PropertySymOpnd
    // to query the property owner's value type below.
    if(propertySym->m_propertyId != Js::PropertyIds::lastIndex || !symOpnd->IsPropertySymOpnd())
    {
        return false;
    }

    const ValueType objectValueType(symOpnd->GetPropertyOwnerValueType());
    if(!objectValueType.IsLikelyRegExp())
    {
        return false;
    }

    if(instr->HasBailOutInfo())
    {
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        if(!BailOutInfo::IsBailOutOnImplicitCalls(bailOutKind) || bailOutKind & IR::BailOutKindBits)
        {
            // Other bailout kinds will likely need bailout checks that would not be generated here. In particular, if a type
            // check is necessary here to guard against downstream property accesses on the same object, the type check will
            // fail and cause a bailout if the object is a RegExp object since the "lastIndex" property accesses are not cached.
            return false;
        }
    }

    Func *const func = instr->m_func;
    IR::RegOpnd *const objectOpnd = symOpnd->CreatePropertyOwnerOpnd(func);
    const IR::AutoReuseOpnd autoReuseObjectOpnd(objectOpnd, func);

    IR::LabelInstr *labelHelper = nullptr;
    // Guard 1: the owner must not be a tagged (immediate) value.
    if(!objectOpnd->IsNotTaggedValue())
    {
        //     test object, 1
        //     jnz $helper
        if(!labelHelper)
        {
            *labelHelperRef = labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        m_lowererMD.GenerateObjectTest(objectOpnd, instr, labelHelper);
    }

    // Guard 2: if the value type is not definitely an object, verify via vtable that it
    // really is a JavascriptRegExp; on success we can narrow the value type.
    if(!objectValueType.IsObject())
    {
        //     cmp [object], Js::JavascriptRegExp::vtable
        //     jne $helper
        if(!labelHelper)
        {
            *labelHelperRef = labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        InsertCompareBranch(
            IR::IndirOpnd::New(objectOpnd, 0, TyMachPtr, func),
            LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp),
            Js::OpCode::BrNeq_A,
            labelHelper,
            instr);
        objectOpnd->SetValueType(objectValueType.ToDefiniteObject());
    }

    // Store the var value and invalidate the cached numeric form of lastIndex.
    //     mov [object + offset(lastIndexVar)], src
    //     mov [object + offset(lastIndexOrFlag)], Js::JavascriptRegExp::NotCachedValue
    //     jmp $done
    InsertMove(
        IR::IndirOpnd::New(objectOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexVar(), TyVar, func),
        instr->GetSrc1(),
        instr);
    InsertMove(
        IR::IndirOpnd::New(objectOpnd, Js::JavascriptRegExp::GetOffsetOfLastIndexOrFlag(), TyUint32, func),
        IR::IntConstOpnd::New(Js::JavascriptRegExp::NotCachedValue, TyUint32, func, true),
        instr);
    InsertBranch(Js::OpCode::Br, instr->GetOrCreateContinueLabel(), instr);
    return true;
}
// Emits an inline check that regOpnd refers to a "built-in recyclable object": either a
// plain DynamicObject, or an object whose type id lies in the built-in dynamic-object
// range (TypeIds_LastStaticType, TypeIds_LastBuiltinDynamicObject]. On failure control
// transfers to labelHelper (or, when labelContinue is given, success jumps there instead).
// Returns the register holding the object's loaded Type pointer so callers can reuse it.
IR::RegOpnd *
Lowerer::GenerateIsBuiltinRecyclableObject(IR::RegOpnd *regOpnd, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, bool checkObjectAndDynamicObject, IR::LabelInstr *labelContinue)
{
    // CMP [srcReg], Js::DynamicObject::`vtable'
    // JEQ $fallThrough
    // MOV r1, [src1 + offset(type)]                      -- get the type id
    // MOV r1, [r1 + offset(typeId)]
    // ADD r1, ~TypeIds_LastStaticType                    -- if (typeId > TypeIds_LastStaticType && typeId <= TypeIds_LastBuiltinDynamicObject)
    // CMP r1, (TypeIds_LastBuiltinDynamicObject - TypeIds_LastStaticType - 1)
    // JA $helper
    // fallThrough:
    IR::LabelInstr *labelFallthrough = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);

    if (checkObjectAndDynamicObject)
    {
        // Optionally first rule out tagged values, then short-circuit to fallthrough
        // if the object is already a DynamicObject (vtable match).
        if (!regOpnd->IsNotTaggedValue())
        {
            m_lowererMD.GenerateObjectTest(regOpnd, insertInstr, labelHelper);
        }

        m_lowererMD.GenerateIsDynamicObject(regOpnd, insertInstr, labelFallthrough, true);
    }

    IR::RegOpnd * typeRegOpnd = IR::RegOpnd::New(TyMachReg, this->m_func);
    IR::RegOpnd * typeIdRegOpnd = IR::RegOpnd::New(TyInt32, this->m_func);
    IR::IndirOpnd *indirOpnd;

    // MOV typeRegOpnd, [src1 + offset(type)]
    indirOpnd = IR::IndirOpnd::New(regOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
    m_lowererMD.CreateAssign(typeRegOpnd, indirOpnd, insertInstr);

    // MOV typeIdRegOpnd, [typeRegOpnd + offset(typeId)]
    indirOpnd = IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
    m_lowererMD.CreateAssign(typeIdRegOpnd, indirOpnd, insertInstr);

    // ADD typeIdRegOpnd, ~TypeIds_LastStaticType
    // Biasing by ~TypeIds_LastStaticType (i.e. subtracting TypeIds_LastStaticType + 1)
    // lets a single unsigned compare test both range bounds at once.
    InsertAdd(false, typeIdRegOpnd, typeIdRegOpnd,
              IR::IntConstOpnd::New(~Js::TypeIds_LastStaticType, TyInt32, this->m_func, true), insertInstr);

    // CMP typeIdRegOpnd, (TypeIds_LastBuiltinDynamicObject - TypeIds_LastStaticType - 1)
    InsertCompare(
        typeIdRegOpnd,
        IR::IntConstOpnd::New(Js::TypeIds_LastBuiltinDynamicObject - Js::TypeIds_LastStaticType - 1, TyInt32, this->m_func),
        insertInstr);

    if (labelContinue)
    {
        // On success, go to continuation label.
        InsertBranch(Js::OpCode::BrLe_A, true, labelContinue, insertInstr);
    }
    else
    {
        // On failure, go to helper.
        InsertBranch(Js::OpCode::BrGt_A, true, labelHelper, insertInstr);
    }

    // $fallThrough
    insertInstr->InsertBefore(labelFallthrough);
    return typeRegOpnd;
}
// Emits a fast equality-branch path when both operands are likely booleans or likely
// objects, falling back to the helper label for anything else. Returns true if the fast
// path was emitted (the caller must then insert the helper call after labelHelper);
// returns false if no fast path applies. *pNeedHelper is cleared only in the one case
// where the fast path fully covers all inputs (strict compare of two definite booleans).
bool Lowerer::GenerateFastBrEqLikely(IR::BranchInstr * instrBranch, bool *pNeedHelper)
{
    IR::Opnd *src1 = instrBranch->GetSrc1();
    IR::Opnd *src2 = instrBranch->GetSrc2();
    IR::LabelInstr *targetInstr = instrBranch->GetTarget();

    IR::LabelInstr *labelBooleanCmp = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
    IR::LabelInstr *labelTrue = instrBranch->GetOrCreateContinueLabel();
    IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);

    bool isStrictBr = false;
    bool isStrictMode = this->m_func->GetJnFunction()->GetIsStrictMode();
    *pNeedHelper = true;

    // Detect strict (===/!==) branch variants.
    switch (instrBranch->m_opcode)
    {
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
        isStrictBr = true;
        break;
    }

    if (src1->GetValueType().IsLikelyBoolean() && src2->GetValueType().IsLikelyBoolean())
    {
        //
        // Booleans
        //
        if (isStrictBr)
        {
            if (!src1->GetValueType().IsBoolean() && !src2->GetValueType().IsBoolean())
            {
                // Neither side is a definite boolean: prove src2 is a JS boolean, then a
                // direct pointer compare at labelBooleanCmp is a valid strict compare.
                this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instrBranch, labelHelper, false);
                if (this->m_lowererMD.GenerateJSBooleanTest(src2->AsRegOpnd(), instrBranch, labelBooleanCmp, true))
                {
                    instrBranch->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func));
                }
            }
            else
            {
                // At least one side is a definite boolean: strict compare reduces to a
                // pointer compare with no possible fall-back, so no helper is needed.
                *pNeedHelper = false;
            }
        }
        else
        {
            // Loose compare: both sides must be proven actual JS booleans before the
            // pointer compare is equivalent to the loose equality result.
            this->m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instrBranch, labelHelper, false);
            this->m_lowererMD.GenerateJSBooleanTest(src1->AsRegOpnd(), instrBranch, labelHelper, false);

            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instrBranch, labelHelper, false);
            if (this->m_lowererMD.GenerateJSBooleanTest(src2->AsRegOpnd(), instrBranch, labelBooleanCmp, true))
            {
                instrBranch->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func));
            }
        }
    }
    else if (src1->GetValueType().IsLikelyObject() && src2->GetValueType().IsLikelyObject())
    {
        //
        // Objects
        //
        IR::LabelInstr *labelTypeIdCheck = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
        if (!isStrictBr)
        {
            // If not strictBr, verify both sides are dynamic objects
            this->m_lowererMD.GenerateObjectTest(src1->AsRegOpnd(), instrBranch, labelHelper, false);
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instrBranch, labelHelper, false);
            this->m_lowererMD.GenerateIsDynamicObject(src1->AsRegOpnd(), instrBranch, labelTypeIdCheck, false);
        }
        else
        {
            this->m_lowererMD.GenerateObjectTest(src2->AsRegOpnd(), instrBranch, labelHelper, false);
        }
        this->m_lowererMD.GenerateIsDynamicObject(src2->AsRegOpnd(), instrBranch, labelBooleanCmp, true);

        instrBranch->InsertBefore(labelTypeIdCheck);

        if (isStrictMode)
        {
            // In strict mode, defer non-DynamicObject cases entirely to the helper.
            labelTypeIdCheck->isOpHelper = true;
            IR::BranchInstr *branchToHelper = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelHelper, this->m_func);
            instrBranch->InsertBefore(branchToHelper);
        }
        else
        {
            // Otherwise, pointer identity is a valid compare for built-in recyclable
            // objects (and external objects handled by TryGenerateFastExternalEqTest).
            if (!ExternalLowerer::TryGenerateFastExternalEqTest(src1, src2, instrBranch, labelHelper, labelBooleanCmp, this, isStrictBr))
            {
                if (!isStrictBr)
                {
                    GenerateIsBuiltinRecyclableObject(src1->AsRegOpnd(), instrBranch, labelHelper, false /*checkObjectAndDynamicObject*/);
                }
                GenerateIsBuiltinRecyclableObject(src2->AsRegOpnd(), instrBranch, labelHelper, false /*checkObjectAndDynamicObject*/);
            }
        }
    }
    else
    {
        // Operand types don't qualify for any fast path.
        return false;
    }

    // $booleanCmp: pointer-compare branch to the original target, then fall through
    // (unconditional jump) to the continue label; helper label follows.
    instrBranch->InsertBefore(labelBooleanCmp);
    IR::BranchInstr *newBranch = IR::BranchInstr::New(instrBranch->m_opcode, targetInstr, src1, src2, this->m_func);
    instrBranch->InsertBefore(newBranch);
    this->m_lowererMD.LowerCondBranch(newBranch);

    newBranch = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelTrue, this->m_func);
    instrBranch->InsertBefore(newBranch);

    instrBranch->InsertBefore(labelHelper);

    return true;
}
// Lowers BrTrue_A / BrFalse_A with a chain of inline fast paths keyed on the source's
// profiled value type: typespec'd float, null, undefined, tagged int, boxed float,
// boolean, string, and object. Each fast path that fully resolves the branch removes the
// instruction and returns; otherwise the path narrows srcValueType and falls through to
// the next check, ending in a JavascriptConversion::ToBoolean helper call. Always returns
// false to tell the caller the instruction needs no further helper lowering.
bool Lowerer::GenerateFastBrBool(IR::BranchInstr *const instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A);

    Func *const func = instr->m_func;

    // Ensure the source is in a register so the tests below can reference it.
    if(!instr->GetSrc1()->IsRegOpnd())
    {
        LowererMD::ChangeToAssign(instr->HoistSrc1(Js::OpCode::Ld_A));
    }
    IR::RegOpnd *const src = instr->GetSrc1()->Copy(func)->AsRegOpnd();
    const IR::AutoReuseOpnd autoReuseSrc(src, func);
    const ValueType srcOriginalValueType(src->GetValueType());
    ValueType srcValueType(srcOriginalValueType);

    IR::LabelInstr *const labelTarget = instr->GetTarget();
    IR::LabelInstr *const labelFallthrough = instr->GetOrCreateContinueLabel();
    if(labelTarget == labelFallthrough)
    {
        // Nothing to do
        instr->Remove();
        return false;
    }

    const bool branchOnFalse = instr->m_opcode == Js::OpCode::BrFalse_A;
    // labelFalse/labelTrue are where control goes when the value converts to false/true.
    IR::LabelInstr *const labelFalse = branchOnFalse ? labelTarget : labelFallthrough;
    IR::LabelInstr *const labelTrue = branchOnFalse ? labelFallthrough : labelTarget;
    // Branch opcode that jumps to labelTarget when the value compares equal to "false".
    const Js::OpCode compareWithFalseBranchToTargetOpCode = branchOnFalse ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A;
    // Tracks the last fast-path miss label emitted right before the helper, so it can be
    // flagged as a helper block at the end.
    IR::LabelInstr *lastLabelBeforeHelper = nullptr;

    /// Typespec'd float
    if (instr->GetSrc1()->GetType() == TyFloat64)
    {
        InsertFloatCheckForZeroOrNanBranch(instr->GetSrc1(), branchOnFalse, labelTarget, labelFallthrough, instr);
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);
        instr->Remove();
        return false;
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Null fast path
    if (srcValueType.HasBeenNull() || srcOriginalValueType.IsUninitialized())
    {
        if(srcValueType.IsNull())
        {
            //     jmp $false
            InsertBranch(Js::OpCode::Br, labelFalse, instr);

            // Skip lowering call to helper
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        //     cmp src, null
        //     je $false
        InsertCompareBranch(
            src,
            LoadLibraryValueOpnd(instr, LibraryValue::ValueNull),
            Js::OpCode::BrEq_A,
            labelFalse,
            instr);
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Null));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Undefined fast path
    if(srcValueType.HasBeenUndefined() || srcOriginalValueType.IsUninitialized())
    {
        if(srcValueType.IsUndefined())
        {
            //     jmp $false
            InsertBranch(Js::OpCode::Br, labelFalse, instr);

            // Skip lowering call to helper
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        //     cmp src, undefined
        //     je $false
        InsertCompareBranch(
            src,
            LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined),
            Js::OpCode::BrEq_A,
            labelFalse,
            instr);
        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Undefined));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Tagged int fast path
    const bool isNotInt = src->IsNotInt();
    bool checkedForTaggedInt = isNotInt;
    if( (
            srcValueType.HasBeenInt() ||
            srcValueType.HasBeenUnknownNumber() ||
            srcOriginalValueType.IsUninitialized()
        ) && !isNotInt)
    {
        checkedForTaggedInt = true;

        IR::LabelInstr *notTaggedIntLabel = nullptr;
        if(!src->IsTaggedInt())
        {
            //     test src, 1
            //     jz $notTaggedInt
            notTaggedIntLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
            m_lowererMD.GenerateSmIntTest(src, instr, notTaggedIntLabel);
        }

        // A tagged int is falsy iff it is tagged zero.
        //     cmp src, tag(0)
        //     je/jne $target
        m_lowererMD.GenerateTaggedZeroTest(src, instr);
        Lowerer::InsertBranch(compareWithFalseBranchToTargetOpCode, labelTarget, instr);

        if(src->IsTaggedInt())
        {
            // Skip lowering call to helper
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        //     jmp $fallthrough
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        // $notTaggedInt:
        if(notTaggedIntLabel)
        {
            instr->InsertBefore(notTaggedIntLabel);
            lastLabelBeforeHelper = notTaggedIntLabel;
        }
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Float fast path
    bool generateFloatTest = srcValueType.IsLikelyFloat();
#ifdef _M_IX86
    if (!AutoSystemInfo::Data.SSE2Available())
    {
        generateFloatTest = false;
    }
#endif
    bool checkedForTaggedFloat =
#if FLOATVAR
        srcValueType.IsNotNumber();
#else
        true; // there are no tagged floats, indicate that it has been checked
#endif

    if (generateFloatTest)
    {
        // if(srcValueType.IsFloat()) // skip tagged int check?
        //
        // ValueType::IsFloat() does not guarantee that the storage is not in a tagged int.
        // The tagged int check is necessary. It does, however, guarantee that as long as the value is not
        // stored in a tagged int, that it is definitely stored in a JavascriptNumber/TaggedFloat.
        IR::LabelInstr *const notFloatLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
        if(!checkedForTaggedInt)
        {
            checkedForTaggedInt = true;
            m_lowererMD.GenerateSmIntTest(src, instr, notFloatLabel, nullptr, true);
        }

        //     cmp [src], JavascriptNumber::vtable
        //     jne $notFloat
#if FLOATVAR
        checkedForTaggedFloat = true;
        IR::RegOpnd *const floatOpnd = m_lowererMD.CheckFloatAndUntag(src, instr, notFloatLabel);
#else
        m_lowererMD.GenerateFloatTest(src, instr, notFloatLabel);
        IR::IndirOpnd *const floatOpnd = IR::IndirOpnd::New(src, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, func);
#endif

        // A float is falsy iff it is +/-0 or NaN.
        //     cmp src, 0.0
        //     jp $false
        //     je/jne $target
        //     jmp $fallthrough
        InsertFloatCheckForZeroOrNanBranch(floatOpnd, branchOnFalse, labelTarget, labelFallthrough, instr);
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        // $notFloat:
        instr->InsertBefore(notFloatLabel);
        lastLabelBeforeHelper = notFloatLabel;

        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::AnyNumber));
    }

    IR::LabelInstr *labelHelper = nullptr;
    // The object test only needs to run once, and only if tagged values haven't already
    // been excluded; EnsureObjectTest emits it lazily on first use.
    bool _didObjectTest = checkedForTaggedInt && checkedForTaggedFloat;
    const auto EnsureObjectTest = [&]()
    {
        if(_didObjectTest)
        {
            return;
        }
        if(!labelHelper)
        {
            labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        m_lowererMD.GenerateObjectTest(src, instr, labelHelper);
        _didObjectTest = true;
    };

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Boolean fast path
    if (srcValueType.HasBeenBoolean() || srcOriginalValueType.IsUninitialized())
    {
        IR::LabelInstr *notBooleanLabel = nullptr;
        if (!srcValueType.IsBoolean())
        {
            EnsureObjectTest();

            //     cmp [src], JavascriptBoolean::vtable
            //     jne $notBoolean
            notBooleanLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
            InsertCompareBranch(
                IR::IndirOpnd::New(src, 0, TyMachPtr, func),
                LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptBoolean),
                Js::OpCode::BrNeq_A,
                notBooleanLabel,
                instr);
        }

        //     cmp src, false
        //     je/jne $target
        InsertCompareBranch(
            src,
            LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse),
            compareWithFalseBranchToTargetOpCode,
            labelTarget,
            instr);

        if (srcValueType.IsBoolean())
        {
            // Skip lowering call to helper
            Assert(!labelHelper);
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        //     jmp $fallthrough
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        if (notBooleanLabel)
        {
            instr->InsertBefore(notBooleanLabel);
            lastLabelBeforeHelper = notBooleanLabel;
        }

        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::Boolean));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // String fast path
    if(srcValueType.HasBeenString())
    {
        IR::LabelInstr *notStringLabel = nullptr;
        if(!srcValueType.IsString())
        {
            EnsureObjectTest();

            notStringLabel = IR::LabelInstr::New(Js::OpCode::Label, func);
            GenerateStringTest(src, instr, notStringLabel, nullptr, false);
        }

        // A string is falsy iff its length is zero.
        //     cmp [src + offset(length)], 0
        //     jeq/jne $target
        InsertCompareBranch(
            IR::IndirOpnd::New(src, Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func),
            IR::IntConstOpnd::New(0, TyUint32, func, true),
            compareWithFalseBranchToTargetOpCode,
            labelTarget,
            instr);

        if(srcValueType.IsString())
        {
            // Skip lowering call to helper
            Assert(!labelHelper);
            Assert(instr->m_prev->IsBranchInstr());
            instr->Remove();
            return false;
        }

        //     jmp $fallthrough
        Lowerer::InsertBranch(Js::OpCode::Br, labelFallthrough, instr);

        if(notStringLabel)
        {
            instr->InsertBefore(notStringLabel);
            lastLabelBeforeHelper = notStringLabel;
        }

        src->SetValueType(srcValueType = srcValueType.SetIsNotAnyOf(ValueType::String));
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Object fast path
    if (srcValueType.IsLikelyObject())
    {
        if(srcValueType.IsObject())
        {
            if(srcValueType.GetObjectType() > ObjectType::Object)
            {
                // Specific object types that are tracked are equivalent to 'true'
                //     jmp $true
                InsertBranch(Js::OpCode::Br, labelTrue, instr);

                // Skip lowering call to helper
                Assert(!labelHelper);
                Assert(instr->m_prev->IsBranchInstr());
                instr->Remove();
                return false;
            }
        }
        else
        {
            EnsureObjectTest();
        }

        //     mov srcType, [src + offset(type)] -- load type
        IR::RegOpnd *const srcType = IR::RegOpnd::New(TyMachPtr, func);
        const IR::AutoReuseOpnd autoReuseR1(srcType, func);
        InsertMove(srcType, IR::IndirOpnd::New(src, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func), instr);

        //     test [srcType + offset(flags)], TypeFlagMask_IsFalsy -- check if falsy
        //     jnz $false
        InsertTestBranch(
            IR::IndirOpnd::New(srcType, Js::Type::GetOffsetOfFlags(), TyUint8, func),
            IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyUint8, func),
            Js::OpCode::BrNeq_A,
            labelFalse,
            instr);

        //     cmp [srcType + offset(typeId)], TypeIds_LastJavascriptPrimitiveType -- non-primitive type ids are truthy
        //     ja $true
        InsertCompareBranch(
            IR::IndirOpnd::New(srcType, Js::Type::GetOffsetOfTypeId(), TyInt32, func),
            IR::IntConstOpnd::New(Js::TypeIds_LastJavascriptPrimitiveType, TyInt32, func),
            Js::OpCode::BrGt_A,
            true /* isUnsigned */,
            labelTrue,
            instr);

        if(!labelHelper)
        {
            labelHelper = IR::LabelInstr::New(Js::OpCode::Label, func, true);
        }
        // The object path flows directly into the helper, so no pending miss label remains.
        lastLabelBeforeHelper = nullptr;
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // Helper call

    // $helper:
    if(lastLabelBeforeHelper)
    {
        Assert(instr->m_prev == lastLabelBeforeHelper);
        lastLabelBeforeHelper->isOpHelper = true;
    }
    if (labelHelper)
    {
        Assert(labelHelper->isOpHelper);
        instr->InsertBefore(labelHelper);
    }

    //     call JavascriptConversion::ToBoolean
    IR::RegOpnd *const toBoolDst = IR::RegOpnd::New(TyInt32, func);
    const IR::AutoReuseOpnd autoReuseToBoolDst(toBoolDst, func);
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, toBoolDst, instr->GetSrc1(), func);
    instr->InsertBefore(callInstr);
    LowerUnaryHelperMem(callInstr, IR::HelperConv_ToBoolean);

    //     test eax, eax
    InsertTest(toBoolDst, toBoolDst, instr);

    //     je/jne $target
    // Reuse the original branch instruction as the conditional jump on the helper result.
    Assert(instr->IsBranchInstr());
    instr->FreeSrc1();
    instr->m_opcode = LowererMD::MDBranchOpcode(compareWithFalseBranchToTargetOpCode);
    Assert(instr->AsBranchInstr()->GetTarget() == labelTarget);

    // Skip lowering another call to helper
    return false;
}
  16678. // Helper method used in LowerMD by all platforms.
  16679. // Creates HelperCallOpnd or DiagHelperCallOpnd, based on helperMethod and state.
  16680. // static
  16681. IR::HelperCallOpnd*
  16682. Lowerer::CreateHelperCallOpnd(IR::JnHelperMethod helperMethod, int helperArgCount, Func* func)
  16683. {
  16684. Assert(func);
  16685. IR::HelperCallOpnd* helperCallOpnd;
  16686. if (CONFIG_FLAG(EnableContinueAfterExceptionWrappersForHelpers) &&
  16687. func->IsJitInDebugMode() &&
  16688. HelperMethodAttributes::CanThrow(helperMethod))
  16689. {
  16690. // Create DiagHelperCallOpnd to indicate that it's needed to wrap original helper with try-catch wrapper,
  16691. // so that we can ignore exception and bailout to next stmt in debugger.
  16692. // For details, see: Lib\Runtime\Debug\DiagHelperMethodWrapper.{h,cpp}.
  16693. helperCallOpnd = IR::DiagHelperCallOpnd::New(helperMethod, func, helperArgCount);
  16694. }
  16695. else
  16696. {
  16697. helperCallOpnd = IR::HelperCallOpnd::New(helperMethod, func);
  16698. }
  16699. return helperCallOpnd;
  16700. }
// Pattern-matches "typeof x ==/!= <string literal>" in the IR stream (a Typeof feeding a
// Br*/Cm* via src1, with a string-constant src2) and, when the literal names a known JS
// type, replaces the pair with a direct type-id check via GenerateFastBrTypeOf /
// GenerateFastCmTypeOf. Returns true if the rewrite happened; *prev is set to the
// instruction preceding the consumed Typeof so the caller can resume lowering there, and
// *pfNoLower reports whether further lowering of the branch/compare is unnecessary.
bool
Lowerer::TryGenerateFastBrOrCmTypeOf(IR::Instr *instr, IR::Instr **prev, bool *pfNoLower)
{
    Assert(prev);
    Assert(instr->m_opcode == Js::OpCode::BrSrEq_A ||
           instr->m_opcode == Js::OpCode::BrSrNeq_A ||
           instr->m_opcode == Js::OpCode::BrSrNotEq_A ||
           instr->m_opcode == Js::OpCode::BrSrNotNeq_A ||
           instr->m_opcode == Js::OpCode::CmSrEq_A ||
           instr->m_opcode == Js::OpCode::CmSrNeq_A ||
           instr->m_opcode == Js::OpCode::BrEq_A ||
           instr->m_opcode == Js::OpCode::BrNeq_A ||
           instr->m_opcode == Js::OpCode::BrNotEq_A ||
           instr->m_opcode == Js::OpCode::BrNotNeq_A ||
           instr->m_opcode == Js::OpCode::CmEq_A ||
           instr->m_opcode == Js::OpCode::CmNeq_A);

    //
    // instr         - (Br/Cm)Sr(N)eq_A
    // instr->m_prev - typeOf
    //
    IR::Instr *instrLd = instr->GetPrevRealInstrOrLabel();
    bool skippedLoads = false;

    // Skip intermediate Ld_A which might be inserted by flow graph peeps
    while (instrLd && instrLd->m_opcode == Js::OpCode::Ld_A )
    {
        // Only flow-graph-peep temporaries may be skipped; anything else (or a load that
        // carries bailout info) makes the pattern unsafe to rewrite.
        if (!(instrLd->GetDst()->IsRegOpnd() && instrLd->GetDst()->AsRegOpnd()->m_fgPeepTmp))
        {
            return false;
        }
        if (instrLd->HasBailOutInfo())
        {
            return false;
        }
        instrLd = instrLd->GetPrevRealInstrOrLabel();
        skippedLoads = true;
    }

    IR::Instr *typeOf = instrLd;

    if (typeOf && (typeOf->m_opcode == Js::OpCode::Typeof))
    {
        IR::RegOpnd *typeOfDst = typeOf->GetDst()->IsRegOpnd() ? typeOf->GetDst()->AsRegOpnd() : nullptr;
        IR::RegOpnd *instrSrc1 = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd() : nullptr;
        IR::RegOpnd *instrSrc2 = instr->GetSrc2()->IsRegOpnd() ? instr->GetSrc2()->AsRegOpnd() : nullptr;
        if (typeOfDst && instrSrc1 && instrSrc2)
        {
            // The compare's src1 must be exactly the Typeof result, used for the last
            // time here, and src2 must be a single-def string constant.
            if (instrSrc1->m_sym == typeOfDst->m_sym)
            {
                if (!instrSrc1->m_isTempLastUse)
                {
                    return false;
                }
                if (!(instrSrc2->m_sym->m_isSingleDef && instrSrc2->m_sym->m_isStrConst))
                {
                    return false;
                }

                // The second argument to [Cm|Br]TypeOf is the typeid.
                IR::IntConstOpnd *typeIdOpnd = nullptr;

                Assert(instrSrc2->m_sym->m_isSingleDef);
                Assert(instrSrc2->m_sym->m_instrDef->GetSrc1()->IsAddrOpnd());

                // We can't optimize non-javascript type strings.
                // Map the literal type-name string to its TypeIds_* constant.
                Js::JavascriptString *typeNameJsString = Js::JavascriptString::FromVar(instrSrc2->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_address);
                const wchar_t *typeName = typeNameJsString->GetString();

                Js::InternalString typeNameString(typeName, typeNameJsString->GetLength());
                if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::UndefinedTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Undefined, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::ObjectTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Object, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::BooleanTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Boolean, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::NumberTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Number, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::StringTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_String, TyInt32, instr->m_func);
                }
                else if (Js::InternalStringComparer::Equals(typeNameString, Js::Type::FunctionTypeNameString))
                {
                    typeIdOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, instr->m_func);
                }
                else
                {
                    return false;
                }

                if (skippedLoads)
                {
                    // Validate none of the dsts of the skipped Ld_A instructions overlap
                    // with the typeof src or dst before moving the Typeof down to instr.
                    IR::Opnd* typeOfSrc = typeOf->GetSrc1();
                    instrLd = typeOf->GetNextRealInstr();
                    while (instrLd != instr)
                    {
                        if (instrLd->GetDst()->IsEqual(typeOfDst) || instrLd->GetDst()->IsEqual(typeOfSrc))
                        {
                            return false;
                        }
                        instrLd = instrLd->GetNextRealInstr();
                    }
                    typeOf->Unlink();
                    instr->InsertBefore(typeOf);
                }

                // The first argument to [Cm|Br]TypeOf is the first arg to the TypeOf instruction.
                IR::Opnd *objectOpnd = typeOf->GetSrc1();
                Assert(objectOpnd->IsRegOpnd());

                // Now emit this instruction and remove the ldstr and typeOf.
                *prev = typeOf->m_prev;
                *pfNoLower = false;
                if (instr->IsBranchInstr())
                {
                    GenerateFastBrTypeOf(instr, objectOpnd->AsRegOpnd(), typeIdOpnd, typeOf, pfNoLower);
                }
                else
                {
                    GenerateFastCmTypeOf(instr, objectOpnd->AsRegOpnd(), typeIdOpnd, typeOf, pfNoLower);
                }

                return true;
            }
        }
    }

    return false;
}
  16827. void
  16828. Lowerer::GenerateFalsyObjectTest(IR::Instr *insertInstr, IR::RegOpnd *TypeOpnd, Js::TypeId typeIdToCheck, IR::LabelInstr* target, IR::LabelInstr* done, bool isNeqOp)
  16829. {
  16830. if (!this->m_func->GetScriptContext()->GetThreadContext()->CanBeFalsy(typeIdToCheck) && typeIdToCheck != Js::TypeIds_Undefined)
  16831. {
  16832. // Don't need the check for falsy, the typeId we are looking for doesn't care
  16833. return;
  16834. }
  16835. IR::Opnd *flagsOpnd = IR::IndirOpnd::New(TypeOpnd, Js::Type::GetOffsetOfFlags(), TyInt32, this->m_func);
  16836. InsertTest(flagsOpnd, IR::IntConstOpnd::New(TypeFlagMask_IsFalsy, TyInt32, this->m_func), insertInstr);
  16837. if (typeIdToCheck == Js::TypeIds_Undefined)
  16838. {
  16839. //Falsy object returns true for undefined ((typeof falsyObj) == "undefined")
  16840. InsertBranch( Js::OpCode::BrNeq_A, true, isNeqOp ? done : target, insertInstr);
  16841. }
  16842. else
  16843. {
  16844. //Falsy object returns false for all other types ((typeof falsyObj) != "function")
  16845. InsertBranch( Js::OpCode::BrNeq_A, true, isNeqOp? target : done , insertInstr);
  16846. }
  16847. }
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateFastBrTypeOf
///
///----------------------------------------------------------------------------
void
Lowerer::GenerateFastBrTypeOf(IR::Instr *branch, IR::RegOpnd *object, IR::IntConstOpnd *typeIdOpnd, IR::Instr *typeOf, bool *pfNoLower)
{
    // Emits an inline fast path for a branch that compares the result of
    // "typeof object" against a known type-name string, which the caller has
    // already folded into the constant typeId in typeIdOpnd.
    //
    // branch     - the Br*_A instruction being lowered; code is inserted before it.
    // object     - operand whose typeof result is being tested.
    // typeIdOpnd - constant Js::TypeId to compare against.
    // typeOf     - the original TypeOf instruction; removed here, or re-emitted as a
    //              helper call on the slow path for "object"/"function" checks.
    // pfNoLower  - set to true when the branch is fully handled here and the caller
    //              must not lower it further.
    Js::TypeId typeId = static_cast<Js::TypeId>(typeIdOpnd->GetValue());
    IR::LabelInstr *target = branch->AsBranchInstr()->GetTarget();
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *helper = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    bool isNeqOp;

    // Collapse the branch opcode to equal/not-equal polarity. Strict and loose
    // variants are treated alike here (both typeof operands are strings).
    switch(branch->m_opcode)
    {
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
        isNeqOp = true;
        break;

    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrNotNeq_A:
        isNeqOp = false;
        break;

    default:
        Assert(UNREACHED);
        __assume(UNREACHED);
    }

    // Tagged (non-pointer) values behave as numbers for typeof, so a failed
    // object test goes to $target only when we are looking for TypeIds_Number.
    // JNE/BNE (typeId == Js::TypeIds_Number) ? $target : $done
    IR::LabelInstr *label = (typeId == Js::TypeIds_Number) ? target : done;
    if (isNeqOp)
        label = (label == target) ? done : target;

    m_lowererMD.GenerateObjectTest(object, branch, label);

    // MOV typeRegOpnd, [object + offset(Type)]
    InsertMove(typeRegOpnd,
               IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
               branch);

    // A falsy object masquerades as "undefined"; this may branch to $target/$done.
    GenerateFalsyObjectTest(branch, typeRegOpnd, typeId, target, done, isNeqOp);

    // MOV objTypeId, [typeRegOpnd + offset(TypeId)]
    IR::RegOpnd* objTypeIdOpnd = IR::RegOpnd::New(TyInt32, m_func);
    InsertMove(objTypeIdOpnd,
               IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
               branch);

    // CMP objTypeId, typeId
    // JEQ/JGE $done
    if (typeId == Js::TypeIds_Object)
    {
        // Everything at or above TypeIds_Object reports typeof "object".
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrGe_A, isNeqOp ? done : target, branch);
    }
    else if (typeId == Js::TypeIds_Function)
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrEq_A, isNeqOp ? done : target, branch);
    }
    else if (typeId == Js::TypeIds_Number)
    {
        //Check for the typeIds between TypeIds_FirstNumberType <= typeIds <= TypeIds_LastNumberType
        // Subtract the lower bound first so a single compare covers the range.
        InsertSub(false, objTypeIdOpnd, objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_FirstNumberType, TyInt32, branch->m_func),branch);
        InsertCompare(objTypeIdOpnd, IR::IntConstOpnd::New(Js::TypeIds_LastNumberType - Js::TypeIds_FirstNumberType, TyInt32, branch->m_func), branch);
        InsertBranch(isNeqOp ? Js::OpCode::BrGt_A : Js::OpCode::BrLe_A, true, target, branch);
    }
    else
    {
        InsertCompare(objTypeIdOpnd, typeIdOpnd, branch);
        InsertBranch(isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, target, branch);
    }

    // This could be 'null' which, for historical reasons, has a TypeId < TypeIds_Object but
    // is still a Javascript "object."
    if (typeId == Js::TypeIds_Object)
    {
        // CMP object, 0xXXXXXXXX
        // JEQ isNeqOp ? $done : $target
        InsertCompareBranch(object,
                            LoadLibraryValueOpnd(branch, LibraryValue::ValueNull),
                            Js::OpCode::BrEq_A,
                            isNeqOp ? done : target,
                            branch);
    }

    branch->InsertAfter(done); // Get this label first

    // "object" or "function" may come from HostDispatch. Needs helper if that's the case.
    if (typeId == Js::TypeIds_Object || typeId == Js::TypeIds_Function)
    {
        // CMP objTypeId, TypeIds_Proxy. typeof proxy could be 'object' or 'function' depends on the target
        // JNE isNeqOp ? $target : $done
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
                            Js::OpCode::BrEq_A,
                            helper,
                            branch);

        // CMP objTypeId, TypeIds_HostDispatch
        // JNE isNeqOp ? $target : $done
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
                            Js::OpCode::BrNeq_A,
                            isNeqOp ? target : done,
                            branch);

        // Now emit Typeof and lower it like we would've for the helper call.
        // The branch itself remains for the caller to lower (*pfNoLower stays false).
        {
            branch->InsertBefore(helper);
            typeOf->Unlink();
            branch->InsertBefore(typeOf);
            LowerUnaryHelperMem(typeOf, IR::HelperOp_Typeof);
        }
    }
    else // Other primitive types don't need helper
    {
        typeOf->Remove();
        branch->Remove();
        *pfNoLower = true;
    }
    // $done:
}
///----------------------------------------------------------------------------
///
/// Lowerer::GenerateFastCmTypeOf
///
///----------------------------------------------------------------------------
void
Lowerer::GenerateFastCmTypeOf(IR::Instr *compare, IR::RegOpnd *object, IR::IntConstOpnd *typeIdOpnd, IR::Instr *typeOf, bool *pfNoLower)
{
    // Emits an inline fast path for a Cm*_A that compares "typeof object" against a
    // known type-name string (folded to the typeId constant by the caller). The
    // result is materialized as the library true/false values in dst:
    // dst starts as 'true' and is overwritten with 'false' at $movFalse.
    //
    // compare    - the Cm*_A instruction being lowered; code is inserted before it.
    // typeOf     - original TypeOf instruction; removed, or re-emitted as a helper
    //              call for the "object"/"function" HostDispatch/Proxy slow path.
    // pfNoLower  - set to true when the compare is fully handled here.
    Assert(compare->m_opcode == Js::OpCode::CmSrEq_A ||
           compare->m_opcode == Js::OpCode::CmEq_A ||
           compare->m_opcode == Js::OpCode::CmSrNeq_A ||
           compare->m_opcode == Js::OpCode::CmNeq_A);

    Js::TypeId typeId = static_cast<Js::TypeId>(typeIdOpnd->GetValue());
    IR::LabelInstr *movFalse = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *done = IR::LabelInstr::New(Js::OpCode::Label, m_func, false);
    IR::LabelInstr *helper= IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::RegOpnd *dst = compare->GetDst()->IsRegOpnd() ? compare->GetDst()->AsRegOpnd() : nullptr;
    IR::RegOpnd *typeRegOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    bool isNeqOp = compare->m_opcode == Js::OpCode::CmSrNeq_A ||
                   compare->m_opcode == Js::OpCode::CmNeq_A;

    Assert(dst);
    if (dst->IsEqual(object))
    {
        //dst same as the src of typeof. As we need to move true to dst first we need to save the src to a new opnd
        IR::RegOpnd *newObject = IR::RegOpnd::New(object->GetType(), m_func);
        InsertMove(newObject, object, compare); //Save src
        object = newObject;
    }

    // mov dst, 'true'
    InsertMove(dst,
               LoadLibraryValueOpnd(compare, LibraryValue::ValueTrue),
               compare);

    // TEST object, 1
    // JNE (typeId == Js::TypeIds_Number) ? $done : $movFalse
    // Tagged (non-pointer) values behave as numbers for typeof.
    IR::LabelInstr *target = (typeId == Js::TypeIds_Number) ? done : movFalse;
    if (isNeqOp)
    {
        target = (target == done) ? movFalse : done;
    }
    m_lowererMD.GenerateObjectTest(object, compare, target);

    // MOV typeRegOpnd, [object + offset(Type)]
    InsertMove(typeRegOpnd,
               IR::IndirOpnd::New(object, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func),
               compare);

    // A falsy object masquerades as "undefined"; may branch to $done/$movFalse.
    GenerateFalsyObjectTest(compare, typeRegOpnd, typeId, done, movFalse, isNeqOp);

    // MOV objTypeId, [typeRegOpnd + offset(TypeId)]
    IR::RegOpnd* objTypeIdOpnd = IR::RegOpnd::New(TyInt32, m_func);
    InsertMove(objTypeIdOpnd,
               IR::IndirOpnd::New(typeRegOpnd, Js::Type::GetOffsetOfTypeId(), TyInt32, m_func),
               compare);

    // CMP objTypeId, typeId
    // JEQ/JGE $done
    if (typeId == Js::TypeIds_Object)
    {
        // Everything at or above TypeIds_Object reports typeof "object".
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrGe_A, isNeqOp ? movFalse : done, compare);
    }
    else if (typeId == Js::TypeIds_Function)
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, Js::OpCode::BrEq_A, isNeqOp ? movFalse : done, compare);
    }
    else if (typeId == Js::TypeIds_Number)
    {
        //Check for the typeIds between TypeIds_FirstNumberType <= typeIds <= TypeIds_LastNumberType
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_LastNumberType, TyInt32, compare->m_func),
                            Js::OpCode::BrGt_A,
                            isNeqOp ? done : movFalse,
                            compare);

        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_FirstNumberType, TyInt32, compare->m_func),
                            isNeqOp? Js::OpCode::BrLt_A : Js::OpCode::BrGe_A,
                            done,
                            compare);
    }
    else
    {
        InsertCompareBranch(objTypeIdOpnd, typeIdOpnd, isNeqOp ? Js::OpCode::BrNeq_A : Js::OpCode::BrEq_A, done, compare);
    }

    // This could be 'null' which, for historical reasons, has a TypeId < TypeIds_Object but
    // is still a Javascript "object."
    if (typeId == Js::TypeIds_Object)
    {
        // CMP object, 0xXXXXXXXX
        // JEQ isNeqOp ? $movFalse : $done
        InsertCompareBranch(object,
                            LoadLibraryValueOpnd(compare, LibraryValue::ValueNull),
                            Js::OpCode::BrEq_A,
                            isNeqOp ? movFalse : done,
                            compare);
    }

    compare->InsertAfter(done); // Get this label first

    // "object" or "function" may come from HostDispatch. Needs helper if that's the case.
    if (typeId == Js::TypeIds_Object || typeId == Js::TypeIds_Function)
    {
        // CMP objTypeId, TypeIds_Proxy
        // JNE isNeqOp ? $done : $movFalse
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyInt32, m_func),
                            Js::OpCode::BrEq_A,
                            helper,
                            compare);

        // CMP objTypeId, TypeIds_HostDispatch
        // JNE isNeqOp ? $done : $movFalse
        InsertCompareBranch(objTypeIdOpnd,
                            IR::IntConstOpnd::New(Js::TypeIds_HostDispatch, TyInt32, m_func),
                            Js::OpCode::BrNeq_A,
                            isNeqOp ? done : movFalse,
                            compare);

        // Now emit Typeof like we would've for the helper call.
        // The compare itself remains for the caller to lower (*pfNoLower stays false).
        {
            compare->InsertBefore(helper);
            typeOf->Unlink();
            compare->InsertBefore(typeOf);
            LowerUnaryHelperMem(typeOf, IR::HelperOp_Typeof);
        }

        // JMP/B $done
        InsertBranch(Js::OpCode::Br, done, done);
    }
    else // Other primitive types don't need helper
    {
        typeOf->Remove();
        compare->Remove();
        *pfNoLower = true;
    }

    // $movFalse: (insert before $done)
    done->InsertBefore(movFalse);

    // MOV dst, 'false'
    InsertMove(dst, LoadLibraryValueOpnd(done, LibraryValue::ValueFalse), done);

    // $done:
}
  17093. void
  17094. Lowerer::GenerateCheckForCallFlagNew(IR::Instr* instrInsert)
  17095. {
  17096. Func *func = instrInsert->m_func;
  17097. IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  17098. Assert(!func->IsInlinee());
  17099. // MOV s1, [ebp + 4] // s1 = call info
  17100. // AND s2, s1, Js::CallFlags_New // s2 = s1 & Js::CallFlags_New
  17101. // CMP s2, 0
  17102. // JNE $Done
  17103. // CALL RuntimeTypeError
  17104. // $Done
  17105. IR::SymOpnd* callInfoOpnd = Lowerer::LoadCallInfo(instrInsert);
  17106. Assert(Js::CallInfo::ksizeofCount == 24);
  17107. IR::RegOpnd* isNewFlagSetRegOpnd = IR::RegOpnd::New(TyUint32, func);
  17108. InsertAnd(isNewFlagSetRegOpnd, callInfoOpnd, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_New << Js::CallInfo::ksizeofCount, TyUint32, func, true), instrInsert);
  17109. InsertTestBranch(isNewFlagSetRegOpnd, isNewFlagSetRegOpnd, Js::OpCode::BrNeq_A, labelDone, instrInsert);
  17110. IR::Instr *throwInstr = IR::Instr::New(
  17111. Js::OpCode::RuntimeTypeError,
  17112. IR::RegOpnd::New(TyMachReg, m_func),
  17113. IR::IntConstOpnd::New(SCODE_CODE(JSERR_ClassConstructorCannotBeCalledWithoutNew), TyInt32, m_func),
  17114. m_func);
  17115. instrInsert->InsertBefore(throwInstr);
  17116. this->LowerUnaryHelperMem(throwInstr, IR::HelperOp_RuntimeTypeError);
  17117. instrInsert->InsertBefore(labelDone);
  17118. instrInsert->Remove();
  17119. }
void
Lowerer::GenerateJavascriptOperatorsIsConstructorGotoElse(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelReturnTrue, IR::LabelInstr *labelReturnFalse)
{
    // Inline expansion of the "is constructor" check: unwraps any chain of proxies,
    // then branches to labelReturnTrue when the instance is a function whose
    // FunctionInfo lacks the ErrorOnNew attribute, else to labelReturnFalse.
    // Both labels are external to the emitted sequence. Clobbers instanceRegOpnd
    // while walking proxy targets.
    //
    // $ProxyLoop:
    // // if (!RecyclableObject::Is(instance)) { goto $ReturnFalse }; // omitted: RecyclableObject::Is(instance) always true
    // MOV s0, instance->type
    // MOV s1, s0->typeId
    // CMP s1, TypeIds_Proxy
    // JNE $NotProxy
    //
    // MOV instance, instance->target
    // JMP $ProxyLoop
    //
    // $NotProxy:
    // CMP s1, TypeIds_Function
    // JNE $ReturnFalse // external
    //
    // MOV s0, instance->functionInfo
    // MOV s1, s0->attributes
    // TEST s1, ErrorOnNew
    // JNE $ReturnFalse // external
    //
    // JMP $ReturnTrue // external
    Func *func = instrInsert->m_func;

    IR::LabelInstr *labelProxyLoop = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr *labelNotProxy = IR::LabelInstr::New(Js::OpCode::Label, func, false);

    // s0/s1 scratch registers from the pseudo-code above.
    IR::RegOpnd *indir0RegOpnd = IR::RegOpnd::New(TyMachPtr, func);
    IR::RegOpnd *indir1RegOpnd = IR::RegOpnd::New(TyUint32, func);

    instrInsert->InsertBefore(labelProxyLoop);
    labelProxyLoop->m_isLoopTop = true;

    // This lowering introduces a new loop into the IR, so build the Loop
    // bookkeeping by hand and mark the instance register live on the back edge
    // so the register allocator handles it correctly.
    Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, this->m_func);
    labelProxyLoop->SetLoop(loop);
    loop->SetLoopTopInstr(labelProxyLoop);
    loop->regAlloc.liveOnBackEdgeSyms = JitAnew(func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);
    loop->regAlloc.liveOnBackEdgeSyms->Set(instanceRegOpnd->m_sym->m_id);

    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func);
    LowererMD::CreateAssign(indir0RegOpnd, indirOpnd, instrInsert);

    indirOpnd = IR::IndirOpnd::New(indir0RegOpnd, Js::Type::GetOffsetOfTypeId(), TyUint32, func);
    LowererMD::CreateAssign(indir1RegOpnd, indirOpnd, instrInsert);

    InsertCompareBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::TypeIds_Proxy, TyUint32, func, true), Js::OpCode::BrNeq_A, labelNotProxy, instrInsert);

    // Proxy: replace instance with its target and re-test.
    indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::JavascriptProxy::GetOffsetOfTarget(), TyMachPtr, func);
    LowererMD::CreateAssign(instanceRegOpnd, indirOpnd, instrInsert);

    InsertBranch(Js::OpCode::Br, labelProxyLoop, instrInsert);

    instrInsert->InsertBefore(labelNotProxy);

    InsertCompareBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::TypeIds_Function, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnFalse, instrInsert);

    indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::JavascriptFunction::GetOffsetOfFunctionInfo(), TyMachPtr, func);
    LowererMD::CreateAssign(indir0RegOpnd, indirOpnd, instrInsert);

    indirOpnd = IR::IndirOpnd::New(indir0RegOpnd, Js::FunctionInfo::GetAttributesOffset(), TyUint32, func);
    LowererMD::CreateAssign(indir1RegOpnd, indirOpnd, instrInsert);

    // Functions marked ErrorOnNew (e.g. ones that cannot be new'ed) are not constructors.
    InsertTestBranch(indir1RegOpnd, IR::IntConstOpnd::New(Js::FunctionInfo::Attributes::ErrorOnNew, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnFalse, instrInsert);

    InsertBranch(Js::OpCode::Br, labelReturnTrue, instrInsert);
}
  17172. void
  17173. Lowerer::GenerateRecyclableObjectGetPrototypeNullptrGoto(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelReturnNullptr)
  17174. {
  17175. // MOV instance, instance->type
  17176. // MOV flags, instance->flags
  17177. // TEST flags, TypeFlagMask_HasSpecialPrototype
  17178. // JNE $ReturnNullptr // external, bypassing nullptr check
  17179. // MOV instance, instance->prototype
  17180. Func *func = instrInsert->m_func;
  17181. IR::RegOpnd *flagsRegOpnd = IR::RegOpnd::New(TyUint32, func);
  17182. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachPtr, func);
  17183. LowererMD::CreateAssign(instanceRegOpnd, indirOpnd, instrInsert);
  17184. indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::Type::GetOffsetOfFlags(), TyUint32, func);
  17185. LowererMD::CreateAssign(flagsRegOpnd, indirOpnd, instrInsert);
  17186. InsertTestBranch(flagsRegOpnd, IR::IntConstOpnd::New(TypeFlagMask_HasSpecialPrototype, TyUint32, func, true), Js::OpCode::BrNeq_A, labelReturnNullptr, instrInsert);
  17187. indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::Type::GetOffsetOfPrototype(), TyMachPtr, func);
  17188. LowererMD::CreateAssign(instanceRegOpnd, indirOpnd, instrInsert);
  17189. }
  17190. void
  17191. Lowerer::GenerateRecyclableObjectIsElse(IR::Instr *instrInsert, IR::RegOpnd *instanceRegOpnd, IR::LabelInstr *labelFalse)
  17192. {
  17193. Func *func = instrInsert->m_func;
  17194. #if INT32VAR
  17195. InsertTestBranch(instanceRegOpnd, IR::AddrOpnd::New((Js::Var)0xffff000000000000, IR::AddrOpndKindConstantVar, func, true), Js::OpCode::BrNeq_A, labelFalse, instrInsert);
  17196. #else
  17197. InsertTestBranch(instanceRegOpnd, IR::IntConstOpnd::New(Js::AtomTag, TyUint32, func, true), Js::OpCode::BrNeq_A, labelFalse, instrInsert);
  17198. #endif
  17199. }
void
Lowerer::GenerateLdSuper(IR::Instr* instrInsert)
{
    // Loads the "super" binding into dst: the prototype of the home object of the
    // current function, or undefined when any step of the chain is unavailable.
    // instrInsert is replaced by the emitted sequence below.
    //
    // MOV dst, undefined
    // MOV instance, functionObject // functionObject through stack params or src1
    // CMP [instance], VtableStackScriptFunction
    // JE $Done
    // MOV instance, instance->homeObj
    // TEST instance, instance
    // JZ $Done
    //
    // if (!RecyclableObject::Is(instance)) goto $Done
    //
    // instance = ((RecyclableObject*)instance)->GetPrototype();
    // if (instance == nullptr) goto $Done;
    //
    // if (!RecyclableObject::Is(instance)) goto $Done
    //
    // MOV dst, instance
    // $Done:
    Func *func = instrInsert->m_func;
    IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::Opnd *opndUndefAddress = this->LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndefined);
    IR::RegOpnd *instanceRegOpnd = IR::RegOpnd::New(TyMachPtr, func);

    IR::Opnd *dstOpnd = instrInsert->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    // Default result: undefined.
    LowererMD::CreateAssign(dstOpnd, opndUndefAddress, instrInsert);

    IR::Opnd * functionObjOpnd;
    m_lowererMD.LoadFunctionObjectOpnd(instrInsert, functionObjOpnd);
    LowererMD::CreateAssign(instanceRegOpnd, functionObjOpnd, instrInsert);

    // A stack-allocated function (identified by its vtable) is bailed out to the
    // undefined result.
    IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(instrInsert, VTableValue::VtableStackScriptFunction);
    InsertCompareBranch(IR::IndirOpnd::New(instanceRegOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
                        Js::OpCode::BrEq_A, true, labelDone, instrInsert);

    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::ScriptFunction::GetOffsetOfHomeObj(), TyMachPtr, func);
    LowererMD::CreateAssign(instanceRegOpnd, indirOpnd, instrInsert);

    // No home object recorded: result stays undefined.
    InsertTestBranch(instanceRegOpnd, instanceRegOpnd, Js::OpCode::BrEq_A, labelDone, instrInsert);

    this->GenerateRecyclableObjectIsElse(instrInsert, instanceRegOpnd, labelDone);
    this->GenerateRecyclableObjectGetPrototypeNullptrGoto(instrInsert, instanceRegOpnd, labelDone);
    this->GenerateRecyclableObjectIsElse(instrInsert, instanceRegOpnd, labelDone);

    LowererMD::CreateAssign(dstOpnd, instanceRegOpnd, instrInsert);

    instrInsert->InsertBefore(labelDone);
    instrInsert->Remove();
}
  17243. void
  17244. Lowerer::GenerateLdSuperCtor(IR::Instr* instrInsert)
  17245. {
  17246. // MOV instance, functionObject // functionObject through stack params or src1
  17247. //
  17248. // instance = ((RecyclableObject*)instance)->GetPrototype();
  17249. // if (instance == nullptr) goto $ThrowTypeError;
  17250. //
  17251. // MOV dst, instance
  17252. //
  17253. // if (!JavascriptOperators::IsConstructor(instance))
  17254. // goto $ThrowTypeError;
  17255. // else
  17256. // goto $Done;
  17257. //
  17258. // $helperLabelThrowTypeError:
  17259. // ThrowRunTimeError(JSERR_NotAConstructor);
  17260. //
  17261. // $Done:
  17262. Func *func = instrInsert->m_func;
  17263. IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  17264. IR::LabelInstr *helperLabelThrowTypeError = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  17265. IR::RegOpnd *instanceRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
  17266. IR::Opnd *dstOpnd = instrInsert->GetDst();
  17267. IR::Opnd * functionObjOpnd = nullptr;
  17268. m_lowererMD.LoadFunctionObjectOpnd(instrInsert, functionObjOpnd);
  17269. LowererMD::CreateAssign(instanceRegOpnd, functionObjOpnd, instrInsert);
  17270. this->GenerateRecyclableObjectGetPrototypeNullptrGoto(instrInsert, instanceRegOpnd, helperLabelThrowTypeError);
  17271. LowererMD::CreateAssign(dstOpnd, instanceRegOpnd, instrInsert);
  17272. this->GenerateJavascriptOperatorsIsConstructorGotoElse(instrInsert, instanceRegOpnd, labelDone, helperLabelThrowTypeError);
  17273. instrInsert->InsertBefore(helperLabelThrowTypeError);
  17274. this->GenerateRuntimeError(instrInsert, JSERR_NotAConstructor);
  17275. instrInsert->InsertBefore(labelDone);
  17276. instrInsert->Remove();
  17277. }
  17278. void
  17279. Lowerer::GenerateSetHomeObj(IR::Instr* instrInsert)
  17280. {
  17281. // MOV funcObj, src1
  17282. // CMP [funcObj], VtableJavascriptGeneratorFunction
  17283. // JNE $ScriptFunction
  17284. //
  17285. // MOV funcObj, funcObj->scriptFunction
  17286. //
  17287. // $ScriptFunction:
  17288. // MOV funcObj->homeObj, src2
  17289. Func *func = instrInsert->m_func;
  17290. IR::LabelInstr *labelScriptFunction = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  17291. IR::Opnd *src2Opnd = instrInsert->UnlinkSrc2();
  17292. IR::Opnd *src1Opnd = instrInsert->UnlinkSrc1();
  17293. IR::RegOpnd *funcObjRegOpnd = IR::RegOpnd::New(TyMachPtr, func);
  17294. IR::IndirOpnd *indirOpnd = nullptr;
  17295. Assert(src1Opnd != nullptr && src2Opnd != nullptr);
  17296. LowererMD::CreateAssign(funcObjRegOpnd, src1Opnd, instrInsert);
  17297. IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(instrInsert, VTableValue::VtableJavascriptGeneratorFunction);
  17298. InsertCompareBranch(IR::IndirOpnd::New(funcObjRegOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
  17299. Js::OpCode::BrNeq_A, true, labelScriptFunction, instrInsert);
  17300. indirOpnd = IR::IndirOpnd::New(funcObjRegOpnd, Js::JavascriptGeneratorFunction::GetOffsetOfScriptFunction() , TyMachPtr, func);
  17301. LowererMD::CreateAssign(funcObjRegOpnd, indirOpnd, instrInsert);
  17302. instrInsert->InsertBefore(labelScriptFunction);
  17303. indirOpnd = IR::IndirOpnd::New(funcObjRegOpnd, Js::ScriptFunction::GetOffsetOfHomeObj(), TyMachPtr, func);
  17304. LowererMD::CreateAssign(indirOpnd, src2Opnd, instrInsert);
  17305. instrInsert->Remove();
  17306. }
void
Lowerer::GenerateLoadNewTarget(IR::Instr* instrInsert)
{
    // Loads the value of 'new.target' into dst:
    //  - for generators, always undefined;
    //  - when CallFlags_NewTarget is set, the extra trailing argument on the stack;
    //  - when CallFlags_New is set (without NewTarget), the function object itself;
    //  - otherwise undefined.
    // instrInsert is replaced by the emitted sequence.
    Func *func = instrInsert->m_func;

    IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::LabelInstr * labelLoadArgNewTarget = IR::LabelInstr::New(Js::OpCode::Label, func, false);
    IR::Opnd* opndUndefAddress = this->LoadLibraryValueOpnd(instrInsert, LibraryValue::ValueUndefined);

    Assert(!func->IsInlinee());

    if (func->GetJnFunction()->IsGenerator())
    {
        // Generators get undefined unconditionally; just turn the instruction into
        // an assignment of undefined.
        instrInsert->SetSrc1(opndUndefAddress);
        LowererMD::ChangeToAssign(instrInsert);
        return;
    }

    // MOV dst, undefined                       // dst = undefined
    // MOV s1, callInfo                         // s1 = callInfo
    // TEST s1, Js::CallFlags_NewTarget << 24   // if (callInfo.Flags & Js::CallFlags_NewTarget)
    // JNE $LoadLastArgument                    //     goto $LoadLastArgument
    // TEST s1, Js::CallFlags_New << 24         // if (!(callInfo.Flags & Js::CallFlags_New))
    // JE $Done                                 //     goto $Done
    // MOV dst, functionObject                  // dst = functionObject
    // JMP $Done                                // goto $Done
    // $LoadLastArgument
    // AND s1, s1, (0x00FFFFFF)                 // s2 = callInfo.Count == arguments.length + 2
    // MOV dst, [ebp + (s1 - 1) * sizeof(Var) + formalParamOffset * sizeof(Var) ] // points to new.target
    // $Done

    IR::Opnd *dstOpnd = instrInsert->GetDst();
    Assert(dstOpnd->IsRegOpnd());
    LowererMD::CreateAssign(dstOpnd, opndUndefAddress, instrInsert);

    IR::SymOpnd *callInfoOpnd = Lowerer::LoadCallInfo(instrInsert);
    // Flag bits sit above the 24-bit count field of CallInfo.
    Assert(Js::CallInfo::ksizeofCount == 24);

    IR::RegOpnd *s1 = IR::RegOpnd::New(TyUint32, func);
    LowererMD::CreateAssign(s1, callInfoOpnd, instrInsert);

    InsertTestBranch(s1, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_NewTarget << Js::CallInfo::ksizeofCount, TyUint32, func, true), Js::OpCode::BrNeq_A, labelLoadArgNewTarget, instrInsert);

    InsertTestBranch(s1, IR::IntConstOpnd::New((IntConstType)Js::CallFlags_New << Js::CallInfo::ksizeofCount, TyUint32, func, true), Js::OpCode::BrEq_A, labelDone, instrInsert);

    // NOTE(review): the AND opcode here appears to be a placeholder; LoadFuncExpression
    // fills in the source and presumably rewrites the instruction — confirm.
    IR::Instr* loadFuncInstr = IR::Instr::New(Js::OpCode::AND, func);
    loadFuncInstr->SetDst(instrInsert->GetDst());
    m_lowererMD.LoadFuncExpression(loadFuncInstr);
    instrInsert->InsertBefore(loadFuncInstr);
    InsertBranch(Js::OpCode::Br, labelDone, instrInsert);

    instrInsert->InsertBefore(labelLoadArgNewTarget);

    // Mask off the flags to leave callInfo.Count.
    InsertAnd(s1, s1, IR::IntConstOpnd::New(0x00FFFFFF, TyUint32, func, true), instrInsert); // callInfo.Count

    // [formalOffset (4) + callInfo.Count -1] points to 'new.target' - see diagram in GenerateLoadStackArgumentByIndex()
    GenerateLoadStackArgumentByIndex(dstOpnd, s1, instrInsert, -1, m_func);

    instrInsert->InsertBefore(labelDone);
    instrInsert->Remove();
}
  17354. void
  17355. Lowerer::GenerateGetCurrentFunctionObject(IR::Instr * instr)
  17356. {
  17357. Func * func = this->m_func;
  17358. IR::Instr * insertBeforeInstr = instr->m_next;
  17359. IR::RegOpnd * functionObjectOpnd = instr->GetDst()->AsRegOpnd();
  17360. IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableStackScriptFunction);
  17361. IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
  17362. InsertCompareBranch(IR::IndirOpnd::New(functionObjectOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
  17363. Js::OpCode::BrNeq_A, true, labelDone, insertBeforeInstr);
  17364. IR::RegOpnd * boxedFunctionObjectOpnd = IR::RegOpnd::New(TyMachPtr, func);
  17365. InsertMove(boxedFunctionObjectOpnd, IR::IndirOpnd::New(functionObjectOpnd,
  17366. Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func), insertBeforeInstr);
  17367. InsertTestBranch(boxedFunctionObjectOpnd, boxedFunctionObjectOpnd, Js::OpCode::BrEq_A, true, labelDone, insertBeforeInstr);
  17368. InsertMove(functionObjectOpnd, boxedFunctionObjectOpnd, insertBeforeInstr);
  17369. insertBeforeInstr->InsertBefore(labelDone);
  17370. }
IR::Opnd *
Lowerer::GetInlineCacheFromFuncObjectForRuntimeUse(IR::Instr * instr, IR::PropertySymOpnd * propSymOpnd, bool isHelper)
{
    // Emits code (before instr) that loads the inline cache to hand to a runtime
    // helper: the function object's per-function cache at the propSymOpnd's cache
    // index when the function has inline caches, otherwise the propSymOpnd's own
    // runtime cache address. Returns the register operand holding the cache.
    // isHelper marks the emitted labels as helper blocks.
    //
    // MOV s1, [ebp + 8]                      //s1 = function object
    // MOV s2, [s1 + offset(hasInlineCaches)]
    // TEST s2, s2
    // JE $L1
    // MOV s3, [s1 + offset(m_inlineCaches)]  //s3 = inlineCaches from function object
    // MOV s4, [s3 + index*scale]             //s4 = inlineCaches[index]
    // JMP $L2
    // $L1
    // MOV s3, propSym->m_runtimeCache
    // $L2
    byte indirScale = this->m_lowererMD.GetDefaultIndirScale();

    IR::RegOpnd * funcObjOpnd = IR::RegOpnd::New(TyMachPtr, this->m_func);
    IR::Instr * funcObjInstr = IR::Instr::New(Js::OpCode::Ld_A, funcObjOpnd, instr->m_func);
    instr->InsertBefore(funcObjInstr);
    this->m_lowererMD.LoadFuncExpression(funcObjInstr);

    IR::RegOpnd * funcObjHasInlineCachesOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    this->m_lowererMD.CreateAssign(funcObjHasInlineCachesOpnd, IR::IndirOpnd::New(funcObjOpnd, Js::ScriptFunction::GetOffsetOfHasInlineCaches(), TyUint8, instr->m_func), instr);

    IR::LabelInstr * inlineCachesNullLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, isHelper);
    InsertTestBranch(funcObjHasInlineCachesOpnd, funcObjHasInlineCachesOpnd, Js::OpCode::BrEq_A, inlineCachesNullLabel, instr);

    IR::RegOpnd * inlineCachesOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    Lowerer::InsertMove(inlineCachesOpnd, IR::IndirOpnd::New(funcObjOpnd, Js::ScriptFunctionWithInlineCache::GetOffsetOfInlineCaches(), TyMachPtr, instr->m_func), instr);

    IR::RegOpnd * inlineCacheOpnd = IR::RegOpnd::New(TyMachPtr, instr->m_func);
    IR::RegOpnd * indexOpnd = IR::RegOpnd::New(TyMachReg, instr->m_func);
    int inlineCacheOffset;
    // Use a constant displacement when index * sizeof(InlineCache*) fits in an
    // int32; otherwise fall back to a register index with the default scale.
    if (!Int32Math::Mul(sizeof(Js::InlineCache *), propSymOpnd->m_inlineCacheIndex, &inlineCacheOffset))
    {
        Lowerer::InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCachesOpnd, inlineCacheOffset, TyMachPtr, instr->m_func), instr);
    }
    else
    {
        Lowerer::InsertMove(indexOpnd, IR::IntConstOpnd::New(propSymOpnd->m_inlineCacheIndex, TyUint32, instr->m_func), instr);
        Lowerer::InsertMove(inlineCacheOpnd, IR::IndirOpnd::New(inlineCachesOpnd, indexOpnd, indirScale, TyMachPtr, instr->m_func), instr);
    }

    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func, isHelper);
    InsertBranch(LowererMD::MDUncondBranchOpcode, continueLabel, instr);

    // Fallback path ($L1): load the cache address recorded on the property sym.
    IR::Instr * ldCacheFromPropSymOpndInstr = this->m_lowererMD.CreateAssign(inlineCacheOpnd, IR::AddrOpnd::New(propSymOpnd->m_runtimeInlineCache, IR::AddrOpndKindDynamicInlineCache, this->m_func), instr);
    ldCacheFromPropSymOpndInstr->InsertBefore(inlineCachesNullLabel);
    ldCacheFromPropSymOpndInstr->InsertAfter(continueLabel);

    return inlineCacheOpnd;
}
  17414. IR::Instr *
  17415. Lowerer::LowerInitClass(IR::Instr * instr)
  17416. {
  17417. // scriptContext
  17418. IR::Instr * prevInstr = LoadScriptContext(instr);
  17419. // extends
  17420. if (instr->GetSrc2() != nullptr)
  17421. {
  17422. IR::Opnd * extendsOpnd = instr->UnlinkSrc2();
  17423. m_lowererMD.LoadHelperArgument(instr, extendsOpnd);
  17424. }
  17425. else
  17426. {
  17427. IR::AddrOpnd* extendsOpnd = IR::AddrOpnd::NewNull(this->m_func);
  17428. m_lowererMD.LoadHelperArgument(instr, extendsOpnd);
  17429. }
  17430. // constructor
  17431. IR::Opnd * ctorOpnd = instr->UnlinkSrc1();
  17432. m_lowererMD.LoadHelperArgument(instr, ctorOpnd);
  17433. // call
  17434. m_lowererMD.ChangeToHelperCall(instr, IR::HelperOP_InitClass);
  17435. return prevInstr;
  17436. }
void
Lowerer::LowerNewConcatStrMulti(IR::Instr * instr)
{
    // Allocates a Js::ConcatStringMulti with `count` slots and initializes its
    // header fields. The slot array itself is filled by the lowered
    // SetConcatStrMultiItem stores; recycler memory is pre-zeroed, so zero-valued
    // fields are elided by the GenerateRecyclerMemInit* helpers.
    IR::IntConstOpnd * countOpnd = instr->UnlinkSrc1()->AsIntConstOpnd();
    IR::RegOpnd * dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    uint8 count = (uint8)countOpnd->GetValue();

    Assert(dstOpnd->GetValueType().IsString());

    GenerateRecyclerAlloc(IR::HelperAllocMemForConcatStringMulti, Js::ConcatStringMulti::GetAllocSize(count), dstOpnd, instr);

    // vtable, type, pszValue (flattened-string cache — presumably computed lazily,
    // so start it null), charLength = 0, and the slot count.
    GenerateRecyclerMemInit(dstOpnd, 0, this->LoadVTableValueOpnd(instr, VTableValue::VtableConcatStringMulti), instr);
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfType(),
        this->LoadLibraryValueOpnd(instr, LibraryValue::ValueStringTypeStatic), instr);
    GenerateRecyclerMemInitNull(dstOpnd, Js::ConcatStringMulti::GetOffsetOfpszValue(), instr);
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), 0, instr);
    GenerateRecyclerMemInit(dstOpnd, Js::ConcatStringMulti::GetOffsetOfSlotCount(), countOpnd->AsUint32(), instr);

    instr->Remove();
}
  17453. void
  17454. Lowerer::LowerNewConcatStrMultiBE(IR::Instr * instr)
  17455. {
  17456. // Lower
  17457. // t1 = SetConcatStrMultiBE s1
  17458. // t2 = SetConcatStrMultiBE s2, t1
  17459. // t3 = SetConcatStrMultiBE s3, t2
  17460. // s = NewConcatStrMultiBE 3, t3
  17461. // to
  17462. // s = new concat string
  17463. // s+0 = s1
  17464. // s+1 = s2
  17465. // s+2 = s3
  17466. Assert(instr->GetSrc1()->IsConstOpnd());
  17467. Assert(instr->GetDst()->IsRegOpnd());
  17468. IR::RegOpnd * newString = instr->GetDst()->AsRegOpnd();
  17469. IR::Opnd * newConcatItemOpnd = nullptr;
  17470. uint index = instr->GetSrc1()->AsIntConstOpnd()->AsUint32() - 1;
  17471. IR::Instr * concatItemInstr = nullptr;
  17472. IR::Opnd * linkOpnd = instr->GetSrc2();
  17473. while (linkOpnd)
  17474. {
  17475. Assert(linkOpnd->IsRegOpnd());
  17476. concatItemInstr = linkOpnd->GetStackSym()->GetInstrDef();
  17477. Assert(concatItemInstr->m_opcode == Js::OpCode::SetConcatStrMultiItemBE);
  17478. IR::Opnd * concatItemOpnd = concatItemInstr->GetSrc1();
  17479. Assert(concatItemOpnd->IsRegOpnd());
  17480. // If one of the concat items is equal to the dst of the concat expressions (s = s + a + b),
  17481. // hoist the load of that item to before the setting of the new string to the dst.
  17482. if (concatItemOpnd->IsEqual(newString))
  17483. {
  17484. if (!newConcatItemOpnd)
  17485. {
  17486. IR::Instr * hoistSrcInstr = concatItemInstr->HoistSrc1(Js::OpCode::Ld_A);
  17487. newConcatItemOpnd = hoistSrcInstr->GetDst();
  17488. }
  17489. concatItemOpnd = newConcatItemOpnd;
  17490. }
  17491. else
  17492. {
  17493. // If only some of the SetConcatStrMultiItemBE instructions were CSE'd and the rest, along with the NewConcatStrMultiBE
  17494. // instruction, were in a loop, the strings on the CSE'd Set*BE instructions will become live on back edge. Add them to
  17495. // addToLiveOnBackEdgeSyms here and clear when we reach the Set*BE instruction.
  17496. // Note that we are doing this only for string opnds which are not the same as the dst of the concat expression. Reasoning
  17497. // behind this is that if a loop has a concat expression with one of its sources same as the dst, the Set*BE instruction
  17498. // for the dst wouldn't have been CSE'd as the dst's value is changing in the loop and the backward pass should have set the
  17499. // symbol as live on backedge.
  17500. this->addToLiveOnBackEdgeSyms->Set(concatItemOpnd->GetStackSym()->m_id);
  17501. }
  17502. IR::Instr * newConcatItemInstr = IR::Instr::New(Js::OpCode::SetConcatStrMultiItem,
  17503. IR::IndirOpnd::New(newString, index, TyVar, instr->m_func),
  17504. concatItemOpnd,
  17505. instr->m_func);
  17506. instr->InsertAfter(newConcatItemInstr);
  17507. this->LowerSetConcatStrMultiItem(newConcatItemInstr);
  17508. linkOpnd = concatItemInstr->GetSrc2();
  17509. index--;
  17510. }
  17511. Assert(index == -1);
  17512. this->LowerNewConcatStrMulti(instr);
  17513. }
void
Lowerer::LowerSetConcatStrMultiItem(IR::Instr * instr)
{
    // Lowers "concatStr[slot] = srcString": accumulates the source string's
    // length into the concat string's charLength, then rewrites the indirect
    // dst offset from a slot index into a byte offset and turns the
    // instruction into a plain store.
    Func * func = this->m_func;
    IR::IndirOpnd * dstOpnd = instr->GetDst()->AsIndirOpnd();
    IR::RegOpnd * concatStrOpnd = dstOpnd->GetBaseOpnd();
    IR::RegOpnd * srcOpnd = instr->UnlinkSrc1()->AsRegOpnd();

    Assert(concatStrOpnd->GetValueType().IsString());
    Assert(srcOpnd->GetValueType().IsString());

    // Compound strings are mutable; clone (via helper) so the stored item is stable.
    srcOpnd = GenerateGetImmutableOrScriptUnreferencedString(srcOpnd, instr, IR::HelperOp_CompoundStringCloneForConcat);
    instr->SetSrc1(srcOpnd);

    // dst.charLength += src.length
    IR::IndirOpnd * dstLength = IR::IndirOpnd::New(concatStrOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), TyUint32, func);
    IR::Opnd * srcLength;
    if (srcOpnd->m_sym->m_isStrConst)
    {
        // Length of a const string is known at jit time.
        srcLength = IR::IntConstOpnd::New(Js::JavascriptString::FromVar(srcOpnd->m_sym->GetConstAddress())->GetLength(),
            TyUint32, func);
    }
    else
    {
        // Load the length field at runtime. NOTE(review): this reuses
        // ConcatStringMulti's charLength offset for a generic string operand —
        // presumably the length field offset is shared across string types; verify.
        srcLength = IR::RegOpnd::New(TyUint32, func);
        InsertMove(srcLength, IR::IndirOpnd::New(srcOpnd, Js::ConcatStringMulti::GetOffsetOfcharLength(), TyUint32, func), instr);
    }
    InsertAdd(false, dstLength, dstLength, srcLength, instr);

    // Convert the slot index in the dst indir into the actual byte offset into the slot array.
    dstOpnd->SetOffset(dstOpnd->GetOffset() * sizeof(Js::JavascriptString *) + Js::ConcatStringMulti::GetOffsetOfSlots());
    this->m_lowererMD.ChangeToAssign(instr);
}
IR::RegOpnd *
Lowerer::GenerateGetImmutableOrScriptUnreferencedString(IR::RegOpnd * strOpnd, IR::Instr * insertBeforeInstr, IR::JnHelperMethod helperMethod, bool reloadDst)
{
    // If strOpnd may be a (mutable) CompoundString, emit a runtime vtable check
    // and call `helperMethod` to obtain a safe string to keep a reference to;
    // otherwise the operand is passed through unchanged. Returns the operand
    // holding the result (a fresh register when reloadDst is true).
    if (strOpnd->m_sym->m_isStrConst)
    {
        // Const strings are already immutable; nothing to do.
        return strOpnd;
    }

    Func * const func = this->m_func;
    IR::RegOpnd *dstOpnd = reloadDst == true ? IR::RegOpnd::New(TyVar, func) : strOpnd;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

    // Tagged values can't be CompoundStrings; skip the vtable check for them.
    if (!strOpnd->IsNotTaggedValue())
    {
        this->m_lowererMD.GenerateObjectTest(strOpnd, insertBeforeInstr, doneLabel);
    }

    // CMP [strOpnd], Js::CompoundString::`vtable'
    // JEQ $helper
    InsertCompareBranch(
        IR::IndirOpnd::New(strOpnd, 0, TyMachPtr, func),
        this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
        Js::OpCode::BrEq_A,
        helperLabel,
        insertBeforeInstr);

    // Fast path: not a CompoundString — use the string as-is.
    if (reloadDst)
    {
        InsertMove(dstOpnd, strOpnd, insertBeforeInstr);
    }

    InsertBranch(Js::OpCode::Br, doneLabel, insertBeforeInstr);

    // Slow path: dst = helperMethod(strOpnd)
    insertBeforeInstr->InsertBefore(helperLabel);
    this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, strOpnd);
    IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, dstOpnd, func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(helperMethod, func));
    insertBeforeInstr->InsertBefore(callInstr);
    this->m_lowererMD.LowerCall(callInstr, 0);

    insertBeforeInstr->InsertBefore(doneLabel);
    return dstOpnd;
}
void
Lowerer::LowerConvStrCommon(IR::JnHelperMethod helper, IR::Instr * instr)
{
    // Shared lowering for the string-conversion opcodes: if src1 may already be
    // a string, emit a runtime string test that bypasses the helper; otherwise
    // (or on the slow path) call `helper`(scriptContext, src1 [, src2]).
    IR::RegOpnd * src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();
    if (!src1Opnd->GetValueType().IsNotString())
    {
        // Fast path: already a string — just copy it to dst and skip the call.
        IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
        this->GenerateStringTest(src1Opnd, instr, helperLabel);
        InsertMove(instr->GetDst(), src1Opnd, instr);
        InsertBranch(Js::OpCode::Br, doneLabel, instr);
        instr->InsertBefore(helperLabel);
        instr->InsertAfter(doneLabel);
    }

    // Helper arguments are loaded in reverse order.
    if (instr->GetSrc2())
    {
        this->m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
    }
    this->LoadScriptContext(instr);
    this->m_lowererMD.LoadHelperArgument(instr, src1Opnd);
    this->m_lowererMD.ChangeToHelperCall(instr, helper);
}
  17600. void
  17601. Lowerer::LowerConvStr(IR::Instr * instr)
  17602. {
  17603. LowerConvStrCommon(IR::HelperOp_ConvString, instr);
  17604. }
  17605. void
  17606. Lowerer::LowerCoerseStr(IR::Instr* instr)
  17607. {
  17608. LowerConvStrCommon(IR::HelperOp_CoerseString, instr);
  17609. }
///----------------------------------------------------------------------------
///
/// Lowerer::LowerCoerseStrOrRegex - used for String.Replace(arg1, arg2),
/// where arg1 may be a regex or a string.
/// If arg1 is not a regex, lower to String.Replace(CoerseStr(arg1), arg2).
///
/// CoerseStrOrRegex arg1
///
///        if (value == regex) goto :done
///        else
/// helper:
///        ConvStr value
/// done:
///----------------------------------------------------------------------------
void
Lowerer::LowerCoerseStrOrRegex(IR::Instr* instr)
{
    IR::RegOpnd * src1Opnd = instr->GetSrc1()->AsRegOpnd();
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

    // if (value == regex) goto :done
    // Tagged values can't be regex objects; send them straight to the helper.
    if (!src1Opnd->IsNotTaggedValue())
    {
        this->m_lowererMD.GenerateObjectTest(src1Opnd, instr, helperLabel);
    }

    // Compare the object's vtable against JavascriptRegExp's.
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    InsertCompareBranch(IR::IndirOpnd::New(src1Opnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);

    // Regex: pass it through unchanged.
    InsertMove(instr->GetDst(), src1Opnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);

    // helper: ConvStr value
    LowerConvStr(instr);
}
///----------------------------------------------------------------------------
///
/// Lowerer::LowerCoerseRegex - used for String.Match(arg1).
/// If arg1 is already a regex it is passed through; otherwise a regex is
/// created from it via the Op_CoerseRegex helper.
///
///----------------------------------------------------------------------------
void
Lowerer::LowerCoerseRegex(IR::Instr* instr)
{
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
    IR::RegOpnd * src1Opnd = instr->UnlinkSrc1()->AsRegOpnd();

    // Tagged values can't be regex objects; send them straight to the helper.
    if (!src1Opnd->IsNotTaggedValue())
    {
        this->m_lowererMD.GenerateObjectTest(src1Opnd, instr, helperLabel);
    }

    // Compare the object's vtable against JavascriptRegExp's.
    IR::Opnd * vtableOpnd = LoadVTableValueOpnd(instr, VTableValue::VtableJavascriptRegExp);
    InsertCompareBranch(IR::IndirOpnd::New(src1Opnd, 0, TyMachPtr, instr->m_func),
        vtableOpnd, Js::OpCode::BrNeq_A, helperLabel, instr);

    // Fast path: already a regex — copy it to dst.
    InsertMove(instr->GetDst(), src1Opnd, instr);
    InsertBranch(Js::OpCode::Br, doneLabel, instr);
    instr->InsertBefore(helperLabel);
    instr->InsertAfter(doneLabel);

    // Slow path: Op_CoerseRegex(scriptContext, src1, options = null).
    // Helper arguments are loaded in reverse order.
    this->LoadScriptContext(instr);
    this->m_lowererMD.LoadHelperArgument(instr, IR::AddrOpnd::NewNull(instr->m_func)); // option
    this->m_lowererMD.LoadHelperArgument(instr, src1Opnd); // regex
    this->m_lowererMD.ChangeToHelperCall(instr, IR::HelperOp_CoerseRegex);
}
  17673. void
  17674. Lowerer::LowerConvPrimStr(IR::Instr * instr)
  17675. {
  17676. LowerConvStrCommon(IR::HelperOp_ConvPrimitiveString, instr);
  17677. }
  17678. void
  17679. Lowerer::GenerateRecyclerAlloc(IR::JnHelperMethod allocHelper, size_t allocSize, IR::RegOpnd* newObjDst, IR::Instr* insertionPointInstr, bool inOpHelper)
  17680. {
  17681. size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);
  17682. this->GenerateRecyclerAllocAligned(allocHelper, alignedSize, newObjDst, insertionPointInstr, inOpHelper);
  17683. }
void
Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, int value, IR::Instr * insertBeforeInstr, bool isZeroed)
{
    // Store a signed integer constant at [opnd + offset].
    // When the destination is known to be zero-initialized (recycler memory),
    // a zero store is elided entirely and small positive values are narrowed
    // to an 8- or 16-bit store (upper bytes are already zero).
    IRType type = TyInt32;
    if (isZeroed)
    {
        if (value == 0)
        {
            // Recycler memory are zero initialized
            return;
        }
        // Narrowing is only safe for positive values: a negative value would
        // need sign-extension into the already-zero upper bytes.
        if (value > 0 && value <= USHORT_MAX)
        {
            // Recycler memory are zero initialized, so we can just initialize the 8 or 16 bits of value
            type = (value <= UCHAR_MAX)? TyUint8 : TyUint16;
        }
    }
    Func * func = this->m_func;
    InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
}
void
Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, uint32 value, IR::Instr * insertBeforeInstr, bool isZeroed)
{
    // Store an unsigned 32-bit constant at [opnd + offset].
    // When the destination is known to be zero-initialized (recycler memory),
    // a zero store is elided and small values are narrowed to an 8- or 16-bit
    // store (upper bytes are already zero). Unlike the signed overload, no
    // positivity check is needed here.
    IRType type = TyUint32;
    if (isZeroed)
    {
        if (value == 0)
        {
            // Recycler memory are zero initialized
            return;
        }
        if (value <= USHORT_MAX)
        {
            // Recycler memory are zero initialized, so we can just initialize the 8 or 16 bits of value
            type = (value <= UCHAR_MAX)? TyUint8 : TyUint16;
        }
    }
    Func * func = this->m_func;
    InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), IR::IntConstOpnd::New(value, type, func), insertBeforeInstr);
}
  17724. void
  17725. Lowerer::GenerateMemInitNull(IR::RegOpnd * opnd, int32 offset, IR::Instr * insertBeforeInstr, bool isZeroed)
  17726. {
  17727. if (isZeroed)
  17728. {
  17729. return;
  17730. }
  17731. GenerateMemInit(opnd, offset, IR::AddrOpnd::NewNull(m_func), insertBeforeInstr);
  17732. }
  17733. void
  17734. Lowerer::GenerateMemInit(IR::RegOpnd * opnd, int32 offset, IR::Opnd * value, IR::Instr * insertBeforeInstr, bool isZeroed)
  17735. {
  17736. IRType type = value->GetType();
  17737. Func * func = this->m_func;
  17738. InsertMove(IR::IndirOpnd::New(opnd, offset, type, func), value, insertBeforeInstr);
  17739. }
  17740. void
  17741. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, int32 value, IR::Instr * insertBeforeInstr)
  17742. {
  17743. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  17744. }
  17745. void
  17746. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, uint32 value, IR::Instr * insertBeforeInstr)
  17747. {
  17748. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  17749. }
  17750. void
  17751. Lowerer::GenerateRecyclerMemInitNull(IR::RegOpnd * opnd, int32 offset, IR::Instr * insertBeforeInstr)
  17752. {
  17753. GenerateMemInitNull(opnd, offset, insertBeforeInstr, true);
  17754. }
  17755. void
  17756. Lowerer::GenerateRecyclerMemInit(IR::RegOpnd * opnd, int32 offset, IR::Opnd * value, IR::Instr * insertBeforeInstr)
  17757. {
  17758. GenerateMemInit(opnd, offset, value, insertBeforeInstr, true);
  17759. }
  17760. void
  17761. Lowerer::GenerateMemCopy(IR::Opnd * dst, IR::Opnd * src, uint32 size, IR::Instr * insertBeforeInstr)
  17762. {
  17763. Func * func = this->m_func;
  17764. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, IR::IntConstOpnd::New(size, TyUint32, func));
  17765. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, src);
  17766. this->m_lowererMD.LoadHelperArgument(insertBeforeInstr, dst);
  17767. IR::Instr * memcpyInstr = IR::Instr::New(Js::OpCode::Call, func);
  17768. memcpyInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperMemCpy, func));
  17769. insertBeforeInstr->InsertBefore(memcpyInstr);
  17770. m_lowererMD.LowerCall(memcpyInstr, 3);
  17771. }
  17772. bool
  17773. Lowerer::GenerateSimplifiedInt4Rem(
  17774. IR::Instr *const remInstr,
  17775. IR::LabelInstr *const skipBailOutLabel) const
  17776. {
  17777. Assert(remInstr);
  17778. Assert(remInstr->m_opcode == Js::OpCode::Rem_I4);
  17779. auto *dst = remInstr->GetDst(), *src1 = remInstr->GetSrc1(), *src2 = remInstr->GetSrc2();
  17780. Assert(src1 && src2);
  17781. Assert(dst->IsRegOpnd());
  17782. bool isModByPowerOf2 = (remInstr->HasBailOutInfo() && remInstr->GetBailOutKind() == IR::BailOnModByPowerOf2);
  17783. if (PHASE_OFF(Js::Phase::MathFastPathPhase, remInstr->m_func->GetTopFunc()) && !isModByPowerOf2)
  17784. return false;
  17785. if (!(src2->IsIntConstOpnd() && Math::IsPow2(src2->AsIntConstOpnd()->AsInt32())) && !isModByPowerOf2)
  17786. {
  17787. return false;
  17788. }
  17789. // We have:
  17790. // s3 = s1 % s2 , where s2 = +2^i
  17791. //
  17792. // Generate:
  17793. // test s1, s1
  17794. // js $slowPathLabel
  17795. // s3 = and s1, 0x00..fff (2^i - 1)
  17796. // jmp $doneLabel
  17797. // $slowPathLabel:
  17798. // (Slow path)
  17799. // (Neg zero check)
  17800. // (Bailout code)
  17801. // $doneLabel:
  17802. IR::LabelInstr *doneLabel = skipBailOutLabel, *slowPathLabel;
  17803. if (!doneLabel)
  17804. {
  17805. doneLabel = IR::LabelInstr::New(Js::OpCode::Label, remInstr->m_func);
  17806. remInstr->InsertAfter(doneLabel);
  17807. }
  17808. slowPathLabel = IR::LabelInstr::New(Js::OpCode::Label, remInstr->m_func, isModByPowerOf2);
  17809. remInstr->InsertBefore(slowPathLabel);
  17810. // test s1, s1
  17811. InsertTest(src1, src1, slowPathLabel);
  17812. // jsb $slowPathLabel
  17813. InsertBranch(LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), slowPathLabel, slowPathLabel);
  17814. // s3 = and s1, 0x00..fff (2^i - 1)
  17815. IR::Opnd* maskOpnd;
  17816. if(isModByPowerOf2)
  17817. {
  17818. Assert(isModByPowerOf2);
  17819. maskOpnd = IR::RegOpnd::New(TyInt32, remInstr->m_func);
  17820. // mov maskOpnd, s2
  17821. InsertMove(maskOpnd, src2, slowPathLabel);
  17822. // dec maskOpnd
  17823. InsertSub(/*needFlags*/ true, maskOpnd, maskOpnd, IR::IntConstOpnd::New(1, TyInt32, this->m_func, /*dontEncode*/true), slowPathLabel);
  17824. // maskOpnd < 0 goto $slowPath
  17825. InsertBranch(LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), slowPathLabel, slowPathLabel);
  17826. // TEST src2, maskOpnd
  17827. InsertTestBranch(src2, maskOpnd, Js::OpCode::BrNeq_A, slowPathLabel, slowPathLabel);
  17828. }
  17829. else
  17830. {
  17831. Assert(src2->IsIntConstOpnd());
  17832. int32 mask = src2->AsIntConstOpnd()->AsInt32() - 1;
  17833. maskOpnd = IR::IntConstOpnd::New(mask, TyInt32, remInstr->m_func);
  17834. }
  17835. // dst = src1 & maskOpnd
  17836. InsertAnd(dst, src1, maskOpnd, slowPathLabel);
  17837. // jmp $doneLabel
  17838. InsertBranch(Js::OpCode::Br, doneLabel, slowPathLabel);
  17839. return true;
  17840. }
#if DBG
// Debug-only sanity check run after lowering: returns true if `opcode` is
// still legitimate at this stage of compilation. Machine-dependent opcodes
// (> MDStart) are always valid; a small set of pseudo-ops are valid either
// unconditionally or only until a later phase (peeps / final lower) removes them.
bool
Lowerer::ValidOpcodeAfterLower(IR::Instr* instr, Func * func)
{
    Js::OpCode opcode = instr->m_opcode;
    if (opcode > Js::OpCode::MDStart)
    {
        // Machine-dependent opcode — always valid after lowering.
        return true;
    }
    switch (opcode)
    {
    case Js::OpCode::Label:
    case Js::OpCode::StatementBoundary:
    case Js::OpCode::DeletedNonHelperBranch:
    case Js::OpCode::FunctionEntry:
    case Js::OpCode::FunctionExit:
    case Js::OpCode::TryCatch:
    case Js::OpCode::TryFinally:
    case Js::OpCode::Catch:
    case Js::OpCode::GeneratorResumeJumpTable:
    case Js::OpCode::Break:
#ifdef _M_X64
    case Js::OpCode::PrologStart:
    case Js::OpCode::PrologEnd:
#endif
#ifdef _M_IX86
    case Js::OpCode::BailOutStackRestore:
#endif
        return true;

    case Js::OpCode::RestoreOutParam:
        // Only appears after register allocation.
        Assert(func->isPostRegAlloc);
        return true;

    // These may be removed by peep
    case Js::OpCode::StartCall:
    case Js::OpCode::LoweredStartCall:
    case Js::OpCode::Nop:
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
        return func && !func->isPostPeeps;

    case Js::OpCode::InlineeStart:
    case Js::OpCode::InlineeEnd:
        // Only kept around when inlinee argument optimization is active.
        return instr->m_func->m_hasInlineArgsOpt;
#ifdef _M_X64
    case Js::OpCode::LdArgSize:
    case Js::OpCode::LdSpillSize:
        return func && !func->isPostFinalLower;
#endif
    case Js::OpCode::Leave:
        Assert(!func->IsLoopBodyInTry());
        Assert(func->HasTry() && func->DoOptimizeTryCatch());
        return func && !func->isPostFinalLower; //Lowered in FinalLower phase
    };
    return false;
}
#endif
void Lowerer::LowerProfiledBeginSwitch(IR::JitProfilingInstr* instr)
{
    // Replace the profiled BeginSwitch with a call to the profiling helper:
    // SimpleProfiledSwitch(functionBody, profileId, switchValue).
    // Helper arguments are loaded in reverse order.
    Assert(instr->isBeginSwitch);

    m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
    instr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleProfiledSwitch, m_func));
    m_lowererMD.LowerCall(instr, 0);
}
void Lowerer::LowerProfiledBinaryOp(IR::JitProfilingInstr* instr, IR::JnHelperMethod meth)
{
    // Replace a profiled binary op with a call to its profiling helper:
    // meth(functionBody, profileId, src1, src2).
    // Helper arguments are loaded in reverse order.
    m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc2());
    m_lowererMD.LoadHelperArgument(instr, instr->UnlinkSrc1());
    m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(instr->profileId, m_func));
    m_lowererMD.LoadHelperArgument(instr, CreateFunctionBodyOpnd(instr->m_func));
    instr->SetSrc1(IR::HelperCallOpnd::New(meth, m_func));
    m_lowererMD.LowerCall(instr, 0);
}
void Lowerer::GenerateNullOutGeneratorFrame(IR::Instr* insertInstr)
{
    // null out frame pointer on generator object to signal completion to JavascriptGenerator::CallGenerator
    //   s = MOV prm1
    //   s[offset of JavascriptGenerator::frame] = MOV nullptr

    // Load the first formal parameter (the generator object).
    StackSym *symSrc = StackSym::NewParamSlotSym(1, m_func);
    m_func->SetArgOffset(symSrc, LowererMD::GetFormalParamOffset() * MachPtr);
    IR::SymOpnd *srcOpnd = IR::SymOpnd::New(symSrc, TyMachPtr, m_func);
    IR::RegOpnd *dstOpnd = IR::RegOpnd::New(TyMachReg, m_func);
    m_lowererMD.CreateAssign(dstOpnd, srcOpnd, insertInstr);

    // Store null into the generator's frame field.
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(dstOpnd, Js::JavascriptGenerator::GetFrameOffset(), TyMachPtr, m_func);
    IR::AddrOpnd *addrOpnd = IR::AddrOpnd::NewNull(m_func);
    m_lowererMD.CreateAssign(indirOpnd, addrOpnd, insertInstr);
}
void Lowerer::LowerFunctionExit(IR::Instr* funcExit)
{
    // On exit from a generator, null out its frame to mark completion.
    if (m_func->GetJnFunction()->IsGenerator())
    {
        GenerateNullOutGeneratorFrame(funcExit->m_prev);
    }

    // The rest only applies when SimpleJit dynamic profiling is enabled.
    if (!m_func->DoSimpleJitDynamicProfile())
    {
        return;
    }

    // Call SimpleCleanImplicitCallFlags(functionBody) just before the exit.
    IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleCleanImplicitCallFlags, m_func));
    funcExit->m_prev->InsertBefore(callInstr);
    m_lowererMD.LoadHelperArgument(callInstr, CreateFunctionBodyOpnd(funcExit->m_func));
    m_lowererMD.LowerCall(callInstr, 0);
}
void Lowerer::LowerFunctionEntry(IR::Instr* funcEntry)
{
    // Emits the per-invocation bookkeeping at function entry: the call-count
    // update, and (under SimpleJit dynamic profiling) parameter profiling and
    // implicit-call-flag clearing.
    Assert(funcEntry->m_opcode == Js::OpCode::FunctionEntry);

    //Don't do a body call increment for loops or asm.js
    if (m_func->IsLoopBody() || m_func->GetJnFunction()->GetIsAsmjsMode())
    {
        return;
    }

    IR::Instr *const insertBeforeInstr = this->m_func->GetFunctionEntryInsertionPoint();

    LowerFunctionBodyCallCountChange(insertBeforeInstr);

    if (m_func->DoSimpleJitDynamicProfile())
    {
        const auto jn = m_func->GetJnFunction();

        // Only generate the argument profiling if the function expects to have some arguments to profile and only if
        //    it has implicit ArgIns (the latter is a restriction imposed by the Interpreter, so it is mirrored in SimpleJit)
        if (jn->GetInParamsCount() > 1 && jn->GetHasImplicitArgIns())
        {
            // Call out to the argument profiling helper
            IR::Instr* callInstr = IR::Instr::New(Js::OpCode::Call, m_func);
            callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperSimpleProfileParameters, m_func));
            insertBeforeInstr->InsertBefore(callInstr);
            m_lowererMD.LoadHelperArgument(callInstr, IR::Opnd::CreateFramePointerOpnd(m_func));
            m_lowererMD.LowerCall(callInstr, 0);
        }

        // Clear existing ImplicitCallFlags
        const auto starFlag = GetImplicitCallFlagsOpnd();
        this->InsertMove(starFlag, CreateClearImplicitCallFlagsOpnd(), insertBeforeInstr);
    }
}
void Lowerer::LowerFunctionBodyCallCountChange(IR::Instr *const insertBeforeInstr)
{
    // Emits the per-call counter update on the function body.
    // Full JIT: increment (saturating at the uint8 max).
    // SimpleJit: decrement, and on hitting the threshold call
    // TransitionFromSimpleJit to promote the function.
    Assert(insertBeforeInstr);
    Func *const func = insertBeforeInstr->m_func;
    const bool isSimpleJit = func->IsSimpleJit();

    // No counting needed when SimpleJit will never hand off to the full JIT.
    if ((isSimpleJit && !func->GetTopFunc()->GetJnFunction()->DoFullJit()))
    {
        return;
    }

    // mov countAddress, <countAddress>
    IR::RegOpnd *const countAddressOpnd = IR::RegOpnd::New(StackSym::New(TyMachPtr, func), TyMachPtr, func);
    const IR::AutoReuseOpnd autoReuseCountAddressOpnd(countAddressOpnd, func);
    InsertMove(
        countAddressOpnd,
        IR::AddrOpnd::New(func->GetCallsCountAddress(), IR::AddrOpndKindDynamicMisc, func, true),
        insertBeforeInstr);

    // The counter itself is a single byte at that address.
    IR::IndirOpnd *const countOpnd = IR::IndirOpnd::New(countAddressOpnd, 0, TyUint8, func);
    const IR::AutoReuseOpnd autoReuseCountOpnd(countOpnd, func);
    if(!isSimpleJit)
    {
        // InsertIncUint8PreventOverflow [countAddress]
        InsertIncUInt8PreventOverflow(countOpnd, countOpnd, insertBeforeInstr);
        return;
    }

    // InsertDecUint8PreventOverflow [countAddress]
    IR::Instr *onOverflowInsertBeforeInstr;
    InsertDecUInt8PreventOverflow(
        countOpnd,
        countOpnd,
        insertBeforeInstr,
        &onOverflowInsertBeforeInstr);

    // ($overflow:)
    //     TransitionFromSimpleJit(framePointer)
    m_lowererMD.LoadHelperArgument(onOverflowInsertBeforeInstr, IR::Opnd::CreateFramePointerOpnd(func));
    IR::Instr *const callInstr = IR::Instr::New(Js::OpCode::Call, func);
    callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::HelperTransitionFromSimpleJit, func));
    onOverflowInsertBeforeInstr->InsertBefore(callInstr);
    m_lowererMD.LowerCall(callInstr, 0);
}
  18011. IR::Opnd*
  18012. Lowerer::GetImplicitCallFlagsOpnd()
  18013. {
  18014. return GetImplicitCallFlagsOpnd(m_func);
  18015. }
  18016. IR::Opnd*
  18017. Lowerer::GetImplicitCallFlagsOpnd(Func * func)
  18018. {
  18019. return IR::MemRefOpnd::New(func->GetScriptContext()->GetThreadContext()->GetAddressOfImplicitCallFlags(), GetImplicitCallFlagsType(), func);
  18020. }
  18021. IR::Opnd*
  18022. Lowerer::CreateClearImplicitCallFlagsOpnd()
  18023. {
  18024. return IR::IntConstOpnd::New(Js::ImplicitCall_None, GetImplicitCallFlagsType(), m_func);
  18025. }
  18026. void
  18027. Lowerer::LowerSpreadArrayLiteral(IR::Instr *instr)
  18028. {
  18029. LoadScriptContext(instr);
  18030. IR::Opnd *src2Opnd = instr->UnlinkSrc2();
  18031. m_lowererMD.LoadHelperArgument(instr, src2Opnd);
  18032. IR::Opnd *src1Opnd = instr->UnlinkSrc1();
  18033. m_lowererMD.LoadHelperArgument(instr, src1Opnd);
  18034. this->m_lowererMD.ChangeToHelperCall(instr, IR::HelperSpreadArrayLiteral);
  18035. }
IR::Instr *
Lowerer::LowerSpreadCall(IR::Instr *instr, Js::CallFlags callFlags, bool setupProfiledVersion)
{
    // Lowers a call with spread arguments into a call to the spread helper
    // (or its profiled variant), passing the spread-indices aux array and the
    // function object ahead of the regular arguments.

    // Get the target function object, and emit function object test.
    IR::RegOpnd * functionObjOpnd = instr->UnlinkSrc1()->AsRegOpnd();
    functionObjOpnd->m_isCallArg = true;

    if (!(callFlags & Js::CallFlags_New) && !setupProfiledVersion)
    {
        IR::LabelInstr* continueAfterExLabel = InsertContinueAfterExceptionLabelForDebugger(m_func, instr, false);
        this->m_lowererMD.GenerateFunctionObjectTest(instr, functionObjOpnd, false, continueAfterExLabel);
    }

    IR::Instr *spreadIndicesInstr;

    spreadIndicesInstr = GetLdSpreadIndicesInstr(instr);
    Assert(spreadIndicesInstr->m_opcode == Js::OpCode::LdSpreadIndices);

    // Get AuxArray
    IR::Opnd *spreadIndicesOpnd = spreadIndicesInstr->UnlinkSrc1();
    // Remove LdSpreadIndices from the argument chain
    instr->ReplaceSrc2(spreadIndicesInstr->UnlinkSrc2());

    // Emit the normal args
    callFlags = (Js::CallFlags)(callFlags | (instr->GetDst() ? Js::CallFlags_Value : Js::CallFlags_NotUsed));

    // Profiled helper call requires three more parameters, ArrayProfileId, profileId, and the frame pointer.
    // This is just following the convention of HelperProfiledNewScObjArray call.
    const unsigned short extraArgsCount = setupProfiledVersion ? 5 : 2; // function object and AuxArray
    int32 argCount = this->m_lowererMD.LowerCallArgs(instr, (ushort)callFlags, extraArgsCount);

    // Emit our extra (first) args for the Spread helper in reverse order
    if (setupProfiledVersion)
    {
        IR::JitProfilingInstr* jitInstr = (IR::JitProfilingInstr*)instr;
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(jitInstr->arrayProfileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateProfileIdOpnd(jitInstr->profileId, m_func));
        m_lowererMD.LoadHelperArgument(instr, IR::Opnd::CreateFramePointerOpnd(m_func));
    }

    m_lowererMD.LoadHelperArgument(instr, functionObjOpnd);
    m_lowererMD.LoadHelperArgument(instr, spreadIndicesOpnd);

    // Change the call target to our helper
    IR::HelperCallOpnd *helperOpnd = IR::HelperCallOpnd::New(setupProfiledVersion ? IR::HelperProfiledNewScObjArraySpread : IR::HelperSpreadCall, this->m_func);
    instr->SetSrc1(helperOpnd);

    return this->m_lowererMD.LowerCall(instr, (Js::ArgSlot)argCount);
}
void
Lowerer::LowerDivI4Common(IR::Instr * instr)
{
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Rem_I4 || instr->m_opcode == Js::OpCode::Div_I4);
    Assert(m_func->GetJnFunction()->GetIsAsmjsMode());

    // Asm.js integer div/rem must not fault: divide-by-zero produces 0, and
    // INT_MIN / -1 (which traps on hardware IDIV) produces INT_MIN for div
    // and 0 for rem. Emit guards around the machine divide:
    //
    // MIN_INT/-1 path is only needed for signed operations
    //
    //       TEST src2, src2
    //       JEQ $div0
    //       CMP src1, MIN_INT
    //       JEQ $minInt
    //       JMP $div
    // $div0: [helper]
    //       MOV dst, 0
    //       JMP $done
    // $minInt: [helper]
    //       CMP src2, -1
    //       JNE $div
    //       dst = MOV src1 / 0
    //       JMP $done
    // $div:
    //       dst = IDIV src2, src1
    // $done:

    IR::LabelInstr * div0Label = InsertLabel(true, instr);
    IR::LabelInstr * divLabel = InsertLabel(false, instr);
    IR::LabelInstr * doneLabel = InsertLabel(false, instr->m_next);

    // Divisor == 0: dst = 0, skip the divide.
    InsertTestBranch(instr->GetSrc2(), instr->GetSrc2(), Js::OpCode::BrEq_A, div0Label, div0Label);
    InsertMove(instr->GetDst(), IR::IntConstOpnd::New(0, TyInt32, m_func), divLabel);
    InsertBranch(Js::OpCode::Br, doneLabel, divLabel);

    if (instr->GetSrc1()->GetType() == TyInt32)
    {
        IR::LabelInstr * minIntLabel = nullptr;
        // we need to check for INT_MIN/-1 if divisor is either -1 or variable, and dividend is either INT_MIN or variable
        bool needsMinOverNeg1Check = !(instr->GetSrc2()->IsIntConstOpnd() && instr->GetSrc2()->AsIntConstOpnd()->GetValue() != -1);
        if (instr->GetSrc1()->IsIntConstOpnd())
        {
            if (needsMinOverNeg1Check && instr->GetSrc1()->AsIntConstOpnd()->GetValue() == INT_MIN)
            {
                // Constant INT_MIN dividend: go straight to the minInt path.
                minIntLabel = InsertLabel(true, divLabel);
                InsertBranch(Js::OpCode::Br, minIntLabel, div0Label);
            }
            else
            {
                // Constant dividend != INT_MIN: overflow case can never happen.
                needsMinOverNeg1Check = false;
            }
        }
        else if(needsMinOverNeg1Check)
        {
            // Variable dividend: compare against INT_MIN at run time.
            minIntLabel = InsertLabel(true, divLabel);
            InsertCompareBranch(instr->GetSrc1(), IR::IntConstOpnd::New(INT_MIN, TyInt32, m_func), Js::OpCode::BrEq_A, minIntLabel, div0Label);
        }
        if (needsMinOverNeg1Check)
        {
            Assert(minIntLabel);
            Assert(!instr->GetSrc2()->IsIntConstOpnd() || instr->GetSrc2()->AsIntConstOpnd()->GetValue() == -1);
            if (!instr->GetSrc2()->IsIntConstOpnd())
            {
                InsertCompareBranch(instr->GetSrc2(), IR::IntConstOpnd::New(-1, TyInt32, m_func), Js::OpCode::BrNeq_A, divLabel, divLabel);
            }
            // INT_MIN / -1 => INT_MIN (src1); INT_MIN % -1 => 0.
            InsertMove(instr->GetDst(), instr->m_opcode == Js::OpCode::Div_I4 ? instr->GetSrc1() : IR::IntConstOpnd::New(0, TyInt32, m_func), divLabel);
            InsertBranch(Js::OpCode::Br, doneLabel, divLabel);
        }
    }

    // Fast path falls through here: jump over the helper blocks to the divide.
    InsertBranch(Js::OpCode::Br, divLabel, div0Label);

    m_lowererMD.EmitInt4Instr(instr);
}
  18141. void
  18142. Lowerer::LowerRemI4(IR::Instr * instr)
  18143. {
  18144. Assert(instr);
  18145. Assert(instr->m_opcode == Js::OpCode::Rem_I4);
  18146. if (m_func->GetJnFunction()->GetIsAsmjsMode())
  18147. {
  18148. LowerDivI4Common(instr);
  18149. }
  18150. else
  18151. {
  18152. m_lowererMD.EmitInt4Instr(instr);
  18153. }
  18154. }
void
Lowerer::LowerDivI4(IR::Instr * instr)
{
    // Lower Div_I4. Asm.js gets the non-faulting guarded divide; non-bailout
    // divides lower directly; otherwise the instruction is split into the
    // actual divide plus the bailout checks requested by its BailOutKind.
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::Div_I4);

    if (m_func->GetJnFunction()->GetIsAsmjsMode())
    {
        LowerDivI4Common(instr);
        return;
    }

    if(!instr->HasBailOutInfo())
    {
        m_lowererMD.EmitInt4Instr(instr);
        return;
    }

    Assert(!(instr->GetBailOutKind() & ~(IR::BailOnDivResultNotInt | IR::BailOutOnNegativeZero | IR::BailOutOnDivByZero | IR::BailOutOnDivOfMinInt)));
    IR::BailOutKind bailOutKind = instr->GetBailOutKind();

    // Split out and generate the bailout instruction
    const auto nonBailOutInstr = IR::Instr::New(instr->m_opcode, instr->m_func);
    instr->TransferTo(nonBailOutInstr);
    instr->InsertBefore(nonBailOutInstr);

    IR::LabelInstr * doneLabel = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
    instr->InsertAfter(doneLabel);

    // Generate the bailout helper call. 'instr' will be changed to the CALL into the bailout function, so it can't be used for
    // ordering instructions anymore.
    IR::LabelInstr * bailOutLabel = GenerateBailOut(instr);

    IR::Opnd * denominatorOpnd = nonBailOutInstr->GetSrc2();
    IR::Opnd * nominatorOpnd = nonBailOutInstr->GetSrc1();

    if (bailOutKind & IR::BailOutOnDivOfMinInt)
    {
        // Bailout if numerator is MIN_INT (could also check for denominator being -1
        // before bailing out, but does not seem worth the extra code..)
        InsertCompareBranch(nominatorOpnd, IR::IntConstOpnd::New(INT32_MIN, TyInt32, this->m_func, true), Js::OpCode::BrEq_A, bailOutLabel, nonBailOutInstr);
    }

    if (denominatorOpnd->IsIntConstOpnd() && Math::IsPow2(denominatorOpnd->AsIntConstOpnd()->AsInt32()))
    {
        // Power-of-two constant divisor: bail out unless the division is exact
        // (low bits of the numerator are zero), then strength-reduce the divide
        // to a shift.
        Assert((bailOutKind & (IR::BailOutOnNegativeZero | IR::BailOutOnDivByZero)) == 0);
        int pow2 = denominatorOpnd->AsIntConstOpnd()->AsInt32();
        InsertTestBranch(nominatorOpnd, IR::IntConstOpnd::New(pow2 - 1, TyInt32, this->m_func, true),
            Js::OpCode::BrNeq_A, bailOutLabel, nonBailOutInstr);
        nonBailOutInstr->m_opcode = Js::OpCode::Shr_A;
        nonBailOutInstr->ReplaceSrc2(IR::IntConstOpnd::New(Math::Log2(pow2), TyInt32, this->m_func, true));
        LowererMD::ChangeToShift(nonBailOutInstr, false);
        LowererMD::Legalize(nonBailOutInstr);
    }
    else
    {
        if (bailOutKind & IR::BailOutOnDivByZero)
        {
            // Bailout if denominator is 0
            InsertTestBranch(denominatorOpnd, denominatorOpnd, Js::OpCode::BrEq_A, bailOutLabel, nonBailOutInstr);
        }

        // Lower the div and bailout if there is a reminder (machine specific)
        IR::Instr * insertBeforeInstr = m_lowererMD.LowerDivI4AndBailOnReminder(nonBailOutInstr, bailOutLabel);

        IR::Opnd * resultOpnd = nonBailOutInstr->GetDst();
        if (bailOutKind & IR::BailOutOnNegativeZero)
        {
            // A zero result with a negative denominator means the true result
            // is -0, which an int can't represent — bail out.
            //
            // TEST result, result
            // JNE skipNegDenominatorCheckLabel         // Result not 0
            // TEST denominator, denominator
            // JNSB/BMI bailout                         // bail if negative
            // skipNegDenominatorCheckLabel:
            IR::LabelInstr * skipNegDenominatorCheckLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
            // Skip negative denominator check if the result is not 0
            InsertTestBranch(resultOpnd, resultOpnd, Js::OpCode::BrNeq_A, skipNegDenominatorCheckLabel, insertBeforeInstr);

            IR::LabelInstr * negDenominatorCheckLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
            insertBeforeInstr->InsertBefore(negDenominatorCheckLabel);
            // Jump to done if the denominator is not negative
            InsertTestBranch(denominatorOpnd, denominatorOpnd,
                LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), bailOutLabel, insertBeforeInstr);
            insertBeforeInstr->InsertBefore(skipNegDenominatorCheckLabel);
        }
    }

    // We are all fine, jump around the bailout to done
    InsertBranch(Js::OpCode::Br, doneLabel, bailOutLabel);
}
  18231. void
  18232. Lowerer::LowerRemR8(IR::Instr * instr)
  18233. {
  18234. Assert(instr);
  18235. Assert(instr->m_opcode == Js::OpCode::Rem_A);
  18236. Assert(m_func->GetJnFunction()->GetIsAsmjsMode());
  18237. m_lowererMD.LoadDoubleHelperArgument(instr, instr->UnlinkSrc2());
  18238. m_lowererMD.LoadDoubleHelperArgument(instr, instr->UnlinkSrc1());
  18239. instr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperOp_Rem_Double, m_func));
  18240. m_lowererMD.LowerCall(instr, 0);
  18241. }
void
Lowerer::LowerNewScopeSlots(IR::Instr * instr, bool doStackSlots)
{
    // Inline allocation and initialization of a scope slot array: allocate
    // 'count' Vars (recycler or stack), store the encoded slot count and the
    // function-body metadata in the header slots, and fill the remaining
    // slots with undefined — unrolled, or via a small loop for large counts.
    Func * func = m_func;
    if (PHASE_OFF(Js::NewScopeSlotFastPathPhase, func))
    {
        // Fast path disabled: call the runtime helper instead.
        this->LowerUnaryHelperMemWithFuncBody(instr, IR::HelperOP_NewScopeSlots);
        return;
    }

    uint const count = instr->GetSrc1()->AsIntConstOpnd()->AsUint32();
    uint const allocSize = count * sizeof(Js::Var);
    uint const actualSlotCount = count - Js::ScopeSlots::FirstSlotIndex;

    IR::RegOpnd * dst = instr->UnlinkDst()->AsRegOpnd();

    // dst = RecyclerAlloc(allocSize)
    // dst[EncodedSlotCountSlotIndex] = min(actualSlotCount, MaxEncodedSlotCount);
    // dst[ScopeMetadataSlotIndex] = FunctionBody;
    // mov undefinedOpnd, undefined
    // dst[FirstSlotIndex..count] = undefinedOpnd;

    // Note: stack allocation of both scope slots and frame display are done together
    // in lowering of NewStackFrameDisplay
    if (!doStackSlots)
    {
        GenerateRecyclerAlloc(IR::HelperAllocMemForVarArray, allocSize, dst, instr);
    }
    GenerateMemInit(dst, Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
        min<uint>(actualSlotCount, Js::ScopeSlots::MaxEncodedSlotCount), instr, !doStackSlots);
    IR::Opnd * functionBodyOpnd = this->LoadFunctionBodyOpnd(instr);
    GenerateMemInit(dst, Js::ScopeSlots::ScopeMetadataSlotIndex * sizeof(Js::Var),
        functionBodyOpnd, instr, !doStackSlots);

    IR::Opnd * undefinedOpnd = this->LoadLibraryValueOpnd(instr, LibraryValue::ValueUndefined);
    const IR::AutoReuseOpnd autoReuseUndefinedOpnd(undefinedOpnd, func);

    // avoid using a register for the undefined pointer if we are going to assign 1 or 2
    if (actualSlotCount > 2 && !undefinedOpnd->IsRegOpnd())
    {
        // mov undefinedOpnd, undefined
        IR::RegOpnd * regOpnd = IR::RegOpnd::New(TyVar, func);
        InsertMove(regOpnd, undefinedOpnd, instr);
        undefinedOpnd = regOpnd;
    }

    int const loopUnrollCount = 8;

    if (actualSlotCount <= loopUnrollCount * 2)
    {
        // Just generate all the assignment in straight line code
        //  mov[dst + Js::FirstSlotIndex], undefinedOpnd
        //  ...
        //  mov[dst + count - 1], undefinedOpnd
        for (unsigned int i = Js::ScopeSlots::FirstSlotIndex; i < count; i++)
        {
            GenerateMemInit(dst, sizeof(Js::Var) * i, undefinedOpnd, instr, !doStackSlots);
        }
    }
    else
    {
        // Just generate all the assignment in loop of loopUnrollCount and the rest as straight line code
        //
        //      lea currOpnd, [dst + sizeof(Var) * (loopAssignCount + Js::ScopeSlots::FirstSlotIndex - loopUnrollCount)];
        //      mov [currOpnd + loopUnrollCount + leftOverAssignCount - 1] , undefinedOpnd
        //      mov [currOpnd + loopUnrollCount + leftOverAssignCount - 2] , undefinedOpnd
        //      ...
        //      mov [currOpnd + loopUnrollCount], undefinedOpnd
        // $LoopTop:
        //      mov [currOpnd + loopUnrollCount - 1], undefinedOpnd
        //      mov [currOpnd + loopUnrollCount - 2], undefinedOpnd
        //      ...
        //      mov [currOpnd], undefinedOpnd
        //      lea currOpnd, [currOpnd - loopUnrollCount]
        //      cmp dst, currOpnd
        //      jlt $LoopTop
        uint nLoop = actualSlotCount / loopUnrollCount;
        uint loopAssignCount = nLoop * loopUnrollCount;
        uint leftOverAssignCount = actualSlotCount - loopAssignCount; // The left over assignments

        IR::RegOpnd * currOpnd = IR::RegOpnd::New(TyMachPtr, func);
        const IR::AutoReuseOpnd autoReuseCurrOpnd(currOpnd, m_func);
        InsertLea(
            currOpnd,
            IR::IndirOpnd::New(
                dst,
                sizeof(Js::Var) * (loopAssignCount + Js::ScopeSlots::FirstSlotIndex - loopUnrollCount),
                TyMachPtr,
                func),
            instr);

        // Straight-line stores for slots that don't fill a whole unrolled iteration.
        for (unsigned int i = 0; i < leftOverAssignCount; i++)
        {
            GenerateMemInit(currOpnd, sizeof(Js::Var) * (loopUnrollCount + leftOverAssignCount - i - 1), undefinedOpnd, instr, !doStackSlots);
        }

        IR::LabelInstr * loopTop = IR::LabelInstr::New(Js::OpCode::Label, func);
        instr->InsertBefore(loopTop);
        loopTop->m_isLoopTop = true;

        // Register the loop so the register allocator knows which syms are
        // live across the back edge.
        Loop *loop = JitAnew(func->m_alloc, Loop, func->m_alloc, this->m_func);
        loopTop->SetLoop(loop);
        loop->SetLoopTopInstr(loopTop);
        loop->regAlloc.liveOnBackEdgeSyms = JitAnew(func->m_alloc, BVSparse<JitArenaAllocator>, func->m_alloc);

        for (unsigned int i = 0; i < loopUnrollCount; i++)
        {
            GenerateMemInit(currOpnd, sizeof(Js::Var) * (loopUnrollCount - i - 1), undefinedOpnd, instr, !doStackSlots);
        }
        InsertLea(currOpnd, IR::IndirOpnd::New(currOpnd, -((int)sizeof(Js::Var) * loopUnrollCount), TyMachPtr, func), instr);
        InsertCompareBranch(dst, currOpnd, Js::OpCode::BrLt_A, true, loopTop, instr);

        loop->regAlloc.liveOnBackEdgeSyms->Set(currOpnd->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(dst->m_sym->m_id);
        loop->regAlloc.liveOnBackEdgeSyms->Set(undefinedOpnd->AsRegOpnd()->m_sym->m_id);
    }

    if (!doStackSlots)
    {
        // Publish the new scope slot array through the local closure sym.
        InsertMove(IR::RegOpnd::New(instr->m_func->GetLocalClosureSym(), TyMachPtr, func), dst, instr);
    }
    instr->Remove();
}
  18350. void Lowerer::LowerLdInnerFrameDisplay(IR::Instr *instr)
  18351. {
  18352. bool isStrict = instr->m_func->GetJnFunction()->GetIsStrictMode();
  18353. if (isStrict)
  18354. {
  18355. if (instr->GetSrc2())
  18356. {
  18357. this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdStrictInnerFrameDisplay);
  18358. }
  18359. else
  18360. {
  18361. #if DBG
  18362. instr->m_opcode = Js::OpCode::LdInnerFrameDisplayNoParent;
  18363. #endif
  18364. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdStrictInnerFrameDisplayNoParent);
  18365. }
  18366. }
  18367. else
  18368. {
  18369. if (instr->GetSrc2())
  18370. {
  18371. this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdInnerFrameDisplay);
  18372. }
  18373. else
  18374. {
  18375. #if DBG
  18376. instr->m_opcode = Js::OpCode::LdInnerFrameDisplayNoParent;
  18377. #endif
  18378. this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdInnerFrameDisplayNoParent);
  18379. }
  18380. }
  18381. }
void Lowerer::LowerLdFrameDisplay(IR::Instr *instr, bool doStackFrameDisplay)
{
    // Lower LdFrameDisplay: build a new frame display of envDepth+1 scopes,
    // either via a runtime helper (slow cases) or inline, by allocating the
    // display (stack or recycler), copying the parent environment's scopes
    // shifted up by one, and installing the current scope at slot 0.
    bool isStrict = instr->m_func->GetJnFunction()->GetIsStrictMode();
    uint16 envDepth = instr->m_func->GetJnFunction()->GetEnvDepth();
    Func *func = this->m_func;

    // envDepth of -1 indicates unknown depth (eval expression or HTML event handler).
    // We could still fast-path these by generating a loop over the (dynamically loaded) scope chain length,
    // but I doubt it's worth it.
    // If the dst opnd is a byte code temp, that indicates we're prepending a block scope or some such and
    // shouldn't attempt to do this.
    if (envDepth == (uint16)-1 ||
        (!doStackFrameDisplay && instr->GetDst()->AsRegOpnd()->m_sym->IsTempReg(instr->m_func)) ||
        PHASE_OFF(Js::FrameDisplayFastPathPhase, func))
    {
        // Slow path: pick the helper matching strict mode and whether a parent
        // display (src2) exists.
        if (isStrict)
        {
            if (instr->GetSrc2())
            {
                this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdStrictFrameDisplay);
            }
            else
            {
#if DBG
                instr->m_opcode = Js::OpCode::LdFrameDisplayNoParent;
#endif
                this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdStrictFrameDisplayNoParent);
            }
        }
        else
        {
            if (instr->GetSrc2())
            {
                this->LowerBinaryHelperMem(instr, IR::HelperScrObj_LdFrameDisplay);
            }
            else
            {
#if DBG
                instr->m_opcode = Js::OpCode::LdFrameDisplayNoParent;
#endif
                this->LowerUnaryHelperMem(instr, IR::HelperScrObj_LdFrameDisplayNoParent);
            }
        }
        return;
    }

    uint16 frameDispLength = envDepth + 1;
    Assert(frameDispLength > 0);

    IR::RegOpnd *dstOpnd = instr->UnlinkDst()->AsRegOpnd();
    IR::RegOpnd *currentFrameOpnd = instr->UnlinkSrc1()->AsRegOpnd();

    uint allocSize = sizeof(Js::FrameDisplay) + (frameDispLength * sizeof(Js::Var));
    if (doStackFrameDisplay)
    {
        IR::Instr *insertInstr = func->GetFunctionEntryInsertionPoint();

        // Initialize stack pointers for scope slots and frame display together at the top of the function
        // (in case we bail out before executing the instructions).
        IR::LabelInstr *labelNoStackFunc = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr *labelDone = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // Check whether stack functions have been disabled since we jitted.
        // If they have, then we must allocate closure memory on the heap.
        InsertTestBranch(IR::MemRefOpnd::New(m_func->GetJnFunction()->GetAddressOfFlags(), TyInt8, m_func),
                         IR::IntConstOpnd::New(Js::FunctionBody::Flags_StackNestedFunc, TyInt8, m_func, true),
                         Js::OpCode::BrEq_A, labelNoStackFunc, insertInstr);

        // allocSize is greater than TyMachPtr and hence changing the initial size to TyMisc
        StackSym * stackSym = StackSym::New(TyMisc, instr->m_func);
        m_func->StackAllocate(stackSym, allocSize);
        InsertLea(dstOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertInstr);

        uint scopeSlotAllocSize =
            (m_func->GetJnFunction()->scopeSlotArraySize + Js::ScopeSlots::FirstSlotIndex) * sizeof(Js::Var);

        stackSym = StackSym::New(TyMisc, instr->m_func);
        m_func->StackAllocate(stackSym, scopeSlotAllocSize);
        InsertLea(currentFrameOpnd, IR::SymOpnd::New(stackSym, TyMachPtr, func), insertInstr);

        InsertBranch(Js::OpCode::Br, labelDone, insertInstr);
        insertInstr->InsertBefore(labelNoStackFunc);
        // Heap fallback: recycler-allocate both the display and the slot array.
        GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, insertInstr, true);
        GenerateRecyclerAlloc(IR::HelperAllocMemForVarArray, scopeSlotAllocSize, currentFrameOpnd, insertInstr, true);
        insertInstr->InsertBefore(labelDone);

        // Publish both pointers through their local syms.
        m_lowererMD.CreateAssign(IR::SymOpnd::New(m_func->GetLocalFrameDisplaySym(), 0, TyMachReg, m_func), dstOpnd, insertInstr);
        m_lowererMD.CreateAssign(IR::SymOpnd::New(m_func->GetLocalClosureSym(), 0, TyMachReg, m_func), currentFrameOpnd, insertInstr);
    }
    else
    {
        GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, instr);
    }

    // Copy contents of environment
    // Work back to front to leave the head element(s) in cache
    if (envDepth > 0)
    {
        IR::RegOpnd *envOpnd = instr->UnlinkSrc2()->AsRegOpnd();
        for (uint16 i = envDepth; i >= 1; i--)
        {
            // Parent scope i-1 lands in slot i of the new display (shifted by
            // one to leave room for the current scope at slot 0).
            IR::Opnd *scopeOpnd = IR::RegOpnd::New(TyMachReg, func);
            IR::Opnd *envLoadOpnd =
                IR::IndirOpnd::New(envOpnd, Js::FrameDisplay::GetOffsetOfScopes() + ((i - 1) * sizeof(Js::Var)), TyMachReg, func);
            m_lowererMD.CreateAssign(scopeOpnd, envLoadOpnd, instr);
            IR::Opnd *dstStoreOpnd =
                IR::IndirOpnd::New(dstOpnd, Js::FrameDisplay::GetOffsetOfScopes() + (i * sizeof(Js::Var)), TyMachReg, func);
            m_lowererMD.CreateAssign(dstStoreOpnd, scopeOpnd, instr);
        }
    }

    // Assign current element.
    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(dstOpnd, Js::FrameDisplay::GetOffsetOfScopes(), TyMachReg, func),
        currentFrameOpnd,
        instr);

    // Combine tag, strict mode flag, and length
    uintptr_t bits = 1 |
        (isStrict << (Js::FrameDisplay::GetOffsetOfStrictMode() * 8)) |
        (frameDispLength << (Js::FrameDisplay::GetOffsetOfLength() * 8));
    m_lowererMD.CreateAssign(
        IR::IndirOpnd::New(dstOpnd, 0, TyMachReg, func),
        IR::AddrOpnd::New((void*)bits, IR::AddrOpndKindConstant, func, true),
        instr);

    instr->Remove();
}
  18495. IR::AddrOpnd *Lowerer::CreateFunctionBodyOpnd(Func *const func) const
  18496. {
  18497. return CreateFunctionBodyOpnd(func->GetJnFunction());
  18498. }
  18499. IR::AddrOpnd *Lowerer::CreateFunctionBodyOpnd(Js::FunctionBody *const functionBody) const
  18500. {
  18501. return IR::AddrOpnd::New(functionBody, IR::AddrOpndKindDynamicFunctionBody, m_func, true);
  18502. }
  18503. bool
  18504. Lowerer::GenerateRecyclerOrMarkTempAlloc(IR::Instr * instr, IR::RegOpnd * dstOpnd, IR::JnHelperMethod allocHelper, size_t allocSize, IR::SymOpnd ** tempObjectSymOpnd)
  18505. {
  18506. if (instr->dstIsTempObject)
  18507. {
  18508. *tempObjectSymOpnd = GenerateMarkTempAlloc(dstOpnd, allocSize, instr);
  18509. return false;
  18510. }
  18511. this->GenerateRecyclerAlloc(allocHelper, allocSize, dstOpnd, instr);
  18512. *tempObjectSymOpnd = nullptr;
  18513. return true;
  18514. }
IR::SymOpnd *
Lowerer::GenerateMarkTempAlloc(IR::RegOpnd *const dstOpnd, const size_t allocSize, IR::Instr *const insertBeforeInstr)
{
    // Stack-allocate a mark-temp object of allocSize bytes preceded by one
    // pointer-sized slot for the boxed (heap) copy. Returns a SymOpnd
    // addressing the object itself (i.e. just past the boxed-value slot).
    Assert(dstOpnd);
    Assert(allocSize != 0);
    Assert(insertBeforeInstr);

    Func *const func = insertBeforeInstr->m_func;

    // Allocate stack space for the reg exp instance, and a slot for the boxed value
    StackSym *const tempObjectSym = StackSym::New(TyMisc, func);
    m_func->StackAllocate(tempObjectSym, (int)(allocSize + sizeof(void *)));
    IR::SymOpnd * tempObjectOpnd = IR::SymOpnd::New(tempObjectSym, sizeof(void *), TyVar, func);
    InsertLea(dstOpnd, tempObjectOpnd, insertBeforeInstr);

    // Initialize the boxed instance slot (to null). Inside a loop, the store
    // is hoisted to the outer-most loop's entry so it executes only once.
    // NOTE(review): if outerMostLoopLabel is set AND HoistMarkTempInitPhase is
    // off, no initialization is emitted here — presumably handled elsewhere;
    // confirm before relying on the slot being null in that configuration.
    if (this->outerMostLoopLabel == nullptr)
    {
        GenerateMemInit(dstOpnd, -(int)sizeof(void *), IR::AddrOpnd::NewNull(func), insertBeforeInstr, false);
    }
    else if (!PHASE_OFF(Js::HoistMarkTempInitPhase, this->m_func))
    {
        InsertMove(IR::SymOpnd::New(tempObjectSym, TyMachPtr, func), IR::AddrOpnd::NewNull(func), this->outerMostLoopLabel, false);
    }
    return tempObjectOpnd;
}
  18538. void Lowerer::LowerBrFncCachedScopeEq(IR::Instr *instr)
  18539. {
  18540. Assert(instr->m_opcode == Js::OpCode::BrFncCachedScopeEq || instr->m_opcode == Js::OpCode::BrFncCachedScopeNeq);
  18541. Js::OpCode opcode = (instr->m_opcode == Js::OpCode::BrFncCachedScopeEq ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A);
  18542. IR::RegOpnd *src1Reg = instr->UnlinkSrc1()->AsRegOpnd();
  18543. IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1Reg, Js::ScriptFunction::GetOffsetOfCachedScopeObj(), TyMachReg, this->m_func);
  18544. this->InsertCompareBranch(indirOpnd, instr->UnlinkSrc2(), opcode, false, instr->AsBranchInstr()->GetTarget(), instr->m_next);
  18545. instr->Remove();
  18546. }
  18547. IR::Instr* Lowerer::InsertLoweredRegionStartMarker(IR::Instr* instrToInsertBefore)
  18548. {
  18549. AssertMsg(instrToInsertBefore->m_prev != nullptr, "Can't insert lowered region start marker as the first instr in the func.");
  18550. IR::LabelInstr* startMarkerLabel = IR::LabelInstr::New(Js::OpCode::Label, instrToInsertBefore->m_func);
  18551. instrToInsertBefore->InsertBefore(startMarkerLabel);
  18552. return startMarkerLabel;
  18553. }
  18554. IR::Instr* Lowerer::RemoveLoweredRegionStartMarker(IR::Instr* startMarkerInstr)
  18555. {
  18556. AssertMsg(startMarkerInstr->m_prev != nullptr, "Lowered region start marker became the first instruction in the func after lowering?");
  18557. IR::Instr* prevInstr = startMarkerInstr->m_prev;
  18558. startMarkerInstr->Remove();
  18559. return prevInstr;
  18560. }
  18561. IR::Instr* Lowerer::GetLdSpreadIndicesInstr(IR::Instr *instr)
  18562. {
  18563. IR::Opnd *src2 = instr->GetSrc2();
  18564. if (!src2->IsSymOpnd())
  18565. {
  18566. return nullptr;
  18567. }
  18568. IR::SymOpnd * argLinkOpnd = src2->AsSymOpnd();
  18569. StackSym * argLinkSym = argLinkOpnd->m_sym->AsStackSym();
  18570. Assert(argLinkSym->IsSingleDef());
  18571. return argLinkSym->m_instrDef;
  18572. }
  18573. bool Lowerer::IsSpreadCall(IR::Instr *instr)
  18574. {
  18575. IR::Instr *lastInstr = GetLdSpreadIndicesInstr(instr);
  18576. return lastInstr && lastInstr->m_opcode == Js::OpCode::LdSpreadIndices;
  18577. }
  18578. // When under debugger, generate a new label to be used as safe place to jump after ignore exception,
  18579. // insert it after insertAfterInstr, and return the label inserted.
  18580. // Returns nullptr/NoOP for non-debugger code path.
  18581. //static
  18582. IR::LabelInstr* Lowerer::InsertContinueAfterExceptionLabelForDebugger(Func* func, IR::Instr* insertAfterInstr, bool isHelper)
  18583. {
  18584. Assert(func);
  18585. Assert(insertAfterInstr);
  18586. IR::LabelInstr* continueAfterExLabel = nullptr;
  18587. if (func->IsJitInDebugMode())
  18588. {
  18589. continueAfterExLabel = IR::LabelInstr::New(Js::OpCode::Label, func, isHelper);
  18590. insertAfterInstr->InsertAfter(continueAfterExLabel);
  18591. }
  18592. return continueAfterExLabel;
  18593. }
void Lowerer::GenerateSingleCharStrJumpTableLookup(IR::Instr * instr)
{
    // Lower a multi-branch over single-character strings into a jump table
    // indexed by the character code. Strings that aren't exactly one character
    // long — or whose character falls outside [base, last] — take the default
    // target.
    IR::MultiBranchInstr * multiBrInstr = instr->AsBranchInstr()->AsMultiBrInstr();
    Func * func = instr->m_func;
    IR::LabelInstr * helperLabel = IR::LabelInstr::New(Js::OpCode::Label, func, true);
    IR::LabelInstr * continueLabel = IR::LabelInstr::New(Js::OpCode::Label, func);

    // MOV strLengthOpnd, str->length
    IR::RegOpnd * strLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(strLengthOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func), instr);

    // CMP strLengthOpnd, 1
    // JNE defaultLabel
    IR::LabelInstr * defaultLabelInstr = (IR::LabelInstr *)multiBrInstr->GetBranchJumpTable()->defaultTarget;
    InsertCompareBranch(strLengthOpnd, IR::IntConstOpnd::New(1, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);

    // MOV strBuffer, str->psz
    IR::RegOpnd * strBufferOpnd = IR::RegOpnd::New(TyMachPtr, func);
    InsertMove(strBufferOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, func), instr);

    // The flat buffer may not be materialized yet (psz == null); if so, call
    // the helper to create it.
    // TST strBuffer, strBuffer
    // JNE $continue
    InsertTestBranch(strBufferOpnd, strBufferOpnd, Js::OpCode::BrNeq_A, continueLabel, instr);

    // $helper:
    // PUSH str
    // CALL JavascriptString::GetSzHelper
    // MOV strBuffer, eax
    // $continue:
    instr->InsertBefore(helperLabel);
    m_lowererMD.LoadHelperArgument(instr, instr->GetSrc1());
    IR::Instr * instrCall = IR::Instr::New(Js::OpCode::Call, strBufferOpnd, IR::HelperCallOpnd::New(IR::HelperString_GetSz, func), func);
    instr->InsertBefore(instrCall);
    m_lowererMD.LowerCall(instrCall, 0);
    instr->InsertBefore(continueLabel);

    // MOV charOpnd, [strBuffer]   (first 16-bit code unit)
    IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(charOpnd, IR::IndirOpnd::New(strBufferOpnd, 0, TyUint16, func), instr);
    if (multiBrInstr->m_baseCaseValue != 0)
    {
        // SUB charOpnd, baseIndex   (re-base the table at zero)
        InsertSub(false, charOpnd, charOpnd, IR::IntConstOpnd::New(multiBrInstr->m_baseCaseValue, TyUint32, func), instr);
    }

    // CMP charOpnd, lastCaseIndex - baseCaseIndex
    // JA defaultLabel   (unsigned compare also rejects values below base)
    InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(multiBrInstr->m_lastCaseValue - multiBrInstr->m_baseCaseValue, TyUint32, func, true),
        Js::OpCode::BrGt_A, true, defaultLabelInstr, instr);

    instr->UnlinkSrc1();
    LowerJumpTableMultiBranch(multiBrInstr, charOpnd);
}
void Lowerer::GenerateSwitchStringLookup(IR::Instr * instr)
{
    /* Collect information about string lengths in all the cases */
    // A cheap length-based pre-filter is emitted before the dictionary lookup,
    // so strings whose length matches no case go straight to the default.
    charcount_t minLength = UINT_MAX;
    charcount_t maxLength = 0;
    BVUnit32 bvLength;  // bit i set => some case string has length i (i < 32)
    instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary()->dictionary.Map([&](Js::JavascriptString * str, void *)
    {
        charcount_t len = str->GetLength();
        minLength = min(minLength, str->GetLength());
        maxLength = max(maxLength, str->GetLength());
        if (len < 32)
        {
            bvLength.Set(len);
        }
    });

    Func * func = instr->m_func;
    // Load the input string's length once for all filters below.
    IR::RegOpnd * strLengthOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(strLengthOpnd, IR::IndirOpnd::New(instr->GetSrc1()->AsRegOpnd(), Js::JavascriptString::GetOffsetOfcharLength(), TyUint32, func), instr);
    IR::LabelInstr * defaultLabelInstr = (IR::LabelInstr *)instr->AsBranchInstr()->AsMultiBrInstr()->GetBranchDictionary()->defaultTarget;
    if (minLength == maxLength)
    {
        // Generate single length filter
        InsertCompareBranch(strLengthOpnd, IR::IntConstOpnd::New(minLength, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
    }
    else if (maxLength < 32)
    {
        // Generate bit filter

        // Jump to default label if the bit is not on for the length % 32
        IR::IntConstOpnd * lenBitMaskOpnd = IR::IntConstOpnd::New(bvLength.GetWord(), TyUint32, func);
        InsertBitTestBranch(lenBitMaskOpnd, strLengthOpnd, false, defaultLabelInstr, instr);
        // Jump to default label if the length is >= 32 (the bit filter only
        // covers lengths 0..31, and the bit test above used length mod 32)
        InsertTestBranch(strLengthOpnd, IR::IntConstOpnd::New(UINT32_MAX ^ 31, TyUint32, func), Js::OpCode::BrNeq_A, defaultLabelInstr, instr);
    }
    else
    {
        // CONSIDER: Generate range filter
    }
    this->LowerMultiBr(instr, IR::HelperOp_SwitchStringLookUp);
}
  18679. IR::Instr *
  18680. Lowerer::LowerTry(IR::Instr* instr, bool tryCatch)
  18681. {
  18682. if (this->m_func->hasBailout)
  18683. {
  18684. this->EnsureBailoutReturnValueSym();
  18685. }
  18686. this->EnsureHasBailedOutSym();
  18687. IR::SymOpnd * hasBailedOutOpnd = IR::SymOpnd::New(this->m_func->m_hasBailedOutSym, TyUint32, this->m_func);
  18688. IR::Instr * setInstr = IR::Instr::New(LowererMD::GetStoreOp(TyUint32), hasBailedOutOpnd, IR::IntConstOpnd::New(0, TyUint32, this->m_func), this->m_func);
  18689. instr->InsertBefore(setInstr);
  18690. LowererMD::Legalize(setInstr);
  18691. return m_lowererMD.LowerTry(instr, tryCatch ? IR::HelperOp_TryCatch : IR::HelperOp_TryFinally);
  18692. }
  18693. void
  18694. Lowerer::EnsureBailoutReturnValueSym()
  18695. {
  18696. if (this->m_func->m_bailoutReturnValueSym == nullptr)
  18697. {
  18698. this->m_func->m_bailoutReturnValueSym = StackSym::New(TyVar, this->m_func);
  18699. this->m_func->StackAllocate(this->m_func->m_bailoutReturnValueSym, sizeof(Js::Var));
  18700. }
  18701. }
  18702. void
  18703. Lowerer::EnsureHasBailedOutSym()
  18704. {
  18705. if (this->m_func->m_hasBailedOutSym == nullptr)
  18706. {
  18707. this->m_func->m_hasBailedOutSym = StackSym::New(TyUint32, this->m_func);
  18708. this->m_func->StackAllocate(this->m_func->m_hasBailedOutSym, MachRegInt);
  18709. }
  18710. }
void
Lowerer::InsertReturnThunkForRegion(Region* region, IR::LabelInstr* restoreLabel)
{
    // Emits (at most once per region) the region's bailout "return thunk"
    // after the function's exit instruction. When a bailout occurs inside a
    // try/catch region, control returns through this thunk, which forwards to
    // the parent region's thunk or, at the root, to restoreLabel.
    Assert(this->m_func->isPostLayout);
    Assert(region->GetType() == RegionTypeTry || region->GetType() == RegionTypeCatch);

    if (!region->returnThunkEmitted)
    {
        this->m_func->m_exitInstr->InsertAfter(region->GetBailoutReturnThunkLabel());

        bool newLastInstrInserted = false;
        IR::Instr * insertBeforeInstr = region->GetBailoutReturnThunkLabel()->m_next;
        // If the thunk label landed at the very end of the instruction list,
        // insert a temporary Nop so there is something to insert before.
        if (insertBeforeInstr == nullptr)
        {
            Assert(this->m_func->m_exitInstr == this->m_func->m_tailInstr);
            insertBeforeInstr = IR::Instr::New(Js::OpCode::Nop, this->m_func);
            newLastInstrInserted = true;
            region->GetBailoutReturnThunkLabel()->InsertAfter(insertBeforeInstr);
            this->m_func->m_tailInstr = insertBeforeInstr;
        }

        // Non-root parent: chain to the parent's thunk. Root parent: continue
        // at restoreLabel (where the bailout return value gets restored).
        IR::LabelOpnd * continuationAddr;
        if (region->GetParent()->GetType() != RegionTypeRoot)
        {
            continuationAddr = IR::LabelOpnd::New(region->GetParent()->GetBailoutReturnThunkLabel(), this->m_func);
        }
        else
        {
            continuationAddr = IR::LabelOpnd::New(restoreLabel, this->m_func);
        }

        IR::Instr * lastInstr = m_lowererMD.LowerEHRegionReturn(insertBeforeInstr, continuationAddr);

        // Drop the temporary Nop and make the lowered return sequence the tail.
        if (newLastInstrInserted)
        {
            Assert(this->m_func->m_tailInstr == insertBeforeInstr);
            insertBeforeInstr->Remove();
            this->m_func->m_tailInstr = lastInstr;
        }

        region->returnThunkEmitted = true;
    }
}
  18748. void
  18749. Lowerer::SetHasBailedOut(IR::Instr * bailoutInstr)
  18750. {
  18751. Assert(this->m_func->isPostLayout);
  18752. IR::SymOpnd * hasBailedOutOpnd = IR::SymOpnd::New(this->m_func->m_hasBailedOutSym, TyUint32, this->m_func);
  18753. IR::Instr * setInstr = IR::Instr::New(LowererMD::GetStoreOp(TyUint32), hasBailedOutOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), this->m_func);
  18754. bailoutInstr->InsertBefore(setInstr);
  18755. LowererMD::Legalize(setInstr, true);
  18756. }
IR::Instr*
Lowerer::EmitEHBailoutStackRestore(IR::Instr * bailoutInstr)
{
    // x86 only: if the EH bailout happened while out-params for one or more
    // StartCalls were pushed, emit code after the bailout to pop that argument
    // space off the stack. Returns the emitted restore instruction, or
    // bailoutInstr itself when nothing needs restoring.
    Assert(this->m_func->isPostLayout);

#ifdef _M_IX86
    BailOutInfo * bailoutInfo = bailoutInstr->GetBailOutInfo();
    if (bailoutInfo->startCallCount != 0)
    {
        uint totalStackToBeRestored = 0;
        uint stackAlignmentAdjustment = 0;
        // Each StartCall whose out-param area was padded up to the stack
        // alignment accounts for one extra slot.
        for (uint i = 0; i < bailoutInfo->startCallCount; i++)
        {
            uint startCallOutParamCount = bailoutInfo->GetStartCallOutParamCount(i);
            if ((Math::Align<int32>(startCallOutParamCount * MachPtr, MachStackAlignment) - (startCallOutParamCount * MachPtr)) != 0)
            {
                stackAlignmentAdjustment++;
            }
        }
        totalStackToBeRestored = (bailoutInfo->totalOutParamCount + stackAlignmentAdjustment) * MachPtr;

        // LEA esp, [esp + totalStackToBeRestored]
        IR::RegOpnd * espOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegStackPointer(), TyMachReg, this->m_func);
        IR::Opnd * opnd = IR::IndirOpnd::New(espOpnd, totalStackToBeRestored, TyMachReg, this->m_func);
        IR::Instr * stackRestoreInstr = IR::Instr::New(Js::OpCode::LEA, espOpnd, opnd, this->m_func);
        bailoutInstr->InsertAfter(stackRestoreInstr);

        return stackRestoreInstr;
    }
#endif
    return bailoutInstr;
}
void
Lowerer::EmitSaveEHBailoutReturnValueAndJumpToRetThunk(IR::Instr * insertAfterInstr)
{
    // Spill the bailout helper's return value into bailoutReturnValueSym and
    // route control through the current region's return thunk.
    Assert(this->m_func->isPostLayout);
    // After the CALL SaveAllRegistersAndBailout instruction, emit
    //
    //     MOV bailoutReturnValueSym, eax
    //     JMP $currentRegion->bailoutReturnThunkLabel
    IR::SymOpnd * bailoutReturnValueSymOpnd = IR::SymOpnd::New(this->m_func->m_bailoutReturnValueSym, TyVar, this->m_func);
    IR::RegOpnd *eaxOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    IR::Instr * movInstr = IR::Instr::New(LowererMD::GetStoreOp(TyVar), bailoutReturnValueSymOpnd, eaxOpnd, this->m_func);
    insertAfterInstr->InsertAfter(movInstr);
    LowererMD::Legalize(movInstr, true);

    IR::BranchInstr * jumpInstr = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, this->currentRegion->GetBailoutReturnThunkLabel(), this->m_func);
    movInstr->InsertAfter(jumpInstr);
}
void
Lowerer::EmitRestoreReturnValueFromEHBailout(IR::LabelInstr * restoreLabel, IR::LabelInstr * epilogLabel)
{
    // Reload the saved bailout return value into the return register just
    // before the epilog; the normal (non-bailout) path jumps over the reload.
    Assert(this->m_func->isPostLayout);
    //          JMP $epilog
    // $restore:
    //          MOV eax, bailoutReturnValueSym
    // $epilog:
    IR::SymOpnd * bailoutReturnValueSymOpnd = IR::SymOpnd::New(this->m_func->m_bailoutReturnValueSym, TyVar, this->m_func);
    IR::RegOpnd * eaxOpnd = IR::RegOpnd::New(NULL, LowererMD::GetRegReturn(TyMachReg), TyMachReg, this->m_func);
    IR::Instr * movInstr = IR::Instr::New(LowererMD::GetLoadOp(TyVar), eaxOpnd, bailoutReturnValueSymOpnd, this->m_func);

    epilogLabel->InsertBefore(restoreLabel);
    epilogLabel->InsertBefore(movInstr);
    LowererMD::Legalize(movInstr, true);
    // Normal path bypasses the restore by jumping straight to the epilog.
    restoreLabel->InsertBefore(IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, epilogLabel, this->m_func));
}
void
Lowerer::InsertBitTestBranch(IR::Opnd * bitMaskOpnd, IR::Opnd * bitIndex, bool jumpIfBitOn, IR::LabelInstr * targetLabel, IR::Instr * insertBeforeInstr)
{
    // Branch to targetLabel based on bit 'bitIndex' of 'bitMaskOpnd':
    // taken when the bit is set if jumpIfBitOn, otherwise when it is clear.
#if defined(_M_IX86) || defined(_M_AMD64)
    // Generate bit test and branch
    //     BT bitMaskOpnd, bitIndex
    //     JB/JAE targetLabel
    Func * func = this->m_func;
    IR::Instr * instr = IR::Instr::New(Js::OpCode::BT, func);
    instr->SetSrc1(bitMaskOpnd);
    instr->SetSrc2(bitIndex);
    insertBeforeInstr->InsertBefore(instr);
    // BT's first operand must be a register or memory; hoist anything else
    // (e.g. an immediate mask) into a register first.
    if (!(bitMaskOpnd->IsRegOpnd() || bitMaskOpnd->IsIndirOpnd() || bitMaskOpnd->IsMemRefOpnd()))
    {
        instr->HoistSrc1(Js::OpCode::MOV);
    }
    // BT puts the selected bit into CF: JB == bit set, JAE == bit clear.
    InsertBranch(jumpIfBitOn ? Js::OpCode::JB : Js::OpCode::JAE, targetLabel, insertBeforeInstr);
#elif defined(_M_ARM)
    // ARM doesn't have a bit-test instruction, so generate:
    //     MOV r1, 1
    //     SHL r1, bitIndex
    //     TEST bitMaskOpnd, r1
    //     BEQ/BNEQ targetLabel
    Func * func = this->m_func;
    IR::RegOpnd * lenBitOpnd = IR::RegOpnd::New(TyUint32, func);
    InsertMove(lenBitOpnd, IR::IntConstOpnd::New(1, TyUint32, this->m_func), insertBeforeInstr);
    InsertShift(Js::OpCode::Shl_I4, false, lenBitOpnd, lenBitOpnd, bitIndex, insertBeforeInstr);
    InsertTestBranch(lenBitOpnd, bitMaskOpnd, jumpIfBitOn? Js::OpCode::BrNeq_A :Js::OpCode::BrEq_A, targetLabel, insertBeforeInstr);
#else
    AssertMsg(false, "Not implemented");
#endif
}
//
// Generates an object test and then a string test with the static string type
//
void
Lowerer::GenerateStringTest(IR::RegOpnd *srcReg, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr * continueLabel, bool generateObjectCheck)
{
    Assert(srcReg);
    // Nothing to do when the value type already proves this is a string.
    if (!srcReg->GetValueType().IsString())
    {
        if (generateObjectCheck && !srcReg->IsNotTaggedValue())
        {
            this->m_lowererMD.GenerateObjectTest(srcReg, insertInstr, labelHelper);
        }

        // CMP [regSrcStr + offset(type)] , static string type   -- check base string type
        // BrEq/BrNeq labelHelper.
        IR::IndirOpnd * src1 = IR::IndirOpnd::New(srcReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
        IR::Opnd * src2 = this->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueStringTypeStatic);
        // With a continueLabel, branch there on a type match; otherwise branch
        // to labelHelper on a mismatch.
        if (continueLabel)
        {
            InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, continueLabel, insertInstr);
        }
        else
        {
            InsertCompareBranch(src1, src2, Js::OpCode::BrNeq_A, labelHelper, insertInstr);
        }
    }
}
void
Lowerer::LowerConvNum(IR::Instr *instrLoad, bool noMathFastPath)
{
    // Lower Conv_Num. Fast path: a tagged int converts to itself, so a plain
    // copy suffices; otherwise fall back to the ConvNumber helper.
    if (PHASE_OFF(Js::OtherFastPathPhase, this->m_func) || noMathFastPath || !instrLoad->GetSrc1()->IsRegOpnd())
    {
        this->LowerUnaryHelperMemWithTemp2(instrLoad, IR_HELPER_OP_FULL_OR_INPLACE(ConvNumber));
        return;
    }

    // MOV dst, src1
    // TEST src1, 1
    // JNE $done
    // call ToNumber
    //$done:

    bool isInt = false;
    bool isNotInt = false;
    IR::RegOpnd *src1 = instrLoad->GetSrc1()->AsRegOpnd();
    IR::LabelInstr *labelDone = NULL;
    IR::Instr *instr;

    if (src1->IsTaggedInt())
    {
        isInt = true;
    }
    else if (src1->IsNotInt())
    {
        isNotInt = true;
    }
    if (!isNotInt)
    {
        // MOV dst, src1 -- a tagged int is already a number, so the copy is
        // the whole conversion on that path.
        instr = LowererMD::CreateAssign(instrLoad->GetDst(), src1, instrLoad);

        if (!isInt)
        {
            labelDone = IR::LabelInstr::New(Js::OpCode::Label, this->m_func);
            // Per the sequence above, tagged values skip over the helper call
            // to $done.
            bool didTest = m_lowererMD.GenerateObjectTest(src1, instrLoad, labelDone);

            if (didTest)
            {
                // This label is needed only to mark the helper block
                IR::LabelInstr * labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
                instrLoad->InsertBefore(labelHelper);
            }
        }
    }

    if (!isInt)
    {
        if (labelDone)
        {
            instrLoad->InsertAfter(labelDone);
        }
        // Not provably a tagged int: the helper performs the real conversion.
        this->LowerUnaryHelperMemWithTemp2(instrLoad, IR_HELPER_OP_FULL_OR_INPLACE(ConvNumber));
    }
    else
    {
        // Known tagged int: the copy emitted above is all that is needed.
        instrLoad->Remove();
    }
}
  18931. IR::Opnd *
  18932. Lowerer::LoadSlotArrayWithCachedLocalType(IR::Instr * instrInsert, IR::PropertySymOpnd *propertySymOpnd)
  18933. {
  18934. IR::RegOpnd *opndBase = propertySymOpnd->CreatePropertyOwnerOpnd(m_func);
  18935. if (propertySymOpnd->UsesAuxSlot())
  18936. {
  18937. // If we use the auxiliary slot array, load it and return it
  18938. IR::RegOpnd *opndSlotArray = IR::RegOpnd::New(TyMachReg, this->m_func);
  18939. IR::Opnd *opndIndir = IR::IndirOpnd::New(opndBase, Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func);
  18940. LowererMD::CreateAssign(opndSlotArray, opndIndir, instrInsert);
  18941. return opndSlotArray;
  18942. }
  18943. else
  18944. {
  18945. // If we use inline slot return the address to the object header
  18946. return opndBase;
  18947. }
  18948. }
  18949. IR::Opnd *
  18950. Lowerer::LoadSlotArrayWithCachedProtoType(IR::Instr * instrInsert, IR::PropertySymOpnd *propertySymOpnd)
  18951. {
  18952. // Get the prototype object from the cache
  18953. Js::RecyclableObject *prototypeObject = propertySymOpnd->GetProtoObject();
  18954. Assert(prototypeObject != nullptr);
  18955. if (propertySymOpnd->UsesAuxSlot())
  18956. {
  18957. // If we use the auxiliary slot array, load it from the prototype object and return it
  18958. IR::RegOpnd *opndSlotArray = IR::RegOpnd::New(TyMachReg, this->m_func);
  18959. IR::Opnd *opnd = IR::MemRefOpnd::New((char*)prototypeObject + Js::DynamicObject::GetOffsetOfAuxSlots(), TyMachReg, this->m_func, IR::AddrOpndKindDynamicAuxSlotArrayRef);
  18960. LowererMD::CreateAssign(opndSlotArray, opnd, instrInsert);
  18961. return opndSlotArray;
  18962. }
  18963. else
  18964. {
  18965. // If we use inline slot return the address of the prototype object
  18966. return IR::MemRefOpnd::New(prototypeObject, TyMachReg, this->m_func);
  18967. }
  18968. }
IR::Instr *
Lowerer::LowerLdAsmJsEnv(IR::Instr * instr)
{
    // Lower LdAsmJsEnv: load the asm.js module memory pointer out of the
    // current function object into the instruction's dst. Returns the
    // instruction preceding the lowered sequence.
    Assert(m_func->GetJnFunction()->GetIsAsmJsFunction());
    IR::Opnd * functionObjOpnd;
    IR::Instr * instrPrev = this->m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
    Assert(!instr->GetSrc1());
    // src1 = [functionObject + offsetof(moduleMemory)]
    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(), Js::AsmJsScriptFunction::GetOffsetOfModuleMemory(), TyMachPtr, m_func);
    instr->SetSrc1(indirOpnd);
    LowererMD::ChangeToAssign(instr);
    return instrPrev;
}
IR::Instr *
Lowerer::LowerLdEnv(IR::Instr * instr)
{
    // Lower LdEnv: load the function's environment (scope chain) pointer into
    // dst. The function object operand may come back as a register or as a
    // fixed address, and each case is addressed differently.
    IR::Opnd * src1 = instr->GetSrc1();
    IR::Opnd * functionObjOpnd;
    IR::Instr * instrPrev = this->m_lowererMD.LoadFunctionObjectOpnd(instr, functionObjOpnd);
    Assert(!instr->GetSrc1());
    if (src1 == nullptr || functionObjOpnd->IsRegOpnd())
    {
        // src1 = [functionObject + offsetof(environment)]
        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(functionObjOpnd->AsRegOpnd(),
            Js::ScriptFunction::GetOffsetOfEnvironment(), TyMachPtr, m_func);
        instr->SetSrc1(indirOpnd);
    }
    else
    {
        // Function object address is known at JIT time: reference its
        // environment slot directly via a memory-reference operand.
        Assert(functionObjOpnd->IsAddrOpnd());
        IR::AddrOpnd* functionObjAddrOpnd = functionObjOpnd->AsAddrOpnd();
        IR::MemRefOpnd* functionEnvMemRefOpnd = IR::MemRefOpnd::New((void *)((intptr_t)functionObjAddrOpnd->m_address + Js::ScriptFunction::GetOffsetOfEnvironment()),
            TyMachPtr, this->m_func, IR::AddrOpndKindDynamicFunctionEnvironmentRef);
        instr->SetSrc1(functionEnvMemRefOpnd);
    }
    LowererMD::ChangeToAssign(instr);
    return instrPrev;
}
IR::Instr *
Lowerer::LowerFrameDisplayCheck(IR::Instr * instr)
{
    // Emit runtime checks that the frame display (scope chain) in dst, and the
    // slot arrays it holds, are at least as long as the jitted code expects;
    // any failed check calls the FatalInternalError helper. The expectations
    // come from the FrameDisplayCheckRecord attached as src2.
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *insertInstr = instr->m_next;
    IR::AddrOpnd *addrOpnd = instr->UnlinkSrc2()->AsAddrOpnd();
    FrameDisplayCheckRecord *record = (FrameDisplayCheckRecord*)addrOpnd->m_address;

    IR::LabelInstr *errorLabel = nullptr;
    IR::LabelInstr *continueLabel = nullptr;
    IR::RegOpnd *envOpnd = instr->GetDst()->AsRegOpnd();
    uint32 frameDisplayOffset = Js::FrameDisplay::GetOffsetOfScopes()/sizeof(Js::Var);

    if (record->slotId != (uint32)-1 && record->slotId > frameDisplayOffset)
    {
        // Check that the frame display has enough scopes in it to satisfy the code.
        errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(envOpnd,
                                                       Js::FrameDisplay::GetOffsetOfLength(),
                                                       TyUint16, m_func, true);
        IR::IntConstOpnd *slotIdOpnd = IR::IntConstOpnd::New(record->slotId - frameDisplayOffset, TyUint16, m_func);
        // length <= required index ==> error
        InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrLe_A, true, errorLabel, insertInstr);
    }

    if (record->table)
    {
        // Check the size of each of the slot arrays in the scope chain.
        FOREACH_HASHTABLE_ENTRY(uint32, bucket, record->table)
        {
            uint32 slotId = bucket.element;
            if (slotId != (uint32)-1 && slotId > Js::ScopeSlots::FirstSlotIndex)
            {
                // Labels are created lazily: only the first needed check
                // allocates them (they may also exist from the length check).
                if (errorLabel == nullptr)
                {
                    errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
                    continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
                }

                // Load the slot array pointer out of the frame display...
                IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(envOpnd,
                                                               bucket.value * sizeof(Js::Var),
                                                               TyVar, m_func, true);
                IR::RegOpnd * slotArrayOpnd = IR::RegOpnd::New(TyVar, m_func);
                InsertMove(slotArrayOpnd, indirOpnd, insertInstr);

                // ...and compare its encoded slot count against the required index.
                indirOpnd = IR::IndirOpnd::New(slotArrayOpnd,
                                               Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
                                               TyUint32, m_func, true);
                IR::IntConstOpnd * slotIdOpnd = IR::IntConstOpnd::New(slotId - Js::ScopeSlots::FirstSlotIndex,
                                                                      TyUint32, m_func);
                // count <= required index ==> error
                InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrLe_A, true, errorLabel, insertInstr);
            }
        }
        NEXT_HASHTABLE_ENTRY;
    }

    if (errorLabel)
    {
        // Success path jumps over the error block; failures fall into the
        // FatalInternalError helper call.
        InsertBranch(Js::OpCode::Br, continueLabel, insertInstr);
        insertInstr->InsertBefore(errorLabel);

        IR::Instr * instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
        insertInstr->InsertBefore(instrHelper);
        m_lowererMD.ChangeToHelperCall(instrHelper, IR::HelperOp_FatalInternalError);
        insertInstr->InsertBefore(continueLabel);
    }

    m_lowererMD.ChangeToAssign(instr);
    return instrPrev;
}
IR::Instr *
Lowerer::LowerSlotArrayCheck(IR::Instr * instr)
{
    // Emit a runtime check that the slot array in dst has at least 'slotId'
    // slots (slotId arrives as an IntConstOpnd in src2); a failed check calls
    // the FatalInternalError helper.
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *insertInstr = instr->m_next;

    IR::RegOpnd *slotArrayOpnd = instr->GetDst()->AsRegOpnd();
    StackSym *stackSym = slotArrayOpnd->m_sym;

    IR::IntConstOpnd *slotIdOpnd = instr->UnlinkSrc2()->AsIntConstOpnd();
    uint32 slotId = (uint32)slotIdOpnd->GetValue();
    Assert(slotId != (uint32)-1 && slotId >= Js::ScopeSlots::FirstSlotIndex);

    if (slotId > Js::ScopeSlots::FirstSlotIndex)
    {
        if (m_func->DoStackFrameDisplay() && stackSym->m_id == m_func->GetLocalClosureSym()->m_id)
        {
            // The pointer we loaded points to the reserved/known address where the slot array can be boxed.
            // Deref to get the real value.
            IR::IndirOpnd * srcOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(stackSym, TyVar, m_func), 0, TyVar, m_func);
            IR::RegOpnd * dstOpnd = IR::RegOpnd::New(TyVar, m_func);
            InsertMove(dstOpnd, srcOpnd, insertInstr);
            stackSym = dstOpnd->m_sym;
        }
        IR::LabelInstr *errorLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
        IR::LabelInstr *continueLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);

        // Compare the slot array's encoded slot count against the required
        // index; count > index means the check passes.
        IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(IR::RegOpnd::New(stackSym, TyVar, m_func),
                                                       Js::ScopeSlots::EncodedSlotCountSlotIndex * sizeof(Js::Var),
                                                       TyUint32, m_func, true);
        // Reuse the unlinked src2 opnd, rebased to the first real slot index.
        slotIdOpnd->SetValue(slotId - Js::ScopeSlots::FirstSlotIndex);
        InsertCompareBranch(indirOpnd, slotIdOpnd, Js::OpCode::BrGt_A, true, continueLabel, insertInstr);

        insertInstr->InsertBefore(errorLabel);

        IR::Instr * instrHelper = IR::Instr::New(Js::OpCode::Call, m_func);
        insertInstr->InsertBefore(instrHelper);
        m_lowererMD.ChangeToHelperCall(instrHelper, IR::HelperOp_FatalInternalError);

        insertInstr->InsertBefore(continueLabel);
    }

    m_lowererMD.ChangeToAssign(instr);
    return instrPrev;
}
IR::RegOpnd *
Lowerer::LoadIndexFromLikelyFloat(
    IR::RegOpnd *indexOpnd,
    const bool skipNegativeCheck,
    IR::LabelInstr *const notIntLabel,
    IR::LabelInstr *const negativeLabel,
    IR::Instr *const insertBeforeInstr)
{
    // Convert an index that profile data says is likely a float into an int32
    // register. Tagged ints take a fast untag path; floats are converted to
    // uint32 and validated by a float round-trip comparison. Branches to
    // notIntLabel when the value isn't an exact integer, and (unless
    // skipNegativeCheck) to negativeLabel when the untagged int is negative.
#ifdef _M_IX86
    // We should only generate this if sse2 is available
    Assert(AutoSystemInfo::Data.SSE2Available());
#endif
    Func *func = insertBeforeInstr->m_func;
    IR::LabelInstr * convertToUint = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * fallThrough = IR::LabelInstr::New(Js::OpCode::Label, func);

    // First generate test for tagged int even though profile data says likely float. Indices are usually int and we need a fast path before we try to convert float to int
    //     mov intIndex, index
    //     sar intIndex, 1
    //     jae $convertToUint
    IR::RegOpnd *int32IndexOpnd = GenerateUntagVar(indexOpnd, convertToUint, insertBeforeInstr, !indexOpnd->IsTaggedInt());

    if (!skipNegativeCheck)
    {
        //     test index, index
        //     js $notTaggedIntOrNegative
        InsertTestBranch(int32IndexOpnd, int32IndexOpnd, LowererMD::MDCompareWithZeroBranchOpcode(Js::OpCode::BrLt_A), negativeLabel, insertBeforeInstr);
    }
    InsertBranch(Js::OpCode::Br, fallThrough, insertBeforeInstr);

    insertBeforeInstr->InsertBefore(convertToUint);

    // try to convert float to int in a fast path
#if FLOATVAR
    IR::RegOpnd* floatIndexOpnd = m_lowererMD.CheckFloatAndUntag(indexOpnd, insertBeforeInstr, notIntLabel);
#else
    m_lowererMD.GenerateFloatTest(indexOpnd, insertBeforeInstr, notIntLabel);
    IR::IndirOpnd * floatIndexOpnd = IR::IndirOpnd::New(indexOpnd, Js::JavascriptNumber::GetValueOffset(), TyMachDouble, this->m_func);
#endif
    IR::LabelInstr * doneConvUint32 = IR::LabelInstr::New(Js::OpCode::Label, func);
    IR::LabelInstr * helperConvUint32 = IR::LabelInstr::New(Js::OpCode::Label, func, true /*helper*/);

    m_lowererMD.ConvertFloatToInt32(int32IndexOpnd, floatIndexOpnd, helperConvUint32, doneConvUint32, insertBeforeInstr);

    // helper path: call Conv_ToUInt32Core on the double value
    insertBeforeInstr->InsertBefore(helperConvUint32);
    m_lowererMD.LoadDoubleHelperArgument(insertBeforeInstr, floatIndexOpnd);
    IR::Instr * helperCall = IR::Instr::New(Js::OpCode::Call, int32IndexOpnd, this->m_func);
    insertBeforeInstr->InsertBefore(helperCall);
    m_lowererMD.ChangeToHelperCall(helperCall, IR::HelperConv_ToUInt32Core);

    // main path
    insertBeforeInstr->InsertBefore(doneConvUint32);

    // Convert the uint32 back to float to verify the conversion was exact
    IR::RegOpnd *floatOpndFromUint32 = IR::RegOpnd::New(TyFloat64, func);
    m_lowererMD.EmitUIntToFloat(floatOpndFromUint32, int32IndexOpnd, insertBeforeInstr);

    // compare with float from the original indexOpnd, we need floatIndex == (float64)(uint32)floatIndex
    InsertCompareBranch(floatOpndFromUint32, floatIndexOpnd, Js::OpCode::BrNeq_A, notIntLabel, insertBeforeInstr, false);

    insertBeforeInstr->InsertBefore(fallThrough);
    return int32IndexOpnd;
}
#if DBG
void
Lowerer::LegalizeVerifyRange(IR::Instr * instrStart, IR::Instr * instrLast)
{
    // Debug-only: run the legalizer in verify mode (Legalize<true>) over every
    // instruction in [instrStart, instrLast] to check the range is already legal.
    FOREACH_INSTR_IN_RANGE(verifyLegalizeInstr, instrStart, instrLast)
    {
        LowererMD::Legalize<true>(verifyLegalizeInstr);
    }
    NEXT_INSTR_IN_RANGE;
}
#endif