2
0

BackwardPass.cpp 283 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
772787279728072817282728372847285728672877288
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
class JitArenaAllocator;
// Specialization for BVSparse nodes owned by the JIT arena: rather than chaining
// the node onto a free list, release it straight back to the arena allocator.
// NOTE(review): presumably the JIT arena handles reuse itself — confirm against
// the generic BVSparse::QueueInFreeList implementation.
template <>
void
BVSparse<JitArenaAllocator>::QueueInFreeList(BVSparseNode *curNode)
{
    // Frees curNode's storage in place through this->alloc (the arena allocator).
    AllocatorDeleteInline(JitArenaAllocator, this->alloc, curNode);
}
  12. #include "Backend.h"
  13. #define INLINEEMETAARG_COUNT 3
// Constructs a backward pass over 'func' for the given phase.
// 'tag' selects which of the two supported phases this instance runs
// (BackwardPhase or DeadStorePhase); all per-run state and counters start zeroed.
BackwardPass::BackwardPass(Func * func, GlobOpt * globOpt, Js::Phase tag)
    : func(func), globOpt(globOpt), tag(tag), currentPrePassLoop(nullptr), tempAlloc(nullptr),
    preOpBailOutInstrToProcess(nullptr),
    considerSymAsRealUseInNoImplicitCallUses(nullptr),
    isCollectionPass(false), currentRegion(nullptr)
{
    // Those are the only two phase dead store will be used currently
    Assert(tag == Js::BackwardPhase || tag == Js::DeadStorePhase);

    // Reset pass counters.
    this->implicitCallBailouts = 0;
    this->fieldOpts = 0;

#if DBG_DUMP
    // Debug-dump-only counters (dead stores and mark-temp decisions).
    this->numDeadStore = 0;
    this->numMarkTempNumber = 0;
    this->numMarkTempNumberTransferred = 0;
    this->numMarkTempObject = 0;
#endif
}
  31. bool
  32. BackwardPass::DoSetDead() const
  33. {
  34. // Note: Dead bit on the Opnd records flow-based liveness.
  35. // This is distinct from isLastUse, which records lexical last-ness.
  36. return this->tag == Js::BackwardPhase && !this->IsPrePass();
  37. }
  38. bool
  39. BackwardPass::DoByteCodeUpwardExposedUsed() const
  40. {
  41. return (this->tag == Js::DeadStorePhase && this->func->hasBailout) ||
  42. (this->func->HasTry() && this->func->DoOptimizeTryCatch() && this->tag == Js::BackwardPhase);
  43. }
  44. bool
  45. BackwardPass::DoFieldHoistCandidates() const
  46. {
  47. return DoFieldHoistCandidates(this->currentBlock->loop);
  48. }
  49. bool
  50. BackwardPass::DoFieldHoistCandidates(Loop * loop) const
  51. {
  52. // We only need to do one pass to generate this data
  53. return this->tag == Js::BackwardPhase
  54. && !this->IsPrePass() && loop && GlobOpt::DoFieldHoisting(loop);
  55. }
  56. bool
  57. BackwardPass::DoMarkTempNumbers() const
  58. {
  59. #if FLOATVAR
  60. return false;
  61. #else
  62. // only mark temp number on the dead store phase
  63. return (tag == Js::DeadStorePhase) && !PHASE_OFF(Js::MarkTempPhase, this->func) &&
  64. !PHASE_OFF(Js::MarkTempNumberPhase, this->func) && func->DoFastPaths() && (!this->func->HasTry());
  65. #endif
  66. }
  67. bool
  68. BackwardPass::DoMarkTempObjects() const
  69. {
  70. // only mark temp object on the backward store phase
  71. return (tag == Js::BackwardPhase) && !PHASE_OFF(Js::MarkTempPhase, this->func) &&
  72. !PHASE_OFF(Js::MarkTempObjectPhase, this->func) && func->DoGlobOpt() && func->GetHasTempObjectProducingInstr() &&
  73. !func->IsJitInDebugMode() &&
  74. func->DoGlobOptsForGeneratorFunc();
  75. // Why MarkTempObject is disabled under debugger:
  76. // We add 'identified so far dead non-temp locals' to byteCodeUpwardExposedUsed in ProcessBailOutInfo,
  77. // this may cause MarkTempObject to convert some temps back to non-temp when it sees a 'transferred exposed use'
  78. // from a temp to non-temp. That's in general not a supported conversion (while non-temp -> temp is fine).
  79. }
  80. bool
  81. BackwardPass::DoMarkTempNumbersOnTempObjects() const
  82. {
  83. return !PHASE_OFF(Js::MarkTempNumberOnTempObjectPhase, this->func) && DoMarkTempNumbers() && this->func->GetHasMarkTempObjects();
  84. }
  85. #if DBG
  86. bool
  87. BackwardPass::DoMarkTempObjectVerify() const
  88. {
  89. // only mark temp object on the backward store phase
  90. return (tag == Js::DeadStorePhase) && !PHASE_OFF(Js::MarkTempPhase, this->func) &&
  91. !PHASE_OFF(Js::MarkTempObjectPhase, this->func) && func->DoGlobOpt() && func->GetHasTempObjectProducingInstr();
  92. }
  93. #endif
  94. // static
  95. bool
  96. BackwardPass::DoDeadStore(Func* func)
  97. {
  98. return
  99. !PHASE_OFF(Js::DeadStorePhase, func) &&
  100. (!func->HasTry() || func->DoOptimizeTryCatch());
  101. }
  102. bool
  103. BackwardPass::DoDeadStore() const
  104. {
  105. return
  106. this->tag == Js::DeadStorePhase &&
  107. DoDeadStore(this->func);
  108. }
  109. bool
  110. BackwardPass::DoDeadStoreSlots() const
  111. {
  112. // only dead store fields if glob opt is on to generate the trackable fields bitvector
  113. return (tag == Js::DeadStorePhase && this->func->DoGlobOpt()
  114. && (!this->func->HasTry()));
  115. }
  116. // Whether dead store is enabled for given func and sym.
  117. // static
  118. bool
  119. BackwardPass::DoDeadStore(Func* func, StackSym* sym)
  120. {
  121. // Dead store is disabled under debugger for non-temp local vars.
  122. return
  123. DoDeadStore(func) &&
  124. !(func->IsJitInDebugMode() && sym->HasByteCodeRegSlot() && func->IsNonTempLocalVar(sym->GetByteCodeRegSlot())) &&
  125. func->DoGlobOptsForGeneratorFunc();
  126. }
  127. bool
  128. BackwardPass::DoTrackNegativeZero() const
  129. {
  130. return
  131. !PHASE_OFF(Js::TrackIntUsagePhase, func) &&
  132. !PHASE_OFF(Js::TrackNegativeZeroPhase, func) &&
  133. func->DoGlobOpt() &&
  134. !IsPrePass() &&
  135. !func->IsJitInDebugMode() &&
  136. func->DoGlobOptsForGeneratorFunc();
  137. }
  138. bool
  139. BackwardPass::DoTrackBitOpsOrNumber() const
  140. {
  141. #if _WIN64
  142. return
  143. !PHASE_OFF1(Js::TypedArrayVirtualPhase) &&
  144. tag == Js::BackwardPhase &&
  145. func->DoGlobOpt() &&
  146. !IsPrePass() &&
  147. !func->IsJitInDebugMode() &&
  148. func->DoGlobOptsForGeneratorFunc();
  149. #else
  150. return false;
  151. #endif
  152. }
  153. bool
  154. BackwardPass::DoTrackIntOverflow() const
  155. {
  156. return
  157. !PHASE_OFF(Js::TrackIntUsagePhase, func) &&
  158. !PHASE_OFF(Js::TrackIntOverflowPhase, func) &&
  159. tag == Js::BackwardPhase &&
  160. !IsPrePass() &&
  161. globOpt->DoLossyIntTypeSpec() &&
  162. !func->IsJitInDebugMode() &&
  163. func->DoGlobOptsForGeneratorFunc();
  164. }
  165. bool
  166. BackwardPass::DoTrackCompoundedIntOverflow() const
  167. {
  168. return
  169. !PHASE_OFF(Js::TrackCompoundedIntOverflowPhase, func) &&
  170. DoTrackIntOverflow() &&
  171. !func->GetProfileInfo()->IsTrackCompoundedIntOverflowDisabled();
  172. }
  173. bool
  174. BackwardPass::DoTrackNon32BitOverflow() const
  175. {
  176. // enabled only for IA
  177. #if defined(_M_IX86) || defined(_M_X64)
  178. return true;
  179. #else
  180. return false;
  181. #endif
  182. }
  183. void
  184. BackwardPass::CleanupBackwardPassInfoInFlowGraph()
  185. {
  186. if (!this->func->m_fg->hasBackwardPassInfo)
  187. {
  188. // No information to clean up
  189. return;
  190. }
  191. // The backward pass temp arena has already been deleted, we can just reset the data
  192. FOREACH_BLOCK_IN_FUNC_DEAD_OR_ALIVE(block, this->func)
  193. {
  194. block->upwardExposedUses = nullptr;
  195. block->upwardExposedFields = nullptr;
  196. block->typesNeedingKnownObjectLayout = nullptr;
  197. block->fieldHoistCandidates = nullptr;
  198. block->slotDeadStoreCandidates = nullptr;
  199. block->byteCodeUpwardExposedUsed = nullptr;
  200. #if DBG
  201. block->byteCodeRestoreSyms = nullptr;
  202. #endif
  203. block->tempNumberTracker = nullptr;
  204. block->tempObjectTracker = nullptr;
  205. #if DBG
  206. block->tempObjectVerifyTracker = nullptr;
  207. #endif
  208. block->stackSymToFinalType = nullptr;
  209. block->stackSymToGuardedProperties = nullptr;
  210. block->stackSymToWriteGuardsMap = nullptr;
  211. block->cloneStrCandidates = nullptr;
  212. block->noImplicitCallUses = nullptr;
  213. block->noImplicitCallNoMissingValuesUses = nullptr;
  214. block->noImplicitCallNativeArrayUses = nullptr;
  215. block->noImplicitCallJsArrayHeadSegmentSymUses = nullptr;
  216. block->noImplicitCallArrayLengthSymUses = nullptr;
  217. if (block->loop != nullptr)
  218. {
  219. block->loop->hasDeadStoreCollectionPass = false;
  220. block->loop->hasDeadStorePrepass = false;
  221. }
  222. }
  223. NEXT_BLOCK_IN_FUNC_DEAD_OR_ALIVE;
  224. }
void
BackwardPass::Optimize()
{
    // Per-func kill switch for the backward phase. The dead store phase's own
    // enablement is checked separately (see DoDeadStore).
    if (tag == Js::BackwardPhase && PHASE_OFF(tag, this->func))
    {
        return;
    }

    // Arena for all per-pass scratch data; it lives only for the duration of
    // this call, and tempAlloc points at it for the rest of the pass.
    NoRecoverMemoryJitArenaAllocator localAlloc(tag == Js::BackwardPhase? L"BE-Backward" : L"BE-DeadStore",
        this->func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    this->tempAlloc = &localAlloc;
#if DBG_DUMP
    if (this->IsTraceEnabled())
    {
        this->func->DumpHeader();
    }
#endif
    // Null out stale per-block pointers left over from a previous run of this
    // pass (their arena is already gone).
    this->CleanupBackwardPassInfoInFlowGraph();

    // Info about whether a sym is used in a way in which -0 differs from +0, or whether the sym is used in a way in which an
    // int32 overflow when generating the value of the sym matters, in the current block. The info is transferred to
    // instructions that define the sym in the current block as they are encountered. The info in these bit vectors is discarded
    // after optimizing each block, so the only info that remains for GlobOpt is that which is transferred to instructions.
    //
    // NOTE: the member pointers below alias these stack-allocated locals, so
    // they are only valid while Optimize() is on the stack.
    BVSparse<JitArenaAllocator> localNegativeZeroDoesNotMatterBySymId(tempAlloc);
    negativeZeroDoesNotMatterBySymId = &localNegativeZeroDoesNotMatterBySymId;
    BVSparse<JitArenaAllocator> localSymUsedOnlyForBitOpsBySymId(tempAlloc);
    symUsedOnlyForBitOpsBySymId = &localSymUsedOnlyForBitOpsBySymId;
    BVSparse<JitArenaAllocator> localSymUsedOnlyForNumberBySymId(tempAlloc);
    symUsedOnlyForNumberBySymId = &localSymUsedOnlyForNumberBySymId;
    BVSparse<JitArenaAllocator> localIntOverflowDoesNotMatterBySymId(tempAlloc);
    intOverflowDoesNotMatterBySymId = &localIntOverflowDoesNotMatterBySymId;
    BVSparse<JitArenaAllocator> localIntOverflowDoesNotMatterInRangeBySymId(tempAlloc);
    intOverflowDoesNotMatterInRangeBySymId = &localIntOverflowDoesNotMatterInRangeBySymId;
    BVSparse<JitArenaAllocator> localCandidateSymsRequiredToBeInt(tempAlloc);
    candidateSymsRequiredToBeInt = &localCandidateSymsRequiredToBeInt;
    BVSparse<JitArenaAllocator> localCandidateSymsRequiredToBeLossyInt(tempAlloc);
    candidateSymsRequiredToBeLossyInt = &localCandidateSymsRequiredToBeLossyInt;
    intOverflowCurrentlyMattersInRange = true;

    FloatSymEquivalenceMap localFloatSymEquivalenceMap(tempAlloc);
    floatSymEquivalenceMap = &localFloatSymEquivalenceMap;

    NumberTempRepresentativePropertySymMap localNumberTempRepresentativePropertySym(tempAlloc);
    numberTempRepresentativePropertySym = &localNumberTempRepresentativePropertySym;

    // Main driver: visit every block in reverse order (including dead blocks).
    FOREACH_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE(block, this->func)
    {
        this->OptBlock(block);
    }
    NEXT_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE;

    // Loops fully emptied by dead store elimination can be removed outright
    // (only meaningful once dead stores have actually been eliminated).
    if (this->tag == Js::DeadStorePhase && !PHASE_OFF(Js::MemOpPhase, this->func))
    {
        this->RemoveEmptyLoops();
    }
    this->func->m_fg->hasBackwardPassInfo = true;

    if(DoTrackCompoundedIntOverflow())
    {
        // Tracking int overflow makes use of a scratch field in stack syms, which needs to be cleared
        func->m_symTable->ClearStackSymScratch();
    }

#if DBG_DUMP
    if (PHASE_STATS(this->tag, this->func))
    {
        this->func->DumpHeader();
        Output::Print(this->tag == Js::BackwardPhase? L"Backward Phase Stats:\n" : L"Deadstore Phase Stats:\n");
        if (this->DoDeadStore())
        {
            Output::Print(L"  Deadstore              : %3d\n", this->numDeadStore);
        }
        if (this->DoMarkTempNumbers())
        {
            Output::Print(L"  Temp Number            : %3d\n", this->numMarkTempNumber);
            Output::Print(L"  Transferred Temp Number: %3d\n", this->numMarkTempNumberTransferred);
        }
        if (this->DoMarkTempObjects())
        {
            Output::Print(L"  Temp Object            : %3d\n", this->numMarkTempObject);
        }
    }
#endif
}
  301. void
  302. BackwardPass::MergeSuccBlocksInfo(BasicBlock * block)
  303. {
  304. // Can't reuse the bv in the current block, because its successor can be itself.
  305. TempNumberTracker * tempNumberTracker = nullptr;
  306. TempObjectTracker * tempObjectTracker = nullptr;
  307. #if DBG
  308. TempObjectVerifyTracker * tempObjectVerifyTracker = nullptr;
  309. #endif
  310. HashTable<AddPropertyCacheBucket> * stackSymToFinalType = nullptr;
  311. HashTable<ObjTypeGuardBucket> * stackSymToGuardedProperties = nullptr;
  312. HashTable<ObjWriteGuardBucket> * stackSymToWriteGuardsMap = nullptr;
  313. BVSparse<JitArenaAllocator> * cloneStrCandidates = nullptr;
  314. BVSparse<JitArenaAllocator> * noImplicitCallUses = nullptr;
  315. BVSparse<JitArenaAllocator> * noImplicitCallNoMissingValuesUses = nullptr;
  316. BVSparse<JitArenaAllocator> * noImplicitCallNativeArrayUses = nullptr;
  317. BVSparse<JitArenaAllocator> * noImplicitCallJsArrayHeadSegmentSymUses = nullptr;
  318. BVSparse<JitArenaAllocator> * noImplicitCallArrayLengthSymUses = nullptr;
  319. BVSparse<JitArenaAllocator> * upwardExposedUses = nullptr;
  320. BVSparse<JitArenaAllocator> * upwardExposedFields = nullptr;
  321. BVSparse<JitArenaAllocator> * typesNeedingKnownObjectLayout = nullptr;
  322. BVSparse<JitArenaAllocator> * fieldHoistCandidates = nullptr;
  323. BVSparse<JitArenaAllocator> * slotDeadStoreCandidates = nullptr;
  324. BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed = nullptr;
  325. #if DBG
  326. uint byteCodeLocalsCount = func->GetJnFunction()->GetLocalsCount();
  327. StackSym ** byteCodeRestoreSyms = nullptr;
  328. #endif
  329. Assert(!block->isDead || block->GetSuccList()->Empty());
  330. if (this->DoByteCodeUpwardExposedUsed())
  331. {
  332. byteCodeUpwardExposedUsed = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  333. #if DBG
  334. byteCodeRestoreSyms = JitAnewArrayZ(this->tempAlloc, StackSym *, byteCodeLocalsCount);
  335. #endif
  336. }
  337. #if DBG
  338. if (!IsCollectionPass() && this->DoMarkTempObjectVerify())
  339. {
  340. tempObjectVerifyTracker = JitAnew(this->tempAlloc, TempObjectVerifyTracker, this->tempAlloc, block->loop != nullptr);
  341. }
  342. #endif
  343. if (!block->isDead)
  344. {
  345. bool keepUpwardExposed = (this->tag == Js::BackwardPhase);
  346. JitArenaAllocator *upwardExposedArena = nullptr;
  347. if(!IsCollectionPass())
  348. {
  349. upwardExposedArena = keepUpwardExposed ? this->globOpt->alloc : this->tempAlloc;
  350. upwardExposedUses = JitAnew(upwardExposedArena, BVSparse<JitArenaAllocator>, upwardExposedArena);
  351. upwardExposedFields = JitAnew(upwardExposedArena, BVSparse<JitArenaAllocator>, upwardExposedArena);
  352. if (this->tag == Js::DeadStorePhase)
  353. {
  354. typesNeedingKnownObjectLayout = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  355. }
  356. if (this->DoFieldHoistCandidates())
  357. {
  358. fieldHoistCandidates = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  359. }
  360. if (this->DoDeadStoreSlots())
  361. {
  362. slotDeadStoreCandidates = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  363. }
  364. if (this->DoMarkTempNumbers())
  365. {
  366. tempNumberTracker = JitAnew(this->tempAlloc, TempNumberTracker, this->tempAlloc, block->loop != nullptr);
  367. }
  368. if (this->DoMarkTempObjects())
  369. {
  370. tempObjectTracker = JitAnew(this->tempAlloc, TempObjectTracker, this->tempAlloc, block->loop != nullptr);
  371. }
  372. noImplicitCallUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  373. noImplicitCallNoMissingValuesUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  374. noImplicitCallNativeArrayUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  375. noImplicitCallJsArrayHeadSegmentSymUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  376. noImplicitCallArrayLengthSymUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  377. if (this->tag == Js::BackwardPhase)
  378. {
  379. cloneStrCandidates = JitAnew(this->globOpt->alloc, BVSparse<JitArenaAllocator>, this->globOpt->alloc);
  380. }
  381. }
  382. bool firstSucc = true;
  383. FOREACH_SUCCESSOR_BLOCK(blockSucc, block)
  384. {
  385. #if defined(DBG_DUMP) || defined(ENABLE_DEBUG_CONFIG_OPTIONS)
  386. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  387. #endif
  388. // save the byteCodeUpwardExposedUsed from deleting for the block right after the memop loop
  389. if (this->tag == Js::DeadStorePhase && !this->IsPrePass() && globOpt->DoMemOp(block->loop) && blockSucc->loop != block->loop)
  390. {
  391. Assert(block->loop->memOpInfo->inductionVariablesUsedAfterLoop == nullptr);
  392. block->loop->memOpInfo->inductionVariablesUsedAfterLoop = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  393. block->loop->memOpInfo->inductionVariablesUsedAfterLoop->Or(blockSucc->byteCodeUpwardExposedUsed);
  394. block->loop->memOpInfo->inductionVariablesUsedAfterLoop->Or(blockSucc->upwardExposedUses);
  395. }
  396. bool deleteData = false;
  397. if (!blockSucc->isLoopHeader && blockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)
  398. {
  399. Assert(blockSucc->GetDataUseCount() != 0);
  400. deleteData = (blockSucc->DecrementDataUseCount() == 0);
  401. }
  402. Assert((byteCodeUpwardExposedUsed == nullptr) == !this->DoByteCodeUpwardExposedUsed());
  403. if (byteCodeUpwardExposedUsed && blockSucc->byteCodeUpwardExposedUsed)
  404. {
  405. byteCodeUpwardExposedUsed->Or(blockSucc->byteCodeUpwardExposedUsed);
  406. if (this->tag == Js::DeadStorePhase)
  407. {
  408. #if DBG
  409. for (uint i = 0; i < byteCodeLocalsCount; i++)
  410. {
  411. if (byteCodeRestoreSyms[i] == nullptr)
  412. {
  413. byteCodeRestoreSyms[i] = blockSucc->byteCodeRestoreSyms[i];
  414. }
  415. else
  416. {
  417. Assert(blockSucc->byteCodeRestoreSyms[i] == nullptr
  418. || byteCodeRestoreSyms[i] == blockSucc->byteCodeRestoreSyms[i]);
  419. }
  420. }
  421. #endif
  422. if (deleteData)
  423. {
  424. // byteCodeUpwardExposedUsed is required to populate the writeThroughSymbolsSet for the try region. So, don't delete it in the backwards pass.
  425. JitAdelete(this->tempAlloc, blockSucc->byteCodeUpwardExposedUsed);
  426. blockSucc->byteCodeUpwardExposedUsed = nullptr;
  427. }
  428. }
  429. #if DBG
  430. if (deleteData)
  431. {
  432. JitAdeleteArray(this->tempAlloc, byteCodeLocalsCount, blockSucc->byteCodeRestoreSyms);
  433. blockSucc->byteCodeRestoreSyms = nullptr;
  434. }
  435. #endif
  436. }
  437. else
  438. {
  439. Assert(blockSucc->byteCodeUpwardExposedUsed == nullptr);
  440. Assert(blockSucc->byteCodeRestoreSyms == nullptr);
  441. }
  442. if(IsCollectionPass())
  443. {
  444. continue;
  445. }
  446. Assert((blockSucc->upwardExposedUses != nullptr)
  447. || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop))));
  448. Assert((blockSucc->upwardExposedFields != nullptr)
  449. || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop))));
  450. Assert((blockSucc->typesNeedingKnownObjectLayout != nullptr)
  451. || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
  452. || this->tag != Js::DeadStorePhase);
  453. Assert((blockSucc->fieldHoistCandidates != nullptr)
  454. || blockSucc->isLoopHeader
  455. || !this->DoFieldHoistCandidates(blockSucc->loop));
  456. Assert((blockSucc->slotDeadStoreCandidates != nullptr)
  457. || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
  458. || !this->DoDeadStoreSlots());
  459. Assert((blockSucc->tempNumberTracker != nullptr)
  460. || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
  461. || !this->DoMarkTempNumbers());
  462. Assert((blockSucc->tempObjectTracker != nullptr)
  463. || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
  464. || !this->DoMarkTempObjects());
  465. Assert((blockSucc->tempObjectVerifyTracker != nullptr)
  466. || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
  467. || !this->DoMarkTempObjectVerify());
  468. if (blockSucc->upwardExposedUses != nullptr)
  469. {
  470. upwardExposedUses->Or(blockSucc->upwardExposedUses);
  471. if (deleteData && (!keepUpwardExposed
  472. || (this->IsPrePass() && blockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)))
  473. {
  474. JitAdelete(upwardExposedArena, blockSucc->upwardExposedUses);
  475. blockSucc->upwardExposedUses = nullptr;
  476. }
  477. }
  478. if (blockSucc->upwardExposedFields != nullptr)
  479. {
  480. upwardExposedFields->Or(blockSucc->upwardExposedFields);
  481. if (deleteData && (!keepUpwardExposed
  482. || (this->IsPrePass() && blockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)))
  483. {
  484. JitAdelete(upwardExposedArena, blockSucc->upwardExposedFields);
  485. blockSucc->upwardExposedFields = nullptr;
  486. }
  487. }
  488. if (blockSucc->typesNeedingKnownObjectLayout != nullptr)
  489. {
  490. typesNeedingKnownObjectLayout->Or(blockSucc->typesNeedingKnownObjectLayout);
  491. if (deleteData)
  492. {
  493. JitAdelete(this->tempAlloc, blockSucc->typesNeedingKnownObjectLayout);
  494. blockSucc->typesNeedingKnownObjectLayout = nullptr;
  495. }
  496. }
  497. if (fieldHoistCandidates && blockSucc->fieldHoistCandidates != nullptr)
  498. {
  499. fieldHoistCandidates->Or(blockSucc->fieldHoistCandidates);
  500. if (deleteData)
  501. {
  502. JitAdelete(this->tempAlloc, blockSucc->fieldHoistCandidates);
  503. blockSucc->fieldHoistCandidates = nullptr;
  504. }
  505. }
  506. if (blockSucc->slotDeadStoreCandidates != nullptr)
  507. {
  508. slotDeadStoreCandidates->And(blockSucc->slotDeadStoreCandidates);
  509. if (deleteData)
  510. {
  511. JitAdelete(this->tempAlloc, blockSucc->slotDeadStoreCandidates);
  512. blockSucc->slotDeadStoreCandidates = nullptr;
  513. }
  514. }
  515. if (blockSucc->tempNumberTracker != nullptr)
  516. {
  517. Assert((blockSucc->loop != nullptr) == blockSucc->tempNumberTracker->HasTempTransferDependencies());
  518. tempNumberTracker->MergeData(blockSucc->tempNumberTracker, deleteData);
  519. if (deleteData)
  520. {
  521. blockSucc->tempNumberTracker = nullptr;
  522. }
  523. }
  524. if (blockSucc->tempObjectTracker != nullptr)
  525. {
  526. Assert((blockSucc->loop != nullptr) == blockSucc->tempObjectTracker->HasTempTransferDependencies());
  527. tempObjectTracker->MergeData(blockSucc->tempObjectTracker, deleteData);
  528. if (deleteData)
  529. {
  530. blockSucc->tempObjectTracker = nullptr;
  531. }
  532. }
  533. #if DBG
  534. if (blockSucc->tempObjectVerifyTracker != nullptr)
  535. {
  536. Assert((blockSucc->loop != nullptr) == blockSucc->tempObjectVerifyTracker->HasTempTransferDependencies());
  537. tempObjectVerifyTracker->MergeData(blockSucc->tempObjectVerifyTracker, deleteData);
  538. if (deleteData)
  539. {
  540. blockSucc->tempObjectVerifyTracker = nullptr;
  541. }
  542. }
  543. #endif
  544. PHASE_PRINT_TRACE(Js::ObjTypeSpecStorePhase, this->func,
  545. L"ObjTypeSpecStore: func %s, edge %d => %d: ",
  546. this->func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  547. block->GetBlockNum(), blockSucc->GetBlockNum());
  548. auto fixupFrom = [block, blockSucc, this](Bucket<AddPropertyCacheBucket> &bucket)
  549. {
  550. AddPropertyCacheBucket *fromData = &bucket.element;
  551. if (fromData->GetInitialType() == nullptr ||
  552. fromData->GetFinalType() == fromData->GetInitialType())
  553. {
  554. return;
  555. }
  556. this->InsertTypeTransitionsAtPriorSuccessors(block, blockSucc, bucket.value, fromData);
  557. };
  558. auto fixupTo = [blockSucc, this](Bucket<AddPropertyCacheBucket> &bucket)
  559. {
  560. AddPropertyCacheBucket *toData = &bucket.element;
  561. if (toData->GetInitialType() == nullptr ||
  562. toData->GetFinalType() == toData->GetInitialType())
  563. {
  564. return;
  565. }
  566. this->InsertTypeTransitionAtBlock(blockSucc, bucket.value, toData);
  567. };
  568. if (blockSucc->stackSymToFinalType != nullptr)
  569. {
  570. #if DBG_DUMP
  571. if (PHASE_TRACE(Js::ObjTypeSpecStorePhase, this->func))
  572. {
  573. blockSucc->stackSymToFinalType->Dump();
  574. }
  575. #endif
  576. if (firstSucc)
  577. {
  578. stackSymToFinalType = blockSucc->stackSymToFinalType->Copy();
  579. }
  580. else if (stackSymToFinalType != nullptr)
  581. {
  582. if (this->IsPrePass())
  583. {
  584. stackSymToFinalType->And(blockSucc->stackSymToFinalType);
  585. }
  586. else
  587. {
  588. // Insert any type transitions that can't be merged past this point.
  589. stackSymToFinalType->AndWithFixup(blockSucc->stackSymToFinalType, fixupFrom, fixupTo);
  590. }
  591. }
  592. else if (!this->IsPrePass())
  593. {
  594. FOREACH_HASHTABLE_ENTRY(AddPropertyCacheBucket, bucket, blockSucc->stackSymToFinalType)
  595. {
  596. fixupTo(bucket);
  597. }
  598. NEXT_HASHTABLE_ENTRY;
  599. }
  600. if (deleteData)
  601. {
  602. blockSucc->stackSymToFinalType->Delete();
  603. blockSucc->stackSymToFinalType = nullptr;
  604. }
  605. }
  606. else
  607. {
  608. PHASE_PRINT_TRACE(Js::ObjTypeSpecStorePhase, this->func, L"null\n");
  609. if (stackSymToFinalType)
  610. {
  611. if (!this->IsPrePass())
  612. {
  613. FOREACH_HASHTABLE_ENTRY(AddPropertyCacheBucket, bucket, stackSymToFinalType)
  614. {
  615. fixupFrom(bucket);
  616. }
  617. NEXT_HASHTABLE_ENTRY;
  618. }
  619. stackSymToFinalType->Delete();
  620. stackSymToFinalType = nullptr;
  621. }
  622. }
  623. if (tag == Js::BackwardPhase)
  624. {
  625. if (blockSucc->cloneStrCandidates != nullptr)
  626. {
  627. Assert(cloneStrCandidates != nullptr);
  628. cloneStrCandidates->Or(blockSucc->cloneStrCandidates);
  629. if (deleteData)
  630. {
  631. JitAdelete(this->globOpt->alloc, blockSucc->cloneStrCandidates);
  632. blockSucc->cloneStrCandidates = nullptr;
  633. }
  634. }
  635. #if DBG_DUMP
  636. if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecWriteGuardsPhase, this->func))
  637. {
  638. wchar_t debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  639. Js::FunctionBody* topFunctionBody = this->func->GetTopFunc()->GetJnFunction();
  640. Js::FunctionBody* functionBody = this->func->GetJnFunction();
  641. Output::Print(L"ObjTypeSpec: top function %s (%s), function %s (%s), write guard symbols on edge %d => %d: ",
  642. topFunctionBody->GetDisplayName(), topFunctionBody->GetDebugNumberSet(debugStringBuffer), functionBody->GetDisplayName(),
  643. functionBody->GetDebugNumberSet(debugStringBuffer2), block->GetBlockNum(), blockSucc->GetBlockNum());
  644. }
  645. #endif
  646. if (blockSucc->stackSymToWriteGuardsMap != nullptr)
  647. {
  648. #if DBG_DUMP
  649. if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecWriteGuardsPhase, this->func))
  650. {
  651. Output::Print(L"\n");
  652. blockSucc->stackSymToWriteGuardsMap->Dump();
  653. }
  654. #endif
  655. if (stackSymToWriteGuardsMap == nullptr)
  656. {
  657. stackSymToWriteGuardsMap = blockSucc->stackSymToWriteGuardsMap->Copy();
  658. }
  659. else
  660. {
  661. stackSymToWriteGuardsMap->Or(
  662. blockSucc->stackSymToWriteGuardsMap, &BackwardPass::MergeWriteGuards);
  663. }
  664. if (deleteData)
  665. {
  666. blockSucc->stackSymToWriteGuardsMap->Delete();
  667. blockSucc->stackSymToWriteGuardsMap = nullptr;
  668. }
  669. }
  670. else
  671. {
  672. #if DBG_DUMP
  673. if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecWriteGuardsPhase, this->func))
  674. {
  675. Output::Print(L"null\n");
  676. }
  677. #endif
  678. }
  679. }
  680. else
  681. {
  682. #if DBG_DUMP
  683. if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecTypeGuardsPhase, this->func))
  684. {
  685. Js::FunctionBody* topFunctionBody = this->func->GetTopFunc()->GetJnFunction();
  686. Js::FunctionBody* functionBody = this->func->GetJnFunction();
  687. wchar_t debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  688. Output::Print(L"ObjTypeSpec: top function %s (%s), function %s (%s), guarded property operations on edge %d => %d: \n",
  689. topFunctionBody->GetDisplayName(), topFunctionBody->GetDebugNumberSet(debugStringBuffer), functionBody->GetDisplayName(), functionBody->GetDebugNumberSet(debugStringBuffer2),
  690. block->GetBlockNum(), blockSucc->GetBlockNum());
  691. }
  692. #endif
  693. if (blockSucc->stackSymToGuardedProperties != nullptr)
  694. {
  695. #if DBG_DUMP
  696. if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecTypeGuardsPhase, this->func))
  697. {
  698. blockSucc->stackSymToGuardedProperties->Dump();
  699. Output::Print(L"\n");
  700. }
  701. #endif
  702. if (stackSymToGuardedProperties == nullptr)
  703. {
  704. stackSymToGuardedProperties = blockSucc->stackSymToGuardedProperties->Copy();
  705. }
  706. else
  707. {
  708. stackSymToGuardedProperties->Or(
  709. blockSucc->stackSymToGuardedProperties, &BackwardPass::MergeGuardedProperties);
  710. }
  711. if (deleteData)
  712. {
  713. blockSucc->stackSymToGuardedProperties->Delete();
  714. blockSucc->stackSymToGuardedProperties = nullptr;
  715. }
  716. }
  717. else
  718. {
  719. #if DBG_DUMP
  720. if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecTypeGuardsPhase, this->func))
  721. {
  722. Output::Print(L"null\n");
  723. }
  724. #endif
  725. }
  726. }
  727. if (blockSucc->noImplicitCallUses != nullptr)
  728. {
  729. noImplicitCallUses->Or(blockSucc->noImplicitCallUses);
  730. if (deleteData)
  731. {
  732. JitAdelete(this->tempAlloc, blockSucc->noImplicitCallUses);
  733. blockSucc->noImplicitCallUses = nullptr;
  734. }
  735. }
  736. if (blockSucc->noImplicitCallNoMissingValuesUses != nullptr)
  737. {
  738. noImplicitCallNoMissingValuesUses->Or(blockSucc->noImplicitCallNoMissingValuesUses);
  739. if (deleteData)
  740. {
  741. JitAdelete(this->tempAlloc, blockSucc->noImplicitCallNoMissingValuesUses);
  742. blockSucc->noImplicitCallNoMissingValuesUses = nullptr;
  743. }
  744. }
  745. if (blockSucc->noImplicitCallNativeArrayUses != nullptr)
  746. {
  747. noImplicitCallNativeArrayUses->Or(blockSucc->noImplicitCallNativeArrayUses);
  748. if (deleteData)
  749. {
  750. JitAdelete(this->tempAlloc, blockSucc->noImplicitCallNativeArrayUses);
  751. blockSucc->noImplicitCallNativeArrayUses = nullptr;
  752. }
  753. }
  754. if (blockSucc->noImplicitCallJsArrayHeadSegmentSymUses != nullptr)
  755. {
  756. noImplicitCallJsArrayHeadSegmentSymUses->Or(blockSucc->noImplicitCallJsArrayHeadSegmentSymUses);
  757. if (deleteData)
  758. {
  759. JitAdelete(this->tempAlloc, blockSucc->noImplicitCallJsArrayHeadSegmentSymUses);
  760. blockSucc->noImplicitCallJsArrayHeadSegmentSymUses = nullptr;
  761. }
  762. }
  763. if (blockSucc->noImplicitCallArrayLengthSymUses != nullptr)
  764. {
  765. noImplicitCallArrayLengthSymUses->Or(blockSucc->noImplicitCallArrayLengthSymUses);
  766. if (deleteData)
  767. {
  768. JitAdelete(this->tempAlloc, blockSucc->noImplicitCallArrayLengthSymUses);
  769. blockSucc->noImplicitCallArrayLengthSymUses = nullptr;
  770. }
  771. }
  772. firstSucc = false;
  773. }
  774. NEXT_SUCCESSOR_BLOCK;
  775. #if DBG_DUMP
  776. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  777. if (PHASE_TRACE(Js::ObjTypeSpecStorePhase, this->func))
  778. {
  779. Output::Print(L"ObjTypeSpecStore: func %s, block %d: ",
  780. this->func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  781. block->GetBlockNum());
  782. if (stackSymToFinalType)
  783. {
  784. stackSymToFinalType->Dump();
  785. }
  786. else
  787. {
  788. Output::Print(L"null\n");
  789. }
  790. }
  791. if (PHASE_TRACE(Js::TraceObjTypeSpecTypeGuardsPhase, this->func))
  792. {
  793. Output::Print(L"ObjTypeSpec: func %s, block %d, guarded properties:\n",
  794. this->func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer), block->GetBlockNum());
  795. if (stackSymToGuardedProperties)
  796. {
  797. stackSymToGuardedProperties->Dump();
  798. Output::Print(L"\n");
  799. }
  800. else
  801. {
  802. Output::Print(L"null\n");
  803. }
  804. }
  805. if (PHASE_TRACE(Js::TraceObjTypeSpecWriteGuardsPhase, this->func))
  806. {
  807. Output::Print(L"ObjTypeSpec: func %s, block %d, write guards: ",
  808. this->func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer), block->GetBlockNum());
  809. if (stackSymToWriteGuardsMap)
  810. {
  811. Output::Print(L"\n");
  812. stackSymToWriteGuardsMap->Dump();
  813. Output::Print(L"\n");
  814. }
  815. else
  816. {
  817. Output::Print(L"null\n");
  818. }
  819. }
  820. #endif
  821. }
  822. #if DBG
  823. if (tempObjectVerifyTracker)
  824. {
  825. FOREACH_DEAD_SUCCESSOR_BLOCK(deadBlockSucc, block)
  826. {
  827. Assert(deadBlockSucc->tempObjectVerifyTracker || deadBlockSucc->isLoopHeader);
  828. if (deadBlockSucc->tempObjectVerifyTracker != nullptr)
  829. {
  830. Assert((deadBlockSucc->loop != nullptr) == deadBlockSucc->tempObjectVerifyTracker->HasTempTransferDependencies());
  831. // Dead block don't effect non temp use, we only need to carry the removed use bit vector forward
  832. // and put all the upward exposed use to the set that we might found out to be mark temp
  833. // after globopt
  834. tempObjectVerifyTracker->MergeDeadData(deadBlockSucc);
  835. }
  836. if (!byteCodeUpwardExposedUsed)
  837. {
  838. if (!deadBlockSucc->isLoopHeader && deadBlockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)
  839. {
  840. Assert(deadBlockSucc->GetDataUseCount() != 0);
  841. if (deadBlockSucc->DecrementDataUseCount() == 0)
  842. {
  843. this->DeleteBlockData(deadBlockSucc);
  844. }
  845. }
  846. }
  847. }
  848. NEXT_DEAD_SUCCESSOR_BLOCK;
  849. }
  850. #endif
  851. if (byteCodeUpwardExposedUsed)
  852. {
  853. FOREACH_DEAD_SUCCESSOR_BLOCK(deadBlockSucc, block)
  854. {
  855. Assert(deadBlockSucc->byteCodeUpwardExposedUsed || deadBlockSucc->isLoopHeader);
  856. if (deadBlockSucc->byteCodeUpwardExposedUsed)
  857. {
  858. byteCodeUpwardExposedUsed->Or(deadBlockSucc->byteCodeUpwardExposedUsed);
  859. if (this->tag == Js::DeadStorePhase)
  860. {
  861. #if DBG
  862. for (uint i = 0; i < byteCodeLocalsCount; i++)
  863. {
  864. if (byteCodeRestoreSyms[i] == nullptr)
  865. {
  866. byteCodeRestoreSyms[i] = deadBlockSucc->byteCodeRestoreSyms[i];
  867. }
  868. else
  869. {
  870. Assert(deadBlockSucc->byteCodeRestoreSyms[i] == nullptr
  871. || byteCodeRestoreSyms[i] == deadBlockSucc->byteCodeRestoreSyms[i]);
  872. }
  873. }
  874. #endif
  875. }
  876. }
  877. if (!deadBlockSucc->isLoopHeader && deadBlockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)
  878. {
  879. Assert(deadBlockSucc->GetDataUseCount() != 0);
  880. if (deadBlockSucc->DecrementDataUseCount() == 0)
  881. {
  882. this->DeleteBlockData(deadBlockSucc);
  883. }
  884. }
  885. }
  886. NEXT_DEAD_SUCCESSOR_BLOCK;
  887. }
  888. if (block->isLoopHeader)
  889. {
  890. this->DeleteBlockData(block);
  891. }
  892. else
  893. {
  894. if(block->GetDataUseCount() == 0)
  895. {
  896. Assert(block->slotDeadStoreCandidates == nullptr);
  897. Assert(block->tempNumberTracker == nullptr);
  898. Assert(block->tempObjectTracker == nullptr);
  899. Assert(block->tempObjectVerifyTracker == nullptr);
  900. Assert(block->upwardExposedUses == nullptr);
  901. Assert(block->upwardExposedFields == nullptr);
  902. Assert(block->typesNeedingKnownObjectLayout == nullptr);
  903. Assert(block->fieldHoistCandidates == nullptr);
  904. // byteCodeUpwardExposedUsed is required to populate the writeThroughSymbolsSet for the try region in the backwards pass
  905. Assert(block->byteCodeUpwardExposedUsed == nullptr || (this->tag == Js::BackwardPhase && this->func->HasTry() && this->func->DoOptimizeTryCatch()));
  906. Assert(block->byteCodeRestoreSyms == nullptr);
  907. Assert(block->stackSymToFinalType == nullptr);
  908. Assert(block->stackSymToGuardedProperties == nullptr);
  909. Assert(block->stackSymToWriteGuardsMap == nullptr);
  910. Assert(block->cloneStrCandidates == nullptr);
  911. Assert(block->noImplicitCallUses == nullptr);
  912. Assert(block->noImplicitCallNoMissingValuesUses == nullptr);
  913. Assert(block->noImplicitCallNativeArrayUses == nullptr);
  914. Assert(block->noImplicitCallJsArrayHeadSegmentSymUses == nullptr);
  915. Assert(block->noImplicitCallArrayLengthSymUses == nullptr);
  916. }
  917. else
  918. {
  919. // The collection pass sometimes does not know whether it can delete a successor block's data, so it may leave some
  920. // blocks with data intact. Delete the block data now.
  921. Assert(block->backwardPassCurrentLoop);
  922. Assert(block->backwardPassCurrentLoop->hasDeadStoreCollectionPass);
  923. Assert(!block->backwardPassCurrentLoop->hasDeadStorePrepass);
  924. DeleteBlockData(block);
  925. }
  926. block->backwardPassCurrentLoop = this->currentPrePassLoop;
  927. if (this->DoByteCodeUpwardExposedUsed()
  928. #if DBG
  929. || this->DoMarkTempObjectVerify()
  930. #endif
  931. )
  932. {
  933. block->SetDataUseCount(block->GetPredList()->Count() + block->GetDeadPredList()->Count());
  934. }
  935. else
  936. {
  937. block->SetDataUseCount(block->GetPredList()->Count());
  938. }
  939. }
  940. block->upwardExposedUses = upwardExposedUses;
  941. block->upwardExposedFields = upwardExposedFields;
  942. block->typesNeedingKnownObjectLayout = typesNeedingKnownObjectLayout;
  943. block->fieldHoistCandidates = fieldHoistCandidates;
  944. block->byteCodeUpwardExposedUsed = byteCodeUpwardExposedUsed;
  945. #if DBG
  946. block->byteCodeRestoreSyms = byteCodeRestoreSyms;
  947. #endif
  948. block->slotDeadStoreCandidates = slotDeadStoreCandidates;
  949. block->tempNumberTracker = tempNumberTracker;
  950. block->tempObjectTracker = tempObjectTracker;
  951. #if DBG
  952. block->tempObjectVerifyTracker = tempObjectVerifyTracker;
  953. #endif
  954. block->stackSymToFinalType = stackSymToFinalType;
  955. block->stackSymToGuardedProperties = stackSymToGuardedProperties;
  956. block->stackSymToWriteGuardsMap = stackSymToWriteGuardsMap;
  957. block->cloneStrCandidates = cloneStrCandidates;
  958. block->noImplicitCallUses = noImplicitCallUses;
  959. block->noImplicitCallNoMissingValuesUses = noImplicitCallNoMissingValuesUses;
  960. block->noImplicitCallNativeArrayUses = noImplicitCallNativeArrayUses;
  961. block->noImplicitCallJsArrayHeadSegmentSymUses = noImplicitCallJsArrayHeadSegmentSymUses;
  962. block->noImplicitCallArrayLengthSymUses = noImplicitCallArrayLengthSymUses;
  963. }
  964. ObjTypeGuardBucket
  965. BackwardPass::MergeGuardedProperties(ObjTypeGuardBucket bucket1, ObjTypeGuardBucket bucket2)
  966. {
  967. BVSparse<JitArenaAllocator> *guardedPropertyOps1 = bucket1.GetGuardedPropertyOps();
  968. BVSparse<JitArenaAllocator> *guardedPropertyOps2 = bucket2.GetGuardedPropertyOps();
  969. Assert(guardedPropertyOps1 || guardedPropertyOps2);
  970. BVSparse<JitArenaAllocator> *mergedPropertyOps;
  971. if (guardedPropertyOps1)
  972. {
  973. mergedPropertyOps = guardedPropertyOps1->CopyNew();
  974. if (guardedPropertyOps2)
  975. {
  976. mergedPropertyOps->Or(guardedPropertyOps2);
  977. }
  978. }
  979. else
  980. {
  981. mergedPropertyOps = guardedPropertyOps2->CopyNew();
  982. }
  983. ObjTypeGuardBucket bucket;
  984. bucket.SetGuardedPropertyOps(mergedPropertyOps);
  985. Js::Type *monoGuardType = bucket1.GetMonoGuardType();
  986. if (monoGuardType != nullptr)
  987. {
  988. Assert(!bucket2.NeedsMonoCheck() || monoGuardType == bucket2.GetMonoGuardType());
  989. }
  990. else
  991. {
  992. monoGuardType = bucket2.GetMonoGuardType();
  993. }
  994. bucket.SetMonoGuardType(monoGuardType);
  995. return bucket;
  996. }
  997. ObjWriteGuardBucket
  998. BackwardPass::MergeWriteGuards(ObjWriteGuardBucket bucket1, ObjWriteGuardBucket bucket2)
  999. {
  1000. BVSparse<JitArenaAllocator> *writeGuards1 = bucket1.GetWriteGuards();
  1001. BVSparse<JitArenaAllocator> *writeGuards2 = bucket2.GetWriteGuards();
  1002. Assert(writeGuards1 || writeGuards2);
  1003. BVSparse<JitArenaAllocator> *mergedWriteGuards;
  1004. if (writeGuards1)
  1005. {
  1006. mergedWriteGuards = writeGuards1->CopyNew();
  1007. if (writeGuards2)
  1008. {
  1009. mergedWriteGuards->Or(writeGuards2);
  1010. }
  1011. }
  1012. else
  1013. {
  1014. mergedWriteGuards = writeGuards2->CopyNew();
  1015. }
  1016. ObjWriteGuardBucket bucket;
  1017. bucket.SetWriteGuards(mergedWriteGuards);
  1018. return bucket;
  1019. }
  1020. void
  1021. BackwardPass::DeleteBlockData(BasicBlock * block)
  1022. {
  1023. if (block->slotDeadStoreCandidates != nullptr)
  1024. {
  1025. JitAdelete(this->tempAlloc, block->slotDeadStoreCandidates);
  1026. block->slotDeadStoreCandidates = nullptr;
  1027. }
  1028. if (block->tempNumberTracker != nullptr)
  1029. {
  1030. JitAdelete(this->tempAlloc, block->tempNumberTracker);
  1031. block->tempNumberTracker = nullptr;
  1032. }
  1033. if (block->tempObjectTracker != nullptr)
  1034. {
  1035. JitAdelete(this->tempAlloc, block->tempObjectTracker);
  1036. block->tempObjectTracker = nullptr;
  1037. }
  1038. #if DBG
  1039. if (block->tempObjectVerifyTracker != nullptr)
  1040. {
  1041. JitAdelete(this->tempAlloc, block->tempObjectVerifyTracker);
  1042. block->tempObjectVerifyTracker = nullptr;
  1043. }
  1044. #endif
  1045. if (block->stackSymToFinalType != nullptr)
  1046. {
  1047. block->stackSymToFinalType->Delete();
  1048. block->stackSymToFinalType = nullptr;
  1049. }
  1050. if (block->stackSymToGuardedProperties != nullptr)
  1051. {
  1052. block->stackSymToGuardedProperties->Delete();
  1053. block->stackSymToGuardedProperties = nullptr;
  1054. }
  1055. if (block->stackSymToWriteGuardsMap != nullptr)
  1056. {
  1057. block->stackSymToWriteGuardsMap->Delete();
  1058. block->stackSymToWriteGuardsMap = nullptr;
  1059. }
  1060. if (block->cloneStrCandidates != nullptr)
  1061. {
  1062. Assert(this->tag == Js::BackwardPhase);
  1063. JitAdelete(this->globOpt->alloc, block->cloneStrCandidates);
  1064. block->cloneStrCandidates = nullptr;
  1065. }
  1066. if (block->noImplicitCallUses != nullptr)
  1067. {
  1068. JitAdelete(this->tempAlloc, block->noImplicitCallUses);
  1069. block->noImplicitCallUses = nullptr;
  1070. }
  1071. if (block->noImplicitCallNoMissingValuesUses != nullptr)
  1072. {
  1073. JitAdelete(this->tempAlloc, block->noImplicitCallNoMissingValuesUses);
  1074. block->noImplicitCallNoMissingValuesUses = nullptr;
  1075. }
  1076. if (block->noImplicitCallNativeArrayUses != nullptr)
  1077. {
  1078. JitAdelete(this->tempAlloc, block->noImplicitCallNativeArrayUses);
  1079. block->noImplicitCallNativeArrayUses = nullptr;
  1080. }
  1081. if (block->noImplicitCallJsArrayHeadSegmentSymUses != nullptr)
  1082. {
  1083. JitAdelete(this->tempAlloc, block->noImplicitCallJsArrayHeadSegmentSymUses);
  1084. block->noImplicitCallJsArrayHeadSegmentSymUses = nullptr;
  1085. }
  1086. if (block->noImplicitCallArrayLengthSymUses != nullptr)
  1087. {
  1088. JitAdelete(this->tempAlloc, block->noImplicitCallArrayLengthSymUses);
  1089. block->noImplicitCallArrayLengthSymUses = nullptr;
  1090. }
  1091. if (block->upwardExposedUses != nullptr)
  1092. {
  1093. JitArenaAllocator *upwardExposedArena = (this->tag == Js::BackwardPhase) ? this->globOpt->alloc : this->tempAlloc;
  1094. JitAdelete(upwardExposedArena, block->upwardExposedUses);
  1095. block->upwardExposedUses = nullptr;
  1096. }
  1097. if (block->upwardExposedFields != nullptr)
  1098. {
  1099. JitArenaAllocator *upwardExposedArena = (this->tag == Js::BackwardPhase) ? this->globOpt->alloc : this->tempAlloc;
  1100. JitAdelete(upwardExposedArena, block->upwardExposedFields);
  1101. block->upwardExposedFields = nullptr;
  1102. }
  1103. if (block->typesNeedingKnownObjectLayout != nullptr)
  1104. {
  1105. JitAdelete(this->tempAlloc, block->typesNeedingKnownObjectLayout);
  1106. block->typesNeedingKnownObjectLayout = nullptr;
  1107. }
  1108. if (block->fieldHoistCandidates != nullptr)
  1109. {
  1110. JitAdelete(this->tempAlloc, block->fieldHoistCandidates);
  1111. block->fieldHoistCandidates = nullptr;
  1112. }
  1113. if (block->byteCodeUpwardExposedUsed != nullptr)
  1114. {
  1115. JitAdelete(this->tempAlloc, block->byteCodeUpwardExposedUsed);
  1116. block->byteCodeUpwardExposedUsed = nullptr;
  1117. #if DBG
  1118. JitAdeleteArray(this->tempAlloc, func->GetJnFunction()->GetLocalsCount(), block->byteCodeRestoreSyms);
  1119. block->byteCodeRestoreSyms = nullptr;
  1120. #endif
  1121. }
  1122. }
void
BackwardPass::ProcessLoopCollectionPass(BasicBlock *const lastBlock)
{
    // The collection pass is done before the prepass, to collect and propagate a minimal amount of information into nested
    // loops, for cases where the information is needed to make appropriate decisions on changing other state. For instance,
    // bailouts in nested loops need to be able to see all byte-code uses that are exposed to the bailout so that the
    // appropriate syms can be made upwards-exposed during the prepass. Byte-code uses that occur before the bailout in the
    // flow, or byte-code uses after the current loop, are not seen by bailouts inside the loop. The collection pass collects
    // byte-code uses and propagates them at least into each loop's header such that when bailouts are processed in the prepass,
    // they will have full visibility of byte-code upwards-exposed uses.
    //
    // For the collection pass, one pass is needed to collect all byte-code uses of a loop to the loop header. If the loop has
    // inner loops, another pass is needed to propagate byte-code uses in the outer loop into the inner loop's header, since
    // some byte-code uses may occur before the inner loop in the flow. The process continues recursively for inner loops. The
    // second pass only needs to walk as far as the first inner loop's header, since the purpose of that pass is only to
    // propagate collected information into the inner loops' headers.
    //
    // Consider the following case:
    // (Block 1, Loop 1 header)
    // ByteCodeUses s1
    // (Block 2, Loop 2 header)
    // (Block 3, Loop 3 header)
    // (Block 4)
    // BailOut
    // (Block 5, Loop 3 back-edge)
    // (Block 6, Loop 2 back-edge)
    // (Block 7, Loop 1 back-edge)
    //
    // Assume that the exit branch in each of these loops is in the loop's header block, like a 'while' loop. For the byte-code
    // use of 's1' to become visible to the bailout in the innermost loop, we need to walk the following blocks:
    // - Collection pass
    //   - 7, 6, 5, 4, 3, 2, 1, 7 - block 1 is the first block in loop 1 that sees 's1', and since block 7 has block 1 as its
    //     successor, block 7 sees 's1' now as well
    //   - 6, 5, 4, 3, 2, 6 - block 2 is the first block in loop 2 that sees 's1', and since block 6 has block 2 as its
    //     successor, block 6 sees 's1' now as well
    //   - 5, 4, 3 - block 3 is the first block in loop 3 that sees 's1'
    //   - The collection pass does not have to do another pass through the innermost loop because it does not have any inner
    //     loops of its own. It's sufficient to propagate the byte-code uses up to the loop header of each loop, as the
    //     prepass will do the remaining propagation.
    // - Prepass
    //   - 7, 6, 5, 4, ... - since block 5 has block 3 as its successor, block 5 sees 's1', and so does block 4. So, the bailout
    //     finally sees 's1' as a byte-code upwards-exposed use.
    //
    // The collection pass walks as described above, and consists of one pass, followed by another pass if there are inner
    // loops. The second pass only walks up to the first inner loop's header block, and during this pass upon reaching an inner
    // loop, the algorithm goes recursively for that inner loop, and once it returns, the second pass continues from above that
    // inner loop. Each bullet of the walk in the example above is a recursive call to ProcessLoopCollectionPass, except the
    // first line, which is the initial call.
    //
    // Imagine the whole example above is inside another loop, and at the bottom of that loop there is an assignment to 's1'. If
    // the bailout is the only use of 's1', then it needs to register 's1' as a use in the prepass to prevent treating the
    // assignment to 's1' as a dead store.
    Assert(tag == Js::DeadStorePhase);
    Assert(IsCollectionPass());
    Assert(lastBlock);

    Loop *const collectionPassLoop = lastBlock->loop;
    Assert(collectionPassLoop);
    Assert(!collectionPassLoop->hasDeadStoreCollectionPass);
    // Mark the loop as collected up front; the recursion below checks this
    // flag so the same loop is never collected twice.
    collectionPassLoop->hasDeadStoreCollectionPass = true;

    // The collection pass runs in prepass mode for this loop (IsPrePass() keys
    // off currentPrePassLoop — see the Assert below); save the previous value
    // so nested/recursive collection passes restore it correctly.
    Loop *const previousPrepassLoop = currentPrePassLoop;
    currentPrePassLoop = collectionPassLoop;
    Assert(IsPrePass());

    // First pass
    BasicBlock *firstInnerLoopHeader = nullptr;
    {
#if DBG_DUMP
        if(IsTraceEnabled())
        {
            Output::Print(L"******* COLLECTION PASS 1 START: Loop %u ********\n", collectionPassLoop->GetLoopTopInstr()->m_id);
        }
#endif

        FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, lastBlock, nullptr)
        {
            ProcessBlock(block);

            if(block->isLoopHeader)
            {
                if(block->loop == collectionPassLoop)
                {
                    // Reached this loop's own header; the first pass is done.
                    break;
                }

                // Keep track of the first inner loop's header for the second pass, which need only walk up to that block
                firstInnerLoopHeader = block;
            }
        } NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE;

#if DBG_DUMP
        if(IsTraceEnabled())
        {
            Output::Print(L"******** COLLECTION PASS 1 END: Loop %u *********\n", collectionPassLoop->GetLoopTopInstr()->m_id);
        }
#endif
    }

    // Second pass, only needs to run if there are any inner loops, to propagate collected information into those loops
    if(firstInnerLoopHeader)
    {
#if DBG_DUMP
        if(IsTraceEnabled())
        {
            Output::Print(L"******* COLLECTION PASS 2 START: Loop %u ********\n", collectionPassLoop->GetLoopTopInstr()->m_id);
        }
#endif

        FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, lastBlock, firstInnerLoopHeader)
        {
            Loop *const loop = block->loop;
            if(loop && loop != collectionPassLoop && !loop->hasDeadStoreCollectionPass)
            {
                // About to make a recursive call, so when jitting in the foreground, probe the stack
                if(!func->IsBackgroundJIT())
                {
                    PROBE_STACK(func->GetScriptContext(), Js::Constants::MinStackDefault);
                }
                ProcessLoopCollectionPass(block);

                // The inner loop's collection pass would have propagated collected information to its header block. Skip to the
                // inner loop's header block and continue from the block before it.
                block = loop->GetHeadBlock();
                Assert(block->isLoopHeader);
                continue;
            }

            ProcessBlock(block);
        } NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE;

#if DBG_DUMP
        if(IsTraceEnabled())
        {
            Output::Print(L"******** COLLECTION PASS 2 END: Loop %u *********\n", collectionPassLoop->GetLoopTopInstr()->m_id);
        }
#endif
    }

    // Restore the caller's prepass loop (an outer collection pass, or none).
    currentPrePassLoop = previousPrepassLoop;
}
// Runs the prepass over the loop whose lexically last block is 'lastBlock':
// optionally records the loop's live-out fields (off by default), runs the
// dead-store collection pass once per loop (dead-store phase only), then
// walks the loop's blocks backward from 'lastBlock' up to the loop header
// with currentPrePassLoop set, and finally marks hasDeadStorePrepass.
void
BackwardPass::ProcessLoop(BasicBlock * lastBlock)
{
#if DBG_DUMP
    if (this->IsTraceEnabled())
    {
        Output::Print(L"******* PREPASS START ********\n");
    }
#endif

    Loop *loop = lastBlock->loop;

    // This code doesn't work quite as intended. It is meant to capture fields that are live out of a loop to limit the
    // number of implicit call bailouts the forward pass must create (only compiler throughput optimization, no impact
    // on emitted code), but because it looks only at the lexically last block in the loop, it does the right thing only
    // for do-while loops. For other loops (for and while) the last block does not exit the loop. Even for do-while loops
    // this tracking can have the adverse effect of killing fields that should stay live after copy prop. Disabled by default.
    // Left in under a flag, in case we find compiler throughput issues and want to do additional experiments.
    if (PHASE_ON(Js::LiveOutFieldsPhase, this->func))
    {
        if (this->globOpt->DoFieldOpts(loop) || this->globOpt->DoFieldRefOpts(loop))
        {
            // Get the live-out set at the loop bottom.
            // This may not be the only loop exit, but all loop exits either leave the function or pass through here.
            // In the forward pass, we'll use this set to trim the live fields on exit from the loop
            // in order to limit the number of bailout points following the loop.
            BVSparse<JitArenaAllocator> *bv = JitAnew(this->func->m_fg->alloc, BVSparse<JitArenaAllocator>, this->func->m_fg->alloc);
            FOREACH_SUCCESSOR_BLOCK(blockSucc, lastBlock)
            {
                // Only successors outside this loop contribute to the live-out set.
                if (blockSucc->loop != loop)
                {
                    // Would like to assert this, but in strange exprgen cases involving "break LABEL" in nested
                    // loops the loop graph seems to get confused.
                    //Assert(!blockSucc->loop || blockSucc->loop->IsDescendentOrSelf(loop));
                    Assert(!blockSucc->loop || blockSucc->loop->hasDeadStorePrepass);

                    bv->Or(blockSucc->upwardExposedFields);
                }
            }
            NEXT_SUCCESSOR_BLOCK;
            lastBlock->loop->liveOutFields = bv;
        }
    }

    // In the dead-store phase, run the byte-code-use collection pass once per
    // loop before the prepass (see ProcessLoopCollectionPass for why).
    if(tag == Js::DeadStorePhase && !loop->hasDeadStoreCollectionPass)
    {
        Assert(!IsCollectionPass());
        Assert(!IsPrePass());
        isCollectionPass = true;
        ProcessLoopCollectionPass(lastBlock);
        isCollectionPass = false;
    }

    Assert(!this->IsPrePass());
    // Setting currentPrePassLoop puts the walk below into prepass mode.
    this->currentPrePassLoop = loop;

    FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, lastBlock, nullptr)
    {
        this->ProcessBlock(block);

        if (block->isLoopHeader && block->loop == lastBlock->loop)
        {
            // Reached this loop's own header; the prepass walk is complete.
            Assert(block->fieldHoistCandidates == nullptr);
            break;
        }
    }
    NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE;

    this->currentPrePassLoop = nullptr;
    Assert(lastBlock);
    __analysis_assume(lastBlock);
    lastBlock->loop->hasDeadStorePrepass = true;

#if DBG_DUMP
    if (this->IsTraceEnabled())
    {
        Output::Print(L"******** PREPASS END *********\n");
    }
#endif
}
  1322. void
  1323. BackwardPass::OptBlock(BasicBlock * block)
  1324. {
  1325. this->func->ThrowIfScriptClosed();
  1326. if (block->loop && !block->loop->hasDeadStorePrepass)
  1327. {
  1328. ProcessLoop(block);
  1329. }
  1330. this->ProcessBlock(block);
  1331. if(DoTrackNegativeZero())
  1332. {
  1333. negativeZeroDoesNotMatterBySymId->ClearAll();
  1334. }
  1335. if (DoTrackBitOpsOrNumber())
  1336. {
  1337. symUsedOnlyForBitOpsBySymId->ClearAll();
  1338. symUsedOnlyForNumberBySymId->ClearAll();
  1339. }
  1340. if(DoTrackIntOverflow())
  1341. {
  1342. intOverflowDoesNotMatterBySymId->ClearAll();
  1343. if(DoTrackCompoundedIntOverflow())
  1344. {
  1345. intOverflowDoesNotMatterInRangeBySymId->ClearAll();
  1346. }
  1347. }
  1348. }
  1349. void
  1350. BackwardPass::ProcessBailOutArgObj(BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed)
  1351. {
  1352. Assert(this->tag != Js::BackwardPhase);
  1353. if (this->globOpt->TrackArgumentsObject() && bailOutInfo->capturedValues.argObjSyms)
  1354. {
  1355. FOREACH_BITSET_IN_SPARSEBV(symId, bailOutInfo->capturedValues.argObjSyms)
  1356. {
  1357. if (byteCodeUpwardExposedUsed->TestAndClear(symId))
  1358. {
  1359. if (bailOutInfo->usedCapturedValues.argObjSyms == nullptr)
  1360. {
  1361. bailOutInfo->usedCapturedValues.argObjSyms = JitAnew(this->func->m_alloc,
  1362. BVSparse<JitArenaAllocator>, this->func->m_alloc);
  1363. }
  1364. bailOutInfo->usedCapturedValues.argObjSyms->Set(symId);
  1365. }
  1366. }
  1367. NEXT_BITSET_IN_SPARSEBV;
  1368. }
  1369. if (bailOutInfo->usedCapturedValues.argObjSyms)
  1370. {
  1371. byteCodeUpwardExposedUsed->Minus(bailOutInfo->usedCapturedValues.argObjSyms);
  1372. }
  1373. }
  1374. void
  1375. BackwardPass::ProcessBailOutConstants(BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed, BVSparse<JitArenaAllocator>* bailoutReferencedArgSymsBv)
  1376. {
  1377. Assert(this->tag != Js::BackwardPhase);
  1378. // Remove constants that we are already going to restore
  1379. SListBase<ConstantStackSymValue> * usedConstantValues = &bailOutInfo->usedCapturedValues.constantValues;
  1380. FOREACH_SLISTBASE_ENTRY(ConstantStackSymValue, value, usedConstantValues)
  1381. {
  1382. byteCodeUpwardExposedUsed->Clear(value.Key()->m_id);
  1383. bailoutReferencedArgSymsBv->Clear(value.Key()->m_id);
  1384. }
  1385. NEXT_SLISTBASE_ENTRY;
  1386. // Find other constants that we need to restore
  1387. FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, value, &bailOutInfo->capturedValues.constantValues, iter)
  1388. {
  1389. if (byteCodeUpwardExposedUsed->TestAndClear(value.Key()->m_id) || bailoutReferencedArgSymsBv->TestAndClear(value.Key()->m_id))
  1390. {
  1391. // Constant need to be restore, move it to the restore list
  1392. iter.MoveCurrentTo(usedConstantValues);
  1393. }
  1394. else if (!this->IsPrePass())
  1395. {
  1396. // Constants don't need to be restored, delete
  1397. iter.RemoveCurrent(this->func->m_alloc);
  1398. }
  1399. }
  1400. NEXT_SLISTBASE_ENTRY_EDITING;
  1401. }
// Partition the bailout's captured copy-prop pairs (original byte-code sym ->
// copy-prop sym). For each pair whose original sym is still byte-code
// upwards-exposed (or referenced as a bailout arg sym), choose which sym will
// restore the byte-code register (per the heuristic documented inline),
// prefer its type-specialized equivalent if the var form is not live, move
// the pair to the restore list, and mark the chosen sym upwards-exposed.
// Outside the prepass, pairs that are not needed are deleted.
void
BackwardPass::ProcessBailOutCopyProps(BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed, BVSparse<JitArenaAllocator>* bailoutReferencedArgSymsBv)
{
    Assert(this->tag != Js::BackwardPhase);
    Assert(!this->func->GetJnFunction()->GetIsAsmjsMode());

    // Remove copy prop that we were already going to restore
    SListBase<CopyPropSyms> * usedCopyPropSyms = &bailOutInfo->usedCapturedValues.copyPropSyms;
    FOREACH_SLISTBASE_ENTRY(CopyPropSyms, copyPropSyms, usedCopyPropSyms)
    {
        // The restore reads the copy-prop sym, so it stays upwards-exposed.
        byteCodeUpwardExposedUsed->Clear(copyPropSyms.Key()->m_id);
        this->currentBlock->upwardExposedUses->Set(copyPropSyms.Value()->m_id);
    }
    NEXT_SLISTBASE_ENTRY;

    JitArenaAllocator * allocator = this->func->m_alloc;
    BasicBlock * block = this->currentBlock;
    BVSparse<JitArenaAllocator> * upwardExposedUses = block->upwardExposedUses;

    // Find other copy prop that we need to restore
    FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSyms, &bailOutInfo->capturedValues.copyPropSyms, iter)
    {
        // Copy prop syms should be vars
        Assert(!copyPropSyms.Key()->IsTypeSpec());
        Assert(!copyPropSyms.Value()->IsTypeSpec());
        if (byteCodeUpwardExposedUsed->TestAndClear(copyPropSyms.Key()->m_id) || bailoutReferencedArgSymsBv->TestAndClear(copyPropSyms.Key()->m_id))
        {
            // This copy-prop sym needs to be restored; add it to the restore list.
            /*
            - copyPropSyms.Key() - original sym that is byte-code upwards-exposed, its corresponding byte-code register needs
              to be restored
            - copyPropSyms.Value() - copy-prop sym whose value the original sym has at the point of this instruction

            Heuristic:
            - By default, use the copy-prop sym to restore its corresponding byte code register
            - This is typically better because that allows the value of the original sym, if it's not used after the copy-prop
              sym is changed, to be discarded and we only have one lifetime (the copy-prop sym's lifetime) in to deal with for
              register allocation
            - Additionally, if the transferring store, which caused the original sym to have the same value as the copy-prop
              sym, becomes a dead store, the original sym won't actually attain the value of the copy-prop sym. In that case,
              the copy-prop sym must be used to restore the byte code register corresponding to original sym.

            Special case for functional correctness:
            - Consider that we always use the copy-prop sym to restore, and consider the following case:
                b = a
                a = c * d <Pre-op bail-out>
                  = b
            - This is rewritten by the lowerer as follows:
                b = a
                a = c
                a = a * d <Pre-op bail-out> (to make dst and src1 the same)
                  = b
            - The problem here is that at the point of the bail-out instruction, 'a' would be used to restore the value of 'b',
              but the value of 'a' has changed before the bail-out (at 'a = c').
            - In this case, we need to use 'b' (the original sym) to restore the value of 'b'. Because 'b' is upwards-exposed,
              'b = a' cannot be a dead store, therefore making it valid to use 'b' to restore.
            - Use the original sym to restore when all of the following are true:
                - The bailout is a pre-op bailout, and the bailout check is done after overwriting the destination
                - It's an int-specialized unary or binary operation that produces a value
                - The copy-prop sym is the destination of this instruction
                - None of the sources are the copy-prop sym. Otherwise, the value of the copy-prop sym will be saved as
                  necessary by the bailout code.
            */
            StackSym * stackSym = copyPropSyms.Key(); // assume that we'll use the original sym to restore
            SymID symId = stackSym->m_id;

            IR::Instr *const instr = bailOutInfo->bailOutInstr;
            StackSym *const dstSym = IR::RegOpnd::TryGetStackSym(instr->GetDst());
            // Detect the special case described above (pre-op bailout whose
            // int-specialized dst is the copy-prop sym and no src aliases it).
            if(instr->GetBailOutKind() & IR::BailOutOnResultConditions &&
                instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset &&
                bailOutInfo->bailOutOffset <= instr->GetByteCodeOffset() &&
                dstSym &&
                dstSym->IsInt32() &&
                dstSym->IsTypeSpec() &&
                dstSym->GetVarEquivSym(nullptr) == copyPropSyms.Value() &&
                instr->GetSrc1() &&
                !instr->GetDst()->IsEqual(instr->GetSrc1()) &&
                !(instr->GetSrc2() && instr->GetDst()->IsEqual(instr->GetSrc2())))
            {
                Assert(bailOutInfo->bailOutOffset == instr->GetByteCodeOffset());

                // Need to use the original sym to restore. The original sym is byte-code upwards-exposed, which is why it needs
                // to be restored. Because the original sym needs to be restored and the copy-prop sym is changing here, the
                // original sym must be live in some fashion at the point of this instruction, that will be verified below. The
                // original sym will also be made upwards-exposed from here, so the aforementioned transferring store of the
                // copy-prop sym to the original sym will not be a dead store.
            }
            else if (block->upwardExposedUses->Test(stackSym->m_id) && !block->upwardExposedUses->Test(copyPropSyms.Value()->m_id))
            {
                // Don't use the copy prop sym if it is not used and the orig sym still has uses.
                // No point in extending the lifetime of the copy prop sym unnecessarily.
            }
            else
            {
                // Need to use the copy-prop sym to restore
                stackSym = copyPropSyms.Value();
                symId = stackSym->m_id;
            }

            // Prefer to restore from type-specialized versions of the sym, as that will reduce the need for potentially
            // expensive ToVars that can more easily be eliminated due to being dead stores
            StackSym * int32StackSym = nullptr;
            StackSym * float64StackSym = nullptr;
            StackSym * simd128StackSym = nullptr;
            if (bailOutInfo->liveLosslessInt32Syms->Test(symId))
            {
                // Var version of the sym is not live, use the int32 version
                int32StackSym = stackSym->GetInt32EquivSym(nullptr);
                Assert(int32StackSym);
            }
            else if(bailOutInfo->liveFloat64Syms->Test(symId))
            {
                // Var/int32 version of the sym is not live, use the float64 version
                float64StackSym = stackSym->GetFloat64EquivSym(nullptr);
                Assert(float64StackSym);
            }
            // SIMD_JS
            else if (bailOutInfo->liveSimd128F4Syms->Test(symId))
            {
                simd128StackSym = stackSym->GetSimd128F4EquivSym(nullptr);
            }
            else if (bailOutInfo->liveSimd128I4Syms->Test(symId))
            {
                simd128StackSym = stackSym->GetSimd128I4EquivSym(nullptr);
            }
            else
            {
                Assert(bailOutInfo->liveVarSyms->Test(symId));
            }

            // We did not end up using the copy prop sym. Let's make sure the use of the original sym by the bailout is captured.
            if (stackSym != copyPropSyms.Value() && stackSym->HasArgSlotNum())
            {
                bailoutReferencedArgSymsBv->Set(stackSym->m_id);
            }

            // Record the chosen (possibly type-specialized) sym on the restore
            // list and mark it upwards-exposed so its def stays live.
            if (int32StackSym != nullptr)
            {
                Assert(float64StackSym == nullptr);
                usedCopyPropSyms->PrependNode(allocator, copyPropSyms.Key(), int32StackSym);
                iter.RemoveCurrent(allocator);
                upwardExposedUses->Set(int32StackSym->m_id);
            }
            else if (float64StackSym != nullptr)
            {
                // This float-specialized sym is going to be used to restore the corresponding byte-code register. Need to
                // ensure that the float value can be precisely coerced back to the original Var value by requiring that it is
                // specialized using BailOutNumberOnly.
                float64StackSym->m_requiresBailOnNotNumber = true;

                usedCopyPropSyms->PrependNode(allocator, copyPropSyms.Key(), float64StackSym);
                iter.RemoveCurrent(allocator);
                upwardExposedUses->Set(float64StackSym->m_id);
            }
            // SIMD_JS
            else if (simd128StackSym != nullptr)
            {
                usedCopyPropSyms->PrependNode(allocator, copyPropSyms.Key(), simd128StackSym);
                iter.RemoveCurrent(allocator);
                upwardExposedUses->Set(simd128StackSym->m_id);
            }
            else
            {
                usedCopyPropSyms->PrependNode(allocator, copyPropSyms.Key(), stackSym);
                iter.RemoveCurrent(allocator);
                upwardExposedUses->Set(symId);
            }
        }
        else if (!this->IsPrePass())
        {
            // Copy prop sym doesn't need to be restored, delete.
            iter.RemoveCurrent(allocator);
        }
    }
    NEXT_SLISTBASE_ENTRY_EDITING;
}
bool
BackwardPass::ProcessBailOutInfo(IR::Instr * instr)
{
    // Per-instruction bailout/ByteCodeUses handling for both the backward phase
    // and the dead-store phase. Returns true when this instruction has been
    // fully handled here (and possibly removed), so the caller should skip the
    // rest of its per-instruction processing.
    if (this->tag == Js::BackwardPhase)
    {
        // We don't need to fill in the bailout instruction in backward pass
        Assert(this->func->hasBailout || !instr->HasBailOutInfo());
        Assert(!instr->HasBailOutInfo() || instr->GetBailOutInfo()->byteCodeUpwardExposedUsed == nullptr || (this->func->HasTry() && this->func->DoOptimizeTryCatch()));

        if (instr->IsByteCodeUsesInstr())
        {
            // FGPeeps inserts bytecodeuses instrs with srcs. We need to look at them to set the proper
            // UpwardExposedUsed info and keep the defs alive.
            // The inliner inserts bytecodeuses instrs withs dsts, but we don't want to look at them for upwardExposedUsed
            // as it would cause real defs to look dead. We use these for bytecodeUpwardExposedUsed info only, which is needed
            // in the dead-store pass only.
            //
            // Handle the source side.
            IR::ByteCodeUsesInstr *byteCodeUsesInstr = instr->AsByteCodeUsesInstr();
            BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed = byteCodeUsesInstr->byteCodeUpwardExposedUsed;
            if (byteCodeUpwardExposedUsed != nullptr)
            {
                this->currentBlock->upwardExposedUses->Or(byteCodeUpwardExposedUsed);
            }
            return true;
        }
        return false;
    }

    // Dead-store phase from here on.
    if (instr->IsByteCodeUsesInstr())
    {
        // ByteCodeUses instructions only contribute byte-code liveness info for
        // bailout restores; once that info is collected they are removed
        // (except during the pre-pass).
        Assert(instr->m_opcode == Js::OpCode::ByteCodeUses);
#if DBG
        if (this->DoMarkTempObjectVerify() && (this->currentBlock->isDead || !this->func->hasBailout))
        {
            if (IsCollectionPass())
            {
                if (!this->func->hasBailout)
                {
                    // Prevent byte code uses from being remove on collection pass for mark temp object verify
                    // if we don't have any bailout
                    return true;
                }
            }
            else
            {
                this->currentBlock->tempObjectVerifyTracker->NotifyDeadByteCodeUses(instr);
            }
        }
#endif

        if (this->func->hasBailout)
        {
            Assert(this->DoByteCodeUpwardExposedUsed());

            // Just collect the byte code uses, and remove the instruction
            // We are going backward, process the dst first and then the src
            IR::Opnd * dst = instr->GetDst();
            if (dst)
            {
                IR::RegOpnd * dstRegOpnd = dst->AsRegOpnd();
                StackSym * dstStackSym = dstRegOpnd->m_sym->AsStackSym();
                Assert(!dstRegOpnd->GetIsJITOptimizedReg());
                Assert(dstStackSym->GetByteCodeRegSlot() != Js::Constants::NoRegister);
                if (dstStackSym->GetType() != TyVar)
                {
                    // Track liveness on the var-typed version of the sym.
                    dstStackSym = dstStackSym->GetVarEquivSym(nullptr);
                }

                // If the current region is a Try, symbols in its write-through set shouldn't be cleared.
                // Otherwise, symbols in the write-through set of the first try ancestor shouldn't be cleared.
                if (!this->currentRegion ||
                    !this->CheckWriteThroughSymInRegion(this->currentRegion, dstStackSym))
                {
                    this->currentBlock->byteCodeUpwardExposedUsed->Clear(dstStackSym->m_id);
#if DBG
                    // We can only track first level function stack syms right now
                    if (dstStackSym->GetByteCodeFunc() == this->func)
                    {
                        this->currentBlock->byteCodeRestoreSyms[dstStackSym->GetByteCodeRegSlot()] = nullptr;
                    }
#endif
                }
            }

            // Source side: these byte-code syms become upward-exposed for
            // bailout-restore purposes.
            IR::ByteCodeUsesInstr *byteCodeUsesInstr = instr->AsByteCodeUsesInstr();
            BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed = byteCodeUsesInstr->byteCodeUpwardExposedUsed;
            if (byteCodeUpwardExposedUsed != nullptr)
            {
                this->currentBlock->byteCodeUpwardExposedUsed->Or(byteCodeUpwardExposedUsed);
#if DBG
                FOREACH_BITSET_IN_SPARSEBV(symId, byteCodeUpwardExposedUsed)
                {
                    StackSym * stackSym = this->func->m_symTable->FindStackSym(symId);
                    Assert(!stackSym->IsTypeSpec());
                    // We can only track first level function stack syms right now
                    if (stackSym->GetByteCodeFunc() == this->func)
                    {
                        Js::RegSlot byteCodeRegSlot = stackSym->GetByteCodeRegSlot();
                        Assert(byteCodeRegSlot != Js::Constants::NoRegister);
                        if (this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] != stackSym)
                        {
                            AssertMsg(this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] == nullptr,
                                "Can't have two active lifetime for the same byte code register");
                            this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] = stackSym;
                        }
                    }
                }
                NEXT_BITSET_IN_SPARSEBV;
#endif
            }

            if(IsCollectionPass())
            {
                return true;
            }

            // A pending pre-op bailout may now have had all of its relevant
            // ByteCodeUses instructions processed; flush it before continuing.
            ProcessPendingPreOpBailOutInfo(instr);

            PropertySym *propertySymUse = byteCodeUsesInstr->propertySymUse;
            if (propertySymUse && !this->currentBlock->isDead)
            {
                this->currentBlock->upwardExposedFields->Set(propertySymUse->m_id);
            }

            if (this->IsPrePass())
            {
                // Don't remove the instruction yet if we are in the prepass
                // But tell the caller we don't need to process the instruction any more
                return true;
            }
        }

        this->currentBlock->RemoveInstr(instr);
        return true;
    }

    if(IsCollectionPass())
    {
        return false;
    }

    if (instr->HasBailOutInfo())
    {
        Assert(this->func->hasBailout);
        Assert(this->DoByteCodeUpwardExposedUsed());

        BailOutInfo * bailOutInfo = instr->GetBailOutInfo();

        // Only process the bailout info if this is the main bailout point (instead of shared)
        if (bailOutInfo->bailOutInstr == instr)
        {
            if(instr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset ||
                bailOutInfo->bailOutOffset > instr->GetByteCodeOffset())
            {
                // Currently, we only have post-op bailout with BailOutOnImplicitCalls
                // or JIT inserted operation (which no byte code offsets).
                // If there are other bailouts that we want to bailout after the operation,
                // we have to make sure that it still doesn't do the implicit call
                // if it is done on the stack object.
                // Otherwise, the stack object will be passed to the implicit call functions.
                Assert(instr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset
                    || (instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCalls
                    || (instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutInvalid);

                // This instruction bails out to a later byte-code instruction, so process the bailout info now
                ProcessBailOutInfo(instr, bailOutInfo);
            }
            else
            {
                // This instruction bails out to the equivalent byte code instruction. This instruction and ByteCodeUses
                // instructions relevant to this instruction need to be processed before the bailout info for this instruction
                // can be processed, so that it can be determined what byte code registers are used by the equivalent byte code
                // instruction and need to be restored. Save the instruction for bailout info processing later.
                Assert(bailOutInfo->bailOutOffset == instr->GetByteCodeOffset());
                Assert(!preOpBailOutInstrToProcess);
                preOpBailOutInstrToProcess = instr;
            }
        }
    }

    return false;
}
  1733. bool
  1734. BackwardPass::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, bool mayNeedImplicitCallBailOut, bool hasLiveFields)
  1735. {
  1736. return this->globOpt->IsImplicitCallBailOutCurrentlyNeeded(
  1737. instr, nullptr, nullptr, this->currentBlock, hasLiveFields, mayNeedImplicitCallBailOut, false);
  1738. }
void
BackwardPass::DeadStoreTypeCheckBailOut(IR::Instr * instr)
{
    // Good news: There are cases where the forward pass installs BailOutFailedTypeCheck, but the dead store pass
    // discovers that the checked type is dead.
    // Bad news: We may still need implicit call bailout, and it's up to the dead store pass to figure this out.
    // Worse news: BailOutFailedTypeCheck is pre-op, and BailOutOnImplicitCall is post-op. We'll use a special
    // bailout kind to indicate implicit call bailout that targets its own instruction. The lowerer will emit
    // code to disable/re-enable implicit calls around the operation.
    Assert(this->tag == Js::DeadStorePhase);

    if (this->IsPrePass() || !instr->HasBailOutInfo())
    {
        return;
    }

    // Only instructions carrying a type-check bailout are of interest here.
    IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
    if (!IR::IsTypeCheckBailOutKind(oldBailOutKind))
    {
        return;
    }

    // Either src1 or dst must be a property sym operand
    Assert((instr->GetSrc1() && instr->GetSrc1()->IsSymOpnd() && instr->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd()) ||
        (instr->GetDst() && instr->GetDst()->IsSymOpnd() && instr->GetDst()->AsSymOpnd()->IsPropertySymOpnd()));

    IR::PropertySymOpnd *propertySymOpnd =
        (instr->GetDst() && instr->GetDst()->IsSymOpnd()) ? instr->GetDst()->AsPropertySymOpnd() : instr->GetSrc1()->AsPropertySymOpnd();
    bool isTypeCheckProtected = false;
    IR::BailOutKind bailOutKind;
    if (GlobOpt::NeedsTypeCheckBailOut(instr, propertySymOpnd, propertySymOpnd == instr->GetDst(), &isTypeCheckProtected, &bailOutKind))
    {
        // If we installed a failed type check bailout in the forward pass, but we are now discovering that the checked
        // type is dead, we may still need a bailout on failed fixed field type check. These type checks are required
        // regardless of whether the checked type is dead. Hence, the bailout kind may change here.
        Assert((oldBailOutKind & ~IR::BailOutKindBits) == bailOutKind ||
            bailOutKind == IR::BailOutFailedFixedFieldTypeCheck || bailOutKind == IR::BailOutFailedEquivalentFixedFieldTypeCheck);
        instr->SetBailOutKind(bailOutKind);
        return;
    }
    else if (isTypeCheckProtected)
    {
        // An upstream type check already covers this operation: the bailout is
        // redundant and can be dropped entirely.
        instr->ClearBailOutInfo();
        if (preOpBailOutInstrToProcess == instr)
        {
            preOpBailOutInstrToProcess = nullptr;
        }
        return;
    }

    Assert(!propertySymOpnd->IsTypeCheckProtected());

    // If all we're doing here is checking the type (e.g. because we've hoisted a field load or store out of the loop, but needed
    // the type check to remain in the loop), and now it turns out we don't need the type checked, we can simply turn this into
    // a NOP and remove the bailout.
    if (instr->m_opcode == Js::OpCode::CheckObjType)
    {
        Assert(instr->GetDst() == nullptr && instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr);
        instr->m_opcode = Js::OpCode::Nop;
        instr->FreeSrc1();
        instr->ClearBailOutInfo();
        if (this->preOpBailOutInstrToProcess == instr)
        {
            this->preOpBailOutInstrToProcess = nullptr;
        }
        return;
    }

    // We don't need BailOutFailedTypeCheck but may need BailOutOnImplicitCall.
    // Consider: are we in the loop landing pad? If so, no bailout, since implicit calls will be checked at
    // the end of the block.
    if (this->currentBlock->IsLandingPad())
    {
        // We're in the landing pad.
        if (preOpBailOutInstrToProcess == instr)
        {
            preOpBailOutInstrToProcess = nullptr;
        }
        instr->UnlinkBailOutInfo();
        return;
    }

    // We're not checking for polymorphism, so don't let the bailout indicate that we
    // detected polymorphism.
    instr->GetBailOutInfo()->polymorphicCacheIndex = (uint)-1;

    // Keep the mark temp object bit if it is there so that we will not remove the implicit call check
    instr->SetBailOutKind(IR::BailOutOnImplicitCallsPreOp | (oldBailOutKind & IR::BailOutMarkTempObject));
}
void
BackwardPass::DeadStoreImplicitCallBailOut(IR::Instr * instr, bool hasLiveFields)
{
    // Dead-store-phase pruning of implicit-call bailouts: remove or weaken the
    // bailout on this instruction when complete liveness info shows it is not
    // required.
    Assert(this->tag == Js::DeadStorePhase);

    if (this->IsPrePass() || !instr->HasBailOutInfo())
    {
        // Don't do this in the pre-pass, because, for instance, we don't have live-on-back-edge fields yet.
        return;
    }

    if (OpCodeAttr::BailOutRec(instr->m_opcode))
    {
        // This is something like OpCode::BailOutOnNotEqual. Assume it needs what it's got.
        return;
    }

    UpdateArrayBailOutKind(instr);

    // Install the implicit call PreOp for mark temp object if we need one.
    IR::BailOutKind kind = instr->GetBailOutKind();
    IR::BailOutKind kindNoBits = kind & ~IR::BailOutKindBits;
    if ((kind & IR::BailOutMarkTempObject) != 0 && kindNoBits != IR::BailOutOnImplicitCallsPreOp)
    {
        Assert(kindNoBits != IR::BailOutOnImplicitCalls);
        if (kindNoBits == IR::BailOutInvalid)
        {
            // We should only have combined with array bits
            Assert((kind & ~IR::BailOutForArrayBits) == IR::BailOutMarkTempObject);
            // Don't need to install if we are not going to do helper calls,
            // or we are in the landingPad since implicit calls are already turned off.
            if ((kind & IR::BailOutOnArrayAccessHelperCall) == 0 && !this->currentBlock->IsLandingPad())
            {
                kind += IR::BailOutOnImplicitCallsPreOp;
                instr->SetBailOutKind(kind);
            }
        }
    }

    // Currently only try to eliminate these bailout kinds. The others are required in cases
    // where we don't necessarily have live/hoisted fields.
    const bool mayNeedBailOnImplicitCall = BailOutInfo::IsBailOutOnImplicitCalls(kind);
    if (!mayNeedBailOnImplicitCall && kind != IR::BailOutExpectingObject)
    {
        if (kind & IR::BailOutMarkTempObject)
        {
            if (kind == IR::BailOutMarkTempObject)
            {
                // Landing pad does not need per-instr implicit call bailouts.
                Assert(this->currentBlock->IsLandingPad());
                instr->ClearBailOutInfo();
                if (this->preOpBailOutInstrToProcess == instr)
                {
                    this->preOpBailOutInstrToProcess = nullptr;
                }
            }
            else
            {
                // Mark temp object bit is not needed after dead store pass
                instr->SetBailOutKind(kind & ~IR::BailOutMarkTempObject);
            }
        }
        return;
    }

    // We have an implicit call bailout in the code, and we want to make sure that it's required.
    // Do this now, because only in the dead store pass do we have complete forward and backward liveness info.
    bool needsBailOutOnImplicitCall = this->IsImplicitCallBailOutCurrentlyNeeded(instr, mayNeedBailOnImplicitCall, hasLiveFields);

    if(!UpdateImplicitCallBailOutKind(instr, needsBailOutOnImplicitCall))
    {
        // The bailout is not needed at all; drop it (and forget any pending
        // pre-op processing for this instruction).
        instr->ClearBailOutInfo();
        if (preOpBailOutInstrToProcess == instr)
        {
            preOpBailOutInstrToProcess = nullptr;
        }
#if DBG
        if (this->DoMarkTempObjectVerify())
        {
            this->currentBlock->tempObjectVerifyTracker->NotifyBailOutRemoval(instr, this);
        }
#endif
    }
}
  1896. void
  1897. BackwardPass::ProcessPendingPreOpBailOutInfo(IR::Instr *const currentInstr)
  1898. {
  1899. Assert(!IsCollectionPass());
  1900. if(!preOpBailOutInstrToProcess)
  1901. {
  1902. return;
  1903. }
  1904. IR::Instr *const prevInstr = currentInstr->m_prev;
  1905. if(prevInstr &&
  1906. prevInstr->IsByteCodeUsesInstr() &&
  1907. prevInstr->AsByteCodeUsesInstr()->GetByteCodeOffset() == preOpBailOutInstrToProcess->GetByteCodeOffset())
  1908. {
  1909. return;
  1910. }
  1911. // A pre-op bailout instruction was saved for bailout info processing after the instruction and relevant ByteCodeUses
  1912. // instructions before it have been processed. We can process the bailout info for that instruction now.
  1913. BailOutInfo *const bailOutInfo = preOpBailOutInstrToProcess->GetBailOutInfo();
  1914. Assert(bailOutInfo->bailOutInstr == preOpBailOutInstrToProcess);
  1915. Assert(bailOutInfo->bailOutOffset == preOpBailOutInstrToProcess->GetByteCodeOffset());
  1916. ProcessBailOutInfo(preOpBailOutInstrToProcess, bailOutInfo);
  1917. preOpBailOutInstrToProcess = nullptr;
  1918. }
void
BackwardPass::ProcessBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo)
{
    /*
    When we optimize functions having try-catch, we install a bailout at the starting of the catch block, namely, BailOnException.
    We don't have flow edges from all the possible exception points in the try to the catch block. As a result, this bailout should
    not try to restore from the constant values or copy-prop syms or the type specialized syms, as these may not necessarily be/have
    the right values. For example,

    //constant values
    c =
    try
    {
        <exception>
        c = k (constant)
    }
    catch
    {
        BailOnException
        = c  <-- We need to restore c from the value outside the try.
    }

    //copy-prop syms
    c =
    try
    {
        b = a
        <exception>
        c = b
    }
    catch
    {
        BailOnException
        = c  <-- We really want to restore c from its original sym, and not from its copy-prop sym, a
    }

    //type specialized syms
    a =
    try
    {
        <exception>
        a++  <-- type specializes a
    }
    catch
    {
        BailOnException
        = a  <-- We need to restore a from its var version.
    }
    */
    BasicBlock * block = this->currentBlock;
    BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed = block->byteCodeUpwardExposedUsed;

    Assert(bailOutInfo->bailOutInstr == instr);

    // The byteCodeUpwardExposedUsed should only be assigned once. The only case which would break this
    // assumption is when we are optimizing a function having try-catch. In that case, we need the
    // byteCodeUpwardExposedUsed analysis in the initial backward pass too.
    Assert(bailOutInfo->byteCodeUpwardExposedUsed == nullptr || (this->func->HasTry() && this->func->DoOptimizeTryCatch()));

    // Make a copy of the byteCodeUpwardExposedUsed so we can remove the constants
    if (!this->IsPrePass())
    {
        // Create the BV of symbols that need to be restored in the BailOutRecord
        byteCodeUpwardExposedUsed = byteCodeUpwardExposedUsed->CopyNew(this->func->m_alloc);
        bailOutInfo->byteCodeUpwardExposedUsed = byteCodeUpwardExposedUsed;
    }
    else
    {
        // Create a temporary byteCodeUpwardExposedUsed
        byteCodeUpwardExposedUsed = byteCodeUpwardExposedUsed->CopyNew(this->tempAlloc);
    }

    // All the register-based argument syms need to be tracked. They are either:
    //      1. Referenced as constants in bailOutInfo->usedcapturedValues.constantValues
    //      2. Referenced using copy prop syms in bailOutInfo->usedcapturedValues.copyPropSyms
    //      3. Marked as m_isBailOutReferenced = true & added to upwardExposedUsed bit vector to ensure we do not dead store their defs.
    // The third set of syms is represented by the bailoutReferencedArgSymsBv.
    BVSparse<JitArenaAllocator>* bailoutReferencedArgSymsBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
    if (!this->IsPrePass())
    {
        bailOutInfo->IterateArgOutSyms([=](uint, uint, StackSym* sym) {
            if (!sym->IsArgSlotSym())
            {
                bailoutReferencedArgSymsBv->Set(sym->m_id);
            }
        });
    }

    // Process Argument object first, as they can be found on the stack and don't need to rely on copy prop
    this->ProcessBailOutArgObj(bailOutInfo, byteCodeUpwardExposedUsed);

    if (instr->m_opcode != Js::OpCode::BailOnException) // see comment at the beginning of this function
    {
        this->ProcessBailOutConstants(bailOutInfo, byteCodeUpwardExposedUsed, bailoutReferencedArgSymsBv);
        this->ProcessBailOutCopyProps(bailOutInfo, byteCodeUpwardExposedUsed, bailoutReferencedArgSymsBv);
    }

    BVSparse<JitArenaAllocator> * tempBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);

    if (bailOutInfo->liveVarSyms)
    {
        // Prefer to restore from type-specialized versions of the sym, as that will reduce the need for potentially expensive
        // ToVars that can more easily be eliminated due to being dead stores.

#if DBG
        // SIMD_JS
        // Simd128 syms should be live in at most one form
        tempBv->And(bailOutInfo->liveSimd128F4Syms, bailOutInfo->liveSimd128I4Syms);
        Assert(tempBv->IsEmpty());

        // Verify that all syms to restore are live in some fashion
        tempBv->Minus(byteCodeUpwardExposedUsed, bailOutInfo->liveVarSyms);
        tempBv->Minus(bailOutInfo->liveLosslessInt32Syms);
        tempBv->Minus(bailOutInfo->liveFloat64Syms);
        tempBv->Minus(bailOutInfo->liveSimd128F4Syms);
        tempBv->Minus(bailOutInfo->liveSimd128I4Syms);
        Assert(tempBv->IsEmpty());
#endif

        if (this->func->IsJitInDebugMode())
        {
            // Add to byteCodeUpwardExposedUsed the non-temp local vars used so far to restore during bail out.
            // The ones that are not used so far will get their values from bytecode when we continue after bail out in interpreter.
            Assert(this->func->m_nonTempLocalVars);
            tempBv->And(this->func->m_nonTempLocalVars, bailOutInfo->liveVarSyms);

            // Remove syms that are restored in other ways than byteCodeUpwardExposedUsed.
            FOREACH_SLIST_ENTRY(ConstantStackSymValue, value, &bailOutInfo->usedCapturedValues.constantValues)
            {
                Assert(value.Key()->HasByteCodeRegSlot() || value.Key()->GetInstrDef()->m_opcode == Js::OpCode::BytecodeArgOutCapture);
                if (value.Key()->HasByteCodeRegSlot())
                {
                    tempBv->Clear(value.Key()->GetByteCodeRegSlot());
                }
            }
            NEXT_SLIST_ENTRY;
            FOREACH_SLIST_ENTRY(CopyPropSyms, value, &bailOutInfo->usedCapturedValues.copyPropSyms)
            {
                Assert(value.Key()->HasByteCodeRegSlot() || value.Key()->GetInstrDef()->m_opcode == Js::OpCode::BytecodeArgOutCapture);
                if (value.Key()->HasByteCodeRegSlot())
                {
                    tempBv->Clear(value.Key()->GetByteCodeRegSlot());
                }
            }
            NEXT_SLIST_ENTRY;
            if (bailOutInfo->usedCapturedValues.argObjSyms)
            {
                tempBv->Minus(bailOutInfo->usedCapturedValues.argObjSyms);
            }

            byteCodeUpwardExposedUsed->Or(tempBv);
        }

        if (instr->m_opcode != Js::OpCode::BailOnException) // see comment at the beginning of this function
        {
            // For each restore candidate that is live in a type-specialized form,
            // swap the var sym id in the BV for the id of the specialized
            // equivalent sym, so the restore comes from the specialized value.

            // Int32
            tempBv->And(byteCodeUpwardExposedUsed, bailOutInfo->liveLosslessInt32Syms);
            byteCodeUpwardExposedUsed->Minus(tempBv);
            FOREACH_BITSET_IN_SPARSEBV(symId, tempBv)
            {
                StackSym * stackSym = this->func->m_symTable->FindStackSym(symId);
                Assert(stackSym->GetType() == TyVar);
                StackSym * int32StackSym = stackSym->GetInt32EquivSym(nullptr);
                Assert(int32StackSym);
                byteCodeUpwardExposedUsed->Set(int32StackSym->m_id);
            }
            NEXT_BITSET_IN_SPARSEBV;

            // Float64
            tempBv->And(byteCodeUpwardExposedUsed, bailOutInfo->liveFloat64Syms);
            byteCodeUpwardExposedUsed->Minus(tempBv);
            FOREACH_BITSET_IN_SPARSEBV(symId, tempBv)
            {
                StackSym * stackSym = this->func->m_symTable->FindStackSym(symId);
                Assert(stackSym->GetType() == TyVar);
                StackSym * float64StackSym = stackSym->GetFloat64EquivSym(nullptr);
                Assert(float64StackSym);
                byteCodeUpwardExposedUsed->Set(float64StackSym->m_id);

                // This float-specialized sym is going to be used to restore the corresponding byte-code register. Need to
                // ensure that the float value can be precisely coerced back to the original Var value by requiring that it is
                // specialized using BailOutNumberOnly.
                float64StackSym->m_requiresBailOnNotNumber = true;
            }
            NEXT_BITSET_IN_SPARSEBV;

            // SIMD_JS
            tempBv->Or(bailOutInfo->liveSimd128F4Syms, bailOutInfo->liveSimd128I4Syms);
            tempBv->And(byteCodeUpwardExposedUsed);
            byteCodeUpwardExposedUsed->Minus(tempBv);
            FOREACH_BITSET_IN_SPARSEBV(symId, tempBv)
            {
                StackSym * stackSym = this->func->m_symTable->FindStackSym(symId);
                Assert(stackSym->GetType() == TyVar);
                StackSym * simd128Sym = nullptr;
                if (bailOutInfo->liveSimd128F4Syms->Test(symId))
                {
                    simd128Sym = stackSym->GetSimd128F4EquivSym(nullptr);
                }
                else
                {
                    Assert(bailOutInfo->liveSimd128I4Syms->Test(symId));
                    simd128Sym = stackSym->GetSimd128I4EquivSym(nullptr);
                }
                byteCodeUpwardExposedUsed->Set(simd128Sym->m_id);
            }
            NEXT_BITSET_IN_SPARSEBV;
        }

        // Var
        // Any remaining syms to restore will be restored from their var versions
    }
    else
    {
        Assert(!this->func->DoGlobOpt());
    }

    JitAdelete(this->tempAlloc, tempBv);

    // BailOnNoProfile makes some edges dead. Upward exposed symbols info set after the BailOnProfile won't
    // flow through these edges, and, in turn, not through predecessor edges of the block containing the
    // BailOnNoProfile. This is specifically bad for an inlinee's argout syms as they are set as upward exposed
    // when we see the InlineeEnd, but may not look so to some blocks and may get overwritten.
    // Set the argout syms as upward exposed here.
    if (instr->m_opcode == Js::OpCode::BailOnNoProfile && instr->m_func->IsInlinee() &&
        instr->m_func->m_hasInlineArgsOpt && instr->m_func->frameInfo->isRecorded)
    {
        instr->m_func->frameInfo->IterateSyms([=](StackSym* argSym)
        {
            this->currentBlock->upwardExposedUses->Set(argSym->m_id);
        });
    }

    // Mark all the register that we need to restore as used (excluding constants)
    block->upwardExposedUses->Or(byteCodeUpwardExposedUsed);
    block->upwardExposedUses->Or(bailoutReferencedArgSymsBv);

    if (!this->IsPrePass())
    {
        bailOutInfo->IterateArgOutSyms([=](uint index, uint, StackSym* sym) {
            if (sym->IsArgSlotSym() || bailoutReferencedArgSymsBv->Test(sym->m_id))
            {
                bailOutInfo->argOutSyms[index]->m_isBailOutReferenced = true;
            }
        });
    }

    JitAdelete(this->tempAlloc, bailoutReferencedArgSymsBv);

    if (this->IsPrePass())
    {
        // The pre-pass copy was only a scratch BV; the real one is created on
        // the non-pre-pass and attached to the BailOutInfo above.
        JitAdelete(this->tempAlloc, byteCodeUpwardExposedUsed);
    }
}
  2146. void
  2147. BackwardPass::ProcessBlock(BasicBlock * block)
  2148. {
  2149. this->currentBlock = block;
  2150. this->MergeSuccBlocksInfo(block);
  2151. #if DBG_DUMP
  2152. if (this->IsTraceEnabled())
  2153. {
  2154. Output::Print(L"******************************* Before Process Block *******************************n");
  2155. DumpBlockData(block);
  2156. }
  2157. #endif
  2158. FOREACH_INSTR_BACKWARD_IN_BLOCK_EDITING(instr, instrPrev, block)
  2159. {
  2160. #if DBG_DUMP
  2161. if (!IsCollectionPass() && IsTraceEnabled() && Js::Configuration::Global.flags.Verbose)
  2162. {
  2163. Output::Print(L">>>>>>>>>>>>>>>>>>>>>> %s: Instr Start\n", tag == Js::BackwardPhase? L"BACKWARD" : L"DEADSTORE");
  2164. instr->Dump();
  2165. Output::SkipToColumn(10);
  2166. Output::Print(L" Exposed Use: ");
  2167. block->upwardExposedUses->Dump();
  2168. Output::SkipToColumn(10);
  2169. Output::Print(L"Exposed Fields: ");
  2170. block->upwardExposedFields->Dump();
  2171. if (block->byteCodeUpwardExposedUsed)
  2172. {
  2173. Output::SkipToColumn(10);
  2174. Output::Print(L" Byte Code Use: ");
  2175. block->byteCodeUpwardExposedUsed->Dump();
  2176. }
  2177. Output::Print(L"--------------------\n");
  2178. }
  2179. #endif
  2180. this->currentInstr = instr;
  2181. this->currentRegion = this->currentBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
  2182. if (ProcessNoImplicitCallUses(instr) || this->ProcessBailOutInfo(instr))
  2183. {
  2184. continue;
  2185. }
  2186. IR::Instr *instrNext = instr->m_next;
  2187. if (this->TrackNoImplicitCallInlinees(instr))
  2188. {
  2189. instrPrev = instrNext->m_prev;
  2190. continue;
  2191. }
  2192. bool hasLiveFields = (block->upwardExposedFields && !block->upwardExposedFields->IsEmpty());
  2193. IR::Opnd * opnd = instr->GetDst();
  2194. if (opnd != nullptr)
  2195. {
  2196. bool isRemoved = ReverseCopyProp(instr);
  2197. if (isRemoved)
  2198. {
  2199. instrPrev = instrNext->m_prev;
  2200. continue;
  2201. }
  2202. if (instr->m_opcode == Js::OpCode::Conv_Bool)
  2203. {
  2204. isRemoved = this->FoldCmBool(instr);
  2205. if (isRemoved)
  2206. {
  2207. continue;
  2208. }
  2209. }
  2210. ProcessNewScObject(instr);
  2211. this->ProcessTransfers(instr);
  2212. isRemoved = this->ProcessDef(opnd);
  2213. if (isRemoved)
  2214. {
  2215. continue;
  2216. }
  2217. }
  2218. if(!IsCollectionPass())
  2219. {
  2220. this->MarkTempProcessInstr(instr);
  2221. this->ProcessFieldKills(instr);
  2222. if (this->DoDeadStoreSlots()
  2223. && (instr->HasAnyImplicitCalls() || instr->HasBailOutInfo() || instr->UsesAllFields()))
  2224. {
  2225. // Can't dead-store slots if there can be an implicit-call, an exception, or a bailout
  2226. block->slotDeadStoreCandidates->ClearAll();
  2227. }
  2228. if (this->DoFieldHoistCandidates())
  2229. {
  2230. this->ProcessFieldHoistKills(instr);
  2231. }
  2232. TrackIntUsage(instr);
  2233. TrackBitWiseOrNumberOp(instr);
  2234. TrackFloatSymEquivalence(instr);
  2235. }
  2236. opnd = instr->GetSrc1();
  2237. if (opnd != nullptr)
  2238. {
  2239. this->ProcessUse(opnd);
  2240. opnd = instr->GetSrc2();
  2241. if (opnd != nullptr)
  2242. {
  2243. this->ProcessUse(opnd);
  2244. }
  2245. }
  2246. if(IsCollectionPass())
  2247. {
  2248. continue;
  2249. }
  2250. if (this->tag == Js::DeadStorePhase)
  2251. {
  2252. switch(instr->m_opcode)
  2253. {
  2254. case Js::OpCode::InlineArrayPush:
  2255. case Js::OpCode::InlineArrayPop:
  2256. {
  2257. IR::Opnd *const thisOpnd = instr->GetSrc1();
  2258. if(thisOpnd && thisOpnd->IsRegOpnd())
  2259. {
  2260. IR::RegOpnd *const thisRegOpnd = thisOpnd->AsRegOpnd();
  2261. if(thisRegOpnd->IsArrayRegOpnd())
  2262. {
  2263. // Process the array use at the point of the array built-in call, since the array will actually
  2264. // be used at the call, not at the ArgOut_A_InlineBuiltIn
  2265. ProcessArrayRegOpndUse(instr, thisRegOpnd->AsArrayRegOpnd());
  2266. }
  2267. }
  2268. }
  2269. #if !INT32VAR // the following is not valid on 64-bit platforms
  2270. case Js::OpCode::BoundCheck:
  2271. {
  2272. if(IsPrePass())
  2273. {
  2274. break;
  2275. }
  2276. // Look for:
  2277. // BoundCheck 0 <= s1
  2278. // BoundCheck s1 <= s2 + c, where c == 0 || c == -1
  2279. //
  2280. // And change it to:
  2281. // UnsignedBoundCheck s1 <= s2 + c
  2282. //
  2283. // The BoundCheck instruction is a signed operation, so any unsigned operand used in the instruction must be
  2284. // guaranteed to be >= 0 and <= int32 max when its value is interpreted as signed. Due to the restricted
  2285. // range of s2 above, by using an unsigned comparison instead, the negative check on s1 will also be
  2286. // covered.
  2287. //
  2288. // A BoundCheck instruction takes the form (src1 <= src2 + dst).
  2289. // Check the current instruction's pattern for:
  2290. // BoundCheck s1 <= s2 + c, where c <= 0
  2291. if(!instr->GetSrc1()->IsRegOpnd() ||
  2292. !instr->GetSrc1()->IsInt32() ||
  2293. !instr->GetSrc2() ||
  2294. instr->GetSrc2()->IsIntConstOpnd())
  2295. {
  2296. break;
  2297. }
  2298. if(instr->GetDst())
  2299. {
  2300. const int c = instr->GetDst()->AsIntConstOpnd()->GetValue();
  2301. if(c != 0 && c != -1)
  2302. {
  2303. break;
  2304. }
  2305. }
  2306. // Check the previous instruction's pattern for:
  2307. // BoundCheck 0 <= s1
  2308. IR::Instr *const lowerBoundCheck = instr->m_prev;
  2309. if(lowerBoundCheck->m_opcode != Js::OpCode::BoundCheck ||
  2310. !lowerBoundCheck->GetSrc1()->IsIntConstOpnd() ||
  2311. lowerBoundCheck->GetSrc1()->AsIntConstOpnd()->GetValue() != 0 ||
  2312. !lowerBoundCheck->GetSrc2() ||
  2313. !instr->GetSrc1()->AsRegOpnd()->IsEqual(lowerBoundCheck->GetSrc2()) ||
  2314. lowerBoundCheck->GetDst() && lowerBoundCheck->GetDst()->AsIntConstOpnd()->GetValue() != 0)
  2315. {
  2316. break;
  2317. }
  2318. // Remove the previous lower bound check, and change the current upper bound check to:
  2319. // UnsignedBoundCheck s1 <= s2 + c
  2320. instr->m_opcode = Js::OpCode::UnsignedBoundCheck;
  2321. currentBlock->RemoveInstr(lowerBoundCheck);
  2322. instrPrev = instr->m_prev;
  2323. break;
  2324. }
  2325. #endif
  2326. }
  2327. DeadStoreTypeCheckBailOut(instr);
  2328. DeadStoreImplicitCallBailOut(instr, hasLiveFields);
  2329. if (block->stackSymToFinalType != nullptr)
  2330. {
  2331. this->InsertTypeTransitionsAtPotentialKills();
  2332. }
  2333. // NoImplicitCallUses transfers need to be processed after determining whether implicit calls need to be disabled
  2334. // for the current instruction, because the instruction where the def occurs also needs implicit calls disabled.
  2335. // Array value type for the destination needs to be updated before transfers have been processed by
  2336. // ProcessNoImplicitCallDef, and array value types for sources need to be updated after transfers have been
  2337. // processed by ProcessNoImplicitCallDef, as it requires the no-implicit-call tracking bit-vectors to be precise at
  2338. // the point of the update.
  2339. if(!IsPrePass())
  2340. {
  2341. UpdateArrayValueTypes(instr, instr->GetDst());
  2342. }
  2343. ProcessNoImplicitCallDef(instr);
  2344. if(!IsPrePass())
  2345. {
  2346. UpdateArrayValueTypes(instr, instr->GetSrc1());
  2347. UpdateArrayValueTypes(instr, instr->GetSrc2());
  2348. }
  2349. }
  2350. else
  2351. {
  2352. switch (instr->m_opcode)
  2353. {
  2354. case Js::OpCode::BailOnNoProfile:
  2355. {
  2356. this->ProcessBailOnNoProfile(instr, block);
  2357. // this call could change the last instr of the previous block... Adjust instrStop.
  2358. instrStop = block->GetFirstInstr()->m_prev;
  2359. Assert(this->tag != Js::DeadStorePhase);
  2360. continue;
  2361. }
  2362. case Js::OpCode::Catch:
  2363. {
  2364. if (this->func->DoOptimizeTryCatch() && !this->IsPrePass())
  2365. {
  2366. // Execute the "Catch" in the JIT'ed code, and bailout to the next instruction. This way, the bailout will restore the exception object automatically.
  2367. IR::BailOutInstr* bailOnException = IR::BailOutInstr::New(Js::OpCode::BailOnException, IR::BailOutOnException, instr->m_next, instr->m_func);
  2368. instr->InsertAfter(bailOnException);
  2369. Assert(instr->GetDst()->IsRegOpnd() && instr->GetDst()->GetStackSym()->HasByteCodeRegSlot());
  2370. StackSym * exceptionObjSym = instr->GetDst()->GetStackSym();
  2371. Assert(instr->m_prev->IsLabelInstr() && (instr->m_prev->AsLabelInstr()->GetRegion()->GetType() == RegionTypeCatch));
  2372. instr->m_prev->AsLabelInstr()->GetRegion()->SetExceptionObjectSym(exceptionObjSym);
  2373. }
  2374. break;
  2375. }
  2376. }
  2377. }
  2378. if (instr->m_opcode == Js::OpCode::InlineeEnd)
  2379. {
  2380. this->ProcessInlineeEnd(instr);
  2381. }
  2382. if (instr->IsLabelInstr() && instr->m_next->m_opcode == Js::OpCode::Catch)
  2383. {
  2384. if (!this->currentRegion)
  2385. {
  2386. Assert(!this->func->DoOptimizeTryCatch() && !(this->func->IsSimpleJit() && this->func->hasBailout));
  2387. }
  2388. else
  2389. {
  2390. Assert(this->currentRegion->GetType() == RegionTypeCatch);
  2391. Region * matchingTryRegion = this->currentRegion->GetMatchingTryRegion();
  2392. Assert(matchingTryRegion);
  2393. // We need live-on-back-edge info to accurately set write-through symbols for try-catches in a loop.
  2394. // Don't set write-through symbols in pre-pass
  2395. if (!this->IsPrePass() && !matchingTryRegion->writeThroughSymbolsSet)
  2396. {
  2397. if (this->tag == Js::DeadStorePhase)
  2398. {
  2399. Assert(!this->func->DoGlobOpt());
  2400. }
  2401. // FullJit: Write-through symbols info must be populated in the backward pass as
  2402. // 1. the forward pass needs it to insert ToVars.
  2403. // 2. the deadstore pass needs it to not clear such symbols from the
  2404. // byteCodeUpwardExposedUsed BV upon a def in the try region. This is required
  2405. // because any bailout in the try region needs to restore all write-through
  2406. // symbols.
  2407. // SimpleJit: Won't run the initial backward pass, but write-through symbols info is still
  2408. // needed in the deadstore pass for <2> above.
  2409. this->SetWriteThroughSymbolsSetForRegion(this->currentBlock, matchingTryRegion);
  2410. }
  2411. }
  2412. }
  2413. #if DBG
  2414. if (instr->m_opcode == Js::OpCode::TryCatch)
  2415. {
  2416. if (!this->IsPrePass() && (this->func->DoOptimizeTryCatch() || (this->func->IsSimpleJit() && this->func->hasBailout)))
  2417. {
  2418. Assert(instr->m_next->IsLabelInstr() && (instr->m_next->AsLabelInstr()->GetRegion() != nullptr));
  2419. Region * tryRegion = instr->m_next->AsLabelInstr()->GetRegion();
  2420. Assert(tryRegion->writeThroughSymbolsSet);
  2421. }
  2422. }
  2423. #endif
  2424. ProcessPendingPreOpBailOutInfo(instr);
  2425. #if DBG_DUMP
  2426. if (!IsCollectionPass() && IsTraceEnabled() && Js::Configuration::Global.flags.Verbose)
  2427. {
  2428. Output::Print(L"-------------------\n");
  2429. instr->Dump();
  2430. Output::SkipToColumn(10);
  2431. Output::Print(L" Exposed Use: ");
  2432. block->upwardExposedUses->Dump();
  2433. Output::SkipToColumn(10);
  2434. Output::Print(L"Exposed Fields: ");
  2435. block->upwardExposedFields->Dump();
  2436. if (block->byteCodeUpwardExposedUsed)
  2437. {
  2438. Output::SkipToColumn(10);
  2439. Output::Print(L" Byte Code Use: ");
  2440. block->byteCodeUpwardExposedUsed->Dump();
  2441. }
  2442. Output::Print(L"<<<<<<<<<<<<<<<<<<<<<< %s: Instr End\n", tag == Js::BackwardPhase? L"BACKWARD" : L"DEADSTORE");
  2443. }
  2444. #endif
  2445. }
  2446. NEXT_INSTR_BACKWARD_IN_BLOCK_EDITING;
  2447. EndIntOverflowDoesNotMatterRange();
  2448. if (this->DoFieldHoistCandidates() && !block->isDead && block->isLoopHeader)
  2449. {
  2450. Assert(block->loop->fieldHoistCandidates == nullptr);
  2451. block->loop->fieldHoistCandidates = block->fieldHoistCandidates->CopyNew(this->func->m_alloc);
  2452. }
  2453. if (!this->IsPrePass() && !block->isDead && block->isLoopHeader)
  2454. {
  2455. // Copy the upward exposed use as the live on back edge regs
  2456. block->loop->regAlloc.liveOnBackEdgeSyms = block->upwardExposedUses->CopyNew(this->func->m_alloc);
  2457. }
  2458. Assert(!considerSymAsRealUseInNoImplicitCallUses);
  2459. #if DBG_DUMP
  2460. if (this->IsTraceEnabled())
  2461. {
  2462. Output::Print(L"******************************* After Process Block *******************************n");
  2463. DumpBlockData(block);
  2464. }
  2465. #endif
  2466. }
  2467. #if DBG_DUMP
// Dumps the backward-pass data-flow summary for a basic block: upward-exposed
// uses, types needing known object layout, field-hoist candidates, byte-code
// exposed uses, and (outside the collection pass, for live blocks) slot
// dead-store candidates and mark-temp info. Debug tracing only (DBG_DUMP).
void
BackwardPass::DumpBlockData(BasicBlock * block)
{
    block->DumpHeader();
    if (block->upwardExposedUses) // may be null for dead blocks
    {
        Output::Print(L" Exposed Uses: ");
        block->upwardExposedUses->Dump();
    }
    if (block->typesNeedingKnownObjectLayout)
    {
        Output::Print(L" Needs Known Object Layout: ");
        block->typesNeedingKnownObjectLayout->Dump();
    }
    // Field-hoist candidates are only allocated when the phase computes them
    // and the block is live.
    if (this->DoFieldHoistCandidates() && !block->isDead)
    {
        Output::Print(L" Exposed Field: ");
        block->fieldHoistCandidates->Dump();
    }
    if (block->byteCodeUpwardExposedUsed)
    {
        Output::Print(L" Byte Code Exposed Uses: ");
        block->byteCodeUpwardExposedUsed->Dump();
    }
    if (!this->IsCollectionPass())
    {
        if (!block->isDead)
        {
            if (this->DoDeadStoreSlots())
            {
                Output::Print(L"Slot deadStore candidates: ");
                block->slotDeadStoreCandidates->Dump();
            }
            DumpMarkTemp();
        }
    }
    Output::Flush();
}
  2506. #endif
  2507. bool
  2508. BackwardPass::UpdateImplicitCallBailOutKind(IR::Instr *const instr, bool needsBailOutOnImplicitCall)
  2509. {
  2510. Assert(instr);
  2511. Assert(instr->HasBailOutInfo());
  2512. IR::BailOutKind implicitCallBailOutKind = needsBailOutOnImplicitCall ? IR::BailOutOnImplicitCalls : IR::BailOutInvalid;
  2513. const IR::BailOutKind instrBailOutKind = instr->GetBailOutKind();
  2514. if (instrBailOutKind & IR::BailOutMarkTempObject)
  2515. {
  2516. // Don't remove the implicit call pre op bailout for mark temp object
  2517. // Remove the mark temp object bit, as we don't need it after the dead store pass
  2518. instr->SetBailOutKind(instrBailOutKind & ~IR::BailOutMarkTempObject);
  2519. return true;
  2520. }
  2521. const IR::BailOutKind instrImplicitCallBailOutKind = instrBailOutKind & ~IR::BailOutKindBits;
  2522. if(instrImplicitCallBailOutKind == IR::BailOutOnImplicitCallsPreOp)
  2523. {
  2524. if(needsBailOutOnImplicitCall)
  2525. {
  2526. implicitCallBailOutKind = IR::BailOutOnImplicitCallsPreOp;
  2527. }
  2528. }
  2529. else if(instrImplicitCallBailOutKind != IR::BailOutOnImplicitCalls && instrImplicitCallBailOutKind != IR::BailOutInvalid)
  2530. {
  2531. // This bailout kind (the value of 'instrImplicitCallBailOutKind') must guarantee that implicit calls will not happen.
  2532. // If it doesn't make such a guarantee, it must be possible to merge this bailout kind with an implicit call bailout
  2533. // kind, and therefore should be part of BailOutKindBits.
  2534. Assert(!needsBailOutOnImplicitCall);
  2535. return true;
  2536. }
  2537. if(instrImplicitCallBailOutKind == implicitCallBailOutKind)
  2538. {
  2539. return true;
  2540. }
  2541. const IR::BailOutKind newBailOutKind = instrBailOutKind - instrImplicitCallBailOutKind + implicitCallBailOutKind;
  2542. if(newBailOutKind == IR::BailOutInvalid)
  2543. {
  2544. return false;
  2545. }
  2546. instr->SetBailOutKind(newBailOutKind);
  2547. return true;
  2548. }
// Handles a NoImplicitCallUses pseudo-instruction: records each source sym in
// the current block's no-implicit-call tracking bit-vectors (plus the
// no-missing-values / native-array refinements for definite JS arrays), and
// removes the instruction on the final (non-pre) pass since it has no runtime
// effect. Returns true iff 'instr' was a NoImplicitCallUses instruction and
// was fully handled here.
bool
BackwardPass::ProcessNoImplicitCallUses(IR::Instr *const instr)
{
    Assert(instr);
    if (instr->m_opcode != Js::OpCode::NoImplicitCallUses)
    {
        return false;
    }
    Assert(tag == Js::DeadStorePhase);
    Assert(!instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsSymOpnd());
    Assert(!instr->GetSrc2() || instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsSymOpnd());

    if (IsCollectionPass())
    {
        return true;
    }

    // Iterate the (up to two) sources; the loop stops at the first null source.
    IR::Opnd *const srcs[] = { instr->GetSrc1(), instr->GetSrc2() };
    for (int i = 0; i < sizeof(srcs) / sizeof(srcs[0]) && srcs[i]; ++i)
    {
        IR::Opnd *const src = srcs[i];
        IR::ArrayRegOpnd *arraySrc = nullptr;
        Sym *sym;
        switch (src->GetKind())
        {
            case IR::OpndKindReg:
            {
                IR::RegOpnd *const regSrc = src->AsRegOpnd();
                sym = regSrc->m_sym;
                // ProcessArrayRegOpndUse may have flagged this sym (a typed
                // array's head segment length) to be treated as a real use at
                // its appearance in this instruction.
                if (considerSymAsRealUseInNoImplicitCallUses && considerSymAsRealUseInNoImplicitCallUses == sym)
                {
                    considerSymAsRealUseInNoImplicitCallUses = nullptr;
                    ProcessStackSymUse(sym->AsStackSym(), true);
                }
                if (regSrc->IsArrayRegOpnd())
                {
                    arraySrc = regSrc->AsArrayRegOpnd();
                }
                break;
            }

            case IR::OpndKindSym:
                sym = src->AsSymOpnd()->m_sym;
                Assert(sym->IsPropertySym());
                break;

            default:
                Assert(false);
                __assume(false);
        }

        currentBlock->noImplicitCallUses->Set(sym->m_id);

        // For definite JS arrays, also record the value-type refinements that
        // depend on implicit calls being disabled.
        const ValueType valueType(src->GetValueType());
        if (valueType.IsArrayOrObjectWithArray())
        {
            if (valueType.HasNoMissingValues())
            {
                currentBlock->noImplicitCallNoMissingValuesUses->Set(sym->m_id);
            }
            if (!valueType.HasVarElements())
            {
                currentBlock->noImplicitCallNativeArrayUses->Set(sym->m_id);
            }
            if (arraySrc)
            {
                ProcessArrayRegOpndUse(instr, arraySrc);
            }
        }
    }

    if (!IsPrePass())
    {
        // The instruction exists only for tracking; drop it in the real pass.
        currentBlock->RemoveInstr(instr);
    }
    return true;
}
// Transfers no-implicit-call use tracking backwards across a def. If the
// instruction's dst sym carries no-implicit-call marks, they are cleared from
// the dst sym and, when the instruction merely transfers its single source's
// value (reg-to-reg transfer, LdFld-like, or StFld-like for definite arrays),
// re-established on the src sym so the tracking follows the value to its def.
void
BackwardPass::ProcessNoImplicitCallDef(IR::Instr *const instr)
{
    Assert(tag == Js::DeadStorePhase);
    Assert(instr);

    IR::Opnd *const dst = instr->GetDst();
    if (!dst)
    {
        return;
    }

    // Only reg defs and property-sym defs participate in the tracking.
    Sym *dstSym;
    switch (dst->GetKind())
    {
        case IR::OpndKindReg:
            dstSym = dst->AsRegOpnd()->m_sym;
            break;

        case IR::OpndKindSym:
            dstSym = dst->AsSymOpnd()->m_sym;
            if (!dstSym->IsPropertySym())
            {
                return;
            }
            break;

        default:
            return;
    }

    if (!currentBlock->noImplicitCallUses->TestAndClear(dstSym->m_id))
    {
        // The base bit was not set, so none of the refinement bits may be set.
        Assert(!currentBlock->noImplicitCallNoMissingValuesUses->Test(dstSym->m_id));
        Assert(!currentBlock->noImplicitCallNativeArrayUses->Test(dstSym->m_id));
        Assert(!currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->Test(dstSym->m_id));
        Assert(!currentBlock->noImplicitCallArrayLengthSymUses->Test(dstSym->m_id));
        return;
    }

    // Record which refinement marks were present on the dst sym. They are
    // always cleared here (the def kills them), and transferred to the src sym
    // below only when the instruction is a recognized transfer.
    const bool transferNoMissingValuesUse = !!currentBlock->noImplicitCallNoMissingValuesUses->TestAndClear(dstSym->m_id);
    const bool transferNativeArrayUse = !!currentBlock->noImplicitCallNativeArrayUses->TestAndClear(dstSym->m_id);
    const bool transferJsArrayHeadSegmentSymUse =
        !!currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->TestAndClear(dstSym->m_id);
    const bool transferArrayLengthSymUse = !!currentBlock->noImplicitCallArrayLengthSymUses->TestAndClear(dstSym->m_id);

    // Only single-source instructions can be simple value transfers.
    IR::Opnd *const src = instr->GetSrc1();
    if (!src || instr->GetSrc2())
    {
        return;
    }

    if (dst->IsRegOpnd() && src->IsRegOpnd())
    {
        if (!OpCodeAttr::NonIntTransfer(instr->m_opcode))
        {
            return;
        }
    }
    else if (
        !(
            // LdFld or similar
            dst->IsRegOpnd() && src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym() ||
            // StFld or similar. Don't transfer a field opnd from StFld into the reg opnd src unless the field's value type is
            // definitely array or object with array, because only those value types require implicit calls to be disabled as
            // long as they are live. Other definite value types only require implicit calls to be disabled as long as a live
            // field holds the value, which is up to the StFld when going backwards.
            src->IsRegOpnd() && dst->GetValueType().IsArrayOrObjectWithArray()
        ) ||
        !GlobOpt::TransferSrcValue(instr))
    {
        return;
    }

    Sym *srcSym;
    switch (src->GetKind())
    {
        case IR::OpndKindReg:
            srcSym = src->AsRegOpnd()->m_sym;
            break;

        case IR::OpndKindSym:
            srcSym = src->AsSymOpnd()->m_sym;
            Assert(srcSym->IsPropertySym());
            break;

        default:
            Assert(false);
            __assume(false);
    }

    // Re-establish the marks on the src sym, following the value backwards.
    currentBlock->noImplicitCallUses->Set(srcSym->m_id);
    if (transferNoMissingValuesUse)
    {
        currentBlock->noImplicitCallNoMissingValuesUses->Set(srcSym->m_id);
    }
    if (transferNativeArrayUse)
    {
        currentBlock->noImplicitCallNativeArrayUses->Set(srcSym->m_id);
    }
    if (transferJsArrayHeadSegmentSymUse)
    {
        currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->Set(srcSym->m_id);
    }
    if (transferArrayLengthSymUse)
    {
        currentBlock->noImplicitCallArrayLengthSymUses->Set(srcSym->m_id);
    }
}
  2718. template<class F>
  2719. IR::Opnd *
  2720. BackwardPass::FindNoImplicitCallUse(
  2721. IR::Instr *const instr,
  2722. StackSym *const sym,
  2723. const F IsCheckedUse,
  2724. IR::Instr * *const noImplicitCallUsesInstrRef)
  2725. {
  2726. IR::RegOpnd *const opnd = IR::RegOpnd::New(sym, sym->GetType(), instr->m_func);
  2727. IR::Opnd *const use = FindNoImplicitCallUse(instr, opnd, IsCheckedUse, noImplicitCallUsesInstrRef);
  2728. opnd->FreeInternal(instr->m_func);
  2729. return use;
  2730. }
  2731. template<class F>
  2732. IR::Opnd *
  2733. BackwardPass::FindNoImplicitCallUse(
  2734. IR::Instr *const instr,
  2735. IR::Opnd *const opnd,
  2736. const F IsCheckedUse,
  2737. IR::Instr * *const noImplicitCallUsesInstrRef)
  2738. {
  2739. Assert(instr);
  2740. Assert(instr->m_opcode != Js::OpCode::NoImplicitCallUses);
  2741. // Skip byte-code uses
  2742. IR::Instr *prevInstr = instr->m_prev;
  2743. while(
  2744. prevInstr &&
  2745. !prevInstr->IsLabelInstr() &&
  2746. (!prevInstr->IsRealInstr() || prevInstr->IsByteCodeUsesInstr()) &&
  2747. prevInstr->m_opcode != Js::OpCode::NoImplicitCallUses)
  2748. {
  2749. prevInstr = prevInstr->m_prev;
  2750. }
  2751. // Find the corresponding use in a NoImplicitCallUses instruction
  2752. for(; prevInstr && prevInstr->m_opcode == Js::OpCode::NoImplicitCallUses; prevInstr = prevInstr->m_prev)
  2753. {
  2754. IR::Opnd *const checkedSrcs[] = { prevInstr->GetSrc1(), prevInstr->GetSrc2() };
  2755. for(int i = 0; i < sizeof(checkedSrcs) / sizeof(checkedSrcs[0]) && checkedSrcs[i]; ++i)
  2756. {
  2757. IR::Opnd *const checkedSrc = checkedSrcs[i];
  2758. if(checkedSrc->IsEqual(opnd) && IsCheckedUse(checkedSrc))
  2759. {
  2760. if(noImplicitCallUsesInstrRef)
  2761. {
  2762. *noImplicitCallUsesInstrRef = prevInstr;
  2763. }
  2764. return checkedSrc;
  2765. }
  2766. }
  2767. }
  2768. if(noImplicitCallUsesInstrRef)
  2769. {
  2770. *noImplicitCallUsesInstrRef = nullptr;
  2771. }
  2772. return nullptr;
  2773. }
// Processes the use of an array reg opnd that carries tracked dependent syms
// (head segment, head segment length, and/or length). Two responsibilities:
// - On the real (non-pre) pass, fold an extracted-but-otherwise-unused head
//   segment length load into its upper bound check, and strip head segment /
//   head segment length syms whose loads have no forward uses (letting the
//   fast path load them instead).
// - Mark the remaining dependent syms as uses (and as no-implicit-call uses
//   where required) so they stay live while the assumptions based on them hold.
void
BackwardPass::ProcessArrayRegOpndUse(IR::Instr *const instr, IR::ArrayRegOpnd *const arrayRegOpnd)
{
    Assert(tag == Js::DeadStorePhase);
    Assert(!IsCollectionPass());
    Assert(instr);
    Assert(arrayRegOpnd);

    if (!(arrayRegOpnd->HeadSegmentSym() || arrayRegOpnd->HeadSegmentLengthSym() || arrayRegOpnd->LengthSym()))
    {
        // No dependent syms are tracked for this array; nothing to do.
        return;
    }

    const ValueType arrayValueType(arrayRegOpnd->GetValueType());
    const bool isJsArray = !arrayValueType.IsLikelyTypedArray();
    Assert(isJsArray == arrayValueType.IsArrayOrObjectWithArray());
    Assert(!isJsArray == arrayValueType.IsOptimizedTypedArray());

    BasicBlock *const block = currentBlock;
    if (!IsPrePass() &&
        (arrayRegOpnd->HeadSegmentSym() || arrayRegOpnd->HeadSegmentLengthSym()) &&
        (!isJsArray || instr->m_opcode != Js::OpCode::NoImplicitCallUses))
    {
        // Determine whether the head segment / head segment length were loaded
        // for this access but have no uses downstream of the instruction.
        bool headSegmentIsLoadedButUnused =
            instr->loadedArrayHeadSegment &&
            arrayRegOpnd->HeadSegmentSym() &&
            !block->upwardExposedUses->Test(arrayRegOpnd->HeadSegmentSym()->m_id);
        const bool headSegmentLengthIsLoadedButUnused =
            instr->loadedArrayHeadSegmentLength &&
            arrayRegOpnd->HeadSegmentLengthSym() &&
            !block->upwardExposedUses->Test(arrayRegOpnd->HeadSegmentLengthSym()->m_id);

        if (headSegmentLengthIsLoadedButUnused && instr->extractedUpperBoundCheckWithoutHoisting)
        {
            // Find the upper bound check (index[src1] <= headSegmentLength[src2] + offset[dst])
            IR::Instr *upperBoundCheck = this->globOpt->FindUpperBoundsCheckInstr(instr);
            Assert(upperBoundCheck && upperBoundCheck != instr);
            Assert(upperBoundCheck->GetSrc2()->AsRegOpnd()->m_sym == arrayRegOpnd->HeadSegmentLengthSym());

            // Find the head segment length load
            IR::Instr *headSegmentLengthLoad = this->globOpt->FindArraySegmentLoadInstr(upperBoundCheck);
            Assert(headSegmentLengthLoad->GetDst()->AsRegOpnd()->m_sym == arrayRegOpnd->HeadSegmentLengthSym());
            Assert(
                headSegmentLengthLoad->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->m_sym ==
                (isJsArray ? arrayRegOpnd->HeadSegmentSym() : arrayRegOpnd->m_sym));

            // Fold the head segment length load into the upper bound check. Keep the load instruction there with a Nop so that
            // the head segment length sym can be marked as unused before the Nop. The lowerer will remove it.
            upperBoundCheck->ReplaceSrc2(headSegmentLengthLoad->UnlinkSrc1());
            headSegmentLengthLoad->m_opcode = Js::OpCode::Nop;

            if (isJsArray)
            {
                // The head segment length is on the head segment, so the bound check now uses the head segment sym
                headSegmentIsLoadedButUnused = false;
            }
        }

        if (headSegmentIsLoadedButUnused || headSegmentLengthIsLoadedButUnused)
        {
            // Check if the head segment / head segment length are being loaded here. If so, remove them and let the fast
            // path load them since it does a better job.
            IR::ArrayRegOpnd *noImplicitCallArrayUse = nullptr;
            if (isJsArray)
            {
                // Find the matching array use in a preceding NoImplicitCallUses
                // instruction; its dependent syms must be removed along with
                // this opnd's (see the Remove* calls below).
                IR::Opnd *const use =
                    FindNoImplicitCallUse(
                        instr,
                        arrayRegOpnd,
                        [&](IR::Opnd *const checkedSrc) -> bool
                        {
                            // The checked use must be the same kind of object
                            // and carry the same dependent syms being removed.
                            const ValueType checkedSrcValueType(checkedSrc->GetValueType());
                            if (!checkedSrcValueType.IsLikelyObject() ||
                                checkedSrcValueType.GetObjectType() != arrayValueType.GetObjectType())
                            {
                                return false;
                            }
                            IR::RegOpnd *const checkedRegSrc = checkedSrc->AsRegOpnd();
                            if (!checkedRegSrc->IsArrayRegOpnd())
                            {
                                return false;
                            }
                            IR::ArrayRegOpnd *const checkedArraySrc = checkedRegSrc->AsArrayRegOpnd();
                            if (headSegmentIsLoadedButUnused &&
                                checkedArraySrc->HeadSegmentSym() != arrayRegOpnd->HeadSegmentSym())
                            {
                                return false;
                            }
                            if (headSegmentLengthIsLoadedButUnused &&
                                checkedArraySrc->HeadSegmentLengthSym() != arrayRegOpnd->HeadSegmentLengthSym())
                            {
                                return false;
                            }
                            return true;
                        });
                if (use)
                {
                    noImplicitCallArrayUse = use->AsRegOpnd()->AsArrayRegOpnd();
                }
            }
            else if (headSegmentLengthIsLoadedButUnused)
            {
                // A typed array's head segment length may be zeroed when the typed array's buffer is transferred to a web
                // worker, so the head segment length sym use is included in a NoImplicitCallUses instruction. Since there
                // are no forward uses of the head segment length sym, to allow removing the extracted head segment length
                // load, the corresponding head segment length sym use in the NoImplicitCallUses instruction must also be
                // removed.
                IR::Instr *noImplicitCallUsesInstr;
                IR::Opnd *const use =
                    FindNoImplicitCallUse(
                        instr,
                        arrayRegOpnd->HeadSegmentLengthSym(),
                        [&](IR::Opnd *const checkedSrc) -> bool
                        {
                            return checkedSrc->AsRegOpnd()->m_sym == arrayRegOpnd->HeadSegmentLengthSym();
                        },
                        &noImplicitCallUsesInstr);
                if (use)
                {
                    Assert(noImplicitCallUsesInstr);
                    Assert(!noImplicitCallUsesInstr->GetDst());
                    Assert(noImplicitCallUsesInstr->GetSrc1());
                    if (use == noImplicitCallUsesInstr->GetSrc1())
                    {
                        if (noImplicitCallUsesInstr->GetSrc2())
                        {
                            // Shift src2 into src1's place so the instruction stays well-formed.
                            noImplicitCallUsesInstr->ReplaceSrc1(noImplicitCallUsesInstr->UnlinkSrc2());
                        }
                        else
                        {
                            // No sources remain; neutralize the instruction.
                            noImplicitCallUsesInstr->FreeSrc1();
                            noImplicitCallUsesInstr->m_opcode = Js::OpCode::Nop;
                        }
                    }
                    else
                    {
                        Assert(use == noImplicitCallUsesInstr->GetSrc2());
                        noImplicitCallUsesInstr->FreeSrc2();
                    }
                }
            }

            if (headSegmentIsLoadedButUnused &&
                (!isJsArray || !arrayRegOpnd->HeadSegmentLengthSym() || headSegmentLengthIsLoadedButUnused))
            {
                // For JS arrays, the head segment length load is dependent on the head segment. So, only remove the head
                // segment load if the head segment length load can also be removed.
                arrayRegOpnd->RemoveHeadSegmentSym();
                instr->loadedArrayHeadSegment = false;
                if (noImplicitCallArrayUse)
                {
                    noImplicitCallArrayUse->RemoveHeadSegmentSym();
                }
            }
            if (headSegmentLengthIsLoadedButUnused)
            {
                arrayRegOpnd->RemoveHeadSegmentLengthSym();
                instr->loadedArrayHeadSegmentLength = false;
                if (noImplicitCallArrayUse)
                {
                    noImplicitCallArrayUse->RemoveHeadSegmentLengthSym();
                }
            }
        }
    }

    if (isJsArray && instr->m_opcode != Js::OpCode::NoImplicitCallUses)
    {
        // Only uses in NoImplicitCallUses instructions are counted toward liveness
        return;
    }

    // Treat dependent syms as uses. For JS arrays, only uses in NoImplicitCallUses count because only then the assumptions made
    // on the dependent syms are guaranteed to be valid. Similarly for typed arrays, a head segment length sym use counts toward
    // liveness only in a NoImplicitCallUses instruction.
    if (arrayRegOpnd->HeadSegmentSym())
    {
        ProcessStackSymUse(arrayRegOpnd->HeadSegmentSym(), true);
        if (isJsArray)
        {
            block->noImplicitCallUses->Set(arrayRegOpnd->HeadSegmentSym()->m_id);
            block->noImplicitCallJsArrayHeadSegmentSymUses->Set(arrayRegOpnd->HeadSegmentSym()->m_id);
        }
    }
    if (arrayRegOpnd->HeadSegmentLengthSym())
    {
        if (isJsArray)
        {
            ProcessStackSymUse(arrayRegOpnd->HeadSegmentLengthSym(), true);
            block->noImplicitCallUses->Set(arrayRegOpnd->HeadSegmentLengthSym()->m_id);
            block->noImplicitCallJsArrayHeadSegmentSymUses->Set(arrayRegOpnd->HeadSegmentLengthSym()->m_id);
        }
        else
        {
            // ProcessNoImplicitCallUses automatically marks JS array reg opnds and their corresponding syms as live. A typed
            // array's head segment length sym also needs to be marked as live at its use in the NoImplicitCallUses instruction,
            // but it is just in a reg opnd. Flag the opnd to have the sym be marked as live when that instruction is processed.
            Assert(!considerSymAsRealUseInNoImplicitCallUses);
            IR::Opnd *const use =
                FindNoImplicitCallUse(
                    instr,
                    arrayRegOpnd->HeadSegmentLengthSym(),
                    [&](IR::Opnd *const checkedSrc) -> bool
                    {
                        return checkedSrc->AsRegOpnd()->m_sym == arrayRegOpnd->HeadSegmentLengthSym();
                    });
            if (use)
            {
                considerSymAsRealUseInNoImplicitCallUses = arrayRegOpnd->HeadSegmentLengthSym();
            }
        }
    }
    StackSym *const lengthSym = arrayRegOpnd->LengthSym();
    if (lengthSym && lengthSym != arrayRegOpnd->HeadSegmentLengthSym())
    {
        ProcessStackSymUse(lengthSym, true);
        Assert(arrayValueType.IsArray());
        block->noImplicitCallUses->Set(lengthSym->m_id);
        block->noImplicitCallArrayLengthSymUses->Set(lengthSym->m_id);
    }
}
// Dead-store-phase handling of a NewScObject with a ctor-guard bail-out:
// - If the created object is used downstream, perform any pending final-type
//   transition right after the instruction and transfer pending guarded
//   property operations into the constructor cache.
// - Otherwise, drop the bail-out so the lowerer can emit a fast path with a
//   helper fallback in case the ctor cache is invalidated.
void
BackwardPass::ProcessNewScObject(IR::Instr* instr)
{
    if (this->tag != Js::DeadStorePhase || IsCollectionPass())
    {
        return;
    }

    if (!instr->IsNewScObjectInstr())
    {
        return;
    }

    if (instr->HasBailOutInfo())
    {
        Assert(instr->IsProfiledInstr());
        Assert(instr->GetBailOutKind() == IR::BailOutFailedCtorGuardCheck);
        Assert(instr->GetDst()->IsRegOpnd());

        BasicBlock * block = this->currentBlock;
        StackSym* objSym = instr->GetDst()->AsRegOpnd()->GetStackSym();

        if (block->upwardExposedUses->Test(objSym->m_id))
        {
            // If the object created here is used downstream, let's capture any property operations we must protect.

            Assert(instr->GetDst()->AsRegOpnd()->GetStackSym()->HasObjectTypeSym());

            Js::JitTimeConstructorCache* ctorCache = instr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId));

            if (block->stackSymToFinalType != nullptr)
            {
                // NewScObject is the origin of the object pointer. If we have a final type in hand, do the
                // transition here.
                AddPropertyCacheBucket *pBucket = block->stackSymToFinalType->Get(objSym->m_id);
                if (pBucket &&
                    pBucket->GetInitialType() != nullptr &&
                    pBucket->GetFinalType() != pBucket->GetInitialType())
                {
                    Assert(pBucket->GetInitialType() == ctorCache->type);
                    if (!this->IsPrePass())
                    {
                        this->InsertTypeTransition(instr->m_next, objSym, pBucket);
                    }
#if DBG
                    // In debug builds, keep the (cleared) bucket around and record the
                    // types that became unavailable here, for later assertions.
                    pBucket->deadStoreUnavailableInitialType = pBucket->GetInitialType();
                    if (pBucket->deadStoreUnavailableFinalType == nullptr)
                    {
                        pBucket->deadStoreUnavailableFinalType = pBucket->GetFinalType();
                    }
                    pBucket->SetInitialType(nullptr);
                    pBucket->SetFinalType(nullptr);
#else
                    block->stackSymToFinalType->Clear(objSym->m_id);
#endif
                }
            }

            if (block->stackSymToGuardedProperties != nullptr)
            {
                ObjTypeGuardBucket* bucket = block->stackSymToGuardedProperties->Get(objSym->m_id);
                if (bucket != nullptr)
                {
                    BVSparse<JitArenaAllocator>* guardedPropertyOps = bucket->GetGuardedPropertyOps();
                    if (guardedPropertyOps != nullptr)
                    {
                        // Hand the guarded property ops over to the ctor cache (which
                        // lives in the func allocator) and release the temp copy.
                        ctorCache->EnsureGuardedPropOps(this->func->m_alloc);
                        ctorCache->AddGuardedPropOps(guardedPropertyOps);
                        bucket->SetGuardedPropertyOps(nullptr);
                        JitAdelete(this->tempAlloc, guardedPropertyOps);
                        block->stackSymToGuardedProperties->Clear(objSym->m_id);
                    }
                }
            }
        }
        else
        {
            // If the object is not used downstream, let's remove the bailout and let the lowerer emit a fast path along with
            // the fallback on helper, if the ctor cache ever became invalid.
            instr->ClearBailOutInfo();
            if (preOpBailOutInstrToProcess == instr)
            {
                preOpBailOutInstrToProcess = nullptr;
            }

#if DBG
            // We're creating a brand new object here, so no type check upstream could protect any properties of this
            // object. Let's make sure we don't have any left to protect.
            ObjTypeGuardBucket* bucket = block->stackSymToGuardedProperties != nullptr ?
                block->stackSymToGuardedProperties->Get(objSym->m_id) : nullptr;
            Assert(bucket == nullptr || bucket->GetGuardedPropertyOps()->IsEmpty());
#endif
        }
    }
}
  3070. void
  3071. BackwardPass::UpdateArrayValueTypes(IR::Instr *const instr, IR::Opnd *origOpnd)
  3072. {
  3073. Assert(tag == Js::DeadStorePhase);
  3074. Assert(!IsPrePass());
  3075. Assert(instr);
  3076. if(!origOpnd)
  3077. {
  3078. return;
  3079. }
  3080. IR::Instr *opndOwnerInstr = instr;
  3081. switch(instr->m_opcode)
  3082. {
  3083. case Js::OpCode::StElemC:
  3084. case Js::OpCode::StArrSegElemC:
  3085. // These may not be fixed if we are unsure about the type of the array they're storing to
  3086. // (because it relies on profile data) and we weren't able to hoist the array check.
  3087. return;
  3088. }
  3089. Sym *sym;
  3090. IR::Opnd* opnd = origOpnd;
  3091. IR::ArrayRegOpnd *arrayOpnd;
  3092. switch(opnd->GetKind())
  3093. {
  3094. case IR::OpndKindIndir:
  3095. opnd = opnd->AsIndirOpnd()->GetBaseOpnd();
  3096. // fall-through
  3097. case IR::OpndKindReg:
  3098. {
  3099. IR::RegOpnd *const regOpnd = opnd->AsRegOpnd();
  3100. sym = regOpnd->m_sym;
  3101. arrayOpnd = regOpnd->IsArrayRegOpnd() ? regOpnd->AsArrayRegOpnd() : nullptr;
  3102. break;
  3103. }
  3104. case IR::OpndKindSym:
  3105. sym = opnd->AsSymOpnd()->m_sym;
  3106. if(!sym->IsPropertySym())
  3107. {
  3108. return;
  3109. }
  3110. arrayOpnd = nullptr;
  3111. break;
  3112. default:
  3113. return;
  3114. }
  3115. const ValueType valueType(opnd->GetValueType());
  3116. if(!valueType.IsAnyOptimizedArray())
  3117. {
  3118. return;
  3119. }
  3120. const bool isJsArray = valueType.IsArrayOrObjectWithArray();
  3121. Assert(!isJsArray == valueType.IsOptimizedTypedArray());
  3122. const bool noForwardImplicitCallUses = currentBlock->noImplicitCallUses->IsEmpty();
  3123. bool changeArray = isJsArray && !opnd->IsValueTypeFixed() && noForwardImplicitCallUses;
  3124. bool changeNativeArray =
  3125. isJsArray &&
  3126. !opnd->IsValueTypeFixed() &&
  3127. !valueType.HasVarElements() &&
  3128. currentBlock->noImplicitCallNativeArrayUses->IsEmpty();
  3129. bool changeNoMissingValues =
  3130. isJsArray &&
  3131. !opnd->IsValueTypeFixed() &&
  3132. valueType.HasNoMissingValues() &&
  3133. currentBlock->noImplicitCallNoMissingValuesUses->IsEmpty();
  3134. const bool noForwardJsArrayHeadSegmentSymUses = currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->IsEmpty();
  3135. bool removeHeadSegmentSym = isJsArray && arrayOpnd && arrayOpnd->HeadSegmentSym() && noForwardJsArrayHeadSegmentSymUses;
  3136. bool removeHeadSegmentLengthSym =
  3137. arrayOpnd &&
  3138. arrayOpnd->HeadSegmentLengthSym() &&
  3139. (isJsArray ? noForwardJsArrayHeadSegmentSymUses : noForwardImplicitCallUses);
  3140. Assert(!isJsArray || !arrayOpnd || !arrayOpnd->LengthSym() || valueType.IsArray());
  3141. bool removeLengthSym =
  3142. isJsArray &&
  3143. arrayOpnd &&
  3144. arrayOpnd->LengthSym() &&
  3145. currentBlock->noImplicitCallArrayLengthSymUses->IsEmpty();
  3146. if(!(changeArray || changeNoMissingValues || changeNativeArray || removeHeadSegmentSym || removeHeadSegmentLengthSym))
  3147. {
  3148. return;
  3149. }
  3150. // We have a definitely-array value type for the base, but either implicit calls are not currently being disabled for
  3151. // legally using the value type as a definite array, or we are not currently bailing out upon creating a missing value
  3152. // for legally using the value type as a definite array with no missing values.
  3153. // For source opnds, ensure that a NoImplicitCallUses immediately precedes this instruction. Otherwise, convert the value
  3154. // type to an appropriate version so that the lowerer doesn't incorrectly treat it as it says.
  3155. if(opnd != opndOwnerInstr->GetDst())
  3156. {
  3157. if(isJsArray)
  3158. {
  3159. IR::Opnd *const checkedSrc =
  3160. FindNoImplicitCallUse(
  3161. instr,
  3162. opnd,
  3163. [&](IR::Opnd *const checkedSrc) -> bool
  3164. {
  3165. const ValueType checkedSrcValueType(checkedSrc->GetValueType());
  3166. return
  3167. checkedSrcValueType.IsLikelyObject() &&
  3168. checkedSrcValueType.GetObjectType() == valueType.GetObjectType();
  3169. });
  3170. if(checkedSrc)
  3171. {
  3172. // Implicit calls will be disabled to the point immediately before this instruction
  3173. changeArray = false;
  3174. const ValueType checkedSrcValueType(checkedSrc->GetValueType());
  3175. if(changeNativeArray &&
  3176. !checkedSrcValueType.HasVarElements() &&
  3177. checkedSrcValueType.HasIntElements() == valueType.HasIntElements())
  3178. {
  3179. // If necessary, instructions before this will bail out on converting a native array
  3180. changeNativeArray = false;
  3181. }
  3182. if(changeNoMissingValues && checkedSrcValueType.HasNoMissingValues())
  3183. {
  3184. // If necessary, instructions before this will bail out on creating a missing value
  3185. changeNoMissingValues = false;
  3186. }
  3187. if((removeHeadSegmentSym || removeHeadSegmentLengthSym || removeLengthSym) && checkedSrc->IsRegOpnd())
  3188. {
  3189. IR::RegOpnd *const checkedRegSrc = checkedSrc->AsRegOpnd();
  3190. if(checkedRegSrc->IsArrayRegOpnd())
  3191. {
  3192. IR::ArrayRegOpnd *const checkedArraySrc = checkedSrc->AsRegOpnd()->AsArrayRegOpnd();
  3193. if(removeHeadSegmentSym && checkedArraySrc->HeadSegmentSym() == arrayOpnd->HeadSegmentSym())
  3194. {
  3195. // If necessary, instructions before this will bail out upon invalidating head segment sym
  3196. removeHeadSegmentSym = false;
  3197. }
  3198. if(removeHeadSegmentLengthSym &&
  3199. checkedArraySrc->HeadSegmentLengthSym() == arrayOpnd->HeadSegmentLengthSym())
  3200. {
  3201. // If necessary, instructions before this will bail out upon invalidating head segment length sym
  3202. removeHeadSegmentLengthSym = false;
  3203. }
  3204. if(removeLengthSym && checkedArraySrc->LengthSym() == arrayOpnd->LengthSym())
  3205. {
  3206. // If necessary, instructions before this will bail out upon invalidating a length sym
  3207. removeLengthSym = false;
  3208. }
  3209. }
  3210. }
  3211. }
  3212. }
  3213. else
  3214. {
  3215. Assert(removeHeadSegmentLengthSym);
  3216. // A typed array's head segment length may be zeroed when the typed array's buffer is transferred to a web worker,
  3217. // so the head segment length sym use is included in a NoImplicitCallUses instruction. Since there are no forward
  3218. // uses of any head segment length syms, to allow removing the extracted head segment length
  3219. // load, the corresponding head segment length sym use in the NoImplicitCallUses instruction must also be
  3220. // removed.
  3221. IR::Opnd *const use =
  3222. FindNoImplicitCallUse(
  3223. instr,
  3224. arrayOpnd->HeadSegmentLengthSym(),
  3225. [&](IR::Opnd *const checkedSrc) -> bool
  3226. {
  3227. return checkedSrc->AsRegOpnd()->m_sym == arrayOpnd->HeadSegmentLengthSym();
  3228. });
  3229. if(use)
  3230. {
  3231. // Implicit calls will be disabled to the point immediately before this instruction
  3232. removeHeadSegmentLengthSym = false;
  3233. }
  3234. }
  3235. }
  3236. if(changeArray || changeNativeArray)
  3237. {
  3238. if(arrayOpnd)
  3239. {
  3240. opnd = arrayOpnd->CopyAsRegOpnd(opndOwnerInstr->m_func);
  3241. if (origOpnd->IsIndirOpnd())
  3242. {
  3243. origOpnd->AsIndirOpnd()->ReplaceBaseOpnd(opnd->AsRegOpnd());
  3244. }
  3245. else
  3246. {
  3247. opndOwnerInstr->Replace(arrayOpnd, opnd);
  3248. }
  3249. arrayOpnd = nullptr;
  3250. }
  3251. opnd->SetValueType(valueType.ToLikely());
  3252. }
  3253. else
  3254. {
  3255. if(changeNoMissingValues)
  3256. {
  3257. opnd->SetValueType(valueType.SetHasNoMissingValues(false));
  3258. }
  3259. if(removeHeadSegmentSym)
  3260. {
  3261. Assert(arrayOpnd);
  3262. arrayOpnd->RemoveHeadSegmentSym();
  3263. }
  3264. if(removeHeadSegmentLengthSym)
  3265. {
  3266. Assert(arrayOpnd);
  3267. arrayOpnd->RemoveHeadSegmentLengthSym();
  3268. }
  3269. if(removeLengthSym)
  3270. {
  3271. Assert(arrayOpnd);
  3272. arrayOpnd->RemoveLengthSym();
  3273. }
  3274. }
  3275. }
// Dead-store-phase adjustment of the bailout kind on array element stores (StElemI_A,
// StElemI_A_Strict, Memcopy, Memset with an indir dst).
//
// Because the stored-to array may alias another array whose guarantees (native-ness, no missing
// values, head segment / length syms) are still upward-exposed in this block, the store must bail
// out when it would invalidate those guarantees, even if this particular array's guarantees are
// dead. This function ORs the required kinds into the instruction's existing bailout kind.
void
BackwardPass::UpdateArrayBailOutKind(IR::Instr *const instr)
{
    Assert(!IsPrePass());
    Assert(instr);
    Assert(instr->HasBailOutInfo());

    // Note operator precedence: && binds tighter than ||. We return unless the opcode is one of
    // the element-store ops AND the dst is an indir.
    if (instr->m_opcode != Js::OpCode::StElemI_A && instr->m_opcode != Js::OpCode::StElemI_A_Strict &&
        instr->m_opcode != Js::OpCode::Memcopy && instr->m_opcode != Js::OpCode::Memset ||
        !instr->GetDst()->IsIndirOpnd())
    {
        return;
    }

    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    if(baseValueType.IsNotArrayOrObjectWithArray())
    {
        // Definitely not a JS array (or object with array) — none of the kinds below apply.
        return;
    }

    IR::BailOutKind includeBailOutKinds = IR::BailOutInvalid;
    if(!baseValueType.IsNotNativeArray() &&
        (!baseValueType.IsLikelyNativeArray() || instr->GetSrc1()->IsVar()) &&
        !currentBlock->noImplicitCallNativeArrayUses->IsEmpty())
    {
        // There is an upwards-exposed use of a native array. Since the array referenced by this instruction can be aliased,
        // this instruction needs to bail out if it converts the native array even if this array specifically is not
        // upwards-exposed.
        includeBailOutKinds |= IR::BailOutConvertedNativeArray;
    }

    if(baseOpnd->IsArrayRegOpnd() && baseOpnd->AsArrayRegOpnd()->EliminatedUpperBoundCheck())
    {
        if(instr->extractedUpperBoundCheckWithoutHoisting && !currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->IsEmpty())
        {
            // See comment below regarding head segment invalidation. A failed upper bound check usually means that it will
            // invalidate the head segment length, so change the bailout kind on the upper bound check to have it bail out for
            // the right reason. Even though the store may actually occur in a non-head segment, which would not invalidate the
            // head segment or length, any store outside the head segment bounds causes head segment load elimination to be
            // turned off for the store, because the segment structure of the array is not guaranteed to be the same every time.
            IR::Instr *upperBoundCheck = this->globOpt->FindUpperBoundsCheckInstr(instr);
            Assert(upperBoundCheck && upperBoundCheck != instr);
            if(upperBoundCheck->GetBailOutKind() == IR::BailOutOnArrayAccessHelperCall)
            {
                upperBoundCheck->SetBailOutKind(IR::BailOutOnInvalidatedArrayHeadSegment);
            }
            else
            {
                Assert(upperBoundCheck->GetBailOutKind() == IR::BailOutOnFailedHoistedBoundCheck);
            }
        }
    }
    else
    {
        if(!currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->IsEmpty())
        {
            // There is an upwards-exposed use of a segment sym. Since the head segment syms referenced by this instruction can
            // be aliased, this instruction needs to bail out if it changes the segment syms it references even if the ones it
            // references specifically are not upwards-exposed. This bailout kind also guarantees that this element store will
            // not create missing values.
            includeBailOutKinds |= IR::BailOutOnInvalidatedArrayHeadSegment;
        }
        else if(
            !currentBlock->noImplicitCallNoMissingValuesUses->IsEmpty() &&
            !(instr->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall))
        {
            // There is an upwards-exposed use of an array with no missing values. Since the array referenced by this
            // instruction can be aliased, this instruction needs to bail out if it creates a missing value in the array even if
            // this array specifically is not upwards-exposed.
            includeBailOutKinds |= IR::BailOutOnMissingValue;
        }

        if(!baseValueType.IsNotArray() && !currentBlock->noImplicitCallArrayLengthSymUses->IsEmpty())
        {
            // There is an upwards-exposed use of a length sym. Since the length sym referenced by this instruction can be
            // aliased, this instruction needs to bail out if it changes the length sym it references even if the ones it
            // references specifically are not upwards-exposed.
            includeBailOutKinds |= IR::BailOutOnInvalidatedArrayLength;
        }
    }

    if(!includeBailOutKinds)
    {
        return;
    }

    Assert(!(includeBailOutKinds & ~IR::BailOutKindBits));
    // Preserve the instruction's existing kind; we only add auxiliary kind bits.
    instr->SetBailOutKind(instr->GetBailOutKind() | includeBailOutKinds);
}
// Records a use of a stack sym in the current (backward-walked) block.
//
// Tracks byte-code upward-exposed uses (on the var-equivalent sym), mark-temp object info, and
// the sym's membership in the block's upwardExposedUses bit vector.
//
// Returns the previous upward-exposed state of the sym (TestAndSet's prior bit value); during a
// collection pass it always returns true.
bool
BackwardPass::ProcessStackSymUse(StackSym * stackSym, BOOLEAN isNonByteCodeUse)
{
    BasicBlock * block = this->currentBlock;

    if (this->DoByteCodeUpwardExposedUsed())
    {
        if (!isNonByteCodeUse && stackSym->HasByteCodeRegSlot())
        {
            // Always track the sym use on the var sym.
            StackSym * byteCodeUseSym = stackSym;
            if (byteCodeUseSym->IsTypeSpec())
            {
                // It has to have a var version for byte code regs
                byteCodeUseSym = byteCodeUseSym->GetVarEquivSym(nullptr);
            }
            block->byteCodeUpwardExposedUsed->Set(byteCodeUseSym->m_id);
#if DBG
            // We can only track first level function stack syms right now
            if (byteCodeUseSym->GetByteCodeFunc() == this->func)
            {
                Js::RegSlot byteCodeRegSlot = byteCodeUseSym->GetByteCodeRegSlot();
                if (block->byteCodeRestoreSyms[byteCodeRegSlot] != byteCodeUseSym)
                {
                    AssertMsg(block->byteCodeRestoreSyms[byteCodeRegSlot] == nullptr,
                        "Can't have two active lifetime for the same byte code register");
                    block->byteCodeRestoreSyms[byteCodeRegSlot] = byteCodeUseSym;
                }
            }
#endif
        }
    }

    if(IsCollectionPass())
    {
        // Collection pass only gathers byte-code use info; no liveness tracking.
        return true;
    }

    if (this->DoMarkTempObjects())
    {
        Assert((block->loop != nullptr) == block->tempObjectTracker->HasTempTransferDependencies());
        block->tempObjectTracker->ProcessUse(stackSym, this);
    }
#if DBG
    if (this->DoMarkTempObjectVerify())
    {
        Assert((block->loop != nullptr) == block->tempObjectVerifyTracker->HasTempTransferDependencies());
        block->tempObjectVerifyTracker->ProcessUse(stackSym, this);
    }
#endif
    // TestAndSet marks the sym upward-exposed and returns whether it already was.
    return !!block->upwardExposedUses->TestAndSet(stackSym->m_id);
}
// Records a use of a sym (stack sym or property sym) in the current block, dispatching to the
// appropriate per-kind tracking. Returns whether the sym was considered "used" (upward-exposed);
// the meaning differs by phase — see the comments inside.
bool
BackwardPass::ProcessSymUse(Sym * sym, bool isRegOpndUse, BOOLEAN isNonByteCodeUse)
{
    BasicBlock * block = this->currentBlock;

    if (sym->IsPropertySym())
    {
        PropertySym * propertySym = sym->AsPropertySym();
        // A property sym use is also a use of its base object's stack sym.
        ProcessStackSymUse(propertySym->m_stackSym, isNonByteCodeUse);

        if(IsCollectionPass())
        {
            return true;
        }

        Assert((block->fieldHoistCandidates != nullptr) == this->DoFieldHoistCandidates());
        if (block->fieldHoistCandidates && GlobOpt::TransferSrcValue(this->currentInstr))
        {
            // If the instruction doesn't transfer the src value to dst, it will not be copyprop'd
            // So we can't hoist those.
            block->fieldHoistCandidates->Set(propertySym->m_id);
        }

        if (this->DoDeadStoreSlots())
        {
            // The slot is read here, so an upstream store to it is not dead.
            block->slotDeadStoreCandidates->Clear(propertySym->m_id);
        }

        if (tag == Js::BackwardPhase)
        {
            // Backward phase tracks liveness of fields to tell GlobOpt where we may need bailout.
            return this->ProcessPropertySymUse(propertySym);
        }
        else
        {
            // Dead-store phase tracks copy propped syms, so it only cares about ByteCodeUses we inserted,
            // not live fields.
            return false;
        }
    }

    StackSym * stackSym = sym->AsStackSym();
    bool isUsed = ProcessStackSymUse(stackSym, isNonByteCodeUse);

    if (!IsCollectionPass() && isRegOpndUse && this->DoMarkTempNumbers())
    {
        // Collect mark temp number information
        Assert((block->loop != nullptr) == block->tempNumberTracker->HasTempTransferDependencies());
        block->tempNumberTracker->ProcessUse(stackSym, this);
    }

    return isUsed;
}
  3453. bool
  3454. BackwardPass::MayPropertyBeWrittenTo(Js::PropertyId propertyId)
  3455. {
  3456. return this->func->anyPropertyMayBeWrittenTo ||
  3457. (this->func->propertiesWrittenTo != nullptr && this->func->propertiesWrittenTo->ContainsKey(propertyId));
  3458. }
// Processes a use of a property sym operand that may participate in an object-type-check
// sequence.
//
// Backward phase: records the operand's object type sym as an upward-exposed field so GlobOpt
// knows where live fields require bailout, and tracks write guards.
// Dead-store phase: decides whether this operand's type check is dead (protected by a downstream
// check), tracks object-header-inlined layout constraints, inserts any pending final-type
// transition, and pushes guarded-property / write-guard info up the flow graph.
void
BackwardPass::ProcessPropertySymOpndUse(IR::PropertySymOpnd * opnd)
{
    // If this operand doesn't participate in the type check sequence it's a pass-through.
    // We will not set any bits on the operand and we will ignore them when lowering.
    if (!opnd->IsTypeCheckSeqCandidate())
    {
        return;
    }

    AssertMsg(opnd->HasObjectTypeSym(), "Optimized property sym operand without a type sym?");
    SymID typeSymId = opnd->GetObjectTypeSym()->m_id;

    BasicBlock * block = this->currentBlock;

    if (this->tag == Js::BackwardPhase)
    {
        // In the backward phase, we have no availability info, and we're trying to see
        // where there are live fields so we can decide where to put bailouts.
        Assert(opnd->MayNeedTypeCheckProtection());
        block->upwardExposedFields->Set(typeSymId);

        TrackObjTypeSpecWriteGuards(opnd, block);
    }
    else
    {
        // In the dead-store phase, we're trying to see where the lowered code needs to make sure to check
        // types for downstream load/stores. We're also setting up the upward-exposed uses at loop headers
        // so register allocation will be correct.
        Assert(opnd->MayNeedTypeCheckProtection());

        const bool isStore = opnd == this->currentInstr->GetDst();

        // Note that we don't touch upwardExposedUses here.
        if (opnd->IsTypeAvailable())
        {
            // Type is protected upstream; this opnd's check is dead unless a downstream use
            // (already-set bit) still needs it. TestAndSet both records and queries.
            opnd->SetTypeDead(!block->upwardExposedFields->TestAndSet(typeSymId));

            if (opnd->IsTypeChecked() && opnd->IsObjectHeaderInlined())
            {
                // The object's type must not change in a way that changes the layout.
                // If we see a StFld with a type check bailout between here and the type check that guards this
                // property, we must not dead-store the StFld's type check bailout, even if that operand's type appears
                // dead, because that object may alias this one.
                BVSparse<JitArenaAllocator>* bv = block->typesNeedingKnownObjectLayout;
                if (bv == nullptr)
                {
                    bv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
                    block->typesNeedingKnownObjectLayout = bv;
                }
                bv->Set(typeSymId);
            }
        }
        else
        {
            // This opnd will perform the type check itself; it kills (clears) the upward-exposed
            // field bit that downstream uses set.
            opnd->SetTypeDead(
                !block->upwardExposedFields->TestAndClear(typeSymId) &&
                (
                    // Don't set the type dead if this is a store that may change the layout in a way that invalidates
                    // optimized load/stores downstream. Leave it non-dead in that case so the type check bailout
                    // is preserved and so that Lower will generate the bailout properly.
                    !isStore ||
                    !block->typesNeedingKnownObjectLayout ||
                    block->typesNeedingKnownObjectLayout->IsEmpty()
                )
            );

            BVSparse<JitArenaAllocator>* bv = block->typesNeedingKnownObjectLayout;
            if (bv != nullptr)
            {
                bv->Clear(typeSymId);
            }
        }

        bool mayNeedTypeTransition = true;
        if (!opnd->HasTypeMismatch() && func->DoGlobOpt())
        {
            // With globopt and no type mismatch, only loads can still require a transition here.
            mayNeedTypeTransition = !isStore;
        }
        if (mayNeedTypeTransition &&
            !this->IsPrePass() &&
            !this->currentInstr->HasBailOutInfo() &&
            (opnd->NeedsPrimaryTypeCheck() ||
             opnd->NeedsLocalTypeCheck() ||
             opnd->NeedsLoadFromProtoTypeCheck()))
        {
            // This is a "checked" opnd that nevertheless will have some kind of type check generated for it.
            // (Typical case is a load from prototype with no upstream guard.)
            // If the type check fails, we will call a helper, which will require that the type be correct here.
            // Final type can't be pushed up past this point. Do whatever type transition is required.
            if (block->stackSymToFinalType != nullptr)
            {
                StackSym *baseSym = opnd->GetObjectSym();
                AddPropertyCacheBucket *pBucket = block->stackSymToFinalType->Get(baseSym->m_id);
                if (pBucket &&
                    pBucket->GetFinalType() != nullptr &&
                    pBucket->GetFinalType() != pBucket->GetInitialType())
                {
                    this->InsertTypeTransition(this->currentInstr->m_next, baseSym, pBucket);
                    pBucket->SetFinalType(pBucket->GetInitialType());
                }
            }
        }

        if (!opnd->HasTypeMismatch() && func->DoGlobOpt())
        {
            // Do this after the above code, as the value of the final type may change there.
            TrackAddPropertyTypes(opnd, block);
        }

        TrackObjTypeSpecProperties(opnd, block);
        TrackObjTypeSpecWriteGuards(opnd, block);
    }
}
// Dead-store-phase tracking of guarded property operations for object type specialization.
//
// Pushes the set of property operations protected by a shared upstream type check up the flow
// graph (keyed by object sym) and, when this operand is itself the primary type check, drops the
// accumulated set onto the operand so Lower can register all of them against one guard.
void
BackwardPass::TrackObjTypeSpecProperties(IR::PropertySymOpnd *opnd, BasicBlock *block)
{
    Assert(tag == Js::DeadStorePhase);
    Assert(opnd->IsTypeCheckSeqCandidate());

    // Now that we're in the dead store pass and we know definitively which operations will have a type
    // check and which are protected by an upstream type check, we can push the lists of guarded properties
    // up the flow graph and drop them on the type checks for the corresponding object symbol.
    if (opnd->IsTypeCheckSeqParticipant())
    {
        // Add this operation to the list of guarded operations for this object symbol.
        HashTable<ObjTypeGuardBucket>* stackSymToGuardedProperties = block->stackSymToGuardedProperties;
        if (stackSymToGuardedProperties == nullptr)
        {
            stackSymToGuardedProperties = HashTable<ObjTypeGuardBucket>::New(this->tempAlloc, 8);
            block->stackSymToGuardedProperties = stackSymToGuardedProperties;
        }

        StackSym* objSym = opnd->GetObjectSym();
        ObjTypeGuardBucket* bucket = stackSymToGuardedProperties->FindOrInsertNew(objSym->m_id);
        BVSparse<JitArenaAllocator>* guardedPropertyOps = bucket->GetGuardedPropertyOps();
        if (guardedPropertyOps == nullptr)
        {
            // The bit vectors we push around the flow graph only need to live as long as this phase.
            guardedPropertyOps = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
            bucket->SetGuardedPropertyOps(guardedPropertyOps);
        }

#if DBG
        FOREACH_BITSET_IN_SPARSEBV(propOpId, guardedPropertyOps)
        {
            Js::ObjTypeSpecFldInfo* existingFldInfo = this->func->GetGlobalObjTypeSpecFldInfo(propOpId);
            Assert(existingFldInfo != nullptr);

            if (existingFldInfo->GetPropertyId() != opnd->GetPropertyId())
            {
                continue;
            }
            // It would be very nice to assert that the info we have for this property matches all properties guarded thus far.
            // Unfortunately, in some cases of object pointer copy propagation into a loop, we may end up with conflicting
            // information for the same property. We simply ignore the conflict and emit an equivalent type check, which
            // will attempt to check for one property on two different slots, and obviously fail. Thus we may have a
            // guaranteed bailout, but we'll simply re-JIT with equivalent object type spec disabled. To avoid this
            // issue altogether, we would need to track the set of guarded properties along with the type value in the
            // forward pass, and when a conflict is detected either not optimize the offending instruction, or correct
            // its information based on the info from the property in the type value info.
            //Assert(!existingFldInfo->IsPoly() || !opnd->IsPoly() || GlobOpt::AreTypeSetsIdentical(existingFldInfo->GetEquivalentTypeSet(), opnd->GetEquivalentTypeSet()));
            //Assert(existingFldInfo->GetSlotIndex() == opnd->GetSlotIndex());

            if (PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->func))
            {
                if (existingFldInfo->IsPoly() && opnd->IsPoly() &&
                    (!GlobOpt::AreTypeSetsIdentical(existingFldInfo->GetEquivalentTypeSet(), opnd->GetEquivalentTypeSet()) ||
                    (existingFldInfo->GetSlotIndex() != opnd->GetSlotIndex())))
                {
                    wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Js::FunctionBody* topFunctionBody = this->func->GetJnFunction();
                    Js::ScriptContext* scriptContext = topFunctionBody->GetScriptContext();

                    Output::Print(L"EquivObjTypeSpec: top function %s (%s): duplicate property clash on %s(#%d) on operation %u \n",
                        topFunctionBody->GetDisplayName(), topFunctionBody->GetDebugNumberSet(debugStringBuffer),
                        scriptContext->GetPropertyNameLocked(opnd->GetPropertyId())->GetBuffer(), opnd->GetPropertyId(), opnd->GetObjTypeSpecFldId());
                    Output::Flush();
                }
            }
        }
        NEXT_BITSET_IN_SPARSEBV
#endif

        bucket->AddToGuardedPropertyOps(opnd->GetObjTypeSpecFldId());

        if (opnd->NeedsMonoCheck())
        {
            // Remember that some guarded op requires a monomorphic check; the upstream primary
            // type check will be downgraded from equivalent to mono (see below).
            Assert(opnd->IsMono());
            Js::Type *monoGuardType = opnd->IsInitialTypeChecked() ? opnd->GetInitialType() : opnd->GetType();
            bucket->SetMonoGuardType(monoGuardType);
        }

        if (opnd->NeedsPrimaryTypeCheck())
        {
            // Grab the guarded properties which match this type check with respect to polymorphism and drop them
            // on the operand. Only equivalent type checks can protect polymorphic properties to avoid a case where
            // we have 1) a cache with type set {t1, t2} and property a, followed by 2) a cache with type t3 and
            // property b, and 3) a cache with type set {t1, t2} and property c, where the slot index of property c
            // on t1 and t2 is different than on t3. If cache 2 were to protect property c it would not verify that
            // it resides on the correct slot for cache 3. Yes, an equivalent type check could protect monomorphic
            // properties, but it would then unnecessarily verify their equivalence on the slow path.

            // Also, make sure the guarded properties on the operand are allocated from the func's allocator to
            // persists until lowering.

            Assert(guardedPropertyOps != nullptr);
            opnd->EnsureGuardedPropOps(this->func->m_alloc);
            opnd->AddGuardedPropOps(guardedPropertyOps);

            if (bucket->NeedsMonoCheck() && !opnd->IsTypeAvailable())
            {
                if (this->currentInstr->HasEquivalentTypeCheckBailOut())
                {
                    // Some instr protected by this one requires a monomorphic type check. (E.g., final type opt,
                    // fixed field not loaded from prototype.) Note the IsTypeAvailable test above: only do this at
                    // the initial type check that protects this path.
                    opnd->SetMonoGuardType(bucket->GetMonoGuardType());
                    this->currentInstr->ChangeEquivalentToMonoTypeCheckBailOut();
                }
                bucket->SetMonoGuardType(nullptr);
            }

            // The accumulated set has been consumed by this (primary) type check; reset tracking
            // for the object sym.
            bucket->SetGuardedPropertyOps(nullptr);
            JitAdelete(this->tempAlloc, guardedPropertyOps);
            block->stackSymToGuardedProperties->Clear(objSym->m_id);
#if DBG
            // If there is no upstream type check that is live and could protect guarded properties, we better
            // not have any properties remaining.
            // (This 'bucket' intentionally shadows the outer one; after the Clear above, Get should find nothing.)
            ObjTypeGuardBucket* bucket = block->stackSymToGuardedProperties->Get(opnd->GetObjectSym()->m_id);
            Assert(opnd->IsTypeAvailable() || bucket == nullptr || bucket->GetGuardedPropertyOps()->IsEmpty());
#endif
        }
    }
    else if (opnd->NeedsLocalTypeCheck())
    {
        // No shared upstream guard: this op guards only itself.
        opnd->EnsureGuardedPropOps(this->func->m_alloc);
        opnd->SetGuardedPropOp(opnd->GetObjTypeSpecFldId());
    }
}
// Tracks write guard syms for object-type-specialized operations.
//
// Backward phase: accumulates, per object sym, the write guard syms of operations that may need
// write guard protection (proto loads / fixed field checks), and records on a potential type
// check the guards that are still live (upward-exposed) at that point.
// Dead-store phase: clears write-guard requirements that are provably unnecessary (property never
// written to in this function, or no primary type check here).
void
BackwardPass::TrackObjTypeSpecWriteGuards(IR::PropertySymOpnd *opnd, BasicBlock *block)
{
    // TODO (ObjTypeSpec): Move write guard tracking to the forward pass, by recording on the type value
    // which property IDs have been written since the last type check. This will result in more accurate
    // tracking in cases when object pointer copy prop kicks in.
    if (this->tag == Js::BackwardPhase)
    {
        // If this operation may need a write guard (load from proto or fixed field check) then add its
        // write guard symbol to the map for this object. If it remains live (hasn't been written to)
        // until the type check upstream, it will get recorded there so that the type check can be registered
        // for invalidation on this property used in this operation.

        // (ObjTypeSpec): Consider supporting polymorphic write guards as well. We can't currently distinguish between mono and
        // poly write guards, and a type check can only protect operations matching with respect to polymorphism (see
        // BackwardPass::TrackObjTypeSpecProperties for details), so for now we only target monomorphic operations.
        if (opnd->IsMono() && opnd->MayNeedWriteGuardProtection())
        {
            if (block->stackSymToWriteGuardsMap == nullptr)
            {
                block->stackSymToWriteGuardsMap = HashTable<ObjWriteGuardBucket>::New(this->tempAlloc, 8);
            }

            ObjWriteGuardBucket* bucket = block->stackSymToWriteGuardsMap->FindOrInsertNew(opnd->GetObjectSym()->m_id);

            BVSparse<JitArenaAllocator>* writeGuards = bucket->GetWriteGuards();
            if (writeGuards == nullptr)
            {
                // The bit vectors we push around the flow graph only need to live as long as this phase.
                writeGuards = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
                bucket->SetWriteGuards(writeGuards);
            }

            PropertySym *propertySym = opnd->m_sym->AsPropertySym();
            Assert(propertySym->m_writeGuardSym != nullptr);
            SymID writeGuardSymId = propertySym->m_writeGuardSym->m_id;
            writeGuards->Set(writeGuardSymId);
        }

        // Record any live (upward exposed) write guards on this operation, if this operation may end up with
        // a type check. If we ultimately don't need a type check here, we will simply ignore the guards, because
        // an earlier type check will protect them.
        if (!IsPrePass() && opnd->IsMono() && !opnd->IsTypeDead())
        {
            Assert(opnd->GetWriteGuards() == nullptr);
            if (block->stackSymToWriteGuardsMap != nullptr)
            {
                ObjWriteGuardBucket* bucket = block->stackSymToWriteGuardsMap->Get(opnd->GetObjectSym()->m_id);
                if (bucket != nullptr)
                {
                    // Get all the write guards associated with this object sym and filter them down to those that
                    // are upward exposed. If we end up emitting a type check for this instruction, we will create
                    // a type property guard registered for all guarded proto properties and we will set the write
                    // guard syms live during forward pass, such that we can avoid unnecessary write guard type
                    // checks and bailouts on every proto property (as long as it hasn't been written to since the
                    // primary type check).
                    // Note: copied with the func's allocator because the result must survive until lowering.
                    auto writeGuards = bucket->GetWriteGuards()->CopyNew(this->func->m_alloc);
                    writeGuards->And(block->upwardExposedFields);
                    opnd->SetWriteGuards(writeGuards);
                }
            }
        }
    }
    else
    {
        // If we know this property has never been written to in this function (either on this object or any
        // of its aliases) we don't need the local type check.
        if (opnd->MayNeedWriteGuardProtection() && !opnd->IsWriteGuardChecked() && !MayPropertyBeWrittenTo(opnd->GetPropertyId()))
        {
            opnd->SetWriteGuardChecked(true);
        }

        // If we don't need a primary type check here let's clear the write guards. The primary type check upstream will
        // register the type check for the corresponding properties.
        if (!IsPrePass() && !opnd->NeedsPrimaryTypeCheck())
        {
            opnd->ClearWriteGuards();
        }
    }
}
void
BackwardPass::TrackAddPropertyTypes(IR::PropertySymOpnd *opnd, BasicBlock *block)
{
    // Track object type transitions caused by property-add stores ("final type"
    // optimization): for each object stack sym, remember in the per-block
    // stackSymToFinalType table the type the object will have after all pending
    // adds (the final type), so an upstream store can transition the object
    // directly to that type and downstream checks can be elided.
    //
    // Do the work of objtypespec add-property opt even if it's disabled by PHASE option, so that we have
    // the dataflow info that can be inspected.

    Assert(this->tag == Js::DeadStorePhase);
    Assert(opnd->IsMono() || opnd->HasEquivalentTypeSet());

    Js::Type *typeWithProperty = opnd->IsMono() ? opnd->GetType() : opnd->GetFirstEquivalentType();
    Js::Type *typeWithoutProperty = opnd->HasInitialType() ? opnd->GetInitialType() : nullptr;

    // Not a property add we can track (no initial type, no type change, or the
    // object is already known to have the post-add type here). Just pick up any
    // final type being tracked for this object so this store can transition
    // straight to it.
    if (typeWithoutProperty == nullptr ||
        typeWithProperty == typeWithoutProperty ||
        (opnd->IsTypeChecked() && !opnd->IsInitialTypeChecked()))
    {
        // NOTE(review): the final type is not attached when the instruction has
        // bailout info — presumably the bailout must observe the pre-transition
        // type; confirm against the lowering of final-type stores.
        if (!this->IsPrePass() && block->stackSymToFinalType != nullptr && !this->currentInstr->HasBailOutInfo())
        {
            PropertySym *propertySym = opnd->m_sym->AsPropertySym();
            AddPropertyCacheBucket *pBucket =
                block->stackSymToFinalType->Get(propertySym->m_stackSym->m_id);
            if (pBucket && pBucket->GetFinalType() && pBucket->GetInitialType() != pBucket->GetFinalType())
            {
                opnd->SetFinalType(pBucket->GetFinalType());
            }
        }
        return;
    }

#if DBG
    // Sanity: adding one property must grow the handler by exactly one property,
    // keep object-header-inlining compatibility, and never shrink slot capacity.
    Assert(typeWithProperty != nullptr);
    Js::DynamicTypeHandler * typeWithoutPropertyTypeHandler = static_cast<Js::DynamicType *>(typeWithoutProperty)->GetTypeHandler();
    Js::DynamicTypeHandler * typeWithPropertyTypeHandler = static_cast<Js::DynamicType *>(typeWithProperty)->GetTypeHandler();
    Assert(typeWithoutPropertyTypeHandler->GetPropertyCount() + 1 == typeWithPropertyTypeHandler->GetPropertyCount());
    AssertMsg(Js::DynamicObject::IsTypeHandlerCompatibleForObjectHeaderInlining(typeWithoutPropertyTypeHandler, typeWithPropertyTypeHandler),
        "TypeHandlers are not compatible for transition?");
    Assert(typeWithoutPropertyTypeHandler->GetSlotCapacity() <= typeWithPropertyTypeHandler->GetSlotCapacity());
#endif

    // If there's already a final type for this instance, record it on the operand.
    // If not, start tracking it.
    if (block->stackSymToFinalType == nullptr)
    {
        block->stackSymToFinalType = HashTable<AddPropertyCacheBucket>::New(this->tempAlloc, 8);
    }

    // Find or create the type-tracking record for this instance in this block.
    PropertySym *propertySym = opnd->m_sym->AsPropertySym();
    AddPropertyCacheBucket *pBucket =
        block->stackSymToFinalType->FindOrInsertNew(propertySym->m_stackSym->m_id);

    Js::Type* finalType = nullptr;
#if DBG
    Js::Type * deadStoreUnavailableFinalType = nullptr;
#endif
    // Since this pass runs backward, a "match" means this store's post-add type
    // equals the initial type of the (later) store we are already tracking.
    if (pBucket->GetInitialType() == nullptr || opnd->GetType() != pBucket->GetInitialType())
    {
#if DBG
        if (opnd->GetType() == pBucket->deadStoreUnavailableInitialType)
        {
            deadStoreUnavailableFinalType = pBucket->deadStoreUnavailableFinalType;
        }
#endif
        // No info found, or the info was bad, so initialize it from this cache.
        finalType = opnd->GetType();
        pBucket->SetFinalType(finalType);
    }
    else
    {
        // Match: The type we push upward is now the typeWithoutProperty at this point,
        // and the final type is the one we've been tracking.
        finalType = pBucket->GetFinalType();
#if DBG
        deadStoreUnavailableFinalType = pBucket->deadStoreUnavailableFinalType;
#endif
    }

    pBucket->SetInitialType(typeWithoutProperty);

    if (!PHASE_OFF(Js::ObjTypeSpecStorePhase, this->func))
    {
#if DBG
        // We may regress in this case:
        // if (b)
        //     t1 = {};
        //     o = t1;
        //     o.x =
        // else
        //     t2 = {};
        //     o = t2;
        //     o.x =
        // o.y =
        //
        // Where the backward pass will propagate the final type in o.y to o.x, then globopt will copy prop t1 and t2 to o.x.
        // But not o.y (because of the merge). Then, in the dead store pass, o.y's final type will not propagate to t1.x and t2.x
        // respectively, thus regression the final type. However, in both cases, the types of t1 and t2 are dead anyways.
        //
        // if the type is dead, we don't care if we have regressed the type, as no one is depending on it to skip type check anyways
        if (!opnd->IsTypeDead())
        {
            // This is the type that would have been propagated if we didn't kill it because the type isn't available
            Js::Type * checkFinalType = deadStoreUnavailableFinalType ? deadStoreUnavailableFinalType : finalType;
            if (opnd->HasFinalType() && opnd->GetFinalType() != checkFinalType)
            {
                // Final type discovery must be progressively better (unless we kill it in the deadstore pass
                // when the type is not available during the forward pass)
                Js::DynamicTypeHandler * oldFinalTypeHandler = static_cast<Js::DynamicType *>(opnd->GetFinalType())->GetTypeHandler();
                Js::DynamicTypeHandler * checkFinalTypeHandler = static_cast<Js::DynamicType *>(checkFinalType)->GetTypeHandler();
                Assert(oldFinalTypeHandler->GetPropertyCount() < checkFinalTypeHandler->GetPropertyCount());
                AssertMsg(Js::DynamicObject::IsTypeHandlerCompatibleForObjectHeaderInlining(oldFinalTypeHandler, checkFinalTypeHandler),
                    "TypeHandlers should be compatible for transition.");
                Assert(oldFinalTypeHandler->GetSlotCapacity() <= checkFinalTypeHandler->GetSlotCapacity());
            }
        }
#endif
        Assert(opnd->IsBeingAdded());
        if (!this->IsPrePass())
        {
            opnd->SetFinalType(finalType);
        }
        if (!opnd->IsTypeChecked())
        {
            // Transition from initial to final type will only happen at type check points.
            if (opnd->IsTypeAvailable())
            {
                pBucket->SetFinalType(pBucket->GetInitialType());
            }
        }
    }

#if DBG_DUMP
    if (PHASE_TRACE(Js::ObjTypeSpecStorePhase, this->func))
    {
        Output::Print(L"ObjTypeSpecStore: ");
        this->currentInstr->Dump();
        pBucket->Dump();
    }
#endif

    // In the dead-store pass, we have forward information that tells us whether a "final type"
    // reached this point from an earlier store. If it didn't (i.e., it's not available here),
    // remove it from the backward map so that upstream stores will use the final type that is
    // live there. (This avoids unnecessary bailouts in cases where the final type is only live
    // on one branch of an "if", a case that the initial backward pass can't detect.)
    // An example:
    //     if (cond)
    //         o.x =
    //     o.y =
    if (!opnd->IsTypeAvailable())
    {
#if DBG
        // In debug builds, remember what was killed so the asserts above can
        // distinguish "killed because unavailable" from a real final-type regression.
        pBucket->deadStoreUnavailableInitialType = pBucket->GetInitialType();
        if (pBucket->deadStoreUnavailableFinalType == nullptr)
        {
            pBucket->deadStoreUnavailableFinalType = pBucket->GetFinalType();
        }
        pBucket->SetInitialType(nullptr);
        pBucket->SetFinalType(nullptr);
#else
        block->stackSymToFinalType->Clear(propertySym->m_stackSym->m_id);
#endif
    }
}
  3901. void
  3902. BackwardPass::InsertTypeTransition(IR::Instr *instrInsertBefore, int symId, AddPropertyCacheBucket *data)
  3903. {
  3904. StackSym *objSym = this->func->m_symTable->FindStackSym(symId);
  3905. Assert(objSym);
  3906. this->InsertTypeTransition(instrInsertBefore, objSym, data);
  3907. }
  3908. void
  3909. BackwardPass::InsertTypeTransition(IR::Instr *instrInsertBefore, StackSym *objSym, AddPropertyCacheBucket *data)
  3910. {
  3911. IR::RegOpnd *baseOpnd = IR::RegOpnd::New(objSym, TyMachReg, this->func);
  3912. baseOpnd->SetIsJITOptimizedReg(true);
  3913. IR::AddrOpnd *initialTypeOpnd =
  3914. IR::AddrOpnd::New(data->GetInitialType(), IR::AddrOpndKindDynamicType, this->func);
  3915. IR::AddrOpnd *finalTypeOpnd =
  3916. IR::AddrOpnd::New(data->GetFinalType(), IR::AddrOpndKindDynamicType, this->func);
  3917. IR::Instr *adjustTypeInstr =
  3918. IR::Instr::New(Js::OpCode::AdjustObjType, finalTypeOpnd, baseOpnd, initialTypeOpnd, this->func);
  3919. instrInsertBefore->InsertBefore(adjustTypeInstr);
  3920. }
  3921. void
  3922. BackwardPass::InsertTypeTransitionAfterInstr(IR::Instr *instr, int symId, AddPropertyCacheBucket *data)
  3923. {
  3924. if (!this->IsPrePass())
  3925. {
  3926. // Transition to the final type if we don't bail out.
  3927. if (instr->EndsBasicBlock())
  3928. {
  3929. // The instr with the bailout is something like a branch that may not fall through.
  3930. // Insert the transitions instead at the beginning of each successor block.
  3931. this->InsertTypeTransitionsAtPriorSuccessors(this->currentBlock, nullptr, symId, data);
  3932. }
  3933. else
  3934. {
  3935. this->InsertTypeTransition(instr->m_next, symId, data);
  3936. }
  3937. }
  3938. // Note: we could probably clear this entry out of the table, but I don't know
  3939. // whether it's worth it, because it's likely coming right back.
  3940. data->SetFinalType(data->GetInitialType());
  3941. }
void
BackwardPass::InsertTypeTransitionAtBlock(BasicBlock *block, int symId, AddPropertyCacheBucket *data)
{
    // Insert the type transition for symId at the top of "block": just before its
    // first real instruction that is not itself an AdjustObjType, or after the
    // block's last instruction if no such instruction exists.
    bool inserted = false;
    FOREACH_INSTR_IN_BLOCK(instr, block)
    {
        if (instr->IsRealInstr())
        {
            // Check for pre-existing type transition. There may be more than one AdjustObjType here,
            // so look at them all.
            if (instr->m_opcode == Js::OpCode::AdjustObjType)
            {
                if (instr->GetSrc1()->AsRegOpnd()->m_sym->m_id == (SymID)symId)
                {
                    // This symbol already has a type transition at this point.
                    // It *must* be doing the same transition we're already trying to do.
                    Assert(instr->GetDst()->AsAddrOpnd()->m_address == data->GetFinalType() &&
                        instr->GetSrc2()->AsAddrOpnd()->m_address == data->GetInitialType());
                    // Nothing to do.
                    return;
                }
            }
            else
            {
                // First real non-AdjustObjType instruction: transition goes before it.
                this->InsertTypeTransition(instr, symId, data);
                inserted = true;
                break;
            }
        }
    }
    NEXT_INSTR_IN_BLOCK;
    if (!inserted)
    {
        Assert(block->GetLastInstr()->m_next);
        this->InsertTypeTransition(block->GetLastInstr()->m_next, symId, data);
    }
}
  3979. void
  3980. BackwardPass::InsertTypeTransitionsAtPriorSuccessors(
  3981. BasicBlock *block,
  3982. BasicBlock *blockSucc,
  3983. int symId,
  3984. AddPropertyCacheBucket *data)
  3985. {
  3986. // For each successor of block prior to blockSucc, adjust the type.
  3987. FOREACH_SUCCESSOR_BLOCK(blockFix, block)
  3988. {
  3989. if (blockFix == blockSucc)
  3990. {
  3991. return;
  3992. }
  3993. this->InsertTypeTransitionAtBlock(blockFix, symId, data);
  3994. }
  3995. NEXT_SUCCESSOR_BLOCK;
  3996. }
void
BackwardPass::InsertTypeTransitionsAtPotentialKills()
{
    // Final types can't be pushed up past certain instructions.
    IR::Instr *instr = this->currentInstr;
    if (instr->HasBailOutInfo() || instr->m_opcode == Js::OpCode::UpdateNewScObjectCache)
    {
        // Final types can't be pushed up past a bailout point.
        // Insert any transitions called for by the current state of add-property buckets.
        // Also do this for ctor cache updates, to avoid putting a type in the ctor cache that extends past
        // the end of the ctor that the cache covers.
        this->ForEachAddPropertyCacheBucket([&](int symId, AddPropertyCacheBucket *data)->bool {
            this->InsertTypeTransitionAfterInstr(instr, symId, data);
            return false; // keep iterating over all buckets
        });
    }
    else
    {
        // If this is a load/store that expects an object-header-inlined type, don't push another sym's transition from
        // object-header-inlined to non-object-header-inlined type past it, because the two syms may be aliases.
        IR::PropertySymOpnd *propertySymOpnd = instr->GetPropertySymOpnd();
        if (propertySymOpnd && propertySymOpnd->IsObjectHeaderInlined())
        {
            SymID opndId = propertySymOpnd->m_sym->AsPropertySym()->m_stackSym->m_id;
            this->ForEachAddPropertyCacheBucket([&](int symId, AddPropertyCacheBucket *data)->bool {
                if ((SymID)symId == opndId)
                {
                    // This is the sym we're tracking. No aliasing to worry about.
                    return false;
                }
                if (propertySymOpnd->IsMono() && data->GetInitialType() != propertySymOpnd->GetType())
                {
                    // Type mismatch in a monomorphic case -- no aliasing.
                    return false;
                }
                if (this->TransitionUndoesObjectHeaderInlining(data))
                {
                    // We're transitioning from inlined to non-inlined, so we can't push it up any farther.
                    this->InsertTypeTransitionAfterInstr(instr, symId, data);
                }
                return false; // keep iterating over all buckets
            });
        }
    }
}
template<class Fn>
void
BackwardPass::ForEachAddPropertyCacheBucket(Fn fn)
{
    // Invoke fn(symId, bucket) for each bucket in the current block that is
    // tracking a pending transition (initial type set and different from the
    // final type). fn returns true to stop the iteration early.
    BasicBlock *block = this->currentBlock;
    if (block->stackSymToFinalType == nullptr)
    {
        return;
    }

    FOREACH_HASHTABLE_ENTRY(AddPropertyCacheBucket, bucket, block->stackSymToFinalType)
    {
        AddPropertyCacheBucket *data = &bucket.element;
        if (data->GetInitialType() != nullptr &&
            data->GetInitialType() != data->GetFinalType())
        {
            bool done = fn(bucket.value, data);
            if (done)
            {
                break;
            }
        }
    }
    NEXT_HASHTABLE_ENTRY;
}
  4066. bool
  4067. BackwardPass::TransitionUndoesObjectHeaderInlining(AddPropertyCacheBucket *data) const
  4068. {
  4069. Js::Type *type = data->GetInitialType();
  4070. if (type == nullptr || !Js::DynamicType::Is(type->GetTypeId()))
  4071. {
  4072. return false;
  4073. }
  4074. Js::DynamicType *dynamicType = static_cast<Js::DynamicType*>(type);
  4075. if (!dynamicType->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler())
  4076. {
  4077. return false;
  4078. }
  4079. type = data->GetFinalType();
  4080. if (type == nullptr || !Js::DynamicType::Is(type->GetTypeId()))
  4081. {
  4082. return false;
  4083. }
  4084. dynamicType = static_cast<Js::DynamicType*>(type);
  4085. return !dynamicType->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler();
  4086. }
  4087. void
  4088. BackwardPass::CollectCloneStrCandidate(IR::Opnd * opnd)
  4089. {
  4090. IR::RegOpnd *regOpnd = opnd->AsRegOpnd();
  4091. Assert(regOpnd != nullptr);
  4092. StackSym *sym = regOpnd->m_sym;
  4093. if (tag == Js::BackwardPhase
  4094. && currentInstr->m_opcode == Js::OpCode::Add_A
  4095. && currentInstr->GetSrc1() == opnd
  4096. && !this->IsPrePass()
  4097. && !this->IsCollectionPass()
  4098. && this->currentBlock->loop)
  4099. {
  4100. Assert(currentBlock->cloneStrCandidates != nullptr);
  4101. currentBlock->cloneStrCandidates->Set(sym->m_id);
  4102. }
  4103. }
  4104. void
  4105. BackwardPass::InvalidateCloneStrCandidate(IR::Opnd * opnd)
  4106. {
  4107. IR::RegOpnd *regOpnd = opnd->AsRegOpnd();
  4108. Assert(regOpnd != nullptr);
  4109. StackSym *sym = regOpnd->m_sym;
  4110. if (tag == Js::BackwardPhase &&
  4111. (currentInstr->m_opcode != Js::OpCode::Add_A || currentInstr->GetSrc1()->AsRegOpnd()->m_sym->m_id != sym->m_id) &&
  4112. !this->IsPrePass() &&
  4113. !this->IsCollectionPass() &&
  4114. this->currentBlock->loop)
  4115. {
  4116. currentBlock->cloneStrCandidates->Clear(sym->m_id);
  4117. }
  4118. }
void
BackwardPass::ProcessUse(IR::Opnd * opnd)
{
    // Process one use operand of the current instruction: mark its syms as used
    // (upward exposed), flag dead operands, and feed the various trackers
    // (temp numbers/objects, clone-string candidates, array opnds).
    switch (opnd->GetKind())
    {
    case IR::OpndKindReg:
        {
            IR::RegOpnd *regOpnd = opnd->AsRegOpnd();
            StackSym *sym = regOpnd->m_sym;

            if (!IsCollectionPass())
            {
                // isTempLastUse is only used for string concat right now, so lets not mark it if it's not a string.
                // If it's upward exposed, it is not it's last use.
                if (regOpnd->m_isTempLastUse && (regOpnd->GetValueType().IsNotString() || this->currentBlock->upwardExposedUses->Test(sym->m_id) || sym->m_mayNotBeTempLastUse))
                {
                    regOpnd->m_isTempLastUse = false;
                }
                this->CollectCloneStrCandidate(opnd);
            }

            // ProcessSymUse returns whether the sym was already live; a dead sym's
            // operand is marked dead (when this pass is allowed to set dead bits).
            if (!this->ProcessSymUse(sym, true, regOpnd->GetIsJITOptimizedReg()) && this->DoSetDead())
            {
                regOpnd->SetIsDead();
            }

            if (IsCollectionPass())
            {
                break;
            }

            if (tag == Js::DeadStorePhase && regOpnd->IsArrayRegOpnd())
            {
                ProcessArrayRegOpndUse(currentInstr, regOpnd->AsArrayRegOpnd());
            }

            if (currentInstr->m_opcode == Js::OpCode::BailOnNotArray)
            {
                Assert(tag == Js::DeadStorePhase);

                const ValueType valueType(regOpnd->GetValueType());
                if(valueType.IsLikelyArrayOrObjectWithArray())
                {
                    currentBlock->noImplicitCallUses->Clear(sym->m_id);

                    // We are being conservative here to always check for missing value
                    // if any of them expect no missing value. That is because we don't know
                    // what set of sym is equivalent (copied) from the one we are testing for right now.
                    if(valueType.HasNoMissingValues() &&
                        !currentBlock->noImplicitCallNoMissingValuesUses->IsEmpty() &&
                        !IsPrePass())
                    {
                        // There is a use of this sym that requires this array to have no missing values, so this instruction
                        // needs to bail out if the array has missing values.
                        Assert(currentInstr->GetBailOutKind() == IR::BailOutOnNotArray ||
                            currentInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray);
                        currentInstr->SetBailOutKind(currentInstr->GetBailOutKind() | IR::BailOutOnMissingValue);
                    }

                    currentBlock->noImplicitCallNoMissingValuesUses->Clear(sym->m_id);
                    currentBlock->noImplicitCallNativeArrayUses->Clear(sym->m_id);
                }
            }
        }
        break;

    case IR::OpndKindSym:
        {
            IR::SymOpnd *symOpnd = opnd->AsSymOpnd();
            Sym * sym = symOpnd->m_sym;

            if (!this->ProcessSymUse(sym, false, opnd->GetIsJITOptimizedReg()) && this->DoSetDead())
            {
                symOpnd->SetIsDead();
            }

            if (IsCollectionPass())
            {
                break;
            }

            if (sym->IsPropertySym())
            {
                // TODO: We don't have last use info for property sym
                // and we don't set the last use of the stacksym inside the property sym
                if (tag == Js::BackwardPhase)
                {
                    if (opnd->AsSymOpnd()->IsPropertySymOpnd())
                    {
                        this->globOpt->PreparePropertySymOpndForTypeCheckSeq(symOpnd->AsPropertySymOpnd(), this->currentInstr, this->currentBlock->loop);
                    }
                }

                if (this->DoMarkTempNumbersOnTempObjects())
                {
                    this->currentBlock->tempNumberTracker->ProcessPropertySymUse(symOpnd, this->currentInstr, this);
                }

                if (symOpnd->IsPropertySymOpnd())
                {
                    this->ProcessPropertySymOpndUse(symOpnd->AsPropertySymOpnd());
                }
            }
        }
        break;

    case IR::OpndKindIndir:
        {
            IR::IndirOpnd * indirOpnd = opnd->AsIndirOpnd();
            IR::RegOpnd * baseOpnd = indirOpnd->GetBaseOpnd();

            // Both the base and (if present) the index regs of an indir are uses.
            if (!this->ProcessSymUse(baseOpnd->m_sym, false, baseOpnd->GetIsJITOptimizedReg()) && this->DoSetDead())
            {
                baseOpnd->SetIsDead();
            }

            IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
            if (indexOpnd)
            {
                if (!this->ProcessSymUse(indexOpnd->m_sym, false, indexOpnd->GetIsJITOptimizedReg()) && this->DoSetDead())
                {
                    indexOpnd->SetIsDead();
                }
            }

            if(IsCollectionPass())
            {
                break;
            }

            if (this->DoMarkTempNumbersOnTempObjects())
            {
                this->currentBlock->tempNumberTracker->ProcessIndirUse(indirOpnd, currentInstr, this);
            }

            if(tag == Js::DeadStorePhase && baseOpnd->IsArrayRegOpnd())
            {
                ProcessArrayRegOpndUse(currentInstr, baseOpnd->AsArrayRegOpnd());
            }
        }
        break;
    }
}
  4242. bool
  4243. BackwardPass::ProcessPropertySymUse(PropertySym *propertySym)
  4244. {
  4245. Assert(this->tag == Js::BackwardPhase);
  4246. BasicBlock *block = this->currentBlock;
  4247. bool isLive = !!block->upwardExposedFields->TestAndSet(propertySym->m_id);
  4248. if (propertySym->m_propertyEquivSet)
  4249. {
  4250. block->upwardExposedFields->Or(propertySym->m_propertyEquivSet);
  4251. }
  4252. return isLive;
  4253. }
  4254. void
  4255. BackwardPass::MarkTemp(StackSym * sym)
  4256. {
  4257. Assert(!IsCollectionPass());
  4258. // Don't care about type specialized syms
  4259. if (!sym->IsVar())
  4260. {
  4261. return;
  4262. }
  4263. BasicBlock * block = this->currentBlock;
  4264. if (this->DoMarkTempNumbers())
  4265. {
  4266. Assert((block->loop != nullptr) == block->tempNumberTracker->HasTempTransferDependencies());
  4267. block->tempNumberTracker->MarkTemp(sym, this);
  4268. }
  4269. if (this->DoMarkTempObjects())
  4270. {
  4271. Assert((block->loop != nullptr) == block->tempObjectTracker->HasTempTransferDependencies());
  4272. block->tempObjectTracker->MarkTemp(sym, this);
  4273. }
  4274. #if DBG
  4275. if (this->DoMarkTempObjectVerify())
  4276. {
  4277. Assert((block->loop != nullptr) == block->tempObjectVerifyTracker->HasTempTransferDependencies());
  4278. block->tempObjectVerifyTracker->MarkTemp(sym, this);
  4279. }
  4280. #endif
  4281. }
  4282. void
  4283. BackwardPass::MarkTempProcessInstr(IR::Instr * instr)
  4284. {
  4285. Assert(!IsCollectionPass());
  4286. if (this->currentBlock->isDead)
  4287. {
  4288. return;
  4289. }
  4290. BasicBlock * block;
  4291. block = this->currentBlock;
  4292. if (this->DoMarkTempNumbers())
  4293. {
  4294. block->tempNumberTracker->ProcessInstr(instr, this);
  4295. }
  4296. if (this->DoMarkTempObjects())
  4297. {
  4298. block->tempObjectTracker->ProcessInstr(instr);
  4299. }
  4300. #if DBG
  4301. if (this->DoMarkTempObjectVerify())
  4302. {
  4303. block->tempObjectVerifyTracker->ProcessInstr(instr, this);
  4304. }
  4305. #endif
  4306. }
  4307. #if DBG_DUMP
  4308. void
  4309. BackwardPass::DumpMarkTemp()
  4310. {
  4311. Assert(!IsCollectionPass());
  4312. BasicBlock * block = this->currentBlock;
  4313. if (this->DoMarkTempNumbers())
  4314. {
  4315. block->tempNumberTracker->Dump();
  4316. }
  4317. if (this->DoMarkTempObjects())
  4318. {
  4319. block->tempObjectTracker->Dump();
  4320. }
  4321. #if DBG
  4322. if (this->DoMarkTempObjectVerify())
  4323. {
  4324. block->tempObjectVerifyTracker->Dump();
  4325. }
  4326. #endif
  4327. }
  4328. #endif
  4329. void
  4330. BackwardPass::SetSymIsUsedOnlyInNumberIfLastUse(IR::Opnd *const opnd)
  4331. {
  4332. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  4333. if (stackSym && !currentBlock->upwardExposedUses->Test(stackSym->m_id))
  4334. {
  4335. symUsedOnlyForNumberBySymId->Set(stackSym->m_id);
  4336. }
  4337. }
  4338. void
  4339. BackwardPass::SetSymIsNotUsedOnlyInNumber(IR::Opnd *const opnd)
  4340. {
  4341. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  4342. if (stackSym)
  4343. {
  4344. symUsedOnlyForNumberBySymId->Clear(stackSym->m_id);
  4345. }
  4346. }
  4347. void
  4348. BackwardPass::SetSymIsUsedOnlyInBitOpsIfLastUse(IR::Opnd *const opnd)
  4349. {
  4350. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  4351. if (stackSym && !currentBlock->upwardExposedUses->Test(stackSym->m_id))
  4352. {
  4353. symUsedOnlyForBitOpsBySymId->Set(stackSym->m_id);
  4354. }
  4355. }
  4356. void
  4357. BackwardPass::SetSymIsNotUsedOnlyInBitOps(IR::Opnd *const opnd)
  4358. {
  4359. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  4360. if (stackSym)
  4361. {
  4362. symUsedOnlyForBitOpsBySymId->Clear(stackSym->m_id);
  4363. }
  4364. }
void
BackwardPass::TrackBitWiseOrNumberOp(IR::Instr *const instr)
{
    // Backward tracking of syms whose value is consumed only by bitwise ops or
    // only in numeric contexts. When such a sym's def is seen, the fact is
    // recorded on the def instruction (dstIsAlwaysConvertedToInt32 /
    // dstIsAlwaysConvertedToNumber) for use by later passes.
    Assert(instr);
    const bool trackBitWiseop = DoTrackBitOpsOrNumber();
    const bool trackNumberop = trackBitWiseop; // both trackings are enabled together
    const Js::OpCode opcode = instr->m_opcode;
    StackSym *const dstSym = IR::RegOpnd::TryGetStackSym(instr->GetDst());
    if (!trackBitWiseop && !trackNumberop)
    {
        return;
    }
    if (!instr->IsRealInstr())
    {
        return;
    }
    if (dstSym)
    {
        // For a dst where the def is in this block, transfer the current info into the instruction
        // (TestAndClear also ends the tracking of the sym at its def).
        if (trackBitWiseop && symUsedOnlyForBitOpsBySymId->TestAndClear(dstSym->m_id))
        {
            instr->dstIsAlwaysConvertedToInt32 = true;
        }
        if (trackNumberop && symUsedOnlyForNumberBySymId->TestAndClear(dstSym->m_id))
        {
            instr->dstIsAlwaysConvertedToNumber = true;
        }
    }
    // If the instruction can cause src values to escape the local scope, the srcs can't be optimized
    if (OpCodeAttr::NonTempNumberSources(opcode))
    {
        if (trackBitWiseop)
        {
            SetSymIsNotUsedOnlyInBitOps(instr->GetSrc1());
            SetSymIsNotUsedOnlyInBitOps(instr->GetSrc2());
        }
        if (trackNumberop)
        {
            SetSymIsNotUsedOnlyInNumber(instr->GetSrc1());
            SetSymIsNotUsedOnlyInNumber(instr->GetSrc2());
        }
        return;
    }
    if (trackBitWiseop)
    {
        // Srcs of bitwise ops stay in the bit-ops-only set; any other consumer
        // removes them from it.
        switch (opcode)
        {
        // Instructions that can cause src values to escape the local scope have already been excluded
        case Js::OpCode::Not_A:
        case Js::OpCode::And_A:
        case Js::OpCode::Or_A:
        case Js::OpCode::Xor_A:
        case Js::OpCode::Shl_A:
        case Js::OpCode::Shr_A:
        case Js::OpCode::Not_I4:
        case Js::OpCode::And_I4:
        case Js::OpCode::Or_I4:
        case Js::OpCode::Xor_I4:
        case Js::OpCode::Shl_I4:
        case Js::OpCode::Shr_I4:
            // These instructions don't generate -0, and their behavior is the same for any src that is -0 or +0
            SetSymIsUsedOnlyInBitOpsIfLastUse(instr->GetSrc1());
            SetSymIsUsedOnlyInBitOpsIfLastUse(instr->GetSrc2());
            break;
        default:
            SetSymIsNotUsedOnlyInBitOps(instr->GetSrc1());
            SetSymIsNotUsedOnlyInBitOps(instr->GetSrc2());
            break;
        }
    }
    if (trackNumberop)
    {
        // Srcs of arithmetic/conversion ops stay in the number-only set; any
        // other consumer removes them from it.
        switch (opcode)
        {
        // Instructions that can cause src values to escape the local scope have already been excluded
        case Js::OpCode::Conv_Num:
        case Js::OpCode::Div_A:
        case Js::OpCode::Mul_A:
        case Js::OpCode::Sub_A:
        case Js::OpCode::Rem_A:
        case Js::OpCode::Incr_A:
        case Js::OpCode::Decr_A:
        case Js::OpCode::Neg_A:
        case Js::OpCode::Not_A:
        case Js::OpCode::ShrU_A:
        case Js::OpCode::ShrU_I4:
        case Js::OpCode::And_A:
        case Js::OpCode::Or_A:
        case Js::OpCode::Xor_A:
        case Js::OpCode::Shl_A:
        case Js::OpCode::Shr_A:
            // These instructions don't generate -0, and their behavior is the same for any src that is -0 or +0
            SetSymIsUsedOnlyInNumberIfLastUse(instr->GetSrc1());
            SetSymIsUsedOnlyInNumberIfLastUse(instr->GetSrc2());
            break;
        default:
            SetSymIsNotUsedOnlyInNumber(instr->GetSrc1());
            SetSymIsNotUsedOnlyInNumber(instr->GetSrc2());
            break;
        }
    }
}
  4467. void
  4468. BackwardPass::TrackIntUsage(IR::Instr *const instr)
  4469. {
  4470. Assert(instr);
  4471. const bool trackNegativeZero = DoTrackNegativeZero();
  4472. const bool trackIntOverflow = DoTrackIntOverflow();
  4473. const bool trackCompoundedIntOverflow = DoTrackCompoundedIntOverflow();
  4474. const bool trackNon32BitOverflow = DoTrackNon32BitOverflow();
  4475. if(!(trackNegativeZero || trackIntOverflow || trackCompoundedIntOverflow))
  4476. {
  4477. return;
  4478. }
  4479. const Js::OpCode opcode = instr->m_opcode;
  4480. if(trackCompoundedIntOverflow && opcode == Js::OpCode::StatementBoundary && instr->AsPragmaInstr()->m_statementIndex == 0)
  4481. {
  4482. // Cannot bail out before the first statement boundary, so the range cannot extend beyond this instruction
  4483. Assert(!instr->ignoreIntOverflowInRange);
  4484. EndIntOverflowDoesNotMatterRange();
  4485. return;
  4486. }
  4487. if(!instr->IsRealInstr())
  4488. {
  4489. return;
  4490. }
  4491. StackSym *const dstSym = IR::RegOpnd::TryGetStackSym(instr->GetDst());
  4492. bool ignoreIntOverflowCandidate = false;
  4493. if(dstSym)
  4494. {
  4495. // For a dst where the def is in this block, transfer the current info into the instruction
  4496. if(trackNegativeZero && negativeZeroDoesNotMatterBySymId->TestAndClear(dstSym->m_id))
  4497. {
  4498. instr->ignoreNegativeZero = true;
  4499. if(tag == Js::DeadStorePhase && instr->HasBailOutInfo())
  4500. {
  4501. IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  4502. if(bailOutKind & IR::BailOutOnNegativeZero)
  4503. {
  4504. bailOutKind -= IR::BailOutOnNegativeZero;
  4505. if(bailOutKind)
  4506. {
  4507. instr->SetBailOutKind(bailOutKind);
  4508. }
  4509. else
  4510. {
  4511. instr->ClearBailOutInfo();
  4512. if(preOpBailOutInstrToProcess == instr)
  4513. {
  4514. preOpBailOutInstrToProcess = nullptr;
  4515. }
  4516. }
  4517. }
  4518. }
  4519. }
  4520. if(trackIntOverflow)
  4521. {
  4522. ignoreIntOverflowCandidate = !!intOverflowDoesNotMatterBySymId->TestAndClear(dstSym->m_id);
  4523. if(trackCompoundedIntOverflow)
  4524. {
  4525. instr->ignoreIntOverflowInRange = !!intOverflowDoesNotMatterInRangeBySymId->TestAndClear(dstSym->m_id);
  4526. }
  4527. }
  4528. }
  4529. // If the instruction can cause src values to escape the local scope, the srcs can't be optimized
  4530. if(OpCodeAttr::NonTempNumberSources(opcode))
  4531. {
  4532. if(trackNegativeZero)
  4533. {
  4534. SetNegativeZeroMatters(instr->GetSrc1());
  4535. SetNegativeZeroMatters(instr->GetSrc2());
  4536. }
  4537. if(trackIntOverflow)
  4538. {
  4539. SetIntOverflowMatters(instr->GetSrc1());
  4540. SetIntOverflowMatters(instr->GetSrc2());
  4541. if(trackCompoundedIntOverflow)
  4542. {
  4543. instr->ignoreIntOverflowInRange = false;
  4544. SetIntOverflowMattersInRange(instr->GetSrc1());
  4545. SetIntOverflowMattersInRange(instr->GetSrc2());
  4546. EndIntOverflowDoesNotMatterRange();
  4547. }
  4548. }
  4549. return;
  4550. }
  4551. // -0 tracking
  4552. if(trackNegativeZero)
  4553. {
  4554. switch(opcode)
  4555. {
  4556. // Instructions that can cause src values to escape the local scope have already been excluded
  4557. case Js::OpCode::FromVar:
  4558. case Js::OpCode::Conv_Prim:
  4559. Assert(dstSym);
  4560. Assert(instr->GetSrc1());
  4561. Assert(!instr->GetSrc2());
  4562. if(instr->GetDst()->IsInt32())
  4563. {
  4564. // Conversion to int32 that is either explicit, or has a bailout check ensuring that it's an int value
  4565. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
  4566. break;
  4567. }
  4568. // fall-through
  4569. default:
  4570. if(dstSym && !instr->ignoreNegativeZero)
  4571. {
  4572. // -0 matters for dst, so -0 also matters for srcs
  4573. SetNegativeZeroMatters(instr->GetSrc1());
  4574. SetNegativeZeroMatters(instr->GetSrc2());
  4575. break;
  4576. }
  4577. if(opcode == Js::OpCode::Div_A || opcode == Js::OpCode::Div_I4)
  4578. {
  4579. // src1 is being divided by src2, so -0 matters for src2
  4580. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
  4581. SetNegativeZeroMatters(instr->GetSrc2());
  4582. break;
  4583. }
  4584. // fall-through
  4585. case Js::OpCode::Incr_A:
  4586. case Js::OpCode::Decr_A:
  4587. // Adding 1 to something or subtracting 1 from something does not generate -0
  4588. case Js::OpCode::Not_A:
  4589. case Js::OpCode::And_A:
  4590. case Js::OpCode::Or_A:
  4591. case Js::OpCode::Xor_A:
  4592. case Js::OpCode::Shl_A:
  4593. case Js::OpCode::Shr_A:
  4594. case Js::OpCode::ShrU_A:
  4595. case Js::OpCode::Not_I4:
  4596. case Js::OpCode::And_I4:
  4597. case Js::OpCode::Or_I4:
  4598. case Js::OpCode::Xor_I4:
  4599. case Js::OpCode::Shl_I4:
  4600. case Js::OpCode::Shr_I4:
  4601. case Js::OpCode::ShrU_I4:
  4602. case Js::OpCode::Conv_Str:
  4603. case Js::OpCode::Coerse_Str:
  4604. case Js::OpCode::Coerse_Regex:
  4605. case Js::OpCode::Coerse_StrOrRegex:
  4606. case Js::OpCode::Conv_PrimStr:
  4607. case Js::OpCode::Add_Ptr:
  4608. // These instructions don't generate -0, and their behavior is the same for any src that is -0 or +0
  4609. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
  4610. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
  4611. break;
  4612. case Js::OpCode::Add_I4:
  4613. Assert(dstSym);
  4614. Assert(instr->GetSrc1());
  4615. Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsIntConstOpnd());
  4616. Assert(instr->GetSrc2());
  4617. Assert(instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsIntConstOpnd());
  4618. if(instr->ignoreNegativeZero ||
  4619. !(instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_wasNegativeZeroPreventedByBailout) ||
  4620. !(instr->GetSrc2()->IsRegOpnd() && instr->GetSrc2()->AsRegOpnd()->m_wasNegativeZeroPreventedByBailout))
  4621. {
  4622. // -0 does not matter for dst, or this instruction does not generate -0 since one of the srcs is not -0
  4623. // (regardless of -0 bailout checks)
  4624. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
  4625. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
  4626. break;
  4627. }
  4628. // -0 + -0 == -0. As long as one src is guaranteed to not be -0, -0 does not matter for the other src. Pick a
  4629. // src for which to ignore negative zero, based on which sym is last-use. If both syms are last-use, src2 is
  4630. // picked arbitrarily.
  4631. if(instr->GetSrc2()->IsRegOpnd() &&
  4632. !currentBlock->upwardExposedUses->Test(instr->GetSrc2()->AsRegOpnd()->m_sym->m_id))
  4633. {
  4634. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
  4635. SetNegativeZeroMatters(instr->GetSrc1());
  4636. }
  4637. else
  4638. {
  4639. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
  4640. SetNegativeZeroMatters(instr->GetSrc2());
  4641. }
  4642. break;
  4643. case Js::OpCode::Add_A:
  4644. Assert(dstSym);
  4645. Assert(instr->GetSrc1());
  4646. Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsAddrOpnd());
  4647. Assert(instr->GetSrc2());
  4648. Assert(instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsAddrOpnd());
  4649. if(instr->ignoreNegativeZero || instr->GetSrc1()->IsAddrOpnd() || instr->GetSrc2()->IsAddrOpnd())
  4650. {
  4651. // -0 does not matter for dst, or this instruction does not generate -0 since one of the srcs is not -0
  4652. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
  4653. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
  4654. break;
  4655. }
  4656. SetNegativeZeroMatters(instr->GetSrc1());
  4657. SetNegativeZeroMatters(instr->GetSrc2());
  4658. break;
  4659. case Js::OpCode::Sub_I4:
  4660. Assert(dstSym);
  4661. Assert(instr->GetSrc1());
  4662. Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsIntConstOpnd());
  4663. Assert(instr->GetSrc2());
  4664. Assert(instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsIntConstOpnd());
  4665. if(instr->ignoreNegativeZero ||
  4666. !(instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_wasNegativeZeroPreventedByBailout) ||
  4667. instr->GetSrc2()->IsIntConstOpnd() && instr->GetSrc2()->AsIntConstOpnd()->GetValue() != 0)
  4668. {
  4669. // At least one of the following is true:
  4670. // - -0 does not matter for dst
  4671. // - Src1 is not -0 (regardless of -0 bailout checks), and so this instruction cannot generate -0
  4672. // - Src2 is a nonzero int constant, and so this instruction cannot generate -0
  4673. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
  4674. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
  4675. break;
  4676. }
  4677. goto NegativeZero_Sub_Default;
  4678. case Js::OpCode::Sub_A:
  4679. Assert(dstSym);
  4680. Assert(instr->GetSrc1());
  4681. Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsAddrOpnd());
  4682. Assert(instr->GetSrc2());
  4683. Assert(instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsAddrOpnd() || instr->GetSrc2()->IsIntConstOpnd());
  4684. if(instr->ignoreNegativeZero ||
  4685. instr->GetSrc1()->IsAddrOpnd() ||
  4686. (
  4687. instr->GetSrc2()->IsAddrOpnd() &&
  4688. instr->GetSrc2()->AsAddrOpnd()->IsVar() &&
  4689. Js::TaggedInt::ToInt32(instr->GetSrc2()->AsAddrOpnd()->m_address) != 0
  4690. ))
  4691. {
  4692. // At least one of the following is true:
  4693. // - -0 does not matter for dst
  4694. // - Src1 is not -0, and so this instruction cannot generate -0
  4695. // - Src2 is a nonzero tagged int constant, and so this instruction cannot generate -0
  4696. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
  4697. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
  4698. break;
  4699. }
  4700. // fall-through
  4701. NegativeZero_Sub_Default:
  4702. // -0 - 0 == -0. As long as src1 is guaranteed to not be -0, -0 does not matter for src2.
  4703. SetNegativeZeroMatters(instr->GetSrc1());
  4704. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
  4705. break;
  4706. case Js::OpCode::BrEq_I4:
  4707. case Js::OpCode::BrTrue_I4:
  4708. case Js::OpCode::BrFalse_I4:
  4709. case Js::OpCode::BrGe_I4:
  4710. case Js::OpCode::BrUnGe_I4:
  4711. case Js::OpCode::BrGt_I4:
  4712. case Js::OpCode::BrUnGt_I4:
  4713. case Js::OpCode::BrLt_I4:
  4714. case Js::OpCode::BrUnLt_I4:
  4715. case Js::OpCode::BrLe_I4:
  4716. case Js::OpCode::BrUnLe_I4:
  4717. case Js::OpCode::BrNeq_I4:
  4718. // Int-specialized branches may prove that one of the src must be zero purely based on the int range, in which
  4719. // case they rely on prior -0 bailouts to guarantee that the src cannot be -0. So, consider that -0 matters for
  4720. // the srcs.
  4721. // fall-through
  4722. case Js::OpCode::InlineMathAtan2:
  4723. // Atan(y,x) - signs of y, x is used to determine the quadrant of the result
  4724. SetNegativeZeroMatters(instr->GetSrc1());
  4725. SetNegativeZeroMatters(instr->GetSrc2());
  4726. break;
  4727. case Js::OpCode::Expo_A:
  4728. case Js::OpCode::InlineMathPow:
  4729. // Negative zero matters for src1
  4730. // Pow( 0, <neg>) is Infinity
  4731. // Pow(-0, <neg>) is -Infinity
  4732. SetNegativeZeroMatters(instr->GetSrc1());
  4733. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
  4734. break;
  4735. case Js::OpCode::LdElemI_A:
  4736. // There is an implicit ToString on the index operand, which doesn't differentiate -0 from +0
  4737. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd());
  4738. break;
  4739. case Js::OpCode::StElemI_A:
  4740. case Js::OpCode::StElemI_A_Strict:
  4741. // There is an implicit ToString on the index operand, which doesn't differentiate -0 from +0
  4742. SetNegativeZeroDoesNotMatterIfLastUse(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd());
  4743. break;
  4744. }
  4745. }
  4746. // Int overflow tracking
  4747. if(!trackIntOverflow)
  4748. {
  4749. return;
  4750. }
  4751. switch(opcode)
  4752. {
  4753. // Instructions that can cause src values to escape the local scope have already been excluded
  4754. default:
  4755. // Unlike the -0 tracking, we use an inclusion list of op-codes for overflow tracking rather than an exclusion list.
  4756. // Assume for any instructions other than those listed above, that int-overflowed values in the srcs are
  4757. // insufficient.
  4758. ignoreIntOverflowCandidate = false;
  4759. // fall-through
  4760. case Js::OpCode::Incr_A:
  4761. case Js::OpCode::Decr_A:
  4762. case Js::OpCode::Add_A:
  4763. case Js::OpCode::Sub_A:
  4764. // The sources are not guaranteed to be converted to int32. Let the compounded int overflow tracking handle this.
  4765. SetIntOverflowMatters(instr->GetSrc1());
  4766. SetIntOverflowMatters(instr->GetSrc2());
  4767. break;
  4768. case Js::OpCode::Mul_A:
  4769. if (trackNon32BitOverflow)
  4770. {
  4771. if (ignoreIntOverflowCandidate)
  4772. instr->ignoreOverflowBitCount = 53;
  4773. }
  4774. else
  4775. {
  4776. ignoreIntOverflowCandidate = false;
  4777. }
  4778. SetIntOverflowMatters(instr->GetSrc1());
  4779. SetIntOverflowMatters(instr->GetSrc2());
  4780. break;
  4781. case Js::OpCode::Neg_A:
  4782. case Js::OpCode::Ld_A:
  4783. case Js::OpCode::Conv_Num:
  4784. case Js::OpCode::ShrU_A:
  4785. if(!ignoreIntOverflowCandidate)
  4786. {
  4787. // Int overflow matters for dst, so int overflow also matters for srcs
  4788. SetIntOverflowMatters(instr->GetSrc1());
  4789. SetIntOverflowMatters(instr->GetSrc2());
  4790. break;
  4791. }
  4792. // fall-through
  4793. case Js::OpCode::Not_A:
  4794. case Js::OpCode::And_A:
  4795. case Js::OpCode::Or_A:
  4796. case Js::OpCode::Xor_A:
  4797. case Js::OpCode::Shl_A:
  4798. case Js::OpCode::Shr_A:
  4799. // These instructions convert their srcs to int32s, and hence don't care about int-overflowed values in the srcs (as
  4800. // long as the overflowed values did not overflow the 53 bits that 'double' values have to precisely represent
  4801. // ints). ShrU_A is not included here because it converts its srcs to uint32 rather than int32, so it would make a
  4802. // difference if the srcs have int32-overflowed values.
  4803. SetIntOverflowDoesNotMatterIfLastUse(instr->GetSrc1());
  4804. SetIntOverflowDoesNotMatterIfLastUse(instr->GetSrc2());
  4805. break;
  4806. }
  4807. if(ignoreIntOverflowCandidate)
  4808. {
  4809. instr->ignoreIntOverflow = true;
  4810. }
  4811. // Compounded int overflow tracking
  4812. if(!trackCompoundedIntOverflow)
  4813. {
  4814. return;
  4815. }
  4816. if(instr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset)
  4817. {
  4818. // The forward pass may need to insert conversions with bailouts before the first instruction in the range. Since this
  4819. // instruction does not have a valid byte code offset for bailout purposes, end the current range now.
  4820. instr->ignoreIntOverflowInRange = false;
  4821. SetIntOverflowMattersInRange(instr->GetSrc1());
  4822. SetIntOverflowMattersInRange(instr->GetSrc2());
  4823. EndIntOverflowDoesNotMatterRange();
  4824. return;
  4825. }
  4826. if(ignoreIntOverflowCandidate)
  4827. {
  4828. instr->ignoreIntOverflowInRange = true;
  4829. if(dstSym)
  4830. {
  4831. dstSym->scratch.globOpt.numCompoundedAddSubUses = 0;
  4832. }
  4833. }
  4834. bool lossy = false;
  4835. switch(opcode)
  4836. {
  4837. // Instructions that can cause src values to escape the local scope have already been excluded
  4838. case Js::OpCode::Incr_A:
  4839. case Js::OpCode::Decr_A:
  4840. case Js::OpCode::Add_A:
  4841. case Js::OpCode::Sub_A:
  4842. {
  4843. if(!instr->ignoreIntOverflowInRange)
  4844. {
  4845. // Int overflow matters for dst, so int overflow also matters for srcs
  4846. SetIntOverflowMattersInRange(instr->GetSrc1());
  4847. SetIntOverflowMattersInRange(instr->GetSrc2());
  4848. break;
  4849. }
  4850. AnalysisAssert(dstSym);
  4851. // The number of compounded add/sub uses of each src is at least the number of compounded add/sub uses of the dst,
  4852. // + 1 for the current instruction
  4853. Assert(dstSym->scratch.globOpt.numCompoundedAddSubUses >= 0);
  4854. Assert(dstSym->scratch.globOpt.numCompoundedAddSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);
  4855. const int addSubUses = dstSym->scratch.globOpt.numCompoundedAddSubUses + 1;
  4856. if(addSubUses > MaxCompoundedUsesInAddSubForIgnoringIntOverflow)
  4857. {
  4858. // There are too many compounded add/sub uses of the srcs. There is a possibility that combined, the number
  4859. // eventually overflows the 53 bits that 'double' values have to precisely represent ints
  4860. instr->ignoreIntOverflowInRange = false;
  4861. SetIntOverflowMattersInRange(instr->GetSrc1());
  4862. SetIntOverflowMattersInRange(instr->GetSrc2());
  4863. break;
  4864. }
  4865. TransferCompoundedAddSubUsesToSrcs(instr, addSubUses);
  4866. break;
  4867. }
  4868. case Js::OpCode::Neg_A:
  4869. case Js::OpCode::Ld_A:
  4870. case Js::OpCode::Conv_Num:
  4871. case Js::OpCode::ShrU_A:
  4872. {
  4873. if(!instr->ignoreIntOverflowInRange)
  4874. {
  4875. // Int overflow matters for dst, so int overflow also matters for srcs
  4876. SetIntOverflowMattersInRange(instr->GetSrc1());
  4877. SetIntOverflowMattersInRange(instr->GetSrc2());
  4878. break;
  4879. }
  4880. AnalysisAssert(dstSym);
  4881. TransferCompoundedAddSubUsesToSrcs(instr, dstSym->scratch.globOpt.numCompoundedAddSubUses);
  4882. lossy = opcode == Js::OpCode::ShrU_A;
  4883. break;
  4884. }
  4885. case Js::OpCode::Not_A:
  4886. case Js::OpCode::And_A:
  4887. case Js::OpCode::Or_A:
  4888. case Js::OpCode::Xor_A:
  4889. case Js::OpCode::Shl_A:
  4890. case Js::OpCode::Shr_A:
  4891. // These instructions convert their srcs to int32s, and hence don't care about int-overflowed values in the srcs (as
  4892. // long as the overflowed values did not overflow the 53 bits that 'double' values have to precisely represent
  4893. // ints). ShrU_A is not included here because it converts its srcs to uint32 rather than int32, so it would make a
  4894. // difference if the srcs have int32-overflowed values.
  4895. instr->ignoreIntOverflowInRange = true;
  4896. lossy = true;
  4897. SetIntOverflowDoesNotMatterInRangeIfLastUse(instr->GetSrc1(), 0);
  4898. SetIntOverflowDoesNotMatterInRangeIfLastUse(instr->GetSrc2(), 0);
  4899. break;
  4900. case Js::OpCode::LdSlotArr:
  4901. case Js::OpCode::LdSlot:
  4902. {
  4903. Assert(dstSym);
  4904. Assert(!instr->GetSrc2()); // at the moment, this list contains only unary operations
  4905. if(intOverflowCurrentlyMattersInRange)
  4906. {
  4907. // These instructions will not begin a range, so just return. They don't begin a range because their initial
  4908. // value may not be available until after the instruction is processed in the forward pass.
  4909. Assert(!instr->ignoreIntOverflowInRange);
  4910. return;
  4911. }
  4912. Assert(currentBlock->intOverflowDoesNotMatterRange);
  4913. // Int overflow does not matter for dst, so the srcs need to be tracked as inputs into the region of
  4914. // instructions where int overflow does not matter. Since these instructions will not begin or end a range, they
  4915. // are tracked in separate candidates bit-vectors and once we have confirmed that they don't begin the range,
  4916. // they will be transferred to 'SymsRequiredToBe[Lossy]Int'. Furthermore, once this instruction is included in
  4917. // the range, its dst sym has to be removed. Since this instructions may not be included in the range, add the
  4918. // dst sym to the candidates bit-vectors. If they are included, the process of transferring will remove the dst
  4919. // syms and add the src syms.
  4920. // Remove the dst using the candidate bit-vectors
  4921. Assert(
  4922. !instr->ignoreIntOverflowInRange ||
  4923. currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Test(dstSym->m_id));
  4924. if(instr->ignoreIntOverflowInRange ||
  4925. currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Test(dstSym->m_id))
  4926. {
  4927. candidateSymsRequiredToBeInt->Set(dstSym->m_id);
  4928. if(currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Test(dstSym->m_id))
  4929. {
  4930. candidateSymsRequiredToBeLossyInt->Set(dstSym->m_id);
  4931. }
  4932. }
  4933. if(!instr->ignoreIntOverflowInRange)
  4934. {
  4935. // These instructions will not end a range, so just return. They may be included in the middle of a range, but
  4936. // since int overflow matters for the dst, the src does not need to be counted as an input into the range.
  4937. return;
  4938. }
  4939. instr->ignoreIntOverflowInRange = false;
  4940. // Add the src using the candidate bit-vectors. The src property sym may already be included in the range or as
  4941. // a candidate. The xor of the final bit-vector with the candidate is the set of syms required to be int,
  4942. // assuming all instructions up to and not including this one are included in the range.
  4943. const SymID srcSymId = instr->GetSrc1()->AsSymOpnd()->m_sym->m_id;
  4944. const bool srcIncluded =
  4945. !!currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Test(srcSymId) ^
  4946. !!candidateSymsRequiredToBeInt->Test(srcSymId);
  4947. const bool srcIncludedAsLossy =
  4948. srcIncluded &&
  4949. !!currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Test(srcSymId) ^
  4950. !!candidateSymsRequiredToBeLossyInt->Test(srcSymId);
  4951. const bool srcNeedsToBeLossless =
  4952. !currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Test(dstSym->m_id) ||
  4953. srcIncluded && !srcIncludedAsLossy;
  4954. if(srcIncluded)
  4955. {
  4956. if(srcIncludedAsLossy && srcNeedsToBeLossless)
  4957. {
  4958. candidateSymsRequiredToBeLossyInt->Compliment(srcSymId);
  4959. }
  4960. }
  4961. else
  4962. {
  4963. candidateSymsRequiredToBeInt->Compliment(srcSymId);
  4964. if(!srcNeedsToBeLossless)
  4965. {
  4966. candidateSymsRequiredToBeLossyInt->Compliment(srcSymId);
  4967. }
  4968. }
  4969. // These instructions will not end a range, so just return. They may be included in the middle of a range, and the
  4970. // src has been included as a candidate input into the range.
  4971. return;
  4972. }
  4973. case Js::OpCode::Mul_A:
  4974. if (trackNon32BitOverflow)
  4975. {
  4976. // MULs will always be at the start of a range. Either included in the range if int32 overflow is ignored, or excluded if int32 overflow matters. Even if int32 can be ignored, MULs can still bailout on 53-bit.
  4977. // That's why it cannot be in the middle of a range.
  4978. if (instr->ignoreIntOverflowInRange)
  4979. {
  4980. AnalysisAssert(dstSym);
  4981. Assert(dstSym->scratch.globOpt.numCompoundedAddSubUses >= 0);
  4982. Assert(dstSym->scratch.globOpt.numCompoundedAddSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);
  4983. instr->ignoreOverflowBitCount = (uint8) (53 - dstSym->scratch.globOpt.numCompoundedAddSubUses);
  4984. // We have the max number of compounded adds/subs. 32-bit overflow cannot be ignored.
  4985. if (instr->ignoreOverflowBitCount == 32)
  4986. {
  4987. instr->ignoreIntOverflowInRange = false;
  4988. }
  4989. }
  4990. SetIntOverflowMattersInRange(instr->GetSrc1());
  4991. SetIntOverflowMattersInRange(instr->GetSrc2());
  4992. break;
  4993. }
  4994. // fall-through
  4995. default:
  4996. // Unlike the -0 tracking, we use an inclusion list of op-codes for overflow tracking rather than an exclusion list.
  4997. // Assume for any instructions other than those listed above, that int-overflowed values in the srcs are
  4998. // insufficient.
  4999. instr->ignoreIntOverflowInRange = false;
  5000. SetIntOverflowMattersInRange(instr->GetSrc1());
  5001. SetIntOverflowMattersInRange(instr->GetSrc2());
  5002. break;
  5003. }
  5004. if(!instr->ignoreIntOverflowInRange)
  5005. {
  5006. EndIntOverflowDoesNotMatterRange();
  5007. return;
  5008. }
  5009. if(intOverflowCurrentlyMattersInRange)
  5010. {
  5011. // This is the last instruction in a new range of instructions where int overflow does not matter
  5012. intOverflowCurrentlyMattersInRange = false;
  5013. IR::Instr *const boundaryInstr = IR::PragmaInstr::New(Js::OpCode::NoIntOverflowBoundary, 0, instr->m_func);
  5014. boundaryInstr->SetByteCodeOffset(instr);
  5015. currentBlock->InsertInstrAfter(boundaryInstr, instr);
  5016. currentBlock->intOverflowDoesNotMatterRange =
  5017. IntOverflowDoesNotMatterRange::New(
  5018. globOpt->alloc,
  5019. instr,
  5020. boundaryInstr,
  5021. currentBlock->intOverflowDoesNotMatterRange);
  5022. }
  5023. else
  5024. {
  5025. Assert(currentBlock->intOverflowDoesNotMatterRange);
  5026. // Extend the current range of instructions where int overflow does not matter, to include this instruction. We also need to
  5027. // include the tracked syms for instructions that have not yet been included in the range, which are tracked in the range's
  5028. // bit-vector. 'SymsRequiredToBeInt' will contain both the dst and src syms of instructions not yet included in the range;
  5029. // the xor will remove the dst syms and add the src syms.
  5030. currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Xor(candidateSymsRequiredToBeInt);
  5031. currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Xor(candidateSymsRequiredToBeLossyInt);
  5032. candidateSymsRequiredToBeInt->ClearAll();
  5033. candidateSymsRequiredToBeLossyInt->ClearAll();
  5034. currentBlock->intOverflowDoesNotMatterRange->SetFirstInstr(instr);
  5035. }
  5036. // Track syms that are inputs into the range based on the current instruction, which was just added to the range. The dst
  5037. // sym is obtaining a new value so it isn't required to be an int at the start of the range, but the srcs are.
  5038. if(dstSym)
  5039. {
  5040. currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Clear(dstSym->m_id);
  5041. currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Clear(dstSym->m_id);
  5042. }
  5043. IR::Opnd *const srcs[] = { instr->GetSrc1(), instr->GetSrc2() };
  5044. for(int i = 0; i < sizeof(srcs) / sizeof(srcs[0]) && srcs[i]; ++i)
  5045. {
  5046. StackSym *srcSym = IR::RegOpnd::TryGetStackSym(srcs[i]);
  5047. if(!srcSym)
  5048. {
  5049. continue;
  5050. }
  5051. if(currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->TestAndSet(srcSym->m_id))
  5052. {
  5053. if(!lossy)
  5054. {
  5055. currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Clear(srcSym->m_id);
  5056. }
  5057. }
  5058. else if(lossy)
  5059. {
  5060. currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Set(srcSym->m_id);
  5061. }
  5062. }
  5063. // If the last instruction included in the range is a MUL, we have to end the range.
  5064. // MULs with ignoreIntOverflow can still bailout on 53-bit overflow, so they cannot be in the middle of a range
  5065. if (trackNon32BitOverflow && instr->m_opcode == Js::OpCode::Mul_A)
  5066. {
  5067. // range would have ended already if int32 overflow matters
  5068. Assert(instr->ignoreIntOverflowInRange && instr->ignoreOverflowBitCount != 32);
  5069. EndIntOverflowDoesNotMatterRange();
  5070. }
  5071. }
  5072. void
  5073. BackwardPass::SetNegativeZeroDoesNotMatterIfLastUse(IR::Opnd *const opnd)
  5074. {
  5075. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  5076. if(stackSym && !currentBlock->upwardExposedUses->Test(stackSym->m_id))
  5077. {
  5078. negativeZeroDoesNotMatterBySymId->Set(stackSym->m_id);
  5079. }
  5080. }
  5081. void
  5082. BackwardPass::SetNegativeZeroMatters(IR::Opnd *const opnd)
  5083. {
  5084. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  5085. if(stackSym)
  5086. {
  5087. negativeZeroDoesNotMatterBySymId->Clear(stackSym->m_id);
  5088. }
  5089. }
  5090. void
  5091. BackwardPass::SetIntOverflowDoesNotMatterIfLastUse(IR::Opnd *const opnd)
  5092. {
  5093. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  5094. if(stackSym && !currentBlock->upwardExposedUses->Test(stackSym->m_id))
  5095. {
  5096. intOverflowDoesNotMatterBySymId->Set(stackSym->m_id);
  5097. }
  5098. }
  5099. void
  5100. BackwardPass::SetIntOverflowMatters(IR::Opnd *const opnd)
  5101. {
  5102. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  5103. if(stackSym)
  5104. {
  5105. intOverflowDoesNotMatterBySymId->Clear(stackSym->m_id);
  5106. }
  5107. }
  5108. bool
  5109. BackwardPass::SetIntOverflowDoesNotMatterInRangeIfLastUse(IR::Opnd *const opnd, const int addSubUses)
  5110. {
  5111. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  5112. return stackSym && SetIntOverflowDoesNotMatterInRangeIfLastUse(stackSym, addSubUses);
  5113. }
  5114. bool
  5115. BackwardPass::SetIntOverflowDoesNotMatterInRangeIfLastUse(StackSym *const stackSym, const int addSubUses)
  5116. {
  5117. Assert(stackSym);
  5118. Assert(addSubUses >= 0);
  5119. Assert(addSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);
  5120. if(currentBlock->upwardExposedUses->Test(stackSym->m_id))
  5121. {
  5122. return false;
  5123. }
  5124. intOverflowDoesNotMatterInRangeBySymId->Set(stackSym->m_id);
  5125. stackSym->scratch.globOpt.numCompoundedAddSubUses = addSubUses;
  5126. return true;
  5127. }
  5128. void
  5129. BackwardPass::SetIntOverflowMattersInRange(IR::Opnd *const opnd)
  5130. {
  5131. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  5132. if(stackSym)
  5133. {
  5134. intOverflowDoesNotMatterInRangeBySymId->Clear(stackSym->m_id);
  5135. }
  5136. }
  5137. void
  5138. BackwardPass::TransferCompoundedAddSubUsesToSrcs(IR::Instr *const instr, const int addSubUses)
  5139. {
  5140. Assert(instr);
  5141. Assert(addSubUses >= 0);
  5142. Assert(addSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);
  5143. IR::Opnd *const srcs[] = { instr->GetSrc1(), instr->GetSrc2() };
  5144. for(int i = 0; i < _countof(srcs) && srcs[i]; ++i)
  5145. {
  5146. StackSym *const srcSym = IR::RegOpnd::TryGetStackSym(srcs[i]);
  5147. if(!srcSym)
  5148. {
  5149. // Int overflow tracking is only done for StackSyms in RegOpnds. Int overflow matters for the src, so it is
  5150. // guaranteed to be in the int range at this point if the instruction is int-specialized.
  5151. continue;
  5152. }
  5153. Assert(srcSym->scratch.globOpt.numCompoundedAddSubUses >= 0);
  5154. Assert(srcSym->scratch.globOpt.numCompoundedAddSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);
  5155. if(SetIntOverflowDoesNotMatterInRangeIfLastUse(srcSym, addSubUses))
  5156. {
  5157. // This is the last use of the src
  5158. continue;
  5159. }
  5160. if(intOverflowDoesNotMatterInRangeBySymId->Test(srcSym->m_id))
  5161. {
  5162. // Since a src may be compounded through different chains of add/sub instructions, the greater number must be
  5163. // preserved
  5164. srcSym->scratch.globOpt.numCompoundedAddSubUses =
  5165. max(srcSym->scratch.globOpt.numCompoundedAddSubUses, addSubUses);
  5166. }
  5167. else
  5168. {
  5169. // Int overflow matters for the src, so it is guaranteed to be in the int range at this point if the instruction is
  5170. // int-specialized
  5171. }
  5172. }
  5173. }
// Closes the current "int overflow does not matter" range (if one is open) in the
// backward pass. Single-instruction ranges are discarded entirely (the boundary
// pragma instruction is removed); longer ranges are capped with a
// NoIntOverflowBoundary pragma before their first instruction. Candidate sym
// bit-vectors and the per-range sym tracking are reset afterwards, since syms are
// not tracked across distinct ranges.
void
BackwardPass::EndIntOverflowDoesNotMatterRange()
{
    if(intOverflowCurrentlyMattersInRange)
    {
        // No open range to end.
        return;
    }
    intOverflowCurrentlyMattersInRange = true;
    if(currentBlock->intOverflowDoesNotMatterRange->FirstInstr()->m_next ==
        currentBlock->intOverflowDoesNotMatterRange->LastInstr())
    {
        // Don't need a range for a single-instruction range
        // (FirstInstr directly precedes the trailing boundary instr, so the range
        // contains no real instructions). Unlink the range, remove its boundary
        // pragma instruction, and free it.
        IntOverflowDoesNotMatterRange *const rangeToDelete = currentBlock->intOverflowDoesNotMatterRange;
        currentBlock->intOverflowDoesNotMatterRange = currentBlock->intOverflowDoesNotMatterRange->Next();
        currentBlock->RemoveInstr(rangeToDelete->LastInstr());
        rangeToDelete->Delete(globOpt->alloc);
    }
    else
    {
        // End the current range of instructions where int overflow does not matter
        // by inserting a boundary pragma just before the range's first instruction,
        // and making that pragma the new first instruction of the range.
        IR::Instr *const boundaryInstr =
            IR::PragmaInstr::New(
                Js::OpCode::NoIntOverflowBoundary,
                0,
                currentBlock->intOverflowDoesNotMatterRange->FirstInstr()->m_func);
        boundaryInstr->SetByteCodeOffset(currentBlock->intOverflowDoesNotMatterRange->FirstInstr());
        currentBlock->InsertInstrBefore(boundaryInstr, currentBlock->intOverflowDoesNotMatterRange->FirstInstr());
        currentBlock->intOverflowDoesNotMatterRange->SetFirstInstr(boundaryInstr);
#if DBG_DUMP
        if(PHASE_TRACE(Js::TrackCompoundedIntOverflowPhase, func->GetJnFunction()))
        {
            wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            Output::Print(
                L"TrackCompoundedIntOverflow - Top function: %s (%s), Phase: %s, Block: %u\n",
                func->GetJnFunction()->GetDisplayName(),
                func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
                Js::PhaseNames[Js::BackwardPhase],
                currentBlock->GetBlockNum());
            Output::Print(L"    Input syms to be int-specialized (lossless): ");
            candidateSymsRequiredToBeInt->Minus(
                currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt(),
                currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()); // candidate bit-vectors are cleared below anyway
            candidateSymsRequiredToBeInt->Dump();
            Output::Print(L"    Input syms to be converted to int (lossy): ");
            currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Dump();
            Output::Print(L"    First instr: ");
            currentBlock->intOverflowDoesNotMatterRange->FirstInstr()->m_next->Dump();
            Output::Flush();
        }
#endif
    }
    // Reset candidates for the next range
    candidateSymsRequiredToBeInt->ClearAll();
    candidateSymsRequiredToBeLossyInt->ClearAll();
    // Syms are not tracked across different ranges of instructions where int overflow does not matter, since instructions
    // between the ranges may bail out. The value of the dst of an int operation where overflow is ignored is incorrect until
    // the last use of that sym is converted to int. If the int operation and the last use of the sym are in different ranges
    // and an instruction between the ranges bails out, other inputs into the second range are no longer guaranteed to be ints,
    // so the incorrect value of the sym may be used in non-int operations.
    intOverflowDoesNotMatterInRangeBySymId->ClearAll();
}
  5235. void
  5236. BackwardPass::TrackFloatSymEquivalence(IR::Instr *const instr)
  5237. {
  5238. /*
  5239. This function determines sets of float-specialized syms where any two syms in a set may have the same value number at some
  5240. point in the function. Conversely, if two float-specialized syms are not in the same set, it guarantees that those two syms
  5241. will never have the same value number. These sets are referred to as equivalence classes here.
  5242. The equivalence class for a sym is used to determine whether a bailout FromVar generating a float value for the sym needs to
  5243. bail out on any non-number value. For instance, for syms s1 and s5 in an equivalence class (say we have s5 = s1 at some
  5244. point), if there's a FromVar that generates a float value for s1 but only bails out on strings or non-primitives, and s5 is
  5245. returned from the function, it has to be ensured that s5 is not converted to Var. If the source of the FromVar was null, the
  5246. FromVar would not have bailed out, and s1 and s5 would have the value +0. When s5 is returned, we need to return null and
  5247. not +0, so the equivalence class is used to determine that since s5 requires a bailout on any non-number value, so does s1.
  5248. The tracking is very conservative because the bit that says "I require bailout on any non-number value" is on the sym itself
  5249. (referred to as non-number bailout bit below).
  5250. Data:
  5251. - BackwardPass::floatSymEquivalenceMap
  5252. - hash table mapping a float sym ID to its equivalence class
  5253. - FloatSymEquivalenceClass
  5254. - bit vector of float sym IDs that are in the equivalence class
  5255. - one non-number bailout bit for all syms in the equivalence class
  5256. Algorithm:
  5257. - In a loop prepass or when not in loop:
  5258. - For a float sym transfer (s0.f = s1.f), add both syms to an equivalence class (set the syms in a bit vector)
  5259. - If either sym requires bailout on any non-number value, set the equivalence class' non-number bailout bit
  5260. - If one of the syms is already in an equivalence class, merge the two equivalence classes by OR'ing the two bit vectors
  5261. and the non-number bailout bit.
  5262. - Note that for functions with a loop, dependency tracking is done using equivalence classes and that information is not
  5263. transferred back into each sym's non-number bailout bit
  5264. - In a loop non-prepass or when not in loop, for a FromVar instruction that requires bailout only on strings and
  5265. non-primitives:
  5266. - If the destination float sym's non-number bailout bit is set, or the sym is in an equivalence class whose non-number
  5267. bailout bit is set, change the bailout to bail out on any non-number value
  5268. The result is that if a float-specialized sym's value is used in a way in which it would be invalid to use the float value
  5269. through any other float-specialized sym that acquires the value, the FromVar generating the float value will be modified to
  5270. bail out on any non-number value.
  5271. */
  5272. Assert(instr);
  5273. if(tag != Js::DeadStorePhase || instr->GetSrc2() || !instr->m_func->hasBailout)
  5274. {
  5275. return;
  5276. }
  5277. if(!instr->GetDst() || !instr->GetDst()->IsRegOpnd())
  5278. {
  5279. return;
  5280. }
  5281. const auto dst = instr->GetDst()->AsRegOpnd()->m_sym;
  5282. if(!dst->IsFloat64())
  5283. {
  5284. return;
  5285. }
  5286. if(!instr->GetSrc1() || !instr->GetSrc1()->IsRegOpnd())
  5287. {
  5288. return;
  5289. }
  5290. const auto src = instr->GetSrc1()->AsRegOpnd()->m_sym;
  5291. if(OpCodeAttr::NonIntTransfer(instr->m_opcode) && (!currentBlock->loop || IsPrePass()))
  5292. {
  5293. Assert(src->IsFloat64()); // dst is specialized, and since this is a float transfer, src must be specialized too
  5294. if(dst == src)
  5295. {
  5296. return;
  5297. }
  5298. if(!func->m_fg->hasLoop)
  5299. {
  5300. // Special case for functions with no loops, since there can only be in-order dependencies. Just merge the two
  5301. // non-number bailout bits and put the result in the source.
  5302. if(dst->m_requiresBailOnNotNumber)
  5303. {
  5304. src->m_requiresBailOnNotNumber = true;
  5305. }
  5306. return;
  5307. }
  5308. FloatSymEquivalenceClass *dstEquivalenceClass, *srcEquivalenceClass;
  5309. const bool dstHasEquivalenceClass = floatSymEquivalenceMap->TryGetValue(dst->m_id, &dstEquivalenceClass);
  5310. const bool srcHasEquivalenceClass = floatSymEquivalenceMap->TryGetValue(src->m_id, &srcEquivalenceClass);
  5311. if(!dstHasEquivalenceClass)
  5312. {
  5313. if(srcHasEquivalenceClass)
  5314. {
  5315. // Just add the destination into the source's equivalence class
  5316. srcEquivalenceClass->Set(dst);
  5317. floatSymEquivalenceMap->Add(dst->m_id, srcEquivalenceClass);
  5318. return;
  5319. }
  5320. dstEquivalenceClass = JitAnew(tempAlloc, FloatSymEquivalenceClass, tempAlloc);
  5321. dstEquivalenceClass->Set(dst);
  5322. floatSymEquivalenceMap->Add(dst->m_id, dstEquivalenceClass);
  5323. }
  5324. if(!srcHasEquivalenceClass)
  5325. {
  5326. // Just add the source into the destination's equivalence class
  5327. dstEquivalenceClass->Set(src);
  5328. floatSymEquivalenceMap->Add(src->m_id, dstEquivalenceClass);
  5329. return;
  5330. }
  5331. if(dstEquivalenceClass == srcEquivalenceClass)
  5332. {
  5333. return;
  5334. }
  5335. Assert(!dstEquivalenceClass->Bv()->Test(src->m_id));
  5336. Assert(!srcEquivalenceClass->Bv()->Test(dst->m_id));
  5337. // Merge the two equivalence classes. The source's equivalence class is typically smaller, so it's merged into the
  5338. // destination's equivalence class. To save space and prevent a potential explosion of bit vector size,
  5339. // 'floatSymEquivalenceMap' is updated for syms in the source's equivalence class to map to the destination's now merged
  5340. // equivalence class, and the source's equivalence class is discarded.
  5341. dstEquivalenceClass->Or(srcEquivalenceClass);
  5342. FOREACH_BITSET_IN_SPARSEBV(id, srcEquivalenceClass->Bv())
  5343. {
  5344. floatSymEquivalenceMap->Item(id, dstEquivalenceClass);
  5345. } NEXT_BITSET_IN_SPARSEBV;
  5346. JitAdelete(tempAlloc, srcEquivalenceClass);
  5347. return;
  5348. }
  5349. // Not a float transfer, and non-prepass (not necessarily in a loop)
  5350. if(!instr->HasBailOutInfo() || instr->GetBailOutKind() != IR::BailOutPrimitiveButString)
  5351. {
  5352. return;
  5353. }
  5354. Assert(instr->m_opcode == Js::OpCode::FromVar);
  5355. // If either the destination or its equivalence class says it requires bailout on any non-number value, adjust the bailout
  5356. // kind on the instruction. Both are checked because in functions without loops, equivalence tracking is not done and only
  5357. // the sym's non-number bailout bit will have the information, and in functions with loops, equivalence tracking is done
  5358. // throughout the function and checking just the sym's non-number bailout bit is insufficient.
  5359. FloatSymEquivalenceClass *dstEquivalenceClass;
  5360. if(dst->m_requiresBailOnNotNumber ||
  5361. floatSymEquivalenceMap->TryGetValue(dst->m_id, &dstEquivalenceClass) && dstEquivalenceClass->RequiresBailOnNotNumber())
  5362. {
  5363. instr->SetBailOutKind(IR::BailOutNumberOnly);
  5364. }
  5365. }
bool
BackwardPass::ProcessDef(IR::Opnd * opnd)
{
    // Processes the definition (dst) operand of the current instruction during the
    // backward walk: updates liveness/byte-code-restore tracking for the defined sym,
    // and attempts to remove the instruction as a dead store.
    // Returns true if the instruction was removed as a dead store, false otherwise.
    BOOLEAN isJITOptimizedReg = false;
    Sym * sym;
    if (opnd->IsRegOpnd())
    {
        sym = opnd->AsRegOpnd()->m_sym;
        isJITOptimizedReg = opnd->GetIsJITOptimizedReg();
        if (!IsCollectionPass())
        {
            this->InvalidateCloneStrCandidate(opnd);
        }
    }
    else if (opnd->IsSymOpnd())
    {
        sym = opnd->AsSymOpnd()->m_sym;
        isJITOptimizedReg = opnd->GetIsJITOptimizedReg();
    }
    else
    {
        // An indir dst also reads its base/index operands; process those as uses.
        if (opnd->IsIndirOpnd())
        {
            this->ProcessUse(opnd);
        }
        return false;
    }
    BasicBlock * block = this->currentBlock;
    BOOLEAN isUsed = true;
    BOOLEAN keepSymLiveForException = false;
    BOOLEAN keepVarSymLiveForException = false;
    IR::Instr * instr = this->currentInstr;
    Assert(!instr->IsByteCodeUsesInstr());
    if (sym->IsPropertySym())
    {
        if(IsCollectionPass())
        {
            return false;
        }
        Assert((block->fieldHoistCandidates != nullptr) == this->DoFieldHoistCandidates());
        if (block->fieldHoistCandidates)
        {
            // A def of the field kills it as a hoist candidate.
            block->fieldHoistCandidates->Clear(sym->m_id);
        }
        PropertySym *propertySym = sym->AsPropertySym();
        if (this->DoDeadStoreSlots())
        {
            if (propertySym->m_fieldKind == PropertyKindLocalSlots || propertySym->m_fieldKind == PropertyKindSlots)
            {
                // TestAndSet returns the previous bit: if the slot was already a
                // dead-store candidate, this def is unused.
                isUsed = !block->slotDeadStoreCandidates->TestAndSet(propertySym->m_id);
                // we should not do any dead slots in asmjs loop body
                Assert(!(this->func->GetJnFunction()->GetIsAsmJsFunction() && this->func->IsLoopBody() && !isUsed));
                Assert(isUsed || !block->upwardExposedUses->Test(propertySym->m_id));
            }
        }
        // If the field isn't upward-exposed past this def, the operand can be marked dead.
        if (!block->upwardExposedFields->TestAndClear(propertySym->m_id) && this->DoSetDead())
        {
            opnd->SetIsDead();
        }
        // The property sym's base object is still read by this def.
        ProcessStackSymUse(propertySym->m_stackSym, isJITOptimizedReg);
        if (tag == Js::BackwardPhase)
        {
            if (opnd->AsSymOpnd()->IsPropertySymOpnd())
            {
                this->globOpt->PreparePropertySymOpndForTypeCheckSeq(opnd->AsPropertySymOpnd(), instr, this->currentBlock->loop);
            }
        }
        if (opnd->AsSymOpnd()->IsPropertySymOpnd())
        {
            this->ProcessPropertySymOpndUse(opnd->AsPropertySymOpnd());
        }
    }
    else
    {
        Assert(!instr->IsByteCodeUsesInstr());
        if (this->DoByteCodeUpwardExposedUsed())
        {
            if (sym->AsStackSym()->HasByteCodeRegSlot())
            {
                StackSym * varSym = sym->AsStackSym();
                if (varSym->IsTypeSpec())
                {
                    // It has to have a var version for byte code regs
                    varSym = varSym->GetVarEquivSym(nullptr);
                }
                if (this->currentRegion)
                {
                    // Write-through syms in a try region must stay live so bailouts
                    // can restore their first def (see comment below).
                    keepSymLiveForException = this->CheckWriteThroughSymInRegion(this->currentRegion, sym->AsStackSym());
                    keepVarSymLiveForException = this->CheckWriteThroughSymInRegion(this->currentRegion, varSym);
                }
                if (!isJITOptimizedReg)
                {
                    if (!DoDeadStore(this->func, sym->AsStackSym()))
                    {
                        // Don't deadstore the bytecodereg sym, so that we could do write to get the locals inspection
                        if (opnd->IsRegOpnd())
                        {
                            opnd->AsRegOpnd()->m_dontDeadStore = true;
                        }
                    }
                    // write through symbols should not be cleared from the byteCodeUpwardExposedUsed BV upon defs in the Try region:
                    //      try
                    //          x =
                    //          <bailout> <-- this bailout should restore x from its first def. This would not happen if x is cleared
                    //                        from byteCodeUpwardExposedUsed when we process its second def
                    //          <exception>
                    //          x =
                    //      catch
                    //          = x
                    if (!keepVarSymLiveForException)
                    {
                        // Always track the sym use on the var sym.
                        block->byteCodeUpwardExposedUsed->Clear(varSym->m_id);
#if DBG
                        // TODO: We can only track first level function stack syms right now
                        if (varSym->GetByteCodeFunc() == this->func)
                        {
                            block->byteCodeRestoreSyms[varSym->GetByteCodeRegSlot()] = nullptr;
                        }
#endif
                    }
                }
            }
        }
        if(IsCollectionPass())
        {
            return false;
        }
        // Don't care about property sym for mark temps
        if (opnd->IsRegOpnd())
        {
            this->MarkTemp(sym->AsStackSym());
        }
        // For a copy (Ld_A) whose dst has upward-exposed fields, the src's fields
        // become upward-exposed through the copy.
        if (this->tag == Js::BackwardPhase &&
            instr->m_opcode == Js::OpCode::Ld_A &&
            instr->GetSrc1()->IsRegOpnd() &&
            block->upwardExposedFields->Test(sym->m_id))
        {
            block->upwardExposedFields->Set(instr->GetSrc1()->AsRegOpnd()->m_sym->m_id);
        }
        if (!keepSymLiveForException)
        {
            // This def ends the sym's upward-exposed range; remember whether it was used.
            isUsed = block->upwardExposedUses->TestAndClear(sym->m_id);
        }
    }
    if (isUsed || !this->DoDeadStore())
    {
        return false;
    }
    // FromVar on a primitive value has no side-effects
    // TODO: There may be more cases where FromVars can be dead-stored, such as cases where they have a bailout that would bail
    // out on non-primitive vars, thereby causing no side effects anyway. However, it needs to be ensured that no assumptions
    // that depend on the bailout are made later in the function.
    // Special case StFld for trackable fields
    bool hasSideEffects = instr->HasAnySideEffects()
        && instr->m_opcode != Js::OpCode::StFld
        && instr->m_opcode != Js::OpCode::StRootFld
        && instr->m_opcode != Js::OpCode::StFldStrict
        && instr->m_opcode != Js::OpCode::StRootFldStrict;
    if (this->IsPrePass() || hasSideEffects)
    {
        return false;
    }
    if (opnd->IsRegOpnd() && opnd->AsRegOpnd()->m_dontDeadStore)
    {
        return false;
    }
    if (instr->HasBailOutInfo())
    {
        // A bailout inserted for aggressive or lossy int type specialization causes assumptions to be made on the value of
        // the instruction's destination later on, as though the bailout did not happen. If the value is an int constant and
        // that value is propagated forward, it can cause the bailout instruction to become a dead store and be removed,
        // thereby invalidating the assumptions made. Or for lossy int type specialization, the lossy conversion to int32
        // may have side effects and so cannot be dead-store-removed. As one way of solving that problem, bailout
        // instructions resulting from aggressive or lossy int type spec are not dead-stored.
        const auto bailOutKind = instr->GetBailOutKind();
        if(bailOutKind & IR::BailOutOnResultConditions)
        {
            return false;
        }
        switch(bailOutKind & ~IR::BailOutKindBits)
        {
            case IR::BailOutIntOnly:
            case IR::BailOutNumberOnly:
            case IR::BailOutExpectingInteger:
            case IR::BailOutPrimitiveButString:
            case IR::BailOutExpectingString:
            case IR::BailOutOnNotPrimitive:
            case IR::BailOutFailedInlineTypeCheck:
            case IR::BailOutOnFloor:
            case IR::BailOnModByPowerOf2:
            case IR::BailOnDivResultNotInt:
            case IR::BailOnIntMin:
                return false;
        }
    }
    // Dead store
    DeadStoreInstr(instr);
    return true;
}
bool
BackwardPass::DeadStoreInstr(IR::Instr *instr)
{
    // Removes 'instr' from the flow graph as a dead store, first folding the byte
    // code syms it referenced into the bailout-restore tracking. Always returns true.
    BasicBlock * block = this->currentBlock;
#if DBG_DUMP
    if (this->IsTraceEnabled())
    {
        Output::Print(L"Deadstore instr: ");
        instr->Dump();
    }
    this->numDeadStore++;
#endif
    // Before we remove the dead store, we need to track the byte code uses
    if (this->DoByteCodeUpwardExposedUsed())
    {
#if DBG
        // Snapshot the BV so we can see exactly which syms this instruction added.
        BVSparse<JitArenaAllocator> tempBv(this->tempAlloc);
        tempBv.Copy(this->currentBlock->byteCodeUpwardExposedUsed);
#endif
        PropertySym *unusedPropertySym = nullptr;
        GlobOpt::TrackByteCodeSymUsed(instr, this->currentBlock->byteCodeUpwardExposedUsed, &unusedPropertySym);
#if DBG
        // For each sym newly marked upward-exposed, record it as the restore sym
        // for its byte code register slot (debug-only bookkeeping/validation).
        BVSparse<JitArenaAllocator> tempBv2(this->tempAlloc);
        tempBv2.Copy(this->currentBlock->byteCodeUpwardExposedUsed);
        tempBv2.Minus(&tempBv);
        FOREACH_BITSET_IN_SPARSEBV(symId, &tempBv2)
        {
            StackSym * stackSym = this->func->m_symTable->FindStackSym(symId);
            Assert(stackSym->GetType() == TyVar);
            // TODO: We can only track first level function stack syms right now
            if (stackSym->GetByteCodeFunc() == this->func)
            {
                Js::RegSlot byteCodeRegSlot = stackSym->GetByteCodeRegSlot();
                Assert(byteCodeRegSlot != Js::Constants::NoRegister);
                if (this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] != stackSym)
                {
                    AssertMsg(this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] == nullptr,
                        "Can't have two active lifetime for the same byte code register");
                    this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] = stackSym;
                }
            }
        }
        NEXT_BITSET_IN_SPARSEBV;
#endif
    }
    // If this is a pre-op bailout instruction, we may have saved it for bailout info processing. It's being removed now, so no
    // need to process the bailout info anymore.
    Assert(!preOpBailOutInstrToProcess || preOpBailOutInstrToProcess == instr);
    preOpBailOutInstrToProcess = nullptr;
#if DBG
    if (this->DoMarkTempObjectVerify())
    {
        this->currentBlock->tempObjectVerifyTracker->NotifyDeadStore(instr, this);
    }
#endif
    block->RemoveInstr(instr);
    return true;
}
  5624. void
  5625. BackwardPass::ProcessTransfers(IR::Instr * instr)
  5626. {
  5627. if (this->tag == Js::DeadStorePhase &&
  5628. this->currentBlock->upwardExposedFields &&
  5629. instr->m_opcode == Js::OpCode::Ld_A &&
  5630. instr->GetDst()->GetStackSym() &&
  5631. !instr->GetDst()->GetStackSym()->IsTypeSpec() &&
  5632. instr->GetDst()->GetStackSym()->HasObjectInfo() &&
  5633. instr->GetSrc1() &&
  5634. instr->GetSrc1()->GetStackSym() &&
  5635. !instr->GetSrc1()->GetStackSym()->IsTypeSpec() &&
  5636. instr->GetSrc1()->GetStackSym()->HasObjectInfo())
  5637. {
  5638. StackSym * dstStackSym = instr->GetDst()->GetStackSym();
  5639. PropertySym * dstPropertySym = dstStackSym->GetObjectInfo()->m_propertySymList;
  5640. BVSparse<JitArenaAllocator> transferFields(this->tempAlloc);
  5641. while (dstPropertySym != nullptr)
  5642. {
  5643. Assert(dstPropertySym->m_stackSym == dstStackSym);
  5644. transferFields.Set(dstPropertySym->m_id);
  5645. dstPropertySym = dstPropertySym->m_nextInStackSymList;
  5646. }
  5647. StackSym * srcStackSym = instr->GetSrc1()->GetStackSym();
  5648. PropertySym * srcPropertySym = srcStackSym->GetObjectInfo()->m_propertySymList;
  5649. BVSparse<JitArenaAllocator> equivFields(this->tempAlloc);
  5650. while (srcPropertySym != nullptr && !transferFields.IsEmpty())
  5651. {
  5652. Assert(srcPropertySym->m_stackSym == srcStackSym);
  5653. if (srcPropertySym->m_propertyEquivSet)
  5654. {
  5655. equivFields.And(&transferFields, srcPropertySym->m_propertyEquivSet);
  5656. if (!equivFields.IsEmpty())
  5657. {
  5658. transferFields.Minus(&equivFields);
  5659. this->currentBlock->upwardExposedFields->Set(srcPropertySym->m_id);
  5660. }
  5661. }
  5662. srcPropertySym = srcPropertySym->m_nextInStackSymList;
  5663. }
  5664. }
  5665. }
  5666. void
  5667. BackwardPass::ProcessFieldKills(IR::Instr * instr)
  5668. {
  5669. if (this->currentBlock->upwardExposedFields)
  5670. {
  5671. this->globOpt->ProcessFieldKills(instr, this->currentBlock->upwardExposedFields, false);
  5672. }
  5673. this->ClearBucketsOnFieldKill(instr, currentBlock->stackSymToFinalType);
  5674. this->ClearBucketsOnFieldKill(instr, currentBlock->stackSymToGuardedProperties);
  5675. }
  5676. template<typename T>
  5677. void
  5678. BackwardPass::ClearBucketsOnFieldKill(IR::Instr *instr, HashTable<T> *table)
  5679. {
  5680. if (table)
  5681. {
  5682. if (instr->UsesAllFields())
  5683. {
  5684. table->ClearAll();
  5685. }
  5686. else
  5687. {
  5688. IR::Opnd *dst = instr->GetDst();
  5689. if (dst && dst->IsRegOpnd())
  5690. {
  5691. table->Clear(dst->AsRegOpnd()->m_sym->m_id);
  5692. }
  5693. }
  5694. }
  5695. }
  5696. void
  5697. BackwardPass::ProcessFieldHoistKills(IR::Instr * instr)
  5698. {
  5699. // The backward pass, we optimistically will not kill on a[] access
  5700. // So that the field hoist candidate will be more then what can be hoisted
  5701. // The root prepass will figure out the exact set of field that is hoisted
  5702. this->globOpt->ProcessFieldKills(instr, this->currentBlock->fieldHoistCandidates, false);
  5703. switch (instr->m_opcode)
  5704. {
  5705. case Js::OpCode::BrOnHasProperty:
  5706. case Js::OpCode::BrOnNoProperty:
  5707. // Should not hoist pass these instructions
  5708. this->currentBlock->fieldHoistCandidates->Clear(instr->GetSrc1()->AsSymOpnd()->m_sym->m_id);
  5709. break;
  5710. }
  5711. }
  5712. bool
  5713. BackwardPass::TrackNoImplicitCallInlinees(IR::Instr *instr)
  5714. {
  5715. if (this->tag != Js::DeadStorePhase || this->IsPrePass())
  5716. {
  5717. return false;
  5718. }
  5719. if (instr->HasBailOutInfo()
  5720. || OpCodeAttr::CallInstr(instr->m_opcode)
  5721. || instr->CallsAccessor()
  5722. || GlobOpt::MayNeedBailOnImplicitCall(instr, nullptr, nullptr)
  5723. || instr->m_opcode == Js::OpCode::LdHeapArguments
  5724. || instr->m_opcode == Js::OpCode::LdLetHeapArguments
  5725. || instr->m_opcode == Js::OpCode::LdHeapArgsCached
  5726. || instr->m_opcode == Js::OpCode::LdLetHeapArgsCached
  5727. || instr->m_opcode == Js::OpCode::LdFuncExpr)
  5728. {
  5729. // This func has instrs with bailouts or implicit calls
  5730. Assert(instr->m_opcode != Js::OpCode::InlineeStart);
  5731. instr->m_func->SetHasImplicitCallsOnSelfAndParents();
  5732. return false;
  5733. }
  5734. if (instr->m_opcode == Js::OpCode::InlineeStart)
  5735. {
  5736. if (!instr->GetSrc1())
  5737. {
  5738. Assert(instr->m_func->m_hasInlineArgsOpt);
  5739. return false;
  5740. }
  5741. return this->ProcessInlineeStart(instr);
  5742. }
  5743. return false;
  5744. }
bool
BackwardPass::ProcessInlineeStart(IR::Instr* inlineeStart)
{
    // Attempts to strip the inlining overhead (ArgOut chain, meta args, the
    // InlineeStart itself) for an inlinee. Returns true if overhead was removed.
    inlineeStart->m_func->SetFirstArgOffset(inlineeStart);
    IR::Instr* startCallInstr = nullptr;
    bool noImplicitCallsInInlinee = false;
    // Inlinee has no bailouts or implicit calls. Get rid of the inline overhead.
    auto removeInstr = [&](IR::Instr* argInstr)
    {
        Assert(argInstr->m_opcode == Js::OpCode::InlineeStart || argInstr->m_opcode == Js::OpCode::ArgOut_A || argInstr->m_opcode == Js::OpCode::ArgOut_A_Inline);
        IR::Opnd *opnd = argInstr->GetSrc1();
        StackSym *sym = opnd->GetStackSym();
        if (!opnd->GetIsJITOptimizedReg() && sym && sym->HasByteCodeRegSlot())
        {
            // Replace instrs with bytecodeUses
            IR::ByteCodeUsesInstr *bytecodeUse = IR::ByteCodeUsesInstr::New(argInstr, sym->m_id);
            argInstr->InsertBefore(bytecodeUse);
        }
        // Remember the StartCall that fed this arg chain (captured by reference).
        startCallInstr = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
        FlowGraph::SafeRemoveInstr(argInstr);
        return false;
    };
    // If there are no implicit calls - bailouts/throws - we can remove all inlining overhead.
    if (!inlineeStart->m_func->GetHasImplicitCalls())
    {
        noImplicitCallsInInlinee = true;
        inlineeStart->IterateArgInstrs(removeInstr);
        inlineeStart->IterateMetaArgs([](IR::Instr* metArg)
        {
            FlowGraph::SafeRemoveInstr(metArg);
            return false;
        });
        inlineeStart->m_func->m_hasInlineArgsOpt = false;
        removeInstr(inlineeStart);
        return true;
    }
    if (!inlineeStart->m_func->m_hasInlineArgsOpt)
    {
        // Inline-args opt was not enabled for this inlinee; trace why and bail.
        PHASE_PRINT_TESTTRACE(Js::InlineArgsOptPhase, func, L"%s[%d]: Skipping inline args optimization: %s[%d] HasCalls: %s 'arguments' access: %s Can do inlinee args opt: %s\n",
            func->GetJnFunction()->GetExternalDisplayName(), func->GetJnFunction()->GetFunctionNumber(),
            inlineeStart->m_func->GetJnFunction()->GetExternalDisplayName(), inlineeStart->m_func->GetJnFunction()->GetFunctionNumber(),
            IsTrueOrFalse(inlineeStart->m_func->GetHasCalls()),
            IsTrueOrFalse(inlineeStart->m_func->GetHasUnoptimizedArgumentsAcccess()),
            IsTrueOrFalse(inlineeStart->m_func->m_canDoInlineArgsOpt));
        return false;
    }
    if (!inlineeStart->m_func->frameInfo->isRecorded)
    {
        PHASE_PRINT_TESTTRACE(Js::InlineArgsOptPhase, func, L"%s[%d]: InlineeEnd not found - usually due to a throw or a BailOnNoProfile (stressed, most likely)\n",
            func->GetJnFunction()->GetExternalDisplayName(), func->GetJnFunction()->GetFunctionNumber());
        inlineeStart->m_func->DisableCanDoInlineArgOpt();
        return false;
    }
    // Inline-args opt path: remove arg instrs and (most) meta args; the frame info
    // recorded at InlineeEnd will be used to restore args on bailout.
    inlineeStart->IterateArgInstrs(removeInstr);
    int i = 0;
    inlineeStart->IterateMetaArgs([&](IR::Instr* metaArg)
    {
        if (i == Js::Constants::InlineeMetaArgIndex_ArgumentsObject &&
            inlineeStart->m_func->GetHasArgumentObject())
        {
            Assert(!inlineeStart->m_func->GetHasUnoptimizedArgumentsAcccess());
            // Do not remove arguments object meta arg if there is a reference to arguments object
        }
        else
        {
            FlowGraph::SafeRemoveInstr(metaArg);
        }
        i++;
        return false;
    });
    IR::Opnd *src1 = inlineeStart->GetSrc1();
    StackSym *sym = src1->GetStackSym();
    if (!src1->GetIsJITOptimizedReg() && sym && sym->HasByteCodeRegSlot())
    {
        // Replace instrs with bytecodeUses
        IR::ByteCodeUsesInstr *bytecodeUse = IR::ByteCodeUsesInstr::New(inlineeStart, sym->m_id);
        inlineeStart->InsertBefore(bytecodeUse);
    }
    // This indicates to the lowerer that this inlinee has been optimized
    // and it should not be lowered - Now this instruction is used to mark inlineeStart
    inlineeStart->FreeSrc1();
    inlineeStart->FreeSrc2();
    inlineeStart->FreeDst();
    return true;
}
  5830. void
  5831. BackwardPass::ProcessInlineeEnd(IR::Instr* instr)
  5832. {
  5833. if (this->IsPrePass())
  5834. {
  5835. return;
  5836. }
  5837. if (this->tag == Js::BackwardPhase)
  5838. {
  5839. if (!GlobOpt::DoInlineArgsOpt(instr->m_func))
  5840. {
  5841. return;
  5842. }
  5843. // This adds a use for function sym as part of InlineeStart & all the syms referenced by the args.
  5844. // It ensure they do not get cleared from the copy prop sym map.
  5845. instr->IterateArgInstrs([=](IR::Instr* argInstr){
  5846. if (argInstr->GetSrc1()->IsRegOpnd())
  5847. {
  5848. this->currentBlock->upwardExposedUses->Set(argInstr->GetSrc1()->AsRegOpnd()->m_sym->m_id);
  5849. }
  5850. return false;
  5851. });
  5852. }
  5853. else if (this->tag == Js::DeadStorePhase)
  5854. {
  5855. if (instr->m_func->m_hasInlineArgsOpt)
  5856. {
  5857. Assert(instr->m_func->frameInfo);
  5858. instr->m_func->frameInfo->IterateSyms([=](StackSym* argSym)
  5859. {
  5860. this->currentBlock->upwardExposedUses->Set(argSym->m_id);
  5861. });
  5862. }
  5863. }
  5864. }
bool
BackwardPass::ProcessBailOnNoProfile(IR::Instr *instr, BasicBlock *block)
{
    // Tries to hoist a BailOnNoProfile to the top of its block and then into
    // predecessor blocks, deleting redundant copies along the way.
    // Returns true if 'instr' was removed (deleted or freed), false if it remains
    // in the block (possibly repositioned at the block top).
    Assert(this->tag == Js::BackwardPhase);
    Assert(instr->m_opcode == Js::OpCode::BailOnNoProfile);
    Assert(!instr->HasBailOutInfo());
    AnalysisAssert(block);
    if (this->IsPrePass())
    {
        return false;
    }
    IR::Instr *curInstr = instr->m_prev;
    if (curInstr->IsLabelInstr() && curInstr->AsLabelInstr()->isOpHelper)
    {
        // Already processed
        if (this->DoMarkTempObjects())
        {
            block->tempObjectTracker->ProcessBailOnNoProfile(instr);
        }
        return false;
    }
    // Don't hoist if we see calls with profile data (recursive calls)
    while(!curInstr->StartsBasicBlock())
    {
        // If a function was inlined, it must have had profile info.
        if (curInstr->m_opcode == Js::OpCode::InlineeEnd || curInstr->m_opcode == Js::OpCode::InlineBuiltInEnd || curInstr->m_opcode == Js::OpCode::InlineNonTrackingBuiltInEnd
            || curInstr->m_opcode == Js::OpCode::InlineeStart || curInstr->m_opcode == Js::OpCode::EndCallForPolymorphicInlinee)
        {
            break;
        }
        else if (OpCodeAttr::CallInstr(curInstr->m_opcode))
        {
            if (curInstr->m_prev->m_opcode != Js::OpCode::BailOnNoProfile)
            {
                break;
            }
        }
        curInstr = curInstr->m_prev;
    }
    // Didn't get to the top of the block, delete this BailOnNoProfile...
    if (!curInstr->IsLabelInstr())
    {
        block->RemoveInstr(instr);
        return true;
    }
    // We can't bail in the middle of a "tmp = CmEq s1, s2; BrTrue tmp" turned into a "BrEq s1, s2",
    // because the bailout wouldn't be able to restore tmp.
    IR::Instr *curNext = curInstr->GetNextRealInstrOrLabel();
    if (curNext->m_opcode == Js::OpCode::Ld_A && curNext->GetDst()->IsRegOpnd() && curNext->GetDst()->AsRegOpnd()->m_fgPeepTmp)
    {
        block->RemoveInstr(instr);
        return true;
    }
    curInstr = instr->m_prev;
    // Move to top of block.
    while(!curInstr->StartsBasicBlock())
    {
        // Delete redundant BailOnNoProfile
        if (curInstr->m_opcode == Js::OpCode::BailOnNoProfile)
        {
            Assert(!curInstr->HasBailOutInfo());
            curInstr = curInstr->m_next;
            curInstr->m_prev->Remove();
        }
        curInstr = curInstr->m_prev;
    }
    if (instr == block->GetLastInstr())
    {
        block->SetLastInstr(instr->m_prev);
    }
    instr->Unlink();
    // Now try to move this up the flowgraph to the predecessor blocks
    bool curBlockNeedsBail = false;
    FOREACH_PREDECESSOR_BLOCK(pred, block)
    {
        bool hoistBailToPred = true;
        if (block->isLoopHeader && pred->loop == block->loop)
        {
            // Skip loop back-edges
            continue;
        }
        // If all successors of this predecessor start with a BailOnNoProfile, we should be
        // okay to hoist this bail to the predecessor.
        FOREACH_SUCCESSOR_BLOCK(predSucc, pred)
        {
            if (predSucc == block)
            {
                continue;
            }
            if (predSucc->GetFirstInstr()->m_next->m_opcode != Js::OpCode::BailOnNoProfile)
            {
                hoistBailToPred = false;
                break;
            }
        } NEXT_SUCCESSOR_BLOCK;
        if (hoistBailToPred)
        {
            IR::Instr *predInstr = pred->GetLastInstr();
            IR::Instr *instrCopy = instr->Copy();
            if (predInstr->EndsBasicBlock())
            {
                if (predInstr->m_prev->m_opcode == Js::OpCode::BailOnNoProfile)
                {
                    // We already have one, we don't need a second.
                    instrCopy->Free();
                }
                else if (predInstr->AsBranchInstr()->m_isSwitchBr)
                {
                    // Don't put a bailout in the middle of a switch dispatch sequence.
                    // The bytecode offsets are not in order, and it would lead to incorrect
                    // bailout info.
                    curBlockNeedsBail = true;
                }
                else
                {
                    instrCopy->m_func = predInstr->m_func;
                    predInstr->InsertBefore(instrCopy);
                }
            }
            else
            {
                if (predInstr->m_opcode == Js::OpCode::BailOnNoProfile)
                {
                    // We already have one, we don't need a second.
                    instrCopy->Free();
                }
                else
                {
                    instrCopy->m_func = predInstr->m_func;
                    predInstr->InsertAfter(instrCopy);
                    pred->SetLastInstr(instrCopy);
                }
            }
        }
        else
        {
            curBlockNeedsBail = true;
        }
    } NEXT_PREDECESSOR_BLOCK;
    if (curBlockNeedsBail)
    {
        // Couldn't hoist to every predecessor: reinsert the bail right after the
        // block label and mark the label as a helper block.
        curInstr->AsLabelInstr()->isOpHelper = true;
#if DBG
        curInstr->AsLabelInstr()->m_noHelperAssert = true;
#endif
        instr->m_func = curInstr->m_func;
        curInstr->InsertAfter(instr);
        bool setLastInstr = (curInstr == block->GetLastInstr());
        if (setLastInstr)
        {
            block->SetLastInstr(instr);
        }
        if (this->DoMarkTempObjects())
        {
            block->tempObjectTracker->ProcessBailOnNoProfile(instr);
        }
        return false;
    }
    else
    {
        // Fully hoisted into all predecessors; the original instruction is no longer needed.
        instr->Free();
        return true;
    }
}
  6029. bool
  6030. BackwardPass::ReverseCopyProp(IR::Instr *instr)
  6031. {
  6032. // Look for :
  6033. //
  6034. // t1 = instr
  6035. // [bytecodeuse t1]
  6036. // t2 = Ld_A t1 >> t1 !upwardExposed
  6037. //
  6038. // Transform into:
  6039. //
  6040. // t2 = instr
  6041. //
  6042. if (PHASE_OFF(Js::ReverseCopyPropPhase, this->func))
  6043. {
  6044. return false;
  6045. }
  6046. if (this->tag != Js::DeadStorePhase || this->IsPrePass() || this->IsCollectionPass())
  6047. {
  6048. return false;
  6049. }
  6050. if (this->func->HasTry())
  6051. {
  6052. // UpwardExposedUsed info can't be relied on
  6053. return false;
  6054. }
  6055. // Find t2 = Ld_A t1
  6056. switch (instr->m_opcode)
  6057. {
  6058. case Js::OpCode::Ld_A:
  6059. case Js::OpCode::Ld_I4:
  6060. break;
  6061. default:
  6062. return false;
  6063. }
  6064. if (!instr->GetDst()->IsRegOpnd())
  6065. {
  6066. return false;
  6067. }
  6068. if (!instr->GetSrc1()->IsRegOpnd())
  6069. {
  6070. return false;
  6071. }
  6072. if (instr->HasBailOutInfo())
  6073. {
  6074. return false;
  6075. }
  6076. IR::RegOpnd *dst = instr->GetDst()->AsRegOpnd();
  6077. IR::RegOpnd *src = instr->GetSrc1()->AsRegOpnd();
  6078. IR::Instr *instrPrev = instr->GetPrevRealInstrOrLabel();
  6079. IR::ByteCodeUsesInstr *byteCodeUseInstr = nullptr;
  6080. StackSym *varSym = src->m_sym;
  6081. if (varSym->IsTypeSpec())
  6082. {
  6083. varSym = varSym->GetVarEquivSym(this->func);
  6084. }
  6085. // SKip ByteCodeUse instr if possible
  6086. // [bytecodeuse t1]
  6087. if (!instrPrev->GetDst())
  6088. {
  6089. if (instrPrev->m_opcode == Js::OpCode::ByteCodeUses)
  6090. {
  6091. byteCodeUseInstr = instrPrev->AsByteCodeUsesInstr();
  6092. if (byteCodeUseInstr->byteCodeUpwardExposedUsed && byteCodeUseInstr->byteCodeUpwardExposedUsed->Test(varSym->m_id) && byteCodeUseInstr->byteCodeUpwardExposedUsed->Count() == 1)
  6093. {
  6094. instrPrev = byteCodeUseInstr->GetPrevRealInstrOrLabel();
  6095. if (!instrPrev->GetDst())
  6096. {
  6097. return false;
  6098. }
  6099. }
  6100. else
  6101. {
  6102. return false;
  6103. }
  6104. }
  6105. else
  6106. {
  6107. return false;
  6108. }
  6109. }
  6110. // The fast-path for these doesn't handle dst == src.
  6111. // REVIEW: I believe the fast-path for LdElemI_A has been fixed... Nope, still broken for "i = A[i]" for prejit
  6112. switch (instrPrev->m_opcode)
  6113. {
  6114. case Js::OpCode::LdElemI_A:
  6115. case Js::OpCode::IsInst:
  6116. case Js::OpCode::ByteCodeUses:
  6117. return false;
  6118. }
  6119. // Can't do it if post-op bailout would need result
  6120. // REVIEW: enable for pre-opt bailout?
  6121. if (instrPrev->HasBailOutInfo() && instrPrev->GetByteCodeOffset() != instrPrev->GetBailOutInfo()->bailOutOffset)
  6122. {
  6123. return false;
  6124. }
  6125. // Make sure src of Ld_A == dst of instr
  6126. // t1 = instr
  6127. if (!instrPrev->GetDst()->IsEqual(src))
  6128. {
  6129. return false;
  6130. }
  6131. // Make sure t1 isn't used later
  6132. if (this->currentBlock->upwardExposedUses->Test(src->m_sym->m_id))
  6133. {
  6134. return false;
  6135. }
  6136. if (this->currentBlock->byteCodeUpwardExposedUsed && this->currentBlock->byteCodeUpwardExposedUsed->Test(varSym->m_id))
  6137. {
  6138. return false;
  6139. }
  6140. // Make sure we can dead-store this sym (debugger mode?)
  6141. if (!this->DoDeadStore(this->func, src->m_sym))
  6142. {
  6143. return false;
  6144. }
  6145. StackSym *const dstSym = dst->m_sym;
  6146. if(instrPrev->HasBailOutInfo() && dstSym->IsInt32() && dstSym->IsTypeSpec())
  6147. {
  6148. StackSym *const prevDstSym = IR::RegOpnd::TryGetStackSym(instrPrev->GetDst());
  6149. if(instrPrev->GetBailOutKind() & IR::BailOutOnResultConditions &&
  6150. prevDstSym &&
  6151. prevDstSym->IsInt32() &&
  6152. prevDstSym->IsTypeSpec() &&
  6153. instrPrev->GetSrc1() &&
  6154. !instrPrev->GetDst()->IsEqual(instrPrev->GetSrc1()) &&
  6155. !(instrPrev->GetSrc2() && instrPrev->GetDst()->IsEqual(instrPrev->GetSrc2())))
  6156. {
  6157. // The previous instruction's dst value may be trashed by the time of the pre-op bailout. Skip reverse copy-prop if
  6158. // it would replace the previous instruction's dst with a sym that bailout had decided to use to restore a value for
  6159. // the pre-op bailout, which can't be trashed before bailout. See big comment in ProcessBailOutCopyProps for the
  6160. // reasoning behind the tests above.
  6161. FOREACH_SLISTBASE_ENTRY(
  6162. CopyPropSyms,
  6163. usedCopyPropSym,
  6164. &instrPrev->GetBailOutInfo()->usedCapturedValues.copyPropSyms)
  6165. {
  6166. if(dstSym == usedCopyPropSym.Value())
  6167. {
  6168. return false;
  6169. }
  6170. } NEXT_SLISTBASE_ENTRY;
  6171. }
  6172. }
  6173. if (byteCodeUseInstr)
  6174. {
  6175. if (this->currentBlock->byteCodeUpwardExposedUsed && instrPrev->GetDst()->AsRegOpnd()->GetIsJITOptimizedReg() && varSym->HasByteCodeRegSlot())
  6176. {
  6177. if(varSym->HasByteCodeRegSlot())
  6178. {
  6179. this->currentBlock->byteCodeUpwardExposedUsed->Set(varSym->m_id);
  6180. }
  6181. if (src->IsEqual(dst) && instrPrev->GetDst()->GetIsJITOptimizedReg())
  6182. {
  6183. // s2(s1).i32 = FromVar s1.var #0000 Bailout: #0000 (BailOutIntOnly)
  6184. // ByteCodeUses s1
  6185. // s2(s1).i32 = Ld_A s2(s1).i32
  6186. //
  6187. // Since the dst on the FromVar is marked JITOptimized, we need to set it on the new dst as well,
  6188. // or we'll change the bytecode liveness of s1
  6189. dst->SetIsJITOptimizedReg(true);
  6190. }
  6191. }
  6192. byteCodeUseInstr->Remove();
  6193. }
  6194. else if (instrPrev->GetDst()->AsRegOpnd()->GetIsJITOptimizedReg() && !src->GetIsJITOptimizedReg() && varSym->HasByteCodeRegSlot())
  6195. {
  6196. this->currentBlock->byteCodeUpwardExposedUsed->Set(varSym->m_id);
  6197. }
  6198. #if DBG
  6199. if (this->DoMarkTempObjectVerify())
  6200. {
  6201. this->currentBlock->tempObjectVerifyTracker->NotifyReverseCopyProp(instrPrev);
  6202. }
  6203. #endif
  6204. dst->SetValueType(instrPrev->GetDst()->GetValueType());
  6205. instrPrev->ReplaceDst(dst);
  6206. instr->Remove();
  6207. return true;
  6208. }
  6209. bool
  6210. BackwardPass::FoldCmBool(IR::Instr *instr)
  6211. {
  6212. Assert(instr->m_opcode == Js::OpCode::Conv_Bool);
  6213. if (this->tag != Js::DeadStorePhase || this->IsPrePass() || this->IsCollectionPass())
  6214. {
  6215. return false;
  6216. }
  6217. if (this->func->HasTry())
  6218. {
  6219. // UpwardExposedUsed info can't be relied on
  6220. return false;
  6221. }
  6222. IR::RegOpnd *intOpnd = instr->GetSrc1()->AsRegOpnd();
  6223. Assert(intOpnd->m_sym->IsInt32());
  6224. if (!intOpnd->m_sym->IsSingleDef())
  6225. {
  6226. return false;
  6227. }
  6228. IR::Instr *cmInstr = intOpnd->m_sym->GetInstrDef();
  6229. // Should be a Cm instr...
  6230. if (!cmInstr->GetSrc2())
  6231. {
  6232. return false;
  6233. }
  6234. IR::Instr *instrPrev = instr->GetPrevRealInstrOrLabel();
  6235. if (instrPrev != cmInstr)
  6236. {
  6237. return false;
  6238. }
  6239. switch (cmInstr->m_opcode)
  6240. {
  6241. case Js::OpCode::CmEq_A:
  6242. case Js::OpCode::CmGe_A:
  6243. case Js::OpCode::CmUnGe_A:
  6244. case Js::OpCode::CmGt_A:
  6245. case Js::OpCode::CmUnGt_A:
  6246. case Js::OpCode::CmLt_A:
  6247. case Js::OpCode::CmUnLt_A:
  6248. case Js::OpCode::CmLe_A:
  6249. case Js::OpCode::CmUnLe_A:
  6250. case Js::OpCode::CmNeq_A:
  6251. case Js::OpCode::CmSrEq_A:
  6252. case Js::OpCode::CmSrNeq_A:
  6253. case Js::OpCode::CmEq_I4:
  6254. case Js::OpCode::CmNeq_I4:
  6255. case Js::OpCode::CmLt_I4:
  6256. case Js::OpCode::CmLe_I4:
  6257. case Js::OpCode::CmGt_I4:
  6258. case Js::OpCode::CmGe_I4:
  6259. case Js::OpCode::CmUnLt_I4:
  6260. case Js::OpCode::CmUnLe_I4:
  6261. case Js::OpCode::CmUnGt_I4:
  6262. case Js::OpCode::CmUnGe_I4:
  6263. break;
  6264. default:
  6265. return false;
  6266. }
  6267. IR::RegOpnd *varDst = instr->GetDst()->AsRegOpnd();
  6268. if (this->currentBlock->upwardExposedUses->Test(intOpnd->m_sym->m_id) || !this->currentBlock->upwardExposedUses->Test(varDst->m_sym->m_id))
  6269. {
  6270. return false;
  6271. }
  6272. varDst = instr->UnlinkDst()->AsRegOpnd();
  6273. cmInstr->ReplaceDst(varDst);
  6274. this->currentBlock->RemoveInstr(instr);
  6275. return true;
  6276. }
  6277. void
  6278. BackwardPass::SetWriteThroughSymbolsSetForRegion(BasicBlock * catchBlock, Region * tryRegion)
  6279. {
  6280. tryRegion->writeThroughSymbolsSet = JitAnew(this->func->m_alloc, BVSparse<JitArenaAllocator>, this->func->m_alloc);
  6281. if (this->DoByteCodeUpwardExposedUsed())
  6282. {
  6283. Assert(catchBlock->byteCodeUpwardExposedUsed);
  6284. if (!catchBlock->byteCodeUpwardExposedUsed->IsEmpty())
  6285. {
  6286. FOREACH_BITSET_IN_SPARSEBV(id, catchBlock->byteCodeUpwardExposedUsed)
  6287. {
  6288. tryRegion->writeThroughSymbolsSet->Set(id);
  6289. }
  6290. NEXT_BITSET_IN_SPARSEBV
  6291. }
  6292. #if DBG
  6293. // Symbols write-through in the parent try region should be marked as write-through in the current try region as well.
  6294. // x =
  6295. // try{
  6296. // try{
  6297. // x = <-- x needs to be write-through here. With the current mechanism of not clearing a write-through
  6298. // symbol from the bytecode upward-exposed on a def, x should be marked as write-through as
  6299. // write-through symbols for a try are basically the bytecode upward exposed symbols at the
  6300. // beginning of the corresponding catch block).
  6301. // Verify that it still holds.
  6302. // <exception>
  6303. // }
  6304. // catch(){}
  6305. // x =
  6306. // }
  6307. // catch(){}
  6308. // = x
  6309. if (tryRegion->GetParent()->GetType() == RegionTypeTry)
  6310. {
  6311. Region * parentTry = tryRegion->GetParent();
  6312. Assert(parentTry->writeThroughSymbolsSet);
  6313. FOREACH_BITSET_IN_SPARSEBV(id, parentTry->writeThroughSymbolsSet)
  6314. {
  6315. Assert(tryRegion->writeThroughSymbolsSet->Test(id));
  6316. }
  6317. NEXT_BITSET_IN_SPARSEBV
  6318. }
  6319. #endif
  6320. }
  6321. else
  6322. {
  6323. // this can happen with -off:globopt
  6324. return;
  6325. }
  6326. }
  6327. bool
  6328. BackwardPass::CheckWriteThroughSymInRegion(Region* region, StackSym* sym)
  6329. {
  6330. if (region->GetType() == RegionTypeRoot || region->GetType() == RegionTypeFinally)
  6331. {
  6332. return false;
  6333. }
  6334. // if the current region is a try region, check in its write-through set,
  6335. // otherwise (current = catch region) look in the first try ancestor's write-through set
  6336. Region * selfOrFirstTryAncestor = region->GetSelfOrFirstTryAncestor();
  6337. if (!selfOrFirstTryAncestor)
  6338. {
  6339. return false;
  6340. }
  6341. Assert(selfOrFirstTryAncestor->GetType() == RegionTypeTry);
  6342. return selfOrFirstTryAncestor->writeThroughSymbolsSet && selfOrFirstTryAncestor->writeThroughSymbolsSet->Test(sym->m_id);
  6343. }
  6344. bool
  6345. BackwardPass::DoDeadStoreLdStForMemop(IR::Instr *instr)
  6346. {
  6347. Assert(this->tag == Js::DeadStorePhase && this->currentBlock->loop != nullptr);
  6348. Loop *loop = this->currentBlock->loop;
  6349. if (globOpt->DoMemOp(loop))
  6350. {
  6351. if (instr->m_opcode == Js::OpCode::StElemI_A && instr->GetDst()->IsIndirOpnd())
  6352. {
  6353. SymID base = this->globOpt->GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym());
  6354. SymID index = this->globOpt->GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym());
  6355. FOREACH_MEMOP_CANDIDATES(candidate, loop)
  6356. {
  6357. if (base == candidate->base && index == candidate->index)
  6358. {
  6359. return true;
  6360. }
  6361. } NEXT_MEMOP_CANDIDATE
  6362. }
  6363. else if (instr->m_opcode == Js::OpCode::LdElemI_A && instr->GetSrc1()->IsIndirOpnd())
  6364. {
  6365. SymID base = this->globOpt->GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym());
  6366. SymID index = this->globOpt->GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym());
  6367. FOREACH_MEMCOPY_CANDIDATES(candidate, loop)
  6368. {
  6369. if (base == candidate->ldBase && index == candidate->index)
  6370. {
  6371. return true;
  6372. }
  6373. } NEXT_MEMCOPY_CANDIDATE
  6374. }
  6375. }
  6376. return false;
  6377. }
  6378. void
  6379. BackwardPass::RestoreInductionVariableValuesAfterMemOp(Loop *loop)
  6380. {
  6381. const auto RestoreInductionVariable = [&](SymID symId, Loop::InductionVariableChangeInfo inductionVariableChangeInfo, Loop *loop)
  6382. {
  6383. Js::OpCode opCode = Js::OpCode::Add_I4;
  6384. if (!inductionVariableChangeInfo.isIncremental)
  6385. {
  6386. opCode = Js::OpCode::Sub_I4;
  6387. }
  6388. Func *localFunc = loop->GetFunc();
  6389. StackSym *sym = localFunc->m_symTable->FindStackSym(symId)->GetInt32EquivSym(localFunc);
  6390. IR::Opnd *inductionVariableOpnd = IR::RegOpnd::New(sym, IRType::TyInt32, localFunc);
  6391. IR::Opnd *sizeOpnd = globOpt->GenerateInductionVariableChangeForMemOp(loop, inductionVariableChangeInfo.unroll);
  6392. loop->landingPad->InsertAfter(IR::Instr::New(opCode, inductionVariableOpnd, inductionVariableOpnd, sizeOpnd, loop->GetFunc()));
  6393. };
  6394. for (auto it = loop->memOpInfo->inductionVariableChangeInfoMap->GetIterator(); it.IsValid(); it.MoveNext())
  6395. {
  6396. Loop::InductionVariableChangeInfo iv = it.CurrentValue();
  6397. SymID sym = it.CurrentKey();
  6398. if (iv.unroll != Js::Constants::InvalidLoopUnrollFactor)
  6399. {
  6400. // if the variable is being used after the loop restore it
  6401. if (loop->memOpInfo->inductionVariablesUsedAfterLoop->Test(sym))
  6402. {
  6403. RestoreInductionVariable(sym, iv, loop);
  6404. }
  6405. }
  6406. }
  6407. }
bool
BackwardPass::IsEmptyLoopAfterMemOp(Loop *loop)
{
    // Determines whether the loop body consists only of induction-variable
    // bookkeeping (and loop-count plumbing) once its memory accesses have been
    // replaced by a memop, in which case the whole loop can be removed.
    if (globOpt->DoMemOp(loop))
    {
        // True when opnd is a stack sym registered in the loop's induction-variable
        // map with a valid unroll factor.
        const auto IsInductionVariableUse = [&](IR::Opnd *opnd) -> bool
        {
            Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
            return (opnd &&
                opnd->GetStackSym() &&
                loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(this->globOpt->GetVarSymID(opnd->GetStackSym())) &&
                (((Loop::InductionVariableChangeInfo)
                    loop->memOpInfo->inductionVariableChangeInfoMap->
                    LookupWithKey(this->globOpt->GetVarSymID(opnd->GetStackSym()), inductionVariableChangeInfo)).unroll != Js::Constants::InvalidLoopUnrollFactor));
        };

        // Memop loops are expected to be exactly head + tail.
        Assert(loop->blockList.HasTwo());

        FOREACH_BLOCK_IN_LOOP(bblock, loop)
        {
            FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrPrev, bblock)
            {
                // Labels, pseudo-instructions, loop-body counters and
                // unconditional jumps are harmless; skip them.
                if (instr->IsLabelInstr() || !instr->IsRealInstr() || instr->m_opcode == Js::OpCode::IncrLoopBodyCount || instr->m_opcode == Js::OpCode::StLoopBodyCount
                    || (instr->IsBranchInstr() && instr->AsBranchInstr()->IsUnconditional()))
                {
                    continue;
                }
                else
                {
                    switch (instr->m_opcode)
                    {
                    case Js::OpCode::Nop:
                        break;
                    case Js::OpCode::Ld_I4:
                    case Js::OpCode::Add_I4:
                    case Js::OpCode::Sub_I4:
                        // Arithmetic is allowed only on induction variables; a
                        // def of anything live after the loop blocks removal.
                        if (!IsInductionVariableUse(instr->GetDst()))
                        {
                            Assert(instr->GetDst());
                            if (instr->GetDst()->GetStackSym()
                                && loop->memOpInfo->inductionVariablesUsedAfterLoop->Test(globOpt->GetVarSymID(instr->GetDst()->GetStackSym())))
                            {
                                // We have use after the loop for a variable defined inside the loop. So the loop can't be removed.
                                return false;
                            }
                        }
                        break;
                    case Js::OpCode::Decr_A:
                    case Js::OpCode::Incr_A:
                        if (!IsInductionVariableUse(instr->GetSrc1()))
                        {
                            return false;
                        }
                        break;
                    default:
                        // A conditional branch testing induction variables is
                        // the loop condition and is acceptable; anything else
                        // means the loop still does real work.
                        if (instr->IsBranchInstr())
                        {
                            if (IsInductionVariableUse(instr->GetSrc1()) || IsInductionVariableUse(instr->GetSrc2()))
                            {
                                break;
                            }
                        }
                        return false;
                    }
                }
            }
            NEXT_INSTR_IN_BLOCK_EDITING;
        }NEXT_BLOCK_IN_LIST;

        return true;
    }
    return false;
}
  6478. void
  6479. BackwardPass::RemoveEmptyLoops()
  6480. {
  6481. if (PHASE_OFF(Js::MemOpPhase, this->func))
  6482. {
  6483. return;
  6484. }
  6485. const auto DeleteMemOpInfo = [&](Loop *loop)
  6486. {
  6487. JitArenaAllocator *alloc = this->func->GetTopFunc()->m_fg->alloc;
  6488. if (!loop->memOpInfo)
  6489. {
  6490. return;
  6491. }
  6492. if (loop->memOpInfo->candidates)
  6493. {
  6494. loop->memOpInfo->candidates->Clear();
  6495. JitAdelete(alloc, loop->memOpInfo->candidates);
  6496. }
  6497. if (loop->memOpInfo->inductionVariableChangeInfoMap)
  6498. {
  6499. loop->memOpInfo->inductionVariableChangeInfoMap->Clear();
  6500. JitAdelete(alloc, loop->memOpInfo->inductionVariableChangeInfoMap);
  6501. }
  6502. if (loop->memOpInfo->inductionVariableOpndPerUnrollMap)
  6503. {
  6504. loop->memOpInfo->inductionVariableOpndPerUnrollMap->Clear();
  6505. JitAdelete(alloc, loop->memOpInfo->inductionVariableOpndPerUnrollMap);
  6506. }
  6507. if (loop->memOpInfo->inductionVariablesUsedAfterLoop)
  6508. {
  6509. JitAdelete(this->tempAlloc, loop->memOpInfo->inductionVariablesUsedAfterLoop);
  6510. }
  6511. JitAdelete(alloc, loop->memOpInfo);
  6512. };
  6513. FOREACH_LOOP_IN_FUNC_EDITING(loop, this->func)
  6514. {
  6515. if (IsEmptyLoopAfterMemOp(loop))
  6516. {
  6517. RestoreInductionVariableValuesAfterMemOp(loop);
  6518. RemoveEmptyLoopAfterMemOp(loop);
  6519. }
  6520. // Remove memop info as we don't need them after this point.
  6521. DeleteMemOpInfo(loop);
  6522. } NEXT_LOOP_IN_FUNC_EDITING;
  6523. }
  6524. void
  6525. BackwardPass::RemoveEmptyLoopAfterMemOp(Loop *loop)
  6526. {
  6527. BasicBlock *head = loop->GetHeadBlock();
  6528. BasicBlock *tail = head->next;
  6529. BasicBlock *landingPad = loop->landingPad;
  6530. BasicBlock *outerBlock = nullptr;
  6531. SListBaseCounted<FlowEdge *> *succList = head->GetSuccList();
  6532. Assert(succList->HasTwo());
  6533. // Between the two successors of head, one is tail and the other one is the outerBlock
  6534. SListBaseCounted<FlowEdge *>::Iterator iter(succList);
  6535. iter.Next();
  6536. if (iter.Data()->GetSucc() == tail)
  6537. {
  6538. iter.Next();
  6539. outerBlock = iter.Data()->GetSucc();
  6540. }
  6541. else
  6542. {
  6543. outerBlock = iter.Data()->GetSucc();
  6544. #ifdef DBG
  6545. iter.Next();
  6546. Assert(iter.Data()->GetSucc() == tail);
  6547. #endif
  6548. }
  6549. outerBlock->RemovePred(head, this->func->m_fg);
  6550. landingPad->RemoveSucc(head, this->func->m_fg);
  6551. this->func->m_fg->AddEdge(landingPad, outerBlock);
  6552. this->func->m_fg->RemoveBlock(head, nullptr);
  6553. if (head != tail)
  6554. {
  6555. this->func->m_fg->RemoveBlock(tail, nullptr);
  6556. }
  6557. }
  6558. #if DBG_DUMP
  6559. bool
  6560. BackwardPass::IsTraceEnabled() const
  6561. {
  6562. return
  6563. Js::Configuration::Global.flags.Trace.IsEnabled(tag, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()) &&
  6564. (PHASE_TRACE(Js::SimpleJitPhase, func->GetJnFunction()) || !func->IsSimpleJit());
  6565. }
  6566. #endif