BackwardPass.cpp
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "Backend.h"

#define INLINEEMETAARG_COUNT 3

BackwardPass::BackwardPass(Func * func, GlobOpt * globOpt, Js::Phase tag)
    : func(func), globOpt(globOpt), tag(tag), currentPrePassLoop(nullptr), tempAlloc(nullptr),
    preOpBailOutInstrToProcess(nullptr),
    considerSymAsRealUseInNoImplicitCallUses(nullptr),
    isCollectionPass(false), currentRegion(nullptr),
    collectionPassSubPhase(CollectionPassSubPhase::None),
    isLoopPrepass(false)
{
    // These are currently the only two phases in which the backward pass is used.
    Assert(tag == Js::BackwardPhase || tag == Js::DeadStorePhase);

    this->implicitCallBailouts = 0;
    this->fieldOpts = 0;

#if DBG_DUMP
    this->numDeadStore = 0;
    this->numMarkTempNumber = 0;
    this->numMarkTempNumberTransferred = 0;
    this->numMarkTempObject = 0;
#endif
}

void
BackwardPass::DoSetDead(IR::Opnd * opnd, bool isDead) const
{
    // Note: the dead bit on the Opnd records flow-based liveness.
    // This is distinct from isLastUse, which records lexical last-ness.
    if (isDead && this->tag == Js::BackwardPhase && !this->IsPrePass())
    {
        opnd->SetIsDead();
    }
    else if (this->tag == Js::DeadStorePhase)
    {
        // Set or reset in the DeadStorePhase.
        // CSE could have made a previously dead operand no longer the last use, so reset it.
        opnd->SetIsDead(isDead);
    }
}
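
// Byte-code upward-exposed-use tracking records which byte-code syms are live so
// that bailout points know what to restore. Per the conditions below, it is needed
// in the dead store phase when the function has bailouts, in the backward phase
// when try regions are optimized, and (in DBG builds) always in the backward phase.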
bool
BackwardPass::DoByteCodeUpwardExposedUsed() const
{
    return
        !this->func->GetJITFunctionBody()->IsAsmJsMode() &&
        (
            (this->tag == Js::DeadStorePhase && this->func->hasBailout) ||
            (this->func->HasTry() && this->func->DoOptimizeTry() && this->tag == Js::BackwardPhase)
#if DBG
            || this->tag == Js::BackwardPhase
#endif
        );
}

bool
BackwardPass::DoMarkTempNumbers() const
{
#if FLOATVAR
    return false;
#else
    // Only mark temp numbers in the dead store phase.
    return (tag == Js::DeadStorePhase) && !PHASE_OFF(Js::MarkTempPhase, this->func) &&
        !PHASE_OFF(Js::MarkTempNumberPhase, this->func) && func->DoFastPaths() && (!this->func->HasTry());
#endif
}

bool
BackwardPass::DoMarkTempObjects() const
{
    // Only mark temp objects in the backward phase.
    return (tag == Js::BackwardPhase) && !PHASE_OFF(Js::MarkTempPhase, this->func) &&
        !PHASE_OFF(Js::MarkTempObjectPhase, this->func) && func->DoGlobOpt() && func->GetHasTempObjectProducingInstr() &&
        !func->IsJitInDebugMode() &&
        func->DoGlobOptsForGeneratorFunc();

    // Why MarkTempObject is disabled under the debugger:
    // We add 'identified so far dead non-temp locals' to byteCodeUpwardExposedUsed in ProcessBailOutInfo.
    // This may cause MarkTempObject to convert some temps back to non-temps when it sees a 'transferred exposed use'
    // from a temp to a non-temp. That is in general not a supported conversion (while non-temp -> temp is fine).
}

bool
BackwardPass::DoMarkTempNumbersOnTempObjects() const
{
    return !PHASE_OFF(Js::MarkTempNumberOnTempObjectPhase, this->func) && DoMarkTempNumbers() && this->func->GetHasMarkTempObjects();
}

#if DBG
bool
BackwardPass::DoMarkTempObjectVerify() const
{
    // Only verify temp-object marking in the dead store phase.
    return (tag == Js::DeadStorePhase) && !PHASE_OFF(Js::MarkTempPhase, this->func) &&
        !PHASE_OFF(Js::MarkTempObjectPhase, this->func) && func->DoGlobOpt() && func->GetHasTempObjectProducingInstr();
}
#endif
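
// Whether dead store elimination is enabled at all for the given func.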
// static
bool
BackwardPass::DoDeadStore(Func* func)
{
    return
        !PHASE_OFF(Js::DeadStorePhase, func) &&
        (!func->HasTry() || func->DoOptimizeTry());
}

bool
BackwardPass::DoDeadStore() const
{
    return
        this->tag == Js::DeadStorePhase &&
        DoDeadStore(this->func);
}

bool
BackwardPass::DoDeadStoreSlots() const
{
    // only dead store fields if glob opt is on to generate the trackable fields bitvector
    return (tag == Js::DeadStorePhase && this->func->DoGlobOpt()
        && (!this->func->HasTry()));
}

// Whether dead store is enabled for the given func and sym.
// static
bool
BackwardPass::DoDeadStore(Func* func, StackSym* sym)
{
    // Dead store is disabled under the debugger for non-temp local vars.
    return
        DoDeadStore(func) &&
        !(func->IsJitInDebugMode() && sym->HasByteCodeRegSlot() && func->IsNonTempLocalVar(sym->GetByteCodeRegSlot())) &&
        func->DoGlobOptsForGeneratorFunc();
}
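
// -0 tracking: a use "doesn't care" about the sign of zero when +0 and -0 are
// indistinguishable at that use. Division is the classic case where the sign is
// observable; a hypothetical JS snippet (not from this file) for illustration:
//
//     function f(x) { return 1 / (x * 0); }  // -Infinity when x < 0, +Infinity when x > 0
//
// Defs whose consumers never distinguish -0 from +0 can skip negative-zero bailouts.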
bool
BackwardPass::DoTrackNegativeZero() const
{
    return
        !PHASE_OFF(Js::TrackIntUsagePhase, func) &&
        !PHASE_OFF(Js::TrackNegativeZeroPhase, func) &&
        func->DoGlobOpt() &&
        !IsPrePass() &&
        !func->IsJitInDebugMode() &&
        func->DoGlobOptsForGeneratorFunc();
}

bool
BackwardPass::DoTrackBitOpsOrNumber() const
{
#if defined(_WIN32) && defined(TARGET_64)
    return
        !PHASE_OFF1(Js::TypedArrayVirtualPhase) &&
        tag == Js::BackwardPhase &&
        func->DoGlobOpt() &&
        !IsPrePass() &&
        !func->IsJitInDebugMode() &&
        func->DoGlobOptsForGeneratorFunc();
#else
    return false;
#endif
}
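
// Int overflow tracking: a 32-bit overflow while computing a sym is unobservable
// when every use coerces the value through ToInt32, which wraps modulo 2^32.
// A hypothetical JS snippet (not from this file) for illustration:
//
//     (a + b) | 0  // '| 0' truncates to int32, so overflow in 'a + b' doesn't matter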
bool
BackwardPass::DoTrackIntOverflow() const
{
    return
        !PHASE_OFF(Js::TrackIntUsagePhase, func) &&
        !PHASE_OFF(Js::TrackIntOverflowPhase, func) &&
        tag == Js::BackwardPhase &&
        !IsPrePass() &&
        globOpt->DoLossyIntTypeSpec() &&
        !func->IsJitInDebugMode() &&
        func->DoGlobOptsForGeneratorFunc();
}

bool
BackwardPass::DoTrackCompoundedIntOverflow() const
{
    return
        !PHASE_OFF(Js::TrackCompoundedIntOverflowPhase, func) &&
        DoTrackIntOverflow() && !func->IsTrackCompoundedIntOverflowDisabled();
}

bool
BackwardPass::DoTrackNon32BitOverflow() const
{
    // Enabled only for IA (x86/x64).
#if defined(_M_IX86) || defined(_M_X64)
    return true;
#else
    return false;
#endif
}

void
BackwardPass::CleanupBackwardPassInfoInFlowGraph()
{
    if (!this->func->m_fg->hasBackwardPassInfo)
    {
        // No information to clean up
        return;
    }

    // The backward pass temp arena has already been deleted, we can just reset the data
    FOREACH_BLOCK_IN_FUNC_DEAD_OR_ALIVE(block, this->func)
    {
        block->upwardExposedUses = nullptr;
        block->upwardExposedFields = nullptr;
        block->typesNeedingKnownObjectLayout = nullptr;
        block->slotDeadStoreCandidates = nullptr;
        block->byteCodeUpwardExposedUsed = nullptr;
#if DBG
        block->byteCodeRestoreSyms = nullptr;
        block->excludeByteCodeUpwardExposedTracking = nullptr;
#endif
        block->tempNumberTracker = nullptr;
        block->tempObjectTracker = nullptr;
#if DBG
        block->tempObjectVerifyTracker = nullptr;
#endif
        block->stackSymToFinalType = nullptr;
        block->stackSymToGuardedProperties = nullptr;
        block->stackSymToWriteGuardsMap = nullptr;
        block->cloneStrCandidates = nullptr;
        block->noImplicitCallUses = nullptr;
        block->noImplicitCallNoMissingValuesUses = nullptr;
        block->noImplicitCallNativeArrayUses = nullptr;
        block->noImplicitCallJsArrayHeadSegmentSymUses = nullptr;
        block->noImplicitCallArrayLengthSymUses = nullptr;
        block->couldRemoveNegZeroBailoutForDef = nullptr;

        if (block->loop != nullptr)
        {
            block->loop->hasDeadStoreCollectionPass = false;
            block->loop->hasDeadStorePrepass = false;
        }
    }
    NEXT_BLOCK_IN_FUNC_DEAD_OR_ALIVE;
}

/*
 * We insert ArgIns at the start of the function for all the formals.
 * Unused formals will be dead-stored during the dead store pass.
 * We need ArgIns only for the outermost function (the inliner).
 */
void
BackwardPass::InsertArgInsForFormals()
{
    if (func->IsStackArgsEnabled() && !func->GetJITFunctionBody()->HasImplicitArgIns())
    {
        IR::Instr * insertAfterInstr = func->m_headInstr->m_next;
        AssertMsg(insertAfterInstr->IsLabelInstr(), "First Instr of the first block should always have a label");

        Js::ArgSlot paramsCount = insertAfterInstr->m_func->GetJITFunctionBody()->GetInParamsCount() - 1;
        IR::Instr * argInInstr = nullptr;
        for (Js::ArgSlot argumentIndex = 1; argumentIndex <= paramsCount; argumentIndex++)
        {
            IR::SymOpnd * srcOpnd;
            StackSym * symSrc = StackSym::NewParamSlotSym(argumentIndex + 1, func);
            StackSym * symDst = StackSym::New(func);
            IR::RegOpnd * dstOpnd = IR::RegOpnd::New(symDst, TyVar, func);

            func->SetArgOffset(symSrc, (argumentIndex + LowererMD::GetFormalParamOffset()) * MachPtr);
            srcOpnd = IR::SymOpnd::New(symSrc, TyVar, func);

            argInInstr = IR::Instr::New(Js::OpCode::ArgIn_A, dstOpnd, srcOpnd, func);
            insertAfterInstr->InsertAfter(argInInstr);
            insertAfterInstr = argInInstr;

            AssertMsg(!func->HasStackSymForFormal(argumentIndex - 1), "Already has a stack sym for this formal?");
            this->func->TrackStackSymForFormalIndex(argumentIndex - 1, symDst);
        }

        if (PHASE_VERBOSE_TRACE1(Js::StackArgFormalsOptPhase) && paramsCount > 0)
        {
            Output::Print(_u("StackArgFormals : %s (%d) :Inserting ArgIn_A for LdSlot (formals) in the start of Deadstore pass. \n"), func->GetJITFunctionBody()->GetDisplayName(), func->GetFunctionNumber());
            Output::Flush();
        }
    }
}
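
// When the stack-args optimization is in play for this instruction's function and a
// scope object sym exists, record that sym in byteCodeUpwardExposedUsed so bailout
// can restore it.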
void
BackwardPass::MarkScopeObjSymUseForStackArgOpt()
{
    IR::Instr * instr = this->currentInstr;
    if (tag == Js::DeadStorePhase)
    {
        if (instr->DoStackArgsOpt(this->func) && instr->m_func->GetScopeObjSym() != nullptr && this->DoByteCodeUpwardExposedUsed())
        {
            if (this->currentBlock->byteCodeUpwardExposedUsed == nullptr)
            {
                this->currentBlock->byteCodeUpwardExposedUsed = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
            }
            this->currentBlock->byteCodeUpwardExposedUsed->Set(instr->m_func->GetScopeObjSym()->m_id);
        }
    }
}

void
BackwardPass::ProcessBailOnStackArgsOutOfActualsRange()
{
    IR::Instr * instr = this->currentInstr;

    if (tag == Js::DeadStorePhase &&
        (instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::TypeofElem) &&
        instr->HasBailOutInfo() && !IsPrePass())
    {
        if (instr->DoStackArgsOpt(this->func))
        {
            AssertMsg(instr->GetBailOutKind() & IR::BailOnStackArgsOutOfActualsRange, "Stack args bail out is not set when the optimization is turned on?");
            if (instr->GetBailOutKind() & ~IR::BailOnStackArgsOutOfActualsRange)
            {
                Assert(instr->GetBailOutKind() == (IR::BailOnStackArgsOutOfActualsRange | IR::BailOutOnImplicitCallsPreOp));
                // We are sure at this point that we will not have any implicit calls, as we wouldn't have done this optimization in the first place.
                instr->SetBailOutKind(IR::BailOnStackArgsOutOfActualsRange);
            }
        }
        else if (instr->GetBailOutKind() & IR::BailOnStackArgsOutOfActualsRange)
        {
            // If we decided not to do the stack args optimization, remove the bailout at this point.
            // We would have optimistically set the bailout in the forward pass, and by the end of the forward
            // pass stack args was turned off for some reason. So we remove it in the dead store pass.
            IR::BailOutKind bailOutKind = instr->GetBailOutKind() & ~IR::BailOnStackArgsOutOfActualsRange;
            if (bailOutKind == IR::BailOutInvalid)
            {
                instr->ClearBailOutInfo();
            }
            else
            {
                instr->SetBailOutKind(bailOutKind);
            }
        }
    }
}
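
// Entry point of the backward pass: sets up the per-pass arena allocator and the
// per-block bit vectors, then visits the blocks of the flow graph in reverse order,
// optimizing each one via OptBlock.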
void
BackwardPass::Optimize()
{
    if (tag == Js::BackwardPhase && PHASE_OFF(tag, this->func))
    {
        return;
    }

    if (tag == Js::DeadStorePhase)
    {
        if (!this->func->DoLoopFastPaths() || !this->func->DoFastPaths())
        {
            // arguments[] access is similar to the array fast path, hence disable it when the array fast path is disabled.
            // loopFastPath is always true unless explicitly disabled.
            // defaultDoFastPath can be false when the source code size is huge.
            func->SetHasStackArgs(false);
        }
        InsertArgInsForFormals();
    }

    NoRecoverMemoryJitArenaAllocator localAlloc(tag == Js::BackwardPhase ? _u("BE-Backward") : _u("BE-DeadStore"),
        this->func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    this->tempAlloc = &localAlloc;

#if DBG_DUMP
    if (this->IsTraceEnabled())
    {
        this->func->DumpHeader();
    }
#endif

    this->CleanupBackwardPassInfoInFlowGraph();

    // Info about whether a sym is used in a way in which -0 differs from +0, or whether the sym is used in a way in which an
    // int32 overflow when generating the value of the sym matters, in the current block. The info is transferred to
    // instructions that define the sym in the current block as they are encountered. The info in these bit vectors is discarded
    // after optimizing each block, so the only info that remains for GlobOpt is that which is transferred to instructions.
    BVSparse<JitArenaAllocator> localNegativeZeroDoesNotMatterBySymId(tempAlloc);
    negativeZeroDoesNotMatterBySymId = &localNegativeZeroDoesNotMatterBySymId;

    BVSparse<JitArenaAllocator> localSymUsedOnlyForBitOpsBySymId(tempAlloc);
    symUsedOnlyForBitOpsBySymId = &localSymUsedOnlyForBitOpsBySymId;

    BVSparse<JitArenaAllocator> localSymUsedOnlyForNumberBySymId(tempAlloc);
    symUsedOnlyForNumberBySymId = &localSymUsedOnlyForNumberBySymId;

    BVSparse<JitArenaAllocator> localIntOverflowDoesNotMatterBySymId(tempAlloc);
    intOverflowDoesNotMatterBySymId = &localIntOverflowDoesNotMatterBySymId;

    BVSparse<JitArenaAllocator> localIntOverflowDoesNotMatterInRangeBySymId(tempAlloc);
    intOverflowDoesNotMatterInRangeBySymId = &localIntOverflowDoesNotMatterInRangeBySymId;

    BVSparse<JitArenaAllocator> localCandidateSymsRequiredToBeInt(tempAlloc);
    candidateSymsRequiredToBeInt = &localCandidateSymsRequiredToBeInt;

    BVSparse<JitArenaAllocator> localCandidateSymsRequiredToBeLossyInt(tempAlloc);
    candidateSymsRequiredToBeLossyInt = &localCandidateSymsRequiredToBeLossyInt;

    intOverflowCurrentlyMattersInRange = true;

    FloatSymEquivalenceMap localFloatSymEquivalenceMap(tempAlloc);
    floatSymEquivalenceMap = &localFloatSymEquivalenceMap;

    NumberTempRepresentativePropertySymMap localNumberTempRepresentativePropertySym(tempAlloc);
    numberTempRepresentativePropertySym = &localNumberTempRepresentativePropertySym;

    FOREACH_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE(block, this->func)
    {
        this->OptBlock(block);
    }
    NEXT_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE;

    if (this->tag == Js::DeadStorePhase && !PHASE_OFF(Js::MemOpPhase, this->func))
    {
        this->RemoveeEmptyLoops();
    }

    this->func->m_fg->hasBackwardPassInfo = true;

    if (DoTrackCompoundedIntOverflow())
    {
        // Tracking int overflow makes use of a scratch field in stack syms, which needs to be cleared
        func->m_symTable->ClearStackSymScratch();
    }

#if DBG_DUMP
    if (PHASE_STATS(this->tag, this->func))
    {
        this->func->DumpHeader();
        Output::Print(this->tag == Js::BackwardPhase ? _u("Backward Phase Stats:\n") : _u("Deadstore Phase Stats:\n"));
        if (this->DoDeadStore())
        {
            Output::Print(_u("  Deadstore              : %3d\n"), this->numDeadStore);
        }
        if (this->DoMarkTempNumbers())
        {
            Output::Print(_u("  Temp Number            : %3d\n"), this->numMarkTempNumber);
            Output::Print(_u("  Transferred Temp Number: %3d\n"), this->numMarkTempNumberTransferred);
        }
        if (this->DoMarkTempObjects())
        {
            Output::Print(_u("  Temp Object            : %3d\n"), this->numMarkTempObject);
        }
    }
#endif
}
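
// Merges the backward-flow data of all of a block's successors into fresh data for
// this block (e.g. the union of their upward-exposed uses and byte-code uses),
// releasing a successor's data once its use count drops to zero.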
void
BackwardPass::MergeSuccBlocksInfo(BasicBlock * block)
{
    // Can't reuse the bv in the current block, because its successor can be itself.

    TempNumberTracker * tempNumberTracker = nullptr;
    TempObjectTracker * tempObjectTracker = nullptr;
#if DBG
    TempObjectVerifyTracker * tempObjectVerifyTracker = nullptr;
#endif
    HashTable<AddPropertyCacheBucket> * stackSymToFinalType = nullptr;
    HashTable<ObjTypeGuardBucket> * stackSymToGuardedProperties = nullptr;
    HashTable<ObjWriteGuardBucket> * stackSymToWriteGuardsMap = nullptr;
    BVSparse<JitArenaAllocator> * cloneStrCandidates = nullptr;
    BVSparse<JitArenaAllocator> * noImplicitCallUses = nullptr;
    BVSparse<JitArenaAllocator> * noImplicitCallNoMissingValuesUses = nullptr;
    BVSparse<JitArenaAllocator> * noImplicitCallNativeArrayUses = nullptr;
    BVSparse<JitArenaAllocator> * noImplicitCallJsArrayHeadSegmentSymUses = nullptr;
    BVSparse<JitArenaAllocator> * noImplicitCallArrayLengthSymUses = nullptr;
    BVSparse<JitArenaAllocator> * upwardExposedUses = nullptr;
    BVSparse<JitArenaAllocator> * upwardExposedFields = nullptr;
    BVSparse<JitArenaAllocator> * typesNeedingKnownObjectLayout = nullptr;
    BVSparse<JitArenaAllocator> * slotDeadStoreCandidates = nullptr;
    BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed = nullptr;
    BVSparse<JitArenaAllocator> * couldRemoveNegZeroBailoutForDef = nullptr;
#if DBG
    uint byteCodeLocalsCount = func->GetJITFunctionBody()->GetLocalsCount();
    StackSym ** byteCodeRestoreSyms = nullptr;
    BVSparse<JitArenaAllocator> * excludeByteCodeUpwardExposedTracking = nullptr;
#endif

    Assert(!block->isDead || block->GetSuccList()->Empty());

    if (this->DoByteCodeUpwardExposedUsed())
    {
        byteCodeUpwardExposedUsed = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
#if DBG
        byteCodeRestoreSyms = JitAnewArrayZ(this->tempAlloc, StackSym *, byteCodeLocalsCount);
        excludeByteCodeUpwardExposedTracking = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
#endif
    }

#if DBG
    if (!IsCollectionPass() && this->DoMarkTempObjectVerify())
    {
        tempObjectVerifyTracker = JitAnew(this->tempAlloc, TempObjectVerifyTracker, this->tempAlloc, block->loop != nullptr);
    }
#endif

    if (!block->isDead)
    {
        bool keepUpwardExposed = (this->tag == Js::BackwardPhase);
        JitArenaAllocator *upwardExposedArena = nullptr;
        if (!IsCollectionPass())
        {
            upwardExposedArena = keepUpwardExposed ? this->globOpt->alloc : this->tempAlloc;
            upwardExposedUses = JitAnew(upwardExposedArena, BVSparse<JitArenaAllocator>, upwardExposedArena);
            upwardExposedFields = JitAnew(upwardExposedArena, BVSparse<JitArenaAllocator>, upwardExposedArena);

            if (this->tag == Js::DeadStorePhase)
            {
                typesNeedingKnownObjectLayout = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
            }

            if (this->DoDeadStoreSlots())
            {
                slotDeadStoreCandidates = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
            }

            if (this->DoMarkTempNumbers())
            {
                tempNumberTracker = JitAnew(this->tempAlloc, TempNumberTracker, this->tempAlloc, block->loop != nullptr);
            }

            if (this->DoMarkTempObjects())
            {
                tempObjectTracker = JitAnew(this->tempAlloc, TempObjectTracker, this->tempAlloc, block->loop != nullptr);
            }

            noImplicitCallUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
            noImplicitCallNoMissingValuesUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
            noImplicitCallNativeArrayUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
            noImplicitCallJsArrayHeadSegmentSymUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
            noImplicitCallArrayLengthSymUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);

            if (this->tag == Js::BackwardPhase)
            {
                cloneStrCandidates = JitAnew(this->globOpt->alloc, BVSparse<JitArenaAllocator>, this->globOpt->alloc);
            }
            else
            {
                couldRemoveNegZeroBailoutForDef = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
            }
        }

        bool firstSucc = true;
        FOREACH_SUCCESSOR_BLOCK(blockSucc, block)
        {
#if defined(DBG_DUMP) || defined(ENABLE_DEBUG_CONFIG_OPTIONS)
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
#endif
            // Preserve byteCodeUpwardExposedUsed from deletion; it is needed for the block right after the memop loop.
            if (this->tag == Js::DeadStorePhase && !this->IsPrePass() && globOpt->HasMemOp(block->loop) && blockSucc->loop != block->loop)
            {
                Assert(block->loop->memOpInfo->inductionVariablesUsedAfterLoop == nullptr);
                block->loop->memOpInfo->inductionVariablesUsedAfterLoop = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
                block->loop->memOpInfo->inductionVariablesUsedAfterLoop->Or(blockSucc->byteCodeUpwardExposedUsed);
                block->loop->memOpInfo->inductionVariablesUsedAfterLoop->Or(blockSucc->upwardExposedUses);
            }

            bool deleteData = false;
            if (!blockSucc->isLoopHeader && blockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)
            {
                Assert(blockSucc->GetDataUseCount() != 0);
                deleteData = (blockSucc->DecrementDataUseCount() == 0);
                if (blockSucc->GetFirstInstr()->m_next->m_opcode == Js::OpCode::SpeculatedLoadFence)
                {
                    // We hold on to data for these blocks until the arena gets cleared due to unusual data lifetimes.
                    deleteData = false;
                    blockSucc->IncrementDataUseCount();
                }
            }

#if DBG
            if (excludeByteCodeUpwardExposedTracking && blockSucc->excludeByteCodeUpwardExposedTracking)
            {
                excludeByteCodeUpwardExposedTracking->Or(blockSucc->excludeByteCodeUpwardExposedTracking);
            }
#endif

            Assert((byteCodeUpwardExposedUsed == nullptr) == !this->DoByteCodeUpwardExposedUsed());
            if (byteCodeUpwardExposedUsed && blockSucc->byteCodeUpwardExposedUsed)
            {
                byteCodeUpwardExposedUsed->Or(blockSucc->byteCodeUpwardExposedUsed);
                if (this->tag == Js::DeadStorePhase)
                {
#if DBG
                    for (uint i = 0; i < byteCodeLocalsCount; i++)
                    {
                        if (byteCodeRestoreSyms[i] == nullptr)
                        {
                            byteCodeRestoreSyms[i] = blockSucc->byteCodeRestoreSyms[i];
                        }
                        else
                        {
                            Assert(blockSucc->byteCodeRestoreSyms[i] == nullptr
                                || byteCodeRestoreSyms[i] == blockSucc->byteCodeRestoreSyms[i]);
                        }
                    }
#endif
                    if (deleteData)
                    {
                        // byteCodeUpwardExposedUsed is required to populate the writeThroughSymbolsSet for the try region. So, don't delete it in the backwards pass.
                        JitAdelete(this->tempAlloc, blockSucc->byteCodeUpwardExposedUsed);
                        blockSucc->byteCodeUpwardExposedUsed = nullptr;
                    }
                }
#if DBG
                if (deleteData)
                {
                    JitAdeleteArray(this->tempAlloc, byteCodeLocalsCount, blockSucc->byteCodeRestoreSyms);
                    blockSucc->byteCodeRestoreSyms = nullptr;
                    JitAdelete(this->tempAlloc, blockSucc->excludeByteCodeUpwardExposedTracking);
                    blockSucc->excludeByteCodeUpwardExposedTracking = nullptr;
                }
#endif
            }
            else
            {
                Assert(blockSucc->byteCodeUpwardExposedUsed == nullptr);
                Assert(blockSucc->byteCodeRestoreSyms == nullptr);
                Assert(blockSucc->excludeByteCodeUpwardExposedTracking == nullptr);
            }
            if (IsCollectionPass())
            {
                continue;
            }

            Assert((blockSucc->upwardExposedUses != nullptr)
                || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop))));
            Assert((blockSucc->upwardExposedFields != nullptr)
                || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop))));
            Assert((blockSucc->typesNeedingKnownObjectLayout != nullptr)
                || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
                || this->tag != Js::DeadStorePhase);
            Assert((blockSucc->slotDeadStoreCandidates != nullptr)
                || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
                || !this->DoDeadStoreSlots());
            Assert((blockSucc->tempNumberTracker != nullptr)
                || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
                || !this->DoMarkTempNumbers());
            Assert((blockSucc->tempObjectTracker != nullptr)
                || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
                || !this->DoMarkTempObjects());
            Assert((blockSucc->tempObjectVerifyTracker != nullptr)
                || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
                || !this->DoMarkTempObjectVerify());

            if (blockSucc->upwardExposedUses != nullptr)
            {
                upwardExposedUses->Or(blockSucc->upwardExposedUses);
                if (deleteData && (!keepUpwardExposed
                    || (this->IsPrePass() && blockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)))
                {
                    JitAdelete(upwardExposedArena, blockSucc->upwardExposedUses);
                    blockSucc->upwardExposedUses = nullptr;
                }
            }

            if (blockSucc->upwardExposedFields != nullptr)
            {
                upwardExposedFields->Or(blockSucc->upwardExposedFields);
                if (deleteData && (!keepUpwardExposed
                    || (this->IsPrePass() && blockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)))
                {
                    JitAdelete(upwardExposedArena, blockSucc->upwardExposedFields);
                    blockSucc->upwardExposedFields = nullptr;
                }
            }

            if (blockSucc->typesNeedingKnownObjectLayout != nullptr)
            {
                typesNeedingKnownObjectLayout->Or(blockSucc->typesNeedingKnownObjectLayout);
                if (deleteData)
                {
                    JitAdelete(this->tempAlloc, blockSucc->typesNeedingKnownObjectLayout);
                    blockSucc->typesNeedingKnownObjectLayout = nullptr;
                }
            }

            if (blockSucc->slotDeadStoreCandidates != nullptr)
            {
                slotDeadStoreCandidates->And(blockSucc->slotDeadStoreCandidates);
                if (deleteData)
                {
                    JitAdelete(this->tempAlloc, blockSucc->slotDeadStoreCandidates);
                    blockSucc->slotDeadStoreCandidates = nullptr;
                }
            }

            if (blockSucc->tempNumberTracker != nullptr)
            {
                Assert((blockSucc->loop != nullptr) == blockSucc->tempNumberTracker->HasTempTransferDependencies());
                tempNumberTracker->MergeData(blockSucc->tempNumberTracker, deleteData);
                if (deleteData)
                {
                    blockSucc->tempNumberTracker = nullptr;
                }
            }

            if (blockSucc->tempObjectTracker != nullptr)
            {
                Assert((blockSucc->loop != nullptr) == blockSucc->tempObjectTracker->HasTempTransferDependencies());
                tempObjectTracker->MergeData(blockSucc->tempObjectTracker, deleteData);
                if (deleteData)
                {
                    blockSucc->tempObjectTracker = nullptr;
                }
            }

#if DBG
            if (blockSucc->tempObjectVerifyTracker != nullptr)
            {
                Assert((blockSucc->loop != nullptr) == blockSucc->tempObjectVerifyTracker->HasTempTransferDependencies());
                tempObjectVerifyTracker->MergeData(blockSucc->tempObjectVerifyTracker, deleteData);
                if (deleteData)
                {
                    blockSucc->tempObjectVerifyTracker = nullptr;
                }
            }
#endif

            PHASE_PRINT_TRACE(Js::ObjTypeSpecStorePhase, this->func,
                _u("ObjTypeSpecStore: func %s, edge %d => %d: "),
                this->func->GetDebugNumberSet(debugStringBuffer),
                block->GetBlockNum(), blockSucc->GetBlockNum());
            auto fixupFrom = [block, blockSucc, upwardExposedUses, this](Bucket<AddPropertyCacheBucket> &bucket)
            {
                AddPropertyCacheBucket *fromData = &bucket.element;
                if (fromData->GetInitialType() == nullptr ||
                    fromData->GetFinalType() == fromData->GetInitialType())
                {
                    return;
                }
                this->InsertTypeTransitionsAtPriorSuccessors(block, blockSucc, bucket.value, fromData, upwardExposedUses);
            };

            auto fixupTo = [blockSucc, upwardExposedUses, this](Bucket<AddPropertyCacheBucket> &bucket)
            {
                AddPropertyCacheBucket *toData = &bucket.element;
                if (toData->GetInitialType() == nullptr ||
                    toData->GetFinalType() == toData->GetInitialType())
                {
                    return;
                }
                this->InsertTypeTransitionAtBlock(blockSucc, bucket.value, toData, upwardExposedUses);
            };

            if (blockSucc->stackSymToFinalType != nullptr)
            {
#if DBG_DUMP
                if (PHASE_TRACE(Js::ObjTypeSpecStorePhase, this->func))
                {
                    blockSucc->stackSymToFinalType->Dump();
                }
#endif
                if (firstSucc)
                {
                    stackSymToFinalType = blockSucc->stackSymToFinalType->Copy();
                }
                else if (stackSymToFinalType != nullptr)
                {
                    if (this->IsPrePass())
                    {
                        stackSymToFinalType->And(blockSucc->stackSymToFinalType);
                    }
                    else
                    {
                        // Insert any type transitions that can't be merged past this point.
                        stackSymToFinalType->AndWithFixup(blockSucc->stackSymToFinalType, fixupFrom, fixupTo);
                    }
                }
                else if (!this->IsPrePass())
                {
                    FOREACH_HASHTABLE_ENTRY(AddPropertyCacheBucket, bucket, blockSucc->stackSymToFinalType)
                    {
                        fixupTo(bucket);
                    }
                    NEXT_HASHTABLE_ENTRY;
                }

                if (deleteData)
                {
                    blockSucc->stackSymToFinalType->Delete();
                    blockSucc->stackSymToFinalType = nullptr;
                }
            }
            else
            {
                PHASE_PRINT_TRACE(Js::ObjTypeSpecStorePhase, this->func, _u("null\n"));
                if (stackSymToFinalType)
                {
                    if (!this->IsPrePass())
                    {
                        FOREACH_HASHTABLE_ENTRY(AddPropertyCacheBucket, bucket, stackSymToFinalType)
                        {
                            fixupFrom(bucket);
                        }
                        NEXT_HASHTABLE_ENTRY;
                    }
                    stackSymToFinalType->Delete();
                    stackSymToFinalType = nullptr;
                }
            }
            if (tag == Js::BackwardPhase)
            {
                if (blockSucc->cloneStrCandidates != nullptr)
                {
                    Assert(cloneStrCandidates != nullptr);
                    cloneStrCandidates->Or(blockSucc->cloneStrCandidates);
                    if (deleteData)
                    {
                        JitAdelete(this->globOpt->alloc, blockSucc->cloneStrCandidates);
                        blockSucc->cloneStrCandidates = nullptr;
                    }
                }
#if DBG_DUMP
                if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecWriteGuardsPhase, this->func))
                {
                    char16 debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Output::Print(_u("ObjTypeSpec: top function %s (%s), function %s (%s), write guard symbols on edge %d => %d: "),
                        this->func->GetTopFunc()->GetJITFunctionBody()->GetDisplayName(),
                        this->func->GetTopFunc()->GetDebugNumberSet(debugStringBuffer),
                        this->func->GetJITFunctionBody()->GetDisplayName(),
                        this->func->GetDebugNumberSet(debugStringBuffer2), block->GetBlockNum(),
                        blockSucc->GetBlockNum());
                }
#endif
                if (blockSucc->stackSymToWriteGuardsMap != nullptr)
                {
#if DBG_DUMP
                    if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecWriteGuardsPhase, this->func))
                    {
                        Output::Print(_u("\n"));
                        blockSucc->stackSymToWriteGuardsMap->Dump();
                    }
#endif
                    if (stackSymToWriteGuardsMap == nullptr)
                    {
                        stackSymToWriteGuardsMap = blockSucc->stackSymToWriteGuardsMap->Copy();
                    }
                    else
                    {
                        stackSymToWriteGuardsMap->Or(
                            blockSucc->stackSymToWriteGuardsMap, &BackwardPass::MergeWriteGuards);
                    }

                    if (deleteData)
                    {
                        blockSucc->stackSymToWriteGuardsMap->Delete();
                        blockSucc->stackSymToWriteGuardsMap = nullptr;
                    }
                }
                else
                {
#if DBG_DUMP
                    if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecWriteGuardsPhase, this->func))
                    {
                        Output::Print(_u("null\n"));
                    }
#endif
                }
            }
            else
            {
#if DBG_DUMP
                if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecTypeGuardsPhase, this->func))
                {
                    char16 debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Output::Print(_u("ObjTypeSpec: top function %s (%s), function %s (%s), guarded property operations on edge %d => %d: \n"),
                        this->func->GetTopFunc()->GetJITFunctionBody()->GetDisplayName(),
                        this->func->GetTopFunc()->GetDebugNumberSet(debugStringBuffer),
                        this->func->GetJITFunctionBody()->GetDisplayName(),
                        this->func->GetDebugNumberSet(debugStringBuffer2),
                        block->GetBlockNum(), blockSucc->GetBlockNum());
                }
#endif
                if (blockSucc->stackSymToGuardedProperties != nullptr)
                {
#if DBG_DUMP
                    if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecTypeGuardsPhase, this->func))
                    {
                        blockSucc->stackSymToGuardedProperties->Dump();
                        Output::Print(_u("\n"));
                    }
#endif
                    if (stackSymToGuardedProperties == nullptr)
                    {
                        stackSymToGuardedProperties = blockSucc->stackSymToGuardedProperties->Copy();
                    }
                    else
                    {
                        stackSymToGuardedProperties->Or(
                            blockSucc->stackSymToGuardedProperties, &BackwardPass::MergeGuardedProperties);
                    }

                    if (deleteData)
                    {
                        blockSucc->stackSymToGuardedProperties->Delete();
                        blockSucc->stackSymToGuardedProperties = nullptr;
                    }
                }
                else
                {
#if DBG_DUMP
                    if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecTypeGuardsPhase, this->func))
                    {
                        Output::Print(_u("null\n"));
                    }
#endif
                }

                if (blockSucc->couldRemoveNegZeroBailoutForDef != nullptr)
                {
                    couldRemoveNegZeroBailoutForDef->And(blockSucc->couldRemoveNegZeroBailoutForDef);
                    if (deleteData)
                    {
                        JitAdelete(this->tempAlloc, blockSucc->couldRemoveNegZeroBailoutForDef);
                        blockSucc->couldRemoveNegZeroBailoutForDef = nullptr;
                    }
                }
            }
            if (blockSucc->noImplicitCallUses != nullptr)
            {
                noImplicitCallUses->Or(blockSucc->noImplicitCallUses);
                if (deleteData)
                {
                    JitAdelete(this->tempAlloc, blockSucc->noImplicitCallUses);
                    blockSucc->noImplicitCallUses = nullptr;
                }
            }
            if (blockSucc->noImplicitCallNoMissingValuesUses != nullptr)
            {
                noImplicitCallNoMissingValuesUses->Or(blockSucc->noImplicitCallNoMissingValuesUses);
                if (deleteData)
                {
                    JitAdelete(this->tempAlloc, blockSucc->noImplicitCallNoMissingValuesUses);
                    blockSucc->noImplicitCallNoMissingValuesUses = nullptr;
                }
            }
            if (blockSucc->noImplicitCallNativeArrayUses != nullptr)
            {
                noImplicitCallNativeArrayUses->Or(blockSucc->noImplicitCallNativeArrayUses);
                if (deleteData)
                {
                    JitAdelete(this->tempAlloc, blockSucc->noImplicitCallNativeArrayUses);
                    blockSucc->noImplicitCallNativeArrayUses = nullptr;
                }
            }
            if (blockSucc->noImplicitCallJsArrayHeadSegmentSymUses != nullptr)
            {
                noImplicitCallJsArrayHeadSegmentSymUses->Or(blockSucc->noImplicitCallJsArrayHeadSegmentSymUses);
                if (deleteData)
                {
                    JitAdelete(this->tempAlloc, blockSucc->noImplicitCallJsArrayHeadSegmentSymUses);
                    blockSucc->noImplicitCallJsArrayHeadSegmentSymUses = nullptr;
                }
            }
            if (blockSucc->noImplicitCallArrayLengthSymUses != nullptr)
            {
                noImplicitCallArrayLengthSymUses->Or(blockSucc->noImplicitCallArrayLengthSymUses);
                if (deleteData)
                {
                    JitAdelete(this->tempAlloc, blockSucc->noImplicitCallArrayLengthSymUses);
                    blockSucc->noImplicitCallArrayLengthSymUses = nullptr;
                }
            }

            firstSucc = false;
        }
        NEXT_SUCCESSOR_BLOCK;
#if DBG_DUMP
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        if (PHASE_TRACE(Js::ObjTypeSpecStorePhase, this->func))
        {
            Output::Print(_u("ObjTypeSpecStore: func %s, block %d: "),
                this->func->GetDebugNumberSet(debugStringBuffer),
                block->GetBlockNum());
            if (stackSymToFinalType)
            {
                stackSymToFinalType->Dump();
            }
            else
            {
                Output::Print(_u("null\n"));
            }
        }

        if (PHASE_TRACE(Js::TraceObjTypeSpecTypeGuardsPhase, this->func))
        {
            Output::Print(_u("ObjTypeSpec: func %s, block %d, guarded properties:\n"),
                this->func->GetDebugNumberSet(debugStringBuffer), block->GetBlockNum());
            if (stackSymToGuardedProperties)
            {
                stackSymToGuardedProperties->Dump();
                Output::Print(_u("\n"));
            }
            else
            {
                Output::Print(_u("null\n"));
            }
        }

        if (PHASE_TRACE(Js::TraceObjTypeSpecWriteGuardsPhase, this->func))
        {
            Output::Print(_u("ObjTypeSpec: func %s, block %d, write guards: "),
                this->func->GetDebugNumberSet(debugStringBuffer), block->GetBlockNum());
            if (stackSymToWriteGuardsMap)
            {
                Output::Print(_u("\n"));
                stackSymToWriteGuardsMap->Dump();
                Output::Print(_u("\n"));
            }
            else
            {
                Output::Print(_u("null\n"));
            }
        }
#endif
    }
#if DBG
    if (tempObjectVerifyTracker)
    {
        FOREACH_DEAD_SUCCESSOR_BLOCK(deadBlockSucc, block)
        {
            Assert(deadBlockSucc->tempObjectVerifyTracker || deadBlockSucc->isLoopHeader);
            if (deadBlockSucc->tempObjectVerifyTracker != nullptr)
            {
                Assert((deadBlockSucc->loop != nullptr) == deadBlockSucc->tempObjectVerifyTracker->HasTempTransferDependencies());
                // Dead blocks don't affect non-temp uses; we only need to carry the removed-use bit vector
                // forward and add all the upward-exposed uses to the set of syms that we might find
                // to be mark-temp after globopt.
                tempObjectVerifyTracker->MergeDeadData(deadBlockSucc);
            }

            if (!byteCodeUpwardExposedUsed)
            {
                if (!deadBlockSucc->isLoopHeader && deadBlockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)
                {
                    Assert(deadBlockSucc->GetDataUseCount() != 0);
                    if (deadBlockSucc->DecrementDataUseCount() == 0)
                    {
                        this->DeleteBlockData(deadBlockSucc);
                    }
                }
            }
        }
        NEXT_DEAD_SUCCESSOR_BLOCK;
    }
#endif
    if (byteCodeUpwardExposedUsed)
    {
        FOREACH_DEAD_SUCCESSOR_BLOCK(deadBlockSucc, block)
        {
#if DBG
            if (excludeByteCodeUpwardExposedTracking && deadBlockSucc->excludeByteCodeUpwardExposedTracking)
            {
                excludeByteCodeUpwardExposedTracking->Or(deadBlockSucc->excludeByteCodeUpwardExposedTracking);
            }
#endif
            Assert(deadBlockSucc->byteCodeUpwardExposedUsed || deadBlockSucc->isLoopHeader);
            if (deadBlockSucc->byteCodeUpwardExposedUsed)
            {
                byteCodeUpwardExposedUsed->Or(deadBlockSucc->byteCodeUpwardExposedUsed);
                if (this->tag == Js::DeadStorePhase)
                {
#if DBG
                    for (uint i = 0; i < byteCodeLocalsCount; i++)
                    {
                        if (byteCodeRestoreSyms[i] == nullptr)
                        {
                            byteCodeRestoreSyms[i] = deadBlockSucc->byteCodeRestoreSyms[i];
                        }
                        else
                        {
                            Assert(deadBlockSucc->byteCodeRestoreSyms[i] == nullptr
                                || byteCodeRestoreSyms[i] == deadBlockSucc->byteCodeRestoreSyms[i]);
                        }
                    }
#endif
                }
            }

            if (!deadBlockSucc->isLoopHeader && deadBlockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)
            {
                Assert(deadBlockSucc->GetDataUseCount() != 0);
                if (deadBlockSucc->DecrementDataUseCount() == 0)
                {
                    this->DeleteBlockData(deadBlockSucc);
                }
            }
        }
        NEXT_DEAD_SUCCESSOR_BLOCK;
    }
    if (block->isLoopHeader)
    {
        this->DeleteBlockData(block);
    }
    else
    {
        if (block->GetDataUseCount() == 0)
        {
            Assert(block->slotDeadStoreCandidates == nullptr);
            Assert(block->tempNumberTracker == nullptr);
            Assert(block->tempObjectTracker == nullptr);
            Assert(block->tempObjectVerifyTracker == nullptr);
            Assert(block->upwardExposedUses == nullptr);
            Assert(block->upwardExposedFields == nullptr);
            Assert(block->typesNeedingKnownObjectLayout == nullptr);
            // byteCodeUpwardExposedUsed is required to populate the writeThroughSymbolsSet for the try region in the backwards pass
            Assert(block->byteCodeUpwardExposedUsed == nullptr || (this->DoByteCodeUpwardExposedUsed()));
            Assert(block->byteCodeRestoreSyms == nullptr);
            Assert(block->excludeByteCodeUpwardExposedTracking == nullptr || (this->DoByteCodeUpwardExposedUsed()));
            Assert(block->stackSymToFinalType == nullptr);
            Assert(block->stackSymToGuardedProperties == nullptr);
            Assert(block->stackSymToWriteGuardsMap == nullptr);
            Assert(block->cloneStrCandidates == nullptr);
            Assert(block->noImplicitCallUses == nullptr);
            Assert(block->noImplicitCallNoMissingValuesUses == nullptr);
            Assert(block->noImplicitCallNativeArrayUses == nullptr);
            Assert(block->noImplicitCallJsArrayHeadSegmentSymUses == nullptr);
            Assert(block->noImplicitCallArrayLengthSymUses == nullptr);
            Assert(block->couldRemoveNegZeroBailoutForDef == nullptr);
        }
        else
        {
            // The collection pass sometimes does not know whether it can delete a successor block's data, so it may leave some
            // blocks with data intact. Delete the block data now.
            Assert(block->backwardPassCurrentLoop);
            Assert(block->backwardPassCurrentLoop->hasDeadStoreCollectionPass);
            // The two situations where we might be keeping data around are either before we do
            // the prepass, or when we're storing the data because we have a speculation-cancel
            // block, which has longer lifetimes for its data.
            Assert(!block->backwardPassCurrentLoop->hasDeadStorePrepass || block->GetFirstInstr()->m_next->m_opcode == Js::OpCode::SpeculatedLoadFence);

            DeleteBlockData(block);
        }

        block->backwardPassCurrentLoop = this->currentPrePassLoop;

        if (this->DoByteCodeUpwardExposedUsed()
#if DBG
            || this->DoMarkTempObjectVerify()
#endif
            )
        {
            block->SetDataUseCount(block->GetPredList()->Count() + block->GetDeadPredList()->Count());
        }
        else
        {
            block->SetDataUseCount(block->GetPredList()->Count());
        }
    }
    block->upwardExposedUses = upwardExposedUses;
    block->upwardExposedFields = upwardExposedFields;
    block->typesNeedingKnownObjectLayout = typesNeedingKnownObjectLayout;
    block->byteCodeUpwardExposedUsed = byteCodeUpwardExposedUsed;
#if DBG
    block->byteCodeRestoreSyms = byteCodeRestoreSyms;
    block->excludeByteCodeUpwardExposedTracking = excludeByteCodeUpwardExposedTracking;
#endif
    block->slotDeadStoreCandidates = slotDeadStoreCandidates;
    block->tempNumberTracker = tempNumberTracker;
    block->tempObjectTracker = tempObjectTracker;
#if DBG
    block->tempObjectVerifyTracker = tempObjectVerifyTracker;
#endif
    block->stackSymToFinalType = stackSymToFinalType;
    block->stackSymToGuardedProperties = stackSymToGuardedProperties;
    block->stackSymToWriteGuardsMap = stackSymToWriteGuardsMap;
    block->cloneStrCandidates = cloneStrCandidates;
    block->noImplicitCallUses = noImplicitCallUses;
    block->noImplicitCallNoMissingValuesUses = noImplicitCallNoMissingValuesUses;
    block->noImplicitCallNativeArrayUses = noImplicitCallNativeArrayUses;
    block->noImplicitCallJsArrayHeadSegmentSymUses = noImplicitCallJsArrayHeadSegmentSymUses;
    block->noImplicitCallArrayLengthSymUses = noImplicitCallArrayLengthSymUses;
    block->couldRemoveNegZeroBailoutForDef = couldRemoveNegZeroBailoutForDef;
}
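
// Merges two guarded-property buckets coming from different control flow paths: the
// guarded-property-operation bit vectors are unioned, and a mono guard type, if one is
// present, must agree between the two buckets.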
ObjTypeGuardBucket
BackwardPass::MergeGuardedProperties(ObjTypeGuardBucket bucket1, ObjTypeGuardBucket bucket2)
{
    BVSparse<JitArenaAllocator> *guardedPropertyOps1 = bucket1.GetGuardedPropertyOps();
    BVSparse<JitArenaAllocator> *guardedPropertyOps2 = bucket2.GetGuardedPropertyOps();
    Assert(guardedPropertyOps1 || guardedPropertyOps2);

    BVSparse<JitArenaAllocator> *mergedPropertyOps;
    if (guardedPropertyOps1)
    {
        mergedPropertyOps = guardedPropertyOps1->CopyNew();
        if (guardedPropertyOps2)
        {
            mergedPropertyOps->Or(guardedPropertyOps2);
        }
    }
    else
    {
        mergedPropertyOps = guardedPropertyOps2->CopyNew();
    }

    ObjTypeGuardBucket bucket;
    bucket.SetGuardedPropertyOps(mergedPropertyOps);
    JITTypeHolder monoGuardType = bucket1.GetMonoGuardType();
    if (monoGuardType != nullptr)
    {
        Assert(!bucket2.NeedsMonoCheck() || monoGuardType == bucket2.GetMonoGuardType());
    }
    else
    {
        monoGuardType = bucket2.GetMonoGuardType();
    }
    bucket.SetMonoGuardType(monoGuardType);

    return bucket;
}
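
// Merges two write-guard buckets the same way: the merged bucket carries the union of
// the two write-guard bit vectors.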
ObjWriteGuardBucket
BackwardPass::MergeWriteGuards(ObjWriteGuardBucket bucket1, ObjWriteGuardBucket bucket2)
{
    BVSparse<JitArenaAllocator> *writeGuards1 = bucket1.GetWriteGuards();
    BVSparse<JitArenaAllocator> *writeGuards2 = bucket2.GetWriteGuards();
    Assert(writeGuards1 || writeGuards2);

    BVSparse<JitArenaAllocator> *mergedWriteGuards;
    if (writeGuards1)
    {
        mergedWriteGuards = writeGuards1->CopyNew();
        if (writeGuards2)
        {
            mergedWriteGuards->Or(writeGuards2);
        }
    }
    else
    {
        mergedWriteGuards = writeGuards2->CopyNew();
    }

    ObjWriteGuardBucket bucket;
    bucket.SetWriteGuards(mergedWriteGuards);
    return bucket;
}
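
// Frees all of a block's backward-pass data and nulls the pointers. Note that
// upwardExposedUses and upwardExposedFields live in globOpt->alloc during the
// BackwardPhase and in tempAlloc during the DeadStorePhase, so the matching arena is
// selected before deletion; cloneStrCandidates exist only in the BackwardPhase.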
void
BackwardPass::DeleteBlockData(BasicBlock * block)
{
    if (block->slotDeadStoreCandidates != nullptr)
    {
        JitAdelete(this->tempAlloc, block->slotDeadStoreCandidates);
        block->slotDeadStoreCandidates = nullptr;
    }
    if (block->tempNumberTracker != nullptr)
    {
        JitAdelete(this->tempAlloc, block->tempNumberTracker);
        block->tempNumberTracker = nullptr;
    }
    if (block->tempObjectTracker != nullptr)
    {
        JitAdelete(this->tempAlloc, block->tempObjectTracker);
        block->tempObjectTracker = nullptr;
    }
#if DBG
    if (block->tempObjectVerifyTracker != nullptr)
    {
        JitAdelete(this->tempAlloc, block->tempObjectVerifyTracker);
        block->tempObjectVerifyTracker = nullptr;
    }
#endif
    if (block->stackSymToFinalType != nullptr)
    {
        block->stackSymToFinalType->Delete();
        block->stackSymToFinalType = nullptr;
    }
    if (block->stackSymToGuardedProperties != nullptr)
    {
        block->stackSymToGuardedProperties->Delete();
        block->stackSymToGuardedProperties = nullptr;
    }
    if (block->stackSymToWriteGuardsMap != nullptr)
    {
        block->stackSymToWriteGuardsMap->Delete();
        block->stackSymToWriteGuardsMap = nullptr;
    }
    if (block->cloneStrCandidates != nullptr)
    {
        Assert(this->tag == Js::BackwardPhase);
        JitAdelete(this->globOpt->alloc, block->cloneStrCandidates);
        block->cloneStrCandidates = nullptr;
    }
    if (block->noImplicitCallUses != nullptr)
    {
        JitAdelete(this->tempAlloc, block->noImplicitCallUses);
        block->noImplicitCallUses = nullptr;
    }
    if (block->noImplicitCallNoMissingValuesUses != nullptr)
    {
        JitAdelete(this->tempAlloc, block->noImplicitCallNoMissingValuesUses);
        block->noImplicitCallNoMissingValuesUses = nullptr;
    }
    if (block->noImplicitCallNativeArrayUses != nullptr)
    {
        JitAdelete(this->tempAlloc, block->noImplicitCallNativeArrayUses);
        block->noImplicitCallNativeArrayUses = nullptr;
    }
    if (block->noImplicitCallJsArrayHeadSegmentSymUses != nullptr)
    {
        JitAdelete(this->tempAlloc, block->noImplicitCallJsArrayHeadSegmentSymUses);
        block->noImplicitCallJsArrayHeadSegmentSymUses = nullptr;
    }
    if (block->noImplicitCallArrayLengthSymUses != nullptr)
    {
        JitAdelete(this->tempAlloc, block->noImplicitCallArrayLengthSymUses);
        block->noImplicitCallArrayLengthSymUses = nullptr;
    }
    if (block->upwardExposedUses != nullptr)
    {
        JitArenaAllocator *upwardExposedArena = (this->tag == Js::BackwardPhase) ? this->globOpt->alloc : this->tempAlloc;
        JitAdelete(upwardExposedArena, block->upwardExposedUses);
        block->upwardExposedUses = nullptr;
    }
    if (block->upwardExposedFields != nullptr)
    {
        JitArenaAllocator *upwardExposedArena = (this->tag == Js::BackwardPhase) ? this->globOpt->alloc : this->tempAlloc;
        JitAdelete(upwardExposedArena, block->upwardExposedFields);
        block->upwardExposedFields = nullptr;
    }
    if (block->typesNeedingKnownObjectLayout != nullptr)
    {
        JitAdelete(this->tempAlloc, block->typesNeedingKnownObjectLayout);
        block->typesNeedingKnownObjectLayout = nullptr;
    }
    if (block->byteCodeUpwardExposedUsed != nullptr)
    {
        JitAdelete(this->tempAlloc, block->byteCodeUpwardExposedUsed);
        block->byteCodeUpwardExposedUsed = nullptr;
#if DBG
        JitAdeleteArray(this->tempAlloc, func->GetJITFunctionBody()->GetLocalsCount(), block->byteCodeRestoreSyms);
        block->byteCodeRestoreSyms = nullptr;
        JitAdelete(this->tempAlloc, block->excludeByteCodeUpwardExposedTracking);
        block->excludeByteCodeUpwardExposedTracking = nullptr;
#endif
    }
    if (block->couldRemoveNegZeroBailoutForDef != nullptr)
    {
        JitAdelete(this->tempAlloc, block->couldRemoveNegZeroBailoutForDef);
        block->couldRemoveNegZeroBailoutForDef = nullptr;
    }
}
void
BackwardPass::ProcessLoopCollectionPass(BasicBlock *const lastBlock)
{
    // The collection pass is done before the prepass, to collect and propagate a minimal amount of information into nested
    // loops, for cases where the information is needed to make appropriate decisions on changing other state. For instance,
    // bailouts in nested loops need to be able to see all byte-code uses that are exposed to the bailout so that the
    // appropriate syms can be made upwards-exposed during the prepass. Byte-code uses that occur before the bailout in the
    // flow, or byte-code uses after the current loop, are not seen by bailouts inside the loop. The collection pass collects
    // byte-code uses and propagates them at least into each loop's header such that when bailouts are processed in the prepass,
    // they will have full visibility of byte-code upwards-exposed uses.
    //
    // For the collection pass, one pass is needed to collect all byte-code uses of a loop to the loop header. If the loop has
    // inner loops, another pass is needed to propagate byte-code uses in the outer loop into the inner loop's header, since
    // some byte-code uses may occur before the inner loop in the flow. The process continues recursively for inner loops. The
    // second pass only needs to walk as far as the first inner loop's header, since the purpose of that pass is only to
    // propagate collected information into the inner loops' headers.
    //
    // Consider the following case:
    //   (Block 1, Loop 1 header)
    //     ByteCodeUses s1
    //     (Block 2, Loop 2 header)
    //       (Block 3, Loop 3 header)
    //         (Block 4)
    //           BailOut
    //       (Block 5, Loop 3 back-edge)
    //     (Block 6, Loop 2 back-edge)
    //   (Block 7, Loop 1 back-edge)
    //
    // Assume that the exit branch in each of these loops is in the loop's header block, like a 'while' loop. For the byte-code
    // use of 's1' to become visible to the bailout in the innermost loop, we need to walk the following blocks:
    // - Collection pass
    //     - 7, 6, 5, 4, 3, 2, 1, 7 - block 1 is the first block in loop 1 that sees 's1', and since block 7 has block 1 as its
    //       successor, block 7 sees 's1' now as well
    //     - 6, 5, 4, 3, 2, 6 - block 2 is the first block in loop 2 that sees 's1', and since block 6 has block 2 as its
    //       successor, block 6 sees 's1' now as well
    //     - 5, 4, 3 - block 3 is the first block in loop 3 that sees 's1'
    //     - The collection pass does not have to do another pass through the innermost loop because it does not have any inner
    //       loops of its own. It's sufficient to propagate the byte-code uses up to the loop header of each loop, as the
    //       prepass will do the remaining propagation.
    // - Prepass
    //     - 7, 6, 5, 4, ... - since block 5 has block 3 as its successor, block 5 sees 's1', and so does block 4. So, the bailout
    //       finally sees 's1' as a byte-code upwards-exposed use.
    //
    // The collection pass walks as described above, and consists of one pass, followed by another pass if there are inner
    // loops. The second pass only walks up to the first inner loop's header block, and during this pass upon reaching an inner
    // loop, the algorithm goes recursively for that inner loop, and once it returns, the second pass continues from above that
    // inner loop. Each bullet of the walk in the example above is a recursive call to ProcessLoopCollectionPass, except the
    // first line, which is the initial call.
    //
    // Imagine the whole example above is inside another loop, and at the bottom of that loop there is an assignment to 's1'. If
    // the bailout is the only use of 's1', then it needs to register 's1' as a use in the prepass to prevent treating the
    // assignment to 's1' as a dead store.
    Assert(tag == Js::DeadStorePhase);
    Assert(IsCollectionPass());
    Assert(lastBlock);

    Loop *const collectionPassLoop = lastBlock->loop;
    Assert(collectionPassLoop);
    Assert(!collectionPassLoop->hasDeadStoreCollectionPass);
    collectionPassLoop->hasDeadStoreCollectionPass = true;

    Loop *const previousPrepassLoop = currentPrePassLoop;
    currentPrePassLoop = collectionPassLoop;
    Assert(IsPrePass());
    // This is also the location where we do the additional step of tracking what opnds
    // are used inside the loop in memory dereferences, and thus need masking for cache
    // attacks (Spectre). This is a fairly conservative approach, where we just track a
    // set of symbols which are determined by each other inside the loop. This lets the
    // second pass later on determine if a particular operation generating a symbol can
    // avoid the Spectre masking overhead, since a symbol not dereferenced in the loop
    // can be masked on the out-edge of the loop, which should be significantly cheaper
    // than masking it every iteration.
    AssertMsg(collectionPassLoop->symClusterList == nullptr, "clusterList should not have been initialized yet!");

    // This is needed to work around tokenization issues with preprocessor macros which
    // present themselves when using multiple template parameters.
#ifndef _M_ARM
    typedef SegmentClusterList<SymID, JitArenaAllocator> symClusterListType;
    collectionPassLoop->symClusterList = JitAnew(this->func->m_fg->alloc, symClusterListType, this->func->m_fg->alloc, 256);
    collectionPassLoop->internallyDereferencedSyms = JitAnew(this->func->m_fg->alloc, BVSparse<JitArenaAllocator>, this->func->m_fg->alloc);
#endif
    // First pass
    BasicBlock *firstInnerLoopHeader = nullptr;
    {
#if DBG_DUMP
        if (IsTraceEnabled())
        {
            Output::Print(_u("******* COLLECTION PASS 1 START: Loop %u ********\n"), collectionPassLoop->GetLoopTopInstr()->m_id);
        }
#endif
        // We want to be able to disambiguate this in ProcessBlock
        CollectionPassSubPhase prevCollectionPassSubPhase = this->collectionPassSubPhase;
        this->collectionPassSubPhase = CollectionPassSubPhase::FirstPass;
        FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, lastBlock, nullptr)
        {
            ProcessBlock(block);

            if (block->isLoopHeader)
            {
                if (block->loop == collectionPassLoop)
                {
                    break;
                }

                // Keep track of the first inner loop's header for the second pass, which need only walk up to that block
                firstInnerLoopHeader = block;
            }
        } NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE;
        this->collectionPassSubPhase = prevCollectionPassSubPhase;
#if DBG_DUMP
        if (IsTraceEnabled())
        {
            Output::Print(_u("******** COLLECTION PASS 1 END: Loop %u *********\n"), collectionPassLoop->GetLoopTopInstr()->m_id);
        }
#endif
    }
#ifndef _M_ARM
    // Since we generated the base data structures for the spectre handling, we can now
    // cross-reference them to get the full set of what may be dereferenced in the loop
    // and what is safe in speculation.
#if DBG_DUMP
    if (PHASE_TRACE(Js::SpeculationPropagationAnalysisPhase, this->func))
    {
        Output::Print(_u("Analysis Results for loop %u:\n"), collectionPassLoop->GetLoopNumber());
        Output::Print(_u("ClusterList pre-consolidation: "));
        collectionPassLoop->symClusterList->Dump();
    }
#endif // DBG_DUMP
    collectionPassLoop->symClusterList->Consolidate();
#if DBG_DUMP
    if (PHASE_TRACE(Js::SpeculationPropagationAnalysisPhase, this->func))
    {
        Output::Print(_u("ClusterList post-consolidation: "));
        collectionPassLoop->symClusterList->Dump();
        Output::Print(_u("Internally dereferenced syms pre-propagation: "));
        collectionPassLoop->internallyDereferencedSyms->Dump();
    }
#endif // DBG_DUMP
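    // Propagate "dereferenced inside the loop" across each symbol cluster in two sweeps:
    // the first sweep marks a cluster's root if any member of the cluster is marked, and
    // the second marks every member whose root is marked, so each cluster ends up
    // uniformly flagged.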
    collectionPassLoop->symClusterList->Map<BVSparse<JitArenaAllocator>*, true>([](SymID index, SymID containingSetRoot, BVSparse<JitArenaAllocator>* bv) {
        if (bv->Test(index))
        {
            bv->Set(containingSetRoot);
        }
    }, collectionPassLoop->internallyDereferencedSyms);
    collectionPassLoop->symClusterList->Map<BVSparse<JitArenaAllocator>*, true>([](SymID index, SymID containingSetRoot, BVSparse<JitArenaAllocator>* bv) {
        if (bv->Test(containingSetRoot))
        {
            bv->Set(index);
        }
    }, collectionPassLoop->internallyDereferencedSyms);
#if DBG_DUMP
    if (PHASE_TRACE(Js::SpeculationPropagationAnalysisPhase, this->func))
    {
        Output::Print(_u("Internally dereferenced syms post-propagation: "));
        collectionPassLoop->internallyDereferencedSyms->Dump();
    }
#endif // DBG_DUMP
#endif // !defined(_M_ARM)
    // Second pass, only needs to run if there are any inner loops, to propagate collected information into those loops
    if (firstInnerLoopHeader)
    {
#if DBG_DUMP
        if (IsTraceEnabled())
        {
            Output::Print(_u("******* COLLECTION PASS 2 START: Loop %u ********\n"), collectionPassLoop->GetLoopTopInstr()->m_id);
        }
#endif
        // We want to be able to disambiguate this in ProcessBlock
        CollectionPassSubPhase prevCollectionPassSubPhase = this->collectionPassSubPhase;
        this->collectionPassSubPhase = CollectionPassSubPhase::SecondPass;
        FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, lastBlock, firstInnerLoopHeader)
        {
            Loop *const loop = block->loop;
            if (loop && loop != collectionPassLoop && !loop->hasDeadStoreCollectionPass)
            {
                // About to make a recursive call, so when jitting in the foreground, probe the stack
                if (!func->IsBackgroundJIT())
                {
                    PROBE_STACK_NO_DISPOSE(func->GetScriptContext(), Js::Constants::MinStackDefault);
                }
                ProcessLoopCollectionPass(block);

                // The inner loop's collection pass would have propagated collected information to its header block. Skip to the
                // inner loop's header block and continue from the block before it.
                block = loop->GetHeadBlock();
                Assert(block->isLoopHeader);
                continue;
            }

            ProcessBlock(block);
        } NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE;
        this->collectionPassSubPhase = prevCollectionPassSubPhase;
#if DBG_DUMP
        if (IsTraceEnabled())
        {
            Output::Print(_u("******** COLLECTION PASS 2 END: Loop %u *********\n"), collectionPassLoop->GetLoopTopInstr()->m_id);
        }
#endif
    }

    currentPrePassLoop = previousPrepassLoop;
}
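
// Runs the prepass over a single loop: the loop's blocks are processed backward from
// the lexically last block up to the loop header. In the DeadStorePhase, a collection
// pass is run first if this loop has not had one yet.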
void
BackwardPass::ProcessLoop(BasicBlock * lastBlock)
{
#if DBG_DUMP
    if (this->IsTraceEnabled())
    {
        Output::Print(_u("******* PREPASS START ********\n"));
    }
#endif

    Loop *loop = lastBlock->loop;
    bool prevIsLoopPrepass = this->isLoopPrepass;
    this->isLoopPrepass = true;

    // This code doesn't work quite as intended. It is meant to capture fields that are live out of a loop to limit the
    // number of implicit call bailouts the forward pass must create (only compiler throughput optimization, no impact
    // on emitted code), but because it looks only at the lexically last block in the loop, it does the right thing only
    // for do-while loops. For other loops (for and while) the last block does not exit the loop. Even for do-while loops
    // this tracking can have the adverse effect of killing fields that should stay live after copy prop. Disabled by default.
    // Left in under a flag, in case we find compiler throughput issues and want to do additional experiments.
    if (PHASE_ON(Js::LiveOutFieldsPhase, this->func))
    {
        if (this->globOpt->DoFieldOpts(loop) || this->globOpt->DoFieldRefOpts(loop))
        {
            // Get the live-out set at the loop bottom.
            // This may not be the only loop exit, but all loop exits either leave the function or pass through here.
            // In the forward pass, we'll use this set to trim the live fields on exit from the loop
            // in order to limit the number of bailout points following the loop.
            BVSparse<JitArenaAllocator> *bv = JitAnew(this->func->m_fg->alloc, BVSparse<JitArenaAllocator>, this->func->m_fg->alloc);
            FOREACH_SUCCESSOR_BLOCK(blockSucc, lastBlock)
            {
                if (blockSucc->loop != loop)
                {
                    // Would like to assert this, but in strange exprgen cases involving "break LABEL" in nested
                    // loops the loop graph seems to get confused.
                    //Assert(!blockSucc->loop || blockSucc->loop->IsDescendentOrSelf(loop));
                    Assert(!blockSucc->loop || blockSucc->loop->hasDeadStorePrepass);

                    bv->Or(blockSucc->upwardExposedFields);
                }
            }
            NEXT_SUCCESSOR_BLOCK;
            lastBlock->loop->liveOutFields = bv;
        }
    }

    if (tag == Js::DeadStorePhase && !loop->hasDeadStoreCollectionPass)
    {
        Assert(!IsCollectionPass());
        Assert(!IsPrePass());
        isCollectionPass = true;
        ProcessLoopCollectionPass(lastBlock);
        isCollectionPass = false;
    }

    Assert(!this->IsPrePass());
    this->currentPrePassLoop = loop;

    if (tag == Js::BackwardPhase)
    {
        Assert(loop->symsAssignedToInLoop == nullptr);
        loop->symsAssignedToInLoop = JitAnew(this->globOpt->alloc, BVSparse<JitArenaAllocator>, this->globOpt->alloc);
    }

    FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, lastBlock, nullptr)
    {
        this->ProcessBlock(block);

        if (block->isLoopHeader && block->loop == lastBlock->loop)
        {
            break;
        }
    }
    NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE;

    this->currentPrePassLoop = nullptr;
    Assert(lastBlock);
    __analysis_assume(lastBlock);
    lastBlock->loop->hasDeadStorePrepass = true;
    this->isLoopPrepass = prevIsLoopPrepass;

#if DBG_DUMP
    if (this->IsTraceEnabled())
    {
        Output::Print(_u("******** PREPASS END *********\n"));
    }
#endif
}
void
BackwardPass::OptBlock(BasicBlock * block)
{
    this->func->ThrowIfScriptClosed();

    if (block->loop && !block->loop->hasDeadStorePrepass)
    {
        ProcessLoop(block);
    }

    this->ProcessBlock(block);

    if (DoTrackNegativeZero())
    {
        negativeZeroDoesNotMatterBySymId->ClearAll();
    }
    if (DoTrackBitOpsOrNumber())
    {
        symUsedOnlyForBitOpsBySymId->ClearAll();
        symUsedOnlyForNumberBySymId->ClearAll();
    }
    if (DoTrackIntOverflow())
    {
        intOverflowDoesNotMatterBySymId->ClearAll();
        if (DoTrackCompoundedIntOverflow())
        {
            intOverflowDoesNotMatterInRangeBySymId->ClearAll();
        }
    }

#if DBG
    if (this->DoByteCodeUpwardExposedUsed())
    {
        if (this->tag == Js::BackwardPhase)
        {
            // Keep track of all the byte-code syms that are upward-exposed after the Backward pass
            BVSparse<JitArenaAllocator>* byteCodeUpwardExposedUsed = nullptr;
            const auto EnsureBV = [&] { if (!byteCodeUpwardExposedUsed) byteCodeUpwardExposedUsed = JitAnew(this->globOpt->alloc, BVSparse<JitArenaAllocator>, this->globOpt->alloc); };
            Assert(block->byteCodeUpwardExposedUsed != nullptr);
            FOREACH_BITSET_IN_SPARSEBV(symID, block->byteCodeUpwardExposedUsed)
            {
                Sym* sym = this->func->m_symTable->Find(symID);
                if (sym != nullptr && sym->IsStackSym())
                {
                    StackSym* stackSym = sym->AsStackSym();
                    if (stackSym->HasByteCodeRegSlot())
                    {
                        Js::RegSlot bytecode = stackSym->GetByteCodeRegSlot();
                        EnsureBV();
                        byteCodeUpwardExposedUsed->Set(bytecode);
                    }
                }
            }
            NEXT_BITSET_IN_SPARSEBV;
            if (byteCodeUpwardExposedUsed)
            {
                // Exclude unwanted syms
                byteCodeUpwardExposedUsed->Minus(block->excludeByteCodeUpwardExposedTracking);
                Assert(block->trackingByteCodeUpwardExposedUsed == nullptr);
                block->trackingByteCodeUpwardExposedUsed = byteCodeUpwardExposedUsed;
            }
        }
        else
        {
            // The calculated byte-code upward-exposed set should be the same between the Backward and DeadStore passes
            Assert(this->tag == Js::DeadStorePhase);
            if (block->trackingByteCodeUpwardExposedUsed)
            {
                // We don't need to track byte-code upward-exposed uses if we don't have bailouts.
                // We've collected the Backward pass's set for nothing, oh well.
                if (this->func->hasBailout)
                {
                    Assert(block->byteCodeUpwardExposedUsed);
                    BVSparse<JitArenaAllocator>* byteCodeUpwardExposedUsed = JitAnew(this->globOpt->tempAlloc, BVSparse<JitArenaAllocator>, this->globOpt->tempAlloc);
                    FOREACH_BITSET_IN_SPARSEBV(symID, block->byteCodeUpwardExposedUsed)
                    {
                        Sym* sym = this->func->m_symTable->Find(symID);
                        Js::RegSlot bytecode = sym->AsStackSym()->GetByteCodeRegSlot();
                        byteCodeUpwardExposedUsed->Set(bytecode);
                    }
                    NEXT_BITSET_IN_SPARSEBV;
                    // Exclude unwanted syms
                    byteCodeUpwardExposedUsed->Minus(block->excludeByteCodeUpwardExposedTracking);
                    Assert(block->trackingByteCodeUpwardExposedUsed->Equal(byteCodeUpwardExposedUsed));
                    JitAdelete(this->globOpt->tempAlloc, byteCodeUpwardExposedUsed);
                }
                JitAdelete(this->globOpt->alloc, block->trackingByteCodeUpwardExposedUsed);
                block->trackingByteCodeUpwardExposedUsed = nullptr;
            }
        }
    }
#endif
}
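
// For a bailout point, transfers byte-code upward-exposed syms that are captured as
// arguments-object syms into the bailout's usedCapturedValues, so they are restored
// from the captured values instead of being tracked as live byte-code uses.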
void
BackwardPass::ProcessBailOutArgObj(BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed)
{
    Assert(this->tag != Js::BackwardPhase);

    if (this->globOpt->TrackArgumentsObject() && bailOutInfo->capturedValues->argObjSyms)
    {
        FOREACH_BITSET_IN_SPARSEBV(symId, bailOutInfo->capturedValues->argObjSyms)
        {
            if (byteCodeUpwardExposedUsed->TestAndClear(symId))
            {
                if (bailOutInfo->usedCapturedValues.argObjSyms == nullptr)
                {
                    bailOutInfo->usedCapturedValues.argObjSyms = JitAnew(this->func->m_alloc,
                        BVSparse<JitArenaAllocator>, this->func->m_alloc);
                }
                bailOutInfo->usedCapturedValues.argObjSyms->Set(symId);
            }
        }
        NEXT_BITSET_IN_SPARSEBV;
    }
    if (bailOutInfo->usedCapturedValues.argObjSyms)
    {
        byteCodeUpwardExposedUsed->Minus(bailOutInfo->usedCapturedValues.argObjSyms);
    }
}
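
// Moves captured constant values that the bailout actually needs (byte-code
// upward-exposed or referenced arg syms) onto the used-constants list; on the real
// pass (not the prepass), captured constants that are not needed are deleted.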
void
BackwardPass::ProcessBailOutConstants(BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed, BVSparse<JitArenaAllocator>* bailoutReferencedArgSymsBv)
{
    Assert(this->tag != Js::BackwardPhase);

    // Remove constants that we are already going to restore
    SListBase<ConstantStackSymValue> * usedConstantValues = &bailOutInfo->usedCapturedValues.constantValues;
    FOREACH_SLISTBASE_ENTRY(ConstantStackSymValue, value, usedConstantValues)
    {
        byteCodeUpwardExposedUsed->Clear(value.Key()->m_id);
        bailoutReferencedArgSymsBv->Clear(value.Key()->m_id);
    }
    NEXT_SLISTBASE_ENTRY;

    // Find other constants that we need to restore
    FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, value, &bailOutInfo->capturedValues->constantValues, iter)
    {
        if (byteCodeUpwardExposedUsed->TestAndClear(value.Key()->m_id) || bailoutReferencedArgSymsBv->TestAndClear(value.Key()->m_id))
        {
            // The constant needs to be restored; move it to the restore list
            iter.MoveCurrentTo(usedConstantValues);
        }
        else if (!this->IsPrePass())
        {
            // The constant doesn't need to be restored; delete it
            iter.RemoveCurrent(this->func->m_alloc);
        }
    }
    NEXT_SLISTBASE_ENTRY_EDITING;
}
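
// For each captured copy-prop pair, decides whether the bailout should restore the
// byte-code register from the original sym or from the copy-prop sym (preferring
// type-specialized versions when they are live), and makes the chosen sym
// upward-exposed so that its def stays alive.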
void
BackwardPass::ProcessBailOutCopyProps(BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed, BVSparse<JitArenaAllocator>* bailoutReferencedArgSymsBv)
{
    Assert(this->tag != Js::BackwardPhase);
    Assert(!this->func->GetJITFunctionBody()->IsAsmJsMode());

    // Remove copy-prop syms that we were already going to restore
    SListBase<CopyPropSyms> * usedCopyPropSyms = &bailOutInfo->usedCapturedValues.copyPropSyms;
    FOREACH_SLISTBASE_ENTRY(CopyPropSyms, copyPropSyms, usedCopyPropSyms)
    {
        byteCodeUpwardExposedUsed->Clear(copyPropSyms.Key()->m_id);
        this->currentBlock->upwardExposedUses->Set(copyPropSyms.Value()->m_id);
    }
    NEXT_SLISTBASE_ENTRY;

    JitArenaAllocator * allocator = this->func->m_alloc;
    BasicBlock * block = this->currentBlock;
    BVSparse<JitArenaAllocator> * upwardExposedUses = block->upwardExposedUses;

    // Find other copy-prop syms that we need to restore
    FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSyms, &bailOutInfo->capturedValues->copyPropSyms, iter)
    {
        // Copy-prop syms should be vars
        Assert(!copyPropSyms.Key()->IsTypeSpec());
        Assert(!copyPropSyms.Value()->IsTypeSpec());
        if (byteCodeUpwardExposedUsed->TestAndClear(copyPropSyms.Key()->m_id) || bailoutReferencedArgSymsBv->TestAndClear(copyPropSyms.Key()->m_id))
        {
            // This copy-prop sym needs to be restored; add it to the restore list.

            /*
            - copyPropSyms.Key() - original sym that is byte-code upwards-exposed, its corresponding byte-code register needs
              to be restored
            - copyPropSyms.Value() - copy-prop sym whose value the original sym has at the point of this instruction

            Heuristic:
            - By default, use the copy-prop sym to restore its corresponding byte code register
            - This is typically better because that allows the value of the original sym, if it's not used after the copy-prop
              sym is changed, to be discarded, and we only have one lifetime (the copy-prop sym's lifetime) to deal with for
              register allocation
            - Additionally, if the transferring store, which caused the original sym to have the same value as the copy-prop
              sym, becomes a dead store, the original sym won't actually attain the value of the copy-prop sym. In that case,
              the copy-prop sym must be used to restore the byte code register corresponding to original sym.

            Special case for functional correctness:
            - Consider that we always use the copy-prop sym to restore, and consider the following case:
                b = a
                a = c * d <Pre-op bail-out>
                  = b
            - This is rewritten by the lowerer as follows:
                b = a
                a = c
                a = a * d <Pre-op bail-out> (to make dst and src1 the same)
                  = b
            - The problem here is that at the point of the bail-out instruction, 'a' would be used to restore the value of 'b',
              but the value of 'a' has changed before the bail-out (at 'a = c').
            - In this case, we need to use 'b' (the original sym) to restore the value of 'b'. Because 'b' is upwards-exposed,
              'b = a' cannot be a dead store, therefore making it valid to use 'b' to restore.
            - Use the original sym to restore when all of the following are true:
                - The bailout is a pre-op bailout, and the bailout check is done after overwriting the destination
                - It's an int-specialized unary or binary operation that produces a value
                - The copy-prop sym is the destination of this instruction
                - None of the sources are the copy-prop sym. Otherwise, the value of the copy-prop sym will be saved as
                  necessary by the bailout code.
            */
            StackSym * stackSym = copyPropSyms.Key(); // assume that we'll use the original sym to restore
            SymID symId = stackSym->m_id;

            // Prefer to restore from type-specialized versions of the sym, as that will reduce the need for potentially
            // expensive ToVars that can more easily be eliminated due to being dead stores
            StackSym * int32StackSym = nullptr;
            StackSym * float64StackSym = nullptr;
            StackSym * simd128StackSym = nullptr;

            // If the sym is type specialized, we need to check for upward exposed uses of the specialized sym and not the
            // equivalent var sym. If there are no uses and we use the copy-prop sym to restore, we'll need to find the
            // type-specialized sym for that sym as well.
            StackSym * typeSpecSym = nullptr;
            auto findTypeSpecSym = [&]()
            {
                if (bailOutInfo->liveLosslessInt32Syms->Test(symId))
                {
                    // Var version of the sym is not live, use the int32 version
                    int32StackSym = stackSym->GetInt32EquivSym(nullptr);
                    typeSpecSym = int32StackSym;
                    Assert(int32StackSym);
                }
                else if (bailOutInfo->liveFloat64Syms->Test(symId))
                {
                    // Var/int32 version of the sym is not live, use the float64 version
                    float64StackSym = stackSym->GetFloat64EquivSym(nullptr);
                    typeSpecSym = float64StackSym;
                    Assert(float64StackSym);
                }
                else
                {
                    Assert(bailOutInfo->liveVarSyms->Test(symId));
                    typeSpecSym = stackSym;
                }
            };

            findTypeSpecSym();
            Assert(typeSpecSym != nullptr);

            IR::Instr *const instr = bailOutInfo->bailOutInstr;
            StackSym *const dstSym = IR::RegOpnd::TryGetStackSym(instr->GetDst());
            if (instr->GetBailOutKind() & IR::BailOutOnResultConditions &&
                instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset &&
                bailOutInfo->bailOutOffset <= instr->GetByteCodeOffset() &&
                dstSym &&
                dstSym->IsInt32() &&
                dstSym->IsTypeSpec() &&
                dstSym->GetVarEquivSym(nullptr) == copyPropSyms.Value() &&
                instr->GetSrc1() &&
                !instr->GetDst()->IsEqual(instr->GetSrc1()) &&
                !(instr->GetSrc2() && instr->GetDst()->IsEqual(instr->GetSrc2())))
            {
                Assert(bailOutInfo->bailOutOffset == instr->GetByteCodeOffset());

                // Need to use the original sym to restore. The original sym is byte-code upwards-exposed, which is why it needs
                // to be restored. Because the original sym needs to be restored and the copy-prop sym is changing here, the
                // original sym must be live in some fashion at the point of this instruction, that will be verified below. The
                // original sym will also be made upwards-exposed from here, so the aforementioned transferring store of the
                // copy-prop sym to the original sym will not be a dead store.
            }
            else if (block->upwardExposedUses->Test(typeSpecSym->m_id) && !block->upwardExposedUses->Test(copyPropSyms.Value()->m_id))
            {
                // Don't use the copy-prop sym if it is not used and the orig sym still has uses.
                // No point in extending the lifetime of the copy-prop sym unnecessarily.
            }
            else
            {
                // Need to use the copy-prop sym to restore
                stackSym = copyPropSyms.Value();
                symId = stackSym->m_id;
                int32StackSym = nullptr;
                float64StackSym = nullptr;
                simd128StackSym = nullptr;
                findTypeSpecSym();
            }

            // We did not end up using the copy-prop sym. Let's make sure the use of the original sym by the bailout is captured.
            if (stackSym != copyPropSyms.Value() && stackSym->HasArgSlotNum())
            {
                bailoutReferencedArgSymsBv->Set(stackSym->m_id);
            }

            if (int32StackSym != nullptr)
            {
                Assert(float64StackSym == nullptr);
                usedCopyPropSyms->PrependNode(allocator, copyPropSyms.Key(), int32StackSym);
                iter.RemoveCurrent(allocator);
                upwardExposedUses->Set(int32StackSym->m_id);
            }
            else if (float64StackSym != nullptr)
            {
                // This float-specialized sym is going to be used to restore the corresponding byte-code register. Need to
                // ensure that the float value can be precisely coerced back to the original Var value by requiring that it is
                // specialized using BailOutNumberOnly.
                float64StackSym->m_requiresBailOnNotNumber = true;

                usedCopyPropSyms->PrependNode(allocator, copyPropSyms.Key(), float64StackSym);
                iter.RemoveCurrent(allocator);
                upwardExposedUses->Set(float64StackSym->m_id);
            }
            // SIMD_JS
            else if (simd128StackSym != nullptr)
            {
                usedCopyPropSyms->PrependNode(allocator, copyPropSyms.Key(), simd128StackSym);
                iter.RemoveCurrent(allocator);
                upwardExposedUses->Set(simd128StackSym->m_id);
            }
            else
            {
                usedCopyPropSyms->PrependNode(allocator, copyPropSyms.Key(), stackSym);
                iter.RemoveCurrent(allocator);
                upwardExposedUses->Set(symId);
            }
        }
        else if (!this->IsPrePass())
        {
            // The copy-prop sym doesn't need to be restored; delete it.
            iter.RemoveCurrent(allocator);
        }
    }
    NEXT_SLISTBASE_ENTRY_EDITING;
}
StackSym*
BackwardPass::ProcessByteCodeUsesDst(IR::ByteCodeUsesInstr * byteCodeUsesInstr)
{
    Assert(this->DoByteCodeUpwardExposedUsed());
    IR::Opnd * dst = byteCodeUsesInstr->GetDst();
    if (dst)
    {
        IR::RegOpnd * dstRegOpnd = dst->AsRegOpnd();
        StackSym * dstStackSym = dstRegOpnd->m_sym->AsStackSym();
        Assert(!dstRegOpnd->GetIsJITOptimizedReg());
        Assert(dstStackSym->GetByteCodeRegSlot() != Js::Constants::NoRegister);
        if (dstStackSym->GetType() != TyVar)
        {
            dstStackSym = dstStackSym->GetVarEquivSym(nullptr);
        }

        // If the current region is a Try, symbols in its write-through set shouldn't be cleared.
        // Otherwise, symbols in the write-through set of the first try ancestor shouldn't be cleared.
        if (!this->currentRegion ||
            !this->CheckWriteThroughSymInRegion(this->currentRegion, dstStackSym))
        {
            this->currentBlock->byteCodeUpwardExposedUsed->Clear(dstStackSym->m_id);
            return dstStackSym;
        }
    }
    return nullptr;
}
const BVSparse<JitArenaAllocator>*
BackwardPass::ProcessByteCodeUsesSrcs(IR::ByteCodeUsesInstr * byteCodeUsesInstr)
{
    Assert(this->DoByteCodeUpwardExposedUsed() || tag == Js::BackwardPhase);
    const BVSparse<JitArenaAllocator>* byteCodeUpwardExposedUsed = byteCodeUsesInstr->GetByteCodeUpwardExposedUsed();
    if (byteCodeUpwardExposedUsed && this->DoByteCodeUpwardExposedUsed())
    {
        this->currentBlock->byteCodeUpwardExposedUsed->Or(byteCodeUpwardExposedUsed);
    }
    return byteCodeUpwardExposedUsed;
}
bool
BackwardPass::ProcessByteCodeUsesInstr(IR::Instr * instr)
{
    if (!instr->IsByteCodeUsesInstr())
    {
        return false;
    }
    IR::ByteCodeUsesInstr * byteCodeUsesInstr = instr->AsByteCodeUsesInstr();
    if (this->tag == Js::BackwardPhase)
    {
        // FGPeeps inserts bytecodeuses instrs with srcs. We need to look at them to set the proper
        // UpwardExposedUsed info and keep the defs alive.
        // The inliner inserts bytecodeuses instrs with dsts, but we don't want to look at them for upwardExposedUsed
        // as it would cause real defs to look dead. We use these for bytecodeUpwardExposedUsed info only, which is needed
        // in the dead-store pass only.
        //
        // Handle the source side.
        const BVSparse<JitArenaAllocator>* byteCodeUpwardExposedUsed = ProcessByteCodeUsesSrcs(byteCodeUsesInstr);
        if (byteCodeUpwardExposedUsed != nullptr)
        {
            this->currentBlock->upwardExposedUses->Or(byteCodeUpwardExposedUsed);
        }
    }
    else
    {
        Assert(tag == Js::DeadStorePhase);
        Assert(instr->m_opcode == Js::OpCode::ByteCodeUses);
#if DBG
        if (this->DoMarkTempObjectVerify() && (this->currentBlock->isDead || !this->func->hasBailout))
        {
            if (IsCollectionPass())
            {
                if (!this->func->hasBailout)
                {
                    // Prevent byte code uses from being removed in the collection pass for mark temp object verify
                    // if we don't have any bailout
                    return true;
                }
            }
            else
            {
                this->currentBlock->tempObjectVerifyTracker->NotifyDeadByteCodeUses(instr);
            }
        }
#endif
        if (this->func->hasBailout)
        {
            // Just collect the byte code uses, and remove the instruction.
            // We are going backward, so process the dst first and then the src.
            StackSym *dstStackSym = ProcessByteCodeUsesDst(byteCodeUsesInstr);
#if DBG
            // We can only track first level function stack syms right now
            if (dstStackSym && dstStackSym->GetByteCodeFunc() == this->func)
            {
                this->currentBlock->byteCodeRestoreSyms[dstStackSym->GetByteCodeRegSlot()] = nullptr;
            }
#endif
            const BVSparse<JitArenaAllocator>* byteCodeUpwardExposedUsed = ProcessByteCodeUsesSrcs(byteCodeUsesInstr);
#if DBG
            if (byteCodeUpwardExposedUsed)
            {
                FOREACH_BITSET_IN_SPARSEBV(symId, byteCodeUpwardExposedUsed)
                {
                    StackSym * stackSym = this->func->m_symTable->FindStackSym(symId);
                    Assert(!stackSym->IsTypeSpec());
                    // We can only track first level function stack syms right now
                    if (stackSym->GetByteCodeFunc() == this->func)
                    {
                        Js::RegSlot byteCodeRegSlot = stackSym->GetByteCodeRegSlot();
                        Assert(byteCodeRegSlot != Js::Constants::NoRegister);
                        if (this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] != stackSym)
                        {
                            AssertMsg(this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] == nullptr,
                                "Can't have two active lifetimes for the same byte code register");
                            this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] = stackSym;
                        }
                    }
                }
                NEXT_BITSET_IN_SPARSEBV;
            }
#endif
            if (IsCollectionPass())
            {
                return true;
            }
            PropertySym *propertySymUse = byteCodeUsesInstr->propertySymUse;
            if (propertySymUse && !this->currentBlock->isDead)
            {
                this->currentBlock->upwardExposedFields->Set(propertySymUse->m_id);
            }
            if (this->IsPrePass())
            {
                // Don't remove the instruction yet if we are in the prepass,
                // but tell the caller we don't need to process the instruction any more.
                return true;
            }
        }
        this->currentBlock->RemoveInstr(instr);
    }
    return true;
}
bool
BackwardPass::ProcessBailOutInfo(IR::Instr * instr)
{
    Assert(!instr->IsByteCodeUsesInstr());
    if (this->tag == Js::BackwardPhase)
    {
        // We don't need to fill in the bailout instruction in the backward pass
        Assert(this->func->hasBailout || !instr->HasBailOutInfo());
        Assert(!instr->HasBailOutInfo() || instr->GetBailOutInfo()->byteCodeUpwardExposedUsed == nullptr || (this->func->HasTry() && this->func->DoOptimizeTry()));
        return false;
    }
    if (IsCollectionPass())
    {
        return false;
    }
    Assert(tag == Js::DeadStorePhase);
    if (instr->HasBailOutInfo())
    {
        Assert(this->func->hasBailout);
        Assert(this->DoByteCodeUpwardExposedUsed());

        BailOutInfo * bailOutInfo = instr->GetBailOutInfo();

        // Only process the bailout info if this is the main bailout point (instead of shared)
        if (bailOutInfo->bailOutInstr == instr)
        {
            if (instr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset ||
                bailOutInfo->bailOutOffset > instr->GetByteCodeOffset())
            {
                // Currently, we only have post-op bailout with BailOutOnImplicitCalls
                // or JIT inserted operations (which have no byte code offsets).
                // If there are other bailouts that we want to bail out after the operation,
                // we have to make sure that it still doesn't do the implicit call
                // if it is done on the stack object.
                // Otherwise, the stack object will be passed to the implicit call functions.
                Assert(instr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset
                    || (instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCalls
                    || (instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutInvalid);

                // This instruction bails out to a later byte-code instruction, so process the bailout info now
                ProcessBailOutInfo(instr, bailOutInfo);
            }
            else
            {
                // This instruction bails out to the equivalent byte code instruction. This instruction and ByteCodeUses
                // instructions relevant to this instruction need to be processed before the bailout info for this instruction
                // can be processed, so that it can be determined what byte code registers are used by the equivalent byte code
                // instruction and need to be restored. Save the instruction for bailout info processing later.
                Assert(bailOutInfo->bailOutOffset == instr->GetByteCodeOffset());
                Assert(!preOpBailOutInstrToProcess);
                preOpBailOutInstrToProcess = instr;
            }
        }
    }
    return false;
}
bool
BackwardPass::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, bool mayNeedImplicitCallBailOut, bool hasLiveFields)
{
    return this->globOpt->IsImplicitCallBailOutCurrentlyNeeded(instr, nullptr, nullptr, this->currentBlock, hasLiveFields, mayNeedImplicitCallBailOut, false) ||
        this->NeedBailOutOnImplicitCallsForTypedArrayStore(instr);
}
void
BackwardPass::DeadStoreTypeCheckBailOut(IR::Instr * instr)
{
    // Good news: There are cases where the forward pass installs BailOutFailedTypeCheck, but the dead store pass
    // discovers that the checked type is dead.
    // Bad news: We may still need implicit call bailout, and it's up to the dead store pass to figure this out.
    // Worse news: BailOutFailedTypeCheck is pre-op, and BailOutOnImplicitCall is post-op. We'll use a special
    // bailout kind to indicate implicit call bailout that targets its own instruction. The lowerer will emit
    // code to disable/re-enable implicit calls around the operation.
    Assert(this->tag == Js::DeadStorePhase);

    if (this->IsPrePass() || !instr->HasBailOutInfo())
    {
        return;
    }

    // By default, do not do this for stores, as it makes the presence of type checks unpredictable in the forward pass.
    // For instance, we can't predict which stores may cause reallocation of aux slots.
    if (!PHASE_ON(Js::DeadStoreTypeChecksOnStoresPhase, this->func) && instr->GetDst() && instr->GetDst()->IsSymOpnd())
    {
        return;
    }

    IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
    if (!IR::IsTypeCheckBailOutKind(oldBailOutKind))
    {
        return;
    }

    // Either src1 or dst must be a property sym operand
    Assert((instr->GetSrc1() && instr->GetSrc1()->IsSymOpnd() && instr->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd()) ||
        (instr->GetDst() && instr->GetDst()->IsSymOpnd() && instr->GetDst()->AsSymOpnd()->IsPropertySymOpnd()));

    IR::PropertySymOpnd *propertySymOpnd =
        (instr->GetDst() && instr->GetDst()->IsSymOpnd()) ? instr->GetDst()->AsPropertySymOpnd() : instr->GetSrc1()->AsPropertySymOpnd();
    if (propertySymOpnd->TypeCheckRequired())
    {
        return;
    }

    bool isTypeCheckProtected = false;
    IR::BailOutKind bailOutKind;
    if (GlobOpt::NeedsTypeCheckBailOut(instr, propertySymOpnd, propertySymOpnd == instr->GetDst(), &isTypeCheckProtected, &bailOutKind))
    {
        // If we installed a failed type check bailout in the forward pass, but we are now discovering that the checked
        // type is dead, we may still need a bailout on failed fixed field type check. These type checks are required
        // regardless of whether the checked type is dead. Hence, the bailout kind may change here.
        Assert((oldBailOutKind & ~IR::BailOutKindBits) == bailOutKind ||
            bailOutKind == IR::BailOutFailedFixedFieldTypeCheck || bailOutKind == IR::BailOutFailedEquivalentFixedFieldTypeCheck);
        instr->SetBailOutKind(bailOutKind);
        return;
    }
    else if (isTypeCheckProtected)
    {
        instr->ClearBailOutInfo();
        if (preOpBailOutInstrToProcess == instr)
        {
            preOpBailOutInstrToProcess = nullptr;
        }
        return;
    }

    Assert(!propertySymOpnd->IsTypeCheckProtected());

    // If all we're doing here is checking the type (e.g. because we've hoisted a field load or store out of the loop, but needed
    // the type check to remain in the loop), and now it turns out we don't need the type checked, we can simply turn this into
    // a NOP and remove the bailout.
    if (instr->m_opcode == Js::OpCode::CheckObjType)
    {
        Assert(instr->GetDst() == nullptr && instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr);
        instr->m_opcode = Js::OpCode::Nop;
        instr->FreeSrc1();
        instr->ClearBailOutInfo();
        if (this->preOpBailOutInstrToProcess == instr)
        {
            this->preOpBailOutInstrToProcess = nullptr;
        }
        return;
    }

    // We don't need BailOutFailedTypeCheck but may need BailOutOnImplicitCall.
    // Consider: are we in the loop landing pad? If so, no bailout, since implicit calls will be checked at
    // the end of the block.
    if (this->currentBlock->IsLandingPad())
    {
        // We're in the landing pad.
        if (preOpBailOutInstrToProcess == instr)
        {
            preOpBailOutInstrToProcess = nullptr;
        }
        instr->UnlinkBailOutInfo();
        return;
    }

    // If the bailout kind is an equivalent type check, leave the bailout alone.
    if (bailOutKind == IR::BailOutFailedEquivalentTypeCheck ||
        bailOutKind == IR::BailOutFailedEquivalentFixedFieldTypeCheck)
    {
        return;
    }

    // We're not checking for polymorphism, so don't let the bailout indicate that we
    // detected polymorphism.
    instr->GetBailOutInfo()->polymorphicCacheIndex = (uint)-1;

    // Keep the mark temp object bit if it is there so that we will not remove the implicit call check
    instr->SetBailOutKind(IR::BailOutOnImplicitCallsPreOp | (oldBailOutKind & IR::BailOutMarkTempObject));
}
void
BackwardPass::DeadStoreImplicitCallBailOut(IR::Instr * instr, bool hasLiveFields)
{
    Assert(this->tag == Js::DeadStorePhase);

    if (this->IsPrePass() || !instr->HasBailOutInfo())
    {
        // Don't do this in the pre-pass, because, for instance, we don't have live-on-back-edge fields yet.
        return;
    }

    if (OpCodeAttr::BailOutRec(instr->m_opcode))
    {
        // This is something like OpCode::BailOutOnNotEqual. Assume it needs what it's got.
        return;
    }

    UpdateArrayBailOutKind(instr);

    // Install the implicit call PreOp for mark temp object if we need one.
    IR::BailOutKind kind = instr->GetBailOutKind();
    IR::BailOutKind kindNoBits = kind & ~IR::BailOutKindBits;
    if ((kind & IR::BailOutMarkTempObject) != 0 && kindNoBits != IR::BailOutOnImplicitCallsPreOp)
    {
        Assert(kindNoBits != IR::BailOutOnImplicitCalls);
        if (kindNoBits == IR::BailOutInvalid)
        {
            // We should only have combined with array bits
            Assert((kind & ~IR::BailOutForArrayBits) == IR::BailOutMarkTempObject);

            // Don't need to install if we are not going to do helper calls,
            // or we are in the landingPad since implicit calls are already turned off.
            if ((kind & IR::BailOutOnArrayAccessHelperCall) == 0 && !this->currentBlock->IsLandingPad())
            {
                kind += IR::BailOutOnImplicitCallsPreOp;
                instr->SetBailOutKind(kind);
            }
        }
    }

    // Currently only try to eliminate these bailout kinds. The others are required in cases
    // where we don't necessarily have live/hoisted fields.
    const bool mayNeedBailOnImplicitCall = BailOutInfo::IsBailOutOnImplicitCalls(kind);
    if (!mayNeedBailOnImplicitCall)
    {
        if (kind & IR::BailOutMarkTempObject)
        {
            if (kind == IR::BailOutMarkTempObject)
            {
                // Landing pad does not need per-instr implicit call bailouts.
                Assert(this->currentBlock->IsLandingPad());
                instr->ClearBailOutInfo();
                if (this->preOpBailOutInstrToProcess == instr)
                {
                    this->preOpBailOutInstrToProcess = nullptr;
                }
            }
            else
            {
                // Mark temp object bit is not needed after dead store pass
                instr->SetBailOutKind(kind & ~IR::BailOutMarkTempObject);
            }
        }
        return;
    }

    // We have an implicit call bailout in the code, and we want to make sure that it's required.
    // Do this now, because only in the dead store pass do we have complete forward and backward liveness info.
    bool needsBailOutOnImplicitCall = this->IsImplicitCallBailOutCurrentlyNeeded(instr, mayNeedBailOnImplicitCall, hasLiveFields);

    if (!UpdateImplicitCallBailOutKind(instr, needsBailOutOnImplicitCall))
    {
        instr->ClearBailOutInfo();
        if (preOpBailOutInstrToProcess == instr)
        {
            preOpBailOutInstrToProcess = nullptr;
        }
#if DBG
        if (this->DoMarkTempObjectVerify())
        {
            this->currentBlock->tempObjectVerifyTracker->NotifyBailOutRemoval(instr, this);
        }
#endif
    }
}
bool
BackwardPass::NeedBailOutOnImplicitCallsForTypedArrayStore(IR::Instr* instr)
{
    if ((instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict) &&
        instr->GetDst()->IsIndirOpnd() &&
        instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyTypedArray())
    {
        IR::Opnd * opnd = instr->GetSrc1();
        if (opnd->IsRegOpnd())
        {
            return !opnd->AsRegOpnd()->GetValueType().IsPrimitive() &&
                !opnd->AsRegOpnd()->m_sym->IsInt32() &&
                !opnd->AsRegOpnd()->m_sym->IsFloat64() &&
                !opnd->AsRegOpnd()->m_sym->IsFloatConst() &&
                !opnd->AsRegOpnd()->m_sym->IsIntConst();
        }
        else
        {
            Assert(opnd->IsIntConstOpnd() || opnd->IsInt64ConstOpnd() || opnd->IsFloat32ConstOpnd() || opnd->IsFloatConstOpnd() || opnd->IsAddrOpnd());
        }
    }
    return false;
}
IR::Instr*
BackwardPass::ProcessPendingPreOpBailOutInfo(IR::Instr *const currentInstr)
{
    Assert(!IsCollectionPass());

    if (!preOpBailOutInstrToProcess)
    {
        return currentInstr->m_prev;
    }
    Assert(preOpBailOutInstrToProcess == currentInstr);

    if (!this->IsPrePass())
    {
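        // Fold the ByteCodeUses instructions that precede this pre-op bailout into it before its
        // bailout info is processed; ProcessByteCodeUsesInstr collects their uses into the block's
        // byteCodeUpwardExposedUsed set and removes them from the block.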
        IR::Instr* prev = preOpBailOutInstrToProcess->m_prev;
        while (prev && preOpBailOutInstrToProcess->CanAggregateByteCodeUsesAcrossInstr(prev))
        {
            IR::Instr* instr = prev;
            prev = prev->m_prev;
            if (instr->IsByteCodeUsesInstrFor(preOpBailOutInstrToProcess))
            {
                // ProcessByteCodeUsesInstr removes the ByteCodeUses instruction after collecting its uses
                ProcessByteCodeUsesInstr(instr);
            }
        }
    }

    // A pre-op bailout instruction was saved for bailout info processing after the instruction and relevant ByteCodeUses
    // instructions before it have been processed. We can process the bailout info for that instruction now.
    BailOutInfo *const bailOutInfo = preOpBailOutInstrToProcess->GetBailOutInfo();
    Assert(bailOutInfo->bailOutInstr == preOpBailOutInstrToProcess);
    Assert(bailOutInfo->bailOutOffset == preOpBailOutInstrToProcess->GetByteCodeOffset());
    ProcessBailOutInfo(preOpBailOutInstrToProcess, bailOutInfo);
    preOpBailOutInstrToProcess = nullptr;

    // We might have removed the prev instr if it was a ByteCodeUsesInstr,
    // so update the prev instr for the main loop.
    return currentInstr->m_prev;
}
void
BackwardPass::ProcessBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo)
{
    /*
    When we optimize functions having try-catch, we install a bailout at the start of the catch block, namely, BailOnException.
    We don't have flow edges from all the possible exception points in the try to the catch block. As a result, this bailout should
    not try to restore from constant values, copy-prop syms, or type-specialized syms, as these may not necessarily be/have
    the right values. For example,

    //constant values
    c =
    try
    {
        <exception>
        c = k (constant)
    }
    catch
    {
        BailOnException
        = c  <-- We need to restore c from the value outside the try.
    }

    //copy-prop syms
    c =
    try
    {
        b = a
        <exception>
        c = b
    }
    catch
    {
        BailOnException
        = c  <-- We really want to restore c from its original sym, and not from its copy-prop sym, a
    }

    //type specialized syms
    a =
    try
    {
        <exception>
        a++  <-- type specializes a
    }
    catch
    {
        BailOnException
        = a  <-- We need to restore a from its var version.
    }
    */
    BasicBlock * block = this->currentBlock;
    BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed = block->byteCodeUpwardExposedUsed;

    Assert(bailOutInfo->bailOutInstr == instr);

    // The byteCodeUpwardExposedUsed should only be assigned once. The only case which would break this
    // assumption is when we are optimizing a function having try-catch. In that case, we need the
    // byteCodeUpwardExposedUsed analysis in the initial backward pass too.
    Assert(bailOutInfo->byteCodeUpwardExposedUsed == nullptr || (this->func->HasTry() && this->func->DoOptimizeTry()));

    // Make a copy of the byteCodeUpwardExposedUsed so we can remove the constants
    if (!this->IsPrePass())
    {
        // Create the BV of symbols that need to be restored in the BailOutRecord
        byteCodeUpwardExposedUsed = byteCodeUpwardExposedUsed->CopyNew(this->func->m_alloc);
        bailOutInfo->byteCodeUpwardExposedUsed = byteCodeUpwardExposedUsed;
    }
    else
    {
        // Create a temporary byteCodeUpwardExposedUsed
        byteCodeUpwardExposedUsed = byteCodeUpwardExposedUsed->CopyNew(this->tempAlloc);
    }

    // All the register-based argument syms need to be tracked. They are either:
    // 1. Referenced as constants in bailOutInfo->usedCapturedValues.constantValues
    // 2. Referenced using copy prop syms in bailOutInfo->usedCapturedValues.copyPropSyms
    // 3. Marked as m_isBailOutReferenced = true & added to the upwardExposedUsed bit vector to ensure we do not dead store their defs.
    // The third set of syms is represented by the bailoutReferencedArgSymsBv.
    BVSparse<JitArenaAllocator>* bailoutReferencedArgSymsBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
    if (!this->IsPrePass())
    {
        bailOutInfo->IterateArgOutSyms([=](uint, uint, StackSym* sym) {
            if (!sym->IsArgSlotSym())
            {
                bailoutReferencedArgSymsBv->Set(sym->m_id);
            }
        });
    }

    // Process the argument object first, as it can be found on the stack and doesn't need to rely on copy prop
    this->ProcessBailOutArgObj(bailOutInfo, byteCodeUpwardExposedUsed);

    if (instr->m_opcode != Js::OpCode::BailOnException) // see comment at the beginning of this function
    {
        this->ProcessBailOutConstants(bailOutInfo, byteCodeUpwardExposedUsed, bailoutReferencedArgSymsBv);
        this->ProcessBailOutCopyProps(bailOutInfo, byteCodeUpwardExposedUsed, bailoutReferencedArgSymsBv);
    }

    BVSparse<JitArenaAllocator> * tempBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
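    // tempBv is reused for three jobs below: first (DBG only) to verify that every sym to restore is
    // live in some form, then to collect non-temp locals when jitting in debug mode, and finally to
    // partition the restore set into int32- and float64-specialized syms.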
    if (bailOutInfo->liveVarSyms)
    {
        // Prefer to restore from type-specialized versions of the sym, as that will reduce the need for potentially expensive
        // ToVars that can more easily be eliminated due to being dead stores.

#if DBG
        Assert(tempBv->IsEmpty());

        // Verify that all syms to restore are live in some fashion
        tempBv->Minus(byteCodeUpwardExposedUsed, bailOutInfo->liveVarSyms);
        tempBv->Minus(bailOutInfo->liveLosslessInt32Syms);
        tempBv->Minus(bailOutInfo->liveFloat64Syms);
        Assert(tempBv->IsEmpty());
#endif

        if (this->func->IsJitInDebugMode())
        {
            // Add to byteCodeUpwardExposedUsed the non-temp local vars used so far to restore during bail out.
            // The ones that are not used so far will get their values from bytecode when we continue after bail out in the interpreter.
            Assert(this->func->m_nonTempLocalVars);
            tempBv->And(this->func->m_nonTempLocalVars, bailOutInfo->liveVarSyms);

            // Remove syms that are restored in other ways than byteCodeUpwardExposedUsed.
            FOREACH_SLIST_ENTRY(ConstantStackSymValue, value, &bailOutInfo->usedCapturedValues.constantValues)
            {
                Assert(value.Key()->HasByteCodeRegSlot() || value.Key()->GetInstrDef()->m_opcode == Js::OpCode::BytecodeArgOutCapture);
                if (value.Key()->HasByteCodeRegSlot())
                {
                    tempBv->Clear(value.Key()->GetByteCodeRegSlot());
                }
            }
            NEXT_SLIST_ENTRY;
            FOREACH_SLIST_ENTRY(CopyPropSyms, value, &bailOutInfo->usedCapturedValues.copyPropSyms)
            {
                Assert(value.Key()->HasByteCodeRegSlot() || value.Key()->GetInstrDef()->m_opcode == Js::OpCode::BytecodeArgOutCapture);
                if (value.Key()->HasByteCodeRegSlot())
                {
                    tempBv->Clear(value.Key()->GetByteCodeRegSlot());
                }
            }
            NEXT_SLIST_ENTRY;
            if (bailOutInfo->usedCapturedValues.argObjSyms)
            {
                tempBv->Minus(bailOutInfo->usedCapturedValues.argObjSyms);
            }

            byteCodeUpwardExposedUsed->Or(tempBv);
        }

        if (instr->m_opcode != Js::OpCode::BailOnException) // see comment at the beginning of this function
        {
            // Int32
            tempBv->And(byteCodeUpwardExposedUsed, bailOutInfo->liveLosslessInt32Syms);
            byteCodeUpwardExposedUsed->Minus(tempBv);
            FOREACH_BITSET_IN_SPARSEBV(symId, tempBv)
            {
                StackSym * stackSym = this->func->m_symTable->FindStackSym(symId);
                Assert(stackSym->GetType() == TyVar);
                StackSym * int32StackSym = stackSym->GetInt32EquivSym(nullptr);
                Assert(int32StackSym);
                byteCodeUpwardExposedUsed->Set(int32StackSym->m_id);
            }
            NEXT_BITSET_IN_SPARSEBV;

            // Float64
            tempBv->And(byteCodeUpwardExposedUsed, bailOutInfo->liveFloat64Syms);
            byteCodeUpwardExposedUsed->Minus(tempBv);
            FOREACH_BITSET_IN_SPARSEBV(symId, tempBv)
            {
                StackSym * stackSym = this->func->m_symTable->FindStackSym(symId);
                Assert(stackSym->GetType() == TyVar);
                StackSym * float64StackSym = stackSym->GetFloat64EquivSym(nullptr);
                Assert(float64StackSym);
                byteCodeUpwardExposedUsed->Set(float64StackSym->m_id);

                // This float-specialized sym is going to be used to restore the corresponding byte-code register. Need to
                // ensure that the float value can be precisely coerced back to the original Var value by requiring that it is
                // specialized using BailOutNumberOnly.
                float64StackSym->m_requiresBailOnNotNumber = true;
            }
            NEXT_BITSET_IN_SPARSEBV;
        }

        // Var
        // Any remaining syms to restore will be restored from their var versions
    }
    else
    {
        Assert(!this->func->DoGlobOpt());
    }

    JitAdelete(this->tempAlloc, tempBv);

    // BailOnNoProfile makes some edges dead. Upward-exposed symbol info set after the BailOnNoProfile won't
    // flow through these edges, and, in turn, not through predecessor edges of the block containing the
    // BailOnNoProfile. This is specifically bad for an inlinee's argout syms, as they are set as upward exposed
    // when we see the InlineeEnd, but may not look so to some blocks and may get overwritten.
    // Set the argout syms as upward exposed here.
    if (instr->m_opcode == Js::OpCode::BailOnNoProfile && instr->m_func->IsInlinee() &&
        instr->m_func->m_hasInlineArgsOpt && instr->m_func->frameInfo->isRecorded)
    {
        instr->m_func->frameInfo->IterateSyms([=](StackSym* argSym)
        {
            this->currentBlock->upwardExposedUses->Set(argSym->m_id);
        });
    }

    // Mark all the registers that we need to restore as used (excluding constants)
    block->upwardExposedUses->Or(byteCodeUpwardExposedUsed);
    block->upwardExposedUses->Or(bailoutReferencedArgSymsBv);

    if (!this->IsPrePass())
    {
        bailOutInfo->IterateArgOutSyms([=](uint index, uint, StackSym* sym) {
            if (sym->IsArgSlotSym() || bailoutReferencedArgSymsBv->Test(sym->m_id))
            {
                bailOutInfo->argOutSyms[index]->m_isBailOutReferenced = true;
            }
        });
    }
    JitAdelete(this->tempAlloc, bailoutReferencedArgSymsBv);

    if (this->IsPrePass())
    {
        JitAdelete(this->tempAlloc, byteCodeUpwardExposedUsed);
    }
}
void
BackwardPass::ProcessBlock(BasicBlock * block)
{
    this->currentBlock = block;
    this->MergeSuccBlocksInfo(block);
#if DBG_DUMP
    if (this->IsTraceEnabled())
    {
        Output::Print(_u("******************************* Before Process Block *******************************\n"));
        DumpBlockData(block);
    }
#endif
    FOREACH_INSTR_BACKWARD_IN_BLOCK_EDITING(instr, instrPrev, block)
    {
#if DBG_DUMP
        if (!IsCollectionPass() && IsTraceEnabled() && Js::Configuration::Global.flags.Verbose)
        {
            Output::Print(_u(">>>>>>>>>>>>>>>>>>>>>> %s: Instr Start\n"), tag == Js::BackwardPhase ? _u("BACKWARD") : _u("DEADSTORE"));
            instr->Dump();
            if (block->upwardExposedUses)
            {
                Output::SkipToColumn(10);
                Output::Print(_u(" Exposed Use: "));
                block->upwardExposedUses->Dump();
            }
            if (block->upwardExposedFields)
            {
                Output::SkipToColumn(10);
                Output::Print(_u("Exposed Fields: "));
                block->upwardExposedFields->Dump();
            }
            if (block->byteCodeUpwardExposedUsed)
            {
                Output::SkipToColumn(10);
                Output::Print(_u(" Byte Code Use: "));
                block->byteCodeUpwardExposedUsed->Dump();
            }
            Output::Print(_u("--------------------\n"));
        }
#endif
#if DBG
        // Track symbols with weird lifetimes to exclude them from the ByteCodeUpwardExposed verification
        if (DoByteCodeUpwardExposedUsed() && instr->m_func->GetScopeObjSym())
        {
            StackSym* sym = instr->m_func->GetScopeObjSym();
            if (sym->HasByteCodeRegSlot())
            {
                block->excludeByteCodeUpwardExposedTracking->Set(sym->GetByteCodeRegSlot());
            }
        }
#endif
        AssertOrFailFastMsg(!instr->IsLowered(), "Lowered instruction detected in pre-lower context!");

        this->currentInstr = instr;
        this->currentRegion = this->currentBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();

        IR::Instr * insertedInstr = TryChangeInstrForStackArgOpt();
        if (insertedInstr != nullptr)
        {
            instrPrev = insertedInstr;
            continue;
        }

        MarkScopeObjSymUseForStackArgOpt();
        ProcessBailOnStackArgsOutOfActualsRange();

        if (ProcessNoImplicitCallUses(instr) || this->ProcessByteCodeUsesInstr(instr) || this->ProcessBailOutInfo(instr))
        {
            continue;
        }

        IR::Instr *instrNext = instr->m_next;
        if (this->TrackNoImplicitCallInlinees(instr))
        {
            instrPrev = instrNext->m_prev;
            continue;
        }

        if (CanDeadStoreInstrForScopeObjRemoval() && DeadStoreOrChangeInstrForScopeObjRemoval(&instrPrev))
        {
            continue;
        }
        bool hasLiveFields = (block->upwardExposedFields && !block->upwardExposedFields->IsEmpty());

        IR::Opnd * opnd = instr->GetDst();
        if (opnd != nullptr)
        {
            bool isRemoved = ReverseCopyProp(instr);
            if (isRemoved)
            {
                instrPrev = instrNext->m_prev;
                continue;
            }
            if (instr->m_opcode == Js::OpCode::Conv_Bool)
            {
                isRemoved = this->FoldCmBool(instr);
                if (isRemoved)
                {
                    continue;
                }
            }

            ProcessNewScObject(instr);

            this->ProcessTransfers(instr);

            isRemoved = this->ProcessDef(opnd);
            if (isRemoved)
            {
                continue;
            }
        }

        if (!IsCollectionPass())
        {
            this->MarkTempProcessInstr(instr);
            this->ProcessFieldKills(instr);

            if (this->DoDeadStoreSlots()
                && (instr->HasAnyImplicitCalls() || instr->HasBailOutInfo() || instr->UsesAllFields()))
            {
                // Can't dead-store slots if there can be an implicit-call, an exception, or a bailout
                block->slotDeadStoreCandidates->ClearAll();
            }

            TrackIntUsage(instr);
            TrackBitWiseOrNumberOp(instr);
            TrackFloatSymEquivalence(instr);
        }

        opnd = instr->GetSrc1();
        if (opnd != nullptr)
        {
            this->ProcessUse(opnd);

            opnd = instr->GetSrc2();
            if (opnd != nullptr)
            {
                this->ProcessUse(opnd);
            }
        }
        if (IsCollectionPass())
        {
#ifndef _M_ARM
            if (
                this->collectionPassSubPhase == CollectionPassSubPhase::FirstPass
                && !this->func->IsSimpleJit()
                )
            {
                // In the collection pass we do multiple passes over loops. In these passes we keep
                // track of sets of symbols, so that we can tell whether they are used in ways that
                // require protecting them from side-channel attacks.
                IR::Opnd const * src1 = instr->GetSrc1();
                IR::Opnd const * src2 = instr->GetSrc2();
                IR::Opnd const * dest = instr->GetDst();

                // The marking is as follows, by default:
                // 1. symbols on an instruction directly get marked as being part of the same set.
                // 2. symbols used in indiropnds on an instruction get marked as being dereferenced.
                // 3. symbols used as sources for some instructions get marked as being dereferenced.
                // 4. non-type-specialized symbols tend to get marked as dereferenced.

                // First, we need to find any symbol associated with this instruction as a targeted
                // symid for the merge operations. This simplifies the later code.
                auto getAnyDirectSymID = [](IR::Opnd const* opnd)
                {
                    SymID temp = SymID_Invalid;
                    if (opnd == nullptr)
                    {
                        return temp;
                    }
                    switch (opnd->m_kind)
                    {
                    case IR::OpndKind::OpndKindInvalid:
                        AssertOrFailFastMsg(false, "There should be no invalid operand kinds at this point...");
                        break;
                    case IR::OpndKind::OpndKindIntConst:
                    case IR::OpndKind::OpndKindInt64Const:
                    case IR::OpndKind::OpndKindFloatConst:
                    case IR::OpndKind::OpndKindFloat32Const:
                    case IR::OpndKind::OpndKindSimd128Const:
                        // Nothing to do here, no symbols involved
                        break;
                    case IR::OpndKind::OpndKindHelperCall:
                        // Nothing here either, I think?
                        break;
                    case IR::OpndKind::OpndKindSym:
                        temp = opnd->AsSymOpnd()->m_sym->m_id;
                        break;
                    case IR::OpndKind::OpndKindReg:
                        temp = opnd->AsRegOpnd()->m_sym->m_id;
                        break;
                    case IR::OpndKind::OpndKindAddr:
                        // Should be constant, so nothing to do
                        break;
                    case IR::OpndKind::OpndKindIndir:
                        // IndirOpnds don't themselves have symbols
                        break;
                    case IR::OpndKind::OpndKindLabel:
                        // Should be constant, so not an issue
                        break;
                    case IR::OpndKind::OpndKindMemRef:
                        // Should get a closer look, but looks ok?
                        break;
                    case IR::OpndKind::OpndKindRegBV:
                        // Should be ok
                        break;
                    case IR::OpndKind::OpndKindList:
                        // Since it's a list of RegOpnds, we just need to look at the first
                        {
                            IR::ListOpnd const* list = opnd->AsListOpnd();
                            if (list->Count() > 0)
                            {
                                temp = list->Item(0)->m_sym->m_id;
                            }
                        }
                        break;
                    default:
                        AssertOrFailFastMsg(false, "This should be unreachable - if we've added another OpndKind, add proper handling for it");
                        break;
                    }
                    return temp;
                };
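                // A return value of SymID_Invalid means the operand carries no direct symbol to use
                // as the representative of a symbol cluster.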
                SymID destSymID = getAnyDirectSymID(dest);
                if (destSymID == SymID_Invalid)
                {
                    // It looks like we have no assignment to a symbol. As this pass is to mark the
                    // symbols that are in the same set through assignment or computation, the lack
                    // of a destination means that we don't have any set joins to do. We may need a
                    // pass over the source operands to mark dereferences, but that's simpler.
                }
                else
                {
                    // We have a base, so now we want to go through and add any symbols to that set
                    // if they're on the base level of operands on the instruction.
                    auto addSymbolToSet = [](IR::Opnd const* opnd, Loop::LoopSymClusterList* scl, SymID targetSymID)
                    {
                        if (opnd == nullptr)
                        {
                            return;
                        }
                        switch (opnd->m_kind)
                        {
                        case IR::OpndKind::OpndKindInvalid:
                            AssertOrFailFastMsg(false, "There should be no invalid operand kinds at this point...");
                            break;
                        case IR::OpndKind::OpndKindIntConst:
                        case IR::OpndKind::OpndKindInt64Const:
                        case IR::OpndKind::OpndKindFloatConst:
                        case IR::OpndKind::OpndKindFloat32Const:
                        case IR::OpndKind::OpndKindSimd128Const:
                            // Nothing to do here, no symbols involved
                            break;
                        case IR::OpndKind::OpndKindHelperCall:
                            // Nothing here either, I think?
                            break;
                        case IR::OpndKind::OpndKindSym:
                            scl->Merge(targetSymID, opnd->AsSymOpnd()->m_sym->m_id);
                            break;
                        case IR::OpndKind::OpndKindReg:
                            scl->Merge(targetSymID, opnd->AsRegOpnd()->m_sym->m_id);
                            break;
                        case IR::OpndKind::OpndKindAddr:
                            // Should be constant, so nothing to do
                            break;
                        case IR::OpndKind::OpndKindIndir:
                            // IndirOpnds don't themselves have symbols
                            break;
                        case IR::OpndKind::OpndKindLabel:
                            // Should be constant, so not an issue
                            break;
                        case IR::OpndKind::OpndKindMemRef:
                            // Should get a closer look, but looks ok?
                            break;
                        case IR::OpndKind::OpndKindRegBV:
                            // Should be ok
                            break;
                        case IR::OpndKind::OpndKindList:
                            // Needs iteration, but is straightforward beyond that
                            {
                                IR::ListOpnd const* list = opnd->AsListOpnd();
                                for (int iter = 0; iter < list->Count(); iter++)
                                {
                                    scl->Merge(targetSymID, list->Item(iter)->m_sym->m_id);
                                }
                            }
                            break;
                        default:
                            AssertOrFailFastMsg(false, "This should be unreachable - if we've added another OpndKind, add proper handling for it");
                            break;
                        }
                    };
                    addSymbolToSet(src1, this->currentPrePassLoop->symClusterList, destSymID);
                    addSymbolToSet(src2, this->currentPrePassLoop->symClusterList, destSymID);
                }
                // Now we get to the second part - symbols used in indiropnds get marked as dereferenced.
                // This is just a matter of updating a bitvector, so it's fairly straightforward.
                auto markDereferences = [](IR::Opnd const* opnd, BVSparse<JitArenaAllocator>* bv)
                {
                    if (opnd == nullptr)
                    {
                        return;
                    }
                    switch (opnd->m_kind)
                    {
                    case IR::OpndKind::OpndKindInvalid:
                        AssertOrFailFastMsg(false, "There should be no invalid operand kinds at this point...");
                        break;
                    case IR::OpndKind::OpndKindIntConst:
                    case IR::OpndKind::OpndKindInt64Const:
                    case IR::OpndKind::OpndKindFloatConst:
                    case IR::OpndKind::OpndKindFloat32Const:
                    case IR::OpndKind::OpndKindSimd128Const:
                        // Nothing to do here, no symbols involved
                        break;
                    case IR::OpndKind::OpndKindHelperCall:
                        // Nothing here either, I think?
                        break;
                    case IR::OpndKind::OpndKindSym:
                        // If it's not type-specialized, we may dereference it.
                        if (!(opnd->GetValueType().IsNotObject()))
                        {
                            bv->Set(opnd->AsSymOpnd()->m_sym->m_id);
                        }
                        break;
                    case IR::OpndKind::OpndKindReg:
                        // If it's not type-specialized, we may dereference it.
                        if (!(opnd->GetValueType().IsNotObject()) && !opnd->AsRegOpnd()->m_sym->IsTypeSpec())
                        {
                            bv->Set(opnd->AsRegOpnd()->m_sym->m_id);
                        }
                        break;
                    case IR::OpndKind::OpndKindAddr:
                        // Should be constant, so nothing to do
                        break;
                    case IR::OpndKind::OpndKindIndir:
                        // Need to handle each component
                        {
                            IR::IndirOpnd const* indirOpnd = opnd->AsIndirOpnd();
                            if (indirOpnd->GetBaseOpnd())
                            {
                                bv->Set(indirOpnd->GetBaseOpnd()->m_sym->m_id);
                            }
                            if (indirOpnd->GetIndexOpnd())
                            {
                                bv->Set(indirOpnd->GetIndexOpnd()->m_sym->m_id);
                            }
                        }
                        break;
                    case IR::OpndKind::OpndKindLabel:
                        // Should be constant, so not an issue
                        break;
                    case IR::OpndKind::OpndKindMemRef:
                        // Should get a closer look, but looks ok?
                        break;
                    case IR::OpndKind::OpndKindRegBV:
                        // Should be ok
                        break;
                    case IR::OpndKind::OpndKindList:
                        // Needs iteration, but is straightforward beyond that
                        {
                            IR::ListOpnd const* list = opnd->AsListOpnd();
                            for (int iter = 0; iter < list->Count(); iter++)
                            {
                                // Should be the same as OpndKindReg, since ListOpndType is RegOpnd.
                                // (Check the list item itself, not the enclosing ListOpnd.)
                                if (!(list->Item(iter)->GetValueType().IsNotObject()) && !list->Item(iter)->m_sym->IsTypeSpec())
                                {
                                    bv->Set(list->Item(iter)->m_sym->m_id);
                                }
                            }
                        }
                        break;
                    default:
                        AssertOrFailFastMsg(false, "This should be unreachable - if we've added another OpndKind, add proper handling for it");
                        break;
                    }
                };
                markDereferences(dest, this->currentPrePassLoop->internallyDereferencedSyms);
                markDereferences(src1, this->currentPrePassLoop->internallyDereferencedSyms);
                markDereferences(src2, this->currentPrePassLoop->internallyDereferencedSyms);
                auto explicitlyMarkDereferenced = [](IR::Opnd const* opnd, BVSparse<JitArenaAllocator>* bv)
                {
                    if (opnd == nullptr)
                    {
                        return;
                    }
                    switch (opnd->m_kind)
                    {
                    case IR::OpndKind::OpndKindInvalid:
                        AssertOrFailFastMsg(false, "There should be no invalid operand kinds at this point...");
                        break;
                    case IR::OpndKind::OpndKindIntConst:
                    case IR::OpndKind::OpndKindInt64Const:
                    case IR::OpndKind::OpndKindFloatConst:
                    case IR::OpndKind::OpndKindFloat32Const:
                    case IR::OpndKind::OpndKindSimd128Const:
                        // Nothing to do here, no symbols involved
                        break;
                    case IR::OpndKind::OpndKindHelperCall:
                        // Nothing here either, I think?
                        break;
                    case IR::OpndKind::OpndKindSym:
                        // The instruction using this means that we may dereference the symbol,
                        // regardless of type spec
                        bv->Set(opnd->AsSymOpnd()->m_sym->m_id);
                        break;
                    case IR::OpndKind::OpndKindReg:
                        // The instruction using this means that we may dereference the symbol,
                        // regardless of type spec
                        bv->Set(opnd->AsRegOpnd()->m_sym->m_id);
                        break;
                    case IR::OpndKind::OpndKindAddr:
                        // Should be constant, so nothing to do
                        break;
                    case IR::OpndKind::OpndKindIndir:
                        // Need to handle each component
                        {
                            IR::IndirOpnd const* indirOpnd = opnd->AsIndirOpnd();
                            if (indirOpnd->GetBaseOpnd())
                            {
                                bv->Set(indirOpnd->GetBaseOpnd()->m_sym->m_id);
                            }
                            if (indirOpnd->GetIndexOpnd())
                            {
                                bv->Set(indirOpnd->GetIndexOpnd()->m_sym->m_id);
                            }
                        }
                        break;
                    case IR::OpndKind::OpndKindLabel:
                        // Should be constant, so not an issue
                        break;
                    case IR::OpndKind::OpndKindMemRef:
                        // Should get a closer look, but looks ok?
                        break;
                    case IR::OpndKind::OpndKindRegBV:
                        // Should be ok
                        break;
                    case IR::OpndKind::OpndKindList:
                        // Needs iteration, but is straightforward beyond that
                        {
                            IR::ListOpnd const* list = opnd->AsListOpnd();
                            for (int iter = 0; iter < list->Count(); iter++)
                            {
                                // The instruction using this means that we may dereference the symbol,
                                // regardless of type spec
                                bv->Set(list->Item(iter)->m_sym->m_id);
                            }
                        }
                        break;
                    default:
                        AssertOrFailFastMsg(false, "This should be unreachable - if we've added another OpndKind, add proper handling for it");
                        break;
                    }
                };
                // We may also have some specific instructions that dereference things - we can
                // handle those specifically, since there's only a few of them
                switch (instr->m_opcode)
                {
                case Js::OpCode::StArrInlineItem_CI4:
                case Js::OpCode::StArrItemC_CI4:
                case Js::OpCode::StArrItemI_CI4:
                case Js::OpCode::StArrSegElemC:
                case Js::OpCode::StArrSegItem_A:
                case Js::OpCode::StArrSegItem_CI4:
                case Js::OpCode::StArrViewElem:
                case Js::OpCode::StAtomicWasm:
                case Js::OpCode::StElemC:
                case Js::OpCode::StElemI_A:
                case Js::OpCode::StElemI_A_Strict:
                case Js::OpCode::StEnvObjSlot:
                case Js::OpCode::StEnvObjSlotChkUndecl:
                case Js::OpCode::StFld:
                case Js::OpCode::StFldStrict:
                case Js::OpCode::StFuncExpr:
                case Js::OpCode::StInnerObjSlot:
                case Js::OpCode::StInnerObjSlotChkUndecl:
                case Js::OpCode::StInnerSlot:
                case Js::OpCode::StInnerSlotChkUndecl:
                case Js::OpCode::StLocalFld:
                case Js::OpCode::StLocalFuncExpr:
                case Js::OpCode::StLocalObjSlot:
                case Js::OpCode::StLocalObjSlotChkUndecl:
                case Js::OpCode::StLocalSlot:
                case Js::OpCode::StLocalSlotChkUndecl:
                case Js::OpCode::StLoopBodyCount:
                case Js::OpCode::StModuleSlot:
                case Js::OpCode::StObjSlot:
                case Js::OpCode::StObjSlotChkUndecl:
                case Js::OpCode::StParamObjSlot:
                case Js::OpCode::StParamObjSlotChkUndecl:
                case Js::OpCode::StParamSlot:
                case Js::OpCode::StParamSlotChkUndecl:
                case Js::OpCode::StRootFld:
                case Js::OpCode::StRootFldStrict:
                case Js::OpCode::StSlot:
                case Js::OpCode::StSlotBoxTemp:
                case Js::OpCode::StSlotChkUndecl:
                case Js::OpCode::StSuperFld:
                case Js::OpCode::ProfiledStElemI_A:
                case Js::OpCode::ProfiledStElemI_A_Strict:
                case Js::OpCode::ProfiledStFld:
                case Js::OpCode::ProfiledStFldStrict:
                case Js::OpCode::ProfiledStLocalFld:
                case Js::OpCode::ProfiledStRootFld:
                case Js::OpCode::ProfiledStRootFldStrict:
                case Js::OpCode::ProfiledStSuperFld:
                    // Unfortunately, being fed into a store means that we could have aliasing, and the
                    // consequence is that it may be re-read and then dereferenced. Note that we can do
                    // this case if we poison any array symbol that we store to on the way out, but the
                    // aliasing problem remains.
                case Js::OpCode::ArgOut_A:
                case Js::OpCode::ArgOut_ANonVar:
                case Js::OpCode::ArgOut_A_Dynamic:
                case Js::OpCode::ArgOut_A_FixupForStackArgs:
                case Js::OpCode::ArgOut_A_FromStackArgs:
                case Js::OpCode::ProfiledArgOut_A:
                    // Getting passed to another function is a boundary that we can't analyze over.
                case Js::OpCode::Ret:
                    // Return arcs are pretty short in speculation, so we have to assume that we may be
                    // returning to a situation that will dereference the symbol. Note that we will not
                    // hit this path in normal jitted code, but it's more common in jitloopbody'd code.
                    explicitlyMarkDereferenced(instr->GetSrc1(), this->currentPrePassLoop->internallyDereferencedSyms);
                    break;
                default:
                    // most instructions don't have this sort of behavior
                    break;
                }
            }
#endif
            // Continue normal CollectionPass behavior
            continue;
        }
        if (this->tag == Js::DeadStorePhase)
        {
#ifndef _M_ARM
            if (
                block->loop
                && !this->isLoopPrepass
                && !this->func->IsSimpleJit()
                )
            {
                // In the second pass, we mark instructions that we go by as being safe or unsafe.
                //
                // This is all based on the information which we gathered in the previous pass. The
                // symbol sets are cross-referenced and the bit-vector information is set such that
                // the bit vector now holds a complete list of which symbols are dereferenced, both
                // directly or indirectly, in the loop, so we can see if a particular instr creates
                // such a symbol. If it doesn't, then we will not mask its destination, as it's not
                // necessary to create a safe program.
                //
                // Note that if we avoid doing the masking here, we need to instead do it on the
                // out-edges of the loop - otherwise an unsafe use of the symbol could happen after
                // the loop and not get caught.

                // This helper goes through and marks loop out-edges for a particular symbol set.
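                // Note: this is a captureless lambda stored in a static function pointer so that it
                // can call itself recursively (a lambda cannot otherwise refer to itself by name).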
                static void (*addOutEdgeMasking)(SymID, Loop*, JitArenaAllocator*) = [](SymID symID, Loop* loop, JitArenaAllocator *alloc) -> void
                {
                    // There are rare cases where we have no out-edges (the only way to leave this loop
                    // is via a return inside the jitloopbody); in this case, we don't need to mask any
                    // symbols on the out-edges, as we only need to worry about the store cases.
                    if (loop->outwardSpeculationMaskInstrs == nullptr)
                    {
                        return;
                    }
                    BVSparse<JitArenaAllocator> *syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
                    // We only need to do this for stack syms, and only for ones that are upwardexposed
                    // in the block sourcing to the masking block, but it needs to be for all symbols a
                    // mask-skipped load may be written to.
                    loop->symClusterList->MapSet<BVSparse<JitArenaAllocator>*>(symID, [](SymID a, BVSparse<JitArenaAllocator> *symbols) {
                        symbols->Set(a);
                    }, syms);
                    SymTable* symTable = loop->GetFunc()->m_symTable;
                    FOREACH_BITSET_IN_SPARSEBV(curSymID, syms)
                    {
                        Sym* potentialSym = symTable->Find(curSymID);
                        if (potentialSym == nullptr || !potentialSym->IsStackSym())
                        {
                            syms->Clear(curSymID);
                        }
                    } NEXT_BITSET_IN_SPARSEBV;
                    if (syms->IsEmpty())
                    {
                        // If there are no stack syms left, we have nothing to mask
                        return;
                    }
                    // Now that we have a bitvector of things to try to mask on the out-edges, we'll go
                    // over the list of outmask instructions.
                    FOREACH_SLIST_ENTRY(IR::ByteCodeUsesInstr*, bcuInstr, loop->outwardSpeculationMaskInstrs)
                    {
                        // Get the upwardExposed information for the previous block
                        IR::LabelInstr *blockLabel = bcuInstr->GetBlockStartInstr()->AsLabelInstr();
                        BasicBlock* maskingBlock = blockLabel->GetBasicBlock();
                        // Since it's possible we have a multi-level loop structure (each with its own mask
                        // instructions and dereferenced symbol list), we may be able to avoid masking some
                        // symbols in interior loop->exterior loop edges if they're not dereferenced in the
                        // exterior loop. This does mean, however, that we need to mask them further out.
                        Loop* maskingBlockLoop = maskingBlock->loop;
                        if (maskingBlockLoop != nullptr && !maskingBlockLoop->internallyDereferencedSyms->Test(symID))
                        {
                            addOutEdgeMasking(symID, maskingBlockLoop, alloc);
                            continue;
                        }
                        // Instead of looking at the previous block (inside the loop), which may be cleaned
                        // up or may yet be processed for dead stores, we instead can look at the mask/cmov
                        // block, which we can keep from being cleaned up, and which will always be handled
                        // before the loop is looked at (in this phase), since it is placed after the loop.
                        AssertOrFailFast(maskingBlock->upwardExposedUses);
                        AssertOrFailFast(maskingBlock->upwardExposedFields);
                        BVSparse<JitArenaAllocator> *symsToMask = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
                        symsToMask->Or(maskingBlock->upwardExposedUses);
                        symsToMask->Or(maskingBlock->upwardExposedFields);
                        symsToMask->And(syms);
                        // If nothing is exposed, we have nothing to mask, and nothing to do here.
                        if (!symsToMask->IsEmpty())
                        {
                            if (bcuInstr->GetByteCodeUpwardExposedUsed() == nullptr)
                            {
                                // This will initialize the internal structure properly
                                bcuInstr->SetBV(JitAnew(bcuInstr->m_func->m_alloc, BVSparse<JitArenaAllocator>, bcuInstr->m_func->m_alloc));
                            }
#if DBG_DUMP
                            if (PHASE_TRACE(Js::SpeculationPropagationAnalysisPhase, loop->topFunc))
                            {
                                Output::Print(_u("Adding symbols to out-edge masking for loop %u outward block %u:\n"), loop->GetLoopNumber(), maskingBlock->GetBlockNum());
                                symsToMask->Dump();
                            }
#endif
                            // Add the syms to the mask set
                            const_cast<BVSparse<JitArenaAllocator> *>(bcuInstr->GetByteCodeUpwardExposedUsed())->Or(symsToMask);
                        }
                    } NEXT_SLIST_ENTRY;
                };
                switch (instr->m_opcode)
                {
                case Js::OpCode::LdElemI_A:
                case Js::OpCode::ProfiledLdElemI_A:
                {
                    IR::Opnd* dest = instr->GetDst();
                    if (dest->IsRegOpnd())
                    {
                        SymID symid = dest->AsRegOpnd()->m_sym->m_id;
                        if (!block->loop->internallyDereferencedSyms->Test(symid))
                        {
                            instr->SetIsSafeToSpeculate(true);
                            addOutEdgeMasking(symid, block->loop, this->tempAlloc);
#if DBG_DUMP
                            if (PHASE_TRACE(Js::SpeculationPropagationAnalysisPhase, this->func))
                            {
                                Output::Print(_u("Marking instruction as safe:\n"));
                                instr->highlight = 0x0f;
                                instr->Dump();
                            }
#endif
                        }
                    }
                    else if (dest->IsSymOpnd())
                    {
                        SymID symid = dest->AsSymOpnd()->m_sym->m_id;
                        if (!block->loop->internallyDereferencedSyms->Test(symid))
                        {
                            instr->SetIsSafeToSpeculate(true);
                            addOutEdgeMasking(symid, block->loop, this->tempAlloc);
#if DBG_DUMP
                            if (PHASE_TRACE(Js::SpeculationPropagationAnalysisPhase, this->func))
                            {
                                Output::Print(_u("Marking instruction as safe:\n"));
                                instr->highlight = 0x0f;
                                instr->Dump();
                            }
#endif
                        }
                    }
                }
                break;
                default:
                    // Most instructions don't have any particular handling needed here, as they don't
                    // get any masking regardless.
                    break;
                }
            }
#endif
            switch(instr->m_opcode)
            {
            case Js::OpCode::LdSlot:
            {
                DeadStoreOrChangeInstrForScopeObjRemoval(&instrPrev);
                break;
            }
            case Js::OpCode::InlineArrayPush:
            case Js::OpCode::InlineArrayPop:
            {
                IR::Opnd *const thisOpnd = instr->GetSrc1();
                if(thisOpnd && thisOpnd->IsRegOpnd())
                {
                    IR::RegOpnd *const thisRegOpnd = thisOpnd->AsRegOpnd();
                    if(thisRegOpnd->IsArrayRegOpnd())
                    {
                        // Process the array use at the point of the array built-in call, since the array will actually
                        // be used at the call, not at the ArgOut_A_InlineBuiltIn
                        ProcessArrayRegOpndUse(instr, thisRegOpnd->AsArrayRegOpnd());
                    }
                }
                }
                break;
            }
#if !INT32VAR // the following is not valid on 64-bit platforms
            case Js::OpCode::BoundCheck:
            {
                if(IsPrePass())
                {
                    break;
                }
                // Look for:
                //     BoundCheck 0 <= s1
                //     BoundCheck s1 <= s2 + c, where c == 0 || c == -1
                //
                // And change it to:
                //     UnsignedBoundCheck s1 <= s2 + c
                //
                // The BoundCheck instruction is a signed operation, so any unsigned operand used in the instruction must be
                // guaranteed to be >= 0 and <= int32 max when its value is interpreted as signed. Due to the restricted
                // range of s2 above, by using an unsigned comparison instead, the negative check on s1 will also be
                // covered.
                //
                // A BoundCheck instruction takes the form (src1 <= src2 + dst).
                // Check the current instruction's pattern for:
                //     BoundCheck s1 <= s2 + c, where c <= 0
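                // Worked example (hypothetical syms/values): with s1 = i and s2 = length,
                //     BoundCheck 0 <= s1        ; fails when i < 0
                //     BoundCheck s1 <= s2 + -1  ; fails when i > length - 1
                // folds to
                //     UnsignedBoundCheck s1 <= s2 + -1
                // because a negative i reinterpreted as unsigned is greater than int32 max and
                // therefore also fails the unsigned comparison against length - 1.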
                if(!instr->GetSrc1()->IsRegOpnd() ||
                   !instr->GetSrc1()->IsInt32() ||
                   !instr->GetSrc2() ||
                   instr->GetSrc2()->IsIntConstOpnd())
                {
                    break;
                }
                if(instr->GetDst())
                {
                    const int c = instr->GetDst()->AsIntConstOpnd()->GetValue();
                    if(c != 0 && c != -1)
                    {
                        break;
                    }
                }
                // Check the previous instruction's pattern for:
                //     BoundCheck 0 <= s1
                IR::Instr *const lowerBoundCheck = instr->m_prev;
                if(lowerBoundCheck->m_opcode != Js::OpCode::BoundCheck ||
                   !lowerBoundCheck->GetSrc1()->IsIntConstOpnd() ||
                   lowerBoundCheck->GetSrc1()->AsIntConstOpnd()->GetValue() != 0 ||
                   !lowerBoundCheck->GetSrc2() ||
                   !instr->GetSrc1()->AsRegOpnd()->IsEqual(lowerBoundCheck->GetSrc2()) ||
                   (lowerBoundCheck->GetDst() && lowerBoundCheck->GetDst()->AsIntConstOpnd()->GetValue() != 0))
                {
                    break;
                }
                // Remove the previous lower bound check, and change the current upper bound check to:
                //     UnsignedBoundCheck s1 <= s2 + c
                instr->m_opcode = Js::OpCode::UnsignedBoundCheck;
                currentBlock->RemoveInstr(lowerBoundCheck);
                instrPrev = instr->m_prev;
                break;
            }
#endif
            }
            DeadStoreTypeCheckBailOut(instr);
            DeadStoreImplicitCallBailOut(instr, hasLiveFields);
            if (block->stackSymToFinalType != nullptr)
            {
                this->InsertTypeTransitionsAtPotentialKills();
            }
            // NoImplicitCallUses transfers need to be processed after determining whether implicit calls need to be disabled
            // for the current instruction, because the instruction where the def occurs also needs implicit calls disabled.
            // Array value type for the destination needs to be updated before transfers have been processed by
            // ProcessNoImplicitCallDef, and array value types for sources need to be updated after transfers have been
            // processed by ProcessNoImplicitCallDef, as it requires the no-implicit-call tracking bit-vectors to be precise at
            // the point of the update.
            if(!IsPrePass())
            {
                UpdateArrayValueTypes(instr, instr->GetDst());
            }
            ProcessNoImplicitCallDef(instr);
            if(!IsPrePass())
            {
                UpdateArrayValueTypes(instr, instr->GetSrc1());
                UpdateArrayValueTypes(instr, instr->GetSrc2());
            }
        }
        else
        {
            switch (instr->m_opcode)
            {
            case Js::OpCode::BailOnNoProfile:
            {
                this->ProcessBailOnNoProfile(instr, block);
                // this call could change the last instr of the previous block... Adjust instrStop.
                instrStop = block->GetFirstInstr()->m_prev;
                Assert(this->tag != Js::DeadStorePhase);
                continue;
            }
            case Js::OpCode::Catch:
            {
                if (this->func->DoOptimizeTry() && !this->IsPrePass())
                {
                    // Execute the "Catch" in the JIT'ed code, and bailout to the next instruction. This way, the bailout will restore the exception object automatically.
                    IR::BailOutInstr* bailOnException = IR::BailOutInstr::New(Js::OpCode::BailOnException, IR::BailOutOnException, instr->m_next, instr->m_func);
                    instr->InsertAfter(bailOnException);
                    Assert(instr->GetDst()->IsRegOpnd() && instr->GetDst()->GetStackSym()->HasByteCodeRegSlot());
                    StackSym * exceptionObjSym = instr->GetDst()->GetStackSym();
                    Assert(instr->m_prev->IsLabelInstr() && (instr->m_prev->AsLabelInstr()->GetRegion()->GetType() == RegionTypeCatch));
                    instr->m_prev->AsLabelInstr()->GetRegion()->SetExceptionObjectSym(exceptionObjSym);
                }
                break;
            }
            case Js::OpCode::Throw:
            case Js::OpCode::EHThrow:
            case Js::OpCode::InlineThrow:
                this->func->SetHasThrow();
                break;
            }
        }
        if (instr->m_opcode == Js::OpCode::InlineeEnd)
        {
            this->ProcessInlineeEnd(instr);
        }
        if (instr->IsLabelInstr() && (instr->m_next->m_opcode == Js::OpCode::Catch || instr->m_next->m_opcode == Js::OpCode::Finally))
        {
            if (!this->currentRegion)
            {
                Assert(!this->func->DoOptimizeTry() && !(this->func->IsSimpleJit() && this->func->hasBailout));
            }
            else
            {
                Assert(this->currentRegion->GetType() == RegionTypeCatch || this->currentRegion->GetType() == RegionTypeFinally);
                Region * matchingTryRegion = this->currentRegion->GetMatchingTryRegion();
                Assert(matchingTryRegion);
                // We need live-on-back-edge info to accurately set write-through symbols for try-catches in a loop.
                // Don't set write-through symbols in pre-pass
                if (!this->IsPrePass() && !matchingTryRegion->writeThroughSymbolsSet)
                {
                    if (this->tag == Js::DeadStorePhase)
                    {
                        Assert(!this->func->DoGlobOpt());
                    }
                    // FullJit: Write-through symbols info must be populated in the backward pass as
                    // 1. the forward pass needs it to insert ToVars.
                    // 2. the deadstore pass needs it to not clear such symbols from the
                    //    byteCodeUpwardExposedUsed BV upon a def in the try region. This is required
                    //    because any bailout in the try region needs to restore all write-through
                    //    symbols.
                    // SimpleJit: Won't run the initial backward pass, but write-through symbols info is still
                    // needed in the deadstore pass for <2> above.
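                    // Illustrative example (hypothetical source): in
                    //     try { x = f(); g(); } catch (e) { use(x); }
                    // x is defined in the try region and consumed in the handler, so it is treated
                    // as write-through: a bailout anywhere in the try must be able to restore x.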
                    this->SetWriteThroughSymbolsSetForRegion(this->currentBlock, matchingTryRegion);
                }
            }
        }
#if DBG
        if (instr->m_opcode == Js::OpCode::TryCatch)
        {
            if (!this->IsPrePass() && (this->func->DoOptimizeTry() || (this->func->IsSimpleJit() && this->func->hasBailout)))
            {
                Assert(instr->m_next->IsLabelInstr() && (instr->m_next->AsLabelInstr()->GetRegion() != nullptr));
                Region * tryRegion = instr->m_next->AsLabelInstr()->GetRegion();
                Assert(tryRegion && tryRegion->GetType() == RegionType::RegionTypeTry && tryRegion->GetMatchingCatchRegion() != nullptr);
                Assert(tryRegion->writeThroughSymbolsSet);
            }
        }
#endif
        instrPrev = ProcessPendingPreOpBailOutInfo(instr);
#if DBG_DUMP
        if (!IsCollectionPass() && IsTraceEnabled() && Js::Configuration::Global.flags.Verbose)
        {
            Output::Print(_u("-------------------\n"));
            instr->Dump();
            if (block->upwardExposedUses)
            {
                Output::SkipToColumn(10);
                Output::Print(_u("   Exposed Use: "));
                block->upwardExposedUses->Dump();
            }
            if (block->upwardExposedFields)
            {
                Output::SkipToColumn(10);
                Output::Print(_u("Exposed Fields: "));
                block->upwardExposedFields->Dump();
            }
            if (block->byteCodeUpwardExposedUsed)
            {
                Output::SkipToColumn(10);
                Output::Print(_u(" Byte Code Use: "));
                block->byteCodeUpwardExposedUsed->Dump();
            }
            Output::Print(_u("<<<<<<<<<<<<<<<<<<<<<< %s: Instr End\n"), tag == Js::BackwardPhase? _u("BACKWARD") : _u("DEADSTORE"));
        }
#endif
    }
    NEXT_INSTR_BACKWARD_IN_BLOCK_EDITING;
#ifndef _M_ARM
    if (
        this->tag == Js::DeadStorePhase
        // We don't do the masking in simplejit due to reduced perf concerns and the issues
        // with handling try/catch structures with late-added blocks
        && !this->func->IsSimpleJit()
        // We don't need the masking blocks in asmjs/wasm mode
        && !block->GetFirstInstr()->m_func->GetJITFunctionBody()->IsAsmJsMode()
        && !block->GetFirstInstr()->m_func->GetJITFunctionBody()->IsWasmFunction()
        && !block->isDead
        && !block->isDeleted
        )
    {
        FOREACH_PREDECESSOR_BLOCK(blockPred, block)
        {
            // Now we need to handle loop out-edges. These need blocks inserted to prevent loads
            // of those symbols under speculation; the easiest way to do this is to CMOV them with
            // a flag that we always know will be false, as this introduces a dependency on the
            // register that can't be speculated (currently).
            //
            // Note that we're doing this backwards - looking from the target into the loop. We
            // do it this way because we're going backwards over the blocks anyway; a block
            // inserted after the branch may be impossible to handle correctly.
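            // Illustrative shape (hypothetical CFG): for
            //     loop L:  ... s10 = a[i] ...  br out
            //     out:     ... s10 ...
            // a masking block is placed on the L->out edge so that s10 passes through the
            // always-false CMOV/fence before any use outside the loop can consume it.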
            if (!blockPred->isDead && !blockPred->isDeleted && blockPred->loop != nullptr)
            {
                Loop* targetLoop = block->loop;
                Loop* startingLoop = blockPred->loop;
                bool addMaskingBlock = false;
                if (targetLoop == nullptr)
                {
                    // If we're leaving to a non-looping context, we definitely want the masking block
                    addMaskingBlock = true;
                }
                else if (targetLoop == startingLoop)
                {
                    // If we're still inside the same loop, we don't want a masking block
                    addMaskingBlock = false;
                }
                else
                {
                    // We want a masking block if we're going to a loop enclosing the current one.
                    Loop* loopTest = targetLoop;
                    addMaskingBlock = true;
                    while (loopTest != nullptr)
                    {
                        if (loopTest == startingLoop)
                        {
                            // the target loop is a child of the starting loop, so don't mask on the way
                            addMaskingBlock = false;
                            break;
                        }
                        loopTest = loopTest->parent;
                    }
                }
                if (addMaskingBlock)
                {
                    // Avoid masking on the way from a masking block - we're already masking this jmp
                    if (block->GetFirstInstr()->m_next->m_opcode == Js::OpCode::SpeculatedLoadFence)
                    {
                        addMaskingBlock = false;
                    }
                }
                if (addMaskingBlock)
                {
                    // It's architecture dependent, so we just mark the block here and leave the actual
                    // generation of the masking to the Lowerer.
                    // Generated code here:
                    //     newTarget:
                    //         syms = targetedloadfence syms
                    //         jmp oldTarget
                    // We need to increment the data use count since we're changing a successor.
                    blockPred->IncrementDataUseCount();
                    BasicBlock *newBlock = this->func->m_fg->InsertAirlockBlock(this->func->m_fg->FindEdge(blockPred, block), true);
                    LABELNAMESET(newBlock->GetFirstInstr()->AsLabelInstr(), "Loop out-edge masking block");
                    // This is a little bit of a misuse of ByteCodeUsesInstr - we're using it as just
                    // a bitvector that we can add things to.
                    IR::ByteCodeUsesInstr* masker = IR::ByteCodeUsesInstr::New(newBlock->GetFirstInstr());
                    masker->m_opcode = Js::OpCode::SpeculatedLoadFence;
                    // Add the one instruction we need to this block
                    newBlock->GetFirstInstr()->InsertAfter(masker);
                    // We need to initialize the data for this block, so that later stages of deadstore work properly.
                    // Setting use count to 0 makes mergesucc create the structures
                    newBlock->SetDataUseCount(0);
                    // If inserting the airlock block also created an airlock compensation block, we need to set the use count on that too.
                    if (newBlock->prev && newBlock->prev->isAirLockCompensationBlock)
                    {
                        newBlock->prev->SetDataUseCount(0);
                    }
                    if (startingLoop->outwardSpeculationMaskInstrs == nullptr)
                    {
                        startingLoop->outwardSpeculationMaskInstrs = JitAnew(this->func->m_fg->alloc, SList<IR::ByteCodeUsesInstr*>, this->func->m_fg->alloc);
                    }
                    // We fill in the instruction later, so we need to add it to the loop's list of such instructions.
                    startingLoop->outwardSpeculationMaskInstrs->Prepend(masker);
                }
            }
        } NEXT_PREDECESSOR_BLOCK;
    }
#endif
    EndIntOverflowDoesNotMatterRange();
    if (!this->IsPrePass() && !block->isDead && block->isLoopHeader)
    {
        // Copy the upward exposed uses as the live-on-back-edge syms
        block->loop->regAlloc.liveOnBackEdgeSyms = block->upwardExposedUses->CopyNew(this->func->m_alloc);
    }
    Assert(!considerSymAsRealUseInNoImplicitCallUses);
#if DBG_DUMP
    if (this->IsTraceEnabled())
    {
        Output::Print(_u("******************************* After Process Block *******************************\n"));
        DumpBlockData(block);
    }
#endif
}
bool
BackwardPass::CanDeadStoreInstrForScopeObjRemoval(Sym *sym) const
{
    if (tag == Js::DeadStorePhase && this->currentInstr->m_func->IsStackArgsEnabled())
    {
        Func * currFunc = this->currentInstr->m_func;
        bool doScopeObjCreation = currFunc->GetJITFunctionBody()->GetDoScopeObjectCreation();
        switch (this->currentInstr->m_opcode)
        {
        case Js::OpCode::InitCachedScope:
        {
            if(!doScopeObjCreation && this->currentInstr->GetDst()->IsScopeObjOpnd(currFunc))
            {
                /*
                 * We don't really dead store this instruction. We just want the source sym of this instruction
                 * to NOT be tracked as USED by this instruction.
                 * This instr will effectively be lowered to dest = MOV NULLObject in the Lowerer phase.
                 */
                return true;
            }
            break;
        }
        case Js::OpCode::LdSlot:
        {
            if (sym && IsFormalParamSym(currFunc, sym))
            {
                return true;
            }
            break;
        }
        case Js::OpCode::CommitScope:
        case Js::OpCode::GetCachedFunc:
        {
            return !doScopeObjCreation && this->currentInstr->GetSrc1()->IsScopeObjOpnd(currFunc);
        }
        case Js::OpCode::BrFncCachedScopeEq:
        case Js::OpCode::BrFncCachedScopeNeq:
        {
            return !doScopeObjCreation && this->currentInstr->GetSrc2()->IsScopeObjOpnd(currFunc);
        }
        case Js::OpCode::CallHelper:
        {
            if (!doScopeObjCreation && this->currentInstr->GetSrc1()->AsHelperCallOpnd()->m_fnHelper == IR::JnHelperMethod::HelperOP_InitCachedFuncs)
            {
                IR::RegOpnd * scopeObjOpnd = this->currentInstr->GetSrc2()->GetStackSym()->GetInstrDef()->GetSrc1()->AsRegOpnd();
                return scopeObjOpnd->IsScopeObjOpnd(currFunc);
            }
            break;
        }
        }
    }
    return false;
}
/*
 * This is for eliminating Scope Object creation during the Heap Arguments optimization.
 */
bool
BackwardPass::DeadStoreOrChangeInstrForScopeObjRemoval(IR::Instr ** pInstrPrev)
{
    IR::Instr * instr = this->currentInstr;
    Func * currFunc = instr->m_func;
    if (this->tag == Js::DeadStorePhase && instr->m_func->IsStackArgsEnabled() && !IsPrePass())
    {
        switch (instr->m_opcode)
        {
        /*
         * This LdSlot loads the formal from the formals array. We replace it with a Ld_A <ArgInSym>.
         * The ArgInSym is inserted at the beginning of the function during the start of the deadstore pass for the top func.
         * In case of an inlinee, it comes from the source sym of the ArgOut instruction to the inlinee.
         */
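        // Illustrative before/after IR (hypothetical syms):
        //     s8 = LdSlot [formalsArraySym + 0]   // load of formal #0 from the formals array
        // becomes
        //     s8 = Ld_A s2                        // s2: the ArgIn stack sym for formal #0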
        case Js::OpCode::LdSlot:
        {
            IR::Opnd * src1 = instr->GetSrc1();
            if (src1 && src1->IsSymOpnd())
            {
                Sym * sym = src1->AsSymOpnd()->m_sym;
                Assert(sym);
                if (IsFormalParamSym(currFunc, sym))
                {
                    AssertMsg(!currFunc->GetJITFunctionBody()->HasImplicitArgIns(), "We don't have mappings between named formals and arguments object here");
                    instr->m_opcode = Js::OpCode::Ld_A;
                    PropertySym * propSym = sym->AsPropertySym();
                    Js::ArgSlot value = (Js::ArgSlot)propSym->m_propertyId;
                    Assert(currFunc->HasStackSymForFormal(value));
                    StackSym * paramStackSym = currFunc->GetStackSymForFormal(value);
                    IR::RegOpnd * srcOpnd = IR::RegOpnd::New(paramStackSym, TyVar, currFunc);
                    instr->ReplaceSrc1(srcOpnd);
                    this->ProcessSymUse(paramStackSym, true, true);
                    if (PHASE_VERBOSE_TRACE1(Js::StackArgFormalsOptPhase))
                    {
                        Output::Print(_u("StackArgFormals : %s (%d) :Replacing LdSlot with Ld_A in Deadstore pass. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(), instr->m_func->GetFunctionNumber());
                        Output::Flush();
                    }
                }
            }
            break;
        }
        case Js::OpCode::CommitScope:
        {
            if (instr->GetSrc1()->IsScopeObjOpnd(currFunc))
            {
                instr->Remove();
                return true;
            }
            break;
        }
        case Js::OpCode::BrFncCachedScopeEq:
        case Js::OpCode::BrFncCachedScopeNeq:
        {
            if (instr->GetSrc2()->IsScopeObjOpnd(currFunc))
            {
                instr->Remove();
                return true;
            }
            break;
        }
        case Js::OpCode::CallHelper:
        {
            // Remove the CALL and all its ArgOut instrs.
            if (instr->GetSrc1()->AsHelperCallOpnd()->m_fnHelper == IR::JnHelperMethod::HelperOP_InitCachedFuncs)
            {
                IR::RegOpnd * scopeObjOpnd = instr->GetSrc2()->GetStackSym()->GetInstrDef()->GetSrc1()->AsRegOpnd();
                if (scopeObjOpnd->IsScopeObjOpnd(currFunc))
                {
                    IR::Instr * instrDef = instr;
                    IR::Instr * nextInstr = instr->m_next;
                    while (instrDef != nullptr)
                    {
                        IR::Instr * instrToDelete = instrDef;
                        if (instrDef->GetSrc2() != nullptr)
                        {
                            instrDef = instrDef->GetSrc2()->GetStackSym()->GetInstrDef();
                            Assert(instrDef->m_opcode == Js::OpCode::ArgOut_A);
                        }
                        else
                        {
                            instrDef = nullptr;
                        }
                        instrToDelete->Remove();
                    }
                    Assert(nextInstr != nullptr);
                    *pInstrPrev = nextInstr->m_prev;
                    return true;
                }
            }
            break;
        }
        case Js::OpCode::GetCachedFunc:
        {
            // <dst> = GetCachedFunc <scopeObject>, <functionNum>
            // is converted to
            // <dst> = NewScFunc <functionNum>, <env: FrameDisplay>
            if (instr->GetSrc1()->IsScopeObjOpnd(currFunc))
            {
                instr->m_opcode = Js::OpCode::NewScFunc;
                IR::Opnd * intConstOpnd = instr->UnlinkSrc2();
                Assert(intConstOpnd->IsIntConstOpnd());
                uint nestedFuncIndex = instr->m_func->GetJITFunctionBody()->GetNestedFuncIndexForSlotIdInCachedScope(intConstOpnd->AsIntConstOpnd()->AsUint32());
                intConstOpnd->Free(instr->m_func);
                instr->ReplaceSrc1(IR::IntConstOpnd::New(nestedFuncIndex, TyUint32, instr->m_func));
                instr->SetSrc2(IR::RegOpnd::New(currFunc->GetLocalFrameDisplaySym(), IRType::TyVar, currFunc));
            }
            break;
        }
        }
    }
    return false;
}
IR::Instr *
BackwardPass::TryChangeInstrForStackArgOpt()
{
    IR::Instr * instr = this->currentInstr;
    if (tag == Js::DeadStorePhase && instr->DoStackArgsOpt(this->func))
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::TypeofElem:
        {
            /*
                Before:
                    dst = TypeofElem arguments[i] <(BailOnStackArgsOutOfActualsRange)>
                After:
                    tmpdst = LdElemI_A arguments[i] <(BailOnStackArgsOutOfActualsRange)>
                    dst = Typeof tmpdst
            */
            AssertMsg(instr->HasBailOutInfo() && (instr->GetBailOutKind() & IR::BailOutKind::BailOnStackArgsOutOfActualsRange), "Why is the bailout kind not set, when it is StackArgOptimized?");
            instr->m_opcode = Js::OpCode::LdElemI_A;
            IR::Opnd * dstOpnd = instr->UnlinkDst();
            IR::RegOpnd * elementOpnd = IR::RegOpnd::New(StackSym::New(instr->m_func), IRType::TyVar, instr->m_func);
            instr->SetDst(elementOpnd);
            IR::Instr * typeOfInstr = IR::Instr::New(Js::OpCode::Typeof, dstOpnd, elementOpnd, instr->m_func);
            instr->InsertAfter(typeOfInstr);
            return typeOfInstr;
        }
        }
    }
    /*
     * The Scope Object sym is kept alive on all code paths:
     * - This lets a bailout record the live Scope Object sym whenever required.
     * - The Scope Object has to be implicitly live whenever the Heap Arguments object is live:
     *   when we restore the Heap Arguments object on the bailout path, it expects the Scope
     *   Object to be restored as well - if one was created.
     * - We do not know detailed information about Heap Arguments object syms (aliasing, etc.)
     *   until the forward pass completes, and we want to avoid dead-sym cleanup during block
     *   merging in the forward pass (the Scope Object is implicitly live even though it is not
     *   explicitly live). Hence this is the optimal spot to do this.
     */
    if (tag == Js::BackwardPhase && instr->m_func->GetScopeObjSym() != nullptr)
    {
        this->currentBlock->upwardExposedUses->Set(instr->m_func->GetScopeObjSym()->m_id);
    }
    return nullptr;
}
void
BackwardPass::TraceDeadStoreOfInstrsForScopeObjectRemoval()
{
    IR::Instr * instr = this->currentInstr;
    if (instr->m_func->IsStackArgsEnabled())
    {
        if ((instr->m_opcode == Js::OpCode::InitCachedScope || instr->m_opcode == Js::OpCode::NewScopeObject) && !IsPrePass())
        {
            if (PHASE_TRACE1(Js::StackArgFormalsOptPhase))
            {
                Output::Print(_u("StackArgFormals : %s (%d) :Removing Scope object creation in Deadstore pass. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(), instr->m_func->GetFunctionNumber());
                Output::Flush();
            }
        }
    }
}
bool
BackwardPass::IsFormalParamSym(Func * func, Sym * sym) const
{
    Assert(sym);
    if (sym->IsPropertySym())
    {
        // If the sym is a PropertySym, then see if the propertyId is within the range of the formals.
        // We can have other properties stored in the scope object other than the formals (following the formals).
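        // Illustrative example (hypothetical function, and assuming GetInParamsCount() includes
        // the implicit "this" - hence the "- 1" below): for "function f(a, b)" the in-params
        // count is 3, so formal propertyIds 0 and 1 are in range, while higher propertyIds name
        // non-formal properties stored on the scope object.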
        PropertySym * propSym = sym->AsPropertySym();
        IntConstType value = propSym->m_propertyId;
        return func->IsFormalsArraySym(propSym->m_stackSym->m_id) &&
            (value >= 0 && value < func->GetJITFunctionBody()->GetInParamsCount() - 1);
    }
    else
    {
        Assert(sym->IsStackSym());
        return !!func->IsFormalsArraySym(sym->AsStackSym()->m_id);
    }
}
#if DBG_DUMP
void
BackwardPass::DumpBlockData(BasicBlock * block)
{
    block->DumpHeader();
    if (block->upwardExposedUses) // may be null for dead blocks
    {
        Output::Print(_u("             Exposed Uses: "));
        block->upwardExposedUses->Dump();
    }
    if (block->typesNeedingKnownObjectLayout)
    {
        Output::Print(_u("Needs Known Object Layout: "));
        block->typesNeedingKnownObjectLayout->Dump();
    }
    if (block->byteCodeUpwardExposedUsed)
    {
        Output::Print(_u("   Byte Code Exposed Uses: "));
        block->byteCodeUpwardExposedUsed->Dump();
    }
    if (!this->IsCollectionPass())
    {
        if (!block->isDead)
        {
            if (this->DoDeadStoreSlots())
            {
                Output::Print(_u("Slot deadStore candidates: "));
                block->slotDeadStoreCandidates->Dump();
            }
            DumpMarkTemp();
        }
    }
    Output::Flush();
}
#endif
bool
BackwardPass::UpdateImplicitCallBailOutKind(IR::Instr *const instr, bool needsBailOutOnImplicitCall)
{
    Assert(instr);
    Assert(instr->HasBailOutInfo());
    IR::BailOutKind implicitCallBailOutKind = needsBailOutOnImplicitCall ? IR::BailOutOnImplicitCalls : IR::BailOutInvalid;
    const IR::BailOutKind instrBailOutKind = instr->GetBailOutKind();
    if (instrBailOutKind & IR::BailOutMarkTempObject)
    {
        // Don't remove the implicit call pre op bailout for mark temp object
        // Remove the mark temp object bit, as we don't need it after the dead store pass
        instr->SetBailOutKind(instrBailOutKind & ~IR::BailOutMarkTempObject);
        return true;
    }
    const IR::BailOutKind instrImplicitCallBailOutKind = instrBailOutKind & ~IR::BailOutKindBits;
    if(instrImplicitCallBailOutKind == IR::BailOutOnImplicitCallsPreOp)
    {
        if(needsBailOutOnImplicitCall)
        {
            implicitCallBailOutKind = IR::BailOutOnImplicitCallsPreOp;
        }
    }
    else if(instrImplicitCallBailOutKind != IR::BailOutOnImplicitCalls && instrImplicitCallBailOutKind != IR::BailOutInvalid)
    {
        // This bailout kind (the value of 'instrImplicitCallBailOutKind') must guarantee that implicit calls will not happen.
        // If it doesn't make such a guarantee, it must be possible to merge this bailout kind with an implicit call bailout
        // kind, and therefore should be part of BailOutKindBits.
        Assert(!needsBailOutOnImplicitCall);
        return true;
    }
    if(instrImplicitCallBailOutKind == implicitCallBailOutKind)
    {
        return true;
    }
    const IR::BailOutKind newBailOutKind = instrBailOutKind - instrImplicitCallBailOutKind + implicitCallBailOutKind;
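    // Worked example (hypothetical combination): if instrBailOutKind is
    // BailOutOnImplicitCallsPreOp | BailOutOnMissingValue and needsBailOutOnImplicitCall is
    // false, then instrImplicitCallBailOutKind is BailOutOnImplicitCallsPreOp and
    // implicitCallBailOutKind is BailOutInvalid, so only BailOutOnMissingValue remains.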
    if(newBailOutKind == IR::BailOutInvalid)
    {
        return false;
    }
    instr->SetBailOutKind(newBailOutKind);
    return true;
}
bool
BackwardPass::ProcessNoImplicitCallUses(IR::Instr *const instr)
{
    Assert(instr);
    if(instr->m_opcode != Js::OpCode::NoImplicitCallUses)
    {
        return false;
    }
    Assert(tag == Js::DeadStorePhase);
    Assert(!instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsSymOpnd());
    Assert(!instr->GetSrc2() || instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsSymOpnd());
    if(IsCollectionPass())
    {
        return true;
    }
    IR::Opnd *const srcs[] = { instr->GetSrc1(), instr->GetSrc2() };
    for(int i = 0; i < sizeof(srcs) / sizeof(srcs[0]) && srcs[i]; ++i)
    {
        IR::Opnd *const src = srcs[i];
        IR::ArrayRegOpnd *arraySrc = nullptr;
        Sym *sym = nullptr;
        switch(src->GetKind())
        {
        case IR::OpndKindReg:
        {
            IR::RegOpnd *const regSrc = src->AsRegOpnd();
            sym = regSrc->m_sym;
            if(considerSymAsRealUseInNoImplicitCallUses && considerSymAsRealUseInNoImplicitCallUses == sym)
            {
                considerSymAsRealUseInNoImplicitCallUses = nullptr;
                ProcessStackSymUse(sym->AsStackSym(), true);
            }
            if(regSrc->IsArrayRegOpnd())
            {
                arraySrc = regSrc->AsArrayRegOpnd();
            }
            break;
        }
        case IR::OpndKindSym:
            sym = src->AsSymOpnd()->m_sym;
            Assert(sym->IsPropertySym());
            break;
        default:
            Assert(false);
            __assume(false);
        }
        currentBlock->noImplicitCallUses->Set(sym->m_id);
        const ValueType valueType(src->GetValueType());
        if(valueType.IsArrayOrObjectWithArray())
        {
            if(valueType.HasNoMissingValues())
            {
                currentBlock->noImplicitCallNoMissingValuesUses->Set(sym->m_id);
            }
            if(!valueType.HasVarElements())
            {
                currentBlock->noImplicitCallNativeArrayUses->Set(sym->m_id);
            }
            if(arraySrc)
            {
                ProcessArrayRegOpndUse(instr, arraySrc);
            }
        }
    }
    if(!IsPrePass())
    {
        currentBlock->RemoveInstr(instr);
    }
    return true;
}
void
BackwardPass::ProcessNoImplicitCallDef(IR::Instr *const instr)
{
    Assert(tag == Js::DeadStorePhase);
    Assert(instr);
    IR::Opnd *const dst = instr->GetDst();
    if(!dst)
    {
        return;
    }
    Sym *dstSym;
    switch(dst->GetKind())
    {
    case IR::OpndKindReg:
        dstSym = dst->AsRegOpnd()->m_sym;
        break;
    case IR::OpndKindSym:
        dstSym = dst->AsSymOpnd()->m_sym;
        if(!dstSym->IsPropertySym())
        {
            return;
        }
        break;
    default:
        return;
    }
    if(!currentBlock->noImplicitCallUses->TestAndClear(dstSym->m_id))
    {
        Assert(!currentBlock->noImplicitCallNoMissingValuesUses->Test(dstSym->m_id));
        Assert(!currentBlock->noImplicitCallNativeArrayUses->Test(dstSym->m_id));
        Assert(!currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->Test(dstSym->m_id));
        Assert(!currentBlock->noImplicitCallArrayLengthSymUses->Test(dstSym->m_id));
        return;
    }
    const bool transferNoMissingValuesUse = !!currentBlock->noImplicitCallNoMissingValuesUses->TestAndClear(dstSym->m_id);
    const bool transferNativeArrayUse = !!currentBlock->noImplicitCallNativeArrayUses->TestAndClear(dstSym->m_id);
    const bool transferJsArrayHeadSegmentSymUse =
        !!currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->TestAndClear(dstSym->m_id);
    const bool transferArrayLengthSymUse = !!currentBlock->noImplicitCallArrayLengthSymUses->TestAndClear(dstSym->m_id);
    IR::Opnd *const src = instr->GetSrc1();
    if(!src || instr->GetSrc2())
    {
        return;
    }
    if(dst->IsRegOpnd() && src->IsRegOpnd())
    {
        if(!OpCodeAttr::NonIntTransfer(instr->m_opcode))
        {
            return;
        }
    }
    else if(
        !(
            // LdFld or similar
            (dst->IsRegOpnd() && src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym()) ||
            // StFld or similar. Don't transfer a field opnd from StFld into the reg opnd src unless the field's value type is
            // definitely array or object with array, because only those value types require implicit calls to be disabled as
            // long as they are live. Other definite value types only require implicit calls to be disabled as long as a live
            // field holds the value, which is up to the StFld when going backwards.
            (src->IsRegOpnd() && dst->GetValueType().IsArrayOrObjectWithArray())
        ) ||
        !instr->TransfersSrcValue())
    {
        return;
    }
    Sym *srcSym = nullptr;
    switch(src->GetKind())
    {
    case IR::OpndKindReg:
        srcSym = src->AsRegOpnd()->m_sym;
        break;
    case IR::OpndKindSym:
        srcSym = src->AsSymOpnd()->m_sym;
        Assert(srcSym->IsPropertySym());
        break;
    default:
        Assert(false);
        __assume(false);
    }
    currentBlock->noImplicitCallUses->Set(srcSym->m_id);
    if(transferNoMissingValuesUse)
    {
        currentBlock->noImplicitCallNoMissingValuesUses->Set(srcSym->m_id);
    }
    if(transferNativeArrayUse)
    {
        currentBlock->noImplicitCallNativeArrayUses->Set(srcSym->m_id);
    }
    if(transferJsArrayHeadSegmentSymUse)
    {
        currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->Set(srcSym->m_id);
    }
    if(transferArrayLengthSymUse)
    {
        currentBlock->noImplicitCallArrayLengthSymUses->Set(srcSym->m_id);
    }
}
template<class F>
IR::Opnd *
BackwardPass::FindNoImplicitCallUse(
    IR::Instr *const instr,
    StackSym *const sym,
    const F IsCheckedUse,
    IR::Instr * *const noImplicitCallUsesInstrRef)
{
    IR::RegOpnd *const opnd = IR::RegOpnd::New(sym, sym->GetType(), instr->m_func);
    IR::Opnd *const use = FindNoImplicitCallUse(instr, opnd, IsCheckedUse, noImplicitCallUsesInstrRef);
    opnd->FreeInternal(instr->m_func);
    return use;
}

template<class F>
IR::Opnd *
BackwardPass::FindNoImplicitCallUse(
    IR::Instr *const instr,
    IR::Opnd *const opnd,
    const F IsCheckedUse,
    IR::Instr * *const noImplicitCallUsesInstrRef)
{
    Assert(instr);
    Assert(instr->m_opcode != Js::OpCode::NoImplicitCallUses);
    // Skip byte-code uses
    IR::Instr *prevInstr = instr->m_prev;
    while(
        prevInstr &&
        !prevInstr->IsLabelInstr() &&
        (!prevInstr->IsRealInstr() || prevInstr->IsByteCodeUsesInstr()) &&
        prevInstr->m_opcode != Js::OpCode::NoImplicitCallUses)
    {
        prevInstr = prevInstr->m_prev;
    }
    // Find the corresponding use in a NoImplicitCallUses instruction
    for(; prevInstr && prevInstr->m_opcode == Js::OpCode::NoImplicitCallUses; prevInstr = prevInstr->m_prev)
    {
        IR::Opnd *const checkedSrcs[] = { prevInstr->GetSrc1(), prevInstr->GetSrc2() };
        for(int i = 0; i < sizeof(checkedSrcs) / sizeof(checkedSrcs[0]) && checkedSrcs[i]; ++i)
        {
            IR::Opnd *const checkedSrc = checkedSrcs[i];
            if(checkedSrc->IsEqual(opnd) && IsCheckedUse(checkedSrc))
            {
                if(noImplicitCallUsesInstrRef)
                {
                    *noImplicitCallUsesInstrRef = prevInstr;
                }
                return checkedSrc;
            }
        }
    }
    if(noImplicitCallUsesInstrRef)
    {
        *noImplicitCallUsesInstrRef = nullptr;
    }
    return nullptr;
}
void
BackwardPass::ProcessArrayRegOpndUse(IR::Instr *const instr, IR::ArrayRegOpnd *const arrayRegOpnd)
{
    Assert(tag == Js::DeadStorePhase);
    Assert(!IsCollectionPass());
    Assert(instr);
    Assert(arrayRegOpnd);
    if(!(arrayRegOpnd->HeadSegmentSym() || arrayRegOpnd->HeadSegmentLengthSym() || arrayRegOpnd->LengthSym()))
    {
        return;
    }
    const ValueType arrayValueType(arrayRegOpnd->GetValueType());
    const bool isJsArray = !arrayValueType.IsLikelyTypedArray();
    Assert(isJsArray == arrayValueType.IsArrayOrObjectWithArray());
    Assert(!isJsArray == arrayValueType.IsOptimizedTypedArray());
    BasicBlock *const block = currentBlock;
    if(!IsPrePass() &&
       (arrayRegOpnd->HeadSegmentSym() || arrayRegOpnd->HeadSegmentLengthSym()) &&
       (!isJsArray || instr->m_opcode != Js::OpCode::NoImplicitCallUses))
    {
        bool headSegmentIsLoadedButUnused =
            instr->loadedArrayHeadSegment &&
            arrayRegOpnd->HeadSegmentSym() &&
            !block->upwardExposedUses->Test(arrayRegOpnd->HeadSegmentSym()->m_id);
        const bool headSegmentLengthIsLoadedButUnused =
            instr->loadedArrayHeadSegmentLength &&
            arrayRegOpnd->HeadSegmentLengthSym() &&
            !block->upwardExposedUses->Test(arrayRegOpnd->HeadSegmentLengthSym()->m_id);
        if(headSegmentLengthIsLoadedButUnused && instr->extractedUpperBoundCheckWithoutHoisting)
        {
            // Find the upper bound check (index[src1] <= headSegmentLength[src2] + offset[dst])
            IR::Instr *upperBoundCheck = this->globOpt->FindUpperBoundsCheckInstr(instr);
            Assert(upperBoundCheck && upperBoundCheck != instr);
            Assert(upperBoundCheck->GetSrc2()->AsRegOpnd()->m_sym == arrayRegOpnd->HeadSegmentLengthSym());
            // Find the head segment length load
            IR::Instr *headSegmentLengthLoad = this->globOpt->FindArraySegmentLoadInstr(upperBoundCheck);
            Assert(headSegmentLengthLoad->GetDst()->AsRegOpnd()->m_sym == arrayRegOpnd->HeadSegmentLengthSym());
            Assert(
                headSegmentLengthLoad->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->m_sym ==
                (isJsArray ? arrayRegOpnd->HeadSegmentSym() : arrayRegOpnd->m_sym));
            // Fold the head segment length load into the upper bound check. Keep the load instruction there with a Nop so that
            // the head segment length sym can be marked as unused before the Nop. The lowerer will remove it.
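            // Illustrative IR shape of the fold (hypothetical syms/offsets):
            //     s3 = Ld [s2 + offset(segmentLength)]   ; head segment length load -> becomes Nop
            //     BoundCheck s1 <= s3 + c                ; -> BoundCheck s1 <= [s2 + offset(segmentLength)] + c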
            upperBoundCheck->ReplaceSrc2(headSegmentLengthLoad->UnlinkSrc1());
            headSegmentLengthLoad->m_opcode = Js::OpCode::Nop;
            if(isJsArray)
            {
                // The head segment length is on the head segment, so the bound check now uses the head segment sym
                headSegmentIsLoadedButUnused = false;
            }
        }
        if(headSegmentIsLoadedButUnused || headSegmentLengthIsLoadedButUnused)
        {
            // Check if the head segment / head segment length are being loaded here. If so, remove them and let the fast
            // path load them since it does a better job.
            IR::ArrayRegOpnd *noImplicitCallArrayUse = nullptr;
            if(isJsArray)
            {
                IR::Opnd *const use =
                    FindNoImplicitCallUse(
                        instr,
                        arrayRegOpnd,
                        [&](IR::Opnd *const checkedSrc) -> bool
                        {
                            const ValueType checkedSrcValueType(checkedSrc->GetValueType());
                            if(!checkedSrcValueType.IsLikelyObject() ||
                               checkedSrcValueType.GetObjectType() != arrayValueType.GetObjectType())
                            {
                                return false;
                            }
                            IR::RegOpnd *const checkedRegSrc = checkedSrc->AsRegOpnd();
                            if(!checkedRegSrc->IsArrayRegOpnd())
                            {
                                return false;
                            }
                            IR::ArrayRegOpnd *const checkedArraySrc = checkedRegSrc->AsArrayRegOpnd();
                            if(headSegmentIsLoadedButUnused &&
                               checkedArraySrc->HeadSegmentSym() != arrayRegOpnd->HeadSegmentSym())
                            {
                                return false;
                            }
                            if(headSegmentLengthIsLoadedButUnused &&
                               checkedArraySrc->HeadSegmentLengthSym() != arrayRegOpnd->HeadSegmentLengthSym())
                            {
                                return false;
                            }
                            return true;
                        });
                if(use)
                {
                    noImplicitCallArrayUse = use->AsRegOpnd()->AsArrayRegOpnd();
                }
            }
            else if(headSegmentLengthIsLoadedButUnused)
            {
                // A typed array's head segment length may be zeroed when the typed array's buffer is transferred to a web
                // worker, so the head segment length sym use is included in a NoImplicitCallUses instruction. Since there
                // are no forward uses of the head segment length sym, to allow removing the extracted head segment length
                // load, the corresponding head segment length sym use in the NoImplicitCallUses instruction must also be
                // removed.
                IR::Instr *noImplicitCallUsesInstr;
                IR::Opnd *const use =
                    FindNoImplicitCallUse(
                        instr,
                        arrayRegOpnd->HeadSegmentLengthSym(),
                        [&](IR::Opnd *const checkedSrc) -> bool
                        {
                            return checkedSrc->AsRegOpnd()->m_sym == arrayRegOpnd->HeadSegmentLengthSym();
                        },
                        &noImplicitCallUsesInstr);
                if(use)
                {
                    Assert(noImplicitCallUsesInstr);
                    Assert(!noImplicitCallUsesInstr->GetDst());
                    Assert(noImplicitCallUsesInstr->GetSrc1());
                    if(use == noImplicitCallUsesInstr->GetSrc1())
                    {
                        if(noImplicitCallUsesInstr->GetSrc2())
                        {
                            noImplicitCallUsesInstr->ReplaceSrc1(noImplicitCallUsesInstr->UnlinkSrc2());
                        }
                        else
                        {
                            noImplicitCallUsesInstr->FreeSrc1();
                            noImplicitCallUsesInstr->m_opcode = Js::OpCode::Nop;
                        }
                    }
                    else
                    {
                        Assert(use == noImplicitCallUsesInstr->GetSrc2());
                        noImplicitCallUsesInstr->FreeSrc2();
                    }
                }
            }
            if(headSegmentIsLoadedButUnused &&
               (!isJsArray || !arrayRegOpnd->HeadSegmentLengthSym() || headSegmentLengthIsLoadedButUnused))
            {
                // For JS arrays, the head segment length load is dependent on the head segment. So, only remove the head
                // segment load if the head segment length load can also be removed.
                arrayRegOpnd->RemoveHeadSegmentSym();
                instr->loadedArrayHeadSegment = false;
                if(noImplicitCallArrayUse)
                {
                    noImplicitCallArrayUse->RemoveHeadSegmentSym();
                }
            }
            if(headSegmentLengthIsLoadedButUnused)
            {
                arrayRegOpnd->RemoveHeadSegmentLengthSym();
                instr->loadedArrayHeadSegmentLength = false;
                if(noImplicitCallArrayUse)
                {
                    noImplicitCallArrayUse->RemoveHeadSegmentLengthSym();
                }
            }
        }
    }
    if(isJsArray && instr->m_opcode != Js::OpCode::NoImplicitCallUses)
    {
        // Only uses in NoImplicitCallUses instructions are counted toward liveness
        return;
    }
    // Treat dependent syms as uses. For JS arrays, only uses in NoImplicitCallUses count because only then the assumptions made
    // on the dependent syms are guaranteed to be valid. Similarly for typed arrays, a head segment length sym use counts toward
    // liveness only in a NoImplicitCallUses instruction.
    if(arrayRegOpnd->HeadSegmentSym())
    {
        ProcessStackSymUse(arrayRegOpnd->HeadSegmentSym(), true);
        if(isJsArray)
        {
            block->noImplicitCallUses->Set(arrayRegOpnd->HeadSegmentSym()->m_id);
            block->noImplicitCallJsArrayHeadSegmentSymUses->Set(arrayRegOpnd->HeadSegmentSym()->m_id);
        }
    }
    if(arrayRegOpnd->HeadSegmentLengthSym())
    {
        if(isJsArray)
        {
            ProcessStackSymUse(arrayRegOpnd->HeadSegmentLengthSym(), true);
            block->noImplicitCallUses->Set(arrayRegOpnd->HeadSegmentLengthSym()->m_id);
            block->noImplicitCallJsArrayHeadSegmentSymUses->Set(arrayRegOpnd->HeadSegmentLengthSym()->m_id);
        }
        else
        {
            // ProcessNoImplicitCallUses automatically marks JS array reg opnds and their corresponding syms as live. A typed
            // array's head segment length sym also needs to be marked as live at its use in the NoImplicitCallUses instruction,
            // but it is just in a reg opnd. Flag the opnd to have the sym be marked as live when that instruction is processed.
            Assert(!considerSymAsRealUseInNoImplicitCallUses);
            IR::Opnd *const use =
                FindNoImplicitCallUse(
                    instr,
                    arrayRegOpnd->HeadSegmentLengthSym(),
                    [&](IR::Opnd *const checkedSrc) -> bool
                    {
                        return checkedSrc->AsRegOpnd()->m_sym == arrayRegOpnd->HeadSegmentLengthSym();
                    });
            if(use)
            {
                considerSymAsRealUseInNoImplicitCallUses = arrayRegOpnd->HeadSegmentLengthSym();
            }
        }
    }
    StackSym *const lengthSym = arrayRegOpnd->LengthSym();
    if(lengthSym && lengthSym != arrayRegOpnd->HeadSegmentLengthSym())
    {
        ProcessStackSymUse(lengthSym, true);
        Assert(arrayValueType.IsArray());
        block->noImplicitCallUses->Set(lengthSym->m_id);
        block->noImplicitCallArrayLengthSymUses->Set(lengthSym->m_id);
    }
}
void
BackwardPass::ProcessNewScObject(IR::Instr* instr)
{
    if (this->tag != Js::DeadStorePhase || IsCollectionPass())
    {
        return;
    }
    if (!instr->IsNewScObjectInstr())
    {
        return;
    }
    if (instr->HasBailOutInfo())
    {
        Assert(instr->IsProfiledInstr());
        Assert(instr->GetBailOutKind() == IR::BailOutFailedCtorGuardCheck);
        Assert(instr->GetDst()->IsRegOpnd());
        BasicBlock * block = this->currentBlock;
        StackSym* objSym = instr->GetDst()->AsRegOpnd()->GetStackSym();
        if (block->upwardExposedUses->Test(objSym->m_id))
        {
            // If the object created here is used downstream, let's capture any property operations we must protect.
            Assert(instr->GetDst()->AsRegOpnd()->GetStackSym()->HasObjectTypeSym());
            JITTimeConstructorCache* ctorCache = instr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId));
            if (block->stackSymToFinalType != nullptr)
            {
                // NewScObject is the origin of the object pointer. If we have a final type in hand, do the
                // transition here.
                AddPropertyCacheBucket *pBucket = block->stackSymToFinalType->Get(objSym->m_id);
                if (pBucket &&
                    pBucket->GetInitialType() != nullptr &&
                    pBucket->GetFinalType() != pBucket->GetInitialType())
                {
                    Assert(pBucket->GetInitialType() == ctorCache->GetType());
                    if (!this->IsPrePass())
                    {
                        this->InsertTypeTransition(instr->m_next, objSym, pBucket, block->upwardExposedUses);
                    }
#if DBG
                    pBucket->deadStoreUnavailableInitialType = pBucket->GetInitialType();
                    if (pBucket->deadStoreUnavailableFinalType == nullptr)
                    {
                        pBucket->deadStoreUnavailableFinalType = pBucket->GetFinalType();
                    }
                    pBucket->SetInitialType(nullptr);
                    pBucket->SetFinalType(nullptr);
#else
                    block->stackSymToFinalType->Clear(objSym->m_id);
#endif
                }
            }
            if (block->stackSymToGuardedProperties != nullptr)
            {
                ObjTypeGuardBucket* bucket = block->stackSymToGuardedProperties->Get(objSym->m_id);
                if (bucket != nullptr)
                {
                    BVSparse<JitArenaAllocator>* guardedPropertyOps = bucket->GetGuardedPropertyOps();
                    if (guardedPropertyOps != nullptr)
                    {
                        ctorCache->EnsureGuardedPropOps(this->func->m_alloc);
                        ctorCache->AddGuardedPropOps(guardedPropertyOps);
                        bucket->SetGuardedPropertyOps(nullptr);
                        JitAdelete(this->tempAlloc, guardedPropertyOps);
                        block->stackSymToGuardedProperties->Clear(objSym->m_id);
                    }
                }
            }
        }
        else
        {
            // If the object is not used downstream, let's remove the bailout and let the lowerer emit a fast path along with
            // the fallback on helper, if the ctor cache ever became invalid.
            instr->ClearBailOutInfo();
            if (preOpBailOutInstrToProcess == instr)
            {
                preOpBailOutInstrToProcess = nullptr;
            }
#if DBG
            // We're creating a brand new object here, so no type check upstream could protect any properties of this
            // object. Let's make sure we don't have any left to protect.
            ObjTypeGuardBucket* bucket = block->stackSymToGuardedProperties != nullptr ?
                block->stackSymToGuardedProperties->Get(objSym->m_id) : nullptr;
            Assert(bucket == nullptr || bucket->GetGuardedPropertyOps()->IsEmpty());
#endif
        }
    }
}
void
BackwardPass::UpdateArrayValueTypes(IR::Instr *const instr, IR::Opnd *origOpnd)
{
    Assert(tag == Js::DeadStorePhase);
    Assert(!IsPrePass());
    Assert(instr);
    if(!origOpnd)
    {
        return;
    }
    IR::Instr *opndOwnerInstr = instr;
    switch(instr->m_opcode)
    {
    case Js::OpCode::StElemC:
    case Js::OpCode::StArrSegElemC:
        // These may not be fixed if we are unsure about the type of the array they're storing to
        // (because it relies on profile data) and we weren't able to hoist the array check.
        return;
    }
    Sym *sym;
    IR::Opnd* opnd = origOpnd;
    IR::ArrayRegOpnd *arrayOpnd;
    switch(opnd->GetKind())
    {
    case IR::OpndKindIndir:
        opnd = opnd->AsIndirOpnd()->GetBaseOpnd();
        // fall-through
    case IR::OpndKindReg:
    {
        IR::RegOpnd *const regOpnd = opnd->AsRegOpnd();
        sym = regOpnd->m_sym;
        arrayOpnd = regOpnd->IsArrayRegOpnd() ? regOpnd->AsArrayRegOpnd() : nullptr;
        break;
    }
    case IR::OpndKindSym:
        sym = opnd->AsSymOpnd()->m_sym;
        if(!sym->IsPropertySym())
        {
            return;
        }
        arrayOpnd = nullptr;
        break;
    default:
        return;
    }
    const ValueType valueType(opnd->GetValueType());
    if(!valueType.IsAnyOptimizedArray())
    {
        return;
    }
    const bool isJsArray = valueType.IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueType.IsOptimizedTypedArray());
    const bool noForwardImplicitCallUses = currentBlock->noImplicitCallUses->IsEmpty();
    bool changeArray = isJsArray && !opnd->IsValueTypeFixed() && noForwardImplicitCallUses;
    bool changeNativeArray =
        isJsArray &&
        !opnd->IsValueTypeFixed() &&
        !valueType.HasVarElements() &&
        currentBlock->noImplicitCallNativeArrayUses->IsEmpty();
    bool changeNoMissingValues =
        isJsArray &&
        !opnd->IsValueTypeFixed() &&
        valueType.HasNoMissingValues() &&
        currentBlock->noImplicitCallNoMissingValuesUses->IsEmpty();
    const bool noForwardJsArrayHeadSegmentSymUses = currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->IsEmpty();
    bool removeHeadSegmentSym = isJsArray && arrayOpnd && arrayOpnd->HeadSegmentSym() && noForwardJsArrayHeadSegmentSymUses;
    bool removeHeadSegmentLengthSym =
        arrayOpnd &&
        arrayOpnd->HeadSegmentLengthSym() &&
        (isJsArray ? noForwardJsArrayHeadSegmentSymUses : noForwardImplicitCallUses);
    Assert(!isJsArray || !arrayOpnd || !arrayOpnd->LengthSym() || valueType.IsArray());
    bool removeLengthSym =
        isJsArray &&
        arrayOpnd &&
        arrayOpnd->LengthSym() &&
        currentBlock->noImplicitCallArrayLengthSymUses->IsEmpty();
    if(!(changeArray || changeNoMissingValues || changeNativeArray || removeHeadSegmentSym || removeHeadSegmentLengthSym))
    {
        return;
    }
    // We have a definitely-array value type for the base, but either implicit calls are not currently being disabled for
    // legally using the value type as a definite array, or we are not currently bailing out upon creating a missing value
    // for legally using the value type as a definite array with no missing values.
    // For source opnds, ensure that a NoImplicitCallUses immediately precedes this instruction. Otherwise, convert the value
    // type to an appropriate version so that the lowerer doesn't incorrectly treat it as it says.
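    // Illustrative effect (hypothetical value types): a source opnd typed as definite
    // "Array with no missing values", when no suitable NoImplicitCallUses guards it here, is
    // weakened below via ToLikely() to a "likely" variant, so the Lowerer re-checks the type
    // at runtime instead of trusting it.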
    if(opnd != opndOwnerInstr->GetDst())
    {
        if(isJsArray)
        {
            IR::Opnd *const checkedSrc =
                FindNoImplicitCallUse(
                    instr,
                    opnd,
                    [&](IR::Opnd *const checkedSrc) -> bool
                    {
                        const ValueType checkedSrcValueType(checkedSrc->GetValueType());
                        return
                            checkedSrcValueType.IsLikelyObject() &&
                            checkedSrcValueType.GetObjectType() == valueType.GetObjectType();
                    });
            if(checkedSrc)
            {
                // Implicit calls will be disabled to the point immediately before this instruction
                changeArray = false;
                const ValueType checkedSrcValueType(checkedSrc->GetValueType());
                if(changeNativeArray &&
                   !checkedSrcValueType.HasVarElements() &&
                   checkedSrcValueType.HasIntElements() == valueType.HasIntElements())
                {
                    // If necessary, instructions before this will bail out on converting a native array
                    changeNativeArray = false;
                }
                if(changeNoMissingValues && checkedSrcValueType.HasNoMissingValues())
                {
                    // If necessary, instructions before this will bail out on creating a missing value
                    changeNoMissingValues = false;
                }
                if((removeHeadSegmentSym || removeHeadSegmentLengthSym || removeLengthSym) && checkedSrc->IsRegOpnd())
                {
                    IR::RegOpnd *const checkedRegSrc = checkedSrc->AsRegOpnd();
                    if(checkedRegSrc->IsArrayRegOpnd())
                    {
                        IR::ArrayRegOpnd *const checkedArraySrc = checkedSrc->AsRegOpnd()->AsArrayRegOpnd();
                        if(removeHeadSegmentSym && checkedArraySrc->HeadSegmentSym() == arrayOpnd->HeadSegmentSym())
                        {
                            // If necessary, instructions before this will bail out upon invalidating head segment sym
                            removeHeadSegmentSym = false;
                        }
                        if(removeHeadSegmentLengthSym &&
                           checkedArraySrc->HeadSegmentLengthSym() == arrayOpnd->HeadSegmentLengthSym())
                        {
                            // If necessary, instructions before this will bail out upon invalidating head segment length sym
                            removeHeadSegmentLengthSym = false;
                        }
                        if(removeLengthSym && checkedArraySrc->LengthSym() == arrayOpnd->LengthSym())
                        {
                            // If necessary, instructions before this will bail out upon invalidating a length sym
                            removeLengthSym = false;
                        }
                    }
                }
            }
        }
        else
        {
            Assert(removeHeadSegmentLengthSym);
            // A typed array's head segment length may be zeroed when the typed array's buffer is transferred to a web worker,
            // so the head segment length sym use is included in a NoImplicitCallUses instruction. Since there are no forward
            // uses of any head segment length syms, to allow removing the extracted head segment length
            // load, the corresponding head segment length sym use in the NoImplicitCallUses instruction must also be
            // removed.
            IR::Opnd *const use =
                FindNoImplicitCallUse(
                    instr,
                    arrayOpnd->HeadSegmentLengthSym(),
                    [&](IR::Opnd *const checkedSrc) -> bool
                    {
                        return checkedSrc->AsRegOpnd()->m_sym == arrayOpnd->HeadSegmentLengthSym();
                    });
            if(use)
            {
                // Implicit calls will be disabled to the point immediately before this instruction
                removeHeadSegmentLengthSym = false;
            }
        }
    }
    if(changeArray || changeNativeArray)
    {
        if(arrayOpnd)
        {
            opnd = arrayOpnd->CopyAsRegOpnd(opndOwnerInstr->m_func);
            if (origOpnd->IsIndirOpnd())
            {
                origOpnd->AsIndirOpnd()->ReplaceBaseOpnd(opnd->AsRegOpnd());
            }
            else
            {
                opndOwnerInstr->Replace(arrayOpnd, opnd);
            }
            arrayOpnd = nullptr;
        }
        opnd->SetValueType(valueType.ToLikely());
    }
    else
    {
        if(changeNoMissingValues)
        {
            opnd->SetValueType(valueType.SetHasNoMissingValues(false));
        }
        if(removeHeadSegmentSym)
        {
            Assert(arrayOpnd);
            arrayOpnd->RemoveHeadSegmentSym();
        }
        if(removeHeadSegmentLengthSym)
        {
            Assert(arrayOpnd);
            arrayOpnd->RemoveHeadSegmentLengthSym();
        }
        if(removeLengthSym)
        {
            Assert(arrayOpnd);
            arrayOpnd->RemoveLengthSym();
        }
    }
}
void
BackwardPass::UpdateArrayBailOutKind(IR::Instr *const instr)
{
    Assert(!IsPrePass());
    Assert(instr);
    Assert(instr->HasBailOutInfo());

    if ((instr->m_opcode != Js::OpCode::StElemI_A && instr->m_opcode != Js::OpCode::StElemI_A_Strict &&
         instr->m_opcode != Js::OpCode::Memcopy && instr->m_opcode != Js::OpCode::Memset) ||
        !instr->GetDst()->IsIndirOpnd())
    {
        return;
    }

    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    if(baseValueType.IsNotArrayOrObjectWithArray())
    {
        return;
    }

    IR::BailOutKind includeBailOutKinds = IR::BailOutInvalid;
    if (!baseValueType.IsNotNativeArray() &&
        (!baseValueType.IsLikelyNativeArray() || instr->GetSrc1()->IsVar()) &&
        !currentBlock->noImplicitCallNativeArrayUses->IsEmpty() &&
        !(instr->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall))
    {
        // There is an upwards-exposed use of a native array. Since the array referenced by this instruction can be aliased,
        // this instruction needs to bail out if it converts the native array even if this array specifically is not
        // upwards-exposed.
        includeBailOutKinds |= IR::BailOutConvertedNativeArray;
    }

    if(baseOpnd->IsArrayRegOpnd() && baseOpnd->AsArrayRegOpnd()->EliminatedUpperBoundCheck())
    {
        if(instr->extractedUpperBoundCheckWithoutHoisting && !currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->IsEmpty())
        {
            // See comment below regarding head segment invalidation. A failed upper bound check usually means that it will
            // invalidate the head segment length, so change the bailout kind on the upper bound check to have it bail out for
            // the right reason. Even though the store may actually occur in a non-head segment, which would not invalidate the
            // head segment or length, any store outside the head segment bounds causes head segment load elimination to be
            // turned off for the store, because the segment structure of the array is not guaranteed to be the same every time.
            IR::Instr *upperBoundCheck = this->globOpt->FindUpperBoundsCheckInstr(instr);
            Assert(upperBoundCheck && upperBoundCheck != instr);
            if(upperBoundCheck->GetBailOutKind() == IR::BailOutOnArrayAccessHelperCall)
            {
                upperBoundCheck->SetBailOutKind(IR::BailOutOnInvalidatedArrayHeadSegment);
            }
            else
            {
                Assert(upperBoundCheck->GetBailOutKind() == IR::BailOutOnFailedHoistedBoundCheck);
            }
        }
    }
    else
    {
        if(!currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->IsEmpty())
        {
            // There is an upwards-exposed use of a segment sym. Since the head segment syms referenced by this instruction can
            // be aliased, this instruction needs to bail out if it changes the segment syms it references even if the ones it
            // references specifically are not upwards-exposed. This bailout kind also guarantees that this element store will
            // not create missing values.
            includeBailOutKinds |= IR::BailOutOnInvalidatedArrayHeadSegment;
        }
        else if(
            !currentBlock->noImplicitCallNoMissingValuesUses->IsEmpty() &&
            !(instr->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall))
        {
            // There is an upwards-exposed use of an array with no missing values. Since the array referenced by this
            // instruction can be aliased, this instruction needs to bail out if it creates a missing value in the array even if
            // this array specifically is not upwards-exposed.
            includeBailOutKinds |= IR::BailOutOnMissingValue;
        }

        if(!baseValueType.IsNotArray() && !currentBlock->noImplicitCallArrayLengthSymUses->IsEmpty())
        {
            // There is an upwards-exposed use of a length sym. Since the length sym referenced by this instruction can be
            // aliased, this instruction needs to bail out if it changes the length sym it references even if the ones it
            // references specifically are not upwards-exposed.
            includeBailOutKinds |= IR::BailOutOnInvalidatedArrayLength;
        }
    }

    if(!includeBailOutKinds)
    {
        return;
    }
    Assert(!(includeBailOutKinds & ~IR::BailOutKindBits));
    instr->SetBailOutKind(instr->GetBailOutKind() | includeBailOutKinds);
}
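
// ProcessStackSymUse records a use of a stack sym while walking backward: it tracks the var version of the sym
// for byte code upward-exposed uses (so a bailout can restore the byte code register), feeds the mark-temp
// trackers, and returns whether the sym was already upwards-exposed. A false return means no later use exists,
// so callers can mark this operand as the last use (dead).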
bool
BackwardPass::ProcessStackSymUse(StackSym * stackSym, BOOLEAN isNonByteCodeUse)
{
    BasicBlock * block = this->currentBlock;

    if (this->DoByteCodeUpwardExposedUsed())
    {
        if (!isNonByteCodeUse && stackSym->HasByteCodeRegSlot())
        {
            // Always track the sym use on the var sym.
            StackSym * byteCodeUseSym = stackSym;
            if (byteCodeUseSym->IsTypeSpec())
            {
                // It has to have a var version for byte code regs
                byteCodeUseSym = byteCodeUseSym->GetVarEquivSym(nullptr);
            }
            block->byteCodeUpwardExposedUsed->Set(byteCodeUseSym->m_id);
#if DBG
            // We can only track first-level function stack syms right now
            if (byteCodeUseSym->GetByteCodeFunc() == this->func)
            {
                Js::RegSlot byteCodeRegSlot = byteCodeUseSym->GetByteCodeRegSlot();
                if (block->byteCodeRestoreSyms[byteCodeRegSlot] != byteCodeUseSym)
                {
                    AssertMsg(block->byteCodeRestoreSyms[byteCodeRegSlot] == nullptr,
                        "Can't have two active lifetimes for the same byte code register");
                    block->byteCodeRestoreSyms[byteCodeRegSlot] = byteCodeUseSym;
                }
            }
#endif
        }
    }

    if(IsCollectionPass())
    {
        return true;
    }

    if (this->DoMarkTempNumbers())
    {
        Assert((block->loop != nullptr) == block->tempNumberTracker->HasTempTransferDependencies());
        block->tempNumberTracker->ProcessUse(stackSym, this);
    }

    if (this->DoMarkTempObjects())
    {
        Assert((block->loop != nullptr) == block->tempObjectTracker->HasTempTransferDependencies());
        block->tempObjectTracker->ProcessUse(stackSym, this);
    }
#if DBG
    if (this->DoMarkTempObjectVerify())
    {
        Assert((block->loop != nullptr) == block->tempObjectVerifyTracker->HasTempTransferDependencies());
        block->tempObjectVerifyTracker->ProcessUse(stackSym, this);
    }
#endif
    return !!block->upwardExposedUses->TestAndSet(stackSym->m_id);
}
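
// ProcessSymUse dispatches a sym use to the right tracking. For a property sym, the underlying stack sym is
// processed first, the slot is removed from the dead-store candidates, and field liveness is tracked only in
// the backward phase; in the dead-store phase a property sym use by itself does not keep anything alive.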
bool
BackwardPass::ProcessSymUse(Sym * sym, bool isRegOpndUse, BOOLEAN isNonByteCodeUse)
{
    BasicBlock * block = this->currentBlock;

    if (CanDeadStoreInstrForScopeObjRemoval(sym))
    {
        return false;
    }

    if (sym->IsPropertySym())
    {
        PropertySym * propertySym = sym->AsPropertySym();
        ProcessStackSymUse(propertySym->m_stackSym, isNonByteCodeUse);

        if(IsCollectionPass())
        {
            return true;
        }

        if (this->DoDeadStoreSlots())
        {
            block->slotDeadStoreCandidates->Clear(propertySym->m_id);
        }

        if (tag == Js::BackwardPhase)
        {
            // Backward phase tracks liveness of fields to tell GlobOpt where we may need bailout.
            return this->ProcessPropertySymUse(propertySym);
        }
        else
        {
            // Dead-store phase tracks copy propped syms, so it only cares about ByteCodeUses we inserted,
            // not live fields.
            return false;
        }
    }

    return ProcessStackSymUse(sym->AsStackSym(), isNonByteCodeUse);
}

bool
BackwardPass::MayPropertyBeWrittenTo(Js::PropertyId propertyId)
{
    return this->func->anyPropertyMayBeWrittenTo ||
        (this->func->propertiesWrittenTo != nullptr && this->func->propertiesWrittenTo->ContainsKey(propertyId));
}
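
// ProcessPropertySymOpndUse handles operands that take part in an object type check sequence. In the backward
// phase it just marks the object type sym live; in the dead-store phase it decides whether this operand's type
// check is dead, tracks object-header-inlined layout dependencies, flushes pending final-type transitions at
// points that require a correct type, and updates the guarded-property and write-guard bookkeeping below.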
void
BackwardPass::ProcessPropertySymOpndUse(IR::PropertySymOpnd * opnd)
{
    // If this operand doesn't participate in the type check sequence it's a pass-through.
    // We will not set any bits on the operand and we will ignore them when lowering.
    if (!opnd->IsTypeCheckSeqCandidate())
    {
        return;
    }

    AssertMsg(opnd->HasObjectTypeSym(), "Optimized property sym operand without a type sym?");
    SymID typeSymId = opnd->GetObjectTypeSym()->m_id;

    BasicBlock * block = this->currentBlock;

    if (this->tag == Js::BackwardPhase)
    {
        // In the backward phase, we have no availability info, and we're trying to see
        // where there are live fields so we can decide where to put bailouts.
        Assert(opnd->MayNeedTypeCheckProtection());
        block->upwardExposedFields->Set(typeSymId);

        TrackObjTypeSpecWriteGuards(opnd, block);
    }
    else
    {
        // In the dead-store phase, we're trying to see where the lowered code needs to make sure to check
        // types for downstream load/stores. We're also setting up the upward-exposed uses at loop headers
        // so register allocation will be correct.
        Assert(opnd->MayNeedTypeCheckProtection());

        const bool isStore = opnd == this->currentInstr->GetDst();

        // Note that we don't touch upwardExposedUses here.
        if (opnd->IsTypeAvailable())
        {
            opnd->SetTypeDead(!block->upwardExposedFields->TestAndSet(typeSymId));

            if (opnd->IsTypeChecked() && opnd->IsObjectHeaderInlined())
            {
                // The object's type must not change in a way that changes the layout.
                // If we see a StFld with a type check bailout between here and the type check that guards this
                // property, we must not dead-store the StFld's type check bailout, even if that operand's type appears
                // dead, because that object may alias this one.
                BVSparse<JitArenaAllocator>* bv = block->typesNeedingKnownObjectLayout;
                if (bv == nullptr)
                {
                    bv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
                    block->typesNeedingKnownObjectLayout = bv;
                }
                bv->Set(typeSymId);
            }
        }
        else
        {
            opnd->SetTypeDead(
                !block->upwardExposedFields->TestAndClear(typeSymId) &&
                (
                    // Don't set the type dead if this is a store that may change the layout in a way that invalidates
                    // optimized load/stores downstream. Leave it non-dead in that case so the type check bailout
                    // is preserved and so that Lower will generate the bailout properly.
                    !isStore ||
                    !block->typesNeedingKnownObjectLayout ||
                    block->typesNeedingKnownObjectLayout->IsEmpty()
                )
            );

            BVSparse<JitArenaAllocator>* bv = block->typesNeedingKnownObjectLayout;
            if (bv != nullptr)
            {
                bv->Clear(typeSymId);
            }
        }

        bool mayNeedTypeTransition = true;
        if (!opnd->HasTypeMismatch() && func->DoGlobOpt())
        {
            mayNeedTypeTransition = !isStore;
        }
        if (mayNeedTypeTransition &&
            !this->IsPrePass() &&
            !this->currentInstr->HasBailOutInfo() &&
            (opnd->NeedsPrimaryTypeCheck() ||
             opnd->NeedsLocalTypeCheck() ||
             opnd->NeedsLoadFromProtoTypeCheck()))
        {
            // This is a "checked" opnd that nevertheless will have some kind of type check generated for it.
            // (Typical case is a load from prototype with no upstream guard.)
            // If the type check fails, we will call a helper, which will require that the type be correct here.
            // Final type can't be pushed up past this point. Do whatever type transition is required.
            if (block->stackSymToFinalType != nullptr)
            {
                StackSym *baseSym = opnd->GetObjectSym();
                AddPropertyCacheBucket *pBucket = block->stackSymToFinalType->Get(baseSym->m_id);
                if (pBucket &&
                    pBucket->GetFinalType() != nullptr &&
                    pBucket->GetFinalType() != pBucket->GetInitialType())
                {
                    this->InsertTypeTransition(this->currentInstr->m_next, baseSym, pBucket, block->upwardExposedUses);
                    pBucket->SetFinalType(pBucket->GetInitialType());
                }
            }
        }

        if (!opnd->HasTypeMismatch() && func->DoGlobOpt())
        {
            // Do this after the above code, as the value of the final type may change there.
            TrackAddPropertyTypes(opnd, block);
        }

        TrackObjTypeSpecProperties(opnd, block);
        TrackObjTypeSpecWriteGuards(opnd, block);
    }
}
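
// TrackObjTypeSpecProperties pushes the set of guarded property operations for an object sym up the flow graph
// (in per-block ObjTypeGuardBucket entries) and drops it on the type check that protects them. Roughly, walking
// backward:
//
//     o.x = ...         // protected use: add its ObjTypeSpecFldId to o's bucket
//     ...
//     (type check on o) // primary check: attach the accumulated guarded ops to this operand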
void
BackwardPass::TrackObjTypeSpecProperties(IR::PropertySymOpnd *opnd, BasicBlock *block)
{
    Assert(tag == Js::DeadStorePhase);
    Assert(opnd->IsTypeCheckSeqCandidate());

    // Now that we're in the dead store pass and we know definitively which operations will have a type
    // check and which are protected by an upstream type check, we can push the lists of guarded properties
    // up the flow graph and drop them on the type checks for the corresponding object symbol.
    if (opnd->IsTypeCheckSeqParticipant())
    {
        // Add this operation to the list of guarded operations for this object symbol.
        HashTable<ObjTypeGuardBucket>* stackSymToGuardedProperties = block->stackSymToGuardedProperties;
        if (stackSymToGuardedProperties == nullptr)
        {
            stackSymToGuardedProperties = HashTable<ObjTypeGuardBucket>::New(this->tempAlloc, 8);
            block->stackSymToGuardedProperties = stackSymToGuardedProperties;
        }

        StackSym* objSym = opnd->GetObjectSym();
        ObjTypeGuardBucket* bucket = stackSymToGuardedProperties->FindOrInsertNew(objSym->m_id);
        BVSparse<JitArenaAllocator>* guardedPropertyOps = bucket->GetGuardedPropertyOps();
        if (guardedPropertyOps == nullptr)
        {
            // The bit vectors we push around the flow graph only need to live as long as this phase.
            guardedPropertyOps = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
            bucket->SetGuardedPropertyOps(guardedPropertyOps);
        }

#if DBG
        FOREACH_BITSET_IN_SPARSEBV(propOpId, guardedPropertyOps)
        {
            ObjTypeSpecFldInfo* existingFldInfo = this->func->GetGlobalObjTypeSpecFldInfo(propOpId);
            Assert(existingFldInfo != nullptr);

            if (existingFldInfo->GetPropertyId() != opnd->GetPropertyId())
            {
                continue;
            }

            // It would be very nice to assert that the info we have for this property matches all properties guarded thus far.
            // Unfortunately, in some cases of object pointer copy propagation into a loop, we may end up with conflicting
            // information for the same property. We simply ignore the conflict and emit an equivalent type check, which
            // will attempt to check for one property on two different slots, and obviously fail. Thus we may have a
            // guaranteed bailout, but we'll simply re-JIT with equivalent object type spec disabled. To avoid this
            // issue altogether, we would need to track the set of guarded properties along with the type value in the
            // forward pass, and when a conflict is detected either not optimize the offending instruction, or correct
            // its information based on the info from the property in the type value info.
            //Assert(!existingFldInfo->IsPoly() || !opnd->IsPoly() || GlobOpt::AreTypeSetsIdentical(existingFldInfo->GetEquivalentTypeSet(), opnd->GetEquivalentTypeSet()));
            //Assert(existingFldInfo->GetSlotIndex() == opnd->GetSlotIndex());

            if (PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->func) && !JITManager::GetJITManager()->IsJITServer())
            {
                if (existingFldInfo->IsPoly() && opnd->IsPoly() &&
                    (!GlobOpt::AreTypeSetsIdentical(existingFldInfo->GetEquivalentTypeSet(), opnd->GetEquivalentTypeSet()) ||
                    (existingFldInfo->GetSlotIndex() != opnd->GetSlotIndex())))
                {
                    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Output::Print(_u("EquivObjTypeSpec: top function %s (%s): duplicate property clash on %s(#%d) on operation %u \n"),
                        this->func->GetJITFunctionBody()->GetDisplayName(), this->func->GetDebugNumberSet(debugStringBuffer),
                        this->func->GetInProcThreadContext()->GetPropertyRecord(opnd->GetPropertyId())->GetBuffer(), opnd->GetPropertyId(), opnd->GetObjTypeSpecFldId());
                    Output::Flush();
                }
            }
        }
        NEXT_BITSET_IN_SPARSEBV
#endif

        bucket->AddToGuardedPropertyOps(opnd->GetObjTypeSpecFldId());

        if (opnd->NeedsMonoCheck())
        {
            Assert(opnd->IsMono());
            JITTypeHolder monoGuardType = opnd->IsInitialTypeChecked() ? opnd->GetInitialType() : opnd->GetType();
            bucket->SetMonoGuardType(monoGuardType);
        }

        if (opnd->NeedsPrimaryTypeCheck())
        {
            // Grab the guarded properties which match this type check with respect to polymorphism and drop them
            // on the operand. Only equivalent type checks can protect polymorphic properties to avoid a case where
            // we have 1) a cache with type set {t1, t2} and property a, followed by 2) a cache with type t3 and
            // property b, and 3) a cache with type set {t1, t2} and property c, where the slot index of property c
            // on t1 and t2 is different than on t3. If cache 2 were to protect property c it would not verify that
            // it resides on the correct slot for cache 3. Yes, an equivalent type check could protect monomorphic
            // properties, but it would then unnecessarily verify their equivalence on the slow path.
            // Also, make sure the guarded properties on the operand are allocated from the func's allocator to
            // persist until lowering.
            Assert(guardedPropertyOps != nullptr);
            opnd->EnsureGuardedPropOps(this->func->m_alloc);
            opnd->AddGuardedPropOps(guardedPropertyOps);

            if (this->currentInstr->HasTypeCheckBailOut())
            {
                // Stop pushing the mono guard type up if it is being checked here.
                if (bucket->NeedsMonoCheck())
                {
                    if (this->currentInstr->HasEquivalentTypeCheckBailOut())
                    {
                        // Some instr protected by this one requires a monomorphic type check. (E.g., final type opt,
                        // fixed field not loaded from prototype.) Note the IsTypeAvailable test above: only do this at
                        // the initial type check that protects this path.
                        opnd->SetMonoGuardType(bucket->GetMonoGuardType());
                        this->currentInstr->ChangeEquivalentToMonoTypeCheckBailOut();
                    }
                    bucket->SetMonoGuardType(nullptr);
                }

                if (!opnd->IsTypeAvailable())
                {
                    // Stop tracking the guarded properties if there's not another type check upstream.
                    bucket->SetGuardedPropertyOps(nullptr);
                    JitAdelete(this->tempAlloc, guardedPropertyOps);
                    block->stackSymToGuardedProperties->Clear(objSym->m_id);
                }
            }
#if DBG
            {
                // If there is no upstream type check that is live and could protect guarded properties, we better
                // not have any properties remaining.
                ObjTypeGuardBucket* objTypeGuardBucket = block->stackSymToGuardedProperties->Get(opnd->GetObjectSym()->m_id);
                Assert(opnd->IsTypeAvailable() || objTypeGuardBucket == nullptr || objTypeGuardBucket->GetGuardedPropertyOps()->IsEmpty());
            }
#endif
        }
    }
    else if (opnd->NeedsLocalTypeCheck())
    {
        opnd->EnsureGuardedPropOps(this->func->m_alloc);
        opnd->SetGuardedPropOp(opnd->GetObjTypeSpecFldId());
    }

    if (opnd->UsesAuxSlot() && opnd->IsTypeCheckSeqParticipant() && !opnd->HasTypeMismatch() && !opnd->IsLoadedFromProto())
    {
        bool auxSlotPtrUpwardExposed = false;
        StackSym *auxSlotPtrSym = opnd->GetAuxSlotPtrSym();
        if (opnd->IsAuxSlotPtrSymAvailable())
        {
            // This is an upward-exposed use of the aux slot pointer.
            Assert(auxSlotPtrSym);
            auxSlotPtrUpwardExposed = this->currentBlock->upwardExposedUses->TestAndSet(auxSlotPtrSym->m_id);
        }
        else if (auxSlotPtrSym != nullptr)
        {
            // The aux slot pointer is not upward-exposed at this point.
            auxSlotPtrUpwardExposed = this->currentBlock->upwardExposedUses->TestAndClear(auxSlotPtrSym->m_id);
        }
        if (!this->IsPrePass() && auxSlotPtrUpwardExposed)
        {
            opnd->SetProducesAuxSlotPtr(true);
        }
    }
}
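
// TrackObjTypeSpecWriteGuards: in the backward phase, accumulate write guard syms for monomorphic operations
// that may need write guard protection (loads from proto, fixed field checks) so that an upstream type check
// can register them for invalidation; in the dead-store phase, mark guards as checked when the property is
// provably never written in this function, and clear guards from operands that won't emit a primary type check.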
void
BackwardPass::TrackObjTypeSpecWriteGuards(IR::PropertySymOpnd *opnd, BasicBlock *block)
{
    // TODO (ObjTypeSpec): Move write guard tracking to the forward pass, by recording on the type value
    // which property IDs have been written since the last type check. This will result in more accurate
    // tracking in cases when object pointer copy prop kicks in.
    if (this->tag == Js::BackwardPhase)
    {
        // If this operation may need a write guard (load from proto or fixed field check) then add its
        // write guard symbol to the map for this object. If it remains live (hasn't been written to)
        // until the type check upstream, it will get recorded there so that the type check can be registered
        // for invalidation on this property used in this operation.

        // (ObjTypeSpec): Consider supporting polymorphic write guards as well. We can't currently distinguish between mono and
        // poly write guards, and a type check can only protect operations matching with respect to polymorphism (see
        // BackwardPass::TrackObjTypeSpecProperties for details), so for now we only target monomorphic operations.
        if (opnd->IsMono() && opnd->MayNeedWriteGuardProtection())
        {
            if (block->stackSymToWriteGuardsMap == nullptr)
            {
                block->stackSymToWriteGuardsMap = HashTable<ObjWriteGuardBucket>::New(this->tempAlloc, 8);
            }

            ObjWriteGuardBucket* bucket = block->stackSymToWriteGuardsMap->FindOrInsertNew(opnd->GetObjectSym()->m_id);

            BVSparse<JitArenaAllocator>* writeGuards = bucket->GetWriteGuards();
            if (writeGuards == nullptr)
            {
                // The bit vectors we push around the flow graph only need to live as long as this phase.
                writeGuards = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
                bucket->SetWriteGuards(writeGuards);
            }

            PropertySym *propertySym = opnd->m_sym->AsPropertySym();
            Assert(propertySym->m_writeGuardSym != nullptr);
            SymID writeGuardSymId = propertySym->m_writeGuardSym->m_id;
            writeGuards->Set(writeGuardSymId);
        }

        // Record any live (upward exposed) write guards on this operation, if this operation may end up with
        // a type check. If we ultimately don't need a type check here, we will simply ignore the guards, because
        // an earlier type check will protect them.
        if (!IsPrePass() && opnd->IsMono() && !opnd->IsTypeDead())
        {
            Assert(opnd->GetWriteGuards() == nullptr);
            if (block->stackSymToWriteGuardsMap != nullptr)
            {
                ObjWriteGuardBucket* bucket = block->stackSymToWriteGuardsMap->Get(opnd->GetObjectSym()->m_id);
                if (bucket != nullptr)
                {
                    // Get all the write guards associated with this object sym and filter them down to those that
                    // are upward exposed. If we end up emitting a type check for this instruction, we will create
                    // a type property guard registered for all guarded proto properties and we will set the write
                    // guard syms live during forward pass, such that we can avoid unnecessary write guard type
                    // checks and bailouts on every proto property (as long as it hasn't been written to since the
                    // primary type check).
                    auto writeGuards = bucket->GetWriteGuards()->CopyNew(this->func->m_alloc);
                    writeGuards->And(block->upwardExposedFields);
                    opnd->SetWriteGuards(writeGuards);
                }
            }
        }
    }
    else
    {
        // If we know this property has never been written to in this function (either on this object or any
        // of its aliases) we don't need the local type check.
        if (opnd->MayNeedWriteGuardProtection() && !opnd->IsWriteGuardChecked() && !MayPropertyBeWrittenTo(opnd->GetPropertyId()))
        {
            opnd->SetWriteGuardChecked(true);
        }

        // If we don't need a primary type check here let's clear the write guards. The primary type check upstream will
        // register the type check for the corresponding properties.
        if (!IsPrePass() && !opnd->NeedsPrimaryTypeCheck())
        {
            opnd->ClearWriteGuards();
        }
    }
}
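
// TrackAddPropertyTypes implements the dataflow for the add-property ("final type") optimization. For stores
// that add a property (initial type -> type with the property), it tracks the eventual final type per object
// sym so that in a sequence like:
//
//     o.x = 1;    // T0 -> T1
//     o.y = 2;    // T1 -> T2
//
// the first store can transition o directly from T0 to the final type T2, making the downstream stores plain
// slot writes.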
void
BackwardPass::TrackAddPropertyTypes(IR::PropertySymOpnd *opnd, BasicBlock *block)
{
    // Do the work of objtypespec add-property opt even if it's disabled by PHASE option, so that we have
    // the dataflow info that can be inspected.
    Assert(this->tag == Js::DeadStorePhase);
    Assert(opnd->IsMono() || opnd->HasEquivalentTypeSet());

    JITTypeHolder typeWithProperty = opnd->IsMono() ? opnd->GetType() : opnd->GetFirstEquivalentType();
    JITTypeHolder typeWithoutProperty = opnd->HasInitialType() ? opnd->GetInitialType() : JITTypeHolder(nullptr);

    if (typeWithoutProperty == nullptr ||
        typeWithProperty == typeWithoutProperty ||
        (opnd->IsTypeChecked() && !opnd->IsInitialTypeChecked()))
    {
        if (!this->IsPrePass() && block->stackSymToFinalType != nullptr && !this->currentInstr->HasBailOutInfo())
        {
            PropertySym *propertySym = opnd->m_sym->AsPropertySym();
            AddPropertyCacheBucket *pBucket =
                block->stackSymToFinalType->Get(propertySym->m_stackSym->m_id);
            if (pBucket && pBucket->GetFinalType() != nullptr && pBucket->GetInitialType() != pBucket->GetFinalType())
            {
                opnd->SetFinalType(pBucket->GetFinalType());
            }
        }
        return;
    }

#if DBG
    Assert(typeWithProperty != nullptr);
    const JITTypeHandler * typeWithoutPropertyTypeHandler = typeWithoutProperty->GetTypeHandler();
    const JITTypeHandler * typeWithPropertyTypeHandler = typeWithProperty->GetTypeHandler();
    // TODO: OOP JIT, reenable assert
    //Assert(typeWithoutPropertyTypeHandler->GetPropertyCount() + 1 == typeWithPropertyTypeHandler->GetPropertyCount());
    AssertMsg(JITTypeHandler::IsTypeHandlerCompatibleForObjectHeaderInlining(typeWithoutPropertyTypeHandler, typeWithPropertyTypeHandler),
        "TypeHandlers are not compatible for transition?");
    Assert(typeWithoutPropertyTypeHandler->GetSlotCapacity() <= typeWithPropertyTypeHandler->GetSlotCapacity());
#endif

    // If there's already a final type for this instance, record it on the operand.
    // If not, start tracking it.
    if (block->stackSymToFinalType == nullptr)
    {
        block->stackSymToFinalType = HashTable<AddPropertyCacheBucket>::New(this->tempAlloc, 8);
    }

    // Find or create the type-tracking record for this instance in this block.
    PropertySym *propertySym = opnd->m_sym->AsPropertySym();
    AddPropertyCacheBucket *pBucket =
        block->stackSymToFinalType->FindOrInsertNew(propertySym->m_stackSym->m_id);

    JITTypeHolder finalType(nullptr);
#if DBG
    JITTypeHolder deadStoreUnavailableFinalType(nullptr);
#endif
    if (pBucket->GetInitialType() == nullptr || opnd->GetType() != pBucket->GetInitialType())
    {
#if DBG
        if (opnd->GetType() == pBucket->deadStoreUnavailableInitialType)
        {
            deadStoreUnavailableFinalType = pBucket->deadStoreUnavailableFinalType;
        }
#endif
        // No info found, or the info was bad, so initialize it from this cache.
        finalType = opnd->GetType();
        pBucket->SetFinalType(finalType);
    }
    else
    {
        // Match: The type we push upward is now the typeWithoutProperty at this point,
        // and the final type is the one we've been tracking.
        finalType = pBucket->GetFinalType();
#if DBG
        deadStoreUnavailableFinalType = pBucket->deadStoreUnavailableFinalType;
#endif
    }

    pBucket->SetInitialType(typeWithoutProperty);

    if (!PHASE_OFF(Js::ObjTypeSpecStorePhase, this->func))
    {
#if DBG
        // We may regress in this case:
        //     if (b)
        //         t1 = {};
        //         o = t1;
        //         o.x =
        //     else
        //         t2 = {};
        //         o = t2;
        //         o.x =
        //     o.y =
        //
        // Where the backward pass will propagate the final type in o.y to o.x, then globopt will copy prop t1 and t2 to o.x,
        // but not o.y (because of the merge). Then, in the dead store pass, o.y's final type will not propagate to t1.x and
        // t2.x respectively, thus regressing the final type. However, in both cases, the types of t1 and t2 are dead anyway.
        //
        // If the type is dead, we don't care that we have regressed it, as no one depends on it to skip a type check anyway.
        if (!opnd->IsTypeDead())
        {
            // This is the type that would have been propagated if we didn't kill it because the type isn't available
            JITTypeHolder checkFinalType = deadStoreUnavailableFinalType != nullptr ? deadStoreUnavailableFinalType : finalType;
            if (opnd->HasFinalType() && opnd->GetFinalType() != checkFinalType)
            {
                // Final type discovery must be progressively better (unless we kill it in the deadstore pass
                // when the type is not available during the forward pass)
                const JITTypeHandler * oldFinalTypeHandler = opnd->GetFinalType()->GetTypeHandler();
                const JITTypeHandler * checkFinalTypeHandler = checkFinalType->GetTypeHandler();

                // TODO: OOP JIT, enable assert
                //Assert(oldFinalTypeHandler->GetPropertyCount() < checkFinalTypeHandler->GetPropertyCount());
                AssertMsg(JITTypeHandler::IsTypeHandlerCompatibleForObjectHeaderInlining(oldFinalTypeHandler, checkFinalTypeHandler),
                    "TypeHandlers should be compatible for transition.");
                Assert(oldFinalTypeHandler->GetSlotCapacity() <= checkFinalTypeHandler->GetSlotCapacity());
            }
        }
#endif
        Assert(opnd->IsBeingAdded());
        if (!this->IsPrePass())
        {
            opnd->SetFinalType(finalType);
        }
        if (!opnd->IsTypeChecked())
        {
            // Transition from initial to final type will only happen at type check points.
            if (opnd->IsTypeAvailable())
            {
                pBucket->SetFinalType(pBucket->GetInitialType());
            }
        }
    }

#if DBG_DUMP
    if (PHASE_TRACE(Js::ObjTypeSpecStorePhase, this->func))
    {
        Output::Print(_u("ObjTypeSpecStore: "));
        this->currentInstr->Dump();
        pBucket->Dump();
    }
#endif

    // In the dead-store pass, we have forward information that tells us whether a "final type"
    // reached this point from an earlier store. If it didn't (i.e., it's not available here),
    // remove it from the backward map so that upstream stores will use the final type that is
    // live there. (This avoids unnecessary bailouts in cases where the final type is only live
    // on one branch of an "if", a case that the initial backward pass can't detect.)
    // An example:
    //     if (cond)
    //         o.x =
    //     o.y =
    if (!opnd->IsTypeAvailable())
    {
#if DBG
        pBucket->deadStoreUnavailableInitialType = pBucket->GetInitialType();
        if (pBucket->deadStoreUnavailableFinalType == nullptr)
        {
            pBucket->deadStoreUnavailableFinalType = pBucket->GetFinalType();
        }
        pBucket->SetInitialType(nullptr);
        pBucket->SetFinalType(nullptr);
#else
        block->stackSymToFinalType->Clear(propertySym->m_stackSym->m_id);
#endif
    }
}
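
// The InsertTypeTransition* helpers materialize a pending final-type transition as an explicit AdjustObjType
// instruction (dst = final type, src1 = object, src2 = initial type) at a point past which the transition can
// no longer be pushed up.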
void
BackwardPass::InsertTypeTransition(IR::Instr *instrInsertBefore, int symId, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses)
{
    StackSym *objSym = this->func->m_symTable->FindStackSym(symId);
    Assert(objSym);
    this->InsertTypeTransition(instrInsertBefore, objSym, data, upwardExposedUses);
}

void
BackwardPass::InsertTypeTransition(IR::Instr *instrInsertBefore, StackSym *objSym, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses)
{
    Assert(!this->IsPrePass());

    IR::RegOpnd *baseOpnd = IR::RegOpnd::New(objSym, TyMachReg, this->func);
    baseOpnd->SetIsJITOptimizedReg(true);

    JITTypeHolder initialType = data->GetInitialType();
    IR::AddrOpnd *initialTypeOpnd =
        IR::AddrOpnd::New(data->GetInitialType()->GetAddr(), IR::AddrOpndKindDynamicType, this->func);
    initialTypeOpnd->m_metadata = initialType.t;

    JITTypeHolder finalType = data->GetFinalType();
    IR::AddrOpnd *finalTypeOpnd =
        IR::AddrOpnd::New(data->GetFinalType()->GetAddr(), IR::AddrOpndKindDynamicType, this->func);
    finalTypeOpnd->m_metadata = finalType.t;

    IR::Instr *adjustTypeInstr =
        IR::Instr::New(Js::OpCode::AdjustObjType, finalTypeOpnd, baseOpnd, initialTypeOpnd, this->func);

    if (upwardExposedUses)
    {
        // If this type change causes a slot adjustment, the aux slot pointer (if any) will be reloaded here,
        // so take it out of upwardExposedUses.
        int oldCount;
        int newCount;
        Js::PropertyIndex inlineSlotCapacity;
        Js::PropertyIndex newInlineSlotCapacity;
        bool needSlotAdjustment =
            JITTypeHandler::NeedSlotAdjustment(initialType->GetTypeHandler(), finalType->GetTypeHandler(),
                &oldCount, &newCount, &inlineSlotCapacity, &newInlineSlotCapacity);
        if (needSlotAdjustment)
        {
            StackSym *auxSlotPtrSym = baseOpnd->m_sym->GetAuxSlotPtrSym();
            if (auxSlotPtrSym)
            {
                if (upwardExposedUses->Test(auxSlotPtrSym->m_id))
                {
                    adjustTypeInstr->m_opcode = Js::OpCode::AdjustObjTypeReloadAuxSlotPtr;
                }
            }
        }
    }

    instrInsertBefore->InsertBefore(adjustTypeInstr);
}

void
BackwardPass::InsertTypeTransitionAfterInstr(IR::Instr *instr, int symId, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses)
{
    if (!this->IsPrePass())
    {
        // Transition to the final type if we don't bail out.
        if (instr->EndsBasicBlock())
        {
            // The instr with the bailout is something like a branch that may not fall through.
            // Insert the transitions instead at the beginning of each successor block.
            this->InsertTypeTransitionsAtPriorSuccessors(this->currentBlock, nullptr, symId, data, upwardExposedUses);
        }
        else
        {
            this->InsertTypeTransition(instr->m_next, symId, data, upwardExposedUses);
        }
    }
    // Note: we could probably clear this entry out of the table, but I don't know
    // whether it's worth it, because it's likely coming right back.
    data->SetFinalType(data->GetInitialType());
}

void
BackwardPass::InsertTypeTransitionAtBlock(BasicBlock *block, int symId, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses)
{
    bool inserted = false;
    FOREACH_INSTR_IN_BLOCK(instr, block)
    {
        if (instr->IsRealInstr())
        {
            // Check for pre-existing type transition. There may be more than one AdjustObjType here,
            // so look at them all.
            if (instr->m_opcode == Js::OpCode::AdjustObjType)
            {
                if (instr->GetSrc1()->AsRegOpnd()->m_sym->m_id == (SymID)symId)
                {
                    // This symbol already has a type transition at this point.
                    // It *must* be doing the same transition we're already trying to do.
                    Assert((intptr_t)instr->GetDst()->AsAddrOpnd()->m_address == data->GetFinalType()->GetAddr() &&
                           (intptr_t)instr->GetSrc2()->AsAddrOpnd()->m_address == data->GetInitialType()->GetAddr());
                    // Nothing to do.
                    return;
                }
            }
            else
            {
                this->InsertTypeTransition(instr, symId, data, upwardExposedUses);
                inserted = true;
                break;
            }
        }
    }
    NEXT_INSTR_IN_BLOCK;
    if (!inserted)
    {
        Assert(block->GetLastInstr()->m_next);
        this->InsertTypeTransition(block->GetLastInstr()->m_next, symId, data, upwardExposedUses);
    }
}

void
BackwardPass::InsertTypeTransitionsAtPriorSuccessors(
    BasicBlock *block,
    BasicBlock *blockSucc,
    int symId,
    AddPropertyCacheBucket *data,
    BVSparse<JitArenaAllocator>* upwardExposedUses)
{
    // For each successor of block prior to blockSucc, adjust the type.
    FOREACH_SUCCESSOR_BLOCK(blockFix, block)
    {
        if (blockFix == blockSucc)
        {
            return;
        }
        this->InsertTypeTransitionAtBlock(blockFix, symId, data, upwardExposedUses);
    }
    NEXT_SUCCESSOR_BLOCK;
}
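
// InsertTypeTransitionsAtPotentialKills flushes pending transitions at instructions a final type cannot be
// hoisted past: bailout points, ctor cache updates, and object-header-inlined loads/stores where another sym's
// transition out of an object-header-inlined layout could affect an alias.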
void
BackwardPass::InsertTypeTransitionsAtPotentialKills()
{
    // Final types can't be pushed up past certain instructions.
    IR::Instr *instr = this->currentInstr;

    if (instr->HasBailOutInfo() || instr->m_opcode == Js::OpCode::UpdateNewScObjectCache)
    {
        // Final types can't be pushed up past a bailout point.
        // Insert any transitions called for by the current state of add-property buckets.
        // Also do this for ctor cache updates, to avoid putting a type in the ctor cache that extends past
        // the end of the ctor that the cache covers.
        this->ForEachAddPropertyCacheBucket([&](int symId, AddPropertyCacheBucket *data)->bool {
            this->InsertTypeTransitionAfterInstr(instr, symId, data, this->currentBlock->upwardExposedUses);
            return false;
        });
    }
    else
    {
        // If this is a load/store that expects an object-header-inlined type, don't push another sym's transition from
        // object-header-inlined to non-object-header-inlined type past it, because the two syms may be aliases.
        IR::PropertySymOpnd *propertySymOpnd = instr->GetPropertySymOpnd();
        if (propertySymOpnd && propertySymOpnd->IsObjectHeaderInlined())
        {
            SymID opndId = propertySymOpnd->m_sym->AsPropertySym()->m_stackSym->m_id;
            this->ForEachAddPropertyCacheBucket([&](int symId, AddPropertyCacheBucket *data)->bool {
                if ((SymID)symId == opndId)
                {
                    // This is the sym we're tracking. No aliasing to worry about.
                    return false;
                }
                if (propertySymOpnd->IsMono() && data->GetInitialType() != propertySymOpnd->GetType())
                {
                    // Type mismatch in a monomorphic case -- no aliasing.
                    return false;
                }
                if (this->TransitionUndoesObjectHeaderInlining(data))
                {
                    // We're transitioning from inlined to non-inlined, so we can't push it up any farther.
                    this->InsertTypeTransitionAfterInstr(instr, symId, data, this->currentBlock->upwardExposedUses);
                }
                return false;
            });
        }
    }
}

template<class Fn>
void
BackwardPass::ForEachAddPropertyCacheBucket(Fn fn)
{
    BasicBlock *block = this->currentBlock;
    if (block->stackSymToFinalType == nullptr)
    {
        return;
    }

    FOREACH_HASHTABLE_ENTRY(AddPropertyCacheBucket, bucket, block->stackSymToFinalType)
    {
        AddPropertyCacheBucket *data = &bucket.element;
        if (data->GetInitialType() != nullptr &&
            data->GetInitialType() != data->GetFinalType())
        {
            bool done = fn(bucket.value, data);
            if (done)
            {
                break;
            }
        }
    }
    NEXT_HASHTABLE_ENTRY;
}

bool
BackwardPass::TransitionUndoesObjectHeaderInlining(AddPropertyCacheBucket *data) const
{
    JITTypeHolder type = data->GetInitialType();
    if (type == nullptr || !Js::DynamicType::Is(type->GetTypeId()))
    {
        return false;
    }
    if (!type->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler())
    {
        return false;
    }

    type = data->GetFinalType();
    if (type == nullptr || !Js::DynamicType::Is(type->GetTypeId()))
    {
        return false;
    }
    return !type->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler();
}

void
BackwardPass::CollectCloneStrCandidate(IR::Opnd * opnd)
{
    IR::RegOpnd *regOpnd = opnd->AsRegOpnd();
    Assert(regOpnd != nullptr);
    StackSym *sym = regOpnd->m_sym;

    if (tag == Js::BackwardPhase
        && currentInstr->m_opcode == Js::OpCode::Add_A
        && currentInstr->GetSrc1() == opnd
        && !this->IsPrePass()
        && !this->IsCollectionPass()
        && this->currentBlock->loop)
    {
        Assert(currentBlock->cloneStrCandidates != nullptr);
        currentBlock->cloneStrCandidates->Set(sym->m_id);
    }
}

void
BackwardPass::InvalidateCloneStrCandidate(IR::Opnd * opnd)
{
    IR::RegOpnd *regOpnd = opnd->AsRegOpnd();
    Assert(regOpnd != nullptr);
    StackSym *sym = regOpnd->m_sym;

    if (tag == Js::BackwardPhase &&
        (currentInstr->m_opcode != Js::OpCode::Add_A || currentInstr->GetSrc1()->AsRegOpnd()->m_sym->m_id != sym->m_id) &&
        !this->IsPrePass() &&
        !this->IsCollectionPass() &&
        this->currentBlock->loop)
    {
        currentBlock->cloneStrCandidates->Clear(sym->m_id);
    }
}
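
// ProcessUse is the per-operand entry point for use processing, switching on the operand kind (reg, sym,
// indir). It marks last uses dead via DoSetDead, feeds the temp trackers, routes array reg operands and
// property sym operands to their specialized handlers, and widens the bailout kind on BailOnNotArray when a
// no-missing-values guarantee is still needed downstream.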
void
BackwardPass::ProcessUse(IR::Opnd * opnd)
{
    switch (opnd->GetKind())
    {
    case IR::OpndKindReg:
    {
        IR::RegOpnd *regOpnd = opnd->AsRegOpnd();
        StackSym *sym = regOpnd->m_sym;

        if (!IsCollectionPass())
        {
            // isTempLastUse is only used for string concat right now, so let's not mark it if it's not a string.
            // If it's upward exposed, it is not its last use.
            if (regOpnd->m_isTempLastUse && (regOpnd->GetValueType().IsNotString() || this->currentBlock->upwardExposedUses->Test(sym->m_id) || sym->m_mayNotBeTempLastUse))
            {
                regOpnd->m_isTempLastUse = false;
            }
            this->CollectCloneStrCandidate(opnd);
        }

        this->DoSetDead(regOpnd, !this->ProcessSymUse(sym, true, regOpnd->GetIsJITOptimizedReg()));

        if (IsCollectionPass())
        {
            break;
        }

        if (tag == Js::DeadStorePhase && regOpnd->IsArrayRegOpnd())
        {
            ProcessArrayRegOpndUse(currentInstr, regOpnd->AsArrayRegOpnd());
        }

        if (currentInstr->m_opcode == Js::OpCode::BailOnNotArray)
        {
            Assert(tag == Js::DeadStorePhase);

            const ValueType valueType(regOpnd->GetValueType());
            if(valueType.IsLikelyArrayOrObjectWithArray())
            {
                currentBlock->noImplicitCallUses->Clear(sym->m_id);

                // We are conservative here and always check for missing values if any tracked sym expects
                // no missing values, because we don't know which syms are equivalent to (copied from) the
                // one we are testing right now.
                if(valueType.HasNoMissingValues() &&
                    !currentBlock->noImplicitCallNoMissingValuesUses->IsEmpty() &&
                    !IsPrePass())
                {
                    // There is a use of this sym that requires this array to have no missing values, so this instruction
                    // needs to bail out if the array has missing values.
                    Assert(currentInstr->GetBailOutKind() == IR::BailOutOnNotArray ||
                        currentInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray);
                    currentInstr->SetBailOutKind(currentInstr->GetBailOutKind() | IR::BailOutOnMissingValue);
                }
                currentBlock->noImplicitCallNoMissingValuesUses->Clear(sym->m_id);
                currentBlock->noImplicitCallNativeArrayUses->Clear(sym->m_id);
            }
        }
    }
    break;
    case IR::OpndKindSym:
    {
        IR::SymOpnd *symOpnd = opnd->AsSymOpnd();
        Sym * sym = symOpnd->m_sym;

        this->DoSetDead(symOpnd, !this->ProcessSymUse(sym, false, opnd->GetIsJITOptimizedReg()));

        if (IsCollectionPass())
        {
            break;
        }

        if (sym->IsPropertySym())
        {
            // TODO: We don't have last use info for property syms,
            // and we don't set the last use of the stack sym inside the property sym.
            if (tag == Js::BackwardPhase)
            {
                if (opnd->AsSymOpnd()->IsPropertySymOpnd())
                {
                    this->globOpt->PreparePropertySymOpndForTypeCheckSeq(symOpnd->AsPropertySymOpnd(), this->currentInstr, this->currentBlock->loop);
                }
            }

            if (this->DoMarkTempNumbersOnTempObjects())
            {
                this->currentBlock->tempNumberTracker->ProcessPropertySymUse(symOpnd, this->currentInstr, this);
            }

            if (symOpnd->IsPropertySymOpnd())
            {
                this->ProcessPropertySymOpndUse(symOpnd->AsPropertySymOpnd());
            }
        }
    }
    break;
    case IR::OpndKindIndir:
    {
        IR::IndirOpnd * indirOpnd = opnd->AsIndirOpnd();
        IR::RegOpnd * baseOpnd = indirOpnd->GetBaseOpnd();

        this->DoSetDead(baseOpnd, !this->ProcessSymUse(baseOpnd->m_sym, false, baseOpnd->GetIsJITOptimizedReg()));

        IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
        if (indexOpnd)
        {
            this->DoSetDead(indexOpnd, !this->ProcessSymUse(indexOpnd->m_sym, false, indexOpnd->GetIsJITOptimizedReg()));
        }

        if(IsCollectionPass())
        {
            break;
        }

        if (this->DoMarkTempNumbersOnTempObjects())
        {
            this->currentBlock->tempNumberTracker->ProcessIndirUse(indirOpnd, currentInstr, this);
        }

        if(tag == Js::DeadStorePhase && baseOpnd->IsArrayRegOpnd())
        {
            ProcessArrayRegOpndUse(currentInstr, baseOpnd->AsArrayRegOpnd());
        }
    }
    break;
    }
}

bool
BackwardPass::ProcessPropertySymUse(PropertySym *propertySym)
{
    Assert(this->tag == Js::BackwardPhase);

    BasicBlock *block = this->currentBlock;

    bool isLive = !!block->upwardExposedFields->TestAndSet(propertySym->m_id);

    if (propertySym->m_propertyEquivSet)
    {
        block->upwardExposedFields->Or(propertySym->m_propertyEquivSet);
    }

    return isLive;
}

void
BackwardPass::MarkTemp(StackSym * sym)
{
    Assert(!IsCollectionPass());
    // Don't care about type specialized syms
    if (!sym->IsVar())
    {
        return;
    }

    BasicBlock * block = this->currentBlock;
    if (this->DoMarkTempNumbers())
    {
        Assert((block->loop != nullptr) == block->tempNumberTracker->HasTempTransferDependencies());
        block->tempNumberTracker->MarkTemp(sym, this);
    }
    if (this->DoMarkTempObjects())
    {
        Assert((block->loop != nullptr) == block->tempObjectTracker->HasTempTransferDependencies());
        block->tempObjectTracker->MarkTemp(sym, this);
    }
#if DBG
    if (this->DoMarkTempObjectVerify())
    {
        Assert((block->loop != nullptr) == block->tempObjectVerifyTracker->HasTempTransferDependencies());
        block->tempObjectVerifyTracker->MarkTemp(sym, this);
    }
#endif
}

void
BackwardPass::MarkTempProcessInstr(IR::Instr * instr)
{
    Assert(!IsCollectionPass());

    if (this->currentBlock->isDead)
    {
        return;
    }

    BasicBlock * block;
    block = this->currentBlock;

    if (this->DoMarkTempNumbers())
    {
        block->tempNumberTracker->ProcessInstr(instr, this);
    }

    if (this->DoMarkTempObjects())
    {
        block->tempObjectTracker->ProcessInstr(instr);
    }

#if DBG
    if (this->DoMarkTempObjectVerify())
    {
        block->tempObjectVerifyTracker->ProcessInstr(instr, this);
    }
#endif
}

#if DBG_DUMP
void
BackwardPass::DumpMarkTemp()
{
    Assert(!IsCollectionPass());

    BasicBlock * block = this->currentBlock;
    if (this->DoMarkTempNumbers())
    {
        block->tempNumberTracker->Dump();
    }
    if (this->DoMarkTempObjects())
    {
        block->tempObjectTracker->Dump();
    }
#if DBG
    if (this->DoMarkTempObjectVerify())
    {
        block->tempObjectVerifyTracker->Dump();
    }
#endif
}
#endif
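
// The four helpers below maintain per-block bit vectors of syms whose value is consumed only as a number, or
// only by bit operations. A sym is marked only at its last use (i.e., when it is not upwards-exposed); any
// other kind of use clears the mark.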
void
BackwardPass::SetSymIsUsedOnlyInNumberIfLastUse(IR::Opnd *const opnd)
{
    StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
    if (stackSym && !currentBlock->upwardExposedUses->Test(stackSym->m_id))
    {
        symUsedOnlyForNumberBySymId->Set(stackSym->m_id);
    }
}

void
BackwardPass::SetSymIsNotUsedOnlyInNumber(IR::Opnd *const opnd)
{
    StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
    if (stackSym)
    {
        symUsedOnlyForNumberBySymId->Clear(stackSym->m_id);
    }
}

void
BackwardPass::SetSymIsUsedOnlyInBitOpsIfLastUse(IR::Opnd *const opnd)
{
    StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
    if (stackSym && !currentBlock->upwardExposedUses->Test(stackSym->m_id))
    {
        symUsedOnlyForBitOpsBySymId->Set(stackSym->m_id);
    }
}

void
BackwardPass::SetSymIsNotUsedOnlyInBitOps(IR::Opnd *const opnd)
{
    StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
    if (stackSym)
    {
        symUsedOnlyForBitOpsBySymId->Clear(stackSym->m_id);
    }
}
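
// TrackBitWiseOrNumberOp transfers the "used only in bit ops / used only as a number" bits from a def's sym
// onto the defining instruction (dstIsAlwaysConvertedToInt32 / dstIsAlwaysConvertedToNumber) and then
// propagates the property to the srcs. E.g., in something like:
//
//     y = a & b;   // a and b are consumed by a bit op, so at their last use they stay bit-ops-only
//
// while any opcode that can let a src value escape clears both marks for its srcs.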
void
BackwardPass::TrackBitWiseOrNumberOp(IR::Instr *const instr)
{
    Assert(instr);
    const bool trackBitWiseop = DoTrackBitOpsOrNumber();
    const bool trackNumberop = trackBitWiseop;
    const Js::OpCode opcode = instr->m_opcode;
    StackSym *const dstSym = IR::RegOpnd::TryGetStackSym(instr->GetDst());

    if (!trackBitWiseop && !trackNumberop)
    {
        return;
    }

    if (!instr->IsRealInstr())
    {
        return;
    }

    if (dstSym)
    {
        // For a dst where the def is in this block, transfer the current info into the instruction
        if (trackBitWiseop && symUsedOnlyForBitOpsBySymId->TestAndClear(dstSym->m_id))
        {
            instr->dstIsAlwaysConvertedToInt32 = true;
        }
        if (trackNumberop && symUsedOnlyForNumberBySymId->TestAndClear(dstSym->m_id))
        {
            instr->dstIsAlwaysConvertedToNumber = true;
        }
    }

    // If the instruction can cause src values to escape the local scope, the srcs can't be optimized
    if (OpCodeAttr::NonTempNumberSources(opcode))
    {
        if (trackBitWiseop)
        {
            SetSymIsNotUsedOnlyInBitOps(instr->GetSrc1());
            SetSymIsNotUsedOnlyInBitOps(instr->GetSrc2());
        }
        if (trackNumberop)
        {
            SetSymIsNotUsedOnlyInNumber(instr->GetSrc1());
            SetSymIsNotUsedOnlyInNumber(instr->GetSrc2());
        }
        return;
    }

    if (trackBitWiseop)
    {
        switch (opcode)
        {
            // Instructions that can cause src values to escape the local scope have already been excluded

        case Js::OpCode::Not_A:
        case Js::OpCode::And_A:
        case Js::OpCode::Or_A:
        case Js::OpCode::Xor_A:
        case Js::OpCode::Shl_A:
        case Js::OpCode::Shr_A:
        case Js::OpCode::Not_I4:
        case Js::OpCode::And_I4:
        case Js::OpCode::Or_I4:
        case Js::OpCode::Xor_I4:
        case Js::OpCode::Shl_I4:
        case Js::OpCode::Shr_I4:
            // These instructions consume their srcs as int32 values, so a src whose last use is here is used
            // only by bit ops
            SetSymIsUsedOnlyInBitOpsIfLastUse(instr->GetSrc1());
            SetSymIsUsedOnlyInBitOpsIfLastUse(instr->GetSrc2());
            break;

        default:
            SetSymIsNotUsedOnlyInBitOps(instr->GetSrc1());
            SetSymIsNotUsedOnlyInBitOps(instr->GetSrc2());
            break;
        }
    }

    if (trackNumberop)
    {
        switch (opcode)
        {
            // Instructions that can cause src values to escape the local scope have already been excluded

        case Js::OpCode::Conv_Num:
        case Js::OpCode::Div_A:
        case Js::OpCode::Mul_A:
        case Js::OpCode::Sub_A:
        case Js::OpCode::Rem_A:
        case Js::OpCode::Incr_A:
        case Js::OpCode::Decr_A:
        case Js::OpCode::Neg_A:
        case Js::OpCode::Not_A:
        case Js::OpCode::ShrU_A:
        case Js::OpCode::ShrU_I4:
        case Js::OpCode::And_A:
        case Js::OpCode::Or_A:
        case Js::OpCode::Xor_A:
        case Js::OpCode::Shl_A:
        case Js::OpCode::Shr_A:
            // These instructions consume their srcs as numbers, so a src whose last use is here is used only
            // as a number
            SetSymIsUsedOnlyInNumberIfLastUse(instr->GetSrc1());
            SetSymIsUsedOnlyInNumberIfLastUse(instr->GetSrc2());
            break;

        default:
            SetSymIsNotUsedOnlyInNumber(instr->GetSrc1());
            SetSymIsNotUsedOnlyInNumber(instr->GetSrc2());
            break;
        }
    }
}

void
BackwardPass::RemoveNegativeZeroBailout(IR::Instr* instr)
{
    Assert(instr->HasBailOutInfo() && (instr->GetBailOutKind() & IR::BailOutOnNegativeZero));
    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
    bailOutKind = bailOutKind & ~IR::BailOutOnNegativeZero;
    if (bailOutKind)
    {
        instr->SetBailOutKind(bailOutKind);
    }
    else
    {
        instr->ClearBailOutInfo();
        if (preOpBailOutInstrToProcess == instr)
        {
            preOpBailOutInstrToProcess = nullptr;
        }
    }
}
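
// TrackIntUsage is the analogous backward tracking for -0 and integer overflow. A def whose sym is in
// negativeZeroDoesNotMatterBySymId can have its BailOutOnNegativeZero removed; e.g., in something like:
//
//     x = a * b;
//     y = x | 0;   // x is only used where -0 and +0 behave identically, so the multiply need not bail on -0
//
// Division is the asymmetric case handled below: for "a / b", -0 still matters for the divisor b.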
void
BackwardPass::TrackIntUsage(IR::Instr *const instr)
{
    Assert(instr);

    const bool trackNegativeZero = DoTrackNegativeZero();
    const bool trackIntOverflow = DoTrackIntOverflow();
    const bool trackCompoundedIntOverflow = DoTrackCompoundedIntOverflow();
    const bool trackNon32BitOverflow = DoTrackNon32BitOverflow();

    if(!(trackNegativeZero || trackIntOverflow || trackCompoundedIntOverflow))
    {
        return;
    }

    const Js::OpCode opcode = instr->m_opcode;
    if(trackCompoundedIntOverflow && opcode == Js::OpCode::StatementBoundary && instr->AsPragmaInstr()->m_statementIndex == 0)
    {
        // Cannot bail out before the first statement boundary, so the range cannot extend beyond this instruction
        Assert(!instr->ignoreIntOverflowInRange);
        EndIntOverflowDoesNotMatterRange();
        return;
    }

    if(!instr->IsRealInstr())
    {
        return;
    }

    StackSym *const dstSym = IR::RegOpnd::TryGetStackSym(instr->GetDst());
    bool ignoreIntOverflowCandidate = false;
    if(dstSym)
    {
        // For a dst where the def is in this block, transfer the current info into the instruction
        if(trackNegativeZero)
        {
            if (negativeZeroDoesNotMatterBySymId->Test(dstSym->m_id))
            {
                instr->ignoreNegativeZero = true;
            }

            if (tag == Js::DeadStorePhase)
            {
                if (negativeZeroDoesNotMatterBySymId->TestAndClear(dstSym->m_id))
                {
                    if (instr->HasBailOutInfo())
                    {
                        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
                        if (bailOutKind & IR::BailOutOnNegativeZero)
                        {
                            RemoveNegativeZeroBailout(instr);
                        }
                    }
                }
                else
                {
                    if (instr->HasBailOutInfo())
                    {
                        if (instr->GetBailOutKind() & IR::BailOutOnNegativeZero)
                        {
                            if (this->currentBlock->couldRemoveNegZeroBailoutForDef->TestAndClear(dstSym->m_id))
                            {
                                RemoveNegativeZeroBailout(instr);
                            }
                        }
                        // This instruction could potentially bail out. Hence, we cannot reliably remove negative zero
                        // bailouts upstream. If we did, and the operation actually produced a -0, and this instruction
                        // bailed out, we'd use +0 instead of -0 in the interpreter.
                        this->currentBlock->couldRemoveNegZeroBailoutForDef->ClearAll();
                    }
                }
            }
            else
            {
                this->negativeZeroDoesNotMatterBySymId->Clear(dstSym->m_id);
            }
        }
        if(trackIntOverflow)
        {
            ignoreIntOverflowCandidate = !!intOverflowDoesNotMatterBySymId->TestAndClear(dstSym->m_id);
            if(trackCompoundedIntOverflow)
            {
                instr->ignoreIntOverflowInRange = !!intOverflowDoesNotMatterInRangeBySymId->TestAndClear(dstSym->m_id);
            }
        }
    }

    // If the instruction can cause src values to escape the local scope, the srcs can't be optimized
    if(OpCodeAttr::NonTempNumberSources(opcode))
    {
        if(trackNegativeZero)
        {
            SetNegativeZeroMatters(instr->GetSrc1());
            SetNegativeZeroMatters(instr->GetSrc2());
        }
        if(trackIntOverflow)
        {
            SetIntOverflowMatters(instr->GetSrc1());
            SetIntOverflowMatters(instr->GetSrc2());
            if(trackCompoundedIntOverflow)
            {
                instr->ignoreIntOverflowInRange = false;
                SetIntOverflowMattersInRange(instr->GetSrc1());
                SetIntOverflowMattersInRange(instr->GetSrc2());
                EndIntOverflowDoesNotMatterRange();
            }
        }
        return;
    }

    // -0 tracking
    if(trackNegativeZero)
    {
        switch(opcode)
        {
            // Instructions that can cause src values to escape the local scope have already been excluded

            case Js::OpCode::FromVar:
            case Js::OpCode::Conv_Prim:
                Assert(dstSym);
                Assert(instr->GetSrc1());
                Assert(!instr->GetSrc2() || instr->GetDst()->GetType() == instr->GetSrc1()->GetType());

                if(instr->GetDst()->IsInt32())
                {
                    // Conversion to int32 that is either explicit, or has a bailout check ensuring that it's an int value
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
                    break;
                }
                // fall-through

            default:
                if(dstSym && !instr->ignoreNegativeZero)
                {
                    // -0 matters for dst, so -0 also matters for srcs
                    SetNegativeZeroMatters(instr->GetSrc1());
                    SetNegativeZeroMatters(instr->GetSrc2());
                    break;
                }
                if(opcode == Js::OpCode::Div_A || opcode == Js::OpCode::Div_I4)
                {
                    // src1 is being divided by src2, so -0 matters for src2
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
                    SetNegativeZeroMatters(instr->GetSrc2());
                    break;
                }
                // fall-through

            case Js::OpCode::Incr_A:
            case Js::OpCode::Decr_A:
                // Adding 1 to something or subtracting 1 from something does not generate -0

            case Js::OpCode::Not_A:
            case Js::OpCode::And_A:
            case Js::OpCode::Or_A:
            case Js::OpCode::Xor_A:
            case Js::OpCode::Shl_A:
            case Js::OpCode::Shr_A:
            case Js::OpCode::ShrU_A:
            case Js::OpCode::Not_I4:
            case Js::OpCode::And_I4:
            case Js::OpCode::Or_I4:
            case Js::OpCode::Xor_I4:
            case Js::OpCode::Shl_I4:
            case Js::OpCode::Shr_I4:
            case Js::OpCode::ShrU_I4:
            case Js::OpCode::Conv_Str:
            case Js::OpCode::Coerce_Str:
            case Js::OpCode::Coerce_Regex:
            case Js::OpCode::Coerce_StrOrRegex:
            case Js::OpCode::Conv_PrimStr:
                // These instructions don't generate -0, and their behavior is the same for any src that is -0 or +0
                SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
                SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
                break;

            case Js::OpCode::Add_I4:
            {
                Assert(dstSym);
                Assert(instr->GetSrc1());
                Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsImmediateOpnd());
                Assert(instr->GetSrc2());
                Assert(instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsImmediateOpnd());

                if (instr->ignoreNegativeZero ||
                    (instr->GetSrc1()->IsImmediateOpnd() && instr->GetSrc1()->GetImmediateValue(func) != 0) ||
                    (instr->GetSrc2()->IsImmediateOpnd() && instr->GetSrc2()->GetImmediateValue(func) != 0))
                {
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
                    break;
                }

                // -0 + -0 == -0. As long as one src is guaranteed to not be -0, -0 does not matter for the other src. Pick a
                // src for which to ignore negative zero, based on which sym is last-use. If both syms are last-use, src2 is
                // picked arbitrarily.
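                // Illustrative IEEE semantics: an add produces -0 only when both srcs are -0
                // ((-0) + (-0) === -0, but (-0) + 0 === +0), so one known-non--0 src rules out a -0 result.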
                SetNegativeZeroMatters(instr->GetSrc1());
                SetNegativeZeroMatters(instr->GetSrc2());

                if (tag == Js::DeadStorePhase)
                {
                    if (instr->GetSrc2()->IsRegOpnd() &&
                        !currentBlock->upwardExposedUses->Test(instr->GetSrc2()->AsRegOpnd()->m_sym->m_id))
                    {
                        SetCouldRemoveNegZeroBailoutForDefIfLastUse(instr->GetSrc2());
                    }
                    else
                    {
                        SetCouldRemoveNegZeroBailoutForDefIfLastUse(instr->GetSrc1());
                    }
                }
                break;
            }

            case Js::OpCode::Add_A:
                Assert(dstSym);
                Assert(instr->GetSrc1());
                Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsAddrOpnd());
                Assert(instr->GetSrc2());
                Assert(instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsAddrOpnd());

                if(instr->ignoreNegativeZero || instr->GetSrc1()->IsAddrOpnd() || instr->GetSrc2()->IsAddrOpnd())
                {
                    // -0 does not matter for dst, or this instruction does not generate -0 since one of the srcs is not -0
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
                    break;
                }
                SetNegativeZeroMatters(instr->GetSrc1());
                SetNegativeZeroMatters(instr->GetSrc2());
                break;

            case Js::OpCode::Sub_I4:
            {
                Assert(dstSym);
                Assert(instr->GetSrc1());
                Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsImmediateOpnd());
                Assert(instr->GetSrc2());
                Assert(instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsImmediateOpnd());

                if (instr->ignoreNegativeZero ||
                    (instr->GetSrc1()->IsImmediateOpnd() && instr->GetSrc1()->GetImmediateValue(func) != 0) ||
                    (instr->GetSrc2()->IsImmediateOpnd() && instr->GetSrc2()->GetImmediateValue(func) != 0))
                {
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
                }
                else
                {
                    goto NegativeZero_Sub_Default;
                }
                break;
            }

            case Js::OpCode::Sub_A:
                Assert(dstSym);
                Assert(instr->GetSrc1());
                Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsAddrOpnd());
                Assert(instr->GetSrc2());
                Assert(instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsAddrOpnd() || instr->GetSrc2()->IsIntConstOpnd());

                if(instr->ignoreNegativeZero ||
                    instr->GetSrc1()->IsAddrOpnd() ||
                    (
                        instr->GetSrc2()->IsAddrOpnd() &&
                        instr->GetSrc2()->AsAddrOpnd()->IsVar() &&
                        Js::TaggedInt::ToInt32(instr->GetSrc2()->AsAddrOpnd()->m_address) != 0
                    ))
                {
                    // At least one of the following is true:
                    //     - -0 does not matter for dst
                    //     - Src1 is not -0, and so this instruction cannot generate -0
                    //     - Src2 is a nonzero tagged int constant, and so this instruction cannot generate -0
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
                    break;
                }
                // fall-through

            NegativeZero_Sub_Default:
                // -0 - 0 == -0. As long as src1 is guaranteed to not be -0, -0 does not matter for src2.
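                // Illustrative: (-0) - 0 === -0, while 0 - 0 === +0 and (-0) - (-0) === +0, so a -0
                // result requires src1 to be -0.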
                SetNegativeZeroMatters(instr->GetSrc1());
                SetNegativeZeroMatters(instr->GetSrc2());
                if (this->tag == Js::DeadStorePhase)
                {
                    SetCouldRemoveNegZeroBailoutForDefIfLastUse(instr->GetSrc2());
                }
                break;

            case Js::OpCode::BrEq_I4:
            case Js::OpCode::BrTrue_I4:
            case Js::OpCode::BrFalse_I4:
            case Js::OpCode::BrGe_I4:
            case Js::OpCode::BrUnGe_I4:
            case Js::OpCode::BrGt_I4:
            case Js::OpCode::BrUnGt_I4:
            case Js::OpCode::BrLt_I4:
            case Js::OpCode::BrUnLt_I4:
            case Js::OpCode::BrLe_I4:
            case Js::OpCode::BrUnLe_I4:
            case Js::OpCode::BrNeq_I4:
                // Int-specialized branches may prove that one of the srcs must be zero purely based on the int range, in
                // which case they rely on prior -0 bailouts to guarantee that the src cannot be -0. So, consider that -0
                // matters for the srcs.
                // fall-through

            case Js::OpCode::InlineMathAtan2:
                // Atan2(y, x): the signs of y and x are used to determine the quadrant of the result
                SetNegativeZeroMatters(instr->GetSrc1());
                SetNegativeZeroMatters(instr->GetSrc2());
                break;

            case Js::OpCode::Expo_A:
            case Js::OpCode::InlineMathPow:
                // Negative zero matters for src1:
                //     Pow( 0, <neg>) is Infinity
                //     Pow(-0, <neg>) is -Infinity
                SetNegativeZeroMatters(instr->GetSrc1());
                SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
                break;

            case Js::OpCode::LdElemI_A:
                // There is an implicit ToString on the index operand, which doesn't differentiate -0 from +0
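                // e.g. a[-0] and a[0] access the same element, since String(-0) === "0"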
                SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd());
                break;

            case Js::OpCode::StElemI_A:
            case Js::OpCode::StElemI_A_Strict:
                // There is an implicit ToString on the index operand, which doesn't differentiate -0 from +0
                SetNegativeZeroDoesNotMatterIfLastUse(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd());
                break;
        }
    }

    // Int overflow tracking
    if(!trackIntOverflow)
    {
        return;
    }

    switch(opcode)
    {
        // Instructions that can cause src values to escape the local scope have already been excluded

        default:
            // Unlike the -0 tracking, we use an inclusion list of op-codes for overflow tracking rather than an exclusion
            // list. For any instruction not listed below, assume that int-overflowed values in the srcs are not acceptable.
            ignoreIntOverflowCandidate = false;
            // fall-through

        case Js::OpCode::Incr_A:
        case Js::OpCode::Decr_A:
        case Js::OpCode::Add_A:
        case Js::OpCode::Sub_A:
            // The sources are not guaranteed to be converted to int32. Let the compounded int overflow tracking handle this.
            SetIntOverflowMatters(instr->GetSrc1());
            SetIntOverflowMatters(instr->GetSrc2());
            break;

        case Js::OpCode::Mul_A:
            if (trackNon32BitOverflow)
            {
                if (ignoreIntOverflowCandidate)
                {
                    instr->ignoreOverflowBitCount = 53;
                }
            }
            else
            {
                ignoreIntOverflowCandidate = false;
            }
            SetIntOverflowMatters(instr->GetSrc1());
            SetIntOverflowMatters(instr->GetSrc2());
            break;

        case Js::OpCode::Neg_A:
        case Js::OpCode::Ld_A:
        case Js::OpCode::Conv_Num:
        case Js::OpCode::ShrU_A:
            if(!ignoreIntOverflowCandidate)
            {
                // Int overflow matters for dst, so int overflow also matters for srcs
                SetIntOverflowMatters(instr->GetSrc1());
                SetIntOverflowMatters(instr->GetSrc2());
                break;
            }
            // fall-through

        case Js::OpCode::Not_A:
        case Js::OpCode::And_A:
        case Js::OpCode::Or_A:
        case Js::OpCode::Xor_A:
        case Js::OpCode::Shl_A:
        case Js::OpCode::Shr_A:
            // These instructions convert their srcs to int32s, and hence don't care about int-overflowed values in the srcs
            // (as long as the overflowed values did not overflow the 53 bits that 'double' values have to precisely represent
            // ints). ShrU_A is not included here because it converts its srcs to uint32 rather than int32, so it would make a
            // difference if the srcs have int32-overflowed values.
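            // e.g. ((x + y) | 0) yields the same int32 whether the add wrapped at 32 bits or overflowed
            // into a double, as long as the exact sum still fits in 53 bits, since ToInt32 just takes
            // the low 32 bits of the integer value.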
            SetIntOverflowDoesNotMatterIfLastUse(instr->GetSrc1());
            SetIntOverflowDoesNotMatterIfLastUse(instr->GetSrc2());
            break;
    }

    if(ignoreIntOverflowCandidate)
    {
        instr->ignoreIntOverflow = true;
    }

    // Compounded int overflow tracking
    if(!trackCompoundedIntOverflow)
    {
        return;
    }

    if(instr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset)
    {
        // The forward pass may need to insert conversions with bailouts before the first instruction in the range. Since this
        // instruction does not have a valid byte code offset for bailout purposes, end the current range now.
        instr->ignoreIntOverflowInRange = false;
        SetIntOverflowMattersInRange(instr->GetSrc1());
        SetIntOverflowMattersInRange(instr->GetSrc2());
        EndIntOverflowDoesNotMatterRange();
        return;
    }

    if(ignoreIntOverflowCandidate)
    {
        instr->ignoreIntOverflowInRange = true;
        if(dstSym)
        {
            dstSym->scratch.globOpt.numCompoundedAddSubUses = 0;
        }
    }

    bool lossy = false;
    switch(opcode)
    {
        // Instructions that can cause src values to escape the local scope have already been excluded

        case Js::OpCode::Incr_A:
        case Js::OpCode::Decr_A:
        case Js::OpCode::Add_A:
        case Js::OpCode::Sub_A:
        {
            if(!instr->ignoreIntOverflowInRange)
            {
                // Int overflow matters for dst, so int overflow also matters for srcs
                SetIntOverflowMattersInRange(instr->GetSrc1());
                SetIntOverflowMattersInRange(instr->GetSrc2());
                break;
            }
            AnalysisAssert(dstSym);

            // The number of compounded add/sub uses of each src is at least the number of compounded add/sub uses of the dst,
            // + 1 for the current instruction
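            // Each compounded add/sub can at most double the magnitude of the running value (one extra
            // significant bit), so bounding the count of compounded uses bounds how far an
            // ignored-overflow intermediate can grow beyond 32 bits while remaining exactly
            // representable in a double.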
            Assert(dstSym->scratch.globOpt.numCompoundedAddSubUses >= 0);
            Assert(dstSym->scratch.globOpt.numCompoundedAddSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);
            const int addSubUses = dstSym->scratch.globOpt.numCompoundedAddSubUses + 1;
            if(addSubUses > MaxCompoundedUsesInAddSubForIgnoringIntOverflow)
            {
                // There are too many compounded add/sub uses of the srcs. There is a possibility that combined, the number
                // eventually overflows the 53 bits that 'double' values have to precisely represent ints
                instr->ignoreIntOverflowInRange = false;
                SetIntOverflowMattersInRange(instr->GetSrc1());
                SetIntOverflowMattersInRange(instr->GetSrc2());
                break;
            }
            TransferCompoundedAddSubUsesToSrcs(instr, addSubUses);
            break;
        }

        case Js::OpCode::Neg_A:
        case Js::OpCode::Ld_A:
        case Js::OpCode::Conv_Num:
        case Js::OpCode::ShrU_A:
        {
            if(!instr->ignoreIntOverflowInRange)
            {
                // Int overflow matters for dst, so int overflow also matters for srcs
                SetIntOverflowMattersInRange(instr->GetSrc1());
                SetIntOverflowMattersInRange(instr->GetSrc2());
                break;
            }
            AnalysisAssert(dstSym);
            TransferCompoundedAddSubUsesToSrcs(instr, dstSym->scratch.globOpt.numCompoundedAddSubUses);
            lossy = opcode == Js::OpCode::ShrU_A;
            break;
        }

        case Js::OpCode::Not_A:
        case Js::OpCode::And_A:
        case Js::OpCode::Or_A:
        case Js::OpCode::Xor_A:
        case Js::OpCode::Shl_A:
        case Js::OpCode::Shr_A:
            // These instructions convert their srcs to int32s, and hence don't care about int-overflowed values in the srcs
            // (as long as the overflowed values did not overflow the 53 bits that 'double' values have to precisely represent
            // ints). ShrU_A is not included here because it converts its srcs to uint32 rather than int32, so it would make a
            // difference if the srcs have int32-overflowed values.
            instr->ignoreIntOverflowInRange = true;
            lossy = true;
            SetIntOverflowDoesNotMatterInRangeIfLastUse(instr->GetSrc1(), 0);
            SetIntOverflowDoesNotMatterInRangeIfLastUse(instr->GetSrc2(), 0);
            break;

        case Js::OpCode::LdSlotArr:
        case Js::OpCode::LdSlot:
        {
            Assert(dstSym);
            Assert(!instr->GetSrc2()); // at the moment, this list contains only unary operations

            if(intOverflowCurrentlyMattersInRange)
            {
                // These instructions will not begin a range, so just return. They don't begin a range because their initial
                // value may not be available until after the instruction is processed in the forward pass.
                Assert(!instr->ignoreIntOverflowInRange);
                return;
            }
            Assert(currentBlock->intOverflowDoesNotMatterRange);

            // Int overflow does not matter for dst, so the srcs need to be tracked as inputs into the region of
            // instructions where int overflow does not matter. Since these instructions will not begin or end a range, they
            // are tracked in separate candidates bit-vectors and once we have confirmed that they don't begin the range,
            // they will be transferred to 'SymsRequiredToBe[Lossy]Int'. Furthermore, once this instruction is included in
            // the range, its dst sym has to be removed. Since this instruction may not be included in the range, add the
            // dst sym to the candidates bit-vectors. If the instruction is included, the process of transferring will remove
            // the dst syms and add the src syms.

            // Remove the dst using the candidate bit-vectors
            Assert(
                !instr->ignoreIntOverflowInRange ||
                currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Test(dstSym->m_id));
            if(instr->ignoreIntOverflowInRange ||
                currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Test(dstSym->m_id))
            {
                candidateSymsRequiredToBeInt->Set(dstSym->m_id);
                if(currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Test(dstSym->m_id))
                {
                    candidateSymsRequiredToBeLossyInt->Set(dstSym->m_id);
                }
            }

            if(!instr->ignoreIntOverflowInRange)
            {
                // These instructions will not end a range, so just return. They may be included in the middle of a range,
                // but since int overflow matters for the dst, the src does not need to be counted as an input into the range.
                return;
            }
            instr->ignoreIntOverflowInRange = false;

            // Add the src using the candidate bit-vectors. The src property sym may already be included in the range or as
            // a candidate. The xor of the final bit-vector with the candidate is the set of syms required to be int,
            // assuming all instructions up to and not including this one are included in the range.
            const SymID srcSymId = instr->GetSrc1()->AsSymOpnd()->m_sym->m_id;
            const bool srcIncluded =
                !!currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Test(srcSymId) ^
                !!candidateSymsRequiredToBeInt->Test(srcSymId);
            const bool srcIncludedAsLossy =
                srcIncluded &&
                !!currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Test(srcSymId) ^
                !!candidateSymsRequiredToBeLossyInt->Test(srcSymId);
            const bool srcNeedsToBeLossless =
                !currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Test(dstSym->m_id) ||
                (srcIncluded && !srcIncludedAsLossy);
            if(srcIncluded)
            {
                if(srcIncludedAsLossy && srcNeedsToBeLossless)
                {
                    candidateSymsRequiredToBeLossyInt->Compliment(srcSymId);
                }
            }
            else
            {
                candidateSymsRequiredToBeInt->Compliment(srcSymId);
                if(!srcNeedsToBeLossless)
                {
                    candidateSymsRequiredToBeLossyInt->Compliment(srcSymId);
                }
            }

            // These instructions will not end a range, so just return. They may be included in the middle of a range, and
            // the src has been included as a candidate input into the range.
            return;
        }

        case Js::OpCode::Mul_A:
            if (trackNon32BitOverflow)
            {
                // MULs will always be at the start of a range: either included in the range if int32 overflow is ignored, or
                // excluded if int32 overflow matters. Even when int32 overflow can be ignored, MULs can still bail out on
                // 53-bit overflow, which is why they cannot be in the middle of a range.
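                // Illustrative: a product of two int32 values can reach 2^62 in magnitude, well beyond
                // the 53 bits a double represents exactly, hence the extra 53-bit check.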
                if (instr->ignoreIntOverflowInRange)
                {
                    AnalysisAssert(dstSym);
                    Assert(dstSym->scratch.globOpt.numCompoundedAddSubUses >= 0);
                    Assert(dstSym->scratch.globOpt.numCompoundedAddSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);
                    instr->ignoreOverflowBitCount = (uint8)(53 - dstSym->scratch.globOpt.numCompoundedAddSubUses);

                    // If we already have the max number of compounded adds/subs, 32-bit overflow cannot be ignored.
                    if (instr->ignoreOverflowBitCount == 32)
                    {
                        instr->ignoreIntOverflowInRange = false;
                    }
                }
                SetIntOverflowMattersInRange(instr->GetSrc1());
                SetIntOverflowMattersInRange(instr->GetSrc2());
                break;
            }
            // fall-through

        default:
            // Unlike the -0 tracking, we use an inclusion list of op-codes for overflow tracking rather than an exclusion
            // list. For any instruction not listed above, assume that int-overflowed values in the srcs are not acceptable.
            instr->ignoreIntOverflowInRange = false;
            SetIntOverflowMattersInRange(instr->GetSrc1());
            SetIntOverflowMattersInRange(instr->GetSrc2());
            break;
    }

    if(!instr->ignoreIntOverflowInRange)
    {
        EndIntOverflowDoesNotMatterRange();
        return;
    }

    if(intOverflowCurrentlyMattersInRange)
    {
        // This is the last instruction in a new range of instructions where int overflow does not matter
        intOverflowCurrentlyMattersInRange = false;
        IR::Instr *const boundaryInstr = IR::PragmaInstr::New(Js::OpCode::NoIntOverflowBoundary, 0, instr->m_func);
        boundaryInstr->SetByteCodeOffset(instr);
        currentBlock->InsertInstrAfter(boundaryInstr, instr);
        currentBlock->intOverflowDoesNotMatterRange =
            IntOverflowDoesNotMatterRange::New(
                globOpt->alloc,
                instr,
                boundaryInstr,
                currentBlock->intOverflowDoesNotMatterRange);
    }
    else
    {
        Assert(currentBlock->intOverflowDoesNotMatterRange);

        // Extend the current range of instructions where int overflow does not matter, to include this instruction. We also
        // need to include the tracked syms for instructions that have not yet been included in the range, which are tracked
        // in the range's bit-vectors. 'SymsRequiredToBeInt' will contain both the dst and src syms of instructions not yet
        // included in the range; the xor will remove the dst syms and add the src syms.
        currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Xor(candidateSymsRequiredToBeInt);
        currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Xor(candidateSymsRequiredToBeLossyInt);
        candidateSymsRequiredToBeInt->ClearAll();
        candidateSymsRequiredToBeLossyInt->ClearAll();
        currentBlock->intOverflowDoesNotMatterRange->SetFirstInstr(instr);
    }

    // Track syms that are inputs into the range based on the current instruction, which was just added to the range. The dst
    // sym is obtaining a new value so it isn't required to be an int at the start of the range, but the srcs are.
    if(dstSym)
    {
        currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Clear(dstSym->m_id);
        currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Clear(dstSym->m_id);
    }
    IR::Opnd *const srcs[] = { instr->GetSrc1(), instr->GetSrc2() };
    for(int i = 0; i < sizeof(srcs) / sizeof(srcs[0]) && srcs[i]; ++i)
    {
        StackSym *srcSym = IR::RegOpnd::TryGetStackSym(srcs[i]);
        if(!srcSym)
        {
            continue;
        }
        if(currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->TestAndSet(srcSym->m_id))
        {
            if(!lossy)
            {
                currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Clear(srcSym->m_id);
            }
        }
        else if(lossy)
        {
            currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Set(srcSym->m_id);
        }
    }

    // If the last instruction included in the range is a MUL, we have to end the range.
    // MULs with ignoreIntOverflow can still bail out on 53-bit overflow, so they cannot be in the middle of a range.
    if (trackNon32BitOverflow && instr->m_opcode == Js::OpCode::Mul_A)
    {
        // The range would have ended already if int32 overflow matters
        Assert(instr->ignoreIntOverflowInRange && instr->ignoreOverflowBitCount != 32);
        EndIntOverflowDoesNotMatterRange();
    }
}

void
BackwardPass::SetNegativeZeroDoesNotMatterIfLastUse(IR::Opnd *const opnd)
{
    StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
    if(stackSym && !currentBlock->upwardExposedUses->Test(stackSym->m_id))
    {
        negativeZeroDoesNotMatterBySymId->Set(stackSym->m_id);
    }
}

void
BackwardPass::SetNegativeZeroMatters(IR::Opnd *const opnd)
{
    StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
    if(stackSym)
    {
        negativeZeroDoesNotMatterBySymId->Clear(stackSym->m_id);
    }
}

void
BackwardPass::SetCouldRemoveNegZeroBailoutForDefIfLastUse(IR::Opnd *const opnd)
{
    StackSym * stackSym = IR::RegOpnd::TryGetStackSym(opnd);
    if (stackSym && !this->currentBlock->upwardExposedUses->Test(stackSym->m_id))
    {
        this->currentBlock->couldRemoveNegZeroBailoutForDef->Set(stackSym->m_id);
    }
}

void
BackwardPass::SetIntOverflowDoesNotMatterIfLastUse(IR::Opnd *const opnd)
{
    StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
    if(stackSym && !currentBlock->upwardExposedUses->Test(stackSym->m_id))
    {
        intOverflowDoesNotMatterBySymId->Set(stackSym->m_id);
    }
}

void
BackwardPass::SetIntOverflowMatters(IR::Opnd *const opnd)
{
    StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
    if(stackSym)
    {
        intOverflowDoesNotMatterBySymId->Clear(stackSym->m_id);
    }
}

bool
BackwardPass::SetIntOverflowDoesNotMatterInRangeIfLastUse(IR::Opnd *const opnd, const int addSubUses)
{
    StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
    return stackSym && SetIntOverflowDoesNotMatterInRangeIfLastUse(stackSym, addSubUses);
}

bool
BackwardPass::SetIntOverflowDoesNotMatterInRangeIfLastUse(StackSym *const stackSym, const int addSubUses)
{
    Assert(stackSym);
    Assert(addSubUses >= 0);
    Assert(addSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);

    if(currentBlock->upwardExposedUses->Test(stackSym->m_id))
    {
        return false;
    }

    intOverflowDoesNotMatterInRangeBySymId->Set(stackSym->m_id);
    stackSym->scratch.globOpt.numCompoundedAddSubUses = addSubUses;
    return true;
}

void
BackwardPass::SetIntOverflowMattersInRange(IR::Opnd *const opnd)
{
    StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
    if(stackSym)
    {
        intOverflowDoesNotMatterInRangeBySymId->Clear(stackSym->m_id);
    }
}

void
BackwardPass::TransferCompoundedAddSubUsesToSrcs(IR::Instr *const instr, const int addSubUses)
{
    Assert(instr);
    Assert(addSubUses >= 0);
    Assert(addSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);

    IR::Opnd *const srcs[] = { instr->GetSrc1(), instr->GetSrc2() };
    for(int i = 0; i < _countof(srcs) && srcs[i]; ++i)
    {
        StackSym *const srcSym = IR::RegOpnd::TryGetStackSym(srcs[i]);
        if(!srcSym)
        {
            // Int overflow tracking is only done for StackSyms in RegOpnds. Int overflow matters for the src, so it is
            // guaranteed to be in the int range at this point if the instruction is int-specialized.
            continue;
        }

        Assert(srcSym->scratch.globOpt.numCompoundedAddSubUses >= 0);
        Assert(srcSym->scratch.globOpt.numCompoundedAddSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);

        if(SetIntOverflowDoesNotMatterInRangeIfLastUse(srcSym, addSubUses))
        {
            // This is the last use of the src
            continue;
        }

        if(intOverflowDoesNotMatterInRangeBySymId->Test(srcSym->m_id))
        {
            // Since a src may be compounded through different chains of add/sub instructions, the greater number must be
            // preserved
            srcSym->scratch.globOpt.numCompoundedAddSubUses =
                max(srcSym->scratch.globOpt.numCompoundedAddSubUses, addSubUses);
        }
        else
        {
            // Int overflow matters for the src, so it is guaranteed to be in the int range at this point if the instruction
            // is int-specialized
        }
    }
}

void
BackwardPass::EndIntOverflowDoesNotMatterRange()
{
    if(intOverflowCurrentlyMattersInRange)
    {
        return;
    }
    intOverflowCurrentlyMattersInRange = true;

    if(currentBlock->intOverflowDoesNotMatterRange->FirstInstr()->m_next ==
        currentBlock->intOverflowDoesNotMatterRange->LastInstr())
    {
        // Don't need a range for a single-instruction range
        IntOverflowDoesNotMatterRange *const rangeToDelete = currentBlock->intOverflowDoesNotMatterRange;
        currentBlock->intOverflowDoesNotMatterRange = currentBlock->intOverflowDoesNotMatterRange->Next();
        currentBlock->RemoveInstr(rangeToDelete->LastInstr());
        rangeToDelete->Delete(globOpt->alloc);
    }
    else
    {
        // End the current range of instructions where int overflow does not matter
        IR::Instr *const boundaryInstr =
            IR::PragmaInstr::New(
                Js::OpCode::NoIntOverflowBoundary,
                0,
                currentBlock->intOverflowDoesNotMatterRange->FirstInstr()->m_func);
        boundaryInstr->SetByteCodeOffset(currentBlock->intOverflowDoesNotMatterRange->FirstInstr());
        currentBlock->InsertInstrBefore(boundaryInstr, currentBlock->intOverflowDoesNotMatterRange->FirstInstr());
        currentBlock->intOverflowDoesNotMatterRange->SetFirstInstr(boundaryInstr);

#if DBG_DUMP
        if(PHASE_TRACE(Js::TrackCompoundedIntOverflowPhase, func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            Output::Print(
                _u("TrackCompoundedIntOverflow - Top function: %s (%s), Phase: %s, Block: %u\n"),
                func->GetJITFunctionBody()->GetDisplayName(),
                func->GetDebugNumberSet(debugStringBuffer),
                Js::PhaseNames[Js::BackwardPhase],
                currentBlock->GetBlockNum());
            Output::Print(_u("    Input syms to be int-specialized (lossless): "));
            candidateSymsRequiredToBeInt->Minus(
                currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt(),
                currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()); // candidate bit-vectors are cleared below anyway
            candidateSymsRequiredToBeInt->Dump();
            Output::Print(_u("    Input syms to be converted to int (lossy): "));
            currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Dump();
            Output::Print(_u("    First instr: "));
            currentBlock->intOverflowDoesNotMatterRange->FirstInstr()->m_next->Dump();
            Output::Flush();
        }
#endif
    }

    // Reset candidates for the next range
    candidateSymsRequiredToBeInt->ClearAll();
    candidateSymsRequiredToBeLossyInt->ClearAll();

    // Syms are not tracked across different ranges of instructions where int overflow does not matter, since instructions
    // between the ranges may bail out. The value of the dst of an int operation where overflow is ignored is incorrect until
    // the last use of that sym is converted to int. If the int operation and the last use of the sym are in different ranges
    // and an instruction between the ranges bails out, other inputs into the second range are no longer guaranteed to be
    // ints, so the incorrect value of the sym may be used in non-int operations.
    intOverflowDoesNotMatterInRangeBySymId->ClearAll();
}

void
BackwardPass::TrackFloatSymEquivalence(IR::Instr *const instr)
{
    /*
    This function determines sets of float-specialized syms where any two syms in a set may have the same value number at some
    point in the function. Conversely, if two float-specialized syms are not in the same set, it guarantees that those two
    syms will never have the same value number. These sets are referred to as equivalence classes here.

    The equivalence class for a sym is used to determine whether a bailout FromVar generating a float value for the sym needs
    to bail out on any non-number value. For instance, for syms s1 and s5 in an equivalence class (say we have s5 = s1 at some
    point), if there's a FromVar that generates a float value for s1 but only bails out on strings or non-primitives, and s5
    is returned from the function, it has to be ensured that s5 is not converted to Var. If the source of the FromVar was
    null, the FromVar would not have bailed out, and s1 and s5 would have the value +0. When s5 is returned, we need to return
    null and not +0, so the equivalence class is used to determine that since s5 requires a bailout on any non-number value,
    so does s1.

    The tracking is very conservative because the bit that says "I require bailout on any non-number value" is on the sym
    itself (referred to as the non-number bailout bit below).

    Data:
    - BackwardPass::floatSymEquivalenceMap
        - hash table mapping a float sym ID to its equivalence class
    - FloatSymEquivalenceClass
        - bit vector of float sym IDs that are in the equivalence class
        - one non-number bailout bit for all syms in the equivalence class

    Algorithm:
    - In a loop prepass or when not in loop:
        - For a float sym transfer (s0.f = s1.f), add both syms to an equivalence class (set the syms in a bit vector)
        - If either sym requires bailout on any non-number value, set the equivalence class' non-number bailout bit
        - If one of the syms is already in an equivalence class, merge the two equivalence classes by OR'ing the two bit
          vectors and the non-number bailout bit
        - Note that for functions with a loop, dependency tracking is done using equivalence classes and that information is
          not transferred back into each sym's non-number bailout bit
    - In a loop non-prepass or when not in loop, for a FromVar instruction that requires bailout only on strings and
      non-primitives:
        - If the destination float sym's non-number bailout bit is set, or the sym is in an equivalence class whose non-number
          bailout bit is set, change the bailout to bail out on any non-number value

    The result is that if a float-specialized sym's value is used in a way in which it would be invalid to use the float value
    through any other float-specialized sym that acquires the value, the FromVar generating the float value will be modified
    to bail out on any non-number value.
    */

    Assert(instr);

    if(tag != Js::DeadStorePhase || instr->GetSrc2() || !instr->m_func->hasBailout)
    {
        return;
    }

    if(!instr->GetDst() || !instr->GetDst()->IsRegOpnd())
    {
        return;
    }
    const auto dst = instr->GetDst()->AsRegOpnd()->m_sym;
    if(!dst->IsFloat64())
    {
        return;
    }

    if(!instr->GetSrc1() || !instr->GetSrc1()->IsRegOpnd())
    {
        return;
    }
    const auto src = instr->GetSrc1()->AsRegOpnd()->m_sym;

    if(OpCodeAttr::NonIntTransfer(instr->m_opcode) && (!currentBlock->loop || IsPrePass()))
    {
        Assert(src->IsFloat64()); // dst is specialized, and since this is a float transfer, src must be specialized too

        if(dst == src)
        {
            return;
        }

        if(!func->m_fg->hasLoop)
        {
            // Special case for functions with no loops, since there can only be in-order dependencies. Just merge the two
            // non-number bailout bits and put the result in the source.
            if(dst->m_requiresBailOnNotNumber)
            {
                src->m_requiresBailOnNotNumber = true;
            }
            return;
        }

        FloatSymEquivalenceClass *dstEquivalenceClass = nullptr, *srcEquivalenceClass = nullptr;
        const bool dstHasEquivalenceClass = floatSymEquivalenceMap->TryGetValue(dst->m_id, &dstEquivalenceClass);
        const bool srcHasEquivalenceClass = floatSymEquivalenceMap->TryGetValue(src->m_id, &srcEquivalenceClass);

        if(!dstHasEquivalenceClass)
        {
            if(srcHasEquivalenceClass)
            {
                // Just add the destination into the source's equivalence class
                srcEquivalenceClass->Set(dst);
                floatSymEquivalenceMap->Add(dst->m_id, srcEquivalenceClass);
                return;
            }
            dstEquivalenceClass = JitAnew(tempAlloc, FloatSymEquivalenceClass, tempAlloc);
            dstEquivalenceClass->Set(dst);
            floatSymEquivalenceMap->Add(dst->m_id, dstEquivalenceClass);
        }

        if(!srcHasEquivalenceClass)
        {
            // Just add the source into the destination's equivalence class
            dstEquivalenceClass->Set(src);
            floatSymEquivalenceMap->Add(src->m_id, dstEquivalenceClass);
            return;
        }

        if(dstEquivalenceClass == srcEquivalenceClass)
        {
            return;
        }
        Assert(!dstEquivalenceClass->Bv()->Test(src->m_id));
        Assert(!srcEquivalenceClass->Bv()->Test(dst->m_id));

        // Merge the two equivalence classes. The source's equivalence class is typically smaller, so it's merged into the
        // destination's equivalence class. To save space and prevent a potential explosion of bit vector size,
        // 'floatSymEquivalenceMap' is updated for syms in the source's equivalence class to map to the destination's now
        // merged equivalence class, and the source's equivalence class is discarded.
        dstEquivalenceClass->Or(srcEquivalenceClass);
        FOREACH_BITSET_IN_SPARSEBV(id, srcEquivalenceClass->Bv())
        {
            floatSymEquivalenceMap->Item(id, dstEquivalenceClass);
        } NEXT_BITSET_IN_SPARSEBV;
        JitAdelete(tempAlloc, srcEquivalenceClass);

        return;
    }

    // Not a float transfer, and non-prepass (not necessarily in a loop)

    if(!instr->HasBailOutInfo() || instr->GetBailOutKind() != IR::BailOutPrimitiveButString)
    {
        return;
    }
    Assert(instr->m_opcode == Js::OpCode::FromVar);

    // If either the destination or its equivalence class says it requires bailout on any non-number value, adjust the bailout
    // kind on the instruction. Both are checked because in functions without loops, equivalence tracking is not done and only
    // the sym's non-number bailout bit will have the information, and in functions with loops, equivalence tracking is done
    // throughout the function and checking just the sym's non-number bailout bit is insufficient.
    FloatSymEquivalenceClass *dstEquivalenceClass = nullptr;
    if(dst->m_requiresBailOnNotNumber ||
        (floatSymEquivalenceMap->TryGetValue(dst->m_id, &dstEquivalenceClass) && dstEquivalenceClass->RequiresBailOnNotNumber()))
    {
        instr->SetBailOutKind(IR::BailOutNumberOnly);
    }
}
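
// ProcessDef handles the dst opnd of the current instruction: it updates dead-store and
// upward-exposed tracking for the defined sym, keeps byte-code syms live where exception regions or
// locals inspection require it, and removes the instruction as a dead store when the def is unused
// and removal is provably safe. Returns true if the instruction was removed.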
bool
BackwardPass::ProcessDef(IR::Opnd * opnd)
{
    BOOLEAN isJITOptimizedReg = false;
    Sym * sym;
    if (opnd->IsRegOpnd())
    {
        sym = opnd->AsRegOpnd()->m_sym;
        isJITOptimizedReg = opnd->GetIsJITOptimizedReg();
        if (!IsCollectionPass())
        {
            this->InvalidateCloneStrCandidate(opnd);
            if ((tag == Js::BackwardPhase) && IsPrePass())
            {
                this->currentPrePassLoop->symsAssignedToInLoop->Set(sym->m_id);
            }
        }
    }
    else if (opnd->IsSymOpnd())
    {
        sym = opnd->AsSymOpnd()->m_sym;
        isJITOptimizedReg = opnd->GetIsJITOptimizedReg();
    }
    else
    {
        if (opnd->IsIndirOpnd())
        {
            this->ProcessUse(opnd);
        }
        return false;
    }

    BasicBlock * block = this->currentBlock;
    BOOLEAN isUsed = true;
    BOOLEAN keepSymLiveForException = false;
    BOOLEAN keepVarSymLiveForException = false;
    IR::Instr * instr = this->currentInstr;
    Assert(!instr->IsByteCodeUsesInstr());
    if (sym->IsPropertySym())
    {
        if(IsCollectionPass())
        {
            return false;
        }

        PropertySym *propertySym = sym->AsPropertySym();
        if (this->DoDeadStoreSlots())
        {
            if (propertySym->m_fieldKind == PropertyKindLocalSlots || propertySym->m_fieldKind == PropertyKindSlots)
            {
                BOOLEAN isPropertySymUsed = !block->slotDeadStoreCandidates->TestAndSet(propertySym->m_id);
                Assert(isPropertySymUsed || !block->upwardExposedUses->Test(propertySym->m_id));
                isUsed = isPropertySymUsed || block->upwardExposedUses->Test(propertySym->m_stackSym->m_id);
            }
        }
        this->DoSetDead(opnd, !block->upwardExposedFields->TestAndClear(propertySym->m_id));

        ProcessStackSymUse(propertySym->m_stackSym, isJITOptimizedReg);

        if (tag == Js::BackwardPhase)
        {
            if (opnd->AsSymOpnd()->IsPropertySymOpnd())
            {
                this->globOpt->PreparePropertySymOpndForTypeCheckSeq(opnd->AsPropertySymOpnd(), instr, this->currentBlock->loop);
            }
        }
        if (opnd->AsSymOpnd()->IsPropertySymOpnd())
        {
            this->ProcessPropertySymOpndUse(opnd->AsPropertySymOpnd());
        }
    }
    else
    {
        Assert(!instr->IsByteCodeUsesInstr());

        if (this->DoByteCodeUpwardExposedUsed())
        {
            if (sym->AsStackSym()->HasByteCodeRegSlot())
            {
                StackSym * varSym = sym->AsStackSym();
                if (varSym->IsTypeSpec())
                {
                    // It has to have a var version for byte code regs
                    varSym = varSym->GetVarEquivSym(nullptr);
                }

                if (this->currentRegion)
                {
                    keepSymLiveForException = this->CheckWriteThroughSymInRegion(this->currentRegion, sym->AsStackSym());
                    keepVarSymLiveForException = this->CheckWriteThroughSymInRegion(this->currentRegion, varSym);
                }

                if (!isJITOptimizedReg)
                {
                    if (!DoDeadStore(this->func, sym->AsStackSym()))
                    {
                        // Don't dead-store the byte code reg sym, so that writes to it remain available for locals inspection
                        if (opnd->IsRegOpnd())
                        {
                            opnd->AsRegOpnd()->m_dontDeadStore = true;
                        }
                    }

                    // Write-through symbols should not be cleared from the byteCodeUpwardExposedUsed BV upon defs in the Try
                    // region:
                    //     try
                    //         x =
                    //         <bailout>    <-- this bailout should restore x from its first def. This would not happen if x
                    //                          is cleared from byteCodeUpwardExposedUsed when we process its second def
                    //         <exception>
                    //         x =
                    //     catch
                    //         = x
                    if (!keepVarSymLiveForException)
                    {
                        // Always track the sym use on the var sym.
                        block->byteCodeUpwardExposedUsed->Clear(varSym->m_id);
#if DBG
                        // TODO: We can only track first level function stack syms right now
                        if (varSym->GetByteCodeFunc() == this->func)
                        {
                            block->byteCodeRestoreSyms[varSym->GetByteCodeRegSlot()] = nullptr;
                        }
#endif
                    }
                }
            }
        }

        if(IsCollectionPass())
        {
            return false;
        }

        // Don't care about property sym for mark temps
        if (opnd->IsRegOpnd())
        {
            this->MarkTemp(sym->AsStackSym());
        }

        if (this->tag == Js::BackwardPhase &&
            instr->m_opcode == Js::OpCode::Ld_A &&
            instr->GetSrc1()->IsRegOpnd() &&
            block->upwardExposedFields->Test(sym->m_id))
        {
            block->upwardExposedFields->Set(instr->GetSrc1()->AsRegOpnd()->m_sym->m_id);
        }

        if (!keepSymLiveForException)
        {
            isUsed = block->upwardExposedUses->TestAndClear(sym->m_id);
        }
    }

    if (isUsed || !this->DoDeadStore())
    {
        return false;
    }

    // FromVar on a primitive value has no side-effects.
    // TODO: There may be more cases where FromVars can be dead-stored, such as cases where they have a bailout that would
    // bail out on non-primitive vars, thereby causing no side effects anyway. However, it needs to be ensured that no
    // assumptions that depend on the bailout are made later in the function.

    // Special case StFld for trackable fields
    bool hasSideEffects = instr->HasAnySideEffects()
        && instr->m_opcode != Js::OpCode::StFld
        && instr->m_opcode != Js::OpCode::StRootFld
        && instr->m_opcode != Js::OpCode::StFldStrict
        && instr->m_opcode != Js::OpCode::StRootFldStrict;

    if (this->IsPrePass() || hasSideEffects)
    {
        return false;
    }

    if (opnd->IsRegOpnd() && opnd->AsRegOpnd()->m_dontDeadStore)
    {
        return false;
    }

    if (instr->HasBailOutInfo())
    {
        // A bailout inserted for aggressive or lossy int type specialization causes assumptions to be made on the value of
        // the instruction's destination later on, as though the bailout did not happen. If the value is an int constant and
        // that value is propagated forward, it can cause the bailout instruction to become a dead store and be removed,
        // thereby invalidating the assumptions made. Or for lossy int type specialization, the lossy conversion to int32
        // may have side effects and so cannot be dead-store-removed. As one way of solving that problem, bailout
        // instructions resulting from aggressive or lossy int type spec are not dead-stored.
        const auto bailOutKind = instr->GetBailOutKind();
        if(bailOutKind & IR::BailOutOnResultConditions)
        {
            return false;
        }
        switch(bailOutKind & ~IR::BailOutKindBits)
        {
            case IR::BailOutIntOnly:
            case IR::BailOutNumberOnly:
            case IR::BailOutExpectingInteger:
            case IR::BailOutPrimitiveButString:
            case IR::BailOutExpectingString:
            case IR::BailOutOnNotPrimitive:
            case IR::BailOutFailedInlineTypeCheck:
            case IR::BailOutOnFloor:
            case IR::BailOnModByPowerOf2:
            case IR::BailOnDivResultNotInt:
            case IR::BailOnIntMin:
                return false;
        }
    }

    // Dead store
    DeadStoreInstr(instr);
    return true;
}

bool
BackwardPass::DeadStoreInstr(IR::Instr *instr)
{
    BasicBlock * block = this->currentBlock;

#if DBG_DUMP
    if (this->IsTraceEnabled())
    {
        Output::Print(_u("Deadstore instr: "));
        instr->Dump();
    }
    this->numDeadStore++;
#endif

    // Before we remove the dead store, we need to track the byte code uses
    if (this->DoByteCodeUpwardExposedUsed())
    {
#if DBG
        BVSparse<JitArenaAllocator> tempBv(this->tempAlloc);
        tempBv.Copy(this->currentBlock->byteCodeUpwardExposedUsed);
#endif
        PropertySym *unusedPropertySym = nullptr;
        GlobOpt::TrackByteCodeSymUsed(instr, this->currentBlock->byteCodeUpwardExposedUsed, &unusedPropertySym);
#if DBG
        BVSparse<JitArenaAllocator> tempBv2(this->tempAlloc);
        tempBv2.Copy(this->currentBlock->byteCodeUpwardExposedUsed);
        tempBv2.Minus(&tempBv);
        FOREACH_BITSET_IN_SPARSEBV(symId, &tempBv2)
        {
            StackSym * stackSym = this->func->m_symTable->FindStackSym(symId);
            Assert(stackSym->GetType() == TyVar);
            // TODO: We can only track first level function stack syms right now
            if (stackSym->GetByteCodeFunc() == this->func)
            {
                Js::RegSlot byteCodeRegSlot = stackSym->GetByteCodeRegSlot();
                Assert(byteCodeRegSlot != Js::Constants::NoRegister);
                if (this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] != stackSym)
                {
                    AssertMsg(this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] == nullptr,
                        "Can't have two active lifetimes for the same byte code register");
                    this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] = stackSym;
                }
            }
        }
        NEXT_BITSET_IN_SPARSEBV;
#endif
    }

    // If this is a pre-op bailout instruction, we may have saved it for bailout info processing. It's being removed now, so
    // there is no need to process the bailout info anymore.
    Assert(!preOpBailOutInstrToProcess || preOpBailOutInstrToProcess == instr);
    preOpBailOutInstrToProcess = nullptr;

#if DBG
    if (this->DoMarkTempObjectVerify())
    {
        this->currentBlock->tempObjectVerifyTracker->NotifyDeadStore(instr, this);
    }
#endif

    if (instr->m_opcode == Js::OpCode::ArgIn_A)
    {
        // Ignore tracking ArgIn for "this", as argInsCount only tracks other params, unless it is an asm.js function
        // (which doesn't have a "this").
        if (instr->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum() != 1 || func->GetJITFunctionBody()->IsAsmJsMode())
        {
            Assert(this->func->argInsCount > 0);
            this->func->argInsCount--;
        }
    }

    TraceDeadStoreOfInstrsForScopeObjectRemoval();

    block->RemoveInstr(instr);
    return true;
}
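
// ProcessTransfers: for a var-to-var copy (Ld_A) where both syms carry object info, a field that is
// upward-exposed on the dst may be satisfied through an equivalent field on the src, so the src's
// matching property syms are marked upward-exposed to keep their defs alive.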
void
BackwardPass::ProcessTransfers(IR::Instr * instr)
{
    if (this->tag == Js::DeadStorePhase &&
        this->currentBlock->upwardExposedFields &&
        instr->m_opcode == Js::OpCode::Ld_A &&
        instr->GetDst()->GetStackSym() &&
        !instr->GetDst()->GetStackSym()->IsTypeSpec() &&
        instr->GetDst()->GetStackSym()->HasObjectInfo() &&
        instr->GetSrc1() &&
        instr->GetSrc1()->GetStackSym() &&
        !instr->GetSrc1()->GetStackSym()->IsTypeSpec() &&
        instr->GetSrc1()->GetStackSym()->HasObjectInfo())
    {
        StackSym * dstStackSym = instr->GetDst()->GetStackSym();
        PropertySym * dstPropertySym = dstStackSym->GetObjectInfo()->m_propertySymList;
        BVSparse<JitArenaAllocator> transferFields(this->tempAlloc);
        while (dstPropertySym != nullptr)
        {
            Assert(dstPropertySym->m_stackSym == dstStackSym);
            transferFields.Set(dstPropertySym->m_id);
            dstPropertySym = dstPropertySym->m_nextInStackSymList;
        }

        StackSym * srcStackSym = instr->GetSrc1()->GetStackSym();
        PropertySym * srcPropertySym = srcStackSym->GetObjectInfo()->m_propertySymList;
        BVSparse<JitArenaAllocator> equivFields(this->tempAlloc);

        while (srcPropertySym != nullptr && !transferFields.IsEmpty())
        {
            Assert(srcPropertySym->m_stackSym == srcStackSym);
            if (srcPropertySym->m_propertyEquivSet)
            {
                equivFields.And(&transferFields, srcPropertySym->m_propertyEquivSet);
                if (!equivFields.IsEmpty())
                {
                    transferFields.Minus(&equivFields);
                    this->currentBlock->upwardExposedFields->Set(srcPropertySym->m_id);
                }
            }
            srcPropertySym = srcPropertySym->m_nextInStackSymList;
        }
    }
}

void
BackwardPass::ProcessFieldKills(IR::Instr * instr)
{
    if (this->currentBlock->upwardExposedFields)
    {
        this->globOpt->ProcessFieldKills(instr, this->currentBlock->upwardExposedFields, false);
    }

    this->ClearBucketsOnFieldKill(instr, currentBlock->stackSymToFinalType);
    this->ClearBucketsOnFieldKill(instr, currentBlock->stackSymToGuardedProperties);
}
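
// ClearBucketsOnFieldKill: when an instruction kills fields, drop the per-sym tracking buckets:
// all of them if the instruction uses all fields, otherwise just the bucket keyed by the dst sym.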
template<typename T>
void
BackwardPass::ClearBucketsOnFieldKill(IR::Instr *instr, HashTable<T> *table)
{
    if (table)
    {
        if (instr->UsesAllFields())
        {
            table->ClearAll();
        }
        else
        {
            IR::Opnd *dst = instr->GetDst();
            if (dst && dst->IsRegOpnd())
            {
                table->Clear(dst->AsRegOpnd()->m_sym->m_id);
            }
        }
    }
}
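
// TrackNoImplicitCallInlinees: during the dead-store phase, flag funcs whose instructions can bail
// out or make implicit calls (which disqualifies them from inline-overhead removal), and for an
// InlineeStart in a func free of such instructions, try to strip the inlining overhead.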
bool
BackwardPass::TrackNoImplicitCallInlinees(IR::Instr *instr)
{
    if (this->tag != Js::DeadStorePhase || this->IsPrePass())
    {
        return false;
    }

    if (instr->HasBailOutInfo()
        || OpCodeAttr::CallInstr(instr->m_opcode)
        || instr->CallsAccessor()
        || GlobOpt::MayNeedBailOnImplicitCall(instr, nullptr, nullptr)
        || instr->HasAnyLoadHeapArgsOpCode()
        || instr->m_opcode == Js::OpCode::LdFuncExpr)
    {
        // This func has instrs with bailouts or implicit calls
        Assert(instr->m_opcode != Js::OpCode::InlineeStart);
        instr->m_func->SetHasImplicitCallsOnSelfAndParents();
        return false;
    }

    if (instr->m_opcode == Js::OpCode::InlineeStart)
    {
        if (!instr->GetSrc1())
        {
            Assert(instr->m_func->m_hasInlineArgsOpt);
            return false;
        }
        return this->ProcessInlineeStart(instr);
    }

    return false;
}
bool
BackwardPass::ProcessInlineeStart(IR::Instr* inlineeStart)
{
    inlineeStart->m_func->SetFirstArgOffset(inlineeStart);

    IR::Instr* startCallInstr = nullptr;
    bool noImplicitCallsInInlinee = false;

    // The inlinee has no bailouts or implicit calls. Get rid of the inlining overhead.
    auto removeInstr = [&](IR::Instr* argInstr)
    {
        Assert(argInstr->m_opcode == Js::OpCode::InlineeStart || argInstr->m_opcode == Js::OpCode::ArgOut_A || argInstr->m_opcode == Js::OpCode::ArgOut_A_Inline);
        IR::Opnd *opnd = argInstr->GetSrc1();
        StackSym *sym = opnd->GetStackSym();
        if (!opnd->GetIsJITOptimizedReg() && sym && sym->HasByteCodeRegSlot())
        {
            // Replace the instr with a ByteCodeUses to preserve bytecode liveness
            IR::ByteCodeUsesInstr *bytecodeUse = IR::ByteCodeUsesInstr::New(argInstr);
            bytecodeUse->Set(opnd);
            argInstr->InsertBefore(bytecodeUse);
        }
        startCallInstr = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
        FlowGraph::SafeRemoveInstr(argInstr);
        return false;
    };

    // If there are no implicit calls - bailouts/throws - we can remove all inlining overhead.
    if (!inlineeStart->m_func->GetHasImplicitCalls())
    {
        noImplicitCallsInInlinee = true;
        inlineeStart->IterateArgInstrs(removeInstr);

        inlineeStart->IterateMetaArgs([](IR::Instr* metArg)
        {
            FlowGraph::SafeRemoveInstr(metArg);
            return false;
        });
        inlineeStart->m_func->m_hasInlineArgsOpt = false;
        removeInstr(inlineeStart);
        return true;
    }

    if (!inlineeStart->m_func->m_hasInlineArgsOpt)
    {
        PHASE_PRINT_TESTTRACE(Js::InlineArgsOptPhase, func, _u("%s[%d]: Skipping inline args optimization: %s[%d] HasCalls: %s 'arguments' access: %s Can do inlinee args opt: %s\n"),
            func->GetJITFunctionBody()->GetDisplayName(), func->GetJITFunctionBody()->GetFunctionNumber(),
            inlineeStart->m_func->GetJITFunctionBody()->GetDisplayName(), inlineeStart->m_func->GetJITFunctionBody()->GetFunctionNumber(),
            IsTrueOrFalse(inlineeStart->m_func->GetHasCalls()),
            IsTrueOrFalse(inlineeStart->m_func->GetHasUnoptimizedArgumentsAccess()),
            IsTrueOrFalse(inlineeStart->m_func->m_canDoInlineArgsOpt));
        return false;
    }

    if (!inlineeStart->m_func->frameInfo->isRecorded)
    {
        PHASE_PRINT_TESTTRACE(Js::InlineArgsOptPhase, func, _u("%s[%d]: InlineeEnd not found - usually due to a throw or a BailOnNoProfile (stressed, most likely)\n"),
            func->GetJITFunctionBody()->GetDisplayName(), func->GetJITFunctionBody()->GetFunctionNumber());
        inlineeStart->m_func->DisableCanDoInlineArgOpt();
        return false;
    }

    inlineeStart->IterateArgInstrs(removeInstr);

    int i = 0;
    inlineeStart->IterateMetaArgs([&](IR::Instr* metaArg)
    {
        if (i == Js::Constants::InlineeMetaArgIndex_ArgumentsObject &&
            inlineeStart->m_func->GetJITFunctionBody()->UsesArgumentsObject())
        {
            Assert(!inlineeStart->m_func->GetHasUnoptimizedArgumentsAccess());
            // Do not remove the arguments object meta arg if there is a reference to the arguments object
        }
        else
        {
            FlowGraph::SafeRemoveInstr(metaArg);
        }
        i++;
        return false;
    });

    IR::Opnd *src1 = inlineeStart->GetSrc1();
    StackSym *sym = src1->GetStackSym();
    if (!src1->GetIsJITOptimizedReg() && sym && sym->HasByteCodeRegSlot())
    {
        // Replace the instr with a ByteCodeUses to preserve bytecode liveness
        IR::ByteCodeUsesInstr *bytecodeUse = IR::ByteCodeUsesInstr::New(inlineeStart);
        bytecodeUse->Set(src1);
        inlineeStart->InsertBefore(bytecodeUse);
    }

    // This indicates to the lowerer that this inlinee has been optimized and should not be
    // lowered - from here on the instruction only marks the start of the inlinee.
    inlineeStart->FreeSrc1();
    inlineeStart->FreeSrc2();
    inlineeStart->FreeDst();
    return true;
}

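// ProcessInlineeEnd keeps the syms needed to reconstruct the inlinee frame alive across the
// two backward walks: in the backward phase, the sources of the InlineeStart's arg instrs;
// in the dead-store phase, the syms recorded in the inlinee's frameInfo.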
void
BackwardPass::ProcessInlineeEnd(IR::Instr* instr)
{
    if (this->IsPrePass())
    {
        return;
    }
    if (this->tag == Js::BackwardPhase)
    {
        if (!GlobOpt::DoInlineArgsOpt(instr->m_func))
        {
            return;
        }

        // This adds a use for the function sym as part of InlineeStart and for all the syms referenced by the args.
        // It ensures they do not get cleared from the copy-prop sym map.
        instr->IterateArgInstrs([=](IR::Instr* argInstr)
        {
            if (argInstr->GetSrc1()->IsRegOpnd())
            {
                this->currentBlock->upwardExposedUses->Set(argInstr->GetSrc1()->AsRegOpnd()->m_sym->m_id);
            }
            return false;
        });
    }
    else if (this->tag == Js::DeadStorePhase)
    {
        if (instr->m_func->m_hasInlineArgsOpt)
        {
            Assert(instr->m_func->frameInfo);
            instr->m_func->frameInfo->IterateSyms([=](StackSym* argSym)
            {
                this->currentBlock->upwardExposedUses->Set(argSym->m_id);
            });
        }
    }
}

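// BailOnNoProfile marks code that has never executed and therefore has no profile data.
// This routine floats the bailout to the top of its block, deletes redundant copies along
// the way, and then clones it into any predecessor whose every other successor also begins
// with a BailOnNoProfile, so whole unprofiled regions collapse into one early bailout.
// Roughly (sketch only):
//
//     $L2:                         $L2: [helper]
//         instr1                       BailOnNoProfile   <-- hoisted; the block is now
//         instr2               =>      instr1                helper code and a candidate
//         BailOnNoProfile              instr2                for hoisting into preds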
bool
BackwardPass::ProcessBailOnNoProfile(IR::Instr *instr, BasicBlock *block)
{
    Assert(this->tag == Js::BackwardPhase);
    Assert(instr->m_opcode == Js::OpCode::BailOnNoProfile);
    Assert(!instr->HasBailOutInfo());
    AnalysisAssert(block);

    if (this->IsPrePass())
    {
        return false;
    }

    if (this->currentRegion && (this->currentRegion->GetType() == RegionTypeCatch || this->currentRegion->GetType() == RegionTypeFinally))
    {
        return false;
    }

    IR::Instr *curInstr = instr->m_prev;

    if (curInstr->IsLabelInstr() && curInstr->AsLabelInstr()->isOpHelper)
    {
        // Already processed
        if (this->DoMarkTempObjects())
        {
            block->tempObjectTracker->ProcessBailOnNoProfile(instr);
        }
        return false;
    }

    // Don't hoist if we see calls with profile data (recursive calls)
    while (!curInstr->StartsBasicBlock())
    {
        // If a function was inlined, it must have had profile info.
        if (curInstr->m_opcode == Js::OpCode::InlineeEnd || curInstr->m_opcode == Js::OpCode::InlineBuiltInEnd || curInstr->m_opcode == Js::OpCode::InlineNonTrackingBuiltInEnd
            || curInstr->m_opcode == Js::OpCode::InlineeStart || curInstr->m_opcode == Js::OpCode::EndCallForPolymorphicInlinee)
        {
            break;
        }
        else if (OpCodeAttr::CallInstr(curInstr->m_opcode))
        {
            if (curInstr->m_prev->m_opcode != Js::OpCode::BailOnNoProfile)
            {
                break;
            }
        }
        curInstr = curInstr->m_prev;
    }

    // Didn't get to the top of the block; delete this BailOnNoProfile.
    if (!curInstr->IsLabelInstr())
    {
        block->RemoveInstr(instr);
        return true;
    }

    // Save the head instruction for later use.
    IR::LabelInstr *blockHeadInstr = curInstr->AsLabelInstr();

    // We can't bail in the middle of a "tmp = CmEq s1, s2; BrTrue tmp" turned into a "BrEq s1, s2",
    // because the bailout wouldn't be able to restore tmp.
    IR::Instr *curNext = curInstr->GetNextRealInstrOrLabel();
    IR::Instr *instrNope = nullptr;
    if (curNext->m_opcode == Js::OpCode::Ld_A && curNext->GetDst()->IsRegOpnd() && curNext->GetDst()->AsRegOpnd()->m_fgPeepTmp)
    {
        block->RemoveInstr(instr);
        return true;
        /*while (curNext->m_opcode == Js::OpCode::Ld_A && curNext->GetDst()->IsRegOpnd() && curNext->GetDst()->AsRegOpnd()->m_fgPeepTmp)
        {
            // Instead of just giving up, we can be a little trickier. We can instead treat the tmp declaration(s) as a
            // part of the block prefix, and put the BailOnNoProfile immediately after them. This has the added benefit
            // that we can still merge up blocks beginning with BailOnNoProfile, even if they would otherwise not allow
            // us to, due to the fact that these tmp declarations would be pre-empted by the higher-level bailout.
            instrNope = curNext;
            curNext = curNext->GetNextRealInstrOrLabel();
        }*/
    }

    curInstr = instr->m_prev;

    // Move to the top of the block (but just below any m_fgPeepTmp lds).
    while (!curInstr->StartsBasicBlock() && curInstr != instrNope)
    {
        // Delete redundant BailOnNoProfile
        if (curInstr->m_opcode == Js::OpCode::BailOnNoProfile)
        {
            Assert(!curInstr->HasBailOutInfo());
            curInstr = curInstr->m_next;
            curInstr->m_prev->Remove();
        }
        curInstr = curInstr->m_prev;
    }

    if (instr == block->GetLastInstr())
    {
        block->SetLastInstr(instr->m_prev);
    }

    instr->Unlink();

    // Now try to move this up the flow graph to the predecessor blocks
    FOREACH_PREDECESSOR_BLOCK(pred, block)
    {
        bool hoistBailToPred = true;

        if (block->isLoopHeader && pred->loop == block->loop)
        {
            // Skip loop back-edges
            continue;
        }

        if (pred->GetFirstInstr()->AsLabelInstr()->GetRegion() != this->currentRegion)
        {
            break;
        }

        // If all successors of this predecessor start with a BailOnNoProfile, we should be
        // okay to hoist this bail to the predecessor.
        FOREACH_SUCCESSOR_BLOCK(predSucc, pred)
        {
            if (predSucc == block)
            {
                continue;
            }
            if (!predSucc->beginsBailOnNoProfile)
            {
                hoistBailToPred = false;
                break;
            }
        } NEXT_SUCCESSOR_BLOCK;

        if (hoistBailToPred)
        {
            IR::Instr *predInstr = pred->GetLastInstr();
            IR::Instr *instrCopy = instr->Copy();

            if (predInstr->EndsBasicBlock())
            {
                if (predInstr->m_prev->m_opcode == Js::OpCode::BailOnNoProfile)
                {
                    // We already have one, we don't need a second.
                    instrCopy->Free();
                }
                else if (!predInstr->AsBranchInstr()->m_isSwitchBr)
                {
                    // Don't put a bailout in the middle of a switch dispatch sequence.
                    // The bytecode offsets are not in order, and it would lead to incorrect
                    // bailout info.
                    instrCopy->m_func = predInstr->m_func;
                    predInstr->InsertBefore(instrCopy);
                }
            }
            else
            {
                if (predInstr->m_opcode == Js::OpCode::BailOnNoProfile)
                {
                    // We already have one, we don't need a second.
                    instrCopy->Free();
                }
                else
                {
                    instrCopy->m_func = predInstr->m_func;
                    predInstr->InsertAfter(instrCopy);
                    pred->SetLastInstr(instrCopy);
                }
            }
        }
    } NEXT_PREDECESSOR_BLOCK;

    // If we have a BailOnNoProfile in the first block, there must have been at least one path out of this block that always throws.
    // Don't bother keeping the bailout in the first block, as there are some issues in restoring the ArgIn bytecode registers on
    // bailout, and the throw case should be rare enough that it won't matter for perf.
    if (block->GetBlockNum() != 0)
    {
        blockHeadInstr->isOpHelper = true;
#if DBG
        blockHeadInstr->m_noHelperAssert = true;
#endif
        block->beginsBailOnNoProfile = true;

        instr->m_func = curInstr->m_func;
        curInstr->InsertAfter(instr);

        bool setLastInstr = (curInstr == block->GetLastInstr());
        if (setLastInstr)
        {
            block->SetLastInstr(instr);
        }

        if (this->DoMarkTempObjects())
        {
            block->tempObjectTracker->ProcessBailOnNoProfile(instr);
        }
        return false;
    }
    else
    {
        instr->Free();
        return true;
    }
}

bool
BackwardPass::ReverseCopyProp(IR::Instr *instr)
{
    // Look for:
    //
    //  t1 = instr
    //       [bytecodeuse t1]
    //  t2 = Ld_A t1            >> t1 !upwardExposed
    //
    // Transform into:
    //
    //  t2 = instr
    //
    if (PHASE_OFF(Js::ReverseCopyPropPhase, this->func))
    {
        return false;
    }
    if (this->tag != Js::DeadStorePhase || this->IsPrePass() || this->IsCollectionPass())
    {
        return false;
    }
    if (this->func->HasTry())
    {
        // UpwardExposedUsed info can't be relied on
        return false;
    }

    // Find t2 = Ld_A t1
    switch (instr->m_opcode)
    {
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
        break;

    default:
        return false;
    }

    if (!instr->GetDst()->IsRegOpnd())
    {
        return false;
    }
    if (!instr->GetSrc1()->IsRegOpnd())
    {
        return false;
    }
    if (instr->HasBailOutInfo())
    {
        return false;
    }

    IR::RegOpnd *dst = instr->GetDst()->AsRegOpnd();
    IR::RegOpnd *src = instr->GetSrc1()->AsRegOpnd();
    IR::Instr *instrPrev = instr->GetPrevRealInstrOrLabel();
    IR::ByteCodeUsesInstr *byteCodeUseInstr = nullptr;
    StackSym *varSym = src->m_sym;

    if (varSym->IsTypeSpec())
    {
        varSym = varSym->GetVarEquivSym(this->func);
    }

    // Skip the ByteCodeUses instr if possible
    //       [bytecodeuse t1]
    if (!instrPrev->GetDst())
    {
        if (instrPrev->m_opcode == Js::OpCode::ByteCodeUses)
        {
            byteCodeUseInstr = instrPrev->AsByteCodeUsesInstr();
            const BVSparse<JitArenaAllocator>* byteCodeUpwardExposedUsed = byteCodeUseInstr->GetByteCodeUpwardExposedUsed();
            if (byteCodeUpwardExposedUsed && byteCodeUpwardExposedUsed->Test(varSym->m_id) && byteCodeUpwardExposedUsed->Count() == 1)
            {
                instrPrev = byteCodeUseInstr->GetPrevRealInstrOrLabel();
                if (!instrPrev->GetDst())
                {
                    return false;
                }
            }
            else
            {
                return false;
            }
        }
        else
        {
            return false;
        }
    }

    // The fast-path for these doesn't handle dst == src.
    // REVIEW: I believe the fast-path for LdElemI_A has been fixed... Nope, still broken for "i = A[i]" for prejit
    switch (instrPrev->m_opcode)
    {
    case Js::OpCode::LdElemI_A:
    case Js::OpCode::IsInst:
    case Js::OpCode::ByteCodeUses:
        return false;
    }

    // Can't do it if a post-op bailout would need the result
    // REVIEW: enable for pre-op bailout?
    if (instrPrev->HasBailOutInfo() && instrPrev->GetByteCodeOffset() != instrPrev->GetBailOutInfo()->bailOutOffset)
    {
        return false;
    }

    // Make sure the src of the Ld_A == the dst of the instr
    //  t1 = instr
    if (!instrPrev->GetDst()->IsEqual(src))
    {
        return false;
    }

    // Make sure t1 isn't used later
    if (this->currentBlock->upwardExposedUses->Test(src->m_sym->m_id))
    {
        return false;
    }

    if (this->currentBlock->byteCodeUpwardExposedUsed && this->currentBlock->byteCodeUpwardExposedUsed->Test(varSym->m_id))
    {
        return false;
    }

    // Make sure we can dead-store this sym (debugger mode?)
    if (!this->DoDeadStore(this->func, src->m_sym))
    {
        return false;
    }

    StackSym *const dstSym = dst->m_sym;
    if (instrPrev->HasBailOutInfo() && dstSym->IsInt32() && dstSym->IsTypeSpec())
    {
        StackSym *const prevDstSym = IR::RegOpnd::TryGetStackSym(instrPrev->GetDst());
        if (instrPrev->GetBailOutKind() & IR::BailOutOnResultConditions &&
            prevDstSym &&
            prevDstSym->IsInt32() &&
            prevDstSym->IsTypeSpec() &&
            instrPrev->GetSrc1() &&
            !instrPrev->GetDst()->IsEqual(instrPrev->GetSrc1()) &&
            !(instrPrev->GetSrc2() && instrPrev->GetDst()->IsEqual(instrPrev->GetSrc2())))
        {
            // The previous instruction's dst value may be trashed by the time of the pre-op bailout. Skip reverse copy-prop if
            // it would replace the previous instruction's dst with a sym that bailout had decided to use to restore a value for
            // the pre-op bailout, which can't be trashed before bailout. See the big comment in ProcessBailOutCopyProps for the
            // reasoning behind the tests above.
            FOREACH_SLISTBASE_ENTRY(
                CopyPropSyms,
                usedCopyPropSym,
                &instrPrev->GetBailOutInfo()->usedCapturedValues.copyPropSyms)
            {
                if (dstSym == usedCopyPropSym.Value())
                {
                    return false;
                }
            } NEXT_SLISTBASE_ENTRY;
        }
    }

    if (byteCodeUseInstr)
    {
        if (this->currentBlock->byteCodeUpwardExposedUsed && instrPrev->GetDst()->AsRegOpnd()->GetIsJITOptimizedReg() && varSym->HasByteCodeRegSlot())
        {
            if (varSym->HasByteCodeRegSlot())
            {
                this->currentBlock->byteCodeUpwardExposedUsed->Set(varSym->m_id);
            }

            if (src->IsEqual(dst) && instrPrev->GetDst()->GetIsJITOptimizedReg())
            {
                // s2(s1).i32 = FromVar s1.var   #0000 Bailout: #0000 (BailOutIntOnly)
                //              ByteCodeUses s1
                // s2(s1).i32 = Ld_A s2(s1).i32
                //
                // Since the dst on the FromVar is marked JITOptimized, we need to set it on the new dst as well,
                // or we'll change the bytecode liveness of s1
                dst->SetIsJITOptimizedReg(true);
            }
        }
        byteCodeUseInstr->Remove();
    }
    else if (instrPrev->GetDst()->AsRegOpnd()->GetIsJITOptimizedReg() && !src->GetIsJITOptimizedReg() && varSym->HasByteCodeRegSlot())
    {
        this->currentBlock->byteCodeUpwardExposedUsed->Set(varSym->m_id);
    }

#if DBG
    if (this->DoMarkTempObjectVerify())
    {
        this->currentBlock->tempObjectVerifyTracker->NotifyReverseCopyProp(instrPrev);
    }
#endif

    dst->SetValueType(instrPrev->GetDst()->GetValueType());
    instrPrev->ReplaceDst(dst);
    instr->Remove();
    return true;
}

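// FoldCmBool folds a compare feeding a Conv_Bool into a single bool-producing compare when
// the intermediate int result is not used anywhere else. Roughly (made-up sym numbers):
//
//     s1.i32 = CmEq_A s2, s3                s4 = CmEq_A s2, s3
//     s4     = Conv_Bool s1.i32      =>     (the Conv_Bool is removed; requires that s1 is
//                                            not upward-exposed and that s4 actually is)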
bool
BackwardPass::FoldCmBool(IR::Instr *instr)
{
    Assert(instr->m_opcode == Js::OpCode::Conv_Bool);

    if (this->tag != Js::DeadStorePhase || this->IsPrePass() || this->IsCollectionPass())
    {
        return false;
    }
    if (this->func->HasTry())
    {
        // UpwardExposedUsed info can't be relied on
        return false;
    }

    IR::RegOpnd *intOpnd = instr->GetSrc1()->AsRegOpnd();
    Assert(intOpnd->m_sym->IsInt32());

    if (!intOpnd->m_sym->IsSingleDef())
    {
        return false;
    }

    IR::Instr *cmInstr = intOpnd->m_sym->GetInstrDef();

    // Should be a Cm instr...
    if (!cmInstr->GetSrc2())
    {
        return false;
    }

    IR::Instr *instrPrev = instr->GetPrevRealInstrOrLabel();
    if (instrPrev != cmInstr)
    {
        return false;
    }

    switch (cmInstr->m_opcode)
    {
    case Js::OpCode::CmEq_A:
    case Js::OpCode::CmGe_A:
    case Js::OpCode::CmUnGe_A:
    case Js::OpCode::CmGt_A:
    case Js::OpCode::CmUnGt_A:
    case Js::OpCode::CmLt_A:
    case Js::OpCode::CmUnLt_A:
    case Js::OpCode::CmLe_A:
    case Js::OpCode::CmUnLe_A:
    case Js::OpCode::CmNeq_A:
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmSrNeq_A:
    case Js::OpCode::CmEq_I4:
    case Js::OpCode::CmNeq_I4:
    case Js::OpCode::CmLt_I4:
    case Js::OpCode::CmLe_I4:
    case Js::OpCode::CmGt_I4:
    case Js::OpCode::CmGe_I4:
    case Js::OpCode::CmUnLt_I4:
    case Js::OpCode::CmUnLe_I4:
    case Js::OpCode::CmUnGt_I4:
    case Js::OpCode::CmUnGe_I4:
        break;

    default:
        return false;
    }

    IR::RegOpnd *varDst = instr->GetDst()->AsRegOpnd();
    if (this->currentBlock->upwardExposedUses->Test(intOpnd->m_sym->m_id) || !this->currentBlock->upwardExposedUses->Test(varDst->m_sym->m_id))
    {
        return false;
    }

    varDst = instr->UnlinkDst()->AsRegOpnd();
    cmInstr->ReplaceDst(varDst);
    this->currentBlock->RemoveInstr(instr);
    return true;
}

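// A try region's write-through symbols are the bytecode upward-exposed symbols at the head
// of its handler: anything the catch or finally might read has to be written through to
// memory inside the try instead of living only in a register.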
void
BackwardPass::SetWriteThroughSymbolsSetForRegion(BasicBlock * catchOrFinallyBlock, Region * tryRegion)
{
    tryRegion->writeThroughSymbolsSet = JitAnew(this->func->m_alloc, BVSparse<JitArenaAllocator>, this->func->m_alloc);

    if (this->DoByteCodeUpwardExposedUsed())
    {
        Assert(catchOrFinallyBlock->byteCodeUpwardExposedUsed);
        if (!catchOrFinallyBlock->byteCodeUpwardExposedUsed->IsEmpty())
        {
            FOREACH_BITSET_IN_SPARSEBV(id, catchOrFinallyBlock->byteCodeUpwardExposedUsed)
            {
                tryRegion->writeThroughSymbolsSet->Set(id);
            }
            NEXT_BITSET_IN_SPARSEBV
        }
#if DBG
        // Symbols that are write-through in the parent try region should be write-through in the
        // current try region as well; verify that this still holds. For example:
        //
        //      x =
        //      try
        //      {
        //          try
        //          {
        //              x =     <-- x needs to be write-through here. With the current mechanism of
        //                          not clearing a write-through symbol from the bytecode
        //                          upward-exposed set on a def, x should be marked write-through,
        //                          because the write-through symbols for a try are basically the
        //                          bytecode upward-exposed symbols at the beginning of the
        //                          corresponding catch block.
        //              <exception>
        //          }
        //          catch () {}
        //          x =
        //      }
        //      catch () {}
        //      = x
        if (tryRegion->GetParent()->GetType() == RegionTypeTry)
        {
            Region * parentTry = tryRegion->GetParent();
            Assert(parentTry->writeThroughSymbolsSet);
            FOREACH_BITSET_IN_SPARSEBV(id, parentTry->writeThroughSymbolsSet)
            {
                Assert(tryRegion->writeThroughSymbolsSet->Test(id));
            }
            NEXT_BITSET_IN_SPARSEBV
        }
#endif
    }
    else
    {
        // this can happen with -off:globopt
        return;
    }
}

bool
BackwardPass::CheckWriteThroughSymInRegion(Region* region, StackSym* sym)
{
    if (region->GetType() == RegionTypeRoot)
    {
        return false;
    }

    // If the current region is a try region, check its write-through set;
    // otherwise (the current region is a catch region), look in the first try ancestor's write-through set.
    Region * selfOrFirstTryAncestor = region->GetSelfOrFirstTryAncestor();
    if (!selfOrFirstTryAncestor)
    {
        return false;
    }
    Assert(selfOrFirstTryAncestor->GetType() == RegionTypeTry);
    return selfOrFirstTryAncestor->writeThroughSymbolsSet && selfOrFirstTryAncestor->writeThroughSymbolsSet->Test(sym->m_id);
}

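// A load or store inside a memop'd loop is removable exactly when it matches one of the
// loop's recorded memset/memcopy candidates (same base and index syms), since the memop
// emitted in the landing pad subsumes it.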
bool
BackwardPass::DoDeadStoreLdStForMemop(IR::Instr *instr)
{
    Assert(this->tag == Js::DeadStorePhase && this->currentBlock->loop != nullptr);

    Loop *loop = this->currentBlock->loop;

    if (globOpt->HasMemOp(loop))
    {
        if (instr->m_opcode == Js::OpCode::StElemI_A && instr->GetDst()->IsIndirOpnd())
        {
            SymID base = this->globOpt->GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym());
            SymID index = this->globOpt->GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym());

            FOREACH_MEMOP_CANDIDATES(candidate, loop)
            {
                if (base == candidate->base && index == candidate->index)
                {
                    return true;
                }
            } NEXT_MEMOP_CANDIDATE
        }
        else if (instr->m_opcode == Js::OpCode::LdElemI_A && instr->GetSrc1()->IsIndirOpnd())
        {
            SymID base = this->globOpt->GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym());
            SymID index = this->globOpt->GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym());

            FOREACH_MEMCOPY_CANDIDATES(candidate, loop)
            {
                if (base == candidate->ldBase && index == candidate->index)
                {
                    return true;
                }
            } NEXT_MEMCOPY_CANDIDATE
        }
    }
    return false;
}

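// Once a loop has been turned into a memset/memcopy, its induction variables no longer step
// through their per-iteration updates, so any that are live after the loop are materialized
// in the landing pad as start +/- total change. An illustrative example:
//
//     for (i = 0; i < n; i++) { A[i] = 0; }   =>   memset, plus "i = 0 + n" in the landing
//                                                  pad when i is used after the loop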
void
BackwardPass::RestoreInductionVariableValuesAfterMemOp(Loop *loop)
{
    const auto RestoreInductionVariable = [&](SymID symId, Loop::InductionVariableChangeInfo inductionVariableChangeInfo, Loop *loop)
    {
        Js::OpCode opCode = Js::OpCode::Add_I4;
        if (!inductionVariableChangeInfo.isIncremental)
        {
            opCode = Js::OpCode::Sub_I4;
        }
        Func *localFunc = loop->GetFunc();
        StackSym *sym = localFunc->m_symTable->FindStackSym(symId)->GetInt32EquivSym(localFunc);

        IR::Opnd *inductionVariableOpnd = IR::RegOpnd::New(sym, IRType::TyInt32, localFunc);
        IR::Opnd *sizeOpnd = globOpt->GenerateInductionVariableChangeForMemOp(loop, inductionVariableChangeInfo.unroll);
        loop->landingPad->InsertAfter(IR::Instr::New(opCode, inductionVariableOpnd, inductionVariableOpnd, sizeOpnd, loop->GetFunc()));
    };

    for (auto it = loop->memOpInfo->inductionVariableChangeInfoMap->GetIterator(); it.IsValid(); it.MoveNext())
    {
        Loop::InductionVariableChangeInfo iv = it.CurrentValue();
        SymID sym = it.CurrentKey();
        if (iv.unroll != Js::Constants::InvalidLoopUnrollFactor)
        {
            // If the variable is used after the loop, restore it
            if (loop->memOpInfo->inductionVariablesUsedAfterLoop->Test(sym))
            {
                RestoreInductionVariable(sym, iv, loop);
            }
        }
    }
}

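// After the memop transformation the loop body should contain nothing but induction-variable
// updates and loop-control branches; this verifies that every remaining real instruction is
// one of those, in which case the loop itself can be deleted.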
bool
BackwardPass::IsEmptyLoopAfterMemOp(Loop *loop)
{
    if (globOpt->HasMemOp(loop))
    {
        const auto IsInductionVariableUse = [&](IR::Opnd *opnd) -> bool
        {
            Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
            return (opnd &&
                opnd->GetStackSym() &&
                loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(this->globOpt->GetVarSymID(opnd->GetStackSym())) &&
                (((Loop::InductionVariableChangeInfo)
                    loop->memOpInfo->inductionVariableChangeInfoMap->
                        LookupWithKey(this->globOpt->GetVarSymID(opnd->GetStackSym()), inductionVariableChangeInfo)).unroll != Js::Constants::InvalidLoopUnrollFactor));
        };

        Assert(loop->blockList.HasTwo());

        FOREACH_BLOCK_IN_LOOP(bblock, loop)
        {
            FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrPrev, bblock)
            {
                if (instr->IsLabelInstr() || !instr->IsRealInstr() || instr->m_opcode == Js::OpCode::IncrLoopBodyCount || instr->m_opcode == Js::OpCode::StLoopBodyCount
                    || (instr->IsBranchInstr() && instr->AsBranchInstr()->IsUnconditional()))
                {
                    continue;
                }
                else
                {
                    switch (instr->m_opcode)
                    {
                    case Js::OpCode::Nop:
                        break;

                    case Js::OpCode::Ld_I4:
                    case Js::OpCode::Add_I4:
                    case Js::OpCode::Sub_I4:
                        if (!IsInductionVariableUse(instr->GetDst()))
                        {
                            Assert(instr->GetDst());
                            if (instr->GetDst()->GetStackSym()
                                && loop->memOpInfo->inductionVariablesUsedAfterLoop->Test(globOpt->GetVarSymID(instr->GetDst()->GetStackSym())))
                            {
                                // A variable defined inside the loop is used after it, so the loop can't be removed.
                                return false;
                            }
                        }
                        break;

                    case Js::OpCode::Decr_A:
                    case Js::OpCode::Incr_A:
                        if (!IsInductionVariableUse(instr->GetSrc1()))
                        {
                            return false;
                        }
                        break;

                    default:
                        if (instr->IsBranchInstr())
                        {
                            if (IsInductionVariableUse(instr->GetSrc1()) || IsInductionVariableUse(instr->GetSrc2()))
                            {
                                break;
                            }
                        }
                        return false;
                    }
                }
            }
            NEXT_INSTR_IN_BLOCK_EDITING;
        } NEXT_BLOCK_IN_LIST;

        return true;
    }
    return false;
}

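// Driver for the post-memop cleanup: for each loop made empty by the memop transformation,
// restore the live-out induction variables and delete the loop; in all cases, free the
// per-loop memop bookkeeping, which is no longer needed.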
void
BackwardPass::RemoveEmptyLoops()
{
    if (PHASE_OFF(Js::MemOpPhase, this->func))
    {
        return;
    }

    const auto DeleteMemOpInfo = [&](Loop *loop)
    {
        JitArenaAllocator *alloc = this->func->GetTopFunc()->m_fg->alloc;
        if (!loop->memOpInfo)
        {
            return;
        }

        if (loop->memOpInfo->candidates)
        {
            loop->memOpInfo->candidates->Clear();
            JitAdelete(alloc, loop->memOpInfo->candidates);
        }

        if (loop->memOpInfo->inductionVariableChangeInfoMap)
        {
            loop->memOpInfo->inductionVariableChangeInfoMap->Clear();
            JitAdelete(alloc, loop->memOpInfo->inductionVariableChangeInfoMap);
        }

        if (loop->memOpInfo->inductionVariableOpndPerUnrollMap)
        {
            loop->memOpInfo->inductionVariableOpndPerUnrollMap->Clear();
            JitAdelete(alloc, loop->memOpInfo->inductionVariableOpndPerUnrollMap);
        }

        if (loop->memOpInfo->inductionVariablesUsedAfterLoop)
        {
            JitAdelete(this->tempAlloc, loop->memOpInfo->inductionVariablesUsedAfterLoop);
        }

        JitAdelete(alloc, loop->memOpInfo);
    };

    FOREACH_LOOP_IN_FUNC_EDITING(loop, this->func)
    {
        if (IsEmptyLoopAfterMemOp(loop))
        {
            RestoreInductionVariableValuesAfterMemOp(loop);
            RemoveEmptyLoopAfterMemOp(loop);
        }
        // Remove the memop info as we don't need it after this point.
        DeleteMemOpInfo(loop);
    } NEXT_LOOP_IN_FUNC_EDITING;
}

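// Unlinks the now-empty loop from the flow graph: the head and tail blocks are removed and
// the landing pad is wired straight to the block that follows the loop, with an explicit Br
// added so the IR branches stay coherent with the new edge. Roughly:
//
//     landingPad -> head <-> tail
//                     \
//                      -> outerBlock       =>      landingPad -> outerBlock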
void
BackwardPass::RemoveEmptyLoopAfterMemOp(Loop *loop)
{
    BasicBlock *head = loop->GetHeadBlock();
    BasicBlock *tail = head->next;
    BasicBlock *landingPad = loop->landingPad;
    BasicBlock *outerBlock = nullptr;

    SListBaseCounted<FlowEdge *> *succList = head->GetSuccList();
    Assert(succList->HasTwo());

    // Between the two successors of head, one is the tail and the other is the outerBlock
    SListBaseCounted<FlowEdge *>::Iterator iter(succList);
    iter.Next();
    if (iter.Data()->GetSucc() == tail)
    {
        iter.Next();
        outerBlock = iter.Data()->GetSucc();
    }
    else
    {
        outerBlock = iter.Data()->GetSucc();
#if DBG
        iter.Next();
        Assert(iter.Data()->GetSucc() == tail);
#endif
    }

    outerBlock->RemovePred(head, this->func->m_fg);
    landingPad->RemoveSucc(head, this->func->m_fg);
    Assert(landingPad->GetSuccList()->Count() == 0);

    IR::Instr* firstOuterInstr = outerBlock->GetFirstInstr();
    AssertOrFailFast(firstOuterInstr->IsLabelInstr() && !landingPad->GetLastInstr()->EndsBasicBlock());
    IR::LabelInstr* label = firstOuterInstr->AsLabelInstr();

    // Add a Br to the outer block to keep the branch instrs coherent with the flow graph
    IR::BranchInstr *outerBr = IR::BranchInstr::New(Js::OpCode::Br, label, this->func);
    landingPad->InsertAfter(outerBr);
    this->func->m_fg->AddEdge(landingPad, outerBlock);

    this->func->m_fg->RemoveBlock(head, nullptr);

    if (head != tail)
    {
        this->func->m_fg->RemoveBlock(tail, nullptr);
    }
}

#if DBG_DUMP
bool
BackwardPass::IsTraceEnabled() const
{
    return
        Js::Configuration::Global.flags.Trace.IsEnabled(tag, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()) &&
        (PHASE_TRACE(Js::SimpleJitPhase, func) || !func->IsSimpleJit());
}
#endif