| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702
70370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253125412551256125712581259126012611262126312641265126612671268126912701271127212731274127512761
27712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639164016411642164316441645164616471648164916501651165216531654165516561657165816591660166116621663166416651666166716681669167016711672167316741675167616771678167916801681168216831684168516861687168816891690169116921693169416951696169716981699170017011702170317041705170617071708170917101711171217131714171517161717171817191720172117221723172417251726172717281729173017311732173317341735173617371738173917401741174217431744174517461747174817491750175117521753175417551756175717581759176017611762176317641765176617671768176917701771177217731774177517761
77717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762
27722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387238823892390239123922393239423952396239723982399240024012402240324042405240624072408240924102411241224132414241524162417241824192420242124222423242424252426242724282429243024312432243324342435243624372438243924402441244224432444244524462447244824492450245124522453245424552456245724582459246024612462246324642465246624672468246924702471247224732474247524762477247824792480248124822483248424852486248724882489249024912492249324942495249624972498249925002501250225032504250525062507250825092510251125122513251425152516251725182519252025212522252325242525252625272528252925302531253225332534253525362537253825392540254125422543254425452546254725482549255025512552255325542555255625572558255925602561256225632564256525662567256825692570257125722573257425752576257725782579258025812582258325842585258625872588258925902591259225932594259525962597259825992600260126022603260426052606260726082609261026112612261326142615261626172618261926202621262226232624262526262627262826292630263126322633263426352636263726382639264026412642264326442645264626472648264926502651265226532654265526562657265826592660266126622663266426652666266726682669267026712672267326742675267626772678267926802681268226832684268526862687268826892690269126922693269426952696269726982699270027012702270327042705270627072708270927102711271227132714271527162717271827192720272127222723272427252726272727282729273027312732273327342735273627372738273927402741274227432744274527462747274827492750275127522753275427552756275727582759276027612762276327642765276627672768276927702771277227732774277527762
77727782779278027812782278327842785278627872788278927902791279227932794279527962797279827992800280128022803280428052806280728082809281028112812281328142815281628172818281928202821282228232824282528262827282828292830283128322833283428352836283728382839284028412842284328442845284628472848284928502851285228532854285528562857285828592860286128622863286428652866286728682869287028712872287328742875287628772878287928802881288228832884288528862887288828892890289128922893289428952896289728982899290029012902290329042905290629072908290929102911291229132914291529162917291829192920292129222923292429252926292729282929293029312932293329342935293629372938293929402941294229432944294529462947294829492950295129522953295429552956295729582959296029612962296329642965296629672968296929702971297229732974297529762977297829792980298129822983298429852986298729882989299029912992299329942995299629972998299930003001300230033004300530063007300830093010301130123013301430153016301730183019302030213022302330243025302630273028302930303031303230333034303530363037303830393040304130423043304430453046304730483049305030513052305330543055305630573058305930603061306230633064306530663067306830693070307130723073307430753076307730783079308030813082308330843085308630873088308930903091309230933094309530963097309830993100310131023103310431053106310731083109311031113112311331143115311631173118311931203121312231233124312531263127312831293130313131323133313431353136313731383139314031413142314331443145314631473148314931503151315231533154315531563157315831593160316131623163316431653166316731683169317031713172317331743175317631773178317931803181318231833184318531863187318831893190319131923193319431953196319731983199320032013202320332043205320632073208320932103211321232133214321532163217321832193220322132223223322432253226322732283229323032313232323332343235323632373238323932403241324232433244324532463247324832493250325132523253325432553256325732583259326032613262326332643265326632673268326932703271327232733274327532763
27732783279328032813282328332843285328632873288328932903291329232933294329532963297329832993300330133023303330433053306330733083309331033113312331333143315331633173318331933203321332233233324332533263327332833293330333133323333333433353336333733383339334033413342334333443345334633473348334933503351335233533354335533563357335833593360336133623363336433653366336733683369337033713372337333743375337633773378337933803381338233833384338533863387338833893390339133923393339433953396339733983399340034013402340334043405340634073408340934103411341234133414341534163417341834193420342134223423342434253426342734283429343034313432343334343435343634373438343934403441344234433444344534463447344834493450345134523453345434553456345734583459346034613462346334643465346634673468346934703471347234733474347534763477347834793480348134823483348434853486348734883489349034913492349334943495349634973498349935003501350235033504350535063507350835093510351135123513351435153516351735183519352035213522352335243525352635273528352935303531353235333534353535363537353835393540354135423543354435453546354735483549355035513552355335543555355635573558355935603561356235633564356535663567356835693570357135723573357435753576357735783579358035813582358335843585358635873588358935903591359235933594359535963597359835993600360136023603360436053606360736083609361036113612361336143615361636173618361936203621362236233624362536263627362836293630363136323633363436353636363736383639364036413642364336443645364636473648364936503651365236533654365536563657365836593660366136623663366436653666366736683669367036713672367336743675367636773678367936803681368236833684368536863687368836893690369136923693369436953696369736983699370037013702370337043705370637073708370937103711371237133714371537163717371837193720372137223723372437253726372737283729373037313732373337343735373637373738373937403741374237433744374537463747374837493750375137523753375437553756375737583759376037613762376337643765376637673768376937703771377237733774377537763
77737783779378037813782378337843785378637873788378937903791379237933794379537963797379837993800380138023803380438053806380738083809381038113812381338143815381638173818381938203821382238233824382538263827382838293830383138323833383438353836383738383839384038413842384338443845384638473848384938503851385238533854385538563857385838593860386138623863386438653866386738683869387038713872387338743875387638773878387938803881388238833884388538863887388838893890389138923893389438953896389738983899390039013902390339043905390639073908390939103911391239133914391539163917391839193920392139223923392439253926392739283929393039313932393339343935393639373938393939403941394239433944394539463947394839493950395139523953395439553956395739583959396039613962396339643965396639673968396939703971397239733974397539763977397839793980398139823983398439853986398739883989399039913992399339943995399639973998399940004001400240034004400540064007400840094010401140124013401440154016401740184019402040214022402340244025402640274028402940304031403240334034403540364037403840394040404140424043404440454046404740484049405040514052405340544055405640574058405940604061406240634064406540664067406840694070407140724073407440754076407740784079408040814082408340844085408640874088408940904091409240934094409540964097409840994100410141024103410441054106410741084109411041114112411341144115411641174118411941204121412241234124412541264127412841294130413141324133413441354136413741384139414041414142414341444145414641474148414941504151415241534154415541564157415841594160416141624163416441654166416741684169417041714172417341744175417641774178417941804181418241834184418541864187418841894190419141924193419441954196419741984199420042014202420342044205420642074208420942104211421242134214421542164217421842194220422142224223422442254226422742284229423042314232423342344235423642374238423942404241424242434244424542464247424842494250425142524253425442554256425742584259426042614262426342644265426642674268426942704271427242734274427542764
27742784279428042814282428342844285428642874288428942904291429242934294429542964297429842994300430143024303430443054306430743084309431043114312431343144315431643174318431943204321432243234324432543264327432843294330433143324333433443354336433743384339434043414342434343444345434643474348434943504351435243534354435543564357435843594360436143624363436443654366436743684369437043714372437343744375437643774378437943804381438243834384438543864387438843894390439143924393439443954396439743984399440044014402440344044405440644074408440944104411441244134414441544164417441844194420442144224423442444254426442744284429443044314432443344344435443644374438443944404441444244434444444544464447444844494450445144524453445444554456445744584459446044614462446344644465446644674468446944704471447244734474447544764477447844794480448144824483448444854486448744884489449044914492449344944495449644974498449945004501450245034504450545064507450845094510451145124513451445154516451745184519452045214522452345244525452645274528452945304531453245334534453545364537453845394540454145424543454445454546454745484549455045514552455345544555455645574558455945604561456245634564456545664567456845694570457145724573457445754576457745784579458045814582458345844585458645874588458945904591459245934594459545964597459845994600460146024603460446054606460746084609461046114612461346144615461646174618461946204621462246234624462546264627462846294630463146324633463446354636463746384639464046414642464346444645464646474648464946504651465246534654465546564657465846594660466146624663466446654666466746684669467046714672467346744675467646774678467946804681468246834684468546864687468846894690469146924693469446954696469746984699470047014702470347044705470647074708470947104711471247134714471547164717471847194720472147224723472447254726472747284729473047314732473347344735473647374738473947404741474247434744474547464747474847494750475147524753475447554756475747584759476047614762476347644765476647674768476947704771477247734774477547764
77747784779478047814782478347844785478647874788478947904791479247934794479547964797479847994800480148024803480448054806480748084809481048114812481348144815481648174818481948204821482248234824482548264827482848294830483148324833483448354836483748384839484048414842484348444845484648474848484948504851485248534854485548564857485848594860486148624863486448654866486748684869487048714872487348744875487648774878487948804881488248834884488548864887488848894890489148924893489448954896489748984899490049014902490349044905490649074908490949104911491249134914491549164917491849194920492149224923492449254926492749284929493049314932493349344935493649374938493949404941494249434944494549464947494849494950495149524953495449554956495749584959496049614962496349644965496649674968496949704971497249734974497549764977497849794980498149824983498449854986498749884989499049914992499349944995499649974998499950005001500250035004500550065007500850095010501150125013501450155016501750185019502050215022502350245025502650275028502950305031503250335034503550365037503850395040504150425043504450455046504750485049505050515052505350545055505650575058505950605061506250635064506550665067506850695070507150725073507450755076507750785079508050815082508350845085508650875088508950905091509250935094509550965097509850995100510151025103510451055106510751085109511051115112511351145115511651175118511951205121512251235124512551265127512851295130513151325133513451355136513751385139514051415142514351445145514651475148514951505151515251535154515551565157515851595160516151625163516451655166516751685169517051715172517351745175517651775178517951805181518251835184518551865187518851895190519151925193519451955196519751985199520052015202520352045205520652075208520952105211521252135214521552165217521852195220522152225223522452255226522752285229523052315232523352345235523652375238523952405241524252435244524552465247524852495250525152525253525452555256525752585259526052615262526352645265526652675268526952705271527252735274527552765
2775278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368 |
- //-------------------------------------------------------------------------------------------------------
- // Copyright (C) Microsoft. All rights reserved.
- // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
- //-------------------------------------------------------------------------------------------------------
- #include "Backend.h"
- FlowGraph *
- FlowGraph::New(Func * func, JitArenaAllocator * alloc)
- {
- FlowGraph * graph;
- graph = JitAnew(alloc, FlowGraph, func, alloc);
- return graph;
- }
- // When there is an early exit within an EH region,
- // Leave instructions are inserted in the bytecode to jump up the region tree one by one
- // We delete this Leave chain of instructions, and add an edge to Finally
// Removes the basic blocks that make up a Leave chain (one Leave per EH region
// level for an early exit), starting at leaveInstr's target, and returns the
// label the final unconditional Br of the chain jumps to (the real exit label).
// instrPrev (out): set to the instruction after the final Br, so the caller's
// backward iteration resumes from the correct place after the deletions.
IR::LabelInstr * FlowGraph::DeleteLeaveChainBlocks(IR::BranchInstr *leaveInstr, IR::Instr * &instrPrev)
{
    // Cleanup Rest of the Leave chain
    IR::LabelInstr * leaveTarget = leaveInstr->GetTarget();
    Assert(leaveTarget->GetNextBranchOrLabel()->IsBranchInstr());
    IR::BranchInstr *leaveChain = leaveTarget->GetNextBranchOrLabel()->AsBranchInstr();
    IR::LabelInstr * curLabel = leaveTarget->AsLabelInstr();
    // Walk Leave/BrOnException links until we reach the chain-terminating Br,
    // removing each intermediate block as we go.
    while (leaveChain->m_opcode != Js::OpCode::Br)
    {
        Assert(leaveChain->m_opcode == Js::OpCode::Leave || leaveChain->m_opcode == Js::OpCode::BrOnException);
        IR::Instr * nextLabel = leaveChain->GetNextRealInstrOrLabel();
        if (!nextLabel->GetNextRealInstrOrLabel()->IsBranchInstr())
        {
            // For jit loop bodies - we can encounter ProfiledLoopEnd before every early return
            Assert(nextLabel->GetNextRealInstrOrLabel()->m_opcode == Js::OpCode::ProfiledLoopEnd);
            IR::Instr * loopEnd = nextLabel->GetNextRealInstrOrLabel();
            // Skip over consecutive ProfiledLoopEnds until the next branch of the chain.
            while (!loopEnd->GetNextRealInstrOrLabel()->IsBranchInstr())
            {
                Assert(loopEnd->m_opcode == Js::OpCode::ProfiledLoopEnd);
                loopEnd = loopEnd->GetNextRealInstrOrLabel();
            }
            leaveChain = loopEnd->GetNextRealInstrOrLabel()->AsBranchInstr();
        }
        else
        {
            leaveChain = nextLabel->GetNextRealInstrOrLabel()->AsBranchInstr();
        }
        // Delete the block we just walked past; advance to the next link's label.
        BasicBlock *curBlock = curLabel->GetBasicBlock();
        this->RemoveBlock(curBlock);
        curLabel = nextLabel->AsLabelInstr();
    }
    // Resume iteration after the terminating Br; remove the Br's own block too.
    instrPrev = leaveChain->m_next;
    IR::LabelInstr * exitLabel = leaveChain->GetTarget();
    BasicBlock * curBlock = curLabel->GetBasicBlock();
    this->RemoveBlock(curBlock);
    return exitLabel;
}
- bool FlowGraph::Dominates(Region *region1, Region *region2)
- {
- Assert(region1);
- Assert(region2);
- Region *startR1 = region1;
- Region *startR2 = region2;
- int region1Depth = 0;
- while (startR1 != nullptr)
- {
- region1Depth++;
- startR1 = startR1->GetParent();
- }
- int region2Depth = 0;
- while (startR2 != nullptr)
- {
- region2Depth++;
- startR2 = startR2->GetParent();
- }
- return region1Depth > region2Depth;
- }
// Follows the Leave chain starting at leaveInstr's target to find the chain's
// final exit label, then compares that label's region depth against the region
// of the innermost finally on finallyLabelStack (see Dominates: true when the
// exit label's region is the deeper one). Callers use a true result to skip
// early-exit rewriting for this branch.
bool FlowGraph::DoesExitLabelDominate(IR::BranchInstr *leaveInstr)
{
    IR::LabelInstr * leaveTarget = leaveInstr->GetTarget();
    // The target label is followed either by the next branch of the chain or,
    // in jit loop bodies, by a ProfiledLoopEnd.
    Assert(leaveTarget->GetNextRealInstr()->IsBranchInstr() || leaveTarget->GetNextRealInstr()->m_opcode == Js::OpCode::ProfiledLoopEnd);
    IR::BranchInstr *leaveChain = leaveTarget->GetNextBranchOrLabel()->AsBranchInstr();
    // Walk Leave/BrOnException links until the unconditional Br that targets
    // the final exit label. Unlike DeleteLeaveChainBlocks, nothing is removed.
    while (leaveChain->m_opcode != Js::OpCode::Br)
    {
        Assert(leaveChain->m_opcode == Js::OpCode::Leave || leaveChain->m_opcode == Js::OpCode::BrOnException);
        IR::LabelInstr * nextLabel = leaveChain->m_next->AsLabelInstr();
        if (!nextLabel->m_next->IsBranchInstr())
        {
            // For jit loop bodies - we can encounter ProfiledLoopEnd before every early return
            Assert(nextLabel->m_next->m_opcode == Js::OpCode::ProfiledLoopEnd);
            break;
        }
        leaveChain = nextLabel->m_next->AsBranchInstr();
    }
    IR::LabelInstr * exitLabel = leaveChain->GetTarget();
    return Dominates(exitLabel->GetRegion(), finallyLabelStack->Top()->GetRegion());
}
// Determines whether 'leaveInstr' is an early exit out of an EH region that
// needs an explicit edge to the enclosing finally. On a true return,
// exitLabel (out) is set to the chain's real exit target and the Leave-chain
// blocks have been deleted; instrPrev (out) is adjusted so the caller's
// backward walk resumes correctly. Returns false when no rewriting is needed.
bool FlowGraph::CheckIfEarlyExitAndAddEdgeToFinally(IR::BranchInstr *leaveInstr, Region *currentRegion, Region *branchTargetRegion, IR::Instr * &instrPrev, IR::LabelInstr * &exitLabel)
{
    // No finally in scope => nothing to do.
    if (finallyLabelStack->Empty())
    {
        return false;
    }
    if (currentRegion->GetType() == RegionTypeTry)
    {
        if (currentRegion->GetMatchingCatchRegion() == nullptr)
        {
            // try of try-finally
            // Early exit iff the branch does not target this try's own finally
            // (neither the exception nor the non-exception clone).
            bool isEarly =
                (branchTargetRegion != currentRegion->GetMatchingFinallyRegion(false) &&
                branchTargetRegion != currentRegion->GetMatchingFinallyRegion(true));
            if (!isEarly) return false;
            if (DoesExitLabelDominate(leaveInstr)) return false;
            // Cleanup Rest of the Leave chain
            exitLabel = DeleteLeaveChainBlocks(leaveInstr, instrPrev);
            return true;
        }
        // try of try-catch
        IR::BranchInstr *leaveChain = leaveInstr;
        IR::LabelInstr * leaveTarget = leaveChain->GetTarget();
        IR::Instr *target = leaveTarget->GetNextRealInstr();
        if (target->m_opcode == Js::OpCode::Br)
        {
            // Normal exit of the try-catch: just skip past it, no edge needed.
            IR::BranchInstr *tryExit = target->AsBranchInstr();
            instrPrev = tryExit;
            return false;
        }
        if (DoesExitLabelDominate(leaveInstr)) return false;
        // Cleanup Rest of the Leave chain
        exitLabel = DeleteLeaveChainBlocks(leaveInstr, instrPrev);
        return true;
    }
    if (currentRegion->GetType() == RegionTypeCatch)
    {
        // We don't care for early exits in catch blocks, because we bailout anyway
        return false;
    }
    Assert(currentRegion->GetType() == RegionTypeFinally);
    // All Leave's inside Finally region are early exits
    // Since we execute non-excepting Finallys in JIT now, we should convert Leave to Br
    if (DoesExitLabelDominate(leaveInstr)) return false;
    exitLabel = DeleteLeaveChainBlocks(leaveInstr, instrPrev);
    return true;
}
- ///----------------------------------------------------------------------------
- ///
- /// FlowGraph::Build
- ///
- /// Construct flow graph and loop structures for the current state of the function.
- ///
- ///----------------------------------------------------------------------------
void
FlowGraph::Build(void)
{
    Func * func = this->func;

    // Run flow-graph-level peephole cleanup before partitioning into blocks.
    BEGIN_CODEGEN_PHASE(func, Js::FGPeepsPhase);
    this->RunPeeps();
    END_CODEGEN_PHASE(func, Js::FGPeepsPhase);

    // Decide whether EH regions must be assigned during this build (before
    // globopt) and whether try-finally gets the non-exception-path transform.
    bool assignRegionsBeforeGlobopt = this->func->HasTry() && (this->func->DoOptimizeTry() ||
        (this->func->IsSimpleJit() && this->func->hasBailout) ||
        this->func->IsLoopBodyInTryFinally());
    bool createNonExceptionFinally = this->func->HasFinally() && (this->func->DoOptimizeTry() ||
        (this->func->IsSimpleJit() && this->func->hasBailout));

    // We don't optimize fully with SimpleJit. But, when JIT loop body is enabled, we do support
    // bailing out from a simple jitted function to do a full jit of a loop body in the function
    // (BailOnSimpleJitToFullJitLoopBody). For that purpose, we need the flow from try to handler.
    // We also need accurate flow when we are jitting a loop body and have a tryfinally, because we could be inserting BailOutOnEarlyExit
    if (assignRegionsBeforeGlobopt)
    {
        this->catchLabelStack = JitAnew(this->alloc, SList<IR::LabelInstr*>, this->alloc);
    }
    if (this->func->HasFinally() && assignRegionsBeforeGlobopt)
    {
        this->finallyLabelStack = JitAnew(this->alloc, SList<IR::LabelInstr*>, this->alloc);
        this->regToFinallyEndMap = JitAnew(this->alloc, RegionToFinallyEndMapType, this->alloc, 0);
    }

    // State for the backward instruction walk that carves out basic blocks.
    IR::Instr * currLastInstr = nullptr;
    BasicBlock * currBlock = nullptr;
    BasicBlock * nextBlock = nullptr;
    bool hasCall = false;
    FOREACH_INSTR_IN_FUNC_BACKWARD_EDITING(instr, instrPrev, func)
    {
        if (currLastInstr == nullptr || instr->EndsBasicBlock())
        {
            // Start working on a new block.
            // If we're currently processing a block, then wrap it up before beginning a new one.
            if (currLastInstr != nullptr)
            {
                nextBlock = currBlock;
                currBlock = this->AddBlock(instr->m_next, currLastInstr, nextBlock);
                currBlock->hasCall = hasCall;
                hasCall = false;
            }
            currLastInstr = instr;
        }

        if (instr->StartsBasicBlock())
        {
            // Insert a BrOnException after the loop top if we are in a try-catch/try-finally. This is required to
            // model flow from the loop to the catch/finally block for loops that don't have a break condition.
            if (instr->IsLabelInstr() && instr->AsLabelInstr()->m_isLoopTop && instr->m_next->m_opcode != Js::OpCode::BrOnException)
            {
                IR::BranchInstr * brOnException = nullptr;
                IR::Instr *instrNext = instr->m_next;
                if (this->catchLabelStack && !this->catchLabelStack->Empty())
                {
                    brOnException = IR::BranchInstr::New(Js::OpCode::BrOnException, this->catchLabelStack->Top(), instr->m_func);
                    instr->InsertAfter(brOnException);
                }
                if (this->finallyLabelStack && !this->finallyLabelStack->Empty())
                {
                    brOnException = IR::BranchInstr::New(Js::OpCode::BrOnException, this->finallyLabelStack->Top(), instr->m_func);
                    instr->InsertAfter(brOnException);
                }
                if (brOnException)
                {
                    // Re-visit the newly inserted instruction(s) on the next
                    // iteration of the backward walk.
                    instrPrev = instrNext->m_prev;
                    continue;
                }
            }
            // Wrap up the current block and get ready to process a new one.
            nextBlock = currBlock;
            currBlock = this->AddBlock(instr, currLastInstr, nextBlock);
            currBlock->hasCall = hasCall;
            hasCall = false;
            currLastInstr = nullptr;
        }

        switch (instr->m_opcode)
        {
        case Js::OpCode::Catch:
            // Track the label of each catch handler so loop tops inside the try
            // can get a BrOnException edge to it.
            Assert(instr->m_prev->IsLabelInstr());
            if (this->catchLabelStack)
            {
                this->catchLabelStack->Push(instr->m_prev->AsLabelInstr());
            }
            break;

        case Js::OpCode::Finally:
        {
            if (!this->finallyLabelStack)
            {
                break;
            }
            //To enable globopt on functions with try finallys we transform the flowgraph as below :
            // TryFinally L1
            // <try code>
            // Leave L2
            // L2 : Br L3
            // L1 : Finally
            // <finally code>
            // LeaveNull
            // L3 : <code after try finally>
            //
            //to:
            //
            // TryFinally L1
            // <try code>
            // BrOnException L1
            // Leave L2
            // L1 : BailOnException
            // L2 : Finally
            // <finally code>
            // LeaveNull
            // L3: <code after try finally>
            //We generate 2 flow edges from the try - an exception path and a non exception path.
            //This transformation enables us to optimize on the non-excepting finally path
            Assert(instr->m_prev->IsLabelInstr());
            IR::LabelInstr * finallyLabel = instr->m_prev->AsLabelInstr();
            this->finallyLabelStack->Push(finallyLabel);
            if (!createNonExceptionFinally)
            {
                break;
            }
            // Find leave label
            Assert(finallyLabel->m_prev->m_opcode == Js::OpCode::Br && finallyLabel->m_prev->m_prev->m_opcode == Js::OpCode::Label);
            IR::Instr * insertPoint = finallyLabel->m_prev;
            IR::LabelInstr * leaveTarget = finallyLabel->m_prev->m_prev->AsLabelInstr();
            // Swap the leave-target label and the finally label, per the
            // transformation diagram above.
            leaveTarget->Unlink();
            finallyLabel->InsertBefore(leaveTarget);
            finallyLabel->Unlink();
            insertPoint->InsertBefore(finallyLabel);
            // Bailout from the opcode following Finally
            IR::Instr * bailOnException = IR::BailOutInstr::New(Js::OpCode::BailOnException, IR::BailOutOnException, instr->m_next, instr->m_func);
            insertPoint->InsertBefore(bailOnException);
            insertPoint->Remove();

            Assert(leaveTarget->labelRefs.HasOne());
            // Give the try an explicit exception edge to the finally.
            IR::BranchInstr * brOnException = IR::BranchInstr::New(Js::OpCode::BrOnException, finallyLabel, instr->m_func);
            leaveTarget->labelRefs.Head()->InsertBefore(brOnException);
            instrPrev = instr->m_prev;
        }
            break;

        case Js::OpCode::TryCatch:
            // Walking backward: TryCatch closes the scope the Catch opened.
            if (this->catchLabelStack)
            {
                AssertOrFailFast(!this->catchLabelStack->Empty());
                this->catchLabelStack->Pop();
            }
            break;

        case Js::OpCode::TryFinally:
            // Walking backward: TryFinally closes the scope the Finally opened.
            if (this->finallyLabelStack)
            {
                AssertOrFailFast(!this->finallyLabelStack->Empty());
                this->finallyLabelStack->Pop();
            }
            break;

        case Js::OpCode::CloneBlockScope:
        case Js::OpCode::CloneInnerScopeSlots:
            // It would be nice to do this in IRBuilder, but doing so gives us
            // trouble when doing the DoSlotArrayCheck since it assume single def
            // of the sym to do its check properly. So instead we assign the dst
            // here in FlowGraph.
            instr->SetDst(instr->GetSrc1());
            break;
        }

        if (OpCodeAttr::UseAllFields(instr->m_opcode))
        {
            // UseAllFields opcode are call instruction or opcode that would call.
            hasCall = true;
            if (OpCodeAttr::CallInstr(instr->m_opcode))
            {
                if (!instr->isCallInstrProtectedByNoProfileBailout)
                {
                    instr->m_func->SetHasCallsOnSelfAndParents();
                }

                // For ARM, ARM64 & X64 (non x86) because of their register calling convention
                // the ArgOuts need to be moved next to the call.
#if !defined(_M_IX86)
                IR::Instr* argInsertInstr = instr;
                instr->IterateArgInstrs([&](IR::Instr* argInstr)
                {
                    if (argInstr->m_opcode != Js::OpCode::LdSpreadIndices &&
                        argInstr->m_opcode != Js::OpCode::ArgOut_A_Dynamic &&
                        argInstr->m_opcode != Js::OpCode::ArgOut_A_FromStackArgs &&
                        argInstr->m_opcode != Js::OpCode::ArgOut_A_SpreadArg)
                    {
                        // don't have bailout in asm.js so we don't need BytecodeArgOutCapture
                        if (!argInstr->m_func->GetJITFunctionBody()->IsAsmJsMode())
                        {
                            // Need to always generate byte code arg out capture,
                            // because bailout can't restore from the arg out as it is
                            // replaced by new sym for register calling convention in lower
                            argInstr->GenerateBytecodeArgOutCapture();
                        }
                        // Check if the instruction is already next
                        if (argInstr != argInsertInstr->m_prev)
                        {
                            // It is not, move it.
                            argInstr->Move(argInsertInstr);
                        }
                        argInsertInstr = argInstr;
                    }
                    return false;
                });
#endif
            }
        }
    }
    NEXT_INSTR_IN_FUNC_BACKWARD_EDITING;
    this->func->isFlowGraphValid = true;
    Assert(!this->catchLabelStack || this->catchLabelStack->Empty());
    Assert(!this->finallyLabelStack || this->finallyLabelStack->Empty());

    // We've been walking backward so that edge lists would be in the right order. Now walk the blocks
    // forward to number the blocks in lexical order.
    unsigned int blockNum = 0;
    FOREACH_BLOCK(block, this)
    {
        block->SetBlockNum(blockNum++);
    }NEXT_BLOCK;
    AssertMsg(blockNum == this->blockCount, "Block count is out of whack");
- this->RemoveUnreachableBlocks();
- // Regions need to be assigned before Globopt because:
- // 1. FullJit: The Backward Pass will set the write-through symbols on the regions and the forward pass will
- // use this information to insert ToVars for those symbols. Also, for a symbol determined as write-through
- // in the try region to be restored correctly by the bailout, it should not be removed from the
- // byteCodeUpwardExposedUsed upon a def in the try region (the def might be preempted by an exception).
- //
- // 2. SimpleJit: Same case of correct restoration as above applies in SimpleJit too. However, the only bailout
- // we have in Simple Jitted code right now is BailOnSimpleJitToFullJitLoopBody, installed in IRBuilder. So,
- // for now, we can just check if the func has a bailout to assign regions pre globopt while running SimpleJit.
- blockNum = 0;
- FOREACH_BLOCK_ALL(block, this)
- {
- block->SetBlockNum(blockNum++);
- if (assignRegionsBeforeGlobopt)
- {
- if (block->isDeleted && !block->isDead)
- {
- continue;
- }
- this->UpdateRegionForBlock(block);
- }
- } NEXT_BLOCK_ALL;
- AssertMsg(blockNum == this->blockCount, "Block count is out of whack");
- #if DBG_DUMP
- if (PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
- {
- if (assignRegionsBeforeGlobopt)
- {
- Output::Print(_u("Before adding early exit edges\n"));
- FOREACH_BLOCK_ALL(block, this)
- {
- block->DumpHeader(true);
- Region *region = block->GetFirstInstr()->AsLabelInstr()->GetRegion();
- if (region)
- {
- const char16 * regMap[] = { _u("RegionTypeInvalid"),
- _u("RegionTypeRoot"),
- _u("RegionTypeTry"),
- _u("RegionTypeCatch"),
- _u("RegionTypeFinally") };
- Output::Print(_u("Region %p RegionParent %p RegionType %s\n"), region, region->GetParent(), regMap[region->GetType()]);
- }
- } NEXT_BLOCK_ALL;
- this->func->Dump();
- }
- }
- #endif
- if (this->finallyLabelStack)
- {
- Assert(this->finallyLabelStack->Empty());
- if (createNonExceptionFinally)
- {
- // Add s0 definition at the beginning of the function
- // We need this because - s0 symbol can get added to bytcodeUpwardExposed use when there are early returns,
- // And globopt will complain that s0 is uninitialized, because we define it to undefined only at the end of the function
- const auto addrOpnd = IR::AddrOpnd::New(this->func->GetScriptContextInfo()->GetUndefinedAddr(), IR::AddrOpndKindDynamicVar, this->func, true);
- addrOpnd->SetValueType(ValueType::Undefined);
- IR::RegOpnd *regOpnd = IR::RegOpnd::New(this->func->m_symTable->FindStackSym(0), TyVar, this->func);
- IR::Instr *ldRet = IR::Instr::New(Js::OpCode::Ld_A, regOpnd, addrOpnd, this->func);
- this->func->m_headInstr->GetNextRealInstr()->InsertBefore(ldRet);
- }
- IR::LabelInstr * currentLabel = nullptr;
- // look for early exits from a try, and insert bailout
- FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, func)
- {
- if (instr->IsLabelInstr())
- {
- currentLabel = instr->AsLabelInstr();
- }
- else if (instr->m_opcode == Js::OpCode::TryFinally)
- {
- Assert(instr->AsBranchInstr()->GetTarget()->GetRegion()->GetType() == RegionTypeFinally);
- this->finallyLabelStack->Push(instr->AsBranchInstr()->GetTarget());
- }
- else if (instr->m_opcode == Js::OpCode::Leave)
- {
- Assert(currentLabel != nullptr);
- __analysis_assume(currentLabel != nullptr);
- if (createNonExceptionFinally)
- {
- IR::LabelInstr *branchTarget = instr->AsBranchInstr()->GetTarget();
- IR::LabelInstr *exitLabel = nullptr;
- // An early exit (break, continue, return) from an EH region appears as Leave opcode
- // When there is an early exit within a try finally, we have to execute finally code
- // Currently we bailout on early exits
- // For all such edges add edge from eh region -> finally and finally -> earlyexit
- if (CheckIfEarlyExitAndAddEdgeToFinally(instr->AsBranchInstr(), currentLabel->GetRegion(), branchTarget->GetRegion(), instrNext, exitLabel))
- {
- Assert(exitLabel);
- IR::Instr * bailOnEarlyExit = IR::BailOutInstr::New(Js::OpCode::BailOnEarlyExit, IR::BailOutOnEarlyExit, instr, instr->m_func);
- instr->InsertBefore(bailOnEarlyExit);
- IR::LabelInstr *exceptFinallyLabel = this->finallyLabelStack->Top();
- IR::LabelInstr *nonExceptFinallyLabel = exceptFinallyLabel->m_next->m_next->AsLabelInstr();
- // It is possible for the finally region to have a non terminating loop, in which case the end of finally is eliminated
- // We can skip adding edge from finally to early exit in this case
- IR::Instr * leaveToFinally = IR::BranchInstr::New(Js::OpCode::Leave, exceptFinallyLabel, this->func);
- instr->InsertBefore(leaveToFinally);
- instr->Remove();
- this->AddEdge(currentLabel->GetBasicBlock(), exceptFinallyLabel->GetBasicBlock());
- if (this->regToFinallyEndMap->ContainsKey(nonExceptFinallyLabel->GetRegion()))
- {
- BasicBlock * finallyEndBlock = this->regToFinallyEndMap->Item(nonExceptFinallyLabel->GetRegion());
- Assert(finallyEndBlock);
- Assert(finallyEndBlock->GetFirstInstr()->AsLabelInstr()->GetRegion() == nonExceptFinallyLabel->GetRegion());
- InsertEdgeFromFinallyToEarlyExit(finallyEndBlock, exitLabel);
- }
- }
- else if (currentLabel->GetRegion()->GetType() == RegionTypeFinally)
- {
- Assert(currentLabel->GetRegion()->GetMatchingTryRegion()->GetMatchingFinallyRegion(false) == currentLabel->GetRegion());
- // Convert Leave to Br because we execute non-excepting Finally in native code
- instr->m_opcode = Js::OpCode::Br;
- #if DBG
- instr->AsBranchInstr()->m_leaveConvToBr = true;
- #endif
- }
- }
- else
- {
- if (currentLabel->GetRegion()->GetType() == RegionTypeRoot)
- {
- // Found an orphaned leave, should be an early return from the loop body, insert bailout
- Assert(instr->AsBranchInstr()->m_isOrphanedLeave);
- IR::Instr * bailOnEarlyExit = IR::BailOutInstr::New(Js::OpCode::BailOnEarlyExit, IR::BailOutOnEarlyExit, instr, instr->m_func);
- instr->InsertBefore(bailOnEarlyExit);
- }
- else if (currentLabel->GetRegion()->GetType() == RegionTypeCatch)
- {
- if (!this->finallyLabelStack->Empty())
- {
- BasicBlock *currentBlock = currentLabel->GetBasicBlock();
- Assert(currentBlock->GetSuccList()->Count() == 1);
- BasicBlock *succ = currentBlock->GetSuccList()->Head()->GetSucc();
- currentBlock->RemoveSucc(succ, this, true);
- // Add an edge to the finally block
- IR::BranchInstr * brOnException = IR::BranchInstr::New(Js::OpCode::BrOnException, this->finallyLabelStack->Top(), instr->m_func);
- instr->InsertBefore(brOnException);
- IR::LabelInstr * label = IR::LabelInstr::New(Js::OpCode::Label, instr->m_func);
- instr->InsertBefore(label);
- BasicBlock *leaveBlock = this->AddBlock(label, instr, currentBlock->GetNext(), currentBlock);
- // Add edge to finally block, leave block
- this->AddEdge(currentBlock, this->finallyLabelStack->Top()->GetBasicBlock());
- this->AddEdge(currentBlock, leaveBlock);
- }
- }
- }
- }
- else if (instr->m_opcode == Js::OpCode::Finally)
- {
- AssertOrFailFast(!this->finallyLabelStack->Empty());
- this->finallyLabelStack->Pop();
- }
- }
- NEXT_INSTR_IN_FUNC_EDITING;
- this->RemoveUnreachableBlocks();
- blockNum = 0;
- FOREACH_BLOCK_ALL(block, this)
- {
- block->SetBlockNum(blockNum++);
- } NEXT_BLOCK_ALL;
- }
-
- this->FindLoops();
- #if DBG_DUMP
- if (PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
- {
- if (assignRegionsBeforeGlobopt)
- {
- Output::Print(_u("After adding early exit edges/Before CanonicalizeLoops\n"));
- FOREACH_BLOCK_ALL(block, this)
- {
- block->DumpHeader(true);
- Region *region = block->GetFirstInstr()->AsLabelInstr()->GetRegion();
- if (region)
- {
- const char16 * regMap[] = { _u("RegionTypeInvalid"),
- _u("RegionTypeRoot"),
- _u("RegionTypeTry"),
- _u("RegionTypeCatch"),
- _u("RegionTypeFinally") };
- Output::Print(_u("Region %p RegionParent %p RegionType %s\n"), region, region->GetParent(), regMap[region->GetType()]);
- }
- } NEXT_BLOCK_ALL;
- this->func->Dump();
- }
- }
- #endif
- bool breakBlocksRelocated = this->CanonicalizeLoops();
- blockNum = 0;
- FOREACH_BLOCK_ALL(block, this)
- {
- block->SetBlockNum(blockNum++);
- } NEXT_BLOCK_ALL;
- #if DBG
- FOREACH_BLOCK_ALL(block, this)
- {
- if (assignRegionsBeforeGlobopt)
- {
- if (block->GetFirstInstr()->AsLabelInstr()->GetRegion())
- {
- Assert(block->GetFirstInstr()->AsLabelInstr()->GetRegion()->ehBailoutData);
- }
- }
- } NEXT_BLOCK_ALL;
- #endif
- #if DBG_DUMP
- if (PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
- {
- if (assignRegionsBeforeGlobopt)
- {
- Output::Print(_u("After CanonicalizeLoops\n"));
- FOREACH_BLOCK_ALL(block, this)
- {
- block->DumpHeader(true);
- Region *region = block->GetFirstInstr()->AsLabelInstr()->GetRegion();
- if (region)
- {
- const char16 * regMap[] = { _u("RegionTypeInvalid"),
- _u("RegionTypeRoot"),
- _u("RegionTypeTry"),
- _u("RegionTypeCatch"),
- _u("RegionTypeFinally") };
- Output::Print(_u("Region %p RegionParent %p RegionType %s\n"), region, region->GetParent(), regMap[region->GetType()]);
- }
- } NEXT_BLOCK_ALL;
- this->func->Dump();
- }
- }
- #endif
- if (breakBlocksRelocated)
- {
- // Sort loop lists only if there is break block removal.
- SortLoopLists();
- }
- #if DBG
- this->VerifyLoopGraph();
- #endif
- #if DBG_DUMP
- this->Dump(false, nullptr);
- #endif
- }
// Split the terminating block of a finally region so that, besides falling
// through to its Leave/LeaveNull, control can also branch (BrOnException) to an
// early-exit target (break/continue/return that leaves the try-finally).
// Also records the new tail block in regToFinallyEndMap so later early exits
// from the same finally split the updated tail.
void FlowGraph::InsertEdgeFromFinallyToEarlyExit(BasicBlock *finallyEndBlock, IR::LabelInstr *exitLabel)
{
    IR::Instr * lastInstr = finallyEndBlock->GetLastInstr();
    IR::LabelInstr * lastLabel = finallyEndBlock->GetFirstInstr()->AsLabelInstr();
    // The finally region can only terminate in one of these three ways.
    Assert(lastInstr->m_opcode == Js::OpCode::LeaveNull || lastInstr->m_opcode == Js::OpCode::Leave || lastInstr->m_opcode == Js::OpCode::BrOnException);
    // Add a new block, add BrOnException to earlyexit, assign region
    // Finally
    // ...
    // L1:
    // LeaveNull
    // to
    // Finally
    // ...
    // L1:
    // BrOnException earlyExitLabel
    // L1':
    // LeaveNull
    BasicBlock *nextBB = finallyEndBlock->GetNext();
    // Carve the trailing Leave/LeaveNull off into its own block (leaveLabel..lastInstr).
    IR::LabelInstr *leaveLabel = IR::LabelInstr::New(Js::OpCode::Label, this->func);
    lastInstr->InsertBefore(leaveLabel);
    this->AddBlock(leaveLabel, lastInstr, nextBB, finallyEndBlock /*prevBlock*/);
    // The new block stays in the same (finally) region as the old tail.
    leaveLabel->SetRegion(lastLabel->GetRegion());
    this->AddEdge(finallyEndBlock, leaveLabel->GetBasicBlock());
    // If the Leave/LeaveNull at the end of finally was not preceeded by a Label, we have to create a new block with BrOnException to early exit
    // NOTE(review): leaveLabel was just inserted directly before lastInstr, so
    // lastInstr->GetPrevRealInstrOrLabel() looks like it always returns a label,
    // which would make this branch unreachable — confirm intent upstream.
    if (!lastInstr->GetPrevRealInstrOrLabel()->IsLabelInstr())
    {
        IR::LabelInstr *brLabel = IR::LabelInstr::New(Js::OpCode::Label, this->func);
        leaveLabel->InsertBefore(brLabel);
        IR::BranchInstr *brToExit = IR::BranchInstr::New(Js::OpCode::BrOnException, exitLabel, this->func);
        brToExit->m_brFinallyToEarlyExit = true;
        brToExit->SetByteCodeOffset(lastInstr);
        leaveLabel->InsertBefore(brToExit);
        // New middle block (brLabel..brToExit) holds the early-exit branch.
        this->AddBlock(brLabel, brToExit, finallyEndBlock->GetNext(), finallyEndBlock /*prevBlock*/);
        brLabel->SetRegion(lastLabel->GetRegion());
        this->AddEdge(finallyEndBlock, brLabel->GetBasicBlock());
    }
    else
    {
        // If the Leave/LeaveNull at the end of finally was preceeded by a Label, we reuse the block inserting BrOnException to early exit in it
        IR::BranchInstr *brToExit = IR::BranchInstr::New(Js::OpCode::BrOnException, exitLabel, this->func);
        brToExit->SetByteCodeOffset(lastInstr);
        brToExit->m_brFinallyToEarlyExit = true;
        leaveLabel->InsertBefore(brToExit);
        this->AddEdge(finallyEndBlock, exitLabel->GetBasicBlock());
    }
    // In case of throw/non-terminating loop, there maybe no edge to the next block
    if (this->FindEdge(finallyEndBlock, nextBB))
    {
        finallyEndBlock->RemoveSucc(nextBB, this);
    }
    // The finally region's "end" is now the freshly split leave block.
    this->regToFinallyEndMap->Item(lastLabel->GetRegion(), leaveLabel->GetBasicBlock());
}
// Restore ascending block-number order in every loop's block list after break
// block relocation may have shuffled blocks. Uses insertion sort because each
// list is expected to be almost sorted already.
void
FlowGraph::SortLoopLists()
{
    // Sort the blocks in loopList
    for (Loop *loop = this->loopList; loop; loop = loop->next)
    {
        unsigned int lastBlockNumber = loop->GetHeadBlock()->GetBlockNum();
        // Insertion sort as the blockList is almost sorted in the loop.
        FOREACH_BLOCK_IN_LOOP_EDITING(block, loop, iter)
        {
            if (lastBlockNumber <= block->GetBlockNum())
            {
                // Already in order relative to the previous block; advance.
                lastBlockNumber = block->GetBlockNum();
            }
            else
            {
                // Out of order: unlink and re-insert before the first block
                // with a larger number. newIter is declared by the macro and
                // remains valid after the inner loop breaks/ends.
                iter.UnlinkCurrent();
                FOREACH_BLOCK_IN_LOOP_EDITING(insertBlock,loop,newIter)
                {
                    if (insertBlock->GetBlockNum() > block->GetBlockNum())
                    {
                        break;
                    }
                }NEXT_BLOCK_IN_LOOP_EDITING;
                newIter.InsertBefore(block);
            }
        }NEXT_BLOCK_IN_LOOP_EDITING;
    }
}
// Run simple peephole optimizations over the function's IR while the flow
// graph is being built: branch-to-branch collapsing, Cm+BrTrue/BrFalse fusion,
// unreachable-label removal, and an unsigned-compare rewrite enabled by a
// single-def "x >>> 0" pattern. Skipped for functions with EH (HasTry) since
// branch rewrites could break region structure, and when the phase is off.
void
FlowGraph::RunPeeps()
{
    if (this->func->HasTry())
    {
        return;
    }
    if (PHASE_OFF(Js::FGPeepsPhase, this->func))
    {
        return;
    }
    // Most recent Cm* instruction seen; the peep fires only when the consuming
    // BrTrue/BrFalse shows up (useless branches may sit between the two).
    IR::Instr * instrCm = nullptr;
    // Becomes true once a single-def ShrU by constant 0 is seen; from then on
    // compares/branches are candidates for the unsigned-compare peep.
    bool tryUnsignedCmpPeep = false;
    FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, this->func)
    {
        switch(instr->m_opcode)
        {
        case Js::OpCode::Br:
        case Js::OpCode::BrEq_I4:
        case Js::OpCode::BrGe_I4:
        case Js::OpCode::BrGt_I4:
        case Js::OpCode::BrLt_I4:
        case Js::OpCode::BrLe_I4:
        case Js::OpCode::BrUnGe_I4:
        case Js::OpCode::BrUnGt_I4:
        case Js::OpCode::BrUnLt_I4:
        case Js::OpCode::BrUnLe_I4:
        case Js::OpCode::BrNeq_I4:
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrGe_A:
        case Js::OpCode::BrGt_A:
        case Js::OpCode::BrLt_A:
        case Js::OpCode::BrLe_A:
        case Js::OpCode::BrUnGe_A:
        case Js::OpCode::BrUnGt_A:
        case Js::OpCode::BrUnLt_A:
        case Js::OpCode::BrUnLe_A:
        case Js::OpCode::BrNotEq_A:
        case Js::OpCode::BrNotNeq_A:
        case Js::OpCode::BrSrNotEq_A:
        case Js::OpCode::BrSrNotNeq_A:
        case Js::OpCode::BrNotGe_A:
        case Js::OpCode::BrNotGt_A:
        case Js::OpCode::BrNotLt_A:
        case Js::OpCode::BrNotLe_A:
        case Js::OpCode::BrNeq_A:
        case Js::OpCode::BrNotNull_A:
        case Js::OpCode::BrNotAddr_A:
        case Js::OpCode::BrAddr_A:
        case Js::OpCode::BrSrEq_A:
        case Js::OpCode::BrSrNeq_A:
        case Js::OpCode::BrOnHasProperty:
        case Js::OpCode::BrOnNoProperty:
        case Js::OpCode::BrHasSideEffects:
        case Js::OpCode::BrNotHasSideEffects:
        case Js::OpCode::BrFncEqApply:
        case Js::OpCode::BrFncNeqApply:
        case Js::OpCode::BrOnEmpty:
        case Js::OpCode::BrOnNotEmpty:
        case Js::OpCode::BrFncCachedScopeEq:
        case Js::OpCode::BrFncCachedScopeNeq:
        case Js::OpCode::BrOnObject_A:
        case Js::OpCode::BrOnClassConstructor:
        case Js::OpCode::BrOnBaseConstructorKind:
            // Conditional/unconditional branches: optionally rewrite to the
            // unsigned form, then collapse branch chains.
            if (tryUnsignedCmpPeep)
            {
                this->UnsignedCmpPeep(instr);
            }
            instrNext = Peeps::PeepBranch(instr->AsBranchInstr());
            break;
        case Js::OpCode::MultiBr:
            // TODO: Run peeps on these as well...
            break;
        case Js::OpCode::BrTrue_I4:
        case Js::OpCode::BrFalse_I4:
        case Js::OpCode::BrTrue_A:
        case Js::OpCode::BrFalse_A:
            if (instrCm)
            {
                // This branch consumes the result of the pending compare:
                // fuse the Cm + Br pair.
                if (instrCm->GetDst()->IsInt32())
                {
                    Assert(instr->m_opcode == Js::OpCode::BrTrue_I4 || instr->m_opcode == Js::OpCode::BrFalse_I4);
                    instrNext = this->PeepTypedCm(instrCm);
                }
                else
                {
                    instrNext = this->PeepCm(instrCm);
                }
                instrCm = nullptr;
                if (instrNext == nullptr)
                {
                    // Set instrNext back to the current instr.
                    instrNext = instr;
                }
            }
            else
            {
                instrNext = Peeps::PeepBranch(instr->AsBranchInstr());
            }
            break;
        case Js::OpCode::CmEq_I4:
        case Js::OpCode::CmGe_I4:
        case Js::OpCode::CmGt_I4:
        case Js::OpCode::CmLt_I4:
        case Js::OpCode::CmLe_I4:
        case Js::OpCode::CmNeq_I4:
        case Js::OpCode::CmEq_A:
        case Js::OpCode::CmGe_A:
        case Js::OpCode::CmGt_A:
        case Js::OpCode::CmLt_A:
        case Js::OpCode::CmLe_A:
        case Js::OpCode::CmNeq_A:
        case Js::OpCode::CmSrEq_A:
        case Js::OpCode::CmSrNeq_A:
            // Signed/typed compares may first be rewritten to unsigned form...
            if (tryUnsignedCmpPeep)
            {
                this->UnsignedCmpPeep(instr);
            }
            // ...then fall through (intentional): all Cm* opcodes record the
            // compare so the next BrTrue/BrFalse can fuse with it.
        case Js::OpCode::CmUnGe_I4:
        case Js::OpCode::CmUnGt_I4:
        case Js::OpCode::CmUnLt_I4:
        case Js::OpCode::CmUnLe_I4:
        case Js::OpCode::CmUnGe_A:
        case Js::OpCode::CmUnGt_A:
        case Js::OpCode::CmUnLt_A:
        case Js::OpCode::CmUnLe_A:
            // There may be useless branches between the Cm instr and the branch that uses the result.
            // So save the last Cm instr seen, and trigger the peep on the next BrTrue/BrFalse.
            instrCm = instr;
            break;
        case Js::OpCode::Label:
            if (instr->AsLabelInstr()->IsUnreferenced())
            {
                // Dead label: remove it (and possibly fallthrough-dead code).
                instrNext = Peeps::PeepUnreachableLabel(instr->AsLabelInstr(), false);
            }
            break;
        case Js::OpCode::StatementBoundary:
            // Re-anchor the boundary's offset to the instruction that follows it.
            instr->ClearByteCodeOffset();
            instr->SetByteCodeOffset(instr->GetNextRealInstrOrLabel());
            break;
        case Js::OpCode::ShrU_I4:
        case Js::OpCode::ShrU_A:
            if (tryUnsignedCmpPeep)
            {
                break;
            }
            // "x >>> 0" with a single-def dst marks the value as used unsigned;
            // enable the unsigned-compare peep for the rest of the walk.
            if (instr->GetDst()->AsRegOpnd()->m_sym->IsSingleDef()
                && instr->GetSrc2()->IsRegOpnd() && instr->GetSrc2()->AsRegOpnd()->m_sym->IsTaggableIntConst()
                && instr->GetSrc2()->AsRegOpnd()->m_sym->GetIntConstValue() == 0)
            {
                tryUnsignedCmpPeep = true;
            }
            break;
        default:
            // All branch opcodes must be handled above.
            Assert(!instr->IsBranchInstr());
        }
    } NEXT_INSTR_IN_FUNC_EDITING;
}
// Create a landing pad (pre-header) block immediately before this loop's head
// block and redirect all non-back-edge predecessors to it. Globopt relies on
// the landing pad as a safe spot to hoist loop-invariant code into.
void
Loop::InsertLandingPad(FlowGraph *fg)
{
    BasicBlock *headBlock = this->GetHeadBlock();
    // Always create a landing pad. This allows globopt to easily hoist instructions
    // and re-optimize the block if needed.
    BasicBlock *landingPad = BasicBlock::New(fg);
    this->landingPad = landingPad;
    IR::Instr * headInstr = headBlock->GetFirstInstr();
    IR::LabelInstr *landingPadLabel = IR::LabelInstr::New(Js::OpCode::Label, headInstr->m_func);
    landingPadLabel->SetByteCodeOffset(headInstr);
    headInstr->InsertBefore(landingPadLabel);
    landingPadLabel->SetBasicBlock(landingPad);
    // The pad inherits the head block's EH region and non-branch-ref flag.
    landingPadLabel->SetRegion(headBlock->GetFirstInstr()->AsLabelInstr()->GetRegion());
    landingPadLabel->m_hasNonBranchRef = headBlock->GetFirstInstr()->AsLabelInstr()->m_hasNonBranchRef;
    landingPad->SetBlockNum(fg->blockCount++);
    // The landing pad consists solely of its label.
    landingPad->SetFirstInstr(landingPadLabel);
    landingPad->SetLastInstr(landingPadLabel);
    // Splice the pad into the lexical block chain right before the head block.
    landingPad->prev = headBlock->prev;
    landingPad->prev->next = landingPad;
    landingPad->next = headBlock;
    headBlock->prev = landingPad;
    // The pad belongs to the enclosing loop, not this one.
    Loop *parentLoop = this->parent;
    landingPad->loop = parentLoop;
    // We need to add this block to the block list of the parent loops
    while (parentLoop)
    {
        // Find the head block in the block list of the parent loop
        FOREACH_BLOCK_IN_LOOP_EDITING(block, parentLoop, iter)
        {
            if (block == headBlock)
            {
                // Add the landing pad to the block list
                iter.InsertBefore(landingPad);
                break;
            }
        } NEXT_BLOCK_IN_LOOP_EDITING;
        parentLoop = parentLoop->parent;
    }
    // Fix predecessor flow edges
    FOREACH_PREDECESSOR_EDGE_EDITING(edge, headBlock, iter)
    {
        // Make sure it isn't a back-edge
        if (edge->GetPred()->loop != this && !this->IsDescendentOrSelf(edge->GetPred()->loop))
        {
            // Retarget the predecessor's branch from the head label to the pad.
            if (edge->GetPred()->GetLastInstr()->IsBranchInstr() && headBlock->GetFirstInstr()->IsLabelInstr())
            {
                IR::BranchInstr *branch = edge->GetPred()->GetLastInstr()->AsBranchInstr();
                branch->ReplaceTarget(headBlock->GetFirstInstr()->AsLabelInstr(), landingPadLabel);
            }
            // Move the edge itself: pred -> landingPad instead of pred -> head.
            headBlock->UnlinkPred(edge->GetPred(), false);
            landingPad->AddPred(edge, fg);
            edge->SetSucc(landingPad);
        }
    } NEXT_PREDECESSOR_EDGE_EDITING;
    fg->AddEdge(landingPad, headBlock);
    // Inside a non-root EH region the pad label must be kept alive even if no
    // branch references it.
    if (headBlock->GetFirstInstr()->AsLabelInstr()->GetRegion() && headBlock->GetFirstInstr()->AsLabelInstr()->GetRegion()->GetType() != RegionTypeRoot)
    {
        landingPadLabel->m_hasNonBranchRef = true;
    }
}
// Relocate "break blocks" — blocks lexically inside this loop's range that do
// not belong to the loop (or a nested loop) — to just after the loop's tail.
// Returns true if at least one break block sequence was moved, so callers know
// the loop lists must be re-sorted.
bool
Loop::RemoveBreakBlocks(FlowGraph *fg)
{
    bool breakBlockRelocated = false;
    if (PHASE_OFF(Js::RemoveBreakBlockPhase, fg->GetFunc()))
    {
        return false;
    }
    // Find the lexically last block of the loop.
    BasicBlock *loopTailBlock = nullptr;
    FOREACH_BLOCK_IN_LOOP(block, this)
    {
        loopTailBlock = block;
    }NEXT_BLOCK_IN_LOOP;
    AnalysisAssert(loopTailBlock);
    FOREACH_BLOCK_BACKWARD_IN_RANGE_EDITING(breakBlockEnd, loopTailBlock, this->GetHeadBlock(), blockPrev)
    {
        // A block whose loop is not this loop (nor a descendant) is part of a
        // break block sequence.
        while (!this->IsDescendentOrSelf(breakBlockEnd->loop))
        {
            // Found at least one break block;
            breakBlockRelocated = true;
#if DBG
            breakBlockEnd->isBreakBlock = true;
#endif
            // Find the first block in this break block sequence.
            BasicBlock *breakBlockStart = breakBlockEnd;
            BasicBlock *breakBlockStartPrev = breakBlockEnd->GetPrev();
            // Walk back the blocks until we find a block which belongs to that block.
            // Note: We don't really care if there are break blocks corresponding to different loops. We move the blocks conservatively to the end of the loop.
            // Algorithm works on one loop at a time.
            while((breakBlockStartPrev->loop == breakBlockEnd->loop) || !this->IsDescendentOrSelf(breakBlockStartPrev->loop))
            {
                breakBlockStart = breakBlockStartPrev;
                breakBlockStartPrev = breakBlockStartPrev->GetPrev();
            }
#if DBG
            breakBlockStart->isBreakBlock = true; // Mark the first block as well.
#endif
            BasicBlock *exitLoopTail = loopTailBlock;
            // Move these break blocks to the tail of the loop.
            fg->MoveBlocksBefore(breakBlockStart, breakBlockEnd, exitLoopTail->next);
#if DBG_DUMP
            fg->Dump(true /*needs verbose flag*/, _u("\n After Each iteration of canonicalization \n"));
#endif
            // Again be conservative, there are edits to the loop graph. Start fresh for this loop.
            breakBlockEnd = loopTailBlock;
            blockPrev = breakBlockEnd->prev;
        }
    } NEXT_BLOCK_BACKWARD_IN_RANGE_EDITING;
    return breakBlockRelocated;
}
// Move the lexical block range [blockStart, blockEnd] so it sits immediately
// before insertBlock, fixing block linkage, instruction linkage, fallthrough
// (via compensation blocks) at both the source and destination seams, and —
// when EH regions are assigned pre-globopt — region info at the seams.
void
FlowGraph::MoveBlocksBefore(BasicBlock *blockStart, BasicBlock *blockEnd, BasicBlock *insertBlock)
{
    BasicBlock *srcPredBlock = blockStart->prev;
    BasicBlock *srcNextBlock = blockEnd->next;
    BasicBlock *dstPredBlock = insertBlock->prev;
    IR::Instr* dstPredBlockLastInstr = dstPredBlock->GetLastInstr();
    IR::Instr* blockEndLastInstr = blockEnd->GetLastInstr();
    // Fix block linkage
    srcPredBlock->next = srcNextBlock;
    srcNextBlock->prev = srcPredBlock;
    dstPredBlock->next = blockStart;
    insertBlock->prev = blockEnd;
    blockStart->prev = dstPredBlock;
    blockEnd->next = insertBlock;
    // Fix instruction linkage
    IR::Instr::MoveRangeAfter(blockStart->GetFirstInstr(), blockEndLastInstr, dstPredBlockLastInstr);
    // Fix instruction flow
    IR::Instr *srcLastInstr = srcPredBlock->GetLastInstr();
    if (srcLastInstr->IsBranchInstr() && srcLastInstr->AsBranchInstr()->HasFallThrough())
    {
        // There was a fallthrough in the break blocks original position.
        IR::BranchInstr *srcBranch = srcLastInstr->AsBranchInstr();
        IR::Instr *srcBranchNextInstr = srcBranch->GetNextRealInstrOrLabel();
        // Save the target and invert the branch.
        IR::LabelInstr *srcBranchTarget = srcBranch->GetTarget();
        srcPredBlock->InvertBranch(srcBranch);
        IR::LabelInstr *srcLabel = blockStart->GetFirstInstr()->AsLabelInstr();
        // Point the inverted branch to break block.
        srcBranch->SetTarget(srcLabel);
        if (srcBranchNextInstr != srcBranchTarget)
        {
            // The original taken path needs an explicit jump now: add a
            // compensation block on the old taken edge.
            FlowEdge *srcEdge = this->FindEdge(srcPredBlock, srcBranchTarget->GetBasicBlock());
            Assert(srcEdge);
            BasicBlock *compensationBlock = this->InsertCompensationCodeForBlockMove(srcEdge, true /*insert compensation block to loop list*/, false /*At source*/);
            Assert(compensationBlock);
        }
    }
    IR::Instr *dstLastInstr = dstPredBlockLastInstr;
    // Same predicate used elsewhere to decide whether EH regions are assigned
    // before globopt; if so, region info must be recomputed at the seams below.
    bool assignRegionsBeforeGlobopt = this->func->HasTry() && (this->func->DoOptimizeTry() ||
        (this->func->IsSimpleJit() && this->func->hasBailout) ||
        this->func->IsLoopBodyInTryFinally());
    if (dstLastInstr->IsBranchInstr() && dstLastInstr->AsBranchInstr()->HasFallThrough())
    {
        //There is a fallthrough in the block after which break block is inserted.
        FlowEdge *dstEdge = this->FindEdge(dstPredBlock, blockEnd->GetNext());
        Assert(dstEdge);
        BasicBlock *compensationBlock = this->InsertCompensationCodeForBlockMove(dstEdge, true /*insert compensation block to loop list*/, true /*At sink*/);
        Assert(compensationBlock);
    }
    // We have to update region info for blocks whose predecessors changed
    if (assignRegionsBeforeGlobopt)
    {
        UpdateRegionForBlockFromEHPred(dstPredBlock, true);
        UpdateRegionForBlockFromEHPred(blockStart, true);
        UpdateRegionForBlockFromEHPred(srcNextBlock, true);
    }
}
- FlowEdge *
- FlowGraph::FindEdge(BasicBlock *predBlock, BasicBlock *succBlock)
- {
- FlowEdge *srcEdge = nullptr;
- FOREACH_SUCCESSOR_EDGE(edge, predBlock)
- {
- if (edge->GetSucc() == succBlock)
- {
- srcEdge = edge;
- break;
- }
- } NEXT_SUCCESSOR_EDGE;
- return srcEdge;
- }
// Invert the conditional branch that terminates this block.
// Requires: branch is this block's last instruction and the block has exactly
// two successors (taken + fallthrough). The successor list is reversed so its
// order stays consistent with the inverted branch condition.
void
BasicBlock::InvertBranch(IR::BranchInstr *branch)
{
    Assert(this->GetLastInstr() == branch);
    Assert(this->GetSuccList()->HasTwo());
    branch->Invert();
    this->GetSuccList()->Reverse();
}
- bool
- FlowGraph::CanonicalizeLoops()
- {
- if (this->func->HasProfileInfo())
- {
- this->implicitCallFlags = this->func->GetReadOnlyProfileInfo()->GetImplicitCallFlags();
- for (Loop *loop = this->loopList; loop; loop = loop->next)
- {
- this->implicitCallFlags = (Js::ImplicitCallFlags)(this->implicitCallFlags | loop->GetImplicitCallFlags());
- }
- }
- #if DBG_DUMP
- this->Dump(true, _u("\n Before canonicalizeLoops \n"));
- #endif
- bool breakBlockRelocated = false;
- for (Loop *loop = this->loopList; loop; loop = loop->next)
- {
- loop->InsertLandingPad(this);
- if (!this->func->HasTry() || this->func->DoOptimizeTry())
- {
- bool relocated = loop->RemoveBreakBlocks(this);
- if (!breakBlockRelocated && relocated)
- {
- breakBlockRelocated = true;
- }
- }
- }
- #if DBG_DUMP
- this->Dump(true, _u("\n After canonicalizeLoops \n"));
- #endif
- return breakBlockRelocated;
- }
// Find the loops in this function, build the loop structure, and build a linked
// list of the basic blocks in this loop (including blocks of inner loops). The
// list preserves the reverse post-order of the blocks in the flowgraph block list.
void
FlowGraph::FindLoops()
{
    if (!this->hasLoop)
    {
        // No loop-top labels were seen while building the graph; nothing to do.
        return;
    }
    Func * func = this->func;
    // Walk the blocks bottom-up so inner loops are built before the outer
    // loops that enclose them.
    FOREACH_BLOCK_BACKWARD_IN_FUNC(block, func)
    {
        if (block->loop != nullptr)
        {
            // Block already visited
            continue;
        }
        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            // A successor marked as a loop header with no loop yet means this
            // edge is a back-edge into an unbuilt loop.
            if (succ->isLoopHeader && succ->loop == nullptr)
            {
                // Found a loop back-edge
                BuildLoop(succ, block);
            }
        } NEXT_SUCCESSOR_BLOCK;
        if (block->isLoopHeader && block->loop == nullptr)
        {
            // We would have built a loop for it if it was a loop...
            // Clear the stale marker left by IRBuilder.
            block->isLoopHeader = false;
            block->GetFirstInstr()->AsLabelInstr()->m_isLoopTop = false;
        }
    } NEXT_BLOCK_BACKWARD_IN_FUNC;
}
// Build the Loop structure for the loop headed by headBlock with a back-edge
// from tailBlock. parentLoop (optional) is a candidate enclosing loop.
// Recurses (via WalkLoopBlocks) into inner loops.
void
FlowGraph::BuildLoop(BasicBlock *headBlock, BasicBlock *tailBlock, Loop *parentLoop)
{
    // This function is recursive, so when jitting in the foreground, probe the stack
    if(!func->IsBackgroundJIT())
    {
        PROBE_STACK(func->GetScriptContext(), Js::Constants::MinStackDefault);
    }
    if (tailBlock->number < headBlock->number)
    {
        // Not a loop. We didn't see any back-edge.
        headBlock->isLoopHeader = false;
        headBlock->GetFirstInstr()->AsLabelInstr()->m_isLoopTop = false;
        return;
    }
    Assert(headBlock->isLoopHeader);
    Loop *loop = JitAnewZ(this->GetFunc()->m_alloc, Loop, this->GetFunc()->m_alloc, this->GetFunc());
    // Prepend to the flow graph's loop list.
    loop->next = this->loopList;
    this->loopList = loop;
    headBlock->loop = loop;
    loop->headBlock = headBlock;
    loop->int32SymsOnEntry = nullptr;
    loop->lossyInt32SymsOnEntry = nullptr;
    // If parentLoop is a parent of loop, it's headBlock better appear first.
    if (parentLoop && loop->headBlock->number > parentLoop->headBlock->number)
    {
        loop->parent = parentLoop;
        parentLoop->isLeaf = false;
    }
    loop->hasDeadStoreCollectionPass = false;
    loop->hasDeadStorePrepass = false;
    loop->memOpInfo = nullptr;
    loop->doMemOp = true;
    // Temporary arena for the block bit-vector used during the walk.
    NoRecoverMemoryJitArenaAllocator tempAlloc(_u("BE-LoopBuilder"), this->func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    // Collect the loop's blocks by walking backward from the back-edge source.
    WalkLoopBlocks(tailBlock, loop, &tempAlloc);
    Assert(loop->GetHeadBlock() == headBlock);
    IR::LabelInstr * firstInstr = loop->GetLoopTopInstr();
    firstInstr->SetLoop(loop);
    if (firstInstr->IsProfiledLabelInstr())
    {
        loop->SetImplicitCallFlags(firstInstr->AsProfiledLabelInstr()->loopImplicitCallFlags);
        if (this->func->HasProfileInfo() && this->func->GetReadOnlyProfileInfo()->IsLoopImplicitCallInfoDisabled())
        {
            // Per-loop implicit-call info is unreliable; fall back to the
            // function-level flags.
            loop->SetImplicitCallFlags(this->func->GetReadOnlyProfileInfo()->GetImplicitCallFlags());
        }
        loop->SetLoopFlags(firstInstr->AsProfiledLabelInstr()->loopFlags);
    }
    else
    {
        // Didn't collect profile information, don't do optimizations
        loop->SetImplicitCallFlags(Js::ImplicitCall_All);
    }
}
- Loop::MemCopyCandidate* Loop::MemOpCandidate::AsMemCopy()
- {
- Assert(this->IsMemCopy());
- return (Loop::MemCopyCandidate*)this;
- }
- Loop::MemSetCandidate* Loop::MemOpCandidate::AsMemSet()
- {
- Assert(this->IsMemSet());
- return (Loop::MemSetCandidate*)this;
- }
- void
- Loop::EnsureMemOpVariablesInitialized()
- {
- Assert(this->doMemOp);
- if (this->memOpInfo == nullptr)
- {
- JitArenaAllocator *allocator = this->GetFunc()->GetTopFunc()->m_fg->alloc;
- this->memOpInfo = JitAnewStruct(allocator, Loop::MemOpInfo);
- this->memOpInfo->inductionVariablesUsedAfterLoop = nullptr;
- this->memOpInfo->startIndexOpndCache[0] = nullptr;
- this->memOpInfo->startIndexOpndCache[1] = nullptr;
- this->memOpInfo->startIndexOpndCache[2] = nullptr;
- this->memOpInfo->startIndexOpndCache[3] = nullptr;
- this->memOpInfo->inductionVariableChangeInfoMap = JitAnew(allocator, Loop::InductionVariableChangeInfoMap, allocator);
- this->memOpInfo->inductionVariableOpndPerUnrollMap = JitAnew(allocator, Loop::InductionVariableOpndPerUnrollMap, allocator);
- this->memOpInfo->candidates = JitAnew(allocator, Loop::MemOpList, allocator);
- }
- }
// Walk the basic blocks backwards until we find the loop header.
// Mark basic blocks in the loop by looking at the predecessors
// of blocks known to be in the loop.
// Recurse on inner loops.
void
FlowGraph::WalkLoopBlocks(BasicBlock *block, Loop *loop, JitArenaAllocator *tempAlloc)
{
    AnalysisAssert(loop);
    // Bit vector of block numbers known to be inside the loop.
    BVSparse<JitArenaAllocator> *loopBlocksBv = JitAnew(tempAlloc, BVSparse<JitArenaAllocator>, tempAlloc);
    BasicBlock *tailBlock = block;
    BasicBlock *lastBlock;
    // The back-edge source (tail) is in the loop by definition.
    loopBlocksBv->Set(block->GetBlockNum());
    this->AddBlockToLoop(block, loop);
    if (block == loop->headBlock)
    {
        // Single block loop, we're done
        return;
    }
    do
    {
        BOOL isInLoop = loopBlocksBv->Test(block->GetBlockNum());
        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            if (succ->isLoopHeader)
            {
                // Found a loop back-edge
                if (loop->headBlock == succ)
                {
                    // Another back-edge into this same loop: current block is in the loop.
                    isInLoop = true;
                }
                else if (succ->loop == nullptr || succ->loop->headBlock != succ)
                {
                    // Recurse on inner loop
                    BuildLoop(succ, block, isInLoop ? loop : nullptr);
                }
            }
        } NEXT_SUCCESSOR_BLOCK;
        if (isInLoop)
        {
            // This block is in the loop. All of it's predecessors should be contained in the loop as well.
            FOREACH_PREDECESSOR_BLOCK(pred, block)
            {
                // Fix up loop parent if it isn't set already.
                // If pred->loop != loop, we're looking at an inner loop, which was already visited.
                // If pred->loop->parent == nullptr, this is the first time we see this loop from an outer
                // loop, so this must be an immediate child.
                if (pred->loop && pred->loop != loop && loop->headBlock->number < pred->loop->headBlock->number
                    && (pred->loop->parent == nullptr || pred->loop->parent->headBlock->number < loop->headBlock->number))
                {
                    pred->loop->parent = loop;
                    loop->isLeaf = false;
                    // Propagate the inner loop's call/implicit-call info outward.
                    if (pred->loop->hasCall)
                    {
                        loop->SetHasCall();
                    }
                    loop->SetImplicitCallFlags(pred->loop->GetImplicitCallFlags());
                }
                // Add pred to loop bit vector
                loopBlocksBv->Set(pred->GetBlockNum());
            } NEXT_PREDECESSOR_BLOCK;
            // Only claim the block for this loop if it isn't already owned by
            // an inner loop.
            if (block->loop == nullptr || block->loop->IsDescendentOrSelf(loop))
            {
                block->loop = loop;
            }
            if (block != tailBlock)
            {
                // The tail was already added before the walk started.
                this->AddBlockToLoop(block, loop);
            }
        }
        lastBlock = block;
        block = block->GetPrev();
    } while (lastBlock != loop->headBlock);
}
// Add block to this loop, and its parent loops.
void
FlowGraph::AddBlockToLoop(BasicBlock *block, Loop *loop)
{
    // Blocks are discovered walking backwards, so prepending keeps the loop's
    // block list in forward order.
    loop->blockList.Prepend(block);
    if (block->hasCall)
    {
        // Propagate the block's call information to the loop.
        loop->SetHasCall();
    }
}
///----------------------------------------------------------------------------
///
/// FlowGraph::AddBlock
///
/// Finish processing of a new block: hook up successor arcs, note loops, etc.
///
///----------------------------------------------------------------------------
BasicBlock *
FlowGraph::AddBlock(
    IR::Instr * firstInstr,
    IR::Instr * lastInstr,
    BasicBlock * nextBlock,
    BasicBlock *prevBlock)
{
    BasicBlock * block;
    IR::LabelInstr * labelInstr;

    // Every block starts at a label; synthesize one if the first instruction
    // isn't already a label.
    if (firstInstr->IsLabelInstr())
    {
        labelInstr = firstInstr->AsLabelInstr();
    }
    else
    {
        labelInstr = IR::LabelInstr::New(Js::OpCode::Label, firstInstr->m_func);
        labelInstr->SetByteCodeOffset(firstInstr);
        // The entry instruction must stay first, so insert after it; otherwise
        // the label goes in front of the block's first instruction.
        if (firstInstr->IsEntryInstr())
        {
            firstInstr->InsertAfter(labelInstr);
        }
        else
        {
            firstInstr->InsertBefore(labelInstr);
        }
        firstInstr = labelInstr;
    }

    block = labelInstr->GetBasicBlock();
    if (block == nullptr)
    {
        block = BasicBlock::New(this);
        labelInstr->SetBasicBlock(block);

        // Remember last block in function to target successor of RETs.
        // (The first block created here becomes the tail.)
        if (!this->tailBlock)
        {
            this->tailBlock = block;
        }
    }

    // Hook up the successor edges
    if (lastInstr->EndsBasicBlock())
    {
        BasicBlock * blockTarget = nullptr;

        if (lastInstr->IsBranchInstr())
        {
            // Hook up a successor edge to the branch target.
            IR::BranchInstr * branchInstr = lastInstr->AsBranchInstr();

            if(branchInstr->IsMultiBranch())
            {
                BasicBlock * blockMultiBrTarget;

                IR::MultiBranchInstr * multiBranchInstr = branchInstr->AsMultiBrInstr();

                // One successor edge per unique target label of the multi-branch.
                multiBranchInstr->MapUniqueMultiBrLabels([&](IR::LabelInstr * labelInstr) -> void
                {
                    blockMultiBrTarget = SetBlockTargetAndLoopFlag(labelInstr);
                    this->AddEdge(block, blockMultiBrTarget);
                });
            }
            else
            {
                IR::LabelInstr * targetLabelInstr = branchInstr->GetTarget();
                blockTarget = SetBlockTargetAndLoopFlag(targetLabelInstr);
                if (branchInstr->IsConditional())
                {
                    // The fall-through label (if any) is also a block boundary;
                    // make sure it gets a basic block and loop flag too.
                    IR::Instr *instrNext = branchInstr->GetNextRealInstrOrLabel();

                    if (instrNext->IsLabelInstr())
                    {
                        SetBlockTargetAndLoopFlag(instrNext->AsLabelInstr());
                    }
                }
            }
        }
        else if (lastInstr->m_opcode == Js::OpCode::Ret && block != this->tailBlock)
        {
            // Returns flow to the function's tail block.
            blockTarget = this->tailBlock;
        }

        if (blockTarget)
        {
            this->AddEdge(block, blockTarget);
        }
    }

    if (lastInstr->HasFallThrough())
    {
        // Add a branch to next instruction so that we don't have to update the flow graph
        // when the glob opt tries to insert instructions.
        // We don't run the globopt with try/catch, don't need to insert branch to next for fall through blocks.
        if (!this->func->HasTry() && !lastInstr->IsBranchInstr())
        {
            IR::BranchInstr * instr = IR::BranchInstr::New(Js::OpCode::Br,
                lastInstr->m_next->AsLabelInstr(), lastInstr->m_func);
            instr->SetByteCodeOffset(lastInstr->m_next);
            lastInstr->InsertAfter(instr);
            lastInstr = instr;
        }
        this->AddEdge(block, nextBlock);
    }

    block->SetBlockNum(this->blockCount++);
    block->SetFirstInstr(firstInstr);
    block->SetLastInstr(lastInstr);

    // Link the block into the doubly-linked block list: prepend when no
    // explicit predecessor is given, otherwise splice between prev and next.
    if (!prevBlock)
    {
        if (this->blockList)
        {
            this->blockList->prev = block;
        }
        block->next = this->blockList;
        this->blockList = block;
    }
    else
    {
        prevBlock->next = block;
        block->prev = prevBlock;
        block->next = nextBlock;
        nextBlock->prev = block;
    }

    return block;
}
- BasicBlock *
- FlowGraph::SetBlockTargetAndLoopFlag(IR::LabelInstr * labelInstr)
- {
- BasicBlock * blockTarget = nullptr;
- blockTarget = labelInstr->GetBasicBlock();
- if (blockTarget == nullptr)
- {
- blockTarget = BasicBlock::New(this);
- labelInstr->SetBasicBlock(blockTarget);
- }
- if (labelInstr->m_isLoopTop)
- {
- blockTarget->isLoopHeader = true;
- this->hasLoop = true;
- }
- return blockTarget;
- }
- ///----------------------------------------------------------------------------
- ///
- /// FlowGraph::AddEdge
- ///
- /// Add an edge connecting the two given blocks.
- ///
- ///----------------------------------------------------------------------------
- FlowEdge *
- FlowGraph::AddEdge(BasicBlock * blockPred, BasicBlock * blockSucc)
- {
- FlowEdge * edge = FlowEdge::New(this);
- edge->SetPred(blockPred);
- edge->SetSucc(blockSucc);
- blockPred->AddSucc(edge, this);
- blockSucc->AddPred(edge, this);
- return edge;
- }
///----------------------------------------------------------------------------
///
/// FlowGraph::Destroy
///
/// Remove all references to FG structures from the IR in preparation for freeing
/// the FG.
///
///----------------------------------------------------------------------------
void
FlowGraph::Destroy(void)
{
    BOOL fHasTry = this->func->HasTry();
    if (fHasTry)
    {
        // Do unreachable code removal up front to avoid problems
        // with unreachable back edges, etc.
        this->RemoveUnreachableBlocks();
    }

    FOREACH_BLOCK_ALL(block, this)
    {
        IR::Instr * firstInstr = block->GetFirstInstr();
        // Deleted (but not dead) blocks: detach and possibly remove their label, then skip.
        if (block->isDeleted && !block->isDead)
        {
            if (firstInstr->IsLabelInstr())
            {
                IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
                labelInstr->UnlinkBasicBlock();
                // Removing the label for non try blocks as we have a deleted block which has the label instruction
                // still not removed; this prevents the assert for cases where the deleted blocks fall through to a helper block,
                // i.e. helper introduced by polymorphic inlining bailout.
                // Skipping Try blocks as we have dependency on blocks to get the last instr(see below in this function)
                if (!fHasTry)
                {
                    if (this->func->GetJITFunctionBody()->IsCoroutine())
                    {
                        // the label could be a yield resume label, in which case we also need to remove it from the YieldOffsetResumeLabels list
                        this->func->MapUntilYieldOffsetResumeLabels([this, &labelInstr](int i, const YieldOffsetResumeLabel& yorl)
                        {
                            if (labelInstr == yorl.Second())
                            {
                                labelInstr->m_hasNonBranchRef = false;
                                this->func->RemoveYieldOffsetResumeLabel(yorl);
                                return true;
                            }
                            return false;
                        });
                    }
                    Assert(labelInstr->IsUnreferenced());
                    labelInstr->Remove();
                }
            }
            continue;
        }

        if (block->isLoopHeader && !block->isDead)
        {
            // Mark the tail block of this loop (the last back-edge). The register allocator
            // uses this to lexically find loops.
            BasicBlock *loopTail = nullptr;

            AssertMsg(firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->m_isLoopTop,
                "Label not marked as loop top...");

            // Look for a block in the loop that branches back to this header.
            FOREACH_BLOCK_IN_LOOP(loopBlock, block->loop)
            {
                FOREACH_SUCCESSOR_BLOCK(succ, loopBlock)
                {
                    if (succ == block)
                    {
                        loopTail = loopBlock;
                        break;
                    }
                } NEXT_SUCCESSOR_BLOCK;
            } NEXT_BLOCK_IN_LOOP;

            if (loopTail)
            {
                AssertMsg(loopTail->GetLastInstr()->IsBranchInstr(), "LastInstr of loop should always be a branch no?");
                block->loop->SetLoopTopInstr(block->GetFirstInstr()->AsLabelInstr());
            }
            else
            {
                // This loop doesn't have a back-edge: that is, it is not a loop
                // anymore...
                firstInstr->AsLabelInstr()->m_isLoopTop = FALSE;
            }
        }

        if (fHasTry)
        {
            // Assign EH regions to block labels while the blocks still exist.
            this->UpdateRegionForBlock(block);
        }

        if (firstInstr->IsLabelInstr())
        {
            IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
            labelInstr->UnlinkBasicBlock();
            if (labelInstr->IsUnreferenced() && !fHasTry)
            {
                // This is an unreferenced label, probably added by FG building.
                // Delete it now to make extended basic blocks visible.
                if (firstInstr == block->GetLastInstr())
                {
                    // Label was the only instruction in the block; nothing left
                    // to inspect below, so move on to the next block.
                    labelInstr->Remove();
                    continue;
                }
                else
                {
                    labelInstr->Remove();
                }
            }
        }

        IR::Instr * lastInstr = block->GetLastInstr();
        if (lastInstr->IsBranchInstr())
        {
            IR::BranchInstr * branchInstr = lastInstr->AsBranchInstr();
            if (!branchInstr->IsConditional() && branchInstr->GetTarget() == branchInstr->m_next)
            {
                // Remove branch to next
                branchInstr->Remove();
            }
        }
    }
    NEXT_BLOCK;

#if DBG
    if (fHasTry)
    {
        // Now that all blocks have regions, we should see consistently propagated regions at all
        // block boundaries.
        FOREACH_BLOCK(block, this)
        {
            Region * region = block->GetFirstInstr()->AsLabelInstr()->GetRegion();
            Region * predRegion = nullptr;
            FOREACH_PREDECESSOR_BLOCK(predBlock, block)
            {
                predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
                if (predBlock->GetLastInstr() == nullptr)
                {
                    AssertMsg(region == predRegion, "Bad region propagation through empty block");
                }
                else
                {
                    // Check the region transition implied by the predecessor's terminator.
                    switch (predBlock->GetLastInstr()->m_opcode)
                    {
                    case Js::OpCode::TryCatch:
                    case Js::OpCode::TryFinally:
                        AssertMsg(region->GetParent() == predRegion, "Bad region prop on entry to try-catch/finally");
                        if (block->GetFirstInstr() == predBlock->GetLastInstr()->AsBranchInstr()->GetTarget())
                        {
                            if (predBlock->GetLastInstr()->m_opcode == Js::OpCode::TryCatch)
                            {
                                AssertMsg(region->GetType() == RegionTypeCatch, "Bad region type on entry to catch");
                            }
                            else
                            {
                                AssertMsg(region->GetType() == RegionTypeFinally, "Bad region type on entry to finally");
                            }
                        }
                        else
                        {
                            AssertMsg(region->GetType() == RegionTypeTry, "Bad region type on entry to try");
                        }
                        break;
                    case Js::OpCode::Leave:
                        AssertMsg(region == predRegion->GetParent() || (predRegion->GetType() == RegionTypeTry && predRegion->GetMatchingFinallyRegion(false) == region) ||
                            (region == predRegion && this->func->IsLoopBodyInTry() ||
                            // edge from early exit to finally in simplejit - in fulljit this Leave would have been deadcoded due to preceeding BailOutOnEarlyExit
                            (predBlock->GetLastInstr()->GetPrevRealInstr()->m_opcode == Js::OpCode::BailOnEarlyExit && region->GetType() == RegionTypeFinally && this->func->IsSimpleJit())), "Bad region prop on leaving try-catch/finally");
                        break;
                    case Js::OpCode::LeaveNull:
                        AssertMsg(region == predRegion->GetParent() || (region == predRegion && this->func->IsLoopBodyInTry()), "Bad region prop on leaving try-catch/finally");
                        break;
                    // If the try region has a branch out of the loop,
                    // - the branch is moved out of the loop as part of break block removal, and
                    // - BrOnException is inverted to BrOnNoException and a Br is inserted after it.
                    // Otherwise,
                    // - FullJit: BrOnException is removed in the forward pass.
                    case Js::OpCode::BrOnException:
                        Assert(!this->func->DoGlobOpt());
                        break;
                    case Js::OpCode::BrOnNoException:
                        Assert(region->GetType() == RegionTypeTry || region->GetType() == RegionTypeCatch || region->GetType() == RegionTypeFinally ||
                            // A BrOnException from finally to early exit can be converted to BrOnNoException and Br
                            // The Br block maybe a common successor block for early exit along with the BrOnNoException block
                            // Region from Br block will be picked up from a predecessor which is not BrOnNoException due to early exit
                            // See test0() in test/EH/tryfinallytests.js
                            (predRegion->GetType() == RegionTypeFinally && predBlock->GetLastInstr()->AsBranchInstr()->m_brFinallyToEarlyExit));
                        break;
                    case Js::OpCode::Br:
                        if (predBlock->GetLastInstr()->AsBranchInstr()->m_leaveConvToBr)
                        {
                            // Leave converted to Br in finally region
                            AssertMsg(region == predRegion->GetParent(), "Bad region prop in finally");
                        }
                        else if (region->GetType() == RegionTypeCatch && region != predRegion)
                        {
                            AssertMsg(predRegion->GetType() == RegionTypeTry, "Bad region type for the try");
                        }
                        else if (region->GetType() == RegionTypeFinally && region != predRegion)
                        {
                            // We may be left with edges from finally region to early exit
                            AssertMsg(predRegion->IsNonExceptingFinally() || predRegion->GetType() == RegionTypeTry, "Bad region type for the try");
                        }
                        else
                        {
                            // We may be left with edges from finally region to early exit
                            AssertMsg(predRegion->IsNonExceptingFinally() || region == predRegion, "Bad region propagation through interior block");
                        }
                        break;
                    default:
                        break;
                    }
                }
            }
            NEXT_PREDECESSOR_BLOCK;

            // Validate the region's cross-links (try<->catch/finally) by type.
            switch (region->GetType())
            {
            case RegionTypeRoot:
                Assert(!region->GetMatchingTryRegion() && !region->GetMatchingCatchRegion() && !region->GetMatchingFinallyRegion(true) && !region->GetMatchingFinallyRegion(false));
                break;
            case RegionTypeTry:
                if (this->func->DoOptimizeTry() || (this->func->IsSimpleJit() && this->func->hasBailout))
                {
                    Assert((region->GetMatchingCatchRegion() != nullptr) ^ (region->GetMatchingFinallyRegion(true) && region->GetMatchingFinallyRegion(false)));
                }
                else
                {
                    Assert((region->GetMatchingCatchRegion() != nullptr) ^ (region->GetMatchingFinallyRegion(true) && !region->GetMatchingFinallyRegion(false)));
                }
                break;
            case RegionTypeCatch:
            case RegionTypeFinally:
                Assert(region->GetMatchingTryRegion());
                break;
            }
        }
        NEXT_BLOCK;

        // Finally drop any labels left unreferenced (try path defers this until
        // after region validation above).
        FOREACH_BLOCK_ALL(block, this)
        {
            if (block->GetFirstInstr()->IsLabelInstr())
            {
                IR::LabelInstr *labelInstr = block->GetFirstInstr()->AsLabelInstr();
                if (labelInstr->IsUnreferenced())
                {
                    // This is an unreferenced label, probably added by FG building.
                    // Delete it now to make extended basic blocks visible.
                    labelInstr->Remove();
                }
            }
        } NEXT_BLOCK;
    }
#endif

    this->func->isFlowGraphValid = false;
}
- bool FlowGraph::IsEHTransitionInstr(IR::Instr *instr)
- {
- Js::OpCode op = instr->m_opcode;
- return (op == Js::OpCode::TryCatch || op == Js::OpCode::TryFinally || op == Js::OpCode::Leave || op == Js::OpCode::LeaveNull);
- }
// Return the unique predecessor of 'block' whose last instruction is an EH
// transition (TryCatch/TryFinally/Leave/LeaveNull) and which already has a
// region assigned; nullptr if there is none.
BasicBlock * FlowGraph::GetPredecessorForRegionPropagation(BasicBlock *block)
{
    BasicBlock *ehPred = nullptr;
    FOREACH_PREDECESSOR_BLOCK(predBlock, block)
    {
        Region * predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
        if (IsEHTransitionInstr(predBlock->GetLastInstr()) && predRegion)
        {
            // MGTODO : change this to return, once you know there can exist only one eh transitioning pred
            Assert(ehPred == nullptr);
            ehPred = predBlock;
        }
        AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
    }
    NEXT_PREDECESSOR_BLOCK;
    return ehPred;
}
// Propagate the region forward from the block's predecessor(s), tracking the effect
// of the flow transition. Record the region in the block-to-region map provided
// and on the label at the entry to the block (if any).
// We need to know the end of finally for inserting edge at the end of finally to early exit
// Store it in regToFinallyEndMap as we visit blocks instead of recomputing later while adding early exit edges
void
FlowGraph::UpdateRegionForBlock(BasicBlock * block)
{
    Region *region;
    Region * predRegion = nullptr;
    IR::Instr * tryInstr = nullptr;
    IR::Instr * firstInstr = block->GetFirstInstr();

    // A label that already carries a region means regions were assigned earlier
    // (before globopt); nothing more to do for this block.
    if (firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->GetRegion())
    {
#if DBG
        bool assignRegionsBeforeGlobopt = this->func->HasTry() && (this->func->DoOptimizeTry() ||
            (this->func->IsSimpleJit() && this->func->hasBailout) ||
            this->func->IsLoopBodyInTryFinally());
        Assert(assignRegionsBeforeGlobopt);
#endif
        return;
    }

    if (block == this->blockList)
    {
        // Head of the graph: create the root region.
        region = Region::New(RegionTypeRoot, nullptr, this->func);
    }
    else
    {
        // Propagate the region forward by finding a predecessor we've already processed.
        region = nullptr;
        FOREACH_PREDECESSOR_BLOCK(predBlock, block)
        {
            AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
            predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
            if (predRegion != nullptr)
            {
                region = this->PropagateRegionFromPred(block, predBlock, predRegion, tryInstr);
                break;
            }
        }
        NEXT_PREDECESSOR_BLOCK;

        // Track the last block of each non-excepting finally (a block ending in
        // Leave/LeaveNull) so early-exit edges can be inserted there later.
        if (block->GetLastInstr()->m_opcode == Js::OpCode::LeaveNull || block->GetLastInstr()->m_opcode == Js::OpCode::Leave)
        {
            if (this->regToFinallyEndMap && region->IsNonExceptingFinally())
            {
                BasicBlock *endOfFinally = regToFinallyEndMap->ContainsKey(region) ? regToFinallyEndMap->Item(region) : nullptr;
                if (!endOfFinally)
                {
                    regToFinallyEndMap->Add(region, block);
                }
                else
                {
                    // A later Leave/LeaveNull block supersedes the recorded end.
                    Assert(endOfFinally->GetLastInstr()->m_opcode != Js::OpCode::LeaveNull || block == endOfFinally);
                    regToFinallyEndMap->Item(region, block);
                }
            }
        }
    }

    Assert(region || block->GetPredList()->Count() == 0);
    if (region && !region->ehBailoutData)
    {
        region->AllocateEHBailoutData(this->func, tryInstr);
    }

    Assert(firstInstr->IsLabelInstr());
    if (firstInstr->IsLabelInstr())
    {
        // Record the region on the label and make sure it stays around as a region
        // marker if we're entering a region at this point.
        IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
        labelInstr->SetRegion(region);
        if (region != predRegion)
        {
            labelInstr->m_hasNonBranchRef = true;
        }

        // One of the pred blocks maybe an eh region, in that case it is important to mark this label's m_hasNonBranchRef
        // If not later in codegen, this label can get deleted. And during SccLiveness, region is propagated to newly created labels in lowerer from the previous label's region
        // We can end up assigning an eh region to a label in a non eh region. And if there is a bailout in such a region, bad things will happen in the interpreter :)
        // See test2()/test3() in tryfinallytests.js
        if (!labelInstr->m_hasNonBranchRef)
        {
            FOREACH_PREDECESSOR_BLOCK(predBlock, block)
            {
                AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
                predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
                if (predRegion != region)
                {
                    labelInstr->m_hasNonBranchRef = true;
                    break;
                }
            }
            NEXT_PREDECESSOR_BLOCK;
        }
    }
}
// Like UpdateRegionForBlock, but used when break-block removal has already run:
// prefers the EH-transitioning predecessor (if any) and skips finally-to-early-exit
// edges when choosing which predecessor's region to propagate.
void
FlowGraph::UpdateRegionForBlockFromEHPred(BasicBlock * block, bool reassign)
{
    Region *region = nullptr;
    Region * predRegion = nullptr;
    IR::Instr * tryInstr = nullptr;
    IR::Instr * firstInstr = block->GetFirstInstr();

    // Unless we're reassigning, a label that already has a region was handled
    // during the pre-globopt region assignment.
    if (!reassign && firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->GetRegion())
    {
        Assert(this->func->HasTry() && (this->func->DoOptimizeTry() || (this->func->IsSimpleJit() && this->func->hasBailout)));
        return;
    }
    if (block->isDead || block->isDeleted)
    {
        // We can end up calling this function with such blocks, return doing nothing
        // See test5() in tryfinallytests.js
        return;
    }
    if (block == this->blockList)
    {
        // Head of the graph: create the root region.
        region = Region::New(RegionTypeRoot, nullptr, this->func);
    }
    else if (block->GetPredList()->Count() == 1)
    {
        // Single predecessor: propagate directly from it.
        BasicBlock *predBlock = block->GetPredList()->Head()->GetPred();
        AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
        predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
        Assert(predRegion);
        region = this->PropagateRegionFromPred(block, predBlock, predRegion, tryInstr);
    }
    else
    {
        // Propagate the region forward by finding a predecessor we've already processed.
        // Since we do break block removal after region propagation, we cannot pick the first predecessor which has an assigned region
        // If there is a eh transitioning pred, we pick that
        // There cannot be more than one eh transitioning pred (?)
        BasicBlock *ehPred = this->GetPredecessorForRegionPropagation(block);
        if (ehPred)
        {
            predRegion = ehPred->GetFirstInstr()->AsLabelInstr()->GetRegion();
            Assert(predRegion != nullptr);
            region = this->PropagateRegionFromPred(block, ehPred, predRegion, tryInstr);
        }
        else
        {
            FOREACH_PREDECESSOR_BLOCK(predBlock, block)
            {
                predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
                if (predRegion != nullptr)
                {
                    if ((predBlock->GetLastInstr()->m_opcode == Js::OpCode::BrOnException || predBlock->GetLastInstr()->m_opcode == Js::OpCode::BrOnNoException) &&
                        predBlock->GetLastInstr()->AsBranchInstr()->m_brFinallyToEarlyExit)
                    {
                        Assert(predRegion->IsNonExceptingFinally());
                        // BrOnException from finally region to early exit
                        // Skip this edge
                        continue;
                    }
                    if (predBlock->GetLastInstr()->m_opcode == Js::OpCode::Br &&
                        predBlock->GetLastInstr()->GetPrevRealInstr()->m_opcode == Js::OpCode::BrOnNoException)
                    {
                        Assert(predBlock->GetLastInstr()->GetPrevRealInstr()->AsBranchInstr()->m_brFinallyToEarlyExit);
                        Assert(predRegion->IsNonExceptingFinally());
                        // BrOnException from finally region to early exit changed to BrOnNoException and Br during break block removal
                        continue;
                    }
                    region = this->PropagateRegionFromPred(block, predBlock, predRegion, tryInstr);
                    break;
                }
            }
            NEXT_PREDECESSOR_BLOCK;
        }
    }

    Assert(region || block->GetPredList()->Count() == 0 || block->firstInstr->AsLabelInstr()->GetRegion());
    if (region)
    {
        if (!region->ehBailoutData)
        {
            region->AllocateEHBailoutData(this->func, tryInstr);
        }

        Assert(firstInstr->IsLabelInstr());
        if (firstInstr->IsLabelInstr())
        {
            // Record the region on the label and make sure it stays around as a region
            // marker if we're entering a region at this point.
            IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
            labelInstr->SetRegion(region);
            if (region != predRegion)
            {
                labelInstr->m_hasNonBranchRef = true;
            }
        }
    }
}
// Compute the EH region for 'block' given one predecessor and its region.
// tryInstr (out parameter) is set to the TryCatch/TryFinally instruction when
// the transition enters a try or (exception path) finally.
Region *
FlowGraph::PropagateRegionFromPred(BasicBlock * block, BasicBlock * predBlock, Region * predRegion, IR::Instr * &tryInstr)
{
    // Propagate predRegion to region, looking at the flow transition for an opcode
    // that affects the region.
    Region * region = nullptr;
    IR::Instr * predLastInstr = predBlock->GetLastInstr();
    IR::Instr * firstInstr = block->GetFirstInstr();
    if (predLastInstr == nullptr)
    {
        // Empty block: trivially propagate the region.
        region = predRegion;
    }
    else
    {
        Region * tryRegion = nullptr;
        IR::LabelInstr * tryInstrNext = nullptr;
        switch (predLastInstr->m_opcode)
        {
        case Js::OpCode::TryCatch:
            // Entry to a try-catch. See whether we're entering the try or the catch
            // by looking for the handler label.
            Assert(predLastInstr->m_next->IsLabelInstr());
            tryInstrNext = predLastInstr->m_next->AsLabelInstr();
            tryRegion = tryInstrNext->GetRegion();

            if (firstInstr == predLastInstr->AsBranchInstr()->GetTarget())
            {
                // This block is the branch target, i.e. the catch handler.
                region = Region::New(RegionTypeCatch, predRegion, this->func);
                Assert(tryRegion);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingCatchRegion(region);
            }
            else
            {
                // Fall-through: entering the try body.
                region = Region::New(RegionTypeTry, predRegion, this->func);
                tryInstr = predLastInstr;
            }
            break;

        case Js::OpCode::TryFinally:
            // Entry to a try-finally. See whether we're entering the try or the finally
            // by looking for the handler label.
            Assert(predLastInstr->m_next->IsLabelInstr());
            tryInstrNext = predLastInstr->m_next->AsLabelInstr();
            tryRegion = tryInstrNext->GetRegion();

            if (firstInstr == predLastInstr->AsBranchInstr()->GetTarget())
            {
                // This block is the branch target, i.e. the exception-path finally.
                Assert(tryRegion && tryRegion->GetType() == RegionType::RegionTypeTry);
                region = Region::New(RegionTypeFinally, predRegion, this->func);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingFinallyRegion(region, true);
                tryInstr = predLastInstr;
            }
            else
            {
                // Fall-through: entering the try body.
                region = Region::New(RegionTypeTry, predRegion, this->func);
                tryInstr = predLastInstr;
            }
            break;

        case Js::OpCode::Leave:
            // A Leave that flows into a Finally opcode enters the non-exception
            // path of the finally handler.
            if (firstInstr->m_next && firstInstr->m_next->m_opcode == Js::OpCode::Finally)
            {
                tryRegion = predRegion;
                Assert(tryRegion->GetMatchingFinallyRegion(true) != nullptr);
                region = Region::New(RegionTypeFinally, predRegion->GetParent(), this->func);
                Assert(tryRegion && tryRegion->GetType() == RegionType::RegionTypeTry);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingFinallyRegion(region, false);
                break;
            }
            // Exiting a try or handler. Retrieve the current region's parent.
            region = predRegion->GetParent();
            if (region == nullptr)
            {
                // We found a Leave in the root region- this can only happen when a jitted loop body
                // in a try block has a return statement.
                Assert(this->func->IsLoopBodyInTry());
                predLastInstr->AsBranchInstr()->m_isOrphanedLeave = true;
                region = predRegion;
            }
            break;

        case Js::OpCode::LeaveNull:
            // Exiting a try or handler. Retrieve the current region's parent.
            region = predRegion->GetParent();
            if (region == nullptr)
            {
                // We found a Leave in the root region- this can only happen when a jitted loop body
                // in a try block has a return statement.
                Assert(this->func->IsLoopBodyInTry());
                predLastInstr->AsBranchInstr()->m_isOrphanedLeave = true;
                region = predRegion;
            }
            break;

        case Js::OpCode::BailOnException:
            // Infinite loop, no edge to non excepting finally
            // NOTE: region stays nullptr when the next instruction is not Finally.
            if (firstInstr->m_next && firstInstr->m_next->m_opcode == Js::OpCode::Finally)
            {
                tryRegion = predRegion->GetMatchingTryRegion();
                region = Region::New(RegionTypeFinally, predRegion->GetParent(), this->func);
                Assert(tryRegion && tryRegion->GetType() == RegionType::RegionTypeTry);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingFinallyRegion(region, false);
            }
            break;

        case Js::OpCode::BrOnException:
            // Infinite loop inside another EH region within finally,
            // We have added edges for all infinite loops inside a finally, identify that and transition to parent
            if (predRegion->GetType() != RegionTypeFinally && firstInstr->GetNextRealInstr()->m_opcode == Js::OpCode::LeaveNull)
            {
                region = predRegion->GetParent();
            }
            else
            {
                region = predRegion;
            }
            break;

        default:
            // Normal (non-EH) transition: just propagate the region.
            region = predRegion;
            break;
        }
    }
    return region;
}
// Insert compBlock into the block list of 'loop' (next to targetBlock), then
// recurse so the block is also registered in every enclosing parent loop.
// postTarget selects insertion after targetBlock instead of before it.
void
FlowGraph::InsertCompBlockToLoopList(Loop *loop, BasicBlock* compBlock, BasicBlock* targetBlock, bool postTarget)
{
    if (loop)
    {
        // Locate targetBlock in this loop's block list; 'iter' remains positioned
        // on it after the break.
        bool found = false;
        FOREACH_BLOCK_IN_LOOP_EDITING(loopBlock, loop, iter)
        {
            if (loopBlock == targetBlock)
            {
                found = true;
                break;
            }
        } NEXT_BLOCK_IN_LOOP_EDITING;
        if (found)
        {
            if (postTarget)
            {
                // Step past targetBlock so the insert lands after it.
                iter.Next();
            }
            iter.InsertBefore(compBlock);
        }
        // Parent loops contain this loop's blocks too; keep them consistent.
        InsertCompBlockToLoopList(loop->parent, compBlock, targetBlock, postTarget);
    }
}
// Insert a block on the given edge
BasicBlock *
FlowGraph::InsertAirlockBlock(FlowEdge * edge)
{
    BasicBlock * airlockBlock = BasicBlock::New(this);
    BasicBlock * sourceBlock = edge->GetPred();
    BasicBlock * sinkBlock = edge->GetSucc();

    BasicBlock * sinkPrevBlock = sinkBlock->prev;
    IR::Instr * sinkPrevBlockLastInstr = sinkPrevBlock->GetLastInstr();
    IR::Instr * sourceLastInstr = sourceBlock->GetLastInstr();

    // The airlock lives lexically next to the sink, so it belongs to the sink's loop.
    airlockBlock->loop = sinkBlock->loop;
    airlockBlock->SetBlockNum(this->blockCount++);
#ifdef DBG
    airlockBlock->isAirLockBlock = true;
#endif
    //
    // Fixup block linkage
    //

    // airlock block is inserted right before sinkBlock in the lexical block list
    // (spliced between sinkBlock->prev and sinkBlock).
    airlockBlock->prev = sinkBlock->prev;
    sinkBlock->prev = airlockBlock;

    airlockBlock->next = sinkBlock;
    airlockBlock->prev->next = airlockBlock;

    //
    // Fixup flow edges
    //
    sourceBlock->RemoveSucc(sinkBlock, this, false);

    // Add sourceBlock -> airlockBlock
    this->AddEdge(sourceBlock, airlockBlock);

    // Add airlockBlock -> sinkBlock (the original edge object is reused, repointed at the airlock)
    edge->SetPred(airlockBlock);
    airlockBlock->AddSucc(edge, this);

    // Fixup data use count
    airlockBlock->SetDataUseCount(1);
    sourceBlock->DecrementDataUseCount();

    //
    // Fixup IR
    //

    // Maintain the instruction region for inlining
    IR::LabelInstr *sinkLabel = sinkBlock->GetFirstInstr()->AsLabelInstr();
    Func * sinkLabelFunc = sinkLabel->m_func;

    IR::LabelInstr *airlockLabel = IR::LabelInstr::New(Js::OpCode::Label, sinkLabelFunc);
    sinkPrevBlockLastInstr->InsertAfter(airlockLabel);

    airlockBlock->SetFirstInstr(airlockLabel);
    airlockLabel->SetBasicBlock(airlockBlock);

    // Add br to sinkBlock from airlock block
    IR::BranchInstr *airlockBr = IR::BranchInstr::New(Js::OpCode::Br, sinkLabel, sinkLabelFunc);
    airlockBr->SetByteCodeOffset(sinkLabel);
    airlockLabel->InsertAfter(airlockBr);
    airlockBlock->SetLastInstr(airlockBr);

    airlockLabel->SetByteCodeOffset(sinkLabel);

    // Fixup flow out of sourceBlock
    IR::BranchInstr *sourceBr = sourceLastInstr->AsBranchInstr();
    if (sourceBr->IsMultiBranch())
    {
        const bool replaced = sourceBr->AsMultiBrInstr()->ReplaceTarget(sinkLabel, airlockLabel);
        Assert(replaced);
    }
    else if (sourceBr->GetTarget() == sinkLabel)
    {
        sourceBr->SetTarget(airlockLabel);
    }

    // If the block lexically before the sink falls through into it, that fall-through
    // now lands in the airlock; insert compensation code on that edge.
    if (!sinkPrevBlockLastInstr->IsBranchInstr() || sinkPrevBlockLastInstr->AsBranchInstr()->HasFallThrough())
    {
        if (!sinkPrevBlock->isDeleted)
        {
            FlowEdge *dstEdge = this->FindEdge(sinkPrevBlock, sinkBlock);
            if (dstEdge) // Possibility that sourceblock may be same as sinkPrevBlock
            {
                BasicBlock* compensationBlock = this->InsertCompensationCodeForBlockMove(dstEdge, true /*insert comp block to loop list*/, true);
                compensationBlock->IncrementDataUseCount();
                // We need to skip airlock compensation block in globopt as its inserted while globopt is iteration over the blocks.
                compensationBlock->isAirLockCompensationBlock = true;
            }
        }
    }

#if DBG_DUMP
    this->Dump(true, _u("\n After insertion of airlock block \n"));
#endif

    return airlockBlock;
}
// Insert a block on the given edge
BasicBlock *
FlowGraph::InsertCompensationCodeForBlockMove(FlowEdge * edge, bool insertToLoopList, bool sinkBlockLoop)
{
    BasicBlock * compBlock = BasicBlock::New(this);
    BasicBlock * sourceBlock = edge->GetPred();
    BasicBlock * sinkBlock = edge->GetSucc();
    BasicBlock * fallthroughBlock = sourceBlock->next;
    IR::Instr * sourceLastInstr = sourceBlock->GetLastInstr();

    compBlock->SetBlockNum(this->blockCount++);

    if (insertToLoopList)
    {
        // For flow graph edits in loops, register the compensation block with the
        // appropriate loop's block list (sink side or source side).
        if (sinkBlockLoop)
        {
            if (sinkBlock->loop && sinkBlock->loop->GetHeadBlock() == sinkBlock)
            {
                // BLUE 531255: sinkblock may be the head block of new loop, we shouldn't insert compensation block to that loop
                // Insert it to all the parent loop lists.
                compBlock->loop = sinkBlock->loop->parent;
                InsertCompBlockToLoopList(compBlock->loop, compBlock, sinkBlock, false);
            }
            else
            {
                compBlock->loop = sinkBlock->loop;
                InsertCompBlockToLoopList(compBlock->loop, compBlock, sinkBlock, false); // sinkBlock or fallthroughBlock?
            }
#ifdef DBG
            compBlock->isBreakCompensationBlockAtSink = true;
#endif
        }
        else
        {
            compBlock->loop = sourceBlock->loop;
            InsertCompBlockToLoopList(compBlock->loop, compBlock, sourceBlock, true);
#ifdef DBG
            compBlock->isBreakCompensationBlockAtSource = true;
#endif
        }
    }

    //
    // Fixup block linkage
    //

    // compensation block is inserted right after sourceBlock
    compBlock->next = fallthroughBlock;
    fallthroughBlock->prev = compBlock;

    compBlock->prev = sourceBlock;
    sourceBlock->next = compBlock;

    //
    // Fixup flow edges
    //
    sourceBlock->RemoveSucc(sinkBlock, this, false);

    // Add sourceBlock -> compBlock
    this->AddEdge(sourceBlock, compBlock);

    // Add compBlock -> sinkBlock (the original edge object is reused, repointed at compBlock)
    edge->SetPred(compBlock);
    compBlock->AddSucc(edge, this);

    //
    // Fixup IR
    //

    // Maintain the instruction region for inlining
    IR::LabelInstr *sinkLabel = sinkBlock->GetFirstInstr()->AsLabelInstr();
    Func * sinkLabelFunc = sinkLabel->m_func;

    IR::LabelInstr *compLabel = IR::LabelInstr::New(Js::OpCode::Label, sinkLabelFunc);
    sourceLastInstr->InsertAfter(compLabel);
    compBlock->SetFirstInstr(compLabel);
    compLabel->SetBasicBlock(compBlock);

    // Add br to sinkBlock from compensation block
    IR::BranchInstr *compBr = IR::BranchInstr::New(Js::OpCode::Br, sinkLabel, sinkLabelFunc);
    compBr->SetByteCodeOffset(sinkLabel);
    compLabel->InsertAfter(compBr);
    compBlock->SetLastInstr(compBr);

    compLabel->SetByteCodeOffset(sinkLabel);

    // Fixup flow out of sourceBlock
    if (sourceLastInstr->IsBranchInstr())
    {
        IR::BranchInstr *sourceBr = sourceLastInstr->AsBranchInstr();
        Assert(sourceBr->IsMultiBranch() || sourceBr->IsConditional());
        if (sourceBr->IsMultiBranch())
        {
            const bool replaced = sourceBr->AsMultiBrInstr()->ReplaceTarget(sinkLabel, compLabel);
            Assert(replaced);
        }
    }

    // When regions are assigned before globopt, the new block needs a region too.
    bool assignRegionsBeforeGlobopt = this->func->HasTry() && (this->func->DoOptimizeTry() ||
        (this->func->IsSimpleJit() && this->func->hasBailout) ||
        this->func->IsLoopBodyInTryFinally());
    if (assignRegionsBeforeGlobopt)
    {
        UpdateRegionForBlockFromEHPred(compBlock);
    }
    return compBlock;
}
- void
- FlowGraph::RemoveUnreachableBlocks()
- {
- AnalysisAssert(this->blockList);
- FOREACH_BLOCK(block, this)
- {
- block->isVisited = false;
- }
- NEXT_BLOCK;
- this->blockList->isVisited = true;
- FOREACH_BLOCK_EDITING(block, this)
- {
- if (block->isVisited)
- {
- FOREACH_SUCCESSOR_BLOCK(succ, block)
- {
- succ->isVisited = true;
- } NEXT_SUCCESSOR_BLOCK;
- }
- else
- {
- this->RemoveBlock(block);
- }
- }
- NEXT_BLOCK_EDITING;
- }
- // If block has no predecessor, remove it.
- bool
- FlowGraph::RemoveUnreachableBlock(BasicBlock *block, GlobOpt * globOpt)
- {
- bool isDead = false;
- if ((block->GetPredList() == nullptr || block->GetPredList()->Empty()) && block != this->func->m_fg->blockList)
- {
- isDead = true;
- }
- else if (block->isLoopHeader)
- {
- // A dead loop still has back-edges pointing to it...
- isDead = true;
- FOREACH_PREDECESSOR_BLOCK(pred, block)
- {
- if (!block->loop->IsDescendentOrSelf(pred->loop))
- {
- isDead = false;
- }
- } NEXT_PREDECESSOR_BLOCK;
- }
- if (isDead)
- {
- this->RemoveBlock(block, globOpt);
- return true;
- }
- return false;
- }
IR::Instr *
FlowGraph::PeepTypedCm(IR::Instr *instr)
{
    // Peep a typed compare whose result feeds a BrTrue_I4/BrFalse_I4,
    // turning compare + conditional branch on the boolean into a direct
    // compare-branch, while leaving constant loads behind for any other
    // uses of the compare result. Returns the rewritten branch, or
    // nullptr if the pattern does not match.
    //
    // Basic pattern, peep:
    //      t1 = CmEq a, b
    //      BrTrue_I4 $L1, t1
    // Into:
    //      t1 = 1
    //      BrEq $L1, a, b
    //      t1 = 0

    IR::Instr * instrNext = instr->GetNextRealInstrOrLabel();

    // find intermediate Lds (the compare result may be copied up to twice
    // before the branch consumes it)
    IR::Instr * instrLd = nullptr;
    if (instrNext->m_opcode == Js::OpCode::Ld_I4)
    {
        instrLd = instrNext;
        instrNext = instrNext->GetNextRealInstrOrLabel();
    }

    IR::Instr * instrLd2 = nullptr;
    if (instrNext->m_opcode == Js::OpCode::Ld_I4)
    {
        instrLd2 = instrNext;
        instrNext = instrNext->GetNextRealInstrOrLabel();
    }

    // Find BrTrue/BrFalse
    IR::Instr *instrBr;
    bool brIsTrue;
    if (instrNext->m_opcode == Js::OpCode::BrTrue_I4)
    {
        instrBr = instrNext;
        brIsTrue = true;
    }
    else if (instrNext->m_opcode == Js::OpCode::BrFalse_I4)
    {
        instrBr = instrNext;
        brIsTrue = false;
    }
    else
    {
        return nullptr;
    }

    AssertMsg(instrLd || (!instrLd && !instrLd2), "Either instrLd is non-null or both null");

    // if we have intermediate Lds, then make sure pattern is:
    //      t1 = CmEq a, b
    //      t2 = Ld_A t1
    //      BrTrue $L1, t2
    if (instrLd && !instrLd->GetSrc1()->IsEqual(instr->GetDst()))
    {
        return nullptr;
    }

    if (instrLd2 && !instrLd2->GetSrc1()->IsEqual(instrLd->GetDst()))
    {
        return nullptr;
    }

    // Make sure we have:
    //      t1 = CmEq a, b
    //      BrTrue/BrFalse t1
    // (the branch may read the compare dst directly or through the copies)
    if (!(instr->GetDst()->IsEqual(instrBr->GetSrc1()) || (instrLd && instrLd->GetDst()->IsEqual(instrBr->GetSrc1())) || (instrLd2 && instrLd2->GetDst()->IsEqual(instrBr->GetSrc1()))))
    {
        return nullptr;
    }

    IR::Opnd * src1 = instr->UnlinkSrc1();
    IR::Opnd * src2 = instr->UnlinkSrc2();

    IR::Instr * instrNew;
    IR::Opnd * tmpOpnd;
    // If a compare source aliases one of the dsts being re-assigned below,
    // copy it to a temp first so the constant stores don't clobber the
    // branch operands.
    if (instr->GetDst()->IsEqual(src1) || (instrLd && instrLd->GetDst()->IsEqual(src1)) || (instrLd2 && instrLd2->GetDst()->IsEqual(src1)))
    {
        Assert(src1->IsInt32());

        tmpOpnd = IR::RegOpnd::New(TyInt32, instr->m_func);
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, tmpOpnd, src1, instr->m_func);
        instrNew->SetByteCodeOffset(instr);
        instr->InsertBefore(instrNew);
        src1 = tmpOpnd;
    }

    if (instr->GetDst()->IsEqual(src2) || (instrLd && instrLd->GetDst()->IsEqual(src2)) || (instrLd2 && instrLd2->GetDst()->IsEqual(src2)))
    {
        Assert(src2->IsInt32());

        tmpOpnd = IR::RegOpnd::New(TyInt32, instr->m_func);
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, tmpOpnd, src2, instr->m_func);
        instrNew->SetByteCodeOffset(instr);
        instr->InsertBefore(instrNew);
        src2 = tmpOpnd;
    }

    instrBr->ReplaceSrc1(src1);
    instrBr->SetSrc2(src2);

    // Map the compare opcode to the equivalent branch opcode.
    Js::OpCode newOpcode;
    switch (instr->m_opcode)
    {
    case Js::OpCode::CmEq_I4:
        newOpcode = Js::OpCode::BrEq_I4;
        break;
    case Js::OpCode::CmGe_I4:
        newOpcode = Js::OpCode::BrGe_I4;
        break;
    case Js::OpCode::CmGt_I4:
        newOpcode = Js::OpCode::BrGt_I4;
        break;
    case Js::OpCode::CmLt_I4:
        newOpcode = Js::OpCode::BrLt_I4;
        break;
    case Js::OpCode::CmLe_I4:
        newOpcode = Js::OpCode::BrLe_I4;
        break;
    case Js::OpCode::CmUnGe_I4:
        newOpcode = Js::OpCode::BrUnGe_I4;
        break;
    case Js::OpCode::CmUnGt_I4:
        newOpcode = Js::OpCode::BrUnGt_I4;
        break;
    case Js::OpCode::CmUnLt_I4:
        newOpcode = Js::OpCode::BrUnLt_I4;
        break;
    case Js::OpCode::CmUnLe_I4:
        newOpcode = Js::OpCode::BrUnLe_I4;
        break;
    case Js::OpCode::CmNeq_I4:
        newOpcode = Js::OpCode::BrNeq_I4;
        break;
    case Js::OpCode::CmEq_A:
        newOpcode = Js::OpCode::BrEq_A;
        break;
    case Js::OpCode::CmGe_A:
        newOpcode = Js::OpCode::BrGe_A;
        break;
    case Js::OpCode::CmGt_A:
        newOpcode = Js::OpCode::BrGt_A;
        break;
    case Js::OpCode::CmLt_A:
        newOpcode = Js::OpCode::BrLt_A;
        break;
    case Js::OpCode::CmLe_A:
        newOpcode = Js::OpCode::BrLe_A;
        break;
    case Js::OpCode::CmUnGe_A:
        newOpcode = Js::OpCode::BrUnGe_A;
        break;
    case Js::OpCode::CmUnGt_A:
        newOpcode = Js::OpCode::BrUnGt_A;
        break;
    case Js::OpCode::CmUnLt_A:
        newOpcode = Js::OpCode::BrUnLt_A;
        break;
    case Js::OpCode::CmUnLe_A:
        newOpcode = Js::OpCode::BrUnLe_A;
        break;
    case Js::OpCode::CmNeq_A:
        newOpcode = Js::OpCode::BrNeq_A;
        break;
    case Js::OpCode::CmSrEq_A:
        newOpcode = Js::OpCode::BrSrEq_A;
        break;
    case Js::OpCode::CmSrNeq_A:
        newOpcode = Js::OpCode::BrSrNeq_A;
        break;
    default:
        newOpcode = Js::OpCode::InvalidOpCode;
        Assume(UNREACHED);
    }

    instrBr->m_opcode = newOpcode;

    // Turn the original compare (and its copies) into constant loads: the
    // taken path sees one constant (inserted after the branch), the
    // fall-through path the other.
    if (brIsTrue)
    {
        instr->SetSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
        instr->m_opcode = Js::OpCode::Ld_I4;
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instr->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
        instrNew->SetByteCodeOffset(instrBr);
        instrBr->InsertAfter(instrNew);
        if (instrLd)
        {
            instrLd->ReplaceSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
            instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
            instrNew->SetByteCodeOffset(instrBr);
            instrBr->InsertAfter(instrNew);

            if (instrLd2)
            {
                instrLd2->ReplaceSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
                instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd2->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
                instrNew->SetByteCodeOffset(instrBr);
                instrBr->InsertAfter(instrNew);
            }
        }
    }
    else
    {
        // BrFalse: invert the branch condition and swap the constants.
        instrBr->AsBranchInstr()->Invert();

        instr->SetSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
        instr->m_opcode = Js::OpCode::Ld_I4;
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instr->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
        instrNew->SetByteCodeOffset(instrBr);
        instrBr->InsertAfter(instrNew);
        if (instrLd)
        {
            instrLd->ReplaceSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
            instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
            instrNew->SetByteCodeOffset(instrBr);
            instrBr->InsertAfter(instrNew);

            if (instrLd2)
            {
                instrLd2->ReplaceSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
                instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd2->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
                instrNew->SetByteCodeOffset(instrBr);
                instrBr->InsertAfter(instrNew);
            }
        }
    }

    return instrBr;
}
IR::Instr *
FlowGraph::PeepCm(IR::Instr *instr)
{
    // Peep a var-typed compare feeding BrTrue_A/BrFalse_A into a direct
    // compare-branch, materializing the boolean result on each path for any
    // remaining uses. Also attempts a second-level peep through the branch
    // target. Returns the rewritten branch, or nullptr on no match.
    //
    // Basic pattern, peep:
    //      t1 = CmEq a, b
    //      t2 = Ld_A t1
    //      BrTrue $L1, t2
    // Into:
    //      t1 = True
    //      t2 = True
    //      BrEq $L1, a, b
    //      t1 = False
    //      t2 = False
    //
    // The true/false Ld_A's will most likely end up being dead-stores...

    // Alternate Pattern
    //      t1= CmEq a, b
    //      BrTrue $L1, t1
    // Into:
    //      BrEq $L1, a, b

    Func *func = instr->m_func;

    // Find Ld_A
    IR::Instr *instrNext = instr->GetNextRealInstrOrLabel();
    IR::Instr *inlineeEndInstr = nullptr;
    IR::Instr *instrNew;
    IR::Instr *instrLd = nullptr, *instrLd2 = nullptr;
    IR::Instr *instrByteCode = instrNext;
    bool ldFound = false;
    IR::Opnd *brSrc = instr->GetDst();

    if (instrNext->m_opcode == Js::OpCode::Ld_A && instrNext->GetSrc1()->IsEqual(instr->GetDst()))
    {
        ldFound = true;
        instrLd = instrNext;
        brSrc = instrNext->GetDst();

        // Bail if the copy clobbers one of the compare sources: the boolean
        // stores inserted below would corrupt the branch operands.
        if (brSrc->IsEqual(instr->GetSrc1()) || brSrc->IsEqual(instr->GetSrc2()))
        {
            return nullptr;
        }

        instrNext = instrLd->GetNextRealInstrOrLabel();

        // Is there a second Ld_A?
        if (instrNext->m_opcode == Js::OpCode::Ld_A && instrNext->GetSrc1()->IsEqual(brSrc))
        {
            // We have:
            //      t1 = Cm
            //      t2 = t1     // ldSrc = t1
            //      t3 = t2     // ldDst = t3
            //      BrTrue/BrFalse t3

            instrLd2 = instrNext;
            brSrc = instrLd2->GetDst();
            instrNext = instrLd2->GetNextRealInstrOrLabel();
            if (brSrc->IsEqual(instr->GetSrc1()) || brSrc->IsEqual(instr->GetSrc2()))
            {
                return nullptr;
            }
        }
    }

    // Skip over InlineeEnd
    if (instrNext->m_opcode == Js::OpCode::InlineeEnd)
    {
        inlineeEndInstr = instrNext;
        instrNext = inlineeEndInstr->GetNextRealInstrOrLabel();
    }

    // Find BrTrue/BrFalse
    bool brIsTrue;
    if (instrNext->m_opcode == Js::OpCode::BrTrue_A)
    {
        brIsTrue = true;
    }
    else if (instrNext->m_opcode == Js::OpCode::BrFalse_A)
    {
        brIsTrue = false;
    }
    else
    {
        return nullptr;
    }

    IR::Instr *instrBr = instrNext;

    // Make sure we have:
    //      t1 = Ld_A
    //      BrTrue/BrFalse t1
    if (!instr->GetDst()->IsEqual(instrBr->GetSrc1()) && !brSrc->IsEqual(instrBr->GetSrc1()))
    {
        return nullptr;
    }

    //
    // We have a match. Generate the new branch
    //

    // BrTrue/BrFalse t1
    // Keep a copy of the inliner func and the bytecode offset of the original BrTrue/BrFalse if we end up inserting a new branch out of the inlinee,
    // and sym id of t1 for proper restoration on a bailout before the branch.
    Func* origBrFunc = instrBr->m_func;
    uint32 origBrByteCodeOffset = instrBr->GetByteCodeOffset();
    uint32 origBranchSrcSymId = instrBr->GetSrc1()->GetStackSym()->m_id;
    bool origBranchSrcOpndIsJITOpt = instrBr->GetSrc1()->GetIsJITOptimizedReg();

    // Move the branch up to the compare and convert it to a compare-branch
    // on the original compare sources.
    instrBr->Unlink();
    instr->InsertBefore(instrBr);
    instrBr->ClearByteCodeOffset();
    instrBr->SetByteCodeOffset(instr);
    instrBr->FreeSrc1();
    instrBr->SetSrc1(instr->UnlinkSrc1());
    instrBr->SetSrc2(instr->UnlinkSrc2());
    instrBr->m_func = instr->m_func;

    // Map the compare opcode to the equivalent branch opcode.
    Js::OpCode newOpcode = Js::OpCode::InvalidOpCode;
    switch(instr->m_opcode)
    {
    case Js::OpCode::CmEq_A:
        newOpcode = Js::OpCode::BrEq_A;
        break;
    case Js::OpCode::CmGe_A:
        newOpcode = Js::OpCode::BrGe_A;
        break;
    case Js::OpCode::CmGt_A:
        newOpcode = Js::OpCode::BrGt_A;
        break;
    case Js::OpCode::CmLt_A:
        newOpcode = Js::OpCode::BrLt_A;
        break;
    case Js::OpCode::CmLe_A:
        newOpcode = Js::OpCode::BrLe_A;
        break;
    case Js::OpCode::CmUnGe_A:
        newOpcode = Js::OpCode::BrUnGe_A;
        break;
    case Js::OpCode::CmUnGt_A:
        newOpcode = Js::OpCode::BrUnGt_A;
        break;
    case Js::OpCode::CmUnLt_A:
        newOpcode = Js::OpCode::BrUnLt_A;
        break;
    case Js::OpCode::CmUnLe_A:
        newOpcode = Js::OpCode::BrUnLe_A;
        break;
    case Js::OpCode::CmNeq_A:
        newOpcode = Js::OpCode::BrNeq_A;
        break;
    case Js::OpCode::CmSrEq_A:
        newOpcode = Js::OpCode::BrSrEq_A;
        break;
    case Js::OpCode::CmSrNeq_A:
        newOpcode = Js::OpCode::BrSrNeq_A;
        break;
    default:
        Assert(UNREACHED);
        __assume(UNREACHED);
    }

    instrBr->m_opcode = newOpcode;

    // Boolean constants used to materialize the compare result on each path.
    IR::AddrOpnd* trueOpnd = IR::AddrOpnd::New(func->GetScriptContextInfo()->GetTrueAddr(), IR::AddrOpndKindDynamicVar, func, true);
    IR::AddrOpnd* falseOpnd = IR::AddrOpnd::New(func->GetScriptContextInfo()->GetFalseAddr(), IR::AddrOpndKindDynamicVar, func, true);

    trueOpnd->SetValueType(ValueType::Boolean);
    falseOpnd->SetValueType(ValueType::Boolean);

    if (!brIsTrue)
    {
        instrBr->AsBranchInstr()->Invert();
    }

    if (ldFound)
    {
        // Split Ld_A into "Ld_A TRUE"/"Ld_A FALSE": taken-path stores are
        // placed before the branch, fall-through stores replace the Ld_A's.
        if (brIsTrue)
        {
            instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrBr->m_func);
            instrNew->SetByteCodeOffset(instrBr);
            instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
            instrBr->InsertBefore(instrNew);
            instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetDst(), trueOpnd, instrBr->m_func);
            instrNew->SetByteCodeOffset(instrBr);
            instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
            instrBr->InsertBefore(instrNew);

            instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), falseOpnd, instrLd->m_func);
            instrLd->InsertBefore(instrNew);
            instrNew->SetByteCodeOffset(instrLd);
            instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
            instrLd->ReplaceSrc1(falseOpnd);
            if (instrLd2)
            {
                instrLd2->ReplaceSrc1(falseOpnd);

                instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd2->GetDst(), trueOpnd, instrBr->m_func);
                instrBr->InsertBefore(instrNew);
                instrNew->SetByteCodeOffset(instrBr);
                instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
            }
        }
        else
        {
            // Mirror of the above with TRUE/FALSE swapped (branch inverted).
            instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), falseOpnd, instrBr->m_func);
            instrBr->InsertBefore(instrNew);
            instrNew->SetByteCodeOffset(instrBr);
            instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;

            instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetDst(), falseOpnd, instrBr->m_func);
            instrBr->InsertBefore(instrNew);
            instrNew->SetByteCodeOffset(instrBr);
            instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;

            instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrLd->m_func);
            instrLd->InsertBefore(instrNew);
            instrNew->SetByteCodeOffset(instrLd);
            instrLd->ReplaceSrc1(trueOpnd);
            instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
            if (instrLd2)
            {
                instrLd2->ReplaceSrc1(trueOpnd);

                instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd2->GetDst(), falseOpnd, instrBr->m_func);
                instrBr->InsertBefore(instrNew);
                instrNew->SetByteCodeOffset(instrBr);
                instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
            }
        }
    }

    // Fix InlineeEnd
    if (inlineeEndInstr)
    {
        this->InsertInlineeOnFLowEdge(instrBr->AsBranchInstr(), inlineeEndInstr, instrByteCode, origBrFunc, origBrByteCodeOffset, origBranchSrcOpndIsJITOpt, origBranchSrcSymId);
    }

    // Carry the compare dst's byte-code register on the branch for bailout.
    if (instr->GetDst()->AsRegOpnd()->m_sym->HasByteCodeRegSlot())
    {
        Assert(!instrBr->AsBranchInstr()->HasByteCodeReg());
        StackSym *dstSym = instr->GetDst()->AsRegOpnd()->m_sym;
        instrBr->AsBranchInstr()->SetByteCodeReg(dstSym->GetByteCodeRegSlot());
    }
    brSrc = brSrc->Copy(this->func);

    // We need brSrc later, but instr->Remove() might delete it...
    IR::AutoReuseOpnd brSrcAutoCopy(brSrc, this->func, true);
    instr->Remove();

    //
    // Try optimizing through a second branch.
    // Peep:
    //
    //      t2 = True;
    //      BrTrue $L1
    //      ...
    //  L1:
    //      t1 = Ld_A t2
    //      BrTrue $L2
    //
    // Into:
    //      t2 = True;
    //      t1 = True;
    //      BrTrue $L2 <---
    //      ...
    //  L1:
    //      t1 = Ld_A t2
    //      BrTrue $L2
    //
    // This cleanup helps expose second level Cm peeps.

    IR::Instr *instrLd3 = instrBr->AsBranchInstr()->GetTarget()->GetNextRealInstrOrLabel();

    // Skip over branch to branch
    while (instrLd3->m_opcode == Js::OpCode::Br)
    {
        instrLd3 = instrLd3->AsBranchInstr()->GetTarget()->GetNextRealInstrOrLabel();
    }

    // Find Ld_A
    if (instrLd3->m_opcode != Js::OpCode::Ld_A)
    {
        return instrBr;
    }

    IR::Instr *instrBr2 = instrLd3->GetNextRealInstrOrLabel();
    IR::Instr *inlineeEndInstr2 = nullptr;

    // InlineeEnd?
    // REVIEW: Can we handle 2 inlineeEnds?
    if (instrBr2->m_opcode == Js::OpCode::InlineeEnd && !inlineeEndInstr)
    {
        inlineeEndInstr2 = instrBr2;
        instrBr2 = instrBr2->GetNextRealInstrOrLabel();
    }

    // Find branch
    bool brIsTrue2;
    if (instrBr2->m_opcode == Js::OpCode::BrTrue_A)
    {
        brIsTrue2 = true;
    }
    else if (instrBr2->m_opcode == Js::OpCode::BrFalse_A)
    {
        brIsTrue2 = false;
    }
    else
    {
        return nullptr;
    }

    // Make sure Ld_A operates on the right tmps.
    if (!instrLd3->GetDst()->IsEqual(instrBr2->GetSrc1()) || !brSrc->IsEqual(instrLd3->GetSrc1()))
    {
        return nullptr;
    }

    if (instrLd3->GetDst()->IsEqual(instrBr->GetSrc1()) || instrLd3->GetDst()->IsEqual(instrBr->GetSrc2()))
    {
        return nullptr;
    }

    // Make sure that the reg we're assigning to is not live in the intervening instructions (if this is a forward branch).
    if (instrLd3->GetByteCodeOffset() > instrBr->GetByteCodeOffset())
    {
        StackSym *symLd3 = instrLd3->GetDst()->AsRegOpnd()->m_sym;
        if (IR::Instr::HasSymUseInRange(symLd3, instrBr->m_next, instrLd3))
        {
            return nullptr;
        }
    }

    //
    // We have a match!
    //

    if(inlineeEndInstr2)
    {
        origBrFunc = instrBr2->m_func;
        origBrByteCodeOffset = instrBr2->GetByteCodeOffset();
        origBranchSrcSymId = instrBr2->GetSrc1()->GetStackSym()->m_id;
    }

    // Fix Ld_A: pre-store the known boolean into the second Ld_A's dst.
    if (brIsTrue)
    {
        instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd3->GetDst(), trueOpnd, instrBr->m_func);
        instrBr->InsertBefore(instrNew);
        instrNew->SetByteCodeOffset(instrBr);
        instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
    }
    else
    {
        instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd3->GetDst(), falseOpnd, instrBr->m_func);
        instrBr->InsertBefore(instrNew);
        instrNew->SetByteCodeOffset(instrBr);
        instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
    }

    IR::LabelInstr *brTarget2;

    // Retarget branch
    if (brIsTrue2 == brIsTrue)
    {
        brTarget2 = instrBr2->AsBranchInstr()->GetTarget();
    }
    else
    {
        // Opposite polarity: branch to a fresh label just past the second branch.
        brTarget2 = IR::LabelInstr::New(Js::OpCode::Label, instrBr2->m_func);
        brTarget2->SetByteCodeOffset(instrBr2->m_next);
        instrBr2->InsertAfter(brTarget2);
    }

    instrBr->AsBranchInstr()->SetTarget(brTarget2);

    // InlineeEnd?
    if (inlineeEndInstr2)
    {
        this->InsertInlineeOnFLowEdge(instrBr->AsBranchInstr(), inlineeEndInstr2, instrByteCode, origBrFunc, origBrByteCodeOffset, origBranchSrcOpndIsJITOpt, origBranchSrcSymId);
    }

    return instrBr;
}
// Rewire a compare-branch that was hoisted past an InlineeEnd so that the
// inlinee is still properly terminated on the new out-of-inlinee path.
//
// instrBr                 - the hoisted branch (will be inverted).
// inlineeEndInstr         - the InlineeEnd that was skipped; cloned onto the new path.
// instrBytecode           - instruction supplying byte code offsets for the new label/InlineeEnd.
// origBrFunc              - the inliner func of the original branch.
// origByteCodeOffset      - byte code offset of the original branch.
// origBranchSrcOpndIsJITOpt / origBranchSrcSymId - original branch source sym info,
//                           recorded in a ByteCodeUses so a bailout before the
//                           branch can restore it.
void
FlowGraph::InsertInlineeOnFLowEdge(IR::BranchInstr *instrBr, IR::Instr *inlineeEndInstr, IR::Instr *instrBytecode, Func* origBrFunc, uint32 origByteCodeOffset, bool origBranchSrcOpndIsJITOpt, uint32 origBranchSrcSymId)
{
    // Helper for PeepsCm code.
    //
    // We've skipped some InlineeEnd. Globopt expects to see these
    // on all flow paths out of the inlinee. Insert an InlineeEnd
    // on the new path:
    //      BrEq $L1, a, b
    // Becomes:
    //      BrNeq $L2, a, b
    //      InlineeEnd
    //      Br $L1
    //  L2:

    instrBr->AsBranchInstr()->Invert();

    IR::BranchInstr *newBr = IR::BranchInstr::New(Js::OpCode::Br, instrBr->AsBranchInstr()->GetTarget(), origBrFunc);
    newBr->SetByteCodeOffset(origByteCodeOffset);
    instrBr->InsertAfter(newBr);

    IR::LabelInstr *newLabel = IR::LabelInstr::New(Js::OpCode::Label, instrBr->m_func);
    newLabel->SetByteCodeOffset(instrBytecode);
    newBr->InsertAfter(newLabel);
    instrBr->AsBranchInstr()->SetTarget(newLabel);

    IR::Instr *newInlineeEnd = IR::Instr::New(Js::OpCode::InlineeEnd, inlineeEndInstr->m_func);
    newInlineeEnd->SetSrc1(inlineeEndInstr->GetSrc1());
    newInlineeEnd->SetSrc2(inlineeEndInstr->GetSrc2());
    newInlineeEnd->SetByteCodeOffset(instrBytecode);
    newInlineeEnd->SetIsCloned(true);  // Mark it as cloned - this is used later by the inlinee args optimization
    newBr->InsertBefore(newInlineeEnd);

    IR::ByteCodeUsesInstr * useOrigBranchSrcInstr = IR::ByteCodeUsesInstr::New(origBrFunc, origByteCodeOffset);
    useOrigBranchSrcInstr->SetRemovedOpndSymbol(origBranchSrcOpndIsJITOpt, origBranchSrcSymId);
    newBr->InsertBefore(useOrigBranchSrcInstr);

    uint newBrFnNumber = newBr->m_func->GetFunctionNumber();
    Assert(newBrFnNumber == origBrFunc->GetFunctionNumber());

    // The function numbers of the new branch and the inlineeEnd instruction should be different (ensuring that the new branch is not added in the inlinee but in the inliner).
    // Only case when they can be same is recursive calls - inlinee and inliner are the same function
    Assert(newBrFnNumber != inlineeEndInstr->m_func->GetFunctionNumber() ||
        newBrFnNumber == inlineeEndInstr->m_func->GetParentFunc()->GetFunctionNumber());
}
- BasicBlock *
- BasicBlock::New(FlowGraph * graph)
- {
- BasicBlock * block;
- block = JitAnew(graph->alloc, BasicBlock, graph->alloc, graph->GetFunc());
- return block;
- }
// Prepend 'edge' to this block's predecessor edge list.
void
BasicBlock::AddPred(FlowEdge * edge, FlowGraph * graph)
{
    this->predList.Prepend(graph->alloc, edge);
}
// Prepend 'edge' to this block's successor edge list.
void
BasicBlock::AddSucc(FlowEdge * edge, FlowGraph * graph)
{
    this->succList.Prepend(graph->alloc, edge);
}
// Remove the incoming edge from 'block', freeing it (not moved to the dead
// list) and also removing it from block's successor list.
void
BasicBlock::RemovePred(BasicBlock *block, FlowGraph * graph)
{
    this->RemovePred(block, graph, true, false);
}
// Remove the outgoing edge to 'block', freeing it (not moved to the dead
// list) and also removing it from block's predecessor list.
void
BasicBlock::RemoveSucc(BasicBlock *block, FlowGraph * graph)
{
    this->RemoveSucc(block, graph, true, false);
}
// Remove the incoming edge from 'block', parking it on the dead-pred list
// instead of freeing it; also updates block's side of the edge.
void
BasicBlock::RemoveDeadPred(BasicBlock *block, FlowGraph * graph)
{
    this->RemovePred(block, graph, true, true);
}
// Remove the outgoing edge to 'block', parking it on the dead-succ list
// instead of freeing it; also updates block's side of the edge.
void
BasicBlock::RemoveDeadSucc(BasicBlock *block, FlowGraph * graph)
{
    this->RemoveSucc(block, graph, true, true);
}
// Remove (or park on the dead list) the edge coming in from 'block'.
// doCleanSucc: also remove the matching entry from block's successor list.
// moveToDead:  move the edge to this block's dead-pred list instead of freeing it.
// Asserts if no such edge exists.
void
BasicBlock::RemovePred(BasicBlock *block, FlowGraph * graph, bool doCleanSucc, bool moveToDead)
{
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetPredList(), iter)
    {
        if (edge->GetPred() == block)
        {
            // The edge lives on this->predList, so its succ is this block.
            BasicBlock *blockSucc = edge->GetSucc();
            if (moveToDead)
            {
                iter.MoveCurrentTo(this->GetDeadPredList());
            }
            else
            {
                iter.RemoveCurrent(graph->alloc);
            }
            if (doCleanSucc)
            {
                block->RemoveSucc(this, graph, false, moveToDead);
            }
            // A loop header left with exactly one predecessor marks its loop dead.
            if (blockSucc->isLoopHeader && blockSucc->loop && blockSucc->GetPredList()->HasOne())
            {
                Loop *loop = blockSucc->loop;
                loop->isDead = true;
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
// Remove (or park on the dead list) the edge going out to 'block'.
// doCleanPred: also remove the matching entry from block's predecessor list.
// moveToDead:  move the edge to this block's dead-succ list instead of freeing it.
// Asserts if no such edge exists.
void
BasicBlock::RemoveSucc(BasicBlock *block, FlowGraph * graph, bool doCleanPred, bool moveToDead)
{
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetSuccList(), iter)
    {
        if (edge->GetSucc() == block)
        {
            if (moveToDead)
            {
                iter.MoveCurrentTo(this->GetDeadSuccList());
            }
            else
            {
                iter.RemoveCurrent(graph->alloc);
            }

            if (doCleanPred)
            {
                block->RemovePred(this, graph, false, moveToDead);
            }

            // A loop header left with exactly one predecessor marks its loop dead.
            if (block->isLoopHeader && block->loop && block->GetPredList()->HasOne())
            {
                Loop *loop = block->loop;
                loop->isDead = true;
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
// Unlink (without freeing) the incoming edge from 'block', also unlinking
// the matching entry from block's successor list.
void
BasicBlock::UnlinkPred(BasicBlock *block)
{
    this->UnlinkPred(block, true);
}
// Unlink (without freeing) the outgoing edge to 'block', also unlinking
// the matching entry from block's predecessor list.
void
BasicBlock::UnlinkSucc(BasicBlock *block)
{
    this->UnlinkSucc(block, true);
}
// Unlink (without freeing) the incoming edge from 'block'.
// doCleanSucc: also unlink the matching entry on block's successor list.
// Asserts if no such edge exists.
void
BasicBlock::UnlinkPred(BasicBlock *block, bool doCleanSucc)
{
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetPredList(), iter)
    {
        if (edge->GetPred() == block)
        {
            iter.UnlinkCurrent();
            if (doCleanSucc)
            {
                block->UnlinkSucc(this, false);
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
// Unlink (without freeing) the outgoing edge to 'block'.
// doCleanPred: also unlink the matching entry on block's predecessor list.
// Asserts if no such edge exists.
void
BasicBlock::UnlinkSucc(BasicBlock *block, bool doCleanPred)
{
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetSuccList(), iter)
    {
        if (edge->GetSucc() == block)
        {
            iter.UnlinkCurrent();
            if (doCleanPred)
            {
                block->UnlinkPred(this, false);
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
- bool
- BasicBlock::IsLandingPad()
- {
- BasicBlock * nextBlock = this->GetNext();
- return nextBlock && nextBlock->loop && nextBlock->isLoopHeader && nextBlock->loop->landingPad == this;
- }
// Build the BailOutInfo for a loop-top bailout at this block's first
// instruction, filled in with the current globopt block state.
BailOutInfo *
BasicBlock::CreateLoopTopBailOutInfo(GlobOpt * globOpt)
{
    IR::Instr * firstInstr = this->GetFirstInstr();
    BailOutInfo* bailOutInfo = JitAnew(globOpt->func->m_alloc, BailOutInfo, firstInstr->GetByteCodeOffset(), firstInstr->m_func);
    bailOutInfo->isLoopTopBailOutInfo = true;
    globOpt->FillBailOutInfo(this, bailOutInfo);
#if ENABLE_DEBUG_CONFIG_OPTIONS
    // For tracing/diagnostics: tag the bailout with the loop-body-start opcode.
    bailOutInfo->bailOutOpcode = Js::OpCode::LoopBodyStart;
#endif
    return bailOutInfo;
}
// Remove a single instruction from a block that is being deleted. Returns
// the instruction the caller should treat as the last surviving one after
// the removal (see FlowGraph::RemoveBlock).
IR::Instr *
FlowGraph::RemoveInstr(IR::Instr *instr, GlobOpt * globOpt)
{
    IR::Instr *instrPrev = instr->m_prev;
    if (globOpt)
    {
        // Removing block during glob opt. Need to maintain the graph so that
        // bailout will record the byte code use in case the dead code is exposed
        // by dyno-pogo optimization (where bailout need the byte code uses from
        // the dead blocks where it may not be dead after bailing out)
        if (instr->IsLabelInstr())
        {
            // Keep the label (it may still be referenced) but it no longer
            // marks a loop top.
            instr->AsLabelInstr()->m_isLoopTop = false;
            return instr;
        }
        else if (instr->IsByteCodeUsesInstr())
        {
            // Already a byte-code-uses record; keep it as-is.
            return instr;
        }

        /*
         * Scope object has to be implicitly live whenever Heap Arguments object is live.
         *     - When we restore HeapArguments object in the bail out path, it expects the scope object also to be restored - if one was created.
         */
        Js::OpCode opcode = instr->m_opcode;
        if (opcode == Js::OpCode::LdElemI_A && instr->DoStackArgsOpt(this->func) &&
            globOpt->CurrentBlockData()->IsArgumentsOpnd(instr->GetSrc1()) && instr->m_func->GetScopeObjSym())
        {
            IR::ByteCodeUsesInstr * byteCodeUsesInstr = IR::ByteCodeUsesInstr::New(instr);
            byteCodeUsesInstr->SetNonOpndSymbol(instr->m_func->GetScopeObjSym()->m_id);
            instr->InsertAfter(byteCodeUsesInstr);
        }

        // Replace the instruction with a ByteCodeUses carrying its byte code
        // symbol uses (may yield nullptr if there is nothing to record).
        IR::ByteCodeUsesInstr * newByteCodeUseInstr = globOpt->ConvertToByteCodeUses(instr);
        if (newByteCodeUseInstr != nullptr)
        {
            // We don't care about property used in these instruction
            // It is only necessary for field copy prop so that we will keep the implicit call
            // up to the copy prop location.
            newByteCodeUseInstr->propertySymUse = nullptr;

            if (opcode == Js::OpCode::Yield)
            {
                // A dead Yield makes its resume label dead too: find the next
                // Label and drop the yield-resume bookkeeping for it.
                IR::Instr *instrLabel = newByteCodeUseInstr->m_next;
                while (instrLabel->m_opcode != Js::OpCode::Label)
                {
                    instrLabel = instrLabel->m_next;
                }
                func->RemoveDeadYieldOffsetResumeLabel(instrLabel->AsLabelInstr());
                instrLabel->AsLabelInstr()->m_hasNonBranchRef = false;
            }

            // Save the last instruction to update the block with
            return newByteCodeUseInstr;
        }
        else
        {
            return instrPrev;
        }
    }
    else
    {
        // Outside globopt: simply delete the instruction.
        instr->Remove();
        return instrPrev;
    }
}
// Delete 'block' from the flow graph: strip its instructions (preserving
// byte code uses when running under globopt), detach its edges, and drop
// its loop from the loop list if it was a loop header.
void
FlowGraph::RemoveBlock(BasicBlock *block, GlobOpt * globOpt, bool tailDuping)
{
    Assert(!block->isDead && !block->isDeleted);
    IR::Instr * lastInstr = nullptr;
    FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)
    {
        if (instr->m_opcode == Js::OpCode::FunctionExit)
        {
            // Removing FunctionExit causes problems downstream...
            // We could change the opcode, or have FunctionEpilog/FunctionExit to get
            // rid of the epilog.
            break;
        }
        if (instr == block->GetFirstInstr())
        {
            // The leading label stays, but no longer marks a loop top.
            Assert(instr->IsLabelInstr());
            instr->AsLabelInstr()->m_isLoopTop = false;
            instr->AsLabelInstr()->m_hasNonBranchRef = false;
        }
        else
        {
            lastInstr = this->RemoveInstr(instr, globOpt);
        }
    } NEXT_INSTR_IN_BLOCK_EDITING;

    if (lastInstr)
    {
        block->SetLastInstr(lastInstr);
    }

    // Detach this block from its neighbors' edge lists. Under globopt the
    // edges are moved to the neighbors' dead lists rather than freed.
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, block->GetPredList())
    {
        edge->GetPred()->RemoveSucc(block, this, false, globOpt != nullptr);
    } NEXT_SLISTBASECOUNTED_ENTRY;

    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, block->GetSuccList())
    {
        edge->GetSucc()->RemovePred(block, this, false, globOpt != nullptr);
    } NEXT_SLISTBASECOUNTED_ENTRY;

    if (block->isLoopHeader && this->loopList)
    {
        // If loop graph is built, remove loop from loopList
        Loop **pPrevLoop = &this->loopList;
        while (*pPrevLoop != block->loop)
        {
            pPrevLoop = &((*pPrevLoop)->next);
        }
        *pPrevLoop = (*pPrevLoop)->next;
        this->hasLoop = (this->loopList != nullptr);
    }
    if (globOpt != nullptr)
    {
        // Keep the block's own edges on its dead lists so bailout can still
        // consult them for this "dead" block.
        block->isDead = true;
        block->GetPredList()->MoveTo(block->GetDeadPredList());
        block->GetSuccList()->MoveTo(block->GetDeadSuccList());
    }
    if (tailDuping)
    {
        block->isDead = true;
    }
    block->isDeleted = true;
    block->SetDataUseCount(0);
}
- void
- BasicBlock::UnlinkInstr(IR::Instr * instr)
- {
- Assert(this->Contains(instr));
- Assert(this->GetFirstInstr() != this->GetLastInstr());
- if (instr == this->GetFirstInstr())
- {
- Assert(!this->GetFirstInstr()->IsLabelInstr());
- this->SetFirstInstr(instr->m_next);
- }
- else if (instr == this->GetLastInstr())
- {
- this->SetLastInstr(instr->m_prev);
- }
- instr->Unlink();
- }
- void
- BasicBlock::RemoveInstr(IR::Instr * instr)
- {
- Assert(this->Contains(instr));
- if (instr == this->GetFirstInstr())
- {
- this->SetFirstInstr(instr->m_next);
- }
- else if (instr == this->GetLastInstr())
- {
- this->SetLastInstr(instr->m_prev);
- }
- instr->Remove();
- }
- void
- BasicBlock::InsertInstrBefore(IR::Instr *newInstr, IR::Instr *beforeThisInstr)
- {
- Assert(this->Contains(beforeThisInstr));
- beforeThisInstr->InsertBefore(newInstr);
- if(this->GetFirstInstr() == beforeThisInstr)
- {
- Assert(!beforeThisInstr->IsLabelInstr());
- this->SetFirstInstr(newInstr);
- }
- }
- void
- BasicBlock::InsertInstrAfter(IR::Instr *newInstr, IR::Instr *afterThisInstr)
- {
- Assert(this->Contains(afterThisInstr));
- afterThisInstr->InsertAfter(newInstr);
- if (this->GetLastInstr() == afterThisInstr)
- {
- Assert(afterThisInstr->HasFallThrough());
- this->SetLastInstr(newInstr);
- }
- }
- void
- BasicBlock::InsertAfter(IR::Instr *newInstr)
- {
- Assert(this->GetLastInstr()->HasFallThrough());
- this->GetLastInstr()->InsertAfter(newInstr);
- this->SetLastInstr(newInstr);
- }
- void
- Loop::SetHasCall()
- {
- Loop * current = this;
- do
- {
- if (current->hasCall)
- {
- #if DBG
- current = current->parent;
- while (current)
- {
- Assert(current->hasCall);
- current = current->parent;
- }
- #endif
- break;
- }
- current->hasCall = true;
- current = current->parent;
- }
- while (current != nullptr);
- }
- void
- Loop::SetImplicitCallFlags(Js::ImplicitCallFlags newFlags)
- {
- Loop * current = this;
- do
- {
- if ((current->implicitCallFlags & newFlags) == newFlags)
- {
- #if DBG
- current = current->parent;
- while (current)
- {
- Assert((current->implicitCallFlags & newFlags) == newFlags);
- current = current->parent;
- }
- #endif
- break;
- }
- newFlags = (Js::ImplicitCallFlags)(implicitCallFlags | newFlags);
- current->implicitCallFlags = newFlags;
- current = current->parent;
- }
- while (current != nullptr);
- }
- Js::ImplicitCallFlags
- Loop::GetImplicitCallFlags()
- {
- if (this->implicitCallFlags == Js::ImplicitCall_HasNoInfo)
- {
- if (this->parent == nullptr)
- {
- // We don't have any information, and we don't have any parent, so just assume that there aren't any implicit calls
- this->implicitCallFlags = Js::ImplicitCall_None;
- }
- else
- {
- // We don't have any information, get it from the parent and hope for the best
- this->implicitCallFlags = this->parent->GetImplicitCallFlags();
- }
- }
- return this->implicitCallFlags;
- }
// Returns true when this loop's implicit-call profile permits field
// copy-propagation (delegates the decision to GlobOpt).
bool
Loop::CanDoFieldCopyProp()
{
#if DBG_DUMP
    // Trace-only diagnostic: report loops whose only implicit calls are
    // external, since those are the ones being disabled here.
    if (((this->implicitCallFlags & ~(Js::ImplicitCall_External)) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(_u("fieldcopyprop disabled because external: loop count: %d"), GetLoopNumber());
        GetFunc()->DumpFullFunctionName();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif
    return GlobOpt::ImplicitCallFlagsAllowOpts(this);
}
- bool
- Loop::CanHoistInvariants() const
- {
- Func * func = this->GetHeadBlock()->firstInstr->m_func->GetTopFunc();
- if (PHASE_OFF(Js::InvariantsPhase, func))
- {
- return false;
- }
- return true;
- }
// Returns the loop-top label, or nullptr if none is recorded. While the
// flow graph is alive the label comes from the head block; afterwards it
// comes from the cached loopTopLabel (see SetLoopTopInstr).
IR::LabelInstr *
Loop::GetLoopTopInstr() const
{
    IR::LabelInstr * instr = nullptr;
    if (this->topFunc->isFlowGraphValid)
    {
        instr = this->GetHeadBlock()->GetFirstInstr()->AsLabelInstr();
    }
    else
    {
        // Flowgraph gets torn down after the globopt, so can't get the loopTop from the head block.
        instr = this->loopTopLabel;
    }
    if (instr)
    {
        // Whatever the source, the label must be marked as a loop top.
        Assert(instr->m_isLoopTop);
    }
    return instr;
}
- void
- Loop::SetLoopTopInstr(IR::LabelInstr * loopTop)
- {
- this->loopTopLabel = loopTop;
- }
- bool
- Loop::IsSymAssignedToInSelfOrParents(StackSym * const sym) const
- {
- for (const Loop* curLoop = this; curLoop != nullptr; curLoop = curLoop->parent)
- {
- if (curLoop->symsAssignedToInLoop->Test(sym->m_id))
- {
- return true;
- }
- }
- return false;
- }
- #if DBG_DUMP
// Debug-dump helper: the profile-assigned loop number from the loop-top
// label, or Js::LoopHeader::NoLoop when the label carries no profile info.
uint
Loop::GetLoopNumber() const
{
    IR::LabelInstr * loopTopInstr = this->GetLoopTopInstr();
    if (loopTopInstr->IsProfiledLabelInstr())
    {
        return loopTopInstr->AsProfiledLabelInstr()->loopNum;
    }
    return Js::LoopHeader::NoLoop;
}
- bool
- BasicBlock::Contains(IR::Instr * instr)
- {
- FOREACH_INSTR_IN_BLOCK(blockInstr, this)
- {
- if (instr == blockInstr)
- {
- return true;
- }
- }
- NEXT_INSTR_IN_BLOCK;
- return false;
- }
- #endif
- FlowEdge *
- FlowEdge::New(FlowGraph * graph)
- {
- FlowEdge * edge;
- edge = JitAnew(graph->alloc, FlowEdge);
- return edge;
- }
- bool
- Loop::IsDescendentOrSelf(Loop const * loop) const
- {
- Loop const * currentLoop = loop;
- while (currentLoop != nullptr)
- {
- if (currentLoop == this)
- {
- return true;
- }
- currentLoop = currentLoop->parent;
- }
- return false;
- }
// Remove an instruction while keeping the flow graph's block boundaries
// consistent: instructions that delimit a block (its first or last instr)
// must go through BasicBlock::RemoveInstr so the block's bounds are updated;
// plain mid-block instructions can simply be unlinked.
void FlowGraph::SafeRemoveInstr(IR::Instr *instr)
{
    BasicBlock *block;

    if (instr->m_next->IsLabelInstr())
    {
        // The next instruction starts a block, so instr is the last
        // instruction of the preceding block.
        block = instr->m_next->AsLabelInstr()->GetBasicBlock()->GetPrev();
        block->RemoveInstr(instr);
    }
    else if (instr->IsLabelInstr())
    {
        // instr is itself a block-starting label.
        block = instr->AsLabelInstr()->GetBasicBlock();
        block->RemoveInstr(instr);
    }
    else
    {
        // Mid-block instruction: it must not start or end a basic block.
        Assert(!instr->EndsBasicBlock() && !instr->StartsBasicBlock());
        instr->Remove();
    }
}
// Determine whether 'src' is known to hold an unsigned (non-negative)
// value: either a non-negative integer constant, or the single-def result
// of an unsigned shift by zero (ShrU x, 0). On success, *pShrSrc1 receives
// the operand to compare with instead (the constant itself, or the ShrU's
// source).
bool FlowGraph::IsUnsignedOpnd(IR::Opnd *src, IR::Opnd **pShrSrc1)
{
    // Look for an unsigned constant, or the result of an unsigned shift by zero
    if (!src->IsRegOpnd())
    {
        return false;
    }
    if (!src->AsRegOpnd()->m_sym->IsSingleDef())
    {
        return false;
    }

    if (src->AsRegOpnd()->m_sym->IsIntConst())
    {
        // A non-negative constant is unsigned as-is.
        int32 intConst = src->AsRegOpnd()->m_sym->GetIntConstValue();

        if (intConst >= 0)
        {
            *pShrSrc1 = src;
            return true;
        }
        else
        {
            return false;
        }
    }

    IR::Instr * shrUInstr = src->AsRegOpnd()->m_sym->GetInstrDef();
    if (shrUInstr->m_opcode != Js::OpCode::ShrU_A)
    {
        return false;
    }

    // The shift count must be the (taggable) integer constant zero.
    IR::Opnd *shrCnt = shrUInstr->GetSrc2();
    if (!shrCnt->IsRegOpnd() || !shrCnt->AsRegOpnd()->m_sym->IsTaggableIntConst() || shrCnt->AsRegOpnd()->m_sym->GetIntConstValue() != 0)
    {
        return false;
    }

    // ShrU x, 0 is x reinterpreted as unsigned; compare against x directly.
    IR::Opnd *shrSrc = shrUInstr->GetSrc1();
    *pShrSrc1 = shrSrc;
    return true;
}
// Peephole: when both sources of a compare/branch are provably unsigned
// (see IsUnsignedOpnd), switch the instruction to its unsigned form and
// compare the original (pre-ShrU) operands. Each replaced operand is
// re-materialized via "Or_A x, 0" so the compare gets a JIT-created
// single-def source, and a ByteCodeUsesInstr keeps the original operand
// syms alive for bailout. Returns true if the rewrite was performed.
bool FlowGraph::UnsignedCmpPeep(IR::Instr *cmpInstr)
{
    IR::Opnd *cmpSrc1 = cmpInstr->GetSrc1();
    IR::Opnd *cmpSrc2 = cmpInstr->GetSrc2();
    IR::Opnd *newSrc1;
    IR::Opnd *newSrc2;

    // Look for something like:
    //  t1 = ShrU_A x, 0
    //  t2 = 10;
    //  BrGt t1, t2, L
    //
    // Peep to:
    //
    //  t1 = ShrU_A x, 0
    //  t2 = 10;
    //  t3 = Or_A x, 0
    //  BrUnGt x, t3, L
    //  ByteCodeUse t1
    //
    // Hopefully dead-store can get rid of the ShrU

    // Only worthwhile/safe when the globopt will int-specialize aggressively.
    if (!this->func->DoGlobOpt() || !GlobOpt::DoAggressiveIntTypeSpec(this->func) || !GlobOpt::DoLossyIntTypeSpec(this->func))
    {
        return false;
    }

    if (cmpInstr->IsBranchInstr() && !cmpInstr->AsBranchInstr()->IsConditional())
    {
        return false;
    }

    if (!cmpInstr->GetSrc2())
    {
        return false;
    }

    if (!this->IsUnsignedOpnd(cmpSrc1, &newSrc1))
    {
        return false;
    }
    if (!this->IsUnsignedOpnd(cmpSrc2, &newSrc2))
    {
        return false;
    }

    switch(cmpInstr->m_opcode)
    {
    // (In)equality is sign-agnostic: keep the opcode, just swap operands below.
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::CmEq_A:
    case Js::OpCode::CmNeq_A:
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmSrNeq_A:
        break;
    // Relational compares get their unsigned counterparts.
    case Js::OpCode::BrLe_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnLe_A;
        break;
    case Js::OpCode::BrLt_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnLt_A;
        break;
    case Js::OpCode::BrGe_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnGe_A;
        break;
    case Js::OpCode::BrGt_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnGt_A;
        break;
    case Js::OpCode::CmLe_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnLe_A;
        break;
    case Js::OpCode::CmLt_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnLt_A;
        break;
    case Js::OpCode::CmGe_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnGe_A;
        break;
    case Js::OpCode::CmGt_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnGt_A;
        break;

    default:
        return false;
    }

    IR::ByteCodeUsesInstr * bytecodeInstr = IR::ByteCodeUsesInstr::New(cmpInstr);
    cmpInstr->InsertAfter(bytecodeInstr);

    if (cmpSrc1 != newSrc1)
    {
        // Record the original source for bailout before replacing it.
        if (cmpSrc1->IsRegOpnd() && !cmpSrc1->GetIsJITOptimizedReg())
        {
            bytecodeInstr->Set(cmpSrc1);
        }
        // unsignedSrc = Or_A newSrc1, 0 — identity op giving a fresh single def.
        IR::RegOpnd * unsignedSrc = IR::RegOpnd::New(newSrc1->GetType(), cmpInstr->m_func);
        IR::Instr * orZero = IR::Instr::New(Js::OpCode::Or_A, unsignedSrc, newSrc1, IR::IntConstOpnd::New(0, TyMachReg, cmpInstr->m_func), cmpInstr->m_func);
        orZero->SetByteCodeOffset(cmpInstr);
        cmpInstr->InsertBefore(orZero);
        cmpInstr->ReplaceSrc1(unsignedSrc);
        if (newSrc1->IsRegOpnd())
        {
            cmpInstr->GetSrc1()->AsRegOpnd()->SetIsJITOptimizedReg(true);
            orZero->GetSrc1()->SetIsJITOptimizedReg(true);
        }
    }
    if (cmpSrc2 != newSrc2)
    {
        // Same treatment for the second source.
        if (cmpSrc2->IsRegOpnd() && !cmpSrc2->GetIsJITOptimizedReg())
        {
            bytecodeInstr->Set(cmpSrc2);
        }
        IR::RegOpnd * unsignedSrc = IR::RegOpnd::New(newSrc2->GetType(), cmpInstr->m_func);
        IR::Instr * orZero = IR::Instr::New(Js::OpCode::Or_A, unsignedSrc, newSrc2, IR::IntConstOpnd::New(0, TyMachReg, cmpInstr->m_func), cmpInstr->m_func);
        orZero->SetByteCodeOffset(cmpInstr);
        cmpInstr->InsertBefore(orZero);
        cmpInstr->ReplaceSrc2(unsignedSrc);
        if (newSrc2->IsRegOpnd())
        {
            cmpInstr->GetSrc2()->AsRegOpnd()->SetIsJITOptimizedReg(true);
            orZero->GetSrc1()->SetIsJITOptimizedReg(true);
        }
    }

    return true;
}
- #if DBG
// Debug validation: check that every flow-graph edge is consistent with the
// loop nesting recorded on the blocks, and (when break-block removal ran)
// that no break blocks remain.
void
FlowGraph::VerifyLoopGraph()
{
    FOREACH_BLOCK(block, this)
    {
        Loop *loop = block->loop;
        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            if (loop == succ->loop)
            {
                // Same loop: only the recorded head block may be a header.
                Assert(succ->isLoopHeader == false || loop->GetHeadBlock() == succ);
                continue;
            }
            if (succ->isLoopHeader)
            {
                // Edge into a loop header: either entering a direct child
                // loop, or a back/exit edge to a loop we are not nested in.
                Assert(succ->loop->parent == loop
                    || (!loop->IsDescendentOrSelf(succ->loop)));
                continue;
            }
            // Edge out of a loop: may only land in an enclosing loop (or none).
            Assert(succ->loop == nullptr || succ->loop->IsDescendentOrSelf(loop));
        } NEXT_SUCCESSOR_BLOCK;

        if (!PHASE_OFF(Js::RemoveBreakBlockPhase, this->GetFunc()))
        {
            // Make sure all break blocks have been removed.
            if (loop && !block->isLoopHeader && !(this->func->HasTry() && !this->func->DoOptimizeTry()))
            {
                Assert(loop->IsDescendentOrSelf(block->GetPrev()->loop));
            }
        }
    } NEXT_BLOCK;
}
- #endif
- #if DBG_DUMP
// Dump the flow graph when FGBuild phase dumping is enabled. 'form' is an
// optional printf-style header printed before the dump; onlyOnVerboseMode
// additionally gates the dump on the global Verbose flag.
void
FlowGraph::Dump(bool onlyOnVerboseMode, const char16 *form)
{
    if(PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
    {
        if (!onlyOnVerboseMode || Js::Configuration::Global.flags.Verbose)
        {
            if (form)
            {
                Output::Print(form);
            }
            this->Dump();
        }
    }
}
// Unconditional dump: print each block header (indented by its loop-nesting
// depth), then the blocks of each loop in the loop list.
void
FlowGraph::Dump()
{
    Output::Print(_u("\nFlowGraph\n"));
    FOREACH_BLOCK(block, this)
    {
        // One indent unit per level of loop nesting.
        Loop * loop = block->loop;
        while (loop)
        {
            Output::Print(_u(" "));
            loop = loop->parent;
        }
        block->DumpHeader(false);
    } NEXT_BLOCK;

    Output::Print(_u("\nLoopGraph\n"));
    for (Loop *loop = this->loopList; loop; loop = loop->next)
    {
        Output::Print(_u("\nLoop\n"));
        FOREACH_BLOCK_IN_LOOP(block, loop)
        {
            block->DumpHeader(false);
        }NEXT_BLOCK_IN_LOOP;
        Output::Print(_u("Loop Ends\n"));
    }
}
// Print this block's one-line header: number, status flags, predecessor /
// successor / dead-edge lists, and (for loop head blocks) the loop's
// call/implicit-call summary. insertCR adds a blank line before and after.
void
BasicBlock::DumpHeader(bool insertCR)
{
    if (insertCR)
    {
        Output::Print(_u("\n"));
    }
    Output::Print(_u("BLOCK %d:"), this->number);

    if (this->isDead)
    {
        Output::Print(_u(" **** DEAD ****"));
    }

    // The break/airlock/compensation markers are mutually exclusive.
    if (this->isBreakBlock)
    {
        Output::Print(_u(" **** Break Block ****"));
    }
    else if (this->isAirLockBlock)
    {
        Output::Print(_u(" **** Air lock Block ****"));
    }
    else if (this->isBreakCompensationBlockAtSource)
    {
        Output::Print(_u(" **** Break Source Compensation Code ****"));
    }
    else if (this->isBreakCompensationBlockAtSink)
    {
        Output::Print(_u(" **** Break Sink Compensation Code ****"));
    }
    else if (this->isAirLockCompensationBlock)
    {
        Output::Print(_u(" **** Airlock block Compensation Code ****"));
    }

    // Live predecessor block numbers.
    if (!this->predList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" In("));
        FOREACH_PREDECESSOR_BLOCK(blockPred, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockPred->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_PREDECESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    // Live successor block numbers.
    if (!this->succList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" Out("));
        FOREACH_SUCCESSOR_BLOCK(blockSucc, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockSucc->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_SUCCESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    // Dead (removed) predecessor edges.
    if (!this->deadPredList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" DeadIn("));
        FOREACH_DEAD_PREDECESSOR_BLOCK(blockPred, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockPred->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_DEAD_PREDECESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    // Dead (removed) successor edges.
    if (!this->deadSuccList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" DeadOut("));
        FOREACH_DEAD_SUCCESSOR_BLOCK(blockSucc, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockSucc->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_DEAD_SUCCESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    if (this->loop)
    {
        // NOTE(review): the loop's own number is printed as loopNumber - 1
        // while the parent's is printed unadjusted — confirm the intended
        // numbering base for each.
        Output::Print(_u(" Loop(%d) header: %d"), this->loop->loopNumber - 1, this->loop->GetHeadBlock()->GetBlockNum());

        if (this->loop->parent)
        {
            Output::Print(_u(" parent(%d): %d"), this->loop->parent->loopNumber, this->loop->parent->GetHeadBlock()->GetBlockNum());
        }

        // Only the loop's head block carries the call/implicit-call summary.
        if (this->loop->GetHeadBlock() == this)
        {
            Output::SkipToColumn(50);
            Output::Print(_u("Call Exp/Imp: "));
            if (this->loop->GetHasCall())
            {
                Output::Print(_u("yes/"));
            }
            else
            {
                Output::Print(_u(" no/"));
            }
            Output::Print(Js::DynamicProfileInfo::GetImplicitCallFlagsString(this->loop->GetImplicitCallFlags()));
        }
    }

    Output::Print(_u("\n"));
    if (insertCR)
    {
        Output::Print(_u("\n"));
    }
}
// Dump every instruction in this block.
// Dumping the first instruction (label) will dump the block header as well.
void
BasicBlock::Dump()
{
    FOREACH_INSTR_IN_BLOCK(instr, this)
    {
        instr->Dump();
    }
    NEXT_INSTR_IN_BLOCK;
}
// Dump the initial/final type addresses tracked by this bucket; both must
// have been populated before dumping.
void
AddPropertyCacheBucket::Dump() const
{
    Assert(this->initialType != nullptr);
    Assert(this->finalType != nullptr);
    Output::Print(_u(" initial type: 0x%x, final type: 0x%x "), this->initialType->GetAddr(), this->finalType->GetAddr());
}
// Dump the guarded property operations tracked by this bucket.
void
ObjTypeGuardBucket::Dump() const
{
    Assert(this->guardedPropertyOps != nullptr);
    this->guardedPropertyOps->Dump();
}
// Dump the write guards tracked by this bucket.
void
ObjWriteGuardBucket::Dump() const
{
    Assert(this->writeGuards != nullptr);
    this->writeGuards->Dump();
}
- #endif
// Prune this block's GlobOpt value tables (symToValueMap and exprToValueMap)
// of entries for symbols that the backward pass says are dead, so the tables
// don't grow without bound during the forward pass. Also drops the block's
// upwardExposedUses/Fields and cloneStrCandidates bit vectors, which are not
// needed after this point. Runs only after enough instructions have been
// processed since the last cleanup — except for landing pads, which are
// always cleaned so a second optimization pass sees the same state.
void
BasicBlock::CleanUpValueMaps()
{
    // Don't do cleanup if it's been done recently.
    // Landing pad could get optimized twice...
    // We want the same info out the first and second time. So always cleanup.
    // Increasing the cleanup threshold count for asmjs to 500
    uint cleanupCount = (!this->globOptData.globOpt->GetIsAsmJSFunc()) ? CONFIG_FLAG(GoptCleanupThreshold) : CONFIG_FLAG(AsmGoptCleanupThreshold);
    if (!this->IsLandingPad() && this->globOptData.globOpt->instrCountSinceLastCleanUp < cleanupCount)
    {
        return;
    }
    this->globOptData.globOpt->instrCountSinceLastCleanUp = 0;

    JitArenaAllocator* tempAlloc = this->globOptData.globOpt->tempAlloc;

    GlobHashTable *thisTable = this->globOptData.symToValueMap;
    // deadSymsBv: candidates for removal; keepAliveSymsBv: syms that other
    // values name as their symStore and therefore must survive;
    // availableValueNumbers: value numbers still reachable from the table.
    BVSparse<JitArenaAllocator> deadSymsBv(tempAlloc);
    BVSparse<JitArenaAllocator> keepAliveSymsBv(tempAlloc);
    BVSparse<JitArenaAllocator> availableValueNumbers(tempAlloc);
    availableValueNumbers.Copy(this->globOptData.globOpt->byteCodeConstantValueNumbersBv);
    BVSparse<JitArenaAllocator> *upwardExposedUses = this->upwardExposedUses;
    BVSparse<JitArenaAllocator> *upwardExposedFields = this->upwardExposedFields;
    bool isInLoop = !!this->loop;

    // Syms participating in an open call sequence must keep their values even
    // if backward data flow considers them dead (see comment below).
    BVSparse<JitArenaAllocator> symsInCallSequence(tempAlloc);
    SListBase<IR::Opnd *> * callSequence = this->globOptData.callSequence;
    if (callSequence && !callSequence->Empty())
    {
        FOREACH_SLISTBASE_ENTRY(IR::Opnd *, opnd, callSequence)
        {
            StackSym * sym = opnd->GetStackSym();
            symsInCallSequence.Set(sym->m_id);
        }
    }
    NEXT_SLISTBASE_ENTRY;

    // Pass 1: walk every bucket of symToValueMap, removing or marking dead
    // entries and recording which value numbers remain available.
    for (uint i = 0; i < thisTable->tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY_EDITING(GlobHashBucket, bucket, &thisTable->table[i], iter)
        {
            Sym * sym = bucket.value;
            bool isSymUpwardExposed = upwardExposedUses->Test(sym->m_id) || upwardExposedFields->Test(sym->m_id);
            if (!isSymUpwardExposed && symsInCallSequence.Test(sym->m_id))
            {
                // Don't remove/shrink sym-value pair if the sym is referenced in callSequence even if the sym is dead according to backward data flow.
                // This is possible in some edge cases that an infinite loop is involved when evaluating parameter for a function (between StartCall and Call),
                // there is no backward data flow into the infinite loop block, but non empty callSequence still populates to it in this (forward) pass
                // which causes error when looking up value for the syms in callSequence (cannot find the value).
                // It would cause error to fill out the bailout information for the loop blocks.
                // Remove dead syms from callSequence has some risk because there are various associated counters which need to be consistent.
                continue;
            }
            // Make sure symbol was created before backward pass.
            // If symbols isn't upward exposed, mark it as dead.
            // If a symbol was copy-prop'd in a loop prepass, the upwardExposedUses info could be wrong. So wait until we are out of the loop before clearing it.
            bool isSymFieldPRESymStore = isInLoop && this->loop->fieldPRESymStores->Test(sym->m_id);
            if ((SymID)sym->m_id <= this->globOptData.globOpt->maxInitialSymID && !isSymUpwardExposed && !isSymFieldPRESymStore
                && (!isInLoop || !this->globOptData.globOpt->prePassCopyPropSym->Test(sym->m_id)))
            {
                Value *val = bucket.element;
                ValueInfo *valueInfo = val->GetValueInfo();

                Sym *symStore = valueInfo->GetSymStore();

                if (symStore && symStore == sym)
                {
                    // Keep constants around, as we don't know if there will be further uses
                    if (!bucket.element->GetValueInfo()->IsVarConstant() && !bucket.element->GetValueInfo()->HasIntConstantValue())
                    {
                        // Symbol may still be a copy-prop candidate.  Wait before deleting it.
                        deadSymsBv.Set(sym->m_id);

                        // Make sure the type sym is added to the dead syms vector as well, because type syms are
                        // created in backward pass and so their symIds > maxInitialSymID.
                        if (sym->IsStackSym() && sym->AsStackSym()->HasObjectTypeSym())
                        {
                            deadSymsBv.Set(sym->AsStackSym()->GetObjectTypeSym()->m_id);
                        }
                    }
                    availableValueNumbers.Set(val->GetValueNumber());
                }
                else
                {
                    // Make sure the type sym is added to the dead syms vector as well, because type syms are
                    // created in backward pass and so their symIds > maxInitialSymID. Perhaps we could remove
                    // it explicitly here, but would it work alright with the iterator?
                    if (sym->IsStackSym() && sym->AsStackSym()->HasObjectTypeSym())
                    {
                        deadSymsBv.Set(sym->AsStackSym()->GetObjectTypeSym()->m_id);
                    }

                    // Not a copy-prop candidate; delete it right away.
                    iter.RemoveCurrent(thisTable->alloc);
                    this->globOptData.liveInt32Syms->Clear(sym->m_id);
                    this->globOptData.liveLossyInt32Syms->Clear(sym->m_id);
                    this->globOptData.liveFloat64Syms->Clear(sym->m_id);
                    this->globOptData.SetChangedSym(sym);
                }
            }
            else
            {
                if (sym->IsPropertySym() && !this->globOptData.liveFields->Test(sym->m_id))
                {
                    // Remove propertySyms which are not live anymore.
                    iter.RemoveCurrent(thisTable->alloc);
                    this->globOptData.liveInt32Syms->Clear(sym->m_id);
                    this->globOptData.liveLossyInt32Syms->Clear(sym->m_id);
                    this->globOptData.liveFloat64Syms->Clear(sym->m_id);
                }
                else
                {
                    // Look at the copy-prop candidate. We don't want to get rid of the data for a symbol which is
                    // a copy-prop candidate.
                    Value *val = bucket.element;
                    ValueInfo *valueInfo = val->GetValueInfo();

                    Sym *symStore = valueInfo->GetSymStore();

                    if (symStore && symStore != sym)
                    {
                        keepAliveSymsBv.Set(symStore->m_id);
                        if (symStore->IsStackSym() && symStore->AsStackSym()->HasObjectTypeSym())
                        {
                            keepAliveSymsBv.Set(symStore->AsStackSym()->GetObjectTypeSym()->m_id);
                        }
                    }
                    availableValueNumbers.Set(val->GetValueNumber());
                }
            }
        } NEXT_SLISTBASE_ENTRY_EDITING;
    }

    deadSymsBv.Minus(&keepAliveSymsBv);

    // Now cleanup exprToValueMap table
    ExprHashTable *thisExprTable = this->globOptData.exprToValueMap;
    bool oldHasCSECandidatesValue = this->globOptData.hasCSECandidates;  // Could be false if none need bailout.
    this->globOptData.hasCSECandidates = false;

    for (uint i = 0; i < thisExprTable->tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY_EDITING(ExprHashBucket, bucket, &thisExprTable->table[i], iter)
        {
            ExprHash hash = bucket.value;
            ValueNumber src1ValNum = hash.GetSrc1ValueNumber();
            ValueNumber src2ValNum = hash.GetSrc2ValueNumber();

            // If src1Val or src2Val are not available anymore, no point keeping this CSE candidate
            bool removeCurrent = false;
            if ((src1ValNum && !availableValueNumbers.Test(src1ValNum))
                || (src2ValNum && !availableValueNumbers.Test(src2ValNum)))
            {
                removeCurrent = true;
            }
            else
            {
                // If we are keeping this value, make sure we also keep the symStore in the value table
                removeCurrent = true; // Remove by default, unless it's set to false later below.
                Value *val = bucket.element;
                if (val)
                {
                    Sym *symStore = val->GetValueInfo()->GetSymStore();
                    if (symStore)
                    {
                        Value *symStoreVal = this->globOptData.FindValue(symStore);

                        if (symStoreVal && symStoreVal->GetValueNumber() == val->GetValueNumber())
                        {
                            removeCurrent = false;
                            deadSymsBv.Clear(symStore->m_id);
                            if (symStore->IsStackSym() && symStore->AsStackSym()->HasObjectTypeSym())
                            {
                                deadSymsBv.Clear(symStore->AsStackSym()->GetObjectTypeSym()->m_id);
                            }
                        }
                    }
                }
            }

            if(removeCurrent)
            {
                iter.RemoveCurrent(thisExprTable->alloc);
            }
            else
            {
                // At least one CSE candidate survived; restore the flag.
                this->globOptData.hasCSECandidates = oldHasCSECandidatesValue;
            }
        } NEXT_SLISTBASE_ENTRY_EDITING;
    }

    // Finally drop the value-table entries for the remaining dead syms and
    // record them as changed.
    FOREACH_BITSET_IN_SPARSEBV(dead_id, &deadSymsBv)
    {
        thisTable->Clear(dead_id);
        Sym* sym = this->func->m_symTable->Find(dead_id);
        this->globOptData.SetChangedSym(sym);
    }
    NEXT_BITSET_IN_SPARSEBV;

    if (!deadSymsBv.IsEmpty())
    {
        if (this->func->IsJitInDebugMode())
        {
            // Do not remove non-temp local vars from liveVarSyms (i.e. do not let them become dead).
            // We will need to restore all initialized/used so far non-temp local during bail out.
            // (See BackwardPass::ProcessBailOutInfo)
            Assert(this->func->m_nonTempLocalVars);
            BVSparse<JitArenaAllocator> tempBv(tempAlloc);
            tempBv.Minus(&deadSymsBv, this->func->m_nonTempLocalVars);
            this->globOptData.liveVarSyms->Minus(&tempBv);
#if DBG
            tempBv.And(this->globOptData.liveInt32Syms, this->func->m_nonTempLocalVars);
            AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveInt32Syms?");
            tempBv.And(this->globOptData.liveLossyInt32Syms, this->func->m_nonTempLocalVars);
            AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveLossyInt32Syms?");
            tempBv.And(this->globOptData.liveFloat64Syms, this->func->m_nonTempLocalVars);
            AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveFloat64Syms?");
#endif
        }
        else
        {
            this->globOptData.liveVarSyms->Minus(&deadSymsBv);
        }

        this->globOptData.liveInt32Syms->Minus(&deadSymsBv);
        this->globOptData.liveLossyInt32Syms->Minus(&deadSymsBv);
        this->globOptData.liveFloat64Syms->Minus(&deadSymsBv);
    }

    // The backward-pass bit vectors are no longer needed after cleanup.
    JitAdelete(this->globOptData.globOpt->alloc, upwardExposedUses);
    this->upwardExposedUses = nullptr;
    JitAdelete(this->globOptData.globOpt->alloc, upwardExposedFields);
    this->upwardExposedFields = nullptr;
    if (this->cloneStrCandidates)
    {
        JitAdelete(this->globOptData.globOpt->alloc, this->cloneStrCandidates);
        this->cloneStrCandidates = nullptr;
    }
}
// Path-dependent branch folding may only walk past instructions with no
// observable effect on the folded path: non-real instructions, simple
// register loads (Ld_A / Ld_I4), and ByteCodeUses markers. Anything else
// stops the walk.
bool IsLegalOpcodeForPathDepBrFold(IR::Instr *instr)
{
    if (!instr->IsRealInstr())
    {
        return true;
    }
    switch (instr->m_opcode)
    {
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
    case Js::OpCode::ByteCodeUses:
        return true;
    }
#if DBG
    // Trace which instruction blocked the folding walk.
    if (PHASE_TRACE(Js::PathDepBranchFoldingPhase, instr->m_func) && Js::Configuration::Global.flags.Verbose)
    {
        Output::Print(_u("Skipping PathDependentBranchFolding due to: "));
        instr->Dump();
    }
#endif
    return false;
}
// Try to prove the direction of a conditional branch using constant values:
// values produced during the current folding walk live in localSymToValueMap
// and take precedence over this block's merged GlobOpt value table. Returns
// the proven target label (the branch target if proven true, the fall-through
// label if proven false), or nullptr when the branch cannot be proven.
IR::LabelInstr* BasicBlock::CanProveConditionalBranch(IR::BranchInstr *branch, GlobOpt* globOpt, GlobHashTable * localSymToValueMap)
{
    if (!branch->GetSrc1() || !branch->GetSrc1()->GetStackSym())
    {
        return nullptr;
    }

    Value *src1Val = nullptr, *src2Val = nullptr;
    Js::Var src1Var = nullptr, src2Var = nullptr;

    // Local (per-walk) values shadow the block's merged table.
    auto FindValueInLocalThenGlobalValueTable = [&](StackSym *sym) -> Value *
    {
        Value ** srcValPtr = localSymToValueMap->Get(sym);
        Value * srcVal = nullptr;
        if (!srcValPtr)
        {
            srcVal = this->globOptData.FindValue(sym);
        }
        else
        {
            srcVal = *srcValPtr;
        }
        if (!srcVal)
        {
            return nullptr;
        }
        return srcVal;
    };

    src1Val = FindValueInLocalThenGlobalValueTable(branch->GetSrc1()->GetStackSym());
    if (!src1Val)
    {
        return nullptr;
    }
    src1Var = globOpt->GetConstantVar(branch->GetSrc1(), src1Val);

    if (branch->GetSrc2() != nullptr)
    {
        if (branch->GetSrc2()->GetStackSym())
        {
            src2Val = FindValueInLocalThenGlobalValueTable(branch->GetSrc2()->GetStackSym());
        }
        if (!src2Val)
        {
            return nullptr;
        }
        src2Var = globOpt->GetConstantVar(branch->GetSrc2(), src2Val);
    }

    bool provenTrue;
    if (!globOpt->CanProveConditionalBranch(branch, src1Val, src2Val, src1Var, src2Var, &provenTrue))
    {
        return nullptr;
    }

    // Proven: the effective target is either the branch target or the label
    // immediately following the branch.
    IR::LabelInstr * newTarget = provenTrue ? branch->GetTarget() : branch->GetNextRealInstrOrLabel()->AsLabelInstr();

    if (PHASE_TRACE(Js::PathDepBranchFoldingPhase, this->func))
    {
        Output::Print(_u("TRACE PathDependentBranchFolding: "));
        Output::Print(_u("Can prove retarget of branch in Block %d from Block %d to Block %d in func %s\n"),
            this->GetBlockNum(),
            this->GetLastInstr()->IsBranchInstr() ? this->GetLastInstr()->AsBranchInstr()->GetTarget()->GetBasicBlock()->GetBlockNum() : this->GetNext()->GetBlockNum(),
            newTarget->GetBasicBlock()->GetBlockNum(),
            this->func->GetJITFunctionBody()->GetDisplayName());
        Output::Flush();
    }
    return newTarget;
}
// Starting from this block's terminator, repeatedly follow (and, when
// provable, fold) branches as long as every instruction crossed is legal to
// skip. Each proven conditional branch retargets this block with a new
// unconditional Br and prunes the now-dead successor edges. Returns true if
// at least one retarget happened.
bool
BasicBlock::CheckLegalityAndFoldPathDepBranches(GlobOpt* globOpt)
{
    IR::LabelInstr * lastBranchTarget = nullptr;

    // For the block we start processing from, we maintain a valueTable, we go over the instrs in the block
    // If we find a Ld_A, we find if the src1 already has a value in the local valueTable, if not we use the value from the pred's valueTable merge
    // For other instrs, we assign null value
    GlobHashTable * localSymToValueMap = GlobHashTable::New(globOpt->alloc, 8);

    // Seed the local value map from this block's own instructions.
    FOREACH_INSTR_IN_BLOCK(instr, this)
    {
        if (OpCodeAttr::HasDeadFallThrough(instr->m_opcode))
        {
            return false;
        }
        if (instr->GetDst())
        {
            if (instr->GetDst()->GetStackSym())
            {
                if (instr->m_opcode == Js::OpCode::Ld_A)
                {
                    Value **localValue = localSymToValueMap->FindOrInsertNew(instr->GetDst()->GetSym());
                    if (instr->GetSrc1()->GetSym())
                    {
                        Value **localSrc1Value = localSymToValueMap->Get(instr->GetSrc1()->GetSym());
                        *localValue = localSrc1Value == nullptr ? this->globOptData.FindValue(instr->GetSrc1()->GetSym()) : *localSrc1Value;
                    }
                }
                else
                {
                    // complex instr, can't track value, insert nullptr
                    Value **localValue = localSymToValueMap->FindOrInsertNew(instr->GetDst()->GetSym());
                    *localValue = nullptr;
                }
            }
        }
    } NEXT_INSTR_IN_BLOCK;

    IR::Instr * instr = this->GetLastInstr();
    IR::LabelInstr * currentLabel = nullptr;

    /* We start from the current instruction and go on scanning for legality, as long as it is legal to skip an instruction, skip.
     * When we see an unconditional branch, start scanning from the branchTarget
     * When we see a conditional branch, check if we can prove the branch target, if we can, adjust the flowgraph, and continue in the direction of the proven target
     * We stop, when we no longer can skip instructions, either due to legality check or a non provable conditional branch
     */
    while (instr)
    {
        // Any retarget already performed is preserved by the early returns.
        if (!instr->IsBranchInstr() && !instr->IsLabelInstr() && !IsLegalOpcodeForPathDepBrFold(instr))
        {
            return lastBranchTarget != nullptr;
        }
        if (OpCodeAttr::HasDeadFallThrough(instr->m_opcode)) // BailOnNoProfile etc
        {
            return lastBranchTarget != nullptr;
        }
        if (instr->GetDst())
        {
            if (!instr->GetDst()->IsRegOpnd()) // complex dstOpnd, stop.
            {
                return lastBranchTarget != nullptr;
            }
            IR::RegOpnd *dst = instr->GetDst()->AsRegOpnd();
            if (currentLabel)
            {
                BasicBlock * currentBlock = currentLabel->GetBasicBlock();
                // If the dstOpnd is defined and used in the same block, it is legal, but if the successor blocks use it, stop.
                if (currentBlock->successorBlockUses && currentBlock->successorBlockUses->Test(dst->GetStackSym()->m_id))
                {
                    return lastBranchTarget != nullptr;
                }
            }
            if (instr->GetSrc1())
            {
                // If the src of the load instruction is produced in the currentBlock, its value will not be available in the valuetable, check in local valuetable
                Value **localValue = localSymToValueMap->FindOrInsertNew(instr->GetDst()->GetSym());
                if (instr->GetSrc1()->GetSym())
                {
                    Value **localSrc1Value = localSymToValueMap->Get(instr->GetSrc1()->GetSym());
                    *localValue = localSrc1Value == nullptr ? this->globOptData.FindValue(instr->GetSrc1()->GetSym()) : *localSrc1Value;
                }
                else
                {
                    ValueType src1Value = instr->GetSrc1()->GetValueType();
                    if (src1Value.IsUndefined())
                    {
                        *localValue = globOpt->NewGenericValue(ValueType::Undefined, instr->GetDst());
                    }
                    // MGTODO : handle other constants
                    else
                    {
                        return lastBranchTarget != nullptr;
                    }
                }
            }
        }
        if (instr->IsLabelInstr())
        {
            if (instr->AsLabelInstr()->m_isLoopTop)
            {
                // don't cross over to loops
                return lastBranchTarget != nullptr;
            }
            currentLabel = instr->AsLabelInstr();
        }
        if (instr->IsBranchInstr())
        {
            IR::BranchInstr* branch = instr->AsBranchInstr();
            IR::LabelInstr* branchTarget = nullptr;
            if (branch->IsUnconditional())
            {
                // Follow the unconditional branch (unless it enters a loop).
                branchTarget = branch->GetTarget();
                if (!branchTarget)
                {
                    return lastBranchTarget != nullptr;
                }
                if (branchTarget->m_isLoopTop)
                {
                    return lastBranchTarget != nullptr;
                }
            }
            else
            {
                if (branch->GetTarget()->m_isLoopTop)
                {
                    return lastBranchTarget != nullptr;
                }
                // Conditional: only continue if the direction can be proven.
                branchTarget = CanProveConditionalBranch(branch, globOpt, localSymToValueMap);
                if (!branchTarget)
                {
                    return lastBranchTarget != nullptr;
                }
            }

            if (this->GetLastInstr()->IsBranchInstr() && (this->GetLastInstr()->AsBranchInstr()->GetTarget() == branchTarget))
            {
                // happens on the first block we start from only
                lastBranchTarget = branchTarget;
                instr = lastBranchTarget;
                continue;
            }

            if (branchTarget != this->GetLastInstr()->GetNextRealInstrOrLabel())
            {
                // Retarget this block: replace its old terminator (if any)
                // with ByteCodeUses and append a new unconditional Br.
                IR::Instr* lastInstr = this->GetLastInstr();
                IR::BranchInstr * newBranch = IR::BranchInstr::New(Js::OpCode::Br, branchTarget, branchTarget->m_func);
                newBranch->SetByteCodeOffset(lastInstr);
                if (lastInstr->IsBranchInstr())
                {
                    globOpt->ConvertToByteCodeUses(lastInstr);
                }
                this->GetLastInstr()->InsertAfter(newBranch);
                globOpt->func->m_fg->AddEdge(this, branchTarget->GetBasicBlock());
                this->IncrementDataUseCount();
            }
            else
            {
                // If the new target is a fall through label, delete the branch
                globOpt->ConvertToByteCodeUses(this->GetLastInstr());
            }

            // We are adding an unconditional branch, go over all the current successors and remove the ones that are dead now
            FOREACH_SUCCESSOR_BLOCK_EDITING(blockSucc, this, iter)
            {
                if (branchTarget != blockSucc->GetFirstInstr()->AsLabelInstr())
                {
                    // Change the old succ edge to dead
                    this->RemoveDeadSucc(blockSucc, globOpt->func->m_fg);
                    if (this->GetDataUseCount() > 0)
                    {
                        this->DecrementDataUseCount();
                    }
                    if (blockSucc->GetPredList()->Count() == 0)
                    {
                        this->func->m_fg->RemoveBlock(blockSucc, globOpt);
                    }
                }
            }NEXT_SUCCESSOR_BLOCK_EDITING;

            lastBranchTarget = branchTarget;
            instr = lastBranchTarget;
        }
        else
        {
            instr = instr->m_next;
        }
    }
    return lastBranchTarget != nullptr;
}
// Entry point for path-dependent branch folding on this block. Returns
// false when the phase is gated off (phase flag, loop prepass, or the
// OOP-JIT condition below); otherwise runs the folding and returns true
// regardless of whether anything was actually folded.
bool
BasicBlock::PathDepBranchFolding(GlobOpt* globOpt)
{
    if (PHASE_OFF(Js::PathDepBranchFoldingPhase, this->func))
    {
        return false;
    }
    if (globOpt->IsLoopPrePass())
    {
        return false;
    }
    // NOTE(review): this gate uses `||`, which also disables the phase for
    // in-proc JIT whenever OOPJITMissingOpts is off; a gate of the form
    // `IsOOPJIT() && !CONFIG_FLAG(OOPJITMissingOpts)` would only restrict
    // the OOP JIT — confirm which is intended.
    if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
    {
        return false;
    }

    CheckLegalityAndFoldPathDepBranches(globOpt);
    return true;
}
// Merge the GlobOpt value maps / liveness data of all (non-back-edge) predecessors
// into this block's globOptData, inserting compensation code (conversions, airlock
// blocks) on incoming edges where predecessors disagree on type specialization.
// During a loop pre-pass this runs optimistically and returns before any transforms.
void
BasicBlock::MergePredBlocksValueMaps(GlobOpt* globOpt)
{
    Assert(!globOpt->isCallHelper);
    // We keep a local temporary copy for the merge
    GlobOptBlockData blockData(this->globOptData.curFunc);
    if (!globOpt->isRecursiveCallOnLandingPad)
    {
        blockData.NullOutBlockData(globOpt, globOpt->func);
    }
    else
    {
        // If we are going over the landing pad again after field PRE, just start again
        // with the value table where we left off.
        blockData.CopyBlockData(&this->globOptData);
        return;
    }

    // Syms whose merged value info may need compensation code in predecessors
    // (populated by MergeBlockData; filtered below against the final merged values).
    BVSparse<JitArenaAllocator> symsRequiringCompensation(globOpt->tempAlloc);
    {
        BVSparse<JitArenaAllocator> symsCreatedForMerge(globOpt->tempAlloc);
        bool forceTypeSpecOnLoopHeader = true;
        FOREACH_PREDECESSOR_BLOCK(pred, this)
        {
            // Drop an empty call sequence so it doesn't linger in the merged data.
            if (pred->globOptData.callSequence && pred->globOptData.callSequence->Empty())
            {
                JitAdelete(globOpt->alloc, pred->globOptData.callSequence);
                pred->globOptData.callSequence = nullptr;
            }
            if (this->isLoopHeader && globOpt->IsLoopPrePass() && globOpt->prePassLoop == this->loop && this->loop->IsDescendentOrSelf(pred->loop))
            {
                // Loop back-edge.
                // First pass on loop runs optimistically, without doing transforms.
                // Skip this edge for now.
                continue;
            }

            // Apply any path-dependent (branch-condition) value info for this edge
            // while merging, and remember how to undo it afterwards.
            PathDependentInfo *const pathDependentInfo = __edge->GetPathDependentInfo();
            PathDependentInfoToRestore pathDependentInfoToRestore;
            if (pathDependentInfo)
            {
                pathDependentInfoToRestore = globOpt->UpdatePathDependentInfo(pathDependentInfo);
            }

            Assert(pred->GetDataUseCount());

            // First pred?
            if (blockData.symToValueMap == nullptr)
            {
                // Only one edge?
                if (pred->GetSuccList()->HasOne() && this->GetPredList()->HasOne() && this->loop == nullptr)
                {
                    // Single straight-line edge: steal the predecessor's data instead of cloning.
                    blockData.ReuseBlockData(&pred->globOptData);

                    // Don't need to restore the old value info
                    pathDependentInfoToRestore.Clear();
                }
                else
                {
                    blockData.CloneBlockData(this, pred);
                }
            }
            else
            {
                const bool isLoopPrePass = globOpt->IsLoopPrePass();
                blockData.MergeBlockData(
                    this,
                    pred,
                    isLoopPrePass ? nullptr : &symsRequiringCompensation,
                    isLoopPrePass ? nullptr : &symsCreatedForMerge,
                    forceTypeSpecOnLoopHeader);
                forceTypeSpecOnLoopHeader = false; // can force type-spec on the loop header only for the first back edge.
            }

            // Restore the value for the next edge
            if (pathDependentInfo)
            {
                globOpt->RestorePathDependentInfo(pathDependentInfo, pathDependentInfoToRestore);
                __edge->ClearPathDependentInfo(globOpt->alloc);
            }
        } NEXT_PREDECESSOR_BLOCK;
    }

    // Consider: We can recreate values for hoisted field so it can copy prop out of the loop
    if (blockData.symToValueMap == nullptr)
    {
        // No predecessor contributed data (e.g. all edges skipped) - start fresh.
        blockData.InitBlockData(globOpt, globOpt->func);
    }

    if (!globOpt->DoObjTypeSpec())
    {
        // Object type specialization is off, but if copy prop is on (e.g., /force:fieldhoist) we're not clearing liveFields,
        // so we may be letting type syms slip through this block.
        globOpt->KillAllObjectTypes(this->globOptData.liveFields);
    }

    this->globOptData.CopyBlockData(&blockData);

    if (globOpt->IsLoopPrePass())
    {
        Assert(this->loop);

        if(globOpt->DoBoundCheckHoist())
        {
            globOpt->SetInductionVariableValueNumbers(&blockData);
        }

        if (this->isLoopHeader && globOpt->rootLoopPrePass == this->loop)
        {
            // Capture bail out info in case we have optimization that needs it
            Assert(this->loop->bailOutInfo == nullptr);
            this->loop->bailOutInfo = this->CreateLoopTopBailOutInfo(globOpt);
        }

        // If loop pre-pass, don't insert convert from type-spec to var
        return;
    }

    this->CleanUpValueMaps();
    Sym *symIV = nullptr;

    // Clean up the syms requiring compensation by checking the final value in the merged block to see if the sym still requires
    // compensation. All the while, create a mapping from sym to value info in the merged block. This dictionary helps avoid a
    // value lookup in the merged block per predecessor.
    SymToValueInfoMap symsRequiringCompensationToMergedValueInfoMap(globOpt->tempAlloc);
    if(!symsRequiringCompensation.IsEmpty())
    {
        const SymTable *const symTable = func->m_symTable;
        FOREACH_BITSET_IN_SPARSEBV(id, &symsRequiringCompensation)
        {
            Sym *const sym = symTable->Find(id);
            Assert(sym);

            Value *const value = blockData.FindValue(sym);
            if(!value)
            {
                continue;
            }
            ValueInfo *const valueInfo = value->GetValueInfo();
            if(!valueInfo->IsArrayValueInfo())
            {
                continue;
            }

            // At least one new sym was created while merging and associated with the merged value info, so those syms will
            // require compensation in predecessors. For now, the dead store phase is relied upon to remove compensation that is
            // dead due to no further uses of the new sym.
            symsRequiringCompensationToMergedValueInfoMap.Add(sym, valueInfo);
        } NEXT_BITSET_IN_SPARSEBV;
        symsRequiringCompensation.ClearAll();
    }

    if (this->isLoopHeader)
    {
        BVSparse<JitArenaAllocator> * tempBv = globOpt->tempBv;

        // Values on the back-edge in the prepass may be conservative for syms defined in the loop, and type specialization in
        // the prepass is not reflective of the value, but rather, is used to determine whether the sym should be specialized
        // around the loop. Additionally, some syms that are used before defined in the loop may be specialized in the loop
        // header despite not being specialized in the landing pad. Now that the type specialization bit-vectors are merged,
        // specialize the corresponding value infos in the loop header too.

        Assert(tempBv->IsEmpty());
        Loop *const loop = this->loop;
        SymTable *const symTable = func->m_symTable;
        JitArenaAllocator *const alloc = globOpt->alloc;

        // Int-specialized syms: live as losslessly-converted int32 in the merged data.
        tempBv->Or(loop->likelyIntSymsUsedBeforeDefined, loop->symsDefInLoop);
        tempBv->And(blockData.liveInt32Syms);
        tempBv->Minus(blockData.liveLossyInt32Syms);
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym *const varSym = symTable->FindStackSym(id);
            Assert(varSym);
            Value *const value = blockData.FindValue(varSym);
            Assert(value);
            ValueInfo *const valueInfo = value->GetValueInfo();
            if(!valueInfo->IsInt())
            {
                globOpt->ChangeValueInfo(nullptr, value, valueInfo->SpecializeToInt32(alloc));
            }
        } NEXT_BITSET_IN_SPARSEBV;

        // Float-specialized syms
        tempBv->Or(loop->likelyNumberSymsUsedBeforeDefined, loop->symsDefInLoop);
        tempBv->Or(loop->forceFloat64SymsOnEntry);
        tempBv->And(blockData.liveFloat64Syms);
        GlobOptBlockData &landingPadBlockData = loop->landingPad->globOptData;
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym *const varSym = symTable->FindStackSym(id);
            Assert(varSym);

            // If the type-spec sym is null or if the sym is not float-specialized in the loop landing pad, the sym may have
            // been merged to float on a loop back-edge when it was live as float on the back-edge, and live as int in the loop
            // header. In this case, compensation inserted in the loop landing pad will use BailOutNumberOnly, and so it is
            // guaranteed that the value will be float. Otherwise, if the type-spec sym exists, its field can be checked to see
            // if it's prevented from being anything but a number.
            StackSym *const typeSpecSym = varSym->GetFloat64EquivSym(nullptr);
            if(!typeSpecSym ||
                typeSpecSym->m_requiresBailOnNotNumber ||
                !landingPadBlockData.IsFloat64TypeSpecialized(varSym))
            {
                Value *const value = blockData.FindValue(varSym);
                if(value)
                {
                    ValueInfo *const valueInfo = value->GetValueInfo();
                    if(!valueInfo->IsNumber())
                    {
                        globOpt->ChangeValueInfo(this, value, valueInfo->SpecializeToFloat64(alloc));
                    }
                }
                else
                {
                    this->globOptData.SetValue(globOpt->NewGenericValue(ValueType::Float), varSym);
                }
            }
        } NEXT_BITSET_IN_SPARSEBV;

        tempBv->ClearAll();
    }

    // We need to handle the case where a symbol is type-spec'd coming from some predecessors,
    // but not from others.
    //
    // We can do this by inserting the right conversion in the predecessor block, but we
    // can only do this if we are the first successor of that block, since the previous successors
    // would have already been processed. Instead, we'll need to break the edge and insert a block
    // (airlock block) to put in the conversion code.
    Assert(globOpt->tempBv->IsEmpty());

    BVSparse<JitArenaAllocator> symsNeedingLossyIntConversion(globOpt->tempAlloc);
    BVSparse<JitArenaAllocator> symsNeedingLosslessIntConversion(globOpt->tempAlloc);
    BVSparse<JitArenaAllocator> symsNeedingFloatConversion(globOpt->tempAlloc);

    FOREACH_PREDECESSOR_EDGE_EDITING(edge, this, iter)
    {
        BasicBlock *pred = edge->GetPred();

        if (pred->loop && pred->loop->GetHeadBlock() == this)
        {
            pred->DecrementDataUseCount();
            // Skip loop back-edges. We will handle these when we get to the exit blocks.
            continue;
        }

        // If the predecessor is an existing airlock compensation block, operate on the
        // block feeding it instead (compensation blocks have exactly one predecessor).
        BasicBlock *orgPred = nullptr;
        if (pred->isAirLockCompensationBlock)
        {
            Assert(pred->GetPredList()->HasOne());
            orgPred = pred;
            pred = (pred->GetPredList()->Head())->GetPred();
        }

        // Lossy int in the merged block, and no int in the predecessor - need a lossy conversion to int
        symsNeedingLossyIntConversion.Minus(blockData.liveLossyInt32Syms, pred->globOptData.liveInt32Syms);

        // Lossless int in the merged block, and no lossless int in the predecessor - need a lossless conversion to int
        symsNeedingLosslessIntConversion.Minus(blockData.liveInt32Syms, blockData.liveLossyInt32Syms);
        globOpt->tempBv->Minus(pred->globOptData.liveInt32Syms, pred->globOptData.liveLossyInt32Syms);
        symsNeedingLosslessIntConversion.Minus(globOpt->tempBv);

        // tempBv now holds syms live as var here but not as var in the predecessor.
        globOpt->tempBv->Minus(blockData.liveVarSyms, pred->globOptData.liveVarSyms);
        symsNeedingFloatConversion.Minus(blockData.liveFloat64Syms, pred->globOptData.liveFloat64Syms);

        // NOTE(review): symIV is always nullptr in this function as written, so
        // symIVNeedsSpecializing is always false here - presumably a remnant of an
        // induction-variable optimization; confirm before removing.
        bool symIVNeedsSpecializing = (symIV && !pred->globOptData.liveInt32Syms->Test(symIV->m_id) && !symsNeedingLosslessIntConversion.Test(symIV->m_id));

        if (!globOpt->tempBv->IsEmpty() ||
            !symsNeedingLossyIntConversion.IsEmpty() ||
            !symsNeedingLosslessIntConversion.IsEmpty() ||
            !symsNeedingFloatConversion.IsEmpty() ||
            symIVNeedsSpecializing ||
            symsRequiringCompensationToMergedValueInfoMap.Count() != 0)
        {
            // We can't un-specialize a symbol in a predecessor if we've already processed
            // a successor of that block. Instead, insert a new block on the flow edge
            // (an airlock block) and do the un-specialization there.
            //
            // Alternatively, the current block could be an exit block out of this loop, and so the predecessor may exit the
            // loop. In that case, if the predecessor may continue into the loop without exiting, then we need an airlock block
            // to do the appropriate conversions only on the exit path (preferring not to do the conversions inside the loop).
            // If, on the other hand, the predecessor always flows into the current block, then it always exits, so we don't need
            // an airlock block and can just do the conversions in the predecessor.
            if (pred->GetSuccList()->Head()->GetSucc() != this||
                (pred->loop && pred->loop->parent == this->loop && pred->GetSuccList()->Count() > 1))
            {
                BasicBlock *airlockBlock = nullptr;
                if (!orgPred)
                {
                    if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !globOpt->IsLoopPrePass())
                    {
                        Output::Print(_u("TRACE: "));
                        Output::Print(_u("Inserting airlock block to convert syms to var between block %d and %d\n"),
                            pred->GetBlockNum(), this->GetBlockNum());
                        Output::Flush();
                    }
                    airlockBlock = globOpt->func->m_fg->InsertAirlockBlock(edge);
                }
                else
                {
                    Assert(orgPred->isAirLockCompensationBlock);
                    airlockBlock = orgPred;
                    pred->DecrementDataUseCount();
                    airlockBlock->isAirLockCompensationBlock = false; // This is airlock block now. So remove the attribute.
                }
                globOpt->CloneBlockData(airlockBlock, pred);

                // From here on, the conversions are emitted into the airlock block.
                pred = airlockBlock;
            }
            if (!globOpt->tempBv->IsEmpty())
            {
                globOpt->ToVar(globOpt->tempBv, pred);
            }
            if (!symsNeedingLossyIntConversion.IsEmpty())
            {
                globOpt->ToInt32(&symsNeedingLossyIntConversion, pred, true /* lossy */);
            }
            if (!symsNeedingLosslessIntConversion.IsEmpty())
            {
                globOpt->ToInt32(&symsNeedingLosslessIntConversion, pred, false /* lossy */);
            }
            if (!symsNeedingFloatConversion.IsEmpty())
            {
                globOpt->ToFloat64(&symsNeedingFloatConversion, pred);
            }
            if (symIVNeedsSpecializing)
            {
                globOpt->tempBv->ClearAll();
                globOpt->tempBv->Set(symIV->m_id);
                globOpt->ToInt32(globOpt->tempBv, pred, false /* lossy */);
            }
            if(symsRequiringCompensationToMergedValueInfoMap.Count() != 0)
            {
                globOpt->InsertValueCompensation(pred, symsRequiringCompensationToMergedValueInfoMap);
            }
        }
    } NEXT_PREDECESSOR_EDGE_EDITING;

    FOREACH_PREDECESSOR_EDGE(edge, this)
    {
        // Peak Memory optimization:
        // These are in an arena, but putting them on the free list greatly reduces
        // the peak memory used by the global optimizer for complex flow graphs.
        BasicBlock *pred = edge->GetPred();
        if (!this->isLoopHeader || this->loop != pred->loop)
        {
            // Skip airlock compensation block as we are not going to walk this block.
            if (pred->isAirLockCompensationBlock)
            {
                pred->DecrementDataUseCount();
                Assert(pred->GetPredList()->HasOne());
                pred = (pred->GetPredList()->Head())->GetPred();
            }

            if (pred->DecrementDataUseCount() == 0 && (!this->loop || this->loop->landingPad != pred))
            {
                // Data that was reused (single straight-line edge, see above) must not be
                // deleted - it is now owned by this block; just null it out in the pred.
                if (!(pred->GetSuccList()->HasOne() && this->GetPredList()->HasOne() && this->loop == nullptr))
                {
                    pred->globOptData.DeleteBlockData();
                }
                else
                {
                    pred->globOptData.NullOutBlockData(globOpt, globOpt->func);
                }
            }
        }
    } NEXT_PREDECESSOR_EDGE;

    globOpt->tempBv->ClearAll();
    Assert(!globOpt->IsLoopPrePass());   // We already early return if we are in prepass

    if (this->isLoopHeader)
    {
        Loop *const loop = this->loop;

        // Save values live on loop entry, such that we can adjust the state of the
        // values on the back-edge to match.
        loop->varSymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->varSymsOnEntry->Copy(this->globOptData.liveVarSyms);

        loop->int32SymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->int32SymsOnEntry->Copy(this->globOptData.liveInt32Syms);

        loop->lossyInt32SymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->lossyInt32SymsOnEntry->Copy(this->globOptData.liveLossyInt32Syms);

        loop->float64SymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->float64SymsOnEntry->Copy(this->globOptData.liveFloat64Syms);

        loop->liveFieldsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->liveFieldsOnEntry->Copy(this->globOptData.liveFields);

        if(globOpt->DoBoundCheckHoist() && loop->inductionVariables)
        {
            globOpt->FinalizeInductionVariables(loop, &blockData);
            if(globOpt->DoLoopCountBasedBoundCheckHoist())
            {
                globOpt->DetermineDominatingLoopCountableBlock(loop, this);
            }
        }
    }
    else if (!this->loop)
    {
        this->SetDataUseCount(this->GetSuccList()->Count());
    }
    else if(this == this->loop->dominatingLoopCountableBlock)
    {
        globOpt->DetermineLoopCount(this->loop);
    }
}
- void GlobOpt::CloneBlockData(BasicBlock *const toBlock, BasicBlock *const fromBlock)
- {
- toBlock->globOptData.CloneBlockData(toBlock, fromBlock);
- }
// Deep-clone the Value objects referenced by toData's sym-to-value table so that
// toBlock no longer shares Value instances with the block the table was copied from.
// Values are shared between symbols with the same value number, so a per-call
// dictionary (valuesCreatedForClone) ensures each value number is copied once.
// NOTE(review): fromData is not referenced in this body; presumably toData's table
// was bulk-copied from fromData before this call - confirm at the call sites.
void
GlobOpt::CloneValues(BasicBlock *const toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData)
{
    // Fresh kill-tracking set for the cloned block's values.
    ValueSet *const valuesToKillOnCalls = JitAnew(this->alloc, ValueSet, this->alloc);
    toData->valuesToKillOnCalls = valuesToKillOnCalls;

    // Values are shared between symbols with the same ValueNumber.
    // Use a dictionary to share the clone values.
    ValueSetByValueNumber *const valuesCreatedForClone = this->valuesCreatedForClone;
    Assert(valuesCreatedForClone);
    Assert(valuesCreatedForClone->Count() == 0);
    DebugOnly(ValueSetByValueNumber originalValues(tempAlloc, 64));

    // Walk every bucket of the hash table and replace each Value* with its clone.
    const uint tableSize = toData->symToValueMap->tableSize;
    SListBase<GlobHashBucket> *const table = toData->symToValueMap->table;
    for (uint i = 0; i < tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY(GlobHashBucket, bucket, &table[i])
        {
            Value *value = bucket.element;
            ValueNumber valueNum = value->GetValueNumber();
#if DBG
            // Ensure that the set of values in fromData contains only one value per value number. Byte-code constant values
            // are reused in multiple blocks without cloning, so exclude those value numbers.
            {
                Value *const previouslyClonedOriginalValue = originalValues.Lookup(valueNum);
                if (previouslyClonedOriginalValue)
                {
                    if (!byteCodeConstantValueNumbersBv->Test(valueNum))
                    {
                        Assert(value == previouslyClonedOriginalValue);
                    }
                }
                else
                {
                    originalValues.Add(value);
                }
            }
#endif

            Value *newValue = valuesCreatedForClone->Lookup(valueNum);
            if (!newValue)
            {
                newValue = CopyValue(value, valueNum);
                TrackMergedValueForKills(newValue, toData, nullptr);
                valuesCreatedForClone->Add(newValue);
            }
            bucket.element = newValue;
        } NEXT_SLISTBASE_ENTRY;
    }

    // Reset the shared dictionary for the next clone operation.
    valuesCreatedForClone->Clear();

    ProcessValueKills(toBlock, toData);
}
// Scan the value table of a loop back-edge block for property syms that are
// candidates for field PRE (pre-loading in the loop's landing pad). Returns a
// list of hash buckets for the candidates, or nullptr if there are none or if a
// candidate's property has no value in the landing pad (which aborts the search).
PRECandidatesList * GlobOpt::FindBackEdgePRECandidates(BasicBlock *block, JitArenaAllocator *alloc)
{
    // Iterate over the value table looking for propertySyms which are candidates to
    // pre-load in the landing pad for field PRE
    GlobHashTable *valueTable = block->globOptData.symToValueMap;
    Loop *loop = block->loop;
    PRECandidatesList *candidates = nullptr;

    for (uint i = 0; i < valueTable->tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY(GlobHashBucket, bucket, &valueTable->table[i])
        {
            // Only property syms (o.x style fields) are PRE candidates.
            Sym *sym = bucket.value;

            if (!sym->IsPropertySym())
            {
                continue;
            }

            PropertySym *propertySym = sym->AsPropertySym();

            // Field should be live on the back-edge
            if (!block->globOptData.liveFields->Test(propertySym->m_id))
            {
                continue;
            }

            // Field should be live in the landing pad as well
            if (!loop->landingPad->globOptData.liveFields->Test(propertySym->m_id))
            {
                continue;
            }

            // The value must be held in a stack sym we can load from.
            Value *value = bucket.element;
            Sym *symStore = value->GetValueInfo()->GetSymStore();

            if (!symStore || !symStore->IsStackSym())
            {
                continue;
            }

            // Check upwardExposed in case of:
            //  s1 = 0;
            // loop:
            //        = o.x;
            //        foo();
            //    o.x = s1;
            // Can't thrash s1 in loop top.
            if (!symStore->AsStackSym()->IsSingleDef() || loop->GetHeadBlock()->upwardExposedUses->Test(symStore->m_id))
            {
                // If symStore isn't singleDef, we need to make sure it still has the same value.
                // This usually fails if we are not aggressive at transferring values in the prepass.
                Value **pSymStoreFromValue = valueTable->Get(symStore->m_id);

                // Consider: We should be fine if symStore isn't live in landing pad...
                if (!pSymStoreFromValue || (*pSymStoreFromValue)->GetValueNumber() != value->GetValueNumber())
                {
                    continue;
                }
            }

            BasicBlock *landingPad = loop->landingPad;
            Value *landingPadValue = landingPad->globOptData.FindValue(propertySym);

            if (!landingPadValue)
            {
                // Value should be added as initial value or already be there.
                return nullptr;
            }

            // There must be a load instruction recorded in the prepass to clone from.
            IR::Instr * ldInstr = this->prePassInstrMap->Lookup(propertySym->m_id, nullptr);

            if (!ldInstr)
            {
                continue;
            }

            if (!candidates)
            {
                candidates = Anew(alloc, PRECandidatesList, alloc);
            }

            candidates->Prepend(&bucket);

        } NEXT_SLISTBASE_ENTRY;
    }

    return candidates;
}
// When merging block data into a loop header on a back-edge (outside the
// pre-pass), insert CloneStr instructions in the loop's landing pad for syms
// that are clone-string candidates and are likely strings both in the landing
// pad and around the loop. Marks those syms as temp sources in toData.
void GlobOpt::InsertCloneStrs(BasicBlock *toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData)
{
    if (toBlock->isLoopHeader   // isLoopBackEdge
        && toBlock->cloneStrCandidates
        && !IsLoopPrePass())
    {
        Loop *loop = toBlock->loop;
        BasicBlock *landingPad = loop->landingPad;
        const SymTable *const symTable = func->m_symTable;
        Assert(tempBv->IsEmpty());

        // Candidates that are also temp sources coming around the back-edge.
        tempBv->And(toBlock->cloneStrCandidates, fromData->isTempSrc);
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym *const sym = (StackSym *)symTable->Find(id);
            Assert(sym);

            // Sym must be live as var both in the landing pad and on the back-edge.
            if (!landingPad->globOptData.liveVarSyms->Test(id)
                || !fromData->liveVarSyms->Test(id))
            {
                continue;
            }

            Value * landingPadValue = landingPad->globOptData.FindValue(sym);
            if (landingPadValue == nullptr)
            {
                continue;
            }

            Value * loopValue = fromData->FindValue(sym);
            if (loopValue == nullptr)
            {
                continue;
            }

            ValueInfo *landingPadValueInfo = landingPadValue->GetValueInfo();
            ValueInfo *loopValueInfo = loopValue->GetValueInfo();

            // Only clone when the sym is likely a string on both paths.
            if (landingPadValueInfo->IsLikelyString()
                && loopValueInfo->IsLikelyString())
            {
                // CloneStr with the same reg as src and dst: clone the string in place.
                IR::Instr *cloneStr = IR::Instr::New(Js::OpCode::CloneStr, this->func);
                IR::RegOpnd *opnd = IR::RegOpnd::New(sym, IRType::TyVar, this->func);
                cloneStr->SetDst(opnd);
                cloneStr->SetSrc1(opnd);
                // Insert before the loop-top bailout if one exists, otherwise at the
                // end of the landing pad, so the clone happens once before the loop.
                if (loop->bailOutInfo->bailOutInstr)
                {
                    loop->bailOutInfo->bailOutInstr->InsertBefore(cloneStr);
                }
                else
                {
                    landingPad->InsertAfter(cloneStr);
                }

                toData->isTempSrc->Set(id);
            }
        }
        NEXT_BITSET_IN_SPARSEBV;
        tempBv->ClearAll();
    }
}
|