LinearScan.cpp 180 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
7477847794780478147824783478447854786478747884789479047914792479347944795479647974798479948004801480248034804480548064807480848094810481148124813481448154816481748184819482048214822482348244825482648274828482948304831483248334834483548364837483848394840484148424843484448454846484748484849485048514852485348544855485648574858485948604861486248634864486548664867486848694870487148724873487448754876487748784879488048814882488348844885488648874888488948904891489248934894489548964897489848994900490149024903490449054906490749084909491049114912491349144915491649174918491949204921492249234924492549264927492849294930493149324933493449354936493749384939494049414942494349444945494649474948494949504951495249534954495549564957495849594960496149624963496449654966496749684969497049714972497349744975497649774978497949804981498249834984498549864987498849894990499149924993
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "SccLiveness.h"
#if DBG_DUMP || ENABLE_DEBUG_CONFIG_OPTIONS
// Narrow-character register names, generated from the REGDAT X-macro list in
// RegList.h and indexed by RegNum. Debug/trace builds only.
char const * const RegNames[RegNumCount] =
{
#define REGDAT(Name, ListName, ...) "" STRINGIZE(ListName) "",
#include "RegList.h"
#undef REGDAT
};
// Wide-character (char16) variant of RegNames, for _u()-style trace output.
char16 const * const RegNamesW[RegNumCount] =
{
#define REGDAT(Name, ListName, ...) _u("") STRINGIZEW(ListName) _u(""),
#include "RegList.h"
#undef REGDAT
};
#endif
// Per-register attribute flag bytes, generated from the REGDAT X-macro list
// and indexed by RegNum. The flag meanings are defined by RegList.h
// (presumably allocatability/saved-ness bits — see RegList.h for the values).
static const uint8 RegAttribs[RegNumCount] =
{
#define REGDAT(Name, ListName, Encode, Type, Attribs) Attribs,
#include "RegList.h"
#undef REGDAT
};
// IRType of each machine register (e.g. TyMachReg vs TyFloat64), generated
// from the REGDAT X-macro list and indexed by RegNum. 'extern' so other
// translation units share this single definition.
extern const IRType RegTypes[RegNumCount] =
{
#define REGDAT(Name, ListName, Encode, Type, Attribs) Type,
#include "RegList.h"
#undef REGDAT
};
  33. LoweredBasicBlock* LoweredBasicBlock::New(JitArenaAllocator* allocator)
  34. {
  35. return JitAnew(allocator, LoweredBasicBlock, allocator);
  36. }
  37. void LoweredBasicBlock::Copy(LoweredBasicBlock* block)
  38. {
  39. this->inlineeFrameLifetimes.Copy(&block->inlineeFrameLifetimes);
  40. this->inlineeStack.Copy(&block->inlineeStack);
  41. this->inlineeFrameSyms.Copy(&block->inlineeFrameSyms);
  42. }
  43. bool LoweredBasicBlock::HasData()
  44. {
  45. return this->inlineeFrameLifetimes.Count() > 0 || this->inlineeStack.Count() > 0;
  46. }
  47. LoweredBasicBlock* LoweredBasicBlock::Clone(JitArenaAllocator* allocator)
  48. {
  49. if (this->HasData())
  50. {
  51. LoweredBasicBlock* clone = LoweredBasicBlock::New(allocator);
  52. clone->Copy(this);
  53. return clone;
  54. }
  55. return nullptr;
  56. }
  57. bool LoweredBasicBlock::Equals(LoweredBasicBlock* otherBlock)
  58. {
  59. if(this->HasData() != otherBlock->HasData())
  60. {
  61. return false;
  62. }
  63. if (!this->inlineeFrameLifetimes.Equals(&otherBlock->inlineeFrameLifetimes))
  64. {
  65. return false;
  66. }
  67. if (!this->inlineeStack.Equals(&otherBlock->inlineeStack))
  68. {
  69. return false;
  70. }
  71. return true;
  72. }
// LinearScan::RegAlloc
// This register allocator is based on the 1999 linear scan register allocation paper
// by Poletto and Sarkar. This code however walks the IR while doing the lifetime
// allocations, and assigns the regs to all the RegOpnd as it goes. It assumes
// the IR is in R-DFO, and that the lifetime list is sorted in starting order.
// Lifetimes are allocated as they become live, and retired as they go dead. RegOpnd
// are assigned their register. If a lifetime becomes active and there are no free
// registers left, a lifetime is picked to be spilled.
// When we spill, the whole lifetime is spilled. All the loads and stores are done
// through memory for that lifetime, even the ones allocated before the current instruction.
// We do optimize this slightly by not reloading the previous loads that were not in loops.
void
LinearScan::RegAlloc()
{
    // All bookkeeping below lives in this temporary arena and is discarded
    // wholesale when allocation finishes.
    NoRecoverMemoryJitArenaAllocator tempAlloc(_u("BE-LinearScan"), this->func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    this->tempAlloc = &tempAlloc;
    this->opHelperSpilledLiveranges = JitAnew(&tempAlloc, SList<Lifetime *>, &tempAlloc);
    this->activeLiveranges = JitAnew(&tempAlloc, SList<Lifetime *>, &tempAlloc);
    this->liveOnBackEdgeSyms = JitAnew(&tempAlloc, BVSparse<JitArenaAllocator>, &tempAlloc);
    this->stackPackInUseLiveRanges = JitAnew(&tempAlloc, SList<Lifetime *>, &tempAlloc);
    this->stackSlotsFreeList = JitAnew(&tempAlloc, SList<StackSlot *>, &tempAlloc);
    this->currentBlock = LoweredBasicBlock::New(&tempAlloc);
    IR::Instr *currentInstr = this->func->m_headInstr;
    SCCLiveness liveness(this->func, this->tempAlloc);
    BEGIN_CODEGEN_PHASE(this->func, Js::LivenessPhase);
    // Build the lifetime list
    liveness.Build();
    END_CODEGEN_PHASE(this->func, Js::LivenessPhase);
    this->lifetimeList = &liveness.lifetimeList;
    this->opHelperBlockList = &liveness.opHelperBlockList;
    // Prime the helper-block iterator so it points at the first helper block.
    this->opHelperBlockIter = SList<OpHelperBlock>::Iterator(this->opHelperBlockList);
    this->opHelperBlockIter.Next();
    this->Init();
    NativeCodeData::Allocator * nativeAllocator = this->func->GetNativeCodeDataAllocator();
    if (func->hasBailout)
    {
        // One bail-out record table slot per function (top-level + each inlinee).
        this->globalBailOutRecordTables = NativeCodeDataNewArrayZ(nativeAllocator, GlobalBailOutRecordDataTable *, func->m_inlineeId + 1);
        this->lastUpdatedRowIndices = JitAnewArrayZ(this->tempAlloc, uint *, func->m_inlineeId + 1);
#ifdef PROFILE_BAILOUT_RECORD_MEMORY
        if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
        {
            this->func->GetScriptContext()->bailOutOffsetBytes += (sizeof(GlobalBailOutRecordDataTable *) * (func->m_inlineeId + 1));
            this->func->GetScriptContext()->bailOutRecordBytes += (sizeof(GlobalBailOutRecordDataTable *) * (func->m_inlineeId + 1));
        }
#endif
    }
    m_bailOutRecordCount = 0;
    // Generator-yield bail-outs need a bail-in sequence inserted after the
    // resume label; these two track the pending insertion point and its info.
    IR::Instr * insertBailInAfter = nullptr;
    BailOutInfo * bailOutInfoForBailIn = nullptr;
    bool endOfBasicBlock = true;
    FOREACH_INSTR_EDITING(instr, instrNext, currentInstr)
    {
        // Instructions numbered 0 were inserted by this allocator (spill code)
        // and must not be processed again.
        if (instr->GetNumber() == 0)
        {
            AssertMsg(LowererMD::IsAssign(instr), "Only expect spill code here");
            continue;
        }
#if DBG_DUMP && defined(ENABLE_DEBUG_CONFIG_OPTIONS)
        if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::LinearScanPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
        {
            instr->Dump();
        }
#endif // DBG
        this->currentInstr = instr;
        if(instr->StartsBasicBlock() || endOfBasicBlock)
        {
            endOfBasicBlock = false;
            ++currentBlockNumber;
        }
        if (instr->IsLabelInstr())
        {
            this->lastLabel = instr->AsLabelInstr();
            if (this->lastLabel->m_loweredBasicBlock)
            {
                // A forward branch already recorded block info for this label.
                this->currentBlock = this->lastLabel->m_loweredBasicBlock;
            }
            else if(currentBlock->HasData())
            {
                // Check if the previous block has fall-through. If so, retain the block info. If not, create empty info.
                IR::Instr *const prevInstr = instr->GetPrevRealInstrOrLabel();
                Assert(prevInstr);
                if(!prevInstr->HasFallThrough())
                {
                    currentBlock = LoweredBasicBlock::New(&tempAlloc);
                }
            }
            this->currentRegion = this->lastLabel->GetRegion();
        }
        else if (instr->IsBranchInstr())
        {
            if (this->func->HasTry() && this->func->DoOptimizeTryCatch())
            {
                this->ProcessEHRegionBoundary(instr);
            }
            this->ProcessSecondChanceBoundary(instr->AsBranchInstr());
        }
        this->CheckIfInLoop(instr);
        if (this->RemoveDeadStores(instr))
        {
            continue;
        }
        if (instr->HasBailOutInfo())
        {
            if (this->currentRegion)
            {
                RegionType curRegType = this->currentRegion->GetType();
                Assert(curRegType != RegionTypeFinally); //Finally regions are not optimized yet
                if (curRegType == RegionTypeTry || curRegType == RegionTypeCatch)
                {
                    this->func->hasBailoutInEHRegion = true;
                }
            }
            this->FillBailOutRecord(instr);
            if (instr->GetBailOutKind() == IR::BailOutForGeneratorYield)
            {
                // Remember the resume label; bail-in code is generated after
                // this instruction has been fully processed (see loop tail).
                Assert(instr->m_next->IsLabelInstr());
                insertBailInAfter = instr->m_next;
                bailOutInfoForBailIn = instr->GetBailOutInfo();
            }
        }
        // Core per-instruction allocation sequence; the order is significant
        // (e.g. dead lifetimes must end before new ones are allocated).
        this->SetSrcRegs(instr);
        this->EndDeadLifetimes(instr);
        this->CheckOpHelper(instr);
        this->KillImplicitRegs(instr);
        this->AllocateNewLifetimes(instr);
        this->SetDstReg(instr);
        this->EndDeadOpHelperLifetimes(instr);
        if (instr->IsLabelInstr())
        {
            this->ProcessSecondChanceBoundary(instr->AsLabelInstr());
        }
#if DBG
        this->CheckInvariants();
#endif // DBG
        if(instr->EndsBasicBlock())
        {
            endOfBasicBlock = true;
        }
        if (insertBailInAfter == instr)
        {
            // Splice in the generator bail-in; continue iteration after it.
            instrNext = linearScanMD.GenerateBailInForGeneratorYield(instr, bailOutInfoForBailIn);
            insertBailInAfter = nullptr;
            bailOutInfoForBailIn = nullptr;
        }
    }NEXT_INSTR_EDITING;
    if (func->hasBailout)
    {
        // Freeze each populated bail-out table into native code data.
        for (uint i = 0; i <= func->m_inlineeId; i++)
        {
            if (globalBailOutRecordTables[i] != nullptr)
            {
                globalBailOutRecordTables[i]->Finalize(nativeAllocator, &tempAlloc);
#ifdef PROFILE_BAILOUT_RECORD_MEMORY
                if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
                {
                    func->GetScriptContext()->bailOutOffsetBytes += sizeof(GlobalBailOutRecordDataRow) * globalBailOutRecordTables[i]->length;
                    func->GetScriptContext()->bailOutRecordBytes += sizeof(GlobalBailOutRecordDataRow) * globalBailOutRecordTables[i]->length;
                }
#endif
            }
        }
    }
    // At this point only the permanently-unallocatable registers may remain
    // "in use"; all lifetimes and helper blocks must have been retired.
    AssertMsg((this->intRegUsedCount + this->floatRegUsedCount) == this->linearScanMD.UnAllocatableRegCount(this->func) , "RegUsedCount is wrong");
    AssertMsg(this->activeLiveranges->Empty(), "Active list not empty");
    AssertMsg(this->stackPackInUseLiveRanges->Empty(), "Spilled list not empty");
    AssertMsg(!this->opHelperBlockIter.IsValid(), "Got to the end with a helper block still on the list?");
    Assert(this->currentBlock->inlineeStack.Count() == 0);
    this->InsertOpHelperSpillAndRestores();
#if _M_IX86
# if ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.Instrument.IsEnabled(Js::LinearScanPhase, this->func->GetSourceContextId(),this->func->GetLocalFunctionId()))
    {
        this->DynamicStatsInstrument();
    }
# endif
#endif
#if DBG_DUMP
    if (PHASE_STATS(Js::LinearScanPhase, this->func))
    {
        this->PrintStats();
    }
    if (PHASE_TRACE(Js::StackPackPhase, this->func))
    {
        Output::Print(_u("---------------------------\n"));
    }
#endif // DBG_DUMP
    DebugOnly(this->func->allowRemoveBailOutArgInstr = true);
}
  261. JitArenaAllocator *
  262. LinearScan::GetTempAlloc()
  263. {
  264. Assert(tempAlloc);
  265. return tempAlloc;
  266. }
#if DBG
// LinearScan::CheckInvariants
// Debug-only consistency check run after every instruction: verifies that the
// active/helper-spilled lifetime lists, the register bit vectors, the reg
// usage counters, temp-register bookkeeping, and second-chance registers all
// agree with one another.
void
LinearScan::CheckInvariants() const
{
    // Seed with the permanently-unallocatable regs so an active lifetime can
    // never legally claim one of them.
    BitVector bv = this->nonAllocatableRegs;
    uint32 lastend = 0;
    FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->activeLiveranges)
    {
        // Make sure there are only one lifetime per reg
        Assert(!bv.Test(lifetime->reg));
        bv.Set(lifetime->reg);
        Assert(!lifetime->isOpHelperSpilled);
        Assert(!lifetime->isSpilled);
        // The active list must stay sorted by increasing lifetime end.
        Assert(lifetime->end >= lastend);
        lastend = lifetime->end;
    }
    NEXT_SLIST_ENTRY;
    // Make sure the active reg bit vector is correct
    Assert(bv.Equal(this->activeRegs));
    // Recount int/float registers from scratch and compare with the cached
    // counters.
    uint ints = 0, floats = 0;
    FOREACH_BITSET_IN_UNITBV(index, this->activeRegs, BitVector)
    {
        if (IRType_IsFloat(RegTypes[index]))
        {
            floats++;
        }
        else
        {
            ints++;
        }
    }
    NEXT_BITSET_IN_UNITBV;
    Assert(ints == this->intRegUsedCount);
    Assert(floats == this->floatRegUsedCount);
    Assert((this->intRegUsedCount + this->floatRegUsedCount) == this->activeRegs.Count());
    bv.ClearAll();
    lastend = 0;
    FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->opHelperSpilledLiveranges)
    {
        // Make sure there are only one lifetime per reg in the op helper spilled liveranges
        Assert(!bv.Test(lifetime->reg));
        // Lifetimes flagged cantOpHelperSpill stay on the list but don't
        // occupy a helper-spilled register.
        if (!lifetime->cantOpHelperSpill)
        {
            bv.Set(lifetime->reg);
            Assert(lifetime->isOpHelperSpilled);
            Assert(!lifetime->isSpilled);
        }
        Assert(lifetime->end >= lastend);
        lastend = lifetime->end;
    }
    NEXT_SLIST_ENTRY;
    // Make sure the opHelperSpilledRegs bit vector is correct
    Assert(bv.Equal(this->opHelperSpilledRegs));
    // Each register marked as a temp must point back at a lifetime using it.
    for (int i = 0; i < RegNumCount; i++)
    {
        if (this->tempRegs.Test(i))
        {
            Assert(this->tempRegLifetimes[i]->reg == i);
        }
    }
    // Second-chance registers must hold a second-chance-allocated lifetime
    // whose sym already has a home (const or stack slot).
    FOREACH_BITSET_IN_UNITBV(reg, this->secondChanceRegs, BitVector)
    {
        Lifetime *lifetime = this->regContent[reg];
        Assert(lifetime);
        StackSym *sym = lifetime->sym;
        Assert(lifetime->isSecondChanceAllocated);
        Assert(sym->IsConst() || sym->IsAllocated()); // Should have been spilled already.
    } NEXT_BITSET_IN_UNITBV;
}
#endif // DBG
  337. // LinearScan::Init
  338. // Initialize bit vectors
  339. void
  340. LinearScan::Init()
  341. {
  342. FOREACH_REG(reg)
  343. {
  344. // Registers that can't be used are set to active, and will remain this way
  345. if (!LinearScan::IsAllocatable(reg))
  346. {
  347. this->activeRegs.Set(reg);
  348. if (IRType_IsFloat(RegTypes[reg]))
  349. {
  350. this->floatRegUsedCount++;
  351. }
  352. else
  353. {
  354. this->intRegUsedCount++;
  355. }
  356. }
  357. if (RegTypes[reg] == TyMachReg)
  358. {
  359. // JIT64_TODO: Rename int32Regs to machIntRegs.
  360. this->int32Regs.Set(reg);
  361. numInt32Regs++;
  362. }
  363. else if (RegTypes[reg] == TyFloat64)
  364. {
  365. this->floatRegs.Set(reg);
  366. numFloatRegs++;
  367. }
  368. if (LinearScan::IsCallerSaved(reg))
  369. {
  370. this->callerSavedRegs.Set(reg);
  371. }
  372. if (LinearScan::IsCalleeSaved(reg))
  373. {
  374. this->calleeSavedRegs.Set(reg);
  375. }
  376. this->regContent[reg] = nullptr;
  377. } NEXT_REG;
  378. this->instrUseRegs.ClearAll();
  379. this->secondChanceRegs.ClearAll();
  380. this->linearScanMD.Init(this);
  381. #if DBG
  382. this->nonAllocatableRegs = this->activeRegs;
  383. #endif
  384. #if DBG_DUMP
  385. if (PHASE_TRACE(Js::LinearScanPhase, this->func))
  386. {
  387. this->func->DumpHeader();
  388. }
  389. #endif
  390. }
// LinearScan::CheckIfInLoop
// Track whether the current instruction is in a loop or not.
// Pops loops whose end has been passed and pushes a new loop when a loop-top
// label is reached. Returns whether we are (still) inside some loop.
bool
LinearScan::CheckIfInLoop(IR::Instr *instr)
{
    if (this->IsInLoop())
    {
        // Look for end of loop
        AssertMsg(this->curLoop->regAlloc.loopEnd != 0, "Something is wrong here....");
        if (instr->GetNumber() >= this->curLoop->regAlloc.loopEnd)
        {
            AssertMsg(instr->IsBranchInstr(), "Loop tail should be a branchInstr");
            // Several nested loops can end at the same point; pop every loop
            // whose end we have passed.
            while (this->IsInLoop() && instr->GetNumber() >= this->curLoop->regAlloc.loopEnd)
            {
                this->loopNest--;
                this->curLoop->isProcessed = true;
                this->curLoop = this->curLoop->parent;
                if (this->loopNest == 0)
                {
                    // Left the outermost loop: no back edges remain to keep syms live.
                    this->liveOnBackEdgeSyms->ClearAll();
                }
            }
        }
    }
    if (instr->IsLabelInstr() && instr->AsLabelInstr()->m_isLoopTop)
    {
        // Entering a new loop at its loop-top label.
        IR::LabelInstr * labelInstr = instr->AsLabelInstr();
        Loop *parentLoop = this->curLoop;
        if (parentLoop)
        {
            parentLoop->isLeaf = false;
        }
        this->curLoop = labelInstr->GetLoop();
        this->curLoop->isProcessed = false;
        // Lexically nested may not always nest in a flow based way:
        //      while(i--) {
        //          if (cond) {
        //              while(j--) {
        //              }
        //              break;
        //          }
        //      }
        // These look nested, but they are not...
        // So update the flow based parent to be lexical or we won't be able to figure out when we get back
        // to the outer loop.
        // REVIEW: This isn't necessary anymore now that break blocks are moved out of the loops.
        this->curLoop->parent = parentLoop;
        // Fresh per-loop regAlloc bookkeeping for this traversal of the loop.
        this->curLoop->regAlloc.defdInLoopBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
        this->curLoop->regAlloc.symRegUseBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
        this->curLoop->regAlloc.loopStart = labelInstr->GetNumber();
        this->curLoop->regAlloc.exitRegContentList = JitAnew(this->tempAlloc, SList<Lifetime **>, this->tempAlloc);
        this->curLoop->regAlloc.regUseBv = 0;
        this->liveOnBackEdgeSyms->Or(this->curLoop->regAlloc.liveOnBackEdgeSyms);
        this->loopNest++;
    }
    return this->IsInLoop();
}
  448. void
  449. LinearScan::InsertOpHelperSpillAndRestores()
  450. {
  451. linearScanMD.InsertOpHelperSpillAndRestores(opHelperBlockList);
  452. }
// LinearScan::CheckOpHelper
// Maintain the current op-helper-block state as we walk the instruction stream:
// close the current helper block when its end instruction is reached (putting
// the lifetimes that were spilled around it back on the active list and
// recording which ones need a save/restore), and open the next helper block
// when its label is reached.
void
LinearScan::CheckOpHelper(IR::Instr *instr)
{
    if (this->IsInHelperBlock())
    {
        if (this->currentOpHelperBlock->opHelperEndInstr == instr)
        {
            // Get targetInstr if we can.
            // We can deterministically get it only for unconditional branches, as conditional branch may fall through.
            IR::Instr * targetInstr = nullptr;
            if (instr->IsBranchInstr() && instr->AsBranchInstr()->IsUnconditional())
            {
                AssertMsg(!instr->AsBranchInstr()->IsMultiBranch(), "Not supported for Multibranch");
                targetInstr = instr->AsBranchInstr()->GetTarget();
            }
            /*
             * Keep track of the number of registers we've had to
             * store and restore around a helper block for LinearScanMD (on ARM
             * and X64). We need this to be able to allocate space in the frame.
             * We can't emit a PUSH/POP sequence around the block like IA32 because
             * the stack pointer can't move outside the prolog.
             */
            uint32 helperSpilledLiverangeCount = 0;
            // Exiting a helper block. We are going to insert
            // the restore here after linear scan. So put all the restored
            // lifetime back to active
            while (!this->opHelperSpilledLiveranges->Empty())
            {
                Lifetime * lifetime = this->opHelperSpilledLiveranges->Pop();
                lifetime->isOpHelperSpilled = false;
                if (!lifetime->cantOpHelperSpill)
                {
                    // Put the life time back to active
                    this->AssignActiveReg(lifetime, lifetime->reg);
                    bool reload = true;
                    // Lifetime ends before the target after helper block, don't need to save and restore helper spilled lifetime.
                    if (targetInstr && lifetime->end < targetInstr->GetNumber())
                    {
                        // However, if lifetime is spilled as arg - we still need to spill it because the helper assumes the value
                        // to be available in the stack
                        if (lifetime->isOpHelperSpillAsArg)
                        {
                            // we should not attempt to restore it as it is dead on return from the helper.
                            reload = false;
                        }
                        else
                        {
                            Assert(!instr->AsBranchInstr()->IsLoopTail(this->func));
                            continue;
                        }
                    }
                    // Save all the lifetime that needs to be restored
                    OpHelperSpilledLifetime spilledLifetime;
                    spilledLifetime.lifetime = lifetime;
                    spilledLifetime.spillAsArg = lifetime->isOpHelperSpillAsArg;
                    spilledLifetime.reload = reload;
                    /*
                     * Can't unfortunately move this into the else block above because we don't know if this
                     * lifetime will actually get spilled until register allocation completes.
                     * Instead we allocate a slot to this StackSym in LinearScanMD iff
                     * !(lifetime.isSpilled && lifetime.noReloadsIfSpilled).
                     */
                    helperSpilledLiverangeCount++;
                    // save the reg in case it is spilled later. We still need to save and restore
                    // for the non-loop case.
                    spilledLifetime.reg = lifetime->reg;
                    this->currentOpHelperBlock->spilledLifetime.Prepend(spilledLifetime);
                }
                else
                {
                    // Clear it for the next helper block
                    lifetime->cantOpHelperSpill = false;
                }
                lifetime->isOpHelperSpillAsArg = false;
            }
            this->totalOpHelperFullVisitedLength += this->currentOpHelperBlock->Length();
            // Use a dummy label as the insertion point of the reloads, as second-chance-allocation
            // may insert compensation code right before the branch
            IR::PragmaInstr *dummyLabel = IR::PragmaInstr::New(Js::OpCode::Nop, 0, this->func);
            this->currentOpHelperBlock->opHelperEndInstr->InsertBefore(dummyLabel);
            dummyLabel->CopyNumber(this->currentOpHelperBlock->opHelperEndInstr);
            this->currentOpHelperBlock->opHelperEndInstr = dummyLabel;
            this->opHelperSpilledRegs.ClearAll();
            this->currentOpHelperBlock = nullptr;
            linearScanMD.EndOfHelperBlock(helperSpilledLiverangeCount);
        }
    }
    if (this->opHelperBlockIter.IsValid())
    {
        AssertMsg(
            !instr->IsLabelInstr() ||
            !instr->AsLabelInstr()->isOpHelper ||
            this->opHelperBlockIter.Data().opHelperLabel == instr,
            "Found a helper label that doesn't begin the next helper block in the list?");
        if (this->opHelperBlockIter.Data().opHelperLabel == instr)
        {
            // This label opens the next helper block in the list.
            this->currentOpHelperBlock = &this->opHelperBlockIter.Data();
            this->opHelperBlockIter.Next();
        }
    }
}
  554. uint
  555. LinearScan::HelperBlockStartInstrNumber() const
  556. {
  557. Assert(IsInHelperBlock());
  558. return this->currentOpHelperBlock->opHelperLabel->GetNumber();
  559. }
  560. uint
  561. LinearScan::HelperBlockEndInstrNumber() const
  562. {
  563. Assert(IsInHelperBlock());
  564. return this->currentOpHelperBlock->opHelperEndInstr->GetNumber();
  565. }
  566. // LinearScan::AddToActive
  567. // Add a lifetime to the active list. The list is kept sorted in order lifetime end.
  568. // This makes it easier to pick the lifetimes to retire.
  569. void
  570. LinearScan::AddToActive(Lifetime * lifetime)
  571. {
  572. LinearScan::AddLiveRange(this->activeLiveranges, lifetime);
  573. this->regContent[lifetime->reg] = lifetime;
  574. if (lifetime->isSecondChanceAllocated)
  575. {
  576. this->secondChanceRegs.Set(lifetime->reg);
  577. }
  578. else
  579. {
  580. Assert(!this->secondChanceRegs.Test(lifetime->reg));
  581. }
  582. }
// LinearScan::AddOpHelperSpilled
// Mark a lifetime as spilled around the current op-helper block: its register
// is released for use inside the block, and the lifetime is queued on
// opHelperSpilledLiveranges so CheckOpHelper can reactivate it when the block ends.
void
LinearScan::AddOpHelperSpilled(Lifetime * lifetime)
{
    RegNum reg = lifetime->reg;
    Assert(this->IsInHelperBlock());
    Assert(!this->opHelperSpilledRegs.Test(reg));
    Assert(lifetime->isOpHelperSpilled == false);
    Assert(lifetime->cantOpHelperSpill == false);
    this->opHelperSpilledRegs.Set(reg);
    lifetime->isOpHelperSpilled = true;
    // The register no longer holds this lifetime's value.
    this->regContent[reg] = nullptr;
    this->secondChanceRegs.Clear(reg);
    // If a lifetime is being OpHelper spilled and it's an inlinee arg sym
    // we need to make sure its spilled to the sym offset spill space, i.e. isOpHelperSpillAsArg
    // is set. Otherwise, it's value will not be available on inline frame reconstruction.
    if (this->currentBlock->inlineeFrameSyms.Count() > 0 &&
        this->currentBlock->inlineeFrameSyms.ContainsKey(lifetime->sym->m_id) &&
        (lifetime->sym->m_isSingleDef || !lifetime->defList.Empty()))
    {
        lifetime->isOpHelperSpillAsArg = true;
        if (!lifetime->sym->IsAllocated())
        {
            this->AllocateStackSpace(lifetime);
        }
        this->RecordLoopUse(lifetime, lifetime->reg);
    }
    LinearScan::AddLiveRange(this->opHelperSpilledLiveranges, lifetime);
}
  611. void
  612. LinearScan::RemoveOpHelperSpilled(Lifetime * lifetime)
  613. {
  614. Assert(this->IsInHelperBlock());
  615. Assert(lifetime->isOpHelperSpilled);
  616. Assert(lifetime->cantOpHelperSpill == false);
  617. Assert(this->opHelperSpilledRegs.Test(lifetime->reg));
  618. this->opHelperSpilledRegs.Clear(lifetime->reg);
  619. lifetime->isOpHelperSpilled = false;
  620. lifetime->cantOpHelperSpill = false;
  621. lifetime->isOpHelperSpillAsArg = false;
  622. this->opHelperSpilledLiveranges->Remove(lifetime);
  623. }
  624. void
  625. LinearScan::SetCantOpHelperSpill(Lifetime * lifetime)
  626. {
  627. Assert(this->IsInHelperBlock());
  628. Assert(lifetime->isOpHelperSpilled);
  629. Assert(lifetime->cantOpHelperSpill == false);
  630. this->opHelperSpilledRegs.Clear(lifetime->reg);
  631. lifetime->isOpHelperSpilled = false;
  632. lifetime->cantOpHelperSpill = true;
  633. }
  634. void
  635. LinearScan::AddLiveRange(SList<Lifetime *> * list, Lifetime * newLifetime)
  636. {
  637. FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, list, iter)
  638. {
  639. if (newLifetime->end < lifetime->end)
  640. {
  641. break;
  642. }
  643. }
  644. NEXT_SLIST_ENTRY_EDITING;
  645. iter.InsertBefore(newLifetime);
  646. }
  647. Lifetime *
  648. LinearScan::RemoveRegLiveRange(SList<Lifetime *> * list, RegNum reg)
  649. {
  650. // Find the register in the active set
  651. FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, list, iter)
  652. {
  653. if (lifetime->reg == reg)
  654. {
  655. Lifetime * lifetimeReturn = lifetime;
  656. iter.RemoveCurrent();
  657. return lifetimeReturn;
  658. }
  659. } NEXT_SLIST_ENTRY_EDITING;
  660. AssertMsg(false, "Can't find life range for a reg");
  661. return nullptr;
  662. }
// LinearScan::SetDstReg
// Set the reg on each RegOpnd def.
// Handles pre-assigned registers, spilled lifetimes (reload/second-chance/
// temp-reg cases), and defs of lifetimes that were spilled around the
// current op-helper block.
void
LinearScan::SetDstReg(IR::Instr *instr)
{
    //
    // Enregister dst
    //
    IR::Opnd *dst = instr->GetDst();
    if (dst == nullptr)
    {
        return;
    }
    if (!dst->IsRegOpnd())
    {
        // This could be, for instance, a store to a sym with a large offset
        // that was just assigned when we saw the use.
        this->linearScanMD.LegalizeDef(instr);
        return;
    }
    IR::RegOpnd * regOpnd = dst->AsRegOpnd();
    /*
     * If this is a register used to setup a callsite per
     * a calling convention then mark it unavailable to allocate
     * until we see a CALL.
     */
    if (regOpnd->m_isCallArg)
    {
        RegNum callSetupReg = regOpnd->GetReg();
        callSetupRegs.Set(callSetupReg);
    }
    StackSym * stackSym = regOpnd->m_sym;
    // Arg slot sym can be in a RegOpnd for param passed via registers
    // Just use the assigned register
    if (stackSym == nullptr || stackSym->IsArgSlotSym())
    {
        //
        // Already allocated register. just spill the destination
        //
        RegNum reg = regOpnd->GetReg();
        if(LinearScan::IsAllocatable(reg))
        {
            this->SpillReg(reg);
        }
        this->tempRegs.Clear(reg);
    }
    else
    {
        if (regOpnd->GetReg() != RegNOREG)
        {
            // The def already has a register assigned.
            this->RecordLoopUse(nullptr, regOpnd->GetReg());
            // Nothing to do
            return;
        }
        Lifetime * lifetime = stackSym->scratch.linearScan.lifetime;
        uint32 useCountCost = LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr));
        // Optimistically decrease the useCount. We'll undo this if we put it on the defList.
        lifetime->SubFromUseCount(useCountCost, this->curLoop);
        if (lifetime->isSpilled)
        {
            if (stackSym->IsConst() && !IsSymNonTempLocalVar(stackSym))
            {
                // We will reload the constant (but in debug mode, we still need to process this if this is a user var).
                return;
            }
            RegNum reg = regOpnd->GetReg();
            if (reg != RegNOREG)
            {
                // It is already assigned, just record it as a temp reg
                this->AssignTempReg(lifetime, reg);
            }
            else
            {
                IR::Opnd *src1 = instr->GetSrc1();
                IR::Opnd *src2 = instr->GetSrc2();
                if ((src1 && src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym == stackSym) ||
                    (src2 && src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym == stackSym))
                {
                    // OpEQ: src1 should have a valid reg (put src2 for other targets)
                    reg = this->GetAssignedTempReg(lifetime, dst->GetType());
                    Assert(reg != RegNOREG);
                    RecordDef(lifetime, instr, 0);
                }
                else
                {
                    // Try second chance
                    reg = this->SecondChanceAllocation(lifetime, false);
                    if (reg != RegNOREG)
                    {
                        Assert(!stackSym->m_isSingleDef);
                        this->SetReg(regOpnd);
                        // Keep track of defs for this lifetime, in case it gets spilled.
                        RecordDef(lifetime, instr, useCountCost);
                        return;
                    }
                    else
                    {
                        reg = this->GetAssignedTempReg(lifetime, dst->GetType());
                        RecordDef(lifetime, instr, 0);
                    }
                }
                if (LowererMD::IsAssign(instr) && instr->GetSrc1()->IsRegOpnd())
                {
                    // Fold the spilled store: turn the reg def into a direct
                    // store to the sym's home location.
                    if (reg != RegNOREG)
                    {
                        // If the value is in a temp reg, it's not valid any more.
                        this->tempRegs.Clear(reg);
                    }
                    IRType srcType = instr->GetSrc1()->GetType();
                    instr->ReplaceDst(IR::SymOpnd::New(stackSym, srcType, this->func));
                    this->linearScanMD.LegalizeDef(instr);
                    return;
                }
                if (reg == RegNOREG)
                {
                    IR::Opnd *src = instr->GetSrc1();
                    if (src && src->IsRegOpnd() && src->AsRegOpnd()->m_sym == stackSym)
                    {
                        // Handle OPEQ's for x86/x64
                        reg = src->AsRegOpnd()->GetReg();
                        AssertMsg(!this->activeRegs.Test(reg), "Shouldn't be active");
                    }
                    else
                    {
                        // The lifetime was spilled, but we still need a reg for this operand.
                        reg = this->FindReg(nullptr, regOpnd);
                    }
                    this->AssignTempReg(lifetime, reg);
                }
            }
            if (!lifetime->isDeadStore && !lifetime->isSecondChanceAllocated)
            {
                // Insert a store since the lifetime is spilled
                IR::Opnd *nextDst = instr->m_next->GetDst();
                // Don't need the store however if the next instruction has the same dst
                if (nextDst == nullptr || !nextDst->IsEqual(regOpnd))
                {
                    this->InsertStore(instr, regOpnd->m_sym, reg);
                }
            }
        }
        else
        {
            if (lifetime->isOpHelperSpilled)
            {
                // We must be in a helper block and the lifetime must
                // start before the helper block
                Assert(this->IsInHelperBlock());
                Assert(lifetime->start < this->HelperBlockStartInstrNumber());
                RegNum reg = lifetime->reg;
                Assert(this->opHelperSpilledRegs.Test(reg));
                if (this->activeRegs.Test(reg))
                {
                    // The reg must have been used locally in the helper block
                    // by some other lifetime. Just spill it
                    this->SpillReg(reg);
                }
                // We can't save/restore this reg across the helper call because the restore would overwrite
                // this def, but the def means we don't need to spill at all. Mark the lifetime as cantOpHelperSpill
                // however in case another helper call in this block tries to spill it.
                this->SetCantOpHelperSpill(lifetime);
                this->AddToActive(lifetime);
                this->tempRegs.Clear(reg);
                this->activeRegs.Set(reg);
                // Keep the per-type active-register counters in sync with activeRegs.
                if (RegTypes[reg] == TyMachReg)
                {
                    this->intRegUsedCount++;
                }
                else
                {
                    Assert(RegTypes[reg] == TyFloat64);
                    this->floatRegUsedCount++;
                }
            }
            // Keep track of defs for this lifetime, in case it gets spilled.
            RecordDef(lifetime, instr, useCountCost);
        }
        this->SetReg(regOpnd);
    }
}
  844. // Get the stack offset of the non temp locals from the stack.
  845. int32 LinearScan::GetStackOffset(Js::RegSlot regSlotId)
  846. {
  847. int32 stackSlotId = regSlotId - this->func->GetJITFunctionBody()->GetFirstNonTempLocalIndex();
  848. Assert(stackSlotId >= 0);
  849. return this->func->GetLocalVarSlotOffset(stackSlotId);
  850. }
  851. //
  852. // This helper function is used for saving bytecode stack sym value to memory / local slots on stack so that we can read it for the locals inspection.
  853. void
  854. LinearScan::WriteThroughForLocal(IR::RegOpnd* regOpnd, Lifetime* lifetime, IR::Instr* instrInsertAfter)
  855. {
  856. Assert(regOpnd);
  857. Assert(lifetime);
  858. Assert(instrInsertAfter);
  859. StackSym* sym = regOpnd->m_sym;
  860. Assert(IsSymNonTempLocalVar(sym));
  861. Js::RegSlot slotIndex = sym->GetByteCodeRegSlot();
  862. // First we insert the write through moves
  863. sym->m_offset = GetStackOffset(slotIndex);
  864. sym->m_allocated = true;
  865. // Save the value on reg to local var slot.
  866. this->InsertStore(instrInsertAfter, sym, lifetime->reg);
  867. }
  868. bool
  869. LinearScan::NeedsWriteThrough(StackSym * sym)
  870. {
  871. return this->NeedsWriteThroughForEH(sym) || this->IsSymNonTempLocalVar(sym);
  872. }
  873. bool
  874. LinearScan::NeedsWriteThroughForEH(StackSym * sym)
  875. {
  876. if (!this->func->HasTry() || !this->func->DoOptimizeTryCatch() || !sym->HasByteCodeRegSlot())
  877. {
  878. return false;
  879. }
  880. Assert(this->currentRegion);
  881. return this->currentRegion->writeThroughSymbolsSet && this->currentRegion->writeThroughSymbolsSet->Test(sym->m_id);
  882. }
  883. // Helper routine to check if current sym belongs to non temp bytecodereg
  884. bool
  885. LinearScan::IsSymNonTempLocalVar(StackSym *sym)
  886. {
  887. Assert(sym);
  888. if (this->func->IsJitInDebugMode() && sym->HasByteCodeRegSlot())
  889. {
  890. Js::RegSlot slotIndex = sym->GetByteCodeRegSlot();
  891. return this->func->IsNonTempLocalVar(slotIndex);
  892. }
  893. return false;
  894. }
  895. // LinearScan::SetSrcRegs
  896. // Set the reg on each RegOpnd use.
  897. // Note that this includes regOpnd of indir dsts...
  898. void
  899. LinearScan::SetSrcRegs(IR::Instr *instr)
  900. {
  901. //
  902. // Enregister srcs
  903. //
  904. IR::Opnd *src1 = instr->GetSrc1();
  905. if (src1 != nullptr)
  906. {
  907. // Capture src2 now as folding in SetUses could swab the srcs...
  908. IR::Opnd *src2 = instr->GetSrc2();
  909. this->SetUses(instr, src1);
  910. if (src2 != nullptr)
  911. {
  912. this->SetUses(instr, src2);
  913. }
  914. }
  915. IR::Opnd *dst = instr->GetDst();
  916. if (dst && dst->IsIndirOpnd())
  917. {
  918. this->SetUses(instr, dst);
  919. }
  920. this->instrUseRegs.ClearAll();
  921. }
  922. // LinearScan::SetUses
  923. void
  924. LinearScan::SetUses(IR::Instr *instr, IR::Opnd *opnd)
  925. {
  926. switch (opnd->GetKind())
  927. {
  928. case IR::OpndKindReg:
  929. this->SetUse(instr, opnd->AsRegOpnd());
  930. break;
  931. case IR::OpndKindSym:
  932. {
  933. Sym * sym = opnd->AsSymOpnd()->m_sym;
  934. if (sym->IsStackSym())
  935. {
  936. StackSym* stackSym = sym->AsStackSym();
  937. if (!stackSym->IsAllocated())
  938. {
  939. func->StackAllocate(stackSym, opnd->GetSize());
  940. // StackSym's lifetime is allocated during SCCLiveness::ProcessDst
  941. // we might not need to set the flag if the sym is not a dst.
  942. if (stackSym->scratch.linearScan.lifetime)
  943. {
  944. stackSym->scratch.linearScan.lifetime->cantStackPack = true;
  945. }
  946. }
  947. this->linearScanMD.LegalizeUse(instr, opnd);
  948. }
  949. }
  950. break;
  951. case IR::OpndKindIndir:
  952. {
  953. IR::IndirOpnd * indirOpnd = opnd->AsIndirOpnd();
  954. this->SetUse(instr, indirOpnd->GetBaseOpnd());
  955. if (indirOpnd->GetIndexOpnd())
  956. {
  957. this->SetUse(instr, indirOpnd->GetIndexOpnd());
  958. }
  959. }
  960. break;
  961. case IR::OpndKindIntConst:
  962. case IR::OpndKindAddr:
  963. this->linearScanMD.LegalizeConstantUse(instr, opnd);
  964. break;
  965. };
  966. }
// Scratch state accumulated while filling one bailout record:
// the constants referenced by the bailout and the syms that are restored
// from the register-save space.
struct FillBailOutState
{
    SListCounted<Js::Var> constantList;            // constants to embed in the bailout record
    uint registerSaveCount;                        // number of register-resident syms recorded
    StackSym * registerSaveSyms[RegNumCount - 1];  // sym per register-save slot, indexed by (reg - 1)
    FillBailOutState(JitArenaAllocator * allocator) : constantList(allocator) {}
};
// LinearScan::FillBailOutOffset
// Compute the restore location for one bailed-out sym and encode it in *offset:
// constants get a positive index past the register-save and reserve slots,
// spilled syms get their stack offset, and register-resident syms get the
// register-save slot index for their register.
void
LinearScan::FillBailOutOffset(int * offset, StackSym * stackSym, FillBailOutState * state, IR::Instr * instr)
{
    AssertMsg(*offset == 0, "Can't have two active lifetime for the same byte code register");
    if (stackSym->IsConst())
    {
        state->constantList.Prepend(reinterpret_cast<Js::Var>(stackSym->GetLiteralConstValue_PostGlobOpt()));
        // Constant offset are offset by the number of register save slots
        *offset = state->constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
    }
    else if (stackSym->m_isEncodedConstant)
    {
        Assert(!stackSym->m_isSingleDef);
        state->constantList.Prepend((Js::Var)stackSym->constantValue);
        // Constant offset are offset by the number of register save slots
        *offset = state->constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
    }
    else
    {
        Lifetime * lifetime = stackSym->scratch.linearScan.lifetime;
        // The sym's lifetime must span this instruction.
        Assert(lifetime && lifetime->start < instr->GetNumber() && instr->GetNumber() <= lifetime->end);
        if (instr->GetBailOutKind() == IR::BailOutOnException)
        {
            // Apart from the exception object sym, lifetimes for all other syms that need to be restored at this bailout,
            // must have been spilled at least once (at the TryCatch, or at the Leave, or both)
            // Post spilling, a lifetime could have been second chance allocated. But, it should still have stack allocated for its sym
            Assert(stackSym->IsAllocated() || (stackSym == this->currentRegion->GetExceptionObjectSym()));
        }
        this->PrepareForUse(lifetime);
        if (lifetime->isSpilled ||
            ((instr->GetBailOutKind() == IR::BailOutOnException) && (stackSym != this->currentRegion->GetExceptionObjectSym()))) // BailOutOnException must restore from memory
        {
            Assert(stackSym->IsAllocated());
#ifdef MD_GROW_LOCALS_AREA_UP
            *offset = -((int)stackSym->m_offset + BailOutInfo::StackSymBias);
#else
            // Stack offset are negative, includes the PUSH EBP and return address
            *offset = stackSym->m_offset - (2 * MachPtr);
#endif
        }
        else
        {
            Assert(lifetime->reg != RegNOREG);
            // Each register-save slot may hold only one sym.
            Assert(state->registerSaveSyms[lifetime->reg - 1] == nullptr ||
                state->registerSaveSyms[lifetime->reg - 1] == stackSym);
            AssertMsg((stackSym->IsFloat64() || stackSym->IsSimd128()) && RegTypes[lifetime->reg] == TyFloat64 ||
                !(stackSym->IsFloat64() || stackSym->IsSimd128()) && RegTypes[lifetime->reg] != TyFloat64,
                "Trying to save float64 sym into non-float64 reg or non-float64 sym into float64 reg");
            // Save the register value to the register save space using the reg enum value as index
            state->registerSaveSyms[lifetime->reg - 1] = stackSym;
            *offset = LinearScanMD::GetRegisterSaveIndex(lifetime->reg);
            state->registerSaveCount++;
        }
    }
}
// Per-function (top-level or inlinee) scratch data used while building a
// bailout record, later folded into the GlobalBailOutRecordDataTable.
struct FuncBailOutData
{
    Func * func;
    BailOutRecord * bailOutRecord;
    int * localOffsets;           // per local reg slot: encoded restore location, 0 == not live
    BVFixed * losslessInt32Syms;  // slots whose value is a lossless int32
    BVFixed * float64Syms;        // slots whose value is a float64
    // SIMD_JS — one bit vector per SIMD lane type
    BVFixed * simd128F4Syms;
    BVFixed * simd128I4Syms;
    BVFixed * simd128I8Syms;
    BVFixed * simd128I16Syms;
    BVFixed * simd128U4Syms;
    BVFixed * simd128U8Syms;
    BVFixed * simd128U16Syms;
    BVFixed * simd128B4Syms;
    BVFixed * simd128B8Syms;
    BVFixed * simd128B16Syms;
    void Initialize(Func * func, JitArenaAllocator * tempAllocator);
    void FinalizeLocalOffsets(JitArenaAllocator *allocator, GlobalBailOutRecordDataTable *table, uint **lastUpdatedRowIndices);
    void Clear(JitArenaAllocator * tempAllocator);
};
  1051. void
  1052. FuncBailOutData::Initialize(Func * func, JitArenaAllocator * tempAllocator)
  1053. {
  1054. Js::RegSlot localsCount = func->GetJITFunctionBody()->GetLocalsCount();
  1055. this->func = func;
  1056. this->localOffsets = AnewArrayZ(tempAllocator, int, localsCount);
  1057. this->losslessInt32Syms = BVFixed::New(localsCount, tempAllocator);
  1058. this->float64Syms = BVFixed::New(localsCount, tempAllocator);
  1059. // SIMD_JS
  1060. this->simd128F4Syms = BVFixed::New(localsCount, tempAllocator);
  1061. this->simd128I4Syms = BVFixed::New(localsCount, tempAllocator);
  1062. this->simd128I8Syms = BVFixed::New(localsCount, tempAllocator);
  1063. this->simd128I16Syms = BVFixed::New(localsCount, tempAllocator);
  1064. this->simd128U4Syms = BVFixed::New(localsCount, tempAllocator);
  1065. this->simd128U8Syms = BVFixed::New(localsCount, tempAllocator);
  1066. this->simd128U16Syms = BVFixed::New(localsCount, tempAllocator);
  1067. this->simd128B4Syms = BVFixed::New(localsCount, tempAllocator);
  1068. this->simd128B8Syms = BVFixed::New(localsCount, tempAllocator);
  1069. this->simd128B16Syms = BVFixed::New(localsCount, tempAllocator);
  1070. }
// FuncBailOutData::FinalizeLocalOffsets
// Fold this function's per-slot restore info into the shared
// GlobalBailOutRecordDataTable and count the live locals on the record.
// lastUpdatedRowIndices caches, per reg slot, the table row last touched for
// that slot so AddOrUpdateRow can locate it across successive bailout records.
void
FuncBailOutData::FinalizeLocalOffsets(JitArenaAllocator *allocator, GlobalBailOutRecordDataTable *globalBailOutRecordDataTable, uint **lastUpdatedRowIndices)
{
    Js::RegSlot localsCount = func->GetJITFunctionBody()->GetLocalsCount();
    Assert(globalBailOutRecordDataTable != nullptr);
    Assert(lastUpdatedRowIndices != nullptr);
    if (*lastUpdatedRowIndices == nullptr)
    {
        // Lazily allocate the cache, filled with an invalid row index (all 1-bits).
        *lastUpdatedRowIndices = JitAnewArrayZ(allocator, uint, localsCount);
        memset(*lastUpdatedRowIndices, -1, sizeof(uint)*localsCount);
    }
    uint32 bailOutRecordId = bailOutRecord->m_bailOutRecordId;
    bailOutRecord->localOffsetsCount = 0;
    for (uint32 i = 0; i < localsCount; i++)
    {
        // if the sym is live
        if (localOffsets[i] != 0)
        {
            bool isFloat = float64Syms->Test(i) != 0;
            bool isInt = losslessInt32Syms->Test(i) != 0;
            // SIMD_JS
            bool isSimd128F4 = simd128F4Syms->Test(i) != 0;
            bool isSimd128I4 = simd128I4Syms->Test(i) != 0;
            bool isSimd128I8 = simd128I8Syms->Test(i) != 0;
            bool isSimd128I16 = simd128I16Syms->Test(i) != 0;
            bool isSimd128U4 = simd128U4Syms->Test(i) != 0;
            bool isSimd128U8 = simd128U8Syms->Test(i) != 0;
            bool isSimd128U16 = simd128U16Syms->Test(i) != 0;
            bool isSimd128B4 = simd128B4Syms->Test(i) != 0;
            bool isSimd128B8 = simd128B8Syms->Test(i) != 0;
            bool isSimd128B16 = simd128B16Syms->Test(i) != 0;
            globalBailOutRecordDataTable->AddOrUpdateRow(allocator, bailOutRecordId, i, isFloat, isInt,
                isSimd128F4, isSimd128I4, isSimd128I8, isSimd128I16, isSimd128U4, isSimd128U8, isSimd128U16,
                isSimd128B4, isSimd128B8, isSimd128B16, localOffsets[i], &((*lastUpdatedRowIndices)[i]));
            // The cached row must now describe this reg slot.
            Assert(globalBailOutRecordDataTable->globalBailOutRecordDataRows[(*lastUpdatedRowIndices)[i]].regSlot == i);
            bailOutRecord->localOffsetsCount++;
        }
    }
}
  1110. void
  1111. FuncBailOutData::Clear(JitArenaAllocator * tempAllocator)
  1112. {
  1113. Js::RegSlot localsCount = func->GetJITFunctionBody()->GetLocalsCount();
  1114. JitAdeleteArray(tempAllocator, localsCount, localOffsets);
  1115. losslessInt32Syms->Delete(tempAllocator);
  1116. float64Syms->Delete(tempAllocator);
  1117. // SIMD_JS
  1118. simd128F4Syms->Delete(tempAllocator);
  1119. simd128I4Syms->Delete(tempAllocator);
  1120. simd128I8Syms->Delete(tempAllocator);
  1121. simd128I16Syms->Delete(tempAllocator);
  1122. simd128U4Syms->Delete(tempAllocator);
  1123. simd128U8Syms->Delete(tempAllocator);
  1124. simd128U16Syms->Delete(tempAllocator);
  1125. simd128B4Syms->Delete(tempAllocator);
  1126. simd128B8Syms->Delete(tempAllocator);
  1127. simd128B16Syms->Delete(tempAllocator);
  1128. }
// LinearScan::EnsureGlobalBailOutRecordTable
// Return the bailout-record table for the given function (the top-level
// function or one of its inlinees), creating and initializing it on first use.
GlobalBailOutRecordDataTable *
LinearScan::EnsureGlobalBailOutRecordTable(Func *func)
{
    Assert(globalBailOutRecordTables != nullptr);
    Func *topFunc = func->GetTopFunc();
    bool isTopFunc = (func == topFunc);
    // The top-level function uses slot 0; inlinees are keyed by their inlinee id.
    uint32 inlineeID = isTopFunc ? 0 : func->m_inlineeId;
    NativeCodeData::Allocator * allocator = this->func->GetNativeCodeDataAllocator();
    GlobalBailOutRecordDataTable *globalBailOutRecordDataTable = globalBailOutRecordTables[inlineeID];
    if (globalBailOutRecordDataTable == nullptr)
    {
        // First bailout for this function: allocate and fill in the table.
        globalBailOutRecordDataTable = globalBailOutRecordTables[inlineeID] = NativeCodeDataNew(allocator, GlobalBailOutRecordDataTable);
        globalBailOutRecordDataTable->length = globalBailOutRecordDataTable->size = 0;
        globalBailOutRecordDataTable->isInlinedFunction = !isTopFunc;
        globalBailOutRecordDataTable->hasNonSimpleParams = func->GetHasNonSimpleParams();
        globalBailOutRecordDataTable->hasStackArgOpt = func->IsStackArgsEnabled();
        globalBailOutRecordDataTable->isInlinedConstructor = func->IsInlinedConstructor();
        globalBailOutRecordDataTable->isLoopBody = topFunc->IsLoopBody();
        globalBailOutRecordDataTable->returnValueRegSlot = func->returnValueRegSlot;
        globalBailOutRecordDataTable->isScopeObjRestored = false;
        globalBailOutRecordDataTable->firstActualStackOffset = -1;
        globalBailOutRecordDataTable->registerSaveSpace = (Js::Var*)func->GetThreadContextInfo()->GetBailOutRegisterSaveSpaceAddr();
        globalBailOutRecordDataTable->globalBailOutRecordDataRows = nullptr;
        if (func->GetJITFunctionBody()->GetForInLoopDepth() != 0)
        {
#ifdef MD_GROW_LOCALS_AREA_UP
            Assert(func->GetForInEnumeratorArrayOffset() >= 0);
            globalBailOutRecordDataTable->forInEnumeratorArrayRestoreOffset = func->GetForInEnumeratorArrayOffset();
#else
            // Stack offset are negative, includes the PUSH EBP and return address
            globalBailOutRecordDataTable->forInEnumeratorArrayRestoreOffset = func->GetForInEnumeratorArrayOffset() - (2 * MachPtr);
#endif
        }
#ifdef PROFILE_BAILOUT_RECORD_MEMORY
        if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
        {
            topFunc->GetScriptContext()->bailOutOffsetBytes += sizeof(GlobalBailOutRecordDataTable);
            topFunc->GetScriptContext()->bailOutRecordBytes += sizeof(GlobalBailOutRecordDataTable);
        }
#endif
    }
    return globalBailOutRecordDataTable;
}
  1172. void
  1173. LinearScan::FillBailOutRecord(IR::Instr * instr)
  1174. {
  1175. BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
  1176. if (this->func->HasTry())
  1177. {
  1178. RegionType currentRegionType = this->currentRegion->GetType();
  1179. if (currentRegionType == RegionTypeTry || currentRegionType == RegionTypeCatch)
  1180. {
  1181. bailOutInfo->bailOutRecord->ehBailoutData = this->currentRegion->ehBailoutData;
  1182. }
  1183. }
  1184. BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed = bailOutInfo->byteCodeUpwardExposedUsed;
  1185. Func * bailOutFunc = bailOutInfo->bailOutFunc;
  1186. uint funcCount = bailOutFunc->inlineDepth + 1;
  1187. FuncBailOutData * funcBailOutData = AnewArray(this->tempAlloc, FuncBailOutData, funcCount);
  1188. uint funcIndex = funcCount - 1;
  1189. funcBailOutData[funcIndex].Initialize(bailOutFunc, this->tempAlloc);
  1190. funcBailOutData[funcIndex].bailOutRecord = bailOutInfo->bailOutRecord;
  1191. bailOutInfo->bailOutRecord->m_bailOutRecordId = m_bailOutRecordCount++;
  1192. bailOutInfo->bailOutRecord->globalBailOutRecordTable = EnsureGlobalBailOutRecordTable(bailOutFunc);
  1193. NativeCodeData::Allocator * allocator = this->func->GetNativeCodeDataAllocator();
  1194. #if DBG_DUMP
  1195. if(PHASE_DUMP(Js::BailOutPhase, this->func))
  1196. {
  1197. Output::Print(_u("-------------------Bailout dump -------------------------\n"));
  1198. instr->Dump();
  1199. }
  1200. #endif
  1201. // Generate chained bailout record for inlined functions
  1202. Func * currentFunc = bailOutFunc->GetParentFunc();
  1203. uint bailOutOffset = bailOutFunc->postCallByteCodeOffset;
  1204. while (currentFunc != nullptr)
  1205. {
  1206. Assert(funcIndex > 0);
  1207. Assert(bailOutOffset != Js::Constants::NoByteCodeOffset);
  1208. BailOutRecord * bailOutRecord = NativeCodeDataNewZ(allocator, BailOutRecord, bailOutOffset, (uint)-1, IR::BailOutInvalid, currentFunc);
  1209. bailOutRecord->m_bailOutRecordId = m_bailOutRecordCount++;
  1210. bailOutRecord->globalBailOutRecordTable = EnsureGlobalBailOutRecordTable(currentFunc);
  1211. #if ENABLE_DEBUG_CONFIG_OPTIONS
  1212. // To indicate this is a subsequent bailout from an inlinee
  1213. bailOutRecord->bailOutOpcode = Js::OpCode::InlineeEnd;
  1214. #endif
  1215. funcBailOutData[funcIndex].bailOutRecord->parent = bailOutRecord;
  1216. funcIndex--;
  1217. funcBailOutData[funcIndex].bailOutRecord = bailOutRecord;
  1218. funcBailOutData[funcIndex].Initialize(currentFunc, this->tempAlloc);
  1219. bailOutOffset = currentFunc->postCallByteCodeOffset;
  1220. currentFunc = currentFunc->GetParentFunc();
  1221. }
  1222. Assert(funcIndex == 0);
  1223. Assert(bailOutOffset == Js::Constants::NoByteCodeOffset);
  1224. FillBailOutState state(this->tempAlloc);
  1225. state.registerSaveCount = 0;
  1226. memset(state.registerSaveSyms, 0, sizeof(state.registerSaveSyms));
  1227. // Fill in the constants
  1228. FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, value, &bailOutInfo->usedCapturedValues.constantValues, constantValuesIterator)
  1229. {
  1230. AssertMsg(bailOutInfo->bailOutRecord->bailOutKind != IR::BailOutForGeneratorYield, "constant prop syms unexpected for bail-in for generator yield");
  1231. StackSym * stackSym = value.Key();
  1232. if(stackSym->HasArgSlotNum())
  1233. {
  1234. continue;
  1235. }
  1236. Assert(stackSym->HasByteCodeRegSlot());
  1237. Js::RegSlot i = stackSym->GetByteCodeRegSlot();
  1238. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1239. uint index = stackSymFunc->inlineDepth;
  1240. Assert(i != Js::Constants::NoRegister);
  1241. Assert(i < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
  1242. Assert(index < funcCount);
  1243. __analysis_assume(index < funcCount);
  1244. Assert(funcBailOutData[index].func == stackSymFunc);
  1245. Assert(!byteCodeUpwardExposedUsed->Test(stackSym->m_id));
  1246. BailoutConstantValue constValue = value.Value();
  1247. Js::Var varValue = constValue.ToVar(this->func);
  1248. state.constantList.Prepend(varValue);
  1249. AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
  1250. // Constant offset are offset by the number of register save slots
  1251. funcBailOutData[index].localOffsets[i] = state.constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
  1252. #if DBG_DUMP
  1253. if(PHASE_DUMP(Js::BailOutPhase, this->func))
  1254. {
  1255. Output::Print(_u("Constant stack sym #%d (argOut:%s): "), i, IsTrueOrFalse(stackSym->HasArgSlotNum()));
  1256. stackSym->Dump();
  1257. Output::Print(_u(" (0x%p (Var) Offset: %d)\n"), varValue, funcBailOutData[index].localOffsets[i]);
  1258. }
  1259. #endif
  1260. constantValuesIterator.RemoveCurrent(this->func->m_alloc);
  1261. }
  1262. NEXT_SLISTBASE_ENTRY_EDITING;
  1263. // Fill in the copy prop syms
  1264. FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSyms, &bailOutInfo->usedCapturedValues.copyPropSyms, copyPropSymsIter)
  1265. {
  1266. AssertMsg(bailOutInfo->bailOutRecord->bailOutKind != IR::BailOutForGeneratorYield, "copy prop syms unexpected for bail-in for generator yield");
  1267. StackSym * stackSym = copyPropSyms.Key();
  1268. if(stackSym->HasArgSlotNum())
  1269. {
  1270. continue;
  1271. }
  1272. Js::RegSlot i = stackSym->GetByteCodeRegSlot();
  1273. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1274. uint index = stackSymFunc->inlineDepth;
  1275. Assert(i != Js::Constants::NoRegister);
  1276. Assert(i < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
  1277. Assert(index < funcCount);
  1278. __analysis_assume(index < funcCount);
  1279. Assert(funcBailOutData[index].func == stackSymFunc);
  1280. AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
  1281. Assert(!byteCodeUpwardExposedUsed->Test(stackSym->m_id));
  1282. StackSym * copyStackSym = copyPropSyms.Value();
  1283. this->FillBailOutOffset(&funcBailOutData[index].localOffsets[i], copyStackSym, &state, instr);
  1284. if (copyStackSym->IsInt32())
  1285. {
  1286. funcBailOutData[index].losslessInt32Syms->Set(i);
  1287. }
  1288. else if (copyStackSym->IsFloat64())
  1289. {
  1290. funcBailOutData[index].float64Syms->Set(i);
  1291. }
  1292. // SIMD_JS
  1293. else if (copyStackSym->IsSimd128F4())
  1294. {
  1295. funcBailOutData[index].simd128F4Syms->Set(i);
  1296. }
  1297. else if (copyStackSym->IsSimd128I4())
  1298. {
  1299. funcBailOutData[index].simd128I4Syms->Set(i);
  1300. }
  1301. else if (copyStackSym->IsSimd128I8())
  1302. {
  1303. funcBailOutData[index].simd128I8Syms->Set(i);
  1304. }
  1305. else if (copyStackSym->IsSimd128I16())
  1306. {
  1307. funcBailOutData[index].simd128I16Syms->Set(i);
  1308. }
  1309. else if (copyStackSym->IsSimd128U4())
  1310. {
  1311. funcBailOutData[index].simd128U4Syms->Set(i);
  1312. }
  1313. else if (copyStackSym->IsSimd128U8())
  1314. {
  1315. funcBailOutData[index].simd128U8Syms->Set(i);
  1316. }
  1317. else if (copyStackSym->IsSimd128U16())
  1318. {
  1319. funcBailOutData[index].simd128U16Syms->Set(i);
  1320. }
  1321. else if (copyStackSym->IsSimd128B4())
  1322. {
  1323. funcBailOutData[index].simd128B4Syms->Set(i);
  1324. }
  1325. else if (copyStackSym->IsSimd128B8())
  1326. {
  1327. funcBailOutData[index].simd128B8Syms->Set(i);
  1328. }
  1329. else if (copyStackSym->IsSimd128B16())
  1330. {
  1331. funcBailOutData[index].simd128B16Syms->Set(i);
  1332. }
  1333. copyPropSymsIter.RemoveCurrent(this->func->m_alloc);
  1334. }
  1335. NEXT_SLISTBASE_ENTRY_EDITING;
  1336. // Fill in the upward exposed syms
  1337. FOREACH_BITSET_IN_SPARSEBV(id, byteCodeUpwardExposedUsed)
  1338. {
  1339. StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
  1340. Assert(stackSym != nullptr);
  1341. Js::RegSlot i = stackSym->GetByteCodeRegSlot();
  1342. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1343. uint index = stackSymFunc->inlineDepth;
  1344. Assert(i != Js::Constants::NoRegister);
  1345. Assert(i < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
  1346. Assert(index < funcCount);
  1347. __analysis_assume(index < funcCount);
  1348. Assert(funcBailOutData[index].func == stackSymFunc);
  1349. AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
  1350. this->FillBailOutOffset(&funcBailOutData[index].localOffsets[i], stackSym, &state, instr);
  1351. if (stackSym->IsInt32())
  1352. {
  1353. funcBailOutData[index].losslessInt32Syms->Set(i);
  1354. }
  1355. else if (stackSym->IsFloat64())
  1356. {
  1357. funcBailOutData[index].float64Syms->Set(i);
  1358. }
  1359. // SIMD_JS
  1360. else if (stackSym->IsSimd128F4())
  1361. {
  1362. funcBailOutData[index].simd128F4Syms->Set(i);
  1363. }
  1364. else if (stackSym->IsSimd128I4())
  1365. {
  1366. funcBailOutData[index].simd128I4Syms->Set(i);
  1367. }
  1368. else if (stackSym->IsSimd128I8())
  1369. {
  1370. funcBailOutData[index].simd128I8Syms->Set(i);
  1371. }
  1372. else if (stackSym->IsSimd128I16())
  1373. {
  1374. funcBailOutData[index].simd128I16Syms->Set(i);
  1375. }
  1376. else if (stackSym->IsSimd128U4())
  1377. {
  1378. funcBailOutData[index].simd128U4Syms->Set(i);
  1379. }
  1380. else if (stackSym->IsSimd128U8())
  1381. {
  1382. funcBailOutData[index].simd128U8Syms->Set(i);
  1383. }
  1384. else if (stackSym->IsSimd128U16())
  1385. {
  1386. funcBailOutData[index].simd128U16Syms->Set(i);
  1387. }
  1388. else if (stackSym->IsSimd128B4())
  1389. {
  1390. funcBailOutData[index].simd128B4Syms->Set(i);
  1391. }
  1392. else if (stackSym->IsSimd128B8())
  1393. {
  1394. funcBailOutData[index].simd128B8Syms->Set(i);
  1395. }
  1396. else if (stackSym->IsSimd128B16())
  1397. {
  1398. funcBailOutData[index].simd128B16Syms->Set(i);
  1399. }
  1400. }
  1401. NEXT_BITSET_IN_SPARSEBV;
  1402. if (bailOutInfo->usedCapturedValues.argObjSyms)
  1403. {
  1404. FOREACH_BITSET_IN_SPARSEBV(id, bailOutInfo->usedCapturedValues.argObjSyms)
  1405. {
  1406. StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
  1407. Assert(stackSym != nullptr);
  1408. Js::RegSlot i = stackSym->GetByteCodeRegSlot();
  1409. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1410. uint index = stackSymFunc->inlineDepth;
  1411. Assert(i != Js::Constants::NoRegister);
  1412. Assert(i < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
  1413. Assert(index < funcCount);
  1414. __analysis_assume(index < funcCount);
  1415. Assert(funcBailOutData[index].func == stackSymFunc);
  1416. AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
  1417. funcBailOutData[index].localOffsets[i] = BailOutRecord::GetArgumentsObjectOffset();
  1418. }
  1419. NEXT_BITSET_IN_SPARSEBV;
  1420. }
  1421. // In the debug mode, fill in the rest of non temp locals as well in the records so that the restore stub will just get it automatically.
  1422. if (this->func->IsJitInDebugMode())
  1423. {
  1424. // Need to allow filling the formal args slots.
  1425. if (func->GetJITFunctionBody()->HasPropIdToFormalsMap())
  1426. {
  1427. Assert(func->GetJITFunctionBody()->GetInParamsCount() > 0);
  1428. uint32 endIndex = min(func->GetJITFunctionBody()->GetFirstNonTempLocalIndex() + func->GetJITFunctionBody()->GetInParamsCount() - 1, func->GetJITFunctionBody()->GetEndNonTempLocalIndex());
  1429. for (uint32 index = func->GetJITFunctionBody()->GetFirstNonTempLocalIndex(); index < endIndex; index++)
  1430. {
  1431. StackSym * stackSym = this->func->m_symTable->FindStackSym(index);
  1432. if (stackSym != nullptr)
  1433. {
  1434. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1435. Js::RegSlot regSlotId = stackSym->GetByteCodeRegSlot();
  1436. if (func->IsNonTempLocalVar(regSlotId))
  1437. {
  1438. if (!func->GetJITFunctionBody()->IsRegSlotFormal(regSlotId - func->GetJITFunctionBody()->GetFirstNonTempLocalIndex()))
  1439. {
  1440. continue;
  1441. }
  1442. uint dataIndex = stackSymFunc->inlineDepth;
  1443. Assert(dataIndex == 0); // There is no inlining while in debug mode
  1444. // Filling in which are not filled already.
  1445. __analysis_assume(dataIndex == 0);
  1446. if (funcBailOutData[dataIndex].localOffsets[regSlotId] == 0)
  1447. {
  1448. int32 offset = GetStackOffset(regSlotId);
  1449. #ifdef MD_GROW_LOCALS_AREA_UP
  1450. Assert(offset >= 0);
  1451. #else
  1452. Assert(offset < 0);
  1453. #endif
  1454. funcBailOutData[dataIndex].localOffsets[regSlotId] = this->func->AdjustOffsetValue(offset);
  1455. // We don't support typespec for debug, rework on the bellow assert once we start support them.
  1456. Assert(!stackSym->IsInt32() && !stackSym->IsFloat64() && !stackSym->IsSimd128());
  1457. }
  1458. }
  1459. }
  1460. }
  1461. }
  1462. }
  1463. // fill in the out params
  1464. uint startCallCount = bailOutInfo->startCallCount;
  1465. if (bailOutInfo->totalOutParamCount != 0)
  1466. {
  1467. Assert(startCallCount != 0);
  1468. uint argOutSlot = 0;
  1469. uint * startCallOutParamCounts = (uint*)NativeCodeDataNewArrayNoFixup(allocator, UIntType<DataDesc_ArgOutOffsetInfo_StartCallOutParamCounts>, startCallCount);
  1470. #ifdef _M_IX86
  1471. uint * startCallArgRestoreAdjustCounts = (uint*)NativeCodeDataNewArrayNoFixup(allocator, UIntType<DataDesc_ArgOutOffsetInfo_StartCallOutParamCounts>, startCallCount);
  1472. #endif
  1473. NativeCodeData::AllocatorNoFixup<BVFixed>* allocatorT = (NativeCodeData::AllocatorNoFixup<BVFixed>*)allocator;
  1474. BVFixed * argOutFloat64Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1475. BVFixed * argOutLosslessInt32Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1476. // SIMD_JS
  1477. BVFixed * argOutSimd128F4Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1478. BVFixed * argOutSimd128I4Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1479. BVFixed * argOutSimd128I8Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1480. BVFixed * argOutSimd128I16Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1481. BVFixed * argOutSimd128U4Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1482. BVFixed * argOutSimd128U8Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1483. BVFixed * argOutSimd128U16Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1484. BVFixed * argOutSimd128B4Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1485. BVFixed * argOutSimd128B8Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1486. BVFixed * argOutSimd128B16Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1487. int* outParamOffsets = bailOutInfo->outParamOffsets = (int*)NativeCodeDataNewArrayZNoFixup(allocator, IntType<DataDesc_BailoutInfo_CotalOutParamCount>, bailOutInfo->totalOutParamCount);
  1488. #ifdef _M_IX86
  1489. int currentStackOffset = 0;
  1490. bailOutInfo->outParamFrameAdjustArgSlot = JitAnew(this->func->m_alloc, BVSparse<JitArenaAllocator>, this->func->m_alloc);
  1491. #endif
  1492. if (this->func->HasInlinee())
  1493. {
  1494. bailOutInfo->outParamInlinedArgSlot = JitAnew(this->func->m_alloc, BVSparse<JitArenaAllocator>, this->func->m_alloc);
  1495. }
  1496. #if DBG
  1497. uint lastFuncIndex = 0;
  1498. #endif
  1499. for (uint i = 0; i < startCallCount; i++)
  1500. {
  1501. uint outParamStart = argOutSlot; // Start of the out param offset for the current start call
  1502. // Number of out param for the current start call
  1503. uint outParamCount = bailOutInfo->GetStartCallOutParamCount(i);
  1504. startCallOutParamCounts[i] = outParamCount;
  1505. #ifdef _M_IX86
  1506. startCallArgRestoreAdjustCounts[i] = bailOutInfo->startCallInfo[i].argRestoreAdjustCount;
  1507. // Only x86 has a progression of pushes of out args, with stack alignment.
  1508. bool fDoStackAdjust = false;
  1509. if (!bailOutInfo->inlinedStartCall->Test(i))
  1510. {
  1511. // Only do the stack adjustment if the StartCall has not been moved down past the bailout.
  1512. fDoStackAdjust = bailOutInfo->NeedsStartCallAdjust(i, instr);
  1513. if (fDoStackAdjust)
  1514. {
  1515. currentStackOffset -= Math::Align<int>(outParamCount * MachPtr, MachStackAlignment);
  1516. }
  1517. }
  1518. #endif
  1519. Func * currentStartCallFunc = bailOutInfo->startCallFunc[i];
  1520. #if DBG
  1521. Assert(lastFuncIndex <= currentStartCallFunc->inlineDepth);
  1522. lastFuncIndex = currentStartCallFunc->inlineDepth;
  1523. #endif
  1524. FuncBailOutData& currentFuncBailOutData = funcBailOutData[currentStartCallFunc->inlineDepth];
  1525. BailOutRecord * currentBailOutRecord = currentFuncBailOutData.bailOutRecord;
  1526. if (currentBailOutRecord->argOutOffsetInfo == nullptr)
  1527. {
  1528. currentBailOutRecord->argOutOffsetInfo = NativeCodeDataNew(allocator, BailOutRecord::ArgOutOffsetInfo);
  1529. currentBailOutRecord->argOutOffsetInfo->argOutFloat64Syms = nullptr;
  1530. currentBailOutRecord->argOutOffsetInfo->argOutLosslessInt32Syms = nullptr;
  1531. // SIMD_JS
  1532. currentBailOutRecord->argOutOffsetInfo->argOutSimd128F4Syms = nullptr;
  1533. currentBailOutRecord->argOutOffsetInfo->argOutSimd128I4Syms = nullptr;
  1534. currentBailOutRecord->argOutOffsetInfo->argOutSimd128I8Syms = nullptr;
  1535. currentBailOutRecord->argOutOffsetInfo->argOutSimd128I16Syms = nullptr;
  1536. currentBailOutRecord->argOutOffsetInfo->argOutSimd128U4Syms = nullptr;
  1537. currentBailOutRecord->argOutOffsetInfo->argOutSimd128U8Syms = nullptr;
  1538. currentBailOutRecord->argOutOffsetInfo->argOutSimd128U16Syms = nullptr;
  1539. currentBailOutRecord->argOutOffsetInfo->argOutSimd128B4Syms = nullptr;
  1540. currentBailOutRecord->argOutOffsetInfo->argOutSimd128B8Syms = nullptr;
  1541. currentBailOutRecord->argOutOffsetInfo->argOutSimd128B16Syms = nullptr;
  1542. currentBailOutRecord->argOutOffsetInfo->argOutSymStart = 0;
  1543. currentBailOutRecord->argOutOffsetInfo->outParamOffsets = nullptr;
  1544. currentBailOutRecord->argOutOffsetInfo->startCallOutParamCounts = nullptr;
  1545. #ifdef PROFILE_BAILOUT_RECORD_MEMORY
  1546. if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
  1547. {
  1548. this->func->GetScriptContext()->bailOutRecordBytes += sizeof(BailOutRecord::ArgOutOffsetInfo);
  1549. }
  1550. #endif
  1551. }
  1552. currentBailOutRecord->argOutOffsetInfo->startCallCount++;
  1553. if (currentBailOutRecord->argOutOffsetInfo->outParamOffsets == nullptr)
  1554. {
  1555. Assert(currentBailOutRecord->argOutOffsetInfo->startCallOutParamCounts == nullptr);
  1556. currentBailOutRecord->argOutOffsetInfo->startCallIndex = i;
  1557. currentBailOutRecord->argOutOffsetInfo->startCallOutParamCounts = &startCallOutParamCounts[i];
  1558. #ifdef _M_IX86
  1559. currentBailOutRecord->startCallArgRestoreAdjustCounts = &startCallArgRestoreAdjustCounts[i];
  1560. #endif
  1561. currentBailOutRecord->argOutOffsetInfo->outParamOffsets = &outParamOffsets[outParamStart];
  1562. currentBailOutRecord->argOutOffsetInfo->argOutSymStart = outParamStart;
  1563. currentBailOutRecord->argOutOffsetInfo->argOutFloat64Syms = argOutFloat64Syms;
  1564. currentBailOutRecord->argOutOffsetInfo->argOutLosslessInt32Syms = argOutLosslessInt32Syms;
  1565. // SIMD_JS
  1566. currentBailOutRecord->argOutOffsetInfo->argOutSimd128F4Syms = argOutSimd128F4Syms;
  1567. currentBailOutRecord->argOutOffsetInfo->argOutSimd128I4Syms = argOutSimd128I4Syms ;
  1568. currentBailOutRecord->argOutOffsetInfo->argOutSimd128I8Syms = argOutSimd128I8Syms ;
  1569. currentBailOutRecord->argOutOffsetInfo->argOutSimd128I16Syms = argOutSimd128I16Syms ;
  1570. currentBailOutRecord->argOutOffsetInfo->argOutSimd128U4Syms = argOutSimd128U4Syms ;
  1571. currentBailOutRecord->argOutOffsetInfo->argOutSimd128U8Syms = argOutSimd128U8Syms ;
  1572. currentBailOutRecord->argOutOffsetInfo->argOutSimd128U16Syms = argOutSimd128U16Syms ;
  1573. currentBailOutRecord->argOutOffsetInfo->argOutSimd128B4Syms = argOutSimd128U4Syms;
  1574. currentBailOutRecord->argOutOffsetInfo->argOutSimd128B8Syms = argOutSimd128U8Syms;
  1575. currentBailOutRecord->argOutOffsetInfo->argOutSimd128B16Syms = argOutSimd128U16Syms;
  1576. }
  1577. #if DBG_DUMP
  1578. if (PHASE_DUMP(Js::BailOutPhase, this->func))
  1579. {
  1580. Output::Print(_u("Bailout function: %s [#%d] \n"), currentStartCallFunc->GetJITFunctionBody()->GetDisplayName(),
  1581. currentStartCallFunc->GetJITFunctionBody()->GetFunctionNumber());
  1582. }
  1583. #endif
  1584. for (uint j = 0; j < outParamCount; j++, argOutSlot++)
  1585. {
  1586. StackSym * sym = bailOutInfo->argOutSyms[argOutSlot];
  1587. if (sym == nullptr)
  1588. {
  1589. // This can happen when instr with bailout occurs before all ArgOuts for current call instr are processed.
  1590. continue;
  1591. }
  1592. Assert(sym->GetArgSlotNum() > 0 && sym->GetArgSlotNum() <= outParamCount);
  1593. uint argSlot = sym->GetArgSlotNum() - 1;
  1594. uint outParamOffsetIndex = outParamStart + argSlot;
  1595. if (!sym->m_isBailOutReferenced && !sym->IsArgSlotSym())
  1596. {
  1597. FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, constantValue, &bailOutInfo->usedCapturedValues.constantValues, iterator)
  1598. {
  1599. if (constantValue.Key()->m_id == sym->m_id)
  1600. {
  1601. Js::Var varValue = constantValue.Value().ToVar(func);
  1602. state.constantList.Prepend(varValue);
  1603. outParamOffsets[outParamOffsetIndex] = state.constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
  1604. #if DBG_DUMP
  1605. if (PHASE_DUMP(Js::BailOutPhase, this->func))
  1606. {
  1607. Output::Print(_u("OutParam #%d: "), argSlot);
  1608. sym->Dump();
  1609. Output::Print(_u(" (0x%p (Var)))\n"), varValue);
  1610. }
  1611. #endif
  1612. iterator.RemoveCurrent(func->m_alloc);
  1613. break;
  1614. }
  1615. }
  1616. NEXT_SLISTBASE_ENTRY_EDITING;
  1617. if (outParamOffsets[outParamOffsetIndex])
  1618. {
  1619. continue;
  1620. }
  1621. FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSym, &bailOutInfo->usedCapturedValues.copyPropSyms, iter)
  1622. {
  1623. if (copyPropSym.Key()->m_id == sym->m_id)
  1624. {
  1625. StackSym * copyStackSym = copyPropSym.Value();
  1626. BVSparse<JitArenaAllocator>* argObjSyms = bailOutInfo->usedCapturedValues.argObjSyms;
  1627. if (argObjSyms && argObjSyms->Test(copyStackSym->m_id))
  1628. {
  1629. outParamOffsets[outParamOffsetIndex] = BailOutRecord::GetArgumentsObjectOffset();
  1630. }
  1631. else
  1632. {
  1633. this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], copyStackSym, &state, instr);
  1634. if (copyStackSym->IsInt32())
  1635. {
  1636. argOutLosslessInt32Syms->Set(outParamOffsetIndex);
  1637. }
  1638. else if (copyStackSym->IsFloat64())
  1639. {
  1640. argOutFloat64Syms->Set(outParamOffsetIndex);
  1641. }
  1642. // SIMD_JS
  1643. else if (copyStackSym->IsSimd128F4())
  1644. {
  1645. argOutSimd128F4Syms->Set(outParamOffsetIndex);
  1646. }
  1647. else if (copyStackSym->IsSimd128I4())
  1648. {
  1649. argOutSimd128I4Syms->Set(outParamOffsetIndex);
  1650. }
  1651. else if (copyStackSym->IsSimd128I8())
  1652. {
  1653. argOutSimd128I8Syms->Set(outParamOffsetIndex);
  1654. }
  1655. else if (copyStackSym->IsSimd128I16())
  1656. {
  1657. argOutSimd128I16Syms->Set(outParamOffsetIndex);
  1658. }
  1659. else if (copyStackSym->IsSimd128U4())
  1660. {
  1661. argOutSimd128U4Syms->Set(outParamOffsetIndex);
  1662. }
  1663. else if (copyStackSym->IsSimd128U8())
  1664. {
  1665. argOutSimd128U8Syms->Set(outParamOffsetIndex);
  1666. }
  1667. else if (copyStackSym->IsSimd128U16())
  1668. {
  1669. argOutSimd128U16Syms->Set(outParamOffsetIndex);
  1670. }
  1671. else if (copyStackSym->IsSimd128B4())
  1672. {
  1673. argOutSimd128B4Syms->Set(outParamOffsetIndex);
  1674. }
  1675. else if (copyStackSym->IsSimd128B8())
  1676. {
  1677. argOutSimd128B8Syms->Set(outParamOffsetIndex);
  1678. }
  1679. else if (copyStackSym->IsSimd128B16())
  1680. {
  1681. argOutSimd128B16Syms->Set(outParamOffsetIndex);
  1682. }
  1683. }
  1684. #if DBG_DUMP
  1685. if (PHASE_DUMP(Js::BailOutPhase, this->func))
  1686. {
  1687. Output::Print(_u("OutParam #%d: "), argSlot);
  1688. sym->Dump();
  1689. Output::Print(_u(" Copy Prop sym:"));
  1690. copyStackSym->Dump();
  1691. Output::Print(_u("\n"));
  1692. }
  1693. #endif
  1694. iter.RemoveCurrent(func->m_alloc);
  1695. break;
  1696. }
  1697. }
  1698. NEXT_SLISTBASE_ENTRY_EDITING;
  1699. Assert(outParamOffsets[outParamOffsetIndex] != 0);
  1700. }
  1701. else
  1702. {
  1703. if (sym->IsArgSlotSym())
  1704. {
  1705. if (sym->m_isSingleDef)
  1706. {
  1707. Assert(sym->m_instrDef->m_func == currentStartCallFunc);
  1708. IR::Instr * instrDef = sym->m_instrDef;
  1709. Assert(LowererMD::IsAssign(instrDef));
  1710. if (instrDef->GetNumber() < instr->GetNumber())
  1711. {
  1712. // The ArgOut instr is above current bailout instr.
  1713. AssertMsg(sym->IsVar(), "Arg out slot can only be var.");
  1714. if (sym->m_isInlinedArgSlot)
  1715. {
  1716. Assert(this->func->HasInlinee());
  1717. #ifdef MD_GROW_LOCALS_AREA_UP
  1718. outParamOffsets[outParamOffsetIndex] = -((int)sym->m_offset + BailOutInfo::StackSymBias);
  1719. #else
  1720. outParamOffsets[outParamOffsetIndex] = sym->m_offset;
  1721. #endif
  1722. bailOutInfo->outParamInlinedArgSlot->Set(outParamOffsetIndex);
  1723. }
  1724. else if (sym->m_isOrphanedArg)
  1725. {
  1726. #ifdef MD_GROW_LOCALS_AREA_UP
  1727. outParamOffsets[outParamOffsetIndex] = -((int)sym->m_offset + BailOutInfo::StackSymBias);
  1728. #else
  1729. // Stack offset are negative, includes the PUSH EBP and return address
  1730. outParamOffsets[outParamOffsetIndex] = sym->m_offset - (2 * MachPtr);
  1731. #endif
  1732. }
  1733. #ifdef _M_IX86
  1734. else if (fDoStackAdjust)
  1735. {
  1736. // If we've got args on the stack, then we must have seen (and adjusted for) the StartCall.
  1737. // The values is already on the stack
  1738. // On AMD64/ARM, ArgOut should have been moved next to the call, and shouldn't have bailout between them
  1739. // Except for inlined arg outs
  1740. outParamOffsets[outParamOffsetIndex] = currentStackOffset + argSlot * MachPtr;
  1741. bailOutInfo->outParamFrameAdjustArgSlot->Set(outParamOffsetIndex);
  1742. }
  1743. #endif
  1744. else
  1745. {
  1746. this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], sym, &state, instr);
  1747. }
  1748. }
  1749. else
  1750. {
  1751. // The ArgOut instruction might have moved down right next to the call,
  1752. // because of a register calling convention, cloning, etc. This loop walks the chain
  1753. // of assignments to try to find the original location of the assignment where
  1754. // the value is available.
  1755. while (!sym->IsConst())
  1756. {
  1757. // the value is in the register
  1758. IR::RegOpnd * regOpnd = instrDef->GetSrc1()->AsRegOpnd();
  1759. sym = regOpnd->m_sym;
  1760. if (sym->scratch.linearScan.lifetime->start < instr->GetNumber())
  1761. {
  1762. break;
  1763. }
  1764. if (sym->m_isEncodedConstant)
  1765. {
  1766. break;
  1767. }
  1768. // For out parameter we might need to follow multiple assignments
  1769. Assert(sym->m_isSingleDef);
  1770. instrDef = sym->m_instrDef;
  1771. Assert(LowererMD::IsAssign(instrDef));
  1772. }
  1773. if (bailOutInfo->usedCapturedValues.argObjSyms && bailOutInfo->usedCapturedValues.argObjSyms->Test(sym->m_id))
  1774. {
  1775. //foo.apply(this,arguments) case and we bailout when the apply is overridden. We need to restore the arguments object.
  1776. outParamOffsets[outParamOffsetIndex] = BailOutRecord::GetArgumentsObjectOffset();
  1777. }
  1778. else
  1779. {
  1780. this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], sym, &state, instr);
  1781. }
  1782. }
  1783. }
  1784. }
  1785. else
  1786. {
  1787. this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], sym, &state, instr);
  1788. }
  1789. if (sym->IsFloat64())
  1790. {
  1791. argOutFloat64Syms->Set(outParamOffsetIndex);
  1792. }
  1793. else if (sym->IsInt32())
  1794. {
  1795. argOutLosslessInt32Syms->Set(outParamOffsetIndex);
  1796. }
  1797. // SIMD_JS
  1798. else if (sym->IsSimd128F4())
  1799. {
  1800. argOutSimd128F4Syms->Set(outParamOffsetIndex);
  1801. }
  1802. else if (sym->IsSimd128I4())
  1803. {
  1804. argOutSimd128I4Syms->Set(outParamOffsetIndex);
  1805. }
  1806. else if (sym->IsSimd128I8())
  1807. {
  1808. argOutSimd128I8Syms->Set(outParamOffsetIndex);
  1809. }
  1810. else if (sym->IsSimd128I16())
  1811. {
  1812. argOutSimd128I16Syms->Set(outParamOffsetIndex);
  1813. }
  1814. else if (sym->IsSimd128U4())
  1815. {
  1816. argOutSimd128U4Syms->Set(outParamOffsetIndex);
  1817. }
  1818. else if (sym->IsSimd128U8())
  1819. {
  1820. argOutSimd128U8Syms->Set(outParamOffsetIndex);
  1821. }
  1822. else if (sym->IsSimd128U16())
  1823. {
  1824. argOutSimd128U16Syms->Set(outParamOffsetIndex);
  1825. }
  1826. else if (sym->IsSimd128B4())
  1827. {
  1828. argOutSimd128B4Syms->Set(outParamOffsetIndex);
  1829. }
  1830. else if (sym->IsSimd128B8())
  1831. {
  1832. argOutSimd128B8Syms->Set(outParamOffsetIndex);
  1833. }
  1834. else if (sym->IsSimd128B16())
  1835. {
  1836. argOutSimd128B16Syms->Set(outParamOffsetIndex);
  1837. }
  1838. #if DBG_DUMP
  1839. if (PHASE_DUMP(Js::BailOutPhase, this->func))
  1840. {
  1841. Output::Print(_u("OutParam #%d: "), argSlot);
  1842. sym->Dump();
  1843. Output::Print(_u("\n"));
  1844. }
  1845. #endif
  1846. }
  1847. }
  1848. }
  1849. }
  1850. else
  1851. {
  1852. Assert(bailOutInfo->argOutSyms == nullptr);
  1853. Assert(bailOutInfo->startCallCount == 0);
  1854. }
  1855. if (this->currentBlock->inlineeStack.Count() > 0)
  1856. {
  1857. this->SpillInlineeArgs(instr);
  1858. }
  1859. else
  1860. {
  1861. // There is a chance that the instruction was hoisting from an inlinee func
  1862. // but if there are no inlinee frames - make sure the instr belongs to the outer func
  1863. // to ensure encoder does not encode an inline frame here - which does not really exist
  1864. instr->m_func = this->func;
  1865. }
  1866. linearScanMD.GenerateBailOut(instr, state.registerSaveSyms, _countof(state.registerSaveSyms));
  1867. // generate the constant table
  1868. Js::Var * constants = NativeCodeDataNewArrayNoFixup(allocator, Js::Var, state.constantList.Count());
  1869. uint constantCount = state.constantList.Count();
  1870. while (!state.constantList.Empty())
  1871. {
  1872. Js::Var value = state.constantList.Head();
  1873. state.constantList.RemoveHead();
  1874. constants[state.constantList.Count()] = value;
  1875. }
  1876. // Generate the stack literal bail out info
  1877. FillStackLiteralBailOutRecord(instr, bailOutInfo, funcBailOutData, funcCount);
  1878. for (uint i = 0; i < funcCount; i++)
  1879. {
  1880. funcBailOutData[i].bailOutRecord->constants = constants;
  1881. #if DBG
  1882. funcBailOutData[i].bailOutRecord->inlineDepth = funcBailOutData[i].func->inlineDepth;
  1883. funcBailOutData[i].bailOutRecord->constantCount = constantCount;
  1884. #endif
  1885. uint32 tableIndex = funcBailOutData[i].func->IsTopFunc() ? 0 : funcBailOutData[i].func->m_inlineeId;
  1886. funcBailOutData[i].FinalizeLocalOffsets(tempAlloc, this->globalBailOutRecordTables[tableIndex], &(this->lastUpdatedRowIndices[tableIndex]));
  1887. #if DBG_DUMP
  1888. if(PHASE_DUMP(Js::BailOutPhase, this->func))
  1889. {
  1890. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  1891. Output::Print(_u("Bailout function: %s [%s]\n"), funcBailOutData[i].func->GetJITFunctionBody()->GetDisplayName(), funcBailOutData[i].func->GetDebugNumberSet(debugStringBuffer), i);
  1892. funcBailOutData[i].bailOutRecord->Dump();
  1893. }
  1894. #endif
  1895. funcBailOutData[i].Clear(this->tempAlloc);
  1896. #ifdef PROFILE_BAILOUT_RECORD_MEMORY
  1897. if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
  1898. {
  1899. this->func->GetScriptContext()->bailOutRecordBytes += sizeof(BailOutRecord);
  1900. }
  1901. #endif
  1902. }
  1903. JitAdeleteArray(this->tempAlloc, funcCount, funcBailOutData);
  1904. }
  1905. template <typename Fn>
  1906. void
  1907. LinearScan::ForEachStackLiteralBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo, FuncBailOutData * funcBailOutData, uint funcCount, Fn fn)
  1908. {
  1909. for (uint i = 0; i < bailOutInfo->stackLiteralBailOutInfoCount; i++)
  1910. {
  1911. BailOutInfo::StackLiteralBailOutInfo& stackLiteralBailOutInfo = bailOutInfo->stackLiteralBailOutInfo[i];
  1912. StackSym * stackSym = stackLiteralBailOutInfo.stackSym;
  1913. Assert(stackSym->scratch.linearScan.lifetime->start < instr->GetNumber());
  1914. Assert(stackSym->scratch.linearScan.lifetime->end >= instr->GetNumber());
  1915. Js::RegSlot regSlot = stackSym->GetByteCodeRegSlot();
  1916. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1917. uint index = stackSymFunc->inlineDepth;
  1918. Assert(regSlot != Js::Constants::NoRegister);
  1919. Assert(regSlot < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
  1920. Assert(index < funcCount);
  1921. Assert(funcBailOutData[index].func == stackSymFunc);
  1922. Assert(funcBailOutData[index].localOffsets[regSlot] != 0);
  1923. fn(index, stackLiteralBailOutInfo, regSlot);
  1924. }
  1925. }
void
LinearScan::FillStackLiteralBailOutRecord(IR::Instr * instr, BailOutInfo * bailOutInfo, FuncBailOutData * funcBailOutData, uint funcCount)
{
    // Populate each function's BailOutRecord with its stack-literal entries
    // (bytecode reg slot + number of initialized fields). Done in three
    // passes over the same data: count per function, allocate arrays, fill.
    if (bailOutInfo->stackLiteralBailOutInfoCount)
    {
        // Count the data
        ForEachStackLiteralBailOutInfo(instr, bailOutInfo, funcBailOutData, funcCount,
            [=](uint funcIndex, BailOutInfo::StackLiteralBailOutInfo& stackLiteralBailOutInfo, Js::RegSlot regSlot)
        {
            funcBailOutData[funcIndex].bailOutRecord->stackLiteralBailOutRecordCount++;
        });

        // Allocate the data
        NativeCodeData::Allocator * allocator = this->func->GetNativeCodeDataAllocator();
        for (uint i = 0; i < funcCount; i++)
        {
            uint stackLiteralBailOutRecordCount = funcBailOutData[i].bailOutRecord->stackLiteralBailOutRecordCount;
            if (stackLiteralBailOutRecordCount)
            {
                funcBailOutData[i].bailOutRecord->stackLiteralBailOutRecord =
                    NativeCodeDataNewArrayNoFixup(allocator, BailOutRecord::StackLiteralBailOutRecord, stackLiteralBailOutRecordCount);
                // reset the count so we can track how much we have filled below
                // (the fill pass below re-increments it back to the same total)
                funcBailOutData[i].bailOutRecord->stackLiteralBailOutRecordCount = 0;
            }
        }

        // Fill out the data; the count field doubles as the write cursor here.
        ForEachStackLiteralBailOutInfo(instr, bailOutInfo, funcBailOutData, funcCount,
            [=](uint funcIndex, BailOutInfo::StackLiteralBailOutInfo& stackLiteralBailOutInfo, Js::RegSlot regSlot)
        {
            uint& recordIndex = funcBailOutData[funcIndex].bailOutRecord->stackLiteralBailOutRecordCount;
            BailOutRecord::StackLiteralBailOutRecord& stackLiteralBailOutRecord =
                funcBailOutData[funcIndex].bailOutRecord->stackLiteralBailOutRecord[recordIndex++];
            stackLiteralBailOutRecord.regSlot = regSlot;
            stackLiteralBailOutRecord.initFldCount = stackLiteralBailOutInfo.initFldCount;
        });
    }
}
  1962. void
  1963. LinearScan::PrepareForUse(Lifetime * lifetime)
  1964. {
  1965. if (lifetime->isOpHelperSpilled)
  1966. {
  1967. // using a value in a helper that has been spilled in the helper block.
  1968. // Just spill it for real
  1969. // We must be in a helper block and the lifetime must
  1970. // start before the helper block
  1971. Assert(this->IsInHelperBlock());
  1972. Assert(lifetime->start < this->HelperBlockStartInstrNumber());
  1973. IR::Instr *insertionInstr = this->currentOpHelperBlock->opHelperLabel;
  1974. this->RemoveOpHelperSpilled(lifetime);
  1975. this->SpillLiveRange(lifetime, insertionInstr);
  1976. }
  1977. }
void
LinearScan::RecordUse(Lifetime * lifetime, IR::Instr * instr, IR::RegOpnd * regOpnd, bool isFromBailout)
{
    // Track a use of 'lifetime' at 'instr' so that, if the lifetime is later
    // spilled, we know where reloads would be needed and what the spill costs.
    uint32 useCountCost = LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr || isFromBailout));

    // We only spill at the use for constants (i.e. reload) or for function with try blocks. We don't
    // have real accurate flow info for the latter.
    if ((regOpnd && regOpnd->m_sym->IsConst())
        || (
            (this->func->HasTry() && !this->func->DoOptimizeTryCatch()) &&
            this->IsInLoop() &&
            lifetime->lastUseLabel != this->lastLabel &&
            this->liveOnBackEdgeSyms->Test(lifetime->sym->m_id) &&
            !(lifetime->previousDefBlockNumber == currentBlockNumber && !lifetime->defList.Empty())
        ))
    {
        // Keep track of all the uses of this lifetime in case we decide to spill it.
        // Note that we won't need to insert reloads if the use are not in a loop,
        // unless it is a const. We always reload const instead of spilling to the stack.
        //
        // We also don't need to insert reloads if the previous use was in the same basic block (the first use in the block
        // would have done the reload), or the previous def is in the same basic block and the value is still live. Furthermore,
        // if the previous def is in the same basic block, the value is still live, and there's another def after this use in
        // the same basic block, the previous def may not do a spill store, so we must not reload the value from the stack.
        lifetime->useList.Prepend(instr);
        lifetime->lastUseLabel = this->lastLabel;
        lifetime->AddToUseCountAdjust(useCountCost, this->curLoop, this->func);
    }
    else
    {
        if (!isFromBailout)
        {
            // Since we won't reload this use if the lifetime gets spilled, adjust the spill cost to reflect this.
            lifetime->SubFromUseCount(useCountCost, this->curLoop);
        }
    }

    // Inside a loop, also note which register got used so loop-bottom
    // compensation can be skipped for regs spilled before any real use.
    if (this->IsInLoop())
    {
        this->RecordLoopUse(lifetime, lifetime->reg);
    }
}
  2018. void LinearScan::RecordLoopUse(Lifetime *lifetime, RegNum reg)
  2019. {
  2020. if (!this->IsInLoop())
  2021. {
  2022. return;
  2023. }
  2024. if (this->func->HasTry() && !this->func->DoOptimizeTryCatch())
  2025. {
  2026. return;
  2027. }
  2028. // Record on each loop which register live into the loop ended up being used.
  2029. // We are trying to avoid the need for compensation at the bottom of the loop if
  2030. // the reg ends up being spilled before it is actually used.
  2031. Loop *curLoop = this->curLoop;
  2032. SymID symId = (SymID)-1;
  2033. if (lifetime)
  2034. {
  2035. symId = lifetime->sym->m_id;
  2036. }
  2037. while (curLoop)
  2038. {
  2039. // Note that if the lifetime is spilled and reallocated to the same register,
  2040. // will mark it as used when we shouldn't. However, it is hard at this point to handle
  2041. // the case were a flow edge from the previous allocation merges in with the new allocation.
  2042. // No compensation is inserted to let us know with previous lifetime needs reloading at the bottom of the loop...
  2043. if (lifetime && curLoop->regAlloc.loopTopRegContent[reg] == lifetime)
  2044. {
  2045. curLoop->regAlloc.symRegUseBv->Set(symId);
  2046. }
  2047. curLoop->regAlloc.regUseBv.Set(reg);
  2048. curLoop = curLoop->parent;
  2049. }
  2050. }
void
LinearScan::RecordDef(Lifetime *const lifetime, IR::Instr *const instr, const uint32 useCountCost)
{
    // Track a definition of 'lifetime' at 'instr': update loop def/use
    // bookkeeping, perform write-through stores where required, and maintain
    // the per-basic-block def list used to place spill stores.
    Assert(lifetime);
    Assert(instr);
    Assert(instr->GetDst());
    IR::RegOpnd * regOpnd = instr->GetDst()->AsRegOpnd();
    Assert(regOpnd);
    StackSym *const sym = regOpnd->m_sym;

    // Mark the sym as defined (and its register as used) in every enclosing loop.
    if (this->IsInLoop())
    {
        Loop *curLoop = this->curLoop;
        while (curLoop)
        {
            curLoop->regAlloc.defdInLoopBv->Set(lifetime->sym->m_id);
            curLoop->regAlloc.regUseBv.Set(lifetime->reg);
            curLoop = curLoop->parent;
        }
    }

    if (lifetime->isSpilled)
    {
        return;
    }

    if (this->NeedsWriteThrough(sym))
    {
        if (this->IsSymNonTempLocalVar(sym))
        {
            // In the debug mode, we will write through on the stack location.
            WriteThroughForLocal(regOpnd, lifetime, instr);
        }
        else
        {
            // If this is a write-through sym, it should be live on the entry to 'try' and should have already
            // been allocated when we spilled all active lifetimes there.
            // If it was not part of the active lifetimes on entry to the 'try' then it must have been spilled
            // earlier and should have stack allocated for it.
            Assert(this->NeedsWriteThroughForEH(sym) && sym->IsAllocated());
            this->InsertStore(instr, sym, lifetime->reg);
        }

        // No need to record-def further as we already have stack allocated for it.
        return;
    }

    if (sym->m_isSingleDef)
    {
        lifetime->AddToUseCount(useCountCost, this->curLoop, this->func);
        // the def of a single-def sym is already on the sym
        return;
    }

    if(lifetime->previousDefBlockNumber == currentBlockNumber && !lifetime->defList.Empty())
    {
        // Only keep track of the last def in each basic block. When there are multiple defs of a sym in a basic block, upon
        // spill of that sym, a store needs to be inserted only after the last def of the sym.
        Assert(lifetime->defList.Head()->GetDst()->AsRegOpnd()->m_sym == sym);
        lifetime->defList.Head() = instr;
    }
    else
    {
        // First def of this sym in the current basic block
        lifetime->previousDefBlockNumber = currentBlockNumber;
        lifetime->defList.Prepend(instr);

        // Keep track of the cost of reinserting all the defs if we choose to spill this way.
        lifetime->allDefsCost += useCountCost;
    }
}
// LinearScan::SetUse
// Process a register-operand use: if the sym's lifetime is spilled, try const
// folding, second-chance allocation, or a temp-register reload; then record
// the use and stamp the physical register onto the operand.
void
LinearScan::SetUse(IR::Instr *instr, IR::RegOpnd *regOpnd)
{
    // Operand already bound to a physical register: just note the loop use.
    if (regOpnd->GetReg() != RegNOREG)
    {
        this->RecordLoopUse(nullptr, regOpnd->GetReg());
        return;
    }

    StackSym *sym = regOpnd->m_sym;
    Lifetime * lifetime = sym->scratch.linearScan.lifetime;

    this->PrepareForUse(lifetime);

    if (lifetime->isSpilled)
    {
        // See if it has been loaded in this basic block
        RegNum reg = this->GetAssignedTempReg(lifetime, regOpnd->GetType());
        if (reg == RegNOREG)
        {
            if (sym->IsConst() && EncoderMD::TryConstFold(instr, regOpnd))
            {
                // Constant folded directly into the instruction; no reg needed.
                return;
            }

            reg = this->SecondChanceAllocation(lifetime, false);
            if (reg != RegNOREG)
            {
                IR::Instr *insertInstr = this->TryHoistLoad(instr, lifetime);
                this->InsertLoad(insertInstr, sym, reg);
            }
            else
            {
                // Try folding if there are no registers available
                if (!sym->IsConst() && !this->RegsAvailable(regOpnd->GetType()) && EncoderMD::TryFold(instr, regOpnd))
                {
                    return;
                }

                // We need a reg no matter what. Try to force second chance to re-allocate this.
                reg = this->SecondChanceAllocation(lifetime, true);
                if (reg == RegNOREG)
                {
                    // Forcing second chance didn't work.
                    // Allocate a new temp reg for it
                    reg = this->FindReg(nullptr, regOpnd);
                    this->AssignTempReg(lifetime, reg);
                }

                this->InsertLoad(instr, sym, reg);
            }
        }
    }

    if (!lifetime->isSpilled && instr->GetNumber() < lifetime->end)
    {
        // Don't bother to record the use if this is the last use of the lifetime.
        this->RecordUse(lifetime, instr, regOpnd);
    }
    else
    {
        lifetime->SubFromUseCount(LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr)), this->curLoop);
    }
    this->instrUseRegs.Set(lifetime->reg);

    this->SetReg(regOpnd);
}
  2175. // LinearScan::SetReg
  2176. void
  2177. LinearScan::SetReg(IR::RegOpnd *regOpnd)
  2178. {
  2179. if (regOpnd->GetReg() == RegNOREG)
  2180. {
  2181. RegNum reg = regOpnd->m_sym->scratch.linearScan.lifetime->reg;
  2182. AssertMsg(reg != RegNOREG, "Reg should be allocated here...");
  2183. regOpnd->SetReg(reg);
  2184. }
  2185. }
  2186. bool
  2187. LinearScan::SkipNumberedInstr(IR::Instr *instr)
  2188. {
  2189. if (instr->IsLabelInstr())
  2190. {
  2191. if (instr->AsLabelInstr()->m_isLoopTop)
  2192. {
  2193. Assert(instr->GetNumber() != instr->m_next->GetNumber()
  2194. && (instr->GetNumber() != instr->m_prev->GetNumber() || instr->m_prev->m_opcode == Js::OpCode::Nop));
  2195. }
  2196. else
  2197. {
  2198. return true;
  2199. }
  2200. }
  2201. return false;
  2202. }
// LinearScan::EndDeadLifetimes
// Look for lifetimes that are ending here, and retire them.
void
LinearScan::EndDeadLifetimes(IR::Instr *instr)
{
    Lifetime * deadLifetime;

    if (this->SkipNumberedInstr(instr))
    {
        return;
    }

    // Retire all active lifetime ending at this instruction
    // (activeLiveranges is kept ordered so the head ends earliest).
    while (!this->activeLiveranges->Empty() && this->activeLiveranges->Head()->end <= instr->GetNumber())
    {
        deadLifetime = this->activeLiveranges->Head();
        deadLifetime->defList.Clear();
        deadLifetime->useList.Clear();

        this->activeLiveranges->RemoveHead();
        RegNum reg = deadLifetime->reg;
        this->activeRegs.Clear(reg);
        this->regContent[reg] = nullptr;
        this->secondChanceRegs.Clear(reg);
        // Keep the per-class in-use counters in sync with activeRegs.
        if (RegTypes[reg] == TyMachReg)
        {
            this->intRegUsedCount--;
        }
        else
        {
            Assert(RegTypes[reg] == TyFloat64);
            this->floatRegUsedCount--;
        }
    }

    // Look for spilled lifetimes which end here such that we can make their stack slot
    // available for stack-packing.
    while (!this->stackPackInUseLiveRanges->Empty() && this->stackPackInUseLiveRanges->Head()->end <= instr->GetNumber())
    {
        deadLifetime = this->stackPackInUseLiveRanges->Head();
        deadLifetime->defList.Clear();
        deadLifetime->useList.Clear();

        this->stackPackInUseLiveRanges->RemoveHead();
        if (!deadLifetime->cantStackPack)
        {
            Assert(deadLifetime->spillStackSlot);
            deadLifetime->spillStackSlot->lastUse = deadLifetime->end;
            this->stackSlotsFreeList->Push(deadLifetime->spillStackSlot);
        }
    }
}
void
LinearScan::EndDeadOpHelperLifetimes(IR::Instr * instr)
{
    // Retire helper-spilled lifetimes that end at or before this instruction.
    // Since they die inside the helper block, no save/restore around the
    // block is needed for them.
    if (this->SkipNumberedInstr(instr))
    {
        return;
    }

    while (!this->opHelperSpilledLiveranges->Empty() &&
           this->opHelperSpilledLiveranges->Head()->end <= instr->GetNumber())
    {
        Lifetime * deadLifetime;

        // The lifetime doesn't extend beyond the helper block
        // No need to save and restore around the helper block
        Assert(this->IsInHelperBlock());

        deadLifetime = this->opHelperSpilledLiveranges->Head();
        this->opHelperSpilledLiveranges->RemoveHead();
        if (!deadLifetime->cantOpHelperSpill)
        {
            this->opHelperSpilledRegs.Clear(deadLifetime->reg);
        }
        // Reset the helper-spill flags so the lifetime is in a clean state.
        deadLifetime->isOpHelperSpilled = false;
        deadLifetime->cantOpHelperSpill = false;
        deadLifetime->isOpHelperSpillAsArg = false;
    }
}
// LinearScan::AllocateNewLifetimes
// Look for lifetimes coming live, and allocate a register for them.
void
LinearScan::AllocateNewLifetimes(IR::Instr *instr)
{
    if (this->SkipNumberedInstr(instr))
    {
        return;
    }

    // Try to catch:
    //      x = MOV y(r1)
    // where y's lifetime just ended and x's lifetime is starting.
    // If so, set r1 as a preferred register for x, which may allow peeps to remove the MOV
    if (instr->GetSrc1() && instr->GetSrc1()->IsRegOpnd() && LowererMD::IsAssign(instr) && instr->GetDst() && instr->GetDst()->IsRegOpnd() && instr->GetDst()->AsRegOpnd()->m_sym)
    {
        IR::RegOpnd *src = instr->GetSrc1()->AsRegOpnd();
        StackSym *srcSym = src->m_sym;
        // If src is a physReg ref, or src's lifetime ends here.
        if (!srcSym || srcSym->scratch.linearScan.lifetime->end == instr->GetNumber())
        {
            Lifetime *dstLifetime = instr->GetDst()->AsRegOpnd()->m_sym->scratch.linearScan.lifetime;
            if (dstLifetime)
            {
                dstLifetime->regPreference.Set(src->GetReg());
            }
        }
    }

    // Look for starting lifetimes (lifetimeList is ordered by start).
    while (!this->lifetimeList->Empty() && this->lifetimeList->Head()->start <= instr->GetNumber())
    {
        // We're at the start of a new live range
        Lifetime * newLifetime = this->lifetimeList->Head();
        newLifetime->lastAllocationStart = instr->GetNumber();

        this->lifetimeList->RemoveHead();
        if (newLifetime->dontAllocate)
        {
            // Lifetime spilled before beginning allocation (e.g., a lifetime known to span
            // multiple EH regions.) Do the work of spilling it now without adding it to the list.
            this->SpillLiveRange(newLifetime);
            continue;
        }

        RegNum reg;
        if (newLifetime->reg == RegNOREG)
        {
            if (newLifetime->isDeadStore)
            {
                // No uses, let's not waste a reg.
                newLifetime->isSpilled = true;
                continue;
            }
            reg = this->FindReg(newLifetime, nullptr);
        }
        else
        {
            // This lifetime is already assigned a physical register. Make
            // sure that register is available by calling SpillReg
            reg = newLifetime->reg;

            // If we're in a helper block, the physical register we're trying to ensure is available might get helper
            // spilled. Don't allow that if this lifetime's end lies beyond the end of the helper block because
            // spill code assumes that this physical register isn't active at the end of the helper block when it tries
            // to restore it. So we'd have to really spill the lifetime then anyway.
            this->SpillReg(reg, IsInHelperBlock() ? (newLifetime->end > currentOpHelperBlock->opHelperEndInstr->GetNumber()) : false);
            newLifetime->cantSpill = true;
        }

        // If we did get a register for this lifetime, add it to the active set.
        if (newLifetime->isSpilled == false)
        {
            this->AssignActiveReg(newLifetime, reg);
        }
    }
}
// LinearScan::FindReg
// Look for an available register. If one isn't available, spill something.
// Note that the newLifetime passed in could be the one we end up spilling.
RegNum
LinearScan::FindReg(Lifetime *newLifetime, IR::RegOpnd *regOpnd, bool force)
{
    BVIndex regIndex = BVInvalidIndex;
    IRType type;
    bool tryCallerSavedRegs = false;
    BitVector callerSavedAvailableBv;

    // Determine the type (and hence the register class) being allocated:
    // from the lifetime's type flags when allocating for a lifetime,
    // otherwise from the operand's own type (temp-reg allocation).
    if (newLifetime)
    {
        if (newLifetime->isFloat)
        {
            type = TyFloat64;
        }
        else if (newLifetime->isSimd128F4)
        {
            type = TySimd128F4;
        }
        else if (newLifetime->isSimd128I4)
        {
            type = TySimd128I4;
        }
        else if (newLifetime->isSimd128I8)
        {
            type = TySimd128I8;
        }
        else if (newLifetime->isSimd128I16)
        {
            type = TySimd128I16;
        }
        else if (newLifetime->isSimd128U4)
        {
            type = TySimd128U4;
        }
        else if (newLifetime->isSimd128U8)
        {
            type = TySimd128U8;
        }
        else if (newLifetime->isSimd128U16)
        {
            type = TySimd128U16;
        }
        else if (newLifetime->isSimd128B4)
        {
            type = TySimd128B4;
        }
        else if (newLifetime->isSimd128B8)
        {
            type = TySimd128B8;
        }
        else if (newLifetime->isSimd128B16)
        {
            type = TySimd128B16;
        }
        else if (newLifetime->isSimd128D2)
        {
            type = TySimd128D2;
        }
        else
        {
            type = TyMachReg;
        }
    }
    else
    {
        Assert(regOpnd);
        type = regOpnd->GetType();
    }

    if (this->RegsAvailable(type))
    {
        BitVector regsBv;
        // Start from all registers that are not currently active, not used
        // by this instruction, and not reserved for call setup.
        regsBv.Copy(this->activeRegs);
        regsBv.Or(this->instrUseRegs);
        regsBv.Or(this->callSetupRegs);
        regsBv.ComplimentAll();

        if (newLifetime)
        {
            if (this->IsInHelperBlock())
            {
                if (newLifetime->end >= this->HelperBlockEndInstrNumber())
                {
                    // this lifetime goes beyond the helper function
                    // We need to exclude the helper spilled register as well.
                    regsBv.Minus(this->opHelperSpilledRegs);
                }
            }

            // Narrow to the correct register class for this lifetime.
            if (newLifetime->isFloat || newLifetime->isSimd128())
            {
#ifdef _M_IX86
                Assert(AutoSystemInfo::Data.SSE2Available());
#endif
                regsBv.And(this->floatRegs);
            }
            else
            {
                regsBv.And(this->int32Regs);
                regsBv = this->linearScanMD.FilterRegIntSizeConstraints(regsBv, newLifetime->intUsageBv);
            }

            if (newLifetime->isLiveAcrossCalls)
            {
                // Try to find a callee saved regs
                BitVector regsBvTemp = regsBv;
                regsBvTemp.And(this->calleeSavedRegs);

                regIndex = GetPreferencedRegIndex(newLifetime, regsBvTemp);

                if (regIndex == BVInvalidIndex)
                {
                    if (!newLifetime->isLiveAcrossUserCalls)
                    {
                        // No callee saved regs is found and the lifetime only across helper
                        // calls, we can also use a caller saved regs to make use of the
                        // save and restore around helper blocks
                        regIndex = GetPreferencedRegIndex(newLifetime, regsBv);
                    }
                    else
                    {
                        // If we can't find a callee-saved reg, we can try using a caller-saved reg instead.
                        // We'll hopefully get a few loads enregistered that way before we get to the call.
                        tryCallerSavedRegs = true;
                        callerSavedAvailableBv = regsBv;
                    }
                }
            }
            else
            {
                regIndex = GetPreferencedRegIndex(newLifetime, regsBv);
            }
        }
        else
        {
            AssertMsg(regOpnd, "Need a lifetime or a regOpnd passed in");
            // Temp-reg allocation path: narrow to the operand's register class.
            if (regOpnd->IsFloat() || regOpnd->IsSimd128())
            {
#ifdef _M_IX86
                Assert(AutoSystemInfo::Data.SSE2Available());
#endif
                regsBv.And(this->floatRegs);
            }
            else
            {
                regsBv.And(this->int32Regs);
                BitVector regSizeBv;
                regSizeBv.ClearAll();
                regSizeBv.Set(TySize[regOpnd->GetType()]);

                regsBv = this->linearScanMD.FilterRegIntSizeConstraints(regsBv, regSizeBv);
            }

            if (!this->tempRegs.IsEmpty())
            {
                // avoid the temp reg that we have loaded in this basic block
                BitVector regsBvTemp = regsBv;
                regsBvTemp.Minus(this->tempRegs);
                regIndex = regsBvTemp.GetPrevBit();
            }

            if (regIndex == BVInvalidIndex)
            {
                // allocate a temp reg from the other end of the bit vector so that it can
                // keep live for longer.
                regIndex = regsBv.GetPrevBit();
            }
        }
    }

    RegNum reg;

    if (BVInvalidIndex != regIndex)
    {
        Assert(regIndex < RegNumCount);
        reg = (RegNum)regIndex;
    }
    else
    {
        // No free register found; fall back to the caller-saved set (if we
        // deferred it above) or spill some lifetime to free one up.
        if (tryCallerSavedRegs)
        {
            Assert(newLifetime);
            regIndex = GetPreferencedRegIndex(newLifetime, callerSavedAvailableBv);
            if (BVInvalidIndex == regIndex)
            {
                tryCallerSavedRegs = false;
            }
        }

        bool dontSpillCurrent = tryCallerSavedRegs;

        if (newLifetime && newLifetime->isSpilled)
        {
            // Second chance allocation
            dontSpillCurrent = true;
        }

        // Can't find reg, spill some lifetime.
        reg = this->Spill(newLifetime, regOpnd, dontSpillCurrent, force);

        if (reg == RegNOREG && tryCallerSavedRegs)
        {
            Assert(BVInvalidIndex != regIndex);
            reg = (RegNum)regIndex;
            // This lifetime will get spilled once we get to the call it overlaps with (note: this may not be true
            // for second chance allocation as we may be beyond the call). Mark it as a cheap spill to give up the register
            // if some lifetime not overlapping with a call needs it.
            newLifetime->isCheapSpill = true;
        }
    }

    // We always have to return a reg if we are allocate temp reg.
    // If we are allocating for a new lifetime, we return RegNOREG, if we
    // spill the new lifetime
    Assert(newLifetime != nullptr || (reg != RegNOREG && reg < RegNumCount));
    return reg;
}
  2549. BVIndex
  2550. LinearScan::GetPreferencedRegIndex(Lifetime *lifetime, BitVector freeRegs)
  2551. {
  2552. BitVector freePreferencedRegs = freeRegs;
  2553. freePreferencedRegs.And(lifetime->regPreference);
  2554. // If one of the preferred register (if any) is available, use it. Otherwise, just pick one of free register.
  2555. if (!freePreferencedRegs.IsEmpty())
  2556. {
  2557. return freePreferencedRegs.GetNextBit();
  2558. }
  2559. else
  2560. {
  2561. return freeRegs.GetNextBit();
  2562. }
  2563. }
// LinearScan::Spill
// We need to spill something to free up a reg. If the newLifetime
// passed in isn't NULL, we can spill this one instead of an active one.
RegNum
LinearScan::Spill(Lifetime *newLifetime, IR::RegOpnd *regOpnd, bool dontSpillCurrent, bool force)
{
    uint minSpillCost = (uint)-1;

    Assert(!newLifetime || !regOpnd || newLifetime->isFloat == (regOpnd->GetType() == TyMachDouble) || newLifetime->isSimd128() == (regOpnd->IsSimd128()));
    bool isFloatReg;
    BitVector intUsageBV;
    bool needCalleeSaved;

    // For now, we just spill the lifetime with the lowest spill cost.
    if (newLifetime)
    {
        isFloatReg = newLifetime->isFloat || newLifetime->isSimd128();

        // When not forced, newLifetime's own cost is the bar an active
        // lifetime must beat to be spilled instead of newLifetime.
        if (!force)
        {
            minSpillCost = this->GetSpillCost(newLifetime);
        }
        intUsageBV = newLifetime->intUsageBv;
        needCalleeSaved = newLifetime->isLiveAcrossUserCalls;
    }
    else
    {
        needCalleeSaved = false;
        if (regOpnd->IsFloat() || regOpnd->IsSimd128())
        {
            isFloatReg = true;
        }
        else
        {
            // Filter for int reg size constraints
            isFloatReg = false;
            intUsageBV.ClearAll();
            intUsageBV.Set(TySize[regOpnd->GetType()]);
        }
    }

    // Scan the active lifetimes for the cheapest spill candidate that is of
    // the right register class, spillable, and satisfies the constraints.
    SList<Lifetime *>::EditingIterator candidate;
    FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, this->activeLiveranges, iter)
    {
        uint spillCost = this->GetSpillCost(lifetime);
        if (spillCost < minSpillCost                    &&
            this->instrUseRegs.Test(lifetime->reg) == false &&
            (lifetime->isFloat || lifetime->isSimd128()) == isFloatReg &&
            !lifetime->cantSpill                        &&
            (!needCalleeSaved || this->calleeSavedRegs.Test(lifetime->reg)) &&
            this->linearScanMD.FitRegIntSizeConstraints(lifetime->reg, intUsageBV))
        {
            minSpillCost = spillCost;
            candidate = iter;
        }
    } NEXT_SLIST_ENTRY_EDITING;

    AssertMsg(newLifetime || candidate.IsValid(), "Didn't find anything to spill?!?");

    Lifetime * spilledRange;
    if (candidate.IsValid())
    {
        // Spill the cheapest active lifetime and release its register.
        spilledRange = candidate.Data();
        candidate.RemoveCurrent();

        this->activeRegs.Clear(spilledRange->reg);
        if (spilledRange->isFloat || spilledRange->isSimd128())
        {
            this->floatRegUsedCount--;
        }
        else
        {
            this->intRegUsedCount--;
        }
    }
    else if (dontSpillCurrent)
    {
        // No active candidate and we were told not to spill newLifetime.
        return RegNOREG;
    }
    else
    {
        // Nothing cheaper than newLifetime itself: spill it.
        spilledRange = newLifetime;
    }

    return this->SpillLiveRange(spilledRange);
}
// LinearScan::SpillLiveRange
// Spill the given lifetime: mark it spilled, allocate a stack slot if
// needed, insert the spill stores/reloads, and clear the register-content
// tracking. Returns the register the lifetime previously occupied.
RegNum
LinearScan::SpillLiveRange(Lifetime * spilledRange, IR::Instr *insertionInstr)
{
    Assert(!spilledRange->isSpilled);

    RegNum reg = spilledRange->reg;
    StackSym *sym = spilledRange->sym;

    spilledRange->isSpilled = true;
    spilledRange->isCheapSpill = false;
    spilledRange->reg = RegNOREG;

    // Don't allocate stack space for const, we always reload them. (For debug mode, allocate on the stack)
    if (!sym->IsAllocated() && (!sym->IsConst() || IsSymNonTempLocalVar(sym)))
    {
        this->AllocateStackSpace(spilledRange);
    }

    // No need to insert loads or stores if there are no uses.
    if (!spilledRange->isDeadStore)
    {
        // In the debug mode, don't do insertstore for this stacksym, as we want to retain the IsConst for the sym,
        // and later we are going to find the reg for it.
        if (!IsSymNonTempLocalVar(sym))
        {
            this->InsertStores(spilledRange, reg, insertionInstr);
        }

        // Reloads are only needed inside loops, or for consts (which are
        // always reloaded instead of stored to the stack).
        if (this->IsInLoop() || sym->IsConst())
        {
            this->InsertLoads(sym, reg);
        }
        else
        {
            sym->scratch.linearScan.lifetime->useList.Clear();
        }

        // Adjust useCount in case of second chance allocation
        spilledRange->ApplyUseCountAdjust(this->curLoop);
    }

    Assert(reg == RegNOREG || spilledRange->reg == RegNOREG || this->regContent[reg] == spilledRange);
    if (spilledRange->isSecondChanceAllocated)
    {
        Assert(reg == RegNOREG || spilledRange->reg == RegNOREG
            || (this->regContent[reg] == spilledRange && this->secondChanceRegs.Test(reg)));
        this->secondChanceRegs.Clear(reg);
        spilledRange->isSecondChanceAllocated = false;
    }
    else
    {
        Assert(!this->secondChanceRegs.Test(reg));
    }
    this->regContent[reg] = nullptr;

#if DBG_DUMP
    if (PHASE_TRACE(Js::LinearScanPhase, this->func))
    {
        Output::Print(_u("**** Spill: "));
        sym->Dump();
        Output::Print(_u("(%S)"), RegNames[reg]);
        Output::Print(_u(" SpillCount:%d  Length:%d   Cost:%d\n"),
            spilledRange->useCount, spilledRange->end - spilledRange->start, this->GetSpillCost(spilledRange));
    }
#endif
    return reg;
}
// LinearScan::SpillReg
// Spill a given register.
// When the lifetime occupying the register started before the current helper
// block (and forceSpill is false), a cheaper save/restore around the helper
// block is used instead of a real spill. Updates activeRegs and the int/float
// register usage counters.
void
LinearScan::SpillReg(RegNum reg, bool forceSpill /* = false */)
{
    Lifetime *spilledRange = nullptr;
    if (activeRegs.Test(reg))
    {
        spilledRange = LinearScan::RemoveRegLiveRange(activeLiveranges, reg);
    }
    else if (opHelperSpilledRegs.Test(reg) && forceSpill)
    {
        // If a lifetime that was assigned this register was helper spilled,
        // really spill it now.
        Assert(IsInHelperBlock());

        // Look for the liverange in opHelperSpilledLiveranges instead of
        // activeLiveranges.
        FOREACH_SLIST_ENTRY(Lifetime *, lifetime, opHelperSpilledLiveranges)
        {
            if (lifetime->reg == reg)
            {
                spilledRange = lifetime;
                break;
            }
        } NEXT_SLIST_ENTRY;

        Assert(spilledRange);
        Assert(!spilledRange->cantSpill);
        RemoveOpHelperSpilled(spilledRange);
        // Really spill this liverange below.
    }
    else
    {
        // Register isn't assigned to any lifetime; nothing to spill.
        return;
    }

    AnalysisAssert(spilledRange);
    Assert(!spilledRange->cantSpill);

    if ((!forceSpill) && this->IsInHelperBlock() && spilledRange->start < this->HelperBlockStartInstrNumber() && !spilledRange->cantOpHelperSpill)
    {
        // if the lifetime starts before the helper block, we can do save and restore
        // around the helper block instead.
        this->AddOpHelperSpilled(spilledRange);
    }
    else
    {
        if (spilledRange->cantOpHelperSpill)
        {
            // We're really spilling this liverange, so take it out of the helper-spilled liveranges
            // to avoid confusion (see Win8 313433).
            Assert(!spilledRange->isOpHelperSpilled);
            spilledRange->cantOpHelperSpill = false;
            this->opHelperSpilledLiveranges->Remove(spilledRange);
        }
        this->SpillLiveRange(spilledRange);
    }

    // Keep the active set and per-class usage counters in sync with the spill.
    if (this->activeRegs.Test(reg))
    {
        this->activeRegs.Clear(reg);
        if (RegTypes[reg] == TyMachReg)
        {
            this->intRegUsedCount--;
        }
        else
        {
            Assert(RegTypes[reg] == TyFloat64);
            this->floatRegUsedCount--;
        }
    }
}
// LinearScan::ProcessEHRegionBoundary
// Spill every active lifetime at an EH region boundary (entry into a try via
// TryCatch, or exit via Leave): register state cannot be carried across
// exception control flow.
void
LinearScan::ProcessEHRegionBoundary(IR::Instr * instr)
{
    Assert(instr->IsBranchInstr());
    Assert(instr->m_opcode != Js::OpCode::TryFinally); // finallys are not supported for optimization yet.

    if (instr->m_opcode != Js::OpCode::TryCatch && instr->m_opcode != Js::OpCode::Leave)
    {
        return;
    }

    // Spill everything upon entry to the try region and upon a Leave.
    // For a Leave the spill stores must be placed before the branch itself.
    IR::Instr* insertionInstr = instr->m_opcode != Js::OpCode::Leave ? instr : instr->m_prev;
    FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, this->activeLiveranges, iter)
    {
        this->activeRegs.Clear(lifetime->reg);
        if (lifetime->isFloat || lifetime->isSimd128())
        {
            this->floatRegUsedCount--;
        }
        else
        {
            this->intRegUsedCount--;
        }
        this->SpillLiveRange(lifetime, insertionInstr);
        iter.RemoveCurrent();
    }
    NEXT_SLIST_ENTRY_EDITING;
}
// LinearScan::AllocateStackSpace
// Give the spilled lifetime a stack home. Byte-code locals reuse their
// pre-assigned slot; otherwise try to reuse a slot from the stack-packing free
// list whose previous occupant's lifetime has ended, and fall back to
// allocating a fresh slot from the function's frame.
void
LinearScan::AllocateStackSpace(Lifetime *spilledRange)
{
    if (spilledRange->sym->IsAllocated())
    {
        return;
    }

    uint32 size = TySize[spilledRange->sym->GetType()];

    // For the bytecodereg syms instead of spilling to the any other location lets re-use the already created slot.
    if (IsSymNonTempLocalVar(spilledRange->sym))
    {
        Js::RegSlot slotIndex = spilledRange->sym->GetByteCodeRegSlot();

        // Get the offset which is already allocated from this local, and always spill on that location.
        spilledRange->sym->m_offset = GetStackOffset(slotIndex);
        spilledRange->sym->m_allocated = true;

        return;
    }

    StackSlot * newStackSlot = nullptr;

    if (!PHASE_OFF(Js::StackPackPhase, this->func) && !this->func->IsJitInDebugMode() && !spilledRange->cantStackPack)
    {
        // Search for a free stack slot to re-use
        FOREACH_SLIST_ENTRY_EDITING(StackSlot *, slot, this->stackSlotsFreeList, iter)
        {
            // Heuristic: should we use '==' or '>=' for the size?
            // A slot is reusable when its previous occupant died before this lifetime began.
            if (slot->lastUse <= spilledRange->start && slot->size >= size)
            {
                StackSym *spilledSym = spilledRange->sym;

                Assert(!spilledSym->IsArgSlotSym() && !spilledSym->IsParamSlotSym());
                Assert(!spilledSym->IsAllocated());
                spilledRange->spillStackSlot = slot;
                spilledSym->m_offset = slot->offset;
                spilledSym->m_allocated = true;

                iter.RemoveCurrent();

#if DBG_DUMP
                if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::StackPackPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
                {
                    spilledSym->Dump();
                    Output::Print(_u(" *** stack packed at offset %3d (%4d - %4d)\n"), spilledSym->m_offset, spilledRange->start, spilledRange->end);
                }
#endif
                break;
            }
        } NEXT_SLIST_ENTRY_EDITING;

        // No reusable slot found: create a fresh pack slot; its frame offset is
        // filled in below once the sym is stack-allocated.
        if (spilledRange->spillStackSlot == nullptr)
        {
            newStackSlot = JitAnewStruct(this->tempAlloc, StackSlot);
            newStackSlot->size = size;
            spilledRange->spillStackSlot = newStackSlot;
        }
        this->AddLiveRange(this->stackPackInUseLiveRanges, spilledRange);
    }

    if (!spilledRange->sym->IsAllocated())
    {
        // Can't stack pack, allocate new stack slot.
        StackSym *spilledSym = spilledRange->sym;
        this->func->StackAllocate(spilledSym, size);

#if DBG_DUMP
        if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::StackPackPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
        {
            spilledSym->Dump();
            Output::Print(_u(" at offset %3d (%4d - %4d)\n"), spilledSym->m_offset, spilledRange->start, spilledRange->end);
        }
#endif
        if (newStackSlot != nullptr)
        {
            newStackSlot->offset = spilledSym->m_offset;
        }
    }
}
  2866. // LinearScan::InsertLoads
  2867. void
  2868. LinearScan::InsertLoads(StackSym *sym, RegNum reg)
  2869. {
  2870. Lifetime *lifetime = sym->scratch.linearScan.lifetime;
  2871. FOREACH_SLIST_ENTRY(IR::Instr *, instr, &lifetime->useList)
  2872. {
  2873. this->InsertLoad(instr, sym, reg);
  2874. } NEXT_SLIST_ENTRY;
  2875. lifetime->useList.Clear();
  2876. }
// LinearScan::InsertStores
// Insert the stores needed to give the spilled lifetime a valid stack value.
// Single-def syms store at their def; multi-def syms either store at every def
// seen so far or, when cheaper, store once at the current point and rely on
// compensation code for the uncovered paths.
void
LinearScan::InsertStores(Lifetime *lifetime, RegNum reg, IR::Instr *insertionInstr)
{
    StackSym *sym = lifetime->sym;

    // If single def, use instrDef on the symbol
    if (sym->m_isSingleDef)
    {
        IR::Instr * defInstr = sym->m_instrDef;
        if ((!sym->IsConst() && defInstr->GetDst()->AsRegOpnd()->GetReg() == RegNOREG)
            || this->secondChanceRegs.Test(reg))
        {
            // This can happen if we were trying to allocate this lifetime,
            // and it is getting spilled right away.
            // For second chance allocations, this should have already been handled.
            return;
        }
        this->InsertStore(defInstr, defInstr->FindRegDef(sym)->m_sym, reg);
        return;
    }

    if (reg == RegNOREG)
    {
        return;
    }

    uint localStoreCost = LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr));

    // Is it cheaper to spill all the defs we've seen so far or just insert a store at the current point?
    // (With unoptimized try/catch we must always spill at the defs.)
    if ((this->func->HasTry() && !this->func->DoOptimizeTryCatch()) || localStoreCost >= lifetime->allDefsCost)
    {
        // Insert a store for each def point we've seen so far
        FOREACH_SLIST_ENTRY(IR::Instr *, instr, &(lifetime->defList))
        {
            if (instr->GetDst()->AsRegOpnd()->GetReg() != RegNOREG)
            {
                IR::RegOpnd *regOpnd = instr->FindRegDef(sym);

                // Note that reg may not be equal to regOpnd->GetReg() if the lifetime has been re-allocated since we've seen this def
                this->InsertStore(instr, regOpnd->m_sym, regOpnd->GetReg());
            }
        } NEXT_SLIST_ENTRY;

        lifetime->defList.Clear();
        lifetime->allDefsCost = 0;
        lifetime->needsStoreCompensation = false;
    }
    else if (!lifetime->defList.Empty())
    {
        // Insert a def right here at the current instr, and then we'll use compensation code for paths not covered by this def.
        if (!insertionInstr)
        {
            insertionInstr = this->currentInstr->m_prev;
        }

        this->InsertStore(insertionInstr, sym, reg);
        if (this->IsInLoop())
        {
            RecordLoopUse(lifetime, reg);
        }
        // We now need to insert all store compensations when needed, unless we spill all the defs later on.
        lifetime->needsStoreCompensation = true;
    }
}
  2935. // LinearScan::InsertStore
  2936. void
  2937. LinearScan::InsertStore(IR::Instr *instr, StackSym *sym, RegNum reg)
  2938. {
  2939. // Win8 Bug 391484: We cannot use regOpnd->GetType() here because it
  2940. // can lead to truncation as downstream usage of the register might be of a size
  2941. // greater than the current use. Using RegTypes[reg] works only if the stack slot size
  2942. // is always at least of size MachPtr
  2943. // In the debug mode, if the current sym belongs to the byte code locals, then do not unlink this instruction, as we need to have this instruction to be there
  2944. // to produce the write-through instruction.
  2945. if (sym->IsConst() && !IsSymNonTempLocalVar(sym))
  2946. {
  2947. // Let's just delete the def. We'll reload the constant.
  2948. // We can't just delete the instruction however since the
  2949. // uses will look at the def to get the value.
  2950. // Make sure it wasn't already deleted.
  2951. if (sym->m_instrDef->m_next)
  2952. {
  2953. sym->m_instrDef->Unlink();
  2954. sym->m_instrDef->m_next = nullptr;
  2955. }
  2956. return;
  2957. }
  2958. Assert(reg != RegNOREG);
  2959. IRType type = sym->GetType();
  2960. if (sym->IsSimd128())
  2961. {
  2962. type = sym->GetType();
  2963. }
  2964. IR::Instr *store = IR::Instr::New(LowererMD::GetStoreOp(type),
  2965. IR::SymOpnd::New(sym, type, this->func),
  2966. IR::RegOpnd::New(sym, reg, type, this->func), this->func);
  2967. instr->InsertAfter(store);
  2968. store->CopyNumber(instr);
  2969. this->linearScanMD.LegalizeDef(store);
  2970. #if DBG_DUMP
  2971. if (PHASE_TRACE(Js::LinearScanPhase, this->func))
  2972. {
  2973. Output::Print(_u("...Inserting store for "));
  2974. sym->Dump();
  2975. Output::Print(_u(" Cost:%d\n"), this->GetSpillCost(sym->scratch.linearScan.lifetime));
  2976. }
  2977. #endif
  2978. }
  2979. // LinearScan::InsertLoad
  2980. void
  2981. LinearScan::InsertLoad(IR::Instr *instr, StackSym *sym, RegNum reg)
  2982. {
  2983. IR::Opnd *src;
  2984. // The size of loads and stores to memory need to match. See the comment
  2985. // around type in InsertStore above.
  2986. IRType type = sym->GetType();
  2987. if (sym->IsSimd128())
  2988. {
  2989. type = sym->GetType();
  2990. }
  2991. bool isMovSDZero = false;
  2992. if (sym->IsConst())
  2993. {
  2994. Assert(!sym->IsAllocated() || IsSymNonTempLocalVar(sym));
  2995. // For an intConst, reload the constant instead of using the stack.
  2996. // Create a new StackSym to make sure the old sym remains singleDef
  2997. src = sym->GetConstOpnd();
  2998. if (!src)
  2999. {
  3000. isMovSDZero = true;
  3001. sym = StackSym::New(sym->GetType(), this->func);
  3002. sym->m_isConst = true;
  3003. sym->m_isFltConst = true;
  3004. }
  3005. else
  3006. {
  3007. StackSym * oldSym = sym;
  3008. sym = StackSym::New(TyVar, this->func);
  3009. sym->m_isConst = true;
  3010. sym->m_isIntConst = oldSym->m_isIntConst;
  3011. sym->m_isInt64Const = oldSym->m_isInt64Const;
  3012. sym->m_isTaggableIntConst = sym->m_isTaggableIntConst;
  3013. }
  3014. }
  3015. else
  3016. {
  3017. src = IR::SymOpnd::New(sym, type, this->func);
  3018. }
  3019. IR::Instr * load;
  3020. #if defined(_M_IX86) || defined(_M_X64)
  3021. if (isMovSDZero)
  3022. {
  3023. load = IR::Instr::New(Js::OpCode::MOVSD_ZERO,
  3024. IR::RegOpnd::New(sym, reg, type, this->func), this->func);
  3025. instr->InsertBefore(load);
  3026. }
  3027. else
  3028. #endif
  3029. {
  3030. load = Lowerer::InsertMove(IR::RegOpnd::New(sym, reg, type, this->func), src, instr);
  3031. }
  3032. load->CopyNumber(instr);
  3033. if (!isMovSDZero)
  3034. {
  3035. this->linearScanMD.LegalizeUse(load, src);
  3036. }
  3037. this->RecordLoopUse(nullptr, reg);
  3038. #if DBG_DUMP
  3039. if (PHASE_TRACE(Js::LinearScanPhase, this->func))
  3040. {
  3041. Output::Print(_u("...Inserting load for "));
  3042. sym->Dump();
  3043. if (sym->scratch.linearScan.lifetime)
  3044. {
  3045. Output::Print(_u(" Cost:%d\n"), this->GetSpillCost(sym->scratch.linearScan.lifetime));
  3046. }
  3047. else
  3048. {
  3049. Output::Print(_u("\n"));
  3050. }
  3051. }
  3052. #endif
  3053. }
// Return the attribute bits (RA_* flags) for the given register.
uint8
LinearScan::GetRegAttribs(RegNum reg)
{
    return RegAttribs[reg];
}
// Return the IR type class (e.g. TyMachReg / TyFloat64) of the given register.
IRType
LinearScan::GetRegType(RegNum reg)
{
    return RegTypes[reg];
}
// True if the register is callee-saved (preserved across calls) per its attribute bits.
bool
LinearScan::IsCalleeSaved(RegNum reg)
{
    return (RegAttribs[reg] & RA_CALLEESAVE) != 0;
}
  3069. bool
  3070. LinearScan::IsCallerSaved(RegNum reg) const
  3071. {
  3072. return !LinearScan::IsCalleeSaved(reg) && LinearScan::IsAllocatable(reg);
  3073. }
// True if the register may be handed out by the allocator: not flagged
// RA_DONTALLOCATE and permitted by the machine-dependent policy.
bool
LinearScan::IsAllocatable(RegNum reg) const
{
    return !(RegAttribs[reg] & RA_DONTALLOCATE) && this->linearScanMD.IsAllocatable(reg, this->func);
}
// LinearScan::KillImplicitRegs
// Handle registers implicitly clobbered by the given instruction: temp regs at
// block boundaries, the IMUL high-result register on x86/x64, and all active
// caller-saved registers at (non-bailout) call sites.
void
LinearScan::KillImplicitRegs(IR::Instr *instr)
{
    if (instr->IsLabelInstr() || instr->IsBranchInstr())
    {
        // Note: need to clear these for branch as well because this info isn't recorded for second chance
        // allocation on branch boundaries
        this->tempRegs.ClearAll();
    }

#if defined(_M_IX86) || defined(_M_X64)
    // IMUL implicitly defines the high-result register; force it free.
    if (instr->m_opcode == Js::OpCode::IMUL)
    {
        this->SpillReg(LowererMDArch::GetRegIMulHighDestLower());
        this->tempRegs.Clear(LowererMDArch::GetRegIMulHighDestLower());

        this->RecordLoopUse(nullptr, LowererMDArch::GetRegIMulHighDestLower());
        return;
    }
#endif

    this->TrackInlineeArgLifetimes(instr);

    // Don't care about kills on bailout calls as we are going to exit anyways
    // Also, for bailout scenarios we have already handled the inlinee frame spills
    Assert(LowererMD::IsCall(instr) || !instr->HasBailOutInfo());
    if (!LowererMD::IsCall(instr) || instr->HasBailOutInfo())
    {
        return;
    }

    if (this->currentBlock->inlineeStack.Count() > 0)
    {
        this->SpillInlineeArgs(instr);
    }
    else
    {
        // Not inside an inlinee: tie the call to the outer function.
        instr->m_func = this->func;
    }

    //
    // Spill caller-saved registers that are active.
    //
    BitVector deadRegs;
    deadRegs.Copy(this->activeRegs);
    deadRegs.And(this->callerSavedRegs);
    FOREACH_BITSET_IN_UNITBV(reg, deadRegs, BitVector)
    {
        this->SpillReg((RegNum)reg);
    }
    NEXT_BITSET_IN_UNITBV;
    // Only callee-saved temp regs survive the call.
    this->tempRegs.And(this->calleeSavedRegs);

    if (callSetupRegs.Count())
    {
        callSetupRegs.ClearAll();
    }

    // The call clobbers all caller-saved regs in every enclosing loop.
    Loop *loop = this->curLoop;
    while (loop)
    {
        loop->regAlloc.regUseBv.Or(this->callerSavedRegs);
        loop = loop->parent;
    }
}
//
// Before a call, all inlinee frame syms need to be spilled to a pre-defined location
//
void LinearScan::SpillInlineeArgs(IR::Instr* instr)
{
    Assert(this->currentBlock->inlineeStack.Count() > 0);

    // Ensure the call instruction is tied to the current inlinee
    // This is used in the encoder to encode mapping or return offset and InlineeFrameRecord
    instr->m_func = this->currentBlock->inlineeStack.Last();

    // Track registers already stored so each register is spilled at most once,
    // even when several frame lifetimes share it.
    BitVector spilledRegs;
    this->currentBlock->inlineeFrameLifetimes.Map([&](uint i, Lifetime* lifetime){
        Assert(lifetime->start < instr->GetNumber() && lifetime->end >= instr->GetNumber());
        Assert(!lifetime->sym->IsConst());
        Assert(this->currentBlock->inlineeFrameSyms.ContainsKey(lifetime->sym->m_id));
        if (lifetime->reg == RegNOREG)
        {
            return;
        }

        StackSym* sym = lifetime->sym;
        if (!lifetime->isSpilled && !lifetime->isOpHelperSpilled &&
            (!lifetime->isDeadStore && (lifetime->sym->m_isSingleDef || !lifetime->defList.Empty()))) // if deflist is empty - we have already spilled at all defs - and the value is current
        {
            if (!spilledRegs.Test(lifetime->reg))
            {
                spilledRegs.Set(lifetime->reg);
                if (!sym->IsAllocated())
                {
                    this->AllocateStackSpace(lifetime);
                }

                this->RecordLoopUse(lifetime, lifetime->reg);
                Assert(this->regContent[lifetime->reg] != nullptr);
                if (sym->m_isSingleDef)
                {
                    // For a single def - we do not track the deflist - the def below will remove the single def on the sym
                    // hence, we need to track the original def.
                    Assert(lifetime->defList.Empty());
                    lifetime->defList.Prepend(sym->m_instrDef);
                }

                this->InsertStore(instr->m_prev, sym, lifetime->reg);
            }
        }
    });
}
// LinearScan::TrackInlineeArgLifetimes
// Maintain the per-block inlinee bookkeeping: on InlineeStart push the inlinee
// func and register its frame syms/lifetimes (ref-counted, since a sym can back
// multiple arguments); on InlineeEnd pop them back off in reverse order.
void LinearScan::TrackInlineeArgLifetimes(IR::Instr* instr)
{
    if (instr->m_opcode == Js::OpCode::InlineeStart)
    {
        if (instr->m_func->m_hasInlineArgsOpt)
        {
            instr->m_func->frameInfo->IterateSyms([=](StackSym* sym){
                Lifetime* lifetime = sym->scratch.linearScan.lifetime;
                this->currentBlock->inlineeFrameLifetimes.Add(lifetime);

                // We need to maintain as count because the same sym can be used for multiple arguments
                uint* value;
                if (this->currentBlock->inlineeFrameSyms.TryGetReference(sym->m_id, &value))
                {
                    *value = *value + 1;
                }
                else
                {
                    this->currentBlock->inlineeFrameSyms.Add(sym->m_id, 1);
                }
            });
            if (this->currentBlock->inlineeStack.Count() > 0)
            {
                // Nested inlinees must be pushed in depth order.
                Assert(instr->m_func->inlineDepth == this->currentBlock->inlineeStack.Last()->inlineDepth + 1);
            }
            this->currentBlock->inlineeStack.Add(instr->m_func);
        }
        else
        {
            Assert(this->currentBlock->inlineeStack.Count() == 0);
        }
    }
    else if (instr->m_opcode == Js::OpCode::InlineeEnd)
    {
        if (instr->m_func->m_hasInlineArgsOpt)
        {
            instr->m_func->frameInfo->AllocateRecord(this->func, instr->m_func->GetJITFunctionBody()->GetAddr());

            if(this->currentBlock->inlineeStack.Count() == 0)
            {
                // Block is unreachable
                Assert(this->currentBlock->inlineeFrameLifetimes.Count() == 0);
                Assert(this->currentBlock->inlineeFrameSyms.Count() == 0);
            }
            else
            {
                Func* func = this->currentBlock->inlineeStack.RemoveAtEnd();
                Assert(func == instr->m_func);

                // Unwind this inlinee's syms in reverse, decrementing the
                // per-sym ref count and removing entries that reach zero.
                instr->m_func->frameInfo->IterateSyms([=](StackSym* sym){
                    Lifetime* lifetime = this->currentBlock->inlineeFrameLifetimes.RemoveAtEnd();

                    uint* value;
                    if (this->currentBlock->inlineeFrameSyms.TryGetReference(sym->m_id, &value))
                    {
                        *value = *value - 1;
                        if (*value == 0)
                        {
                            bool removed = this->currentBlock->inlineeFrameSyms.Remove(sym->m_id);
                            Assert(removed);
                        }
                    }
                    else
                    {
                        Assert(UNREACHED);
                    }
                    Assert(sym->scratch.linearScan.lifetime == lifetime);
                }, /*reverse*/ true);
            }
        }
    }
}
// GetSpillCost
// The spill cost is trying to estimate the usage density of the lifetime,
// by dividing the useCount by the lifetime length.
// Additional adjustments: account for the store(s) a spill would require,
// clamp the length to the current loop, exclude helper-block length, and
// de-prioritize second-chance / cheap-spill / const lifetimes.
uint
LinearScan::GetSpillCost(Lifetime *lifetime)
{
    uint useCount = lifetime->GetRegionUseCount(this->curLoop);
    uint spillCost;

    // Get local spill cost.  Ignore helper blocks as we'll also need compensation on the main path.
    uint localUseCost = LinearScan::GetUseSpillCost(this->loopNest, false);

    if (lifetime->reg && !lifetime->isSpilled)
    {
        // If it is in a reg, we'll need a store
        // (charge the cheaper of storing at all defs vs. one store here).
        if (localUseCost >= lifetime->allDefsCost)
        {
            useCount += lifetime->allDefsCost;
        }
        else
        {
            useCount += localUseCost;
        }

        if (this->curLoop && !lifetime->sym->IsConst()
            && this->curLoop->regAlloc.liveOnBackEdgeSyms->Test(lifetime->sym->m_id))
        {
            // If we spill here, we'll need to insert a load at the bottom of the loop
            // (it would be nice to be able to check is was in a reg at the top of the loop)...
            useCount += localUseCost;
        }
    }

    // When comparing 2 lifetimes, we don't really care about the actual length of the lifetimes.
    // What matters is how much longer will they use the register.
    const uint start = currentInstr->GetNumber();
    uint end = max(start, lifetime->end);
    uint lifetimeTotalOpHelperFullVisitedLength = lifetime->totalOpHelperLengthByEnd;

    if (this->curLoop && this->curLoop->regAlloc.loopEnd < end && !PHASE_OFF(Js::RegionUseCountPhase, this->func))
    {
        // Clamp the horizon to the end of the current loop.
        end = this->curLoop->regAlloc.loopEnd;
        lifetimeTotalOpHelperFullVisitedLength = this->curLoop->regAlloc.helperLength;
    }
    uint length = end - start + 1;

    // Exclude helper block length since helper block paths are typically infrequently taken paths and not as important
    const uint totalOpHelperVisitedLength = this->totalOpHelperFullVisitedLength + CurrentOpHelperVisitedLength(currentInstr);
    Assert(lifetimeTotalOpHelperFullVisitedLength >= totalOpHelperVisitedLength);
    const uint lifetimeHelperLength = lifetimeTotalOpHelperFullVisitedLength - totalOpHelperVisitedLength;
    Assert(length >= lifetimeHelperLength);
    length -= lifetimeHelperLength;
    if(length == 0)
    {
        length = 1;
    }

    // Add a base length so that the difference between a length of 1 and a length of 2 is not so large
#ifdef _M_X64
    length += 64;
#else
    length += 16;
#endif

    // Fixed-point density: scale useCount so integer division keeps precision.
    spillCost = (useCount << 13) / length;

    if (lifetime->isSecondChanceAllocated)
    {
        // Second chance allocation have additional overhead, so de-prioritize them
        // Note: could use more tuning...
        spillCost = spillCost * 4/5;
    }

    if (lifetime->isCheapSpill)
    {
        // This lifetime will get spilled eventually, so lower the spill cost to favor other lifetimes
        // Note: could use more tuning...
        spillCost /= 2;
    }

    if (lifetime->sym->IsConst())
    {
        // Constants are cheap to rematerialize; spill them preferentially.
        spillCost = spillCost / 16;
    }

    return spillCost;
}
// LinearScan::RemoveDeadStores
// If instr defines a register whose lifetime is a known dead store and the
// instruction has no side effects, remove the instruction entirely.
// Returns true when the instruction was removed.
bool
LinearScan::RemoveDeadStores(IR::Instr *instr)
{
    IR::Opnd *dst = instr->GetDst();

    if (dst && dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym && !dst->AsRegOpnd()->m_isCallArg)
    {
        IR::RegOpnd *regOpnd = dst->AsRegOpnd();
        Lifetime * lifetime = regOpnd->m_sym->scratch.linearScan.lifetime;

        if (lifetime->isDeadStore)
        {
            if (Lowerer::HasSideEffects(instr) == false)
            {
                // If all the bailouts referencing this arg are removed (which can happen in some scenarios)
                //- then it's OK to remove this def of the arg
                DebugOnly(this->func->allowRemoveBailOutArgInstr = true);

                // We are removing this instruction, end dead life time now
                this->EndDeadLifetimes(instr);
                instr->Remove();

                DebugOnly(this->func->allowRemoveBailOutArgInstr = false);
                return true;
            }
        }
    }

    return false;
}
// LinearScan::AssignActiveReg
// Bind a register to a live (non-spilled) lifetime: mark the register used and
// active, bump the int/float usage counter, add the lifetime to the active
// list, and drop any temp-reg claim on the register.
void
LinearScan::AssignActiveReg(Lifetime * lifetime, RegNum reg)
{
    Assert(!this->activeRegs.Test(reg));
    Assert(!lifetime->isSpilled);
    Assert(lifetime->reg == RegNOREG || lifetime->reg == reg);
    this->func->m_regsUsed.Set(reg);
    lifetime->reg = reg;
    this->activeRegs.Set(reg);
    if (lifetime->isFloat || lifetime->isSimd128())
    {
        this->floatRegUsedCount++;
    }
    else
    {
        this->intRegUsedCount++;
    }
    this->AddToActive(lifetime);

    this->tempRegs.Clear(reg);
}
// LinearScan::AssignTempReg
// Hand a scratch register to a spilled lifetime for a single local use,
// recording which lifetime currently owns the temp so it can be reused
// by GetAssignedTempReg.
void
LinearScan::AssignTempReg(Lifetime * lifetime, RegNum reg)
{
    Assert(reg > RegNOREG && reg < RegNumCount);
    Assert(!this->activeRegs.Test(reg));
    Assert(lifetime->isSpilled);
    this->func->m_regsUsed.Set(reg);
    lifetime->reg = reg;
    this->tempRegs.Set(reg);
    __analysis_assume(reg > 0 && reg < RegNumCount);
    this->tempRegLifetimes[reg] = lifetime;

    this->RecordLoopUse(nullptr, reg);
}
// LinearScan::GetAssignedTempReg
// If the lifetime still owns the temp register it was last given and that
// register satisfies the size constraints for the requested type, reuse it;
// otherwise release the temp and return RegNOREG so a new one is picked.
RegNum
LinearScan::GetAssignedTempReg(Lifetime * lifetime, IRType type)
{
    if (this->tempRegs.Test(lifetime->reg) && this->tempRegLifetimes[lifetime->reg] == lifetime)
    {
        if (this->linearScanMD.FitRegIntSizeConstraints(lifetime->reg, type))
        {
            this->RecordLoopUse(nullptr, lifetime->reg);
            return lifetime->reg;
        }
        else
        {
            // Free this temp, we'll need to find another one.
            this->tempRegs.Clear(lifetime->reg);
            lifetime->reg = RegNOREG;
        }
    }
    return RegNOREG;
}
  3399. uint
  3400. LinearScan::GetUseSpillCost(uint loopNest, BOOL isInHelperBlock)
  3401. {
  3402. if (isInHelperBlock)
  3403. {
  3404. // Helper block uses are not as important.
  3405. return 0;
  3406. }
  3407. else if (loopNest < 6)
  3408. {
  3409. return (1 << (loopNest * 3));
  3410. }
  3411. else
  3412. {
  3413. // Slow growth for deep nest to avoid overflow
  3414. return (1 << (5 * 3)) * (loopNest-5);
  3415. }
  3416. }
// LinearScan::ProcessSecondChanceBoundary (branch)
// At a branch, record the current register contents and insert second-chance
// compensation for loop back-edges. Helper-spilled lifetimes are folded back
// into regContent first since SaveRegContent() wouldn't see them.
void
LinearScan::ProcessSecondChanceBoundary(IR::BranchInstr *branchInstr)
{
    if (this->func->HasTry() && !this->func->DoOptimizeTryCatch())
    {
        return;
    }

    if (this->currentOpHelperBlock && this->currentOpHelperBlock->opHelperEndInstr == branchInstr)
    {
        // Lifetimes opHelperSpilled won't get recorded by SaveRegContent().  Do it here.
        FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->opHelperSpilledLiveranges)
        {
            if (!lifetime->cantOpHelperSpill)
            {
                if (lifetime->isSecondChanceAllocated)
                {
                    this->secondChanceRegs.Set(lifetime->reg);
                }
                this->regContent[lifetime->reg] = lifetime;
            }
        } NEXT_SLIST_ENTRY;
    }

    if(branchInstr->IsMultiBranch())
    {
        // Process every distinct target of the multi-branch.
        IR::MultiBranchInstr * multiBranchInstr = branchInstr->AsMultiBrInstr();

        multiBranchInstr->MapUniqueMultiBrLabels([=](IR::LabelInstr * branchLabel) -> void
        {
            this->ProcessSecondChanceBoundaryHelper(branchInstr, branchLabel);
        });
    }
    else
    {
        IR::LabelInstr *branchLabel = branchInstr->GetTarget();
        this->ProcessSecondChanceBoundaryHelper(branchInstr, branchLabel);
    }

    this->SaveRegContent(branchInstr);
}
// LinearScan::ProcessSecondChanceBoundaryHelper
// Handle one branch->label edge for second-chance allocation. A back-edge gets
// compensation code against the loop-top register contents; a forward branch
// records its register contents and propagates the lowered basic block info to
// the target label.
void
LinearScan::ProcessSecondChanceBoundaryHelper(IR::BranchInstr *branchInstr, IR::LabelInstr *branchLabel)
{
    if (branchInstr->GetNumber() > branchLabel->GetNumber())
    {
        // Loop back-edge
        Assert(branchLabel->m_isLoopTop);
        branchInstr->m_regContent = nullptr;
        this->InsertSecondChanceCompensation(this->regContent, branchLabel->m_regContent, branchInstr, branchLabel);
    }
    else
    {
        // Forward branch
        this->SaveRegContent(branchInstr);
        if (this->curLoop)
        {
            this->curLoop->regAlloc.exitRegContentList->Prepend(branchInstr->m_regContent);
        }
        if (!branchLabel->m_loweredBasicBlock)
        {
            if (branchInstr->IsConditional() || branchInstr->IsMultiBranch())
            {
                // Clone with deep copy
                branchLabel->m_loweredBasicBlock = this->currentBlock->Clone(this->tempAlloc);
            }
            else
            {
                // If the unconditional branch leads to the end of the function for the scenario of a bailout - we do not want to
                // copy the lowered inlinee info.
                IR::Instr* nextInstr = branchLabel->GetNextRealInstr();
                if (nextInstr->m_opcode != Js::OpCode::FunctionExit &&
                    nextInstr->m_opcode != Js::OpCode::BailOutStackRestore &&
                    this->currentBlock->HasData())
                {
                    // Clone with shallow copy
                    branchLabel->m_loweredBasicBlock = this->currentBlock;
                }
            }
        }
        else
        {
            // The lowerer sometimes generates unreachable blocks that would have empty data.
            Assert(!currentBlock->HasData() || branchLabel->m_loweredBasicBlock->Equals(this->currentBlock));
        }
    }
}
// LinearScan::ProcessSecondChanceBoundary (label)
// At a label, snapshot register contents (keeping a copy for loop tops) and
// insert compensation code on every incoming forward branch so its register
// assignments match the state at this label. Back-edges are handled when the
// branch itself is processed.
void
LinearScan::ProcessSecondChanceBoundary(IR::LabelInstr *labelInstr)
{
    if (this->func->HasTry() && !this->func->DoOptimizeTryCatch())
    {
        return;
    }

    if (labelInstr->m_isLoopTop)
    {
        this->SaveRegContent(labelInstr);
        // Keep a separate copy of the loop-top register contents for back-edge compensation.
        Lifetime ** regContent = AnewArrayZ(this->tempAlloc, Lifetime *, RegNumCount);
        js_memcpy_s(regContent, (RegNumCount * sizeof(Lifetime *)), this->regContent, sizeof(this->regContent));
        this->curLoop->regAlloc.loopTopRegContent = regContent;
    }

    FOREACH_SLISTCOUNTED_ENTRY_EDITING(IR::BranchInstr *, branchInstr, &labelInstr->labelRefs, iter)
    {
        if (branchInstr->m_isAirlock)
        {
            // This branch was just inserted... Skip it.
            continue;
        }

        Assert(branchInstr->GetNumber() && labelInstr->GetNumber());
        if (branchInstr->GetNumber() < labelInstr->GetNumber())
        {
            // Normal branch
            this->InsertSecondChanceCompensation(branchInstr->m_regContent, this->regContent, branchInstr, labelInstr);
        }
        else
        {
            // Loop back-edge
            Assert(labelInstr->m_isLoopTop);
        }
    } NEXT_SLISTCOUNTED_ENTRY_EDITING;
}
  3534. IR::Instr * LinearScan::EnsureAirlock(bool needsAirlock, bool *pHasAirlock, IR::Instr *insertionInstr,
  3535. IR::Instr **pInsertionStartInstr, IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr)
  3536. {
  3537. if (needsAirlock && !(*pHasAirlock))
  3538. {
  3539. // We need an extra block for the compensation code.
  3540. insertionInstr = this->InsertAirlock(branchInstr, labelInstr);
  3541. *pInsertionStartInstr = insertionInstr->m_prev;
  3542. *pHasAirlock = true;
  3543. }
  3544. return insertionInstr;
  3545. }
  3546. bool LinearScan::NeedsLoopBackEdgeCompensation(Lifetime *lifetime, IR::LabelInstr *loopTopLabel)
  3547. {
  3548. if (!lifetime)
  3549. {
  3550. return false;
  3551. }
  3552. if (lifetime->sym->IsConst())
  3553. {
  3554. return false;
  3555. }
  3556. // No need if lifetime begins in the loop
  3557. if (lifetime->start > loopTopLabel->GetNumber())
  3558. {
  3559. return false;
  3560. }
  3561. // Only needed if lifetime is live on the back-edge, and the register is used inside the loop, or the lifetime extends
  3562. // beyond the loop (and compensation out of the loop may use this reg)...
  3563. if (!loopTopLabel->GetLoop()->regAlloc.liveOnBackEdgeSyms->Test(lifetime->sym->m_id)
  3564. || (this->currentInstr->GetNumber() >= lifetime->end && !this->curLoop->regAlloc.symRegUseBv->Test(lifetime->sym->m_id)))
  3565. {
  3566. return false;
  3567. }
  3568. return true;
  3569. }
void
LinearScan::InsertSecondChanceCompensation(Lifetime ** branchRegContent, Lifetime **labelRegContent,
    IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr)
{
    // Insert compensation code on the edge branchInstr -> labelInstr so that the
    // register assignments recorded at the branch (branchRegContent) are morphed
    // into the assignments expected at the label (labelRegContent).
    // An "airlock" block is created on the edge when code can't be placed at the
    // branch directly (conditional branch / multi-branch / fallthrough conflicts).
    IR::Instr *prevInstr = branchInstr->GetPrevRealInstrOrLabel();
    bool needsAirlock = branchInstr->IsConditional() || (prevInstr->IsBranchInstr() && prevInstr->AsBranchInstr()->IsConditional()) || branchInstr->IsMultiBranch();
    bool hasAirlock = false;
    IR::Instr *insertionInstr = branchInstr;
    IR::Instr *insertionStartInstr = branchInstr->m_prev;
    // For loop back-edge, we want to keep the insertionStartInstr before the branch as spill need to happen on all paths
    // Pass a dummy instr address to airLockBlock insertion code.
    BitVector thrashedRegs(0);
    // Back-edge case is detected by the caller passing the allocator's live
    // regContent as the branch content.
    bool isLoopBackEdge = (this->regContent == branchRegContent);
    Lifetime * tmpRegContent[RegNumCount];
    Lifetime **regContent = this->regContent;
    if (isLoopBackEdge)
    {
        Loop *loop = labelInstr->GetLoop();
        // Work on a stack copy: AvoidCompensationConflicts may swap entries (XCHG)
        // and we must not disturb the allocator's live regContent.
        js_memcpy_s(&tmpRegContent, (RegNumCount * sizeof(Lifetime *)), this->regContent, sizeof(this->regContent));
        branchRegContent = tmpRegContent;
        regContent = tmpRegContent;
#if defined(_M_IX86) || defined(_M_X64)
        // Insert XCHG to avoid some conflicts for int regs
        // Note: no XCHG on ARM or SSE2. We could however use 3 XOR on ARM...
        this->AvoidCompensationConflicts(labelInstr, branchInstr, labelRegContent, branchRegContent,
            &insertionInstr, &insertionStartInstr, needsAirlock, &hasAirlock);
#endif
        // Pass 1: only registers touched by second-chance allocation can mismatch
        // in the ways handled here.
        FOREACH_BITSET_IN_UNITBV(reg, this->secondChanceRegs, BitVector)
        {
            Lifetime *labelLifetime = labelRegContent[reg];
            Lifetime *lifetime = branchRegContent[reg];
            // 1. Insert Stores
            //      Lifetime starts before the loop
            //      Lifetime was re-allocated within the loop (i.e.: a load was most likely inserted)
            //      Lifetime is live on back-edge and has unsaved defs.
            if (lifetime && lifetime->start < labelInstr->GetNumber() && lifetime->lastAllocationStart > labelInstr->GetNumber()
                && (labelInstr->GetLoop()->regAlloc.liveOnBackEdgeSyms->Test(lifetime->sym->m_id))
                && !lifetime->defList.Empty())
            {
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                // If the lifetime was second chance allocated inside the loop, there might
                // be spilled loads of this symbol in the loop.  Insert the stores.
                // We don't need to do this if the lifetime was re-allocated before the loop.
                //
                // Note that reg may not be equal to lifetime->reg because of inserted XCHG...
                this->InsertStores(lifetime, lifetime->reg, insertionStartInstr);
            }
            if (lifetime == labelLifetime)
            {
                // Same lifetime on both sides: no move needed for this reg.
                continue;
            }
            // 2. MOV labelReg/MEM, branchReg
            //      Move current register to match content at the top of the loop
            if (this->NeedsLoopBackEdgeCompensation(lifetime, labelInstr))
            {
                // Mismatch, we need to insert compensation code
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                //         MOV ESI, EAX
                //         MOV EDI, ECX
                //         MOV ECX, ESI
                //         MOV EAX, EDI <<<
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    lifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
            // 2. MOV labelReg, MEM
            //      Lifetime was in a reg at the top of the loop but is spilled right now.
            if (labelLifetime && labelLifetime->isSpilled && !labelLifetime->sym->IsConst() && labelLifetime->end >= branchInstr->GetNumber())
            {
                if (!loop->regAlloc.liveOnBackEdgeSyms->Test(labelLifetime->sym->m_id))
                {
                    // Dead on the back-edge: the loop top's expectation doesn't matter.
                    continue;
                }
                if (this->ClearLoopExitIfRegUnused(labelLifetime, (RegNum)reg, branchInstr, loop))
                {
                    // Reload skipped; the reg was cleared from loop-exit snapshots instead.
                    continue;
                }
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    labelLifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
        } NEXT_BITSET_IN_UNITBV;
        // 3. MOV labelReg, MEM
        //      Finish up reloading lifetimes needed at the top.  #2 only handled secondChanceRegs.
        FOREACH_REG(reg)
        {
            // Handle lifetimes in a register at the top of the loop, but not currently.
            Lifetime *labelLifetime = labelRegContent[reg];
            if (labelLifetime && !labelLifetime->sym->IsConst() && labelLifetime != branchRegContent[reg] && !thrashedRegs.Test(reg)
                && (loop->regAlloc.liveOnBackEdgeSyms->Test(labelLifetime->sym->m_id)))
            {
                if (this->ClearLoopExitIfRegUnused(labelLifetime, (RegNum)reg, branchInstr, loop))
                {
                    continue;
                }
                // Mismatch, we need to insert compensation code
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    labelLifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
        } NEXT_REG;
        if (hasAirlock)
        {
            // Remember that an airlock exists; TryHoistLoad refuses to hoist past one.
            loop->regAlloc.hasAirLock = true;
        }
    }
    else
    {
        //
        // Non-loop-back-edge merge
        //
        FOREACH_REG(reg)
        {
            Lifetime *branchLifetime = branchRegContent[reg];
            Lifetime *lifetime = regContent[reg];
            if (lifetime == branchLifetime)
            {
                continue;
            }
            if (branchLifetime && branchLifetime->isSpilled && !branchLifetime->sym->IsConst() && branchLifetime->end > labelInstr->GetNumber())
            {
                // The lifetime was in a reg at the branch and is now spilled.  We need a store on this path.
                //
                //     MOV  MEM, branch_REG
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    branchLifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
            if (lifetime && !lifetime->sym->IsConst() && lifetime->start <= branchInstr->GetNumber())
            {
                //     MOV  label_REG, branch_REG / MEM
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    lifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
        } NEXT_REG;
    }
    if (hasAirlock)
    {
        // Fix opHelper on airlock label: an airlock marked as helper that flows
        // straight into a non-helper label must not be treated as helper code.
        if (insertionInstr->m_prev->IsLabelInstr() && insertionInstr->IsLabelInstr())
        {
            if (insertionInstr->m_prev->AsLabelInstr()->isOpHelper && !insertionInstr->AsLabelInstr()->isOpHelper)
            {
                insertionInstr->m_prev->AsLabelInstr()->isOpHelper = false;
            }
        }
    }
}
void
LinearScan::ReconcileRegContent(Lifetime ** branchRegContent, Lifetime **labelRegContent,
    IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr,
    Lifetime *lifetime, RegNum reg, BitVector *thrashedRegs, IR::Instr *insertionInstr, IR::Instr *insertionStartInstr)
{
    // Emit the moves/loads/stores that bring one mismatched register (reg) in line
    // between the branch-side and label-side register contents.  thrashedRegs
    // tracks registers already overwritten by compensation emitted earlier on this
    // edge, so later moves don't read a clobbered source.
    RegNum originalReg = RegNOREG;
    IRType type = RegTypes[reg];
    Assert(labelRegContent[reg] != branchRegContent[reg]);
    // Is `lifetime` the occupant of reg on the branch side (else: the label side)?
    bool matchBranchReg = (branchRegContent[reg] == lifetime);
    // Search the *other* side's contents for where this lifetime lived there.
    Lifetime **originalRegContent = (matchBranchReg ? labelRegContent : branchRegContent);
    bool isLoopBackEdge = (branchInstr->GetNumber() > labelInstr->GetNumber());
    if (lifetime->sym->IsConst())
    {
        // Constants can always be rematerialized; no compensation needed.
        return;
    }
    // Look if this lifetime was in a different register in the previous block.
    // Split the search in 2 to speed this up.
    if (type == TyMachReg)
    {
        FOREACH_INT_REG(regIter)
        {
            if (originalRegContent[regIter] == lifetime)
            {
                originalReg = regIter;
                break;
            }
        } NEXT_INT_REG;
    }
    else
    {
        Assert(type == TyFloat64 || IRType_IsSimd128(type));
        FOREACH_FLOAT_REG(regIter)
        {
            if (originalRegContent[regIter] == lifetime)
            {
                originalReg = regIter;
                break;
            }
        } NEXT_FLOAT_REG;
    }
    // Normalize to: branchReg = where the lifetime is at the branch,
    //               labelReg  = where the label expects it (either may be RegNOREG).
    RegNum branchReg, labelReg;
    if (matchBranchReg)
    {
        branchReg = reg;
        labelReg = originalReg;
    }
    else
    {
        branchReg = originalReg;
        labelReg = reg;
    }
    if (branchReg != RegNOREG && !thrashedRegs->Test(branchReg) && !lifetime->sym->IsConst())
    {
        // The branch-side register still holds a valid copy of the lifetime.
        Assert(branchRegContent[branchReg] == lifetime);
        if (labelReg != RegNOREG)
        {
            // MOV labelReg, branchReg
            Assert(labelRegContent[labelReg] == lifetime);
            IR::Instr *load = IR::Instr::New(LowererMD::GetLoadOp(type),
                IR::RegOpnd::New(lifetime->sym, labelReg, type, this->func),
                IR::RegOpnd::New(lifetime->sym, branchReg, type, this->func), this->func);
            insertionInstr->InsertBefore(load);
            load->CopyNumber(insertionInstr);
            // symRegUseBv needs to be set properly.  Unfortunately, we need to go conservative as we don't know
            // which allocation it was at the source of the branch.
            if (this->IsInLoop())
            {
                this->RecordLoopUse(lifetime, branchReg);
            }
            thrashedRegs->Set(labelReg);
        }
        else if (!lifetime->sym->IsSingleDef() && lifetime->needsStoreCompensation && !isLoopBackEdge)
        {
            // Label side keeps the lifetime in memory; flush pending defs to the home location.
            Assert(!lifetime->sym->IsConst());
            Assert(matchBranchReg);
            Assert(branchRegContent[branchReg] == lifetime);
            // MOV mem, branchReg
            this->InsertStores(lifetime, branchReg, insertionInstr->m_prev);
            // symRegUseBv needs to be set properly.  Unfortunately, we need to go conservative as we don't know
            // which allocation it was at the source of the branch.
            if (this->IsInLoop())
            {
                this->RecordLoopUse(lifetime, branchReg);
            }
        }
    }
    else if (labelReg != RegNOREG)
    {
        // The label expects the lifetime in labelReg, but the branch-side copy is
        // gone (spilled, or its register was thrashed by earlier compensation).
        Assert(labelRegContent[labelReg] == lifetime);
        Assert(lifetime->sym->IsConst() || lifetime->sym->IsAllocated());
        if (branchReg != RegNOREG && !lifetime->sym->IsSingleDef())
        {
            Assert(thrashedRegs->Test(branchReg));
            // We can't insert a "MOV labelReg, branchReg" at the insertion point
            // because branchReg was thrashed by a previous reload.
            // Look for that reload to see if we can insert before it.
            IR::Instr *newInsertionInstr = insertionInstr->m_prev;
            bool foundIt = false;
            while (LowererMD::IsAssign(newInsertionInstr))
            {
                IR::Opnd *dst = newInsertionInstr->GetDst();
                IR::Opnd *src = newInsertionInstr->GetSrc1();
                if (src->IsRegOpnd() && src->AsRegOpnd()->GetReg() == labelReg)
                {
                    // This uses labelReg, give up...
                    break;
                }
                if (dst->IsRegOpnd() && dst->AsRegOpnd()->GetReg() == branchReg)
                {
                    // Success!
                    foundIt = true;
                    break;
                }
                newInsertionInstr = newInsertionInstr->m_prev;
            }
            if (foundIt)
            {
                // MOV labelReg, branchReg  (placed before the instr that thrashed branchReg)
                Assert(labelRegContent[labelReg] == lifetime);
                IR::Instr *load = IR::Instr::New(LowererMD::GetLoadOp(type),
                    IR::RegOpnd::New(lifetime->sym, labelReg, type, this->func),
                    IR::RegOpnd::New(lifetime->sym, branchReg, type, this->func), this->func);
                newInsertionInstr->InsertBefore(load);
                load->CopyNumber(newInsertionInstr);
                // symRegUseBv needs to be set properly.  Unfortunately, we need to go conservative as we don't know
                // which allocation it was at the source of the branch.
                if (this->IsInLoop())
                {
                    this->RecordLoopUse(lifetime, branchReg);
                }
                thrashedRegs->Set(labelReg);
                return;
            }
            // Couldn't hoist the reg-to-reg move: store the value before the
            // compensation sequence so the reload below reads fresh memory.
            Assert(thrashedRegs->Test(branchReg));
            this->InsertStores(lifetime, branchReg, insertionStartInstr);
            // symRegUseBv needs to be set properly.  Unfortunately, we need to go conservative as we don't know
            // which allocation it was at the source of the branch.
            if (this->IsInLoop())
            {
                this->RecordLoopUse(lifetime, branchReg);
            }
        }
        // MOV labelReg, mem
        this->InsertLoad(insertionInstr, lifetime->sym, labelReg);
        thrashedRegs->Set(labelReg);
    }
    else if (!lifetime->sym->IsConst())
    {
        Assert(matchBranchReg);
        Assert(branchReg != RegNOREG);
        // The lifetime was in a register at the top of the loop, but we thrashed it with a previous reload...
        if (!lifetime->sym->IsSingleDef())
        {
            // Spill before the compensation sequence; the value lives in memory from here.
            this->InsertStores(lifetime, branchReg, insertionStartInstr);
        }
#if DBG_DUMP
        if (PHASE_TRACE(Js::SecondChancePhase, this->func))
        {
            Output::Print(_u("****** Spilling reg because of bad compensation code order: "));
            lifetime->sym->Dump();
            Output::Print(_u("\n"));
        }
#endif
    }
}
  3883. bool LinearScan::ClearLoopExitIfRegUnused(Lifetime *lifetime, RegNum reg, IR::BranchInstr *branchInstr, Loop *loop)
  3884. {
  3885. // If a lifetime was enregistered into the loop and then spilled, we need compensation at the bottom
  3886. // of the loop to reload the lifetime into that register.
  3887. // If that lifetime was spilled before it was ever used, we don't need the compensation code.
  3888. // We do however need to clear the regContent on any loop exit as the register will not
  3889. // be available anymore on that path.
  3890. // Note: If the lifetime was reloaded into the same register, we might clear the regContent unnecessarily...
  3891. if (!PHASE_OFF(Js::ClearRegLoopExitPhase, this->func))
  3892. {
  3893. return false;
  3894. }
  3895. if (!loop->regAlloc.symRegUseBv->Test(lifetime->sym->m_id) && !lifetime->needsStoreCompensation)
  3896. {
  3897. if (lifetime->end > branchInstr->GetNumber())
  3898. {
  3899. FOREACH_SLIST_ENTRY(Lifetime **, regContent, loop->regAlloc.exitRegContentList)
  3900. {
  3901. if (regContent[reg] == lifetime)
  3902. {
  3903. regContent[reg] = nullptr;
  3904. }
  3905. } NEXT_SLIST_ENTRY;
  3906. }
  3907. return true;
  3908. }
  3909. return false;
  3910. }
#if defined(_M_IX86) || defined(_M_X64)
void LinearScan::AvoidCompensationConflicts(IR::LabelInstr *labelInstr, IR::BranchInstr *branchInstr,
    Lifetime *labelRegContent[], Lifetime *branchRegContent[],
    IR::Instr **pInsertionInstr, IR::Instr **pInsertionStartInstr, bool needsAirlock, bool *pHasAirlock)
{
    bool changed = true;
    // Look for conflicts in the incoming compensation code:
    //      MOV ESI, EAX
    //      MOV ECX, ESI  << ESI was lost...
    // Using XCHG:
    //      XCHG ESI, EAX
    //      MOV ECX, EAX
    //
    // Note that we need to iterate while(changed) to catch all conflicts
    while(changed) {
        // conflictRegs[labelReg] records which branch reg will be moved into
        // labelReg by the upcoming compensation, i.e. whose old value would be lost.
        RegNum conflictRegs[RegNumCount] = {RegNOREG};
        changed = false;
        FOREACH_BITSET_IN_UNITBV(reg, this->secondChanceRegs, BitVector)
        {
            Lifetime *labelLifetime = labelRegContent[reg];
            Lifetime *lifetime = branchRegContent[reg];
            // We don't have an XCHG for SSE2 regs
            if (lifetime == labelLifetime || IRType_IsFloat(RegTypes[reg]))
            {
                continue;
            }
            if (this->NeedsLoopBackEdgeCompensation(lifetime, labelInstr))
            {
                // Mismatch, we need to insert compensation code
                *pInsertionInstr = this->EnsureAirlock(needsAirlock, pHasAirlock, *pInsertionInstr, pInsertionStartInstr, branchInstr, labelInstr);
                if (conflictRegs[reg] != RegNOREG)
                {
                    // Eliminate conflict with an XCHG: swap the two registers'
                    // values and swap their branchRegContent entries to match.
                    IR::RegOpnd *reg1 = IR::RegOpnd::New(branchRegContent[reg]->sym, (RegNum)reg, RegTypes[reg], this->func);
                    IR::RegOpnd *reg2 = IR::RegOpnd::New(branchRegContent[reg]->sym, conflictRegs[reg], RegTypes[reg], this->func);
                    IR::Instr *instrXchg = IR::Instr::New(Js::OpCode::XCHG, reg1, reg1, reg2, this->func);
                    (*pInsertionInstr)->InsertBefore(instrXchg);
                    instrXchg->CopyNumber(*pInsertionInstr);
                    Lifetime *tmpLifetime = branchRegContent[reg];
                    branchRegContent[reg] = branchRegContent[conflictRegs[reg]];
                    branchRegContent[conflictRegs[reg]] = tmpLifetime;
                    // Continue the analysis from the register the lifetime moved to.
                    reg = conflictRegs[reg];
                    // Re-run the outer loop: the swap may have created new conflicts.
                    changed = true;
                }
                // Find where the label side expects this (possibly swapped) lifetime...
                RegNum labelReg = RegNOREG;
                FOREACH_INT_REG(regIter)
                {
                    if (labelRegContent[regIter] == branchRegContent[reg])
                    {
                        labelReg = regIter;
                        break;
                    }
                } NEXT_INT_REG;
                if (labelReg != RegNOREG)
                {
                    // ...and mark that labelReg's current value will be clobbered by reg.
                    conflictRegs[labelReg] = (RegNum)reg;
                }
            }
        } NEXT_BITSET_IN_UNITBV;
    }
}
#endif
RegNum
LinearScan::SecondChanceAllocation(Lifetime *lifetime, bool force)
{
    // Try to move a spilled lifetime back into a register ("second chance").
    // Returns the register chosen, or RegNOREG when re-allocation is disabled,
    // unprofitable, or no register can be found.
    if (PHASE_OFF(Js::SecondChancePhase, this->func) || this->func->HasTry())
    {
        return RegNOREG;
    }
    // Don't start a second chance allocation from a helper block
    if (lifetime->dontAllocate || this->IsInHelperBlock() || lifetime->isDeadStore)
    {
        return RegNOREG;
    }
    Assert(lifetime->isSpilled);
    Assert(lifetime->sym->IsConst() || lifetime->sym->IsAllocated());
    RegNum oldReg = lifetime->reg;
    RegNum reg;
    if (lifetime->start == this->currentInstr->GetNumber() || lifetime->end == this->currentInstr->GetNumber())
    {
        // No point doing second chance if the lifetime ends here, or starts here (normal allocation would
        // have found a register if one is available).
        return RegNOREG;
    }
    if (lifetime->sym->IsConst())
    {
        // Can't second-chance allocate because we might have deleted the initial def instr, after
        // having set the reg content on a forward branch...
        return RegNOREG;
    }
    // Temporarily clear the reg and set the second-chance flag so FindReg evaluates
    // this lifetime as a fresh allocation; restore oldReg afterwards because
    // SecondChanceAllocateToReg still needs it (temp-reg bookkeeping).
    lifetime->reg = RegNOREG;
    lifetime->isSecondChanceAllocated = true;
    reg = this->FindReg(lifetime, nullptr, force);
    lifetime->reg = oldReg;
    if (reg == RegNOREG)
    {
        // No register found: undo the speculative flag.
        lifetime->isSecondChanceAllocated = false;
        return reg;
    }
    // Success!!  We're re-allocating this lifetime...
    this->SecondChanceAllocateToReg(lifetime, reg);
    return reg;
}
void LinearScan::SecondChanceAllocateToReg(Lifetime *lifetime, RegNum reg)
{
    // Commit a second-chance allocation: place the previously spilled lifetime
    // into reg and update the allocator's tracking state.
    RegNum oldReg = lifetime->reg;
    if (oldReg != RegNOREG && this->tempRegLifetimes[oldReg] == lifetime)
    {
        // The old register was only lent to this lifetime as a temp; release it.
        this->tempRegs.Clear(oldReg);
    }
    lifetime->isSpilled = false;
    lifetime->isSecondChanceAllocated = true;
    lifetime->lastAllocationStart = this->currentInstr->GetNumber();
    // Clear the stale reg before handing the lifetime to AssignActiveReg.
    lifetime->reg = RegNOREG;
    this->AssignActiveReg(lifetime, reg);
    this->secondChanceRegs.Set(reg);
    // Reset the use list for the fresh allocation.
    // NOTE(review): accessed via sym->scratch rather than `lifetime` directly —
    // presumably the same object; confirm against Lifetime/scratch setup.
    lifetime->sym->scratch.linearScan.lifetime->useList.Clear();
#if DBG_DUMP
    if (PHASE_TRACE(Js::SecondChancePhase, this->func))
    {
        Output::Print(_u("**** Second chance: "));
        lifetime->sym->Dump();
        Output::Print(_u("\t Reg: %S  "), RegNames[reg]);
        Output::Print(_u(" SpillCount:%d  Length:%d   Cost:%d  %S\n"),
            lifetime->useCount, lifetime->end - lifetime->start, this->GetSpillCost(lifetime),
            lifetime->isLiveAcrossCalls ? "LiveAcrossCalls" : "");
    }
#endif
}
  4040. IR::Instr *
  4041. LinearScan::InsertAirlock(IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr)
  4042. {
  4043. // Insert a new block on a flow arc:
  4044. // JEQ L1 JEQ L2
  4045. // ... => ...
  4046. // <fallthrough> JMP L1
  4047. // L1: L2:
  4048. // <new block>
  4049. // L1:
  4050. // An airlock is needed when we need to add code on a flow arc, and the code can't
  4051. // be added directly at the source or sink of that flow arc without impacting other
  4052. // code paths.
  4053. bool isOpHelper = labelInstr->isOpHelper;
  4054. if (!isOpHelper)
  4055. {
  4056. // Check if branch is coming from helper block.
  4057. IR::Instr *prevLabel = branchInstr->m_prev;
  4058. while (prevLabel && !prevLabel->IsLabelInstr())
  4059. {
  4060. prevLabel = prevLabel->m_prev;
  4061. }
  4062. if (prevLabel && prevLabel->AsLabelInstr()->isOpHelper)
  4063. {
  4064. isOpHelper = true;
  4065. }
  4066. }
  4067. IR::LabelInstr *airlockLabel = IR::LabelInstr::New(Js::OpCode::Label, this->func, isOpHelper);
  4068. airlockLabel->SetRegion(this->currentRegion);
  4069. #if DBG
  4070. if (isOpHelper)
  4071. {
  4072. if (branchInstr->m_isHelperToNonHelperBranch)
  4073. {
  4074. labelInstr->m_noHelperAssert = true;
  4075. }
  4076. if (labelInstr->isOpHelper && labelInstr->m_noHelperAssert)
  4077. {
  4078. airlockLabel->m_noHelperAssert = true;
  4079. }
  4080. }
  4081. #endif
  4082. bool replaced = branchInstr->ReplaceTarget(labelInstr, airlockLabel);
  4083. Assert(replaced);
  4084. IR::Instr * prevInstr = labelInstr->GetPrevRealInstrOrLabel();
  4085. if (prevInstr->HasFallThrough())
  4086. {
  4087. IR::BranchInstr *branchOverAirlock = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelInstr, this->func);
  4088. prevInstr->InsertAfter(branchOverAirlock);
  4089. branchOverAirlock->CopyNumber(prevInstr);
  4090. prevInstr = branchOverAirlock;
  4091. branchOverAirlock->m_isAirlock = true;
  4092. branchOverAirlock->m_regContent = nullptr;
  4093. }
  4094. prevInstr->InsertAfter(airlockLabel);
  4095. airlockLabel->CopyNumber(prevInstr);
  4096. prevInstr = labelInstr->GetPrevRealInstrOrLabel();
  4097. return labelInstr;
  4098. }
  4099. void
  4100. LinearScan::SaveRegContent(IR::Instr *instr)
  4101. {
  4102. bool isLabelLoopTop = false;
  4103. Lifetime ** regContent = AnewArrayZ(this->tempAlloc, Lifetime *, RegNumCount);
  4104. if (instr->IsBranchInstr())
  4105. {
  4106. instr->AsBranchInstr()->m_regContent = regContent;
  4107. }
  4108. else
  4109. {
  4110. Assert(instr->IsLabelInstr());
  4111. Assert(instr->AsLabelInstr()->m_isLoopTop);
  4112. instr->AsLabelInstr()->m_regContent = regContent;
  4113. isLabelLoopTop = true;
  4114. }
  4115. js_memcpy_s(regContent, (RegNumCount * sizeof(Lifetime *)), this->regContent, sizeof(this->regContent));
  4116. #if DBG
  4117. FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->activeLiveranges)
  4118. {
  4119. Assert(regContent[lifetime->reg] == lifetime);
  4120. } NEXT_SLIST_ENTRY;
  4121. #endif
  4122. }
  4123. bool LinearScan::RegsAvailable(IRType type)
  4124. {
  4125. if (IRType_IsFloat(type) || IRType_IsSimd128(type))
  4126. {
  4127. return (this->floatRegUsedCount < FLOAT_REG_COUNT);
  4128. }
  4129. else
  4130. {
  4131. return (this->intRegUsedCount < INT_REG_COUNT);
  4132. }
  4133. }
uint LinearScan::GetRemainingHelperLength(Lifetime *const lifetime)
{
    // Walk the helper block linked list starting from the next helper block until the end of the lifetime
    // and sum up how many helper-block instructions fall within that range.
    uint helperLength = 0;
    SList<OpHelperBlock>::Iterator it(opHelperBlockIter);
    Assert(it.IsValid());
    // Walk at least up to the current instruction, even if the lifetime ends earlier.
    const uint end = max(currentInstr->GetNumber(), lifetime->end);
    do
    {
        const OpHelperBlock &helper = it.Data();
        const uint helperStart = helper.opHelperLabel->GetNumber();
        if(helperStart > end)
        {
            // Helper blocks are ordered; nothing beyond `end` matters.
            break;
        }
        // Clamp the helper block's extent to the range of interest.
        const uint helperEnd = min(end, helper.opHelperEndInstr->GetNumber());
        helperLength += helperEnd - helperStart;
        if(helperEnd != helper.opHelperEndInstr->GetNumber() || !helper.opHelperEndInstr->IsLabelInstr())
        {
            // A helper block that ends at a label does not return to the function.  Since this helper block does not end
            // at a label, include the end instruction as well.
            ++helperLength;
        }
    } while(it.Next());
    return helperLength;
}
  4160. uint LinearScan::CurrentOpHelperVisitedLength(IR::Instr *const currentInstr) const
  4161. {
  4162. Assert(currentInstr);
  4163. if(!currentOpHelperBlock)
  4164. {
  4165. return 0;
  4166. }
  4167. // Consider the current instruction to have not yet been visited
  4168. Assert(currentInstr->GetNumber() >= currentOpHelperBlock->opHelperLabel->GetNumber());
  4169. return currentInstr->GetNumber() - currentOpHelperBlock->opHelperLabel->GetNumber();
  4170. }
IR::Instr * LinearScan::TryHoistLoad(IR::Instr *instr, Lifetime *lifetime)
{
    // If we are loading a lifetime into a register inside a loop, try to hoist that load outside the loop
    // if that register hasn't been used yet.  Returns the instruction before which
    // the load should be inserted (either `instr` itself, or the loop-top label
    // when hoisting succeeds).
    RegNum reg = lifetime->reg;
    IR::Instr *insertInstr = instr;
    if (PHASE_OFF(Js::RegHoistLoadsPhase, this->func))
    {
        return insertInstr;
    }
    if ((this->func->HasTry() && !this->func->DoOptimizeTryCatch()) || (this->currentRegion && this->currentRegion->GetType() != RegionTypeRoot))
    {
        // Don't hoist across EH region boundaries.
        return insertInstr;
    }
    // Register unused, and lifetime unused yet.
    if (this->IsInLoop() && !this->curLoop->regAlloc.regUseBv.Test(reg)
        && !this->curLoop->regAlloc.defdInLoopBv->Test(lifetime->sym->m_id)
        && !this->curLoop->regAlloc.symRegUseBv->Test(lifetime->sym->m_id)
        && !this->curLoop->regAlloc.hasAirLock)
    {
        // Let's hoist!
        insertInstr = insertInstr->m_prev;
        // Walk each instructions until the top of the loop looking for branches
        while (!insertInstr->IsLabelInstr() || !insertInstr->AsLabelInstr()->m_isLoopTop || !insertInstr->AsLabelInstr()->GetLoop()->IsDescendentOrSelf(this->curLoop))
        {
            if (insertInstr->IsBranchInstr() && insertInstr->AsBranchInstr()->m_regContent)
            {
                IR::BranchInstr *branchInstr = insertInstr->AsBranchInstr();
                // That lifetime might have been in another register coming into the loop, and spilled before used.
                // Clear the reg content.
                FOREACH_REG(regIter)
                {
                    if (branchInstr->m_regContent[regIter] == lifetime)
                    {
                        branchInstr->m_regContent[regIter] = nullptr;
                    }
                } NEXT_REG;
                // Set the regContent for that reg to the lifetime on this branch
                branchInstr->m_regContent[reg] = lifetime;
            }
            insertInstr = insertInstr->m_prev;
        }
        IR::LabelInstr *loopTopLabel = insertInstr->AsLabelInstr();
        // Set the reg content for the loop top correctly as well
        FOREACH_REG(regIter)
        {
            if (loopTopLabel->m_regContent[regIter] == lifetime)
            {
                loopTopLabel->m_regContent[regIter] = nullptr;
                this->curLoop->regAlloc.loopTopRegContent[regIter] = nullptr;
            }
        } NEXT_REG;
        Assert(loopTopLabel->GetLoop() == this->curLoop);
        loopTopLabel->m_regContent[reg] = lifetime;
        this->curLoop->regAlloc.loopTopRegContent[reg] = lifetime;
        // The hoisted load makes the register live throughout the loop.
        this->RecordLoopUse(lifetime, reg);
        IR::LabelInstr *loopLandingPad = nullptr;
        Assert(loopTopLabel->GetNumber() != Js::Constants::NoByteCodeOffset);
        // Insert load in landing pad.
        // Redirect branches to new landing pad.
        FOREACH_SLISTCOUNTED_ENTRY_EDITING(IR::BranchInstr *, branchInstr, &loopTopLabel->labelRefs, iter)
        {
            Assert(branchInstr->GetNumber() != Js::Constants::NoByteCodeOffset);
            // <= because the branch may be newly inserted and have the same instr number as the loop top...
            if (branchInstr->GetNumber() <= loopTopLabel->GetNumber())
            {
                // Forward (loop-entry) branch: route it through the landing pad so
                // it executes the hoisted load; back-edges keep targeting the loop top.
                if (!loopLandingPad)
                {
                    loopLandingPad = IR::LabelInstr::New(Js::OpCode::Label, this->func);
                    loopLandingPad->SetRegion(this->currentRegion);
                    loopTopLabel->InsertBefore(loopLandingPad);
                    loopLandingPad->CopyNumber(loopTopLabel);
                }
                branchInstr->ReplaceTarget(loopTopLabel, loopLandingPad);
            }
        } NEXT_SLISTCOUNTED_ENTRY_EDITING;
    }
    return insertInstr;
}
#if DBG_DUMP
void LinearScan::PrintStats() const
{
    // Debug-only statistics: walk the function backward counting instructions and
    // stack-symbol loads/stores, weighting each by loop-nesting depth
    // (GetUseSpillCost).  Helper-block instructions are excluded.
    uint loopNest = 0;
    uint storeCount = 0;
    uint loadCount = 0;
    uint wStoreCount = 0;
    uint wLoadCount = 0;
    uint instrCount = 0;
    bool isInHelper = false;
    // Backward walk: a loop tail is seen before its loop top, so loopNest tracks
    // the nesting depth of the instruction being visited.
    FOREACH_INSTR_IN_FUNC_BACKWARD(instr, this->func)
    {
        switch (instr->GetKind())
        {
        case IR::InstrKindPragma:
            continue;
        case IR::InstrKindBranch:
            if (instr->AsBranchInstr()->IsLoopTail(this->func))
            {
                loopNest++;
            }
            instrCount++;
            break;
        case IR::InstrKindLabel:
        case IR::InstrKindProfiledLabel:
            if (instr->AsLabelInstr()->m_isLoopTop)
            {
                Assert(loopNest);
                loopNest--;
            }
            // Walking backward, the label tells us whether the instructions that
            // follow it (already visited) were in a helper block.
            isInHelper = instr->AsLabelInstr()->isOpHelper;
            break;
        default:
        {
            Assert(instr->IsRealInstr());
            if (isInHelper)
            {
                // Helper-block code is cold; don't count it.
                continue;
            }
            // Count stores to allocated stack symbols (spill stores).
            IR::Opnd *dst = instr->GetDst();
            if (dst && dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsStackSym() && dst->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
            {
                storeCount++;
                wStoreCount += LinearScan::GetUseSpillCost(loopNest, false);
            }
            // Count loads from allocated stack symbols (spill loads) on either source.
            IR::Opnd *src1 = instr->GetSrc1();
            if (src1)
            {
                if (src1->IsSymOpnd() && src1->AsSymOpnd()->m_sym->IsStackSym() && src1->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
                {
                    loadCount++;
                    wLoadCount += LinearScan::GetUseSpillCost(loopNest, false);
                }
                IR::Opnd *src2 = instr->GetSrc2();
                if (src2 && src2->IsSymOpnd() && src2->AsSymOpnd()->m_sym->IsStackSym() && src2->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
                {
                    loadCount++;
                    wLoadCount += LinearScan::GetUseSpillCost(loopNest, false);
                }
            }
        }
        break;
        }
    } NEXT_INSTR_IN_FUNC_BACKWARD;
    Assert(loopNest == 0);
    this->func->DumpFullFunctionName();
    Output::SkipToColumn(45);
    Output::Print(_u("Instrs:%5d, Lds:%4d, Strs:%4d, WLds: %4d, WStrs: %4d, WRefs: %4d\n"),
        instrCount, loadCount, storeCount, wLoadCount, wStoreCount, wLoadCount+wStoreCount);
}
#endif
  4321. #ifdef _M_IX86
  4322. # if ENABLE_DEBUG_CONFIG_OPTIONS
  4323. IR::Instr * LinearScan::GetIncInsertionPoint(IR::Instr *instr)
  4324. {
  4325. // Make sure we don't insert an INC between an instr setting the condition code, and one using it.
  4326. IR::Instr *instrNext = instr;
  4327. while(!EncoderMD::UsesConditionCode(instrNext) && !EncoderMD::SetsConditionCode(instrNext))
  4328. {
  4329. if (instrNext->IsLabelInstr() || instrNext->IsExitInstr() || instrNext->IsBranchInstr())
  4330. {
  4331. break;
  4332. }
  4333. instrNext = instrNext->GetNextRealInstrOrLabel();
  4334. }
  4335. if (instrNext->IsLowered() && EncoderMD::UsesConditionCode(instrNext))
  4336. {
  4337. IR::Instr *instrPrev = instr->GetPrevRealInstrOrLabel();
  4338. while(!EncoderMD::SetsConditionCode(instrPrev))
  4339. {
  4340. instrPrev = instrPrev->GetPrevRealInstrOrLabel();
  4341. Assert(!instrPrev->IsLabelInstr());
  4342. }
  4343. return instrPrev;
  4344. }
  4345. return instr;
  4346. }
  4347. void LinearScan::DynamicStatsInstrument()
  4348. {
  4349. {
  4350. IR::Instr *firstInstr = this->func->m_headInstr;
  4351. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(this->func->GetJITFunctionBody()->GetCallCountStatsAddr(), TyUint32, this->func);
  4352. firstInstr->InsertAfter(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4353. }
  4354. FOREACH_INSTR_IN_FUNC(instr, this->func)
  4355. {
  4356. if (!instr->IsRealInstr() || !instr->IsLowered())
  4357. {
  4358. continue;
  4359. }
  4360. if (EncoderMD::UsesConditionCode(instr) && instr->GetPrevRealInstrOrLabel()->IsLabelInstr())
  4361. {
  4362. continue;
  4363. }
  4364. IR::Opnd *dst = instr->GetDst();
  4365. if (dst && dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsStackSym() && dst->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
  4366. {
  4367. IR::Instr *insertionInstr = this->GetIncInsertionPoint(instr);
  4368. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(this->func->GetJITFunctionBody()->GetRegAllocStoreCountAddr(), TyUint32, this->func);
  4369. insertionInstr->InsertBefore(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4370. }
  4371. IR::Opnd *src1 = instr->GetSrc1();
  4372. if (src1)
  4373. {
  4374. if (src1->IsSymOpnd() && src1->AsSymOpnd()->m_sym->IsStackSym() && src1->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
  4375. {
  4376. IR::Instr *insertionInstr = this->GetIncInsertionPoint(instr);
  4377. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(this->func->GetJITFunctionBody()->GetRegAllocStoreCountAddr(), TyUint32, this->func);
  4378. insertionInstr->InsertBefore(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4379. }
  4380. IR::Opnd *src2 = instr->GetSrc2();
  4381. if (src2 && src2->IsSymOpnd() && src2->AsSymOpnd()->m_sym->IsStackSym() && src2->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
  4382. {
  4383. IR::Instr *insertionInstr = this->GetIncInsertionPoint(instr);
  4384. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(this->func->GetJITFunctionBody()->GetRegAllocStoreCountAddr(), TyUint32, this->func);
  4385. insertionInstr->InsertBefore(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4386. }
  4387. }
  4388. } NEXT_INSTR_IN_FUNC;
  4389. }
  4390. # endif //ENABLE_DEBUG_CONFIG_OPTIONS
  4391. #endif // _M_IX86
  4392. IR::Instr* LinearScan::InsertMove(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr)
  4393. {
  4394. IR::Instr *instrPrev = insertBeforeInstr->m_prev;
  4395. IR::Instr *instrRet = Lowerer::InsertMove(dst, src, insertBeforeInstr);
  4396. for (IR::Instr *instr = instrPrev->m_next; instr != insertBeforeInstr; instr = instr->m_next)
  4397. {
  4398. instr->CopyNumber(insertBeforeInstr);
  4399. }
  4400. return instrRet;
  4401. }
  4402. IR::Instr* LinearScan::InsertLea(IR::RegOpnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr)
  4403. {
  4404. IR::Instr *instrPrev = insertBeforeInstr->m_prev;
  4405. IR::Instr *instrRet = Lowerer::InsertLea(dst, src, insertBeforeInstr, true);
  4406. for (IR::Instr *instr = instrPrev->m_next; instr != insertBeforeInstr; instr = instr->m_next)
  4407. {
  4408. instr->CopyNumber(insertBeforeInstr);
  4409. }
  4410. return instrRet;
  4411. }