LinearScan.cpp 173 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
7477847794780478147824783478447854786478747884789479047914792479347944795479647974798479948004801480248034804480548064807480848094810
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "SccLiveness.h"
#if DBG_DUMP || ENABLE_DEBUG_CONFIG_OPTIONS
// Narrow-char register names, one per RegNum, generated from the REGDAT
// X-macro table in RegList.h. Used only for dumps/tracing.
char const * const RegNames[RegNumCount] =
{
#define REGDAT(Name, ListName, ...) "" STRINGIZE(ListName) "",
#include "RegList.h"
#undef REGDAT
};

// Wide-char variants of the same names, for _u("") style output paths.
char16 const * const RegNamesW[RegNumCount] =
{
#define REGDAT(Name, ListName, ...) _u("") STRINGIZEW(ListName) _u(""),
#include "RegList.h"
#undef REGDAT
};
#endif

// Per-register attribute bits from the REGDAT table (queried via the
// IsAllocatable/IsCallerSaved/IsCalleeSaved-style predicates).
static const uint8 RegAttribs[RegNumCount] =
{
#define REGDAT(Name, ListName, Encode, Type, Attribs) Attribs,
#include "RegList.h"
#undef REGDAT
};

// IRType held by each register (e.g. TyMachReg vs TyFloat64); Init() uses this
// to partition registers into the int and float allocation pools.
extern const IRType RegTypes[RegNumCount] =
{
#define REGDAT(Name, ListName, Encode, Type, Attribs) Type,
#include "RegList.h"
#undef REGDAT
};
  33. LoweredBasicBlock* LoweredBasicBlock::New(JitArenaAllocator* allocator)
  34. {
  35. return JitAnew(allocator, LoweredBasicBlock, allocator);
  36. }
  37. void LoweredBasicBlock::Delete(JitArenaAllocator* allocator)
  38. {
  39. JitAdelete(allocator, this);
  40. }
  41. void LoweredBasicBlock::Copy(LoweredBasicBlock* block)
  42. {
  43. this->inlineeFrameLifetimes.Copy(&block->inlineeFrameLifetimes);
  44. this->inlineeStack.Copy(&block->inlineeStack);
  45. this->inlineeFrameSyms.Copy(&block->inlineeFrameSyms);
  46. }
  47. bool LoweredBasicBlock::HasData()
  48. {
  49. return this->inlineeFrameLifetimes.Count() > 0 || this->inlineeStack.Count() > 0;
  50. }
  51. LoweredBasicBlock* LoweredBasicBlock::Clone(JitArenaAllocator* allocator)
  52. {
  53. if (this->HasData())
  54. {
  55. LoweredBasicBlock* clone = LoweredBasicBlock::New(allocator);
  56. clone->Copy(this);
  57. return clone;
  58. }
  59. return nullptr;
  60. }
  61. bool LoweredBasicBlock::Equals(LoweredBasicBlock* otherBlock)
  62. {
  63. if(this->HasData() != otherBlock->HasData())
  64. {
  65. return false;
  66. }
  67. if (!this->inlineeFrameLifetimes.Equals(&otherBlock->inlineeFrameLifetimes))
  68. {
  69. return false;
  70. }
  71. if (!this->inlineeStack.Equals(&otherBlock->inlineeStack))
  72. {
  73. return false;
  74. }
  75. return true;
  76. }
// LinearScan::RegAlloc
// This register allocator is based on the 1999 linear scan register allocation paper
// by Poletto and Sarkar. This code however walks the IR while doing the lifetime
// allocations, and assigns the regs to all the RegOpnd as it goes. It assumes
// the IR is in R-DFO, and that the lifetime list is sorted in starting order.
// Lifetimes are allocated as they become live, and retired as they go dead. RegOpnd
// are assigned their register. If a lifetime becomes active and there are no free
// registers left, a lifetime is picked to be spilled.
// When we spill, the whole lifetime is spilled. All the loads and stores are done
// through memory for that lifetime, even the ones allocated before the current instruction.
// We do optimize this slightly by not reloading the previous loads that were not in loops.
void
LinearScan::RegAlloc()
{
    // All pass-local bookkeeping lives in this throwaway arena, released when
    // the function returns.
    NoRecoverMemoryJitArenaAllocator tempAlloc(_u("BE-LinearScan"), this->func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    this->tempAlloc = &tempAlloc;

    this->opHelperSpilledLiveranges = JitAnew(&tempAlloc, SList<Lifetime *>, &tempAlloc);
    this->activeLiveranges = JitAnew(&tempAlloc, SList<Lifetime *>, &tempAlloc);
    this->liveOnBackEdgeSyms = JitAnew(&tempAlloc, BVSparse<JitArenaAllocator>, &tempAlloc);
    this->stackPackInUseLiveRanges = JitAnew(&tempAlloc, SList<Lifetime *>, &tempAlloc);
    this->stackSlotsFreeList = JitAnew(&tempAlloc, SList<StackSlot *>, &tempAlloc);
    this->currentBlock = LoweredBasicBlock::New(&tempAlloc);

    IR::Instr *currentInstr = this->func->m_headInstr;

    // Build the lifetime list (sorted by start, per the R-DFO assumption above).
    SCCLiveness liveness(this->func, this->tempAlloc);
    BEGIN_CODEGEN_PHASE(this->func, Js::LivenessPhase);
    // Build the lifetime list
    liveness.Build();
    END_CODEGEN_PHASE(this->func, Js::LivenessPhase);

    this->lifetimeList = &liveness.lifetimeList;
    this->opHelperBlockList = &liveness.opHelperBlockList;
    // Position the helper-block iterator on the first helper block.
    this->opHelperBlockIter = SList<OpHelperBlock>::Iterator(this->opHelperBlockList);
    this->opHelperBlockIter.Next();
    this->Init();

    NativeCodeData::Allocator * nativeAllocator = this->func->GetNativeCodeDataAllocator();
    if (func->hasBailout)
    {
        // One global bail-out record table per function body: the main
        // function plus each inlinee (hence m_inlineeId + 1 slots).
        this->globalBailOutRecordTables = NativeCodeDataNewArrayZ(nativeAllocator, GlobalBailOutRecordDataTable *, func->m_inlineeId + 1);
        this->lastUpdatedRowIndices = JitAnewArrayZ(this->tempAlloc, uint *, func->m_inlineeId + 1);
#ifdef PROFILE_BAILOUT_RECORD_MEMORY
        if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
        {
            this->func->GetScriptContext()->bailOutOffsetBytes += (sizeof(GlobalBailOutRecordDataTable *) * (func->m_inlineeId + 1));
            this->func->GetScriptContext()->bailOutRecordBytes += (sizeof(GlobalBailOutRecordDataTable *) * (func->m_inlineeId + 1));
        }
#endif
    }

    m_bailOutRecordCount = 0;
    // Deferred generator bail-in: set when a BailOutForGeneratorYield is seen,
    // consumed after the label that follows it.
    IR::Instr * insertBailInAfter = nullptr;
    BailOutInfo * bailOutInfoForBailIn = nullptr;
    bool endOfBasicBlock = true;

    // Main single-pass walk over the IR; uses the editing iterator because
    // bail-in generation (below) can insert instructions.
    FOREACH_INSTR_EDITING(instr, instrNext, currentInstr)
    {
        if (instr->GetNumber() == 0)
        {
            // Unnumbered instructions are spill code inserted by this very
            // pass; they carry no lifetime info, so skip them.
            AssertMsg(LowererMD::IsAssign(instr), "Only expect spill code here");
            continue;
        }
#if DBG_DUMP && defined(ENABLE_DEBUG_CONFIG_OPTIONS)
        if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::LinearScanPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
        {
            instr->Dump();
        }
#endif // DBG
        this->currentInstr = instr;
        if (instr->StartsBasicBlock() || endOfBasicBlock)
        {
            endOfBasicBlock = false;
            ++currentBlockNumber;
        }
        bool isLoopBackEdge = false;
        if (instr->IsLabelInstr())
        {
            this->lastLabel = instr->AsLabelInstr();
            if (this->lastLabel->m_loweredBasicBlock)
            {
                // The label already carries block info (merge point). If the
                // previous block falls through, its info is now dead.
                IR::Instr *const prevInstr = instr->GetPrevRealInstrOrLabel();
                Assert(prevInstr);
                if (prevInstr->HasFallThrough())
                {
                    this->currentBlock->Delete(&tempAlloc);
                }
                this->currentBlock = this->lastLabel->m_loweredBasicBlock;
            }
            else if (currentBlock->HasData())
            {
                // Check if the previous block has fall-through. If so, retain the block info. If not, create empty info.
                IR::Instr *const prevInstr = instr->GetPrevRealInstrOrLabel();
                Assert(prevInstr);
                if (!prevInstr->HasFallThrough())
                {
                    currentBlock = LoweredBasicBlock::New(&tempAlloc);
                }
            }
            this->currentRegion = this->lastLabel->GetRegion();
        }
        else if (instr->IsBranchInstr())
        {
            if (this->func->HasTry() && this->func->DoOptimizeTry())
            {
                this->ProcessEHRegionBoundary(instr);
            }
            // A branch at/past the current loop's end is the loop back edge.
            isLoopBackEdge = this->IsInLoop() && instr->GetNumber() >= this->curLoop->regAlloc.loopEnd;
        }
        if (this->RemoveDeadStores(instr))
        {
            continue;
        }
        if (instr->HasBailOutInfo())
        {
            if (this->currentRegion)
            {
                RegionType curRegType = this->currentRegion->GetType();
                if (curRegType == RegionTypeTry || curRegType == RegionTypeCatch || curRegType == RegionTypeFinally)
                {
                    this->func->hasBailoutInEHRegion = true;
                }
            }
            this->FillBailOutRecord(instr);
            if (instr->GetBailOutKind() == IR::BailOutForGeneratorYield)
            {
                // Bail-in code must be generated after the label that follows
                // the yield; remember it and do the insertion at loop bottom.
                Assert(instr->m_next->IsLabelInstr());
                insertBailInAfter = instr->m_next;
                bailOutInfoForBailIn = instr->GetBailOutInfo();
            }
        }
        // Per-instruction allocation sequence; the ordering of these calls is
        // significant (sources before kills before new lifetimes before dst).
        this->SetSrcRegs(instr);
        this->EndDeadLifetimes(instr, isLoopBackEdge);
        if (instr->IsBranchInstr())
        {
            this->ProcessSecondChanceBoundary(instr->AsBranchInstr());
        }
        this->CheckIfInLoop(instr);
        if (isLoopBackEdge)
        {
            // Now that the loop has been exited, retire lifetimes that were
            // kept alive only by the back edge.
            this->EndDeadLifetimes(instr, false);
        }
        this->CheckOpHelper(instr);
        this->KillImplicitRegs(instr);
        this->AllocateNewLifetimes(instr);
        this->SetDstReg(instr);
        this->EndDeadOpHelperLifetimes(instr);
        if (instr->IsLabelInstr())
        {
            this->ProcessSecondChanceBoundary(instr->AsLabelInstr());
        }
#if DBG
        this->CheckInvariants();
#endif // DBG
        if(instr->EndsBasicBlock())
        {
            endOfBasicBlock = true;
        }
        if (insertBailInAfter == instr)
        {
            // Insert the deferred generator bail-in; the editing iterator
            // resumes from the instruction it returns.
            instrNext = linearScanMD.GenerateBailInForGeneratorYield(instr, bailOutInfoForBailIn);
            insertBailInAfter = nullptr;
            bailOutInfoForBailIn = nullptr;
        }
    } NEXT_INSTR_EDITING;

    if (func->hasBailout)
    {
        // Finalize each populated bail-out table into native code data.
        for (uint i = 0; i <= func->m_inlineeId; i++)
        {
            if (globalBailOutRecordTables[i] != nullptr)
            {
                globalBailOutRecordTables[i]->Finalize(nativeAllocator, &tempAlloc);
#ifdef PROFILE_BAILOUT_RECORD_MEMORY
                if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
                {
                    func->GetScriptContext()->bailOutOffsetBytes += sizeof(GlobalBailOutRecordDataRow) * globalBailOutRecordTables[i]->length;
                    func->GetScriptContext()->bailOutRecordBytes += sizeof(GlobalBailOutRecordDataRow) * globalBailOutRecordTables[i]->length;
                }
#endif
            }
        }
    }

    // At the end of the walk every allocatable register must have been freed;
    // only the permanently unallocatable ones remain counted.
    AssertMsg((this->intRegUsedCount + this->floatRegUsedCount) == this->linearScanMD.UnAllocatableRegCount(this->func) , "RegUsedCount is wrong");
    AssertMsg(this->activeLiveranges->Empty(), "Active list not empty");
    AssertMsg(this->stackPackInUseLiveRanges->Empty(), "Spilled list not empty");
    AssertMsg(!this->opHelperBlockIter.IsValid(), "Got to the end with a helper block still on the list?");
    Assert(this->currentBlock->inlineeStack.Count() == 0);
    this->InsertOpHelperSpillAndRestores();

#if _M_IX86
# if ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.Instrument.IsEnabled(Js::LinearScanPhase, this->func->GetSourceContextId(),this->func->GetLocalFunctionId()))
    {
        this->DynamicStatsInstrument();
    }
# endif
#endif

#if DBG_DUMP
    if (PHASE_STATS(Js::LinearScanPhase, this->func))
    {
        this->PrintStats();
    }
    if (PHASE_TRACE(Js::StackPackPhase, this->func))
    {
        Output::Print(_u("---------------------------\n"));
    }
#endif // DBG_DUMP
    DebugOnly(this->func->allowRemoveBailOutArgInstr = true);
}
  279. JitArenaAllocator *
  280. LinearScan::GetTempAlloc()
  281. {
  282. Assert(tempAlloc);
  283. return tempAlloc;
  284. }
#if DBG
// LinearScan::CheckInvariants
// Debug-only consistency check, run after every instruction in RegAlloc():
// validates the active/op-helper lifetime lists against the register bit
// vectors and the int/float usage counters.
void
LinearScan::CheckInvariants() const
{
    // Seed with the permanently unallocatable registers so a lifetime can
    // never claim one of them.
    BitVector bv = this->nonAllocatableRegs;
    uint32 lastend = 0;
    FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->activeLiveranges)
    {
        // Make sure there are only one lifetime per reg
        Assert(!bv.Test(lifetime->reg));
        bv.Set(lifetime->reg);
        Assert(!lifetime->isOpHelperSpilled);
        Assert(!lifetime->isSpilled);
        // Active list must be sorted by non-decreasing end point.
        Assert(lifetime->end >= lastend);
        lastend = lifetime->end;
    }
    NEXT_SLIST_ENTRY;
    // Make sure the active reg bit vector is correct
    Assert(bv.Equal(this->activeRegs));
    // Recount int vs float registers and compare with the running totals.
    uint ints = 0, floats = 0;
    FOREACH_BITSET_IN_UNITBV(index, this->activeRegs, BitVector)
    {
        if (IRType_IsFloat(RegTypes[index]))
        {
            floats++;
        }
        else
        {
            ints++;
        }
    }
    NEXT_BITSET_IN_UNITBV;
    Assert(ints == this->intRegUsedCount);
    Assert(floats == this->floatRegUsedCount);
    Assert((this->intRegUsedCount + this->floatRegUsedCount) == this->activeRegs.Count());
    // Second pass: same checks for the op-helper-spilled lifetimes.
    bv.ClearAll();
    lastend = 0;
    FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->opHelperSpilledLiveranges)
    {
        // Make sure there are only one lifetime per reg in the op helper spilled liveranges
        Assert(!bv.Test(lifetime->reg));
        if (!lifetime->cantOpHelperSpill)
        {
            bv.Set(lifetime->reg);
            Assert(lifetime->isOpHelperSpilled);
            Assert(!lifetime->isSpilled);
        }
        Assert(lifetime->end >= lastend);
        lastend = lifetime->end;
    }
    NEXT_SLIST_ENTRY;
    // Make sure the opHelperSpilledRegs bit vector is correct
    Assert(bv.Equal(this->opHelperSpilledRegs));
    // Each register marked as a temp must have a lifetime recorded for it.
    for (int i = 0; i < RegNumCount; i++)
    {
        if (this->tempRegs.Test(i))
        {
            Assert(this->tempRegLifetimes[i]->reg == i);
        }
    }
    // Every second-chance register must hold a second-chance-allocated
    // lifetime whose sym already has a home location.
    FOREACH_BITSET_IN_UNITBV(reg, this->secondChanceRegs, BitVector)
    {
        Lifetime *lifetime = this->regContent[reg];
        Assert(lifetime);
        StackSym *sym = lifetime->sym;
        Assert(lifetime->isSecondChanceAllocated);
        Assert(sym->IsConst() || sym->IsAllocated()); // Should have been spilled already.
    } NEXT_BITSET_IN_UNITBV;
}
#endif // DBG
// LinearScan::Init
// Initialize bit vectors
// One-time setup before the allocation walk: classifies every register into
// the int/float pools, marks unallocatable registers as permanently active,
// and records caller-/callee-saved sets.
void
LinearScan::Init()
{
    FOREACH_REG(reg)
    {
        // Registers that can't be used are set to active, and will remain this way
        if (!LinearScan::IsAllocatable(reg))
        {
            this->activeRegs.Set(reg);
            // Keep the per-type usage counters consistent with activeRegs.
            if (IRType_IsFloat(RegTypes[reg]))
            {
                this->floatRegUsedCount++;
            }
            else
            {
                this->intRegUsedCount++;
            }
        }
        if (RegTypes[reg] == TyMachReg)
        {
            // JIT64_TODO: Rename int32Regs to machIntRegs.
            this->int32Regs.Set(reg);
            numInt32Regs++;
        }
        else if (RegTypes[reg] == TyFloat64)
        {
            this->floatRegs.Set(reg);
            numFloatRegs++;
        }
        if (LinearScan::IsCallerSaved(reg))
        {
            this->callerSavedRegs.Set(reg);
        }
        if (LinearScan::IsCalleeSaved(reg))
        {
            this->calleeSavedRegs.Set(reg);
        }
        // No lifetime occupies any register yet.
        this->regContent[reg] = nullptr;
    } NEXT_REG;
    this->instrUseRegs.ClearAll();
    this->secondChanceRegs.ClearAll();
    // Machine-dependent initialization (arch-specific register facts).
    this->linearScanMD.Init(this);
#if DBG
    // Snapshot the unallocatable set so CheckInvariants() can seed from it.
    this->nonAllocatableRegs = this->activeRegs;
#endif
#if DBG_DUMP
    if (PHASE_TRACE(Js::LinearScanPhase, this->func))
    {
        this->func->DumpHeader();
    }
#endif
}
// LinearScan::CheckIfInLoop
// Track whether the current instruction is in a loop or not.
// Pops loops (innermost outward) whose end 'instr' has passed, and pushes a
// new loop when 'instr' is a loop-top label. Returns whether, after the
// update, we are inside at least one loop.
bool
LinearScan::CheckIfInLoop(IR::Instr *instr)
{
    if (this->IsInLoop())
    {
        // Look for end of loop
        AssertMsg(this->curLoop->regAlloc.loopEnd != 0, "Something is wrong here....");
        if (instr->GetNumber() >= this->curLoop->regAlloc.loopEnd)
        {
            AssertMsg(instr->IsBranchInstr(), "Loop tail should be a branchInstr");
            // One branch may terminate several nested loops at once; keep
            // popping until the current loop contains this instruction.
            while (this->IsInLoop() && instr->GetNumber() >= this->curLoop->regAlloc.loopEnd)
            {
                this->loopNest--;
                // Reset this loop's per-loop regalloc bookkeeping on exit.
                this->curLoop->regAlloc.defdInLoopBv->ClearAll();
                this->curLoop->regAlloc.symRegUseBv->ClearAll();
                this->curLoop->regAlloc.liveOnBackEdgeSyms->ClearAll();
                this->curLoop->regAlloc.exitRegContentList->Clear();
                this->curLoop->isProcessed = true;
                this->curLoop = this->curLoop->parent;
                if (this->loopNest == 0)
                {
                    // Left the outermost loop: no syms are live on a back edge now.
                    this->liveOnBackEdgeSyms->ClearAll();
                }
            }
        }
    }
    if (instr->IsLabelInstr() && instr->AsLabelInstr()->m_isLoopTop)
    {
        IR::LabelInstr * labelInstr = instr->AsLabelInstr();
        Loop *parentLoop = this->curLoop;
        if (parentLoop)
        {
            parentLoop->isLeaf = false;
        }
        this->curLoop = labelInstr->GetLoop();
        this->curLoop->isProcessed = false;
        // Lexically nested may not always nest in a flow based way:
        // while(i--) {
        //   if (cond) {
        //     while(j--) {
        //     }
        //     break;
        //   }
        // }
        // These look nested, but they are not...
        // So update the flow based parent to be lexical or we won't be able to figure out when we get back
        // to the outer loop.
        // REVIEW: This isn't necessary anymore now that break blocks are moved out of the loops.
        this->curLoop->parent = parentLoop;
        // Allocate fresh per-loop regalloc state for the loop being entered.
        this->curLoop->regAlloc.defdInLoopBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
        this->curLoop->regAlloc.symRegUseBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
        this->curLoop->regAlloc.loopStart = labelInstr->GetNumber();
        this->curLoop->regAlloc.exitRegContentList = JitAnew(this->tempAlloc, SList<Lifetime **>, this->tempAlloc);
        this->curLoop->regAlloc.regUseBv = 0;
        // Fold this loop's back-edge-live syms into the running union.
        this->liveOnBackEdgeSyms->Or(this->curLoop->regAlloc.liveOnBackEdgeSyms);
        this->loopNest++;
    }
    return this->IsInLoop();
}
  470. void
  471. LinearScan::InsertOpHelperSpillAndRestores()
  472. {
  473. linearScanMD.InsertOpHelperSpillAndRestores(opHelperBlockList);
  474. }
// LinearScan::CheckOpHelper
// Maintain the current helper-block state as the scan walks 'instr':
// when 'instr' ends the current helper block, reactivate every lifetime that
// was spilled around the block and record which of them need save/restore
// code; when 'instr' is the label that opens the next helper block in the
// list, make that block current.
void
LinearScan::CheckOpHelper(IR::Instr *instr)
{
    if (this->IsInHelperBlock())
    {
        if (this->currentOpHelperBlock->opHelperEndInstr == instr)
        {
            // Get targetInstr if we can.
            // We can deterministically get it only for unconditional branches, as conditional branch may fall through.
            IR::Instr * targetInstr = nullptr;
            if (instr->IsBranchInstr() && instr->AsBranchInstr()->IsUnconditional())
            {
                AssertMsg(!instr->AsBranchInstr()->IsMultiBranch(), "Not supported for Multibranch");
                targetInstr = instr->AsBranchInstr()->GetTarget();
            }
            /*
             * Keep track of the number of registers we've had to
             * store and restore around a helper block for LinearScanMD (on ARM
             * and X64). We need this to be able to allocate space in the frame.
             * We can't emit a PUSH/POP sequence around the block like IA32 because
             * the stack pointer can't move outside the prolog.
             */
            uint32 helperSpilledLiverangeCount = 0;
            // Exiting a helper block. We are going to insert
            // the restore here after linear scan. So put all the restored
            // lifetime back to active
            while (!this->opHelperSpilledLiveranges->Empty())
            {
                Lifetime * lifetime = this->opHelperSpilledLiveranges->Pop();
                lifetime->isOpHelperSpilled = false;
                if (!lifetime->cantOpHelperSpill)
                {
                    // Put the life time back to active
                    this->AssignActiveReg(lifetime, lifetime->reg);
                    bool reload = true;
                    // Lifetime ends before the target after helper block, don't need to save and restore helper spilled lifetime.
                    if (targetInstr && lifetime->end < targetInstr->GetNumber())
                    {
                        // However, if lifetime is spilled as arg - we still need to spill it because the helper assumes the value
                        // to be available in the stack
                        if (lifetime->isOpHelperSpillAsArg)
                        {
                            // we should not attempt to restore it as it is dead on return from the helper.
                            reload = false;
                        }
                        else
                        {
                            Assert(!instr->AsBranchInstr()->IsLoopTail(this->func));
                            // Dead past the branch target and not needed in
                            // memory by the helper: skip it entirely.
                            continue;
                        }
                    }
                    // Save all the lifetime that needs to be restored
                    OpHelperSpilledLifetime spilledLifetime;
                    spilledLifetime.lifetime = lifetime;
                    spilledLifetime.spillAsArg = lifetime->isOpHelperSpillAsArg;
                    spilledLifetime.reload = reload;
                    /*
                     * Can't unfortunately move this into the else block above because we don't know if this
                     * lifetime will actually get spilled until register allocation completes.
                     * Instead we allocate a slot to this StackSym in LinearScanMD iff
                     * !(lifetime.isSpilled && lifetime.noReloadsIfSpilled).
                     */
                    helperSpilledLiverangeCount++;
                    // save the reg in case it is spilled later. We still need to save and restore
                    // for the non-loop case.
                    spilledLifetime.reg = lifetime->reg;
                    this->currentOpHelperBlock->spilledLifetime.Prepend(spilledLifetime);
                }
                else
                {
                    // Clear it for the next helper block
                    lifetime->cantOpHelperSpill = false;
                }
                lifetime->isOpHelperSpillAsArg = false;
            }
            this->totalOpHelperFullVisitedLength += this->currentOpHelperBlock->Length();
            // Use a dummy label as the insertion point of the reloads, as second-chance-allocation
            // may insert compensation code right before the branch
            IR::PragmaInstr *dummyLabel = IR::PragmaInstr::New(Js::OpCode::Nop, 0, this->func);
            this->currentOpHelperBlock->opHelperEndInstr->InsertBefore(dummyLabel);
            dummyLabel->CopyNumber(this->currentOpHelperBlock->opHelperEndInstr);
            this->currentOpHelperBlock->opHelperEndInstr = dummyLabel;
            this->opHelperSpilledRegs.ClearAll();
            this->currentOpHelperBlock = nullptr;
            linearScanMD.EndOfHelperBlock(helperSpilledLiverangeCount);
        }
    }
    // Check whether 'instr' begins the next helper block in the ordered list.
    if (this->opHelperBlockIter.IsValid())
    {
        AssertMsg(
            !instr->IsLabelInstr() ||
            !instr->AsLabelInstr()->isOpHelper ||
            this->opHelperBlockIter.Data().opHelperLabel == instr,
            "Found a helper label that doesn't begin the next helper block in the list?");
        if (this->opHelperBlockIter.Data().opHelperLabel == instr)
        {
            this->currentOpHelperBlock = &this->opHelperBlockIter.Data();
            this->opHelperBlockIter.Next();
        }
    }
}
  576. uint
  577. LinearScan::HelperBlockStartInstrNumber() const
  578. {
  579. Assert(IsInHelperBlock());
  580. return this->currentOpHelperBlock->opHelperLabel->GetNumber();
  581. }
  582. uint
  583. LinearScan::HelperBlockEndInstrNumber() const
  584. {
  585. Assert(IsInHelperBlock());
  586. return this->currentOpHelperBlock->opHelperEndInstr->GetNumber();
  587. }
  588. // LinearScan::AddToActive
  589. // Add a lifetime to the active list. The list is kept sorted in order lifetime end.
  590. // This makes it easier to pick the lifetimes to retire.
  591. void
  592. LinearScan::AddToActive(Lifetime * lifetime)
  593. {
  594. LinearScan::AddLiveRange(this->activeLiveranges, lifetime);
  595. this->regContent[lifetime->reg] = lifetime;
  596. if (lifetime->isSecondChanceAllocated)
  597. {
  598. this->secondChanceRegs.Set(lifetime->reg);
  599. }
  600. else
  601. {
  602. Assert(!this->secondChanceRegs.Test(lifetime->reg));
  603. }
  604. }
  605. void
  606. LinearScan::AddOpHelperSpilled(Lifetime * lifetime)
  607. {
  608. RegNum reg = lifetime->reg;
  609. Assert(this->IsInHelperBlock());
  610. Assert(!this->opHelperSpilledRegs.Test(reg));
  611. Assert(lifetime->isOpHelperSpilled == false);
  612. Assert(lifetime->cantOpHelperSpill == false);
  613. this->opHelperSpilledRegs.Set(reg);
  614. lifetime->isOpHelperSpilled = true;
  615. this->regContent[reg] = nullptr;
  616. this->secondChanceRegs.Clear(reg);
  617. // If a lifetime is being OpHelper spilled and it's an inlinee arg sym
  618. // we need to make sure its spilled to the sym offset spill space, i.e. isOpHelperSpillAsArg
  619. // is set. Otherwise, it's value will not be available on inline frame reconstruction.
  620. if (this->currentBlock->inlineeFrameSyms.Count() > 0 &&
  621. this->currentBlock->inlineeFrameSyms.ContainsKey(lifetime->sym->m_id) &&
  622. (lifetime->sym->m_isSingleDef || !lifetime->defList.Empty()))
  623. {
  624. lifetime->isOpHelperSpillAsArg = true;
  625. if (!lifetime->sym->IsAllocated())
  626. {
  627. this->AllocateStackSpace(lifetime);
  628. }
  629. this->RecordLoopUse(lifetime, lifetime->reg);
  630. }
  631. LinearScan::AddLiveRange(this->opHelperSpilledLiveranges, lifetime);
  632. }
  633. void
  634. LinearScan::RemoveOpHelperSpilled(Lifetime * lifetime)
  635. {
  636. Assert(this->IsInHelperBlock());
  637. Assert(lifetime->isOpHelperSpilled);
  638. Assert(lifetime->cantOpHelperSpill == false);
  639. Assert(this->opHelperSpilledRegs.Test(lifetime->reg));
  640. this->opHelperSpilledRegs.Clear(lifetime->reg);
  641. lifetime->isOpHelperSpilled = false;
  642. lifetime->cantOpHelperSpill = false;
  643. lifetime->isOpHelperSpillAsArg = false;
  644. this->opHelperSpilledLiveranges->Remove(lifetime);
  645. }
  646. void
  647. LinearScan::SetCantOpHelperSpill(Lifetime * lifetime)
  648. {
  649. Assert(this->IsInHelperBlock());
  650. Assert(lifetime->isOpHelperSpilled);
  651. Assert(lifetime->cantOpHelperSpill == false);
  652. this->opHelperSpilledRegs.Clear(lifetime->reg);
  653. lifetime->isOpHelperSpilled = false;
  654. lifetime->cantOpHelperSpill = true;
  655. }
  656. void
  657. LinearScan::AddLiveRange(SList<Lifetime *> * list, Lifetime * newLifetime)
  658. {
  659. FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, list, iter)
  660. {
  661. if (newLifetime->end < lifetime->end)
  662. {
  663. break;
  664. }
  665. }
  666. NEXT_SLIST_ENTRY_EDITING;
  667. iter.InsertBefore(newLifetime);
  668. }
  669. Lifetime *
  670. LinearScan::RemoveRegLiveRange(SList<Lifetime *> * list, RegNum reg)
  671. {
  672. // Find the register in the active set
  673. FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, list, iter)
  674. {
  675. if (lifetime->reg == reg)
  676. {
  677. Lifetime * lifetimeReturn = lifetime;
  678. iter.RemoveCurrent();
  679. return lifetimeReturn;
  680. }
  681. } NEXT_SLIST_ENTRY_EDITING;
  682. AssertMsg(false, "Can't find life range for a reg");
  683. return nullptr;
  684. }
// LinearScan::SetDstReg
// Set the reg on each RegOpnd def.
// Handles: non-reg dsts (legalize only), pre-assigned/arg-slot regs (spill
// the destination), spilled lifetimes (temp reg / second chance / fold the
// store to memory), and helper-spilled lifetimes (cancel the spill since the
// def makes the save/restore unnecessary).
void
LinearScan::SetDstReg(IR::Instr *instr)
{
    //
    // Enregister dst
    //
    IR::Opnd *dst = instr->GetDst();
    if (dst == nullptr)
    {
        return;
    }
    if (!dst->IsRegOpnd())
    {
        // This could be, for instance, a store to a sym with a large offset
        // that was just assigned when we saw the use.
        this->linearScanMD.LegalizeDef(instr);
        return;
    }
    IR::RegOpnd * regOpnd = dst->AsRegOpnd();
    /*
     * If this is a register used to setup a callsite per
     * a calling convention then mark it unavailable to allocate
     * until we see a CALL.
     */
    if (regOpnd->m_isCallArg)
    {
        RegNum callSetupReg = regOpnd->GetReg();
        callSetupRegs.Set(callSetupReg);
    }
    StackSym * stackSym = regOpnd->m_sym;
    // Arg slot sym can be in a RegOpnd for param passed via registers
    // Just use the assigned register
    if (stackSym == nullptr || stackSym->IsArgSlotSym())
    {
        //
        // Already allocated register. just spill the destination
        //
        RegNum reg = regOpnd->GetReg();
        if(LinearScan::IsAllocatable(reg))
        {
            this->SpillReg(reg);
        }
        this->tempRegs.Clear(reg);
    }
    else
    {
        if (regOpnd->GetReg() != RegNOREG)
        {
            this->RecordLoopUse(nullptr, regOpnd->GetReg());
            // Nothing to do
            return;
        }
        Lifetime * lifetime = stackSym->scratch.linearScan.lifetime;
        uint32 useCountCost = LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr));
        // Optimistically decrease the useCount. We'll undo this if we put it on the defList.
        lifetime->SubFromUseCount(useCountCost, this->curLoop);
        if (lifetime->isSpilled)
        {
            // The lifetime lost its register: defs go to a temp reg, a
            // second-chance reg, or get folded directly into a memory store.
            if (stackSym->IsConst() && !IsSymNonTempLocalVar(stackSym))
            {
                // We will reload the constant (but in debug mode, we still need to process this if this is a user var).
                return;
            }
            RegNum reg = regOpnd->GetReg();
            if (reg != RegNOREG)
            {
                // It is already assigned, just record it as a temp reg
                this->AssignTempReg(lifetime, reg);
            }
            else
            {
                IR::Opnd *src1 = instr->GetSrc1();
                IR::Opnd *src2 = instr->GetSrc2();
                if ((src1 && src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym == stackSym) ||
                    (src2 && src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym == stackSym))
                {
                    // OpEQ: src1 should have a valid reg (put src2 for other targets)
                    reg = this->GetAssignedTempReg(lifetime, dst->GetType());
                    Assert(reg != RegNOREG);
                    RecordDef(lifetime, instr, 0);
                }
                else
                {
                    // Try second chance
                    reg = this->SecondChanceAllocation(lifetime, false);
                    if (reg != RegNOREG)
                    {
                        Assert(!stackSym->m_isSingleDef);
                        this->SetReg(regOpnd);
                        // Keep track of defs for this lifetime, in case it gets spilled.
                        RecordDef(lifetime, instr, useCountCost);
                        return;
                    }
                    else
                    {
                        reg = this->GetAssignedTempReg(lifetime, dst->GetType());
                        RecordDef(lifetime, instr, 0);
                    }
                }
                if (LowererMD::IsAssign(instr) && instr->GetSrc1()->IsRegOpnd())
                {
                    // Fold the spilled store
                    if (reg != RegNOREG)
                    {
                        // If the value is in a temp reg, it's not valid any more.
                        this->tempRegs.Clear(reg);
                    }
                    IRType srcType = instr->GetSrc1()->GetType();
                    instr->ReplaceDst(IR::SymOpnd::New(stackSym, srcType, this->func));
                    this->linearScanMD.LegalizeDef(instr);
                    return;
                }
                if (reg == RegNOREG)
                {
                    IR::Opnd *src = instr->GetSrc1();
                    if (src && src->IsRegOpnd() && src->AsRegOpnd()->m_sym == stackSym)
                    {
                        // Handle OPEQ's for x86/x64
                        reg = src->AsRegOpnd()->GetReg();
                        AssertMsg(!this->activeRegs.Test(reg), "Shouldn't be active");
                    }
                    else
                    {
                        // The lifetime was spilled, but we still need a reg for this operand.
                        reg = this->FindReg(nullptr, regOpnd);
                    }
                    this->AssignTempReg(lifetime, reg);
                }
            }
            if (!lifetime->isDeadStore && !lifetime->isSecondChanceAllocated)
            {
                // Insert a store since the lifetime is spilled
                this->InsertStore(instr, regOpnd->m_sym, reg);
            }
        }
        else
        {
            if (lifetime->isOpHelperSpilled)
            {
                // We must be in a helper block and the lifetime must
                // start before the helper block
                Assert(this->IsInHelperBlock());
                Assert(lifetime->start < this->HelperBlockStartInstrNumber());
                RegNum reg = lifetime->reg;
                Assert(this->opHelperSpilledRegs.Test(reg));
                if (this->activeRegs.Test(reg))
                {
                    // The reg must have been used locally in the helper block
                    // by some other lifetime. Just spill it
                    this->SpillReg(reg);
                }
                // We can't save/restore this reg across the helper call because the restore would overwrite
                // this def, but the def means we don't need to spill at all. Mark the lifetime as cantOpHelperSpill
                // however in case another helper call in this block tries to spill it.
                this->SetCantOpHelperSpill(lifetime);
                this->AddToActive(lifetime);
                this->tempRegs.Clear(reg);
                this->activeRegs.Set(reg);
                if (RegTypes[reg] == TyMachReg)
                {
                    this->intRegUsedCount++;
                }
                else
                {
                    Assert(RegTypes[reg] == TyFloat64);
                    this->floatRegUsedCount++;
                }
            }
            // Keep track of defs for this lifetime, in case it gets spilled.
            RecordDef(lifetime, instr, useCountCost);
        }
        this->SetReg(regOpnd);
    }
}
  861. // Get the stack offset of the non temp locals from the stack.
  862. int32 LinearScan::GetStackOffset(Js::RegSlot regSlotId)
  863. {
  864. int32 stackSlotId = regSlotId - this->func->GetJITFunctionBody()->GetFirstNonTempLocalIndex();
  865. Assert(stackSlotId >= 0);
  866. return this->func->GetLocalVarSlotOffset(stackSlotId);
  867. }
  868. //
  869. // This helper function is used for saving bytecode stack sym value to memory / local slots on stack so that we can read it for the locals inspection.
  870. void
  871. LinearScan::WriteThroughForLocal(IR::RegOpnd* regOpnd, Lifetime* lifetime, IR::Instr* instrInsertAfter)
  872. {
  873. Assert(regOpnd);
  874. Assert(lifetime);
  875. Assert(instrInsertAfter);
  876. StackSym* sym = regOpnd->m_sym;
  877. Assert(IsSymNonTempLocalVar(sym));
  878. Js::RegSlot slotIndex = sym->GetByteCodeRegSlot();
  879. // First we insert the write through moves
  880. sym->m_offset = GetStackOffset(slotIndex);
  881. sym->m_allocated = true;
  882. // Save the value on reg to local var slot.
  883. this->InsertStore(instrInsertAfter, sym, lifetime->reg);
  884. }
  885. bool
  886. LinearScan::NeedsWriteThrough(StackSym * sym)
  887. {
  888. return this->NeedsWriteThroughForEH(sym) || this->IsSymNonTempLocalVar(sym);
  889. }
  890. bool
  891. LinearScan::NeedsWriteThroughForEH(StackSym * sym)
  892. {
  893. if (!this->func->HasTry() || !this->func->DoOptimizeTry() || !sym->HasByteCodeRegSlot())
  894. {
  895. return false;
  896. }
  897. Assert(this->currentRegion);
  898. return this->currentRegion->writeThroughSymbolsSet && this->currentRegion->writeThroughSymbolsSet->Test(sym->m_id);
  899. }
  900. // Helper routine to check if current sym belongs to non temp bytecodereg
  901. bool
  902. LinearScan::IsSymNonTempLocalVar(StackSym *sym)
  903. {
  904. Assert(sym);
  905. if (this->func->IsJitInDebugMode() && sym->HasByteCodeRegSlot())
  906. {
  907. Js::RegSlot slotIndex = sym->GetByteCodeRegSlot();
  908. return this->func->IsNonTempLocalVar(slotIndex);
  909. }
  910. return false;
  911. }
  912. // LinearScan::SetSrcRegs
  913. // Set the reg on each RegOpnd use.
  914. // Note that this includes regOpnd of indir dsts...
  915. void
  916. LinearScan::SetSrcRegs(IR::Instr *instr)
  917. {
  918. //
  919. // Enregister srcs
  920. //
  921. IR::Opnd *src1 = instr->GetSrc1();
  922. if (src1 != nullptr)
  923. {
  924. // Capture src2 now as folding in SetUses could swab the srcs...
  925. IR::Opnd *src2 = instr->GetSrc2();
  926. this->SetUses(instr, src1);
  927. if (src2 != nullptr)
  928. {
  929. this->SetUses(instr, src2);
  930. }
  931. }
  932. IR::Opnd *dst = instr->GetDst();
  933. if (dst && dst->IsIndirOpnd())
  934. {
  935. this->SetUses(instr, dst);
  936. }
  937. this->instrUseRegs.ClearAll();
  938. }
  939. // LinearScan::SetUses
  940. void
  941. LinearScan::SetUses(IR::Instr *instr, IR::Opnd *opnd)
  942. {
  943. switch (opnd->GetKind())
  944. {
  945. case IR::OpndKindReg:
  946. this->SetUse(instr, opnd->AsRegOpnd());
  947. break;
  948. case IR::OpndKindSym:
  949. {
  950. Sym * sym = opnd->AsSymOpnd()->m_sym;
  951. if (sym->IsStackSym())
  952. {
  953. StackSym* stackSym = sym->AsStackSym();
  954. if (!stackSym->IsAllocated())
  955. {
  956. func->StackAllocate(stackSym, opnd->GetSize());
  957. // StackSym's lifetime is allocated during SCCLiveness::ProcessDst
  958. // we might not need to set the flag if the sym is not a dst.
  959. if (stackSym->scratch.linearScan.lifetime)
  960. {
  961. stackSym->scratch.linearScan.lifetime->cantStackPack = true;
  962. }
  963. }
  964. this->linearScanMD.LegalizeUse(instr, opnd);
  965. }
  966. }
  967. break;
  968. case IR::OpndKindIndir:
  969. {
  970. IR::IndirOpnd * indirOpnd = opnd->AsIndirOpnd();
  971. if (indirOpnd->GetBaseOpnd())
  972. {
  973. this->SetUse(instr, indirOpnd->GetBaseOpnd());
  974. }
  975. if (indirOpnd->GetIndexOpnd())
  976. {
  977. this->SetUse(instr, indirOpnd->GetIndexOpnd());
  978. }
  979. }
  980. break;
  981. case IR::OpndKindList:
  982. {
  983. opnd->AsListOpnd()->Map([&](int i, IR::Opnd* opnd)
  984. {
  985. this->SetUses(instr, opnd);
  986. });
  987. }
  988. break;
  989. case IR::OpndKindIntConst:
  990. case IR::OpndKindAddr:
  991. this->linearScanMD.LegalizeConstantUse(instr, opnd);
  992. break;
  993. };
  994. }
// Scratch state accumulated while filling one bailout record: the constants
// encountered and the syms that are live in registers at the bailout point.
struct FillBailOutState
{
    SListCounted<Js::Var> constantList;            // constants to restore; their list index feeds the offset encoding
    uint registerSaveCount;                        // number of registerSaveSyms entries populated
    StackSym * registerSaveSyms[RegNumCount - 1];  // indexed by (reg - 1); sym whose value is saved in that reg
    FillBailOutState(JitArenaAllocator * allocator) : constantList(allocator) {}
};
// LinearScan::FillBailOutOffset
// Compute the restore location for 'stackSym' at the bailout instruction and
// write it into *offset:
// - constants: an index past the register-save and reserve slots into the
//   constant list accumulated in 'state',
// - spilled syms (and most syms on BailOutOnException): their stack offset,
// - syms live in a register: the register-save-space index for that reg.
void
LinearScan::FillBailOutOffset(int * offset, StackSym * stackSym, FillBailOutState * state, IR::Instr * instr)
{
    AssertMsg(*offset == 0, "Can't have two active lifetime for the same byte code register");
    if (stackSym->IsConst())
    {
        state->constantList.Prepend(reinterpret_cast<Js::Var>(stackSym->GetLiteralConstValue_PostGlobOpt()));
        // Constant offset are offset by the number of register save slots
        *offset = state->constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
    }
    else if (stackSym->m_isEncodedConstant)
    {
        Assert(!stackSym->m_isSingleDef);
        state->constantList.Prepend((Js::Var)stackSym->constantValue);
        // Constant offset are offset by the number of register save slots
        *offset = state->constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
    }
    else
    {
        Lifetime * lifetime = stackSym->scratch.linearScan.lifetime;
        // The sym must be live across this instruction.
        Assert(lifetime && lifetime->start < instr->GetNumber() && instr->GetNumber() <= lifetime->end);
        if (instr->GetBailOutKind() == IR::BailOutOnException)
        {
            // Apart from the exception object sym, lifetimes for all other syms that need to be restored at this bailout,
            // must have been spilled at least once (at the TryCatch, or at the Leave, or both)
            // Post spilling, a lifetime could have been second chance allocated. But, it should still have stack allocated for its sym
            Assert(stackSym->IsAllocated() || (stackSym == this->currentRegion->GetExceptionObjectSym()));
        }
        this->PrepareForUse(lifetime);
        if (lifetime->isSpilled ||
            ((instr->GetBailOutKind() == IR::BailOutOnException) && (stackSym != this->currentRegion->GetExceptionObjectSym()))) // BailOutOnException must restore from memory
        {
            Assert(stackSym->IsAllocated());
#ifdef MD_GROW_LOCALS_AREA_UP
            *offset = -((int)stackSym->m_offset + BailOutInfo::StackSymBias);
#else
            // Stack offset are negative, includes the PUSH EBP and return address
            *offset = stackSym->m_offset - (2 * MachPtr);
#endif
        }
        else
        {
            // Sym is live in a register: record it in the register-save map.
            Assert(lifetime->reg != RegNOREG);
            Assert(state->registerSaveSyms[lifetime->reg - 1] == nullptr ||
                state->registerSaveSyms[lifetime->reg - 1] == stackSym);
            AssertMsg((stackSym->IsFloat64() || stackSym->IsSimd128()) && RegTypes[lifetime->reg] == TyFloat64 ||
                !(stackSym->IsFloat64() || stackSym->IsSimd128()) && RegTypes[lifetime->reg] != TyFloat64,
                "Trying to save float64 sym into non-float64 reg or non-float64 sym into float64 reg");
            // Save the register value to the register save space using the reg enum value as index
            state->registerSaveSyms[lifetime->reg - 1] = stackSym;
            *offset = LinearScanMD::GetRegisterSaveIndex(lifetime->reg);
            state->registerSaveCount++;
        }
    }
}
// Per-function (top function or one inlinee) bailout data gathered while
// processing a single bailout instruction.
struct FuncBailOutData
{
    Func * func;
    BailOutRecord * bailOutRecord;
    int * localOffsets;           // per-RegSlot restore offset; 0 means not live at this bailout
    BVFixed * losslessInt32Syms;  // slots whose value is a lossless int32
    BVFixed * float64Syms;        // slots whose value is a float64
    void Initialize(Func * func, JitArenaAllocator * tempAllocator);
    void FinalizeLocalOffsets(JitArenaAllocator *allocator, GlobalBailOutRecordDataTable *table, uint **lastUpdatedRowIndices);
    void Clear(JitArenaAllocator * tempAllocator);
};
  1068. void
  1069. FuncBailOutData::Initialize(Func * func, JitArenaAllocator * tempAllocator)
  1070. {
  1071. Js::RegSlot localsCount = func->GetJITFunctionBody()->GetLocalsCount();
  1072. this->func = func;
  1073. this->localOffsets = AnewArrayZ(tempAllocator, int, localsCount);
  1074. this->losslessInt32Syms = BVFixed::New(localsCount, tempAllocator);
  1075. this->float64Syms = BVFixed::New(localsCount, tempAllocator);
  1076. }
// FuncBailOutData::FinalizeLocalOffsets
// Flush the collected per-local offsets into the shared global bailout record
// table, one row per live local. *lastUpdatedRowIndices caches, per local,
// the index of the last row written so AddOrUpdateRow can extend an existing
// row instead of adding a new one.
void
FuncBailOutData::FinalizeLocalOffsets(JitArenaAllocator *allocator, GlobalBailOutRecordDataTable *globalBailOutRecordDataTable, uint **lastUpdatedRowIndices)
{
    Js::RegSlot localsCount = func->GetJITFunctionBody()->GetLocalsCount();
    Assert(globalBailOutRecordDataTable != nullptr);
    Assert(lastUpdatedRowIndices != nullptr);
    if (*lastUpdatedRowIndices == nullptr)
    {
        // Lazily create the cache; filling with -1 (all bits set) marks
        // "no row yet" for every local.
        *lastUpdatedRowIndices = JitAnewArrayZ(allocator, uint, localsCount);
        memset(*lastUpdatedRowIndices, -1, sizeof(uint)*localsCount);
    }
    uint32 bailOutRecordId = bailOutRecord->m_bailOutRecordId;
    bailOutRecord->localOffsetsCount = 0;
    for (uint32 i = 0; i < localsCount; i++)
    {
        // if the sym is live
        if (localOffsets[i] != 0)
        {
            bool isFloat = float64Syms->Test(i) != 0;
            bool isInt = losslessInt32Syms->Test(i) != 0;
            globalBailOutRecordDataTable->AddOrUpdateRow(allocator, bailOutRecordId, i, isFloat, isInt, localOffsets[i], &((*lastUpdatedRowIndices)[i]));
            Assert(globalBailOutRecordDataTable->globalBailOutRecordDataRows[(*lastUpdatedRowIndices)[i]].regSlot == i);
            bailOutRecord->localOffsetsCount++;
        }
    }
}
  1103. void
  1104. FuncBailOutData::Clear(JitArenaAllocator * tempAllocator)
  1105. {
  1106. Js::RegSlot localsCount = func->GetJITFunctionBody()->GetLocalsCount();
  1107. JitAdeleteArray(tempAllocator, localsCount, localOffsets);
  1108. losslessInt32Syms->Delete(tempAllocator);
  1109. float64Syms->Delete(tempAllocator);
  1110. }
// LinearScan::EnsureGlobalBailOutRecordTable
// Return the global bailout record table for 'func' (keyed by inlinee id,
// slot 0 for the top function), creating and fully initializing it on first
// request.
GlobalBailOutRecordDataTable *
LinearScan::EnsureGlobalBailOutRecordTable(Func *func)
{
    Assert(globalBailOutRecordTables != nullptr);
    Func *topFunc = func->GetTopFunc();
    bool isTopFunc = (func == topFunc);
    // The top function uses slot 0; each inlinee uses its inlinee id.
    uint32 inlineeID = isTopFunc ? 0 : func->m_inlineeId;
    NativeCodeData::Allocator * allocator = this->func->GetNativeCodeDataAllocator();
    GlobalBailOutRecordDataTable *globalBailOutRecordDataTable = globalBailOutRecordTables[inlineeID];
    if (globalBailOutRecordDataTable == nullptr)
    {
        // First request for this function: allocate and populate the table.
        globalBailOutRecordDataTable = globalBailOutRecordTables[inlineeID] = NativeCodeDataNew(allocator, GlobalBailOutRecordDataTable);
        globalBailOutRecordDataTable->entryPointInfo = (Js::EntryPointInfo*)func->GetWorkItem()->GetJITTimeInfo()->GetEntryPointInfoAddr();
        globalBailOutRecordDataTable->length = globalBailOutRecordDataTable->size = 0;
        globalBailOutRecordDataTable->isInlinedFunction = !isTopFunc;
        globalBailOutRecordDataTable->hasNonSimpleParams = func->GetHasNonSimpleParams();
        globalBailOutRecordDataTable->hasStackArgOpt = func->IsStackArgsEnabled();
        globalBailOutRecordDataTable->isInlinedConstructor = func->IsInlinedConstructor();
        globalBailOutRecordDataTable->isLoopBody = topFunc->IsLoopBody();
        globalBailOutRecordDataTable->returnValueRegSlot = func->returnValueRegSlot;
        globalBailOutRecordDataTable->isScopeObjRestored = false;
        globalBailOutRecordDataTable->firstActualStackOffset = -1;
        globalBailOutRecordDataTable->registerSaveSpace = (Js::Var*)func->GetThreadContextInfo()->GetBailOutRegisterSaveSpaceAddr();
        globalBailOutRecordDataTable->globalBailOutRecordDataRows = nullptr;
        if (func->GetJITFunctionBody()->GetForInLoopDepth() != 0)
        {
#ifdef MD_GROW_LOCALS_AREA_UP
            Assert(func->GetForInEnumeratorArrayOffset() >= 0);
            globalBailOutRecordDataTable->forInEnumeratorArrayRestoreOffset = func->GetForInEnumeratorArrayOffset();
#else
            // Stack offset are negative, includes the PUSH EBP and return address
            globalBailOutRecordDataTable->forInEnumeratorArrayRestoreOffset = func->GetForInEnumeratorArrayOffset() - (2 * MachPtr);
#endif
        }
#ifdef PROFILE_BAILOUT_RECORD_MEMORY
        if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
        {
            topFunc->GetScriptContext()->bailOutOffsetBytes += sizeof(GlobalBailOutRecordDataTable);
            topFunc->GetScriptContext()->bailOutRecordBytes += sizeof(GlobalBailOutRecordDataTable);
        }
#endif
    }
    return globalBailOutRecordDataTable;
}
  1155. void
  1156. LinearScan::SetBitVectorIfTypeSpec(StackSym * sym, Js::RegSlot regSlot, BVFixed * intSyms, BVFixed * floatSyms)
  1157. {
  1158. if (sym->IsTypeSpec())
  1159. {
  1160. if (IRType_IsNativeInt(sym->m_type))
  1161. {
  1162. intSyms->Set(regSlot);
  1163. }
  1164. else if (IRType_IsFloat(sym->m_type))
  1165. {
  1166. floatSyms->Set(regSlot);
  1167. }
  1168. else
  1169. {
  1170. Assert(UNREACHED);
  1171. }
  1172. }
  1173. }
void
LinearScan::FillBailOutRecord(IR::Instr * instr)
{
    // Builds the complete BailOutRecord chain for a bailing-out instruction:
    // one record per function in the inline chain, plus the offsets/constants
    // needed to restore every live byte-code register and out-parameter.
    BailOutInfo * bailOutInfo = instr->GetBailOutInfo();

    // If we are inside an EH region, attach that region's EH bailout data so
    // the bailout helper can re-establish exception-handling state.
    if (this->func->HasTry())
    {
        RegionType currentRegionType = this->currentRegion->GetType();
        if (currentRegionType == RegionTypeTry || currentRegionType == RegionTypeCatch || currentRegionType == RegionTypeFinally)
        {
            bailOutInfo->bailOutRecord->ehBailoutData = this->currentRegion->ehBailoutData;
        }
    }

    BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed = bailOutInfo->byteCodeUpwardExposedUsed;

    // One FuncBailOutData per function in the inline chain, indexed by inline
    // depth (index 0 is the top-level function; funcCount - 1 is the function
    // actually bailing out).
    Func * bailOutFunc = bailOutInfo->bailOutFunc;
    uint funcCount = bailOutFunc->inlineDepth + 1;
    FuncBailOutData * funcBailOutData = AnewArray(this->tempAlloc, FuncBailOutData, funcCount);
    uint funcIndex = funcCount - 1;
    funcBailOutData[funcIndex].Initialize(bailOutFunc, this->tempAlloc);
    funcBailOutData[funcIndex].bailOutRecord = bailOutInfo->bailOutRecord;
    bailOutInfo->bailOutRecord->m_bailOutRecordId = m_bailOutRecordCount++;
    bailOutInfo->bailOutRecord->globalBailOutRecordTable = EnsureGlobalBailOutRecordTable(bailOutFunc);

    NativeCodeData::Allocator * allocator = this->func->GetNativeCodeDataAllocator();

#if DBG_DUMP
    if(PHASE_DUMP(Js::BailOutPhase, this->func))
    {
        Output::Print(_u("-------------------Bailout dump -------------------------\n"));
        instr->Dump();
    }
#endif

    // Generate chained bailout record for inlined functions: walk up the
    // parent chain, creating a record per caller linked through ->parent.
    Func * currentFunc = bailOutFunc->GetParentFunc();
    uint bailOutOffset = bailOutFunc->postCallByteCodeOffset;
    while (currentFunc != nullptr)
    {
        Assert(funcIndex > 0);
        Assert(bailOutOffset != Js::Constants::NoByteCodeOffset);
        BailOutRecord * bailOutRecord = NativeCodeDataNewZ(allocator, BailOutRecord, bailOutOffset, (uint)-1, IR::BailOutInvalid, currentFunc);
        bailOutRecord->m_bailOutRecordId = m_bailOutRecordCount++;
        bailOutRecord->globalBailOutRecordTable = EnsureGlobalBailOutRecordTable(currentFunc);
#if ENABLE_DEBUG_CONFIG_OPTIONS
        // To indicate this is a subsequent bailout from an inlinee
        bailOutRecord->bailOutOpcode = Js::OpCode::InlineeEnd;
#endif
        if (this->func->HasTry())
        {
            RegionType currentRegionType = this->currentRegion->GetType();
            if (currentRegionType == RegionTypeTry || currentRegionType == RegionTypeCatch || currentRegionType == RegionTypeFinally)
            {
                bailOutRecord->ehBailoutData = this->currentRegion->ehBailoutData;
            }
        }
        funcBailOutData[funcIndex].bailOutRecord->parent = bailOutRecord;
        funcIndex--;
        funcBailOutData[funcIndex].bailOutRecord = bailOutRecord;
        funcBailOutData[funcIndex].Initialize(currentFunc, this->tempAlloc);
        bailOutOffset = currentFunc->postCallByteCodeOffset;
        currentFunc = currentFunc->GetParentFunc();
    }

    // The walk must end exactly at the top-level function.
    Assert(funcIndex == 0);
    Assert(bailOutOffset == Js::Constants::NoByteCodeOffset);

    FillBailOutState state(this->tempAlloc);
    state.registerSaveCount = 0;
    memset(state.registerSaveSyms, 0, sizeof(state.registerSaveSyms));

    // Fill in the constants
    FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, value, &bailOutInfo->usedCapturedValues.constantValues, constantValuesIterator)
    {
        AssertMsg(bailOutInfo->bailOutRecord->bailOutKind != IR::BailOutForGeneratorYield, "constant prop syms unexpected for bail-in for generator yield");
        StackSym * stackSym = value.Key();
        // Arg-slot syms are handled in the out-param section below.
        if(stackSym->HasArgSlotNum())
        {
            continue;
        }
        Assert(stackSym->HasByteCodeRegSlot());
        Js::RegSlot i = stackSym->GetByteCodeRegSlot();
        Func * stackSymFunc = stackSym->GetByteCodeFunc();
        uint index = stackSymFunc->inlineDepth;
        Assert(i != Js::Constants::NoRegister);
        Assert(i < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
        Assert(index < funcCount);
        __analysis_assume(index < funcCount);
        Assert(funcBailOutData[index].func == stackSymFunc);
        Assert(!byteCodeUpwardExposedUsed->Test(stackSym->m_id));
        BailoutConstantValue constValue = value.Value();
        Js::Var varValue = constValue.ToVar(this->func);
        state.constantList.Prepend(varValue);
        AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
        // Constant offset are offset by the number of register save slots
        funcBailOutData[index].localOffsets[i] = state.constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
#if DBG_DUMP
        if(PHASE_DUMP(Js::BailOutPhase, this->func))
        {
            Output::Print(_u("Constant stack sym #%d (argOut:%s): "), i, IsTrueOrFalse(stackSym->HasArgSlotNum()));
            stackSym->Dump();
            Output::Print(_u(" (0x%p (Var) Offset: %d)\n"), varValue, funcBailOutData[index].localOffsets[i]);
        }
#endif
        constantValuesIterator.RemoveCurrent(this->func->m_alloc);
    }
    NEXT_SLISTBASE_ENTRY_EDITING;

    // Fill in the copy prop syms: restore the byte-code register from the
    // location of the copy-propagated sym instead.
    FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSyms, &bailOutInfo->usedCapturedValues.copyPropSyms, copyPropSymsIter)
    {
        AssertMsg(bailOutInfo->bailOutRecord->bailOutKind != IR::BailOutForGeneratorYield, "copy prop syms unexpected for bail-in for generator yield");
        StackSym * stackSym = copyPropSyms.Key();
        if(stackSym->HasArgSlotNum())
        {
            continue;
        }
        Js::RegSlot i = stackSym->GetByteCodeRegSlot();
        Func * stackSymFunc = stackSym->GetByteCodeFunc();
        uint index = stackSymFunc->inlineDepth;
        Assert(i != Js::Constants::NoRegister);
        Assert(i < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
        Assert(index < funcCount);
        __analysis_assume(index < funcCount);
        Assert(funcBailOutData[index].func == stackSymFunc);
        AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
        Assert(!byteCodeUpwardExposedUsed->Test(stackSym->m_id));
        StackSym * copyStackSym = copyPropSyms.Value();
        this->FillBailOutOffset(&funcBailOutData[index].localOffsets[i], copyStackSym, &state, instr);
        SetBitVectorIfTypeSpec(copyStackSym, i, funcBailOutData[index].losslessInt32Syms, funcBailOutData[index].float64Syms);
        copyPropSymsIter.RemoveCurrent(this->func->m_alloc);
    }
    NEXT_SLISTBASE_ENTRY_EDITING;

    // Fill in the upward exposed syms: byte-code registers whose values are
    // still live (in register or spilled) at this point.
    FOREACH_BITSET_IN_SPARSEBV(id, byteCodeUpwardExposedUsed)
    {
        StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
        Assert(stackSym != nullptr);
        Js::RegSlot i = stackSym->GetByteCodeRegSlot();
        Func * stackSymFunc = stackSym->GetByteCodeFunc();
        uint index = stackSymFunc->inlineDepth;
        Assert(i != Js::Constants::NoRegister);
        Assert(i < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
        Assert(index < funcCount);
        __analysis_assume(index < funcCount);
        Assert(funcBailOutData[index].func == stackSymFunc);
        AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
        this->FillBailOutOffset(&funcBailOutData[index].localOffsets[i], stackSym, &state, instr);
        SetBitVectorIfTypeSpec(stackSym, i, funcBailOutData[index].losslessInt32Syms, funcBailOutData[index].float64Syms);
    }
    NEXT_BITSET_IN_SPARSEBV;

    // Registers that held the (eliminated) arguments object are restored via
    // a special sentinel offset.
    if (bailOutInfo->usedCapturedValues.argObjSyms)
    {
        FOREACH_BITSET_IN_SPARSEBV(id, bailOutInfo->usedCapturedValues.argObjSyms)
        {
            StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
            Assert(stackSym != nullptr);
            Js::RegSlot i = stackSym->GetByteCodeRegSlot();
            Func * stackSymFunc = stackSym->GetByteCodeFunc();
            uint index = stackSymFunc->inlineDepth;
            Assert(i != Js::Constants::NoRegister);
            Assert(i < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
            Assert(index < funcCount);
            __analysis_assume(index < funcCount);
            Assert(funcBailOutData[index].func == stackSymFunc);
            AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
            funcBailOutData[index].localOffsets[i] = BailOutRecord::GetArgumentsObjectOffset();
        }
        NEXT_BITSET_IN_SPARSEBV;
    }

    // In the debug mode, fill in the rest of non temp locals as well in the records so that the restore stub will just get it automatically.
    if (this->func->IsJitInDebugMode())
    {
        // Need to allow filling the formal args slots.
        if (func->GetJITFunctionBody()->HasPropIdToFormalsMap())
        {
            Assert(func->GetJITFunctionBody()->GetInParamsCount() > 0);
            uint32 endIndex = min(func->GetJITFunctionBody()->GetFirstNonTempLocalIndex() + func->GetJITFunctionBody()->GetInParamsCount() - 1, func->GetJITFunctionBody()->GetEndNonTempLocalIndex());
            for (uint32 index = func->GetJITFunctionBody()->GetFirstNonTempLocalIndex(); index < endIndex; index++)
            {
                StackSym * stackSym = this->func->m_symTable->FindStackSym(index);
                if (stackSym != nullptr)
                {
                    Func * stackSymFunc = stackSym->GetByteCodeFunc();
                    Js::RegSlot regSlotId = stackSym->GetByteCodeRegSlot();
                    if (func->IsNonTempLocalVar(regSlotId))
                    {
                        // Only formals are filled here; other non-temp locals are skipped.
                        if (!func->GetJITFunctionBody()->IsRegSlotFormal(regSlotId - func->GetJITFunctionBody()->GetFirstNonTempLocalIndex()))
                        {
                            continue;
                        }
                        uint dataIndex = stackSymFunc->inlineDepth;
                        Assert(dataIndex == 0); // There is no inlining while in debug mode
                        // Filling in which are not filled already.
                        __analysis_assume(dataIndex == 0);
                        if (funcBailOutData[dataIndex].localOffsets[regSlotId] == 0)
                        {
                            int32 offset = GetStackOffset(regSlotId);
#ifdef MD_GROW_LOCALS_AREA_UP
                            Assert(offset >= 0);
#else
                            Assert(offset < 0);
#endif
                            funcBailOutData[dataIndex].localOffsets[regSlotId] = this->func->AdjustOffsetValue(offset);
                            // We don't support typespec for debug, rework on the below assert once we start support them.
                            Assert(!stackSym->IsTypeSpec());
                        }
                    }
                }
            }
        }
    }

    // fill in the out params
    uint startCallCount = bailOutInfo->startCallCount;
    if (bailOutInfo->totalOutParamCount != 0)
    {
        Assert(startCallCount != 0);
        uint argOutSlot = 0;
        uint * startCallOutParamCounts = (uint*)NativeCodeDataNewArrayNoFixup(allocator, UIntType<DataDesc_ArgOutOffsetInfo_StartCallOutParamCounts>, startCallCount);
#ifdef _M_IX86
        uint * startCallArgRestoreAdjustCounts = (uint*)NativeCodeDataNewArrayNoFixup(allocator, UIntType<DataDesc_ArgOutOffsetInfo_StartCallOutParamCounts>, startCallCount);
#endif
        NativeCodeData::AllocatorNoFixup<BVFixed>* allocatorT = (NativeCodeData::AllocatorNoFixup<BVFixed>*)allocator;
        BVFixed * argOutFloat64Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
        BVFixed * argOutLosslessInt32Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
#ifdef _M_IX86
        BVFixed * isOrphanedArgSlot = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
#endif
        int* outParamOffsets = bailOutInfo->outParamOffsets = (int*)NativeCodeDataNewArrayZNoFixup(allocator, IntType<DataDesc_BailoutInfo_CotalOutParamCount>, bailOutInfo->totalOutParamCount);
#ifdef _M_IX86
        int currentStackOffset = 0;
        bailOutInfo->outParamFrameAdjustArgSlot = JitAnew(this->func->m_alloc, BVSparse<JitArenaAllocator>, this->func->m_alloc);
#endif
        if (this->func->HasInlinee())
        {
            bailOutInfo->outParamInlinedArgSlot = JitAnew(this->func->m_alloc, BVSparse<JitArenaAllocator>, this->func->m_alloc);
        }

#if DBG
        uint lastFuncIndex = 0;
#endif

        for (uint i = 0; i < startCallCount; i++)
        {
            uint outParamStart = argOutSlot;                     // Start of the out param offset for the current start call
            // Number of out param for the current start call
            uint outParamCount = bailOutInfo->GetStartCallOutParamCount(i);
            startCallOutParamCounts[i] = outParamCount;
#ifdef _M_IX86
            if (bailOutInfo->startCallInfo[i].instr->m_opcode == Js::OpCode::StartCall)
            {
                // Deadcode might have eliminated the argouts and the call instruction due to a BailOnNoProfile after StartCall
                // In such cases, StartCall opcode is not changed to LoweredStartCall, mark the StartCall instruction accordingly
                bailOutInfo->startCallInfo[i].isOrphanedCall = true;
            }
            // Only x86 has a progression of pushes of out args, with stack alignment.
            bool fDoStackAdjust = false;
            if (!bailOutInfo->inlinedStartCall->Test(i))
            {
                // Only do the stack adjustment if the StartCall has not been moved down past the bailout.
                fDoStackAdjust = bailOutInfo->NeedsStartCallAdjust(i, instr);
                if (fDoStackAdjust)
                {
                    currentStackOffset -= Math::Align<int>(outParamCount * MachPtr, MachStackAlignment);
                }
            }
#endif

            Func * currentStartCallFunc = bailOutInfo->startCallFunc[i];
#if DBG
            // StartCalls must be ordered by increasing inline depth.
            Assert(lastFuncIndex <= currentStartCallFunc->inlineDepth);
            lastFuncIndex = currentStartCallFunc->inlineDepth;
#endif

            FuncBailOutData& currentFuncBailOutData = funcBailOutData[currentStartCallFunc->inlineDepth];
            BailOutRecord * currentBailOutRecord = currentFuncBailOutData.bailOutRecord;
            if (currentBailOutRecord->argOutOffsetInfo == nullptr)
            {
                currentBailOutRecord->argOutOffsetInfo = NativeCodeDataNew(allocator, BailOutRecord::ArgOutOffsetInfo);
                currentBailOutRecord->argOutOffsetInfo->argOutFloat64Syms = nullptr;
                currentBailOutRecord->argOutOffsetInfo->argOutLosslessInt32Syms = nullptr;
                currentBailOutRecord->argOutOffsetInfo->argOutSymStart = 0;
                currentBailOutRecord->argOutOffsetInfo->outParamOffsets = nullptr;
                currentBailOutRecord->argOutOffsetInfo->startCallOutParamCounts = nullptr;

#ifdef PROFILE_BAILOUT_RECORD_MEMORY
                if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
                {
                    this->func->GetScriptContext()->bailOutRecordBytes += sizeof(BailOutRecord::ArgOutOffsetInfo);
                }
#endif
            }

            currentBailOutRecord->argOutOffsetInfo->startCallCount++;
            if (currentBailOutRecord->argOutOffsetInfo->outParamOffsets == nullptr)
            {
                Assert(currentBailOutRecord->argOutOffsetInfo->startCallOutParamCounts == nullptr);
                currentBailOutRecord->argOutOffsetInfo->startCallIndex = i;
                currentBailOutRecord->argOutOffsetInfo->startCallOutParamCounts = &startCallOutParamCounts[i];
#ifdef _M_IX86
                currentBailOutRecord->argOutOffsetInfo->isOrphanedArgSlot = isOrphanedArgSlot;
                currentBailOutRecord->startCallArgRestoreAdjustCounts = &startCallArgRestoreAdjustCounts[i];
#endif
                currentBailOutRecord->argOutOffsetInfo->outParamOffsets = &outParamOffsets[outParamStart];
                currentBailOutRecord->argOutOffsetInfo->argOutSymStart = outParamStart;
                currentBailOutRecord->argOutOffsetInfo->argOutFloat64Syms = argOutFloat64Syms;
                currentBailOutRecord->argOutOffsetInfo->argOutLosslessInt32Syms = argOutLosslessInt32Syms;
            }
#if DBG_DUMP
            if (PHASE_DUMP(Js::BailOutPhase, this->func))
            {
                Output::Print(_u("Bailout function: %s [#%d] \n"), currentStartCallFunc->GetJITFunctionBody()->GetDisplayName(),
                    currentStartCallFunc->GetJITFunctionBody()->GetFunctionNumber());
            }
#endif
            for (uint j = 0; j < outParamCount; j++, argOutSlot++)
            {
                StackSym * sym = bailOutInfo->argOutSyms[argOutSlot];
                if (sym == nullptr)
                {
                    // This can happen when instr with bailout occurs before all ArgOuts for current call instr are processed.
                    continue;
                }

                Assert(sym->GetArgSlotNum() > 0 && sym->GetArgSlotNum() <= outParamCount);
                uint argSlot = sym->GetArgSlotNum() - 1;
                uint outParamOffsetIndex = outParamStart + argSlot;
                if (!sym->m_isBailOutReferenced && !sym->IsArgSlotSym())
                {
                    // The arg value only exists as a captured constant or copy-prop sym.
                    FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, constantValue, &bailOutInfo->usedCapturedValues.constantValues, iterator)
                    {
                        if (constantValue.Key()->m_id == sym->m_id)
                        {
                            Js::Var varValue = constantValue.Value().ToVar(func);
                            state.constantList.Prepend(varValue);
                            outParamOffsets[outParamOffsetIndex] = state.constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();

#if DBG_DUMP
                            if (PHASE_DUMP(Js::BailOutPhase, this->func))
                            {
                                Output::Print(_u("OutParam #%d: "), argSlot);
                                sym->Dump();
                                Output::Print(_u(" (0x%p (Var)))\n"), varValue);
                            }
#endif
                            iterator.RemoveCurrent(func->m_alloc);
                            break;
                        }
                    }
                    NEXT_SLISTBASE_ENTRY_EDITING;

                    if (outParamOffsets[outParamOffsetIndex])
                    {
                        continue;
                    }

                    FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSym, &bailOutInfo->usedCapturedValues.copyPropSyms, iter)
                    {
                        if (copyPropSym.Key()->m_id == sym->m_id)
                        {
                            StackSym * copyStackSym = copyPropSym.Value();
                            BVSparse<JitArenaAllocator>* argObjSyms = bailOutInfo->usedCapturedValues.argObjSyms;
                            if (argObjSyms && argObjSyms->Test(copyStackSym->m_id))
                            {
                                outParamOffsets[outParamOffsetIndex] = BailOutRecord::GetArgumentsObjectOffset();
                            }
                            else
                            {
                                this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], copyStackSym, &state, instr);
                                SetBitVectorIfTypeSpec(copyStackSym, outParamOffsetIndex, argOutLosslessInt32Syms, argOutFloat64Syms);
                            }

#if DBG_DUMP
                            if (PHASE_DUMP(Js::BailOutPhase, this->func))
                            {
                                Output::Print(_u("OutParam #%d: "), argSlot);
                                sym->Dump();
                                Output::Print(_u(" Copy Prop sym:"));
                                copyStackSym->Dump();
                                Output::Print(_u("\n"));
                            }
#endif
                            iter.RemoveCurrent(func->m_alloc);
                            break;
                        }
                    }
                    NEXT_SLISTBASE_ENTRY_EDITING;

                    Assert(outParamOffsets[outParamOffsetIndex] != 0);
                }
                else
                {
                    if (sym->IsArgSlotSym())
                    {
                        if (sym->m_isSingleDef)
                        {
                            Assert(sym->m_instrDef->m_func == currentStartCallFunc);

                            IR::Instr * instrDef = sym->m_instrDef;
                            Assert(LowererMD::IsAssign(instrDef));

                            if (instrDef->GetNumber() < instr->GetNumber())
                            {
                                // The ArgOut instr is above current bailout instr.
                                AssertMsg(sym->IsVar(), "Arg out slot can only be var.");
                                if (sym->m_isInlinedArgSlot)
                                {
                                    Assert(this->func->HasInlinee());
#ifdef MD_GROW_LOCALS_AREA_UP
                                    outParamOffsets[outParamOffsetIndex] = -((int)sym->m_offset + BailOutInfo::StackSymBias);
#else
                                    outParamOffsets[outParamOffsetIndex] = sym->m_offset;
#endif
                                    bailOutInfo->outParamInlinedArgSlot->Set(outParamOffsetIndex);
                                }
                                else if (sym->m_isOrphanedArg)
                                {
#ifdef MD_GROW_LOCALS_AREA_UP
                                    outParamOffsets[outParamOffsetIndex] = -((int)sym->m_offset + BailOutInfo::StackSymBias);
#else
                                    // Stack offset are negative, includes the PUSH EBP and return address
                                    outParamOffsets[outParamOffsetIndex] = sym->m_offset - (2 * MachPtr);
#endif
#ifdef _M_IX86
                                    isOrphanedArgSlot->Set(outParamOffsetIndex);
                                    Assert(bailOutInfo->startCallInfo[i].isOrphanedCall == true);
#endif
                                }
#ifdef _M_IX86
                                else if (fDoStackAdjust)
                                {
                                    // If we've got args on the stack, then we must have seen (and adjusted for) the StartCall.
                                    // The values is already on the stack
                                    // On AMD64/ARM, ArgOut should have been moved next to the call, and shouldn't have bailout between them
                                    // Except for inlined arg outs
                                    outParamOffsets[outParamOffsetIndex] = currentStackOffset + argSlot * MachPtr;
                                    bailOutInfo->outParamFrameAdjustArgSlot->Set(outParamOffsetIndex);
                                }
#endif
                                else
                                {
                                    this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], sym, &state, instr);
                                }
                            }
                            else
                            {
                                // The ArgOut instruction might have moved down right next to the call,
                                // because of a register calling convention, cloning, etc.  This loop walks the chain
                                // of assignments to try to find the original location of the assignment where
                                // the value is available.
                                while (!sym->IsConst())
                                {
                                    // the value is in the register
                                    IR::RegOpnd * regOpnd = instrDef->GetSrc1()->AsRegOpnd();
                                    sym = regOpnd->m_sym;

                                    if (sym->scratch.linearScan.lifetime->start < instr->GetNumber())
                                    {
                                        break;
                                    }

                                    if (sym->m_isEncodedConstant)
                                    {
                                        break;
                                    }
                                    // For out parameter we might need to follow multiple assignments
                                    Assert(sym->m_isSingleDef);
                                    instrDef = sym->m_instrDef;
                                    Assert(LowererMD::IsAssign(instrDef));
                                }

                                if (bailOutInfo->usedCapturedValues.argObjSyms && bailOutInfo->usedCapturedValues.argObjSyms->Test(sym->m_id))
                                {
                                    //foo.apply(this,arguments) case and we bailout when the apply is overridden. We need to restore the arguments object.
                                    outParamOffsets[outParamOffsetIndex] = BailOutRecord::GetArgumentsObjectOffset();
                                }
                                else
                                {
                                    this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], sym, &state, instr);
                                }
                            }
                        }
                    }
                    else
                    {
                        this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], sym, &state, instr);
                    }

                    SetBitVectorIfTypeSpec(sym, outParamOffsetIndex, argOutLosslessInt32Syms, argOutFloat64Syms);
#if DBG_DUMP
                    if (PHASE_DUMP(Js::BailOutPhase, this->func))
                    {
                        Output::Print(_u("OutParam #%d: "), argSlot);
                        sym->Dump();
                        Output::Print(_u("\n"));
                    }
#endif
                }
            }
        }

        // Walk the start calls innermost-out to accumulate the x86 esp
        // adjustment needed when restoring argouts from an EH region.
        for (int i = startCallCount - 1; i >= 0; i--)
        {
#ifdef _M_IX86
            uint argRestoreAdjustCount = 0;
            if (this->currentRegion && (this->currentRegion->GetType() == RegionTypeTry || this->currentRegion->GetType() == RegionTypeFinally))
            {
                // For a bailout in argument evaluation from an EH region, the esp is offset by the TryCatch helper's frame. So, the argouts are not actually pushed at the
                // offsets stored in the bailout record, which are relative to ebp. Need to restore the argouts from the actual value of esp before calling the Bailout helper.
                // For nested calls, argouts for the outer call need to be restored from an offset of stack-adjustment-done-by-the-inner-call from esp.
                if ((unsigned)(i + 1) == bailOutInfo->startCallCount)
                {
                    argRestoreAdjustCount = 0;
                }
                else
                {
                    uint argCount = bailOutInfo->startCallInfo[i + 1].isOrphanedCall ? 0 : bailOutInfo->startCallInfo[i + 1].argCount;
                    argRestoreAdjustCount = bailOutInfo->startCallInfo[i + 1].argRestoreAdjustCount + argCount;
                    if ((Math::Align<int32>(argCount * MachPtr, MachStackAlignment) - (argCount * MachPtr)) != 0)
                    {
                        argRestoreAdjustCount++;
                    }
                }
                bailOutInfo->startCallInfo[i].argRestoreAdjustCount = argRestoreAdjustCount;
            }
            startCallArgRestoreAdjustCounts[i] = bailOutInfo->startCallInfo[i].argRestoreAdjustCount;
#endif
        }
    }
    else
    {
        Assert(bailOutInfo->argOutSyms == nullptr);
        Assert(bailOutInfo->startCallCount == 0);
    }

    if (this->currentBlock->inlineeStack.Count() > 0)
    {
        this->SpillInlineeArgs(instr);
    }
    else
    {
        // There is a chance that the instruction was hoisting from an inlinee func
        // but if there are no inlinee frames - make sure the instr belongs to the outer func
        // to ensure encoder does not encode an inline frame here - which does not really exist
        instr->m_func = this->func;
    }

    linearScanMD.GenerateBailOut(instr, state.registerSaveSyms, _countof(state.registerSaveSyms));

    // generate the constant table
    // (constantList was prepended-to, so drain it back-to-front to keep the
    // recorded 1-based offsets consistent with the final array order)
    Js::Var * constants = NativeCodeDataNewArrayNoFixup(allocator, Js::Var, state.constantList.Count());
    uint constantCount = state.constantList.Count();
    while (!state.constantList.Empty())
    {
        Js::Var value = state.constantList.Head();
        state.constantList.RemoveHead();
        constants[state.constantList.Count()] = value;
    }

    // Generate the stack literal bail out info
    FillStackLiteralBailOutRecord(instr, bailOutInfo, funcBailOutData, funcCount);

    // Finalize each per-function record: share the constant table and fold
    // local offsets into the per-function global tables.
    for (uint i = 0; i < funcCount; i++)
    {
        funcBailOutData[i].bailOutRecord->constants = constants;
#if DBG
        funcBailOutData[i].bailOutRecord->inlineDepth = funcBailOutData[i].func->inlineDepth;
        funcBailOutData[i].bailOutRecord->constantCount = constantCount;
#endif
        uint32 tableIndex = funcBailOutData[i].func->IsTopFunc() ? 0 : funcBailOutData[i].func->m_inlineeId;
        funcBailOutData[i].FinalizeLocalOffsets(tempAlloc, this->globalBailOutRecordTables[tableIndex], &(this->lastUpdatedRowIndices[tableIndex]));
#if DBG_DUMP
        if(PHASE_DUMP(Js::BailOutPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            Output::Print(_u("Bailout function: %s [%s]\n"), funcBailOutData[i].func->GetJITFunctionBody()->GetDisplayName(), funcBailOutData[i].func->GetDebugNumberSet(debugStringBuffer), i);
            funcBailOutData[i].bailOutRecord->Dump();
        }
#endif
        funcBailOutData[i].Clear(this->tempAlloc);

#ifdef PROFILE_BAILOUT_RECORD_MEMORY
        if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
        {
            this->func->GetScriptContext()->bailOutRecordBytes += sizeof(BailOutRecord);
        }
#endif
    }
    JitAdeleteArray(this->tempAlloc, funcCount, funcBailOutData);
}
  1730. template <typename Fn>
  1731. void
  1732. LinearScan::ForEachStackLiteralBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo, FuncBailOutData * funcBailOutData, uint funcCount, Fn fn)
  1733. {
  1734. for (uint i = 0; i < bailOutInfo->stackLiteralBailOutInfoCount; i++)
  1735. {
  1736. BailOutInfo::StackLiteralBailOutInfo& stackLiteralBailOutInfo = bailOutInfo->stackLiteralBailOutInfo[i];
  1737. StackSym * stackSym = stackLiteralBailOutInfo.stackSym;
  1738. Assert(stackSym->scratch.linearScan.lifetime->start < instr->GetNumber());
  1739. Assert(stackSym->scratch.linearScan.lifetime->end >= instr->GetNumber());
  1740. Js::RegSlot regSlot = stackSym->GetByteCodeRegSlot();
  1741. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1742. uint index = stackSymFunc->inlineDepth;
  1743. Assert(regSlot != Js::Constants::NoRegister);
  1744. Assert(regSlot < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
  1745. Assert(index < funcCount);
  1746. Assert(funcBailOutData[index].func == stackSymFunc);
  1747. Assert(funcBailOutData[index].localOffsets[regSlot] != 0);
  1748. fn(index, stackLiteralBailOutInfo, regSlot);
  1749. }
  1750. }
  1751. void
  1752. LinearScan::FillStackLiteralBailOutRecord(IR::Instr * instr, BailOutInfo * bailOutInfo, FuncBailOutData * funcBailOutData, uint funcCount)
  1753. {
  1754. if (bailOutInfo->stackLiteralBailOutInfoCount)
  1755. {
  1756. // Count the data
  1757. ForEachStackLiteralBailOutInfo(instr, bailOutInfo, funcBailOutData, funcCount,
  1758. [=](uint funcIndex, BailOutInfo::StackLiteralBailOutInfo& stackLiteralBailOutInfo, Js::RegSlot regSlot)
  1759. {
  1760. funcBailOutData[funcIndex].bailOutRecord->stackLiteralBailOutRecordCount++;
  1761. });
  1762. // Allocate the data
  1763. NativeCodeData::Allocator * allocator = this->func->GetNativeCodeDataAllocator();
  1764. for (uint i = 0; i < funcCount; i++)
  1765. {
  1766. uint stackLiteralBailOutRecordCount = funcBailOutData[i].bailOutRecord->stackLiteralBailOutRecordCount;
  1767. if (stackLiteralBailOutRecordCount)
  1768. {
  1769. funcBailOutData[i].bailOutRecord->stackLiteralBailOutRecord =
  1770. NativeCodeDataNewArrayNoFixup(allocator, BailOutRecord::StackLiteralBailOutRecord, stackLiteralBailOutRecordCount);
  1771. // reset the count so we can track how much we have filled below
  1772. funcBailOutData[i].bailOutRecord->stackLiteralBailOutRecordCount = 0;
  1773. }
  1774. }
  1775. // Fill out the data
  1776. ForEachStackLiteralBailOutInfo(instr, bailOutInfo, funcBailOutData, funcCount,
  1777. [=](uint funcIndex, BailOutInfo::StackLiteralBailOutInfo& stackLiteralBailOutInfo, Js::RegSlot regSlot)
  1778. {
  1779. uint& recordIndex = funcBailOutData[funcIndex].bailOutRecord->stackLiteralBailOutRecordCount;
  1780. BailOutRecord::StackLiteralBailOutRecord& stackLiteralBailOutRecord =
  1781. funcBailOutData[funcIndex].bailOutRecord->stackLiteralBailOutRecord[recordIndex++];
  1782. stackLiteralBailOutRecord.regSlot = regSlot;
  1783. stackLiteralBailOutRecord.initFldCount = stackLiteralBailOutInfo.initFldCount;
  1784. });
  1785. }
  1786. }
  1787. void
  1788. LinearScan::PrepareForUse(Lifetime * lifetime)
  1789. {
  1790. if (lifetime->isOpHelperSpilled)
  1791. {
  1792. // using a value in a helper that has been spilled in the helper block.
  1793. // Just spill it for real
  1794. // We must be in a helper block and the lifetime must
  1795. // start before the helper block
  1796. Assert(this->IsInHelperBlock());
  1797. Assert(lifetime->start < this->HelperBlockStartInstrNumber());
  1798. IR::Instr *insertionInstr = this->currentOpHelperBlock->opHelperLabel;
  1799. this->RemoveOpHelperSpilled(lifetime);
  1800. this->SpillLiveRange(lifetime, insertionInstr);
  1801. }
  1802. }
// LinearScan::RecordUse
// Record a register use of 'lifetime' at 'instr' so that, if the lifetime is
// later spilled, we know whether (and where) reloads must be inserted, and can
// keep the lifetime's spill-cost accounting accurate.
void
LinearScan::RecordUse(Lifetime * lifetime, IR::Instr * instr, IR::RegOpnd * regOpnd, bool isFromBailout)
{
    // Cost this use would add if we had to reload it after a spill; weighted by
    // loop nesting, discounted for helper blocks and bailout-originated uses.
    uint32 useCountCost = LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr || isFromBailout));

    // We only spill at the use for constants (i.e. reload) or for functions with try blocks. We don't
    // have real accurate flow info for the latter.
    if ((regOpnd && regOpnd->m_sym->IsConst())
        || (
            (this->func->HasTry() && !this->func->DoOptimizeTry()) &&
            this->IsInLoop() &&
            lifetime->lastUseLabel != this->lastLabel &&
            this->liveOnBackEdgeSyms->Test(lifetime->sym->m_id) &&
            !(lifetime->previousDefBlockNumber == currentBlockNumber && !lifetime->defList.Empty())
        ))
    {
        // Keep track of all the uses of this lifetime in case we decide to spill it.
        // Note that we won't need to insert reloads if the use are not in a loop,
        // unless it is a const. We always reload const instead of spilling to the stack.
        //
        // We also don't need to insert reloads if the previous use was in the same basic block (the first use in the block
        // would have done the reload), or the previous def is in the same basic block and the value is still live. Furthermore,
        // if the previous def is in the same basic block, the value is still live, and there's another def after this use in
        // the same basic block, the previous def may not do a spill store, so we must not reload the value from the stack.
        lifetime->useList.Prepend(instr);
        lifetime->lastUseLabel = this->lastLabel;
        lifetime->AddToUseCountAdjust(useCountCost, this->curLoop, this->func);
    }
    else
    {
        if (!isFromBailout)
        {
            // Since we won't reload this use if the lifetime gets spilled, adjust the spill cost to reflect this.
            lifetime->SubFromUseCount(useCountCost, this->curLoop);
        }
    }

    // Inside a loop, also record the register use on the enclosing loops so
    // back-edge compensation can be avoided if the reg is never actually used.
    if (this->IsInLoop())
    {
        this->RecordLoopUse(lifetime, lifetime->reg);
    }
}
  1843. void LinearScan::RecordLoopUse(Lifetime *lifetime, RegNum reg)
  1844. {
  1845. if (!this->IsInLoop())
  1846. {
  1847. return;
  1848. }
  1849. if (this->func->HasTry() && !this->func->DoOptimizeTry())
  1850. {
  1851. return;
  1852. }
  1853. // Record on each loop which register live into the loop ended up being used.
  1854. // We are trying to avoid the need for compensation at the bottom of the loop if
  1855. // the reg ends up being spilled before it is actually used.
  1856. Loop *curLoop = this->curLoop;
  1857. SymID symId = SymID_Invalid;
  1858. if (lifetime)
  1859. {
  1860. symId = lifetime->sym->m_id;
  1861. }
  1862. while (curLoop)
  1863. {
  1864. // Note that if the lifetime is spilled and reallocated to the same register,
  1865. // will mark it as used when we shouldn't. However, it is hard at this point to handle
  1866. // the case were a flow edge from the previous allocation merges in with the new allocation.
  1867. // No compensation is inserted to let us know with previous lifetime needs reloading at the bottom of the loop...
  1868. if (lifetime && curLoop->regAlloc.loopTopRegContent[reg] == lifetime)
  1869. {
  1870. curLoop->regAlloc.symRegUseBv->Set(symId);
  1871. }
  1872. curLoop->regAlloc.regUseBv.Set(reg);
  1873. curLoop = curLoop->parent;
  1874. }
  1875. }
// LinearScan::RecordDef
// Record a definition of 'lifetime' at 'instr'. Handles write-through syms
// (debug locals and EH write-throughs) by storing to the stack immediately;
// otherwise tracks the def so a spill store can be inserted later if needed.
void
LinearScan::RecordDef(Lifetime *const lifetime, IR::Instr *const instr, const uint32 useCountCost)
{
    Assert(lifetime);
    Assert(instr);
    Assert(instr->GetDst());
    IR::RegOpnd * regOpnd = instr->GetDst()->AsRegOpnd();
    Assert(regOpnd);

    StackSym *const sym = regOpnd->m_sym;

    // Mark the sym as defined in (and the reg as used by) every enclosing loop.
    if (this->IsInLoop())
    {
        Loop *curLoop = this->curLoop;
        while (curLoop)
        {
            curLoop->regAlloc.defdInLoopBv->Set(lifetime->sym->m_id);
            curLoop->regAlloc.regUseBv.Set(lifetime->reg);
            curLoop = curLoop->parent;
        }
    }

    // A spilled lifetime already lives on the stack; nothing more to track.
    if (lifetime->isSpilled)
    {
        return;
    }

    if (this->NeedsWriteThrough(sym))
    {
        if (this->IsSymNonTempLocalVar(sym))
        {
            // In the debug mode, we will write through on the stack location.
            WriteThroughForLocal(regOpnd, lifetime, instr);
        }
        else
        {
            // If this is a write-through sym, it should be live on the entry to 'try' and should have already
            // been allocated when we spilled all active lifetimes there.
            // If it was not part of the active lifetimes on entry to the 'try' then it must have been spilled
            // earlier and should have stack allocated for it.
            Assert(this->NeedsWriteThroughForEH(sym) && sym->IsAllocated());
            this->InsertStore(instr, sym, lifetime->reg);
        }

        // No need to record-def further as we already have stack allocated for it.
        return;
    }

    if (sym->m_isSingleDef)
    {
        lifetime->AddToUseCount(useCountCost, this->curLoop, this->func);
        // the def of a single-def sym is already on the sym
        return;
    }

    if(lifetime->previousDefBlockNumber == currentBlockNumber && !lifetime->defList.Empty())
    {
        // Only keep track of the last def in each basic block. When there are multiple defs of a sym in a basic block, upon
        // spill of that sym, a store needs to be inserted only after the last def of the sym.
        Assert(lifetime->defList.Head()->GetDst()->AsRegOpnd()->m_sym == sym);
        lifetime->defList.Head() = instr;
    }
    else
    {
        // First def of this sym in the current basic block
        lifetime->previousDefBlockNumber = currentBlockNumber;
        lifetime->defList.Prepend(instr);

        // Keep track of the cost of reinserting all the defs if we choose to spill this way.
        lifetime->allDefsCost += useCountCost;
    }
}
// LinearScan::SetUse
// Resolve a register use: if the lifetime is spilled, try (in order) a temp
// reg already holding the value, const folding, second-chance allocation,
// operand folding, forced second chance, and finally a fresh temp reg with a
// reload. Then record the use and assign the physical register on the opnd.
void
LinearScan::SetUse(IR::Instr *instr, IR::RegOpnd *regOpnd)
{
    // Opnd already bound to a physical register: just note the reg use in loops.
    if (regOpnd->GetReg() != RegNOREG)
    {
        this->RecordLoopUse(nullptr, regOpnd->GetReg());
        return;
    }

    StackSym *sym = regOpnd->m_sym;
    Lifetime * lifetime = sym->scratch.linearScan.lifetime;

    // Convert a helper-spill into a real spill if needed (see PrepareForUse).
    this->PrepareForUse(lifetime);

    if (lifetime->isSpilled)
    {
        // See if it has been loaded in this basic block
        RegNum reg = this->GetAssignedTempReg(lifetime, regOpnd->GetType());
        if (reg == RegNOREG)
        {
            // Constants may be foldable directly into the instruction encoding.
            if (sym->IsConst() && EncoderMD::TryConstFold(instr, regOpnd))
            {
                return;
            }

            reg = this->SecondChanceAllocation(lifetime, false);
            if (reg != RegNOREG)
            {
                // Hoist the reload as far up as profitable before inserting it.
                IR::Instr *insertInstr = this->TryHoistLoad(instr, lifetime);

                this->InsertLoad(insertInstr, sym, reg);
            }
            else
            {
                // Try folding if there are no registers available
                if (!sym->IsConst() && !this->RegsAvailable(regOpnd->GetType()) && EncoderMD::TryFold(instr, regOpnd))
                {
                    return;
                }

                // We need a reg no matter what. Try to force second chance to re-allocate this.
                reg = this->SecondChanceAllocation(lifetime, true);
                if (reg == RegNOREG)
                {
                    // Forcing second chance didn't work.
                    // Allocate a new temp reg for it
                    reg = this->FindReg(nullptr, regOpnd);
                    this->AssignTempReg(lifetime, reg);
                }

                this->InsertLoad(instr, sym, reg);
            }
        }
    }
    if (!lifetime->isSpilled && instr->GetNumber() < lifetime->end)
    {
        // Don't bother to record the use if this is the last use of the lifetime.
        this->RecordUse(lifetime, instr, regOpnd);
    }
    else
    {
        lifetime->SubFromUseCount(LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr)), this->curLoop);
    }

    // Mark the reg as used by this instruction and bind it onto the operand.
    this->instrUseRegs.Set(lifetime->reg);

    this->SetReg(regOpnd);
}
  2000. // LinearScan::SetReg
  2001. void
  2002. LinearScan::SetReg(IR::RegOpnd *regOpnd)
  2003. {
  2004. if (regOpnd->GetReg() == RegNOREG)
  2005. {
  2006. RegNum reg = regOpnd->m_sym->scratch.linearScan.lifetime->reg;
  2007. AssertMsg(reg != RegNOREG, "Reg should be allocated here...");
  2008. regOpnd->SetReg(reg);
  2009. }
  2010. }
  2011. bool
  2012. LinearScan::SkipNumberedInstr(IR::Instr *instr)
  2013. {
  2014. if (instr->IsLabelInstr())
  2015. {
  2016. if (instr->AsLabelInstr()->m_isLoopTop)
  2017. {
  2018. Assert(instr->GetNumber() != instr->m_next->GetNumber()
  2019. && (instr->GetNumber() != instr->m_prev->GetNumber() || instr->m_prev->m_opcode == Js::OpCode::Nop));
  2020. }
  2021. else
  2022. {
  2023. return true;
  2024. }
  2025. }
  2026. return false;
  2027. }
// LinearScan::EndDeadLifetimes
// Look for lifetimes that are ending here, and retire them.
// On a loop back edge, lifetimes live on the back edge are kept alive so
// compensation can find them. Also releases stack slots of ending spilled
// lifetimes back to the stack-packing free list.
void
LinearScan::EndDeadLifetimes(IR::Instr *instr, bool isLoopBackEdge)
{
    if (this->SkipNumberedInstr(instr))
    {
        return;
    }

    // Retire all active lifetimes ending at this instruction.
    // activeLiveranges is kept sorted by 'end', so we can stop at the first
    // lifetime that extends past this instruction.
    FOREACH_SLIST_ENTRY_EDITING(Lifetime *, deadLifetime, this->activeLiveranges, iter)
    {
        if (deadLifetime->end > instr->GetNumber())
        {
            break;
        }

        // Keep back-edge-live syms active so loop compensation still sees them.
        if (isLoopBackEdge && this->curLoop->regAlloc.liveOnBackEdgeSyms->Test(deadLifetime->sym->m_id))
        {
            continue;
        }
        deadLifetime->defList.Clear();
        deadLifetime->useList.Clear();

        // Free the register and the bookkeeping tied to it.
        RegNum reg = deadLifetime->reg;
        this->activeRegs.Clear(reg);
        this->regContent[reg] = nullptr;
        this->secondChanceRegs.Clear(reg);
        if (RegTypes[reg] == TyMachReg)
        {
            this->intRegUsedCount--;
        }
        else
        {
            Assert(RegTypes[reg] == TyFloat64);
            this->floatRegUsedCount--;
        }
        iter.RemoveCurrent();
    } NEXT_SLIST_ENTRY_EDITING;

    // Look for spilled lifetimes which end here such that we can make their stack slot
    // available for stack-packing.
    FOREACH_SLIST_ENTRY_EDITING(Lifetime *, deadStackPack, this->stackPackInUseLiveRanges, stackPackIter)
    {
        if (deadStackPack->end > instr->GetNumber())
        {
            break;
        }

        if (isLoopBackEdge && this->curLoop->regAlloc.liveOnBackEdgeSyms->Test(deadStackPack->sym->m_id))
        {
            continue;
        }
        deadStackPack->defList.Clear();
        deadStackPack->useList.Clear();

        if (!deadStackPack->cantStackPack)
        {
            Assert(deadStackPack->spillStackSlot);
            // Record when the slot was last used so it is only reused by
            // lifetimes starting after this point.
            deadStackPack->spillStackSlot->lastUse = deadStackPack->end;
            this->stackSlotsFreeList->Push(deadStackPack->spillStackSlot);
        }
        stackPackIter.RemoveCurrent();
    } NEXT_SLIST_ENTRY_EDITING;
}
// LinearScan::EndDeadOpHelperLifetimes
// Retire helper-spilled lifetimes that end at or before 'instr'. Since such a
// lifetime doesn't extend beyond the helper block, no save/restore code is
// needed for it around the block; just clear its helper-spill state.
void
LinearScan::EndDeadOpHelperLifetimes(IR::Instr * instr)
{
    if (this->SkipNumberedInstr(instr))
    {
        return;
    }

    // opHelperSpilledLiveranges is ordered by 'end'; pop from the head while
    // lifetimes have ended.
    while (!this->opHelperSpilledLiveranges->Empty() &&
           this->opHelperSpilledLiveranges->Head()->end <= instr->GetNumber())
    {
        Lifetime * deadLifetime;

        // The lifetime doesn't extend beyond the helper block
        // No need to save and restore around the helper block
        Assert(this->IsInHelperBlock());

        deadLifetime = this->opHelperSpilledLiveranges->Head();
        this->opHelperSpilledLiveranges->RemoveHead();
        if (!deadLifetime->cantOpHelperSpill)
        {
            // Its register no longer needs a restore at the helper block end.
            this->opHelperSpilledRegs.Clear(deadLifetime->reg);
        }
        deadLifetime->isOpHelperSpilled = false;
        deadLifetime->cantOpHelperSpill = false;
        deadLifetime->isOpHelperSpillAsArg = false;
    }
}
// LinearScan::AllocateNewLifetimes
// Look for lifetimes coming live, and allocate a register for them.
// Also seeds register preferences for copy instructions so peeps can later
// remove the MOV when source and destination end up in the same register.
void
LinearScan::AllocateNewLifetimes(IR::Instr *instr)
{
    if (this->SkipNumberedInstr(instr))
    {
        return;
    }

    // Try to catch:
    //      x = MOV y(r1)
    // where y's lifetime just ended and x's lifetime is starting.
    // If so, set r1 as a preferred register for x, which may allow peeps to remove the MOV
    if (instr->GetSrc1() && instr->GetSrc1()->IsRegOpnd() && LowererMD::IsAssign(instr) && instr->GetDst() && instr->GetDst()->IsRegOpnd() && instr->GetDst()->AsRegOpnd()->m_sym)
    {
        IR::RegOpnd *src = instr->GetSrc1()->AsRegOpnd();
        StackSym *srcSym = src->m_sym;
        // If src is a physReg ref, or src's lifetime ends here.
        if (!srcSym || srcSym->scratch.linearScan.lifetime->end == instr->GetNumber())
        {
            Lifetime *dstLifetime = instr->GetDst()->AsRegOpnd()->m_sym->scratch.linearScan.lifetime;
            if (dstLifetime)
            {
                dstLifetime->regPreference.Set(src->GetReg());
            }
        }
    }

    // Look for starting lifetimes. lifetimeList is ordered by 'start'.
    while (!this->lifetimeList->Empty() && this->lifetimeList->Head()->start <= instr->GetNumber())
    {
        // We're at the start of a new live range
        Lifetime * newLifetime = this->lifetimeList->Head();
        newLifetime->lastAllocationStart = instr->GetNumber();

        this->lifetimeList->RemoveHead();

        if (newLifetime->dontAllocate)
        {
            // Lifetime spilled before beginning allocation (e.g., a lifetime known to span
            // multiple EH regions.) Do the work of spilling it now without adding it to the list.
            this->SpillLiveRange(newLifetime);
            continue;
        }

        RegNum reg;
        if (newLifetime->reg == RegNOREG)
        {
            if (newLifetime->isDeadStore)
            {
                // No uses, let's not waste a reg.
                newLifetime->isSpilled = true;
                continue;
            }

            reg = this->FindReg(newLifetime, nullptr);
        }
        else
        {
            // This lifetime is already assigned a physical register. Make
            // sure that register is available by calling SpillReg
            reg = newLifetime->reg;

            // If we're in a helper block, the physical register we're trying to ensure is available might get helper
            // spilled. Don't allow that if this lifetime's end lies beyond the end of the helper block because
            // spill code assumes that this physical register isn't active at the end of the helper block when it tries
            // to restore it. So we'd have to really spill the lifetime then anyway.
            this->SpillReg(reg, IsInHelperBlock() ? (newLifetime->end > currentOpHelperBlock->opHelperEndInstr->GetNumber()) : false);
            newLifetime->cantSpill = true;
        }

        // If we did get a register for this lifetime, add it to the active set.
        if (newLifetime->isSpilled == false)
        {
            this->AssignActiveReg(newLifetime, reg);
        }
    }
}
// LinearScan::FindReg
// Look for an available register. If one isn't available, spill something.
// Note that the newLifetime passed in could be the one we end up spilling.
// Exactly one of 'newLifetime' / 'regOpnd' is used to determine the register
// class and size constraints; 'force' suppresses the spill-cost comparison so
// a register must be produced.
RegNum
LinearScan::FindReg(Lifetime *newLifetime, IR::RegOpnd *regOpnd, bool force)
{
    BVIndex regIndex = BVInvalidIndex;
    bool isRegAvailable = true;
    bool tryCallerSavedRegs = false;
    BitVector callerSavedAvailableBv;

    // First, a cheap count-based check of whether any register of the right
    // class can possibly be free.
    if (newLifetime)
    {
        if (newLifetime->IsInt())
        {
            isRegAvailable = this->intRegUsedCount < INT_REG_COUNT;
        }
        else
        {
            isRegAvailable = this->floatRegUsedCount < FLOAT_REG_COUNT;
        }
    }
    else
    {
        Assert(regOpnd);
        isRegAvailable = this->RegsAvailable(regOpnd->GetType());
    }

    if (isRegAvailable)
    {
        // Build the set of free registers: everything not active, not used by
        // the current instruction, and not reserved for call setup.
        BitVector regsBv;
        regsBv.Copy(this->activeRegs);
        regsBv.Or(this->instrUseRegs);
        regsBv.Or(this->callSetupRegs);
        regsBv.ComplimentAll();

        if (newLifetime)
        {
            if (this->IsInHelperBlock())
            {
                if (newLifetime->end >= this->HelperBlockEndInstrNumber())
                {
                    // this lifetime goes beyond the helper function
                    // We need to exclude the helper spilled register as well.
                    regsBv.Minus(this->opHelperSpilledRegs);
                }
            }

            if (newLifetime->IsInt())
            {
                regsBv.And(this->int32Regs);
                regsBv = this->linearScanMD.FilterRegIntSizeConstraints(regsBv, newLifetime->intUsageBv);
            }
            else
            {
#ifdef _M_IX86
                Assert(AutoSystemInfo::Data.SSE2Available());
#endif
                regsBv.And(this->floatRegs);
            }

            if (newLifetime->isLiveAcrossCalls)
            {
                // Try to find a callee saved regs
                BitVector regsBvTemp = regsBv;
                regsBvTemp.And(this->calleeSavedRegs);

                regIndex = GetPreferencedRegIndex(newLifetime, regsBvTemp);

                if (regIndex == BVInvalidIndex)
                {
                    if (!newLifetime->isLiveAcrossUserCalls)
                    {
                        // No callee saved regs is found and the lifetime only across helper
                        // calls, we can also use a caller saved regs to make use of the
                        // save and restore around helper blocks
                        regIndex = GetPreferencedRegIndex(newLifetime, regsBv);
                    }
                    else
                    {
                        // If we can't find a callee-saved reg, we can try using a caller-saved reg instead.
                        // We'll hopefully get a few loads enregistered that way before we get to the call.
                        tryCallerSavedRegs = true;
                        callerSavedAvailableBv = regsBv;
                    }
                }
            }
            else
            {
                regIndex = GetPreferencedRegIndex(newLifetime, regsBv);
            }
        }
        else
        {
            // Temp-register path: select a register for a one-off reload.
            AssertMsg(regOpnd, "Need a lifetime or a regOpnd passed in");
            if (regOpnd->IsFloat() || regOpnd->IsSimd128())
            {
#ifdef _M_IX86
                Assert(AutoSystemInfo::Data.SSE2Available());
#endif
                regsBv.And(this->floatRegs);
            }
            else
            {
                regsBv.And(this->int32Regs);
                BitVector regSizeBv;
                regSizeBv.ClearAll();
                regSizeBv.Set(TySize[regOpnd->GetType()]);
                regsBv = this->linearScanMD.FilterRegIntSizeConstraints(regsBv, regSizeBv);
            }

            BitVector regsBvNoTemps = regsBv;
            if (!this->tempRegs.IsEmpty())
            {
                // Avoid the temp reg that we have loaded in this basic block
                regsBvNoTemps.Minus(this->tempRegs);
            }

            BitVector regsBvNoTempsNoCallee = regsBvNoTemps;
            // Try to find a non-callee saved reg so that we don't have to save it in prolog
            regsBvNoTempsNoCallee.Minus(this->calleeSavedRegs);

            // Allocate a non-callee saved reg from the other end of the bit vector so that it can keep live for longer
            regIndex = regsBvNoTempsNoCallee.GetPrevBit();

            if (regIndex == BVInvalidIndex)
            {
                // If we don't have any non-callee saved reg then get the first available callee saved reg so that prolog can store adjacent registers
                regIndex = regsBvNoTemps.GetNextBit();
            }

            if (regIndex == BVInvalidIndex)
            {
                // Everything is used, get the temp from other end
                regIndex = regsBv.GetPrevBit();
            }
        }
    }

    RegNum reg;

    if (BVInvalidIndex != regIndex)
    {
        Assert(regIndex < RegNumCount);
        reg = (RegNum)regIndex;
    }
    else
    {
        // No free register of the right class: consider a caller-saved reg (for
        // lifetimes across user calls) or spill something.
        if (tryCallerSavedRegs)
        {
            Assert(newLifetime);
            regIndex = GetPreferencedRegIndex(newLifetime, callerSavedAvailableBv);
            if (BVInvalidIndex == regIndex)
            {
                tryCallerSavedRegs = false;
            }
        }

        bool dontSpillCurrent = tryCallerSavedRegs;

        if (newLifetime && newLifetime->isSpilled)
        {
            // Second chance allocation
            dontSpillCurrent = true;
        }

        // Can't find reg, spill some lifetime.
        reg = this->Spill(newLifetime, regOpnd, dontSpillCurrent, force);

        if (reg == RegNOREG && tryCallerSavedRegs)
        {
            Assert(BVInvalidIndex != regIndex);
            reg = (RegNum)regIndex;
            // This lifetime will get spilled once we get to the call it overlaps with (note: this may not be true
            // for second chance allocation as we may be beyond the call). Mark it as a cheap spill to give up the register
            // if some lifetime not overlapping with a call needs it.
            newLifetime->isCheapSpill = true;
        }
    }

    // We always have to return a reg if we are allocate temp reg.
    // If we are allocating for a new lifetime, we return RegNOREG, if we
    // spill the new lifetime
    Assert(newLifetime != nullptr || (reg != RegNOREG && reg < RegNumCount));
    return reg;
}
  2351. BVIndex
  2352. LinearScan::GetPreferencedRegIndex(Lifetime *lifetime, BitVector freeRegs)
  2353. {
  2354. BitVector freePreferencedRegs = freeRegs;
  2355. freePreferencedRegs.And(lifetime->regPreference);
  2356. // If one of the preferred register (if any) is available, use it. Otherwise, just pick one of free register.
  2357. if (!freePreferencedRegs.IsEmpty())
  2358. {
  2359. return freePreferencedRegs.GetNextBit();
  2360. }
  2361. else
  2362. {
  2363. return freeRegs.GetNextBit();
  2364. }
  2365. }
// LinearScan::Spill
// We need to spill something to free up a reg. If the newLifetime
// passed in isn't NULL, we can spill this one instead of an active one.
// Chooses the cheapest eligible active lifetime; returns its register, or
// RegNOREG if nothing could be spilled and 'dontSpillCurrent' is set.
RegNum
LinearScan::Spill(Lifetime *newLifetime, IR::RegOpnd *regOpnd, bool dontSpillCurrent, bool force)
{
    uint minSpillCost = (uint)-1;

    Assert(!newLifetime || !regOpnd || newLifetime->isFloat == (regOpnd->GetType() == TyMachDouble || regOpnd->IsSimd128()));
    bool isFloatReg;
    BitVector intUsageBV;
    bool needCalleeSaved;

    // For now, we just spill the lifetime with the lowest spill cost.
    if (newLifetime)
    {
        isFloatReg = newLifetime->isFloat;

        if (!force)
        {
            // newLifetime competes on cost: an active lifetime must be strictly
            // cheaper than it to be chosen.
            minSpillCost = this->GetSpillCost(newLifetime);
        }
        intUsageBV = newLifetime->intUsageBv;
        needCalleeSaved = newLifetime->isLiveAcrossUserCalls;
    }
    else
    {
        needCalleeSaved = false;
        if (regOpnd->IsFloat() || regOpnd->IsSimd128())
        {
            isFloatReg = true;
        }
        else
        {
            // Filter for int reg size constraints
            isFloatReg = false;
            intUsageBV.ClearAll();
            intUsageBV.Set(TySize[regOpnd->GetType()]);
        }
    }

    // Scan the active lifetimes for the cheapest one that matches the register
    // class and constraints and is legal to spill here.
    SList<Lifetime *>::EditingIterator candidate;
    FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, this->activeLiveranges, iter)
    {
        uint spillCost = this->GetSpillCost(lifetime);
        if (spillCost < minSpillCost                    &&
            this->instrUseRegs.Test(lifetime->reg) == false &&
            lifetime->isFloat == isFloatReg &&
            !lifetime->cantSpill                        &&
            (!needCalleeSaved || this->calleeSavedRegs.Test(lifetime->reg)) &&
            this->linearScanMD.FitRegIntSizeConstraints(lifetime->reg, intUsageBV))
        {
            minSpillCost = spillCost;
            candidate = iter;
        }
    } NEXT_SLIST_ENTRY_EDITING;
    AssertMsg(newLifetime || candidate.IsValid(), "Didn't find anything to spill?!?");

    Lifetime * spilledRange;
    if (candidate.IsValid())
    {
        spilledRange = candidate.Data();
        candidate.RemoveCurrent();

        this->activeRegs.Clear(spilledRange->reg);
        if (spilledRange->IsInt())
        {
            this->intRegUsedCount--;
        }
        else
        {
            this->floatRegUsedCount--;
        }
    }
    else if (dontSpillCurrent)
    {
        // Caller asked us not to spill newLifetime itself (e.g. second chance).
        return RegNOREG;
    }
    else
    {
        spilledRange = newLifetime;
    }

    return this->SpillLiveRange(spilledRange);
}
// LinearScan::SpillLiveRange
// Spill 'spilledRange': allocate stack space if needed, insert the spill
// stores and (when required) the reloads at recorded uses, and clear all
// register bookkeeping. Returns the register the lifetime occupied.
RegNum
LinearScan::SpillLiveRange(Lifetime * spilledRange, IR::Instr *insertionInstr)
{
    Assert(!spilledRange->isSpilled);

    RegNum reg = spilledRange->reg;
    StackSym *sym = spilledRange->sym;

    spilledRange->isSpilled = true;
    spilledRange->isCheapSpill = false;
    spilledRange->reg = RegNOREG;

    // Don't allocate stack space for const, we always reload them. (For debug mode, allocate on the stack)
    if (!sym->IsAllocated() && (!sym->IsConst() || IsSymNonTempLocalVar(sym)))
    {
       this->AllocateStackSpace(spilledRange);
    }

    // No need to insert loads or stores if there are no uses.
    if (!spilledRange->isDeadStore)
    {
        // In the debug mode, don't do insertstore for this stacksym, as we want to retain the IsConst for the sym,
        // and later we are going to find the reg for it.
        if (!IsSymNonTempLocalVar(sym))
        {
            this->InsertStores(spilledRange, reg, insertionInstr);
        }

        if (this->IsInLoop() || sym->IsConst())
        {
            // Reloads are needed in loops (the use may be revisited) and for
            // consts (which are always reloaded rather than stack-spilled).
            this->InsertLoads(sym, reg);
        }
        else
        {
            sym->scratch.linearScan.lifetime->useList.Clear();
        }
        // Adjust useCount in case of second chance allocation
        spilledRange->ApplyUseCountAdjust(this->curLoop);
    }

    Assert(reg == RegNOREG || spilledRange->reg == RegNOREG || this->regContent[reg] == spilledRange);
    if (spilledRange->isSecondChanceAllocated)
    {
        Assert(reg == RegNOREG || spilledRange->reg == RegNOREG
            || (this->regContent[reg] == spilledRange && this->secondChanceRegs.Test(reg)));
        this->secondChanceRegs.Clear(reg);
        spilledRange->isSecondChanceAllocated = false;
    }
    else
    {
        Assert(!this->secondChanceRegs.Test(reg));
    }
    this->regContent[reg] = nullptr;

#if DBG_DUMP
    if (PHASE_TRACE(Js::LinearScanPhase, this->func))
    {
        Output::Print(_u("**** Spill: "));
        sym->Dump();
        Output::Print(_u("(%S)"), RegNames[reg]);
        Output::Print(_u("  SpillCount:%d  Length:%d   Cost:%d\n"),
            spilledRange->useCount, spilledRange->end - spilledRange->start, this->GetSpillCost(spilledRange));
    }
#endif
    return reg;
}
// LinearScan::SpillReg
// Spill a given register.
// If the register is merely helper-spilled and 'forceSpill' is set, converts
// that into a real spill. Inside a helper block, prefers a save/restore
// around the block (helper spill) over a true spill when legal.
void
LinearScan::SpillReg(RegNum reg, bool forceSpill /* = false */)
{
    Lifetime *spilledRange = nullptr;
    if (activeRegs.Test(reg))
    {
        spilledRange = LinearScan::RemoveRegLiveRange(activeLiveranges, reg);
    }
    else if (opHelperSpilledRegs.Test(reg) && forceSpill)
    {
        // If a lifetime that was assigned this register was helper spilled,
        // really spill it now.
        Assert(IsInHelperBlock());

        // Look for the liverange in opHelperSpilledLiveranges instead of
        // activeLiveranges.
        FOREACH_SLIST_ENTRY(Lifetime *, lifetime, opHelperSpilledLiveranges)
        {
            if (lifetime->reg == reg)
            {
                spilledRange = lifetime;
                break;
            }
        } NEXT_SLIST_ENTRY;

        Assert(spilledRange);
        Assert(!spilledRange->cantSpill);
        RemoveOpHelperSpilled(spilledRange);
        // Really spill this liverange below.
    }
    else
    {
        // Register isn't held by any lifetime we need to displace.
        return;
    }

    AnalysisAssert(spilledRange);
    Assert(!spilledRange->cantSpill);

    if ((!forceSpill) && this->IsInHelperBlock() && spilledRange->start < this->HelperBlockStartInstrNumber() && !spilledRange->cantOpHelperSpill)
    {
        // if the lifetime starts before the helper block, we can do save and restore
        // around the helper block instead.
        this->AddOpHelperSpilled(spilledRange);
    }
    else
    {
        if (spilledRange->cantOpHelperSpill)
        {
            // We're really spilling this liverange, so take it out of the helper-spilled liveranges
            // to avoid confusion (see Win8 313433).
            Assert(!spilledRange->isOpHelperSpilled);
            spilledRange->cantOpHelperSpill = false;
            this->opHelperSpilledLiveranges->Remove(spilledRange);
        }
        this->SpillLiveRange(spilledRange);
    }

    // Update the active set and per-class counters if the reg was active.
    if (this->activeRegs.Test(reg))
    {
        this->activeRegs.Clear(reg);
        if (RegTypes[reg] == TyMachReg)
        {
            this->intRegUsedCount--;
        }
        else
        {
            Assert(RegTypes[reg] == TyFloat64);
            this->floatRegUsedCount--;
        }
    }
}
// LinearScan::ProcessEHRegionBoundary
// At an EH region boundary (TryCatch / TryFinally / Leave), spill every active
// lifetime so no value lives only in a register across the boundary.
void
LinearScan::ProcessEHRegionBoundary(IR::Instr * instr)
{
    Assert(instr->IsBranchInstr());

    if (instr->m_opcode != Js::OpCode::TryCatch && instr->m_opcode != Js::OpCode::TryFinally && instr->m_opcode != Js::OpCode::Leave)
    {
        return;
    }

    // Spill everything upon entry to the try region and upon a Leave.
    // For a Leave, the stores must go before the Leave itself.
    IR::Instr* insertionInstr = instr->m_opcode != Js::OpCode::Leave ? instr : instr->m_prev;
    FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, this->activeLiveranges, iter)
    {
        this->activeRegs.Clear(lifetime->reg);
        if (lifetime->IsInt())
        {
            this->intRegUsedCount--;
        }
        else
        {
            this->floatRegUsedCount--;
        }
        this->SpillLiveRange(lifetime, insertionInstr);
        iter.RemoveCurrent();
    }
    NEXT_SLIST_ENTRY_EDITING;
}
// LinearScan::AllocateStackSpace
// Give 'spilledRange' a stack location: reuse the bytecode-reg slot for
// non-temp locals, otherwise try to reuse a freed slot (stack packing), and
// fall back to allocating a fresh slot.
void
LinearScan::AllocateStackSpace(Lifetime *spilledRange)
{
    if (spilledRange->sym->IsAllocated())
    {
        return;
    }

    uint32 size = TySize[spilledRange->sym->GetType()];

    // For the bytecodereg syms instead of spilling to the any other location lets re-use the already created slot.
    if (IsSymNonTempLocalVar(spilledRange->sym))
    {
        Js::RegSlot slotIndex = spilledRange->sym->GetByteCodeRegSlot();

        // Get the offset which is already allocated from this local, and always spill on that location.
        spilledRange->sym->m_offset = GetStackOffset(slotIndex);
        spilledRange->sym->m_allocated = true;

        return;
    }

    StackSlot * newStackSlot = nullptr;

    if (!PHASE_OFF(Js::StackPackPhase, this->func) && !this->func->IsJitInDebugMode() && !spilledRange->cantStackPack)
    {
        // Search for a free stack slot to re-use
        FOREACH_SLIST_ENTRY_EDITING(StackSlot *, slot, this->stackSlotsFreeList, iter)
        {
            // Heuristic: should we use '==' or '>=' for the size?
            // A slot is reusable only if its previous occupant died before this
            // lifetime started and it is large enough.
            if (slot->lastUse <= spilledRange->start && slot->size >= size)
            {
                StackSym *spilledSym = spilledRange->sym;

                Assert(!spilledSym->IsArgSlotSym() && !spilledSym->IsParamSlotSym());
                Assert(!spilledSym->IsAllocated());
                spilledRange->spillStackSlot = slot;
                spilledSym->m_offset = slot->offset;
                spilledSym->m_allocated = true;

                iter.RemoveCurrent();

#if DBG_DUMP
                if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::StackPackPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
                {
                    spilledSym->Dump();
                    Output::Print(_u(" *** stack packed at offset %3d  (%4d - %4d)\n"), spilledSym->m_offset, spilledRange->start, spilledRange->end);
                }
#endif
                break;
            }
        } NEXT_SLIST_ENTRY_EDITING;

        if (spilledRange->spillStackSlot == nullptr)
        {
            // No reusable slot; create a new slot record for future packing.
            newStackSlot = JitAnewStruct(this->tempAlloc, StackSlot);
            newStackSlot->size = size;
            spilledRange->spillStackSlot = newStackSlot;
        }
        this->AddLiveRange(this->stackPackInUseLiveRanges, spilledRange);
    }

    if (!spilledRange->sym->IsAllocated())
    {
        // Can't stack pack, allocate new stack slot.
        StackSym *spilledSym = spilledRange->sym;
        this->func->StackAllocate(spilledSym, size);

#if DBG_DUMP
        if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::StackPackPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
        {
            spilledSym->Dump();
            Output::Print(_u(" at offset %3d  (%4d - %4d)\n"), spilledSym->m_offset, spilledRange->start, spilledRange->end);
        }
#endif
        if (newStackSlot != nullptr)
        {
            // Record the slot's offset so it can be recycled when this lifetime ends.
            newStackSlot->offset = spilledSym->m_offset;
        }
    }
}
// LinearScan::InsertLoads
// Insert a reload of 'sym' from its stack location into 'reg' before each
// recorded use of its lifetime, then drop the use list (the uses are now
// covered by the reloads).
void
LinearScan::InsertLoads(StackSym *sym, RegNum reg)
{
    Lifetime *lifetime = sym->scratch.linearScan.lifetime;

    FOREACH_SLIST_ENTRY(IR::Instr *, instr, &lifetime->useList)
    {
        this->InsertLoad(instr, sym, reg);
    } NEXT_SLIST_ENTRY;

    lifetime->useList.Clear();
}
// LinearScan::InsertStores
// Spill the value of 'lifetime' (held in 'reg') to the sym's stack home.
// Single-def syms store at their def point; otherwise we either store at every
// recorded def, or store once at the current point and rely on compensation
// code later, whichever the cost heuristic says is cheaper.
void
LinearScan::InsertStores(Lifetime *lifetime, RegNum reg, IR::Instr *insertionInstr)
{
    StackSym *sym = lifetime->sym;

    // If single def, use instrDef on the symbol
    if (sym->m_isSingleDef)
    {
        IR::Instr * defInstr = sym->m_instrDef;
        if ((!sym->IsConst() && defInstr->GetDst()->AsRegOpnd()->GetReg() == RegNOREG)
            || this->secondChanceRegs.Test(reg))
        {
            // This can happen if we were trying to allocate this lifetime,
            // and it is getting spilled right away.
            // For second chance allocations, this should have already been handled.
            return;
        }
        this->InsertStore(defInstr, defInstr->FindRegDef(sym)->m_sym, reg);
        return;
    }

    // Multi-def sym with no register assigned: nothing to store.
    if (reg == RegNOREG)
    {
        return;
    }

    uint localStoreCost = LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr));

    // Is it cheaper to spill all the defs we've seen so far or just insert a store at the current point?
    if ((this->func->HasTry() && !this->func->DoOptimizeTry()) || localStoreCost >= lifetime->allDefsCost)
    {
        // Insert a store for each def point we've seen so far
        FOREACH_SLIST_ENTRY(IR::Instr *, instr, &(lifetime->defList))
        {
            if (instr->GetDst()->AsRegOpnd()->GetReg() != RegNOREG)
            {
                IR::RegOpnd *regOpnd = instr->FindRegDef(sym);

                // Note that reg may not be equal to regOpnd->GetReg() if the lifetime has been re-allocated since we've seen this def
                this->InsertStore(instr, regOpnd->m_sym, regOpnd->GetReg());
            }
        } NEXT_SLIST_ENTRY;

        // All defs are now covered by stores; reset the deferred-store state.
        lifetime->defList.Clear();
        lifetime->allDefsCost = 0;
        lifetime->needsStoreCompensation = false;
    }
    else if (!lifetime->defList.Empty())
    {
        // Insert a def right here at the current instr, and then we'll use compensation code for paths not covered by this def.
        if (!insertionInstr)
        {
            insertionInstr = this->currentInstr->m_prev;
        }

        this->InsertStore(insertionInstr, sym, reg);
        if (this->IsInLoop())
        {
            RecordLoopUse(lifetime, reg);
        }

        // We now need to insert all store compensations when needed, unless we spill all the defs later on.
        lifetime->needsStoreCompensation = true;
    }
}
// LinearScan::InsertStore
// Emit a store of 'reg' to sym's stack slot immediately after 'instr'.
// Constant syms (except non-temp byte-code locals in debug mode) are not
// stored: their def instruction is unlinked and the value will be
// re-materialized by InsertLoad instead.
void
LinearScan::InsertStore(IR::Instr *instr, StackSym *sym, RegNum reg)
{
    // Win8 Bug 391484: We cannot use regOpnd->GetType() here because it
    // can lead to truncation as downstream usage of the register might be of a size
    // greater than the current use. Using RegTypes[reg] works only if the stack slot size
    // is always at least of size MachPtr

    // In the debug mode, if the current sym belongs to the byte code locals, then do not unlink this instruction, as we need to have this instruction to be there
    // to produce the write-through instruction.
    if (sym->IsConst() && !IsSymNonTempLocalVar(sym))
    {
        // Let's just delete the def. We'll reload the constant.
        // We can't just delete the instruction however since the
        // uses will look at the def to get the value.

        // Make sure it wasn't already deleted.
        if (sym->m_instrDef->m_next)
        {
            sym->m_instrDef->Unlink();
            // Null m_next marks the def as already unlinked for later calls.
            sym->m_instrDef->m_next = nullptr;
        }
        return;
    }

    Assert(reg != RegNOREG);

    IRType type = sym->GetType();

    // MOV [sym's stack slot], reg
    IR::Instr *store = IR::Instr::New(LowererMD::GetStoreOp(type),
        IR::SymOpnd::New(sym, type, this->func),
        IR::RegOpnd::New(sym, reg, type, this->func), this->func);
    instr->InsertAfter(store);
    store->CopyNumber(instr);
    this->linearScanMD.LegalizeDef(store);

#if DBG_DUMP
    if (PHASE_TRACE(Js::LinearScanPhase, this->func))
    {
        Output::Print(_u("...Inserting store for "));
        sym->Dump();
        Output::Print(_u(" Cost:%d\n"), this->GetSpillCost(sym->scratch.linearScan.lifetime));
    }
#endif
}
// LinearScan::InsertLoad
// Insert a load of 'sym' into 'reg' immediately before 'instr'. Constants are
// re-materialized from their const opnd (or via MOVSD_ZERO on x86/x64 when no
// const opnd exists) instead of being read back from the stack.
void
LinearScan::InsertLoad(IR::Instr *instr, StackSym *sym, RegNum reg)
{
    IR::Opnd *src;
    // The size of loads and stores to memory need to match. See the comment
    // around type in InsertStore above.
    IRType type = sym->GetType();
    bool isMovSDZero = false;
    if (sym->IsConst())
    {
        Assert(!sym->IsAllocated() || IsSymNonTempLocalVar(sym));
        // For an intConst, reload the constant instead of using the stack.
        // Create a new StackSym to make sure the old sym remains singleDef
        src = sym->GetConstOpnd();
        if (!src)
        {
            // No const opnd: fall back to the MOVSD_ZERO path below.
            isMovSDZero = true;
            sym = StackSym::New(sym->GetType(), this->func);
            sym->m_isConst = true;
            sym->m_isFltConst = true;
        }
        else
        {
            // Clone the sym (carrying over its const traits) so the original
            // sym keeps exactly one def.
            StackSym * oldSym = sym;
            sym = StackSym::New(sym->GetType(), this->func);
            sym->m_isConst = true;
            sym->m_isIntConst = oldSym->m_isIntConst;
            sym->m_isInt64Const = oldSym->m_isInt64Const;
            sym->m_isTaggableIntConst = oldSym->m_isTaggableIntConst;
        }
    }
    else
    {
        // Non-const sym: load from its stack slot.
        src = IR::SymOpnd::New(sym, type, this->func);
    }
    IR::Instr * load;
#if defined(_M_IX86) || defined(_M_X64)
    if (isMovSDZero)
    {
        load = IR::Instr::New(Js::OpCode::MOVSD_ZERO,
            IR::RegOpnd::New(sym, reg, type, this->func), this->func);
        instr->InsertBefore(load);
    }
    else
#endif
    {
        load = Lowerer::InsertMove(IR::RegOpnd::New(sym, reg, type, this->func), src, instr);
    }
    load->CopyNumber(instr);
    if (!isMovSDZero)
    {
        this->linearScanMD.LegalizeUse(load, src);
    }
    // Record that 'reg' is used in the current loop (if any).
    this->RecordLoopUse(nullptr, reg);
#if DBG_DUMP
    if (PHASE_TRACE(Js::LinearScanPhase, this->func))
    {
        Output::Print(_u("...Inserting load for "));
        sym->Dump();
        if (sym->scratch.linearScan.lifetime)
        {
            Output::Print(_u(" Cost:%d\n"), this->GetSpillCost(sym->scratch.linearScan.lifetime));
        }
        else
        {
            Output::Print(_u("\n"));
        }
    }
#endif
}
  2847. uint8
  2848. LinearScan::GetRegAttribs(RegNum reg)
  2849. {
  2850. return RegAttribs[reg];
  2851. }
  2852. IRType
  2853. LinearScan::GetRegType(RegNum reg)
  2854. {
  2855. return RegTypes[reg];
  2856. }
  2857. bool
  2858. LinearScan::IsCalleeSaved(RegNum reg)
  2859. {
  2860. return (RegAttribs[reg] & RA_CALLEESAVE) != 0;
  2861. }
  2862. bool
  2863. LinearScan::IsCallerSaved(RegNum reg) const
  2864. {
  2865. return !LinearScan::IsCalleeSaved(reg) && LinearScan::IsAllocatable(reg);
  2866. }
  2867. bool
  2868. LinearScan::IsAllocatable(RegNum reg) const
  2869. {
  2870. return !(RegAttribs[reg] & RA_DONTALLOCATE) && this->linearScanMD.IsAllocatable(reg, this->func);
  2871. }
// LinearScan::KillImplicitRegs
// Model register side effects of 'instr' that aren't expressed as operands:
// label/branch boundaries invalidate temp registers, IMUL clobbers its
// implicit high-result register (x86/x64), and calls kill all active
// caller-saved registers.
void
LinearScan::KillImplicitRegs(IR::Instr *instr)
{
    if (instr->IsLabelInstr() || instr->IsBranchInstr())
    {
        // Note: need to clear these for branch as well because this info isn't recorded for second chance
        // allocation on branch boundaries
        this->tempRegs.ClearAll();
    }

#if defined(_M_IX86) || defined(_M_X64)
    if (instr->m_opcode == Js::OpCode::IMUL)
    {
        // IMUL implicitly defines the high-dest register: spill it, drop it
        // from the temp set, and record its use in the enclosing loop.
        this->SpillReg(LowererMDArch::GetRegIMulHighDestLower());
        this->tempRegs.Clear(LowererMDArch::GetRegIMulHighDestLower());

        this->RecordLoopUse(nullptr, LowererMDArch::GetRegIMulHighDestLower());
        return;
    }
#endif

    this->TrackInlineeArgLifetimes(instr);

    // Don't care about kills on bailout calls as we are going to exit anyways
    // Also, for bailout scenarios we have already handled the inlinee frame spills
    Assert(LowererMD::IsCall(instr) || !instr->HasBailOutInfo());
    if (!LowererMD::IsCall(instr) || instr->HasBailOutInfo())
    {
        return;
    }

    if (this->currentBlock->inlineeStack.Count() > 0)
    {
        this->SpillInlineeArgs(instr);
    }
    else
    {
        // Not in an inlinee: tie the call to the top-level function.
        instr->m_func = this->func;
    }

    //
    // Spill caller-saved registers that are active.
    //
    BitVector deadRegs;
    deadRegs.Copy(this->activeRegs);
    deadRegs.And(this->callerSavedRegs);
    FOREACH_BITSET_IN_UNITBV(reg, deadRegs, BitVector)
    {
        this->SpillReg((RegNum)reg);
    }
    NEXT_BITSET_IN_UNITBV;
    // Only callee-saved temp registers survive the call.
    this->tempRegs.And(this->calleeSavedRegs);

    if (callSetupRegs.Count())
    {
        callSetupRegs.ClearAll();
    }

    // The call clobbers all caller-saved regs in every enclosing loop.
    Loop *loop = this->curLoop;
    while (loop)
    {
        loop->regAlloc.regUseBv.Or(this->callerSavedRegs);
        loop = loop->parent;
    }
}
  2929. //
  2930. // Before a call, all inlinee frame syms need to be spilled to a pre-defined location
  2931. //
  2932. void LinearScan::SpillInlineeArgs(IR::Instr* instr)
  2933. {
  2934. Assert(this->currentBlock->inlineeStack.Count() > 0);
  2935. // Ensure the call instruction is tied to the current inlinee
  2936. // This is used in the encoder to encode mapping or return offset and InlineeFrameRecord
  2937. instr->m_func = this->currentBlock->inlineeStack.Last();
  2938. BitVector spilledRegs;
  2939. this->currentBlock->inlineeFrameLifetimes.Map([&](uint i, Lifetime* lifetime){
  2940. Assert(lifetime->start < instr->GetNumber() && lifetime->end >= instr->GetNumber());
  2941. Assert(!lifetime->sym->IsConst());
  2942. Assert(this->currentBlock->inlineeFrameSyms.ContainsKey(lifetime->sym->m_id));
  2943. if (lifetime->reg == RegNOREG)
  2944. {
  2945. return;
  2946. }
  2947. StackSym* sym = lifetime->sym;
  2948. if (!lifetime->isSpilled && !lifetime->isOpHelperSpilled &&
  2949. (!lifetime->isDeadStore && (lifetime->sym->m_isSingleDef || !lifetime->defList.Empty()))) // if deflist is empty - we have already spilled at all defs - and the value is current
  2950. {
  2951. if (!spilledRegs.Test(lifetime->reg))
  2952. {
  2953. spilledRegs.Set(lifetime->reg);
  2954. if (!sym->IsAllocated())
  2955. {
  2956. this->AllocateStackSpace(lifetime);
  2957. }
  2958. this->RecordLoopUse(lifetime, lifetime->reg);
  2959. Assert(this->regContent[lifetime->reg] != nullptr);
  2960. if (sym->m_isSingleDef)
  2961. {
  2962. // For a single def - we do not track the deflist - the def below will remove the single def on the sym
  2963. // hence, we need to track the original def.
  2964. Assert(lifetime->defList.Empty());
  2965. lifetime->defList.Prepend(sym->m_instrDef);
  2966. }
  2967. this->InsertStore(instr->m_prev, sym, lifetime->reg);
  2968. }
  2969. }
  2970. });
  2971. }
// LinearScan::TrackInlineeArgLifetimes
// Maintain the current block's inlinee bookkeeping while walking the IR:
// InlineeStart pushes the inlinee's frame lifetimes and sym counts;
// InlineeEnd (or an instr with bail-on-no-profile) pops them and allocates
// the inlinee frame record.
void LinearScan::TrackInlineeArgLifetimes(IR::Instr* instr)
{
    if (instr->m_opcode == Js::OpCode::InlineeStart)
    {
        if (instr->m_func->m_hasInlineArgsOpt)
        {
            instr->m_func->frameInfo->IterateSyms([=](StackSym* sym){
                Lifetime* lifetime = sym->scratch.linearScan.lifetime;
                this->currentBlock->inlineeFrameLifetimes.Add(lifetime);

                // We need to maintain as count because the same sym can be used for multiple arguments
                uint* value;
                if (this->currentBlock->inlineeFrameSyms.TryGetReference(sym->m_id, &value))
                {
                    *value = *value + 1;
                }
                else
                {
                    this->currentBlock->inlineeFrameSyms.Add(sym->m_id, 1);
                }
            });
            if (this->currentBlock->inlineeStack.Count() > 0)
            {
                // Nested inlinees must be pushed in strictly increasing depth order.
                Assert(instr->m_func->inlineDepth == this->currentBlock->inlineeStack.Last()->inlineDepth + 1);
            }
            this->currentBlock->inlineeStack.Add(instr->m_func);
        }
        else
        {
            Assert(this->currentBlock->inlineeStack.Count() == 0);
        }
    }
    else if (instr->m_opcode == Js::OpCode::InlineeEnd || instr->HasBailOnNoProfile())
    {
        if (instr->m_func->m_hasInlineArgsOpt)
        {
            instr->m_func->frameInfo->AllocateRecord(this->func, instr->m_func->GetJITFunctionBody()->GetAddr());

            if(this->currentBlock->inlineeStack.Count() == 0)
            {
                // Block is unreachable
                Assert(this->currentBlock->inlineeFrameLifetimes.Count() == 0);
                Assert(this->currentBlock->inlineeFrameSyms.Count() == 0);
            }
            else
            {
                Func* func = this->currentBlock->inlineeStack.RemoveAtEnd();
                Assert(func == instr->m_func);

                // Pop this inlinee's lifetimes and decrement its sym counts,
                // in reverse of the push order.
                instr->m_func->frameInfo->IterateSyms([=](StackSym* sym){
                    Lifetime* lifetime = this->currentBlock->inlineeFrameLifetimes.RemoveAtEnd();

                    uint* value;
                    if (this->currentBlock->inlineeFrameSyms.TryGetReference(sym->m_id, &value))
                    {
                        *value = *value - 1;
                        if (*value == 0)
                        {
                            bool removed = this->currentBlock->inlineeFrameSyms.Remove(sym->m_id);
                            Assert(removed);
                        }
                    }
                    else
                    {
                        Assert(UNREACHED);
                    }
                    Assert(sym->scratch.linearScan.lifetime == lifetime);
                }, /*reverse*/ true);
            }
        }
    }
}
// GetSpillCost
// The spill cost is trying to estimate the usage density of the lifetime,
// by dividing the useCount by the lifetime length.
uint
LinearScan::GetSpillCost(Lifetime *lifetime)
{
    uint useCount = lifetime->GetRegionUseCount(this->curLoop);
    uint spillCost;

    // Get local spill cost. Ignore helper blocks as we'll also need compensation on the main path.
    uint localUseCost = LinearScan::GetUseSpillCost(this->loopNest, false);

    if (lifetime->reg && !lifetime->isSpilled)
    {
        // If it is in a reg, we'll need a store
        if (localUseCost >= lifetime->allDefsCost)
        {
            useCount += lifetime->allDefsCost;
        }
        else
        {
            useCount += localUseCost;
        }

        if (this->curLoop && !lifetime->sym->IsConst()
            && this->curLoop->regAlloc.liveOnBackEdgeSyms->Test(lifetime->sym->m_id))
        {
            // If we spill here, we'll need to insert a load at the bottom of the loop
            // (it would be nice to be able to check is was in a reg at the top of the loop)...
            useCount += localUseCost;
        }
    }

    // When comparing 2 lifetimes, we don't really care about the actual length of the lifetimes.
    // What matters is how much longer will they use the register.
    const uint start = currentInstr->GetNumber();
    uint end = max(start, lifetime->end);
    uint lifetimeTotalOpHelperFullVisitedLength = lifetime->totalOpHelperLengthByEnd;

    if (this->curLoop && this->curLoop->regAlloc.loopEnd < end && !PHASE_OFF(Js::RegionUseCountPhase, this->func))
    {
        // Clamp the horizon to the end of the current loop.
        end = this->curLoop->regAlloc.loopEnd;
        lifetimeTotalOpHelperFullVisitedLength = this->curLoop->regAlloc.helperLength;
    }
    uint length = end - start + 1;

    // Exclude helper block length since helper block paths are typically infrequently taken paths and not as important
    const uint totalOpHelperVisitedLength = this->totalOpHelperFullVisitedLength + CurrentOpHelperVisitedLength(currentInstr);
    Assert(lifetimeTotalOpHelperFullVisitedLength >= totalOpHelperVisitedLength);
    const uint lifetimeHelperLength = lifetimeTotalOpHelperFullVisitedLength - totalOpHelperVisitedLength;
    Assert(length >= lifetimeHelperLength);
    length -= lifetimeHelperLength;
    if(length == 0)
    {
        length = 1;
    }

    // Add a base length so that the difference between a length of 1 and a length of 2 is not so large
#ifdef _M_X64
    length += 64;
#else
    length += 16;
#endif

    // Fixed-point scale (<<13) keeps precision in the integer division.
    spillCost = (useCount << 13) / length;

    if (lifetime->isSecondChanceAllocated)
    {
        // Second chance allocation have additional overhead, so de-prioritize them
        // Note: could use more tuning...
        spillCost = spillCost * 4/5;
    }

    if (lifetime->isCheapSpill)
    {
        // This lifetime will get spilled eventually, so lower the spill cost to favor other lifetimes
        // Note: could use more tuning...
        spillCost /= 2;
    }

    if (lifetime->sym->IsConst())
    {
        // Constants reload cheaply, so they are strongly preferred spill candidates.
        spillCost = spillCost / 16;
    }

    return spillCost;
}
  3115. bool
  3116. LinearScan::RemoveDeadStores(IR::Instr *instr)
  3117. {
  3118. IR::Opnd *dst = instr->GetDst();
  3119. if (dst && dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym && !dst->AsRegOpnd()->m_isCallArg)
  3120. {
  3121. IR::RegOpnd *regOpnd = dst->AsRegOpnd();
  3122. Lifetime * lifetime = regOpnd->m_sym->scratch.linearScan.lifetime;
  3123. if (lifetime->isDeadStore)
  3124. {
  3125. if (Lowerer::HasSideEffects(instr) == false)
  3126. {
  3127. // If all the bailouts referencing this arg are removed (which can happen in some scenarios)
  3128. //- then it's OK to remove this def of the arg
  3129. DebugOnly(this->func->allowRemoveBailOutArgInstr = true);
  3130. // We are removing this instruction, end dead life time now
  3131. this->EndDeadLifetimes(instr, false);
  3132. instr->Remove();
  3133. DebugOnly(this->func->allowRemoveBailOutArgInstr = false);
  3134. return true;
  3135. }
  3136. }
  3137. }
  3138. return false;
  3139. }
  3140. void
  3141. LinearScan::AssignActiveReg(Lifetime * lifetime, RegNum reg)
  3142. {
  3143. Assert(!this->activeRegs.Test(reg));
  3144. Assert(!lifetime->isSpilled);
  3145. Assert(lifetime->reg == RegNOREG || lifetime->reg == reg);
  3146. this->func->m_regsUsed.Set(reg);
  3147. lifetime->reg = reg;
  3148. this->activeRegs.Set(reg);
  3149. if (lifetime->IsInt())
  3150. {
  3151. this->intRegUsedCount++;
  3152. }
  3153. else
  3154. {
  3155. this->floatRegUsedCount++;
  3156. }
  3157. this->AddToActive(lifetime);
  3158. this->tempRegs.Clear(reg);
  3159. }
  3160. void
  3161. LinearScan::AssignTempReg(Lifetime * lifetime, RegNum reg)
  3162. {
  3163. Assert(reg > RegNOREG && reg < RegNumCount);
  3164. Assert(!this->activeRegs.Test(reg));
  3165. Assert(lifetime->isSpilled);
  3166. this->func->m_regsUsed.Set(reg);
  3167. lifetime->reg = reg;
  3168. this->tempRegs.Set(reg);
  3169. __analysis_assume(reg > 0 && reg < RegNumCount);
  3170. this->tempRegLifetimes[reg] = lifetime;
  3171. this->RecordLoopUse(nullptr, reg);
  3172. }
  3173. RegNum
  3174. LinearScan::GetAssignedTempReg(Lifetime * lifetime, IRType type)
  3175. {
  3176. if (this->tempRegs.Test(lifetime->reg) && this->tempRegLifetimes[lifetime->reg] == lifetime)
  3177. {
  3178. if (this->linearScanMD.FitRegIntSizeConstraints(lifetime->reg, type))
  3179. {
  3180. this->RecordLoopUse(nullptr, lifetime->reg);
  3181. return lifetime->reg;
  3182. }
  3183. else
  3184. {
  3185. // Free this temp, we'll need to find another one.
  3186. this->tempRegs.Clear(lifetime->reg);
  3187. lifetime->reg = RegNOREG;
  3188. }
  3189. }
  3190. return RegNOREG;
  3191. }
  3192. uint
  3193. LinearScan::GetUseSpillCost(uint loopNest, BOOL isInHelperBlock)
  3194. {
  3195. if (isInHelperBlock)
  3196. {
  3197. // Helper block uses are not as important.
  3198. return 0;
  3199. }
  3200. else if (loopNest < 6)
  3201. {
  3202. return (1 << (loopNest * 3));
  3203. }
  3204. else
  3205. {
  3206. // Slow growth for deep nest to avoid overflow
  3207. return (1 << (5 * 3)) * (loopNest-5);
  3208. }
  3209. }
// LinearScan::ProcessSecondChanceBoundary (branch form)
// At a branch, snapshot/reconcile register contents for second-chance
// allocation: record op-helper-spilled lifetimes, process each branch target,
// then save the register contents on the branch.
void
LinearScan::ProcessSecondChanceBoundary(IR::BranchInstr *branchInstr)
{
    if (this->func->HasTry() && !this->func->DoOptimizeTry())
    {
        // Second-chance boundaries aren't processed for unoptimized try regions.
        return;
    }

    if (this->currentOpHelperBlock && this->currentOpHelperBlock->opHelperEndInstr == branchInstr)
    {
        // Lifetimes opHelperSpilled won't get recorded by SaveRegContent(). Do it here.
        FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->opHelperSpilledLiveranges)
        {
            if (!lifetime->cantOpHelperSpill)
            {
                if (lifetime->isSecondChanceAllocated)
                {
                    this->secondChanceRegs.Set(lifetime->reg);
                }
                this->regContent[lifetime->reg] = lifetime;
            }
        } NEXT_SLIST_ENTRY;
    }

    if(branchInstr->IsMultiBranch())
    {
        // Handle every distinct target of the multi-branch.
        IR::MultiBranchInstr * multiBranchInstr = branchInstr->AsMultiBrInstr();

        multiBranchInstr->MapUniqueMultiBrLabels([=](IR::LabelInstr * branchLabel) -> void
        {
            this->ProcessSecondChanceBoundaryHelper(branchInstr, branchLabel);
        });
    }
    else
    {
        IR::LabelInstr *branchLabel = branchInstr->GetTarget();
        this->ProcessSecondChanceBoundaryHelper(branchInstr, branchLabel);
    }

    this->SaveRegContent(branchInstr);
}
// LinearScan::ProcessSecondChanceBoundaryHelper
// Process one (branch, target label) pair: loop back-edges get compensation
// code immediately; forward branches save the register contents and propagate
// the lowered basic block data to the target label.
void
LinearScan::ProcessSecondChanceBoundaryHelper(IR::BranchInstr *branchInstr, IR::LabelInstr *branchLabel)
{
    if (branchInstr->GetNumber() > branchLabel->GetNumber())
    {
        // Loop back-edge
        Assert(branchLabel->m_isLoopTop);
        branchInstr->m_regContent = nullptr;
        this->InsertSecondChanceCompensation(this->regContent, branchLabel->m_regContent, branchInstr, branchLabel);
    }
    else
    {
        // Forward branch
        this->SaveRegContent(branchInstr);
        if (this->curLoop)
        {
            this->curLoop->regAlloc.exitRegContentList->Prepend(branchInstr->m_regContent);
        }
        if (!branchLabel->m_loweredBasicBlock)
        {
            if (branchInstr->IsConditional() || branchInstr->IsMultiBranch())
            {
                // Clone with deep copy
                branchLabel->m_loweredBasicBlock = this->currentBlock->Clone(this->tempAlloc);
            }
            else
            {
                // If the unconditional branch leads to the end of the function for the scenario of a bailout - we do not want to
                // copy the lowered inlinee info.
                IR::Instr* nextInstr = branchLabel->GetNextRealInstr();
                if (nextInstr->m_opcode != Js::OpCode::FunctionExit &&
                    nextInstr->m_opcode != Js::OpCode::BailOutStackRestore &&
                    this->currentBlock->HasData())
                {
                    IR::Instr* branchNextInstr = branchInstr->GetNextRealInstrOrLabel();
                    if (branchNextInstr->IsLabelInstr())
                    {
                        // Clone with shallow copy
                        branchLabel->m_loweredBasicBlock = this->currentBlock;
                    }
                    else
                    {
                        // Dead code after the unconditional branch causes the currentBlock data to be freed later on...
                        // Deep copy in this case.
                        branchLabel->m_loweredBasicBlock = this->currentBlock->Clone(this->tempAlloc);
                    }
                }
            }
        }
        else
        {
            // The lowerer sometimes generates unreachable blocks that would have empty data.
            Assert(!currentBlock->HasData() || branchLabel->m_loweredBasicBlock->Equals(this->currentBlock));
        }
    }
}
// LinearScan::ProcessSecondChanceBoundary (label form)
// At a label, reconcile the register contents recorded at each incoming
// forward branch with the current contents; loop tops additionally save a
// snapshot of the contents for later back-edge compensation.
void
LinearScan::ProcessSecondChanceBoundary(IR::LabelInstr *labelInstr)
{
    if (this->func->HasTry() && !this->func->DoOptimizeTry())
    {
        // Second-chance boundaries aren't processed for unoptimized try regions.
        return;
    }

    if (labelInstr->m_isLoopTop)
    {
        this->SaveRegContent(labelInstr);
        // Keep a copy of the reg contents at the loop top for back-edge processing.
        Lifetime ** regContent = AnewArrayZ(this->tempAlloc, Lifetime *, RegNumCount);
        js_memcpy_s(regContent, (RegNumCount * sizeof(Lifetime *)), this->regContent, sizeof(this->regContent));
        this->curLoop->regAlloc.loopTopRegContent = regContent;
    }

    FOREACH_SLISTCOUNTED_ENTRY_EDITING(IR::BranchInstr *, branchInstr, &labelInstr->labelRefs, iter)
    {
        if (branchInstr->m_isAirlock)
        {
            // This branch was just inserted... Skip it.
            continue;
        }

        Assert(branchInstr->GetNumber() && labelInstr->GetNumber());
        if (branchInstr->GetNumber() < labelInstr->GetNumber())
        {
            // Normal branch
            Lifetime ** branchRegContent = branchInstr->m_regContent;
            bool isMultiBranch = true;
            if (!branchInstr->IsMultiBranch())
            {
                // Single-target branch: its snapshot is consumed (and freed) here.
                branchInstr->m_regContent = nullptr;
                isMultiBranch = false;
            }
            this->InsertSecondChanceCompensation(branchRegContent, this->regContent, branchInstr, labelInstr);
            if (!isMultiBranch)
            {
                JitAdeleteArray(this->tempAlloc, RegNumCount, branchRegContent);
            }
        }
        else
        {
            // Loop back-edge
            Assert(labelInstr->m_isLoopTop);
        }
    } NEXT_SLISTCOUNTED_ENTRY_EDITING;
}
  3348. IR::Instr * LinearScan::EnsureAirlock(bool needsAirlock, bool *pHasAirlock, IR::Instr *insertionInstr,
  3349. IR::Instr **pInsertionStartInstr, IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr)
  3350. {
  3351. if (needsAirlock && !(*pHasAirlock))
  3352. {
  3353. // We need an extra block for the compensation code.
  3354. insertionInstr = this->InsertAirlock(branchInstr, labelInstr);
  3355. *pInsertionStartInstr = insertionInstr->m_prev;
  3356. *pHasAirlock = true;
  3357. }
  3358. return insertionInstr;
  3359. }
  3360. bool LinearScan::NeedsLoopBackEdgeCompensation(Lifetime *lifetime, IR::LabelInstr *loopTopLabel)
  3361. {
  3362. if (!lifetime)
  3363. {
  3364. return false;
  3365. }
  3366. if (lifetime->sym->IsConst())
  3367. {
  3368. return false;
  3369. }
  3370. // No need if lifetime begins in the loop
  3371. if (lifetime->start > loopTopLabel->GetNumber())
  3372. {
  3373. return false;
  3374. }
  3375. // Only needed if lifetime is live on the back-edge, and the register is used inside the loop, or the lifetime extends
  3376. // beyond the loop (and compensation out of the loop may use this reg)...
  3377. if (!loopTopLabel->GetLoop()->regAlloc.liveOnBackEdgeSyms->Test(lifetime->sym->m_id)
  3378. || (this->currentInstr->GetNumber() >= lifetime->end && !this->curLoop->regAlloc.symRegUseBv->Test(lifetime->sym->m_id)))
  3379. {
  3380. return false;
  3381. }
  3382. return true;
  3383. }
// LinearScan::InsertSecondChanceCompensation
// Insert the moves/stores needed to reconcile the register contents at a
// branch ('branchRegContent') with the contents expected at its target label
// ('labelRegContent'). Loop back-edges (branchRegContent == this->regContent)
// get the full three-step treatment (stores for unsaved defs, reg-to-reg
// shuffles, reloads); forward merges only need stores for newly spilled
// lifetimes plus reloads. Compensation code goes into an airlock block when
// the edge is conditional.
void
LinearScan::InsertSecondChanceCompensation(Lifetime ** branchRegContent, Lifetime **labelRegContent,
    IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr)
{
    IR::Instr *prevInstr = branchInstr->GetPrevRealInstrOrLabel();
    bool needsAirlock = branchInstr->IsConditional() || (prevInstr->IsBranchInstr() && prevInstr->AsBranchInstr()->IsConditional()) || branchInstr->IsMultiBranch();
    bool hasAirlock = false;
    IR::Instr *insertionInstr = branchInstr;
    IR::Instr *insertionStartInstr = branchInstr->m_prev;
    // For loop back-edge, we want to keep the insertionStartInstr before the branch as spill need to happen on all paths
    // Pass a dummy instr address to airLockBlock insertion code.
    BitVector thrashedRegs(0);
    bool isLoopBackEdge = (this->regContent == branchRegContent);
    Lifetime * tmpRegContent[RegNumCount];
    Lifetime **regContent = this->regContent;

    if (isLoopBackEdge)
    {
        Loop *loop = labelInstr->GetLoop();

        // Work on a copy so the live regContent isn't mutated during reconciliation.
        js_memcpy_s(&tmpRegContent, (RegNumCount * sizeof(Lifetime *)), this->regContent, sizeof(this->regContent));

        branchRegContent = tmpRegContent;
        regContent = tmpRegContent;

#if defined(_M_IX86) || defined(_M_X64)
        // Insert XCHG to avoid some conflicts for int regs
        // Note: no XCHG on ARM or SSE2. We could however use 3 XOR on ARM...
        this->AvoidCompensationConflicts(labelInstr, branchInstr, labelRegContent, branchRegContent,
            &insertionInstr, &insertionStartInstr, needsAirlock, &hasAirlock);
#endif

        FOREACH_BITSET_IN_UNITBV(reg, this->secondChanceRegs, BitVector)
        {
            Lifetime *labelLifetime = labelRegContent[reg];
            Lifetime *lifetime = branchRegContent[reg];

            // 1.  Insert Stores
            //      Lifetime starts before the loop
            //      Lifetime was re-allocated within the loop (i.e.: a load was most likely inserted)
            //      Lifetime is live on back-edge and has unsaved defs.
            if (lifetime && lifetime->start < labelInstr->GetNumber() && lifetime->lastAllocationStart > labelInstr->GetNumber()
                && (labelInstr->GetLoop()->regAlloc.liveOnBackEdgeSyms->Test(lifetime->sym->m_id))
                && !lifetime->defList.Empty())
            {
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                // If the lifetime was second chance allocated inside the loop, there might
                // be spilled loads of this symbol in the loop.  Insert the stores.
                // We don't need to do this if the lifetime was re-allocated before the loop.
                //
                // Note that reg may not be equal to lifetime->reg because of inserted XCHG...
                this->InsertStores(lifetime, lifetime->reg, insertionStartInstr);
            }

            if (lifetime == labelLifetime)
            {
                // Branch and label agree on this register; nothing to move.
                continue;
            }

            // 2.  MOV labelReg/MEM, branchReg
            //      Move current register to match content at the top of the loop
            if (this->NeedsLoopBackEdgeCompensation(lifetime, labelInstr))
            {
                // Mismatch, we need to insert compensation code
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);

                // MOV ESI, EAX
                // MOV EDI, ECX
                // MOV ECX, ESI
                // MOV EAX, EDI <<<
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    lifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }

            // 2.  MOV labelReg, MEM
            //      Lifetime was in a reg at the top of the loop but is spilled right now.
            if (labelLifetime && labelLifetime->isSpilled && !labelLifetime->sym->IsConst() && labelLifetime->end >= branchInstr->GetNumber())
            {
                if (!loop->regAlloc.liveOnBackEdgeSyms->Test(labelLifetime->sym->m_id))
                {
                    continue;
                }
                if (this->ClearLoopExitIfRegUnused(labelLifetime, (RegNum)reg, branchInstr, loop))
                {
                    continue;
                }
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);

                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    labelLifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
        } NEXT_BITSET_IN_UNITBV;

        // 3.  MOV labelReg, MEM
        //      Finish up reloading lifetimes needed at the top.  #2 only handled secondChanceRegs.
        FOREACH_REG(reg)
        {
            // Handle lifetimes in a register at the top of the loop, but not currently.
            Lifetime *labelLifetime = labelRegContent[reg];
            if (labelLifetime && !labelLifetime->sym->IsConst() && labelLifetime != branchRegContent[reg] && !thrashedRegs.Test(reg)
                && (loop->regAlloc.liveOnBackEdgeSyms->Test(labelLifetime->sym->m_id)))
            {
                if (this->ClearLoopExitIfRegUnused(labelLifetime, (RegNum)reg, branchInstr, loop))
                {
                    continue;
                }
                // Mismatch, we need to insert compensation code
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);

                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    labelLifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
        } NEXT_REG;

        if (hasAirlock)
        {
            loop->regAlloc.hasAirLock = true;
        }
    }
    else
    {
        //
        // Non-loop-back-edge merge
        //
        FOREACH_REG(reg)
        {
            Lifetime *branchLifetime = branchRegContent[reg];
            Lifetime *lifetime = regContent[reg];

            if (lifetime == branchLifetime)
            {
                // Contents already agree on this register.
                continue;
            }

            if (branchLifetime && branchLifetime->isSpilled && !branchLifetime->sym->IsConst() && branchLifetime->end > labelInstr->GetNumber())
            {
                // The lifetime was in a reg at the branch and is now spilled.  We need a store on this path.
                //
                //  MOV  MEM, branch_REG
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    branchLifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
            if (lifetime && !lifetime->sym->IsConst() && lifetime->start <= branchInstr->GetNumber())
            {
                //  MOV  label_REG, branch_REG / MEM
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    lifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
        } NEXT_REG;
    }

    if (hasAirlock)
    {
        // Fix opHelper on airlock label.
        // The airlock label may have inherited isOpHelper from its predecessor;
        // clear it when the fall-through label is not a helper.
        if (insertionInstr->m_prev->IsLabelInstr() && insertionInstr->IsLabelInstr())
        {
            if (insertionInstr->m_prev->AsLabelInstr()->isOpHelper && !insertionInstr->AsLabelInstr()->isOpHelper)
            {
                insertionInstr->m_prev->AsLabelInstr()->isOpHelper = false;
            }
        }
    }
}
// Emits compensation code on the flow arc branchInstr -> labelInstr so that
// 'lifetime' ends up where the label-side register assignment expects it.
// 'reg' is a register whose branch/label contents disagree; 'lifetime' is the
// content of 'reg' on one of the two sides. 'thrashedRegs' tracks registers
// already overwritten by earlier compensation moves at this merge point.
// Moves/loads are inserted before 'insertionInstr'; stores that must happen
// while the branch-side register still holds the value go at 'insertionStartInstr'.
void
LinearScan::ReconcileRegContent(Lifetime ** branchRegContent, Lifetime **labelRegContent,
                                IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr,
                                Lifetime *lifetime, RegNum reg, BitVector *thrashedRegs, IR::Instr *insertionInstr, IR::Instr *insertionStartInstr)
{
    RegNum originalReg = RegNOREG;
    IRType type = RegTypes[reg];
    Assert(labelRegContent[reg] != branchRegContent[reg]);

    // matchBranchReg: 'reg' is the lifetime's register on the branch side;
    // otherwise it is the lifetime's register on the label side.
    bool matchBranchReg = (branchRegContent[reg] == lifetime);
    // We will search the *other* side's table for where it keeps this lifetime.
    Lifetime **originalRegContent = (matchBranchReg ? labelRegContent : branchRegContent);
    bool isLoopBackEdge = (branchInstr->GetNumber() > labelInstr->GetNumber());

    // Constants need no reconciliation here.
    if (lifetime->sym->IsConst())
    {
        return;
    }

    // Look if this lifetime was in a different register in the previous block.
    // Split the search in 2 to speed this up.
    if (type == TyMachReg)
    {
        FOREACH_INT_REG(regIter)
        {
            if (originalRegContent[regIter] == lifetime)
            {
                originalReg = regIter;
                break;
            }
        } NEXT_INT_REG;
    }
    else
    {
        Assert(type == TyFloat64 || IRType_IsSimd(type));
        FOREACH_FLOAT_REG(regIter)
        {
            if (originalRegContent[regIter] == lifetime)
            {
                originalReg = regIter;
                break;
            }
        } NEXT_FLOAT_REG;
    }

    // Normalize: branchReg = register holding the lifetime at the branch
    // (RegNOREG if none), labelReg = register the label expects it in.
    RegNum branchReg, labelReg;
    if (matchBranchReg)
    {
        branchReg = reg;
        labelReg = originalReg;
    }
    else
    {
        branchReg = originalReg;
        labelReg = reg;
    }

    if (branchReg != RegNOREG && !thrashedRegs->Test(branchReg) && !lifetime->sym->IsConst())
    {
        // The branch-side register still holds the live value.
        Assert(branchRegContent[branchReg] == lifetime);
        if (labelReg != RegNOREG)
        {
            // MOV labelReg, branchReg
            Assert(labelRegContent[labelReg] == lifetime);
            IR::Instr *load = IR::Instr::New(LowererMD::GetLoadOp(type),
                IR::RegOpnd::New(lifetime->sym, labelReg, type, this->func),
                IR::RegOpnd::New(lifetime->sym, branchReg, type, this->func), this->func);
            insertionInstr->InsertBefore(load);
            load->CopyNumber(insertionInstr);

            // symRegUseBv needs to be set properly. Unfortunately, we need to go conservative as we don't know
            // which allocation it was at the source of the branch.
            if (this->IsInLoop())
            {
                this->RecordLoopUse(lifetime, branchReg);
            }
            thrashedRegs->Set(labelReg);
        }
        else if (!lifetime->sym->IsSingleDef() && lifetime->needsStoreCompensation && !isLoopBackEdge)
        {
            // The label side has the lifetime only in memory: spill on this arc.
            Assert(!lifetime->sym->IsConst());
            Assert(matchBranchReg);
            Assert(branchRegContent[branchReg] == lifetime);

            // MOV mem, branchReg
            this->InsertStores(lifetime, branchReg, insertionInstr->m_prev);

            // symRegUseBv needs to be set properly. Unfortunately, we need to go conservative as we don't know
            // which allocation it was at the source of the branch.
            if (this->IsInLoop())
            {
                this->RecordLoopUse(lifetime, branchReg);
            }
        }
    }
    else if (labelReg != RegNOREG)
    {
        // The label expects the lifetime in labelReg, but the branch-side copy is
        // unavailable (spilled, or its register was thrashed by earlier compensation).
        Assert(labelRegContent[labelReg] == lifetime);
        Assert(lifetime->sym->IsConst() || lifetime->sym->IsAllocated());

        if (branchReg != RegNOREG && !lifetime->sym->IsSingleDef())
        {
            Assert(thrashedRegs->Test(branchReg));
            // We can't insert a "MOV labelReg, branchReg" at the insertion point
            // because branchReg was thrashed by a previous reload.
            // Look for that reload to see if we can insert before it.
            IR::Instr *newInsertionInstr = insertionInstr->m_prev;
            bool foundIt = false;
            while (LowererMD::IsAssign(newInsertionInstr))
            {
                IR::Opnd *dst = newInsertionInstr->GetDst();
                IR::Opnd *src = newInsertionInstr->GetSrc1();
                if (src->IsRegOpnd() && src->AsRegOpnd()->GetReg() == labelReg)
                {
                    // This uses labelReg, give up...
                    break;
                }
                if (dst->IsRegOpnd() && dst->AsRegOpnd()->GetReg() == branchReg)
                {
                    // Success!
                    foundIt = true;
                    break;
                }
                newInsertionInstr = newInsertionInstr->m_prev;
            }

            if (foundIt)
            {
                // MOV labelReg, branchReg  (placed before the reload that thrashes branchReg)
                Assert(labelRegContent[labelReg] == lifetime);
                IR::Instr *load = IR::Instr::New(LowererMD::GetLoadOp(type),
                    IR::RegOpnd::New(lifetime->sym, labelReg, type, this->func),
                    IR::RegOpnd::New(lifetime->sym, branchReg, type, this->func), this->func);
                newInsertionInstr->InsertBefore(load);
                load->CopyNumber(newInsertionInstr);

                // symRegUseBv needs to be set properly. Unfortunately, we need to go conservative as we don't know
                // which allocation it was at the source of the branch.
                if (this->IsInLoop())
                {
                    this->RecordLoopUse(lifetime, branchReg);
                }
                thrashedRegs->Set(labelReg);
                return;
            }

            // Couldn't insert before the reload: store branchReg at the start of
            // the compensation sequence instead, then reload from memory below.
            Assert(thrashedRegs->Test(branchReg));
            this->InsertStores(lifetime, branchReg, insertionStartInstr);

            // symRegUseBv needs to be set properly. Unfortunately, we need to go conservative as we don't know
            // which allocation it was at the source of the branch.
            if (this->IsInLoop())
            {
                this->RecordLoopUse(lifetime, branchReg);
            }
        }

        // MOV labelReg, mem
        this->InsertLoad(insertionInstr, lifetime->sym, labelReg);
        thrashedRegs->Set(labelReg);
    }
    else if (!lifetime->sym->IsConst())
    {
        Assert(matchBranchReg);
        Assert(branchReg != RegNOREG);

        // The lifetime was in a register at the top of the loop, but we thrashed it with a previous reload...
        if (!lifetime->sym->IsSingleDef())
        {
            this->InsertStores(lifetime, branchReg, insertionStartInstr);
        }

#if DBG_DUMP
        if (PHASE_TRACE(Js::SecondChancePhase, this->func))
        {
            Output::Print(_u("****** Spilling reg because of bad compensation code order: "));
            lifetime->sym->Dump();
            Output::Print(_u("\n"));
        }
#endif
    }
}
  3697. bool LinearScan::ClearLoopExitIfRegUnused(Lifetime *lifetime, RegNum reg, IR::BranchInstr *branchInstr, Loop *loop)
  3698. {
  3699. // If a lifetime was enregistered into the loop and then spilled, we need compensation at the bottom
  3700. // of the loop to reload the lifetime into that register.
  3701. // If that lifetime was spilled before it was ever used, we don't need the compensation code.
  3702. // We do however need to clear the regContent on any loop exit as the register will not
  3703. // be available anymore on that path.
  3704. // Note: If the lifetime was reloaded into the same register, we might clear the regContent unnecessarily...
  3705. if (!PHASE_OFF(Js::ClearRegLoopExitPhase, this->func))
  3706. {
  3707. return false;
  3708. }
  3709. if (!loop->regAlloc.symRegUseBv->Test(lifetime->sym->m_id) && !lifetime->needsStoreCompensation)
  3710. {
  3711. if (lifetime->end > branchInstr->GetNumber())
  3712. {
  3713. FOREACH_SLIST_ENTRY(Lifetime **, regContent, loop->regAlloc.exitRegContentList)
  3714. {
  3715. if (regContent[reg] == lifetime)
  3716. {
  3717. regContent[reg] = nullptr;
  3718. }
  3719. } NEXT_SLIST_ENTRY;
  3720. }
  3721. return true;
  3722. }
  3723. return false;
  3724. }
#if defined(_M_IX86) || defined(_M_X64)
// Pre-processes loop back-edge compensation so that an early compensation move
// does not clobber a register that a later compensation move still needs to
// read. Conflicts are broken by emitting an XCHG (x86/x64 only) and swapping
// the corresponding branchRegContent entries. May create an airlock block as a
// side effect (via *pInsertionInstr / *pHasAirlock).
void LinearScan::AvoidCompensationConflicts(IR::LabelInstr *labelInstr, IR::BranchInstr *branchInstr,
                                            Lifetime *labelRegContent[], Lifetime *branchRegContent[],
                                            IR::Instr **pInsertionInstr, IR::Instr **pInsertionStartInstr, bool needsAirlock, bool *pHasAirlock)
{
    bool changed = true;

    // Look for conflicts in the incoming compensation code:
    //      MOV ESI, EAX
    //      MOV ECX, ESI   << ESI was lost...
    // Using XCHG:
    //      XCHG ESI, EAX
    //      MOV ECX, EAX
    //
    // Note that we need to iterate while(changed) to catch all conflicts
    while(changed) {
        // conflictRegs[r] != RegNOREG means: a move already planned in this pass
        // will overwrite r, and the value currently in conflictRegs[r] is the
        // register that move reads from.
        RegNum conflictRegs[RegNumCount] = {RegNOREG};
        changed = false;
        FOREACH_BITSET_IN_UNITBV(reg, this->secondChanceRegs, BitVector)
        {
            Lifetime *labelLifetime = labelRegContent[reg];
            Lifetime *lifetime = branchRegContent[reg];

            // We don't have an XCHG for SSE2 regs
            if (lifetime == labelLifetime || IRType_IsFloat(RegTypes[reg]))
            {
                continue;
            }
            if (this->NeedsLoopBackEdgeCompensation(lifetime, labelInstr))
            {
                // Mismatch, we need to insert compensation code
                *pInsertionInstr = this->EnsureAirlock(needsAirlock, pHasAirlock, *pInsertionInstr, pInsertionStartInstr, branchInstr, labelInstr);

                if (conflictRegs[reg] != RegNOREG)
                {
                    // Eliminate conflict with an XCHG
                    IR::RegOpnd *reg1 = IR::RegOpnd::New(branchRegContent[reg]->sym, (RegNum)reg, RegTypes[reg], this->func);
                    IR::RegOpnd *reg2 = IR::RegOpnd::New(branchRegContent[reg]->sym, conflictRegs[reg], RegTypes[reg], this->func);
                    IR::Instr *instrXchg = IR::Instr::New(Js::OpCode::XCHG, reg1, reg1, reg2, this->func);
                    (*pInsertionInstr)->InsertBefore(instrXchg);
                    instrXchg->CopyNumber(*pInsertionInstr);

                    // Mirror the XCHG in the branch-side table, then continue this
                    // iteration operating on the register we swapped with.
                    Lifetime *tmpLifetime = branchRegContent[reg];
                    branchRegContent[reg] = branchRegContent[conflictRegs[reg]];
                    branchRegContent[conflictRegs[reg]] = tmpLifetime;
                    reg = conflictRegs[reg];
                    changed = true;
                }

                // Find which register the label expects this lifetime in; the
                // compensation move into that register reads from 'reg', so
                // record the dependency for later iterations.
                RegNum labelReg = RegNOREG;
                FOREACH_INT_REG(regIter)
                {
                    if (labelRegContent[regIter] == branchRegContent[reg])
                    {
                        labelReg = regIter;
                        break;
                    }
                } NEXT_INT_REG;

                if (labelReg != RegNOREG)
                {
                    conflictRegs[labelReg] = (RegNum)reg;
                }
            }
        } NEXT_BITSET_IN_UNITBV;
    }
}
#endif
  3787. RegNum
  3788. LinearScan::SecondChanceAllocation(Lifetime *lifetime, bool force)
  3789. {
  3790. if (PHASE_OFF(Js::SecondChancePhase, this->func) || this->func->HasTry())
  3791. {
  3792. return RegNOREG;
  3793. }
  3794. // Don't start a second chance allocation from a helper block
  3795. if (lifetime->dontAllocate || this->IsInHelperBlock() || lifetime->isDeadStore)
  3796. {
  3797. return RegNOREG;
  3798. }
  3799. Assert(lifetime->isSpilled);
  3800. Assert(lifetime->sym->IsConst() || lifetime->sym->IsAllocated());
  3801. RegNum oldReg = lifetime->reg;
  3802. RegNum reg;
  3803. if (lifetime->start == this->currentInstr->GetNumber() || lifetime->end == this->currentInstr->GetNumber())
  3804. {
  3805. // No point doing second chance if the lifetime ends here, or starts here (normal allocation would
  3806. // have found a register if one is available).
  3807. return RegNOREG;
  3808. }
  3809. if (lifetime->sym->IsConst())
  3810. {
  3811. // Can't second-chance allocate because we might have deleted the initial def instr, after
  3812. // having set the reg content on a forward branch...
  3813. return RegNOREG;
  3814. }
  3815. lifetime->reg = RegNOREG;
  3816. lifetime->isSecondChanceAllocated = true;
  3817. reg = this->FindReg(lifetime, nullptr, force);
  3818. lifetime->reg = oldReg;
  3819. if (reg == RegNOREG)
  3820. {
  3821. lifetime->isSecondChanceAllocated = false;
  3822. return reg;
  3823. }
  3824. // Success!! We're re-allocating this lifetime...
  3825. this->SecondChanceAllocateToReg(lifetime, reg);
  3826. return reg;
  3827. }
  3828. void LinearScan::SecondChanceAllocateToReg(Lifetime *lifetime, RegNum reg)
  3829. {
  3830. RegNum oldReg = lifetime->reg;
  3831. if (oldReg != RegNOREG && this->tempRegLifetimes[oldReg] == lifetime)
  3832. {
  3833. this->tempRegs.Clear(oldReg);
  3834. }
  3835. lifetime->isSpilled = false;
  3836. lifetime->isSecondChanceAllocated = true;
  3837. lifetime->lastAllocationStart = this->currentInstr->GetNumber();
  3838. lifetime->reg = RegNOREG;
  3839. this->AssignActiveReg(lifetime, reg);
  3840. this->secondChanceRegs.Set(reg);
  3841. lifetime->sym->scratch.linearScan.lifetime->useList.Clear();
  3842. #if DBG_DUMP
  3843. if (PHASE_TRACE(Js::SecondChancePhase, this->func))
  3844. {
  3845. Output::Print(_u("**** Second chance: "));
  3846. lifetime->sym->Dump();
  3847. Output::Print(_u("\t Reg: %S "), RegNames[reg]);
  3848. Output::Print(_u(" SpillCount:%d Length:%d Cost:%d %S\n"),
  3849. lifetime->useCount, lifetime->end - lifetime->start, this->GetSpillCost(lifetime),
  3850. lifetime->isLiveAcrossCalls ? "LiveAcrossCalls" : "");
  3851. }
  3852. #endif
  3853. }
  3854. IR::Instr *
  3855. LinearScan::InsertAirlock(IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr)
  3856. {
  3857. // Insert a new block on a flow arc:
  3858. // JEQ L1 JEQ L2
  3859. // ... => ...
  3860. // <fallthrough> JMP L1
  3861. // L1: L2:
  3862. // <new block>
  3863. // L1:
  3864. // An airlock is needed when we need to add code on a flow arc, and the code can't
  3865. // be added directly at the source or sink of that flow arc without impacting other
  3866. // code paths.
  3867. bool isOpHelper = labelInstr->isOpHelper;
  3868. if (!isOpHelper)
  3869. {
  3870. // Check if branch is coming from helper block.
  3871. IR::Instr *prevLabel = branchInstr->m_prev;
  3872. while (prevLabel && !prevLabel->IsLabelInstr())
  3873. {
  3874. prevLabel = prevLabel->m_prev;
  3875. }
  3876. if (prevLabel && prevLabel->AsLabelInstr()->isOpHelper)
  3877. {
  3878. isOpHelper = true;
  3879. }
  3880. }
  3881. IR::LabelInstr *airlockLabel = IR::LabelInstr::New(Js::OpCode::Label, this->func, isOpHelper);
  3882. airlockLabel->SetRegion(this->currentRegion);
  3883. #if DBG
  3884. if (isOpHelper)
  3885. {
  3886. if (branchInstr->m_isHelperToNonHelperBranch)
  3887. {
  3888. labelInstr->m_noHelperAssert = true;
  3889. }
  3890. if (labelInstr->isOpHelper && labelInstr->m_noHelperAssert)
  3891. {
  3892. airlockLabel->m_noHelperAssert = true;
  3893. }
  3894. }
  3895. #endif
  3896. bool replaced = branchInstr->ReplaceTarget(labelInstr, airlockLabel);
  3897. Assert(replaced);
  3898. IR::Instr * prevInstr = labelInstr->GetPrevRealInstrOrLabel();
  3899. if (prevInstr->HasFallThrough())
  3900. {
  3901. IR::BranchInstr *branchOverAirlock = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelInstr, this->func);
  3902. prevInstr->InsertAfter(branchOverAirlock);
  3903. branchOverAirlock->CopyNumber(prevInstr);
  3904. prevInstr = branchOverAirlock;
  3905. branchOverAirlock->m_isAirlock = true;
  3906. branchOverAirlock->m_regContent = nullptr;
  3907. }
  3908. prevInstr->InsertAfter(airlockLabel);
  3909. airlockLabel->CopyNumber(prevInstr);
  3910. prevInstr = labelInstr->GetPrevRealInstrOrLabel();
  3911. return labelInstr;
  3912. }
  3913. void
  3914. LinearScan::SaveRegContent(IR::Instr *instr)
  3915. {
  3916. bool isLabelLoopTop = false;
  3917. Lifetime ** regContent = AnewArrayZ(this->tempAlloc, Lifetime *, RegNumCount);
  3918. if (instr->IsBranchInstr())
  3919. {
  3920. instr->AsBranchInstr()->m_regContent = regContent;
  3921. }
  3922. else
  3923. {
  3924. Assert(instr->IsLabelInstr());
  3925. Assert(instr->AsLabelInstr()->m_isLoopTop);
  3926. instr->AsLabelInstr()->m_regContent = regContent;
  3927. isLabelLoopTop = true;
  3928. }
  3929. js_memcpy_s(regContent, (RegNumCount * sizeof(Lifetime *)), this->regContent, sizeof(this->regContent));
  3930. #if DBG
  3931. FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->activeLiveranges)
  3932. {
  3933. Assert(regContent[lifetime->reg] == lifetime);
  3934. } NEXT_SLIST_ENTRY;
  3935. #endif
  3936. }
  3937. bool LinearScan::RegsAvailable(IRType type)
  3938. {
  3939. if (IRType_IsFloat(type) || IRType_IsSimd(type))
  3940. {
  3941. return (this->floatRegUsedCount < FLOAT_REG_COUNT);
  3942. }
  3943. else
  3944. {
  3945. return (this->intRegUsedCount < INT_REG_COUNT);
  3946. }
  3947. }
// Returns how many instruction slots inside op-helper blocks remain between
// the current position and the end of 'lifetime' (whichever is later).
uint LinearScan::GetRemainingHelperLength(Lifetime *const lifetime)
{
    // Walk the helper block linked list starting from the next helper block until the end of the lifetime
    uint helperLength = 0;
    SList<OpHelperBlock>::Iterator it(opHelperBlockIter);
    Assert(it.IsValid());
    // Scan up to whichever is later: the current instruction or the lifetime's end.
    const uint end = max(currentInstr->GetNumber(), lifetime->end);

    do
    {
        const OpHelperBlock &helper = it.Data();
        const uint helperStart = helper.opHelperLabel->GetNumber();
        if(helperStart > end)
        {
            // Helper blocks past 'end' cannot contribute.
            break;
        }

        // Count only the portion of this helper block that overlaps [helperStart, end].
        const uint helperEnd = min(end, helper.opHelperEndInstr->GetNumber());
        helperLength += helperEnd - helperStart;
        if(helperEnd != helper.opHelperEndInstr->GetNumber() || !helper.opHelperEndInstr->IsLabelInstr())
        {
            // A helper block that ends at a label does not return to the function. Since this helper block does not end
            // at a label, include the end instruction as well.
            ++helperLength;
        }
    } while(it.Next());

    return helperLength;
}
  3974. uint LinearScan::CurrentOpHelperVisitedLength(IR::Instr *const currentInstr) const
  3975. {
  3976. Assert(currentInstr);
  3977. if(!currentOpHelperBlock)
  3978. {
  3979. return 0;
  3980. }
  3981. // Consider the current instruction to have not yet been visited
  3982. Assert(currentInstr->GetNumber() >= currentOpHelperBlock->opHelperLabel->GetNumber());
  3983. return currentInstr->GetNumber() - currentOpHelperBlock->opHelperLabel->GetNumber();
  3984. }
// If we are loading a lifetime into a register inside a loop, try to hoist that load outside the loop
// if that register hasn't been used yet. Returns the instruction before which
// the caller should insert the load: the original 'instr', or, on a successful
// hoist, the loop-top label (so the insert lands in the landing pad).
IR::Instr * LinearScan::TryHoistLoad(IR::Instr *instr, Lifetime *lifetime)
{
    RegNum reg = lifetime->reg;
    IR::Instr *insertInstr = instr;

    if (PHASE_OFF(Js::RegHoistLoadsPhase, this->func))
    {
        return insertInstr;
    }

    // Don't hoist inside non-optimized try functions or outside the root region.
    if ((this->func->HasTry() && !this->func->DoOptimizeTry()) || (this->currentRegion && this->currentRegion->GetType() != RegionTypeRoot))
    {
        return insertInstr;
    }

    // Register unused, and lifetime unused yet.
    if (this->IsInLoop() && !this->curLoop->regAlloc.regUseBv.Test(reg)
        && !this->curLoop->regAlloc.defdInLoopBv->Test(lifetime->sym->m_id)
        && !this->curLoop->regAlloc.symRegUseBv->Test(lifetime->sym->m_id)
        && !this->curLoop->regAlloc.hasAirLock)
    {
        // Let's hoist!
        insertInstr = insertInstr->m_prev;

        // Walk each instructions until the top of the loop looking for branches
        while (!insertInstr->IsLabelInstr() || !insertInstr->AsLabelInstr()->m_isLoopTop || !insertInstr->AsLabelInstr()->GetLoop()->IsDescendentOrSelf(this->curLoop))
        {
            if (insertInstr->IsBranchInstr() && insertInstr->AsBranchInstr()->m_regContent)
            {
                IR::BranchInstr *branchInstr = insertInstr->AsBranchInstr();
                // That lifetime might have been in another register coming into the loop, and spilled before used.
                // Clear the reg content.
                FOREACH_REG(regIter)
                {
                    if (branchInstr->m_regContent[regIter] == lifetime)
                    {
                        branchInstr->m_regContent[regIter] = nullptr;
                    }
                } NEXT_REG;
                // Set the regContent for that reg to the lifetime on this branch
                branchInstr->m_regContent[reg] = lifetime;
            }
            insertInstr = insertInstr->m_prev;
        }

        IR::LabelInstr *loopTopLabel = insertInstr->AsLabelInstr();

        // Set the reg content for the loop top correctly as well
        FOREACH_REG(regIter)
        {
            if (loopTopLabel->m_regContent[regIter] == lifetime)
            {
                loopTopLabel->m_regContent[regIter] = nullptr;
                this->curLoop->regAlloc.loopTopRegContent[regIter] = nullptr;
            }
        } NEXT_REG;

        Assert(loopTopLabel->GetLoop() == this->curLoop);
        loopTopLabel->m_regContent[reg] = lifetime;
        this->curLoop->regAlloc.loopTopRegContent[reg] = lifetime;

        this->RecordLoopUse(lifetime, reg);

        IR::LabelInstr *loopLandingPad = nullptr;
        Assert(loopTopLabel->GetNumber() != Js::Constants::NoByteCodeOffset);

        // Insert load in landing pad.
        // Redirect branches to new landing pad. Only forward branches (coming
        // from before the loop top) are redirected; back edges keep targeting
        // the loop top so they skip the hoisted load.
        FOREACH_SLISTCOUNTED_ENTRY_EDITING(IR::BranchInstr *, branchInstr, &loopTopLabel->labelRefs, iter)
        {
            Assert(branchInstr->GetNumber() != Js::Constants::NoByteCodeOffset);
            // <= because the branch may be newly inserted and have the same instr number as the loop top...
            if (branchInstr->GetNumber() <= loopTopLabel->GetNumber())
            {
                if (!loopLandingPad)
                {
                    // Lazily create the landing pad just before the loop top.
                    loopLandingPad = IR::LabelInstr::New(Js::OpCode::Label, this->func);
                    loopLandingPad->SetRegion(this->currentRegion);
                    loopTopLabel->InsertBefore(loopLandingPad);
                    loopLandingPad->CopyNumber(loopTopLabel);
                }
                branchInstr->ReplaceTarget(loopTopLabel, loopLandingPad);
            }
        } NEXT_SLISTCOUNTED_ENTRY_EDITING;
    }
    return insertInstr;
}
#if DBG_DUMP
// Dumps per-function spill statistics: instruction count, loads/stores to
// allocated stack slots in non-helper code, and loop-nest-weighted versions of
// those counts (weighted via GetUseSpillCost).
void LinearScan::PrintStats() const
{
    uint loopNest = 0;
    uint storeCount = 0;
    uint loadCount = 0;
    uint wStoreCount = 0;
    uint wLoadCount = 0;
    uint instrCount = 0;
    bool isInHelper = false;

    // Walk backward so a loop-tail branch increments the nest level and the
    // matching loop-top label decrements it.
    FOREACH_INSTR_IN_FUNC_BACKWARD(instr, this->func)
    {
        switch (instr->GetKind())
        {
        case IR::InstrKindPragma:
            continue;

        case IR::InstrKindBranch:
            if (instr->AsBranchInstr()->IsLoopTail(this->func))
            {
                loopNest++;
            }
            instrCount++;
            break;

        case IR::InstrKindLabel:
        case IR::InstrKindProfiledLabel:
            if (instr->AsLabelInstr()->m_isLoopTop)
            {
                Assert(loopNest);
                loopNest--;
            }
            // Since we walk backward, instructions visited after this point take
            // their helper-ness from this (following, in program order) label.
            isInHelper = instr->AsLabelInstr()->isOpHelper;
            break;

        default:
        {
            Assert(instr->IsRealInstr());
            if (isInHelper)
            {
                // Helper-block code is excluded from the stats.
                continue;
            }

            // A dst sym opnd on an allocated stack sym counts as a spill store.
            IR::Opnd *dst = instr->GetDst();
            if (dst && dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsStackSym() && dst->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
            {
                storeCount++;
                wStoreCount += LinearScan::GetUseSpillCost(loopNest, false);
            }

            // Src sym opnds on allocated stack syms count as spill loads.
            IR::Opnd *src1 = instr->GetSrc1();
            if (src1)
            {
                if (src1->IsSymOpnd() && src1->AsSymOpnd()->m_sym->IsStackSym() && src1->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
                {
                    loadCount++;
                    wLoadCount += LinearScan::GetUseSpillCost(loopNest, false);
                }
                IR::Opnd *src2 = instr->GetSrc2();
                if (src2 && src2->IsSymOpnd() && src2->AsSymOpnd()->m_sym->IsStackSym() && src2->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
                {
                    loadCount++;
                    wLoadCount += LinearScan::GetUseSpillCost(loopNest, false);
                }
            }
        }
        break;
        }
    } NEXT_INSTR_IN_FUNC_BACKWARD;

    Assert(loopNest == 0);

    this->func->DumpFullFunctionName();
    Output::SkipToColumn(45);
    Output::Print(_u("Instrs:%5d, Lds:%4d, Strs:%4d, WLds: %4d, WStrs: %4d, WRefs: %4d\n"),
        instrCount, loadCount, storeCount, wLoadCount, wStoreCount, wLoadCount+wStoreCount);
}
#endif
#ifdef _M_IX86
# if ENABLE_DEBUG_CONFIG_OPTIONS
// Finds a spot to insert an INC near 'instr' such that the INC's flag side
// effects cannot land between an instruction that sets the x86 condition codes
// and the instruction that consumes them.
IR::Instr * LinearScan::GetIncInsertionPoint(IR::Instr *instr)
{
    // Make sure we don't insert an INC between an instr setting the condition code, and one using it.
    IR::Instr *instrNext = instr;
    // Scan forward for the first instr that reads or writes the flags, stopping
    // at a control-flow boundary (label/exit/branch).
    while(!EncoderMD::UsesConditionCode(instrNext) && !EncoderMD::SetsConditionCode(instrNext))
    {
        if (instrNext->IsLabelInstr() || instrNext->IsExitInstr() || instrNext->IsBranchInstr())
        {
            break;
        }
        instrNext = instrNext->GetNextRealInstrOrLabel();
    }

    if (instrNext->IsLowered() && EncoderMD::UsesConditionCode(instrNext))
    {
        // A following instr consumes flags set at or before 'instr': back up to
        // just before the instruction that sets them.
        IR::Instr *instrPrev = instr->GetPrevRealInstrOrLabel();
        while(!EncoderMD::SetsConditionCode(instrPrev))
        {
            instrPrev = instrPrev->GetPrevRealInstrOrLabel();
            // The flag setter must exist within this basic block.
            Assert(!instrPrev->IsLabelInstr());
        }
        return instrPrev;
    }

    // Safe to insert right at 'instr'.
    return instr;
}
  4161. void LinearScan::DynamicStatsInstrument()
  4162. {
  4163. {
  4164. IR::Instr *firstInstr = this->func->m_headInstr;
  4165. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(this->func->GetJITFunctionBody()->GetCallCountStatsAddr(), TyUint32, this->func);
  4166. firstInstr->InsertAfter(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4167. }
  4168. FOREACH_INSTR_IN_FUNC(instr, this->func)
  4169. {
  4170. if (!instr->IsRealInstr() || !instr->IsLowered())
  4171. {
  4172. continue;
  4173. }
  4174. if (EncoderMD::UsesConditionCode(instr) && instr->GetPrevRealInstrOrLabel()->IsLabelInstr())
  4175. {
  4176. continue;
  4177. }
  4178. IR::Opnd *dst = instr->GetDst();
  4179. if (dst && dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsStackSym() && dst->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
  4180. {
  4181. IR::Instr *insertionInstr = this->GetIncInsertionPoint(instr);
  4182. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(this->func->GetJITFunctionBody()->GetRegAllocStoreCountAddr(), TyUint32, this->func);
  4183. insertionInstr->InsertBefore(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4184. }
  4185. IR::Opnd *src1 = instr->GetSrc1();
  4186. if (src1)
  4187. {
  4188. if (src1->IsSymOpnd() && src1->AsSymOpnd()->m_sym->IsStackSym() && src1->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
  4189. {
  4190. IR::Instr *insertionInstr = this->GetIncInsertionPoint(instr);
  4191. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(this->func->GetJITFunctionBody()->GetRegAllocStoreCountAddr(), TyUint32, this->func);
  4192. insertionInstr->InsertBefore(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4193. }
  4194. IR::Opnd *src2 = instr->GetSrc2();
  4195. if (src2 && src2->IsSymOpnd() && src2->AsSymOpnd()->m_sym->IsStackSym() && src2->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
  4196. {
  4197. IR::Instr *insertionInstr = this->GetIncInsertionPoint(instr);
  4198. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(this->func->GetJITFunctionBody()->GetRegAllocStoreCountAddr(), TyUint32, this->func);
  4199. insertionInstr->InsertBefore(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4200. }
  4201. }
  4202. } NEXT_INSTR_IN_FUNC;
  4203. }
  4204. # endif //ENABLE_DEBUG_CONFIG_OPTIONS
  4205. #endif // _M_IX86
  4206. IR::Instr* LinearScan::InsertMove(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr)
  4207. {
  4208. IR::Instr *instrPrev = insertBeforeInstr->m_prev;
  4209. IR::Instr *instrRet = Lowerer::InsertMove(dst, src, insertBeforeInstr);
  4210. for (IR::Instr *instr = instrPrev->m_next; instr != insertBeforeInstr; instr = instr->m_next)
  4211. {
  4212. instr->CopyNumber(insertBeforeInstr);
  4213. }
  4214. return instrRet;
  4215. }
  4216. IR::Instr* LinearScan::InsertLea(IR::RegOpnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr)
  4217. {
  4218. IR::Instr *instrPrev = insertBeforeInstr->m_prev;
  4219. AutoRestoreLegalize restore(insertBeforeInstr->m_func, true);
  4220. IR::Instr *instrRet = Lowerer::InsertLea(dst, src, insertBeforeInstr);
  4221. for (IR::Instr *instr = instrPrev->m_next; instr != insertBeforeInstr; instr = instr->m_next)
  4222. {
  4223. instr->CopyNumber(insertBeforeInstr);
  4224. }
  4225. return instrRet;
  4226. }