LinearScan.cpp 169 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
742784279428042814282428342844285428642874288428942904291429242934294429542964297429842994300430143024303430443054306430743084309431043114312431343144315431643174318431943204321432243234324432543264327432843294330433143324333433443354336433743384339434043414342434343444345434643474348434943504351435243534354435543564357435843594360436143624363436443654366436743684369437043714372437343744375437643774378437943804381438243834384438543864387438843894390439143924393439443954396439743984399440044014402440344044405440644074408440944104411441244134414441544164417441844194420442144224423442444254426442744284429443044314432443344344435443644374438443944404441444244434444444544464447444844494450445144524453445444554456445744584459446044614462446344644465446644674468446944704471447244734474447544764477447844794480448144824483448444854486448744884489449044914492449344944495449644974498449945004501450245034504450545064507450845094510451145124513451445154516451745184519452045214522452345244525452645274528452945304531453245334534453545364537453845394540454145424543454445454546454745484549455045514552455345544555455645574558455945604561456245634564456545664567456845694570457145724573457445754576457745784579458045814582458345844585458645874588458945904591459245934594459545964597459845994600460146024603460446054606460746084609461046114612461346144615461646174618461946204621462246234624462546264627462846294630463146324633463446354636463746384639464046414642464346444645464646474648464946504651465246534654465546564657465846594660466146624663466446654666466746684669467046714672467346744675467646774678467946804681468246834684468546864687468846894690469146924693469446954696469746984699470047014702470347044705470647074708470947104711471247134714471547164717471847194720472147224723472447254726472747284729473047314732473347344735473647374738473947404741474247434744474547464747
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "BackEnd.h"
  6. #include "SCCLiveness.h"
#if DBG_DUMP || ENABLE_DEBUG_CONFIG_OPTIONS
// Debug-only register name tables, one entry per register (RegNumCount total).
// Each entry is the REGDAT ListName stringized; the narrow and wide tables stay
// in sync because both expand the same RegList.h X-macro include.
char const * const RegNames[RegNumCount] =
{
#define REGDAT(Name, ListName, ...) "" STRINGIZE(ListName) "",
#include "RegList.h"
#undef REGDAT
};
// Wide-character variant of RegNames, built from the identical REGDAT expansion.
wchar_t const * const RegNamesW[RegNumCount] =
{
#define REGDAT(Name, ListName, ...) L"" STRINGIZEW(ListName) L"",
#include "RegList.h"
#undef REGDAT
};
#endif
// Per-register attribute flags, one entry per register, taken from the Attribs
// column of the REGDAT table in RegList.h. File-local (static) lookup table.
static const uint8 RegAttribs[RegNumCount] =
{
#define REGDAT(Name, ListName, Encode, Type, Attribs) Attribs,
#include "RegList.h"
#undef REGDAT
};
// Per-register IRType (e.g. TyMachReg vs TyFloat64 — see Init()), taken from
// the Type column of REGDAT. Declared extern: shared with other backend files.
extern const IRType RegTypes[RegNumCount] =
{
#define REGDAT(Name, ListName, Encode, Type, Attribs) Type,
#include "RegList.h"
#undef REGDAT
};
  33. LoweredBasicBlock* LoweredBasicBlock::New(JitArenaAllocator* allocator)
  34. {
  35. return JitAnew(allocator, LoweredBasicBlock, allocator);
  36. }
  37. void LoweredBasicBlock::Copy(LoweredBasicBlock* block)
  38. {
  39. this->inlineeFrameLifetimes.Copy(&block->inlineeFrameLifetimes);
  40. this->inlineeStack.Copy(&block->inlineeStack);
  41. this->inlineeFrameSyms.Copy(&block->inlineeFrameSyms);
  42. }
  43. bool LoweredBasicBlock::HasData()
  44. {
  45. return this->inlineeFrameLifetimes.Count() > 0 || this->inlineeStack.Count() > 0;
  46. }
  47. LoweredBasicBlock* LoweredBasicBlock::Clone(JitArenaAllocator* allocator)
  48. {
  49. if (this->HasData())
  50. {
  51. LoweredBasicBlock* clone = LoweredBasicBlock::New(allocator);
  52. clone->Copy(this);
  53. return clone;
  54. }
  55. return nullptr;
  56. }
  57. bool LoweredBasicBlock::Equals(LoweredBasicBlock* otherBlock)
  58. {
  59. if(this->HasData() != otherBlock->HasData())
  60. {
  61. return false;
  62. }
  63. if (!this->inlineeFrameLifetimes.Equals(&otherBlock->inlineeFrameLifetimes))
  64. {
  65. return false;
  66. }
  67. if (!this->inlineeStack.Equals(&otherBlock->inlineeStack))
  68. {
  69. return false;
  70. }
  71. return true;
  72. }
// LinearScan::RegAlloc
// This register allocator is based on the 1999 linear scan register allocation paper
// by Poletto and Sarkar. This code however walks the IR while doing the lifetime
// allocations, and assigns the regs to all the RegOpnd as it goes. It assumes
// the IR is in R-DFO, and that the lifetime list is sorted in starting order.
// Lifetimes are allocated as they become live, and retired as they go dead. RegOpnd
// are assigned their register. If a lifetime becomes active and there are no free
// registers left, a lifetime is picked to be spilled.
// When we spill, the whole lifetime is spilled. All the loads and stores are done
// through memory for that lifetime, even the ones allocated before the current instruction.
// We do optimize this slightly by not reloading the previous loads that were not in loops.
void
LinearScan::RegAlloc()
{
    // Scratch arena for the whole pass; stored in this->tempAlloc so helpers
    // (see GetTempAlloc) can reach it while RegAlloc is on the stack.
    NoRecoverMemoryJitArenaAllocator tempAlloc(L"BE-LinearScan", this->func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    this->tempAlloc = &tempAlloc;
    // Bookkeeping structures for the scan, all arena-allocated.
    this->opHelperSpilledLiveranges = JitAnew(&tempAlloc, SList<Lifetime *>, &tempAlloc);
    this->activeLiveranges = JitAnew(&tempAlloc, SList<Lifetime *>, &tempAlloc);
    this->liveOnBackEdgeSyms = JitAnew(&tempAlloc, BVSparse<JitArenaAllocator>, &tempAlloc);
    this->stackPackInUseLiveRanges = JitAnew(&tempAlloc, SList<Lifetime *>, &tempAlloc);
    this->stackSlotsFreeList = JitAnew(&tempAlloc, SList<StackSlot *>, &tempAlloc);
    this->currentBlock = LoweredBasicBlock::New(&tempAlloc);
    IR::Instr *currentInstr = this->func->m_headInstr;

    // Run the SCC liveness pass first to build the sorted lifetime list that
    // the linear scan below consumes.
    SCCLiveness liveness(this->func, this->tempAlloc);
    BEGIN_CODEGEN_PHASE(this->func, Js::LivenessPhase);
    // Build the lifetime list
    liveness.Build();
    END_CODEGEN_PHASE(this->func, Js::LivenessPhase);

    this->lifetimeList = &liveness.lifetimeList;
    this->opHelperBlockList = &liveness.opHelperBlockList;
    this->opHelperBlockIter = SList<OpHelperBlock>::Iterator(this->opHelperBlockList);
    this->opHelperBlockIter.Next();
    this->Init();

    NativeCodeData::Allocator * nativeAllocator = this->func->GetNativeCodeDataAllocator();
    if (func->hasBailout)
    {
        // One bailout-record table slot per function id (inlinees + the
        // top-level function, hence m_inlineeId + 1), zero-initialized.
        this->globalBailOutRecordTables = NativeCodeDataNewArrayZ(nativeAllocator, GlobalBailOutRecordDataTable *, func->m_inlineeId + 1);
        this->lastUpdatedRowIndices = JitAnewArrayZ(this->tempAlloc, uint *, func->m_inlineeId + 1);
#ifdef PROFILE_BAILOUT_RECORD_MEMORY
        if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
        {
            this->func->GetScriptContext()->bailOutOffsetBytes += (sizeof(GlobalBailOutRecordDataTable *) * (func->m_inlineeId + 1));
            this->func->GetScriptContext()->bailOutRecordBytes += (sizeof(GlobalBailOutRecordDataTable *) * (func->m_inlineeId + 1));
        }
#endif
    }
    m_bailOutRecordCount = 0;

    // Deferred generator bail-in insertion: set when a BailOutForGeneratorYield
    // is seen, consumed when the walk reaches the label that follows it.
    IR::Instr * insertBailInAfter = nullptr;
    BailOutInfo * bailOutInfoForBailIn = nullptr;

    bool endOfBasicBlock = true;
    FOREACH_INSTR_EDITING(instr, instrNext, currentInstr)
    {
        if (instr->GetNumber() == 0)
        {
            // Instructions with no number were inserted by this pass itself.
            AssertMsg(LowererMD::IsAssign(instr), "Only expect spill code here");
            continue;
        }

#if DBG_DUMP && defined(ENABLE_DEBUG_CONFIG_OPTIONS)
        if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::LinearScanPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
        {
            instr->Dump();
        }
#endif // DBG_DUMP && ENABLE_DEBUG_CONFIG_OPTIONS

        this->currentInstr = instr;
        if(instr->StartsBasicBlock() || endOfBasicBlock)
        {
            endOfBasicBlock = false;
            ++currentBlockNumber;
        }

        if (instr->IsLabelInstr())
        {
            this->lastLabel = instr->AsLabelInstr();
            if (this->lastLabel->m_loweredBasicBlock)
            {
                // A block was already recorded for this label (e.g. from a branch).
                this->currentBlock = this->lastLabel->m_loweredBasicBlock;
            }
            else if(currentBlock->HasData())
            {
                // Check if the previous block has fall-through. If so, retain the block info. If not, create empty info.
                IR::Instr *const prevInstr = instr->GetPrevRealInstrOrLabel();
                Assert(prevInstr);
                if(!prevInstr->HasFallThrough())
                {
                    currentBlock = LoweredBasicBlock::New(&tempAlloc);
                }
            }
            this->currentRegion = this->lastLabel->GetRegion();
        }
        else if (instr->IsBranchInstr())
        {
            if (this->func->HasTry() && this->func->DoOptimizeTryCatch())
            {
                this->ProcessEHRegionBoundary(instr);
            }
            this->ProcessSecondChanceBoundary(instr->AsBranchInstr());
        }

        this->CheckIfInLoop(instr);

        if (this->RemoveDeadStores(instr))
        {
            // Instruction was deleted; nothing more to allocate for it.
            continue;
        }

        if (instr->HasBailOutInfo())
        {
            if (this->currentRegion)
            {
                RegionType curRegType = this->currentRegion->GetType();
                Assert(curRegType != RegionTypeFinally); //Finally regions are not optimized yet
                if (curRegType == RegionTypeTry || curRegType == RegionTypeCatch)
                {
                    this->func->hasBailoutInEHRegion = true;
                }
            }

            this->FillBailOutRecord(instr);
            if (instr->GetBailOutKind() == IR::BailOutForGeneratorYield)
            {
                // The bail-in code must be generated after the label that
                // follows the yield; remember it and emit once we get there.
                Assert(instr->m_next->IsLabelInstr());
                insertBailInAfter = instr->m_next;
                bailOutInfoForBailIn = instr->GetBailOutInfo();
            }
        }

        // Core per-instruction allocation sequence; the order of these calls
        // matters (srcs before kills, kills before new dst lifetimes).
        this->SetSrcRegs(instr);
        this->EndDeadLifetimes(instr);
        this->CheckOpHelper(instr);
        this->KillImplicitRegs(instr);
        this->AllocateNewLifetimes(instr);
        this->SetDstReg(instr);
        this->EndDeadOpHelperLifetimes(instr);

        if (instr->IsLabelInstr())
        {
            this->ProcessSecondChanceBoundary(instr->AsLabelInstr());
        }

#if DBG
        this->CheckInvariants();
#endif // DBG

        if(instr->EndsBasicBlock())
        {
            endOfBasicBlock = true;
        }

        if (insertBailInAfter == instr)
        {
            // Emit the deferred generator bail-in and continue the walk after it.
            instrNext = linearScanMD.GenerateBailInForGeneratorYield(instr, bailOutInfoForBailIn);
            insertBailInAfter = nullptr;
            bailOutInfoForBailIn = nullptr;
        }
    } NEXT_INSTR_EDITING;

    if (func->hasBailout)
    {
        // Finalize every populated bailout-record table now that all rows exist.
        for (uint i = 0; i <= func->m_inlineeId; i++)
        {
            if (globalBailOutRecordTables[i] != nullptr)
            {
                globalBailOutRecordTables[i]->Finalize(nativeAllocator, &tempAlloc);
#ifdef PROFILE_BAILOUT_RECORD_MEMORY
                if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
                {
                    func->GetScriptContext()->bailOutOffsetBytes += sizeof(GlobalBailOutRecordDataRow) * globalBailOutRecordTables[i]->length;
                    func->GetScriptContext()->bailOutRecordBytes += sizeof(GlobalBailOutRecordDataRow) * globalBailOutRecordTables[i]->length;
                }
#endif
            }
        }
    }

    // At the end of the walk every lifetime must be retired and only the
    // permanently-unallocatable registers may remain marked in use.
    AssertMsg((this->intRegUsedCount + this->floatRegUsedCount) == this->linearScanMD.UnAllocatableRegCount(this->func) , "RegUsedCount is wrong");
    AssertMsg(this->activeLiveranges->Empty(), "Active list not empty");
    AssertMsg(this->stackPackInUseLiveRanges->Empty(), "Spilled list not empty");
    AssertMsg(!this->opHelperBlockIter.IsValid(), "Got to the end with a helper block still on the list?");
    Assert(this->currentBlock->inlineeStack.Count() == 0);
    this->InsertOpHelperSpillAndRestores();

#if _M_IX86
# if ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.Instrument.IsEnabled(Js::LinearScanPhase, this->func->GetJnFunction()->GetSourceContextId(),this->func->GetJnFunction()->GetLocalFunctionId()))
    {
        this->DynamicStatsInstrument();
    }
# endif
#endif

#if DBG_DUMP
    if (PHASE_STATS(Js::LinearScanPhase, this->func))
    {
        this->PrintStats();
    }
    if (PHASE_TRACE(Js::StackPackPhase, this->func))
    {
        Output::Print(L"---------------------------\n");
    }
#endif // DBG_DUMP
    DebugOnly(this->func->allowRemoveBailOutArgInstr = true);
}
  261. JitArenaAllocator *
  262. LinearScan::GetTempAlloc()
  263. {
  264. Assert(tempAlloc);
  265. return tempAlloc;
  266. }
#if DBG
// Debug-only consistency check of the allocator's internal state, run after
// every instruction. Verifies that the live-range lists, the register bit
// vectors, and the per-register counters all agree with one another.
void
LinearScan::CheckInvariants() const
{
    // Seed with the permanently-unusable registers so an active lifetime can
    // never claim one of them.
    BitVector bv = this->nonAllocatableRegs;
    uint32 lastend = 0;
    FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->activeLiveranges)
    {
        // Make sure there are only one lifetime per reg
        Assert(!bv.Test(lifetime->reg));
        bv.Set(lifetime->reg);
        Assert(!lifetime->isOpHelperSpilled);
        Assert(!lifetime->isSpilled);
        // The active list must be sorted by lifetime end.
        Assert(lifetime->end >= lastend);
        lastend = lifetime->end;
    }
    NEXT_SLIST_ENTRY;

    // Make sure the active reg bit vector is correct
    Assert(bv.Equal(this->activeRegs));

    // Recount int vs float registers from scratch and compare against the
    // incrementally-maintained counters.
    uint ints = 0, floats = 0;
    FOREACH_BITSET_IN_UNITBV(index, this->activeRegs, BitVector)
    {
        if (IRType_IsFloat(RegTypes[index]))
        {
            floats++;
        }
        else
        {
            ints++;
        }
    }
    NEXT_BITSET_IN_UNITBV;
    Assert(ints == this->intRegUsedCount);
    Assert(floats == this->floatRegUsedCount);
    Assert((this->intRegUsedCount + this->floatRegUsedCount) == this->activeRegs.Count());

    // Same one-lifetime-per-reg / sorted-by-end checks for the op-helper
    // spilled list; lifetimes flagged cantOpHelperSpill are excluded from the
    // register bit vector comparison.
    bv.ClearAll();
    lastend = 0;
    FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->opHelperSpilledLiveranges)
    {
        // Make sure there are only one lifetime per reg in the op helper spilled liveranges
        Assert(!bv.Test(lifetime->reg));
        if (!lifetime->cantOpHelperSpill)
        {
            bv.Set(lifetime->reg);
            Assert(lifetime->isOpHelperSpilled);
            Assert(!lifetime->isSpilled);
        }
        Assert(lifetime->end >= lastend);
        lastend = lifetime->end;
    }
    NEXT_SLIST_ENTRY;

    // Make sure the opHelperSpilledRegs bit vector is correct
    Assert(bv.Equal(this->opHelperSpilledRegs));

    // Every register flagged as a temp must have a matching temp lifetime entry.
    for (int i = 0; i < RegNumCount; i++)
    {
        if (this->tempRegs.Test(i))
        {
            Assert(this->tempRegLifetimes[i]->reg == i);
        }
    }

    // Second-chance registers must map to a second-chance-allocated lifetime
    // whose sym is already materialized (const or spilled to a stack slot).
    FOREACH_BITSET_IN_UNITBV(reg, this->secondChanceRegs, BitVector)
    {
        Lifetime *lifetime = this->regContent[reg];
        Assert(lifetime);
        StackSym *sym = lifetime->sym;
        Assert(lifetime->isSecondChanceAllocated);
        Assert(sym->IsConst() || sym->IsAllocated()); // Should have been spilled already.
    } NEXT_BITSET_IN_UNITBV;
}
#endif // DBG
// LinearScan::Init
// Initialize bit vectors
// Walks every register once, partitioning the register file into the bit
// vectors the allocator consults (allocatable/active, int vs float,
// caller- vs callee-saved) and clearing per-register state.
void
LinearScan::Init()
{
    FOREACH_REG(reg)
    {
        // Registers that can't be used are set to active, and will remain this way
        if (!LinearScan::IsAllocatable(reg))
        {
            this->activeRegs.Set(reg);
            if (IRType_IsFloat(RegTypes[reg]))
            {
                this->floatRegUsedCount++;
            }
            else
            {
                this->intRegUsedCount++;
            }
        }

        // Classify by the register's IRType from the RegTypes table.
        if (RegTypes[reg] == TyMachReg)
        {
            // JIT64_TODO: Rename int32Regs to machIntRegs.
            this->int32Regs.Set(reg);
            numInt32Regs++;
        }
        else if (RegTypes[reg] == TyFloat64)
        {
            this->floatRegs.Set(reg);
            numFloatRegs++;
        }

        // A register may be neither caller- nor callee-saved; the two sets
        // are built independently.
        if (LinearScan::IsCallerSaved(reg))
        {
            this->callerSavedRegs.Set(reg);
        }
        if (LinearScan::IsCalleeSaved(reg))
        {
            this->calleeSavedRegs.Set(reg);
        }

        // No lifetime occupies any register yet.
        this->regContent[reg] = nullptr;
    } NEXT_REG;

    this->instrUseRegs.ClearAll();
    this->secondChanceRegs.ClearAll();

    // Machine-dependent initialization (per-target register conventions).
    this->linearScanMD.Init(this);

#if DBG
    // Snapshot the never-allocatable set; CheckInvariants seeds from it.
    this->nonAllocatableRegs = this->activeRegs;
#endif

#if DBG_DUMP
    if (PHASE_TRACE(Js::LinearScanPhase, this->func))
    {
        this->func->DumpHeader();
    }
#endif
}
// LinearScan::CheckIfInLoop
// Track whether the current instruction is in a loop or not.
//
// Pops loops off the curLoop chain once the instruction number passes their
// recorded loopEnd, and pushes a new loop (initializing its per-loop regAlloc
// bookkeeping) when a loop-top label is encountered.
// Returns true iff, after this update, we are inside at least one loop.
bool
LinearScan::CheckIfInLoop(IR::Instr *instr)
{
    if (this->IsInLoop())
    {
        // Look for end of loop
        AssertMsg(this->curLoop->regAlloc.loopEnd != 0, "Something is wrong here....");

        if (instr->GetNumber() >= this->curLoop->regAlloc.loopEnd)
        {
            AssertMsg(instr->IsBranchInstr(), "Loop tail should be a branchInstr");
            // A single branch can terminate several nested loops; unwind all of them.
            while (this->IsInLoop() && instr->GetNumber() >= this->curLoop->regAlloc.loopEnd)
            {
                this->loopNest--;
                this->curLoop->isProcessed = true;
                this->curLoop = this->curLoop->parent;
                if (this->loopNest == 0)
                {
                    // Left the outermost loop: no back-edge-live syms remain.
                    this->liveOnBackEdgeSyms->ClearAll();
                }
            }
        }
    }

    if (instr->IsLabelInstr() && instr->AsLabelInstr()->m_isLoopTop)
    {
        IR::LabelInstr * labelInstr = instr->AsLabelInstr();
        Loop *parentLoop = this->curLoop;
        if (parentLoop)
        {
            parentLoop->isLeaf = false;
        }
        this->curLoop = labelInstr->GetLoop();
        this->curLoop->isProcessed = false;
        // Lexically nested may not always nest in a flow based way:
        //      while(i--) {
        //          if (cond) {
        //              while(j--) {
        //              }
        //              break;
        //          }
        //      }
        // These look nested, but they are not...
        // So update the flow based parent to be lexical or we won't be able to figure out when we get back
        // to the outer loop.
        // REVIEW: This isn't necessary anymore now that break blocks are moved out of the loops.
        this->curLoop->parent = parentLoop;
        // Set up fresh per-loop register-allocation bookkeeping for this traversal.
        this->curLoop->regAlloc.defdInLoopBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
        this->curLoop->regAlloc.symRegUseBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
        this->curLoop->regAlloc.loopStart = labelInstr->GetNumber();
        this->curLoop->regAlloc.exitRegContentList = JitAnew(this->tempAlloc, SList<Lifetime **>, this->tempAlloc);
        this->curLoop->regAlloc.regUseBv = 0;

        this->liveOnBackEdgeSyms->Or(this->curLoop->regAlloc.liveOnBackEdgeSyms);
        this->loopNest++;
    }

    return this->IsInLoop();
}
  448. void
  449. LinearScan::InsertOpHelperSpillAndRestores()
  450. {
  451. linearScanMD.InsertOpHelperSpillAndRestores(opHelperBlockList);
  452. }
// LinearScan::CheckOpHelper
// Maintain the "current op helper block" state as the scan advances.
// When instr ends the current helper block, decide for each helper-spilled
// lifetime whether it must be saved/restored around the block, re-activate
// it, and reset helper-block state. When instr is the label that opens the
// next helper block in the list, make that block current.
void
LinearScan::CheckOpHelper(IR::Instr *instr)
{
    if (this->IsInHelperBlock())
    {
        if (this->currentOpHelperBlock->opHelperEndInstr == instr)
        {
            // Get targetInstr if we can.
            // We can deterministically get it only for unconditional branches, as conditional branch may fall through.
            IR::Instr * targetInstr = nullptr;
            if (instr->IsBranchInstr() && instr->AsBranchInstr()->IsUnconditional())
            {
                AssertMsg(!instr->AsBranchInstr()->IsMultiBranch(), "Not supported for Multibranch");
                targetInstr = instr->AsBranchInstr()->GetTarget();
            }

            /*
             * Keep track of the number of registers we've had to
             * store and restore around a helper block for LinearScanMD (on ARM
             * and X64). We need this to be able to allocate space in the frame.
             * We can't emit a PUSH/POP sequence around the block like IA32 because
             * the stack pointer can't move outside the prolog.
             */
            uint32 helperSpilledLiverangeCount = 0;

            // Exiting a helper block. We are going to insert
            // the restore here after linear scan. So put all the restored
            // lifetime back to active
            while (!this->opHelperSpilledLiveranges->Empty())
            {
                Lifetime * lifetime = this->opHelperSpilledLiveranges->Pop();
                lifetime->isOpHelperSpilled = false;

                if (!lifetime->cantOpHelperSpill)
                {
                    // Put the life time back to active
                    this->AssignActiveReg(lifetime, lifetime->reg);
                    bool reload = true;
                    // Lifetime ends before the target after helper block, don't need to save and restore helper spilled lifetime.
                    if (targetInstr && lifetime->end < targetInstr->GetNumber())
                    {
                        // However, if lifetime is spilled as arg - we still need to spill it because the helper assumes the value
                        // to be available in the stack
                        if (lifetime->isOpHelperSpillAsArg)
                        {
                            // we should not attempt to restore it as it is dead on return from the helper.
                            reload = false;
                        }
                        else
                        {
                            Assert(!instr->AsBranchInstr()->IsLoopTail(this->func));
                            continue;
                        }
                    }

                    // Save all the lifetime that needs to be restored
                    OpHelperSpilledLifetime spilledLifetime;
                    spilledLifetime.lifetime = lifetime;
                    spilledLifetime.spillAsArg = lifetime->isOpHelperSpillAsArg;
                    spilledLifetime.reload = reload;
                    /*
                     * Can't unfortunately move this into the else block above because we don't know if this
                     * lifetime will actually get spilled until register allocation completes.
                     * Instead we allocate a slot to this StackSym in LinearScanMD iff
                     * !(lifetime.isSpilled && lifetime.noReloadsIfSpilled).
                     */
                    helperSpilledLiverangeCount++;

                    // save the reg in case it is spilled later.  We still need to save and restore
                    // for the non-loop case.
                    spilledLifetime.reg = lifetime->reg;
                    this->currentOpHelperBlock->spilledLifetime.Prepend(spilledLifetime);
                }
                else
                {
                    // Clear it for the next helper block
                    lifetime->cantOpHelperSpill = false;
                }
                lifetime->isOpHelperSpillAsArg = false;
            }

            this->totalOpHelperFullVisitedLength += this->currentOpHelperBlock->Length();

            // Use a dummy label as the insertion point of the reloads, as second-chance-allocation
            // may insert compensation code right before the branch
            IR::PragmaInstr *dummyLabel = IR::PragmaInstr::New(Js::OpCode::Nop, 0, this->func);
            this->currentOpHelperBlock->opHelperEndInstr->InsertBefore(dummyLabel);
            dummyLabel->CopyNumber(this->currentOpHelperBlock->opHelperEndInstr);
            this->currentOpHelperBlock->opHelperEndInstr = dummyLabel;

            this->opHelperSpilledRegs.ClearAll();
            this->currentOpHelperBlock = nullptr;

            linearScanMD.EndOfHelperBlock(helperSpilledLiverangeCount);
        }
    }

    if (this->opHelperBlockIter.IsValid())
    {
        AssertMsg(
            !instr->IsLabelInstr() ||
            !instr->AsLabelInstr()->isOpHelper ||
            this->opHelperBlockIter.Data().opHelperLabel == instr,
            "Found a helper label that doesn't begin the next helper block in the list?");

        // Entering the next helper block in program order.
        if (this->opHelperBlockIter.Data().opHelperLabel == instr)
        {
            this->currentOpHelperBlock = &this->opHelperBlockIter.Data();
            this->opHelperBlockIter.Next();
        }
    }
}
  554. uint
  555. LinearScan::HelperBlockStartInstrNumber() const
  556. {
  557. Assert(IsInHelperBlock());
  558. return this->currentOpHelperBlock->opHelperLabel->GetNumber();
  559. }
  560. uint
  561. LinearScan::HelperBlockEndInstrNumber() const
  562. {
  563. Assert(IsInHelperBlock());
  564. return this->currentOpHelperBlock->opHelperEndInstr->GetNumber();
  565. }
  566. // LinearScan::AddToActive
  567. // Add a lifetime to the active list. The list is kept sorted in order lifetime end.
  568. // This makes it easier to pick the lifetimes to retire.
  569. void
  570. LinearScan::AddToActive(Lifetime * lifetime)
  571. {
  572. LinearScan::AddLiveRange(this->activeLiveranges, lifetime);
  573. this->regContent[lifetime->reg] = lifetime;
  574. if (lifetime->isSecondChanceAllocated)
  575. {
  576. this->secondChanceRegs.Set(lifetime->reg);
  577. }
  578. else
  579. {
  580. Assert(!this->secondChanceRegs.Test(lifetime->reg));
  581. }
  582. }
// LinearScan::AddOpHelperSpilled
// Mark a lifetime as spilled around the current op helper block: track its
// register in opHelperSpilledRegs, vacate the register's content, and queue
// the lifetime on opHelperSpilledLiveranges for restoration at block exit.
void
LinearScan::AddOpHelperSpilled(Lifetime * lifetime)
{
    RegNum reg = lifetime->reg;
    Assert(this->IsInHelperBlock());
    Assert(!this->opHelperSpilledRegs.Test(reg));
    Assert(lifetime->isOpHelperSpilled == false);
    Assert(lifetime->cantOpHelperSpill == false);

    this->opHelperSpilledRegs.Set(reg);
    lifetime->isOpHelperSpilled = true;

    // The register no longer holds this lifetime's value.
    this->regContent[reg] = nullptr;
    this->secondChanceRegs.Clear(reg);

    // If a lifetime is being OpHelper spilled and it's an inlinee arg sym
    // we need to make sure its spilled to the sym offset spill space, i.e. isOpHelperSpillAsArg
    // is set. Otherwise, it's value will not be available on inline frame reconstruction.
    if (this->currentBlock->inlineeFrameSyms.Count() > 0 &&
        this->currentBlock->inlineeFrameSyms.ContainsKey(lifetime->sym->m_id) &&
        (lifetime->sym->m_isSingleDef || !lifetime->defList.Empty()))
    {
        lifetime->isOpHelperSpillAsArg = true;
        if (!lifetime->sym->IsAllocated())
        {
            // Ensure a stack home exists for the arg spill.
            this->AllocateStackSpace(lifetime);
        }
        this->RecordLoopUse(lifetime, lifetime->reg);
    }
    LinearScan::AddLiveRange(this->opHelperSpilledLiveranges, lifetime);
}
  611. void
  612. LinearScan::RemoveOpHelperSpilled(Lifetime * lifetime)
  613. {
  614. Assert(this->IsInHelperBlock());
  615. Assert(lifetime->isOpHelperSpilled);
  616. Assert(lifetime->cantOpHelperSpill == false);
  617. Assert(this->opHelperSpilledRegs.Test(lifetime->reg));
  618. this->opHelperSpilledRegs.Clear(lifetime->reg);
  619. lifetime->isOpHelperSpilled = false;
  620. lifetime->cantOpHelperSpill = false;
  621. lifetime->isOpHelperSpillAsArg = false;
  622. this->opHelperSpilledLiveranges->Remove(lifetime);
  623. }
  624. void
  625. LinearScan::SetCantOpHelperSpill(Lifetime * lifetime)
  626. {
  627. Assert(this->IsInHelperBlock());
  628. Assert(lifetime->isOpHelperSpilled);
  629. Assert(lifetime->cantOpHelperSpill == false);
  630. this->opHelperSpilledRegs.Clear(lifetime->reg);
  631. lifetime->isOpHelperSpilled = false;
  632. lifetime->cantOpHelperSpill = true;
  633. }
// LinearScan::AddLiveRange
// Insert newLifetime into a list that is kept sorted by increasing lifetime
// end, so lifetimes to retire are always at the head.
void
LinearScan::AddLiveRange(SList<Lifetime *> * list, Lifetime * newLifetime)
{
    // Advance the editing iterator to the first entry ending after newLifetime.
    FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, list, iter)
    {
        if (newLifetime->end < lifetime->end)
        {
            break;
        }
    }
    NEXT_SLIST_ENTRY_EDITING;

    // The iterator is positioned at the break point (or the end of the list).
    iter.InsertBefore(newLifetime);
}
// LinearScan::RemoveRegLiveRange
// Remove and return the lifetime in 'list' that currently occupies 'reg'.
// The register is expected to be present; asserts (and returns nullptr in
// release builds) if no lifetime in the list uses it.
Lifetime *
LinearScan::RemoveRegLiveRange(SList<Lifetime *> * list, RegNum reg)
{
    // Find the register in the active set
    FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, list, iter)
    {
        if (lifetime->reg == reg)
        {
            Lifetime * lifetimeReturn = lifetime;
            iter.RemoveCurrent();
            return lifetimeReturn;
        }
    } NEXT_SLIST_ENTRY_EDITING;

    AssertMsg(false, "Can't find life range for a reg");
    return nullptr;
}
// LinearScan::SetDstReg
// Set the reg on each RegOpnd def.
//
// Handles the full def-site protocol: legalizing non-register dsts, marking
// call-setup registers, spilling pre-assigned destinations, and — for syms
// whose lifetime is spilled — choosing between temp-reg assignment,
// second-chance allocation, folding the store into memory, and inserting an
// explicit spill store after the def.
void
LinearScan::SetDstReg(IR::Instr *instr)
{
    //
    // Enregister dst
    //

    IR::Opnd *dst = instr->GetDst();

    if (dst == nullptr)
    {
        return;
    }

    if (!dst->IsRegOpnd())
    {
        // This could be, for instance, a store to a sym with a large offset
        // that was just assigned when we saw the use.
        this->linearScanMD.LegalizeDef(instr);
        return;
    }

    IR::RegOpnd * regOpnd = dst->AsRegOpnd();
    /*
     * If this is a register used to setup a callsite per
     * a calling convention then mark it unavailable to allocate
     * until we see a CALL.
     */
    if (regOpnd->m_isCallArg)
    {
        RegNum callSetupReg = regOpnd->GetReg();
        callSetupRegs.Set(callSetupReg);
    }

    StackSym * stackSym = regOpnd->m_sym;

    // Arg slot sym can be in an RegOpnd for param passed via registers
    // Just use the assigned register
    if (stackSym == nullptr || stackSym->IsArgSlotSym())
    {
        //
        // Already allocated register. just spill the destination
        //
        RegNum reg = regOpnd->GetReg();
        if(LinearScan::IsAllocatable(reg))
        {
            this->SpillReg(reg);
        }
        this->tempRegs.Clear(reg);
    }
    else
    {
        if (regOpnd->GetReg() != RegNOREG)
        {
            this->RecordLoopUse(nullptr, regOpnd->GetReg());
            // Nothing to do
            return;
        }

        Lifetime * lifetime = stackSym->scratch.linearScan.lifetime;
        uint32 useCountCost = LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr));

        // Optimistically decrease the useCount. We'll undo this if we put it on the defList.
        lifetime->SubFromUseCount(useCountCost, this->curLoop);

        if (lifetime->isSpilled)
        {
            if (stackSym->IsConst() && !IsSymNonTempLocalVar(stackSym))
            {
                // We will reload the constant (but in debug mode, we still need to process this if this is a user var).
                return;
            }

            RegNum reg = regOpnd->GetReg();

            if (reg != RegNOREG)
            {
                // It is already assigned, just record it as a temp reg
                this->AssignTempReg(lifetime, reg);
            }
            else
            {
                IR::Opnd *src1 = instr->GetSrc1();
                IR::Opnd *src2 = instr->GetSrc2();

                // Is the dst sym also one of the srcs (an OpEQ-style instruction)?
                if ((src1 && src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym == stackSym) ||
                    (src2 && src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym == stackSym))
                {
                    // OpEQ: src1 should have a valid reg (put src2 for other targets)
                    reg = this->GetAssignedTempReg(lifetime, dst->GetType());
                    Assert(reg != RegNOREG);
                    RecordDef(lifetime, instr, 0);
                }
                else
                {
                    // Try second chance
                    reg = this->SecondChanceAllocation(lifetime, false);
                    if (reg != RegNOREG)
                    {
                        Assert(!stackSym->m_isSingleDef);

                        this->SetReg(regOpnd);
                        // Keep track of defs for this lifetime, in case it gets spilled.
                        RecordDef(lifetime, instr, useCountCost);
                        return;
                    }
                    else
                    {
                        reg = this->GetAssignedTempReg(lifetime, dst->GetType());
                        RecordDef(lifetime, instr, 0);
                    }
                }

                if (LowererMD::IsAssign(instr) && instr->GetSrc1()->IsRegOpnd())
                {
                    // Fold the spilled store
                    if (reg != RegNOREG)
                    {
                        // If the value is in a temp reg, it's not valid any more.
                        this->tempRegs.Clear(reg);
                    }

                    IRType srcType = instr->GetSrc1()->GetType();

                    // Store straight to memory: replace the reg dst with the sym's stack slot.
                    instr->ReplaceDst(IR::SymOpnd::New(stackSym, srcType, this->func));
                    this->linearScanMD.LegalizeDef(instr);
                    return;
                }

                if (reg == RegNOREG)
                {
                    IR::Opnd *src = instr->GetSrc1();
                    if (src && src->IsRegOpnd() && src->AsRegOpnd()->m_sym == stackSym)
                    {
                        // Handle OPEQ's for x86/x64
                        reg = src->AsRegOpnd()->GetReg();
                        AssertMsg(!this->activeRegs.Test(reg), "Shouldn't be active");
                    }
                    else
                    {
                        // The lifetime was spilled, but we still need a reg for this operand.
                        reg = this->FindReg(nullptr, regOpnd);
                    }
                    this->AssignTempReg(lifetime, reg);
                }
            }

            if (!lifetime->isDeadStore && !lifetime->isSecondChanceAllocated)
            {
                // Insert a store since the lifetime is spilled
                IR::Opnd *nextDst = instr->m_next->GetDst();

                // Don't need the store however if the next instruction has the same dst
                if (nextDst == nullptr || !nextDst->IsEqual(regOpnd))
                {
                    this->InsertStore(instr, regOpnd->m_sym, reg);
                }
            }
        }
        else
        {
            if (lifetime->isOpHelperSpilled)
            {
                // We must be in a helper block and the lifetime must
                // start before the helper block

                Assert(this->IsInHelperBlock());
                Assert(lifetime->start < this->HelperBlockStartInstrNumber());

                RegNum reg = lifetime->reg;
                Assert(this->opHelperSpilledRegs.Test(reg));

                if (this->activeRegs.Test(reg))
                {
                    // The reg must have been used locally in the helper block
                    // by some other lifetime. Just spill it

                    this->SpillReg(reg);
                }

                // We can't save/restore this reg across the helper call because the restore would overwrite
                // this def, but the def means we don't need to spill at all.  Mark the lifetime as cantOpHelperSpill
                // however in case another helper call in this block tries to spill it.
                this->SetCantOpHelperSpill(lifetime);

                this->AddToActive(lifetime);

                // Re-activate the register and charge it to its bank's used count.
                this->tempRegs.Clear(reg);
                this->activeRegs.Set(reg);
                if (RegTypes[reg] == TyMachReg)
                {
                    this->intRegUsedCount++;
                }
                else
                {
                    Assert(RegTypes[reg] == TyFloat64);
                    this->floatRegUsedCount++;
                }
            }

            // Keep track of defs for this lifetime, in case it gets spilled.
            RecordDef(lifetime, instr, useCountCost);
        }

        this->SetReg(regOpnd);
    }
}
  844. // Get the stack offset of the non temp locals from the stack.
  845. int32 LinearScan::GetStackOffset(Js::RegSlot regSlotId)
  846. {
  847. Assert(this->func->GetJnFunction());
  848. int32 stackSlotId = regSlotId - this->func->GetJnFunction()->GetFirstNonTempLocalIndex();
  849. Assert(stackSlotId >= 0);
  850. return this->func->GetLocalVarSlotOffset(stackSlotId);
  851. }
  852. //
  853. // This helper function is used for saving bytecode stack sym value to memory / local slots on stack so that we can read it for the locals inspection.
  854. void
  855. LinearScan::WriteThroughForLocal(IR::RegOpnd* regOpnd, Lifetime* lifetime, IR::Instr* instrInsertAfter)
  856. {
  857. Assert(regOpnd);
  858. Assert(lifetime);
  859. Assert(instrInsertAfter);
  860. StackSym* sym = regOpnd->m_sym;
  861. Assert(IsSymNonTempLocalVar(sym));
  862. Js::RegSlot slotIndex = sym->GetByteCodeRegSlot();
  863. // First we insert the write through moves
  864. sym->m_offset = GetStackOffset(slotIndex);
  865. sym->m_allocated = true;
  866. // Save the value on reg to local var slot.
  867. this->InsertStore(instrInsertAfter, sym, lifetime->reg);
  868. }
  869. bool
  870. LinearScan::NeedsWriteThrough(StackSym * sym)
  871. {
  872. return this->NeedsWriteThroughForEH(sym) || this->IsSymNonTempLocalVar(sym);
  873. }
  874. bool
  875. LinearScan::NeedsWriteThroughForEH(StackSym * sym)
  876. {
  877. if (!this->func->HasTry() || !this->func->DoOptimizeTryCatch() || !sym->HasByteCodeRegSlot())
  878. {
  879. return false;
  880. }
  881. Assert(this->currentRegion);
  882. return this->currentRegion->writeThroughSymbolsSet && this->currentRegion->writeThroughSymbolsSet->Test(sym->m_id);
  883. }
  884. // Helper routine to check if current sym belongs to non temp bytecodereg
  885. bool
  886. LinearScan::IsSymNonTempLocalVar(StackSym *sym)
  887. {
  888. Assert(sym);
  889. if (this->func->IsJitInDebugMode() && sym->HasByteCodeRegSlot())
  890. {
  891. Js::RegSlot slotIndex = sym->GetByteCodeRegSlot();
  892. return this->func->IsNonTempLocalVar(slotIndex);
  893. }
  894. return false;
  895. }
  896. // LinearScan::SetSrcRegs
  897. // Set the reg on each RegOpnd use.
  898. // Note that this includes regOpnd of indir dsts...
  899. void
  900. LinearScan::SetSrcRegs(IR::Instr *instr)
  901. {
  902. //
  903. // Enregister srcs
  904. //
  905. IR::Opnd *src1 = instr->GetSrc1();
  906. if (src1 != nullptr)
  907. {
  908. // Capture src2 now as folding in SetUses could swab the srcs...
  909. IR::Opnd *src2 = instr->GetSrc2();
  910. this->SetUses(instr, src1);
  911. if (src2 != nullptr)
  912. {
  913. this->SetUses(instr, src2);
  914. }
  915. }
  916. IR::Opnd *dst = instr->GetDst();
  917. if (dst && dst->IsIndirOpnd())
  918. {
  919. this->SetUses(instr, dst);
  920. }
  921. this->instrUseRegs.ClearAll();
  922. }
  923. // LinearScan::SetUses
  924. void
  925. LinearScan::SetUses(IR::Instr *instr, IR::Opnd *opnd)
  926. {
  927. switch (opnd->GetKind())
  928. {
  929. case IR::OpndKindReg:
  930. this->SetUse(instr, opnd->AsRegOpnd());
  931. break;
  932. case IR::OpndKindSym:
  933. {
  934. Sym * sym = opnd->AsSymOpnd()->m_sym;
  935. if (sym->IsStackSym())
  936. {
  937. StackSym* stackSym = sym->AsStackSym();
  938. if (!stackSym->IsAllocated())
  939. {
  940. func->StackAllocate(stackSym, opnd->GetSize());
  941. // StackSym's lifetime is allocated during SCCLiveness::ProcessDst
  942. // we might not need to set the flag if the sym is not a dst.
  943. if (stackSym->scratch.linearScan.lifetime)
  944. {
  945. stackSym->scratch.linearScan.lifetime->cantStackPack = true;
  946. }
  947. }
  948. this->linearScanMD.LegalizeUse(instr, opnd);
  949. }
  950. }
  951. break;
  952. case IR::OpndKindIndir:
  953. {
  954. IR::IndirOpnd * indirOpnd = opnd->AsIndirOpnd();
  955. this->SetUse(instr, indirOpnd->GetBaseOpnd());
  956. if (indirOpnd->GetIndexOpnd())
  957. {
  958. this->SetUse(instr, indirOpnd->GetIndexOpnd());
  959. }
  960. }
  961. break;
  962. case IR::OpndKindIntConst:
  963. case IR::OpndKindAddr:
  964. this->linearScanMD.LegalizeConstantUse(instr, opnd);
  965. break;
  966. };
  967. }
// Transient state accumulated while filling in a bail-out record:
// the constants captured so far and the syms currently live in registers.
struct FillBailOutState
{
    SListCounted<Js::Var> constantList;           // constants to record in the bail-out record
    uint registerSaveCount;                       // number of registerSaveSyms entries populated
    StackSym * registerSaveSyms[RegNumCount - 1]; // sym live in each register, indexed by (reg - 1)

    FillBailOutState(JitArenaAllocator * allocator) : constantList(allocator) {}
};
// LinearScan::FillBailOutOffset
// Record in *offset where the bail-out machinery can find stackSym's value:
// - for constants: a 1-based index into the constant list, biased past the
//   register save and reserve slots;
// - for spilled syms (and, on BailOutOnException, every non-exception sym):
//   the sym's stack offset;
// - otherwise: the register save slot index, also remembering the sym in
//   state->registerSaveSyms.
void
LinearScan::FillBailOutOffset(int * offset, StackSym * stackSym, FillBailOutState * state, IR::Instr * instr)
{
    AssertMsg(*offset == 0, "Can't have two active lifetime for the same byte code register");
    if (stackSym->IsConst())
    {
        state->constantList.Prepend(reinterpret_cast<Js::Var>(stackSym->GetLiteralConstValue_PostGlobOpt()));

        // Constant offset are offset by the number of register save slots
        *offset = state->constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
    }
    else if (stackSym->m_isEncodedConstant)
    {
        Assert(!stackSym->m_isSingleDef);
        state->constantList.Prepend((Js::Var)stackSym->constantValue);

        // Constant offset are offset by the number of register save slots
        *offset = state->constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
    }
    else
    {
        Lifetime * lifetime = stackSym->scratch.linearScan.lifetime;
        Assert(lifetime && lifetime->start < instr->GetNumber() && instr->GetNumber() <= lifetime->end);
        if (instr->GetBailOutKind() == IR::BailOutOnException)
        {
            // Apart from the exception object sym, lifetimes for all other syms that need to be restored at this bailout,
            // must have been spilled at least once (at the TryCatch, or at the Leave, or both)
            // Post spilling, a lifetime could have been second chance allocated. But, it should still have stack allocated for its sym
            Assert(stackSym->IsAllocated() || (stackSym == this->currentRegion->GetExceptionObjectSym()));
        }
        this->PrepareForUse(lifetime);
        if (lifetime->isSpilled ||
            ((instr->GetBailOutKind() == IR::BailOutOnException) && (stackSym != this->currentRegion->GetExceptionObjectSym()))) // BailOutOnException must restore from memory
        {
            Assert(stackSym->IsAllocated());
#ifdef MD_GROW_LOCALS_AREA_UP
            *offset = -((int)stackSym->m_offset + BailOutInfo::StackSymBias);
#else
            // Stack offset are negative, includes the PUSH EBP and return address
            *offset = stackSym->m_offset - (2 * MachPtr);
#endif
        }
        else
        {
            // The value is live in a register at the bail-out point.
            Assert(lifetime->reg != RegNOREG);
            Assert(state->registerSaveSyms[lifetime->reg - 1] == nullptr ||
                state->registerSaveSyms[lifetime->reg - 1] == stackSym);
            AssertMsg((stackSym->IsFloat64() || stackSym->IsSimd128()) && RegTypes[lifetime->reg] == TyFloat64 ||
                !(stackSym->IsFloat64() || stackSym->IsSimd128()) && RegTypes[lifetime->reg] != TyFloat64,
                "Trying to save float64 sym into non-float64 reg or non-float64 sym into float64 reg");

            // Save the register value to the register save space using the reg enum value as index
            state->registerSaveSyms[lifetime->reg - 1] = stackSym;
            *offset = LinearScanMD::GetRegisterSaveIndex(lifetime->reg);

            state->registerSaveCount++;
        }
    }
}
// Per-function (top func or inlinee) scratch data used while building the
// chain of bail-out records for an instruction.
struct FuncBailOutData
{
    Func * func;
    BailOutRecord * bailOutRecord;
    int * localOffsets;           // per byte-code reg slot: restore location (0 == not live)
    BVFixed * losslessInt32Syms;  // slots holding a lossless int32 value
    BVFixed * float64Syms;        // slots holding a float64 value
    // SIMD_JS
    BVFixed * simd128F4Syms;
    BVFixed * simd128I4Syms;

    void Initialize(Func * func, JitArenaAllocator * tempAllocator);
    void FinalizeLocalOffsets(JitArenaAllocator *allocator, GlobalBailOutRecordDataTable *table, uint **lastUpdatedRowIndices);
    void Clear(JitArenaAllocator * tempAllocator);
};
  1044. void
  1045. FuncBailOutData::Initialize(Func * func, JitArenaAllocator * tempAllocator)
  1046. {
  1047. Js::RegSlot localsCount = func->GetJnFunction()->GetLocalsCount();
  1048. this->func = func;
  1049. this->localOffsets = AnewArrayZ(tempAllocator, int, localsCount);
  1050. this->losslessInt32Syms = BVFixed::New(localsCount, tempAllocator);
  1051. this->float64Syms = BVFixed::New(localsCount, tempAllocator);
  1052. // SIMD_JS
  1053. this->simd128F4Syms = BVFixed::New(localsCount, tempAllocator);
  1054. this->simd128I4Syms = BVFixed::New(localsCount, tempAllocator);
  1055. }
// FuncBailOutData::FinalizeLocalOffsets
// Flush the per-slot offsets gathered for this bail-out into the function's
// GlobalBailOutRecordDataTable (one row added or updated per live slot) and
// count the live slots in the bail-out record. lastUpdatedRowIndices caches,
// per slot, the last table row touched so updates can be done in place.
void
FuncBailOutData::FinalizeLocalOffsets(JitArenaAllocator *allocator, GlobalBailOutRecordDataTable *globalBailOutRecordDataTable, uint **lastUpdatedRowIndices)
{
    Js::RegSlot localsCount = func->GetJnFunction()->GetLocalsCount();
    Assert(globalBailOutRecordDataTable != nullptr);
    Assert(lastUpdatedRowIndices != nullptr);

    if (*lastUpdatedRowIndices == nullptr)
    {
        // Lazily create the per-slot row cache, filled with -1 (no row yet).
        *lastUpdatedRowIndices = JitAnewArrayZ(allocator, uint, localsCount);
        memset(*lastUpdatedRowIndices, -1, sizeof(uint)*localsCount);
    }
    uint32 bailOutRecordId = bailOutRecord->m_bailOutRecordId;
    bailOutRecord->localOffsetsCount = 0;
    for (uint32 i = 0; i < localsCount; i++)
    {
        // if the sym is live
        if (localOffsets[i] != 0)
        {
            bool isFloat = float64Syms->Test(i) != 0;
            bool isInt = losslessInt32Syms->Test(i) != 0;
            // SIMD_JS
            bool isSimd128F4 = simd128F4Syms->Test(i) != 0;
            bool isSimd128I4 = simd128I4Syms->Test(i) != 0;

            globalBailOutRecordDataTable->AddOrUpdateRow(allocator, bailOutRecordId, i, isFloat, isInt, isSimd128F4, isSimd128I4, localOffsets[i], &((*lastUpdatedRowIndices)[i]));
            Assert(globalBailOutRecordDataTable->globalBailOutRecordDataRows[(*lastUpdatedRowIndices)[i]].regSlot == i);
            bailOutRecord->localOffsetsCount++;
        }
    }
}
  1085. void
  1086. FuncBailOutData::Clear(JitArenaAllocator * tempAllocator)
  1087. {
  1088. Js::RegSlot localsCount = func->GetJnFunction()->GetLocalsCount();
  1089. JitAdeleteArray(tempAllocator, localsCount, localOffsets);
  1090. losslessInt32Syms->Delete(tempAllocator);
  1091. float64Syms->Delete(tempAllocator);
  1092. // SIMD_JS
  1093. simd128F4Syms->Delete(tempAllocator);
  1094. simd128I4Syms->Delete(tempAllocator);
  1095. }
// LinearScan::EnsureGlobalBailOutRecordTable
// Return the bail-out record data table for 'func' (the top function uses
// slot 0; inlinees are indexed by inlinee ID), creating and initializing it
// in native code data on first request.
GlobalBailOutRecordDataTable *
LinearScan::EnsureGlobalBailOutRecordTable(Func *func)
{
    Assert(globalBailOutRecordTables != nullptr);
    Func *topFunc = func->GetTopFunc();
    bool isTopFunc = (func == topFunc);
    uint32 inlineeID = isTopFunc ? 0 : func->m_inlineeId;
    NativeCodeData::Allocator * allocator = this->func->GetNativeCodeDataAllocator();

    GlobalBailOutRecordDataTable *globalBailOutRecordDataTable = globalBailOutRecordTables[inlineeID];
    if (globalBailOutRecordDataTable == nullptr)
    {
        // First request for this function: allocate the table and fill in its header.
        globalBailOutRecordDataTable = globalBailOutRecordTables[inlineeID] = NativeCodeDataNew(allocator, GlobalBailOutRecordDataTable);
        globalBailOutRecordDataTable->length = globalBailOutRecordDataTable->size = 0;
        globalBailOutRecordDataTable->isInlinedFunction = !isTopFunc;
        globalBailOutRecordDataTable->isInlinedConstructor = func->IsInlinedConstructor();
        globalBailOutRecordDataTable->isLoopBody = topFunc->IsLoopBody();
        globalBailOutRecordDataTable->returnValueRegSlot = func->returnValueRegSlot;
        globalBailOutRecordDataTable->firstActualStackOffset = -1;
        globalBailOutRecordDataTable->registerSaveSpace = func->GetScriptContext()->GetThreadContext()->GetBailOutRegisterSaveSpace();
        globalBailOutRecordDataTable->globalBailOutRecordDataRows = nullptr;
#ifdef PROFILE_BAILOUT_RECORD_MEMORY
        if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
        {
            topFunc->GetScriptContext()->bailOutOffsetBytes += sizeof(GlobalBailOutRecordDataTable);
            topFunc->GetScriptContext()->bailOutRecordBytes += sizeof(GlobalBailOutRecordDataTable);
        }
#endif
    }
    return globalBailOutRecordDataTable;
}
  1126. void
  1127. LinearScan::FillBailOutRecord(IR::Instr * instr)
  1128. {
  1129. BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
  1130. if (this->func->HasTry())
  1131. {
  1132. RegionType currentRegionType = this->currentRegion->GetType();
  1133. if (currentRegionType == RegionTypeTry || currentRegionType == RegionTypeCatch)
  1134. {
  1135. bailOutInfo->bailOutRecord->ehBailoutData = this->currentRegion->ehBailoutData;
  1136. }
  1137. }
  1138. BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed = bailOutInfo->byteCodeUpwardExposedUsed;
  1139. Func * bailOutFunc = bailOutInfo->bailOutFunc;
  1140. uint funcCount = bailOutFunc->inlineDepth + 1;
  1141. FuncBailOutData * funcBailOutData = AnewArray(this->tempAlloc, FuncBailOutData, funcCount);
  1142. uint index = funcCount - 1;
  1143. funcBailOutData[index].Initialize(bailOutFunc, this->tempAlloc);
  1144. funcBailOutData[index].bailOutRecord = bailOutInfo->bailOutRecord;
  1145. bailOutInfo->bailOutRecord->m_bailOutRecordId = m_bailOutRecordCount++;
  1146. bailOutInfo->bailOutRecord->globalBailOutRecordTable = EnsureGlobalBailOutRecordTable(bailOutFunc);
  1147. NativeCodeData::Allocator * allocator = this->func->GetNativeCodeDataAllocator();
  1148. #if DBG_DUMP
  1149. if(PHASE_DUMP(Js::BailOutPhase, this->func))
  1150. {
  1151. Output::Print(L"-------------------Bailout dump -------------------------\n");
  1152. instr->Dump();
  1153. }
  1154. #endif
  1155. // Generate chained bailout record for inlined functions
  1156. Func * currentFunc = bailOutFunc->GetParentFunc();
  1157. uint bailOutOffset = bailOutFunc->postCallByteCodeOffset;
  1158. while (currentFunc != nullptr)
  1159. {
  1160. Assert(index > 0);
  1161. Assert(bailOutOffset != Js::Constants::NoByteCodeOffset);
  1162. BailOutRecord * bailOutRecord = NativeCodeDataNewZ(allocator, BailOutRecord, bailOutOffset, (uint)-1, IR::BailOutInvalid, currentFunc);
  1163. bailOutRecord->m_bailOutRecordId = m_bailOutRecordCount++;
  1164. bailOutRecord->globalBailOutRecordTable = EnsureGlobalBailOutRecordTable(currentFunc);
  1165. #if ENABLE_DEBUG_CONFIG_OPTIONS
  1166. // To indicate this is a subsequent bailout from an inlinee
  1167. bailOutRecord->bailOutOpcode = Js::OpCode::InlineeEnd;
  1168. #endif
  1169. funcBailOutData[index].bailOutRecord->parent = bailOutRecord;
  1170. index--;
  1171. funcBailOutData[index].bailOutRecord = bailOutRecord;
  1172. funcBailOutData[index].Initialize(currentFunc, this->tempAlloc);
  1173. bailOutOffset = currentFunc->postCallByteCodeOffset;
  1174. currentFunc = currentFunc->GetParentFunc();
  1175. }
  1176. Assert(index == 0);
  1177. Assert(bailOutOffset == Js::Constants::NoByteCodeOffset);
  1178. FillBailOutState state(this->tempAlloc);
  1179. state.registerSaveCount = 0;
  1180. memset(state.registerSaveSyms, 0, sizeof(state.registerSaveSyms));
  1181. // Fill in the constants
  1182. FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, value, &bailOutInfo->usedCapturedValues.constantValues, iterator)
  1183. {
  1184. AssertMsg(bailOutInfo->bailOutRecord->bailOutKind != IR::BailOutForGeneratorYield, "constant prop syms unexpected for bail-in for generator yield");
  1185. StackSym * stackSym = value.Key();
  1186. if(stackSym->HasArgSlotNum())
  1187. {
  1188. continue;
  1189. }
  1190. Assert(stackSym->HasByteCodeRegSlot());
  1191. Js::RegSlot i = stackSym->GetByteCodeRegSlot();
  1192. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1193. uint index = stackSymFunc->inlineDepth;
  1194. Assert(i != Js::Constants::NoRegister);
  1195. Assert(i < stackSymFunc->GetJnFunction()->GetLocalsCount());
  1196. Assert(index < funcCount);
  1197. __analysis_assume(index < funcCount);
  1198. Assert(funcBailOutData[index].func == stackSymFunc);
  1199. Assert(!byteCodeUpwardExposedUsed->Test(stackSym->m_id));
  1200. BailoutConstantValue constValue = value.Value();
  1201. Js::Var varValue = constValue.ToVar(this->func, stackSymFunc->GetScriptContext());
  1202. state.constantList.Prepend(varValue);
  1203. AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
  1204. // Constant offset are offset by the number of register save slots
  1205. funcBailOutData[index].localOffsets[i] = state.constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
  1206. #if DBG_DUMP
  1207. if(PHASE_DUMP(Js::BailOutPhase, this->func))
  1208. {
  1209. Output::Print(L"Constant stack sym #%d (argOut:%s): ", i, IsTrueOrFalse(stackSym->HasArgSlotNum()));
  1210. stackSym->Dump();
  1211. Output::Print(L" (0x%p (Var) Offset: %d)\n", varValue, funcBailOutData[index].localOffsets[i]);
  1212. }
  1213. #endif
  1214. iterator.RemoveCurrent(this->func->m_alloc);
  1215. }
  1216. NEXT_SLISTBASE_ENTRY_EDITING;
  1217. // Fill in the copy prop syms
  1218. FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSyms, &bailOutInfo->usedCapturedValues.copyPropSyms, iter)
  1219. {
  1220. AssertMsg(bailOutInfo->bailOutRecord->bailOutKind != IR::BailOutForGeneratorYield, "copy prop syms unexpected for bail-in for generator yield");
  1221. StackSym * stackSym = copyPropSyms.Key();
  1222. if(stackSym->HasArgSlotNum())
  1223. {
  1224. continue;
  1225. }
  1226. Js::RegSlot i = stackSym->GetByteCodeRegSlot();
  1227. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1228. uint index = stackSymFunc->inlineDepth;
  1229. Assert(i != Js::Constants::NoRegister);
  1230. Assert(i < stackSymFunc->GetJnFunction()->GetLocalsCount());
  1231. Assert(index < funcCount);
  1232. __analysis_assume(index < funcCount);
  1233. Assert(funcBailOutData[index].func == stackSymFunc);
  1234. AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
  1235. Assert(!byteCodeUpwardExposedUsed->Test(stackSym->m_id));
  1236. StackSym * copyStackSym = copyPropSyms.Value();
  1237. this->FillBailOutOffset(&funcBailOutData[index].localOffsets[i], copyStackSym, &state, instr);
  1238. if (copyStackSym->IsInt32())
  1239. {
  1240. funcBailOutData[index].losslessInt32Syms->Set(i);
  1241. }
  1242. else if (copyStackSym->IsFloat64())
  1243. {
  1244. funcBailOutData[index].float64Syms->Set(i);
  1245. }
  1246. // SIMD_JS
  1247. else if (copyStackSym->IsSimd128F4())
  1248. {
  1249. funcBailOutData[index].simd128F4Syms->Set(i);
  1250. }
  1251. else if (copyStackSym->IsSimd128I4())
  1252. {
  1253. funcBailOutData[index].simd128I4Syms->Set(i);
  1254. }
  1255. iter.RemoveCurrent(this->func->m_alloc);
  1256. }
  1257. NEXT_SLISTBASE_ENTRY_EDITING;
  1258. // Fill in the upward exposed syms
  1259. FOREACH_BITSET_IN_SPARSEBV(id, byteCodeUpwardExposedUsed)
  1260. {
  1261. StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
  1262. Assert(stackSym != nullptr);
  1263. Js::RegSlot i = stackSym->GetByteCodeRegSlot();
  1264. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1265. uint index = stackSymFunc->inlineDepth;
  1266. Assert(i != Js::Constants::NoRegister);
  1267. Assert(i < stackSymFunc->GetJnFunction()->GetLocalsCount());
  1268. Assert(index < funcCount);
  1269. __analysis_assume(index < funcCount);
  1270. Assert(funcBailOutData[index].func == stackSymFunc);
  1271. AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
  1272. this->FillBailOutOffset(&funcBailOutData[index].localOffsets[i], stackSym, &state, instr);
  1273. if (stackSym->IsInt32())
  1274. {
  1275. funcBailOutData[index].losslessInt32Syms->Set(i);
  1276. }
  1277. else if (stackSym->IsFloat64())
  1278. {
  1279. funcBailOutData[index].float64Syms->Set(i);
  1280. }
  1281. // SIMD_JS
  1282. else if (stackSym->IsSimd128F4())
  1283. {
  1284. funcBailOutData[index].simd128F4Syms->Set(i);
  1285. }
  1286. else if (stackSym->IsSimd128I4())
  1287. {
  1288. funcBailOutData[index].simd128I4Syms->Set(i);
  1289. }
  1290. }
  1291. NEXT_BITSET_IN_SPARSEBV;
  1292. if (bailOutInfo->usedCapturedValues.argObjSyms)
  1293. {
  1294. FOREACH_BITSET_IN_SPARSEBV(id, bailOutInfo->usedCapturedValues.argObjSyms)
  1295. {
  1296. StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
  1297. Assert(stackSym != nullptr);
  1298. Js::RegSlot i = stackSym->GetByteCodeRegSlot();
  1299. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1300. uint index = stackSymFunc->inlineDepth;
  1301. Assert(i != Js::Constants::NoRegister);
  1302. Assert(i < stackSymFunc->GetJnFunction()->GetLocalsCount());
  1303. Assert(index < funcCount);
  1304. __analysis_assume(index < funcCount);
  1305. Assert(funcBailOutData[index].func == stackSymFunc);
  1306. AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
  1307. funcBailOutData[index].localOffsets[i] = BailOutRecord::GetArgumentsObjectOffset();
  1308. }
  1309. NEXT_BITSET_IN_SPARSEBV;
  1310. }
  1311. // In the debug mode, fill in the rest of non temp locals as well in the records so that the restore stub will just get it automatically.
  1312. if (this->func->IsJitInDebugMode())
  1313. {
  1314. Js::FunctionBody* functionBody = this->func->GetJnFunction();
  1315. Assert(functionBody);
  1316. // Need to allow filling the formal args slots.
  1317. Js::PropertyIdOnRegSlotsContainer *propertyIdContainer = functionBody->GetPropertyIdOnRegSlotsContainer();
  1318. bool hasFormalArgs = propertyIdContainer != nullptr && propertyIdContainer->propertyIdsForFormalArgs != nullptr;
  1319. if (hasFormalArgs)
  1320. {
  1321. for (uint32 index = functionBody->GetFirstNonTempLocalIndex(); index < functionBody->GetEndNonTempLocalIndex(); index++)
  1322. {
  1323. StackSym * stackSym = this->func->m_symTable->FindStackSym(index);
  1324. if (stackSym != nullptr)
  1325. {
  1326. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1327. Js::RegSlot regSlotId = stackSym->GetByteCodeRegSlot();
  1328. if (functionBody->IsNonTempLocalVar(regSlotId))
  1329. {
  1330. if (!propertyIdContainer->IsRegSlotFormal(regSlotId - functionBody->GetFirstNonTempLocalIndex()))
  1331. {
  1332. continue;
  1333. }
  1334. uint dataIndex = stackSymFunc->inlineDepth;
  1335. Assert(dataIndex == 0); // There is no inlining while in debug mode
  1336. // Filling in which are not filled already.
  1337. __analysis_assume(dataIndex == 0);
  1338. if (funcBailOutData[dataIndex].localOffsets[regSlotId] == 0)
  1339. {
  1340. int32 offset = GetStackOffset(regSlotId);
  1341. #ifdef MD_GROW_LOCALS_AREA_UP
  1342. Assert(offset >= 0);
  1343. #else
  1344. Assert(offset < 0);
  1345. #endif
  1346. funcBailOutData[dataIndex].localOffsets[regSlotId] = this->func->AdjustOffsetValue(offset);
1347. // We don't support type spec in debug mode; rework the assert below once we start supporting it.
  1348. Assert(!stackSym->IsInt32() && !stackSym->IsFloat64() && !stackSym->IsSimd128());
  1349. }
  1350. }
  1351. }
  1352. }
  1353. }
  1354. }
  1355. // fill in the out params
  1356. uint startCallCount = bailOutInfo->startCallCount;
  1357. if (bailOutInfo->totalOutParamCount != 0)
  1358. {
  1359. Assert(startCallCount != 0);
  1360. uint argOutSlot = 0;
  1361. uint * startCallOutParamCounts = NativeCodeDataNewArray(allocator, uint, startCallCount);
  1362. #ifdef _M_IX86
  1363. uint * startCallArgRestoreAdjustCounts = NativeCodeDataNewArray(allocator, uint, startCallCount);
  1364. #endif
  1365. BVFixed * argOutFloat64Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocator);
  1366. BVFixed * argOutLosslessInt32Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocator);
  1367. // SIMD_JS
  1368. BVFixed * argOutSimd128F4Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocator);
  1369. BVFixed * argOutSimd128I4Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocator);
  1370. int* outParamOffsets = bailOutInfo->outParamOffsets = NativeCodeDataNewArrayZ(allocator, int, bailOutInfo->totalOutParamCount);
  1371. #ifdef _M_IX86
  1372. int currentStackOffset = 0;
  1373. bailOutInfo->outParamFrameAdjustArgSlot = JitAnew(this->func->m_alloc, BVSparse<JitArenaAllocator>, this->func->m_alloc);
  1374. #endif
  1375. if (this->func->HasInlinee())
  1376. {
  1377. bailOutInfo->outParamInlinedArgSlot = JitAnew(this->func->m_alloc, BVSparse<JitArenaAllocator>, this->func->m_alloc);
  1378. }
  1379. #if DBG
  1380. uint lastFuncIndex = 0;
  1381. #endif
  1382. for (uint i = 0; i < startCallCount; i++)
  1383. {
  1384. uint outParamStart = argOutSlot; // Start of the out param offset for the current start call
  1385. // Number of out param for the current start call
  1386. uint outParamCount = bailOutInfo->GetStartCallOutParamCount(i);
  1387. startCallOutParamCounts[i] = outParamCount;
  1388. #ifdef _M_IX86
  1389. startCallArgRestoreAdjustCounts[i] = bailOutInfo->startCallInfo[i].argRestoreAdjustCount;
  1390. // Only x86 has a progression of pushes of out args, with stack alignment.
  1391. bool fDoStackAdjust = false;
  1392. if (!bailOutInfo->inlinedStartCall->Test(i))
  1393. {
  1394. // Only do the stack adjustment if the StartCall has not been moved down past the bailout.
  1395. fDoStackAdjust = bailOutInfo->NeedsStartCallAdjust(i, instr);
  1396. if (fDoStackAdjust)
  1397. {
  1398. currentStackOffset -= Math::Align<int>(outParamCount * MachPtr, MachStackAlignment);
  1399. }
  1400. }
  1401. #endif
  1402. Func * currentStartCallFunc = bailOutInfo->startCallFunc[i];
  1403. #if DBG
  1404. Assert(lastFuncIndex <= currentStartCallFunc->inlineDepth);
  1405. lastFuncIndex = currentStartCallFunc->inlineDepth;
  1406. #endif
  1407. FuncBailOutData& currentFuncBailOutData = funcBailOutData[currentStartCallFunc->inlineDepth];
  1408. BailOutRecord * currentBailOutRecord = currentFuncBailOutData.bailOutRecord;
  1409. if (currentBailOutRecord->argOutOffsetInfo == nullptr)
  1410. {
  1411. currentBailOutRecord->argOutOffsetInfo = NativeCodeDataNew(allocator, BailOutRecord::ArgOutOffsetInfo);
  1412. currentBailOutRecord->argOutOffsetInfo->argOutFloat64Syms = nullptr;
  1413. currentBailOutRecord->argOutOffsetInfo->argOutLosslessInt32Syms = nullptr;
  1414. // SIMD_JS
  1415. currentBailOutRecord->argOutOffsetInfo->argOutSimd128F4Syms = nullptr;
  1416. currentBailOutRecord->argOutOffsetInfo->argOutSimd128I4Syms = nullptr;
  1417. currentBailOutRecord->argOutOffsetInfo->argOutSymStart = 0;
  1418. currentBailOutRecord->argOutOffsetInfo->outParamOffsets = nullptr;
  1419. currentBailOutRecord->argOutOffsetInfo->startCallOutParamCounts = nullptr;
  1420. #ifdef PROFILE_BAILOUT_RECORD_MEMORY
  1421. if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
  1422. {
  1423. this->func->GetScriptContext()->bailOutRecordBytes += sizeof(BailOutRecord::ArgOutOffsetInfo);
  1424. }
  1425. #endif
  1426. }
  1427. currentBailOutRecord->argOutOffsetInfo->startCallCount++;
  1428. if (currentBailOutRecord->argOutOffsetInfo->outParamOffsets == nullptr)
  1429. {
  1430. Assert(currentBailOutRecord->argOutOffsetInfo->startCallOutParamCounts == nullptr);
  1431. currentBailOutRecord->argOutOffsetInfo->startCallOutParamCounts = &startCallOutParamCounts[i];
  1432. #ifdef _M_IX86
  1433. currentBailOutRecord->startCallArgRestoreAdjustCounts = &startCallArgRestoreAdjustCounts[i];
  1434. #endif
  1435. currentBailOutRecord->argOutOffsetInfo->outParamOffsets = &outParamOffsets[outParamStart];
  1436. currentBailOutRecord->argOutOffsetInfo->argOutSymStart = outParamStart;
  1437. currentBailOutRecord->argOutOffsetInfo->argOutFloat64Syms = argOutFloat64Syms;
  1438. currentBailOutRecord->argOutOffsetInfo->argOutLosslessInt32Syms = argOutLosslessInt32Syms;
  1439. // SIMD_JS
  1440. currentBailOutRecord->argOutOffsetInfo->argOutSimd128F4Syms = argOutSimd128F4Syms;
  1441. currentBailOutRecord->argOutOffsetInfo->argOutSimd128I4Syms = argOutSimd128I4Syms;
  1442. }
  1443. #if DBG_DUMP
  1444. if (PHASE_DUMP(Js::BailOutPhase, this->func))
  1445. {
  1446. Output::Print(L"Bailout function: %s [#%d] \n", currentStartCallFunc->GetJnFunction()->GetDisplayName(),
  1447. currentStartCallFunc->GetJnFunction()->GetFunctionNumber());
  1448. }
  1449. #endif
  1450. for (uint j = 0; j < outParamCount; j++, argOutSlot++)
  1451. {
  1452. StackSym * sym = bailOutInfo->argOutSyms[argOutSlot];
  1453. if (sym == nullptr)
  1454. {
  1455. // This can happen when instr with bailout occurs before all ArgOuts for current call instr are processed.
  1456. continue;
  1457. }
  1458. Assert(sym->GetArgSlotNum() > 0 && sym->GetArgSlotNum() <= outParamCount);
  1459. uint argSlot = sym->GetArgSlotNum() - 1;
  1460. uint outParamOffsetIndex = outParamStart + argSlot;
  1461. if (!sym->m_isBailOutReferenced && !sym->IsArgSlotSym())
  1462. {
  1463. FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, constantValue, &bailOutInfo->usedCapturedValues.constantValues, iterator)
  1464. {
  1465. if (constantValue.Key()->m_id == sym->m_id)
  1466. {
  1467. Js::Var varValue = constantValue.Value().ToVar(func, currentStartCallFunc->GetScriptContext());
  1468. state.constantList.Prepend(varValue);
  1469. outParamOffsets[outParamOffsetIndex] = state.constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
  1470. #if DBG_DUMP
  1471. if (PHASE_DUMP(Js::BailOutPhase, this->func))
  1472. {
  1473. Output::Print(L"OutParam #%d: ", argSlot);
  1474. sym->Dump();
  1475. Output::Print(L" (0x%p (Var)))\n", varValue);
  1476. }
  1477. #endif
  1478. iterator.RemoveCurrent(func->m_alloc);
  1479. break;
  1480. }
  1481. }
  1482. NEXT_SLISTBASE_ENTRY_EDITING;
  1483. if (outParamOffsets[outParamOffsetIndex])
  1484. {
  1485. continue;
  1486. }
  1487. FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSym, &bailOutInfo->usedCapturedValues.copyPropSyms, iter)
  1488. {
  1489. if (copyPropSym.Key()->m_id == sym->m_id)
  1490. {
  1491. StackSym * copyStackSym = copyPropSym.Value();
  1492. BVSparse<JitArenaAllocator>* argObjSyms = bailOutInfo->usedCapturedValues.argObjSyms;
  1493. if (argObjSyms && argObjSyms->Test(copyStackSym->m_id))
  1494. {
  1495. outParamOffsets[outParamOffsetIndex] = BailOutRecord::GetArgumentsObjectOffset();
  1496. }
  1497. else
  1498. {
  1499. this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], copyStackSym, &state, instr);
  1500. if (copyStackSym->IsInt32())
  1501. {
  1502. argOutLosslessInt32Syms->Set(outParamOffsetIndex);
  1503. }
  1504. else if (copyStackSym->IsFloat64())
  1505. {
  1506. argOutFloat64Syms->Set(outParamOffsetIndex);
  1507. }
  1508. // SIMD_JS
  1509. else if (copyStackSym->IsSimd128F4())
  1510. {
  1511. argOutSimd128F4Syms->Set(outParamOffsetIndex);
  1512. }
  1513. else if (copyStackSym->IsSimd128I4())
  1514. {
  1515. argOutSimd128I4Syms->Set(outParamOffsetIndex);
  1516. }
  1517. }
  1518. #if DBG_DUMP
  1519. if (PHASE_DUMP(Js::BailOutPhase, this->func))
  1520. {
  1521. Output::Print(L"OutParam #%d: ", argSlot);
  1522. sym->Dump();
  1523. Output::Print(L" Copy Prop sym:");
  1524. copyStackSym->Dump();
  1525. Output::Print(L"\n");
  1526. }
  1527. #endif
  1528. iter.RemoveCurrent(func->m_alloc);
  1529. break;
  1530. }
  1531. }
  1532. NEXT_SLISTBASE_ENTRY_EDITING;
  1533. Assert(outParamOffsets[outParamOffsetIndex] != 0);
  1534. }
  1535. else
  1536. {
  1537. if (sym->IsArgSlotSym())
  1538. {
  1539. if (sym->m_isSingleDef)
  1540. {
  1541. Assert(sym->m_instrDef->m_func == currentStartCallFunc);
  1542. IR::Instr * instrDef = sym->m_instrDef;
  1543. Assert(LowererMD::IsAssign(instrDef));
  1544. if (instrDef->GetNumber() < instr->GetNumber())
  1545. {
  1546. // The ArgOut instr is above current bailout instr.
  1547. AssertMsg(sym->IsVar(), "Arg out slot can only be var.");
  1548. if (sym->m_isInlinedArgSlot)
  1549. {
  1550. Assert(this->func->HasInlinee());
  1551. #ifdef MD_GROW_LOCALS_AREA_UP
  1552. outParamOffsets[outParamOffsetIndex] = -((int)sym->m_offset + BailOutInfo::StackSymBias);
  1553. #else
  1554. outParamOffsets[outParamOffsetIndex] = sym->m_offset;
  1555. #endif
  1556. bailOutInfo->outParamInlinedArgSlot->Set(outParamOffsetIndex);
  1557. }
  1558. else if (sym->m_isOrphanedArg)
  1559. {
  1560. #ifdef MD_GROW_LOCALS_AREA_UP
  1561. outParamOffsets[outParamOffsetIndex] = -((int)sym->m_offset + BailOutInfo::StackSymBias);
  1562. #else
  1563. // Stack offset are negative, includes the PUSH EBP and return address
  1564. outParamOffsets[outParamOffsetIndex] = sym->m_offset - (2 * MachPtr);
  1565. #endif
  1566. }
  1567. #ifdef _M_IX86
  1568. else if (fDoStackAdjust)
  1569. {
  1570. // If we've got args on the stack, then we must have seen (and adjusted for) the StartCall.
1571. // The value is already on the stack
  1572. // On AMD64/ARM, ArgOut should have been moved next to the call, and shouldn't have bailout between them
  1573. // Except for inlined arg outs
  1574. outParamOffsets[outParamOffsetIndex] = currentStackOffset + argSlot * MachPtr;
  1575. bailOutInfo->outParamFrameAdjustArgSlot->Set(outParamOffsetIndex);
  1576. }
  1577. #endif
  1578. else
  1579. {
  1580. this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], sym, &state, instr);
  1581. }
  1582. }
  1583. else
  1584. {
  1585. // The ArgOut instruction might have moved down right next to the call,
  1586. // because of a register calling convention, cloning, etc. This loop walks the chain
  1587. // of assignments to try to find the original location of the assignment where
  1588. // the value is available.
  1589. while (!sym->IsConst())
  1590. {
  1591. // the value is in the register
  1592. IR::RegOpnd * regOpnd = instrDef->GetSrc1()->AsRegOpnd();
  1593. sym = regOpnd->m_sym;
  1594. if (sym->scratch.linearScan.lifetime->start < instr->GetNumber())
  1595. {
  1596. break;
  1597. }
  1598. if (sym->m_isEncodedConstant)
  1599. {
  1600. break;
  1601. }
  1602. // For out parameter we might need to follow multiple assignments
  1603. Assert(sym->m_isSingleDef);
  1604. instrDef = sym->m_instrDef;
  1605. Assert(LowererMD::IsAssign(instrDef));
  1606. }
  1607. if (bailOutInfo->usedCapturedValues.argObjSyms && bailOutInfo->usedCapturedValues.argObjSyms->Test(sym->m_id))
  1608. {
  1609. //foo.apply(this,arguments) case and we bailout when the apply is overridden. We need to restore the arguments object.
  1610. outParamOffsets[outParamOffsetIndex] = BailOutRecord::GetArgumentsObjectOffset();
  1611. }
  1612. else
  1613. {
  1614. this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], sym, &state, instr);
  1615. }
  1616. }
  1617. }
  1618. }
  1619. else
  1620. {
  1621. this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], sym, &state, instr);
  1622. }
  1623. if (sym->IsFloat64())
  1624. {
  1625. argOutFloat64Syms->Set(outParamOffsetIndex);
  1626. }
  1627. else if (sym->IsInt32())
  1628. {
  1629. argOutLosslessInt32Syms->Set(outParamOffsetIndex);
  1630. }
  1631. // SIMD_JS
  1632. else if (sym->IsSimd128F4())
  1633. {
  1634. argOutSimd128F4Syms->Set(outParamOffsetIndex);
  1635. }
  1636. else if (sym->IsSimd128I4())
  1637. {
  1638. argOutSimd128I4Syms->Set(outParamOffsetIndex);
  1639. }
  1640. #if DBG_DUMP
  1641. if (PHASE_DUMP(Js::BailOutPhase, this->func))
  1642. {
  1643. Output::Print(L"OutParam #%d: ", argSlot);
  1644. sym->Dump();
  1645. Output::Print(L"\n");
  1646. }
  1647. #endif
  1648. }
  1649. }
  1650. }
  1651. }
  1652. else
  1653. {
  1654. Assert(bailOutInfo->argOutSyms == nullptr);
  1655. Assert(bailOutInfo->startCallCount == 0);
  1656. }
  1657. if (this->currentBlock->inlineeStack.Count() > 0)
  1658. {
  1659. this->SpillInlineeArgs(instr);
  1660. }
  1661. else
  1662. {
  1663. // There is a chance that the instruction was hoisting from an inlinee func
  1664. // but if there are no inlinee frames - make sure the instr belongs to the outer func
  1665. // to ensure encoder does not encode an inline frame here - which does not really exist
  1666. instr->m_func = this->func;
  1667. }
  1668. linearScanMD.GenerateBailOut(instr, state.registerSaveSyms, _countof(state.registerSaveSyms));
  1669. // generate the constant table
  1670. Js::Var * constants = NativeCodeDataNewArray(allocator, Js::Var, state.constantList.Count());
  1671. uint constantCount = state.constantList.Count();
  1672. while (!state.constantList.Empty())
  1673. {
  1674. Js::Var value = state.constantList.Head();
  1675. state.constantList.RemoveHead();
  1676. constants[state.constantList.Count()] = value;
  1677. }
  1678. // Generate the stack literal bail out info
  1679. FillStackLiteralBailOutRecord(instr, bailOutInfo, funcBailOutData, funcCount);
  1680. for (uint i = 0; i < funcCount; i++)
  1681. {
  1682. funcBailOutData[i].bailOutRecord->constants = constants;
  1683. #if DBG
  1684. funcBailOutData[i].bailOutRecord->inlineDepth = funcBailOutData[i].func->inlineDepth;
  1685. funcBailOutData[i].bailOutRecord->constantCount = constantCount;
  1686. #endif
  1687. uint32 tableIndex = funcBailOutData[i].func->IsTopFunc() ? 0 : funcBailOutData[i].func->m_inlineeId;
  1688. funcBailOutData[i].FinalizeLocalOffsets(tempAlloc, this->globalBailOutRecordTables[tableIndex], &(this->lastUpdatedRowIndices[tableIndex]));
  1689. #if DBG_DUMP
  1690. if(PHASE_DUMP(Js::BailOutPhase, this->func))
  1691. {
  1692. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  1693. Output::Print(L"Bailout function: %s [%s]\n", funcBailOutData[i].func->GetJnFunction()->GetDisplayName(), funcBailOutData[i].func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer), i);
  1694. funcBailOutData[i].bailOutRecord->Dump();
  1695. }
  1696. #endif
  1697. funcBailOutData[i].Clear(this->tempAlloc);
  1698. #ifdef PROFILE_BAILOUT_RECORD_MEMORY
  1699. if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
  1700. {
  1701. this->func->GetScriptContext()->bailOutRecordBytes += sizeof(BailOutRecord);
  1702. }
  1703. #endif
  1704. }
  1705. JitAdeleteArray(this->tempAlloc, funcCount, funcBailOutData);
  1706. }
  1707. template <typename Fn>
  1708. void
  1709. LinearScan::ForEachStackLiteralBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo, FuncBailOutData * funcBailOutData, uint funcCount, Fn fn)
  1710. {
  1711. for (uint i = 0; i < bailOutInfo->stackLiteralBailOutInfoCount; i++)
  1712. {
  1713. BailOutInfo::StackLiteralBailOutInfo& stackLiteralBailOutInfo = bailOutInfo->stackLiteralBailOutInfo[i];
  1714. StackSym * stackSym = stackLiteralBailOutInfo.stackSym;
  1715. Assert(stackSym->scratch.linearScan.lifetime->start < instr->GetNumber());
  1716. Assert(stackSym->scratch.linearScan.lifetime->end >= instr->GetNumber());
  1717. Js::RegSlot regSlot = stackSym->GetByteCodeRegSlot();
  1718. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1719. uint index = stackSymFunc->inlineDepth;
  1720. Assert(regSlot != Js::Constants::NoRegister);
  1721. Assert(regSlot < stackSymFunc->GetJnFunction()->GetLocalsCount());
  1722. Assert(index < funcCount);
  1723. Assert(funcBailOutData[index].func == stackSymFunc);
  1724. Assert(funcBailOutData[index].localOffsets[regSlot] != 0);
  1725. fn(index, stackLiteralBailOutInfo, regSlot);
  1726. }
  1727. }
  1728. void
  1729. LinearScan::FillStackLiteralBailOutRecord(IR::Instr * instr, BailOutInfo * bailOutInfo, FuncBailOutData * funcBailOutData, uint funcCount)
  1730. {
  1731. if (bailOutInfo->stackLiteralBailOutInfoCount)
  1732. {
  1733. // Count the data
  1734. ForEachStackLiteralBailOutInfo(instr, bailOutInfo, funcBailOutData, funcCount,
  1735. [=](uint funcIndex, BailOutInfo::StackLiteralBailOutInfo& stackLiteralBailOutInfo, Js::RegSlot regSlot)
  1736. {
  1737. funcBailOutData[funcIndex].bailOutRecord->stackLiteralBailOutRecordCount++;
  1738. });
  1739. // Allocate the data
  1740. NativeCodeData::Allocator * allocator = this->func->GetNativeCodeDataAllocator();
  1741. for (uint i = 0; i < funcCount; i++)
  1742. {
  1743. uint stackLiteralBailOutRecordCount = funcBailOutData[i].bailOutRecord->stackLiteralBailOutRecordCount;
  1744. if (stackLiteralBailOutRecordCount)
  1745. {
  1746. funcBailOutData[i].bailOutRecord->stackLiteralBailOutRecord =
  1747. NativeCodeDataNewArray(allocator, BailOutRecord::StackLiteralBailOutRecord, stackLiteralBailOutRecordCount);
  1748. // reset the count so we can track how much we have filled below
  1749. funcBailOutData[i].bailOutRecord->stackLiteralBailOutRecordCount = 0;
  1750. }
  1751. }
  1752. // Fill out the data
  1753. ForEachStackLiteralBailOutInfo(instr, bailOutInfo, funcBailOutData, funcCount,
  1754. [=](uint funcIndex, BailOutInfo::StackLiteralBailOutInfo& stackLiteralBailOutInfo, Js::RegSlot regSlot)
  1755. {
  1756. uint& recordIndex = funcBailOutData[funcIndex].bailOutRecord->stackLiteralBailOutRecordCount;
  1757. BailOutRecord::StackLiteralBailOutRecord& stackLiteralBailOutRecord =
  1758. funcBailOutData[funcIndex].bailOutRecord->stackLiteralBailOutRecord[recordIndex++];
  1759. stackLiteralBailOutRecord.regSlot = regSlot;
  1760. stackLiteralBailOutRecord.initFldCount = stackLiteralBailOutInfo.initFldCount;
  1761. });
  1762. }
  1763. }
  1764. void
  1765. LinearScan::PrepareForUse(Lifetime * lifetime)
  1766. {
  1767. if (lifetime->isOpHelperSpilled)
  1768. {
  1769. // using a value in a helper that has been spilled in the helper block.
  1770. // Just spill it for real
  1771. // We must be in a helper block and the lifetime must
  1772. // start before the helper block
  1773. Assert(this->IsInHelperBlock());
  1774. Assert(lifetime->start < this->HelperBlockStartInstrNumber());
  1775. IR::Instr *insertionInstr = this->currentOpHelperBlock->opHelperLabel;
  1776. this->RemoveOpHelperSpilled(lifetime);
  1777. this->SpillLiveRange(lifetime, insertionInstr);
  1778. }
  1779. }
  1780. void
  1781. LinearScan::RecordUse(Lifetime * lifetime, IR::Instr * instr, IR::RegOpnd * regOpnd, bool isFromBailout)
  1782. {
  1783. uint32 useCountCost = LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr || isFromBailout));
  1784. // We only spill at the use for constants (i.e. reload) or for function with try blocks. We don't
  1785. // have real accurate flow info for the later.
  1786. if ((regOpnd && regOpnd->m_sym->IsConst())
  1787. || (
  1788. (this->func->HasTry() && !this->func->DoOptimizeTryCatch()) &&
  1789. this->IsInLoop() &&
  1790. lifetime->lastUseLabel != this->lastLabel &&
  1791. this->liveOnBackEdgeSyms->Test(lifetime->sym->m_id) &&
  1792. !(lifetime->previousDefBlockNumber == currentBlockNumber && !lifetime->defList.Empty())
  1793. ))
  1794. {
  1795. // Keep track of all the uses of this lifetime in case we decide to spill it.
  1796. // Note that we won't need to insert reloads if the use are not in a loop,
  1797. // unless it is a const. We always reload const instead of spilling to the stack.
  1798. //
  1799. // We also don't need to insert reloads if the previous use was in the same basic block (the first use in the block
  1800. // would have done the reload), or the previous def is in the same basic block and the value is still live. Furthermore,
  1801. // if the previous def is in the same basic block, the value is still live, and there's another def after this use in
  1802. // the same basic block, the previous def may not do a spill store, so we must not reload the value from the stack.
  1803. lifetime->useList.Prepend(instr);
  1804. lifetime->lastUseLabel = this->lastLabel;
  1805. lifetime->AddToUseCountAdjust(useCountCost, this->curLoop, this->func);
  1806. }
  1807. else
  1808. {
  1809. if (!isFromBailout)
  1810. {
  1811. // Since we won't reload this use if the lifetime gets spilled, adjust the spill cost to reflect this.
  1812. lifetime->SubFromUseCount(useCountCost, this->curLoop);
  1813. }
  1814. }
  1815. if (this->IsInLoop())
  1816. {
  1817. this->RecordLoopUse(lifetime, lifetime->reg);
  1818. }
  1819. }
  1820. void LinearScan::RecordLoopUse(Lifetime *lifetime, RegNum reg)
  1821. {
  1822. if (!this->IsInLoop())
  1823. {
  1824. return;
  1825. }
  1826. if (this->func->HasTry() && !this->func->DoOptimizeTryCatch())
  1827. {
  1828. return;
  1829. }
  1830. // Record on each loop which register live into the loop ended up being used.
  1831. // We are trying to avoid the need for compensation at the bottom of the loop if
  1832. // the reg ends up being spilled before it is actually used.
  1833. Loop *curLoop = this->curLoop;
  1834. SymID symId = (SymID)-1;
  1835. if (lifetime)
  1836. {
  1837. symId = lifetime->sym->m_id;
  1838. }
  1839. while (curLoop)
  1840. {
  1841. // Note that if the lifetime is spilled and reallocated to the same register,
  1842. // will mark it as used when we shouldn't. However, it is hard at this point to handle
  1843. // the case were a flow edge from the previous allocation merges in with the new allocation.
  1844. // No compensation is inserted to let us know with previous lifetime needs reloading at the bottom of the loop...
  1845. if (lifetime && curLoop->regAlloc.loopTopRegContent[reg] == lifetime)
  1846. {
  1847. curLoop->regAlloc.symRegUseBv->Set(symId);
  1848. }
  1849. curLoop->regAlloc.regUseBv.Set(reg);
  1850. curLoop = curLoop->parent;
  1851. }
  1852. }
// LinearScan::RecordDef
// Record a register definition of 'sym' at 'instr' on its lifetime so that,
// if the lifetime is later spilled, we know where stores must be inserted
// and what they would cost (useCountCost).
void
LinearScan::RecordDef(Lifetime *const lifetime, IR::Instr *const instr, const uint32 useCountCost)
{
    Assert(lifetime);
    Assert(instr);
    Assert(instr->GetDst());
    IR::RegOpnd * regOpnd = instr->GetDst()->AsRegOpnd();
    Assert(regOpnd);

    StackSym *const sym = regOpnd->m_sym;

    if (this->IsInLoop())
    {
        // Mark the sym as defined, and its register as used, in this loop and
        // every enclosing loop.
        Loop *curLoop = this->curLoop;

        while (curLoop)
        {
            curLoop->regAlloc.defdInLoopBv->Set(lifetime->sym->m_id);
            curLoop->regAlloc.regUseBv.Set(lifetime->reg);
            curLoop = curLoop->parent;
        }
    }

    if (lifetime->isSpilled)
    {
        // Already spilled; no def tracking needed.
        return;
    }

    if (this->NeedsWriteThrough(sym))
    {
        if (this->IsSymNonTempLocalVar(sym))
        {
            // In the debug mode, we will write through on the stack location.
            WriteThroughForLocal(regOpnd, lifetime, instr);
        }
        else
        {
            // If this is a write-through sym, it should be live on the entry to 'try' and should have already
            // been allocated when we spilled all active lifetimes there.
            // If it was not part of the active lifetimes on entry to the 'try' then it must have been spilled
            // earlier and should have stack allocated for it.
            Assert(this->NeedsWriteThroughForEH(sym) && sym->IsAllocated());
            this->InsertStore(instr, sym, lifetime->reg);
        }

        // No need to record-def further as we already have stack allocated for it.
        return;
    }

    if (sym->m_isSingleDef)
    {
        lifetime->AddToUseCount(useCountCost, this->curLoop, this->func);
        // the def of a single-def sym is already on the sym
        return;
    }

    if(lifetime->previousDefBlockNumber == currentBlockNumber && !lifetime->defList.Empty())
    {
        // Only keep track of the last def in each basic block. When there are multiple defs of a sym in a basic block, upon
        // spill of that sym, a store needs to be inserted only after the last def of the sym.
        Assert(lifetime->defList.Head()->GetDst()->AsRegOpnd()->m_sym == sym);
        lifetime->defList.Head() = instr;
    }
    else
    {
        // First def of this sym in the current basic block
        lifetime->previousDefBlockNumber = currentBlockNumber;
        lifetime->defList.Prepend(instr);

        // Keep track of the cost of reinserting all the defs if we choose to spill this way.
        lifetime->allDefsCost += useCountCost;
    }
}
// LinearScan::SetUse
// Handle a register-operand use at 'instr': if the operand's lifetime is
// spilled, try const-folding, second-chance allocation, memory-operand
// folding, or a temp-register reload (in that order) to satisfy the use,
// then stamp the physical register onto the operand.
void
LinearScan::SetUse(IR::Instr *instr, IR::RegOpnd *regOpnd)
{
    if (regOpnd->GetReg() != RegNOREG)
    {
        // Operand already carries a physical register; just record the use
        // for the enclosing loops' bookkeeping.
        this->RecordLoopUse(nullptr, regOpnd->GetReg());
        return;
    }

    StackSym *sym = regOpnd->m_sym;
    Lifetime * lifetime = sym->scratch.linearScan.lifetime;

    this->PrepareForUse(lifetime);

    if (lifetime->isSpilled)
    {
        // See if it has been loaded in this basic block
        RegNum reg = this->GetAssignedTempReg(lifetime, regOpnd->GetType());
        if (reg == RegNOREG)
        {
            // Cheapest option: fold the constant directly into the instruction.
            if (sym->IsConst() && EncoderMD::TryConstFold(instr, regOpnd))
            {
                return;
            }

            reg = this->SecondChanceAllocation(lifetime, false);
            if (reg != RegNOREG)
            {
                IR::Instr *insertInstr = this->TryHoistLoad(instr, lifetime);
                this->InsertLoad(insertInstr, sym, reg);
            }
            else
            {
                // Try folding if there are no registers available
                if (!sym->IsConst() && !this->RegsAvailable(regOpnd->GetType()) && EncoderMD::TryFold(instr, regOpnd))
                {
                    return;
                }

                // We need a reg no matter what. Try to force second chance to re-allocate this.
                reg = this->SecondChanceAllocation(lifetime, true);
                if (reg == RegNOREG)
                {
                    // Forcing second chance didn't work.
                    // Allocate a new temp reg for it
                    reg = this->FindReg(nullptr, regOpnd);
                    this->AssignTempReg(lifetime, reg);
                }

                this->InsertLoad(instr, sym, reg);
            }
        }
    }
    if (!lifetime->isSpilled && instr->GetNumber() < lifetime->end)
    {
        // Don't bother to record the use if this is the last use of the lifetime.
        this->RecordUse(lifetime, instr, regOpnd);
    }
    else
    {
        lifetime->SubFromUseCount(LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr)), this->curLoop);
    }
    // Reserve this register for the duration of the current instruction so
    // a sibling operand doesn't grab it.
    this->instrUseRegs.Set(lifetime->reg);

    this->SetReg(regOpnd);
}
  1977. // LinearScan::SetReg
  1978. void
  1979. LinearScan::SetReg(IR::RegOpnd *regOpnd)
  1980. {
  1981. if (regOpnd->GetReg() == RegNOREG)
  1982. {
  1983. RegNum reg = regOpnd->m_sym->scratch.linearScan.lifetime->reg;
  1984. AssertMsg(reg != RegNOREG, "Reg should be allocated here...");
  1985. regOpnd->SetReg(reg);
  1986. }
  1987. }
  1988. bool
  1989. LinearScan::SkipNumberedInstr(IR::Instr *instr)
  1990. {
  1991. if (instr->IsLabelInstr())
  1992. {
  1993. if (instr->AsLabelInstr()->m_isLoopTop)
  1994. {
  1995. Assert(instr->GetNumber() != instr->m_next->GetNumber()
  1996. && (instr->GetNumber() != instr->m_prev->GetNumber() || instr->m_prev->m_opcode == Js::OpCode::Nop));
  1997. }
  1998. else
  1999. {
  2000. return true;
  2001. }
  2002. }
  2003. return false;
  2004. }
// LinearScan::EndDeadLifetimes
// Look for lifetimes that are ending here, and retire them.
// Frees the registers of active lifetimes whose range ends at or before
// 'instr', and returns the stack slots of ended spilled lifetimes to the
// stack-packing free list.
void
LinearScan::EndDeadLifetimes(IR::Instr *instr)
{
    Lifetime * deadLifetime;

    if (this->SkipNumberedInstr(instr))
    {
        return;
    }

    // Retire all active lifetime ending at this instruction
    // (activeLiveranges is kept ordered by 'end', so we only look at the head).
    while (!this->activeLiveranges->Empty() && this->activeLiveranges->Head()->end <= instr->GetNumber())
    {
        deadLifetime = this->activeLiveranges->Head();
        deadLifetime->defList.Clear();
        deadLifetime->useList.Clear();

        this->activeLiveranges->RemoveHead();
        RegNum reg = deadLifetime->reg;
        // Release the register and all per-register tracking for it.
        this->activeRegs.Clear(reg);
        this->regContent[reg] = nullptr;
        this->secondChanceRegs.Clear(reg);
        if (RegTypes[reg] == TyMachReg)
        {
            this->intRegUsedCount--;
        }
        else
        {
            Assert(RegTypes[reg] == TyFloat64);
            this->floatRegUsedCount--;
        }
    }

    // Look for spilled lifetimes which end here such that we can make their stack slot
    // available for stack-packing.
    while (!this->stackPackInUseLiveRanges->Empty() && this->stackPackInUseLiveRanges->Head()->end <= instr->GetNumber())
    {
        deadLifetime = this->stackPackInUseLiveRanges->Head();
        deadLifetime->defList.Clear();
        deadLifetime->useList.Clear();
        this->stackPackInUseLiveRanges->RemoveHead();

        if (!deadLifetime->cantStackPack)
        {
            Assert(deadLifetime->spillStackSlot);
            // Stamp the slot with its last-use point so AllocateStackSpace can
            // tell when it is safe to reuse for a later lifetime.
            deadLifetime->spillStackSlot->lastUse = deadLifetime->end;
            this->stackSlotsFreeList->Push(deadLifetime->spillStackSlot);
        }
    }
}
// LinearScan::EndDeadOpHelperLifetimes
// Retire helper-spilled lifetimes that end at or before 'instr'. Such
// lifetimes never escape the helper block, so no save/restore around the
// block is required for them; just release the helper-spilled register and
// reset the per-lifetime helper-spill flags.
void
LinearScan::EndDeadOpHelperLifetimes(IR::Instr * instr)
{
    if (this->SkipNumberedInstr(instr))
    {
        return;
    }

    while (!this->opHelperSpilledLiveranges->Empty() &&
           this->opHelperSpilledLiveranges->Head()->end <= instr->GetNumber())
    {
        Lifetime * deadLifetime;

        // The lifetime doesn't extend beyond the helper block
        // No need to save and restore around the helper block
        Assert(this->IsInHelperBlock());

        deadLifetime = this->opHelperSpilledLiveranges->Head();
        this->opHelperSpilledLiveranges->RemoveHead();
        if (!deadLifetime->cantOpHelperSpill)
        {
            this->opHelperSpilledRegs.Clear(deadLifetime->reg);
        }
        deadLifetime->isOpHelperSpilled = false;
        deadLifetime->cantOpHelperSpill = false;
        deadLifetime->isOpHelperSpillAsArg = false;
    }
}
// LinearScan::AllocateNewLifetimes
// Look for lifetimes coming live, and allocate a register for them.
// Also seeds register preferences for simple assigns so the copy can
// potentially be peeped away later.
void
LinearScan::AllocateNewLifetimes(IR::Instr *instr)
{
    if (this->SkipNumberedInstr(instr))
    {
        return;
    }

    // Try to catch:
    //      x = MOV y(r1)
    // where y's lifetime just ended and x's lifetime is starting.
    // If so, set r1 as a preferred register for x, which may allow peeps to remove the MOV
    if (instr->GetSrc1() && instr->GetSrc1()->IsRegOpnd() && LowererMD::IsAssign(instr) && instr->GetDst() && instr->GetDst()->IsRegOpnd() && instr->GetDst()->AsRegOpnd()->m_sym)
    {
        IR::RegOpnd *src = instr->GetSrc1()->AsRegOpnd();
        StackSym *srcSym = src->m_sym;
        // If src is a physReg ref, or src's lifetime ends here.
        if (!srcSym || srcSym->scratch.linearScan.lifetime->end == instr->GetNumber())
        {
            Lifetime *dstLifetime = instr->GetDst()->AsRegOpnd()->m_sym->scratch.linearScan.lifetime;
            if (dstLifetime)
            {
                dstLifetime->regPreference.Set(src->GetReg());
            }
        }
    }

    // Look for starting lifetimes
    // (lifetimeList is ordered by 'start', so only the head needs checking).
    while (!this->lifetimeList->Empty() && this->lifetimeList->Head()->start <= instr->GetNumber())
    {
        // We're at the start of a new live range
        Lifetime * newLifetime = this->lifetimeList->Head();
        newLifetime->lastAllocationStart = instr->GetNumber();

        this->lifetimeList->RemoveHead();

        if (newLifetime->dontAllocate)
        {
            // Lifetime spilled before beginning allocation (e.g., a lifetime known to span
            // multiple EH regions.) Do the work of spilling it now without adding it to the list.
            this->SpillLiveRange(newLifetime);
            continue;
        }

        RegNum reg;
        if (newLifetime->reg == RegNOREG)
        {
            if (newLifetime->isDeadStore)
            {
                // No uses, let's not waste a reg.
                newLifetime->isSpilled = true;
                continue;
            }
            reg = this->FindReg(newLifetime, nullptr);
        }
        else
        {
            // This lifetime is already assigned a physical register. Make
            // sure that register is available by calling SpillReg
            reg = newLifetime->reg;

            // If we're in a helper block, the physical register we're trying to ensure is available might get helper
            // spilled. Don't allow that if this lifetime's end lies beyond the end of the helper block because
            // spill code assumes that this physical register isn't active at the end of the helper block when it tries
            // to restore it. So we'd have to really spill the lifetime then anyway.
            this->SpillReg(reg, IsInHelperBlock() ? (newLifetime->end > currentOpHelperBlock->opHelperEndInstr->GetNumber()) : false);
            newLifetime->cantSpill = true;
        }

        // If we did get a register for this lifetime, add it to the active set.
        // (FindReg may have decided to spill newLifetime itself instead.)
        if (newLifetime->isSpilled == false)
        {
            this->AssignActiveReg(newLifetime, reg);
        }
    }
}
// LinearScan::FindReg
// Look for an available register. If one isn't available, spill something.
// Note that the newLifetime passed in could be the one we end up spilling.
//
// Exactly one of newLifetime/regOpnd is used to determine the register class:
// newLifetime when allocating for a live range, regOpnd when allocating a
// temp register for a single use. 'force' is forwarded to Spill to bypass
// the spill-cost comparison against newLifetime.
RegNum
LinearScan::FindReg(Lifetime *newLifetime, IR::RegOpnd *regOpnd, bool force)
{
    BVIndex regIndex = BVInvalidIndex;
    IRType type;
    bool tryCallerSavedRegs = false;
    BitVector callerSavedAvailableBv;

    if (newLifetime)
    {
        // Derive the register class from the lifetime's type flags.
        if (newLifetime->isFloat)
        {
            type = TyFloat64;
        }
        else if (newLifetime->isSimd128F4)
        {
            type = TySimd128F4;
        }
        else if (newLifetime->isSimd128I4)
        {
            type = TySimd128I4;
        }
        else if (newLifetime->isSimd128D2)
        {
            type = TySimd128D2;
        }
        else
        {
            type = TyMachReg;
        }
    }
    else
    {
        Assert(regOpnd);
        type = regOpnd->GetType();
    }

    if (this->RegsAvailable(type))
    {
        // Build the set of free registers: everything not active, not used by
        // the current instruction, and not reserved for call setup.
        BitVector regsBv;
        regsBv.Copy(this->activeRegs);
        regsBv.Or(this->instrUseRegs);
        regsBv.Or(this->callSetupRegs);
        regsBv.ComplimentAll();

        if (newLifetime)
        {
            if (this->IsInHelperBlock())
            {
                if (newLifetime->end >= this->HelperBlockEndInstrNumber())
                {
                    // this lifetime goes beyond the helper function
                    // We need to exclude the helper spilled register as well.
                    regsBv.Minus(this->opHelperSpilledRegs);
                }
            }

            if (newLifetime->isFloat || newLifetime->isSimd128())
            {
#ifdef _M_IX86
                Assert(AutoSystemInfo::Data.SSE2Available());
#endif
                regsBv.And(this->floatRegs);
            }
            else
            {
                regsBv.And(this->int32Regs);
                regsBv = this->linearScanMD.FilterRegIntSizeConstraints(regsBv, newLifetime->intUsageBv);
            }

            if (newLifetime->isLiveAcrossCalls)
            {
                // Try to find a callee saved regs
                BitVector regsBvTemp = regsBv;
                regsBvTemp.And(this->calleeSavedRegs);

                regIndex = GetPreferrencedRegIndex(newLifetime, regsBvTemp);

                if (regIndex == BVInvalidIndex)
                {
                    if (!newLifetime->isLiveAcrossUserCalls)
                    {
                        // No callee saved regs is found and the lifetime only across helper
                        // calls, we can also use a caller saved regs to make use of the
                        // save and restore around helper blocks
                        regIndex = GetPreferrencedRegIndex(newLifetime, regsBv);
                    }
                    else
                    {
                        // If we can't find a callee-saved reg, we can try using a caller-saved reg instead.
                        // We'll hopefully get a few loads enregistered that way before we get to the call.
                        tryCallerSavedRegs = true;
                        callerSavedAvailableBv = regsBv;
                    }
                }
            }
            else
            {
                regIndex = GetPreferrencedRegIndex(newLifetime, regsBv);
            }
        }
        else
        {
            AssertMsg(regOpnd, "Need a lifetime or a regOpnd passed in");
            if (regOpnd->IsFloat() || regOpnd->IsSimd128())
            {
#ifdef _M_IX86
                Assert(AutoSystemInfo::Data.SSE2Available());
#endif
                regsBv.And(this->floatRegs);
            }
            else
            {
                regsBv.And(this->int32Regs);
                BitVector regSizeBv;
                regSizeBv.ClearAll();
                regSizeBv.Set(TySize[regOpnd->GetType()]);
                regsBv = this->linearScanMD.FilterRegIntSizeConstraints(regsBv, regSizeBv);
            }

            if (!this->tempRegs.IsEmpty())
            {
                // avoid the temp reg that we have loaded in this basic block
                BitVector regsBvTemp = regsBv;
                regsBvTemp.Minus(this->tempRegs);
                regIndex = regsBvTemp.GetPrevBit();
            }

            if (regIndex == BVInvalidIndex)
            {
                // allocate a temp reg from the other end of the bit vector so that it can
                // keep live for longer.
                regIndex = regsBv.GetPrevBit();
            }
        }
    }

    RegNum reg;

    if (BVInvalidIndex != regIndex)
    {
        Assert(regIndex < RegNumCount);
        reg = (RegNum)regIndex;
    }
    else
    {
        // No free register of the right class; see whether a caller-saved reg
        // can still serve, otherwise spill a lifetime to free one.
        if (tryCallerSavedRegs)
        {
            Assert(newLifetime);
            regIndex = GetPreferrencedRegIndex(newLifetime, callerSavedAvailableBv);
            if (BVInvalidIndex == regIndex)
            {
                tryCallerSavedRegs = false;
            }
        }

        bool dontSpillCurrent = tryCallerSavedRegs;

        if (newLifetime && newLifetime->isSpilled)
        {
            // Second chance allocation
            dontSpillCurrent = true;
        }

        // Can't find reg, spill some lifetime.
        reg = this->Spill(newLifetime, regOpnd, dontSpillCurrent, force);

        if (reg == RegNOREG && tryCallerSavedRegs)
        {
            Assert(BVInvalidIndex != regIndex);
            reg = (RegNum)regIndex;
            // This lifetime will get spilled once we get to the call it overlaps with (note: this may not be true
            // for second chance allocation as we may be beyond the call). Mark it as a cheap spill to give up the register
            // if some lifetime not overlapping with a call needs it.
            newLifetime->isCheapSpill = true;
        }
    }

    // We always have to return a reg if we are allocate temp reg.
    // If we are allocating for a new lifetime, we return RegNOREG, if we
    // spill the new lifetime
    Assert(newLifetime != nullptr || (reg != RegNOREG && reg < RegNumCount));
    return reg;
}
  2319. BVIndex
  2320. LinearScan::GetPreferrencedRegIndex(Lifetime *lifetime, BitVector freeRegs)
  2321. {
  2322. BitVector freePreferencedRegs = freeRegs;
  2323. freePreferencedRegs.And(lifetime->regPreference);
  2324. // If one of the preferred register (if any) is available, use it. Otherwise, just pick one of free register.
  2325. if (!freePreferencedRegs.IsEmpty())
  2326. {
  2327. return freePreferencedRegs.GetNextBit();
  2328. }
  2329. else
  2330. {
  2331. return freeRegs.GetNextBit();
  2332. }
  2333. }
// LinearScan::Spill
// We need to spill something to free up a reg. If the newLifetime
// past in isn't NULL, we can spill this one instead of an active one.
//
// Scans the active live ranges for the cheapest compatible spill candidate
// (same register class, not used by the current instruction, spillable, and
// meeting callee-saved / int-size constraints). Returns the freed register,
// or RegNOREG when no candidate was found and dontSpillCurrent is set.
RegNum
LinearScan::Spill(Lifetime *newLifetime, IR::RegOpnd *regOpnd, bool dontSpillCurrent, bool force)
{
    uint minSpillCost = (uint)-1;

    Assert(!newLifetime || !regOpnd || newLifetime->isFloat == (regOpnd->GetType() == TyMachDouble) || newLifetime->isSimd128() == (regOpnd->IsSimd128()));
    bool isFloatReg;
    BitVector intUsageBV;
    bool needCalleeSaved;

    // For now, we just spill the lifetime with the lowest spill cost.
    if (newLifetime)
    {
        isFloatReg = newLifetime->isFloat || newLifetime->isSimd128();
        if (!force)
        {
            // newLifetime competes on cost: only an active range cheaper than
            // it may be evicted. With 'force', anything eligible can go.
            minSpillCost = this->GetSpillCost(newLifetime);
        }
        intUsageBV = newLifetime->intUsageBv;
        needCalleeSaved = newLifetime->isLiveAcrossUserCalls;
    }
    else
    {
        needCalleeSaved = false;
        if (regOpnd->IsFloat() || regOpnd->IsSimd128())
        {
            // NOTE(review): intUsageBV is left default-constructed on this
            // path; it is only consulted by FitRegIntSizeConstraints below —
            // presumably benign for float candidates. TODO confirm.
            isFloatReg = true;
        }
        else
        {
            // Filter for int reg size constraints
            isFloatReg = false;
            intUsageBV.ClearAll();
            intUsageBV.Set(TySize[regOpnd->GetType()]);
        }
    }

    // 'candidate' starts invalid; it only becomes valid if some active range
    // beats minSpillCost under all the constraints below.
    SList<Lifetime *>::EditingIterator candidate;
    FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, this->activeLiveranges, iter)
    {
        uint spillCost = this->GetSpillCost(lifetime);
        if (spillCost < minSpillCost &&
            this->instrUseRegs.Test(lifetime->reg) == false &&
            (lifetime->isFloat || lifetime->isSimd128()) == isFloatReg &&
            !lifetime->cantSpill &&
            (!needCalleeSaved || this->calleeSavedRegs.Test(lifetime->reg)) &&
            this->linearScanMD.FitRegIntSizeConstraints(lifetime->reg, intUsageBV))
        {
            minSpillCost = spillCost;
            candidate = iter;
        }
    } NEXT_SLIST_ENTRY_EDITING;
    AssertMsg(newLifetime || candidate.IsValid(), "Didn't find anything to spill?!?");

    Lifetime * spilledRange;
    if (candidate.IsValid())
    {
        spilledRange = candidate.Data();
        candidate.RemoveCurrent();

        // Release the evicted range's register accounting.
        this->activeRegs.Clear(spilledRange->reg);
        if (spilledRange->isFloat || spilledRange->isSimd128())
        {
            this->floatRegUsedCount--;
        }
        else
        {
            this->intRegUsedCount--;
        }
    }
    else if (dontSpillCurrent)
    {
        return RegNOREG;
    }
    else
    {
        // No cheaper victim: spill the incoming lifetime itself.
        spilledRange = newLifetime;
    }

    return this->SpillLiveRange(spilledRange);
}
// LinearScan::SpillLiveRange
// Spill 'spilledRange': allocate its stack home (if needed), insert the
// stores for its pending defs and the loads for its pending uses, and clear
// all register-side tracking for it. Returns the register it occupied
// (RegNOREG if it had none).
RegNum
LinearScan::SpillLiveRange(Lifetime * spilledRange, IR::Instr *insertionInstr)
{
    Assert(!spilledRange->isSpilled);

    RegNum reg = spilledRange->reg;
    StackSym *sym = spilledRange->sym;

    spilledRange->isSpilled = true;
    spilledRange->isCheapSpill = false;
    spilledRange->reg = RegNOREG;

    // Don't allocate stack space for const, we always reload them. (For debug mode, allocate on the stack)
    if (!sym->IsAllocated() && (!sym->IsConst() || IsSymNonTempLocalVar(sym)))
    {
       this->AllocateStackSpace(spilledRange);
    }

    // No need to insert loads or stores if there are no uses.
    if (!spilledRange->isDeadStore)
    {
        // In the debug mode, don't do insertstore for this stacksym, as we want to retain the IsConst for the sym,
        // and later we are going to find the reg for it.
        if (!IsSymNonTempLocalVar(sym))
        {
            this->InsertStores(spilledRange, reg, insertionInstr);
        }

        // In a loop (or for consts) reload at each recorded use; otherwise the
        // pending uses can simply be dropped.
        if (this->IsInLoop() || sym->IsConst())
        {
            this->InsertLoads(sym, reg);
        }
        else
        {
            sym->scratch.linearScan.lifetime->useList.Clear();
        }
        // Adjust useCount in case of second chance allocation
        spilledRange->ApplyUseCountAdjust(this->curLoop);
    }

    Assert(reg == RegNOREG || spilledRange->reg == RegNOREG || this->regContent[reg] == spilledRange);
    if (spilledRange->isSecondChanceAllocated)
    {
        Assert(reg == RegNOREG || spilledRange->reg == RegNOREG
            || (this->regContent[reg] == spilledRange && this->secondChanceRegs.Test(reg)));
        this->secondChanceRegs.Clear(reg);
        spilledRange->isSecondChanceAllocated = false;
    }
    else
    {
        Assert(!this->secondChanceRegs.Test(reg));
    }
    this->regContent[reg] = nullptr;

#if DBG_DUMP
    if (PHASE_TRACE(Js::LinearScanPhase, this->func))
    {
        Output::Print(L"**** Spill: ");
        sym->Dump();
        Output::Print(L"(%S)", RegNames[reg]);
        Output::Print(L"  SpillCount:%d  Length:%d   Cost:%d\n",
            spilledRange->useCount, spilledRange->end - spilledRange->start, this->GetSpillCost(spilledRange));
    }
#endif
    return reg;
}
// LinearScan::SpillReg
// Spill a given register.
// Frees physical register 'reg' by evicting whatever lifetime currently
// holds it. When possible (lifetime starts before the helper block and
// forceSpill isn't set), prefers a cheap helper-block save/restore over a
// real spill.
void
LinearScan::SpillReg(RegNum reg, bool forceSpill /* = false */)
{
    Lifetime *spilledRange = nullptr;
    if (activeRegs.Test(reg))
    {
        spilledRange = LinearScan::RemoveRegLiveRange(activeLiveranges, reg);
    }
    else if (opHelperSpilledRegs.Test(reg) && forceSpill)
    {
        // If a lifetime that was assigned this register was helper spilled,
        // really spill it now.
        Assert(IsInHelperBlock());

        // Look for the liverange in opHelperSpilledLiveranges instead of
        // activeLiveranges.
        FOREACH_SLIST_ENTRY(Lifetime *, lifetime, opHelperSpilledLiveranges)
        {
            if (lifetime->reg == reg)
            {
                spilledRange = lifetime;
                break;
            }
        } NEXT_SLIST_ENTRY;

        Assert(spilledRange);
        Assert(!spilledRange->cantSpill);
        RemoveOpHelperSpilled(spilledRange);
        // Really spill this liverange below.
    }
    else
    {
        // Register isn't held by anything we need to evict.
        return;
    }

    AnalysisAssert(spilledRange);
    Assert(!spilledRange->cantSpill);

    if ((!forceSpill) && this->IsInHelperBlock() && spilledRange->start < this->HelperBlockStartInstrNumber() && !spilledRange->cantOpHelperSpill)
    {
        // if the lifetime starts before the helper block, we can do save and restore
        // around the helper block instead.
        this->AddOpHelperSpilled(spilledRange);
    }
    else
    {
        if (spilledRange->cantOpHelperSpill)
        {
            // We're really spilling this liverange, so take it out of the helper-spilled liveranges
            // to avoid confusion (see Win8 313433).
            Assert(!spilledRange->isOpHelperSpilled);
            spilledRange->cantOpHelperSpill = false;
            this->opHelperSpilledLiveranges->Remove(spilledRange);
        }
        this->SpillLiveRange(spilledRange);
    }

    if (this->activeRegs.Test(reg))
    {
        // Update the active-register accounting for the freed register.
        this->activeRegs.Clear(reg);
        if (RegTypes[reg] == TyMachReg)
        {
            this->intRegUsedCount--;
        }
        else
        {
            Assert(RegTypes[reg] == TyFloat64);
            this->floatRegUsedCount--;
        }
    }
}
// LinearScan::ProcessEHRegionBoundary
// At an EH region boundary (TryCatch entry or Leave), spill every active
// lifetime so that no value lives only in a register across the boundary.
// Finallys are not handled here (asserted out).
void
LinearScan::ProcessEHRegionBoundary(IR::Instr * instr)
{
    Assert(instr->IsBranchInstr());
    Assert(instr->m_opcode != Js::OpCode::TryFinally); // finallys are not supported for optimization yet.

    if (instr->m_opcode != Js::OpCode::TryCatch && instr->m_opcode != Js::OpCode::Leave)
    {
        return;
    }

    // Spill everything upon entry to the try region and upon a Leave.
    // For a Leave, the stores must be placed before the branch itself.
    IR::Instr* insertionInstr = instr->m_opcode != Js::OpCode::Leave ? instr : instr->m_prev;
    FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, this->activeLiveranges, iter)
    {
        this->activeRegs.Clear(lifetime->reg);
        if (lifetime->isFloat || lifetime->isSimd128())
        {
            this->floatRegUsedCount--;
        }
        else
        {
            this->intRegUsedCount--;
        }
        this->SpillLiveRange(lifetime, insertionInstr);
        iter.RemoveCurrent();
    }
    NEXT_SLIST_ENTRY_EDITING;
}
// LinearScan::AllocateStackSpace
// Give 'spilledRange' a stack home: reuse the bytecode-local slot for
// non-temp locals, try to stack-pack into a freed slot otherwise, and fall
// back to allocating a fresh slot.
void
LinearScan::AllocateStackSpace(Lifetime *spilledRange)
{
    if (spilledRange->sym->IsAllocated())
    {
        return;
    }

    uint32 size = TySize[spilledRange->sym->GetType()];

    // For the bytecodereg syms instead of spilling to the any other location lets re-use the already created slot.
    if (IsSymNonTempLocalVar(spilledRange->sym))
    {
        Js::RegSlot slotIndex = spilledRange->sym->GetByteCodeRegSlot();

        // Get the offset which is already allocated from this local, and always spill on that location.

        spilledRange->sym->m_offset = GetStackOffset(slotIndex);
        spilledRange->sym->m_allocated = true;

        return;
    }

    StackSlot * newStackSlot = nullptr;

    if (!PHASE_OFF(Js::StackPackPhase, this->func) && !this->func->IsJitInDebugMode() && !spilledRange->cantStackPack)
    {
        // Search for a free stack slot to re-use
        // (slots whose previous occupant ended before this range started).
        FOREACH_SLIST_ENTRY_EDITING(StackSlot *, slot, this->stackSlotsFreeList, iter)
        {
            // Heuristic: should we use '==' or '>=' for the size?
            if (slot->lastUse <= spilledRange->start && slot->size >= size)
            {
                StackSym *spilledSym = spilledRange->sym;

                Assert(!spilledSym->IsArgSlotSym() && !spilledSym->IsParamSlotSym());
                Assert(!spilledSym->IsAllocated());
                spilledRange->spillStackSlot = slot;
                spilledSym->m_offset = slot->offset;
                spilledSym->m_allocated = true;

                iter.RemoveCurrent();

#if DBG_DUMP
                if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::StackPackPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
                {
                    spilledSym->Dump();
                    Output::Print(L" *** stack packed at offset %3d  (%4d - %4d)\n", spilledSym->m_offset, spilledRange->start, spilledRange->end);
                }
#endif
                break;
            }
        } NEXT_SLIST_ENTRY_EDITING;

        if (spilledRange->spillStackSlot == nullptr)
        {
            // No reusable slot; create a tracking record for a fresh one.
            // Its offset is filled in after StackAllocate below.
            newStackSlot = JitAnewStruct(this->tempAlloc, StackSlot);
            newStackSlot->size = size;
            spilledRange->spillStackSlot = newStackSlot;
        }
        this->AddLiveRange(this->stackPackInUseLiveRanges, spilledRange);
    }

    if (!spilledRange->sym->IsAllocated())
    {
        // Can't stack pack, allocate new stack slot.
        StackSym *spilledSym = spilledRange->sym;
        this->func->StackAllocate(spilledSym, size);

#if DBG_DUMP
        if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::StackPackPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
        {
            spilledSym->Dump();
            Output::Print(L" at offset %3d  (%4d - %4d)\n", spilledSym->m_offset, spilledRange->start, spilledRange->end);
        }
#endif
        if (newStackSlot != nullptr)
        {
            newStackSlot->offset = spilledSym->m_offset;
        }
    }
}
  2636. // LinearScan::InsertLoads
  2637. void
  2638. LinearScan::InsertLoads(StackSym *sym, RegNum reg)
  2639. {
  2640. Lifetime *lifetime = sym->scratch.linearScan.lifetime;
  2641. FOREACH_SLIST_ENTRY(IR::Instr *, instr, &lifetime->useList)
  2642. {
  2643. this->InsertLoad(instr, sym, reg);
  2644. } NEXT_SLIST_ENTRY;
  2645. lifetime->useList.Clear();
  2646. }
  2647. // LinearScan::InsertStores
  2648. void
  2649. LinearScan::InsertStores(Lifetime *lifetime, RegNum reg, IR::Instr *insertionInstr)
  2650. {
  2651. StackSym *sym = lifetime->sym;
  2652. // If single def, use instrDef on the symbol
  2653. if (sym->m_isSingleDef)
  2654. {
  2655. IR::Instr * defInstr = sym->m_instrDef;
  2656. if (!sym->IsConst() && defInstr->GetDst()->AsRegOpnd()->GetReg() == RegNOREG
  2657. || this->secondChanceRegs.Test(reg))
  2658. {
  2659. // This can happen if we were trying to allocate this lifetime,
  2660. // and it is getting spilled right away.
  2661. // For second chance allocations, this should have already been handled.
  2662. return;
  2663. }
  2664. this->InsertStore(defInstr, defInstr->FindRegDef(sym)->m_sym, reg);
  2665. return;
  2666. }
  2667. if (reg == RegNOREG)
  2668. {
  2669. return;
  2670. }
  2671. uint localStoreCost = LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr));
  2672. // Is it cheaper to spill all the defs we've seen so far or just insert a store at the current point?
  2673. if ((this->func->HasTry() && !this->func->DoOptimizeTryCatch()) || localStoreCost >= lifetime->allDefsCost)
  2674. {
  2675. // Insert a store for each def point we've seen so far
  2676. FOREACH_SLIST_ENTRY(IR::Instr *, instr, &(lifetime->defList))
  2677. {
  2678. if (instr->GetDst()->AsRegOpnd()->GetReg() != RegNOREG)
  2679. {
  2680. IR::RegOpnd *regOpnd = instr->FindRegDef(sym);
  2681. // Note that reg may not be equal to regOpnd->GetReg() if the lifetime has been re-allocated since we've seen this def
  2682. this->InsertStore(instr, regOpnd->m_sym, regOpnd->GetReg());
  2683. }
  2684. } NEXT_SLIST_ENTRY;
  2685. lifetime->defList.Clear();
  2686. lifetime->allDefsCost = 0;
  2687. lifetime->needsStoreCompensation = false;
  2688. }
  2689. else if (!lifetime->defList.Empty())
  2690. {
  2691. // Insert a def right here at the current instr, and then we'll use compensation code for paths not covered by this def.
  2692. if (!insertionInstr)
  2693. {
  2694. insertionInstr = this->currentInstr->m_prev;
  2695. }
  2696. this->InsertStore(insertionInstr, sym, reg);
  2697. if (this->IsInLoop())
  2698. {
  2699. RecordLoopUse(lifetime, reg);
  2700. }
  2701. // We now need to insert all store compensations when needed, unless we spill all the defs later on.
  2702. lifetime->needsStoreCompensation = true;
  2703. }
  2704. }
// LinearScan::InsertStore
// Emit a store of 'sym' from 'reg' to its stack home, placed immediately
// after 'instr'. For reloadable constants (outside debug-local handling) the
// def is unlinked instead, since the value will be rematerialized at uses.
void
LinearScan::InsertStore(IR::Instr *instr, StackSym *sym, RegNum reg)
{
    // Win8 Bug 391484: We cannot use regOpnd->GetType() here because it
    // can lead to truncation as downstream usage of the register might be of a size
    // greater than the current use. Using RegTypes[reg] works only if the stack slot size
    // is always at least of size MachPtr

    // In the debug mode, if the current sym belongs to the byte code locals, then do not unlink this instruction, as we need to have this instruction to be there
    // to produce the write-through instruction.
    if (sym->IsConst() && !IsSymNonTempLocalVar(sym))
    {
        // Let's just delete the def.  We'll reload the constant.
        // We can't just delete the instruction however since the
        // uses will look at the def to get the value.

        // Make sure it wasn't already deleted.
        if (sym->m_instrDef->m_next)
        {
            sym->m_instrDef->Unlink();
            sym->m_instrDef->m_next = nullptr;
        }
        return;
    }

    Assert(reg != RegNOREG);

    IRType type = sym->GetType();

    if (sym->IsSimd128())
    {
        type = sym->GetType();
    }

    // Build 'MOV [sym's stack slot], reg' and place it right after the def.
    IR::Instr *store = IR::Instr::New(LowererMD::GetStoreOp(type),
        IR::SymOpnd::New(sym, type, this->func),
        IR::RegOpnd::New(sym, reg, type, this->func), this->func);
    instr->InsertAfter(store);
    store->CopyNumber(instr);
    this->linearScanMD.LegalizeDef(store);

#if DBG_DUMP
    if (PHASE_TRACE(Js::LinearScanPhase, this->func))
    {
        Output::Print(L"...Inserting store for ");
        sym->Dump();
        Output::Print(L"  Cost:%d\n", this->GetSpillCost(sym->scratch.linearScan.lifetime));
    }
#endif
}
  2749. // LinearScan::InsertLoad
  2750. void
  2751. LinearScan::InsertLoad(IR::Instr *instr, StackSym *sym, RegNum reg)
  2752. {
  2753. IR::Opnd *src;
  2754. // The size of loads and stores to memory need to match. See the comment
  2755. // around type in InsertStore above.
  2756. IRType type = sym->GetType();
  2757. if (sym->IsSimd128())
  2758. {
  2759. type = sym->GetType();
  2760. }
  2761. bool isMovSDZero = false;
  2762. if (sym->IsConst())
  2763. {
  2764. Assert(!sym->IsAllocated() || IsSymNonTempLocalVar(sym));
  2765. // For an intConst, reload the constant instead of using the stack.
  2766. // Create a new StackSym to make sure the old sym remains singleDef
  2767. src = sym->GetConstOpnd();
  2768. if (!src)
  2769. {
  2770. isMovSDZero = true;
  2771. sym = StackSym::New(sym->GetType(), this->func);
  2772. sym->m_isConst = true;
  2773. sym->m_isFltConst = true;
  2774. }
  2775. else
  2776. {
  2777. StackSym * oldSym = sym;
  2778. sym = StackSym::New(TyVar, this->func);
  2779. sym->m_isConst = true;
  2780. sym->m_isIntConst = oldSym->m_isIntConst;
  2781. sym->m_isTaggableIntConst = sym->m_isTaggableIntConst;
  2782. }
  2783. }
  2784. else
  2785. {
  2786. src = IR::SymOpnd::New(sym, type, this->func);
  2787. }
  2788. IR::Instr * load;
  2789. #if defined(_M_IX86) || defined(_M_X64)
  2790. if (isMovSDZero)
  2791. {
  2792. load = IR::Instr::New(Js::OpCode::MOVSD_ZERO,
  2793. IR::RegOpnd::New(sym, reg, type, this->func), this->func);
  2794. instr->InsertBefore(load);
  2795. }
  2796. else
  2797. #endif
  2798. {
  2799. load = Lowerer::InsertMove(IR::RegOpnd::New(sym, reg, type, this->func), src, instr);
  2800. }
  2801. load->CopyNumber(instr);
  2802. if (!isMovSDZero)
  2803. {
  2804. this->linearScanMD.LegalizeUse(load, src);
  2805. }
  2806. this->RecordLoopUse(nullptr, reg);
  2807. #if DBG_DUMP
  2808. if (PHASE_TRACE(Js::LinearScanPhase, this->func))
  2809. {
  2810. Output::Print(L"...Inserting load for ");
  2811. sym->Dump();
  2812. if (sym->scratch.linearScan.lifetime)
  2813. {
  2814. Output::Print(L" Cost:%d\n", this->GetSpillCost(sym->scratch.linearScan.lifetime));
  2815. }
  2816. else
  2817. {
  2818. Output::Print(L"\n");
  2819. }
  2820. }
  2821. #endif
  2822. }
// LinearScan::GetRegAttribs
// Return the attribute bit mask (RA_* flags, e.g. RA_CALLEESAVE / RA_DONTALLOCATE)
// for the given register, from the static RegAttribs table.
uint8
LinearScan::GetRegAttribs(RegNum reg)
{
    return RegAttribs[reg];
}
// LinearScan::GetRegType
// Return the IRType associated with the given register (from the static RegTypes table).
IRType
LinearScan::GetRegType(RegNum reg)
{
    return RegTypes[reg];
}
// LinearScan::IsCalleeSaved
// True if the register is preserved across calls (RA_CALLEESAVE attribute set).
bool
LinearScan::IsCalleeSaved(RegNum reg)
{
    return (RegAttribs[reg] & RA_CALLEESAVE) != 0;
}
// LinearScan::IsCallerSaved
// True if the register is clobbered by calls: allocatable but not callee-saved.
bool
LinearScan::IsCallerSaved(RegNum reg) const
{
    return !LinearScan::IsCalleeSaved(reg) && LinearScan::IsAllocatable(reg);
}
// LinearScan::IsAllocatable
// True if the allocator may hand out this register: not marked RA_DONTALLOCATE,
// and additionally permitted by the machine-dependent layer for this function.
bool
LinearScan::IsAllocatable(RegNum reg) const
{
    return !(RegAttribs[reg] & RA_DONTALLOCATE) && this->linearScanMD.IsAllocatable(reg, this->func);
}
// LinearScan::KillImplicitRegs
// Handle registers implicitly killed by "instr": clears temp regs at block
// boundaries, spills the register implicitly clobbered by IMUL (x86/x64), and
// on calls spills inlinee args plus all active caller-saved registers.
void
LinearScan::KillImplicitRegs(IR::Instr *instr)
{
    if (instr->IsLabelInstr() || instr->IsBranchInstr())
    {
        // Note: need to clear these for branch as well because this info isn't recorded for second chance
        // allocation on branch boundaries
        this->tempRegs.ClearAll();
    }

#if defined(_M_IX86) || defined(_M_X64)
    if (instr->m_opcode == Js::OpCode::IMUL)
    {
        // IMUL implicitly writes the high-result register; spill and free it.
        this->SpillReg(LowererMDArch::GetRegIMulHighDestLower());
        this->tempRegs.Clear(LowererMDArch::GetRegIMulHighDestLower());

        this->RecordLoopUse(nullptr, LowererMDArch::GetRegIMulHighDestLower());
        return;
    }
#endif

    this->TrackInlineeArgLifetimes(instr);

    // Don't care about kills on bailout calls as we are going to exit anyways
    // Also, for bailout scenarios we have already handled the inlinee frame spills
    Assert(LowererMD::IsCall(instr) || !instr->HasBailOutInfo());
    if (!LowererMD::IsCall(instr) || instr->HasBailOutInfo())
    {
        return;
    }

    if (this->currentBlock->inlineeStack.Count() > 0)
    {
        this->SpillInlineeArgs(instr);
    }
    else
    {
        // Not inside an inlinee: tie the call to the outer function.
        instr->m_func = this->func;
    }

    //
    // Spill caller-saved registers that are active.
    //
    BitVector deadRegs;
    deadRegs.Copy(this->activeRegs);
    deadRegs.And(this->callerSavedRegs);
    FOREACH_BITSET_IN_UNITBV(reg, deadRegs, BitVector)
    {
        this->SpillReg((RegNum)reg);
    }
    NEXT_BITSET_IN_UNITBV;

    // Only callee-saved temps survive the call.
    this->tempRegs.And(this->calleeSavedRegs);

    if (callSetupRegs.Count())
    {
        callSetupRegs.ClearAll();
    }

    // The call clobbers all caller-saved regs; record that use in every enclosing loop.
    Loop *loop = this->curLoop;
    while (loop)
    {
        loop->regAlloc.regUseBv.Or(this->callerSavedRegs);
        loop = loop->parent;
    }
}
//
// Before a call, all inlinee frame syms need to be spilled to a pre-defined location
//
void LinearScan::SpillInlineeArgs(IR::Instr* instr)
{
    Assert(this->currentBlock->inlineeStack.Count() > 0);

    // Ensure the call instruction is tied to the current inlinee
    // This is used in the encoder to encode mapping or return offset and InlineeFrameRecord
    instr->m_func = this->currentBlock->inlineeStack.Last();

    // Track registers already stored so each register is spilled at most once,
    // even when several frame lifetimes share a register.
    BitVector spilledRegs;
    this->currentBlock->inlineeFrameLifetimes.Map([&](uint i, Lifetime* lifetime){
        Assert(lifetime->start < instr->GetNumber() && lifetime->end >= instr->GetNumber());
        Assert(!lifetime->sym->IsConst());
        Assert(this->currentBlock->inlineeFrameSyms.ContainsKey(lifetime->sym->m_id));
        if (lifetime->reg == RegNOREG)
        {
            // Not currently in a register; nothing to store.
            return;
        }

        StackSym* sym = lifetime->sym;
        if (!lifetime->isSpilled && !lifetime->isOpHelperSpilled &&
            (!lifetime->isDeadStore && (lifetime->sym->m_isSingleDef || !lifetime->defList.Empty()))) // if deflist is empty - we have already spilled at all defs - and the value is current
        {
            if (!spilledRegs.Test(lifetime->reg))
            {
                spilledRegs.Set(lifetime->reg);
                if (!sym->IsAllocated())
                {
                    this->AllocateStackSpace(lifetime);
                }

                this->RecordLoopUse(lifetime, lifetime->reg);
                Assert(this->regContent[lifetime->reg] != nullptr);
                if (sym->m_isSingleDef)
                {
                    // For a single def - we do not track the deflist - the def below will remove the single def on the sym
                    // hence, we need to track the original def.
                    Assert(lifetime->defList.Empty());
                    lifetime->defList.Prepend(sym->m_instrDef);
                }

                this->InsertStore(instr->m_prev, sym, lifetime->reg);
            }
        }
    });
}
// LinearScan::TrackInlineeArgLifetimes
// Maintain the per-block inlinee bookkeeping as InlineeStart/InlineeEnd
// instructions are encountered: push/pop the inlinee func stack, and keep
// inlineeFrameLifetimes / inlineeFrameSyms (a sym-id -> ref-count map) in sync.
void LinearScan::TrackInlineeArgLifetimes(IR::Instr* instr)
{
    if (instr->m_opcode == Js::OpCode::InlineeStart)
    {
        if (instr->m_func->m_hasInlineArgsOpt)
        {
            instr->m_func->frameInfo->IterateSyms([=](StackSym* sym){
                Lifetime* lifetime = sym->scratch.linearScan.lifetime;
                this->currentBlock->inlineeFrameLifetimes.Add(lifetime);

                // We need to maintain as count because the same sym can be used for multiple arguments
                uint* value;
                if (this->currentBlock->inlineeFrameSyms.TryGetReference(sym->m_id, &value))
                {
                    *value = *value + 1;
                }
                else
                {
                    this->currentBlock->inlineeFrameSyms.Add(sym->m_id, 1);
                }
            });
            if (this->currentBlock->inlineeStack.Count() > 0)
            {
                // Nested inlinees must be pushed in depth order.
                Assert(instr->m_func->inlineDepth == this->currentBlock->inlineeStack.Last()->inlineDepth + 1);
            }
            this->currentBlock->inlineeStack.Add(instr->m_func);
        }
        else
        {
            Assert(this->currentBlock->inlineeStack.Count() == 0);
        }
    }
    else if (instr->m_opcode == Js::OpCode::InlineeEnd)
    {
        if (instr->m_func->m_hasInlineArgsOpt)
        {
            instr->m_func->frameInfo->AllocateRecord(this->func, instr->m_func->GetJnFunction());

            if(this->currentBlock->inlineeStack.Count() == 0)
            {
                // Block is unreachable
                Assert(this->currentBlock->inlineeFrameLifetimes.Count() == 0);
                Assert(this->currentBlock->inlineeFrameSyms.Count() == 0);
            }
            else
            {
                // Pop this inlinee and undo the per-sym bookkeeping done at InlineeStart.
                Func* func = this->currentBlock->inlineeStack.RemoveAtEnd();
                Assert(func == instr->m_func);

                // Iterate in reverse so lifetimes come off the list in the same
                // order they were appended at InlineeStart.
                instr->m_func->frameInfo->IterateSyms([=](StackSym* sym){
                    Lifetime* lifetime = this->currentBlock->inlineeFrameLifetimes.RemoveAtEnd();

                    uint* value;
                    if (this->currentBlock->inlineeFrameSyms.TryGetReference(sym->m_id, &value))
                    {
                        *value = *value - 1;
                        if (*value == 0)
                        {
                            bool removed = this->currentBlock->inlineeFrameSyms.Remove(sym->m_id);
                            Assert(removed);
                        }
                    }
                    else
                    {
                        Assert(UNREACHED);
                    }
                    Assert(sym->scratch.linearScan.lifetime == lifetime);
                }, /*reverse*/ true);
            }
        }
    }
}
// GetSpillCost
// The spill cost is trying to estimate the usage density of the lifetime,
// by dividing the useCount by the lifetime length.
// Higher cost => less attractive to spill. The shift/divide constants below
// are tuning knobs, not exact measurements.
uint
LinearScan::GetSpillCost(Lifetime *lifetime)
{
    uint useCount = lifetime->GetRegionUseCount(this->curLoop);
    uint spillCost;

    // Get local spill cost. Ignore helper blocks as we'll also need compensation on the main path.
    uint localUseCost = LinearScan::GetUseSpillCost(this->loopNest, false);

    if (lifetime->reg && !lifetime->isSpilled)
    {
        // If it is in a reg, we'll need a store
        if (localUseCost >= lifetime->allDefsCost)
        {
            useCount += lifetime->allDefsCost;
        }
        else
        {
            useCount += localUseCost;
        }

        if (this->curLoop && !lifetime->sym->IsConst()
            && this->curLoop->regAlloc.liveOnBackEdgeSyms->Test(lifetime->sym->m_id))
        {
            // If we spill here, we'll need to insert a load at the bottom of the loop
            // (it would be nice to be able to check is was in a reg at the top of the loop)...
            useCount += localUseCost;
        }
    }

    // When comparing 2 lifetimes, we don't really care about the actual length of the lifetimes.
    // What matters is how much longer will they use the register.
    const uint start = currentInstr->GetNumber();
    uint end = max(start, lifetime->end);
    uint lifetimeTotalOpHelperFullVisitedLength = lifetime->totalOpHelperLengthByEnd;

    if (this->curLoop && this->curLoop->regAlloc.loopEnd < end && !PHASE_OFF(Js::RegionUseCountPhase, this->func))
    {
        // Clamp the window to the end of the current loop when the lifetime extends past it.
        end = this->curLoop->regAlloc.loopEnd;
        lifetimeTotalOpHelperFullVisitedLength = this->curLoop->regAlloc.helperLength;
    }
    uint length = end - start + 1;

    // Exclude helper block length since helper block paths are typically infrequently taken paths and not as important
    const uint totalOpHelperVisitedLength = this->totalOpHelperFullVisitedLength + CurrentOpHelperVisitedLength(currentInstr);
    Assert(lifetimeTotalOpHelperFullVisitedLength >= totalOpHelperVisitedLength);
    const uint lifetimeHelperLength = lifetimeTotalOpHelperFullVisitedLength - totalOpHelperVisitedLength;
    Assert(length >= lifetimeHelperLength);
    length -= lifetimeHelperLength;
    if(length == 0)
    {
        // Avoid division by zero below.
        length = 1;
    }

    // Add a base length so that the difference between a length of 1 and a length of 2 is not so large
#ifdef _M_X64
    length += 64;
#else
    length += 16;
#endif

    spillCost = (useCount << 13) / length;

    if (lifetime->isSecondChanceAllocated)
    {
        // Second chance allocation have additional overhead, so de-prioritize them
        // Note: could use more tuning...
        spillCost = spillCost * 4/5;
    }

    if (lifetime->isCheapSpill)
    {
        // This lifetime will get spilled eventually, so lower the spill cost to favor other lifetimes
        // Note: could use more tuning...
        spillCost /= 2;
    }

    if (lifetime->sym->IsConst())
    {
        // Constants can be re-materialized instead of reloaded, so they are cheap to spill.
        spillCost = spillCost / 16;
    }

    return spillCost;
}
  3091. bool
  3092. LinearScan::RemoveDeadStores(IR::Instr *instr)
  3093. {
  3094. IR::Opnd *dst = instr->GetDst();
  3095. if (dst && dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym && !dst->AsRegOpnd()->m_isCallArg)
  3096. {
  3097. IR::RegOpnd *regOpnd = dst->AsRegOpnd();
  3098. Lifetime * lifetime = regOpnd->m_sym->scratch.linearScan.lifetime;
  3099. if (lifetime->isDeadStore)
  3100. {
  3101. if (Lowerer::HasSideEffects(instr) == false)
  3102. {
  3103. // If all the bailouts referencing this arg are removed (which can happen in some scenarios)
  3104. //- then it's OK to remove this def of the arg
  3105. DebugOnly(this->func->allowRemoveBailOutArgInstr = true);
  3106. // We are removing this instruction, end dead life time now
  3107. this->EndDeadLifetimes(instr);
  3108. instr->Remove();
  3109. DebugOnly(this->func->allowRemoveBailOutArgInstr = false);
  3110. return true;
  3111. }
  3112. }
  3113. }
  3114. return false;
  3115. }
  3116. void
  3117. LinearScan::AssignActiveReg(Lifetime * lifetime, RegNum reg)
  3118. {
  3119. Assert(!this->activeRegs.Test(reg));
  3120. Assert(!lifetime->isSpilled);
  3121. Assert(lifetime->reg == RegNOREG || lifetime->reg == reg);
  3122. this->func->m_regsUsed.Set(reg);
  3123. lifetime->reg = reg;
  3124. this->activeRegs.Set(reg);
  3125. if (lifetime->isFloat || lifetime->isSimd128())
  3126. {
  3127. this->floatRegUsedCount++;
  3128. }
  3129. else
  3130. {
  3131. this->intRegUsedCount++;
  3132. }
  3133. this->AddToActive(lifetime);
  3134. this->tempRegs.Clear(reg);
  3135. }
  3136. void
  3137. LinearScan::AssignTempReg(Lifetime * lifetime, RegNum reg)
  3138. {
  3139. Assert(reg > RegNOREG && reg < RegNumCount);
  3140. Assert(!this->activeRegs.Test(reg));
  3141. Assert(lifetime->isSpilled);
  3142. this->func->m_regsUsed.Set(reg);
  3143. lifetime->reg = reg;
  3144. this->tempRegs.Set(reg);
  3145. __analysis_assume(reg > 0 && reg < RegNumCount);
  3146. this->tempRegLifetimes[reg] = lifetime;
  3147. this->RecordLoopUse(nullptr, reg);
  3148. }
  3149. RegNum
  3150. LinearScan::GetAssignedTempReg(Lifetime * lifetime, IRType type)
  3151. {
  3152. if (this->tempRegs.Test(lifetime->reg) && this->tempRegLifetimes[lifetime->reg] == lifetime)
  3153. {
  3154. if (this->linearScanMD.FitRegIntSizeConstraints(lifetime->reg, type))
  3155. {
  3156. this->RecordLoopUse(nullptr, lifetime->reg);
  3157. return lifetime->reg;
  3158. }
  3159. else
  3160. {
  3161. // Free this temp, we'll need to find another one.
  3162. this->tempRegs.Clear(lifetime->reg);
  3163. lifetime->reg = RegNOREG;
  3164. }
  3165. }
  3166. return RegNOREG;
  3167. }
  3168. uint
  3169. LinearScan::GetUseSpillCost(uint loopNest, BOOL isInHelperBlock)
  3170. {
  3171. if (isInHelperBlock)
  3172. {
  3173. // Helper block uses are not as important.
  3174. return 0;
  3175. }
  3176. else if (loopNest < 6)
  3177. {
  3178. return (1 << (loopNest * 3));
  3179. }
  3180. else
  3181. {
  3182. // Slow growth for deep nest to avoid overflow
  3183. return (1 << (5 * 3)) * (loopNest-5);
  3184. }
  3185. }
// LinearScan::ProcessSecondChanceBoundary (branch form)
// At a branch, flush op-helper-spilled lifetimes back into regContent, run the
// per-target boundary helper (for every target of a multi-branch), and save the
// current register content on the branch for later compensation.
void
LinearScan::ProcessSecondChanceBoundary(IR::BranchInstr *branchInstr)
{
    // Second-chance bookkeeping is disabled for unoptimized try regions.
    if (this->func->HasTry() && !this->func->DoOptimizeTryCatch())
    {
        return;
    }

    if (this->currentOpHelperBlock && this->currentOpHelperBlock->opHelperEndInstr == branchInstr)
    {
        // Lifetimes opHelperSpilled won't get recorded by SaveRegContent(). Do it here.
        FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->opHelperSpilledLiveranges)
        {
            if (!lifetime->cantOpHelperSpill)
            {
                if (lifetime->isSecondChanceAllocated)
                {
                    this->secondChanceRegs.Set(lifetime->reg);
                }
                this->regContent[lifetime->reg] = lifetime;
            }
        } NEXT_SLIST_ENTRY;
    }

    if(branchInstr->IsMultiBranch())
    {
        IR::MultiBranchInstr * multiBranchInstr = branchInstr->AsMultiBrInstr();

        // Process each distinct target label once.
        multiBranchInstr->MapUniqueMultiBrLabels([=](IR::LabelInstr * branchLabel) -> void
        {
            this->ProcessSecondChanceBoundaryHelper(branchInstr, branchLabel);
        });
    }
    else
    {
        IR::LabelInstr *branchLabel = branchInstr->GetTarget();
        this->ProcessSecondChanceBoundaryHelper(branchInstr, branchLabel);
    }

    this->SaveRegContent(branchInstr);
}
// LinearScan::ProcessSecondChanceBoundaryHelper
// Per-target work for ProcessSecondChanceBoundary: loop back-edges get
// compensation code inserted immediately; forward branches record the current
// register content and propagate the lowered basic-block info to the target label.
void
LinearScan::ProcessSecondChanceBoundaryHelper(IR::BranchInstr *branchInstr, IR::LabelInstr *branchLabel)
{
    if (branchInstr->GetNumber() > branchLabel->GetNumber())
    {
        // Loop back-edge
        Assert(branchLabel->m_isLoopTop);
        branchInstr->m_regContent = nullptr;
        this->InsertSecondChanceCompensation(this->regContent, branchLabel->m_regContent, branchInstr, branchLabel);
    }
    else
    {
        // Forward branch
        this->SaveRegContent(branchInstr);
        if (this->curLoop)
        {
            // Remember this exit's register content for loop-exit processing.
            this->curLoop->regAlloc.exitRegContentList->Prepend(branchInstr->m_regContent);
        }
        if (!branchLabel->m_loweredBasicBlock)
        {
            if (branchInstr->IsConditional() || branchInstr->IsMultiBranch())
            {
                // Clone with deep copy
                branchLabel->m_loweredBasicBlock = this->currentBlock->Clone(this->tempAlloc);
            }
            else
            {
                // If the unconditional branch leads to the end of the function for the scenario of a bailout - we do not want to
                // copy the lowered inlinee info.
                IR::Instr* nextInstr = branchLabel->GetNextRealInstr();
                if (nextInstr->m_opcode != Js::OpCode::FunctionExit &&
                    nextInstr->m_opcode != Js::OpCode::BailOutStackRestore &&
                    this->currentBlock->HasData())
                {
                    // Clone with shallow copy
                    branchLabel->m_loweredBasicBlock = this->currentBlock;
                }
            }
        }
        else
        {
            // The lowerer sometimes generates unreachable blocks that would have empty data.
            Assert(!currentBlock->HasData() || branchLabel->m_loweredBasicBlock->Equals(this->currentBlock));
        }
    }
}
// LinearScan::ProcessSecondChanceBoundary (label form)
// At a label, snapshot the register content (and stash it on the loop for a
// loop top), then insert compensation code for every incoming forward branch so
// its recorded register content matches the content at this label.
void
LinearScan::ProcessSecondChanceBoundary(IR::LabelInstr *labelInstr)
{
    // Second-chance bookkeeping is disabled for unoptimized try regions.
    if (this->func->HasTry() && !this->func->DoOptimizeTryCatch())
    {
        return;
    }

    if (labelInstr->m_isLoopTop)
    {
        this->SaveRegContent(labelInstr);
        // Keep a private copy of the loop-top register content for back-edge compensation.
        Lifetime ** regContent = AnewArrayZ(this->tempAlloc, Lifetime *, RegNumCount);
        js_memcpy_s(regContent, (RegNumCount * sizeof(Lifetime *)), this->regContent, sizeof(this->regContent));
        this->curLoop->regAlloc.loopTopRegContent = regContent;
    }

    FOREACH_SLISTCOUNTED_ENTRY_EDITING(IR::BranchInstr *, branchInstr, &labelInstr->labelRefs, iter)
    {
        if (branchInstr->m_isAirlock)
        {
            // This branch was just inserted... Skip it.
            continue;
        }

        Assert(branchInstr->GetNumber() && labelInstr->GetNumber());
        if (branchInstr->GetNumber() < labelInstr->GetNumber())
        {
            // Normal branch
            this->InsertSecondChanceCompensation(branchInstr->m_regContent, this->regContent, branchInstr, labelInstr);
        }
        else
        {
            // Loop back-edge: compensation was handled at the branch itself.
            Assert(labelInstr->m_isLoopTop);
        }
    } NEXT_SLISTCOUNTED_ENTRY_EDITING;
}
  3303. IR::Instr * LinearScan::EnsureAirlock(bool needsAirlock, bool *pHasAirlock, IR::Instr *insertionInstr,
  3304. IR::Instr **pInsertionStartInstr, IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr)
  3305. {
  3306. if (needsAirlock && !(*pHasAirlock))
  3307. {
  3308. // We need an extra block for the compensation code.
  3309. insertionInstr = this->InsertAirlock(branchInstr, labelInstr);
  3310. *pInsertionStartInstr = insertionInstr->m_prev;
  3311. *pHasAirlock = true;
  3312. }
  3313. return insertionInstr;
  3314. }
  3315. bool LinearScan::NeedsLoopBackEdgeCompensation(Lifetime *lifetime, IR::LabelInstr *loopTopLabel)
  3316. {
  3317. if (!lifetime)
  3318. {
  3319. return false;
  3320. }
  3321. if (lifetime->sym->IsConst())
  3322. {
  3323. return false;
  3324. }
  3325. // No need if lifetime begins in the loop
  3326. if (lifetime->start > loopTopLabel->GetNumber())
  3327. {
  3328. return false;
  3329. }
  3330. // Only needed if lifetime is live on the back-edge, and the register is used inside the loop, or the lifetime extends
  3331. // beyond the loop (and compensation out of the loop may use this reg)...
  3332. if (!loopTopLabel->GetLoop()->regAlloc.liveOnBackEdgeSyms->Test(lifetime->sym->m_id)
  3333. || (this->currentInstr->GetNumber() >= lifetime->end && !this->curLoop->regAlloc.symRegUseBv->Test(lifetime->sym->m_id)))
  3334. {
  3335. return false;
  3336. }
  3337. return true;
  3338. }
// LinearScan::InsertSecondChanceCompensation
// Insert the moves/stores needed so that register content recorded at a branch
// matches the content expected at its target label. Handles two cases:
//  - loop back-edge (branchRegContent aliases this->regContent): stores for
//    unsaved defs, then register-to-register/memory reconciliation;
//  - forward-branch merge: stores for lifetimes spilled since the branch and
//    loads/moves for lifetimes the label expects in registers.
// Compensation code goes into an "airlock" block created on demand when the
// edge is conditional (EnsureAirlock).
void
LinearScan::InsertSecondChanceCompensation(Lifetime ** branchRegContent, Lifetime **labelRegContent,
    IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr)
{
    IR::Instr *prevInstr = branchInstr->GetPrevRealInstrOrLabel();
    // NOTE(review): due to precedence (&& binds tighter than ||) this evaluates as
    // IsConditional || (prev-is-conditional-branch) || IsMultiBranch; the parentheses
    // are just grouped unusually — behavior looks intended, confirm before re-parenthesizing.
    bool needsAirlock = branchInstr->IsConditional() || (prevInstr->IsBranchInstr() && prevInstr->AsBranchInstr()->IsConditional() || branchInstr->IsMultiBranch());
    bool hasAirlock = false;
    IR::Instr *insertionInstr = branchInstr;
    IR::Instr *insertionStartInstr = branchInstr->m_prev;
    // For loop back-edge, we want to keep the insertionStartInstr before the branch as spill need to happen on all paths
    // Pass a dummy instr address to airLockBlock insertion code.
    BitVector thrashedRegs(0);
    bool isLoopBackEdge = (this->regContent == branchRegContent);
    Lifetime * tmpRegContent[RegNumCount];
    Lifetime **regContent = this->regContent;

    if (isLoopBackEdge)
    {
        Loop *loop = labelInstr->GetLoop();
        // Work on a local copy so the allocator's live regContent isn't mutated.
        js_memcpy_s(&tmpRegContent, (RegNumCount * sizeof(Lifetime *)), this->regContent, sizeof(this->regContent));

        branchRegContent = tmpRegContent;
        regContent = tmpRegContent;

#if defined(_M_IX86) || defined(_M_X64)
        // Insert XCHG to avoid some conflicts for int regs
        // Note: no XCHG on ARM or SSE2. We could however use 3 XOR on ARM...
        this->AvoidCompensationConflicts(labelInstr, branchInstr, labelRegContent, branchRegContent,
            &insertionInstr, &insertionStartInstr, needsAirlock, &hasAirlock);
#endif

        FOREACH_BITSET_IN_UNITBV(reg, this->secondChanceRegs, BitVector)
        {
            Lifetime *labelLifetime = labelRegContent[reg];
            Lifetime *lifetime = branchRegContent[reg];

            // 1. Insert Stores
            //    Lifetime starts before the loop
            //    Lifetime was re-allocated within the loop (i.e.: a load was most likely inserted)
            //    Lifetime is live on back-edge and has unsaved defs.
            if (lifetime && lifetime->start < labelInstr->GetNumber() && lifetime->lastAllocationStart > labelInstr->GetNumber()
                && (labelInstr->GetLoop()->regAlloc.liveOnBackEdgeSyms->Test(lifetime->sym->m_id))
                && !lifetime->defList.Empty())
            {
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                // If the lifetime was second chance allocated inside the loop, there might
                // be spilled loads of this symbol in the loop. Insert the stores.
                // We don't need to do this if the lifetime was re-allocated before the loop.
                //
                // Note that reg may not be equal to lifetime->reg because of inserted XCHG...
                this->InsertStores(lifetime, lifetime->reg, insertionStartInstr);
            }

            if (lifetime == labelLifetime)
            {
                // Branch and label already agree for this register.
                continue;
            }

            // 2. MOV labelReg/MEM, branchReg
            //    Move current register to match content at the top of the loop
            if (this->NeedsLoopBackEdgeCompensation(lifetime, labelInstr))
            {
                // Mismatch, we need to insert compensation code
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);

                // MOV ESI, EAX
                // MOV EDI, ECX
                // MOV ECX, ESI
                // MOV EAX, EDI <<<
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    lifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }

            // 2. MOV labelReg, MEM
            //    Lifetime was in a reg at the top of the loop but is spilled right now.
            if (labelLifetime && labelLifetime->isSpilled && !labelLifetime->sym->IsConst() && labelLifetime->end >= branchInstr->GetNumber())
            {
                if (!loop->regAlloc.liveOnBackEdgeSyms->Test(labelLifetime->sym->m_id))
                {
                    continue;
                }
                if (this->ClearLoopExitIfRegUnused(labelLifetime, (RegNum)reg, branchInstr, loop))
                {
                    continue;
                }
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);

                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    labelLifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
        } NEXT_BITSET_IN_UNITBV;

        // 3. MOV labelReg, MEM
        //    Finish up reloading lifetimes needed at the top. #2 only handled secondChanceRegs.
        FOREACH_REG(reg)
        {
            // Handle lifetimes in a register at the top of the loop, but not currently.
            Lifetime *labelLifetime = labelRegContent[reg];
            if (labelLifetime && !labelLifetime->sym->IsConst() && labelLifetime != branchRegContent[reg] && !thrashedRegs.Test(reg)
                && (loop->regAlloc.liveOnBackEdgeSyms->Test(labelLifetime->sym->m_id)))
            {
                if (this->ClearLoopExitIfRegUnused(labelLifetime, (RegNum)reg, branchInstr, loop))
                {
                    continue;
                }
                // Mismatch, we need to insert compensation code
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);

                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    labelLifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
        } NEXT_REG;

        if (hasAirlock)
        {
            loop->regAlloc.hasAirLock = true;
        }
    }
    else
    {
        //
        // Non-loop-back-edge merge
        //
        FOREACH_REG(reg)
        {
            Lifetime *branchLifetime = branchRegContent[reg];
            Lifetime *lifetime = regContent[reg];

            if (lifetime == branchLifetime)
            {
                continue;
            }

            if (branchLifetime && branchLifetime->isSpilled && !branchLifetime->sym->IsConst() && branchLifetime->end > labelInstr->GetNumber())
            {
                // The lifetime was in a reg at the branch and is now spilled. We need a store on this path.
                //
                //  MOV MEM, branch_REG
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    branchLifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
            if (lifetime && !lifetime->sym->IsConst() && lifetime->start <= branchInstr->GetNumber())
            {
                //  MOV label_REG, branch_REG / MEM
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    lifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
        } NEXT_REG;
    }

    if (hasAirlock)
    {
        // Fix opHelper on airlock label.
        // NOTE(review): the condition tests prev->isOpHelper && !cur->isOpHelper but then
        // assigns prev->isOpHelper = false (already-consistent-looking) — confirm intent.
        if (insertionInstr->m_prev->IsLabelInstr() && insertionInstr->IsLabelInstr())
        {
            if (insertionInstr->m_prev->AsLabelInstr()->isOpHelper && !insertionInstr->AsLabelInstr()->isOpHelper)
            {
                insertionInstr->m_prev->AsLabelInstr()->isOpHelper = false;
            }
        }
    }
}
  3487. void
  3488. LinearScan::ReconcileRegContent(Lifetime ** branchRegContent, Lifetime **labelRegContent,
  3489. IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr,
  3490. Lifetime *lifetime, RegNum reg, BitVector *thrashedRegs, IR::Instr *insertionInstr, IR::Instr *insertionStartInstr)
  3491. {
  3492. RegNum originalReg = RegNOREG;
  3493. IRType type = RegTypes[reg];
  3494. Assert(labelRegContent[reg] != branchRegContent[reg]);
  3495. bool matchBranchReg = (branchRegContent[reg] == lifetime);
  3496. Lifetime **originalRegContent = (matchBranchReg ? labelRegContent : branchRegContent);
  3497. bool isLoopBackEdge = (branchInstr->GetNumber() > labelInstr->GetNumber());
  3498. if (lifetime->sym->IsConst())
  3499. {
  3500. return;
  3501. }
  3502. // Look if this lifetime was in a different register in the previous block.
  3503. // Split the search in 2 to speed this up.
  3504. if (type == TyMachReg)
  3505. {
  3506. FOREACH_INT_REG(regIter)
  3507. {
  3508. if (originalRegContent[regIter] == lifetime)
  3509. {
  3510. originalReg = regIter;
  3511. break;
  3512. }
  3513. } NEXT_INT_REG;
  3514. }
  3515. else
  3516. {
  3517. Assert(type == TyFloat64 || IRType_IsSimd128(type));
  3518. FOREACH_FLOAT_REG(regIter)
  3519. {
  3520. if (originalRegContent[regIter] == lifetime)
  3521. {
  3522. originalReg = regIter;
  3523. break;
  3524. }
  3525. } NEXT_FLOAT_REG;
  3526. }
  3527. RegNum branchReg, labelReg;
  3528. if (matchBranchReg)
  3529. {
  3530. branchReg = reg;
  3531. labelReg = originalReg;
  3532. }
  3533. else
  3534. {
  3535. branchReg = originalReg;
  3536. labelReg = reg;
  3537. }
  3538. if (branchReg != RegNOREG && !thrashedRegs->Test(branchReg) && !lifetime->sym->IsConst())
  3539. {
  3540. Assert(branchRegContent[branchReg] == lifetime);
  3541. if (labelReg != RegNOREG)
  3542. {
  3543. // MOV labelReg, branchReg
  3544. Assert(labelRegContent[labelReg] == lifetime);
  3545. IR::Instr *load = IR::Instr::New(LowererMD::GetLoadOp(type),
  3546. IR::RegOpnd::New(lifetime->sym, labelReg, type, this->func),
  3547. IR::RegOpnd::New(lifetime->sym, branchReg, type, this->func), this->func);
  3548. insertionInstr->InsertBefore(load);
  3549. load->CopyNumber(insertionInstr);
  3550. // symRegUseBv needs to be set properly. Unfortunately, we need to go conservative as we don't know
  3551. // which allocation it was at the source of the branch.
  3552. if (this->IsInLoop())
  3553. {
  3554. this->RecordLoopUse(lifetime, branchReg);
  3555. }
  3556. thrashedRegs->Set(labelReg);
  3557. }
  3558. else if (!lifetime->sym->IsSingleDef() && lifetime->needsStoreCompensation && !isLoopBackEdge)
  3559. {
  3560. Assert(!lifetime->sym->IsConst());
  3561. Assert(matchBranchReg);
  3562. Assert(branchRegContent[branchReg] == lifetime);
  3563. // MOV mem, branchReg
  3564. this->InsertStores(lifetime, branchReg, insertionInstr->m_prev);
  3565. // symRegUseBv needs to be set properly. Unfortunately, we need to go conservative as we don't know
  3566. // which allocation it was at the source of the branch.
  3567. if (this->IsInLoop())
  3568. {
  3569. this->RecordLoopUse(lifetime, branchReg);
  3570. }
  3571. }
  3572. }
  3573. else if (labelReg != RegNOREG)
  3574. {
  3575. Assert(labelRegContent[labelReg] == lifetime);
  3576. Assert(lifetime->sym->IsConst() || lifetime->sym->IsAllocated());
  3577. if (branchReg != RegNOREG && !lifetime->sym->IsSingleDef())
  3578. {
  3579. Assert(thrashedRegs->Test(branchReg));
  3580. // We can't insert a "MOV labelReg, branchReg" at the insertion point
  3581. // because branchReg was thrashed by a previous reload.
  3582. // Look for that reload to see if we can insert before it.
  3583. IR::Instr *newInsertionInstr = insertionInstr->m_prev;
  3584. bool foundIt = false;
  3585. while (LowererMD::IsAssign(newInsertionInstr))
  3586. {
  3587. IR::Opnd *dst = newInsertionInstr->GetDst();
  3588. IR::Opnd *src = newInsertionInstr->GetSrc1();
  3589. if (src->IsRegOpnd() && src->AsRegOpnd()->GetReg() == labelReg)
  3590. {
  3591. // This uses labelReg, give up...
  3592. break;
  3593. }
  3594. if (dst->IsRegOpnd() && dst->AsRegOpnd()->GetReg() == branchReg)
  3595. {
  3596. // Success!
  3597. foundIt = true;
  3598. break;
  3599. }
  3600. newInsertionInstr = newInsertionInstr->m_prev;
  3601. }
  3602. if (foundIt)
  3603. {
  3604. // MOV labelReg, branchReg
  3605. Assert(labelRegContent[labelReg] == lifetime);
  3606. IR::Instr *load = IR::Instr::New(LowererMD::GetLoadOp(type),
  3607. IR::RegOpnd::New(lifetime->sym, labelReg, type, this->func),
  3608. IR::RegOpnd::New(lifetime->sym, branchReg, type, this->func), this->func);
  3609. newInsertionInstr->InsertBefore(load);
  3610. load->CopyNumber(newInsertionInstr);
  3611. // symRegUseBv needs to be set properly. Unfortunately, we need to go conservative as we don't know
  3612. // which allocation it was at the source of the branch.
  3613. if (this->IsInLoop())
  3614. {
  3615. this->RecordLoopUse(lifetime, branchReg);
  3616. }
  3617. thrashedRegs->Set(labelReg);
  3618. return;
  3619. }
  3620. Assert(thrashedRegs->Test(branchReg));
  3621. this->InsertStores(lifetime, branchReg, insertionStartInstr);
  3622. // symRegUseBv needs to be set properly. Unfortunately, we need to go conservative as we don't know
  3623. // which allocation it was at the source of the branch.
  3624. if (this->IsInLoop())
  3625. {
  3626. this->RecordLoopUse(lifetime, branchReg);
  3627. }
  3628. }
  3629. // MOV labelReg, mem
  3630. this->InsertLoad(insertionInstr, lifetime->sym, labelReg);
  3631. thrashedRegs->Set(labelReg);
  3632. }
  3633. else if (!lifetime->sym->IsConst())
  3634. {
  3635. Assert(matchBranchReg);
  3636. Assert(branchReg != RegNOREG);
  3637. // The lifetime was in a register at the top of the loop, but we thrashed it with a previous reload...
  3638. if (!lifetime->sym->IsSingleDef())
  3639. {
  3640. this->InsertStores(lifetime, branchReg, insertionStartInstr);
  3641. }
  3642. #if DBG_DUMP
  3643. if (PHASE_TRACE(Js::SecondChancePhase, this->func))
  3644. {
  3645. Output::Print(L"****** Spilling reg because of bad compensation code order: ");
  3646. lifetime->sym->Dump();
  3647. Output::Print(L"\n");
  3648. }
  3649. #endif
  3650. }
  3651. }
  3652. bool LinearScan::ClearLoopExitIfRegUnused(Lifetime *lifetime, RegNum reg, IR::BranchInstr *branchInstr, Loop *loop)
  3653. {
  3654. // If a lifetime was enregistered into the loop and then spilled, we need compensation at the bottom
  3655. // of the loop to reload the lifetime into that register.
  3656. // If that lifetime was spilled before it was ever used, we don't need the compensation code.
  3657. // We do however need to clear the regContent on any loop exit as the register will not
  3658. // be available anymore on that path.
  3659. // Note: If the lifetime was reloaded into the same register, we might clear the regContent unnecessarily...
  3660. if (!PHASE_OFF(Js::ClearRegLoopExitPhase, this->func))
  3661. {
  3662. return false;
  3663. }
  3664. if (!loop->regAlloc.symRegUseBv->Test(lifetime->sym->m_id) && !lifetime->needsStoreCompensation)
  3665. {
  3666. if (lifetime->end > branchInstr->GetNumber())
  3667. {
  3668. FOREACH_SLIST_ENTRY(Lifetime **, regContent, loop->regAlloc.exitRegContentList)
  3669. {
  3670. if (regContent[reg] == lifetime)
  3671. {
  3672. regContent[reg] = nullptr;
  3673. }
  3674. } NEXT_SLIST_ENTRY;
  3675. }
  3676. return true;
  3677. }
  3678. return false;
  3679. }
#if defined(_M_IX86) || defined(_M_X64)
// Detects ordering conflicts among the register-to-register compensation moves needed on a
// loop back-edge and resolves them by swapping the branch-side register contents with an
// XCHG (x86/x64 only; there is no XCHG for SSE2 registers).
void LinearScan::AvoidCompensationConflicts(IR::LabelInstr *labelInstr, IR::BranchInstr *branchInstr,
    Lifetime *labelRegContent[], Lifetime *branchRegContent[],
    IR::Instr **pInsertionInstr, IR::Instr **pInsertionStartInstr, bool needsAirlock, bool *pHasAirlock)
{
    bool changed = true;

    // Look for conflicts in the incoming compensation code:
    //      MOV ESI, EAX
    //      MOV ECX, ESI   << ESI was lost...
    // Using XCHG:
    //      XCHG ESI, EAX
    //      MOV ECX, EAX
    //
    // Note that we need to iterate while(changed) to catch all conflicts
    while(changed) {
        // conflictRegs[labelReg] records which branch-side register's pending move would be
        // clobbered if labelReg is overwritten before that move executes.
        RegNum conflictRegs[RegNumCount] = {RegNOREG};
        changed = false;
        FOREACH_BITSET_IN_UNITBV(reg, this->secondChanceRegs, BitVector)
        {
            Lifetime *labelLifetime = labelRegContent[reg];
            Lifetime *lifetime = branchRegContent[reg];

            // We don't have an XCHG for SSE2 regs
            if (lifetime == labelLifetime || IRType_IsFloat(RegTypes[reg]))
            {
                continue;
            }
            if (this->NeedsLoopBackEdgeCompensation(lifetime, labelInstr))
            {
                // Mismatch, we need to insert compensation code
                *pInsertionInstr = this->EnsureAirlock(needsAirlock, pHasAirlock, *pInsertionInstr, pInsertionStartInstr, branchInstr, labelInstr);

                if (conflictRegs[reg] != RegNOREG)
                {
                    // Eliminate conflict with an XCHG
                    IR::RegOpnd *reg1 = IR::RegOpnd::New(branchRegContent[reg]->sym, (RegNum)reg, RegTypes[reg], this->func);
                    IR::RegOpnd *reg2 = IR::RegOpnd::New(branchRegContent[reg]->sym, conflictRegs[reg], RegTypes[reg], this->func);
                    IR::Instr *instrXchg = IR::Instr::New(Js::OpCode::XCHG, reg1, reg1, reg2, this->func);
                    (*pInsertionInstr)->InsertBefore(instrXchg);
                    instrXchg->CopyNumber(*pInsertionInstr);

                    // The XCHG swapped the two registers' contents at runtime; mirror that swap
                    // in the branch-side snapshot so subsequent compensation is computed correctly.
                    Lifetime *tmpLifetime = branchRegContent[reg];
                    branchRegContent[reg] = branchRegContent[conflictRegs[reg]];
                    branchRegContent[conflictRegs[reg]] = tmpLifetime;

                    // NOTE(review): 'reg' is the FOREACH_BITSET_IN_UNITBV iteration variable and is
                    // redirected here so the remainder of this iteration operates on the swapped
                    // register — confirm this interacts correctly with the macro's advance step.
                    reg = conflictRegs[reg];
                    changed = true;
                }

                // Find which register the label side expects this lifetime in; a later move into
                // that register would clobber our source, so record the pending conflict.
                RegNum labelReg = RegNOREG;
                FOREACH_INT_REG(regIter)
                {
                    if (labelRegContent[regIter] == branchRegContent[reg])
                    {
                        labelReg = regIter;
                        break;
                    }
                } NEXT_INT_REG;

                if (labelReg != RegNOREG)
                {
                    conflictRegs[labelReg] = (RegNum)reg;
                }
            }
        } NEXT_BITSET_IN_UNITBV;
    }
}
#endif
  3742. RegNum
  3743. LinearScan::SecondChanceAllocation(Lifetime *lifetime, bool force)
  3744. {
  3745. if (PHASE_OFF(Js::SecondChancePhase, this->func) || this->func->HasTry())
  3746. {
  3747. return RegNOREG;
  3748. }
  3749. // Don't start a second chance allocation from a helper block
  3750. if (lifetime->dontAllocate || this->IsInHelperBlock() || lifetime->isDeadStore)
  3751. {
  3752. return RegNOREG;
  3753. }
  3754. Assert(lifetime->isSpilled);
  3755. Assert(lifetime->sym->IsConst() || lifetime->sym->IsAllocated());
  3756. RegNum oldReg = lifetime->reg;
  3757. RegNum reg;
  3758. if (lifetime->start == this->currentInstr->GetNumber() || lifetime->end == this->currentInstr->GetNumber())
  3759. {
  3760. // No point doing second chance if the lifetime ends here, or starts here (normal allocation would
  3761. // have found a register if one is available).
  3762. return RegNOREG;
  3763. }
  3764. if (lifetime->sym->IsConst())
  3765. {
  3766. // Can't second-chance allocate because we might have deleted the initial def instr, after
  3767. // having set the reg content on a forward branch...
  3768. return RegNOREG;
  3769. }
  3770. lifetime->reg = RegNOREG;
  3771. lifetime->isSecondChanceAllocated = true;
  3772. reg = this->FindReg(lifetime, nullptr, force);
  3773. lifetime->reg = oldReg;
  3774. if (reg == RegNOREG)
  3775. {
  3776. lifetime->isSecondChanceAllocated = false;
  3777. return reg;
  3778. }
  3779. // Success!! We're re-allocating this lifetime...
  3780. this->SecondChanceAllocateToReg(lifetime, reg);
  3781. return reg;
  3782. }
  3783. void LinearScan::SecondChanceAllocateToReg(Lifetime *lifetime, RegNum reg)
  3784. {
  3785. RegNum oldReg = lifetime->reg;
  3786. if (oldReg != RegNOREG && this->tempRegLifetimes[oldReg] == lifetime)
  3787. {
  3788. this->tempRegs.Clear(oldReg);
  3789. }
  3790. lifetime->isSpilled = false;
  3791. lifetime->isSecondChanceAllocated = true;
  3792. lifetime->lastAllocationStart = this->currentInstr->GetNumber();
  3793. lifetime->reg = RegNOREG;
  3794. this->AssignActiveReg(lifetime, reg);
  3795. this->secondChanceRegs.Set(reg);
  3796. lifetime->sym->scratch.linearScan.lifetime->useList.Clear();
  3797. #if DBG_DUMP
  3798. if (PHASE_TRACE(Js::SecondChancePhase, this->func))
  3799. {
  3800. Output::Print(L"**** Second chance: ");
  3801. lifetime->sym->Dump();
  3802. Output::Print(L"\t Reg: %S ", RegNames[reg]);
  3803. Output::Print(L" SpillCount:%d Length:%d Cost:%d %S\n",
  3804. lifetime->useCount, lifetime->end - lifetime->start, this->GetSpillCost(lifetime),
  3805. lifetime->isLiveAcrossCalls ? "LiveAcrossCalls" : "");
  3806. }
  3807. #endif
  3808. }
  3809. IR::Instr *
  3810. LinearScan::InsertAirlock(IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr)
  3811. {
  3812. // Insert a new block on a flow arc:
  3813. // JEQ L1 JEQ L2
  3814. // ... => ...
  3815. // <fallthrough> JMP L1
  3816. // L1: L2:
  3817. // <new block>
  3818. // L1:
  3819. // An airlock is needed when we need to add code on a flow arc, and the code can't
  3820. // be added directly at the source or sink of that flow arc without impacting other
  3821. // code paths.
  3822. bool isOpHelper = labelInstr->isOpHelper;
  3823. if (!isOpHelper)
  3824. {
  3825. // Check if branch is coming from helper block.
  3826. IR::Instr *prevLabel = branchInstr->m_prev;
  3827. while (prevLabel && !prevLabel->IsLabelInstr())
  3828. {
  3829. prevLabel = prevLabel->m_prev;
  3830. }
  3831. if (prevLabel && prevLabel->AsLabelInstr()->isOpHelper)
  3832. {
  3833. isOpHelper = true;
  3834. }
  3835. }
  3836. IR::LabelInstr *airlockLabel = IR::LabelInstr::New(Js::OpCode::Label, this->func, isOpHelper);
  3837. airlockLabel->SetRegion(this->currentRegion);
  3838. #if DBG
  3839. if (isOpHelper)
  3840. {
  3841. if (branchInstr->m_isHelperToNonHelperBranch)
  3842. {
  3843. labelInstr->m_noHelperAssert = true;
  3844. }
  3845. if (labelInstr->isOpHelper && labelInstr->m_noHelperAssert)
  3846. {
  3847. airlockLabel->m_noHelperAssert = true;
  3848. }
  3849. }
  3850. #endif
  3851. bool replaced = branchInstr->ReplaceTarget(labelInstr, airlockLabel);
  3852. Assert(replaced);
  3853. IR::Instr * prevInstr = labelInstr->GetPrevRealInstrOrLabel();
  3854. if (prevInstr->HasFallThrough())
  3855. {
  3856. IR::BranchInstr *branchOverAirlock = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelInstr, this->func);
  3857. prevInstr->InsertAfter(branchOverAirlock);
  3858. branchOverAirlock->CopyNumber(prevInstr);
  3859. prevInstr = branchOverAirlock;
  3860. branchOverAirlock->m_isAirlock = true;
  3861. branchOverAirlock->m_regContent = nullptr;
  3862. }
  3863. prevInstr->InsertAfter(airlockLabel);
  3864. airlockLabel->CopyNumber(prevInstr);
  3865. prevInstr = labelInstr->GetPrevRealInstrOrLabel();
  3866. return labelInstr;
  3867. }
  3868. void
  3869. LinearScan::SaveRegContent(IR::Instr *instr)
  3870. {
  3871. bool isLabelLoopTop = false;
  3872. Lifetime ** regContent = AnewArrayZ(this->tempAlloc, Lifetime *, RegNumCount);
  3873. if (instr->IsBranchInstr())
  3874. {
  3875. instr->AsBranchInstr()->m_regContent = regContent;
  3876. }
  3877. else
  3878. {
  3879. Assert(instr->IsLabelInstr());
  3880. Assert(instr->AsLabelInstr()->m_isLoopTop);
  3881. instr->AsLabelInstr()->m_regContent = regContent;
  3882. isLabelLoopTop = true;
  3883. }
  3884. js_memcpy_s(regContent, (RegNumCount * sizeof(Lifetime *)), this->regContent, sizeof(this->regContent));
  3885. #if DBG
  3886. FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->activeLiveranges)
  3887. {
  3888. Assert(regContent[lifetime->reg] == lifetime);
  3889. } NEXT_SLIST_ENTRY;
  3890. #endif
  3891. }
  3892. bool LinearScan::RegsAvailable(IRType type)
  3893. {
  3894. if (IRType_IsFloat(type) || IRType_IsSimd128(type))
  3895. {
  3896. return (this->floatRegUsedCount < FLOAT_REG_COUNT);
  3897. }
  3898. else
  3899. {
  3900. return (this->intRegUsedCount < INT_REG_COUNT);
  3901. }
  3902. }
// Returns the number of helper-block instructions remaining between the current
// instruction and the end of 'lifetime' (used to discount helper code when costing spills).
uint LinearScan::GetRemainingHelperLength(Lifetime *const lifetime)
{
    // Walk the helper block linked list starting from the next helper block until the end of the lifetime
    uint helperLength = 0;
    SList<OpHelperBlock>::Iterator it(opHelperBlockIter);
    Assert(it.IsValid());
    // Upper bound of the walk: at least the current position, at most the lifetime end.
    const uint end = max(currentInstr->GetNumber(), lifetime->end);
    do
    {
        const OpHelperBlock &helper = it.Data();
        const uint helperStart = helper.opHelperLabel->GetNumber();
        // Presumably helper blocks are visited in program order, so the first block that
        // starts past 'end' terminates the walk — TODO confirm the list ordering invariant.
        if(helperStart > end)
        {
            break;
        }
        // Clamp the block to 'end' so only the overlapping portion is counted.
        const uint helperEnd = min(end, helper.opHelperEndInstr->GetNumber());
        helperLength += helperEnd - helperStart;
        if(helperEnd != helper.opHelperEndInstr->GetNumber() || !helper.opHelperEndInstr->IsLabelInstr())
        {
            // A helper block that ends at a label does not return to the function. Since this helper block does not end
            // at a label, include the end instruction as well.
            ++helperLength;
        }
    } while(it.Next());
    return helperLength;
}
  3929. uint LinearScan::CurrentOpHelperVisitedLength(IR::Instr *const currentInstr) const
  3930. {
  3931. Assert(currentInstr);
  3932. if(!currentOpHelperBlock)
  3933. {
  3934. return 0;
  3935. }
  3936. // Consider the current instruction to have not yet been visited
  3937. Assert(currentInstr->GetNumber() >= currentOpHelperBlock->opHelperLabel->GetNumber());
  3938. return currentInstr->GetNumber() - currentOpHelperBlock->opHelperLabel->GetNumber();
  3939. }
// When reloading 'lifetime' into a register inside a loop, try to hoist the load into a
// landing pad before the loop if the register and symbol are untouched inside the loop.
// Returns the instruction before which the load should be inserted (possibly unchanged).
IR::Instr * LinearScan::TryHoistLoad(IR::Instr *instr, Lifetime *lifetime)
{
    // If we are loading a lifetime into a register inside a loop, try to hoist that load outside the loop
    // if that register hasn't been used yet.
    RegNum reg = lifetime->reg;
    IR::Instr *insertInstr = instr;

    if (PHASE_OFF(Js::RegHoistLoadsPhase, this->func))
    {
        return insertInstr;
    }

    // No hoisting with unoptimized EH, or outside the root region.
    if ((this->func->HasTry() && !this->func->DoOptimizeTryCatch()) || (this->currentRegion && this->currentRegion->GetType() != RegionTypeRoot))
    {
        return insertInstr;
    }

    // Register unused, and lifetime unused yet.
    if (this->IsInLoop() && !this->curLoop->regAlloc.regUseBv.Test(reg)
        && !this->curLoop->regAlloc.defdInLoopBv->Test(lifetime->sym->m_id)
        && !this->curLoop->regAlloc.symRegUseBv->Test(lifetime->sym->m_id)
        && !this->curLoop->regAlloc.hasAirLock)
    {
        // Let's hoist!
        insertInstr = insertInstr->m_prev;

        // Walk each instructions until the top of the loop looking for branches
        while (!insertInstr->IsLabelInstr() || !insertInstr->AsLabelInstr()->m_isLoopTop || !insertInstr->AsLabelInstr()->GetLoop()->IsDescendentOrSelf(this->curLoop))
        {
            if (insertInstr->IsBranchInstr() && insertInstr->AsBranchInstr()->m_regContent)
            {
                IR::BranchInstr *branchInstr = insertInstr->AsBranchInstr();
                // That lifetime might have been in another register coming into the loop, and spilled before used.
                // Clear the reg content.
                FOREACH_REG(regIter)
                {
                    if (branchInstr->m_regContent[regIter] == lifetime)
                    {
                        branchInstr->m_regContent[regIter] = nullptr;
                    }
                } NEXT_REG;
                // Set the regContent for that reg to the lifetime on this branch
                branchInstr->m_regContent[reg] = lifetime;
            }
            insertInstr = insertInstr->m_prev;
        }

        IR::LabelInstr *loopTopLabel = insertInstr->AsLabelInstr();

        // Set the reg content for the loop top correctly as well
        FOREACH_REG(regIter)
        {
            if (loopTopLabel->m_regContent[regIter] == lifetime)
            {
                loopTopLabel->m_regContent[regIter] = nullptr;
                this->curLoop->regAlloc.loopTopRegContent[regIter] = nullptr;
            }
        } NEXT_REG;

        Assert(loopTopLabel->GetLoop() == this->curLoop);
        loopTopLabel->m_regContent[reg] = lifetime;
        this->curLoop->regAlloc.loopTopRegContent[reg] = lifetime;

        this->RecordLoopUse(lifetime, reg);

        IR::LabelInstr *loopLandingPad = nullptr;
        Assert(loopTopLabel->GetNumber() != Js::Constants::NoByteCodeOffset);

        // Insert load in landing pad.
        // Redirect branches to new landing pad.
        FOREACH_SLISTCOUNTED_ENTRY_EDITING(IR::BranchInstr *, branchInstr, &loopTopLabel->labelRefs, iter)
        {
            Assert(branchInstr->GetNumber() != Js::Constants::NoByteCodeOffset);
            // <= because the branch may be newly inserted and have the same instr number as the loop top...
            // Branches numbered at or before the loop top come from outside the loop (entry arcs);
            // back-edges keep targeting the loop top itself.
            if (branchInstr->GetNumber() <= loopTopLabel->GetNumber())
            {
                if (!loopLandingPad)
                {
                    // Lazily create the landing pad right before the loop-top label.
                    loopLandingPad = IR::LabelInstr::New(Js::OpCode::Label, this->func);
                    loopLandingPad->SetRegion(this->currentRegion);
                    loopTopLabel->InsertBefore(loopLandingPad);
                    loopLandingPad->CopyNumber(loopTopLabel);
                }
                branchInstr->ReplaceTarget(loopTopLabel, loopLandingPad);
            }
        } NEXT_SLISTCOUNTED_ENTRY_EDITING;
    }

    return insertInstr;
}
#if DBG_DUMP
// Debug-only: walks the function backward counting real instructions, stack-symbol
// loads/stores, and their loop-nest-weighted costs, then prints a one-line summary.
void LinearScan::PrintStats() const
{
    uint loopNest = 0;
    uint storeCount = 0;      // stack-sym stores
    uint loadCount = 0;       // stack-sym loads (src1 + src2)
    uint wStoreCount = 0;     // stores weighted by loop nesting
    uint wLoadCount = 0;      // loads weighted by loop nesting
    uint instrCount = 0;
    bool isInHelper = false;

    // Backward walk: loop tails are seen before their loop tops, so loopNest is
    // incremented at tails and decremented at tops.
    FOREACH_INSTR_IN_FUNC_BACKWARD(instr, this->func)
    {
        switch (instr->GetKind())
        {
        case IR::InstrKindPragma:
            continue;

        case IR::InstrKindBranch:
            if (instr->AsBranchInstr()->IsLoopTail(this->func))
            {
                loopNest++;
            }
            instrCount++;
            break;

        case IR::InstrKindLabel:
        case IR::InstrKindProfiledLabel:
            if (instr->AsLabelInstr()->m_isLoopTop)
            {
                Assert(loopNest);
                loopNest--;
            }
            // NOTE(review): because the walk is backward, this applies the label's helper-ness
            // to the instructions of the PRECEDING block (visited next) — confirm intended.
            isInHelper = instr->AsLabelInstr()->isOpHelper;
            break;

        default:
        {
            Assert(instr->IsRealInstr());
            // Helper-block code is excluded from the load/store statistics.
            if (isInHelper)
            {
                continue;
            }

            // A symbol operand on an allocated stack sym in dst position is a store.
            IR::Opnd *dst = instr->GetDst();
            if (dst && dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsStackSym() && dst->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
            {
                storeCount++;
                wStoreCount += LinearScan::GetUseSpillCost(loopNest, false);
            }

            // Likewise, allocated stack syms in src positions are loads.
            IR::Opnd *src1 = instr->GetSrc1();
            if (src1)
            {
                if (src1->IsSymOpnd() && src1->AsSymOpnd()->m_sym->IsStackSym() && src1->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
                {
                    loadCount++;
                    wLoadCount += LinearScan::GetUseSpillCost(loopNest, false);
                }
                IR::Opnd *src2 = instr->GetSrc2();
                if (src2 && src2->IsSymOpnd() && src2->AsSymOpnd()->m_sym->IsStackSym() && src2->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
                {
                    loadCount++;
                    wLoadCount += LinearScan::GetUseSpillCost(loopNest, false);
                }
            }
        }
        break;
        }
    } NEXT_INSTR_IN_FUNC_BACKWARD;

    // Every loop tail must have been matched by a loop top.
    Assert(loopNest == 0);

    this->func->GetJnFunction()->DumpFullFunctionName();
    Output::SkipToColumn(45);
    Output::Print(L"Instrs:%5d, Lds:%4d, Strs:%4d, WLds: %4d, WStrs: %4d, WRefs: %4d\n",
        instrCount, loadCount, storeCount, wLoadCount, wStoreCount, wLoadCount+wStoreCount);
}
#endif
  4090. #ifdef _M_IX86
  4091. # if ENABLE_DEBUG_CONFIG_OPTIONS
  4092. IR::Instr * LinearScan::GetIncInsertionPoint(IR::Instr *instr)
  4093. {
  4094. // Make sure we don't insert an INC between an instr setting the condition code, and one using it.
  4095. IR::Instr *instrNext = instr;
  4096. while(!EncoderMD::UsesConditionCode(instrNext) && !EncoderMD::SetsConditionCode(instrNext))
  4097. {
  4098. if (instrNext->IsLabelInstr() || instrNext->IsExitInstr() || instrNext->IsBranchInstr())
  4099. {
  4100. break;
  4101. }
  4102. instrNext = instrNext->GetNextRealInstrOrLabel();
  4103. }
  4104. if (instrNext->IsLowered() && EncoderMD::UsesConditionCode(instrNext))
  4105. {
  4106. IR::Instr *instrPrev = instr->GetPrevRealInstrOrLabel();
  4107. while(!EncoderMD::SetsConditionCode(instrPrev))
  4108. {
  4109. instrPrev = instrPrev->GetPrevRealInstrOrLabel();
  4110. Assert(!instrPrev->IsLabelInstr());
  4111. }
  4112. return instrPrev;
  4113. }
  4114. return instr;
  4115. }
  4116. void LinearScan::DynamicStatsInstrument()
  4117. {
  4118. IR::Instr *firstInstr = this->func->m_headInstr;
  4119. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(&(this->func->GetJnFunction()->callCountStats), TyUint32, this->func);
  4120. firstInstr->InsertAfter(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4121. FOREACH_INSTR_IN_FUNC(instr, this->func)
  4122. {
  4123. if (!instr->IsRealInstr() || !instr->IsLowered())
  4124. {
  4125. continue;
  4126. }
  4127. if (EncoderMD::UsesConditionCode(instr) && instr->GetPrevRealInstrOrLabel()->IsLabelInstr())
  4128. {
  4129. continue;
  4130. }
  4131. IR::Opnd *dst = instr->GetDst();
  4132. if (dst && dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsStackSym() && dst->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
  4133. {
  4134. IR::Instr *insertionInstr = this->GetIncInsertionPoint(instr);
  4135. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(&(this->func->GetJnFunction()->regAllocStoreCount), TyUint32, this->func);
  4136. insertionInstr->InsertBefore(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4137. }
  4138. IR::Opnd *src1 = instr->GetSrc1();
  4139. if (src1)
  4140. {
  4141. if (src1->IsSymOpnd() && src1->AsSymOpnd()->m_sym->IsStackSym() && src1->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
  4142. {
  4143. IR::Instr *insertionInstr = this->GetIncInsertionPoint(instr);
  4144. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(&(this->func->GetJnFunction()->regAllocLoadCount), TyUint32, this->func);
  4145. insertionInstr->InsertBefore(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4146. }
  4147. IR::Opnd *src2 = instr->GetSrc2();
  4148. if (src2 && src2->IsSymOpnd() && src2->AsSymOpnd()->m_sym->IsStackSym() && src2->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
  4149. {
  4150. IR::Instr *insertionInstr = this->GetIncInsertionPoint(instr);
  4151. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(&(this->func->GetJnFunction()->regAllocLoadCount), TyUint32, this->func);
  4152. insertionInstr->InsertBefore(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4153. }
  4154. }
  4155. } NEXT_INSTR_IN_FUNC;
  4156. }
  4157. # endif //ENABLE_DEBUG_CONFIG_OPTIONS
  4158. #endif // _M_IX86
  4159. IR::Instr* LinearScan::InsertMove(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr)
  4160. {
  4161. IR::Instr *instrPrev = insertBeforeInstr->m_prev;
  4162. IR::Instr *instrRet = Lowerer::InsertMove(dst, src, insertBeforeInstr);
  4163. for (IR::Instr *instr = instrPrev->m_next; instr != insertBeforeInstr; instr = instr->m_next)
  4164. {
  4165. instr->CopyNumber(insertBeforeInstr);
  4166. }
  4167. return instrRet;
  4168. }