// LinearScan.cpp
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #include "SccLiveness.h"
#if DBG_DUMP || ENABLE_DEBUG_CONFIG_OPTIONS
// Narrow-character register names for dumps/tracing, indexed by RegNum.
// Entries are generated from RegList.h via the REGDAT x-macro (ListName column).
char const * const RegNames[RegNumCount] =
{
#define REGDAT(Name, ListName, ...) "" STRINGIZE(ListName) "",
#include "RegList.h"
#undef REGDAT
};
// Wide-character (char16) variant of RegNames, same indexing.
char16 const * const RegNamesW[RegNumCount] =
{
#define REGDAT(Name, ListName, ...) _u("") STRINGIZEW(ListName) _u(""),
#include "RegList.h"
#undef REGDAT
};
#endif
// Per-register attribute flags, taken from the Attribs column of RegList.h.
static const uint8 RegAttribs[RegNumCount] =
{
#define REGDAT(Name, ListName, Encode, Type, Attribs) Attribs,
#include "RegList.h"
#undef REGDAT
};
// Per-register IRType (int vs. float machine reg), taken from the Type column of
// RegList.h. Defined 'extern' (non-static): used below in Init()/CheckInvariants()
// and presumably referenced from other backend files — TODO confirm the declaration site.
extern const IRType RegTypes[RegNumCount] =
{
#define REGDAT(Name, ListName, Encode, Type, Attribs) Type,
#include "RegList.h"
#undef REGDAT
};
  33. LoweredBasicBlock* LoweredBasicBlock::New(JitArenaAllocator* allocator)
  34. {
  35. return JitAnew(allocator, LoweredBasicBlock, allocator);
  36. }
  37. void LoweredBasicBlock::Copy(LoweredBasicBlock* block)
  38. {
  39. this->inlineeFrameLifetimes.Copy(&block->inlineeFrameLifetimes);
  40. this->inlineeStack.Copy(&block->inlineeStack);
  41. this->inlineeFrameSyms.Copy(&block->inlineeFrameSyms);
  42. }
  43. bool LoweredBasicBlock::HasData()
  44. {
  45. return this->inlineeFrameLifetimes.Count() > 0 || this->inlineeStack.Count() > 0;
  46. }
  47. LoweredBasicBlock* LoweredBasicBlock::Clone(JitArenaAllocator* allocator)
  48. {
  49. if (this->HasData())
  50. {
  51. LoweredBasicBlock* clone = LoweredBasicBlock::New(allocator);
  52. clone->Copy(this);
  53. return clone;
  54. }
  55. return nullptr;
  56. }
  57. bool LoweredBasicBlock::Equals(LoweredBasicBlock* otherBlock)
  58. {
  59. if(this->HasData() != otherBlock->HasData())
  60. {
  61. return false;
  62. }
  63. if (!this->inlineeFrameLifetimes.Equals(&otherBlock->inlineeFrameLifetimes))
  64. {
  65. return false;
  66. }
  67. if (!this->inlineeStack.Equals(&otherBlock->inlineeStack))
  68. {
  69. return false;
  70. }
  71. return true;
  72. }
// LinearScan::RegAlloc
// This register allocator is based on the 1999 linear scan register allocation paper
// by Poletto and Sarkar. This code however walks the IR while doing the lifetime
// allocations, and assigns the regs to all the RegOpnd as it goes. It assumes
// the IR is in R-DFO, and that the lifetime list is sorted in starting order.
// Lifetimes are allocated as they become live, and retired as they go dead. RegOpnd
// are assigned their register. If a lifetime becomes active and there are no free
// registers left, a lifetime is picked to be spilled.
// When we spill, the whole lifetime is spilled. All the loads and stores are done
// through memory for that lifetime, even the ones allocated before the current instruction.
// We do optimize this slightly by not reloading the previous loads that were not in loops.
void
LinearScan::RegAlloc()
{
    // Pass-local arena: everything built below (lists, bit vectors, row indices)
    // is freed wholesale when this function returns.
    NoRecoverMemoryJitArenaAllocator tempAlloc(_u("BE-LinearScan"), this->func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    this->tempAlloc = &tempAlloc;

    // Working state for the scan.
    this->opHelperSpilledLiveranges = JitAnew(&tempAlloc, SList<Lifetime *>, &tempAlloc);
    this->activeLiveranges = JitAnew(&tempAlloc, SList<Lifetime *>, &tempAlloc);
    this->liveOnBackEdgeSyms = JitAnew(&tempAlloc, BVSparse<JitArenaAllocator>, &tempAlloc);
    this->stackPackInUseLiveRanges = JitAnew(&tempAlloc, SList<Lifetime *>, &tempAlloc);
    this->stackSlotsFreeList = JitAnew(&tempAlloc, SList<StackSlot *>, &tempAlloc);
    this->currentBlock = LoweredBasicBlock::New(&tempAlloc);
    IR::Instr *currentInstr = this->func->m_headInstr;

    // Build the lifetime list
    SCCLiveness liveness(this->func, this->tempAlloc);
    BEGIN_CODEGEN_PHASE(this->func, Js::LivenessPhase);
    liveness.Build();
    END_CODEGEN_PHASE(this->func, Js::LivenessPhase);
    this->lifetimeList = &liveness.lifetimeList;
    this->opHelperBlockList = &liveness.opHelperBlockList;
    // Iterator positioned on the first helper block; advanced as the scan passes each one.
    this->opHelperBlockIter = SList<OpHelperBlock>::Iterator(this->opHelperBlockList);
    this->opHelperBlockIter.Next();
    this->Init();

    NativeCodeData::Allocator * nativeAllocator = this->func->GetNativeCodeDataAllocator();
    if (func->hasBailout)
    {
        // One global bailout-record table per inlinee id (0..m_inlineeId inclusive).
        this->globalBailOutRecordTables = NativeCodeDataNewArrayZ(nativeAllocator, GlobalBailOutRecordDataTable *, func->m_inlineeId + 1);
        this->lastUpdatedRowIndices = JitAnewArrayZ(this->tempAlloc, uint *, func->m_inlineeId + 1);
#ifdef PROFILE_BAILOUT_RECORD_MEMORY
        if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
        {
            this->func->GetScriptContext()->bailOutOffsetBytes += (sizeof(GlobalBailOutRecordDataTable *) * (func->m_inlineeId + 1));
            this->func->GetScriptContext()->bailOutRecordBytes += (sizeof(GlobalBailOutRecordDataTable *) * (func->m_inlineeId + 1));
        }
#endif
    }
    m_bailOutRecordCount = 0;
    // When a generator-yield bailout is seen, the bail-in code is generated after
    // the label that follows it; these two track that pending insertion.
    IR::Instr * insertBailInAfter = nullptr;
    BailOutInfo * bailOutInfoForBailIn = nullptr;
    bool endOfBasicBlock = true;

    // Main scan: single forward walk over the IR, allocating/retiring lifetimes.
    FOREACH_INSTR_EDITING(instr, instrNext, currentInstr)
    {
        if (instr->GetNumber() == 0)
        {
            // Instruction numbers are zero only on spill code inserted by this pass itself.
            AssertMsg(LowererMD::IsAssign(instr), "Only expect spill code here");
            continue;
        }
#if DBG_DUMP && defined(ENABLE_DEBUG_CONFIG_OPTIONS)
        if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::LinearScanPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
        {
            instr->Dump();
        }
#endif // DBG
        this->currentInstr = instr;
        if(instr->StartsBasicBlock() || endOfBasicBlock)
        {
            endOfBasicBlock = false;
            ++currentBlockNumber;
        }
        if (instr->IsLabelInstr())
        {
            this->lastLabel = instr->AsLabelInstr();
            if (this->lastLabel->m_loweredBasicBlock)
            {
                // Block info was recorded for this label earlier (e.g. at a branch); reuse it.
                this->currentBlock = this->lastLabel->m_loweredBasicBlock;
            }
            else if(currentBlock->HasData())
            {
                // Check if the previous block has fall-through. If so, retain the block info. If not, create empty info.
                IR::Instr *const prevInstr = instr->GetPrevRealInstrOrLabel();
                Assert(prevInstr);
                if(!prevInstr->HasFallThrough())
                {
                    currentBlock = LoweredBasicBlock::New(&tempAlloc);
                }
            }
            this->currentRegion = this->lastLabel->GetRegion();
        }
        else if (instr->IsBranchInstr())
        {
            if (this->func->HasTry() && this->func->DoOptimizeTry())
            {
                this->ProcessEHRegionBoundary(instr);
            }
            this->ProcessSecondChanceBoundary(instr->AsBranchInstr());
        }
        this->CheckIfInLoop(instr);
        if (this->RemoveDeadStores(instr))
        {
            // Instruction was removed; nothing more to allocate for it.
            continue;
        }
        if (instr->HasBailOutInfo())
        {
            if (this->currentRegion)
            {
                RegionType curRegType = this->currentRegion->GetType();
                if (curRegType == RegionTypeTry || curRegType == RegionTypeCatch || curRegType == RegionTypeFinally)
                {
                    this->func->hasBailoutInEHRegion = true;
                }
            }
            this->FillBailOutRecord(instr);
            if (instr->GetBailOutKind() == IR::BailOutForGeneratorYield)
            {
                // Bail-in code must be generated after the label following the yield;
                // remember it and insert once the scan reaches that label.
                Assert(instr->m_next->IsLabelInstr());
                insertBailInAfter = instr->m_next;
                bailOutInfoForBailIn = instr->GetBailOutInfo();
            }
        }
        // Per-instruction allocation sequence; the order of these calls matters:
        // uses are processed before dead lifetimes end, and dst regs are assigned
        // after new lifetimes are allocated.
        this->SetSrcRegs(instr);
        this->EndDeadLifetimes(instr);
        this->CheckOpHelper(instr);
        this->KillImplicitRegs(instr);
        this->AllocateNewLifetimes(instr);
        this->SetDstReg(instr);
        this->EndDeadOpHelperLifetimes(instr);
        if (instr->IsLabelInstr())
        {
            this->ProcessSecondChanceBoundary(instr->AsLabelInstr());
        }
#if DBG
        this->CheckInvariants();
#endif // DBG
        if(instr->EndsBasicBlock())
        {
            endOfBasicBlock = true;
        }
        if (insertBailInAfter == instr)
        {
            // GenerateBailInForGeneratorYield returns the next instruction to resume the scan at.
            instrNext = linearScanMD.GenerateBailInForGeneratorYield(instr, bailOutInfoForBailIn);
            insertBailInAfter = nullptr;
            bailOutInfoForBailIn = nullptr;
        }
    }NEXT_INSTR_EDITING;

    if (func->hasBailout)
    {
        // Finalize the bailout record tables now that all rows have been filled in.
        for (uint i = 0; i <= func->m_inlineeId; i++)
        {
            if (globalBailOutRecordTables[i] != nullptr)
            {
                globalBailOutRecordTables[i]->Finalize(nativeAllocator, &tempAlloc);
#ifdef PROFILE_BAILOUT_RECORD_MEMORY
                if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
                {
                    func->GetScriptContext()->bailOutOffsetBytes += sizeof(GlobalBailOutRecordDataRow) * globalBailOutRecordTables[i]->length;
                    func->GetScriptContext()->bailOutRecordBytes += sizeof(GlobalBailOutRecordDataRow) * globalBailOutRecordTables[i]->length;
                }
#endif
            }
        }
    }

    // Sanity checks: by the end of the walk every lifetime must have been retired,
    // leaving only the permanently-unallocatable registers in use.
    AssertMsg((this->intRegUsedCount + this->floatRegUsedCount) == this->linearScanMD.UnAllocatableRegCount(this->func) , "RegUsedCount is wrong");
    AssertMsg(this->activeLiveranges->Empty(), "Active list not empty");
    AssertMsg(this->stackPackInUseLiveRanges->Empty(), "Spilled list not empty");
    AssertMsg(!this->opHelperBlockIter.IsValid(), "Got to the end with a helper block still on the list?");
    Assert(this->currentBlock->inlineeStack.Count() == 0);
    this->InsertOpHelperSpillAndRestores();

#if _M_IX86
# if ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.Instrument.IsEnabled(Js::LinearScanPhase, this->func->GetSourceContextId(),this->func->GetLocalFunctionId()))
    {
        this->DynamicStatsInstrument();
    }
# endif
#endif

#if DBG_DUMP
    if (PHASE_STATS(Js::LinearScanPhase, this->func))
    {
        this->PrintStats();
    }
    if (PHASE_TRACE(Js::StackPackPhase, this->func))
    {
        Output::Print(_u("---------------------------\n"));
    }
#endif // DBG_DUMP
    DebugOnly(this->func->allowRemoveBailOutArgInstr = true);
}
  260. JitArenaAllocator *
  261. LinearScan::GetTempAlloc()
  262. {
  263. Assert(tempAlloc);
  264. return tempAlloc;
  265. }
#if DBG
// Debug-only consistency check of the allocator's internal state, run after
// every instruction in RegAlloc. Verifies that the active/helper-spilled lists,
// the register bit vectors, the per-type register counters, the temp-register
// table, and the second-chance register contents all agree with each other.
void
LinearScan::CheckInvariants() const
{
    // Seed with the permanently-unallocatable regs so a lifetime claiming one trips the assert.
    BitVector bv = this->nonAllocatableRegs;
    uint32 lastend = 0;
    FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->activeLiveranges)
    {
        // Make sure there are only one lifetime per reg
        Assert(!bv.Test(lifetime->reg));
        bv.Set(lifetime->reg);
        Assert(!lifetime->isOpHelperSpilled);
        Assert(!lifetime->isSpilled);
        // The active list must stay sorted by lifetime end.
        Assert(lifetime->end >= lastend);
        lastend = lifetime->end;
    }
    NEXT_SLIST_ENTRY;
    // Make sure the active reg bit vector is correct
    Assert(bv.Equal(this->activeRegs));

    // Recount int/float registers from scratch and compare against the cached counters.
    uint ints = 0, floats = 0;
    FOREACH_BITSET_IN_UNITBV(index, this->activeRegs, BitVector)
    {
        if (IRType_IsFloat(RegTypes[index]))
        {
            floats++;
        }
        else
        {
            ints++;
        }
    }
    NEXT_BITSET_IN_UNITBV;
    Assert(ints == this->intRegUsedCount);
    Assert(floats == this->floatRegUsedCount);
    Assert((this->intRegUsedCount + this->floatRegUsedCount) == this->activeRegs.Count());

    // Same exercise for the op-helper-spilled lifetimes.
    bv.ClearAll();
    lastend = 0;
    FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->opHelperSpilledLiveranges)
    {
        // Make sure there are only one lifetime per reg in the op helper spilled liveranges
        Assert(!bv.Test(lifetime->reg));
        if (!lifetime->cantOpHelperSpill)
        {
            bv.Set(lifetime->reg);
            Assert(lifetime->isOpHelperSpilled);
            Assert(!lifetime->isSpilled);
        }
        Assert(lifetime->end >= lastend);
        lastend = lifetime->end;
    }
    NEXT_SLIST_ENTRY;
    // Make sure the opHelperSpilledRegs bit vector is correct
    Assert(bv.Equal(this->opHelperSpilledRegs));

    // Each reg marked as a temp must have a matching entry in the temp lifetime table.
    for (int i = 0; i < RegNumCount; i++)
    {
        if (this->tempRegs.Test(i))
        {
            Assert(this->tempRegLifetimes[i]->reg == i);
        }
    }

    // Every second-chance-allocated reg must hold a lifetime whose sym already
    // has a home location (const or spilled).
    FOREACH_BITSET_IN_UNITBV(reg, this->secondChanceRegs, BitVector)
    {
        Lifetime *lifetime = this->regContent[reg];
        Assert(lifetime);
        StackSym *sym = lifetime->sym;
        Assert(lifetime->isSecondChanceAllocated);
        Assert(sym->IsConst() || sym->IsAllocated()); // Should have been spilled already.
    } NEXT_BITSET_IN_UNITBV;
}
#endif // DBG
// LinearScan::Init
// Initialize bit vectors
//
// Builds the per-register classification sets used throughout allocation:
// - non-allocatable registers are marked permanently "active" and counted
//   against the int/float used-register counters
// - int32Regs/floatRegs partition the register file by type
// - callerSavedRegs/calleeSavedRegs record the calling convention
// Also clears per-instruction scratch state and lets the machine-dependent
// layer (linearScanMD) do its own setup.
void
LinearScan::Init()
{
    FOREACH_REG(reg)
    {
        // Registers that can't be used are set to active, and will remain this way
        if (!LinearScan::IsAllocatable(reg))
        {
            this->activeRegs.Set(reg);
            if (IRType_IsFloat(RegTypes[reg]))
            {
                this->floatRegUsedCount++;
            }
            else
            {
                this->intRegUsedCount++;
            }
        }
        // Partition the register file into machine-int and float64 sets.
        if (RegTypes[reg] == TyMachReg)
        {
            // JIT64_TODO: Rename int32Regs to machIntRegs.
            this->int32Regs.Set(reg);
            numInt32Regs++;
        }
        else if (RegTypes[reg] == TyFloat64)
        {
            this->floatRegs.Set(reg);
            numFloatRegs++;
        }
        // Record the calling-convention save class of each register.
        if (LinearScan::IsCallerSaved(reg))
        {
            this->callerSavedRegs.Set(reg);
        }
        if (LinearScan::IsCalleeSaved(reg))
        {
            this->calleeSavedRegs.Set(reg);
        }
        // No lifetime occupies any register yet.
        this->regContent[reg] = nullptr;
    } NEXT_REG;

    this->instrUseRegs.ClearAll();
    this->secondChanceRegs.ClearAll();

    // Machine-dependent initialization.
    this->linearScanMD.Init(this);

#if DBG
    // Snapshot the permanently-unavailable set so debug verification can tell
    // it apart from registers that merely become active during allocation.
    this->nonAllocatableRegs = this->activeRegs;
#endif
#if DBG_DUMP
    if (PHASE_TRACE(Js::LinearScanPhase, this->func))
    {
        this->func->DumpHeader();
    }
#endif
}
// LinearScan::CheckIfInLoop
// Track whether the current instruction is in a loop or not.
//
// On seeing the branch that ends the current loop (instr number >= loopEnd),
// pops as many loops off the lexical loop stack as have ended here. On seeing
// a loop-top label, pushes a new loop: sets up its regAlloc scratch structures
// and accumulates its live-on-back-edge syms. Returns whether, after this
// update, we are inside a loop.
bool
LinearScan::CheckIfInLoop(IR::Instr *instr)
{
    if (this->IsInLoop())
    {
        // Look for end of loop
        AssertMsg(this->curLoop->regAlloc.loopEnd != 0, "Something is wrong here....");
        if (instr->GetNumber() >= this->curLoop->regAlloc.loopEnd)
        {
            AssertMsg(instr->IsBranchInstr(), "Loop tail should be a branchInstr");
            // Several nested loops can end on the same tail; pop them all.
            while (this->IsInLoop() && instr->GetNumber() >= this->curLoop->regAlloc.loopEnd)
            {
                this->loopNest--;
                this->curLoop->isProcessed = true;
                this->curLoop = this->curLoop->parent;
                if (this->loopNest == 0)
                {
                    // Left the outermost loop: no back edges remain live.
                    this->liveOnBackEdgeSyms->ClearAll();
                }
            }
        }
    }
    if (instr->IsLabelInstr() && instr->AsLabelInstr()->m_isLoopTop)
    {
        IR::LabelInstr * labelInstr = instr->AsLabelInstr();
        Loop *parentLoop = this->curLoop;
        if (parentLoop)
        {
            parentLoop->isLeaf = false;
        }
        this->curLoop = labelInstr->GetLoop();
        this->curLoop->isProcessed = false;
        // Lexically nested may not always nest in a flow based way:
        // while(i--) {
        // if (cond) {
        // while(j--) {
        // }
        // break;
        // }
        // }
        // These look nested, but they are not...
        // So update the flow based parent to be lexical or we won't be able to figure out when we get back
        // to the outer loop.
        // REVIEW: This isn't necessary anymore now that break blocks are moved out of the loops.
        this->curLoop->parent = parentLoop;
        // Fresh per-loop regAlloc scratch state for this allocation pass.
        this->curLoop->regAlloc.defdInLoopBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
        this->curLoop->regAlloc.symRegUseBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
        this->curLoop->regAlloc.loopStart = labelInstr->GetNumber();
        this->curLoop->regAlloc.exitRegContentList = JitAnew(this->tempAlloc, SList<Lifetime **>, this->tempAlloc);
        this->curLoop->regAlloc.regUseBv = 0;
        this->liveOnBackEdgeSyms->Or(this->curLoop->regAlloc.liveOnBackEdgeSyms);
        this->loopNest++;
    }
    return this->IsInLoop();
}
  447. void
  448. LinearScan::InsertOpHelperSpillAndRestores()
  449. {
  450. linearScanMD.InsertOpHelperSpillAndRestores(opHelperBlockList);
  451. }
// LinearScan::CheckOpHelper
// Maintain the "current op-helper block" state as we walk instructions.
//
// When instr is the end of the current helper block, every lifetime that was
// helper-spilled is put back on the active list and queued (in spilledLifetime)
// so the save/restore code can be inserted after allocation completes. When
// instr is the label that begins the next helper block in the list, that block
// becomes current.
void
LinearScan::CheckOpHelper(IR::Instr *instr)
{
    if (this->IsInHelperBlock())
    {
        if (this->currentOpHelperBlock->opHelperEndInstr == instr)
        {
            // Get targetInstr if we can.
            // We can deterministically get it only for unconditional branches, as conditional branch may fall through.
            IR::Instr * targetInstr = nullptr;
            if (instr->IsBranchInstr() && instr->AsBranchInstr()->IsUnconditional())
            {
                AssertMsg(!instr->AsBranchInstr()->IsMultiBranch(), "Not supported for Multibranch");
                targetInstr = instr->AsBranchInstr()->GetTarget();
            }
            /*
             * Keep track of the number of registers we've had to
             * store and restore around a helper block for LinearScanMD (on ARM
             * and X64). We need this to be able to allocate space in the frame.
             * We can't emit a PUSH/POP sequence around the block like IA32 because
             * the stack pointer can't move outside the prolog.
             */
            uint32 helperSpilledLiverangeCount = 0;
            // Exiting a helper block. We are going to insert
            // the restore here after linear scan. So put all the restored
            // lifetime back to active
            while (!this->opHelperSpilledLiveranges->Empty())
            {
                Lifetime * lifetime = this->opHelperSpilledLiveranges->Pop();
                lifetime->isOpHelperSpilled = false;
                if (!lifetime->cantOpHelperSpill)
                {
                    // Put the life time back to active
                    this->AssignActiveReg(lifetime, lifetime->reg);
                    bool reload = true;
                    // Lifetime ends before the target after helper block, don't need to save and restore helper spilled lifetime.
                    if (targetInstr && lifetime->end < targetInstr->GetNumber())
                    {
                        // However, if lifetime is spilled as arg - we still need to spill it because the helper assumes the value
                        // to be available in the stack
                        if (lifetime->isOpHelperSpillAsArg)
                        {
                            // we should not attempt to restore it as it is dead on return from the helper.
                            reload = false;
                        }
                        else
                        {
                            Assert(!instr->AsBranchInstr()->IsLoopTail(this->func));
                            continue;
                        }
                    }
                    // Save all the lifetime that needs to be restored
                    OpHelperSpilledLifetime spilledLifetime;
                    spilledLifetime.lifetime = lifetime;
                    spilledLifetime.spillAsArg = lifetime->isOpHelperSpillAsArg;
                    spilledLifetime.reload = reload;
                    /*
                     * Can't unfortunately move this into the else block above because we don't know if this
                     * lifetime will actually get spilled until register allocation completes.
                     * Instead we allocate a slot to this StackSym in LinearScanMD iff
                     * !(lifetime.isSpilled && lifetime.noReloadsIfSpilled).
                     */
                    helperSpilledLiverangeCount++;
                    // save the reg in case it is spilled later. We still need to save and restore
                    // for the non-loop case.
                    spilledLifetime.reg = lifetime->reg;
                    this->currentOpHelperBlock->spilledLifetime.Prepend(spilledLifetime);
                }
                else
                {
                    // Clear it for the next helper block
                    lifetime->cantOpHelperSpill = false;
                }
                lifetime->isOpHelperSpillAsArg = false;
            }
            this->totalOpHelperFullVisitedLength += this->currentOpHelperBlock->Length();
            // Use a dummy label as the insertion point of the reloads, as second-chance-allocation
            // may insert compensation code right before the branch
            IR::PragmaInstr *dummyLabel = IR::PragmaInstr::New(Js::OpCode::Nop, 0, this->func);
            this->currentOpHelperBlock->opHelperEndInstr->InsertBefore(dummyLabel);
            dummyLabel->CopyNumber(this->currentOpHelperBlock->opHelperEndInstr);
            this->currentOpHelperBlock->opHelperEndInstr = dummyLabel;
            this->opHelperSpilledRegs.ClearAll();
            this->currentOpHelperBlock = nullptr;
            linearScanMD.EndOfHelperBlock(helperSpilledLiverangeCount);
        }
    }
    // Check whether this instruction begins the next helper block in the list.
    if (this->opHelperBlockIter.IsValid())
    {
        AssertMsg(
            !instr->IsLabelInstr() ||
            !instr->AsLabelInstr()->isOpHelper ||
            this->opHelperBlockIter.Data().opHelperLabel == instr,
            "Found a helper label that doesn't begin the next helper block in the list?");
        if (this->opHelperBlockIter.Data().opHelperLabel == instr)
        {
            this->currentOpHelperBlock = &this->opHelperBlockIter.Data();
            this->opHelperBlockIter.Next();
        }
    }
}
  553. uint
  554. LinearScan::HelperBlockStartInstrNumber() const
  555. {
  556. Assert(IsInHelperBlock());
  557. return this->currentOpHelperBlock->opHelperLabel->GetNumber();
  558. }
  559. uint
  560. LinearScan::HelperBlockEndInstrNumber() const
  561. {
  562. Assert(IsInHelperBlock());
  563. return this->currentOpHelperBlock->opHelperEndInstr->GetNumber();
  564. }
  565. // LinearScan::AddToActive
  566. // Add a lifetime to the active list. The list is kept sorted in order lifetime end.
  567. // This makes it easier to pick the lifetimes to retire.
  568. void
  569. LinearScan::AddToActive(Lifetime * lifetime)
  570. {
  571. LinearScan::AddLiveRange(this->activeLiveranges, lifetime);
  572. this->regContent[lifetime->reg] = lifetime;
  573. if (lifetime->isSecondChanceAllocated)
  574. {
  575. this->secondChanceRegs.Set(lifetime->reg);
  576. }
  577. else
  578. {
  579. Assert(!this->secondChanceRegs.Test(lifetime->reg));
  580. }
  581. }
// LinearScan::AddOpHelperSpilled
// Mark a lifetime as spilled around the current op-helper block: its register
// is freed for use inside the helper block, and the lifetime is queued on
// opHelperSpilledLiveranges so it can be restored when the block ends.
void
LinearScan::AddOpHelperSpilled(Lifetime * lifetime)
{
    RegNum reg = lifetime->reg;
    Assert(this->IsInHelperBlock());
    Assert(!this->opHelperSpilledRegs.Test(reg));
    Assert(lifetime->isOpHelperSpilled == false);
    Assert(lifetime->cantOpHelperSpill == false);

    this->opHelperSpilledRegs.Set(reg);
    lifetime->isOpHelperSpilled = true;

    // The register no longer holds this lifetime's value.
    this->regContent[reg] = nullptr;
    this->secondChanceRegs.Clear(reg);

    // If a lifetime is being OpHelper spilled and it's an inlinee arg sym
    // we need to make sure its spilled to the sym offset spill space, i.e. isOpHelperSpillAsArg
    // is set. Otherwise, it's value will not be available on inline frame reconstruction.
    if (this->currentBlock->inlineeFrameSyms.Count() > 0 &&
        this->currentBlock->inlineeFrameSyms.ContainsKey(lifetime->sym->m_id) &&
        (lifetime->sym->m_isSingleDef || !lifetime->defList.Empty()))
    {
        lifetime->isOpHelperSpillAsArg = true;
        if (!lifetime->sym->IsAllocated())
        {
            this->AllocateStackSpace(lifetime);
        }
        this->RecordLoopUse(lifetime, lifetime->reg);
    }
    LinearScan::AddLiveRange(this->opHelperSpilledLiveranges, lifetime);
}
  610. void
  611. LinearScan::RemoveOpHelperSpilled(Lifetime * lifetime)
  612. {
  613. Assert(this->IsInHelperBlock());
  614. Assert(lifetime->isOpHelperSpilled);
  615. Assert(lifetime->cantOpHelperSpill == false);
  616. Assert(this->opHelperSpilledRegs.Test(lifetime->reg));
  617. this->opHelperSpilledRegs.Clear(lifetime->reg);
  618. lifetime->isOpHelperSpilled = false;
  619. lifetime->cantOpHelperSpill = false;
  620. lifetime->isOpHelperSpillAsArg = false;
  621. this->opHelperSpilledLiveranges->Remove(lifetime);
  622. }
  623. void
  624. LinearScan::SetCantOpHelperSpill(Lifetime * lifetime)
  625. {
  626. Assert(this->IsInHelperBlock());
  627. Assert(lifetime->isOpHelperSpilled);
  628. Assert(lifetime->cantOpHelperSpill == false);
  629. this->opHelperSpilledRegs.Clear(lifetime->reg);
  630. lifetime->isOpHelperSpilled = false;
  631. lifetime->cantOpHelperSpill = true;
  632. }
// LinearScan::AddLiveRange
// Insert newLifetime into a list kept sorted by ascending lifetime end.
// The editing iterator scans forward to the first entry that ends after
// newLifetime; after the loop (whether it broke or ran off the end) the
// iterator marks the insertion point.
void
LinearScan::AddLiveRange(SList<Lifetime *> * list, Lifetime * newLifetime)
{
    FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, list, iter)
    {
        if (newLifetime->end < lifetime->end)
        {
            break;
        }
    }
    NEXT_SLIST_ENTRY_EDITING;
    // iter is still valid here and points at the sorted insertion position.
    iter.InsertBefore(newLifetime);
}
// LinearScan::RemoveRegLiveRange
// Remove and return the lifetime currently assigned to the given register.
// The register is expected to be present; failure to find it is a bug.
Lifetime *
LinearScan::RemoveRegLiveRange(SList<Lifetime *> * list, RegNum reg)
{
    // Find the register in the active set
    FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, list, iter)
    {
        if (lifetime->reg == reg)
        {
            Lifetime * lifetimeReturn = lifetime;
            iter.RemoveCurrent();
            return lifetimeReturn;
        }
    } NEXT_SLIST_ENTRY_EDITING;

    AssertMsg(false, "Can't find life range for a reg");
    return nullptr;
}
// LinearScan::SetDstReg
// Set the reg on each RegOpnd def.
//
// Handles three cases:
// 1. dst has no sym (or an arg-slot sym): the register is pre-assigned by the
//    calling convention; spill whatever lifetime currently holds it.
// 2. dst's lifetime is spilled: find/assign a temp register, fold the store
//    into memory when possible, or try second-chance allocation.
// 3. dst's lifetime is live: if it was helper-spilled, bring it back to
//    active (a def means the save/restore pair must not be emitted).
void
LinearScan::SetDstReg(IR::Instr *instr)
{
    //
    // Enregister dst
    //
    IR::Opnd *dst = instr->GetDst();
    if (dst == nullptr)
    {
        return;
    }
    if (!dst->IsRegOpnd())
    {
        // This could be, for instance, a store to a sym with a large offset
        // that was just assigned when we saw the use.
        this->linearScanMD.LegalizeDef(instr);
        return;
    }
    IR::RegOpnd * regOpnd = dst->AsRegOpnd();
    /*
     * If this is a register used to setup a callsite per
     * a calling convention then mark it unavailable to allocate
     * until we see a CALL.
     */
    if (regOpnd->m_isCallArg)
    {
        RegNum callSetupReg = regOpnd->GetReg();
        callSetupRegs.Set(callSetupReg);
    }
    StackSym * stackSym = regOpnd->m_sym;
    // Arg slot sym can be in a RegOpnd for param passed via registers
    // Just use the assigned register
    if (stackSym == nullptr || stackSym->IsArgSlotSym())
    {
        //
        // Already allocated register. just spill the destination
        //
        RegNum reg = regOpnd->GetReg();
        if(LinearScan::IsAllocatable(reg))
        {
            this->SpillReg(reg);
        }
        this->tempRegs.Clear(reg);
    }
    else
    {
        if (regOpnd->GetReg() != RegNOREG)
        {
            this->RecordLoopUse(nullptr, regOpnd->GetReg());
            // Nothing to do
            return;
        }
        Lifetime * lifetime = stackSym->scratch.linearScan.lifetime;
        uint32 useCountCost = LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr));
        // Optimistically decrease the useCount. We'll undo this if we put it on the defList.
        lifetime->SubFromUseCount(useCountCost, this->curLoop);
        if (lifetime->isSpilled)
        {
            if (stackSym->IsConst() && !IsSymNonTempLocalVar(stackSym))
            {
                // We will reload the constant (but in debug mode, we still need to process this if this is a user var).
                return;
            }
            RegNum reg = regOpnd->GetReg();
            if (reg != RegNOREG)
            {
                // It is already assigned, just record it as a temp reg
                this->AssignTempReg(lifetime, reg);
            }
            else
            {
                IR::Opnd *src1 = instr->GetSrc1();
                IR::Opnd *src2 = instr->GetSrc2();
                if ((src1 && src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym == stackSym) ||
                    (src2 && src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym == stackSym))
                {
                    // OpEQ: src1 should have a valid reg (put src2 for other targets)
                    reg = this->GetAssignedTempReg(lifetime, dst->GetType());
                    Assert(reg != RegNOREG);
                    RecordDef(lifetime, instr, 0);
                }
                else
                {
                    // Try second chance
                    reg = this->SecondChanceAllocation(lifetime, false);
                    if (reg != RegNOREG)
                    {
                        Assert(!stackSym->m_isSingleDef);
                        this->SetReg(regOpnd);
                        // Keep track of defs for this lifetime, in case it gets spilled.
                        RecordDef(lifetime, instr, useCountCost);
                        return;
                    }
                    else
                    {
                        reg = this->GetAssignedTempReg(lifetime, dst->GetType());
                        RecordDef(lifetime, instr, 0);
                    }
                }
                if (LowererMD::IsAssign(instr) && instr->GetSrc1()->IsRegOpnd())
                {
                    // Fold the spilled store
                    if (reg != RegNOREG)
                    {
                        // If the value is in a temp reg, it's not valid any more.
                        this->tempRegs.Clear(reg);
                    }
                    // Rewrite the dst to store straight to the sym's stack slot.
                    IRType srcType = instr->GetSrc1()->GetType();
                    instr->ReplaceDst(IR::SymOpnd::New(stackSym, srcType, this->func));
                    this->linearScanMD.LegalizeDef(instr);
                    return;
                }
                if (reg == RegNOREG)
                {
                    IR::Opnd *src = instr->GetSrc1();
                    if (src && src->IsRegOpnd() && src->AsRegOpnd()->m_sym == stackSym)
                    {
                        // Handle OPEQ's for x86/x64
                        reg = src->AsRegOpnd()->GetReg();
                        AssertMsg(!this->activeRegs.Test(reg), "Shouldn't be active");
                    }
                    else
                    {
                        // The lifetime was spilled, but we still need a reg for this operand.
                        reg = this->FindReg(nullptr, regOpnd);
                    }
                    this->AssignTempReg(lifetime, reg);
                }
            }
            if (!lifetime->isDeadStore && !lifetime->isSecondChanceAllocated)
            {
                // Insert a store since the lifetime is spilled
                this->InsertStore(instr, regOpnd->m_sym, reg);
            }
        }
        else
        {
            if (lifetime->isOpHelperSpilled)
            {
                // We must be in a helper block and the lifetime must
                // start before the helper block
                Assert(this->IsInHelperBlock());
                Assert(lifetime->start < this->HelperBlockStartInstrNumber());
                RegNum reg = lifetime->reg;
                Assert(this->opHelperSpilledRegs.Test(reg));
                if (this->activeRegs.Test(reg))
                {
                    // The reg must have been used locally in the helper block
                    // by some other lifetime. Just spill it
                    this->SpillReg(reg);
                }
                // We can't save/restore this reg across the helper call because the restore would overwrite
                // this def, but the def means we don't need to spill at all. Mark the lifetime as cantOpHelperSpill
                // however in case another helper call in this block tries to spill it.
                this->SetCantOpHelperSpill(lifetime);
                this->AddToActive(lifetime);
                this->tempRegs.Clear(reg);
                this->activeRegs.Set(reg);
                if (RegTypes[reg] == TyMachReg)
                {
                    this->intRegUsedCount++;
                }
                else
                {
                    Assert(RegTypes[reg] == TyFloat64);
                    this->floatRegUsedCount++;
                }
            }
            // Keep track of defs for this lifetime, in case it gets spilled.
            RecordDef(lifetime, instr, useCountCost);
        }
        this->SetReg(regOpnd);
    }
}
  838. // Get the stack offset of the non temp locals from the stack.
  839. int32 LinearScan::GetStackOffset(Js::RegSlot regSlotId)
  840. {
  841. int32 stackSlotId = regSlotId - this->func->GetJITFunctionBody()->GetFirstNonTempLocalIndex();
  842. Assert(stackSlotId >= 0);
  843. return this->func->GetLocalVarSlotOffset(stackSlotId);
  844. }
  845. //
  846. // This helper function is used for saving bytecode stack sym value to memory / local slots on stack so that we can read it for the locals inspection.
  847. void
  848. LinearScan::WriteThroughForLocal(IR::RegOpnd* regOpnd, Lifetime* lifetime, IR::Instr* instrInsertAfter)
  849. {
  850. Assert(regOpnd);
  851. Assert(lifetime);
  852. Assert(instrInsertAfter);
  853. StackSym* sym = regOpnd->m_sym;
  854. Assert(IsSymNonTempLocalVar(sym));
  855. Js::RegSlot slotIndex = sym->GetByteCodeRegSlot();
  856. // First we insert the write through moves
  857. sym->m_offset = GetStackOffset(slotIndex);
  858. sym->m_allocated = true;
  859. // Save the value on reg to local var slot.
  860. this->InsertStore(instrInsertAfter, sym, lifetime->reg);
  861. }
  862. bool
  863. LinearScan::NeedsWriteThrough(StackSym * sym)
  864. {
  865. return this->NeedsWriteThroughForEH(sym) || this->IsSymNonTempLocalVar(sym);
  866. }
  867. bool
  868. LinearScan::NeedsWriteThroughForEH(StackSym * sym)
  869. {
  870. if (!this->func->HasTry() || !this->func->DoOptimizeTry() || !sym->HasByteCodeRegSlot())
  871. {
  872. return false;
  873. }
  874. Assert(this->currentRegion);
  875. return this->currentRegion->writeThroughSymbolsSet && this->currentRegion->writeThroughSymbolsSet->Test(sym->m_id);
  876. }
  877. // Helper routine to check if current sym belongs to non temp bytecodereg
  878. bool
  879. LinearScan::IsSymNonTempLocalVar(StackSym *sym)
  880. {
  881. Assert(sym);
  882. if (this->func->IsJitInDebugMode() && sym->HasByteCodeRegSlot())
  883. {
  884. Js::RegSlot slotIndex = sym->GetByteCodeRegSlot();
  885. return this->func->IsNonTempLocalVar(slotIndex);
  886. }
  887. return false;
  888. }
// LinearScan::SetSrcRegs
// Set the reg on each RegOpnd use.
// Note that this includes regOpnd of indir dsts...
void
LinearScan::SetSrcRegs(IR::Instr *instr)
{
    //
    // Enregister srcs
    //
    IR::Opnd *src1 = instr->GetSrc1();
    if (src1 != nullptr)
    {
        // Capture src2 now as folding in SetUses could swab the srcs...
        IR::Opnd *src2 = instr->GetSrc2();
        this->SetUses(instr, src1);
        if (src2 != nullptr)
        {
            this->SetUses(instr, src2);
        }
    }
    // An indir dst reads its base/index registers, so treat it as a use too.
    IR::Opnd *dst = instr->GetDst();
    if (dst && dst->IsIndirOpnd())
    {
        this->SetUses(instr, dst);
    }
    // Reset the per-instruction use-register tracking for the next instruction.
    this->instrUseRegs.ClearAll();
}
// LinearScan::SetUses
// Dispatch on operand kind and enregister (or legalize) each register use it
// contains: plain reg opnds, stack syms referenced by sym opnds, the base and
// index of indir opnds, and constants that may need MD legalization.
void
LinearScan::SetUses(IR::Instr *instr, IR::Opnd *opnd)
{
    switch (opnd->GetKind())
    {
    case IR::OpndKindReg:
        this->SetUse(instr, opnd->AsRegOpnd());
        break;

    case IR::OpndKindSym:
        {
            Sym * sym = opnd->AsSymOpnd()->m_sym;
            if (sym->IsStackSym())
            {
                StackSym* stackSym = sym->AsStackSym();
                if (!stackSym->IsAllocated())
                {
                    func->StackAllocate(stackSym, opnd->GetSize());
                    // StackSym's lifetime is allocated during SCCLiveness::ProcessDst
                    // we might not need to set the flag if the sym is not a dst.
                    if (stackSym->scratch.linearScan.lifetime)
                    {
                        stackSym->scratch.linearScan.lifetime->cantStackPack = true;
                    }
                }
                this->linearScanMD.LegalizeUse(instr, opnd);
            }
        }
        break;

    case IR::OpndKindIndir:
        {
            // Base and index registers of an indir are both uses.
            IR::IndirOpnd * indirOpnd = opnd->AsIndirOpnd();
            if (indirOpnd->GetBaseOpnd())
            {
                this->SetUse(instr, indirOpnd->GetBaseOpnd());
            }
            if (indirOpnd->GetIndexOpnd())
            {
                this->SetUse(instr, indirOpnd->GetIndexOpnd());
            }
        }
        break;

    case IR::OpndKindIntConst:
    case IR::OpndKindAddr:
        this->linearScanMD.LegalizeConstantUse(instr, opnd);
        break;
    };
}
  964. struct FillBailOutState
  965. {
  966. SListCounted<Js::Var> constantList;
  967. uint registerSaveCount;
  968. StackSym * registerSaveSyms[RegNumCount - 1];
  969. FillBailOutState(JitArenaAllocator * allocator) : constantList(allocator) {}
  970. };
// LinearScan::FillBailOutOffset
// Compute the restore location encoding for one bytecode sym in a bailout
// record and write it into *offset (which must still be 0, i.e. unclaimed).
// Encodings:
// - constants: a 1-based index into state->constantList, biased past the
//   register-save and reserve slots
// - spilled syms (and everything on BailOutOnException, except the exception
//   object sym): the sym's stack offset
// - register-resident syms: the register-save-area index for the assigned reg
void
LinearScan::FillBailOutOffset(int * offset, StackSym * stackSym, FillBailOutState * state, IR::Instr * instr)
{
    AssertMsg(*offset == 0, "Can't have two active lifetime for the same byte code register");
    if (stackSym->IsConst())
    {
        state->constantList.Prepend(reinterpret_cast<Js::Var>(stackSym->GetLiteralConstValue_PostGlobOpt()));
        // Constant offset are offset by the number of register save slots
        *offset = state->constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
    }
    else if (stackSym->m_isEncodedConstant)
    {
        Assert(!stackSym->m_isSingleDef);
        state->constantList.Prepend((Js::Var)stackSym->constantValue);
        // Constant offset are offset by the number of register save slots
        *offset = state->constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
    }
    else
    {
        Lifetime * lifetime = stackSym->scratch.linearScan.lifetime;
        // The sym must be live across this instruction to be restorable.
        Assert(lifetime && lifetime->start < instr->GetNumber() && instr->GetNumber() <= lifetime->end);
        if (instr->GetBailOutKind() == IR::BailOutOnException)
        {
            // Apart from the exception object sym, lifetimes for all other syms that need to be restored at this bailout,
            // must have been spilled at least once (at the TryCatch, or at the Leave, or both)
            // Post spilling, a lifetime could have been second chance allocated. But, it should still have stack allocated for its sym
            Assert(stackSym->IsAllocated() || (stackSym == this->currentRegion->GetExceptionObjectSym()));
        }
        this->PrepareForUse(lifetime);
        if (lifetime->isSpilled ||
            ((instr->GetBailOutKind() == IR::BailOutOnException) && (stackSym != this->currentRegion->GetExceptionObjectSym()))) // BailOutOnException must restore from memory
        {
            Assert(stackSym->IsAllocated());
#ifdef MD_GROW_LOCALS_AREA_UP
            *offset = -((int)stackSym->m_offset + BailOutInfo::StackSymBias);
#else
            // Stack offset are negative, includes the PUSH EBP and return address
            *offset = stackSym->m_offset - (2 * MachPtr);
#endif
        }
        else
        {
            Assert(lifetime->reg != RegNOREG);
            // A register may carry at most one sym into the bailout.
            Assert(state->registerSaveSyms[lifetime->reg - 1] == nullptr ||
                state->registerSaveSyms[lifetime->reg - 1] == stackSym);
            AssertMsg((stackSym->IsFloat64() || stackSym->IsSimd128()) && RegTypes[lifetime->reg] == TyFloat64 ||
                !(stackSym->IsFloat64() || stackSym->IsSimd128()) && RegTypes[lifetime->reg] != TyFloat64,
                "Trying to save float64 sym into non-float64 reg or non-float64 sym into float64 reg");
            // Save the register value to the register save space using the reg enum value as index
            state->registerSaveSyms[lifetime->reg - 1] = stackSym;
            *offset = LinearScanMD::GetRegisterSaveIndex(lifetime->reg);
            state->registerSaveCount++;
        }
    }
}
// Per-function scratch data built while filling a bailout record: for each
// bytecode local, its restore offset and which type-view bit vectors it
// belongs to. One instance per function in the inlinee chain.
struct FuncBailOutData
{
    Func * func;                    // the (possibly inlined) function this data describes
    BailOutRecord * bailOutRecord;  // the record being populated
    int * localOffsets;             // per-local restore offset; 0 means "not live"
    BVFixed * losslessInt32Syms;    // locals whose value is a lossless int32
    BVFixed * float64Syms;          // locals whose value is a float64
#ifdef ENABLE_SIMDJS
    // SIMD_JS: one bit vector per SIMD128 type interpretation of a local.
    BVFixed * simd128F4Syms;
    BVFixed * simd128I4Syms;
    BVFixed * simd128I8Syms;
    BVFixed * simd128I16Syms;
    BVFixed * simd128U4Syms;
    BVFixed * simd128U8Syms;
    BVFixed * simd128U16Syms;
    BVFixed * simd128B4Syms;
    BVFixed * simd128B8Syms;
    BVFixed * simd128B16Syms;
#endif
    void Initialize(Func * func, JitArenaAllocator * tempAllocator);
    void FinalizeLocalOffsets(JitArenaAllocator *allocator, GlobalBailOutRecordDataTable *table, uint **lastUpdatedRowIndices);
    void Clear(JitArenaAllocator * tempAllocator);
};
  1050. void
  1051. FuncBailOutData::Initialize(Func * func, JitArenaAllocator * tempAllocator)
  1052. {
  1053. Js::RegSlot localsCount = func->GetJITFunctionBody()->GetLocalsCount();
  1054. this->func = func;
  1055. this->localOffsets = AnewArrayZ(tempAllocator, int, localsCount);
  1056. this->losslessInt32Syms = BVFixed::New(localsCount, tempAllocator);
  1057. this->float64Syms = BVFixed::New(localsCount, tempAllocator);
  1058. #ifdef ENABLE_SIMDJS
  1059. // SIMD_JS
  1060. this->simd128F4Syms = BVFixed::New(localsCount, tempAllocator);
  1061. this->simd128I4Syms = BVFixed::New(localsCount, tempAllocator);
  1062. this->simd128I8Syms = BVFixed::New(localsCount, tempAllocator);
  1063. this->simd128I16Syms = BVFixed::New(localsCount, tempAllocator);
  1064. this->simd128U4Syms = BVFixed::New(localsCount, tempAllocator);
  1065. this->simd128U8Syms = BVFixed::New(localsCount, tempAllocator);
  1066. this->simd128U16Syms = BVFixed::New(localsCount, tempAllocator);
  1067. this->simd128B4Syms = BVFixed::New(localsCount, tempAllocator);
  1068. this->simd128B8Syms = BVFixed::New(localsCount, tempAllocator);
  1069. this->simd128B16Syms = BVFixed::New(localsCount, tempAllocator);
  1070. #endif
  1071. }
// FuncBailOutData::FinalizeLocalOffsets
// Fold this function's per-local restore info into the shared
// GlobalBailOutRecordDataTable. For each live local (non-zero offset), add or
// update its row with the local's type-view flags and offset, caching the row
// index in *lastUpdatedRowIndices to make repeated updates cheap.
void
FuncBailOutData::FinalizeLocalOffsets(JitArenaAllocator *allocator, GlobalBailOutRecordDataTable *globalBailOutRecordDataTable, uint **lastUpdatedRowIndices)
{
    Js::RegSlot localsCount = func->GetJITFunctionBody()->GetLocalsCount();
    Assert(globalBailOutRecordDataTable != nullptr);
    Assert(lastUpdatedRowIndices != nullptr);
    if (*lastUpdatedRowIndices == nullptr)
    {
        // Lazily create the row-index cache, filled with -1 ("no row yet").
        *lastUpdatedRowIndices = JitAnewArrayZ(allocator, uint, localsCount);
        memset(*lastUpdatedRowIndices, -1, sizeof(uint)*localsCount);
    }
    uint32 bailOutRecordId = bailOutRecord->m_bailOutRecordId;
    bailOutRecord->localOffsetsCount = 0;
    for (uint32 i = 0; i < localsCount; i++)
    {
        // if the sym is live
        if (localOffsets[i] != 0)
        {
            bool isFloat = float64Syms->Test(i) != 0;
            bool isInt = losslessInt32Syms->Test(i) != 0;
#ifdef ENABLE_SIMDJS
            // SIMD_JS
            bool isSimd128F4 = simd128F4Syms->Test(i) != 0;
            bool isSimd128I4 = simd128I4Syms->Test(i) != 0;
            bool isSimd128I8 = simd128I8Syms->Test(i) != 0;
            bool isSimd128I16 = simd128I16Syms->Test(i) != 0;
            bool isSimd128U4 = simd128U4Syms->Test(i) != 0;
            bool isSimd128U8 = simd128U8Syms->Test(i) != 0;
            bool isSimd128U16 = simd128U16Syms->Test(i) != 0;
            bool isSimd128B4 = simd128B4Syms->Test(i) != 0;
            bool isSimd128B8 = simd128B8Syms->Test(i) != 0;
            bool isSimd128B16 = simd128B16Syms->Test(i) != 0;
            globalBailOutRecordDataTable->AddOrUpdateRow(allocator, bailOutRecordId, i, isFloat, isInt,
                isSimd128F4, isSimd128I4, isSimd128I8, isSimd128I16, isSimd128U4, isSimd128U8, isSimd128U16,
                isSimd128B4, isSimd128B8, isSimd128B16, localOffsets[i], &((*lastUpdatedRowIndices)[i]));
#else
            globalBailOutRecordDataTable->AddOrUpdateRow(allocator, bailOutRecordId, i, isFloat, isInt,
                false, false, false, false, false, false, false,
                false, false, false, localOffsets[i], &((*lastUpdatedRowIndices)[i]));
#endif
            Assert(globalBailOutRecordDataTable->globalBailOutRecordDataRows[(*lastUpdatedRowIndices)[i]].regSlot == i);
            bailOutRecord->localOffsetsCount++;
        }
    }
}
  1117. void
  1118. FuncBailOutData::Clear(JitArenaAllocator * tempAllocator)
  1119. {
  1120. Js::RegSlot localsCount = func->GetJITFunctionBody()->GetLocalsCount();
  1121. JitAdeleteArray(tempAllocator, localsCount, localOffsets);
  1122. losslessInt32Syms->Delete(tempAllocator);
  1123. float64Syms->Delete(tempAllocator);
  1124. #ifdef ENABLE_SIMDJS
  1125. // SIMD_JS
  1126. simd128F4Syms->Delete(tempAllocator);
  1127. simd128I4Syms->Delete(tempAllocator);
  1128. simd128I8Syms->Delete(tempAllocator);
  1129. simd128I16Syms->Delete(tempAllocator);
  1130. simd128U4Syms->Delete(tempAllocator);
  1131. simd128U8Syms->Delete(tempAllocator);
  1132. simd128U16Syms->Delete(tempAllocator);
  1133. simd128B4Syms->Delete(tempAllocator);
  1134. simd128B8Syms->Delete(tempAllocator);
  1135. simd128B16Syms->Delete(tempAllocator);
  1136. #endif
  1137. }
// LinearScan::EnsureGlobalBailOutRecordTable
// Lazily create (and cache, keyed by inlinee id — 0 for the top function) the
// GlobalBailOutRecordDataTable for the given function, populating its
// function-level metadata on first creation.
GlobalBailOutRecordDataTable *
LinearScan::EnsureGlobalBailOutRecordTable(Func *func)
{
    Assert(globalBailOutRecordTables != nullptr);
    Func *topFunc = func->GetTopFunc();
    bool isTopFunc = (func == topFunc);
    uint32 inlineeID = isTopFunc ? 0 : func->m_inlineeId;
    // Tables live in native code data so they survive past the JIT.
    NativeCodeData::Allocator * allocator = this->func->GetNativeCodeDataAllocator();
    GlobalBailOutRecordDataTable *globalBailOutRecordDataTable = globalBailOutRecordTables[inlineeID];
    if (globalBailOutRecordDataTable == nullptr)
    {
        globalBailOutRecordDataTable = globalBailOutRecordTables[inlineeID] = NativeCodeDataNew(allocator, GlobalBailOutRecordDataTable);
        globalBailOutRecordDataTable->length = globalBailOutRecordDataTable->size = 0;
        globalBailOutRecordDataTable->isInlinedFunction = !isTopFunc;
        globalBailOutRecordDataTable->hasNonSimpleParams = func->GetHasNonSimpleParams();
        globalBailOutRecordDataTable->hasStackArgOpt = func->IsStackArgsEnabled();
        globalBailOutRecordDataTable->isInlinedConstructor = func->IsInlinedConstructor();
        globalBailOutRecordDataTable->isLoopBody = topFunc->IsLoopBody();
        globalBailOutRecordDataTable->returnValueRegSlot = func->returnValueRegSlot;
        globalBailOutRecordDataTable->isScopeObjRestored = false;
        globalBailOutRecordDataTable->firstActualStackOffset = -1;
        globalBailOutRecordDataTable->registerSaveSpace = (Js::Var*)func->GetThreadContextInfo()->GetBailOutRegisterSaveSpaceAddr();
        globalBailOutRecordDataTable->globalBailOutRecordDataRows = nullptr;
        if (func->GetJITFunctionBody()->GetForInLoopDepth() != 0)
        {
#ifdef MD_GROW_LOCALS_AREA_UP
            Assert(func->GetForInEnumeratorArrayOffset() >= 0);
            globalBailOutRecordDataTable->forInEnumeratorArrayRestoreOffset = func->GetForInEnumeratorArrayOffset();
#else
            // Stack offset are negative, includes the PUSH EBP and return address
            globalBailOutRecordDataTable->forInEnumeratorArrayRestoreOffset = func->GetForInEnumeratorArrayOffset() - (2 * MachPtr);
#endif
        }
#ifdef PROFILE_BAILOUT_RECORD_MEMORY
        if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
        {
            topFunc->GetScriptContext()->bailOutOffsetBytes += sizeof(GlobalBailOutRecordDataTable);
            topFunc->GetScriptContext()->bailOutRecordBytes += sizeof(GlobalBailOutRecordDataTable);
        }
#endif
    }
    return globalBailOutRecordDataTable;
}
  1181. void
  1182. LinearScan::FillBailOutRecord(IR::Instr * instr)
  1183. {
  1184. BailOutInfo * bailOutInfo = instr->GetBailOutInfo();
  1185. if (this->func->HasTry())
  1186. {
  1187. RegionType currentRegionType = this->currentRegion->GetType();
  1188. if (currentRegionType == RegionTypeTry || currentRegionType == RegionTypeCatch || currentRegionType == RegionTypeFinally)
  1189. {
  1190. bailOutInfo->bailOutRecord->ehBailoutData = this->currentRegion->ehBailoutData;
  1191. }
  1192. }
  1193. BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed = bailOutInfo->byteCodeUpwardExposedUsed;
  1194. Func * bailOutFunc = bailOutInfo->bailOutFunc;
  1195. uint funcCount = bailOutFunc->inlineDepth + 1;
  1196. FuncBailOutData * funcBailOutData = AnewArray(this->tempAlloc, FuncBailOutData, funcCount);
  1197. uint funcIndex = funcCount - 1;
  1198. funcBailOutData[funcIndex].Initialize(bailOutFunc, this->tempAlloc);
  1199. funcBailOutData[funcIndex].bailOutRecord = bailOutInfo->bailOutRecord;
  1200. bailOutInfo->bailOutRecord->m_bailOutRecordId = m_bailOutRecordCount++;
  1201. bailOutInfo->bailOutRecord->globalBailOutRecordTable = EnsureGlobalBailOutRecordTable(bailOutFunc);
  1202. NativeCodeData::Allocator * allocator = this->func->GetNativeCodeDataAllocator();
  1203. #if DBG_DUMP
  1204. if(PHASE_DUMP(Js::BailOutPhase, this->func))
  1205. {
  1206. Output::Print(_u("-------------------Bailout dump -------------------------\n"));
  1207. instr->Dump();
  1208. }
  1209. #endif
  1210. // Generate chained bailout record for inlined functions
  1211. Func * currentFunc = bailOutFunc->GetParentFunc();
  1212. uint bailOutOffset = bailOutFunc->postCallByteCodeOffset;
  1213. while (currentFunc != nullptr)
  1214. {
  1215. Assert(funcIndex > 0);
  1216. Assert(bailOutOffset != Js::Constants::NoByteCodeOffset);
  1217. BailOutRecord * bailOutRecord = NativeCodeDataNewZ(allocator, BailOutRecord, bailOutOffset, (uint)-1, IR::BailOutInvalid, currentFunc);
  1218. bailOutRecord->m_bailOutRecordId = m_bailOutRecordCount++;
  1219. bailOutRecord->globalBailOutRecordTable = EnsureGlobalBailOutRecordTable(currentFunc);
  1220. #if ENABLE_DEBUG_CONFIG_OPTIONS
  1221. // To indicate this is a subsequent bailout from an inlinee
  1222. bailOutRecord->bailOutOpcode = Js::OpCode::InlineeEnd;
  1223. #endif
  1224. funcBailOutData[funcIndex].bailOutRecord->parent = bailOutRecord;
  1225. funcIndex--;
  1226. funcBailOutData[funcIndex].bailOutRecord = bailOutRecord;
  1227. funcBailOutData[funcIndex].Initialize(currentFunc, this->tempAlloc);
  1228. bailOutOffset = currentFunc->postCallByteCodeOffset;
  1229. currentFunc = currentFunc->GetParentFunc();
  1230. }
  1231. Assert(funcIndex == 0);
  1232. Assert(bailOutOffset == Js::Constants::NoByteCodeOffset);
  1233. FillBailOutState state(this->tempAlloc);
  1234. state.registerSaveCount = 0;
  1235. memset(state.registerSaveSyms, 0, sizeof(state.registerSaveSyms));
  1236. // Fill in the constants
  1237. FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, value, &bailOutInfo->usedCapturedValues.constantValues, constantValuesIterator)
  1238. {
  1239. AssertMsg(bailOutInfo->bailOutRecord->bailOutKind != IR::BailOutForGeneratorYield, "constant prop syms unexpected for bail-in for generator yield");
  1240. StackSym * stackSym = value.Key();
  1241. if(stackSym->HasArgSlotNum())
  1242. {
  1243. continue;
  1244. }
  1245. Assert(stackSym->HasByteCodeRegSlot());
  1246. Js::RegSlot i = stackSym->GetByteCodeRegSlot();
  1247. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1248. uint index = stackSymFunc->inlineDepth;
  1249. Assert(i != Js::Constants::NoRegister);
  1250. Assert(i < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
  1251. Assert(index < funcCount);
  1252. __analysis_assume(index < funcCount);
  1253. Assert(funcBailOutData[index].func == stackSymFunc);
  1254. Assert(!byteCodeUpwardExposedUsed->Test(stackSym->m_id));
  1255. BailoutConstantValue constValue = value.Value();
  1256. Js::Var varValue = constValue.ToVar(this->func);
  1257. state.constantList.Prepend(varValue);
  1258. AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
  1259. // Constant offset are offset by the number of register save slots
  1260. funcBailOutData[index].localOffsets[i] = state.constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
  1261. #if DBG_DUMP
  1262. if(PHASE_DUMP(Js::BailOutPhase, this->func))
  1263. {
  1264. Output::Print(_u("Constant stack sym #%d (argOut:%s): "), i, IsTrueOrFalse(stackSym->HasArgSlotNum()));
  1265. stackSym->Dump();
  1266. Output::Print(_u(" (0x%p (Var) Offset: %d)\n"), varValue, funcBailOutData[index].localOffsets[i]);
  1267. }
  1268. #endif
  1269. constantValuesIterator.RemoveCurrent(this->func->m_alloc);
  1270. }
  1271. NEXT_SLISTBASE_ENTRY_EDITING;
  1272. // Fill in the copy prop syms
  1273. FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSyms, &bailOutInfo->usedCapturedValues.copyPropSyms, copyPropSymsIter)
  1274. {
  1275. AssertMsg(bailOutInfo->bailOutRecord->bailOutKind != IR::BailOutForGeneratorYield, "copy prop syms unexpected for bail-in for generator yield");
  1276. StackSym * stackSym = copyPropSyms.Key();
  1277. if(stackSym->HasArgSlotNum())
  1278. {
  1279. continue;
  1280. }
  1281. Js::RegSlot i = stackSym->GetByteCodeRegSlot();
  1282. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1283. uint index = stackSymFunc->inlineDepth;
  1284. Assert(i != Js::Constants::NoRegister);
  1285. Assert(i < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
  1286. Assert(index < funcCount);
  1287. __analysis_assume(index < funcCount);
  1288. Assert(funcBailOutData[index].func == stackSymFunc);
  1289. AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
  1290. Assert(!byteCodeUpwardExposedUsed->Test(stackSym->m_id));
  1291. StackSym * copyStackSym = copyPropSyms.Value();
  1292. this->FillBailOutOffset(&funcBailOutData[index].localOffsets[i], copyStackSym, &state, instr);
  1293. if (copyStackSym->IsInt32())
  1294. {
  1295. funcBailOutData[index].losslessInt32Syms->Set(i);
  1296. }
  1297. else if (copyStackSym->IsFloat64())
  1298. {
  1299. funcBailOutData[index].float64Syms->Set(i);
  1300. }
  1301. #ifdef ENABLE_SIMDJS
  1302. // SIMD_JS
  1303. else if (copyStackSym->IsSimd128F4())
  1304. {
  1305. funcBailOutData[index].simd128F4Syms->Set(i);
  1306. }
  1307. else if (copyStackSym->IsSimd128I4())
  1308. {
  1309. funcBailOutData[index].simd128I4Syms->Set(i);
  1310. }
  1311. else if (copyStackSym->IsSimd128I8())
  1312. {
  1313. funcBailOutData[index].simd128I8Syms->Set(i);
  1314. }
  1315. else if (copyStackSym->IsSimd128I16())
  1316. {
  1317. funcBailOutData[index].simd128I16Syms->Set(i);
  1318. }
  1319. else if (copyStackSym->IsSimd128U4())
  1320. {
  1321. funcBailOutData[index].simd128U4Syms->Set(i);
  1322. }
  1323. else if (copyStackSym->IsSimd128U8())
  1324. {
  1325. funcBailOutData[index].simd128U8Syms->Set(i);
  1326. }
  1327. else if (copyStackSym->IsSimd128U16())
  1328. {
  1329. funcBailOutData[index].simd128U16Syms->Set(i);
  1330. }
  1331. else if (copyStackSym->IsSimd128B4())
  1332. {
  1333. funcBailOutData[index].simd128B4Syms->Set(i);
  1334. }
  1335. else if (copyStackSym->IsSimd128B8())
  1336. {
  1337. funcBailOutData[index].simd128B8Syms->Set(i);
  1338. }
  1339. else if (copyStackSym->IsSimd128B16())
  1340. {
  1341. funcBailOutData[index].simd128B16Syms->Set(i);
  1342. }
  1343. #endif
  1344. copyPropSymsIter.RemoveCurrent(this->func->m_alloc);
  1345. }
  1346. NEXT_SLISTBASE_ENTRY_EDITING;
  1347. // Fill in the upward exposed syms
  1348. FOREACH_BITSET_IN_SPARSEBV(id, byteCodeUpwardExposedUsed)
  1349. {
  1350. StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
  1351. Assert(stackSym != nullptr);
  1352. Js::RegSlot i = stackSym->GetByteCodeRegSlot();
  1353. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1354. uint index = stackSymFunc->inlineDepth;
  1355. Assert(i != Js::Constants::NoRegister);
  1356. Assert(i < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
  1357. Assert(index < funcCount);
  1358. __analysis_assume(index < funcCount);
  1359. Assert(funcBailOutData[index].func == stackSymFunc);
  1360. AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
  1361. this->FillBailOutOffset(&funcBailOutData[index].localOffsets[i], stackSym, &state, instr);
  1362. if (stackSym->IsInt32())
  1363. {
  1364. funcBailOutData[index].losslessInt32Syms->Set(i);
  1365. }
  1366. else if (stackSym->IsFloat64())
  1367. {
  1368. funcBailOutData[index].float64Syms->Set(i);
  1369. }
  1370. #ifdef ENABLE_SIMDJS
  1371. // SIMD_JS
  1372. else if (stackSym->IsSimd128F4())
  1373. {
  1374. funcBailOutData[index].simd128F4Syms->Set(i);
  1375. }
  1376. else if (stackSym->IsSimd128I4())
  1377. {
  1378. funcBailOutData[index].simd128I4Syms->Set(i);
  1379. }
  1380. else if (stackSym->IsSimd128I8())
  1381. {
  1382. funcBailOutData[index].simd128I8Syms->Set(i);
  1383. }
  1384. else if (stackSym->IsSimd128I16())
  1385. {
  1386. funcBailOutData[index].simd128I16Syms->Set(i);
  1387. }
  1388. else if (stackSym->IsSimd128U4())
  1389. {
  1390. funcBailOutData[index].simd128U4Syms->Set(i);
  1391. }
  1392. else if (stackSym->IsSimd128U8())
  1393. {
  1394. funcBailOutData[index].simd128U8Syms->Set(i);
  1395. }
  1396. else if (stackSym->IsSimd128U16())
  1397. {
  1398. funcBailOutData[index].simd128U16Syms->Set(i);
  1399. }
  1400. else if (stackSym->IsSimd128B4())
  1401. {
  1402. funcBailOutData[index].simd128B4Syms->Set(i);
  1403. }
  1404. else if (stackSym->IsSimd128B8())
  1405. {
  1406. funcBailOutData[index].simd128B8Syms->Set(i);
  1407. }
  1408. else if (stackSym->IsSimd128B16())
  1409. {
  1410. funcBailOutData[index].simd128B16Syms->Set(i);
  1411. }
  1412. #endif
  1413. }
  1414. NEXT_BITSET_IN_SPARSEBV;
  1415. if (bailOutInfo->usedCapturedValues.argObjSyms)
  1416. {
  1417. FOREACH_BITSET_IN_SPARSEBV(id, bailOutInfo->usedCapturedValues.argObjSyms)
  1418. {
  1419. StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
  1420. Assert(stackSym != nullptr);
  1421. Js::RegSlot i = stackSym->GetByteCodeRegSlot();
  1422. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1423. uint index = stackSymFunc->inlineDepth;
  1424. Assert(i != Js::Constants::NoRegister);
  1425. Assert(i < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
  1426. Assert(index < funcCount);
  1427. __analysis_assume(index < funcCount);
  1428. Assert(funcBailOutData[index].func == stackSymFunc);
  1429. AssertMsg(funcBailOutData[index].localOffsets[i] == 0, "Can't have two active lifetime for the same byte code register");
  1430. funcBailOutData[index].localOffsets[i] = BailOutRecord::GetArgumentsObjectOffset();
  1431. }
  1432. NEXT_BITSET_IN_SPARSEBV;
  1433. }
  1434. // In the debug mode, fill in the rest of non temp locals as well in the records so that the restore stub will just get it automatically.
  1435. if (this->func->IsJitInDebugMode())
  1436. {
  1437. // Need to allow filling the formal args slots.
  1438. if (func->GetJITFunctionBody()->HasPropIdToFormalsMap())
  1439. {
  1440. Assert(func->GetJITFunctionBody()->GetInParamsCount() > 0);
  1441. uint32 endIndex = min(func->GetJITFunctionBody()->GetFirstNonTempLocalIndex() + func->GetJITFunctionBody()->GetInParamsCount() - 1, func->GetJITFunctionBody()->GetEndNonTempLocalIndex());
  1442. for (uint32 index = func->GetJITFunctionBody()->GetFirstNonTempLocalIndex(); index < endIndex; index++)
  1443. {
  1444. StackSym * stackSym = this->func->m_symTable->FindStackSym(index);
  1445. if (stackSym != nullptr)
  1446. {
  1447. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1448. Js::RegSlot regSlotId = stackSym->GetByteCodeRegSlot();
  1449. if (func->IsNonTempLocalVar(regSlotId))
  1450. {
  1451. if (!func->GetJITFunctionBody()->IsRegSlotFormal(regSlotId - func->GetJITFunctionBody()->GetFirstNonTempLocalIndex()))
  1452. {
  1453. continue;
  1454. }
  1455. uint dataIndex = stackSymFunc->inlineDepth;
  1456. Assert(dataIndex == 0); // There is no inlining while in debug mode
  1457. // Filling in which are not filled already.
  1458. __analysis_assume(dataIndex == 0);
  1459. if (funcBailOutData[dataIndex].localOffsets[regSlotId] == 0)
  1460. {
  1461. int32 offset = GetStackOffset(regSlotId);
  1462. #ifdef MD_GROW_LOCALS_AREA_UP
  1463. Assert(offset >= 0);
  1464. #else
  1465. Assert(offset < 0);
  1466. #endif
  1467. funcBailOutData[dataIndex].localOffsets[regSlotId] = this->func->AdjustOffsetValue(offset);
  1468. // We don't support typespec for debug, rework on the bellow assert once we start support them.
  1469. Assert(!stackSym->IsInt32() && !stackSym->IsFloat64() && !stackSym->IsSimd128());
  1470. }
  1471. }
  1472. }
  1473. }
  1474. }
  1475. }
  1476. // fill in the out params
  1477. uint startCallCount = bailOutInfo->startCallCount;
  1478. if (bailOutInfo->totalOutParamCount != 0)
  1479. {
  1480. Assert(startCallCount != 0);
  1481. uint argOutSlot = 0;
  1482. uint * startCallOutParamCounts = (uint*)NativeCodeDataNewArrayNoFixup(allocator, UIntType<DataDesc_ArgOutOffsetInfo_StartCallOutParamCounts>, startCallCount);
  1483. #ifdef _M_IX86
  1484. uint * startCallArgRestoreAdjustCounts = (uint*)NativeCodeDataNewArrayNoFixup(allocator, UIntType<DataDesc_ArgOutOffsetInfo_StartCallOutParamCounts>, startCallCount);
  1485. #endif
  1486. NativeCodeData::AllocatorNoFixup<BVFixed>* allocatorT = (NativeCodeData::AllocatorNoFixup<BVFixed>*)allocator;
  1487. BVFixed * argOutFloat64Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1488. BVFixed * argOutLosslessInt32Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1489. // SIMD_JS
  1490. BVFixed * argOutSimd128F4Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1491. BVFixed * argOutSimd128I4Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1492. BVFixed * argOutSimd128I8Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1493. BVFixed * argOutSimd128I16Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1494. BVFixed * argOutSimd128U4Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1495. BVFixed * argOutSimd128U8Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1496. BVFixed * argOutSimd128U16Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1497. BVFixed * argOutSimd128B4Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1498. BVFixed * argOutSimd128B8Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1499. BVFixed * argOutSimd128B16Syms = BVFixed::New(bailOutInfo->totalOutParamCount, allocatorT);
  1500. int* outParamOffsets = bailOutInfo->outParamOffsets = (int*)NativeCodeDataNewArrayZNoFixup(allocator, IntType<DataDesc_BailoutInfo_CotalOutParamCount>, bailOutInfo->totalOutParamCount);
  1501. #ifdef _M_IX86
  1502. int currentStackOffset = 0;
  1503. bailOutInfo->outParamFrameAdjustArgSlot = JitAnew(this->func->m_alloc, BVSparse<JitArenaAllocator>, this->func->m_alloc);
  1504. #endif
  1505. if (this->func->HasInlinee())
  1506. {
  1507. bailOutInfo->outParamInlinedArgSlot = JitAnew(this->func->m_alloc, BVSparse<JitArenaAllocator>, this->func->m_alloc);
  1508. }
  1509. #if DBG
  1510. uint lastFuncIndex = 0;
  1511. #endif
  1512. for (uint i = 0; i < startCallCount; i++)
  1513. {
  1514. uint outParamStart = argOutSlot; // Start of the out param offset for the current start call
  1515. // Number of out param for the current start call
  1516. uint outParamCount = bailOutInfo->GetStartCallOutParamCount(i);
  1517. startCallOutParamCounts[i] = outParamCount;
  1518. #ifdef _M_IX86
  1519. startCallArgRestoreAdjustCounts[i] = bailOutInfo->startCallInfo[i].argRestoreAdjustCount;
  1520. // Only x86 has a progression of pushes of out args, with stack alignment.
  1521. bool fDoStackAdjust = false;
  1522. if (!bailOutInfo->inlinedStartCall->Test(i))
  1523. {
  1524. // Only do the stack adjustment if the StartCall has not been moved down past the bailout.
  1525. fDoStackAdjust = bailOutInfo->NeedsStartCallAdjust(i, instr);
  1526. if (fDoStackAdjust)
  1527. {
  1528. currentStackOffset -= Math::Align<int>(outParamCount * MachPtr, MachStackAlignment);
  1529. }
  1530. }
  1531. #endif
  1532. Func * currentStartCallFunc = bailOutInfo->startCallFunc[i];
  1533. #if DBG
  1534. Assert(lastFuncIndex <= currentStartCallFunc->inlineDepth);
  1535. lastFuncIndex = currentStartCallFunc->inlineDepth;
  1536. #endif
  1537. FuncBailOutData& currentFuncBailOutData = funcBailOutData[currentStartCallFunc->inlineDepth];
  1538. BailOutRecord * currentBailOutRecord = currentFuncBailOutData.bailOutRecord;
  1539. if (currentBailOutRecord->argOutOffsetInfo == nullptr)
  1540. {
  1541. currentBailOutRecord->argOutOffsetInfo = NativeCodeDataNew(allocator, BailOutRecord::ArgOutOffsetInfo);
  1542. currentBailOutRecord->argOutOffsetInfo->argOutFloat64Syms = nullptr;
  1543. currentBailOutRecord->argOutOffsetInfo->argOutLosslessInt32Syms = nullptr;
  1544. #ifdef ENABLE_SIMDJS
  1545. // SIMD_JS
  1546. currentBailOutRecord->argOutOffsetInfo->argOutSimd128F4Syms = nullptr;
  1547. currentBailOutRecord->argOutOffsetInfo->argOutSimd128I4Syms = nullptr;
  1548. currentBailOutRecord->argOutOffsetInfo->argOutSimd128I8Syms = nullptr;
  1549. currentBailOutRecord->argOutOffsetInfo->argOutSimd128I16Syms = nullptr;
  1550. currentBailOutRecord->argOutOffsetInfo->argOutSimd128U4Syms = nullptr;
  1551. currentBailOutRecord->argOutOffsetInfo->argOutSimd128U8Syms = nullptr;
  1552. currentBailOutRecord->argOutOffsetInfo->argOutSimd128U16Syms = nullptr;
  1553. currentBailOutRecord->argOutOffsetInfo->argOutSimd128B4Syms = nullptr;
  1554. currentBailOutRecord->argOutOffsetInfo->argOutSimd128B8Syms = nullptr;
  1555. currentBailOutRecord->argOutOffsetInfo->argOutSimd128B16Syms = nullptr;
  1556. #endif
  1557. currentBailOutRecord->argOutOffsetInfo->argOutSymStart = 0;
  1558. currentBailOutRecord->argOutOffsetInfo->outParamOffsets = nullptr;
  1559. currentBailOutRecord->argOutOffsetInfo->startCallOutParamCounts = nullptr;
  1560. #ifdef PROFILE_BAILOUT_RECORD_MEMORY
  1561. if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
  1562. {
  1563. this->func->GetScriptContext()->bailOutRecordBytes += sizeof(BailOutRecord::ArgOutOffsetInfo);
  1564. }
  1565. #endif
  1566. }
  1567. currentBailOutRecord->argOutOffsetInfo->startCallCount++;
  1568. if (currentBailOutRecord->argOutOffsetInfo->outParamOffsets == nullptr)
  1569. {
  1570. Assert(currentBailOutRecord->argOutOffsetInfo->startCallOutParamCounts == nullptr);
  1571. currentBailOutRecord->argOutOffsetInfo->startCallIndex = i;
  1572. currentBailOutRecord->argOutOffsetInfo->startCallOutParamCounts = &startCallOutParamCounts[i];
  1573. #ifdef _M_IX86
  1574. currentBailOutRecord->startCallArgRestoreAdjustCounts = &startCallArgRestoreAdjustCounts[i];
  1575. #endif
  1576. currentBailOutRecord->argOutOffsetInfo->outParamOffsets = &outParamOffsets[outParamStart];
  1577. currentBailOutRecord->argOutOffsetInfo->argOutSymStart = outParamStart;
  1578. currentBailOutRecord->argOutOffsetInfo->argOutFloat64Syms = argOutFloat64Syms;
  1579. currentBailOutRecord->argOutOffsetInfo->argOutLosslessInt32Syms = argOutLosslessInt32Syms;
  1580. #ifdef ENABLE_SIMDJS
  1581. // SIMD_JS
  1582. currentBailOutRecord->argOutOffsetInfo->argOutSimd128F4Syms = argOutSimd128F4Syms;
  1583. currentBailOutRecord->argOutOffsetInfo->argOutSimd128I4Syms = argOutSimd128I4Syms ;
  1584. currentBailOutRecord->argOutOffsetInfo->argOutSimd128I8Syms = argOutSimd128I8Syms ;
  1585. currentBailOutRecord->argOutOffsetInfo->argOutSimd128I16Syms = argOutSimd128I16Syms ;
  1586. currentBailOutRecord->argOutOffsetInfo->argOutSimd128U4Syms = argOutSimd128U4Syms ;
  1587. currentBailOutRecord->argOutOffsetInfo->argOutSimd128U8Syms = argOutSimd128U8Syms ;
  1588. currentBailOutRecord->argOutOffsetInfo->argOutSimd128U16Syms = argOutSimd128U16Syms ;
  1589. currentBailOutRecord->argOutOffsetInfo->argOutSimd128B4Syms = argOutSimd128U4Syms;
  1590. currentBailOutRecord->argOutOffsetInfo->argOutSimd128B8Syms = argOutSimd128U8Syms;
  1591. currentBailOutRecord->argOutOffsetInfo->argOutSimd128B16Syms = argOutSimd128U16Syms;
  1592. #endif
  1593. }
  1594. #if DBG_DUMP
  1595. if (PHASE_DUMP(Js::BailOutPhase, this->func))
  1596. {
  1597. Output::Print(_u("Bailout function: %s [#%d] \n"), currentStartCallFunc->GetJITFunctionBody()->GetDisplayName(),
  1598. currentStartCallFunc->GetJITFunctionBody()->GetFunctionNumber());
  1599. }
  1600. #endif
  1601. for (uint j = 0; j < outParamCount; j++, argOutSlot++)
  1602. {
  1603. StackSym * sym = bailOutInfo->argOutSyms[argOutSlot];
  1604. if (sym == nullptr)
  1605. {
  1606. // This can happen when instr with bailout occurs before all ArgOuts for current call instr are processed.
  1607. continue;
  1608. }
  1609. Assert(sym->GetArgSlotNum() > 0 && sym->GetArgSlotNum() <= outParamCount);
  1610. uint argSlot = sym->GetArgSlotNum() - 1;
  1611. uint outParamOffsetIndex = outParamStart + argSlot;
  1612. if (!sym->m_isBailOutReferenced && !sym->IsArgSlotSym())
  1613. {
  1614. FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, constantValue, &bailOutInfo->usedCapturedValues.constantValues, iterator)
  1615. {
  1616. if (constantValue.Key()->m_id == sym->m_id)
  1617. {
  1618. Js::Var varValue = constantValue.Value().ToVar(func);
  1619. state.constantList.Prepend(varValue);
  1620. outParamOffsets[outParamOffsetIndex] = state.constantList.Count() + GetBailOutRegisterSaveSlotCount() + GetBailOutReserveSlotCount();
  1621. #if DBG_DUMP
  1622. if (PHASE_DUMP(Js::BailOutPhase, this->func))
  1623. {
  1624. Output::Print(_u("OutParam #%d: "), argSlot);
  1625. sym->Dump();
  1626. Output::Print(_u(" (0x%p (Var)))\n"), varValue);
  1627. }
  1628. #endif
  1629. iterator.RemoveCurrent(func->m_alloc);
  1630. break;
  1631. }
  1632. }
  1633. NEXT_SLISTBASE_ENTRY_EDITING;
  1634. if (outParamOffsets[outParamOffsetIndex])
  1635. {
  1636. continue;
  1637. }
  1638. FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSym, &bailOutInfo->usedCapturedValues.copyPropSyms, iter)
  1639. {
  1640. if (copyPropSym.Key()->m_id == sym->m_id)
  1641. {
  1642. StackSym * copyStackSym = copyPropSym.Value();
  1643. BVSparse<JitArenaAllocator>* argObjSyms = bailOutInfo->usedCapturedValues.argObjSyms;
  1644. if (argObjSyms && argObjSyms->Test(copyStackSym->m_id))
  1645. {
  1646. outParamOffsets[outParamOffsetIndex] = BailOutRecord::GetArgumentsObjectOffset();
  1647. }
  1648. else
  1649. {
  1650. this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], copyStackSym, &state, instr);
  1651. if (copyStackSym->IsInt32())
  1652. {
  1653. argOutLosslessInt32Syms->Set(outParamOffsetIndex);
  1654. }
  1655. else if (copyStackSym->IsFloat64())
  1656. {
  1657. argOutFloat64Syms->Set(outParamOffsetIndex);
  1658. }
  1659. // SIMD_JS
  1660. else if (copyStackSym->IsSimd128F4())
  1661. {
  1662. argOutSimd128F4Syms->Set(outParamOffsetIndex);
  1663. }
  1664. else if (copyStackSym->IsSimd128I4())
  1665. {
  1666. argOutSimd128I4Syms->Set(outParamOffsetIndex);
  1667. }
  1668. else if (copyStackSym->IsSimd128I8())
  1669. {
  1670. argOutSimd128I8Syms->Set(outParamOffsetIndex);
  1671. }
  1672. else if (copyStackSym->IsSimd128I16())
  1673. {
  1674. argOutSimd128I16Syms->Set(outParamOffsetIndex);
  1675. }
  1676. else if (copyStackSym->IsSimd128U4())
  1677. {
  1678. argOutSimd128U4Syms->Set(outParamOffsetIndex);
  1679. }
  1680. else if (copyStackSym->IsSimd128U8())
  1681. {
  1682. argOutSimd128U8Syms->Set(outParamOffsetIndex);
  1683. }
  1684. else if (copyStackSym->IsSimd128U16())
  1685. {
  1686. argOutSimd128U16Syms->Set(outParamOffsetIndex);
  1687. }
  1688. else if (copyStackSym->IsSimd128B4())
  1689. {
  1690. argOutSimd128B4Syms->Set(outParamOffsetIndex);
  1691. }
  1692. else if (copyStackSym->IsSimd128B8())
  1693. {
  1694. argOutSimd128B8Syms->Set(outParamOffsetIndex);
  1695. }
  1696. else if (copyStackSym->IsSimd128B16())
  1697. {
  1698. argOutSimd128B16Syms->Set(outParamOffsetIndex);
  1699. }
  1700. }
  1701. #if DBG_DUMP
  1702. if (PHASE_DUMP(Js::BailOutPhase, this->func))
  1703. {
  1704. Output::Print(_u("OutParam #%d: "), argSlot);
  1705. sym->Dump();
  1706. Output::Print(_u(" Copy Prop sym:"));
  1707. copyStackSym->Dump();
  1708. Output::Print(_u("\n"));
  1709. }
  1710. #endif
  1711. iter.RemoveCurrent(func->m_alloc);
  1712. break;
  1713. }
  1714. }
  1715. NEXT_SLISTBASE_ENTRY_EDITING;
  1716. Assert(outParamOffsets[outParamOffsetIndex] != 0);
  1717. }
  1718. else
  1719. {
  1720. if (sym->IsArgSlotSym())
  1721. {
  1722. if (sym->m_isSingleDef)
  1723. {
  1724. Assert(sym->m_instrDef->m_func == currentStartCallFunc);
  1725. IR::Instr * instrDef = sym->m_instrDef;
  1726. Assert(LowererMD::IsAssign(instrDef));
  1727. if (instrDef->GetNumber() < instr->GetNumber())
  1728. {
  1729. // The ArgOut instr is above current bailout instr.
  1730. AssertMsg(sym->IsVar(), "Arg out slot can only be var.");
  1731. if (sym->m_isInlinedArgSlot)
  1732. {
  1733. Assert(this->func->HasInlinee());
  1734. #ifdef MD_GROW_LOCALS_AREA_UP
  1735. outParamOffsets[outParamOffsetIndex] = -((int)sym->m_offset + BailOutInfo::StackSymBias);
  1736. #else
  1737. outParamOffsets[outParamOffsetIndex] = sym->m_offset;
  1738. #endif
  1739. bailOutInfo->outParamInlinedArgSlot->Set(outParamOffsetIndex);
  1740. }
  1741. else if (sym->m_isOrphanedArg)
  1742. {
  1743. #ifdef MD_GROW_LOCALS_AREA_UP
  1744. outParamOffsets[outParamOffsetIndex] = -((int)sym->m_offset + BailOutInfo::StackSymBias);
  1745. #else
  1746. // Stack offset are negative, includes the PUSH EBP and return address
  1747. outParamOffsets[outParamOffsetIndex] = sym->m_offset - (2 * MachPtr);
  1748. #endif
  1749. }
  1750. #ifdef _M_IX86
  1751. else if (fDoStackAdjust)
  1752. {
  1753. // If we've got args on the stack, then we must have seen (and adjusted for) the StartCall.
  1754. // The values is already on the stack
  1755. // On AMD64/ARM, ArgOut should have been moved next to the call, and shouldn't have bailout between them
  1756. // Except for inlined arg outs
  1757. outParamOffsets[outParamOffsetIndex] = currentStackOffset + argSlot * MachPtr;
  1758. bailOutInfo->outParamFrameAdjustArgSlot->Set(outParamOffsetIndex);
  1759. }
  1760. #endif
  1761. else
  1762. {
  1763. this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], sym, &state, instr);
  1764. }
  1765. }
  1766. else
  1767. {
  1768. // The ArgOut instruction might have moved down right next to the call,
  1769. // because of a register calling convention, cloning, etc. This loop walks the chain
  1770. // of assignments to try to find the original location of the assignment where
  1771. // the value is available.
  1772. while (!sym->IsConst())
  1773. {
  1774. // the value is in the register
  1775. IR::RegOpnd * regOpnd = instrDef->GetSrc1()->AsRegOpnd();
  1776. sym = regOpnd->m_sym;
  1777. if (sym->scratch.linearScan.lifetime->start < instr->GetNumber())
  1778. {
  1779. break;
  1780. }
  1781. if (sym->m_isEncodedConstant)
  1782. {
  1783. break;
  1784. }
  1785. // For out parameter we might need to follow multiple assignments
  1786. Assert(sym->m_isSingleDef);
  1787. instrDef = sym->m_instrDef;
  1788. Assert(LowererMD::IsAssign(instrDef));
  1789. }
  1790. if (bailOutInfo->usedCapturedValues.argObjSyms && bailOutInfo->usedCapturedValues.argObjSyms->Test(sym->m_id))
  1791. {
  1792. //foo.apply(this,arguments) case and we bailout when the apply is overridden. We need to restore the arguments object.
  1793. outParamOffsets[outParamOffsetIndex] = BailOutRecord::GetArgumentsObjectOffset();
  1794. }
  1795. else
  1796. {
  1797. this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], sym, &state, instr);
  1798. }
  1799. }
  1800. }
  1801. }
  1802. else
  1803. {
  1804. this->FillBailOutOffset(&outParamOffsets[outParamOffsetIndex], sym, &state, instr);
  1805. }
  1806. if (sym->IsFloat64())
  1807. {
  1808. argOutFloat64Syms->Set(outParamOffsetIndex);
  1809. }
  1810. else if (sym->IsInt32())
  1811. {
  1812. argOutLosslessInt32Syms->Set(outParamOffsetIndex);
  1813. }
  1814. // SIMD_JS
  1815. else if (sym->IsSimd128F4())
  1816. {
  1817. argOutSimd128F4Syms->Set(outParamOffsetIndex);
  1818. }
  1819. else if (sym->IsSimd128I4())
  1820. {
  1821. argOutSimd128I4Syms->Set(outParamOffsetIndex);
  1822. }
  1823. else if (sym->IsSimd128I8())
  1824. {
  1825. argOutSimd128I8Syms->Set(outParamOffsetIndex);
  1826. }
  1827. else if (sym->IsSimd128I16())
  1828. {
  1829. argOutSimd128I16Syms->Set(outParamOffsetIndex);
  1830. }
  1831. else if (sym->IsSimd128U4())
  1832. {
  1833. argOutSimd128U4Syms->Set(outParamOffsetIndex);
  1834. }
  1835. else if (sym->IsSimd128U8())
  1836. {
  1837. argOutSimd128U8Syms->Set(outParamOffsetIndex);
  1838. }
  1839. else if (sym->IsSimd128U16())
  1840. {
  1841. argOutSimd128U16Syms->Set(outParamOffsetIndex);
  1842. }
  1843. else if (sym->IsSimd128B4())
  1844. {
  1845. argOutSimd128B4Syms->Set(outParamOffsetIndex);
  1846. }
  1847. else if (sym->IsSimd128B8())
  1848. {
  1849. argOutSimd128B8Syms->Set(outParamOffsetIndex);
  1850. }
  1851. else if (sym->IsSimd128B16())
  1852. {
  1853. argOutSimd128B16Syms->Set(outParamOffsetIndex);
  1854. }
  1855. #if DBG_DUMP
  1856. if (PHASE_DUMP(Js::BailOutPhase, this->func))
  1857. {
  1858. Output::Print(_u("OutParam #%d: "), argSlot);
  1859. sym->Dump();
  1860. Output::Print(_u("\n"));
  1861. }
  1862. #endif
  1863. }
  1864. }
  1865. }
  1866. }
  1867. else
  1868. {
  1869. Assert(bailOutInfo->argOutSyms == nullptr);
  1870. Assert(bailOutInfo->startCallCount == 0);
  1871. }
  1872. if (this->currentBlock->inlineeStack.Count() > 0)
  1873. {
  1874. this->SpillInlineeArgs(instr);
  1875. }
  1876. else
  1877. {
  1878. // There is a chance that the instruction was hoisting from an inlinee func
  1879. // but if there are no inlinee frames - make sure the instr belongs to the outer func
  1880. // to ensure encoder does not encode an inline frame here - which does not really exist
  1881. instr->m_func = this->func;
  1882. }
  1883. linearScanMD.GenerateBailOut(instr, state.registerSaveSyms, _countof(state.registerSaveSyms));
  1884. // generate the constant table
  1885. Js::Var * constants = NativeCodeDataNewArrayNoFixup(allocator, Js::Var, state.constantList.Count());
  1886. uint constantCount = state.constantList.Count();
  1887. while (!state.constantList.Empty())
  1888. {
  1889. Js::Var value = state.constantList.Head();
  1890. state.constantList.RemoveHead();
  1891. constants[state.constantList.Count()] = value;
  1892. }
  1893. // Generate the stack literal bail out info
  1894. FillStackLiteralBailOutRecord(instr, bailOutInfo, funcBailOutData, funcCount);
  1895. for (uint i = 0; i < funcCount; i++)
  1896. {
  1897. funcBailOutData[i].bailOutRecord->constants = constants;
  1898. #if DBG
  1899. funcBailOutData[i].bailOutRecord->inlineDepth = funcBailOutData[i].func->inlineDepth;
  1900. funcBailOutData[i].bailOutRecord->constantCount = constantCount;
  1901. #endif
  1902. uint32 tableIndex = funcBailOutData[i].func->IsTopFunc() ? 0 : funcBailOutData[i].func->m_inlineeId;
  1903. funcBailOutData[i].FinalizeLocalOffsets(tempAlloc, this->globalBailOutRecordTables[tableIndex], &(this->lastUpdatedRowIndices[tableIndex]));
  1904. #if DBG_DUMP
  1905. if(PHASE_DUMP(Js::BailOutPhase, this->func))
  1906. {
  1907. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  1908. Output::Print(_u("Bailout function: %s [%s]\n"), funcBailOutData[i].func->GetJITFunctionBody()->GetDisplayName(), funcBailOutData[i].func->GetDebugNumberSet(debugStringBuffer), i);
  1909. funcBailOutData[i].bailOutRecord->Dump();
  1910. }
  1911. #endif
  1912. funcBailOutData[i].Clear(this->tempAlloc);
  1913. #ifdef PROFILE_BAILOUT_RECORD_MEMORY
  1914. if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
  1915. {
  1916. this->func->GetScriptContext()->bailOutRecordBytes += sizeof(BailOutRecord);
  1917. }
  1918. #endif
  1919. }
  1920. JitAdeleteArray(this->tempAlloc, funcCount, funcBailOutData);
  1921. }
  1922. template <typename Fn>
  1923. void
  1924. LinearScan::ForEachStackLiteralBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo, FuncBailOutData * funcBailOutData, uint funcCount, Fn fn)
  1925. {
  1926. for (uint i = 0; i < bailOutInfo->stackLiteralBailOutInfoCount; i++)
  1927. {
  1928. BailOutInfo::StackLiteralBailOutInfo& stackLiteralBailOutInfo = bailOutInfo->stackLiteralBailOutInfo[i];
  1929. StackSym * stackSym = stackLiteralBailOutInfo.stackSym;
  1930. Assert(stackSym->scratch.linearScan.lifetime->start < instr->GetNumber());
  1931. Assert(stackSym->scratch.linearScan.lifetime->end >= instr->GetNumber());
  1932. Js::RegSlot regSlot = stackSym->GetByteCodeRegSlot();
  1933. Func * stackSymFunc = stackSym->GetByteCodeFunc();
  1934. uint index = stackSymFunc->inlineDepth;
  1935. Assert(regSlot != Js::Constants::NoRegister);
  1936. Assert(regSlot < stackSymFunc->GetJITFunctionBody()->GetLocalsCount());
  1937. Assert(index < funcCount);
  1938. Assert(funcBailOutData[index].func == stackSymFunc);
  1939. Assert(funcBailOutData[index].localOffsets[regSlot] != 0);
  1940. fn(index, stackLiteralBailOutInfo, regSlot);
  1941. }
  1942. }
// LinearScan::FillStackLiteralBailOutRecord
// Populate each function's BailOutRecord with the stack-literal entries
// (bytecode reg slot + initialized-field count) needed on bailout.
// Two passes over the same bailout info: one to size the per-function arrays,
// one to fill them.
void
LinearScan::FillStackLiteralBailOutRecord(IR::Instr * instr, BailOutInfo * bailOutInfo, FuncBailOutData * funcBailOutData, uint funcCount)
{
    if (bailOutInfo->stackLiteralBailOutInfoCount)
    {
        // Count the data
        ForEachStackLiteralBailOutInfo(instr, bailOutInfo, funcBailOutData, funcCount,
            [=](uint funcIndex, BailOutInfo::StackLiteralBailOutInfo& stackLiteralBailOutInfo, Js::RegSlot regSlot)
        {
            funcBailOutData[funcIndex].bailOutRecord->stackLiteralBailOutRecordCount++;
        });

        // Allocate the data
        NativeCodeData::Allocator * allocator = this->func->GetNativeCodeDataAllocator();
        for (uint i = 0; i < funcCount; i++)
        {
            uint stackLiteralBailOutRecordCount = funcBailOutData[i].bailOutRecord->stackLiteralBailOutRecordCount;
            if (stackLiteralBailOutRecordCount)
            {
                funcBailOutData[i].bailOutRecord->stackLiteralBailOutRecord =
                    NativeCodeDataNewArrayNoFixup(allocator, BailOutRecord::StackLiteralBailOutRecord, stackLiteralBailOutRecordCount);
                // reset the count so we can track how much we have filled below
                funcBailOutData[i].bailOutRecord->stackLiteralBailOutRecordCount = 0;
            }
        }

        // Fill out the data. The count doubles as the fill cursor, so it ends up
        // back at its pass-one value once every record has been written.
        ForEachStackLiteralBailOutInfo(instr, bailOutInfo, funcBailOutData, funcCount,
            [=](uint funcIndex, BailOutInfo::StackLiteralBailOutInfo& stackLiteralBailOutInfo, Js::RegSlot regSlot)
        {
            uint& recordIndex = funcBailOutData[funcIndex].bailOutRecord->stackLiteralBailOutRecordCount;
            BailOutRecord::StackLiteralBailOutRecord& stackLiteralBailOutRecord =
                funcBailOutData[funcIndex].bailOutRecord->stackLiteralBailOutRecord[recordIndex++];
            stackLiteralBailOutRecord.regSlot = regSlot;
            stackLiteralBailOutRecord.initFldCount = stackLiteralBailOutInfo.initFldCount;
        });
    }
}
  1979. void
  1980. LinearScan::PrepareForUse(Lifetime * lifetime)
  1981. {
  1982. if (lifetime->isOpHelperSpilled)
  1983. {
  1984. // using a value in a helper that has been spilled in the helper block.
  1985. // Just spill it for real
  1986. // We must be in a helper block and the lifetime must
  1987. // start before the helper block
  1988. Assert(this->IsInHelperBlock());
  1989. Assert(lifetime->start < this->HelperBlockStartInstrNumber());
  1990. IR::Instr *insertionInstr = this->currentOpHelperBlock->opHelperLabel;
  1991. this->RemoveOpHelperSpilled(lifetime);
  1992. this->SpillLiveRange(lifetime, insertionInstr);
  1993. }
  1994. }
// LinearScan::RecordUse
// Account for a use of 'lifetime' at 'instr': either remember the use so it can
// be reloaded if the lifetime is later spilled, or credit back its spill-cost
// contribution when no reload would be needed. The cost depends on loop nesting
// and on whether the use sits in a helper block or comes from bailout info.
void
LinearScan::RecordUse(Lifetime * lifetime, IR::Instr * instr, IR::RegOpnd * regOpnd, bool isFromBailout)
{
    uint32 useCountCost = LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr || isFromBailout));

    // We only spill at the use for constants (i.e. reload) or for functions with try blocks. We don't
    // have real accurate flow info for the latter.
    if ((regOpnd && regOpnd->m_sym->IsConst())
        || (
            (this->func->HasTry() && !this->func->DoOptimizeTry()) &&
            this->IsInLoop() &&
            lifetime->lastUseLabel != this->lastLabel &&
            this->liveOnBackEdgeSyms->Test(lifetime->sym->m_id) &&
            !(lifetime->previousDefBlockNumber == currentBlockNumber && !lifetime->defList.Empty())
        ))
    {
        // Keep track of all the uses of this lifetime in case we decide to spill it.
        // Note that we won't need to insert reloads if the uses are not in a loop,
        // unless it is a const. We always reload a const instead of spilling to the stack.
        //
        // We also don't need to insert reloads if the previous use was in the same basic block (the first use in the block
        // would have done the reload), or the previous def is in the same basic block and the value is still live. Furthermore,
        // if the previous def is in the same basic block, the value is still live, and there's another def after this use in
        // the same basic block, the previous def may not do a spill store, so we must not reload the value from the stack.
        lifetime->useList.Prepend(instr);
        lifetime->lastUseLabel = this->lastLabel;
        lifetime->AddToUseCountAdjust(useCountCost, this->curLoop, this->func);
    }
    else
    {
        if (!isFromBailout)
        {
            // Since we won't reload this use if the lifetime gets spilled, adjust the spill cost to reflect this.
            lifetime->SubFromUseCount(useCountCost, this->curLoop);
        }
    }

    if (this->IsInLoop())
    {
        this->RecordLoopUse(lifetime, lifetime->reg);
    }
}
  2035. void LinearScan::RecordLoopUse(Lifetime *lifetime, RegNum reg)
  2036. {
  2037. if (!this->IsInLoop())
  2038. {
  2039. return;
  2040. }
  2041. if (this->func->HasTry() && !this->func->DoOptimizeTry())
  2042. {
  2043. return;
  2044. }
  2045. // Record on each loop which register live into the loop ended up being used.
  2046. // We are trying to avoid the need for compensation at the bottom of the loop if
  2047. // the reg ends up being spilled before it is actually used.
  2048. Loop *curLoop = this->curLoop;
  2049. SymID symId = (SymID)-1;
  2050. if (lifetime)
  2051. {
  2052. symId = lifetime->sym->m_id;
  2053. }
  2054. while (curLoop)
  2055. {
  2056. // Note that if the lifetime is spilled and reallocated to the same register,
  2057. // will mark it as used when we shouldn't. However, it is hard at this point to handle
  2058. // the case were a flow edge from the previous allocation merges in with the new allocation.
  2059. // No compensation is inserted to let us know with previous lifetime needs reloading at the bottom of the loop...
  2060. if (lifetime && curLoop->regAlloc.loopTopRegContent[reg] == lifetime)
  2061. {
  2062. curLoop->regAlloc.symRegUseBv->Set(symId);
  2063. }
  2064. curLoop->regAlloc.regUseBv.Set(reg);
  2065. curLoop = curLoop->parent;
  2066. }
  2067. }
// LinearScan::RecordDef
// Account for a definition of 'lifetime' by 'instr' (whose dst must be a
// RegOpnd): update per-loop def/use tracking, perform write-through stores
// where required, and otherwise remember the def so that a later spill can
// insert its store after the last def in the basic block.
void
LinearScan::RecordDef(Lifetime *const lifetime, IR::Instr *const instr, const uint32 useCountCost)
{
    Assert(lifetime);
    Assert(instr);
    Assert(instr->GetDst());

    IR::RegOpnd * regOpnd = instr->GetDst()->AsRegOpnd();
    Assert(regOpnd);

    StackSym *const sym = regOpnd->m_sym;

    if (this->IsInLoop())
    {
        // Mark the sym as defined (and its register as used) in every enclosing loop.
        Loop *curLoop = this->curLoop;
        while (curLoop)
        {
            curLoop->regAlloc.defdInLoopBv->Set(lifetime->sym->m_id);
            curLoop->regAlloc.regUseBv.Set(lifetime->reg);
            curLoop = curLoop->parent;
        }
    }

    if (lifetime->isSpilled)
    {
        // Nothing to track for an already-spilled lifetime.
        return;
    }

    if (this->NeedsWriteThrough(sym))
    {
        if (this->IsSymNonTempLocalVar(sym))
        {
            // In debug mode, we will write through to the stack location.
            WriteThroughForLocal(regOpnd, lifetime, instr);
        }
        else
        {
            // If this is a write-through sym, it should be live on the entry to 'try' and should have already
            // been allocated when we spilled all active lifetimes there.
            // If it was not part of the active lifetimes on entry to the 'try' then it must have been spilled
            // earlier and should have stack allocated for it.
            Assert(this->NeedsWriteThroughForEH(sym) && sym->IsAllocated());
            this->InsertStore(instr, sym, lifetime->reg);
        }

        // No need to record-def further as we already have stack allocated for it.
        return;
    }

    if (sym->m_isSingleDef)
    {
        lifetime->AddToUseCount(useCountCost, this->curLoop, this->func);
        // the def of a single-def sym is already on the sym
        return;
    }

    if(lifetime->previousDefBlockNumber == currentBlockNumber && !lifetime->defList.Empty())
    {
        // Only keep track of the last def in each basic block. When there are multiple defs of a sym in a basic block, upon
        // spill of that sym, a store needs to be inserted only after the last def of the sym.
        Assert(lifetime->defList.Head()->GetDst()->AsRegOpnd()->m_sym == sym);
        lifetime->defList.Head() = instr;
    }
    else
    {
        // First def of this sym in the current basic block
        lifetime->previousDefBlockNumber = currentBlockNumber;
        lifetime->defList.Prepend(instr);

        // Keep track of the cost of reinserting all the defs if we choose to spill this way.
        lifetime->allDefsCost += useCountCost;
    }
}
// LinearScan::SetUse
// Ensure the sym referenced by 'regOpnd' has a register at 'instr': reload or
// second-chance allocate a spilled lifetime, try const/memory folding when no
// register can be found, then record the use and stamp the register onto the
// operand.
void
LinearScan::SetUse(IR::Instr *instr, IR::RegOpnd *regOpnd)
{
    if (regOpnd->GetReg() != RegNOREG)
    {
        // Already a physical register reference; just note the loop use.
        this->RecordLoopUse(nullptr, regOpnd->GetReg());
        return;
    }

    StackSym *sym = regOpnd->m_sym;
    Lifetime * lifetime = sym->scratch.linearScan.lifetime;

    this->PrepareForUse(lifetime);

    if (lifetime->isSpilled)
    {
        // See if it has been loaded in this basic block
        RegNum reg = this->GetAssignedTempReg(lifetime, regOpnd->GetType());
        if (reg == RegNOREG)
        {
            // Constants can sometimes be folded directly into the instruction
            // instead of taking a register.
            if (sym->IsConst() && EncoderMD::TryConstFold(instr, regOpnd))
            {
                return;
            }

            reg = this->SecondChanceAllocation(lifetime, false);
            if (reg != RegNOREG)
            {
                IR::Instr *insertInstr = this->TryHoistLoad(instr, lifetime);

                this->InsertLoad(insertInstr, sym, reg);
            }
            else
            {
                // Try folding if there are no registers available
                if (!sym->IsConst() && !this->RegsAvailable(regOpnd->GetType()) && EncoderMD::TryFold(instr, regOpnd))
                {
                    return;
                }

                // We need a reg no matter what. Try to force second chance to re-allocate this.
                reg = this->SecondChanceAllocation(lifetime, true);
                if (reg == RegNOREG)
                {
                    // Forcing second chance didn't work.
                    // Allocate a new temp reg for it
                    reg = this->FindReg(nullptr, regOpnd);
                    this->AssignTempReg(lifetime, reg);
                }

                this->InsertLoad(instr, sym, reg);
            }
        }
    }

    if (!lifetime->isSpilled && instr->GetNumber() < lifetime->end)
    {
        // Don't bother to record the use if this is the last use of the lifetime.
        this->RecordUse(lifetime, instr, regOpnd);
    }
    else
    {
        lifetime->SubFromUseCount(LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr)), this->curLoop);
    }
    this->instrUseRegs.Set(lifetime->reg);

    this->SetReg(regOpnd);
}
  2192. // LinearScan::SetReg
  2193. void
  2194. LinearScan::SetReg(IR::RegOpnd *regOpnd)
  2195. {
  2196. if (regOpnd->GetReg() == RegNOREG)
  2197. {
  2198. RegNum reg = regOpnd->m_sym->scratch.linearScan.lifetime->reg;
  2199. AssertMsg(reg != RegNOREG, "Reg should be allocated here...");
  2200. regOpnd->SetReg(reg);
  2201. }
  2202. }
  2203. bool
  2204. LinearScan::SkipNumberedInstr(IR::Instr *instr)
  2205. {
  2206. if (instr->IsLabelInstr())
  2207. {
  2208. if (instr->AsLabelInstr()->m_isLoopTop)
  2209. {
  2210. Assert(instr->GetNumber() != instr->m_next->GetNumber()
  2211. && (instr->GetNumber() != instr->m_prev->GetNumber() || instr->m_prev->m_opcode == Js::OpCode::Nop));
  2212. }
  2213. else
  2214. {
  2215. return true;
  2216. }
  2217. }
  2218. return false;
  2219. }
// LinearScan::EndDeadLifetimes
// Look for lifetimes that are ending here, and retire them: release their
// registers from the active set and, for spilled lifetimes, return their stack
// slots to the free list for stack-packing.
void
LinearScan::EndDeadLifetimes(IR::Instr *instr)
{
    Lifetime * deadLifetime;

    if (this->SkipNumberedInstr(instr))
    {
        return;
    }

    // Retire all active lifetimes ending at this instruction
    while (!this->activeLiveranges->Empty() && this->activeLiveranges->Head()->end <= instr->GetNumber())
    {
        deadLifetime = this->activeLiveranges->Head();
        deadLifetime->defList.Clear();
        deadLifetime->useList.Clear();

        this->activeLiveranges->RemoveHead();
        RegNum reg = deadLifetime->reg;
        this->activeRegs.Clear(reg);
        this->regContent[reg] = nullptr;
        this->secondChanceRegs.Clear(reg);
        // Keep the per-class used-register counters in sync with activeRegs.
        if (RegTypes[reg] == TyMachReg)
        {
            this->intRegUsedCount--;
        }
        else
        {
            Assert(RegTypes[reg] == TyFloat64);
            this->floatRegUsedCount--;
        }
    }

    // Look for spilled lifetimes which end here such that we can make their stack slot
    // available for stack-packing.
    while (!this->stackPackInUseLiveRanges->Empty() && this->stackPackInUseLiveRanges->Head()->end <= instr->GetNumber())
    {
        deadLifetime = this->stackPackInUseLiveRanges->Head();
        deadLifetime->defList.Clear();
        deadLifetime->useList.Clear();

        this->stackPackInUseLiveRanges->RemoveHead();
        if (!deadLifetime->cantStackPack)
        {
            Assert(deadLifetime->spillStackSlot);
            deadLifetime->spillStackSlot->lastUse = deadLifetime->end;
            this->stackSlotsFreeList->Push(deadLifetime->spillStackSlot);
        }
    }
}
  2267. void
  2268. LinearScan::EndDeadOpHelperLifetimes(IR::Instr * instr)
  2269. {
  2270. if (this->SkipNumberedInstr(instr))
  2271. {
  2272. return;
  2273. }
  2274. while (!this->opHelperSpilledLiveranges->Empty() &&
  2275. this->opHelperSpilledLiveranges->Head()->end <= instr->GetNumber())
  2276. {
  2277. Lifetime * deadLifetime;
  2278. // The lifetime doesn't extend beyond the helper block
  2279. // No need to save and restore around the helper block
  2280. Assert(this->IsInHelperBlock());
  2281. deadLifetime = this->opHelperSpilledLiveranges->Head();
  2282. this->opHelperSpilledLiveranges->RemoveHead();
  2283. if (!deadLifetime->cantOpHelperSpill)
  2284. {
  2285. this->opHelperSpilledRegs.Clear(deadLifetime->reg);
  2286. }
  2287. deadLifetime->isOpHelperSpilled = false;
  2288. deadLifetime->cantOpHelperSpill = false;
  2289. deadLifetime->isOpHelperSpillAsArg = false;
  2290. }
  2291. }
// LinearScan::AllocateNewLifetimes
// Look for lifetimes coming live, and allocate a register for them.
// Also seeds register preferences for simple assigns whose source dies here.
void
LinearScan::AllocateNewLifetimes(IR::Instr *instr)
{
    if (this->SkipNumberedInstr(instr))
    {
        return;
    }

    // Try to catch:
    //      x = MOV y(r1)
    // where y's lifetime just ended and x's lifetime is starting.
    // If so, set r1 as a preferred register for x, which may allow peeps to remove the MOV
    if (instr->GetSrc1() && instr->GetSrc1()->IsRegOpnd() && LowererMD::IsAssign(instr) && instr->GetDst() && instr->GetDst()->IsRegOpnd() && instr->GetDst()->AsRegOpnd()->m_sym)
    {
        IR::RegOpnd *src = instr->GetSrc1()->AsRegOpnd();
        StackSym *srcSym = src->m_sym;
        // If src is a physReg ref, or src's lifetime ends here.
        if (!srcSym || srcSym->scratch.linearScan.lifetime->end == instr->GetNumber())
        {
            Lifetime *dstLifetime = instr->GetDst()->AsRegOpnd()->m_sym->scratch.linearScan.lifetime;
            if (dstLifetime)
            {
                dstLifetime->regPreference.Set(src->GetReg());
            }
        }
    }

    // Look for starting lifetimes
    while (!this->lifetimeList->Empty() && this->lifetimeList->Head()->start <= instr->GetNumber())
    {
        // We're at the start of a new live range
        Lifetime * newLifetime = this->lifetimeList->Head();
        newLifetime->lastAllocationStart = instr->GetNumber();

        this->lifetimeList->RemoveHead();

        if (newLifetime->dontAllocate)
        {
            // Lifetime spilled before beginning allocation (e.g., a lifetime known to span
            // multiple EH regions.) Do the work of spilling it now without adding it to the list.
            this->SpillLiveRange(newLifetime);
            continue;
        }

        RegNum reg;
        if (newLifetime->reg == RegNOREG)
        {
            if (newLifetime->isDeadStore)
            {
                // No uses, let's not waste a reg.
                newLifetime->isSpilled = true;
                continue;
            }
            reg = this->FindReg(newLifetime, nullptr);
        }
        else
        {
            // This lifetime is already assigned a physical register. Make
            // sure that register is available by calling SpillReg
            reg = newLifetime->reg;

            // If we're in a helper block, the physical register we're trying to ensure is available might get helper
            // spilled. Don't allow that if this lifetime's end lies beyond the end of the helper block because
            // spill code assumes that this physical register isn't active at the end of the helper block when it tries
            // to restore it. So we'd have to really spill the lifetime then anyway.
            this->SpillReg(reg, IsInHelperBlock() ? (newLifetime->end > currentOpHelperBlock->opHelperEndInstr->GetNumber()) : false);
            newLifetime->cantSpill = true;
        }

        // If we did get a register for this lifetime, add it to the active set.
        if (newLifetime->isSpilled == false)
        {
            this->AssignActiveReg(newLifetime, reg);
        }
    }
}
// LinearScan::FindReg
// Look for an available register. If one isn't available, spill something.
// Note that the newLifetime passed in could be the one we end up spilling.
// Called either with a lifetime (allocation for a live range) or with just a
// regOpnd (allocation of a temporary register for an operand).
RegNum
LinearScan::FindReg(Lifetime *newLifetime, IR::RegOpnd *regOpnd, bool force)
{
    BVIndex regIndex = BVInvalidIndex;
    IRType type;
    bool tryCallerSavedRegs = false;
    BitVector callerSavedAvailableBv;

    // Determine the required register class: from the lifetime's type flags
    // when allocating for a lifetime, otherwise from the operand's IR type.
    if (newLifetime)
    {
        if (newLifetime->isFloat)
        {
            type = TyFloat64;
        }
        else if (newLifetime->isSimd128F4)
        {
            type = TySimd128F4;
        }
        else if (newLifetime->isSimd128I4)
        {
            type = TySimd128I4;
        }
        else if (newLifetime->isSimd128I8)
        {
            type = TySimd128I8;
        }
        else if (newLifetime->isSimd128I16)
        {
            type = TySimd128I16;
        }
        else if (newLifetime->isSimd128U4)
        {
            type = TySimd128U4;
        }
        else if (newLifetime->isSimd128U8)
        {
            type = TySimd128U8;
        }
        else if (newLifetime->isSimd128U16)
        {
            type = TySimd128U16;
        }
        else if (newLifetime->isSimd128B4)
        {
            type = TySimd128B4;
        }
        else if (newLifetime->isSimd128B8)
        {
            type = TySimd128B8;
        }
        else if (newLifetime->isSimd128B16)
        {
            type = TySimd128B16;
        }
        else if (newLifetime->isSimd128D2)
        {
            type = TySimd128D2;
        }
        else
        {
            type = TyMachReg;
        }
    }
    else
    {
        Assert(regOpnd);
        type = regOpnd->GetType();
    }

    if (this->RegsAvailable(type))
    {
        BitVector regsBv;
        // Candidates: all registers not active, not already used by this
        // instruction, and not reserved for call setup.
        regsBv.Copy(this->activeRegs);
        regsBv.Or(this->instrUseRegs);
        regsBv.Or(this->callSetupRegs);
        regsBv.ComplimentAll();

        if (newLifetime)
        {
            if (this->IsInHelperBlock())
            {
                if (newLifetime->end >= this->HelperBlockEndInstrNumber())
                {
                    // this lifetime goes beyond the helper function
                    // We need to exclude the helper spilled registers as well.
                    regsBv.Minus(this->opHelperSpilledRegs);
                }
            }

            if (newLifetime->isFloat || newLifetime->isSimd128())
            {
#ifdef _M_IX86
                Assert(AutoSystemInfo::Data.SSE2Available());
#endif
                regsBv.And(this->floatRegs);
            }
            else
            {
                regsBv.And(this->int32Regs);
                regsBv = this->linearScanMD.FilterRegIntSizeConstraints(regsBv, newLifetime->intUsageBv);
            }

            if (newLifetime->isLiveAcrossCalls)
            {
                // Try to find a callee saved reg first
                BitVector regsBvTemp = regsBv;
                regsBvTemp.And(this->calleeSavedRegs);

                regIndex = GetPreferencedRegIndex(newLifetime, regsBvTemp);

                if (regIndex == BVInvalidIndex)
                {
                    if (!newLifetime->isLiveAcrossUserCalls)
                    {
                        // No callee saved reg is found and the lifetime is only live across helper
                        // calls; we can also use a caller saved reg to make use of the
                        // save and restore around helper blocks
                        regIndex = GetPreferencedRegIndex(newLifetime, regsBv);
                    }
                    else
                    {
                        // If we can't find a callee-saved reg, we can try using a caller-saved reg instead.
                        // We'll hopefully get a few loads enregistered that way before we get to the call.
                        tryCallerSavedRegs = true;
                        callerSavedAvailableBv = regsBv;
                    }
                }
            }
            else
            {
                regIndex = GetPreferencedRegIndex(newLifetime, regsBv);
            }
        }
        else
        {
            AssertMsg(regOpnd, "Need a lifetime or a regOpnd passed in");
            if (regOpnd->IsFloat() || regOpnd->IsSimd128())
            {
#ifdef _M_IX86
                Assert(AutoSystemInfo::Data.SSE2Available());
#endif
                regsBv.And(this->floatRegs);
            }
            else
            {
                regsBv.And(this->int32Regs);
                BitVector regSizeBv;
                regSizeBv.ClearAll();
                regSizeBv.Set(TySize[regOpnd->GetType()]);

                regsBv = this->linearScanMD.FilterRegIntSizeConstraints(regsBv, regSizeBv);
            }

            if (!this->tempRegs.IsEmpty())
            {
                // avoid the temp regs that we have loaded in this basic block
                BitVector regsBvTemp = regsBv;
                regsBvTemp.Minus(this->tempRegs);
                regIndex = regsBvTemp.GetPrevBit();
            }

            if (regIndex == BVInvalidIndex)
            {
                // allocate a temp reg from the other end of the bit vector so that it can
                // stay live for longer.
                regIndex = regsBv.GetPrevBit();
            }
        }
    }

    RegNum reg;

    if (BVInvalidIndex != regIndex)
    {
        Assert(regIndex < RegNumCount);
        reg = (RegNum)regIndex;
    }
    else
    {
        // No free register of the right class: fall back to a caller-saved reg
        // (when allowed), or spill the cheapest conflicting lifetime.
        if (tryCallerSavedRegs)
        {
            Assert(newLifetime);
            regIndex = GetPreferencedRegIndex(newLifetime, callerSavedAvailableBv);
            if (BVInvalidIndex == regIndex)
            {
                tryCallerSavedRegs = false;
            }
        }

        bool dontSpillCurrent = tryCallerSavedRegs;

        if (newLifetime && newLifetime->isSpilled)
        {
            // Second chance allocation
            dontSpillCurrent = true;
        }

        // Can't find reg, spill some lifetime.
        reg = this->Spill(newLifetime, regOpnd, dontSpillCurrent, force);

        if (reg == RegNOREG && tryCallerSavedRegs)
        {
            Assert(BVInvalidIndex != regIndex);
            reg = (RegNum)regIndex;

            // This lifetime will get spilled once we get to the call it overlaps with (note: this may not be true
            // for second chance allocation as we may be beyond the call). Mark it as a cheap spill to give up the register
            // if some lifetime not overlapping with a call needs it.
            newLifetime->isCheapSpill = true;
        }
    }

    // We always have to return a reg if we are allocating a temp reg.
    // If we are allocating for a new lifetime, we return RegNOREG if we
    // spill the new lifetime.
    Assert(newLifetime != nullptr || (reg != RegNOREG && reg < RegNumCount));
    return reg;
}
  2566. BVIndex
  2567. LinearScan::GetPreferencedRegIndex(Lifetime *lifetime, BitVector freeRegs)
  2568. {
  2569. BitVector freePreferencedRegs = freeRegs;
  2570. freePreferencedRegs.And(lifetime->regPreference);
  2571. // If one of the preferred register (if any) is available, use it. Otherwise, just pick one of free register.
  2572. if (!freePreferencedRegs.IsEmpty())
  2573. {
  2574. return freePreferencedRegs.GetNextBit();
  2575. }
  2576. else
  2577. {
  2578. return freeRegs.GetNextBit();
  2579. }
  2580. }
// LinearScan::Spill
// We need to spill something to free up a reg. If the newLifetime
// passed in isn't NULL, we can spill this one instead of an active one.
// Returns the register freed by the spill, or RegNOREG when no active
// candidate was found and 'dontSpillCurrent' forbids spilling newLifetime.
RegNum
LinearScan::Spill(Lifetime *newLifetime, IR::RegOpnd *regOpnd, bool dontSpillCurrent, bool force)
{
    uint minSpillCost = (uint)-1;

    Assert(!newLifetime || !regOpnd || newLifetime->isFloat == (regOpnd->GetType() == TyMachDouble) || newLifetime->isSimd128() == (regOpnd->IsSimd128()));
    bool isFloatReg;
    BitVector intUsageBV;
    bool needCalleeSaved;

    // For now, we just spill the lifetime with the lowest spill cost.
    if (newLifetime)
    {
        isFloatReg = newLifetime->isFloat || newLifetime->isSimd128();

        if (!force)
        {
            // Seed the comparison with newLifetime's own cost, so an active
            // lifetime is only spilled if it is strictly cheaper. With 'force',
            // any active candidate may be chosen.
            minSpillCost = this->GetSpillCost(newLifetime);
        }
        intUsageBV = newLifetime->intUsageBv;
        needCalleeSaved = newLifetime->isLiveAcrossUserCalls;
    }
    else
    {
        needCalleeSaved = false;
        if (regOpnd->IsFloat() || regOpnd->IsSimd128())
        {
            isFloatReg = true;
        }
        else
        {
            // Filter for int reg size constraints
            isFloatReg = false;
            intUsageBV.ClearAll();
            intUsageBV.Set(TySize[regOpnd->GetType()]);
        }
    }

    // Scan the active lifetimes for the cheapest one that matches the register
    // class/size requirements, isn't used by this instruction, and isn't pinned.
    SList<Lifetime *>::EditingIterator candidate;
    FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, this->activeLiveranges, iter)
    {
        uint spillCost = this->GetSpillCost(lifetime);
        if (spillCost < minSpillCost &&
            this->instrUseRegs.Test(lifetime->reg) == false &&
            (lifetime->isFloat || lifetime->isSimd128()) == isFloatReg &&
            !lifetime->cantSpill &&
            (!needCalleeSaved || this->calleeSavedRegs.Test(lifetime->reg)) &&
            this->linearScanMD.FitRegIntSizeConstraints(lifetime->reg, intUsageBV))
        {
            minSpillCost = spillCost;
            candidate = iter;
        }
    } NEXT_SLIST_ENTRY_EDITING;
    AssertMsg(newLifetime || candidate.IsValid(), "Didn't find anything to spill?!?");

    Lifetime * spilledRange;
    if (candidate.IsValid())
    {
        spilledRange = candidate.Data();
        candidate.RemoveCurrent();

        this->activeRegs.Clear(spilledRange->reg);
        if (spilledRange->isFloat || spilledRange->isSimd128())
        {
            this->floatRegUsedCount--;
        }
        else
        {
            this->intRegUsedCount--;
        }
    }
    else if (dontSpillCurrent)
    {
        // Caller doesn't want newLifetime itself spilled; report failure.
        return RegNOREG;
    }
    else
    {
        // No cheaper active candidate; spill the new lifetime itself.
        spilledRange = newLifetime;
    }

    return this->SpillLiveRange(spilledRange);
}
// LinearScan::SpillLiveRange
// Spill 'spilledRange': allocate a stack slot if needed, insert the required
// stores/loads, and clear the register bookkeeping. Returns the register the
// lifetime was occupying (RegNOREG if it had none).
RegNum
LinearScan::SpillLiveRange(Lifetime * spilledRange, IR::Instr *insertionInstr)
{
    Assert(!spilledRange->isSpilled);

    RegNum reg = spilledRange->reg;
    StackSym *sym = spilledRange->sym;

    spilledRange->isSpilled = true;
    spilledRange->isCheapSpill = false;
    spilledRange->reg = RegNOREG;

    // Don't allocate stack space for consts, we always reload them. (For debug mode, allocate on the stack)
    if (!sym->IsAllocated() && (!sym->IsConst() || IsSymNonTempLocalVar(sym)))
    {
        this->AllocateStackSpace(spilledRange);
    }

    // No need to insert loads or stores if there are no uses.
    if (!spilledRange->isDeadStore)
    {
        // In debug mode, don't do insertstore for this stacksym, as we want to retain the IsConst for the sym,
        // and later we are going to find the reg for it.
        if (!IsSymNonTempLocalVar(sym))
        {
            this->InsertStores(spilledRange, reg, insertionInstr);
        }

        if (this->IsInLoop() || sym->IsConst())
        {
            this->InsertLoads(sym, reg);
        }
        else
        {
            sym->scratch.linearScan.lifetime->useList.Clear();
        }

        // Adjust useCount in case of second chance allocation
        spilledRange->ApplyUseCountAdjust(this->curLoop);
    }

    Assert(reg == RegNOREG || spilledRange->reg == RegNOREG || this->regContent[reg] == spilledRange);
    if (spilledRange->isSecondChanceAllocated)
    {
        Assert(reg == RegNOREG || spilledRange->reg == RegNOREG
            || (this->regContent[reg] == spilledRange && this->secondChanceRegs.Test(reg)));
        this->secondChanceRegs.Clear(reg);
        spilledRange->isSecondChanceAllocated = false;
    }
    else
    {
        Assert(!this->secondChanceRegs.Test(reg));
    }
    this->regContent[reg] = nullptr;

#if DBG_DUMP
    if (PHASE_TRACE(Js::LinearScanPhase, this->func))
    {
        Output::Print(_u("**** Spill: "));
        sym->Dump();
        Output::Print(_u("(%S)"), RegNames[reg]);
        Output::Print(_u(" SpillCount:%d Length:%d Cost:%d\n"),
            spilledRange->useCount, spilledRange->end - spilledRange->start, this->GetSpillCost(spilledRange));
    }
#endif
    return reg;
}
// LinearScan::SpillReg
// Spill a given register.
// In a helper block, a lifetime that started before the block may be
// save/restored around the block instead of being truly spilled, unless
// forceSpill is set.
void
LinearScan::SpillReg(RegNum reg, bool forceSpill /* = false */)
{
    Lifetime *spilledRange = nullptr;
    if (activeRegs.Test(reg))
    {
        spilledRange = LinearScan::RemoveRegLiveRange(activeLiveranges, reg);
    }
    else if (opHelperSpilledRegs.Test(reg) && forceSpill)
    {
        // If a lifetime that was assigned this register was helper spilled,
        // really spill it now.
        Assert(IsInHelperBlock());

        // Look for the liverange in opHelperSpilledLiveranges instead of
        // activeLiveranges.
        FOREACH_SLIST_ENTRY(Lifetime *, lifetime, opHelperSpilledLiveranges)
        {
            if (lifetime->reg == reg)
            {
                spilledRange = lifetime;
                break;
            }
        } NEXT_SLIST_ENTRY;

        Assert(spilledRange);
        Assert(!spilledRange->cantSpill);
        RemoveOpHelperSpilled(spilledRange);
        // Really spill this liverange below.
    }
    else
    {
        // No live range currently owns this register; nothing to do.
        return;
    }

    AnalysisAssert(spilledRange);
    Assert(!spilledRange->cantSpill);

    if ((!forceSpill) && this->IsInHelperBlock() && spilledRange->start < this->HelperBlockStartInstrNumber() && !spilledRange->cantOpHelperSpill)
    {
        // if the lifetime starts before the helper block, we can do save and restore
        // around the helper block instead.
        this->AddOpHelperSpilled(spilledRange);
    }
    else
    {
        if (spilledRange->cantOpHelperSpill)
        {
            // We're really spilling this liverange, so take it out of the helper-spilled liveranges
            // to avoid confusion (see Win8 313433).
            Assert(!spilledRange->isOpHelperSpilled);
            spilledRange->cantOpHelperSpill = false;
            this->opHelperSpilledLiveranges->Remove(spilledRange);
        }
        this->SpillLiveRange(spilledRange);
    }

    if (this->activeRegs.Test(reg))
    {
        // Free the register and keep the per-bank usage counters in sync.
        this->activeRegs.Clear(reg);
        if (RegTypes[reg] == TyMachReg)
        {
            this->intRegUsedCount--;
        }
        else
        {
            Assert(RegTypes[reg] == TyFloat64);
            this->floatRegUsedCount--;
        }
    }
}
// LinearScan::ProcessEHRegionBoundary
// Spill every active lifetime at the boundary of an exception-handling region
// (entry to a TryCatch/TryFinally, or a Leave), since control may transfer
// non-locally across the boundary.
void
LinearScan::ProcessEHRegionBoundary(IR::Instr * instr)
{
    Assert(instr->IsBranchInstr());
    if (instr->m_opcode != Js::OpCode::TryCatch && instr->m_opcode != Js::OpCode::TryFinally && instr->m_opcode != Js::OpCode::Leave)
    {
        return;
    }

    // Spill everything upon entry to the try region and upon a Leave.
    // For a Leave, the stores go before the branch itself.
    IR::Instr* insertionInstr = instr->m_opcode != Js::OpCode::Leave ? instr : instr->m_prev;
    FOREACH_SLIST_ENTRY_EDITING(Lifetime *, lifetime, this->activeLiveranges, iter)
    {
        // Release the register and keep the per-bank usage counters in sync.
        this->activeRegs.Clear(lifetime->reg);
        if (lifetime->isFloat || lifetime->isSimd128())
        {
            this->floatRegUsedCount--;
        }
        else
        {
            this->intRegUsedCount--;
        }
        this->SpillLiveRange(lifetime, insertionInstr);
        iter.RemoveCurrent();
    }
    NEXT_SLIST_ENTRY_EDITING;
}
// LinearScan::AllocateStackSpace
// Assign a stack home to a spilled lifetime's sym. Byte-code locals reuse
// their pre-assigned slot; otherwise try to stack-pack into a free slot whose
// previous occupant's lifetime has ended, before allocating a fresh slot.
void
LinearScan::AllocateStackSpace(Lifetime *spilledRange)
{
    if (spilledRange->sym->IsAllocated())
    {
        return;
    }
    uint32 size = TySize[spilledRange->sym->GetType()];

    // For the bytecodereg syms instead of spilling to the any other location lets re-use the already created slot.
    if (IsSymNonTempLocalVar(spilledRange->sym))
    {
        Js::RegSlot slotIndex = spilledRange->sym->GetByteCodeRegSlot();

        // Get the offset which is already allocated from this local, and always spill on that location.
        spilledRange->sym->m_offset = GetStackOffset(slotIndex);
        spilledRange->sym->m_allocated = true;
        return;
    }

    StackSlot * newStackSlot = nullptr;

    if (!PHASE_OFF(Js::StackPackPhase, this->func) && !this->func->IsJitInDebugMode() && !spilledRange->cantStackPack)
    {
        // Search for a free stack slot to re-use
        FOREACH_SLIST_ENTRY_EDITING(StackSlot *, slot, this->stackSlotsFreeList, iter)
        {
            // Heuristic: should we use '==' or '>=' for the size?
            if (slot->lastUse <= spilledRange->start && slot->size >= size)
            {
                StackSym *spilledSym = spilledRange->sym;
                Assert(!spilledSym->IsArgSlotSym() && !spilledSym->IsParamSlotSym());
                Assert(!spilledSym->IsAllocated());
                spilledRange->spillStackSlot = slot;
                spilledSym->m_offset = slot->offset;
                spilledSym->m_allocated = true;

                iter.RemoveCurrent();

#if DBG_DUMP
                if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::StackPackPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
                {
                    spilledSym->Dump();
                    Output::Print(_u(" *** stack packed at offset %3d (%4d - %4d)\n"), spilledSym->m_offset, spilledRange->start, spilledRange->end);
                }
#endif
                break;
            }
        } NEXT_SLIST_ENTRY_EDITING;

        if (spilledRange->spillStackSlot == nullptr)
        {
            // No reusable slot found: create a new packable slot record.
            // Its offset is filled in below once the sym is stack-allocated.
            newStackSlot = JitAnewStruct(this->tempAlloc, StackSlot);
            newStackSlot->size = size;
            spilledRange->spillStackSlot = newStackSlot;
        }
        this->AddLiveRange(this->stackPackInUseLiveRanges, spilledRange);
    }

    if (!spilledRange->sym->IsAllocated())
    {
        // Can't stack pack, allocate new stack slot.
        StackSym *spilledSym = spilledRange->sym;
        this->func->StackAllocate(spilledSym, size);

#if DBG_DUMP
        if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::StackPackPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
        {
            spilledSym->Dump();
            Output::Print(_u(" at offset %3d (%4d - %4d)\n"), spilledSym->m_offset, spilledRange->start, spilledRange->end);
        }
#endif
        if (newStackSlot != nullptr)
        {
            newStackSlot->offset = spilledSym->m_offset;
        }
    }
}
  2882. // LinearScan::InsertLoads
  2883. void
  2884. LinearScan::InsertLoads(StackSym *sym, RegNum reg)
  2885. {
  2886. Lifetime *lifetime = sym->scratch.linearScan.lifetime;
  2887. FOREACH_SLIST_ENTRY(IR::Instr *, instr, &lifetime->useList)
  2888. {
  2889. this->InsertLoad(instr, sym, reg);
  2890. } NEXT_SLIST_ENTRY;
  2891. lifetime->useList.Clear();
  2892. }
// LinearScan::InsertStores
// Insert stores of the lifetime's register value back to the sym's home
// location. Single-def syms get one store at the def. Otherwise choose, by
// cost, between storing at every def seen so far or a single store at the
// current point (relying on compensation code for paths it doesn't cover).
void
LinearScan::InsertStores(Lifetime *lifetime, RegNum reg, IR::Instr *insertionInstr)
{
    StackSym *sym = lifetime->sym;

    // If single def, use instrDef on the symbol
    if (sym->m_isSingleDef)
    {
        IR::Instr * defInstr = sym->m_instrDef;
        if ((!sym->IsConst() && defInstr->GetDst()->AsRegOpnd()->GetReg() == RegNOREG)
            || this->secondChanceRegs.Test(reg))
        {
            // This can happen if we were trying to allocate this lifetime,
            // and it is getting spilled right away.
            // For second chance allocations, this should have already been handled.
            return;
        }
        this->InsertStore(defInstr, defInstr->FindRegDef(sym)->m_sym, reg);
        return;
    }

    if (reg == RegNOREG)
    {
        return;
    }

    uint localStoreCost = LinearScan::GetUseSpillCost(this->loopNest, (this->currentOpHelperBlock != nullptr));

    // Is it cheaper to spill all the defs we've seen so far or just insert a store at the current point?
    if ((this->func->HasTry() && !this->func->DoOptimizeTry()) || localStoreCost >= lifetime->allDefsCost)
    {
        // Insert a store for each def point we've seen so far
        FOREACH_SLIST_ENTRY(IR::Instr *, instr, &(lifetime->defList))
        {
            if (instr->GetDst()->AsRegOpnd()->GetReg() != RegNOREG)
            {
                IR::RegOpnd *regOpnd = instr->FindRegDef(sym);

                // Note that reg may not be equal to regOpnd->GetReg() if the lifetime has been re-allocated since we've seen this def
                this->InsertStore(instr, regOpnd->m_sym, regOpnd->GetReg());
            }
        } NEXT_SLIST_ENTRY;

        // All defs are now backed by stores; reset the tracking state.
        lifetime->defList.Clear();
        lifetime->allDefsCost = 0;
        lifetime->needsStoreCompensation = false;
    }
    else if (!lifetime->defList.Empty())
    {
        // Insert a def right here at the current instr, and then we'll use compensation code for paths not covered by this def.
        if (!insertionInstr)
        {
            insertionInstr = this->currentInstr->m_prev;
        }

        this->InsertStore(insertionInstr, sym, reg);
        if (this->IsInLoop())
        {
            RecordLoopUse(lifetime, reg);
        }

        // We now need to insert all store compensations when needed, unless we spill all the defs later on.
        lifetime->needsStoreCompensation = true;
    }
}
// LinearScan::InsertStore
// Insert a store of 'reg' to 'sym''s stack location immediately after 'instr'.
// Constant syms are handled by deleting their def instead; the value will be
// rematerialized on reload (see InsertLoad).
void
LinearScan::InsertStore(IR::Instr *instr, StackSym *sym, RegNum reg)
{
    // Win8 Bug 391484: We cannot use regOpnd->GetType() here because it
    // can lead to truncation as downstream usage of the register might be of a size
    // greater than the current use. Using RegTypes[reg] works only if the stack slot size
    // is always at least of size MachPtr

    // In the debug mode, if the current sym belongs to the byte code locals, then do not unlink this instruction, as we need to have this instruction to be there
    // to produce the write-through instruction.
    if (sym->IsConst() && !IsSymNonTempLocalVar(sym))
    {
        // Let's just delete the def. We'll reload the constant.
        // We can't just delete the instruction however since the
        // uses will look at the def to get the value.

        // Make sure it wasn't already deleted.
        // (m_next == nullptr is used below as the "already unlinked" marker.)
        if (sym->m_instrDef->m_next)
        {
            sym->m_instrDef->Unlink();
            sym->m_instrDef->m_next = nullptr;
        }
        return;
    }

    Assert(reg != RegNOREG);

    IRType type = sym->GetType();

    // NOTE(review): this branch is a no-op — both arms yield sym->GetType().
    // Presumably a leftover from when SIMD types mapped to a distinct store
    // type; confirm before removing.
    if (sym->IsSimd128())
    {
        type = sym->GetType();
    }

    IR::Instr *store = IR::Instr::New(LowererMD::GetStoreOp(type),
        IR::SymOpnd::New(sym, type, this->func),
        IR::RegOpnd::New(sym, reg, type, this->func), this->func);
    instr->InsertAfter(store);
    store->CopyNumber(instr);
    this->linearScanMD.LegalizeDef(store);

#if DBG_DUMP
    if (PHASE_TRACE(Js::LinearScanPhase, this->func))
    {
        Output::Print(_u("...Inserting store for "));
        sym->Dump();
        Output::Print(_u(" Cost:%d\n"), this->GetSpillCost(sym->scratch.linearScan.lifetime));
    }
#endif
}
  2995. // LinearScan::InsertLoad
  2996. void
  2997. LinearScan::InsertLoad(IR::Instr *instr, StackSym *sym, RegNum reg)
  2998. {
  2999. IR::Opnd *src;
  3000. // The size of loads and stores to memory need to match. See the comment
  3001. // around type in InsertStore above.
  3002. IRType type = sym->GetType();
  3003. if (sym->IsSimd128())
  3004. {
  3005. type = sym->GetType();
  3006. }
  3007. bool isMovSDZero = false;
  3008. if (sym->IsConst())
  3009. {
  3010. Assert(!sym->IsAllocated() || IsSymNonTempLocalVar(sym));
  3011. // For an intConst, reload the constant instead of using the stack.
  3012. // Create a new StackSym to make sure the old sym remains singleDef
  3013. src = sym->GetConstOpnd();
  3014. if (!src)
  3015. {
  3016. isMovSDZero = true;
  3017. sym = StackSym::New(sym->GetType(), this->func);
  3018. sym->m_isConst = true;
  3019. sym->m_isFltConst = true;
  3020. }
  3021. else
  3022. {
  3023. StackSym * oldSym = sym;
  3024. sym = StackSym::New(TyVar, this->func);
  3025. sym->m_isConst = true;
  3026. sym->m_isIntConst = oldSym->m_isIntConst;
  3027. sym->m_isInt64Const = oldSym->m_isInt64Const;
  3028. sym->m_isTaggableIntConst = sym->m_isTaggableIntConst;
  3029. }
  3030. }
  3031. else
  3032. {
  3033. src = IR::SymOpnd::New(sym, type, this->func);
  3034. }
  3035. IR::Instr * load;
  3036. #if defined(_M_IX86) || defined(_M_X64)
  3037. if (isMovSDZero)
  3038. {
  3039. load = IR::Instr::New(Js::OpCode::MOVSD_ZERO,
  3040. IR::RegOpnd::New(sym, reg, type, this->func), this->func);
  3041. instr->InsertBefore(load);
  3042. }
  3043. else
  3044. #endif
  3045. {
  3046. load = Lowerer::InsertMove(IR::RegOpnd::New(sym, reg, type, this->func), src, instr);
  3047. }
  3048. load->CopyNumber(instr);
  3049. if (!isMovSDZero)
  3050. {
  3051. this->linearScanMD.LegalizeUse(load, src);
  3052. }
  3053. this->RecordLoopUse(nullptr, reg);
  3054. #if DBG_DUMP
  3055. if (PHASE_TRACE(Js::LinearScanPhase, this->func))
  3056. {
  3057. Output::Print(_u("...Inserting load for "));
  3058. sym->Dump();
  3059. if (sym->scratch.linearScan.lifetime)
  3060. {
  3061. Output::Print(_u(" Cost:%d\n"), this->GetSpillCost(sym->scratch.linearScan.lifetime));
  3062. }
  3063. else
  3064. {
  3065. Output::Print(_u("\n"));
  3066. }
  3067. }
  3068. #endif
  3069. }
  3070. uint8
  3071. LinearScan::GetRegAttribs(RegNum reg)
  3072. {
  3073. return RegAttribs[reg];
  3074. }
  3075. IRType
  3076. LinearScan::GetRegType(RegNum reg)
  3077. {
  3078. return RegTypes[reg];
  3079. }
  3080. bool
  3081. LinearScan::IsCalleeSaved(RegNum reg)
  3082. {
  3083. return (RegAttribs[reg] & RA_CALLEESAVE) != 0;
  3084. }
  3085. bool
  3086. LinearScan::IsCallerSaved(RegNum reg) const
  3087. {
  3088. return !LinearScan::IsCalleeSaved(reg) && LinearScan::IsAllocatable(reg);
  3089. }
  3090. bool
  3091. LinearScan::IsAllocatable(RegNum reg) const
  3092. {
  3093. return !(RegAttribs[reg] & RA_DONTALLOCATE) && this->linearScanMD.IsAllocatable(reg, this->func);
  3094. }
// LinearScan::KillImplicitRegs
// Handle register kills that are implicit in an instruction: temp-register
// invalidation at label/branch boundaries, IMUL's fixed destination register
// on x86/x64, and spilling of active caller-saved registers around calls.
void
LinearScan::KillImplicitRegs(IR::Instr *instr)
{
    if (instr->IsLabelInstr() || instr->IsBranchInstr())
    {
        // Note: need to clear these for branch as well because this info isn't recorded for second chance
        // allocation on branch boundaries
        this->tempRegs.ClearAll();
    }

#if defined(_M_IX86) || defined(_M_X64)
    if (instr->m_opcode == Js::OpCode::IMUL)
    {
        // IMUL implicitly clobbers its fixed high-dest register; free it.
        this->SpillReg(LowererMDArch::GetRegIMulHighDestLower());
        this->tempRegs.Clear(LowererMDArch::GetRegIMulHighDestLower());

        this->RecordLoopUse(nullptr, LowererMDArch::GetRegIMulHighDestLower());
        return;
    }
#endif

    this->TrackInlineeArgLifetimes(instr);

    // Don't care about kills on bailout calls as we are going to exit anyways
    // Also, for bailout scenarios we have already handled the inlinee frame spills
    Assert(LowererMD::IsCall(instr) || !instr->HasBailOutInfo());
    if (!LowererMD::IsCall(instr) || instr->HasBailOutInfo())
    {
        return;
    }

    if (this->currentBlock->inlineeStack.Count() > 0)
    {
        this->SpillInlineeArgs(instr);
    }
    else
    {
        // Not inside an inlinee: attribute the call to the outer function.
        instr->m_func = this->func;
    }

    //
    // Spill caller-saved registers that are active.
    //
    BitVector deadRegs;
    deadRegs.Copy(this->activeRegs);
    deadRegs.And(this->callerSavedRegs);
    FOREACH_BITSET_IN_UNITBV(reg, deadRegs, BitVector)
    {
        this->SpillReg((RegNum)reg);
    }
    NEXT_BITSET_IN_UNITBV;
    // Only callee-saved temp registers survive the call.
    this->tempRegs.And(this->calleeSavedRegs);

    if (callSetupRegs.Count())
    {
        callSetupRegs.ClearAll();
    }

    // The call clobbers all caller-saved regs in every enclosing loop.
    Loop *loop = this->curLoop;
    while (loop)
    {
        loop->regAlloc.regUseBv.Or(this->callerSavedRegs);
        loop = loop->parent;
    }
}
//
// Before a call, all inlinee frame syms need to be spilled to a pre-defined location
//
void LinearScan::SpillInlineeArgs(IR::Instr* instr)
{
    Assert(this->currentBlock->inlineeStack.Count() > 0);

    // Ensure the call instruction is tied to the current inlinee
    // This is used in the encoder to encode mapping or return offset and InlineeFrameRecord
    instr->m_func = this->currentBlock->inlineeStack.Last();

    // Track which registers we've already stored for this call site, so each
    // register is spilled at most once.
    BitVector spilledRegs;
    this->currentBlock->inlineeFrameLifetimes.Map([&](uint i, Lifetime* lifetime){
        Assert(lifetime->start < instr->GetNumber() && lifetime->end >= instr->GetNumber());
        Assert(!lifetime->sym->IsConst());
        Assert(this->currentBlock->inlineeFrameSyms.ContainsKey(lifetime->sym->m_id));
        if (lifetime->reg == RegNOREG)
        {
            return;
        }

        StackSym* sym = lifetime->sym;
        if (!lifetime->isSpilled && !lifetime->isOpHelperSpilled &&
            (!lifetime->isDeadStore && (lifetime->sym->m_isSingleDef || !lifetime->defList.Empty()))) // if deflist is empty - we have already spilled at all defs - and the value is current
        {
            if (!spilledRegs.Test(lifetime->reg))
            {
                spilledRegs.Set(lifetime->reg);
                if (!sym->IsAllocated())
                {
                    this->AllocateStackSpace(lifetime);
                }

                this->RecordLoopUse(lifetime, lifetime->reg);
                Assert(this->regContent[lifetime->reg] != nullptr);
                if (sym->m_isSingleDef)
                {
                    // For a single def - we do not track the deflist - the def below will remove the single def on the sym
                    // hence, we need to track the original def.
                    Assert(lifetime->defList.Empty());
                    lifetime->defList.Prepend(sym->m_instrDef);
                }

                this->InsertStore(instr->m_prev, sym, lifetime->reg);
            }
        }
    });
}
// LinearScan::TrackInlineeArgLifetimes
// Maintain the current block's inlinee bookkeeping as we cross InlineeStart /
// InlineeEnd markers: push/pop the inlinee stack and track the lifetimes (and
// a per-sym use count) of the inlinee frame syms.
void LinearScan::TrackInlineeArgLifetimes(IR::Instr* instr)
{
    if (instr->m_opcode == Js::OpCode::InlineeStart)
    {
        if (instr->m_func->m_hasInlineArgsOpt)
        {
            instr->m_func->frameInfo->IterateSyms([=](StackSym* sym){
                Lifetime* lifetime = sym->scratch.linearScan.lifetime;
                this->currentBlock->inlineeFrameLifetimes.Add(lifetime);

                // We need to maintain as count because the same sym can be used for multiple arguments
                uint* value;
                if (this->currentBlock->inlineeFrameSyms.TryGetReference(sym->m_id, &value))
                {
                    *value = *value + 1;
                }
                else
                {
                    this->currentBlock->inlineeFrameSyms.Add(sym->m_id, 1);
                }
            });
            if (this->currentBlock->inlineeStack.Count() > 0)
            {
                // Inlinees must nest one level at a time.
                Assert(instr->m_func->inlineDepth == this->currentBlock->inlineeStack.Last()->inlineDepth + 1);
            }
            this->currentBlock->inlineeStack.Add(instr->m_func);
        }
        else
        {
            Assert(this->currentBlock->inlineeStack.Count() == 0);
        }
    }
    else if (instr->m_opcode == Js::OpCode::InlineeEnd || instr->HasBailOnNoProfile())
    {
        if (instr->m_func->m_hasInlineArgsOpt)
        {
            instr->m_func->frameInfo->AllocateRecord(this->func, instr->m_func->GetJITFunctionBody()->GetAddr());

            if(this->currentBlock->inlineeStack.Count() == 0)
            {
                // Block is unreachable
                Assert(this->currentBlock->inlineeFrameLifetimes.Count() == 0);
                Assert(this->currentBlock->inlineeFrameSyms.Count() == 0);
            }
            else
            {
                Func* func = this->currentBlock->inlineeStack.RemoveAtEnd();
                Assert(func == instr->m_func);

                // Pop this inlinee's lifetimes (in reverse of the order they
                // were added) and decrement the per-sym use counts.
                instr->m_func->frameInfo->IterateSyms([=](StackSym* sym){
                    Lifetime* lifetime = this->currentBlock->inlineeFrameLifetimes.RemoveAtEnd();

                    uint* value;
                    if (this->currentBlock->inlineeFrameSyms.TryGetReference(sym->m_id, &value))
                    {
                        *value = *value - 1;
                        if (*value == 0)
                        {
                            bool removed = this->currentBlock->inlineeFrameSyms.Remove(sym->m_id);
                            Assert(removed);
                        }
                    }
                    else
                    {
                        Assert(UNREACHED);
                    }
                    Assert(sym->scratch.linearScan.lifetime == lifetime);
                }, /*reverse*/ true);
            }
        }
    }
}
// GetSpillCost
// The spill cost is trying to estimate the usage density of the lifetime,
// by dividing the useCount by the lifetime length.
// Higher cost == more expensive to spill (i.e. better to keep in a register).
uint
LinearScan::GetSpillCost(Lifetime *lifetime)
{
    uint useCount = lifetime->GetRegionUseCount(this->curLoop);
    uint spillCost;

    // Get local spill cost.  Ignore helper blocks as we'll also need compensation on the main path.
    uint localUseCost = LinearScan::GetUseSpillCost(this->loopNest, false);

    if (lifetime->reg && !lifetime->isSpilled)
    {
        // If it is in a reg, we'll need a store
        // (charge the cheaper of one local store vs. storing at all defs).
        if (localUseCost >= lifetime->allDefsCost)
        {
            useCount += lifetime->allDefsCost;
        }
        else
        {
            useCount += localUseCost;
        }

        if (this->curLoop && !lifetime->sym->IsConst()
            && this->curLoop->regAlloc.liveOnBackEdgeSyms->Test(lifetime->sym->m_id))
        {
            // If we spill here, we'll need to insert a load at the bottom of the loop
            // (it would be nice to be able to check is was in a reg at the top of the loop)...
            useCount += localUseCost;
        }
    }

    // When comparing 2 lifetimes, we don't really care about the actual length of the lifetimes.
    // What matters is how much longer will they use the register.
    const uint start = currentInstr->GetNumber();
    uint end = max(start, lifetime->end);
    uint lifetimeTotalOpHelperFullVisitedLength = lifetime->totalOpHelperLengthByEnd;

    // Within a loop, only consider the lifetime up to the loop end.
    if (this->curLoop && this->curLoop->regAlloc.loopEnd < end && !PHASE_OFF(Js::RegionUseCountPhase, this->func))
    {
        end = this->curLoop->regAlloc.loopEnd;
        lifetimeTotalOpHelperFullVisitedLength = this->curLoop->regAlloc.helperLength;
    }
    uint length = end - start + 1;

    // Exclude helper block length since helper block paths are typically infrequently taken paths and not as important
    const uint totalOpHelperVisitedLength = this->totalOpHelperFullVisitedLength + CurrentOpHelperVisitedLength(currentInstr);
    Assert(lifetimeTotalOpHelperFullVisitedLength >= totalOpHelperVisitedLength);
    const uint lifetimeHelperLength = lifetimeTotalOpHelperFullVisitedLength - totalOpHelperVisitedLength;
    Assert(length >= lifetimeHelperLength);
    length -= lifetimeHelperLength;
    if(length == 0)
    {
        length = 1;
    }

    // Add a base length so that the difference between a length of 1 and a length of 2 is not so large
#ifdef _M_X64
    length += 64;
#else
    length += 16;
#endif

    // Fixed-point density: scale useCount up before dividing by length.
    spillCost = (useCount << 13) / length;

    if (lifetime->isSecondChanceAllocated)
    {
        // Second chance allocation have additional overhead, so de-prioritize them
        // Note: could use more tuning...
        spillCost = spillCost * 4/5;
    }

    if (lifetime->isCheapSpill)
    {
        // This lifetime will get spilled eventually, so lower the spill cost to favor other lifetimes
        // Note: could use more tuning...
        spillCost /= 2;
    }

    if (lifetime->sym->IsConst())
    {
        // Constants are cheap to rematerialize; strongly prefer spilling them.
        spillCost = spillCost / 16;
    }

    return spillCost;
}
  3338. bool
  3339. LinearScan::RemoveDeadStores(IR::Instr *instr)
  3340. {
  3341. IR::Opnd *dst = instr->GetDst();
  3342. if (dst && dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym && !dst->AsRegOpnd()->m_isCallArg)
  3343. {
  3344. IR::RegOpnd *regOpnd = dst->AsRegOpnd();
  3345. Lifetime * lifetime = regOpnd->m_sym->scratch.linearScan.lifetime;
  3346. if (lifetime->isDeadStore)
  3347. {
  3348. if (Lowerer::HasSideEffects(instr) == false)
  3349. {
  3350. // If all the bailouts referencing this arg are removed (which can happen in some scenarios)
  3351. //- then it's OK to remove this def of the arg
  3352. DebugOnly(this->func->allowRemoveBailOutArgInstr = true);
  3353. // We are removing this instruction, end dead life time now
  3354. this->EndDeadLifetimes(instr);
  3355. instr->Remove();
  3356. DebugOnly(this->func->allowRemoveBailOutArgInstr = false);
  3357. return true;
  3358. }
  3359. }
  3360. }
  3361. return false;
  3362. }
  3363. void
  3364. LinearScan::AssignActiveReg(Lifetime * lifetime, RegNum reg)
  3365. {
  3366. Assert(!this->activeRegs.Test(reg));
  3367. Assert(!lifetime->isSpilled);
  3368. Assert(lifetime->reg == RegNOREG || lifetime->reg == reg);
  3369. this->func->m_regsUsed.Set(reg);
  3370. lifetime->reg = reg;
  3371. this->activeRegs.Set(reg);
  3372. if (lifetime->isFloat || lifetime->isSimd128())
  3373. {
  3374. this->floatRegUsedCount++;
  3375. }
  3376. else
  3377. {
  3378. this->intRegUsedCount++;
  3379. }
  3380. this->AddToActive(lifetime);
  3381. this->tempRegs.Clear(reg);
  3382. }
  3383. void
  3384. LinearScan::AssignTempReg(Lifetime * lifetime, RegNum reg)
  3385. {
  3386. Assert(reg > RegNOREG && reg < RegNumCount);
  3387. Assert(!this->activeRegs.Test(reg));
  3388. Assert(lifetime->isSpilled);
  3389. this->func->m_regsUsed.Set(reg);
  3390. lifetime->reg = reg;
  3391. this->tempRegs.Set(reg);
  3392. __analysis_assume(reg > 0 && reg < RegNumCount);
  3393. this->tempRegLifetimes[reg] = lifetime;
  3394. this->RecordLoopUse(nullptr, reg);
  3395. }
  3396. RegNum
  3397. LinearScan::GetAssignedTempReg(Lifetime * lifetime, IRType type)
  3398. {
  3399. if (this->tempRegs.Test(lifetime->reg) && this->tempRegLifetimes[lifetime->reg] == lifetime)
  3400. {
  3401. if (this->linearScanMD.FitRegIntSizeConstraints(lifetime->reg, type))
  3402. {
  3403. this->RecordLoopUse(nullptr, lifetime->reg);
  3404. return lifetime->reg;
  3405. }
  3406. else
  3407. {
  3408. // Free this temp, we'll need to find another one.
  3409. this->tempRegs.Clear(lifetime->reg);
  3410. lifetime->reg = RegNOREG;
  3411. }
  3412. }
  3413. return RegNOREG;
  3414. }
  3415. uint
  3416. LinearScan::GetUseSpillCost(uint loopNest, BOOL isInHelperBlock)
  3417. {
  3418. if (isInHelperBlock)
  3419. {
  3420. // Helper block uses are not as important.
  3421. return 0;
  3422. }
  3423. else if (loopNest < 6)
  3424. {
  3425. return (1 << (loopNest * 3));
  3426. }
  3427. else
  3428. {
  3429. // Slow growth for deep nest to avoid overflow
  3430. return (1 << (5 * 3)) * (loopNest-5);
  3431. }
  3432. }
// LinearScan::ProcessSecondChanceBoundary (branch)
// At a branch, record the current register contents so second-chance
// compensation can be inserted where control-flow edges meet. Loop back-edges
// get compensation immediately; forward branches save their register state
// and propagate the lowered basic block to the target label.
void
LinearScan::ProcessSecondChanceBoundary(IR::BranchInstr *branchInstr)
{
    if (this->func->HasTry() && !this->func->DoOptimizeTry())
    {
        return;
    }

    if (this->currentOpHelperBlock && this->currentOpHelperBlock->opHelperEndInstr == branchInstr)
    {
        // Lifetimes opHelperSpilled won't get recorded by SaveRegContent().  Do it here.
        FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->opHelperSpilledLiveranges)
        {
            if (!lifetime->cantOpHelperSpill)
            {
                if (lifetime->isSecondChanceAllocated)
                {
                    this->secondChanceRegs.Set(lifetime->reg);
                }
                this->regContent[lifetime->reg] = lifetime;
            }
        } NEXT_SLIST_ENTRY;
    }

    if(branchInstr->IsMultiBranch())
    {
        // Process each distinct target of the multi-branch.
        IR::MultiBranchInstr * multiBranchInstr = branchInstr->AsMultiBrInstr();

        multiBranchInstr->MapUniqueMultiBrLabels([=](IR::LabelInstr * branchLabel) -> void
        {
            this->ProcessSecondChanceBoundaryHelper(branchInstr, branchLabel);
        });
    }
    else
    {
        IR::LabelInstr *branchLabel = branchInstr->GetTarget();
        this->ProcessSecondChanceBoundaryHelper(branchInstr, branchLabel);
    }

    this->SaveRegContent(branchInstr);
}
// LinearScan::ProcessSecondChanceBoundaryHelper
// Handle one branch->label edge for second-chance allocation. Back-edges get
// compensation code against the loop-top register contents; forward branches
// save the register state on the branch and seed the target label's lowered
// basic block.
void
LinearScan::ProcessSecondChanceBoundaryHelper(IR::BranchInstr *branchInstr, IR::LabelInstr *branchLabel)
{
    if (branchInstr->GetNumber() > branchLabel->GetNumber())
    {
        // Loop back-edge
        Assert(branchLabel->m_isLoopTop);
        branchInstr->m_regContent = nullptr;
        this->InsertSecondChanceCompensation(this->regContent, branchLabel->m_regContent, branchInstr, branchLabel);
    }
    else
    {
        // Forward branch
        this->SaveRegContent(branchInstr);
        if (this->curLoop)
        {
            // Remember this exit edge's register state for loop processing.
            this->curLoop->regAlloc.exitRegContentList->Prepend(branchInstr->m_regContent);
        }
        if (!branchLabel->m_loweredBasicBlock)
        {
            if (branchInstr->IsConditional() || branchInstr->IsMultiBranch())
            {
                // Clone with deep copy
                branchLabel->m_loweredBasicBlock = this->currentBlock->Clone(this->tempAlloc);
            }
            else
            {
                // If the unconditional branch leads to the end of the function for the scenario of a bailout - we do not want to
                // copy the lowered inlinee info.
                IR::Instr* nextInstr = branchLabel->GetNextRealInstr();
                if (nextInstr->m_opcode != Js::OpCode::FunctionExit &&
                    nextInstr->m_opcode != Js::OpCode::BailOutStackRestore &&
                    this->currentBlock->HasData())
                {
                    // Clone with shallow copy
                    branchLabel->m_loweredBasicBlock = this->currentBlock;
                }
            }
        }
        else
        {
            // The lowerer sometimes generates unreachable blocks that would have empty data.
            Assert(!currentBlock->HasData() || branchLabel->m_loweredBasicBlock->Equals(this->currentBlock));
        }
    }
}
// LinearScan::ProcessSecondChanceBoundary (label)
// At a label, snapshot the register contents (and for loop tops, keep a copy
// for back-edge processing), then insert compensation code on every incoming
// forward branch whose saved register state differs from the state here.
void
LinearScan::ProcessSecondChanceBoundary(IR::LabelInstr *labelInstr)
{
    if (this->func->HasTry() && !this->func->DoOptimizeTry())
    {
        return;
    }

    if (labelInstr->m_isLoopTop)
    {
        this->SaveRegContent(labelInstr);
        // Keep a separate copy of the loop-top register contents; back-edges
        // will be reconciled against it when they are processed.
        Lifetime ** regContent = AnewArrayZ(this->tempAlloc, Lifetime *, RegNumCount);
        js_memcpy_s(regContent, (RegNumCount * sizeof(Lifetime *)), this->regContent, sizeof(this->regContent));
        this->curLoop->regAlloc.loopTopRegContent = regContent;
    }

    FOREACH_SLISTCOUNTED_ENTRY_EDITING(IR::BranchInstr *, branchInstr, &labelInstr->labelRefs, iter)
    {
        if (branchInstr->m_isAirlock)
        {
            // This branch was just inserted... Skip it.
            continue;
        }

        Assert(branchInstr->GetNumber() && labelInstr->GetNumber());
        if (branchInstr->GetNumber() < labelInstr->GetNumber())
        {
            // Normal branch
            this->InsertSecondChanceCompensation(branchInstr->m_regContent, this->regContent, branchInstr, labelInstr);
        }
        else
        {
            // Loop back-edge
            // (handled when the back-edge branch itself is processed)
            Assert(labelInstr->m_isLoopTop);
        }
    } NEXT_SLISTCOUNTED_ENTRY_EDITING;
}
  3550. IR::Instr * LinearScan::EnsureAirlock(bool needsAirlock, bool *pHasAirlock, IR::Instr *insertionInstr,
  3551. IR::Instr **pInsertionStartInstr, IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr)
  3552. {
  3553. if (needsAirlock && !(*pHasAirlock))
  3554. {
  3555. // We need an extra block for the compensation code.
  3556. insertionInstr = this->InsertAirlock(branchInstr, labelInstr);
  3557. *pInsertionStartInstr = insertionInstr->m_prev;
  3558. *pHasAirlock = true;
  3559. }
  3560. return insertionInstr;
  3561. }
  3562. bool LinearScan::NeedsLoopBackEdgeCompensation(Lifetime *lifetime, IR::LabelInstr *loopTopLabel)
  3563. {
  3564. if (!lifetime)
  3565. {
  3566. return false;
  3567. }
  3568. if (lifetime->sym->IsConst())
  3569. {
  3570. return false;
  3571. }
  3572. // No need if lifetime begins in the loop
  3573. if (lifetime->start > loopTopLabel->GetNumber())
  3574. {
  3575. return false;
  3576. }
  3577. // Only needed if lifetime is live on the back-edge, and the register is used inside the loop, or the lifetime extends
  3578. // beyond the loop (and compensation out of the loop may use this reg)...
  3579. if (!loopTopLabel->GetLoop()->regAlloc.liveOnBackEdgeSyms->Test(lifetime->sym->m_id)
  3580. || (this->currentInstr->GetNumber() >= lifetime->end && !this->curLoop->regAlloc.symRegUseBv->Test(lifetime->sym->m_id)))
  3581. {
  3582. return false;
  3583. }
  3584. return true;
  3585. }
void
LinearScan::InsertSecondChanceCompensation(Lifetime ** branchRegContent, Lifetime **labelRegContent,
    IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr)
{
    // Reconcile the register state recorded at a branch (branchRegContent) with the
    // state expected at its target label (labelRegContent) by inserting MOV/store/load
    // compensation code on this flow arc. Two cases:
    //   - loop back-edge (branchRegContent aliases this->regContent): restore the
    //     loop-top register picture before jumping back;
    //   - forward merge: make the branch side agree with the label side.
    IR::Instr *prevInstr = branchInstr->GetPrevRealInstrOrLabel();
    // An airlock block is needed when compensation code cannot be placed directly
    // before the branch without affecting other paths (conditional/multi-branch, or
    // a conditional branch immediately preceding).
    bool needsAirlock = branchInstr->IsConditional() || (prevInstr->IsBranchInstr() && prevInstr->AsBranchInstr()->IsConditional()) || branchInstr->IsMultiBranch();
    bool hasAirlock = false;
    IR::Instr *insertionInstr = branchInstr;
    IR::Instr *insertionStartInstr = branchInstr->m_prev;
    // For loop back-edge, we want to keep the insertionStartInstr before the branch as spill need to happen on all paths
    // Pass a dummy instr address to airLockBlock insertion code.
    BitVector thrashedRegs(0);      // regs whose branch-side value has been overwritten by compensation already inserted
    bool isLoopBackEdge = (this->regContent == branchRegContent);
    Lifetime * tmpRegContent[RegNumCount];
    Lifetime **regContent = this->regContent;

    if (isLoopBackEdge)
    {
        Loop *loop = labelInstr->GetLoop();
        // Work on a local copy of the current reg map so the XCHG conflict-avoidance
        // below can permute it without disturbing the allocator's live state.
        js_memcpy_s(&tmpRegContent, (RegNumCount * sizeof(Lifetime *)), this->regContent, sizeof(this->regContent));
        branchRegContent = tmpRegContent;
        regContent = tmpRegContent;
#if defined(_M_IX86) || defined(_M_X64)
        // Insert XCHG to avoid some conflicts for int regs
        // Note: no XCHG on ARM or SSE2. We could however use 3 XOR on ARM...
        this->AvoidCompensationConflicts(labelInstr, branchInstr, labelRegContent, branchRegContent,
            &insertionInstr, &insertionStartInstr, needsAirlock, &hasAirlock);
#endif
        // Only registers second-chance allocated somewhere need store/move fixups here.
        FOREACH_BITSET_IN_UNITBV(reg, this->secondChanceRegs, BitVector)
        {
            Lifetime *labelLifetime = labelRegContent[reg];
            Lifetime *lifetime = branchRegContent[reg];
            // 1. Insert Stores
            //      Lifetime starts before the loop
            //      Lifetime was re-allocated within the loop (i.e.: a load was most likely inserted)
            //      Lifetime is live on back-edge and has unsaved defs.
            if (lifetime && lifetime->start < labelInstr->GetNumber() && lifetime->lastAllocationStart > labelInstr->GetNumber()
                && (labelInstr->GetLoop()->regAlloc.liveOnBackEdgeSyms->Test(lifetime->sym->m_id))
                && !lifetime->defList.Empty())
            {
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                // If the lifetime was second chance allocated inside the loop, there might
                // be spilled loads of this symbol in the loop.  Insert the stores.
                // We don't need to do this if the lifetime was re-allocated before the loop.
                //
                // Note that reg may not be equal to lifetime->reg because of inserted XCHG...
                this->InsertStores(lifetime, lifetime->reg, insertionStartInstr);
            }

            if (lifetime == labelLifetime)
            {
                // Branch and label already agree for this reg.
                continue;
            }

            // 2. MOV labelReg/MEM, branchReg
            //      Move current register to match content at the top of the loop
            if (this->NeedsLoopBackEdgeCompensation(lifetime, labelInstr))
            {
                // Mismatch, we need to insert compensation code
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                // MOV ESI, EAX
                // MOV EDI, ECX
                // MOV ECX, ESI
                // MOV EAX, EDI   <<<
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    lifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }

            // 2. MOV labelReg, MEM
            //      Lifetime was in a reg at the top of the loop but is spilled right now.
            if (labelLifetime && labelLifetime->isSpilled && !labelLifetime->sym->IsConst() && labelLifetime->end >= branchInstr->GetNumber())
            {
                if (!loop->regAlloc.liveOnBackEdgeSyms->Test(labelLifetime->sym->m_id))
                {
                    continue;
                }
                if (this->ClearLoopExitIfRegUnused(labelLifetime, (RegNum)reg, branchInstr, loop))
                {
                    continue;
                }
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    labelLifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
        } NEXT_BITSET_IN_UNITBV;

        // 3. MOV labelReg, MEM
        //      Finish up reloading lifetimes needed at the top.  #2 only handled secondChanceRegs.
        FOREACH_REG(reg)
        {
            // Handle lifetimes in a register at the top of the loop, but not currently.
            Lifetime *labelLifetime = labelRegContent[reg];
            if (labelLifetime && !labelLifetime->sym->IsConst() && labelLifetime != branchRegContent[reg] && !thrashedRegs.Test(reg)
                && (loop->regAlloc.liveOnBackEdgeSyms->Test(labelLifetime->sym->m_id)))
            {
                if (this->ClearLoopExitIfRegUnused(labelLifetime, (RegNum)reg, branchInstr, loop))
                {
                    continue;
                }
                // Mismatch, we need to insert compensation code
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    labelLifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
        } NEXT_REG;

        if (hasAirlock)
        {
            loop->regAlloc.hasAirLock = true;
        }
    }
    else
    {
        //
        // Non-loop-back-edge merge
        //
        FOREACH_REG(reg)
        {
            Lifetime *branchLifetime = branchRegContent[reg];
            Lifetime *lifetime = regContent[reg];

            if (lifetime == branchLifetime)
            {
                continue;
            }
            if (branchLifetime && branchLifetime->isSpilled && !branchLifetime->sym->IsConst() && branchLifetime->end > labelInstr->GetNumber())
            {
                // The lifetime was in a reg at the branch and is now spilled.  We need a store on this path.
                //
                //  MOV  MEM, branch_REG
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    branchLifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
            if (lifetime && !lifetime->sym->IsConst() && lifetime->start <= branchInstr->GetNumber())
            {
                //  MOV  label_REG, branch_REG / MEM
                insertionInstr = this->EnsureAirlock(needsAirlock, &hasAirlock, insertionInstr, &insertionStartInstr, branchInstr, labelInstr);
                this->ReconcileRegContent(branchRegContent, labelRegContent, branchInstr, labelInstr,
                    lifetime, (RegNum)reg, &thrashedRegs, insertionInstr, insertionStartInstr);
            }
        } NEXT_REG;
    }

    if (hasAirlock)
    {
        // Fix opHelper on airlock label.
        // If the airlock label was marked helper but the code that follows the
        // insertion point is non-helper, downgrade the airlock label.
        if (insertionInstr->m_prev->IsLabelInstr() && insertionInstr->IsLabelInstr())
        {
            if (insertionInstr->m_prev->AsLabelInstr()->isOpHelper && !insertionInstr->AsLabelInstr()->isOpHelper)
            {
                insertionInstr->m_prev->AsLabelInstr()->isOpHelper = false;
            }
        }
    }
}
void
LinearScan::ReconcileRegContent(Lifetime ** branchRegContent, Lifetime **labelRegContent,
    IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr,
    Lifetime *lifetime, RegNum reg, BitVector *thrashedRegs, IR::Instr *insertionInstr, IR::Instr *insertionStartInstr)
{
    // Emit the compensation (reg-to-reg MOV, spill store, or reload) that makes
    // 'lifetime' — which occupies 'reg' on exactly one side of this arc — agree
    // on both sides. 'thrashedRegs' tracks registers whose branch-side value has
    // already been overwritten by earlier compensation on this same arc.
    RegNum originalReg = RegNOREG;
    IRType type = RegTypes[reg];
    Assert(labelRegContent[reg] != branchRegContent[reg]);
    // Which side of the arc does 'reg' hold this lifetime on?
    bool matchBranchReg = (branchRegContent[reg] == lifetime);
    // Search the *other* side's map for the register this lifetime occupies there.
    Lifetime **originalRegContent = (matchBranchReg ? labelRegContent : branchRegContent);
    bool isLoopBackEdge = (branchInstr->GetNumber() > labelInstr->GetNumber());

    if (lifetime->sym->IsConst())
    {
        // Constants are rematerialized, not moved.
        return;
    }

    // Look if this lifetime was in a different register in the previous block.
    // Split the search in 2 to speed this up.
    if (type == TyMachReg)
    {
        FOREACH_INT_REG(regIter)
        {
            if (originalRegContent[regIter] == lifetime)
            {
                originalReg = regIter;
                break;
            }
        } NEXT_INT_REG;
    }
    else
    {
        Assert(type == TyFloat64 || IRType_IsSimd128(type));
        FOREACH_FLOAT_REG(regIter)
        {
            if (originalRegContent[regIter] == lifetime)
            {
                originalReg = regIter;
                break;
            }
        } NEXT_FLOAT_REG;
    }

    // Normalize to (branchReg, labelReg): where the lifetime lives at the source
    // and at the target of the arc. Either may be RegNOREG (spilled on that side).
    RegNum branchReg, labelReg;
    if (matchBranchReg)
    {
        branchReg = reg;
        labelReg = originalReg;
    }
    else
    {
        branchReg = originalReg;
        labelReg = reg;
    }

    if (branchReg != RegNOREG && !thrashedRegs->Test(branchReg) && !lifetime->sym->IsConst())
    {
        // The branch-side register still holds the live value.
        Assert(branchRegContent[branchReg] == lifetime);
        if (labelReg != RegNOREG)
        {
            // MOV labelReg, branchReg
            Assert(labelRegContent[labelReg] == lifetime);
            IR::Instr *load = IR::Instr::New(LowererMD::GetLoadOp(type),
                IR::RegOpnd::New(lifetime->sym, labelReg, type, this->func),
                IR::RegOpnd::New(lifetime->sym, branchReg, type, this->func), this->func);
            insertionInstr->InsertBefore(load);
            load->CopyNumber(insertionInstr);
            // symRegUseBv needs to be set properly.  Unfortunately, we need to go conservative as we don't know
            // which allocation it was at the source of the branch.
            if (this->IsInLoop())
            {
                this->RecordLoopUse(lifetime, branchReg);
            }
            thrashedRegs->Set(labelReg);
        }
        else if (!lifetime->sym->IsSingleDef() && lifetime->needsStoreCompensation && !isLoopBackEdge)
        {
            // Spilled at the label side: flush the register to memory on this arc.
            Assert(!lifetime->sym->IsConst());
            Assert(matchBranchReg);
            Assert(branchRegContent[branchReg] == lifetime);
            // MOV mem, branchReg
            this->InsertStores(lifetime, branchReg, insertionInstr->m_prev);
            // symRegUseBv needs to be set properly.  Unfortunately, we need to go conservative as we don't know
            // which allocation it was at the source of the branch.
            if (this->IsInLoop())
            {
                this->RecordLoopUse(lifetime, branchReg);
            }
        }
    }
    else if (labelReg != RegNOREG)
    {
        // The value must end up in labelReg, but branchReg is gone or thrashed.
        Assert(labelRegContent[labelReg] == lifetime);
        Assert(lifetime->sym->IsConst() || lifetime->sym->IsAllocated());
        if (branchReg != RegNOREG && !lifetime->sym->IsSingleDef())
        {
            Assert(thrashedRegs->Test(branchReg));
            // We can't insert a "MOV labelReg, branchReg" at the insertion point
            // because branchReg was thrashed by a previous reload.
            // Look for that reload to see if we can insert before it.
            IR::Instr *newInsertionInstr = insertionInstr->m_prev;
            bool foundIt = false;
            while (LowererMD::IsAssign(newInsertionInstr))
            {
                IR::Opnd *dst = newInsertionInstr->GetDst();
                IR::Opnd *src = newInsertionInstr->GetSrc1();
                if (src->IsRegOpnd() && src->AsRegOpnd()->GetReg() == labelReg)
                {
                    // This uses labelReg, give up...
                    break;
                }
                if (dst->IsRegOpnd() && dst->AsRegOpnd()->GetReg() == branchReg)
                {
                    // Success!
                    foundIt = true;
                    break;
                }
                newInsertionInstr = newInsertionInstr->m_prev;
            }

            if (foundIt)
            {
                // MOV labelReg, branchReg — inserted *before* the reload that thrashed branchReg.
                Assert(labelRegContent[labelReg] == lifetime);
                IR::Instr *load = IR::Instr::New(LowererMD::GetLoadOp(type),
                    IR::RegOpnd::New(lifetime->sym, labelReg, type, this->func),
                    IR::RegOpnd::New(lifetime->sym, branchReg, type, this->func), this->func);
                newInsertionInstr->InsertBefore(load);
                load->CopyNumber(newInsertionInstr);
                // symRegUseBv needs to be set properly.  Unfortunately, we need to go conservative as we don't know
                // which allocation it was at the source of the branch.
                if (this->IsInLoop())
                {
                    this->RecordLoopUse(lifetime, branchReg);
                }
                thrashedRegs->Set(labelReg);
                return;
            }

            // Couldn't hoist above the thrashing reload: store to memory instead,
            // before any compensation code on this arc, then reload below.
            Assert(thrashedRegs->Test(branchReg));
            this->InsertStores(lifetime, branchReg, insertionStartInstr);
            // symRegUseBv needs to be set properly.  Unfortunately, we need to go conservative as we don't know
            // which allocation it was at the source of the branch.
            if (this->IsInLoop())
            {
                this->RecordLoopUse(lifetime, branchReg);
            }
        }

        // MOV labelReg, mem
        this->InsertLoad(insertionInstr, lifetime->sym, labelReg);
        thrashedRegs->Set(labelReg);
    }
    else if (!lifetime->sym->IsConst())
    {
        Assert(matchBranchReg);
        Assert(branchReg != RegNOREG);
        // The lifetime was in a register at the top of the loop, but we thrashed it with a previous reload...
        // Fall back to a store so the value survives in memory.
        if (!lifetime->sym->IsSingleDef())
        {
            this->InsertStores(lifetime, branchReg, insertionStartInstr);
        }
#if DBG_DUMP
        if (PHASE_TRACE(Js::SecondChancePhase, this->func))
        {
            Output::Print(_u("****** Spilling reg because of bad compensation code order: "));
            lifetime->sym->Dump();
            Output::Print(_u("\n"));
        }
#endif
    }
}
bool LinearScan::ClearLoopExitIfRegUnused(Lifetime *lifetime, RegNum reg, IR::BranchInstr *branchInstr, Loop *loop)
{
    // If a lifetime was enregistered into the loop and then spilled, we need compensation at the bottom
    // of the loop to reload the lifetime into that register.
    // If that lifetime was spilled before it was ever used, we don't need the compensation code.
    // We do however need to clear the regContent on any loop exit as the register will not
    // be available anymore on that path.
    // Note: If the lifetime was reloaded into the same register, we might clear the regContent unnecessarily...
    //
    // Returns true if the caller can skip compensation for this (lifetime, reg) pair.

    // NOTE(review): this guard returns early when the phase is NOT disabled, i.e. the
    // clearing logic below only runs when ClearRegLoopExitPhase is explicitly turned
    // off. That polarity looks inverted (one would expect PHASE_ON / !PHASE_OFF to
    // enable the optimization) — confirm against the phase's intended default.
    if (!PHASE_OFF(Js::ClearRegLoopExitPhase, this->func))
    {
        return false;
    }

    // Reg never used by this sym in the loop and no pending stores required.
    if (!loop->regAlloc.symRegUseBv->Test(lifetime->sym->m_id) && !lifetime->needsStoreCompensation)
    {
        if (lifetime->end > branchInstr->GetNumber())
        {
            // Lifetime survives past the back-edge: scrub it from every recorded
            // loop-exit register map, since the register won't hold it there.
            FOREACH_SLIST_ENTRY(Lifetime **, regContent, loop->regAlloc.exitRegContentList)
            {
                if (regContent[reg] == lifetime)
                {
                    regContent[reg] = nullptr;
                }
            } NEXT_SLIST_ENTRY;
        }
        return true;
    }

    return false;
}
  3927. #if defined(_M_IX86) || defined(_M_X64)
void LinearScan::AvoidCompensationConflicts(IR::LabelInstr *labelInstr, IR::BranchInstr *branchInstr,
    Lifetime *labelRegContent[], Lifetime *branchRegContent[],
    IR::Instr **pInsertionInstr, IR::Instr **pInsertionStartInstr, bool needsAirlock, bool *pHasAirlock)
{
    // x86/x64 only: before emitting back-edge compensation MOVs, break ordering
    // conflicts between them by emitting XCHG instructions and permuting the
    // (local copy of) branchRegContent accordingly.
    bool changed = true;

    // Look for conflicts in the incoming compensation code:
    //      MOV ESI, EAX
    //      MOV ECX, ESI    << ESI was lost...
    //
    // Using XCHG:
    //      XCHG ESI, EAX
    //      MOV ECX, EAX
    //
    // Note that we need to iterate while(changed) to catch all conflicts
    while(changed) {
        // conflictRegs[labelReg] records which branch reg must still be readable
        // when labelReg is written. (Aggregate-init sets element 0 to RegNOREG and
        // value-initializes the rest — relies on RegNOREG being the zero value.)
        RegNum conflictRegs[RegNumCount] = {RegNOREG};
        changed = false;
        FOREACH_BITSET_IN_UNITBV(reg, this->secondChanceRegs, BitVector)
        {
            Lifetime *labelLifetime = labelRegContent[reg];
            Lifetime *lifetime = branchRegContent[reg];

            // We don't have an XCHG for SSE2 regs
            if (lifetime == labelLifetime || IRType_IsFloat(RegTypes[reg]))
            {
                continue;
            }
            if (this->NeedsLoopBackEdgeCompensation(lifetime, labelInstr))
            {
                // Mismatch, we need to insert compensation code
                *pInsertionInstr = this->EnsureAirlock(needsAirlock, pHasAirlock, *pInsertionInstr, pInsertionStartInstr, branchInstr, labelInstr);

                if (conflictRegs[reg] != RegNOREG)
                {
                    // Eliminate conflict with an XCHG
                    IR::RegOpnd *reg1 = IR::RegOpnd::New(branchRegContent[reg]->sym, (RegNum)reg, RegTypes[reg], this->func);
                    IR::RegOpnd *reg2 = IR::RegOpnd::New(branchRegContent[reg]->sym, conflictRegs[reg], RegTypes[reg], this->func);
                    IR::Instr *instrXchg = IR::Instr::New(Js::OpCode::XCHG, reg1, reg1, reg2, this->func);
                    (*pInsertionInstr)->InsertBefore(instrXchg);
                    instrXchg->CopyNumber(*pInsertionInstr);

                    // Swap the two slots in the branch-side map to mirror the XCHG.
                    Lifetime *tmpLifetime = branchRegContent[reg];
                    branchRegContent[reg] = branchRegContent[conflictRegs[reg]];
                    branchRegContent[conflictRegs[reg]] = tmpLifetime;
                    // Note: deliberately reassigns the FOREACH loop variable so the
                    // conflict scan below runs against the swapped-in register.
                    reg = conflictRegs[reg];
                    changed = true;
                }
                // Find where this lifetime lives on the label side; writing that
                // label reg would clobber a pending source, so record the conflict.
                RegNum labelReg = RegNOREG;
                FOREACH_INT_REG(regIter)
                {
                    if (labelRegContent[regIter] == branchRegContent[reg])
                    {
                        labelReg = regIter;
                        break;
                    }
                } NEXT_INT_REG;

                if (labelReg != RegNOREG)
                {
                    conflictRegs[labelReg] = (RegNum)reg;
                }
            }
        } NEXT_BITSET_IN_UNITBV;
    }
}
  3988. #endif
  3989. RegNum
  3990. LinearScan::SecondChanceAllocation(Lifetime *lifetime, bool force)
  3991. {
  3992. if (PHASE_OFF(Js::SecondChancePhase, this->func) || this->func->HasTry())
  3993. {
  3994. return RegNOREG;
  3995. }
  3996. // Don't start a second chance allocation from a helper block
  3997. if (lifetime->dontAllocate || this->IsInHelperBlock() || lifetime->isDeadStore)
  3998. {
  3999. return RegNOREG;
  4000. }
  4001. Assert(lifetime->isSpilled);
  4002. Assert(lifetime->sym->IsConst() || lifetime->sym->IsAllocated());
  4003. RegNum oldReg = lifetime->reg;
  4004. RegNum reg;
  4005. if (lifetime->start == this->currentInstr->GetNumber() || lifetime->end == this->currentInstr->GetNumber())
  4006. {
  4007. // No point doing second chance if the lifetime ends here, or starts here (normal allocation would
  4008. // have found a register if one is available).
  4009. return RegNOREG;
  4010. }
  4011. if (lifetime->sym->IsConst())
  4012. {
  4013. // Can't second-chance allocate because we might have deleted the initial def instr, after
  4014. // having set the reg content on a forward branch...
  4015. return RegNOREG;
  4016. }
  4017. lifetime->reg = RegNOREG;
  4018. lifetime->isSecondChanceAllocated = true;
  4019. reg = this->FindReg(lifetime, nullptr, force);
  4020. lifetime->reg = oldReg;
  4021. if (reg == RegNOREG)
  4022. {
  4023. lifetime->isSecondChanceAllocated = false;
  4024. return reg;
  4025. }
  4026. // Success!! We're re-allocating this lifetime...
  4027. this->SecondChanceAllocateToReg(lifetime, reg);
  4028. return reg;
  4029. }
void LinearScan::SecondChanceAllocateToReg(Lifetime *lifetime, RegNum reg)
{
    // Commit a second-chance allocation: move this (currently spilled) lifetime
    // into 'reg' and mark the allocation as starting at the current instruction.
    RegNum oldReg = lifetime->reg;

    // If the old register was being tracked as a temp for this lifetime, release it.
    if (oldReg != RegNOREG && this->tempRegLifetimes[oldReg] == lifetime)
    {
        this->tempRegs.Clear(oldReg);
    }
    lifetime->isSpilled = false;
    lifetime->isSecondChanceAllocated = true;
    lifetime->lastAllocationStart = this->currentInstr->GetNumber();

    // Clear the stale register before re-assigning; AssignActiveReg presumably
    // records the new register on the lifetime — not visible here, confirm.
    lifetime->reg = RegNOREG;
    this->AssignActiveReg(lifetime, reg);

    // Remember this reg was second-chance allocated; merge-point compensation
    // (InsertSecondChanceCompensation) iterates over this bit vector.
    this->secondChanceRegs.Set(reg);
    // Uses recorded under the previous allocation no longer apply.
    lifetime->sym->scratch.linearScan.lifetime->useList.Clear();

#if DBG_DUMP
    if (PHASE_TRACE(Js::SecondChancePhase, this->func))
    {
        Output::Print(_u("**** Second chance: "));
        lifetime->sym->Dump();
        Output::Print(_u("\t Reg: %S  "), RegNames[reg]);
        Output::Print(_u(" SpillCount:%d  Length:%d   Cost:%d  %S\n"),
            lifetime->useCount, lifetime->end - lifetime->start, this->GetSpillCost(lifetime),
            lifetime->isLiveAcrossCalls ? "LiveAcrossCalls" : "");
    }
#endif
}
IR::Instr *
LinearScan::InsertAirlock(IR::BranchInstr *branchInstr, IR::LabelInstr *labelInstr)
{
    // Insert a new block on a flow arc:
    //   JEQ L1             JEQ L2
    //   ...        =>      ...
    //   <fallthrough>      JMP L1
    //   L1:                L2:
    //                      <new block>
    //                      L1:
    // An airlock is needed when we need to add code on a flow arc, and the code can't
    // be added directly at the source or sink of that flow arc without impacting other
    // code paths.
    //
    // Returns labelInstr; callers insert compensation code before it, which lands
    // inside the new airlock block.

    // The airlock inherits helper-ness from the target label, or from the block
    // the branch comes from.
    bool isOpHelper = labelInstr->isOpHelper;

    if (!isOpHelper)
    {
        // Check if branch is coming from helper block.
        IR::Instr *prevLabel = branchInstr->m_prev;
        while (prevLabel && !prevLabel->IsLabelInstr())
        {
            prevLabel = prevLabel->m_prev;
        }
        if (prevLabel && prevLabel->AsLabelInstr()->isOpHelper)
        {
            isOpHelper = true;
        }
    }
    IR::LabelInstr *airlockLabel = IR::LabelInstr::New(Js::OpCode::Label, this->func, isOpHelper);
    airlockLabel->SetRegion(this->currentRegion);
#if DBG
    if (isOpHelper)
    {
        // Suppress helper/non-helper flow assertions across this synthetic edge.
        if (branchInstr->m_isHelperToNonHelperBranch)
        {
            labelInstr->m_noHelperAssert = true;
        }
        if (labelInstr->isOpHelper && labelInstr->m_noHelperAssert)
        {
            airlockLabel->m_noHelperAssert = true;
        }
    }
#endif
    // Retarget the branch to the airlock label.
    bool replaced = branchInstr->ReplaceTarget(labelInstr, airlockLabel);
    Assert(replaced);

    IR::Instr * prevInstr = labelInstr->GetPrevRealInstrOrLabel();
    if (prevInstr->HasFallThrough())
    {
        // The instruction before the target falls through; branch it over the
        // airlock so only the retargeted branch executes the compensation code.
        IR::BranchInstr *branchOverAirlock = IR::BranchInstr::New(LowererMD::MDUncondBranchOpcode, labelInstr, this->func);
        prevInstr->InsertAfter(branchOverAirlock);
        branchOverAirlock->CopyNumber(prevInstr);
        prevInstr = branchOverAirlock;
        // Mark as airlock so ProcessSecondChanceBoundary skips this new branch.
        branchOverAirlock->m_isAirlock = true;
        branchOverAirlock->m_regContent = nullptr;
    }

    prevInstr->InsertAfter(airlockLabel);
    airlockLabel->CopyNumber(prevInstr);

    // NOTE(review): this assignment is dead — prevInstr is not read before return.
    prevInstr = labelInstr->GetPrevRealInstrOrLabel();
    return labelInstr;
}
  4115. void
  4116. LinearScan::SaveRegContent(IR::Instr *instr)
  4117. {
  4118. bool isLabelLoopTop = false;
  4119. Lifetime ** regContent = AnewArrayZ(this->tempAlloc, Lifetime *, RegNumCount);
  4120. if (instr->IsBranchInstr())
  4121. {
  4122. instr->AsBranchInstr()->m_regContent = regContent;
  4123. }
  4124. else
  4125. {
  4126. Assert(instr->IsLabelInstr());
  4127. Assert(instr->AsLabelInstr()->m_isLoopTop);
  4128. instr->AsLabelInstr()->m_regContent = regContent;
  4129. isLabelLoopTop = true;
  4130. }
  4131. js_memcpy_s(regContent, (RegNumCount * sizeof(Lifetime *)), this->regContent, sizeof(this->regContent));
  4132. #if DBG
  4133. FOREACH_SLIST_ENTRY(Lifetime *, lifetime, this->activeLiveranges)
  4134. {
  4135. Assert(regContent[lifetime->reg] == lifetime);
  4136. } NEXT_SLIST_ENTRY;
  4137. #endif
  4138. }
  4139. bool LinearScan::RegsAvailable(IRType type)
  4140. {
  4141. if (IRType_IsFloat(type) || IRType_IsSimd128(type))
  4142. {
  4143. return (this->floatRegUsedCount < FLOAT_REG_COUNT);
  4144. }
  4145. else
  4146. {
  4147. return (this->intRegUsedCount < INT_REG_COUNT);
  4148. }
  4149. }
  4150. uint LinearScan::GetRemainingHelperLength(Lifetime *const lifetime)
  4151. {
  4152. // Walk the helper block linked list starting from the next helper block until the end of the lifetime
  4153. uint helperLength = 0;
  4154. SList<OpHelperBlock>::Iterator it(opHelperBlockIter);
  4155. Assert(it.IsValid());
  4156. const uint end = max(currentInstr->GetNumber(), lifetime->end);
  4157. do
  4158. {
  4159. const OpHelperBlock &helper = it.Data();
  4160. const uint helperStart = helper.opHelperLabel->GetNumber();
  4161. if(helperStart > end)
  4162. {
  4163. break;
  4164. }
  4165. const uint helperEnd = min(end, helper.opHelperEndInstr->GetNumber());
  4166. helperLength += helperEnd - helperStart;
  4167. if(helperEnd != helper.opHelperEndInstr->GetNumber() || !helper.opHelperEndInstr->IsLabelInstr())
  4168. {
  4169. // A helper block that ends at a label does not return to the function. Since this helper block does not end
  4170. // at a label, include the end instruction as well.
  4171. ++helperLength;
  4172. }
  4173. } while(it.Next());
  4174. return helperLength;
  4175. }
  4176. uint LinearScan::CurrentOpHelperVisitedLength(IR::Instr *const currentInstr) const
  4177. {
  4178. Assert(currentInstr);
  4179. if(!currentOpHelperBlock)
  4180. {
  4181. return 0;
  4182. }
  4183. // Consider the current instruction to have not yet been visited
  4184. Assert(currentInstr->GetNumber() >= currentOpHelperBlock->opHelperLabel->GetNumber());
  4185. return currentInstr->GetNumber() - currentOpHelperBlock->opHelperLabel->GetNumber();
  4186. }
IR::Instr * LinearScan::TryHoistLoad(IR::Instr *instr, Lifetime *lifetime)
{
    // If we are loading a lifetime into a register inside a loop, try to hoist that load outside the loop
    // if that register hasn't been used yet.
    // Returns the instruction to insert the load before: either 'instr' itself
    // (no hoist) or the loop-top label / landing pad (hoisted).
    RegNum reg = lifetime->reg;
    IR::Instr *insertInstr = instr;

    if (PHASE_OFF(Js::RegHoistLoadsPhase, this->func))
    {
        return insertInstr;
    }

    // Hoisting across EH region boundaries is not attempted.
    if ((this->func->HasTry() && !this->func->DoOptimizeTry()) || (this->currentRegion && this->currentRegion->GetType() != RegionTypeRoot))
    {
        return insertInstr;
    }

    // Register unused, and lifetime unused yet.
    if (this->IsInLoop() && !this->curLoop->regAlloc.regUseBv.Test(reg)
        && !this->curLoop->regAlloc.defdInLoopBv->Test(lifetime->sym->m_id)
        && !this->curLoop->regAlloc.symRegUseBv->Test(lifetime->sym->m_id)
        && !this->curLoop->regAlloc.hasAirLock)
    {
        // Let's hoist!
        insertInstr = insertInstr->m_prev;

        // Walk each instructions until the top of the loop looking for branches
        while (!insertInstr->IsLabelInstr() || !insertInstr->AsLabelInstr()->m_isLoopTop || !insertInstr->AsLabelInstr()->GetLoop()->IsDescendentOrSelf(this->curLoop))
        {
            if (insertInstr->IsBranchInstr() && insertInstr->AsBranchInstr()->m_regContent)
            {
                IR::BranchInstr *branchInstr = insertInstr->AsBranchInstr();
                // That lifetime might have been in another register coming into the loop, and spilled before used.
                // Clear the reg content.
                FOREACH_REG(regIter)
                {
                    if (branchInstr->m_regContent[regIter] == lifetime)
                    {
                        branchInstr->m_regContent[regIter] = nullptr;
                    }
                } NEXT_REG;
                // Set the regContent for that reg to the lifetime on this branch
                branchInstr->m_regContent[reg] = lifetime;
            }
            insertInstr = insertInstr->m_prev;
        }

        IR::LabelInstr *loopTopLabel = insertInstr->AsLabelInstr();

        // Set the reg content for the loop top correctly as well
        FOREACH_REG(regIter)
        {
            if (loopTopLabel->m_regContent[regIter] == lifetime)
            {
                loopTopLabel->m_regContent[regIter] = nullptr;
                this->curLoop->regAlloc.loopTopRegContent[regIter] = nullptr;
            }
        } NEXT_REG;

        Assert(loopTopLabel->GetLoop() == this->curLoop);
        loopTopLabel->m_regContent[reg] = lifetime;
        this->curLoop->regAlloc.loopTopRegContent[reg] = lifetime;

        this->RecordLoopUse(lifetime, reg);

        IR::LabelInstr *loopLandingPad = nullptr;

        Assert(loopTopLabel->GetNumber() != Js::Constants::NoByteCodeOffset);

        // Insert load in landing pad.
        // Redirect branches to new landing pad.
        // Only forward branches (loop entries) are redirected; back-edges keep
        // targeting the loop top so they don't re-execute the hoisted load.
        FOREACH_SLISTCOUNTED_ENTRY_EDITING(IR::BranchInstr *, branchInstr, &loopTopLabel->labelRefs, iter)
        {
            Assert(branchInstr->GetNumber() != Js::Constants::NoByteCodeOffset);
            // <= because the branch may be newly inserted and have the same instr number as the loop top...
            if (branchInstr->GetNumber() <= loopTopLabel->GetNumber())
            {
                if (!loopLandingPad)
                {
                    loopLandingPad = IR::LabelInstr::New(Js::OpCode::Label, this->func);
                    loopLandingPad->SetRegion(this->currentRegion);
                    loopTopLabel->InsertBefore(loopLandingPad);
                    loopLandingPad->CopyNumber(loopTopLabel);
                }
                branchInstr->ReplaceTarget(loopTopLabel, loopLandingPad);
            }
        } NEXT_SLISTCOUNTED_ENTRY_EDITING;
    }

    return insertInstr;
}
  4266. #if DBG_DUMP
void LinearScan::PrintStats() const
{
    // Debug-only: walk the function backward counting real instructions and
    // stack loads/stores (operands referencing allocated stack syms), weighting
    // each by loop nesting depth, then print a one-line summary.
    uint loopNest = 0;
    uint storeCount = 0;
    uint loadCount = 0;
    uint wStoreCount = 0;   // loop-weighted store count
    uint wLoadCount = 0;    // loop-weighted load count
    uint instrCount = 0;
    bool isInHelper = false;

    FOREACH_INSTR_IN_FUNC_BACKWARD(instr, this->func)
    {
        switch (instr->GetKind())
        {
        case IR::InstrKindPragma:
            continue;

        case IR::InstrKindBranch:
            // Walking backward: a loop tail branch means we are entering a loop.
            if (instr->AsBranchInstr()->IsLoopTail(this->func))
            {
                loopNest++;
            }
            instrCount++;
            break;

        case IR::InstrKindLabel:
        case IR::InstrKindProfiledLabel:
            if (instr->AsLabelInstr()->m_isLoopTop)
            {
                Assert(loopNest);
                loopNest--;
            }
            // Backward walk: the label tells us whether the code *above* it
            // (visited next) is helper code.
            isInHelper = instr->AsLabelInstr()->isOpHelper;
            break;

        default:
        {
            Assert(instr->IsRealInstr());

            // Helper-block instructions are excluded from the stats.
            if (isInHelper)
            {
                continue;
            }

            IR::Opnd *dst = instr->GetDst();
            if (dst && dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsStackSym() && dst->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
            {
                storeCount++;
                wStoreCount += LinearScan::GetUseSpillCost(loopNest, false);
            }
            IR::Opnd *src1 = instr->GetSrc1();
            if (src1)
            {
                if (src1->IsSymOpnd() && src1->AsSymOpnd()->m_sym->IsStackSym() && src1->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
                {
                    loadCount++;
                    wLoadCount += LinearScan::GetUseSpillCost(loopNest, false);
                }
                IR::Opnd *src2 = instr->GetSrc2();
                if (src2 && src2->IsSymOpnd() && src2->AsSymOpnd()->m_sym->IsStackSym() && src2->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
                {
                    loadCount++;
                    wLoadCount += LinearScan::GetUseSpillCost(loopNest, false);
                }
            }
        }
        break;
        }
    } NEXT_INSTR_IN_FUNC_BACKWARD;

    Assert(loopNest == 0);

    this->func->DumpFullFunctionName();
    Output::SkipToColumn(45);

    Output::Print(_u("Instrs:%5d, Lds:%4d, Strs:%4d, WLds: %4d, WStrs: %4d, WRefs: %4d\n"),
        instrCount, loadCount, storeCount, wLoadCount, wStoreCount, wLoadCount+wStoreCount);
}
  4336. #endif
  4337. #ifdef _M_IX86
  4338. # if ENABLE_DEBUG_CONFIG_OPTIONS
  4339. IR::Instr * LinearScan::GetIncInsertionPoint(IR::Instr *instr)
  4340. {
  4341. // Make sure we don't insert an INC between an instr setting the condition code, and one using it.
  4342. IR::Instr *instrNext = instr;
  4343. while(!EncoderMD::UsesConditionCode(instrNext) && !EncoderMD::SetsConditionCode(instrNext))
  4344. {
  4345. if (instrNext->IsLabelInstr() || instrNext->IsExitInstr() || instrNext->IsBranchInstr())
  4346. {
  4347. break;
  4348. }
  4349. instrNext = instrNext->GetNextRealInstrOrLabel();
  4350. }
  4351. if (instrNext->IsLowered() && EncoderMD::UsesConditionCode(instrNext))
  4352. {
  4353. IR::Instr *instrPrev = instr->GetPrevRealInstrOrLabel();
  4354. while(!EncoderMD::SetsConditionCode(instrPrev))
  4355. {
  4356. instrPrev = instrPrev->GetPrevRealInstrOrLabel();
  4357. Assert(!instrPrev->IsLabelInstr());
  4358. }
  4359. return instrPrev;
  4360. }
  4361. return instr;
  4362. }
  4363. void LinearScan::DynamicStatsInstrument()
  4364. {
  4365. {
  4366. IR::Instr *firstInstr = this->func->m_headInstr;
  4367. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(this->func->GetJITFunctionBody()->GetCallCountStatsAddr(), TyUint32, this->func);
  4368. firstInstr->InsertAfter(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4369. }
  4370. FOREACH_INSTR_IN_FUNC(instr, this->func)
  4371. {
  4372. if (!instr->IsRealInstr() || !instr->IsLowered())
  4373. {
  4374. continue;
  4375. }
  4376. if (EncoderMD::UsesConditionCode(instr) && instr->GetPrevRealInstrOrLabel()->IsLabelInstr())
  4377. {
  4378. continue;
  4379. }
  4380. IR::Opnd *dst = instr->GetDst();
  4381. if (dst && dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsStackSym() && dst->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
  4382. {
  4383. IR::Instr *insertionInstr = this->GetIncInsertionPoint(instr);
  4384. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(this->func->GetJITFunctionBody()->GetRegAllocStoreCountAddr(), TyUint32, this->func);
  4385. insertionInstr->InsertBefore(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4386. }
  4387. IR::Opnd *src1 = instr->GetSrc1();
  4388. if (src1)
  4389. {
  4390. if (src1->IsSymOpnd() && src1->AsSymOpnd()->m_sym->IsStackSym() && src1->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
  4391. {
  4392. IR::Instr *insertionInstr = this->GetIncInsertionPoint(instr);
  4393. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(this->func->GetJITFunctionBody()->GetRegAllocStoreCountAddr(), TyUint32, this->func);
  4394. insertionInstr->InsertBefore(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4395. }
  4396. IR::Opnd *src2 = instr->GetSrc2();
  4397. if (src2 && src2->IsSymOpnd() && src2->AsSymOpnd()->m_sym->IsStackSym() && src2->AsSymOpnd()->m_sym->AsStackSym()->IsAllocated())
  4398. {
  4399. IR::Instr *insertionInstr = this->GetIncInsertionPoint(instr);
  4400. IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(this->func->GetJITFunctionBody()->GetRegAllocStoreCountAddr(), TyUint32, this->func);
  4401. insertionInstr->InsertBefore(IR::Instr::New(Js::OpCode::INC, memRefOpnd, memRefOpnd, this->func));
  4402. }
  4403. }
  4404. } NEXT_INSTR_IN_FUNC;
  4405. }
  4406. # endif //ENABLE_DEBUG_CONFIG_OPTIONS
  4407. #endif // _M_IX86
  4408. IR::Instr* LinearScan::InsertMove(IR::Opnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr)
  4409. {
  4410. IR::Instr *instrPrev = insertBeforeInstr->m_prev;
  4411. IR::Instr *instrRet = Lowerer::InsertMove(dst, src, insertBeforeInstr);
  4412. for (IR::Instr *instr = instrPrev->m_next; instr != insertBeforeInstr; instr = instr->m_next)
  4413. {
  4414. instr->CopyNumber(insertBeforeInstr);
  4415. }
  4416. return instrRet;
  4417. }
  4418. IR::Instr* LinearScan::InsertLea(IR::RegOpnd *dst, IR::Opnd *src, IR::Instr *const insertBeforeInstr)
  4419. {
  4420. IR::Instr *instrPrev = insertBeforeInstr->m_prev;
  4421. IR::Instr *instrRet = Lowerer::InsertLea(dst, src, insertBeforeInstr, true);
  4422. for (IR::Instr *instr = instrPrev->m_next; instr != insertBeforeInstr; instr = instr->m_next)
  4423. {
  4424. instr->CopyNumber(insertBeforeInstr);
  4425. }
  4426. return instrRet;
  4427. }