  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. #include "CollectionState.h"
  7. namespace Js
  8. {
  9. class Profiler;
  10. enum Phase: unsigned short;
  11. };
  12. namespace JsUtil
  13. {
  14. class ThreadService;
  15. };
  16. #ifdef STACK_BACK_TRACE
  17. class StackBackTraceNode;
  18. #endif
  19. class ScriptEngineBase;
  20. class JavascriptThreadService;
  21. #ifdef PROFILE_MEM
  22. struct RecyclerMemoryData;
  23. #endif
  24. namespace Memory
  25. {
  26. template <typename T> class RecyclerRootPtr;
// RAII guard that sets a caller-owned bool flag on construction and clears
// it on destruction. The constructor asserts the flag was not already set,
// catching unintended re-entry into the guarded state.
class AutoBooleanToggle
{
public:
    // b              - flag to toggle; must be non-null and currently false.
    // value          - value to store while the guard is alive (default true).
    // valueMayChange - DBG only: suppresses the check that *b still equals
    //                  'value' when the guard releases (for flags that other
    //                  code may legitimately flip while the guard is active).
    AutoBooleanToggle(bool * b, bool value = true, bool valueMayChange = false)
        : b(b)
    {
        Assert(!(*b));
        *b = value;
#if DBG
        this->value = value;
        this->valueMayChange = valueMayChange;
#endif
    }

    ~AutoBooleanToggle()
    {
        // b is null if Leave() was already called; nothing to restore then.
        if (b)
        {
            Assert(valueMayChange || *b == value);
            *b = false;
        }
    }

    // Releases the flag early; the destructor becomes a no-op afterwards.
    // NOTE(review): unlike the destructor, this does not null-check 'b',
    // so calling Leave() twice would dereference null — confirm callers
    // only invoke it once.
    void Leave()
    {
        Assert(valueMayChange || *b == value);
        *b = false;
        b = nullptr;
    }

private:
    bool * b; // flag being guarded; null once released via Leave()
#if DBG
    bool value;          // value we stored, for the release-time check
    bool valueMayChange; // if true, skip the release-time value check
#endif
};
// RAII guard that overwrites *var with 'val' for the guard's lifetime and
// restores the previous value on destruction.
template <class T>
class AutoRestoreValue
{
public:
    AutoRestoreValue(T* var, const T& val):
        variable(var)
    {
        Assert(var);
        oldValue = (*variable);
        (*variable) = val;
#ifdef DEBUG
        // Remember what we set so the destructor can detect unexpected
        // mutation of the variable while the guard was active.
        debugSetValue = val;
#endif
    }

    ~AutoRestoreValue()
    {
        // NOTE(review): debugSetValue exists only under #ifdef DEBUG (this
        // class uses DEBUG while AutoBooleanToggle above uses #if DBG);
        // presumably Assert compiles away when DEBUG is undefined so this
        // expression is never evaluated — confirm the Assert definition.
        Assert((*variable) == debugSetValue);
        (*variable) = oldValue;
    }

private:
#ifdef DEBUG
    T debugSetValue; // value we installed, checked at restore time
#endif
    T* variable; // variable being temporarily overridden
    T oldValue;  // value to restore on destruction
};
  87. class Recycler;
// Functor handed to hosts/roots enumeration so external code can report
// memory regions for the recycler to scan. operator() is defined elsewhere
// (out of line); it receives a pointer array and its size in bytes.
class RecyclerScanMemoryCallback
{
public:
    RecyclerScanMemoryCallback(Recycler* recycler) : recycler(recycler) {}
    void operator()(void** obj, size_t byteCount);
private:
    Recycler* recycler; // non-owning; the recycler that performs the scan
};
// Empty tag type that lifts an ObjectInfoBits value into the type system so
// placement-new overloads can be selected on the allocation's info bits
// (see RecyclerNewWithInfoBits / RecyclerNewEnumClass below).
template<ObjectInfoBits infoBits>
struct InfoBitsWrapper{};
// Allocation macro
//
// RecyclerNew* macros route object construction through the Recycler's
// allocator entry points (the AllocatorNew* wrapper family). Suffix legend:
//   ...Z         - zero-initialized memory
//   ...Plus      - 'size' extra bytes allocated after the object
//   ...Struct    - plain struct, no constructor arguments
//   ...Array     - 'count' contiguous elements
//   ...Finalized / ...Tracked - object is allocated through the finalizable/
//       tracked paths; the double static_cast (T* <- FinalizableObject* <- ...)
//       forces a compile error unless T actually derives from FinalizableObject.
#define RecyclerNew(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocInlined, T, __VA_ARGS__)
#define RecyclerNewPlus(recycler,size,T,...) AllocatorNewPlus(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusZ(recycler,size,T,...) AllocatorNewPlusZ(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocZeroInlined, T, __VA_ARGS__)
#define RecyclerNewStruct(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocInlined, T)
#define RecyclerNewStructZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocZeroInlined, T)
#define RecyclerNewStructPlus(recycler,size,T) AllocatorNewStructPlus(Recycler, recycler, size, T)
#define RecyclerNewArray(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, Alloc, T, count)
#define RecyclerNewArrayZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZero, T, count)
#define RecyclerNewFinalized(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedInlined, T, __VA_ARGS__)))
#define RecyclerNewFinalizedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalized, size, T, __VA_ARGS__)))
#define RecyclerNewTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedInlined, T, __VA_ARGS__)))
#define RecyclerNewEnumClass(recycler, enumClass, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<enumClass>()) T(__VA_ARGS__)
#define RecyclerNewWithInfoBits(recycler, infoBits, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<infoBits>()) T(__VA_ARGS__)
#define RecyclerNewFinalizedClientTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedClientTrackedInlined, T, __VA_ARGS__)))
#if defined(RECYCLER_WRITE_BARRIER_ALLOC)
// Software-write-barrier variants: same shapes as above, but allocated via
// the *WithBarrier allocator entry points (or with WithBarrierBit OR'ed into
// the info bits for the placement-new forms).
#define RecyclerNewWithBarrier(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocWithBarrier, T, __VA_ARGS__)
#define RecyclerNewWithBarrierPlus(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocWithBarrier, size, T, __VA_ARGS__)
#define RecyclerNewWithBarrierPlusZ(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocZeroWithBarrier, size, T, __VA_ARGS__)
#define RecyclerNewWithBarrierZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocZeroWithBarrier, T, __VA_ARGS__)
#define RecyclerNewWithBarrierStruct(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocWithBarrier, T)
#define RecyclerNewWithBarrierStructZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocZeroWithBarrier, T)
#define RecyclerNewWithBarrierStructPlus(recycler,size,T) AllocatorNewStructPlusBase(Recycler, recycler, AllocWithBarrier, size, T)
#define RecyclerNewWithBarrierArray(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocWithBarrier, T, count)
#define RecyclerNewWithBarrierArrayZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZeroWithBarrier, T, count)
#define RecyclerNewWithBarrierFinalized(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedWithBarrierInlined, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierFinalizedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedWithBarrier, size, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedWithBarrierInlined, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierEnumClass(recycler, enumClass, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<(ObjectInfoBits)(enumClass | WithBarrierBit)>()) T(__VA_ARGS__)
#define RecyclerNewWithBarrierWithInfoBits(recycler, infoBits, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<(ObjectInfoBits)(infoBits | WithBarrierBit)>()) T(__VA_ARGS__)
#define RecyclerNewWithBarrierFinalizedClientTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedClientTrackedWithBarrierInlined, T, __VA_ARGS__)))
#endif
#ifndef RECYCLER_WRITE_BARRIER
// No write barrier in this build configuration: alias every WithBarrier
// macro to its plain counterpart so call sites compile unchanged.
#define RecyclerNewWithBarrier RecyclerNew
#define RecyclerNewWithBarrierPlus RecyclerNewPlus
#define RecyclerNewWithBarrierPlusZ RecyclerNewPlusZ
#define RecyclerNewWithBarrierZ RecyclerNewZ
#define RecyclerNewWithBarrierStruct RecyclerNewStruct
#define RecyclerNewWithBarrierStructZ RecyclerNewStructZ
#define RecyclerNewWithBarrierStructPlus RecyclerNewStructPlus
#define RecyclerNewWithBarrierArray RecyclerNewArray
#define RecyclerNewWithBarrierArrayZ RecyclerNewArrayZ
#define RecyclerNewWithBarrierFinalized RecyclerNewFinalized
#define RecyclerNewWithBarrierFinalizedPlus RecyclerNewFinalizedPlus
#define RecyclerNewWithBarrierTracked RecyclerNewTracked
#define RecyclerNewWithBarrierEnumClass RecyclerNewEnumClass
#define RecyclerNewWithBarrierWithInfoBits RecyclerNewWithInfoBits
#define RecyclerNewWithBarrierFinalizedClientTracked RecyclerNewFinalizedClientTracked
#endif
// Leaf allocators
// "Leaf" allocations go through the AllocLeaf* entry points; presumably such
// objects contain no recycler pointers and are excluded from GC scanning —
// confirm against the heap block documentation.
#define RecyclerNewLeaf(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafInlined, T, __VA_ARGS__)
#define RecyclerNewLeafZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafZeroInlined, T, __VA_ARGS__)
#define RecyclerNewPlusLeaf(recycler,size,T,...) AllocatorNewPlusLeaf(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusLeafZ(recycler,size,T,...) AllocatorNewPlusLeafZ(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewStructLeaf(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocLeafInlined, T)
#define RecyclerNewStructLeafZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocLeafZeroInlined, T)
#define RecyclerNewArrayLeafZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafZero, T, count)
#define RecyclerNewArrayLeaf(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeaf, T, count)
#define RecyclerNewFinalizedLeaf(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedLeafInlined, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeafPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedLeaf, size, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeaf(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedLeafInlined, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeafPlusZ(recycler,size,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocZeroTrackedLeafInlined, size, T, __VA_ARGS__)))
#ifdef RECYCLER_VISITED_HOST
// Raw-size allocation entry points for host objects that participate in the
// visited/traced GC protocol (RecyclerVisitedHost* info bits).
#define RecyclerAllocVisitedHostTracedAndFinalizedZero(recycler,size) recycler->AllocVisitedHost<RecyclerVisitedHostTracedFinalizableBits>(size)
#define RecyclerAllocVisitedHostFinalizedZero(recycler,size) recycler->AllocVisitedHost<RecyclerVisitedHostFinalizableBits>(size)
#define RecyclerAllocVisitedHostTracedZero(recycler,size) recycler->AllocVisitedHost<RecyclerVisitedHostTracedBits>(size)
#define RecyclerAllocLeafZero(recycler,size) recycler->AllocVisitedHost<LeafBit>(size)
#endif
#ifdef TRACE_OBJECT_LIFETIME
// Object-lifetime tracing builds: route through the *Trace allocator entry
// points so allocations can be logged.
#define RecyclerNewLeafTrace(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafTrace, T, __VA_ARGS__)
#define RecyclerNewLeafZTrace(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafZeroTrace, T, __VA_ARGS__)
#define RecyclerNewPlusLeafTrace(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocLeafTrace, size, T, __VA_ARGS__)
#define RecyclerNewArrayLeafZTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafZeroTrace, T, count)
#define RecyclerNewArrayTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocTrace, T, count)
#define RecyclerNewArrayZTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZeroTrace, T, count)
#define RecyclerNewArrayLeafTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafTrace, T, count)
#define RecyclerNewFinalizedTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedTrace, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeafTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedLeafTrace, T, __VA_ARGS__)))
#define RecyclerNewFinalizedPlusTrace(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedTrace, size, T, __VA_ARGS__)))
#define RecyclerNewTrackedTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedTrace, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeafTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedLeafTrace, T, __VA_ARGS__)))
#else
// Non-tracing builds: the *Trace names degrade to the plain allocators.
#define RecyclerNewLeafTrace RecyclerNewLeaf
#define RecyclerNewLeafZTrace RecyclerNewLeafZ
#define RecyclerNewPlusLeafTrace RecyclerNewPlusLeaf
#define RecyclerNewArrayLeafZTrace RecyclerNewArrayLeafZ
#define RecyclerNewArrayTrace RecyclerNewArray
#define RecyclerNewArrayZTrace RecyclerNewArrayZ
#define RecyclerNewArrayLeafTrace RecyclerNewArrayLeaf
#define RecyclerNewFinalizedTrace RecyclerNewFinalized
#define RecyclerNewFinalizedLeafTrace RecyclerNewFinalizedLeaf
#define RecyclerNewFinalizedPlusTrace RecyclerNewFinalizedPlus
#define RecyclerNewTrackedTrace RecyclerNewTracked
#define RecyclerNewTrackedLeafTrace RecyclerNewTrackedLeaf
#endif
  194. #ifdef RECYCLER_TRACE
  195. #define RecyclerVerboseTrace(flags, ...) \
  196. if (flags.Verbose && flags.Trace.IsEnabled(Js::RecyclerPhase)) \
  197. { \
  198. Output::Print(__VA_ARGS__); \
  199. }
  200. #define AllocationVerboseTrace(flags, ...) \
  201. if (flags.Verbose && flags.Trace.IsEnabled(Js::MemoryAllocationPhase)) \
  202. { \
  203. Output::Print(__VA_ARGS__); \
  204. }
  205. #define LargeAllocationVerboseTrace(flags, ...) \
  206. if (flags.Verbose && \
  207. (flags.Trace.IsEnabled(Js::MemoryAllocationPhase) || \
  208. flags.Trace.IsEnabled(Js::LargeMemoryAllocationPhase))) \
  209. { \
  210. Output::Print(__VA_ARGS__); \
  211. }
  212. #define PageAllocatorAllocationVerboseTrace(flags, ...) \
  213. if (flags.Verbose && flags.Trace.IsEnabled(Js::PageAllocatorAllocPhase)) \
  214. { \
  215. Output::Print(__VA_ARGS__); \
  216. }
  217. #else
  218. #define RecyclerVerboseTrace(...)
  219. #define AllocationVerboseTrace(...)
  220. #define LargeAllocationVerboseTrace(...)
  221. #endif
// Placement-new into a specific HeapInfo, and the matching explicit free
// (HeapFree) for memory allocated that way.
#define RecyclerHeapNew(recycler,heapInfo,T,...) new (recycler, heapInfo) T(__VA_ARGS__)
#define RecyclerHeapDelete(recycler,heapInfo,addr) (static_cast<Recycler *>(recycler)->HeapFree(heapInfo,addr))
// Host-supplied callback invoked to mark external roots during collection.
typedef void (__cdecl* ExternalRootMarker)(void *);
// Bit flags controlling how a garbage collection is triggered and performed.
// Layout: low byte = heuristics (when to collect), 0x0000?000-0x0?000000 =
// overrides (behavioral tweaks), top nibble = collection modes. The named
// combinations below are the values call sites actually pass.
enum CollectionFlags
{
    // Heuristics: conditions that must hold for the collection to start.
    CollectHeuristic_AllocSize          = 0x00000001,
    CollectHeuristic_Time               = 0x00000002,
    CollectHeuristic_TimeIfScriptActive = 0x00000004,
    CollectHeuristic_TimeIfInScript     = 0x00000008,
    CollectHeuristic_Never              = 0x00000080,
    CollectHeuristic_Mask               = 0x000000FF,

    // Overrides: modify how the collection proceeds.
    CollectOverride_FinishConcurrent        = 0x00001000,
    CollectOverride_ExhaustiveCandidate     = 0x00002000,
    CollectOverride_ForceInThread           = 0x00004000,
    CollectOverride_AllowDispose            = 0x00008000,
    CollectOverride_AllowReentrant          = 0x00010000,
    CollectOverride_ForceFinish             = 0x00020000,
    CollectOverride_Explicit                = 0x00040000,
    CollectOverride_DisableIdleFinish       = 0x00080000,
    CollectOverride_BackgroundFinishMark    = 0x00100000,
    CollectOverride_FinishConcurrentTimeout = 0x00200000,
    CollectOverride_NoExhaustiveCollect     = 0x00400000,
    CollectOverride_SkipStack               = 0x01000000,
    CollectOverride_CheckScriptContextClose = 0x02000000,

    // Modes: what kind of collection to run.
    CollectMode_Partial     = 0x08000000,
    CollectMode_Concurrent  = 0x10000000,
    CollectMode_Exhaustive  = 0x20000000,
    CollectMode_DecommitNow = 0x40000000,
    CollectMode_CacheCleanup= 0x80000000,

    // Named combinations used by callers.
    CollectNowForceInThread = CollectOverride_ForceInThread,
    CollectNowForceInThreadExternal = CollectOverride_ForceInThread | CollectOverride_AllowDispose,
    CollectNowForceInThreadExternalNoStack = CollectOverride_ForceInThread | CollectOverride_AllowDispose | CollectOverride_SkipStack,
    CollectNowDefault = CollectOverride_FinishConcurrent,
    CollectNowDefaultLSCleanup = CollectOverride_FinishConcurrent | CollectOverride_AllowDispose,
    CollectNowDecommitNowExplicit = CollectNowDefault | CollectMode_DecommitNow | CollectMode_CacheCleanup | CollectOverride_Explicit | CollectOverride_AllowDispose,
    CollectNowConcurrent = CollectOverride_FinishConcurrent | CollectMode_Concurrent,
    CollectNowExhaustive = CollectOverride_FinishConcurrent | CollectMode_Exhaustive | CollectOverride_AllowDispose,
    CollectNowPartial = CollectOverride_FinishConcurrent | CollectMode_Partial,
    CollectNowConcurrentPartial = CollectMode_Concurrent | CollectNowPartial,

    CollectOnAllocation = CollectHeuristic_AllocSize | CollectHeuristic_Time | CollectMode_Concurrent | CollectMode_Partial | CollectOverride_FinishConcurrent | CollectOverride_AllowReentrant | CollectOverride_FinishConcurrentTimeout,
    CollectOnTypedArrayAllocation = CollectHeuristic_AllocSize | CollectHeuristic_Time | CollectMode_Concurrent | CollectMode_Partial | CollectOverride_FinishConcurrent | CollectOverride_AllowReentrant | CollectOverride_FinishConcurrentTimeout | CollectOverride_AllowDispose,
    CollectOnScriptIdle = CollectOverride_CheckScriptContextClose | CollectOverride_FinishConcurrent | CollectMode_Concurrent | CollectMode_CacheCleanup | CollectOverride_SkipStack,
    CollectOnScriptExit = CollectOverride_CheckScriptContextClose | CollectHeuristic_AllocSize | CollectOverride_FinishConcurrent | CollectMode_Concurrent | CollectMode_CacheCleanup,
    CollectExhaustiveCandidate = CollectHeuristic_Never | CollectOverride_ExhaustiveCandidate,
    CollectOnScriptCloseNonPrimary = CollectNowConcurrent | CollectOverride_ExhaustiveCandidate | CollectOverride_AllowDispose,
    CollectOnRecoverFromOutOfMemory = CollectOverride_ForceInThread | CollectMode_DecommitNow,
    CollectOnSuspendCleanup = CollectNowConcurrent | CollectMode_Exhaustive | CollectMode_DecommitNow | CollectOverride_DisableIdleFinish,

    FinishConcurrentOnIdle = CollectMode_Concurrent | CollectOverride_DisableIdleFinish,
    FinishConcurrentOnIdleAtRoot = CollectMode_Concurrent | CollectOverride_DisableIdleFinish | CollectOverride_SkipStack,
    FinishConcurrentDefault = CollectMode_Concurrent | CollectOverride_DisableIdleFinish | CollectOverride_BackgroundFinishMark,
    FinishConcurrentOnExitScript = FinishConcurrentDefault,
    FinishConcurrentOnEnterScript = FinishConcurrentDefault,
    FinishConcurrentOnAllocation = FinishConcurrentDefault,
    FinishDispose = CollectOverride_AllowDispose,
    FinishDisposeTimed = CollectOverride_AllowDispose | CollectHeuristic_TimeIfScriptActive,
    ForceFinishCollection = CollectOverride_ForceFinish | CollectOverride_ForceInThread,

#ifdef RECYCLER_STRESS
    CollectStress = CollectNowForceInThread,
#if ENABLE_PARTIAL_GC
    CollectPartialStress = CollectMode_Partial,
#endif
#if ENABLE_CONCURRENT_GC
    CollectBackgroundStress = CollectNowDefault,
    CollectConcurrentStress = CollectNowConcurrent,
#if ENABLE_PARTIAL_GC
    CollectConcurrentPartialStress = CollectConcurrentStress | CollectPartialStress,
#endif
#endif
#endif
#if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
    CollectNowFinalGC = CollectNowExhaustive | CollectOverride_ForceInThread | CollectOverride_SkipStack | CollectOverride_Explicit | CollectOverride_AllowDispose,
#endif
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    CollectNowExhaustiveSkipStack = CollectNowExhaustive | CollectOverride_SkipStack, // Used by test
#endif
};
// Abstract interface through which the Recycler calls back into its host at
// well-defined points of the collection cycle. Hosts implement these hooks;
// DefaultRecyclerCollectionWrapper below supplies no-op defaults.
class RecyclerCollectionWrapper
{
public:
    RecyclerCollectionWrapper() :
        _isScriptContextCloseGCPending(FALSE)
    { }

    // Member-function type used by ExecuteRecyclerCollectionFunction to
    // invoke a Recycler collection entry point.
    typedef BOOL (Recycler::*CollectionFunction)(CollectionFlags flags);

    // Collection lifecycle hooks, named for when the Recycler calls them.
    virtual void PreCollectionCallBack(CollectionFlags flags) = 0;
    virtual void PreSweepCallback() = 0;
    virtual void PreRescanMarkCallback() = 0;
    // Marks host roots; sets *stacksScannedByRuntime if the host scanned
    // thread stacks itself. Returns a byte count (see default impl).
    virtual size_t RootMarkCallback(RecyclerScanMemoryCallback& scanMemoryCallback, BOOL * stacksScannedByRuntime) = 0;
    virtual void RescanMarkTimeoutCallback() = 0;
    virtual void EndMarkCallback() = 0;
    virtual void ConcurrentCallback() = 0;
    virtual void WaitCollectionCallBack() = 0;
    virtual void PostCollectionCallBack() = 0;
    virtual BOOL ExecuteRecyclerCollectionFunction(Recycler * recycler, CollectionFunction function, CollectionFlags flags) = 0;
    virtual uint GetRandomNumber() = 0;
    virtual bool DoSpecialMarkOnScanStack() = 0;
    virtual void PostSweepRedeferralCallBack() = 0;
#ifdef FAULT_INJECTION
    virtual void DisposeScriptContextByFaultInjectionCallBack() = 0;
#endif
    virtual void DisposeObjects(Recycler * recycler) = 0;
    virtual void PreDisposeObjectsCallBack() = 0;
#ifdef ENABLE_PROJECTION
    virtual void MarkExternalWeakReferencedObjects(bool inPartialCollect) = 0;
    virtual void ResolveExternalWeakReferencedObjects() = 0;
#endif
#if DBG || defined(PROFILE_EXEC)
    // Bracket an async host operation; Start returns the previous
    // "in async" state, which is passed back to End.
    virtual bool AsyncHostOperationStart(void *) = 0;
    virtual void AsyncHostOperationEnd(bool wasInAsync, void *) = 0;
#endif

    // Accessors for the "a GC is pending because a script context closed"
    // flag, toggled by the host and consumed by the Recycler.
    BOOL GetIsScriptContextCloseGCPending()
    {
        return _isScriptContextCloseGCPending;
    }
    void ClearIsScriptContextCloseGCPending()
    {
        _isScriptContextCloseGCPending = FALSE;
    }
    void SetIsScriptContextCloseGCPending()
    {
        _isScriptContextCloseGCPending = TRUE;
    }

protected:
    BOOL _isScriptContextCloseGCPending;
};
// No-op implementation of RecyclerCollectionWrapper, used when the Recycler
// runs without a host that needs collection callbacks. Only
// ExecuteRecyclerCollectionFunction and DisposeObjects have out-of-line
// bodies; everything else does nothing.
class DefaultRecyclerCollectionWrapper : public RecyclerCollectionWrapper
{
public:
    virtual void PreCollectionCallBack(CollectionFlags flags) override {}
    virtual void PreSweepCallback() override {}
    virtual void PreRescanMarkCallback() override {}
    virtual void RescanMarkTimeoutCallback() override {}
    virtual void EndMarkCallback() override {}
    // No host roots: reports that stacks were not scanned and 0 bytes marked.
    virtual size_t RootMarkCallback(RecyclerScanMemoryCallback& scanMemoryCallback, BOOL * stacksScannedByRuntime) override { *stacksScannedByRuntime = FALSE; return 0; }
    virtual void ConcurrentCallback() override {}
    virtual void WaitCollectionCallBack() override {}
    virtual void PostCollectionCallBack() override {}
    virtual BOOL ExecuteRecyclerCollectionFunction(Recycler * recycler, CollectionFunction function, CollectionFlags flags) override;
    virtual uint GetRandomNumber() override { return 0; }
    virtual bool DoSpecialMarkOnScanStack() override { return false; }
    virtual void PostSweepRedeferralCallBack() override {}
#ifdef FAULT_INJECTION
    virtual void DisposeScriptContextByFaultInjectionCallBack() override {};
#endif
    virtual void DisposeObjects(Recycler * recycler) override;
    virtual void PreDisposeObjectsCallBack() override {};
#ifdef ENABLE_PROJECTION
    virtual void MarkExternalWeakReferencedObjects(bool inPartialCollect) override {};
    virtual void ResolveExternalWeakReferencedObjects() override {};
#endif
#if DBG || defined(PROFILE_EXEC)
    virtual bool AsyncHostOperationStart(void *) override { return false; };
    virtual void AsyncHostOperationEnd(bool wasInAsync, void *) override {};
#endif

    // Shared singleton instance.
    static DefaultRecyclerCollectionWrapper Instance;

private:
    static bool IsCollectionDisabled(Recycler * recycler);
};
  379. #ifdef RECYCLER_STATS
// Per-collection statistics gathered when RECYCLER_STATS is enabled,
// grouped by GC phase (heuristics, mark, sweep, memory). Updated through
// the RECYCLER_STATS_* macros below.
struct RecyclerCollectionStats
{
    size_t startCollectAllocBytes;
#if ENABLE_PARTIAL_GC
    size_t startCollectNewPageCount;
#endif
    size_t continueCollectAllocBytes;
    size_t finishCollectTryCount;

    // Heuristic Stats
#if ENABLE_PARTIAL_GC
    size_t rescanRootBytes;
    size_t estimatedPartialReuseBytes;
    size_t uncollectedNewPageCountPartialCollect;
    size_t partialCollectSmallHeapBlockReuseMinFreeBytes;
    double collectEfficacy;
    double collectCost;
#endif

    // Mark stats
    size_t tryMarkCount; // # of pointer try mark (* pointer size to get total number byte looked at)
    size_t tryMarkNullCount;
    size_t tryMarkUnalignedCount;
    size_t tryMarkNonRecyclerMemoryCount;
    size_t tryMarkInteriorCount;
    size_t tryMarkInteriorNullCount;
    size_t tryMarkInteriorNonRecyclerMemoryCount;
    size_t rootCount;
    size_t stackCount;
    size_t remarkCount;
    size_t scanCount; // non-leaf objects marked.
    size_t trackCount;
    size_t finalizeCount;
    size_t markThruNewObjCount;
    size_t markThruFalseNewObjCount;

    // Rescan/mark counters for one mark pass; backgroundMarkData below keeps
    // one entry per background repeat-mark pass.
    struct MarkData
    {
        // Rescan stats
        size_t rescanPageCount;
        size_t rescanObjectCount;
        size_t rescanObjectByteCount;
        size_t rescanLargePageCount;
        size_t rescanLargeObjectCount;
        size_t rescanLargeByteCount;
        size_t markCount; // total number of object marked
        size_t markBytes; // size of all objects marked.
    } markData;
#if ENABLE_CONCURRENT_GC
    MarkData backgroundMarkData[RecyclerHeuristic::MaxBackgroundRepeatMarkCount];
    size_t trackedObjectCount;
#endif
#if ENABLE_PARTIAL_GC
    size_t clientTrackedObjectCount;
#endif

    // Sweep stats
    size_t heapBlockCount[HeapBlock::BlockTypeCount]; // number of heap blocks (processed during swept)
    size_t heapBlockFreeCount[HeapBlock::BlockTypeCount]; // number of heap blocks deleted
    size_t heapBlockConcurrentSweptCount[HeapBlock::SmallBlockTypeCount];
    size_t heapBlockSweptCount[HeapBlock::SmallBlockTypeCount]; // number of heap blocks swept
    size_t objectSweptCount; // objects freed (free list + whole page freed)
    size_t objectSweptBytes;
    size_t objectSweptFreeListCount; // objects freed (free list)
    size_t objectSweptFreeListBytes;
    size_t objectSweepScanCount; // number of objects walked for sweeping (exclude whole page freed)
    size_t finalizeSweepCount; // number of objects finalizer/dispose called
#if ENABLE_PARTIAL_GC
    size_t smallNonLeafHeapBlockPartialReuseCount[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialReuseBytes[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialUnusedCount[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialUnusedBytes[HeapBlock::SmallBlockTypeCount];
#endif

    // Memory Stats
    size_t heapBlockFreeByteCount[HeapBlock::BlockTypeCount]; // The remaining usable free byte count
    size_t largeHeapBlockUsedByteCount; // Used byte count
    size_t largeHeapBlockTotalByteCount; // Total byte count

    // Empty/zero heap block stats
    uint numEmptySmallBlocks[HeapBlock::SmallBlockTypeCount];
    uint numZeroedOutSmallBlocks;
};
// Stats-update macros: expand to real updates of r->collectionStats.<f> when
// RECYCLER_STATS is defined and to nothing otherwise, so call sites need no
// #ifdefs. The INTERLOCKED variants are for counters touched from background
// GC threads.
// NOTE(review): the interlocked forms cast size_t counters to LONG*, so on
// 64-bit builds only the low 32 bits are updated atomically - presumably
// acceptable for statistics; confirm before relying on exact values.
#define RECYCLER_STATS_INC_IF(cond, r, f) if (cond) { RECYCLER_STATS_INC(r, f); }
#define RECYCLER_STATS_INC(r, f) ++r->collectionStats.f
#define RECYCLER_STATS_INTERLOCKED_INC(r, f) { InterlockedIncrement((LONG *)&r->collectionStats.f); }
#define RECYCLER_STATS_DEC(r, f) --r->collectionStats.f
#define RECYCLER_STATS_ADD(r, f, v) r->collectionStats.f += (v)
#define RECYCLER_STATS_INTERLOCKED_ADD(r, f, v) { InterlockedAdd((LONG *)&r->collectionStats.f, (LONG)(v)); }
#define RECYCLER_STATS_SUB(r, f, v) r->collectionStats.f -= (v)
#define RECYCLER_STATS_SET(r, f, v) r->collectionStats.f = v
#else
// RECYCLER_STATS disabled: all stat macros compile away to nothing.
#define RECYCLER_STATS_INC_IF(cond, r, f)
#define RECYCLER_STATS_INC(r, f)
#define RECYCLER_STATS_INTERLOCKED_INC(r, f)
#define RECYCLER_STATS_DEC(r, f)
#define RECYCLER_STATS_ADD(r, f, v)
#define RECYCLER_STATS_INTERLOCKED_ADD(r, f, v)
#define RECYCLER_STATS_SUB(r, f, v)
#define RECYCLER_STATS_SET(r, f, v)
#endif
  475. #ifdef RECYCLER_TRACE
// Snapshot of the inputs and heuristic values for one collection request,
// recorded for RECYCLER_TRACE diagnostic output.
struct CollectionParam
{
    CollectionFlags flags;
    bool finishOnly;
    bool repeat;
    bool priorityBoostConcurrentSweepOverride;
    bool domCollect;
    int timeDiff; // presumably tick delta since the previous collection - TODO confirm units
    size_t uncollectedAllocBytes;
    size_t uncollectedPinnedObjects;
#if ENABLE_PARTIAL_GC
    size_t uncollectedNewPageCountPartialCollect;
    size_t uncollectedNewPageCount;
    size_t unusedPartialCollectFreeBytes;
    bool inPartialCollectMode;
#endif
};
  493. #endif
  494. #include "RecyclerObjectGraphDumper.h"
  495. #if ENABLE_CONCURRENT_GC
  496. class RecyclerParallelThread
  497. {
  498. public:
  499. typedef void (Recycler::* WorkFunc)();
  500. RecyclerParallelThread(Recycler * recycler, WorkFunc workFunc) :
  501. recycler(recycler),
  502. workFunc(workFunc),
  503. concurrentWorkReadyEvent(NULL),
  504. concurrentWorkDoneEvent(NULL),
  505. concurrentThread(NULL)
  506. {
  507. }
  508. ~RecyclerParallelThread()
  509. {
  510. Assert(concurrentThread == NULL);
  511. Assert(concurrentWorkReadyEvent == NULL);
  512. Assert(concurrentWorkDoneEvent == NULL);
  513. }
  514. bool StartConcurrent();
  515. void WaitForConcurrent();
  516. void Shutdown();
  517. bool EnableConcurrent(bool synchronizeOnStartup);
  518. private:
  519. // Static entry point for thread creation
  520. static unsigned int CALLBACK StaticThreadProc(LPVOID lpParameter);
  521. // Static entry point for thread service usage
  522. static void CALLBACK StaticBackgroundWorkCallback(void * callbackData);
  523. private:
  524. WorkFunc workFunc;
  525. Recycler * recycler;
  526. HANDLE concurrentWorkReadyEvent;// main thread uses this event to tell concurrent threads that the work is ready
  527. HANDLE concurrentWorkDoneEvent;// concurrent threads use this event to tell main thread that the work allocated is done
  528. HANDLE concurrentThread;
  529. bool synchronizeOnStartup;
  530. };
  531. #endif
  532. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
// Debug-config RAII helper around page protection of recycler memory for the
// lifetime of a scope; Unprotect() lifts the protection early.
// NOTE(review): actual protect/unprotect behavior (and the meaning of
// isReadOnly) is defined out of line - confirm in Recycler.cpp.
class AutoProtectPages
{
public:
    AutoProtectPages(Recycler* recycler, bool protectEnabled);
    ~AutoProtectPages();
    void Unprotect();
private:
    Recycler* recycler;
    bool isReadOnly;
};
  543. #endif
  544. class Recycler
  545. {
  546. friend class RecyclerScanMemoryCallback;
  547. friend class RecyclerSweep;
  548. friend class MarkContext;
  549. friend class HeapBlock;
  550. friend class HeapBlockMap32;
  551. #if ENABLE_CONCURRENT_GC
  552. friend class RecyclerParallelThread;
  553. #endif
  554. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  555. friend class AutoProtectPages;
  556. #endif
  557. template <typename T> friend class RecyclerWeakReference;
  558. template <typename T> friend class WeakReferenceHashTable;
  559. template <typename TBlockType>
  560. friend class SmallHeapBlockAllocator; // Needed for FindHeapBlock
  561. #if defined(RECYCLER_TRACE)
  562. friend class JavascriptThreadService;
  563. #endif
  564. #ifdef HEAP_ENUMERATION_VALIDATION
  565. friend class ActiveScriptProfilerHeapEnum;
  566. #endif
  567. friend class ScriptEngineBase; // This is for disabling GC for certain Host operations.
  568. friend class ::CodeGenNumberThreadAllocator;
  569. friend struct ::XProcNumberPageSegmentManager;
  570. public:
  571. static const uint ConcurrentThreadStackSize = 300000;
  572. static const bool FakeZeroLengthArray = true;
  573. #ifdef RECYCLER_PAGE_HEAP
  574. // Keeping as constant in case we want to tweak the value here
  575. // Set to 0 so that the tool can do the filtering instead of the runtime
  576. #if DBG
  577. static const int s_numFramesToSkipForPageHeapAlloc = 10;
  578. static const int s_numFramesToSkipForPageHeapFree = 0;
  579. static const int s_numFramesToCaptureForPageHeap = 32;
  580. #else
  581. static const int s_numFramesToSkipForPageHeapAlloc = 0;
  582. static const int s_numFramesToSkipForPageHeapFree = 0;
  583. static const int s_numFramesToCaptureForPageHeap = 32;
  584. #endif
  585. #endif
  586. uint Cookie;
// RAII marker for a host-initiated GC during which the stack is not scanned.
// The tracking flag (isExternalStackSkippingGC) only exists in DBG builds,
// so in release builds construction/destruction are effectively no-ops; the
// Assert enforces that this mode is never entered re-entrantly.
class AutoEnterExternalStackSkippingGCMode
{
public:
    AutoEnterExternalStackSkippingGCMode(Recycler* recycler):
        _recycler(recycler)
    {
        // Setting this in a re-entrant mode is not allowed
        Assert(!recycler->isExternalStackSkippingGC);
#if DBG
        _recycler->isExternalStackSkippingGC = true;
#endif
    }

    ~AutoEnterExternalStackSkippingGCMode()
    {
#if DBG
        _recycler->isExternalStackSkippingGC = false;
#endif
    }

private:
    Recycler* _recycler;
};
  608. private:
  609. IdleDecommitPageAllocator * threadPageAllocator;
  610. #ifdef RECYCLER_WRITE_BARRIER_ALLOC_SEPARATE_PAGE
  611. RecyclerPageAllocator recyclerWithBarrierPageAllocator;
  612. #endif
  613. RecyclerPageAllocator recyclerPageAllocator;
  614. RecyclerPageAllocator recyclerLargeBlockPageAllocator;
  615. public:
// Invokes 'action' on every page allocator owned by this recycler: the
// normal and large-block recycler allocators, the write-barrier allocator
// when compiled in, and finally the thread page allocator.
template<typename Action>
void ForEachPageAllocator(Action action)
{
    action(&this->recyclerPageAllocator);
    action(&this->recyclerLargeBlockPageAllocator);
#ifdef RECYCLER_WRITE_BARRIER_ALLOC_SEPARATE_PAGE
    action(&this->recyclerWithBarrierPageAllocator);
#endif
    action(threadPageAllocator);
}
  626. private:
  627. class AutoSwitchCollectionStates
  628. {
  629. public:
  630. AutoSwitchCollectionStates(Recycler* recycler, CollectionState entryState, CollectionState exitState):
  631. _recycler(recycler),
  632. _exitState(exitState)
  633. {
  634. _recycler->collectionState = entryState;
  635. }
  636. ~AutoSwitchCollectionStates()
  637. {
  638. _recycler->collectionState = _exitState;
  639. }
  640. private:
  641. Recycler* _recycler;
  642. CollectionState _exitState;
  643. };
  644. CollectionState collectionState;
  645. JsUtil::ThreadService *threadService;
  646. HeapBlockMap heapBlockMap;
  647. #if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
// Leak-tracking pin entry: a pinned-object ref count that can also carry the
// stack back-traces of the pinning callers (when STACK_BACK_TRACE is on).
// The operator overloads make it behave like the plain uint used in the
// non-leak-tracking build (see the typedef in the #else branch).
struct PinRecord
{
#ifdef STACK_BACK_TRACE
    PinRecord() : refCount(0), stackBackTraces(nullptr) {}
#else
    PinRecord() : refCount(0) {}
#endif
    // Assignment is only ever used to reset the count to zero, and must not
    // happen while back-traces are still attached.
    PinRecord& operator=(uint newRefCount)
    {
#ifdef STACK_BACK_TRACE
        Assert(stackBackTraces == nullptr);
#endif
        Assert(newRefCount == 0); refCount = 0; return *this;
    }
    PinRecord& operator++() { ++refCount; return *this; }
    PinRecord& operator--() { --refCount; return *this; }
    operator uint() const { return refCount; }
#ifdef STACK_BACK_TRACE
    StackBackTraceNode * stackBackTraces;
#endif
private:
    uint refCount;
};
  671. #else
  672. typedef uint PinRecord;
  673. #endif
  674. typedef SimpleHashTable<void *, PinRecord, HeapAllocator, DefaultComparer, true, PrimePolicy> PinnedObjectHashTable;
  675. PinnedObjectHashTable pinnedObjectMap;
  676. WeakReferenceHashTable<PrimePolicy> weakReferenceMap;
  677. uint weakReferenceCleanupId;
  678. void * transientPinnedObject;
  679. #if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
  680. #ifdef STACK_BACK_TRACE
  681. StackBackTrace * transientPinnedObjectStackBackTrace;
  682. #endif
  683. #endif
// Arena allocator owned by the recycler on behalf of a guest; guest arenas
// are scanned for GC roots. pendingDelete presumably marks an arena whose
// deletion was requested while a collection was in flight - confirm against
// DeleteGuestArena.
struct GuestArenaAllocator : public ArenaAllocator
{
    GuestArenaAllocator(__in_z char16 const* name, PageAllocator * pageAllocator, void (*outOfMemoryFunc)())
        : ArenaAllocator(name, pageAllocator, outOfMemoryFunc), pendingDelete(false)
    {
    }

    bool pendingDelete;
};
  692. DListBase<GuestArenaAllocator> guestArenaList;
  693. DListBase<ArenaData*> externalGuestArenaList; // guest arenas are scanned for roots
  694. #ifdef RECYCLER_PAGE_HEAP
  695. bool isPageHeapEnabled;
  696. bool capturePageHeapAllocStack;
  697. bool capturePageHeapFreeStack;
  698. inline bool IsPageHeapEnabled() const { return isPageHeapEnabled; }
  699. template<ObjectInfoBits attributes>
  700. bool IsPageHeapEnabled(size_t size);
  701. inline bool ShouldCapturePageHeapAllocStack() const { return capturePageHeapAllocStack; }
  702. void VerifyPageHeapFillAfterAlloc(char* memBlock, size_t size, ObjectInfoBits attributes);
  703. #else
  704. inline const bool IsPageHeapEnabled() const { return false; }
  705. inline bool ShouldCapturePageHeapAllocStack() const { return false; }
  706. #endif
  707. #ifdef RECYCLER_MARK_TRACK
  708. MarkMap* markMap;
  709. CriticalSection markMapCriticalSection;
  710. void PrintMarkMap();
  711. void ClearMarkMap();
  712. #endif
  713. // Number of pages to reserve for the primary mark stack
  714. // This is the minimum number of pages to guarantee that a single heap block
  715. // can be rescanned in the worst possible case where every object in a heap block
  716. // in the smallest bucket needs to be rescanned
  717. // These many pages being reserved guarantees that in OOM Rescan, we can make progress
  718. // on every rescan iteration
  719. // We add one because there is a small amount of the page reserved for page pool metadata
  720. // so we need to allocate an additional page to be sure
  721. // Currently, this works out to 2 pages on 32-bit and 5 pages on 64-bit
  722. static const int PrimaryMarkStackReservedPageCount =
  723. ((SmallAllocationBlockAttributes::PageCount * MarkContext::MarkCandidateSize) / SmallAllocationBlockAttributes::MinObjectSize) + 1;
  724. MarkContext markContext;
  725. // Contexts for parallel marking.
  726. // We support up to 4 way parallelism, main context + 3 additional parallel contexts.
  727. MarkContext parallelMarkContext1;
  728. MarkContext parallelMarkContext2;
  729. MarkContext parallelMarkContext3;
  730. // Page pools for above markContexts
  731. PagePool markPagePool;
  732. PagePool parallelMarkPagePool1;
  733. PagePool parallelMarkPagePool2;
  734. PagePool parallelMarkPagePool3;
bool IsMarkStackEmpty();
// True if any mark context (main or one of the three parallel contexts)
// still has objects queued for marking.
bool HasPendingMarkObjects() const { return markContext.HasPendingMarkObjects() || parallelMarkContext1.HasPendingMarkObjects() || parallelMarkContext2.HasPendingMarkObjects() || parallelMarkContext3.HasPendingMarkObjects(); }
// True if any mark context still has tracked objects queued for processing.
bool HasPendingTrackObjects() const { return markContext.HasPendingTrackObjects() || parallelMarkContext1.HasPendingTrackObjects() || parallelMarkContext2.HasPendingTrackObjects() || parallelMarkContext3.HasPendingTrackObjects(); }
  738. RecyclerCollectionWrapper * collectionWrapper;
  739. HANDLE mainThreadHandle;
  740. void * stackBase;
// Snapshot buffer for the registers captured by the register-saving routine
// before the GC scans this thread's stack, so pointers held only in
// registers are still seen as roots. Per-architecture counts cover the
// registers that can hold live pointers on that target.
class SavedRegisterState
{
public:
#if _M_IX86
    static const int NumRegistersToSave = 8;
#elif _M_ARM
    static const int NumRegistersToSave = 13;
#elif _M_ARM64
    static const int NumRegistersToSave = 27;
#elif _M_AMD64
    static const int NumRegistersToSave = 16;
#endif

    SavedRegisterState()
    {
        memset(registers, 0, sizeof(void*) * NumRegistersToSave);
    }

    void** GetRegisters()
    {
        return registers;
    }

    void* GetStackTop()
    {
        // By convention, our register-saving routine will always
        // save the stack pointer as the first item in the array
        return registers[0];
    }

private:
    void* registers[NumRegistersToSave];
};
  770. SavedRegisterState savedThreadContext;
  771. bool inDispose;
  772. #if DBG
  773. uint collectionCount;
  774. #endif
  775. #if DBG || defined RECYCLER_TRACE
  776. bool inResolveExternalWeakReferences;
  777. #endif
  778. bool allowDispose;
  779. bool inDisposeWrapper;
  780. bool needOOMRescan;
  781. bool hasDisposableObject;
  782. DWORD tickCountNextDispose;
  783. bool hasPendingTransferDisposedObjects;
  784. bool inExhaustiveCollection;
  785. bool hasExhaustiveCandidate;
  786. bool inCacheCleanupCollection;
  787. bool inDecommitNowCollection;
  788. bool isScriptActive;
  789. bool isInScript;
  790. bool isShuttingDown;
  791. bool scanPinnedObjectMap;
  792. bool hasScannedInitialImplicitRoots;
  793. bool hasPendingUnpinnedObject;
  794. bool hasPendingDeleteGuestArena;
  795. bool inEndMarkOnLowMemory;
  796. bool decommitOnFinish;
  797. bool enableScanInteriorPointers;
  798. bool enableScanImplicitRoots;
  799. bool disableCollectOnAllocationHeuristics;
  800. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  801. bool disableCollection;
  802. #endif
  803. #if ENABLE_PARTIAL_GC
  804. bool enablePartialCollect;
  805. bool inPartialCollectMode;
  806. #if ENABLE_CONCURRENT_GC
  807. bool hasBackgroundFinishPartial;
  808. bool partialConcurrentNextCollection;
  809. #endif
  810. #endif
  811. #ifdef RECYCLER_STRESS
  812. bool forcePartialScanStack;
  813. bool recyclerStress;
  814. #if ENABLE_CONCURRENT_GC
  815. bool recyclerBackgroundStress;
  816. bool recyclerConcurrentStress;
  817. bool recyclerConcurrentRepeatStress;
  818. #endif
  819. #if ENABLE_PARTIAL_GC
  820. bool recyclerPartialStress;
  821. #endif
  822. #endif
  823. #if DBG
  824. bool isExternalStackSkippingGC;
  825. #endif
  826. bool skipStack;
  827. #if ENABLE_CONCURRENT_GC
  828. #if DBG
  829. bool isConcurrentGCOnIdle;
  830. bool isFinishGCOnIdle;
  831. #endif
  832. bool queueTrackedObject;
  833. bool hasPendingConcurrentFindRoot;
  834. bool priorityBoost;
  835. bool disableConcurrent;
  836. bool enableConcurrentMark;
  837. bool enableParallelMark;
  838. bool enableConcurrentSweep;
  839. uint maxParallelism; // Max # of total threads to run in parallel
  840. byte backgroundRescanCount; // for ETW events and stats
  841. byte backgroundFinishMarkCount;
  842. size_t backgroundRescanRootBytes;
  843. HANDLE concurrentWorkReadyEvent; // main thread uses this event to tell concurrent threads that the work is ready
  844. HANDLE concurrentWorkDoneEvent; // concurrent threads use this event to tell main thread that the work allocated is done
  845. HANDLE concurrentThread;
  846. template <uint parallelId>
  847. void ParallelWorkFunc();
  848. RecyclerParallelThread parallelThread1;
  849. RecyclerParallelThread parallelThread2;
  850. #if DBG
  851. // Variable indicating if the concurrent thread has exited or not
  852. // If the concurrent thread hasn't started yet, this is set to true
  853. // Once the concurrent thread starts, it sets this to false,
  854. // and when the concurrent thread exits, it sets this to true.
  855. bool concurrentThreadExited;
  856. bool disableConcurrentThreadExitedCheck;
  857. bool isProcessingTrackedObjects;
  858. #endif
  859. uint tickCountStartConcurrent;
  860. bool isAborting;
  861. #endif
  862. #if DBG
  863. bool hasIncompleteDoCollect;
  864. // This is set to true when we begin a Rescan, and set to false when either:
  865. // (1) We finish the final in-thread Rescan and are about to Mark
  866. // (2) We do a conditional ResetWriteWatch and are about to Mark
  867. // When this flag is true, we should not be modifying existing mark-related state,
  868. // including markBits and rescanState.
  869. bool isProcessingRescan;
  870. #endif
  871. Js::ConfigFlagsTable& recyclerFlagsTable;
  872. RecyclerSweep recyclerSweepInstance;
  873. RecyclerSweep * recyclerSweep;
  874. static const uint tickDiffToNextCollect = 300;
  875. #ifdef IDLE_DECOMMIT_ENABLED
  876. HANDLE concurrentIdleDecommitEvent;
  877. LONG needIdleDecommitSignal;
  878. #endif
  879. #if ENABLE_PARTIAL_GC
  880. SListBase<void *> clientTrackedObjectList;
  881. ArenaAllocator clientTrackedObjectAllocator;
  882. size_t partialUncollectedAllocBytes;
  883. // Dynamic Heuristics for partial GC
  884. size_t uncollectedNewPageCountPartialCollect;
  885. #endif
  886. uint tickCountNextCollection;
  887. uint tickCountNextFinishCollection;
  888. void (*outOfMemoryFunc)();
  889. #ifdef RECYCLER_TEST_SUPPORT
  890. BOOL (*checkFn)(char* addr, size_t size);
  891. #endif
  892. ExternalRootMarker externalRootMarker;
  893. void * externalRootMarkerContext;
  894. #ifdef PROFILE_EXEC
  895. Js::Profiler * profiler;
  896. Js::Profiler * backgroundProfiler;
  897. PageAllocator backgroundProfilerPageAllocator;
  898. DListBase<ArenaAllocator> backgroundProfilerArena;
  899. #endif
  900. // destruct autoHeap after backgroundProfilerPageAllocator;
  901. HeapInfo autoHeap;
  902. #ifdef PROFILE_MEM
  903. RecyclerMemoryData * memoryData;
  904. #endif
  905. ThreadContextId mainThreadId;
  906. #if DBG
  907. uint heapBlockCount;
  908. bool disableThreadAccessCheck;
  909. #endif
  910. #if DBG || defined(RECYCLER_STATS)
  911. bool isForceSweeping;
  912. #endif
  913. #ifdef NTBUILD
  914. RecyclerWatsonTelemetryBlock localTelemetryBlock;
  915. RecyclerWatsonTelemetryBlock * telemetryBlock;
  916. #endif
  917. #ifdef RECYCLER_STATS
  918. RecyclerCollectionStats collectionStats;
  919. void PrintHeapBlockStats(char16 const * name, HeapBlock::HeapBlockType type);
  920. void PrintHeapBlockMemoryStats(char16 const * name, HeapBlock::HeapBlockType type);
  921. void PrintCollectStats();
  922. void PrintHeuristicCollectionStats();
  923. void PrintMarkCollectionStats();
  924. void PrintBackgroundCollectionStats();
  925. void PrintMemoryStats();
  926. void PrintBackgroundCollectionStat(RecyclerCollectionStats::MarkData const& markData);
  927. #endif
  928. #ifdef RECYCLER_TRACE
  929. CollectionParam collectionParam;
  930. #endif
  931. #ifdef RECYCLER_MEMORY_VERIFY
  932. uint verifyPad;
  933. bool verifyEnabled;
  934. #endif
  935. #ifdef RECYCLER_DUMP_OBJECT_GRAPH
  936. friend class RecyclerObjectGraphDumper;
  937. RecyclerObjectGraphDumper * objectGraphDumper;
  938. public:
  939. bool dumpObjectOnceOnCollect;
  940. #endif
  941. public:
  942. Recycler(AllocationPolicyManager * policyManager, IdleDecommitPageAllocator * pageAllocator, void(*outOfMemoryFunc)(), Js::ConfigFlagsTable& flags);
  943. ~Recycler();
  944. void Initialize(const bool forceInThread, JsUtil::ThreadService *threadService, const bool deferThreadStartup = false
  945. #ifdef RECYCLER_PAGE_HEAP
  946. , PageHeapMode pageheapmode = PageHeapMode::PageHeapModeOff
  947. , bool captureAllocCallStack = false
  948. , bool captureFreeCallStack = false
  949. #endif
  950. );
  951. Js::ConfigFlagsTable& GetRecyclerFlagsTable() const { return this->recyclerFlagsTable; }
  952. void SetMemProtectMode();
  953. bool IsMemProtectMode();
  954. size_t GetUsedBytes();
  955. void LogMemProtectHeapSize(bool fromGC);
  956. char* Realloc(void* buffer, DECLSPEC_GUARD_OVERFLOW size_t existingBytes, DECLSPEC_GUARD_OVERFLOW size_t requestedBytes, bool truncate = true);
  957. #ifdef NTBUILD
  958. void SetTelemetryBlock(RecyclerWatsonTelemetryBlock * telemetryBlock) { this->telemetryBlock = telemetryBlock; }
  959. #endif
  960. void Prime();
  961. void* GetOwnerContext() { return (void*) this->collectionWrapper; }
  962. PageAllocator * GetPageAllocator() { return threadPageAllocator; }
  963. bool NeedOOMRescan() const;
  964. void SetNeedOOMRescan();
  965. void ClearNeedOOMRescan();
  966. BOOL RequestConcurrentWrapperCallback();
  967. BOOL CollectionInProgress() const;
  968. BOOL IsExiting() const;
  969. BOOL IsSweeping() const;
  970. #ifdef RECYCLER_PAGE_HEAP
  971. inline bool ShouldCapturePageHeapFreeStack() const { return capturePageHeapFreeStack; }
  972. #else
  973. inline bool ShouldCapturePageHeapFreeStack() const { return false; }
  974. #endif
  975. void SetIsThreadBound();
  976. void SetIsScriptActive(bool isScriptActive);
  977. void SetIsInScript(bool isInScript);
  978. bool ShouldIdleCollectOnExit();
  979. void ScheduleNextCollection();
  980. IdleDecommitPageAllocator * GetRecyclerLeafPageAllocator();
  981. IdleDecommitPageAllocator * GetRecyclerPageAllocator();
  982. IdleDecommitPageAllocator * GetRecyclerLargeBlockPageAllocator();
  983. #ifdef RECYCLER_WRITE_BARRIER_ALLOC_SEPARATE_PAGE
  984. IdleDecommitPageAllocator * GetRecyclerWithBarrierPageAllocator();
  985. #endif
  986. BOOL IsShuttingDown() const { return this->isShuttingDown; }
  987. #if ENABLE_CONCURRENT_GC
  988. #if DBG
  989. BOOL IsConcurrentMarkEnabled() const { return enableConcurrentMark; }
  990. BOOL IsConcurrentSweepEnabled() const { return enableConcurrentSweep; }
  991. #endif
  992. template <CollectionFlags flags>
  993. BOOL FinishConcurrent();
  994. void ShutdownThread();
  995. bool EnableConcurrent(JsUtil::ThreadService *threadService, bool startAllThreads);
  996. void DisableConcurrent();
  997. void StartQueueTrackedObject();
  998. bool DoQueueTrackedObject() const;
  999. void PrepareSweep();
  1000. #endif
  1001. template <CollectionFlags flags>
  1002. void SetupPostCollectionFlags();
  1003. void EnsureNotCollecting();
  1004. #if ENABLE_CONCURRENT_GC
  1005. bool QueueTrackedObject(FinalizableObject * trackableObject);
  1006. #endif
  1007. // FindRoots
  1008. void TryMarkNonInterior(void* candidate, void* parentReference = nullptr);
  1009. void TryMarkInterior(void *candidate, void* parentReference = nullptr);
  1010. bool InCacheCleanupCollection() { return inCacheCleanupCollection; }
  1011. void ClearCacheCleanupCollection() { Assert(inCacheCleanupCollection); inCacheCleanupCollection = false; }
  1012. // Finalizer support
  1013. void SetExternalRootMarker(ExternalRootMarker fn, void * context);
  1014. ArenaAllocator * CreateGuestArena(char16 const * name, void (*outOfMemoryFunc)());
  1015. void DeleteGuestArena(ArenaAllocator * arenaAllocator);
// Registers an externally-owned guest arena so its pages are scanned for
// roots; returns the list slot so the caller can unregister it later via
// the ArenaData** overload of UnregisterExternalGuestArena.
ArenaData ** RegisterExternalGuestArena(ArenaData* guestArena)
{
    return externalGuestArenaList.PrependNode(&NoThrowHeapAllocator::Instance, guestArena);
}
// Unregisters a guest arena by value, then schedules an exhaustive-candidate
// collection.
void UnregisterExternalGuestArena(ArenaData* guestArena)
{
    externalGuestArenaList.Remove(&NoThrowHeapAllocator::Instance, guestArena);
    // Any time a root is removed during a GC, it indicates that an exhaustive
    // collection is likely going to have work to do so trigger an exhaustive
    // candidate GC to indicate this fact
    this->CollectNow<CollectExhaustiveCandidate>();
}
// Unregisters a guest arena by the list slot returned from
// RegisterExternalGuestArena, then schedules an exhaustive-candidate
// collection.
void UnregisterExternalGuestArena(ArenaData** guestArena)
{
    externalGuestArenaList.RemoveElement(&NoThrowHeapAllocator::Instance, guestArena);
    // Any time a root is removed during a GC, it indicates that an exhaustive
    // collection is likely going to have work to do so trigger an exhaustive
    // candidate GC to indicate this fact
    this->CollectNow<CollectExhaustiveCandidate>();
}
  1036. #ifdef RECYCLER_TEST_SUPPORT
  1037. void SetCheckFn(BOOL(*checkFn)(char* addr, size_t size));
  1038. #endif
  1039. void SetCollectionWrapper(RecyclerCollectionWrapper * wrapper);
  1040. static size_t GetAlignedSize(size_t size) { return HeapInfo::GetAlignedSize(size); }
  1041. HeapInfo* GetAutoHeap() { return &autoHeap; }
  1042. template <CollectionFlags flags>
  1043. BOOL CollectNow();
  1044. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  1045. void DisplayMemStats();
  1046. #endif
  1047. void AddExternalMemoryUsage(size_t size);
  1048. bool NeedDispose() { return this->hasDisposableObject; }
  1049. template <CollectionFlags flags>
  1050. bool FinishDisposeObjectsNow();
  1051. bool RequestExternalMemoryAllocation(size_t size);
  1052. void ReportExternalMemoryFailure(size_t size);
  1053. void ReportExternalMemoryFree(size_t size);
  1054. // ExternalAllocFunc returns true when allocation succeeds
  1055. template <typename ExternalAllocFunc>
  1056. bool DoExternalAllocation(size_t size, ExternalAllocFunc externalAllocFunc);
// Allocation entry-point generators. Each DEFINE_RECYCLER_ALLOC* invocation
// expands into a throwing (or NoThrow*) allocator plus a __forceinline
// "Inlined" variant, all forwarding to AllocWithAttributes /
// AllocZeroWithAttributes with the given ObjectInfoBits. Under
// TRACE_OBJECT_LIFETIME an extra *Trace variant that ORs in TraceBit is
// also generated.
#ifdef TRACE_OBJECT_LIFETIME
#define DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes) \
inline char* AllocFunc##Trace(size_t size) \
{ \
return AllocWithAttributesFunc<(ObjectInfoBits)(attributes | TraceBit), /* nothrow = */ false>(size); \
}
#else
#define DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributeFunc, attributes)
#endif
// Throwing allocator pair: AllocFunc and AllocFunc##Inlined.
#define DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocWithAttributesFunc, attributes) \
inline char * AllocFunc(DECLSPEC_GUARD_OVERFLOW size_t size) \
{ \
return AllocWithAttributesFunc<attributes, /* nothrow = */ false>(size); \
} \
__forceinline char * AllocFunc##Inlined(DECLSPEC_GUARD_OVERFLOW size_t size) \
{ \
return AllocWithAttributesFunc##Inlined<attributes, /* nothrow = */ false>(size); \
} \
DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes);
// Non-throwing allocator pair: NoThrow##AllocFunc and NoThrow##AllocFunc##Inlined.
#define DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocWithAttributesFunc, attributes) \
inline char * NoThrow##AllocFunc(DECLSPEC_GUARD_OVERFLOW size_t size) \
{ \
return AllocWithAttributesFunc<attributes, /* nothrow = */ true>(size); \
} \
inline char * NoThrow##AllocFunc##Inlined(DECLSPEC_GUARD_OVERFLOW size_t size) \
{ \
return AllocWithAttributesFunc##Inlined<attributes, /* nothrow = */ true>(size); \
} \
DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes);
// Convenience wrappers selecting plain vs zero-initializing allocation.
#define DEFINE_RECYCLER_ALLOC(AllocFunc, attributes) DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocWithAttributes, attributes)
#define DEFINE_RECYCLER_ALLOC_ZERO(AllocFunc, attributes) DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocZeroWithAttributes, attributes)
#define DEFINE_RECYCLER_NOTHROW_ALLOC(AllocFunc, attributes) DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocWithAttributes, attributes)
#define DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocFunc, attributes) DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocZeroWithAttributes, attributes)
  1090. #if GLOBAL_ENABLE_WRITE_BARRIER && !defined(_WIN32)
  1091. DEFINE_RECYCLER_ALLOC(Alloc, WithBarrierBit);
  1092. DEFINE_RECYCLER_ALLOC_ZERO(AllocZero, WithBarrierBit);
  1093. DEFINE_RECYCLER_ALLOC(AllocFinalized, FinalizableWithBarrierObjectBits);
  1094. DEFINE_RECYCLER_ALLOC(AllocTracked, ClientTrackableObjectWithBarrierBits);
  1095. DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTracked, ClientTrackableObjectWithBarrierBits);
  1096. #else
  1097. DEFINE_RECYCLER_ALLOC(Alloc, NoBit);
  1098. DEFINE_RECYCLER_ALLOC_ZERO(AllocZero, NoBit);
  1099. DEFINE_RECYCLER_ALLOC(AllocFinalized, FinalizableObjectBits);
  1100. DEFINE_RECYCLER_ALLOC(AllocTracked, ClientTrackableObjectBits);
  1101. DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTracked, ClientFinalizableObjectBits);
  1102. #endif
  1103. #ifdef RECYCLER_WRITE_BARRIER_ALLOC
  1104. DEFINE_RECYCLER_ALLOC(AllocWithBarrier, WithBarrierBit);
  1105. DEFINE_RECYCLER_ALLOC_ZERO(AllocZeroWithBarrier, WithBarrierBit);
  1106. DEFINE_RECYCLER_ALLOC(AllocFinalizedWithBarrier, FinalizableWithBarrierObjectBits);
  1107. DEFINE_RECYCLER_ALLOC(AllocTrackedWithBarrier, ClientTrackableObjectWithBarrierBits);
  1108. DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTrackedWithBarrier, ClientFinalizableObjectWithBarrierBits);
  1109. #endif
  1110. DEFINE_RECYCLER_ALLOC(AllocLeaf, LeafBit);
  1111. DEFINE_RECYCLER_ALLOC(AllocFinalizedLeaf, FinalizableLeafBits);
  1112. DEFINE_RECYCLER_ALLOC(AllocTrackedLeaf, ClientTrackableLeafBits);
  1113. DEFINE_RECYCLER_ALLOC_ZERO(AllocLeafZero, LeafBit);
  1114. DEFINE_RECYCLER_ALLOC_ZERO(AllocZeroTrackedLeaf, ClientTrackableLeafBits);
  1115. DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocImplicitRootLeaf, ImplicitRootLeafBits);
  1116. DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocImplicitRoot, ImplicitRootBit);
  1117. template <ObjectInfoBits enumClass>
  1118. char * AllocEnumClass(DECLSPEC_GUARD_OVERFLOW size_t size)
  1119. {
  1120. Assert((enumClass & EnumClassMask) != 0);
  1121. //Assert((enumClass & ~EnumClassMask & ~WithBarrierBit) == 0);
  1122. return AllocWithAttributes<(ObjectInfoBits)(enumClass), /* nothrow = */ false>(size);
  1123. }
// Throwing allocation that applies the caller-supplied ObjectInfoBits verbatim.
template <ObjectInfoBits infoBits>
char * AllocWithInfoBits(DECLSPEC_GUARD_OVERFLOW size_t size)
{
return AllocWithAttributes<infoBits, /* nothrow = */ false>(size);
}
// Zero-initialized, non-throwing allocation (returns null on failure).
// Presumably used for host objects traced via IRecyclerVisitedObject
// (see RECYCLER_VISITED_HOST elsewhere in this file) — confirm with callers.
template <ObjectInfoBits infoBits>
char * AllocVisitedHost(DECLSPEC_GUARD_OVERFLOW size_t size)
{
return AllocZeroWithAttributes<infoBits, /* nothrow = */ true>(size);
}
// Weak-reference support: create or look up a RecyclerWeakReference handle
// for a strongly-referenced object.
template<typename T>
RecyclerWeakReference<T>* CreateWeakReferenceHandle(T* pStrongReference);
// Returns the current weak-reference cleanup id counter.
uint GetWeakReferenceCleanupId() const { return weakReferenceCleanupId; }
// Returns an existing handle for pStrongReference or creates one; result in *ppWeakRef.
template<typename T>
bool FindOrCreateWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **ppWeakRef);
// Lookup only; does not create a handle if none exists.
template<typename T>
bool TryGetWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **weakReference);
  1141. template <ObjectInfoBits attributes>
  1142. char* GetAddressOfAllocator(size_t sizeCat)
  1143. {
  1144. Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
  1145. return (char*)this->autoHeap.GetBucket<attributes>(sizeCat).GetAllocator();
  1146. }
  1147. template <ObjectInfoBits attributes>
  1148. uint32 GetEndAddressOffset(size_t sizeCat)
  1149. {
  1150. Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
  1151. return this->autoHeap.GetBucket<attributes>(sizeCat).GetAllocator()->GetEndAddressOffset();
  1152. }
  1153. template <ObjectInfoBits attributes>
  1154. uint32 GetFreeObjectListOffset(size_t sizeCat)
  1155. {
  1156. Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
  1157. return this->autoHeap.GetBucket<attributes>(sizeCat).GetAllocator()->GetFreeObjectListOffset();
  1158. }
// Expose allocator internals (address, end-address offset, free-list offset)
// so natively generated code can bump-allocate; static overload takes the
// recycler by address for OOP JIT.
void GetNormalHeapBlockAllocatorInfoForNativeAllocation(size_t sizeCat, void*& allocatorAddress, uint32& endAddressOffset, uint32& freeListOffset, bool allowBumpAllocation, bool isOOPJIT);
static void GetNormalHeapBlockAllocatorInfoForNativeAllocation(void* recyclerAddr, size_t sizeCat, void*& allocatorAddress, uint32& endAddressOffset, uint32& freeListOffset, bool allowBumpAllocation, bool isOOPJIT);
bool AllowNativeCodeBumpAllocation();
static void TrackNativeAllocatedMemoryBlock(Recycler * recycler, void * memBlock, size_t sizeCat);
// Generic Free is not supported by the recycler (GC owns reclamation);
// asserts if ever called.  Use the ExplicitFree* entry points instead.
void Free(void* buffer, size_t size)
{
Assert(false);
}
bool ExplicitFreeLeaf(void* buffer, size_t size);
bool ExplicitFreeNonLeaf(void* buffer, size_t size);
template <ObjectInfoBits attributes>
bool ExplicitFreeInternalWrapper(void* buffer, size_t allocSize);
template <ObjectInfoBits attributes, typename TBlockAttributes>
bool ExplicitFreeInternal(void* buffer, size_t size, size_t sizeCat);
size_t GetAllocSize(size_t size);
template <typename TBlockAttributes>
void SetExplicitFreeBitOnSmallBlock(HeapBlock* heapBlock, size_t sizeCat, void* buffer, ObjectInfoBits attributes);
// Throwing leaf allocation from an explicitly supplied heap.
char* HeapAllocR(HeapInfo* eHeap, DECLSPEC_GUARD_OVERFLOW size_t size)
{
return RealAlloc<LeafBit, /* nothrow = */ false>(eHeap, size);
}
void HeapFree(HeapInfo* eHeap,void* candidate);
// Invokes CallBackFunction for objects matching infoBits.
void EnumerateObjects(ObjectInfoBits infoBits, void (*CallBackFunction)(void * address, size_t size));
// Pin/unpin an object as a GC root; optional out-param receives the ref count.
void RootAddRef(void* obj, uint *count = nullptr);
void RootRelease(void* obj, uint *count = nullptr);
template <ObjectInfoBits attributes, bool nothrow>
inline char* RealAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size);
template <ObjectInfoBits attributes, bool isSmallAlloc, bool nothrow>
inline char* RealAllocFromBucket(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size);
void EnterIdleDecommit();
void LeaveIdleDecommit();
void DisposeObjects();
BOOL IsValidObject(void* candidate, size_t minimumSize = 0);
#if DBG
void SetDisableThreadAccessCheck();
void SetDisableConcurrentThreadExitedCheck();
void CheckAllocExternalMark() const;
BOOL IsFreeObject(void * candidate);
BOOL IsReentrantState() const;
#endif
#if DBG_DUMP
void PrintMarkStack();
#endif
#ifdef PROFILE_EXEC
Js::Profiler * GetProfiler() const { return this->profiler; }
ArenaAllocator * AddBackgroundProfilerArena();
void ReleaseBackgroundProfilerArena(ArenaAllocator * arena);
void SetProfiler(Js::Profiler * profiler, Js::Profiler * backgroundProfiler);
#endif
// Memory-verification support: pads allocations and checks fill patterns.
#ifdef RECYCLER_MEMORY_VERIFY
BOOL VerifyEnabled() const { return verifyEnabled; }
uint GetVerifyPad() const { return verifyPad; }
void Verify(Js::Phase phase);
static void VerifyCheck(BOOL cond, char16 const * msg, void * address, void * corruptedAddress);
static void VerifyCheckFill(void * address, size_t size);
void FillCheckPad(void * address, size_t size, size_t alignedAllocSize, bool objectAlreadyInitialized);
// Convenience overload: object not yet initialized.
void FillCheckPad(void * address, size_t size, size_t alignedAllocSize)
{
FillCheckPad(address, size, alignedAllocSize, false);
}
static void FillPadNoCheck(void * address, size_t size, size_t alignedAllocSize, bool objectAlreadyInitialized);
void VerifyCheckPad(void * address, size_t size);
void VerifyCheckPadExplicitFreeList(void * address, size_t size);
// Byte pattern used to fill verified memory.
static const byte VerifyMemFill = 0xCA;
#endif
#ifdef RECYCLER_ZERO_MEM_CHECK
void VerifyZeroFill(void * address, size_t size);
void VerifyLargeAllocZeroFill(void * address, size_t size, ObjectInfoBits attributes);
#endif
#ifdef RECYCLER_DUMP_OBJECT_GRAPH
bool DumpObjectGraph(RecyclerObjectGraphDumper::Param * param = nullptr);
void DumpObjectDescription(void *object);
#endif
#ifdef LEAK_REPORT
void ReportLeaks();
void ReportLeaksOnProcessDetach();
#endif
#ifdef CHECK_MEMORY_LEAK
void CheckLeaks(char16 const * header);
void CheckLeaksOnProcessDetach(char16 const * header);
#endif
#ifdef RECYCLER_TRACE
void SetDomCollect(bool isDomCollect) { collectionParam.domCollect = isDomCollect; }
void CaptureCollectionParam(CollectionFlags flags, bool repeat = false);
#endif
private:
// RecyclerRootPtr has implicit conversion to pointers, prevent it to be
// passed to RootAddRef/RootRelease directly
template <typename T>
void RootAddRef(RecyclerRootPtr<T>& ptr, uint *count = nullptr);
template <typename T>
void RootRelease(RecyclerRootPtr<T>& ptr, uint *count = nullptr);
// Collection entry points parameterized on CollectionFlags.
template <CollectionFlags flags>
BOOL CollectInternal();
template <CollectionFlags flags>
BOOL Collect();
template <CollectionFlags flags>
BOOL CollectWithHeuristic();
template <CollectionFlags flags>
BOOL CollectWithExhaustiveCandidate();
template <CollectionFlags flags>
BOOL GetPartialFlag();
bool NeedExhaustiveRepeatCollect() const;
#if DBG
bool ExpectStackSkip() const;
#endif
// Sentinel meaning "no byte count available" for root-scan accounting.
static size_t const InvalidScanRootBytes = (size_t)-1;
// Small Allocator: registration hooks for per-thread small-heap-block allocators.
template <typename SmallHeapBlockAllocatorType>
void AddSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
template <typename SmallHeapBlockAllocatorType>
void RemoveSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
template <ObjectInfoBits attributes, typename SmallHeapBlockAllocatorType>
char * SmallAllocatorAlloc(SmallHeapBlockAllocatorType * allocator, size_t sizeCat, size_t size);
// Allocation: out-of-line wrappers over the Inlined implementations; the
// public DEFINE_RECYCLER_ALLOC* macros above expand to calls on these.
template <ObjectInfoBits attributes, bool nothrow>
inline char * AllocWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size);
template <ObjectInfoBits attributes, bool nothrow>
char * AllocWithAttributes(DECLSPEC_GUARD_OVERFLOW size_t size)
{
return AllocWithAttributesInlined<attributes, nothrow>(size);
}
template <ObjectInfoBits attributes, bool nothrow>
inline char* AllocZeroWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size);
template <ObjectInfoBits attributes, bool nothrow>
char* AllocZeroWithAttributes(DECLSPEC_GUARD_OVERFLOW size_t size)
{
return AllocZeroWithAttributesInlined<attributes, nothrow>(size);
}
// Throwing allocation for weak-reference table entries.
char* AllocWeakReferenceEntry(DECLSPEC_GUARD_OVERFLOW size_t size)
{
return AllocWithAttributes<WeakReferenceEntryBits, /* nothrow = */ false>(size);
}
  1292. bool NeedDisposeTimed()
  1293. {
  1294. DWORD ticks = ::GetTickCount();
  1295. return (ticks > tickCountNextDispose && this->hasDisposableObject);
  1296. }
// Large-object allocation path (objects too big for small heap buckets).
char* TryLargeAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size, ObjectInfoBits attributes, bool nothrow);
template <bool nothrow>
char* LargeAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size, ObjectInfoBits attributes);
void OutOfMemory();
// Collection
BOOL DoCollect(CollectionFlags flags);
BOOL DoCollectWrapped(CollectionFlags flags);
BOOL CollectOnAllocatorThread();
#if DBG
void ResetThreadId();
#endif
// Root scanning: pinned objects, thread stack, arenas, and implicit roots.
template <bool background>
size_t ScanPinnedObjects();
size_t ScanStack();
size_t ScanArena(ArenaData * alloc, bool background);
void ScanImplicitRoots();
void ScanInitialImplicitRoots();
void ScanNewImplicitRoots();
size_t FindRoots();
size_t TryMarkArenaMemoryBlockList(ArenaMemoryBlock * memoryBlocks);
size_t TryMarkBigBlockList(BigBlock * memoryBlocks);
#if ENABLE_CONCURRENT_GC
#if FALSE // REVIEW: remove this code since not using
size_t TryMarkBigBlockListWithWriteWatch(BigBlock * memoryBlocks);
#endif
#endif
// Mark
void ResetMarks(ResetMarkFlags flags);
void Mark();
bool EndMark();
bool EndMarkCheckOOMRescan();
void EndMarkOnLowMemory();
#if ENABLE_CONCURRENT_GC
void DoParallelMark();
void DoBackgroundParallelMark();
#endif
size_t RootMark(CollectionState markState);
void ProcessMark(bool background);
void ProcessParallelMark(bool background, MarkContext * markContext);
template <bool parallel, bool interior>
void ProcessMarkContext(MarkContext * markContext);
public:
// Queries the heap-block map for the object's mark bit.
bool IsObjectMarked(void* candidate) { return this->heapBlockMap.IsMarked(candidate); }
#ifdef RECYCLER_STRESS
bool StressCollectNow();
#endif
private:
HeapBlock* FindHeapBlock(void * candidate);
  1345. struct FindBlockCache
  1346. {
  1347. FindBlockCache():
  1348. heapBlock(nullptr),
  1349. candidate(nullptr)
  1350. {
  1351. }
  1352. HeapBlock* heapBlock;
  1353. void* candidate;
  1354. } blockCache;
// Scanning helpers: trace pointers found inside an object's memory.
inline void ScanObjectInline(void ** obj, size_t byteCount);
inline void ScanObjectInlineInterior(void ** obj, size_t byteCount);
template <bool doSpecialMark>
inline void ScanMemoryInline(void ** obj, size_t byteCount);
// Zero-byte guard around the inlined scan.
template <bool doSpecialMark>
void ScanMemory(void ** obj, size_t byteCount) { if (byteCount != 0) { ScanMemoryInline<doSpecialMark>(obj, byteCount); } }
bool AddMark(void * candidate, size_t byteCount) throw();
#ifdef RECYCLER_VISITED_HOST
bool AddPreciselyTracedMark(IRecyclerVisitedObject * candidate) throw();
#endif
// Sweep
#if ENABLE_PARTIAL_GC
bool Sweep(size_t rescanRootBytes = (size_t)-1, bool concurrent = false, bool adjustPartialHeuristics = false);
#else
bool Sweep(bool concurrent = false);
#endif
void SweepWeakReference();
void SweepHeap(bool concurrent, RecyclerSweep& recyclerSweep);
void FinishSweep(RecyclerSweep& recyclerSweep);
#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
void FinishConcurrentSweep();
#endif
bool FinishDisposeObjects();
template <CollectionFlags flags>
bool FinishDisposeObjectsWrapped();
// end collection
void FinishCollection();
void FinishCollection(bool needConcurrentSweep);
void EndCollection();
void ResetCollectionState();
void ResetMarkCollectionState();
void ResetHeuristicCounters();
void ResetPartialHeuristicCounters();
BOOL IsMarkState() const;
BOOL IsFindRootsState() const;
BOOL IsInThreadFindRootsState() const;
template <Js::Phase phase>
void CollectionBegin();
template <Js::Phase phase>
void CollectionEnd();
#if ENABLE_PARTIAL_GC
void ProcessClientTrackedObjects();
bool PartialCollect(bool concurrent);
void FinishPartialCollect(RecyclerSweep * recyclerSweep = nullptr);
void ClearPartialCollect();
#if ENABLE_CONCURRENT_GC
void BackgroundFinishPartialCollect(RecyclerSweep * recyclerSweep);
#endif
#endif
size_t RescanMark(DWORD waitTime);
size_t FinishMark(DWORD waitTime);
size_t FinishMarkRescan(bool background);
#if ENABLE_CONCURRENT_GC
void ProcessTrackedObjects();
#endif
// True while in a callback state where allocation is permitted.
BOOL IsAllocatableCallbackState()
{
return (collectionState & (Collection_PostSweepRedeferralCallback | Collection_PostCollectionCallback));
}
#if ENABLE_CONCURRENT_GC
// Concurrent GC
// Enabled if any of concurrent mark, parallel mark, or concurrent sweep is on.
BOOL IsConcurrentEnabled() const { return this->enableConcurrentMark || this->enableParallelMark || this->enableConcurrentSweep; }
BOOL IsConcurrentMarkState() const;
BOOL IsConcurrentMarkExecutingState() const;
BOOL IsConcurrentResetMarksState() const;
BOOL IsConcurrentFindRootState() const;
BOOL IsConcurrentExecutingState() const;
BOOL IsConcurrentSweepExecutingState() const;
BOOL IsConcurrentSweepSetupState() const;
BOOL IsConcurrentSweepState() const;
BOOL IsConcurrentState() const;
// Checks that all Collection_ConcurrentSweep bits are set in collectionState.
BOOL InConcurrentSweep()
{
return ((collectionState & Collection_ConcurrentSweep) == Collection_ConcurrentSweep);
}
#if DBG
BOOL IsConcurrentFinishedState() const;
#endif // DBG
// Lifecycle of the background GC thread / thread-service work item.
bool InitializeConcurrent(JsUtil::ThreadService* threadService);
bool AbortConcurrent(bool restoreState);
void FinalizeConcurrent(bool restoreState);
static unsigned int CALLBACK StaticThreadProc(LPVOID lpParameter);
static int ExceptFilter(LPEXCEPTION_POINTERS pEP);
DWORD ThreadProc();
void DoBackgroundWork(bool forceForeground = false);
static void CALLBACK StaticBackgroundWorkCallback(void * callbackData);
BOOL CollectOnConcurrentThread();
bool StartConcurrent(CollectionState const state);
BOOL StartBackgroundMarkCollect();
BOOL StartSynchronousBackgroundMark();
BOOL StartAsynchronousBackgroundMark();
BOOL StartBackgroundMark(bool foregroundResetMark, bool foregroundFindRoots);
BOOL StartConcurrentSweepCollect();
template <CollectionFlags flags>
BOOL TryFinishConcurrentCollect();
BOOL WaitForConcurrentThread(DWORD waitTime);
void FlushBackgroundPages();
BOOL FinishConcurrentCollect(CollectionFlags flags);
void FinishTransferSwept(CollectionFlags flags);
BOOL FinishConcurrentCollectWrapped(CollectionFlags flags);
// Work performed on the background thread.
void BackgroundMark();
void BackgroundResetMarks();
void PrepareBackgroundFindRoots();
void RevertPrepareBackgroundFindRoots();
size_t BackgroundFindRoots();
size_t BackgroundScanStack();
size_t BackgroundRepeatMark();
size_t BackgroundRescan(RescanFlags rescanFlags);
void BackgroundResetWriteWatchAll();
size_t BackgroundFinishMark();
char* GetScriptThreadStackTop();
void SweepPendingObjects(RecyclerSweep& recyclerSweep);
void ConcurrentTransferSweptObjects(RecyclerSweep& recyclerSweep);
#if ENABLE_PARTIAL_GC
void ConcurrentPartialTransferSweptObjects(RecyclerSweep& recyclerSweep);
#endif // ENABLE_PARTIAL_GC
#endif // ENABLE_CONCURRENT_GC
bool ForceSweepObject();
void NotifyFree(__in char * address, size_t size);
template <typename T>
void NotifyFree(T * heapBlock);
void CleanupPendingUnroot();
// ETW support: batches freed-memory records (up to BulkFreeMemoryCount)
// before writing an event.
#ifdef ENABLE_JS_ETW
ULONG EventWriteFreeMemoryBlock(HeapBlock* heapBlock);
void FlushFreeRecord();
void AppendFreeMemoryETWRecord(__in char *address, size_t size);
static const uint BulkFreeMemoryCount = 400;
uint bulkFreeMemoryWrittenCount;
struct ETWFreeRecord {
char* memoryAddress;
uint32 objectSize;
};
ETWFreeRecord etwFreeRecords[BulkFreeMemoryCount];
#endif
template <ObjectInfoBits attributes>
bool IntegrateBlock(char * blockAddress, PageSegment * segment, size_t allocSize, size_t objectSize);
// Heap-block and bucket types need access to recycler internals.
template <class TBlockAttributes> friend class SmallHeapBlockT;
template <class TBlockAttributes> friend class SmallNormalHeapBlockT;
template <class TBlockAttributes> friend class SmallLeafHeapBlockT;
template <class TBlockAttributes> friend class SmallFinalizableHeapBlockT;
#ifdef RECYCLER_VISITED_HOST
template <class TBlockAttributes> friend class SmallRecyclerVisitedHostHeapBlockT;
#endif
friend class LargeHeapBlock;
friend class HeapInfo;
friend class LargeHeapBucket;
template <typename TBlockType>
friend class HeapBucketT;
template <typename TBlockType>
friend class SmallNormalHeapBucketBase;
template <typename T, ObjectInfoBits attributes>
friend class RecyclerFastAllocator;
#ifdef RECYCLER_TRACE
void PrintCollectTrace(Js::Phase phase, bool finish = false, bool noConcurrentWork = false);
#endif
// Mark verification: re-walks roots/arenas to validate mark bits.
#ifdef RECYCLER_VERIFY_MARK
void VerifyMark();
void VerifyMarkRoots();
void VerifyMarkStack();
void VerifyMarkArena(ArenaData * arena);
void VerifyMarkBigBlockList(BigBlock * memoryBlocks);
void VerifyMarkArenaMemoryBlockList(ArenaMemoryBlock * memoryBlocks);
bool VerifyMark(void * objectAddress, void * target);
bool VerifyMark(void * target);
#endif
#if DBG_DUMP
bool forceTraceMark;
#endif
bool isHeapEnumInProgress;
#if DBG
bool allowAllocationDuringHeapEnum;
bool allowAllocationDuringRenentrance;
#ifdef ENABLE_PROJECTION
bool isInRefCountTrackingForProjection;
#endif
#endif
// There are two scenarios we allow limited allocation but disallow GC during those allocations:
// in heapenum when we allocate PropertyRecord, and
// in projection ExternalMark allowing allocating VarToDispEx. This is the common flag
// while we have debug only flag for each of the two scenarios.
bool isCollectionDisabled;
// Allocation tracking (TRACK_ALLOC): callers stash type info for the next
// allocation, which the profiler attributes to that type.
#ifdef TRACK_ALLOC
public:
Recycler * TrackAllocInfo(TrackAllocData const& data);
void ClearTrackAllocInfo(TrackAllocData* data = NULL);
#ifdef PROFILE_RECYCLER_ALLOC
void PrintAllocStats();
private:
static bool DoProfileAllocTracker();
void InitializeProfileAllocTracker();
void TrackUnallocated(__in char* address, __in char *endAddress, size_t sizeCat);
void TrackAllocCore(void * object, size_t size, const TrackAllocData& trackAllocData, bool traceLifetime = false);
void* TrackAlloc(void * object, size_t size, const TrackAllocData& trackAllocData, bool traceLifetime = false);
void TrackIntegrate(__in_ecount(blockSize) char * blockAddress, size_t blockSize, size_t allocSize, size_t objectSize, const TrackAllocData& trackAllocData);
BOOL TrackFree(const char* address, size_t size);
void TrackAllocWeakRef(RecyclerWeakReferenceBase * weakRef);
void TrackFreeWeakRef(RecyclerWeakReferenceBase * weakRef);
  1552. struct TrackerData
  1553. {
  1554. TrackerData(type_info const * typeinfo, bool isArray) : typeinfo(typeinfo), isArray(isArray),
  1555. ItemSize(0), ItemCount(0), AllocCount(0), ReqSize(0), AllocSize(0), FreeCount(0), FreeSize(0), TraceLifetime(false)
  1556. #ifdef PERF_COUNTERS
  1557. , counter(PerfCounter::RecyclerTrackerCounterSet::GetPerfCounter(typeinfo, isArray))
  1558. , sizeCounter(PerfCounter::RecyclerTrackerCounterSet::GetPerfSizeCounter(typeinfo, isArray))
  1559. #endif
  1560. {
  1561. }
  1562. type_info const * typeinfo;
  1563. bool isArray;
  1564. #ifdef TRACE_OBJECT_LIFETIME
  1565. bool TraceLifetime;
  1566. #endif
  1567. size_t ItemSize;
  1568. size_t ItemCount;
  1569. int AllocCount;
  1570. int64 ReqSize;
  1571. int64 AllocSize;
  1572. int FreeCount;
  1573. int64 FreeSize;
  1574. #ifdef PERF_COUNTERS
  1575. PerfCounter::Counter& counter;
  1576. PerfCounter::Counter& sizeCounter;
  1577. #endif
  1578. static TrackerData EmptyData;
  1579. static TrackerData ExplicitFreeListObjectData;
  1580. };
// Look up / record the TrackerData associated with an allocated address.
TrackerData * GetTrackerData(void * address);
void SetTrackerData(void * address, TrackerData * data);
// Pairs the instance and array TrackerData for one type.
struct TrackerItem
{
TrackerItem(type_info const * typeinfo) : instanceData(typeinfo, false), arrayData(typeinfo, true)
#ifdef PERF_COUNTERS
, weakRefCounter(PerfCounter::RecyclerTrackerCounterSet::GetWeakRefPerfCounter(typeinfo))
#endif
{}
TrackerData instanceData;
TrackerData arrayData;
#ifdef PERF_COUNTERS
PerfCounter::Counter& weakRefCounter;
#endif
};
// type_info -> TrackerItem, and object address -> TrackerData.
typedef JsUtil::BaseDictionary<type_info const *, TrackerItem *, NoCheckHeapAllocator, PrimeSizePolicy, DefaultComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> TypeInfotoTrackerItemMap;
typedef JsUtil::BaseDictionary<void *, TrackerData *, NoCheckHeapAllocator, PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> PointerToTrackerDataMap;
TypeInfotoTrackerItemMap * trackerDictionary;
CriticalSection * trackerCriticalSection;
#endif
// Type info captured by TrackAllocInfo for the next allocation.
TrackAllocData nextAllocData;
#endif
public:
// Enumeration
// RAII helper that puts the recycler into a non-collecting mark state (e.g.
// for heap enumeration) and restores the previous state on destruction.
class AutoSetupRecyclerForNonCollectingMark
{
private:
Recycler& m_recycler;
bool m_setupDone;
CollectionState m_previousCollectionState;
#ifdef RECYCLER_STATS
RecyclerCollectionStats m_previousCollectionStats;
#endif
public:
AutoSetupRecyclerForNonCollectingMark(Recycler& recycler, bool setupForHeapEnumeration = false);
~AutoSetupRecyclerForNonCollectingMark();
void DoCommonSetup();
void SetupForHeapEnumeration();
};
friend class RecyclerHeapObjectInfo;
// Resolve a candidate pointer to its heap object metadata.
bool FindImplicitRootObject(void* candidate, RecyclerHeapObjectInfo& heapObject);
bool FindHeapObject(void* candidate, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject);
bool FindHeapObjectWithClearedAllocators(void* candidate, RecyclerHeapObjectInfo& heapObject);
bool IsCollectionDisabled() const { return isCollectionDisabled; }
// Heap enumeration implies collection is disabled (asserted in DBG builds).
bool IsHeapEnumInProgress() const { Assert(isHeapEnumInProgress ? isCollectionDisabled : true); return isHeapEnumInProgress; }
#if DBG
// There are limited cases that we have to allow allocation during heap enumeration. GC is explicitly
// disabled during heap enumeration for these limited cases. (See DefaultRecyclerCollectionWrapper)
// The only case of allocation right now is allocating property record for string based type handler
// so we can use the propertyId as the relation Id.
// Allocation during enumeration is still frown upon and should still be avoid if possible.
bool AllowAllocationDuringHeapEnum() const { return allowAllocationDuringHeapEnum; }
class AutoAllowAllocationDuringHeapEnum : public AutoBooleanToggle
{
public:
AutoAllowAllocationDuringHeapEnum(Recycler * recycler) : AutoBooleanToggle(&recycler->allowAllocationDuringHeapEnum) {};
};
#ifdef ENABLE_PROJECTION
bool IsInRefCountTrackingForProjection() const { return isInRefCountTrackingForProjection;}
class AutoIsInRefCountTrackingForProjection : public AutoBooleanToggle
{
public:
AutoIsInRefCountTrackingForProjection(Recycler * recycler) : AutoBooleanToggle(&recycler->isInRefCountTrackingForProjection) {};
};
#endif
#endif
// Scoped toggle permitting allocation while re-entering the recycler;
// also flips the DBG-only allowAllocationDuringRenentrance flag.
class AutoAllowAllocationDuringReentrance : public AutoBooleanToggle
{
public:
AutoAllowAllocationDuringReentrance(Recycler * recycler) :
AutoBooleanToggle(&recycler->isCollectionDisabled)
#if DBG
, allowAllocationDuringRenentrance(&recycler->allowAllocationDuringRenentrance)
#endif
{};
#if DBG
private:
AutoBooleanToggle allowAllocationDuringRenentrance;
#endif
};
// Post-heap-enumeration validation hook.
#ifdef HEAP_ENUMERATION_VALIDATION
typedef void(*PostHeapEnumScanCallback)(const HeapObject& heapObject, void *data);
PostHeapEnumScanCallback pfPostHeapEnumScanCallback;
void *postHeapEnunScanData;
void PostHeapEnumScan(PostHeapEnumScanCallback callback, void*data);
bool IsPostEnumHeapValidationInProgress() const { return pfPostHeapEnumScanCallback != NULL; }
#endif
public:
// Map an interior pointer back to the start of its containing object.
void* GetRealAddressFromInterior(void* candidate);
private:
void BeginNonCollectingMark();
void EndNonCollectingMark();
#if defined(RECYCLER_DUMP_OBJECT_GRAPH) || defined(LEAK_REPORT) || defined(CHECK_MEMORY_LEAK)
public:
// Shutdown-context flags consulted by leak reporting.
bool IsInDllCanUnloadNow() const { return inDllCanUnloadNow; }
bool IsInDetachProcess() const { return inDetachProcess; }
void SetInDllCanUnloadNow();
void SetInDetachProcess();
private:
bool inDllCanUnloadNow;
bool inDetachProcess;
bool isPrimaryMarkContextInitialized;
#endif
#if defined(LEAK_REPORT) || defined(CHECK_MEMORY_LEAK)
template <class Fn>
void ReportOnProcessDetach(Fn fn);
void PrintPinnedObjectStackTraces();
#endif
public:
typedef void (CALLBACK *ObjectBeforeCollectCallback)(void* object, void* callbackState); // same as jsrt JsObjectBeforeCollectCallback
// same as jsrt JsObjectBeforeCollectCallbackWrapper
typedef void (CALLBACK *ObjectBeforeCollectCallbackWrapper)(ObjectBeforeCollectCallback callback, void* object, void* callbackState, void* threadContext);
// Register a callback invoked before the given object is collected.
void SetObjectBeforeCollectCallback(void* object,
ObjectBeforeCollectCallback callback,
void* callbackState,
ObjectBeforeCollectCallbackWrapper callbackWrapper,
void* threadContext);
void ClearObjectBeforeCollectCallbacks();
bool IsInObjectBeforeCollectCallback() const { return objectBeforeCollectCallbackState != ObjectBeforeCollectCallback_None; }
private:
// Per-object registration record for the before-collect callback.
struct ObjectBeforeCollectCallbackData
{
ObjectBeforeCollectCallback callback;
void* callbackState;
void* threadContext;
ObjectBeforeCollectCallbackWrapper callbackWrapper;
ObjectBeforeCollectCallbackData() {}
ObjectBeforeCollectCallbackData(ObjectBeforeCollectCallbackWrapper callbackWrapper, ObjectBeforeCollectCallback callback, void* callbackState, void* threadContext) :
callbackWrapper(callbackWrapper), callback(callback), callbackState(callbackState), threadContext(threadContext) {}
};
typedef JsUtil::BaseDictionary<void*, ObjectBeforeCollectCallbackData, HeapAllocator,
PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> ObjectBeforeCollectCallbackMap;
ObjectBeforeCollectCallbackMap* objectBeforeCollectCallbackMap;
enum ObjectBeforeCollectCallbackState
{
ObjectBeforeCollectCallback_None,
ObjectBeforeCollectCallback_Normal, // Normal GC BeforeCollect callback
ObjectBeforeCollectCallback_Shutdown, // At shutdown invoke all BeforeCollect callback
} objectBeforeCollectCallbackState;
bool ProcessObjectBeforeCollectCallbacks(bool atShutdown = false);
// Software write-barrier bookkeeping for blocks registered before the barrier
// bits can be set.
#if GLOBAL_ENABLE_WRITE_BARRIER
private:
typedef JsUtil::BaseDictionary<void *, size_t, HeapAllocator, PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::AsymetricResizeLock> PendingWriteBarrierBlockMap;
PendingWriteBarrierBlockMap pendingWriteBarrierBlockMap;
public:
void RegisterPendingWriteBarrierBlock(void* address, size_t bytes);
void UnRegisterPendingWriteBarrierBlock(void* address);
#endif
// DBG-only global registry of recyclers used to verify write-barrier bits.
#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
private:
static Recycler* recyclerList;
static CriticalSection recyclerListLock;
Recycler* next;
public:
// JIT-callable wrapper around WBSetBit.
static void WBSetBitJIT(char* addr)
{
return WBSetBit(addr);
}
static void WBSetBit(char* addr);
static void WBSetBitRange(char* addr, uint length);
static void WBVerifyBitIsSet(char* addr, char* target);
static bool WBCheckIsRecyclerAddress(char* addr);
#endif
  1744. };
// Lightweight view of a recycler-allocated object: its address, owning
// recycler, containing heap block, and a way to read/update its attributes.
class RecyclerHeapObjectInfo
{
void* m_address;
Recycler * m_recycler;
HeapBlock* m_heapBlock;
#if LARGEHEAPBLOCK_ENCODING
// For large heap blocks the attributes live in an encoded LargeObjectHeader
// rather than a raw byte; the union is discriminated by isUsingLargeHeapBlock.
union
{
byte * m_attributes;
LargeObjectHeader * m_largeHeapBlockHeader;
};
bool isUsingLargeHeapBlock = false;
#else
byte * m_attributes;
#endif
public:
RecyclerHeapObjectInfo() : m_address(NULL), m_recycler(NULL), m_heapBlock(NULL), m_attributes(NULL) {}
RecyclerHeapObjectInfo(void* address, Recycler * recycler, HeapBlock* heapBlock, byte * attributes) :
m_address(address), m_recycler(recycler), m_heapBlock(heapBlock), m_attributes(attributes) { }
void* GetObjectAddress() const { return m_address; }
#ifdef RECYCLER_PAGE_HEAP
// True when the object lives in a large heap block running in page-heap mode.
bool IsPageHeapAlloc() const
{
return isUsingLargeHeapBlock && ((LargeHeapBlock*)m_heapBlock)->InPageHeapMode();
}
// Protect the object's pages; only valid for page-heap allocations.
void PageHeapLockPages() const
{
Assert(IsPageHeapAlloc());
((LargeHeapBlock*)m_heapBlock)->PageHeapLockPages();
}
#endif
  1776. bool IsLeaf() const
  1777. {
  1778. #if LARGEHEAPBLOCK_ENCODING
  1779. if (isUsingLargeHeapBlock)
  1780. {
  1781. return (m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie) & LeafBit) != 0;
  1782. }
  1783. #endif
  1784. return ((*m_attributes & LeafBit) != 0 || this->m_heapBlock->IsLeafBlock());
  1785. }
  1786. bool IsImplicitRoot() const
  1787. {
  1788. #if LARGEHEAPBLOCK_ENCODING
  1789. if (isUsingLargeHeapBlock)
  1790. {
  1791. return (m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie) & ImplicitRootBit) != 0;
  1792. }
  1793. #endif
  1794. return (*m_attributes & ImplicitRootBit) != 0;
  1795. }
  1796. bool IsObjectMarked() const { Assert(m_recycler); return m_recycler->heapBlockMap.IsMarked(m_address); }
  1797. void SetObjectMarked() { Assert(m_recycler); m_recycler->heapBlockMap.SetMark(m_address); }
  1798. ObjectInfoBits GetAttributes() const
  1799. {
  1800. #if LARGEHEAPBLOCK_ENCODING
  1801. if (isUsingLargeHeapBlock)
  1802. {
  1803. return (ObjectInfoBits)m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
  1804. }
  1805. #endif
  1806. return (ObjectInfoBits)*m_attributes;
  1807. }
  1808. size_t GetSize() const;
  1809. #if LARGEHEAPBLOCK_ENCODING
  1810. void SetLargeHeapBlockHeader(LargeObjectHeader * largeHeapBlockHeader)
  1811. {
  1812. m_largeHeapBlockHeader = largeHeapBlockHeader;
  1813. isUsingLargeHeapBlock = true;
  1814. }
  1815. #endif
  1816. bool SetMemoryProfilerHasEnumerated()
  1817. {
  1818. Assert(m_heapBlock);
  1819. #if LARGEHEAPBLOCK_ENCODING
  1820. if (isUsingLargeHeapBlock)
  1821. {
  1822. return SetMemoryProfilerHasEnumeratedForLargeHeapBlock();
  1823. }
  1824. #endif
  1825. bool wasMemoryProfilerOldObject = (*m_attributes & MemoryProfilerOldObjectBit) != 0;
  1826. *m_attributes |= MemoryProfilerOldObjectBit;
  1827. return wasMemoryProfilerOldObject;
  1828. }
  1829. bool ClearImplicitRootBit()
  1830. {
  1831. // This can only be called on the main thread for non-finalizable block
  1832. // As finalizable block requires that the bit not be change during concurrent mark
  1833. // since the background thread change the NewTrackBit
  1834. Assert(!m_heapBlock->IsAnyFinalizableBlock());
  1835. #ifdef RECYCLER_PAGE_HEAP
  1836. Recycler* recycler = this->m_recycler;
  1837. if (recycler->IsPageHeapEnabled() && recycler->ShouldCapturePageHeapFreeStack())
  1838. {
  1839. #ifdef STACK_BACK_TRACE
  1840. if (this->isUsingLargeHeapBlock)
  1841. {
  1842. LargeHeapBlock* largeHeapBlock = (LargeHeapBlock*)this->m_heapBlock;
  1843. if (largeHeapBlock->InPageHeapMode())
  1844. {
  1845. largeHeapBlock->CapturePageHeapFreeStack();
  1846. }
  1847. }
  1848. #endif
  1849. }
  1850. #endif
  1851. #if LARGEHEAPBLOCK_ENCODING
  1852. if (isUsingLargeHeapBlock)
  1853. {
  1854. return ClearImplicitRootBitsForLargeHeapBlock();
  1855. }
  1856. #endif
  1857. Assert(m_attributes);
  1858. bool wasImplicitRoot = (*m_attributes & ImplicitRootBit) != 0;
  1859. *m_attributes &= ~ImplicitRootBit;
  1860. return wasImplicitRoot;
  1861. }
  1862. void ExplicitFree()
  1863. {
  1864. if (*m_attributes == ObjectInfoBits::LeafBit)
  1865. {
  1866. m_recycler->ExplicitFreeLeaf(m_address, GetSize());
  1867. }
  1868. else
  1869. {
  1870. Assert(*m_attributes == ObjectInfoBits::NoBit);
  1871. m_recycler->ExplicitFreeNonLeaf(m_address, GetSize());
  1872. }
  1873. }
  1874. #if LARGEHEAPBLOCK_ENCODING
  1875. bool ClearImplicitRootBitsForLargeHeapBlock()
  1876. {
  1877. Assert(m_largeHeapBlockHeader);
  1878. byte attributes = m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
  1879. bool wasImplicitRoot = (attributes & ImplicitRootBit) != 0;
  1880. m_largeHeapBlockHeader->SetAttributes(m_recycler->Cookie, attributes & ~ImplicitRootBit);
  1881. return wasImplicitRoot;
  1882. }
  1883. bool SetMemoryProfilerHasEnumeratedForLargeHeapBlock()
  1884. {
  1885. Assert(m_largeHeapBlockHeader);
  1886. byte attributes = m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
  1887. bool wasMemoryProfilerOldObject = (attributes & MemoryProfilerOldObjectBit) != 0;
  1888. m_largeHeapBlockHeader->SetAttributes(m_recycler->Cookie, attributes | MemoryProfilerOldObjectBit);
  1889. return wasMemoryProfilerOldObject;
  1890. }
  1891. #endif
  1892. };
// A fake heap block to replace the original heap block where the strong ref is when it has been collected
// as the original heap block may have been freed
// Every override below asserts: no code path should ever treat this sentinel
// as a live heap block. Access is only via the shared singleton Instance.
class CollectedRecyclerWeakRefHeapBlock : public HeapBlock
{
public:
#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
    virtual void WBVerifyBitIsSet(char* addr) override { Assert(false); }
    virtual void WBSetBit(char* addr) override { Assert(false); }
    virtual void WBSetBitRange(char* addr, uint count) override { Assert(false); }
    virtual void WBClearBit(char* addr) override { Assert(false); }
    virtual void WBClearObject(char* addr) override { Assert(false); }
#endif
#if DBG
    virtual BOOL IsFreeObject(void* objectAddress) override { Assert(false); return false; }
#endif
    virtual BOOL IsValidObject(void* objectAddress) override { Assert(false); return false; }
    virtual byte* GetRealAddressFromInterior(void* interiorAddress) override { Assert(false); return nullptr; }
    virtual size_t GetObjectSize(void* object) const override { Assert(false); return 0; }
    virtual bool FindHeapObject(void* objectAddress, Recycler * recycler, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject) override { Assert(false); return false; }
    virtual bool TestObjectMarkedBit(void* objectAddress) override { Assert(false); return false; }
    virtual void SetObjectMarkedBit(void* objectAddress) override { Assert(false); }
#ifdef RECYCLER_VERIFY_MARK
    virtual bool VerifyMark(void * objectAddress, void * target) override { Assert(false); return false; }
#endif
#ifdef RECYCLER_PERF_COUNTERS
    virtual void UpdatePerfCountersOnFree() override { Assert(false); }
#endif
#ifdef PROFILE_RECYCLER_ALLOC
    virtual void * GetTrackerData(void * address) override { Assert(false); return nullptr; }
    virtual void SetTrackerData(void * address, void * data) override { Assert(false); }
#endif

    // Shared singleton used wherever a collected weak-ref target needs a
    // placeholder block.
    static CollectedRecyclerWeakRefHeapBlock Instance;

private:
    // Private: only the static Instance is ever constructed.
    CollectedRecyclerWeakRefHeapBlock() : HeapBlock(BlockTypeCount)
    {
#if ENABLE_CONCURRENT_GC
        isPendingConcurrentSweep = false;
#endif
    }
};
  1933. class AutoIdleDecommit
  1934. {
  1935. public:
  1936. AutoIdleDecommit(Recycler * recycler) : recycler(recycler) { recycler->EnterIdleDecommit(); }
  1937. ~AutoIdleDecommit() { recycler->LeaveIdleDecommit(); }
  1938. private:
  1939. Recycler * recycler;
  1940. };
  1941. template <typename SmallHeapBlockAllocatorType>
  1942. void
  1943. Recycler::AddSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
  1944. {
  1945. autoHeap.AddSmallAllocator(allocator, sizeCat);
  1946. }
  1947. template <typename SmallHeapBlockAllocatorType>
  1948. void
  1949. Recycler::RemoveSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
  1950. {
  1951. autoHeap.RemoveSmallAllocator(allocator, sizeCat);
  1952. }
  1953. template <ObjectInfoBits attributes, typename SmallHeapBlockAllocatorType>
  1954. char *
  1955. Recycler::SmallAllocatorAlloc(SmallHeapBlockAllocatorType * allocator, DECLSPEC_GUARD_OVERFLOW size_t sizeCat, size_t size)
  1956. {
  1957. return autoHeap.SmallAllocatorAlloc<attributes>(this, allocator, sizeCat, size);
  1958. }
// Dummy recycler allocator policy classes to choose the allocation function
class _RecyclerLeafPolicy;
class _RecyclerNonLeafPolicy;
#ifdef RECYCLER_WRITE_BARRIER
class _RecyclerWriteBarrierPolicy;
#endif

// Primary template is intentionally empty: only the policy specializations
// below provide the alloc/free function selection.
template <typename Policy>
class _RecyclerAllocatorFunc
{};
// Leaf policy: objects allocated through this policy are never scanned for
// GC pointers.
template <>
class _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
public:
    typedef char * (Recycler::*AllocFuncType)(size_t);
    typedef bool (Recycler::*FreeFuncType)(void*, size_t);

    static AllocFuncType GetAllocFunc()
    {
        return &Recycler::AllocLeaf;
    }
    // Zero-initializing variant of the allocator.
    static AllocFuncType GetAllocZeroFunc()
    {
        return &Recycler::AllocLeafZero;
    }
    static FreeFuncType GetFreeFunc()
    {
        return &Recycler::ExplicitFreeLeaf;
    }
};
// Non-leaf policy: objects allocated through this policy are scanned for
// GC pointers during mark.
template <>
class _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
{
public:
    typedef char * (Recycler::*AllocFuncType)(size_t);
    typedef bool (Recycler::*FreeFuncType)(void*, size_t);

    static AllocFuncType GetAllocFunc()
    {
        return &Recycler::Alloc;
    }
    // Zero-initializing variant of the allocator.
    static AllocFuncType GetAllocZeroFunc()
    {
        return &Recycler::AllocZero;
    }
    static FreeFuncType GetFreeFunc()
    {
        return &Recycler::ExplicitFreeNonLeaf;
    }
};
#ifdef RECYCLER_WRITE_BARRIER
// Write-barrier policy: allocation goes through the barrier-aware entry
// points; freeing uses the same non-leaf path as the non-leaf policy.
template <>
class _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
{
public:
    typedef char * (Recycler::*AllocFuncType)(size_t);
    typedef bool (Recycler::*FreeFuncType)(void*, size_t);

    static AllocFuncType GetAllocFunc()
    {
        return &Recycler::AllocWithBarrier;
    }
    // Zero-initializing variant of the allocator.
    static AllocFuncType GetAllocZeroFunc()
    {
        return &Recycler::AllocZeroWithBarrier;
    }
    static FreeFuncType GetFreeFunc()
    {
        return &Recycler::ExplicitFreeNonLeaf;
    }
};
#endif
// This is used by the compiler; when T is NOT a pointer i.e. a value type - it causes leaf allocation
template <typename T>
class TypeAllocatorFunc<Recycler, T> : public _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
};
#if GLOBAL_ENABLE_WRITE_BARRIER
// Pointer types allocate through the write-barrier policy when the global
// software write barrier is compiled in.
template <typename T>
class TypeAllocatorFunc<Recycler, T *> : public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
{
};
#else
// Partial template specialization; applies to T when it is a pointer
template <typename T>
class TypeAllocatorFunc<Recycler, T *> : public _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
{
};
#endif
// Dummy class to choose the allocation function
// Tag type only -- never instantiated for storage; selects leaf allocation
// via the TypeAllocatorFunc/AllocatorInfo traits below.
class RecyclerLeafAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
// Tag type selecting non-leaf (GC-scanned) allocation via the traits below.
class RecyclerNonLeafAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
// Tag type selecting write-barriered allocation via the traits below.
class RecyclerWriteBarrierAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
// Choose RecyclerLeafAllocator / RecyclerNonLeafAllocator based on "bool isLeaf"
template <bool isLeaf>
struct _RecyclerLeaf { typedef RecyclerLeafAllocator AllocatorType; };
template <>
struct _RecyclerLeaf<false> { typedef RecyclerNonLeafAllocator AllocatorType; };
  2065. template <bool isLeaf>
  2066. class ListTypeAllocatorFunc<Recycler, isLeaf>
  2067. {
  2068. public:
  2069. // RecyclerLeafAllocator / RecyclerNonLeafAllocator based on "bool isLeaf"
  2070. // used by write barrier type traits
  2071. typedef typename _RecyclerLeaf<isLeaf>::AllocatorType EffectiveAllocatorType;
  2072. typedef char * (Recycler::*AllocFuncType)(size_t);
  2073. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  2074. static AllocFuncType GetAllocFunc()
  2075. {
  2076. return isLeaf ? &Recycler::AllocLeaf : &Recycler::Alloc;
  2077. }
  2078. static FreeFuncType GetFreeFunc()
  2079. {
  2080. if (isLeaf)
  2081. {
  2082. return &Recycler::ExplicitFreeLeaf;
  2083. }
  2084. else
  2085. {
  2086. return &Recycler::ExplicitFreeNonLeaf;
  2087. }
  2088. }
  2089. };
// Partial template specialization to allocate as non leaf
// When the global write barrier is on, "non-leaf" allocation is routed
// through the write-barrier policy instead.
template <typename T>
class TypeAllocatorFunc<RecyclerNonLeafAllocator, T> :
#if GLOBAL_ENABLE_WRITE_BARRIER
    public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
#else
    public _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
#endif
{
};
#ifdef RECYCLER_WRITE_BARRIER
// The write-barrier tag always maps to the write-barrier policy.
template <typename T>
class TypeAllocatorFunc<RecyclerWriteBarrierAllocator, T> : public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
{
};
#endif
// The leaf tag always maps to the leaf policy, regardless of T.
template <typename T>
class TypeAllocatorFunc<RecyclerLeafAllocator, T> : public _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
};
// AllocatorInfo traits: bind each allocator tag to the concrete Recycler
// allocator type plus the per-type and per-instance allocation functions.
template <typename TAllocType>
struct AllocatorInfo<Recycler, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<Recycler, TAllocType> AllocatorFunc;
    typedef _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy> InstAllocatorFunc; // By default any instance considered non-leaf
};

template <typename TAllocType>
struct AllocatorInfo<RecyclerNonLeafAllocator, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<RecyclerNonLeafAllocator, TAllocType> AllocatorFunc;
    typedef TypeAllocatorFunc<RecyclerNonLeafAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
};

template <typename TAllocType>
struct AllocatorInfo<RecyclerWriteBarrierAllocator, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<RecyclerWriteBarrierAllocator, TAllocType> AllocatorFunc;
    typedef TypeAllocatorFunc<RecyclerWriteBarrierAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
};

template <typename TAllocType>
struct AllocatorInfo<RecyclerLeafAllocator, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<RecyclerLeafAllocator, TAllocType> AllocatorFunc;
    typedef TypeAllocatorFunc<RecyclerLeafAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
};
// ForceNonLeafAllocator / ForceLeafAllocator: remap an allocator tag to its
// forced leaf/non-leaf counterpart (identity mappings live elsewhere).
template <>
struct ForceNonLeafAllocator<Recycler>
{
    typedef RecyclerNonLeafAllocator AllocatorType;
};
template <>
struct ForceNonLeafAllocator<RecyclerLeafAllocator>
{
    typedef RecyclerNonLeafAllocator AllocatorType;
};
template <>
struct ForceLeafAllocator<Recycler>
{
    typedef RecyclerLeafAllocator AllocatorType;
};
template <>
struct ForceLeafAllocator<RecyclerNonLeafAllocator>
{
    typedef RecyclerLeafAllocator AllocatorType;
};
// TODO: enable -profile for GC phases.
// access the same profiler object from multiple GC threads which shares one recycler object,
// but profiler object is not thread safe
// NOTE: the "&& 0" keeps the profiling branch permanently disabled until the
// thread-safety issue above is resolved; the macros below compile to nothing.
#if defined(PROFILE_EXEC) && 0
#define RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase) if (recycler->profiler != nullptr) { recycler->profiler->Begin(phase); }
#define RECYCLER_PROFILE_EXEC_END(recycler, phase) if (recycler->profiler != nullptr) { recycler->profiler->End(phase); }
#define RECYCLER_PROFILE_EXEC_BEGIN2(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->Begin(phase1); recycler->profiler->Begin(phase2);}
#define RECYCLER_PROFILE_EXEC_END2(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->End(phase1); recycler->profiler->End(phase2);}
#define RECYCLER_PROFILE_EXEC_CHANGE(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->End(phase1); recycler->profiler->Begin(phase2); }
#define RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase) if (recycler->backgroundProfiler != nullptr) { recycler->backgroundProfiler->Begin(phase); }
#define RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase) if (recycler->backgroundProfiler != nullptr) { recycler->backgroundProfiler->End(phase); }
#define RECYCLER_PROFILE_EXEC_THREAD_BEGIN(background, recycler, phase) if (background) { RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase); } else { RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase); }
#define RECYCLER_PROFILE_EXEC_THREAD_END(background, recycler, phase) if (background) { RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase); } else { RECYCLER_PROFILE_EXEC_END(recycler, phase); }
#else
#define RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase)
#define RECYCLER_PROFILE_EXEC_END(recycler, phase)
#define RECYCLER_PROFILE_EXEC_BEGIN2(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_END2(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_CHANGE(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase)
#define RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase)
#define RECYCLER_PROFILE_EXEC_THREAD_BEGIN(background, recycler, phase)
#define RECYCLER_PROFILE_EXEC_THREAD_END(background, recycler, phase)
#endif
  2182. }
// Placement new that allocates from a specific HeapInfo inside the recycler.
_Ret_notnull_ inline void * __cdecl
operator new(DECLSPEC_GUARD_OVERFLOW size_t byteSize, Recycler * alloc, HeapInfo * heapInfo)
{
    return alloc->HeapAllocR(heapInfo, byteSize);
}
// Matching placement delete -- invoked only if the corresponding placement
// new's constructor throws.
inline void __cdecl
operator delete(void * obj, Recycler * alloc, HeapInfo * heapInfo)
{
    alloc->HeapFree(heapInfo, obj);
}
  2193. template<ObjectInfoBits infoBits>
  2194. _Ret_notnull_ inline void * __cdecl
  2195. operator new(DECLSPEC_GUARD_OVERFLOW size_t byteSize, Recycler * recycler, const InfoBitsWrapper<infoBits>&)
  2196. {
  2197. AssertCanHandleOutOfMemory();
  2198. Assert(byteSize != 0);
  2199. void * buffer;
  2200. if (infoBits & EnumClass_1_Bit)
  2201. {
  2202. buffer = recycler->AllocEnumClass<infoBits>(byteSize);
  2203. }
  2204. else
  2205. {
  2206. buffer = recycler->AllocWithInfoBits<infoBits>(byteSize);
  2207. }
  2208. // All of our allocation should throw on out of memory
  2209. Assume(buffer != nullptr);
  2210. return buffer;
  2211. }
// Debug-only hook for mark verification: lets the runtime declare known
// false-positive references so verification can skip them. The macros expand
// to nothing in builds without RECYCLER_VERIFY_MARK.
#if DBG && defined(RECYCLER_VERIFY_MARK)
extern bool IsLikelyRuntimeFalseReference(
    char* objectStartAddress, size_t offset, const char* typeName);
#define DECLARE_RECYCLER_VERIFY_MARK_FRIEND() \
    private: \
        friend bool ::IsLikelyRuntimeFalseReference( \
            char* objectStartAddress, size_t offset, const char* typeName);
#define IMPLEMENT_STUB_IsLikelyRuntimeFalseReference() \
    bool IsLikelyRuntimeFalseReference( \
        char* objectStartAddress, size_t offset, const char* typeName) \
    { return false; }
#else
#define DECLARE_RECYCLER_VERIFY_MARK_FRIEND()
#define IMPLEMENT_STUB_IsLikelyRuntimeFalseReference()
#endif
  2227. template <typename ExternalAllocFunc>
  2228. bool Recycler::DoExternalAllocation(size_t size, ExternalAllocFunc externalAllocFunc)
  2229. {
  2230. // Request external memory allocation
  2231. if (!RequestExternalMemoryAllocation(size))
  2232. {
  2233. // Attempt to free some memory then try again
  2234. CollectNow<CollectOnTypedArrayAllocation>();
  2235. if (!RequestExternalMemoryAllocation(size))
  2236. {
  2237. return false;
  2238. }
  2239. }
  2240. struct AutoExternalAllocation
  2241. {
  2242. bool allocationSucceeded = false;
  2243. Recycler* recycler;
  2244. size_t size;
  2245. AutoExternalAllocation(Recycler* recycler, size_t size): recycler(recycler), size(size) {}
  2246. // In case the externalAllocFunc throws or fails, the destructor will report the failure
  2247. ~AutoExternalAllocation() { if (!allocationSucceeded) recycler->ReportExternalMemoryFailure(size); }
  2248. };
  2249. AutoExternalAllocation externalAllocation(this, size);
  2250. if (externalAllocFunc())
  2251. {
  2252. this->AddExternalMemoryUsage(size);
  2253. externalAllocation.allocationSucceeded = true;
  2254. return true;
  2255. }
  2256. return false;
  2257. }