Recycler.h 87 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
7227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307230823092310231123122313231423152316231723182319232023212322232323242325232623272328232923302331233223332334233523362337233823392340234123422343234423452346234723482349235023512352235323542355235623572358235923602361236223632364236523662367236823692370237123722373237423752376237723782379238023812382
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. #include "CollectionState.h"
  7. namespace Js
  8. {
  9. class Profiler;
  10. enum Phase;
  11. };
  12. namespace JsUtil
  13. {
  14. class ThreadService;
  15. };
  16. class StackBackTraceNode;
  17. class ScriptEngineBase;
  18. class JavascriptThreadService;
  19. #ifdef PROFILE_MEM
  20. struct RecyclerMemoryData;
  21. #endif
  22. namespace Memory
  23. {
  24. template <typename T> class RecyclerRootPtr;
// RAII helper: sets a boolean flag on construction and clears it on
// destruction. Asserts the flag was false on entry, and (in DBG builds) that
// nobody else modified it in between unless valueMayChange was requested.
class AutoBooleanToggle
{
public:
    // b: flag to toggle (must be false on entry).
    // value: value written to *b for the scope (cleared to false on exit).
    // valueMayChange: DBG only — suppress the "flag unchanged" assert.
    AutoBooleanToggle(bool * b, bool value = true, bool valueMayChange = false)
        : b(b)
    {
        Assert(!(*b));
        *b = value;
#if DBG
        this->value = value;
        this->valueMayChange = valueMayChange;
#endif
    }

    ~AutoBooleanToggle()
    {
        // b is nulled by Leave(); only restore if still armed.
        if (b)
        {
            Assert(valueMayChange || *b == value);
            *b = false;
        }
    }

    // Clears the flag early and disarms the destructor.
    // NOTE(review): unlike the destructor, this does not check b for nullptr —
    // calling Leave() twice would dereference null. Presumably callers invoke
    // it at most once; confirm at call sites.
    void Leave()
    {
        Assert(valueMayChange || *b == value);
        *b = false;
        b = nullptr;
    }

private:
    bool * b;            // target flag; nullptr once Leave() has run
#if DBG
    bool value;          // the value we installed, to detect unexpected writes
    bool valueMayChange; // when true, skip the unchanged-value assert
#endif
};
// RAII helper: overwrites *var with val for the lifetime of this object and
// restores the previous value on destruction.
template <class T>
class AutoRestoreValue
{
public:
    AutoRestoreValue(T* var, const T& val):
        variable(var)
    {
        Assert(var);
        oldValue = (*variable);
        (*variable) = val;
#ifdef DEBUG
        debugSetValue = val;
#endif
    }

    ~AutoRestoreValue()
    {
        // Verify nobody changed the variable while we held it, then restore.
        // NOTE(review): debugSetValue is guarded by #ifdef DEBUG while the rest
        // of this file uses #if DBG; this only compiles cleanly if Assert is
        // inactive whenever DEBUG is undefined — confirm the build defines agree.
        Assert((*variable) == debugSetValue);
        (*variable) = oldValue;
    }

private:
#ifdef DEBUG
    T debugSetValue; // value we installed, to detect unexpected writes (DEBUG only)
#endif
    T* variable;     // target location being temporarily overridden
    T oldValue;      // original value restored by the destructor
};
  85. class Recycler;
  86. class RecyclerScanMemoryCallback
  87. {
  88. public:
  89. RecyclerScanMemoryCallback(Recycler* recycler) : recycler(recycler) {}
  90. void operator()(void** obj, size_t byteCount);
  91. private:
  92. Recycler* recycler;
  93. };
// Empty tag type used to select an allocation overload by ObjectInfoBits at
// compile time (used by the RecyclerNewWithInfoBits macro in this file).
template<ObjectInfoBits infoBits>
struct InfoBitsWrapper{};
// Allocation macros
// RecyclerNew* allocate and construct a T on the recycler heap. Suffix key:
//   Plus   = extra trailing bytes after the object
//   Z      = zero-initialized memory
//   Leaf   = object contains no recycler pointers (not scanned during mark)
//   Struct = no constructor arguments
//   Array  = 'count' contiguous elements
#define RecyclerNew(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocInlined, T, __VA_ARGS__)
#define RecyclerNewPlus(recycler,size,T,...) AllocatorNewPlus(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusLeaf(recycler,size,T,...) AllocatorNewPlusLeaf(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusZ(recycler,size,T,...) AllocatorNewPlusZ(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusLeafZ(recycler,size,T,...) AllocatorNewPlusLeafZ(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocZeroInlined, T, __VA_ARGS__)
#define RecyclerNewStruct(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocInlined, T)
#define RecyclerNewStructZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocZeroInlined, T)
#define RecyclerNewStructPlus(recycler,size,T) AllocatorNewStructPlus(Recycler, recycler, size, T)
#define RecyclerNewStructLeaf(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocLeafInlined, T)
#define RecyclerNewStructLeafZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocLeafZeroInlined, T)
#define RecyclerNewLeaf(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafInlined, T, __VA_ARGS__)
#define RecyclerNewLeafZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafZeroInlined, T, __VA_ARGS__)
#define RecyclerNewArrayLeafZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafZero, T, count)
#define RecyclerNewArray(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, Alloc, T, count)
#define RecyclerNewArrayZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZero, T, count)
#define RecyclerNewArrayLeaf(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeaf, T, count)
// Use static_cast to make sure the finalized and tracked object have the right base class
// (Finalized = finalizer/dispose runs at sweep; Tracked = object participates in tracking)
#define RecyclerNewFinalized(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedInlined, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeaf(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedLeafInlined, T, __VA_ARGS__)))
#define RecyclerNewFinalizedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalized, size, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeafPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedLeaf, size, T, __VA_ARGS__)))
#define RecyclerNewTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedInlined, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeaf(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedLeafInlined, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeafPlusZ(recycler,size,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocZeroTrackedLeafInlined, size, T, __VA_ARGS__)))
// Allocate with explicit attribute selection (enum class / ObjectInfoBits tag dispatch).
#define RecyclerNewEnumClass(recycler, enumClass, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), enumClass) T(__VA_ARGS__)
#define RecyclerNewWithInfoBits(recycler, infoBits, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<infoBits>()) T(__VA_ARGS__)
#define RecyclerNewFinalizedClientTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedClientTrackedInlined, T, __VA_ARGS__)))
// Software write-barrier variants: when RECYCLER_WRITE_BARRIER_ALLOC is
// enabled these allocate from write-barrier-tracked pages; otherwise they
// alias the plain allocation macros above.
#ifdef RECYCLER_WRITE_BARRIER_ALLOC
#define RecyclerNewWithBarrier(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocWithBarrier, T, __VA_ARGS__)
#define RecyclerNewWithBarrierPlus(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocWithBarrier, size, T, __VA_ARGS__)
#define RecyclerNewWithBarrierPlusZ(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocZeroWithBarrier, size, T, __VA_ARGS__)
#define RecyclerNewWithBarrierArray(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocWithBarrier, T, count)
#define RecyclerNewWithBarrierArrayZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZeroWithBarrier, T, count)
#define RecyclerNewWithBarrierStruct(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocWithBarrier, T)
#define RecyclerNewWithBarrierStructZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocZeroWithBarrier, T)
#define RecyclerNewWithBarrierFinalized(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedWithBarrierInlined, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierFinalizedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedWithBarrier, size, T, __VA_ARGS__)))
#else
#define RecyclerNewWithBarrier RecyclerNew
#define RecyclerNewWithBarrierPlus RecyclerNewPlus
#define RecyclerNewWithBarrierPlusZ RecyclerNewPlusZ
#define RecyclerNewWithBarrierArray RecyclerNewArray
#define RecyclerNewWithBarrierArrayZ RecyclerNewArrayZ
#define RecyclerNewWithBarrierStruct RecyclerNewStruct
#define RecyclerNewWithBarrierStructZ RecyclerNewStructZ
#define RecyclerNewWithBarrierFinalized RecyclerNewFinalized
#define RecyclerNewWithBarrierFinalizedPlus RecyclerNewFinalizedPlus
#endif
// Object-lifetime tracing variants: when TRACE_OBJECT_LIFETIME is enabled
// these route through the *Trace allocators (which log allocations);
// otherwise they alias the corresponding non-tracing macros.
#ifdef TRACE_OBJECT_LIFETIME
#define RecyclerNewLeafTrace(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafTrace, T, __VA_ARGS__)
#define RecyclerNewLeafZTrace(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafZeroTrace, T, __VA_ARGS__)
#define RecyclerNewPlusLeafTrace(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocLeafTrace, size, T, __VA_ARGS__)
#define RecyclerNewArrayLeafZTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafZeroTrace, T, count)
#define RecyclerNewArrayTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocTrace, T, count)
#define RecyclerNewArrayZTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZeroTrace, T, count)
#define RecyclerNewArrayLeafTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafTrace, T, count)
// Use static_cast to make sure the finalized and tracked object have the right base class
#define RecyclerNewFinalizedTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedTrace, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeafTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedLeafTrace, T, __VA_ARGS__)))
#define RecyclerNewFinalizedPlusTrace(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedTrace, size, T, __VA_ARGS__)))
#define RecyclerNewTrackedTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedTrace, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeafTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedLeafTrace, T, __VA_ARGS__)))
#else
#define RecyclerNewLeafTrace RecyclerNewLeaf
#define RecyclerNewLeafZTrace RecyclerNewLeafZ
#define RecyclerNewPlusLeafTrace RecyclerNewPlusLeaf
#define RecyclerNewArrayLeafZTrace RecyclerNewArrayLeafZ
#define RecyclerNewArrayTrace RecyclerNewArray
#define RecyclerNewArrayZTrace RecyclerNewArrayZ
#define RecyclerNewArrayLeafTrace RecyclerNewArrayLeaf
#define RecyclerNewFinalizedTrace RecyclerNewFinalized
#define RecyclerNewFinalizedLeafTrace RecyclerNewFinalizedLeaf
#define RecyclerNewFinalizedPlusTrace RecyclerNewFinalizedPlus
#define RecyclerNewTrackedTrace RecyclerNewTracked
#define RecyclerNewTrackedLeafTrace RecyclerNewTrackedLeaf
#endif
  174. #ifdef RECYCLER_TRACE
  175. #define RecyclerVerboseTrace(flags, ...) \
  176. if (flags.Verbose && flags.Trace.IsEnabled(Js::RecyclerPhase)) \
  177. { \
  178. Output::Print(__VA_ARGS__); \
  179. }
  180. #define AllocationVerboseTrace(flags, ...) \
  181. if (flags.Verbose && flags.Trace.IsEnabled(Js::MemoryAllocationPhase)) \
  182. { \
  183. Output::Print(__VA_ARGS__); \
  184. }
  185. #define LargeAllocationVerboseTrace(flags, ...) \
  186. if (flags.Verbose && \
  187. (flags.Trace.IsEnabled(Js::MemoryAllocationPhase) || \
  188. flags.Trace.IsEnabled(Js::LargeMemoryAllocationPhase))) \
  189. { \
  190. Output::Print(__VA_ARGS__); \
  191. }
  192. #define PageAllocatorAllocationVerboseTrace(flags, ...) \
  193. if (flags.Verbose && flags.Trace.IsEnabled(Js::PageAllocatorAllocPhase)) \
  194. { \
  195. Output::Print(__VA_ARGS__); \
  196. }
  197. #else
  198. #define RecyclerVerboseTrace(...)
  199. #define AllocationVerboseTrace(...)
  200. #define LargeAllocationVerboseTrace(...)
  201. #endif
// Placement-construct / explicitly free an object on a specific heapInfo of
// the recycler (non-GC'd, manually managed recycler heap memory).
#define RecyclerHeapNew(recycler,heapInfo,T,...) new (recycler, heapInfo) T(__VA_ARGS__)
#define RecyclerHeapDelete(recycler,heapInfo,addr) (static_cast<Recycler *>(recycler)->HeapFree(heapInfo,addr))
// Host-registered callback invoked to mark external roots during collection.
typedef void (__cdecl* ExternalRootMarker)(void *);
// Flags controlling whether and how a garbage collection runs.
//   CollectHeuristic_* : gating conditions — collect only if the heuristic fires.
//   CollectOverride_*  : tweaks to a single collection request.
//   CollectMode_*      : which kind of collection to perform.
// The named combinations at the bottom are the values callers actually pass.
enum CollectionFlags
{
    // Heuristic bits (low byte)
    CollectHeuristic_AllocSize = 0x00000001,
    CollectHeuristic_Time = 0x00000002,
    CollectHeuristic_TimeIfScriptActive = 0x00000004,
    CollectHeuristic_TimeIfInScript = 0x00000008,
    CollectHeuristic_Never = 0x00000080,
    CollectHeuristic_Mask = 0x000000FF,

    // Per-request overrides
    CollectOverride_FinishConcurrent = 0x00001000,
    CollectOverride_ExhaustiveCandidate = 0x00002000,
    CollectOverride_ForceInThread = 0x00004000,
    CollectOverride_AllowDispose = 0x00008000,
    CollectOverride_AllowReentrant = 0x00010000,
    CollectOverride_ForceFinish = 0x00020000,
    CollectOverride_Explicit = 0x00040000,
    CollectOverride_DisableIdleFinish = 0x00080000,
    CollectOverride_BackgroundFinishMark= 0x00100000,
    CollectOverride_FinishConcurrentTimeout = 0x00200000,
    CollectOverride_NoExhaustiveCollect = 0x00400000,
    CollectOverride_SkipStack = 0x01000000,

    // Collection modes
    CollectMode_Partial = 0x08000000,
    CollectMode_Concurrent = 0x10000000,
    CollectMode_Exhaustive = 0x20000000,
    CollectMode_DecommitNow = 0x40000000,
    CollectMode_CacheCleanup = 0x80000000,

    // Named combinations used at call sites
    CollectNowForceInThread = CollectOverride_ForceInThread,
    CollectNowForceInThreadExternal = CollectOverride_ForceInThread | CollectOverride_AllowDispose,
    CollectNowForceInThreadExternalNoStack = CollectOverride_ForceInThread | CollectOverride_AllowDispose | CollectOverride_SkipStack,
    CollectNowDefault = CollectOverride_FinishConcurrent,
    CollectNowDefaultLSCleanup = CollectOverride_FinishConcurrent | CollectOverride_AllowDispose,
    CollectNowDecommitNowExplicit = CollectNowDefault | CollectMode_DecommitNow | CollectMode_CacheCleanup | CollectOverride_Explicit | CollectOverride_AllowDispose,
    CollectNowConcurrent = CollectOverride_FinishConcurrent | CollectMode_Concurrent,
    CollectNowExhaustive = CollectOverride_FinishConcurrent | CollectMode_Exhaustive | CollectOverride_AllowDispose,
    CollectNowPartial = CollectOverride_FinishConcurrent | CollectMode_Partial,
    CollectNowConcurrentPartial = CollectMode_Concurrent | CollectNowPartial,

    CollectOnAllocation = CollectHeuristic_AllocSize | CollectHeuristic_Time | CollectMode_Concurrent | CollectMode_Partial | CollectOverride_FinishConcurrent | CollectOverride_AllowReentrant | CollectOverride_FinishConcurrentTimeout,
    CollectOnTypedArrayAllocation = CollectHeuristic_AllocSize | CollectHeuristic_Time | CollectMode_Concurrent | CollectMode_Partial | CollectOverride_FinishConcurrent | CollectOverride_AllowReentrant | CollectOverride_FinishConcurrentTimeout | CollectOverride_AllowDispose,
    CollectOnScriptIdle = CollectOverride_FinishConcurrent | CollectMode_Concurrent | CollectMode_CacheCleanup | CollectOverride_SkipStack,
    CollectOnScriptExit = CollectHeuristic_AllocSize | CollectOverride_FinishConcurrent | CollectMode_Concurrent | CollectMode_CacheCleanup,
    CollectExhaustiveCandidate = CollectHeuristic_Never | CollectOverride_ExhaustiveCandidate,
    CollectOnScriptCloseNonPrimary = CollectNowConcurrent | CollectOverride_ExhaustiveCandidate | CollectOverride_AllowDispose,
    CollectOnRecoverFromOutOfMemory = CollectOverride_ForceInThread | CollectMode_DecommitNow,
    CollectOnSuspendCleanup = CollectNowConcurrent | CollectMode_Exhaustive | CollectMode_DecommitNow | CollectOverride_DisableIdleFinish,

    // Finishing an already-started concurrent collection
    FinishConcurrentOnIdle = CollectMode_Concurrent | CollectOverride_DisableIdleFinish,
    FinishConcurrentOnIdleAtRoot = CollectMode_Concurrent | CollectOverride_DisableIdleFinish | CollectOverride_SkipStack,
    FinishConcurrentOnExitScript = CollectMode_Concurrent | CollectOverride_DisableIdleFinish | CollectOverride_BackgroundFinishMark,
    FinishConcurrentOnEnterScript = CollectMode_Concurrent | CollectOverride_DisableIdleFinish | CollectOverride_BackgroundFinishMark,
    FinishConcurrentOnAllocation = CollectMode_Concurrent | CollectOverride_DisableIdleFinish | CollectOverride_BackgroundFinishMark,
    FinishDispose = CollectOverride_AllowDispose,
    FinishDisposeTimed = CollectOverride_AllowDispose | CollectHeuristic_TimeIfScriptActive,
    ForceFinishCollection = CollectOverride_ForceFinish | CollectOverride_ForceInThread,

#ifdef RECYCLER_STRESS
    CollectStress = CollectNowForceInThread,
#ifdef PARTIAL_GC_ENABLED
    CollectPartialStress = CollectMode_Partial,
#endif
#ifdef CONCURRENT_GC_ENABLED
    CollectBackgroundStress = CollectNowDefault,
    CollectConcurrentStress = CollectNowConcurrent,
#ifdef PARTIAL_GC_ENABLED
    CollectConcurrentPartialStress = CollectConcurrentStress | CollectPartialStress,
#endif
#endif
#endif
#if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
    CollectNowFinalGC = CollectNowExhaustive | CollectOverride_ForceInThread | CollectOverride_SkipStack | CollectOverride_Explicit | CollectOverride_AllowDispose,
#endif
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    CollectNowExhaustiveSkipStack = CollectNowExhaustive | CollectOverride_SkipStack, // Used by test
#endif
};
  276. class RecyclerCollectionWrapper
  277. {
  278. public:
  279. typedef BOOL (Recycler::*CollectionFunction)(CollectionFlags flags);
  280. virtual void PreCollectionCallBack(CollectionFlags flags) = 0;
  281. virtual void PreSweepCallback() = 0;
  282. virtual void PreRescanMarkCallback() = 0;
  283. virtual size_t RootMarkCallback(RecyclerScanMemoryCallback& scanMemoryCallback, BOOL * stacksScannedByRuntime) = 0;
  284. virtual void RescanMarkTimeoutCallback() = 0;
  285. virtual void EndMarkCallback() = 0;
  286. virtual void ConcurrentCallback() = 0;
  287. virtual void WaitCollectionCallBack() = 0;
  288. virtual void PostCollectionCallBack() = 0;
  289. virtual BOOL ExecuteRecyclerCollectionFunction(Recycler * recycler, CollectionFunction function, CollectionFlags flags) = 0;
  290. virtual uint GetRandomNumber() = 0;
  291. #ifdef FAULT_INJECTION
  292. virtual void DisposeScriptContextByFaultInjectionCallBack() = 0;
  293. #endif
  294. virtual void DisposeObjects(Recycler * recycler) = 0;
  295. #ifdef ENABLE_PROJECTION
  296. virtual void MarkExternalWeakReferencedObjects(bool inPartialCollect) = 0;
  297. virtual void ResolveExternalWeakReferencedObjects() = 0;
  298. #endif
  299. #if DBG || defined(PROFILE_EXEC)
  300. virtual bool AsyncHostOperationStart(void *) = 0;
  301. virtual void AsyncHostOperationEnd(bool wasInAsync, void *) = 0;
  302. #endif
  303. };
  304. class DefaultRecyclerCollectionWrapper : public RecyclerCollectionWrapper
  305. {
  306. public:
  307. virtual void PreCollectionCallBack(CollectionFlags flags) override {}
  308. virtual void PreSweepCallback() override {}
  309. virtual void PreRescanMarkCallback() override {}
  310. virtual void RescanMarkTimeoutCallback() override {}
  311. virtual void EndMarkCallback() override {}
  312. virtual size_t RootMarkCallback(RecyclerScanMemoryCallback& scanMemoryCallback, BOOL * stacksScannedByRuntime) override { *stacksScannedByRuntime = FALSE; return 0; }
  313. virtual void ConcurrentCallback() override {}
  314. virtual void WaitCollectionCallBack() override {}
  315. virtual void PostCollectionCallBack() override {}
  316. virtual BOOL ExecuteRecyclerCollectionFunction(Recycler * recycler, CollectionFunction function, CollectionFlags flags) override;
  317. virtual uint GetRandomNumber() override { return 0; }
  318. #ifdef FAULT_INJECTION
  319. virtual void DisposeScriptContextByFaultInjectionCallBack() override {};
  320. #endif
  321. virtual void DisposeObjects(Recycler * recycler) override;
  322. #ifdef ENABLE_PROJECTION
  323. virtual void MarkExternalWeakReferencedObjects(bool inPartialCollect) override {};
  324. virtual void ResolveExternalWeakReferencedObjects() override {};
  325. #endif
  326. #if DBG || defined(PROFILE_EXEC)
  327. virtual bool AsyncHostOperationStart(void *) override { return false; };
  328. virtual void AsyncHostOperationEnd(bool wasInAsync, void *) override {};
  329. #endif
  330. static DefaultRecyclerCollectionWrapper Instance;
  331. private:
  332. static bool IsCollectionDisabled(Recycler * recycler);
  333. };
#ifdef RECYCLER_STATS
// Per-collection statistics gathered when RECYCLER_STATS is enabled, updated
// via the RECYCLER_STATS_* macros below. Grouped by collection phase.
struct RecyclerCollectionStats
{
    // Collection start/continue bookkeeping
    size_t startCollectAllocBytes;      // uncollected alloc bytes when this collection started
#ifdef PARTIAL_GC_ENABLED
    size_t startCollectNewPageCount;
#endif
    size_t continueCollectAllocBytes;
    size_t finishCollectTryCount;

    // Heuristic Stats
#ifdef PARTIAL_GC_ENABLED
    size_t rescanRootBytes;
    size_t estimatedPartialReuseBytes;
    size_t uncollectedNewPageCountPartialCollect;
    size_t partialCollectSmallHeapBlockReuseMinFreeBytes;
    double collectEfficacy;
    double collectCost;
#endif

    // Mark stats
    size_t tryMarkCount; // # of pointer try mark (* pointer size to get total number byte looked at)
    size_t tryMarkNullCount;
    size_t tryMarkUnalignedCount;
    size_t tryMarkNonRecyclerMemoryCount;
    size_t tryMarkInteriorCount;
    size_t tryMarkInteriorNullCount;
    size_t tryMarkInteriorNonRecyclerMemoryCount;
    size_t rootCount;
    size_t stackCount;
    size_t remarkCount;
    size_t scanCount; // non-leaf objects marked.
    size_t trackCount;
    size_t finalizeCount;
    size_t markThruNewObjCount;
    size_t markThruFalseNewObjCount;

    // Per-mark-pass data (one set for in-thread mark, plus one per background
    // repeat-mark pass in concurrent GC).
    struct MarkData
    {
        // Rescan stats
#if defined(PARTIAL_GC_ENABLED) || defined(CONCURRENT_GC_ENABLED)
        size_t rescanPageCount;
        size_t rescanObjectCount;
        size_t rescanObjectByteCount;
        size_t rescanLargePageCount;
        size_t rescanLargeObjectCount;
        size_t rescanLargeByteCount;
#endif
        size_t markCount; // total number of object marked
        size_t markBytes; // size of all objects marked.
    } markData;
#ifdef CONCURRENT_GC_ENABLED
    MarkData backgroundMarkData[RecyclerHeuristic::MaxBackgroundRepeatMarkCount];
    size_t trackedObjectCount;
#endif
#ifdef PARTIAL_GC_ENABLED
    size_t clientTrackedObjectCount;
#endif

    // Sweep stats
    size_t heapBlockCount[HeapBlock::BlockTypeCount]; // number of heap blocks (processed during swept)
    size_t heapBlockFreeCount[HeapBlock::BlockTypeCount]; // number of heap blocks deleted
    size_t heapBlockConcurrentSweptCount[HeapBlock::SmallBlockTypeCount];
    size_t heapBlockSweptCount[HeapBlock::SmallBlockTypeCount]; // number of heap blocks swept
    size_t objectSweptCount; // objects freed (free list + whole page freed)
    size_t objectSweptBytes;
    size_t objectSweptFreeListCount; // objects freed (free list)
    size_t objectSweptFreeListBytes;
    size_t objectSweepScanCount; // number of objects walked for sweeping (exclude whole page freed)
    size_t finalizeSweepCount; // number of objects finalizer/dispose called
#ifdef PARTIAL_GC_ENABLED
    size_t smallNonLeafHeapBlockPartialReuseCount[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialReuseBytes[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialUnusedCount[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialUnusedBytes[HeapBlock::SmallBlockTypeCount];
#endif

    // Memory Stats
    size_t heapBlockFreeByteCount[HeapBlock::BlockTypeCount]; // The remaining usable free byte count
    size_t largeHeapBlockUsedByteCount; // Used byte count
    size_t largeHeapBlockTotalByteCount; // Total byte count

    // Empty/zero heap block stats
    uint numEmptySmallBlocks[HeapBlock::SmallBlockTypeCount];
    uint numZeroedOutSmallBlocks;
};
  414. #define RECYCLER_STATS_INC_IF(cond, r, f) if (cond) { RECYCLER_STATS_INC(r, f); }
  415. #define RECYCLER_STATS_INC(r, f) ++r->collectionStats.f
  416. #define RECYCLER_STATS_INTERLOCKED_INC(r, f) { InterlockedIncrement((LONG *)&r->collectionStats.f); }
  417. #define RECYCLER_STATS_DEC(r, f) --r->collectionStats.f
  418. #define RECYCLER_STATS_ADD(r, f, v) r->collectionStats.f += (v)
  419. #define RECYCLER_STATS_INTERLOCKED_ADD(r, f, v) { InterlockedAdd((LONG *)&r->collectionStats.f, (LONG)(v)); }
  420. #define RECYCLER_STATS_SUB(r, f, v) r->collectionStats.f -= (v)
  421. #define RECYCLER_STATS_SET(r, f, v) r->collectionStats.f = v
  422. #else
  423. #define RECYCLER_STATS_INC_IF(cond, r, f)
  424. #define RECYCLER_STATS_INC(r, f)
  425. #define RECYCLER_STATS_INTERLOCKED_INC(r, f)
  426. #define RECYCLER_STATS_DEC(r, f)
  427. #define RECYCLER_STATS_ADD(r, f, v)
  428. #define RECYCLER_STATS_INTERLOCKED_ADD(r, f, v)
  429. #define RECYCLER_STATS_SUB(r, f, v)
  430. #define RECYCLER_STATS_SET(r, f, v)
  431. #endif
  432. #ifdef RECYCLER_TRACE
// Snapshot of the inputs/heuristics that triggered a collection, recorded
// for tracing (RECYCLER_TRACE builds only).
struct CollectionParam
{
CollectionFlags flags; // flags the collection was requested with
bool finishOnly;
bool repeat;
// NOTE(review): "Concurent" is a typo for "Concurrent"; renaming the field
// would touch every user, so it is only flagged here.
bool priorityBoostConcurentSweepOverride;
bool domCollect;
int timeDiff;
size_t uncollectedAllocBytes;
size_t uncollectedPinnedObjects;
#ifdef PARTIAL_GC_ENABLED
// Partial-GC heuristic inputs at the time the collection started.
size_t uncollectedNewPageCountPartialCollect;
size_t uncollectedNewPageCount;
size_t unusedPartialCollectFreeBytes;
bool inPartialCollectMode;
#endif
};
  450. #endif
  451. #include "RecyclerObjectGraphDumper.h"
  452. #ifdef RECYCLER_WRITE_BARRIER_ALLOC_SEPARATE_PAGE
  453. // Macro to be used within the recycler
  454. #define ForRecyclerPageAllocator(action) { \
  455. this->recyclerPageAllocator.##action; \
  456. this->recyclerLargeBlockPageAllocator.##action; \
  457. this->recyclerWithBarrierPageAllocator.##action; \
  458. this->threadPageAllocator->##action; \
  459. }
  460. // Macro that external objects referencing the recycler can use
  461. #define ForEachRecyclerPageAllocatorIn(recycler, action) { \
  462. recycler->GetRecyclerPageAllocator()->##action; \
  463. recycler->GetRecyclerLargeBlockPageAllocator()->##action; \
  464. recycler->GetRecyclerWithBarrierPageAllocator()->##action; \
  465. recycler->GetRecyclerLeafPageAllocator()->##action; \
  466. }
  467. #else
  468. // Macro to be used within the recycler
  469. #define ForRecyclerPageAllocator(action) { \
  470. this->recyclerPageAllocator.##action; \
  471. this->recyclerLargeBlockPageAllocator.##action; \
  472. this->threadPageAllocator->##action; \
  473. }
  474. // Macro that external objects referencing the recycler can use
  475. #define ForEachRecyclerPageAllocatorIn(recycler, action) { \
  476. recycler->GetRecyclerPageAllocator()->##action; \
  477. recycler->GetRecyclerLargeBlockPageAllocator()->##action; \
  478. recycler->GetRecyclerLeafPageAllocator()->##action; \
  479. }
  480. #endif
  481. class RecyclerParallelThread
  482. {
  483. public:
  484. typedef void (Recycler::* WorkFunc)();
  485. RecyclerParallelThread(Recycler * recycler, WorkFunc workFunc) :
  486. recycler(recycler),
  487. workFunc(workFunc),
  488. concurrentWorkReadyEvent(NULL),
  489. concurrentWorkDoneEvent(NULL),
  490. concurrentThread(NULL)
  491. {
  492. }
  493. ~RecyclerParallelThread()
  494. {
  495. Assert(concurrentThread == NULL);
  496. Assert(concurrentWorkReadyEvent == NULL);
  497. Assert(concurrentWorkDoneEvent == NULL);
  498. }
  499. bool StartConcurrent();
  500. void WaitForConcurrent();
  501. void Shutdown();
  502. bool EnableConcurrent(bool synchronizeOnStartup);
  503. private:
  504. // Static entry point for thread creation
  505. static unsigned int StaticThreadProc(LPVOID lpParameter);
  506. // Static entry point for thread service usage
  507. static void StaticBackgroundWorkCallback(void * callbackData);
  508. private:
  509. WorkFunc workFunc;
  510. Recycler * recycler;
  511. HANDLE concurrentWorkReadyEvent;// main thread uses this event to tell concurrent threads that the work is ready
  512. HANDLE concurrentWorkDoneEvent;// concurrent threads use this event to tell main thread that the work allocated is done
  513. HANDLE concurrentThread;
  514. bool synchronizeOnStartup;
  515. };
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
// RAII helper (debug-config builds only). Presumably toggles protection on
// the recycler's pages for the guard's lifetime, with Unprotect() lifting it
// early — definitions are out of line; confirm in the .cpp.
class AutoProtectPages
{
public:
AutoProtectPages(Recycler* recycler, bool protectEnabled);
~AutoProtectPages();
void Unprotect();
private:
Recycler* recycler;
bool isReadOnly; // NOTE(review): presumably "pages currently read-only" — verify in .cpp
};
#endif
  528. class Recycler
  529. {
  530. friend class RecyclerScanMemoryCallback;
  531. friend class RecyclerSweep;
  532. friend class MarkContext;
  533. friend class HeapBlock;
  534. friend class HeapBlockMap32;
  535. friend class RecyclerParallelThread;
  536. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  537. friend class AutoProtectPages;
  538. #endif
  539. template <typename T> friend class RecyclerWeakReference;
  540. template <typename T> friend class WeakReferenceHashTable;
  541. template <typename TBlockType>
  542. friend class SmallHeapBlockAllocator; // Needed for FindHeapBlock
  543. #if defined(RECYCLER_TRACE)
  544. friend class JavascriptThreadService;
  545. #endif
  546. #ifdef HEAP_ENUMERATION_VALIDATION
  547. friend class ActiveScriptProfilerHeapEnum;
  548. #endif
  549. friend class ScriptEngineBase; // This is for disabling GC for certain Host operations.
  550. friend class CodeGenNumberThreadAllocator;
  551. public:
  552. static const uint ConcurrentThreadStackSize = 300000;
  553. static const bool FakeZeroLengthArray = true;
  554. #ifdef RECYCLER_PAGE_HEAP
  555. // Keeping as constant in case we want to tweak the value here
  556. // Set to 0 so that the tool can do the filtering instead of the runtime
  557. static const int s_numFramesToSkipForPageHeapAlloc = 0;
  558. static const int s_numFramesToSkipForPageHeapFree = 0;
  559. static const int s_numFramesToCaptureForPageHeap = 20;
  560. #endif
  561. uint Cookie;
  562. class AutoEnterExternalStackSkippingGCMode
  563. {
  564. public:
  565. AutoEnterExternalStackSkippingGCMode(Recycler* recycler):
  566. _recycler(recycler)
  567. {
  568. // Setting this in a re-entrant mode is not allowed
  569. Assert(!recycler->isExternalStackSkippingGC);
  570. #if DBG
  571. _recycler->isExternalStackSkippingGC = true;
  572. #endif
  573. }
  574. ~AutoEnterExternalStackSkippingGCMode()
  575. {
  576. #if DBG
  577. _recycler->isExternalStackSkippingGC = false;
  578. #endif
  579. }
  580. private:
  581. Recycler* _recycler;
  582. };
  583. private:
  584. class AutoSwitchCollectionStates
  585. {
  586. public:
  587. AutoSwitchCollectionStates(Recycler* recycler, CollectionState entryState, CollectionState exitState):
  588. _recycler(recycler),
  589. _exitState(exitState)
  590. {
  591. _recycler->collectionState = entryState;
  592. }
  593. ~AutoSwitchCollectionStates()
  594. {
  595. _recycler->collectionState = _exitState;
  596. }
  597. private:
  598. Recycler* _recycler;
  599. CollectionState _exitState;
  600. };
  601. CollectionState collectionState;
  602. IdleDecommitPageAllocator * threadPageAllocator;
  603. #ifdef RECYCLER_WRITE_BARRIER_ALLOC_SEPARATE_PAGE
  604. RecyclerPageAllocator recyclerWithBarrierPageAllocator;
  605. #endif
  606. RecyclerPageAllocator recyclerPageAllocator;
  607. RecyclerPageAllocator recyclerLargeBlockPageAllocator;
  608. JsUtil::ThreadService *threadService;
  609. HeapBlockMap heapBlockMap;
#if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
// Pin-count record for a pinned object. In leak-tracking builds it behaves
// like a plain uint ref count (via the operators below) but also carries the
// stack back-traces of the pinning call sites.
struct PinRecord
{
PinRecord() : refCount(0), stackBackTraces(nullptr) {}
// Assignment is only used to reset the record: asserts that no back-traces
// are still attached and that the assigned count really is 0.
PinRecord& operator=(uint newRefCount)
{
Assert(stackBackTraces == nullptr); Assert(newRefCount == 0); refCount = 0; return *this;
}
PinRecord& operator++() { ++refCount; return *this; }
PinRecord& operator--() { --refCount; return *this; }
operator uint() const { return refCount; }
StackBackTraceNode * stackBackTraces;
private:
uint refCount;
};
#else
// Without leak tracking a pin record is just the ref count itself.
typedef uint PinRecord;
#endif
  628. typedef SimpleHashTable<void *, PinRecord, HeapAllocator, DefaultComparer, true, PrimePolicy> PinnedObjectHashTable;
  629. PinnedObjectHashTable pinnedObjectMap;
  630. WeakReferenceHashTable<PrimePolicy> weakReferenceMap;
  631. uint weakReferenceCleanupId;
  632. void * transientPinnedObject;
  633. #if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
  634. StackBackTrace * transientPinnedObjectStackBackTrace;
  635. #endif
  636. struct GuestArenaAllocator : public ArenaAllocator
  637. {
  638. GuestArenaAllocator(__in LPCWSTR name, PageAllocator * pageAllocator, void (*outOfMemoryFunc)())
  639. : ArenaAllocator(name, pageAllocator, outOfMemoryFunc), pendingDelete(false)
  640. {
  641. }
  642. bool pendingDelete;
  643. };
  644. DListBase<GuestArenaAllocator> guestArenaList;
  645. DListBase<ArenaData*> externalGuestArenaList; // guest arenas are scanned for roots
  646. HeapInfo autoHeap;
  647. #ifdef RECYCLER_PAGE_HEAP
  648. __inline bool IsPageHeapEnabled() const { return isPageHeapEnabled; }
  649. __inline bool ShouldCapturePageHeapAllocStack() const { return capturePageHeapAllocStack; }
  650. bool isPageHeapEnabled;
  651. bool capturePageHeapAllocStack;
  652. bool capturePageHeapFreeStack;
  653. #else
  654. __inline const bool IsPageHeapEnabled() const { return false; }
  655. __inline bool ShouldCapturePageHeapAllocStack() const { return false; }
  656. #endif
  657. #ifdef RECYCLER_MARK_TRACK
  658. MarkMap* markMap;
  659. CriticalSection markMapCriticalSection;
  660. void PrintMarkMap();
  661. void ClearMarkMap();
  662. #endif
  663. // Number of pages to reserve for the primary mark stack
  664. // This is the minimum number of pages to guarantee that a single heap block
  665. // can be rescanned in the worst possible case where every object in a heap block
  666. // in the smallest bucket needs to be rescanned
  667. // These many pages being reserved guarantees that in OOM Rescan, we can make progress
  668. // on every rescan iteration
  669. // We add one because there is a small amount of the page reserved for page pool metadata
  670. // so we need to allocate an additional page to be sure
  671. // Currently, this works out to 2 pages on 32-bit and 5 pages on 64-bit
  672. static const int PrimaryMarkStackReservedPageCount =
  673. ((SmallAllocationBlockAttributes::PageCount * MarkContext::MarkCandidateSize) / SmallAllocationBlockAttributes::MinObjectSize) + 1;
  674. MarkContext markContext;
  675. // Contexts for parallel marking.
  676. // We support up to 4 way parallelism, main context + 3 additional parallel contexts.
  677. MarkContext parallelMarkContext1;
  678. MarkContext parallelMarkContext2;
  679. MarkContext parallelMarkContext3;
  680. // Page pools for above markContexts
  681. PagePool markPagePool;
  682. PagePool parallelMarkPagePool1;
  683. PagePool parallelMarkPagePool2;
  684. PagePool parallelMarkPagePool3;
  685. bool IsMarkStackEmpty();
  686. bool HasPendingMarkObjects() const { return markContext.HasPendingMarkObjects() || parallelMarkContext1.HasPendingMarkObjects() || parallelMarkContext2.HasPendingMarkObjects() || parallelMarkContext3.HasPendingMarkObjects(); }
  687. bool HasPendingTrackObjects() const { return markContext.HasPendingTrackObjects() || parallelMarkContext1.HasPendingTrackObjects() || parallelMarkContext2.HasPendingTrackObjects() || parallelMarkContext3.HasPendingTrackObjects(); }
  688. RecyclerCollectionWrapper * collectionWrapper;
  689. bool inDispose;
  690. #if DBG
  691. uint collectionCount;
  692. #endif
  693. #if DBG || defined RECYCLER_TRACE
  694. bool inResolveExternalWeakReferences;
  695. #endif
  696. bool allowDispose;
  697. bool inDisposeWrapper;
  698. bool needOOMRescan;
  699. bool hasDisposableObject;
  700. DWORD tickCountNextDispose;
  701. bool hasPendingTransferDisposedObjects;
  702. bool inExhaustiveCollection;
  703. bool hasExhaustiveCandidate;
  704. bool inCacheCleanupCollection;
  705. bool inDecommitNowCollection;
  706. bool isScriptActive;
  707. bool isInScript;
  708. bool isShuttingDown;
  709. bool scanPinnedObjectMap;
  710. bool hasScannedInitialImplicitRoots;
  711. bool hasPendingUnpinnedObject;
  712. bool hasPendingDeleteGuestArena;
  713. bool inEndMarkOnLowMemory;
  714. bool decommitOnFinish;
  715. bool enableScanInteriorPointers;
  716. bool enableScanImplicitRoots;
  717. bool disableCollectOnAllocationHeuristics;
  718. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  719. bool disableCollection;
  720. #endif
  721. #ifdef PARTIAL_GC_ENABLED
  722. bool enablePartialCollect;
  723. bool inPartialCollectMode;
  724. bool hasBackgroundFinishPartial;
  725. bool partialConcurrentNextCollection;
  726. #ifdef RECYCLER_STRESS
  727. bool forcePartialScanStack;
  728. bool recyclerStress;
  729. #ifdef CONCURRENT_GC_ENABLED
  730. bool recyclerBackgroundStress;
  731. bool recyclerConcurrentStress;
  732. bool recyclerConcurrentRepeatStress;
  733. #endif
  734. #ifdef PARTIAL_GC_ENABLED
  735. bool recyclerPartialStress;
  736. #endif
  737. #endif
  738. #endif
  739. #ifdef CONCURRENT_GC_ENABLED
  740. bool skipStack;
  741. #if DBG
  742. bool isConcurrentGCOnIdle;
  743. bool isFinishGCOnIdle;
  744. bool isExternalStackSkippingGC;
  745. #endif
  746. bool queueTrackedObject;
  747. bool hasPendingConcurrentFindRoot;
  748. bool priorityBoost;
  749. bool disableConcurrent;
  750. bool enableConcurrentMark;
  751. bool enableParallelMark;
  752. bool enableConcurrentSweep;
  753. uint maxParallelism; // Max # of total threads to run in parallel
  754. byte backgroundRescanCount; // for ETW events and stats
  755. byte backgroundFinishMarkCount;
  756. size_t backgroundRescanRootBytes;
  757. HANDLE concurrentWorkReadyEvent; // main thread uses this event to tell concurrent threads that the work is ready
  758. HANDLE concurrentWorkDoneEvent; // concurrent threads use this event to tell main thread that the work allocated is done
  759. HANDLE concurrentThread;
  760. HANDLE mainThreadHandle;
  761. class SavedRegisterState
  762. {
  763. public:
  764. #if _M_IX86
  765. static const int NumRegistersToSave = 8;
  766. #elif _M_ARM
  767. static const int NumRegistersToSave = 13;
  768. #elif _M_ARM64
  769. static const int NumRegistersToSave = 13;
  770. #elif _M_AMD64
  771. static const int NumRegistersToSave = 16;
  772. #endif
  773. SavedRegisterState()
  774. {
  775. memset(registers, 0, sizeof(void*) * NumRegistersToSave);
  776. }
  777. void** GetRegisters()
  778. {
  779. return registers;
  780. }
  781. void* GetStackTop()
  782. {
  783. // By convention, our register-saving routine will always
  784. // save the stack pointer as the first item in the array
  785. return registers[0];
  786. }
  787. private:
  788. void* registers[NumRegistersToSave];
  789. };
  790. void * stackBase;
  791. SavedRegisterState savedThreadContext;
  792. template <uint parallelId>
  793. void ParallelWorkFunc();
  794. RecyclerParallelThread parallelThread1;
  795. RecyclerParallelThread parallelThread2;
  796. Js::ConfigFlagsTable& recyclerFlagsTable;
  797. #if DBG
  798. // Variable indicating if the concurrent thread has exited or not
  799. // If the concurrent thread hasn't started yet, this is set to true
  800. // Once the concurrent thread starts, it sets this to false,
  801. // and when the concurrent thread exits, it sets this to true.
  802. bool concurrentThreadExited;
  803. bool disableConcurentThreadExitedCheck;
  804. bool isProcessingTrackedObjects;
  805. bool hasIncompletedDoCollect;
  806. // This is set to true when we begin a Rescan, and set to false when either:
  807. // (1) We finish the final in-thread Rescan and are about to Mark
  808. // (2) We do a conditional ResetWriteWatch and are about to Mark
  809. // When this flag is true, we should not be modifying existing mark-related state,
  810. // including markBits and rescanState.
  811. bool isProcessingRescan;
  812. #endif
  813. uint tickCountStartConcurrent;
  814. bool isAborting;
  815. #endif
  816. RecyclerSweep recyclerSweepInstance;
  817. RecyclerSweep * recyclerSweep;
  818. static const uint tickDiffToNextCollect = 300;
  819. #ifdef IDLE_DECOMMIT_ENABLED
  820. HANDLE concurrentIdleDecommitEvent;
  821. DWORD needIdleDecommitSignal;
  822. #endif
  823. #ifdef PARTIAL_GC_ENABLED
  824. SListBase<void *> clientTrackedObjectList;
  825. ArenaAllocator clientTrackedObjectAllocator;
  826. size_t partialUncollectedAllocBytes;
  827. // Dynamic Heuristics for partial GC
  828. size_t uncollectedNewPageCountPartialCollect;
  829. #endif
  830. uint tickCountNextCollection;
  831. uint tickCountNextFinishCollection;
  832. void (*outOfMemoryFunc)();
  833. #ifdef RECYCLER_TEST_SUPPORT
  834. BOOL (*checkFn)(char* addr, size_t size);
  835. #endif
  836. ExternalRootMarker externalRootMarker;
  837. void * externalRootMarkerContext;
  838. #ifdef PROFILE_EXEC
  839. Js::Profiler * profiler;
  840. Js::Profiler * backgroundProfiler;
  841. PageAllocator backgroundProfilerPageAllocator;
  842. DListBase<ArenaAllocator> backgroundProfilerArena;
  843. #endif
  844. #ifdef PROFILE_MEM
  845. RecyclerMemoryData * memoryData;
  846. #endif
  847. ThreadContextId mainThreadId;
  848. #ifdef ENABLE_BASIC_TELEMETRY
  849. Js::GCTelemetry gcTel;
  850. #endif
  851. #if DBG
  852. uint heapBlockCount;
  853. bool disableThreadAccessCheck;
  854. #endif
  855. #if DBG || defined(RECYCLER_STATS)
  856. bool isForceSweeping;
  857. #endif
  858. RecyclerWatsonTelemetryBlock localTelemetryBlock;
  859. RecyclerWatsonTelemetryBlock * telemetryBlock;
  860. #ifdef RECYCLER_STATS
  861. RecyclerCollectionStats collectionStats;
  862. void PrintHeapBlockStats(wchar_t const * name, HeapBlock::HeapBlockType type);
  863. void PrintHeapBlockMemoryStats(wchar_t const * name, HeapBlock::HeapBlockType type);
  864. void PrintCollectStats();
  865. void PrintHeuristicCollectionStats();
  866. void PrintMarkCollectionStats();
  867. void PrintBackgroundCollectionStats();
  868. void PrintMemoryStats();
  869. void PrintBackgroundCollectionStat(RecyclerCollectionStats::MarkData const& markData);
  870. #endif
  871. #ifdef RECYCLER_TRACE
  872. CollectionParam collectionParam;
  873. #endif
  874. #ifdef RECYCLER_MEMORY_VERIFY
  875. uint verifyPad;
  876. bool verifyEnabled;
  877. #endif
  878. #ifdef RECYCLER_DUMP_OBJECT_GRAPH
  879. friend class RecyclerObjectGraphDumper;
  880. RecyclerObjectGraphDumper * objectGraphDumper;
  881. public:
  882. bool dumpObjectOnceOnCollect;
  883. #endif
  884. public:
  885. Recycler(AllocationPolicyManager * policyManager, IdleDecommitPageAllocator * pageAllocator, void(*outOfMemoryFunc)(), Js::ConfigFlagsTable& flags);
  886. ~Recycler();
  887. void Initialize(const bool forceInThread, JsUtil::ThreadService *threadService, const bool deferThreadStartup = false
  888. #ifdef RECYCLER_PAGE_HEAP
  889. , PageHeapMode pageheapmode = PageHeapMode::PageHeapModeOff
  890. , bool captureAllocCallStack = false
  891. , bool captureFreeCallStack = false
  892. #endif
  893. );
  894. Js::ConfigFlagsTable& GetRecyclerFlagsTable() const { return this->recyclerFlagsTable; }
  895. void SetMemProtectMode();
  896. bool IsMemProtectMode()
  897. {
  898. return this->enableScanImplicitRoots;
  899. }
  900. size_t GetUsedBytes()
  901. {
  902. size_t usedBytes = threadPageAllocator->usedBytes;
  903. #ifdef RECYCLER_WRITE_BARRIER_ALLOC_SEPARATE_PAGE
  904. usedBytes += recyclerWithBarrierPageAllocator.usedBytes;
  905. #endif
  906. usedBytes += recyclerPageAllocator.usedBytes;
  907. usedBytes += recyclerLargeBlockPageAllocator.usedBytes;
  908. return usedBytes;
  909. }
  910. void LogMemProtectHeapSize(bool fromGC);
  911. char* Realloc(void* buffer, size_t existingBytes, size_t requestedBytes, bool truncate = true);
  912. void SetTelemetryBlock(RecyclerWatsonTelemetryBlock * telemetryBlock) { this->telemetryBlock = telemetryBlock; }
  913. void Prime();
  914. void* GetOwnerContext() { return (void*) this->collectionWrapper; }
  915. PageAllocator * GetPageAllocator() { return threadPageAllocator; }
// True when an out-of-memory condition during marking requires a rescan.
bool NeedOOMRescan() const
{
return this->needOOMRescan;
}
// Flag that an OOM rescan is needed.
void SetNeedOOMRescan()
{
this->needOOMRescan = true;
}
// Clear the OOM-rescan flag and re-arm all mark contexts' page allocators
// so they may allocate again after the forced out-of-memory condition.
void ClearNeedOOMRescan()
{
this->needOOMRescan = false;
markContext.GetPageAllocator()->ResetDisableAllocationOutOfMemory();
parallelMarkContext1.GetPageAllocator()->ResetDisableAllocationOutOfMemory();
parallelMarkContext2.GetPageAllocator()->ResetDisableAllocationOutOfMemory();
parallelMarkContext3.GetPageAllocator()->ResetDisableAllocationOutOfMemory();
}
  932. BOOL RequestConcurrentWrapperCallback();
  933. BOOL CollectionInProgress() const
  934. {
  935. return collectionState != CollectionStateNotCollecting;
  936. }
  937. BOOL IsExiting() const
  938. {
  939. return (collectionState == Collection_Exit);
  940. }
  941. BOOL IsSweeping() const
  942. {
  943. return ((collectionState & Collection_Sweep) == Collection_Sweep);
  944. }
  945. #ifdef RECYCLER_PAGE_HEAP
  946. __inline bool ShouldCapturePageHeapFreeStack() const { return capturePageHeapFreeStack; }
  947. #else
  948. __inline bool ShouldCapturePageHeapFreeStack() const { return false; }
  949. #endif
void SetIsThreadBound();
// Toggle the script-active flag. Must be called while in script, and the
// value must actually change (both asserted). Entering active script pushes
// the next scheduled dispose out by the finish-collection heuristic window.
void SetIsScriptActive(bool isScriptActive)
{
Assert(this->isInScript);
Assert(this->isScriptActive != isScriptActive);
this->isScriptActive = isScriptActive;
if (isScriptActive)
{
this->tickCountNextDispose = ::GetTickCount() + RecyclerHeuristic::TickCountFinishCollection;
}
}
// Toggle the in-script flag; the value must actually change (asserted).
void SetIsInScript(bool isInScript)
{
Assert(this->isInScript != isInScript);
this->isInScript = isInScript;
}
  966. bool ShouldIdleCollectOnExit();
  967. void ScheduleNextCollection();
  968. IdleDecommitPageAllocator * GetRecyclerLeafPageAllocator()
  969. {
  970. return this->threadPageAllocator;
  971. }
  972. IdleDecommitPageAllocator * GetRecyclerPageAllocator()
  973. {
  974. return &this->recyclerPageAllocator;
  975. }
  976. IdleDecommitPageAllocator * GetRecyclerLargeBlockPageAllocator()
  977. {
  978. return &this->recyclerLargeBlockPageAllocator;
  979. }
  980. #ifdef RECYCLER_WRITE_BARRIER_ALLOC_SEPARATE_PAGE
  981. IdleDecommitPageAllocator * GetRecyclerWithBarrierPageAllocator()
  982. {
  983. return &this->recyclerWithBarrierPageAllocator;
  984. }
  985. #endif
  986. BOOL IsShuttingDown() const { return this->isShuttingDown; }
  987. #ifdef CONCURRENT_GC_ENABLED
  988. #if DBG
  989. BOOL IsConcurrentMarkEnabled() const { return enableConcurrentMark; }
  990. BOOL IsConcurrentSweepEnabled() const { return enableConcurrentSweep; }
  991. #endif
  992. template <CollectionFlags flags>
  993. BOOL FinishConcurrent();
  994. void ShutdownThread();
  995. bool EnableConcurrent(JsUtil::ThreadService *threadService, bool startAllThreads);
  996. void DisableConcurrent();
  997. void StartQueueTrackedObject();
  998. bool DoQueueTrackedObject() const;
  999. void PrepareSweep();
  1000. #endif
  1001. template <CollectionFlags flags>
  1002. void SetupPostCollectionFlags();
  1003. void EnsureNotCollecting();
  1004. bool QueueTrackedObject(FinalizableObject * trackableObject);
  1005. // FindRoots
  1006. void TryMarkNonInterior(void* candidate, void* parentReference = nullptr);
  1007. void TryMarkInterior(void *candidate, void* parentReference = nullptr);
  1008. bool InCacheCleanupCollection() { return inCacheCleanupCollection; }
  1009. void ClearCacheCleanupCollection() { Assert(inCacheCleanupCollection); inCacheCleanupCollection = false; }
  1010. // Finalizer support
  1011. void SetExternalRootMarker(ExternalRootMarker fn, void * context)
  1012. {
  1013. externalRootMarker = fn;
  1014. externalRootMarkerContext = context;
  1015. }
  1016. HeapInfo* CreateHeap();
  1017. void DestroyHeap(HeapInfo* heapInfo);
  1018. ArenaAllocator * CreateGuestArena(wchar_t const * name, void (*outOfMemoryFunc)());
  1019. void DeleteGuestArena(ArenaAllocator * arenaAllocator);
  1020. ArenaData ** RegisterExternalGuestArena(ArenaData* guestArena)
  1021. {
  1022. return externalGuestArenaList.PrependNode(&NoThrowHeapAllocator::Instance, guestArena);
  1023. }
  1024. void UnregisterExternalGuestArena(ArenaData* guestArena)
  1025. {
  1026. externalGuestArenaList.Remove(&NoThrowHeapAllocator::Instance, guestArena);
  1027. }
  1028. void UnregisterExternalGuestArena(ArenaData** guestArena)
  1029. {
  1030. externalGuestArenaList.RemoveElement(&NoThrowHeapAllocator::Instance, guestArena);
  1031. }
  1032. #ifdef RECYCLER_TEST_SUPPORT
  1033. void SetCheckFn(BOOL(*checkFn)(char* addr, size_t size));
  1034. #endif
  1035. void SetCollectionWrapper(RecyclerCollectionWrapper * wrapper)
  1036. {
  1037. this->collectionWrapper = wrapper;
  1038. #if LARGEHEAPBLOCK_ENCODING
  1039. this->Cookie = wrapper->GetRandomNumber();
  1040. #else
  1041. this->Cookie = 0;
  1042. #endif
  1043. }
  1044. static size_t GetAlignedSize(size_t size) { return HeapInfo::GetAlignedSize(size); }
  1045. HeapInfo* GetAutoHeap() { return &autoHeap; }
  1046. template <CollectionFlags flags>
  1047. BOOL CollectNow();
  1048. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  1049. void DisplayMemStats();
  1050. #endif
  1051. void AddExternalMemoryUsage(size_t size);
  1052. bool NeedDispose()
  1053. {
  1054. return this->hasDisposableObject;
  1055. }
  1056. template <CollectionFlags flags>
  1057. bool FinishDisposeObjectsNow();
  1058. BOOL ReportExternalMemoryAllocation(size_t size);
  1059. void ReportExternalMemoryFailure(size_t size);
  1060. void ReportExternalMemoryFree(size_t size);
  1061. #ifdef TRACE_OBJECT_LIFETIME
  1062. #define DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes) \
  1063. __inline char* AllocFunc##Trace(size_t size) \
  1064. { \
  1065. return AllocWithAttributesFunc<(ObjectInfoBits)(attributes | TraceBit), /* nothrow = */ false>(size); \
  1066. }
  1067. #else
  1068. #define DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributeFunc, attributes)
  1069. #endif
  1070. #define DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocWithAttributesFunc, attributes) \
  1071. __inline char * AllocFunc(size_t size) \
  1072. { \
  1073. return AllocWithAttributesFunc<attributes, /* nothrow = */ false>(size); \
  1074. } \
  1075. __forceinline char * AllocFunc##Inlined(size_t size) \
  1076. { \
  1077. return AllocWithAttributesFunc##Inlined<attributes, /* nothrow = */ false>(size); \
  1078. } \
  1079. DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes);
  1080. #define DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocWithAttributesFunc, attributes) \
  1081. __inline char * NoThrow##AllocFunc(size_t size) \
  1082. { \
  1083. return AllocWithAttributesFunc<attributes, /* nothrow = */ true>(size); \
  1084. } \
  1085. __inline char * NoThrow##AllocFunc##Inlined(size_t size) \
  1086. { \
  1087. return AllocWithAttributesFunc##Inlined<attributes, /* nothrow = */ true>(size); \
  1088. } \
  1089. DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes);
  1090. #define DEFINE_RECYCLER_ALLOC(AllocFunc, attributes) DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocWithAttributes, attributes)
  1091. #define DEFINE_RECYCLER_ALLOC_ZERO(AllocFunc, attributes) DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocZeroWithAttributes, attributes)
  1092. #define DEFINE_RECYCLER_NOTHROW_ALLOC(AllocFunc, attributes) DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocWithAttributes, attributes)
  1093. #define DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocFunc, attributes) DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocZeroWithAttributes, attributes)
  1094. DEFINE_RECYCLER_ALLOC(Alloc, NoBit);
  1095. #ifdef RECYCLER_WRITE_BARRIER_ALLOC
  1096. DEFINE_RECYCLER_ALLOC(AllocWithBarrier, WithBarrierBit);
  1097. DEFINE_RECYCLER_ALLOC(AllocFinalizedWithBarrier, FinalizableWithBarrierObjectBits);
  1098. #endif
  1099. DEFINE_RECYCLER_ALLOC(AllocFinalized, FinalizableObjectBits);
  1100. DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTracked, ClientFinalizableObjectBits);
  1101. // All trackable object are client trackable
  1102. DEFINE_RECYCLER_ALLOC(AllocTracked, ClientTrackableObjectBits);
  1103. DEFINE_RECYCLER_ALLOC(AllocLeaf, LeafBit);
  1104. DEFINE_RECYCLER_ALLOC(AllocFinalizedLeaf, FinalizableLeafBits);
  1105. DEFINE_RECYCLER_ALLOC(AllocTrackedLeaf, ClientTrackableLeafBits);
  1106. DEFINE_RECYCLER_ALLOC_ZERO(AllocZero, NoBit);
  1107. #ifdef RECYCLER_WRITE_BARRIER_ALLOC
  1108. DEFINE_RECYCLER_ALLOC_ZERO(AllocZeroWithBarrier, WithBarrierBit);
  1109. #endif
  1110. DEFINE_RECYCLER_ALLOC_ZERO(AllocLeafZero, LeafBit);
  1111. DEFINE_RECYCLER_ALLOC_ZERO(AllocZeroTrackedLeaf, ClientTrackableLeafBits);
  1112. DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocImplicitRootLeaf, ImplicitRootLeafBits);
  1113. DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocImplicitRoot, ImplicitRootBit);
  1114. template <ObjectInfoBits enumClass>
  1115. char * AllocEnumClass(size_t size)
  1116. {
  1117. Assert((enumClass & EnumClassMask) != 0);
  1118. Assert((enumClass & ~EnumClassMask) == 0);
  1119. return AllocWithAttributes<(ObjectInfoBits)(enumClass), /* nothrow = */ false>(size);
  1120. }
  1121. template <ObjectInfoBits infoBits>
  1122. char * AllocWithInfoBits(size_t size)
  1123. {
  1124. return AllocWithAttributes<infoBits, /* nothrow = */ false>(size);
  1125. }
  1126. template<typename T>
  1127. RecyclerWeakReference<T>* CreateWeakReferenceHandle(T* pStrongReference);
  1128. uint GetWeakReferenceCleanupId() const { return weakReferenceCleanupId; }
  1129. template<typename T>
  1130. bool FindOrCreateWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **ppWeakRef);
  1131. template<typename T>
  1132. bool TryGetWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **weakReference);
// Helpers exposing small-heap-block allocator internals so generated (native)
// code can bump-allocate directly from the bucket for a size category.
template <ObjectInfoBits attributes>
char* GetAddressOfAllocator(size_t sizeCat)
{
    Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
    return (char*)this->autoHeap.GetBucket<attributes>(sizeCat).GetAllocator();
}
// Offset within the allocator of the bump-allocation end address.
template <ObjectInfoBits attributes>
uint32 GetEndAddressOffset(size_t sizeCat)
{
    Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
    return this->autoHeap.GetBucket<attributes>(sizeCat).GetAllocator()->GetEndAddressOffset();
}
// Offset within the allocator of the free-object-list head.
template <ObjectInfoBits attributes>
uint32 GetFreeObjectListOffset(size_t sizeCat)
{
    Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
    return this->autoHeap.GetBucket<attributes>(sizeCat).GetAllocator()->GetFreeObjectListOffset();
}
// Out-param bundle of the three values above for the native allocation path.
void GetNormalHeapBlockAllocatorInfoForNativeAllocation(size_t sizeCat, void*& allocatorAddress, uint32& endAddressOffset, uint32& freeListOffset);
bool AllowNativeCodeBumpAllocation();
static void TrackNativeAllocatedMemoryBlock(Recycler * recycler, void * memBlock, size_t sizeCat);
// Recycler memory is reclaimed by GC, never freed through this interface;
// this stub exists only to satisfy allocator-shaped template contracts.
void Free(void* buffer, size_t size)
{
    Assert(false); // intentionally unreachable
}
// Explicit free of leaf (unscanned) / non-leaf objects; returns whether the
// object was actually released onto an explicit free list.
bool ExplicitFreeLeaf(void* buffer, size_t size);
bool ExplicitFreeNonLeaf(void* buffer, size_t size);
template <ObjectInfoBits attributes>
bool ExplicitFreeInternalWrapper(void* buffer, size_t allocSize);
template <ObjectInfoBits attributes, typename TBlockAttributes>
bool ExplicitFreeInternal(void* buffer, size_t size, size_t sizeCat);
// Round a requested size up to its allocation size category.
size_t GetAllocSize(size_t size);
template <typename TBlockAttributes>
void SetExplicitFreeBitOnSmallBlock(HeapBlock* heapBlock, size_t sizeCat, void* buffer, ObjectInfoBits attributes);
// Allocate from an explicit HeapInfo as a leaf object; throws on OOM.
char* HeapAllocR(HeapInfo* eHeap, size_t size)
{
    return RealAlloc<LeafBit, /* nothrow = */ false>(eHeap, size);
}
void HeapFree(HeapInfo* eHeap,void* candidate);
// Invoke CallBackFunction for every object carrying infoBits.
void EnumerateObjects(ObjectInfoBits infoBits, void (*CallBackFunction)(void * address, size_t size));
// External rooting: pin/unpin an object; optional *count receives the updated count.
void RootAddRef(void* obj, uint *count = nullptr);
void RootRelease(void* obj, uint *count = nullptr);
template <ObjectInfoBits attributes, bool nothrow>
__inline char* RealAlloc(HeapInfo* heap, size_t size);
template <ObjectInfoBits attributes, bool isSmallAlloc, bool nothrow>
__inline char* RealAllocFromBucket(HeapInfo* heap, size_t size);
// Bracket idle-time decommit of unused pages (see AutoIdleDecommit below).
void EnterIdleDecommit();
void LeaveIdleDecommit();
// Run pending finalizer Dispose calls.
void DisposeObjects();
BOOL IsValidObject(void* candidate, size_t minimumSize = 0);
#if DBG
void SetDisableThreadAccessCheck();
void SetDisableConcurentThreadExitedCheck();
void CheckAllocExternalMark() const;
BOOL IsFreeObject(void * candidate);
BOOL IsReentrantState() const;
#endif
#if DBG_DUMP
void PrintMarkStack();
#endif
#ifdef PROFILE_EXEC
// Execution profiling hooks (foreground + background profiler arenas).
Js::Profiler * GetProfiler() const { return this->profiler; }
ArenaAllocator * AddBackgroundProfilerArena();
void ReleaseBackgroundProfilerArena(ArenaAllocator * arena);
void SetProfiler(Js::Profiler * profiler, Js::Profiler * backgroundProfiler);
#endif
#ifdef RECYCLER_MEMORY_VERIFY
// Memory verification: pad allocations with a known fill byte and check the
// pattern later to detect stray writes.
BOOL VerifyEnabled() const { return verifyEnabled; }
void Verify(Js::Phase phase);
static void VerifyCheck(BOOL cond, wchar_t const * msg, void * address, void * corruptedAddress);
static void VerifyCheckFill(void * address, size_t size);
void FillCheckPad(void * address, size_t size, size_t alignedAllocSize, bool objectAlreadyInitialized);
// Convenience overload for freshly allocated (uninitialized) objects.
void FillCheckPad(void * address, size_t size, size_t alignedAllocSize)
{
    FillCheckPad(address, size, alignedAllocSize, false);
}
void VerifyCheckPad(void * address, size_t size);
void VerifyCheckPadExplicitFreeList(void * address, size_t size);
static const byte VerifyMemFill = 0xCA; // fill byte used for pad verification
#endif
#ifdef RECYCLER_ZERO_MEM_CHECK
void VerifyZeroFill(void * address, size_t size);
#endif
#ifdef RECYCLER_DUMP_OBJECT_GRAPH
bool DumpObjectGraph(RecyclerObjectGraphDumper::Param * param = nullptr);
void DumpObjectDescription(void *object);
#endif
#ifdef LEAK_REPORT
void ReportLeaks();
void ReportLeaksOnProcessDetach();
#endif
#ifdef CHECK_MEMORY_LEAK
void CheckLeaks(wchar_t const * header);
void CheckLeaksOnProcessDetach(wchar_t const * header);
#endif
#ifdef RECYCLER_TRACE
// Record whether the current collection was triggered by the DOM.
void SetDomCollect(bool isDomCollect)
{
    collectionParam.domCollect = isDomCollect;
}
void CaptureCollectionParam(CollectionFlags flags, bool repeat = false);
#endif
#ifdef ENABLE_BASIC_TELEMETRY
Js::GCPauseStats GetGCPauseStats()
{
    return gcTel.GetGCPauseStats(); // returns the max GC pause time in ms
}
void ResetGCPauseStats()
{
    gcTel.Reset();
}
void SetIsScriptSiteCloseGC(bool val)
{
    gcTel.SetIsScriptSiteCloseGC(val);
}
#endif
private:
// RecyclerRootPtr has implicit conversion to pointers; these overloads are
// declared (not defined) to prevent it being passed to RootAddRef/RootRelease
// directly.
template <typename T>
void RootAddRef(RecyclerRootPtr<T>& ptr, uint *count = nullptr);
template <typename T>
void RootRelease(RecyclerRootPtr<T>& ptr, uint *count = nullptr);
// Collection entry points parameterized on the requested CollectionFlags.
template <CollectionFlags flags>
BOOL CollectInternal();
template <CollectionFlags flags>
BOOL Collect();
template <CollectionFlags flags>
BOOL CollectWithHeuristic();
template <CollectionFlags flags>
BOOL CollectWithExhaustiveCandidate();
template <CollectionFlags flags>
BOOL GetPartialFlag();
bool NeedExhaustiveRepeatCollect() const;
#if DBG
bool ExpectStackSkip() const;
#endif
// Sentinel meaning "root-scan byte count not computed".
static size_t const InvalidScanRootBytes = (size_t)-1;
// Small Allocator registration (definitions follow the class).
template <typename SmallHeapBlockAllocatorType>
void AddSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
template <typename SmallHeapBlockAllocatorType>
void RemoveSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
template <ObjectInfoBits attributes, typename SmallHeapBlockAllocatorType>
char * SmallAllocatorAlloc(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
// Allocation
// Core allocation paths; the non-inlined wrappers simply forward so callers
// can choose inlining behavior explicitly.
template <ObjectInfoBits attributes, bool nothrow>
__inline char * AllocWithAttributesInlined(size_t size);
template <ObjectInfoBits attributes, bool nothrow>
char * AllocWithAttributes(size_t size)
{
    return AllocWithAttributesInlined<attributes, nothrow>(size);
}
template <ObjectInfoBits attributes, bool nothrow>
__inline char* AllocZeroWithAttributesInlined(size_t size);
template <ObjectInfoBits attributes, bool nothrow>
char* AllocZeroWithAttributes(size_t size)
{
    return AllocZeroWithAttributesInlined<attributes, nothrow>(size);
}
// Allocate the storage backing a weak reference entry; throws on OOM.
char* AllocWeakReferenceEntry(size_t size)
{
    return AllocWithAttributes<WeakReferenceEntryBits, /* nothrow = */ false>(size);
}
  1297. bool NeedDisposeTimed()
  1298. {
  1299. DWORD ticks = ::GetTickCount();
  1300. return (ticks > tickCountNextDispose && this->hasDisposableObject);
  1301. }
char* TryLargeAlloc(HeapInfo* heap, size_t size, ObjectInfoBits attributes, bool nothrow);
template <bool nothrow>
char* LargeAlloc(HeapInfo* heap, size_t size, ObjectInfoBits attributes);
// Out-of-memory failure path.
void OutOfMemory();
// Collection
BOOL DoCollect(CollectionFlags flags);
BOOL DoCollectWrapped(CollectionFlags flags);
BOOL CollectOnAllocatorThread();
#if DBG
void ResetThreadId();
#endif
// Root scanning: pinned objects, the machine stack, and guest arenas.
// The size_t returns report scanned byte counts.
template <bool background>
size_t ScanPinnedObjects();
size_t ScanStack();
size_t ScanArena(ArenaData * alloc, bool background);
void ScanImplicitRoots();
void ScanInitialImplicitRoots();
void ScanNewImplicitRoots();
size_t FindRoots();
size_t TryMarkArenaMemoryBlockList(ArenaMemoryBlock * memoryBlocks);
size_t TryMarkBigBlockList(BigBlock * memoryBlocks);
size_t TryMarkBigBlockListWithWriteWatch(BigBlock * memoryBlocks);
// Mark
void ResetMarks(ResetMarkFlags flags);
void Mark();
bool EndMark();
bool EndMarkCheckOOMRescan();
void EndMarkOnLowMemory();
void DoParallelMark();
void DoBackgroundParallelMark();
size_t RootMark(CollectionState markState);
void ProcessMark(bool background);
void ProcessParallelMark(bool background, MarkContext * markContext);
template <bool parallel, bool interior>
void ProcessMarkContext(MarkContext * markContext);
public:
// Query the object's mark bit via the heap block map.
bool IsObjectMarked(void* candidate) { return this->heapBlockMap.IsMarked(candidate); }
#ifdef RECYCLER_STRESS
bool StressCollectNow();
#endif
private:
HeapBlock* FindHeapBlock(void * candidate);
// One-entry cache for heap block lookup: a repeated lookup of the same
// candidate address can reuse the previous result.
struct FindBlockCache
{
    FindBlockCache():
        heapBlock(nullptr),
        candidate(nullptr)
    {
    }
    HeapBlock* heapBlock;  // block found for 'candidate' (nullptr if none cached)
    void* candidate;       // address the cached entry corresponds to
} blockCache;
// Scan a memory range for GC pointer candidates; the Interior variant also
// accepts pointers into the middle of objects.
__inline void ScanObjectInline(void ** obj, size_t byteCount);
__inline void ScanObjectInlineInterior(void ** obj, size_t byteCount);
__inline void ScanMemoryInline(void ** obj, size_t byteCount);
// Zero-length ranges are skipped before dispatching to the inline scanner.
void ScanMemory(void ** obj, size_t byteCount) { if (byteCount != 0) { ScanMemoryInline(obj, byteCount); } }
bool AddMark(void * candidate, size_t byteCount);
// Sweep
bool Sweep(size_t rescanRootBytes = (size_t)-1, bool concurrent = false, bool adjustPartialHeuristics = false);
void SweepWeakReference();
void SweepHeap(bool concurrent, RecyclerSweep& recyclerSweep);
void FinishSweep(RecyclerSweep& recyclerSweep);
bool FinishDisposeObjects();
template <CollectionFlags flags>
bool FinishDisposeObjectsWrapped();
// End-of-collection bookkeeping and heuristic resets.
void FinishCollection();
void FinishCollection(bool needConcurrentSweep);
void EndCollection();
void ResetCollectionState();
void ResetMarkCollectionState();
void ResetHeuristicCounters();
void ResetPartialHeuristicCounters();
// Collection state predicates.
BOOL IsMarkState() const;
BOOL IsFindRootsState() const;
BOOL IsInThreadFindRootsState() const;
// Per-phase begin/end bookkeeping.
template <Js::Phase phase>
void CollectionBegin();
template <Js::Phase phase>
void CollectionEnd();
#ifdef PARTIAL_GC_ENABLED
// Partial GC support.
void ProcessClientTrackedObjects();
bool PartialCollect(bool concurrent);
void FinishPartialCollect(RecyclerSweep * recyclerSweep = nullptr);
void ClearPartialCollect();
void BackgroundFinishPartialCollect(RecyclerSweep * recyclerSweep);
#endif
#if defined(PARTIAL_GC_ENABLED) || defined(CONCURRENT_GC_ENABLED)
size_t RescanMark(DWORD waitTime);
size_t FinishMark(DWORD waitTime);
size_t FinishMarkRescan(bool background);
void ProcessTrackedObjects();
#endif
#ifdef CONCURRENT_GC_ENABLED
// Concurrent GC
// Concurrent support is on if any of concurrent mark, parallel mark, or
// concurrent sweep is enabled.
BOOL IsConcurrentEnabled() const { return this->enableConcurrentMark || this->enableParallelMark || this->enableConcurrentSweep; }
BOOL IsConcurrentMarkState() const;
BOOL IsConcurrentMarkExecutingState() const;
BOOL IsConcurrentResetMarksState() const;
BOOL IsConcurrentFindRootState() const;
BOOL IsConcurrentExecutingState() const;
BOOL IsConcurrentSweepExecutingState() const;
BOOL IsConcurrentState() const;
// True while the collection state carries all Collection_ConcurrentSweep bits.
BOOL InConcurrentSweep()
{
    return ((collectionState & Collection_ConcurrentSweep) == Collection_ConcurrentSweep);
}
#if DBG
BOOL IsConcurrentFinishedState() const;
#endif // DBG
// Background thread lifecycle and work dispatch.
bool InitializeConcurrent(JsUtil::ThreadService* threadService);
bool AbortConcurrent(bool restoreState);
void FinalizeConcurrent(bool restoreState);
static unsigned int StaticThreadProc(LPVOID lpParameter);
static int ExceptFilter(LPEXCEPTION_POINTERS pEP);
DWORD ThreadProc();
void DoBackgroundWork(bool forceForeground = false);
static void StaticBackgroundWorkCallback(void * callbackData);
BOOL CollectOnConcurrentThread();
bool StartConcurrent(CollectionState const state);
BOOL StartBackgroundMarkCollect();
BOOL StartSynchronousBackgroundMark();
BOOL StartAsynchronousBackgroundMark();
BOOL StartBackgroundMark(bool foregroundResetMark, bool foregroundFindRoots);
BOOL StartConcurrentSweepCollect();
template <CollectionFlags flags>
BOOL TryFinishConcurrentCollect();
BOOL WaitForConcurrentThread(DWORD waitTime);
void FlushBackgroundPages();
BOOL FinishConcurrentCollect(CollectionFlags flags);
BOOL FinishConcurrentCollectWrapped(CollectionFlags flags);
// Work executed on the background thread.
void BackgroundMark();
void BackgroundResetMarks();
void PrepareBackgroundFindRoots();
void RevertPrepareBackgroundFindRoots();
size_t BackgroundFindRoots();
size_t BackgroundScanStack();
size_t BackgroundRepeatMark();
size_t BackgroundRescan(RescanFlags rescanFlags);
void BackgroundResetWriteWatchAll();
size_t BackgroundFinishMark();
char* GetScriptThreadStackTop();
void SweepPendingObjects(RecyclerSweep& recyclerSweep);
void ConcurrentTransferSweptObjects(RecyclerSweep& recyclerSweep);
#ifdef PARTIAL_GC_ENABLED
void ConcurrentPartialTransferSweptObjects(RecyclerSweep& recyclerSweep);
#endif // PARTIAL_GC_ENABLED
#endif // CONCURRENT_GC_ENABLED
bool ForceSweepObject();
void NotifyFree(__in char * address, size_t size);
template <bool pageheap, typename T>
void NotifyFree(T * heapBlock);
void CleanupPendingUnroot();
#ifdef ENABLE_JS_ETW
// ETW free-memory events are batched: up to BulkFreeMemoryCount records are
// buffered in etwFreeRecords before being flushed as a bulk event.
ULONG EventWriteFreeMemoryBlock(HeapBlock* heapBlock);
void FlushFreeRecord();
void AppendFreeMemoryETWRecord(__in char *address, size_t size);
static const uint BulkFreeMemoryCount = 400;
uint bulkFreeMemoryWrittenCount; // number of buffered records not yet flushed
struct ETWFreeRecord {
    char* memoryAddress;
    uint32 objectSize;
};
ETWFreeRecord etwFreeRecords[BulkFreeMemoryCount];
#endif
// Take over an externally prepared block as a recycler heap block.
template <ObjectInfoBits attributes>
bool IntegrateBlock(char * blockAddress, PageSegment * segment, size_t allocSize, size_t objectSize);
// Heap block and bucket types need access to recycler internals.
template <class TBlockAttributes> friend class SmallHeapBlockT;
template <class TBlockAttributes> friend class SmallNormalHeapBlockT;
template <class TBlockAttributes> friend class SmallLeafHeapBlockT;
template <class TBlockAttributes> friend class SmallFinalizableHeapBlockT;
friend class LargeHeapBlock;
friend class HeapInfo;
friend class LargeHeapBucket;
template <typename TBlockType>
friend class HeapBucketT;
template <typename TBlockType>
friend class SmallNormalHeapBucketBase;
template <typename T, ObjectInfoBits attributes = LeafBit>
friend class RecyclerFastAllocator;
#ifdef RECYCLER_TRACE
void PrintCollectTrace(Js::Phase phase, bool finish = false, bool noConcurrentWork = false);
#endif
#ifdef RECYCLER_VERIFY_MARK
// Post-mark verification of roots, stack, arenas, and individual addresses.
void VerifyMark();
void VerifyMarkRoots();
void VerifyMarkStack();
void VerifyMarkArena(ArenaData * arena);
void VerifyMarkBigBlockList(BigBlock * memoryBlocks);
void VerifyMarkArenaMemoryBlockList(ArenaMemoryBlock * memoryBlocks);
void VerifyMark(void * address);
#endif
#if DBG_DUMP
bool forceTraceMark;
#endif
bool isHeapEnumInProgress;
#if DBG
bool allowAllocationDuringHeapEnum;
bool allowAllocationDuringRenentrance;
#ifdef ENABLE_PROJECTION
bool isInRefCountTrackingForProjection;
#endif
#endif
// There are two scenarios where we allow limited allocation but disallow GC
// during those allocations:
// in heap enum when we allocate PropertyRecord, and
// in projection ExternalMark allowing allocation of VarToDispEx. This is the
// common flag, while there is a debug-only flag for each of the two scenarios.
bool isCollectionDisabled;
#ifdef TRACK_ALLOC
public:
// Per-type allocation tracking: record the type about to be allocated so the
// allocation path can attribute the memory to it.
Recycler * TrackAllocInfo(TrackAllocData const& data);
void ClearTrackAllocInfo(TrackAllocData* data = NULL);
#ifdef PROFILE_RECYCLER_ALLOC
void PrintAllocStats();
private:
static bool DoProfileAllocTracker();
void InitializeProfileAllocTracker();
void TrackUnallocated(__in char* address, __in char *endAddress, size_t sizeCat);
void TrackAllocCore(void * object, size_t size, const TrackAllocData& trackAllocData, bool traceLifetime = false);
void* TrackAlloc(void * object, size_t size, const TrackAllocData& trackAllocData, bool traceLifetime = false);
void TrackIntegrate(__in_ecount(blockSize) char * blockAddress, size_t blockSize, size_t allocSize, size_t objectSize, const TrackAllocData& trackAllocData);
BOOL TrackFree(const char* address, size_t size);
void TrackAllocWeakRef(RecyclerWeakReferenceBase * weakRef);
void TrackFreeWeakRef(RecyclerWeakReferenceBase * weakRef);
// Allocation statistics for one type, either its instance or array form.
struct TrackerData
{
    // NOTE(review): TraceLifetime is initialized unconditionally here, but the
    // member is only declared under #ifdef TRACE_OBJECT_LIFETIME below -
    // confirm builds with PROFILE_RECYCLER_ALLOC always define it.
    TrackerData(type_info const * typeinfo, bool isArray) : typeinfo(typeinfo), isArray(isArray),
        ItemSize(0), ItemCount(0), AllocCount(0), ReqSize(0), AllocSize(0), FreeCount(0), FreeSize(0), TraceLifetime(false)
#ifdef PERF_COUNTERS
        , counter(PerfCounter::RecyclerTrackerCounterSet::GetPerfCounter(typeinfo, isArray))
        , sizeCounter(PerfCounter::RecyclerTrackerCounterSet::GetPerfSizeCounter(typeinfo, isArray))
#endif
    {
    }
    type_info const * typeinfo;
    bool isArray;
#ifdef TRACE_OBJECT_LIFETIME
    bool TraceLifetime;
#endif
    size_t ItemSize;
    size_t ItemCount;
    int AllocCount;
    int64 ReqSize;
    int64 AllocSize;
    int FreeCount;
    int64 FreeSize;
#ifdef PERF_COUNTERS
    PerfCounter::Counter& counter;
    PerfCounter::Counter& sizeCounter;
#endif
    static TrackerData EmptyData;
    static TrackerData ExplicitFreeListObjectData;
};
TrackerData * GetTrackerData(void * address);
void SetTrackerData(void * address, TrackerData * data);
// Pairs the instance-form and array-form statistics for one type.
struct TrackerItem
{
    TrackerItem(type_info const * typeinfo) : instanceData(typeinfo, false), arrayData(typeinfo, true)
#ifdef PERF_COUNTERS
        , weakRefCounter(PerfCounter::RecyclerTrackerCounterSet::GetWeakRefPerfCounter(typeinfo))
#endif
    {}
    TrackerData instanceData;
    TrackerData arrayData;
#ifdef PERF_COUNTERS
    PerfCounter::Counter& weakRefCounter;
#endif
};
typedef JsUtil::BaseDictionary<type_info const *, TrackerItem *, NoCheckHeapAllocator, PrimeSizePolicy, DefaultComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> TypeInfotoTrackerItemMap;
typedef JsUtil::BaseDictionary<void *, TrackerData *, NoCheckHeapAllocator, PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> PointerToTrackerDataMap;
TypeInfotoTrackerItemMap * trackerDictionary;
// Guards trackerDictionary against concurrent access.
CRITICAL_SECTION trackerCriticalSection;
#endif
TrackAllocData nextAllocData;
#endif
public:
// Enumeration
// Temporarily places the recycler in a non-collecting mark state so the heap
// can be marked/enumerated without running a real collection; the previous
// collection state (and stats) is restored on destruction.
class AutoSetupRecyclerForNonCollectingMark
{
private:
    Recycler& m_recycler;
    bool m_setupDone;
    CollectionState m_previousCollectionState;
#ifdef RECYCLER_STATS
    RecyclerCollectionStats m_previousCollectionStats;
#endif
public:
    AutoSetupRecyclerForNonCollectingMark(Recycler& recycler, bool setupForHeapEnumeration = false);
    ~AutoSetupRecyclerForNonCollectingMark();
    void DoCommonSetup();
    void SetupForHeapEnumeration();
};
friend class RecyclerHeapObjectInfo;
// Resolve a candidate pointer to a heap object, filling heapObject on success.
bool FindImplicitRootObject(void* candidate, RecyclerHeapObjectInfo& heapObject);
bool FindHeapObject(void* candidate, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject);
bool FindHeapObjectWithClearedAllocators(void* candidate, RecyclerHeapObjectInfo& heapObject);
bool IsCollectionDisabled() const { return isCollectionDisabled; }
// Heap enumeration implies collection is disabled.
bool IsHeapEnumInProgress() const { Assert(isHeapEnumInProgress ? isCollectionDisabled : true); return isHeapEnumInProgress; }
#if DBG
// There are limited cases where we have to allow allocation during heap enumeration. GC is explicitly
// disabled during heap enumeration for these limited cases. (See DefaultRecyclerCollectionWrapper)
// The only case of allocation right now is allocating a property record for a string-based type handler
// so we can use the propertyId as the relation Id.
// Allocation during enumeration is still frowned upon and should still be avoided if possible.
bool AllowAllocationDuringHeapEnum() const { return allowAllocationDuringHeapEnum; }
// Scope guard toggling the allow-allocation-during-heap-enum debug flag.
class AutoAllowAllocationDuringHeapEnum : public AutoBooleanToggle
{
public:
    AutoAllowAllocationDuringHeapEnum(Recycler * recycler) : AutoBooleanToggle(&recycler->allowAllocationDuringHeapEnum) {};
};
#ifdef ENABLE_PROJECTION
bool IsInRefCountTrackingForProjection() const { return isInRefCountTrackingForProjection;}
class AutoIsInRefCountTrackingForProjection : public AutoBooleanToggle
{
public:
    AutoIsInRefCountTrackingForProjection(Recycler * recycler) : AutoBooleanToggle(&recycler->isInRefCountTrackingForProjection) {};
};
#endif
#endif
// Scope guard disabling collection (and, in DBG, flagging re-entrant allocation).
class AutoAllowAllocationDuringReentrance : public AutoBooleanToggle
{
public:
    AutoAllowAllocationDuringReentrance(Recycler * recycler) :
        AutoBooleanToggle(&recycler->isCollectionDisabled)
#if DBG
        , allowAllocationDuringRenentrance(&recycler->allowAllocationDuringRenentrance)
#endif
    {};
#if DBG
private:
    AutoBooleanToggle allowAllocationDuringRenentrance;
#endif
};
#ifdef HEAP_ENUMERATION_VALIDATION
// Callback invoked for each heap object after a heap-enumeration scan.
typedef void(*PostHeapEnumScanCallback)(const HeapObject& heapObject, void *data);
PostHeapEnumScanCallback pfPostHeapEnumScanCallback;
void *postHeapEnunScanData; // NOTE(review): "Enun" typo kept - renaming would break other references
void PostHeapEnumScan(PostHeapEnumScanCallback callback, void*data);
bool IsPostEnumHeapValidationInProgress() const { return pfPostHeapEnumScanCallback != NULL; }
#endif
private:
// Map an interior pointer to the start address of its containing object.
void* GetRealAddressFromInterior(void* candidate);
void BeginNonCollectingMark();
void EndNonCollectingMark();
#if defined(RECYCLER_DUMP_OBJECT_GRAPH) || defined(LEAK_REPORT) || defined(CHECK_MEMORY_LEAK)
public:
// Shutdown diagnostics state (DllCanUnloadNow / process detach).
bool IsInDllCanUnloadNow() const { return inDllCanUnloadNow; }
bool IsInDetachProcess() const { return inDetachProcess; }
void SetInDllCanUnloadNow();
void SetInDetachProcess();
private:
bool inDllCanUnloadNow;
bool inDetachProcess;
bool isPrimaryMarkContextInitialized;
#endif
#if defined(LEAK_REPORT) || defined(CHECK_MEMORY_LEAK)
template <class Fn>
void ReportOnProcessDetach(Fn fn);
void PrintPinnedObjectStackTraces();
#endif
public:
// Per-object before-collect callbacks (same contract as the JSRT
// JsObjectBeforeCollectCallback).
typedef void (CALLBACK *ObjectBeforeCollectCallback)(void* object, void* callbackState); // same as jsrt JsObjectBeforeCollectCallback
void SetObjectBeforeCollectCallback(void* object, ObjectBeforeCollectCallback callback, void* callbackState);
void ClearObjectBeforeCollectCallbacks();
bool IsInObjectBeforeCollectCallback() const { return objectBeforeCollectCallbackState != ObjectBeforeCollectCallback_None; }
private:
struct ObjectBeforeCollectCallbackData
{
    ObjectBeforeCollectCallback callback;
    void* callbackState;
    ObjectBeforeCollectCallbackData() {}
    ObjectBeforeCollectCallbackData(ObjectBeforeCollectCallback callback, void* callbackState) : callback(callback), callbackState(callbackState) {}
};
typedef JsUtil::BaseDictionary<void*, ObjectBeforeCollectCallbackData, HeapAllocator,
    PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> ObjectBeforeCollectCallbackMap;
ObjectBeforeCollectCallbackMap* objectBeforeCollectCallbackMap;
// Tracks whether before-collect callbacks are currently being dispatched.
enum ObjectBeforeCollectCallbackState
{
    ObjectBeforeCollectCallback_None,
    ObjectBeforeCollectCallback_Normal, // Normal GC BeforeCollect callback
    ObjectBeforeCollectCallback_Shutdown, // At shutdown invoke all BeforeCollect callbacks
} objectBeforeCollectCallbackState;
bool ProcessObjectBeforeCollectCallbacks(bool atShutdown = false);
};
// Lightweight descriptor for a heap object located via Recycler::FindHeapObject:
// carries the object address, owning recycler and heap block, and access to the
// object's attribute byte (directly, or via the encoded large-object header).
class RecyclerHeapObjectInfo
{
    void* m_address;
    Recycler * m_recycler;
    HeapBlock* m_heapBlock;
#if LARGEHEAPBLOCK_ENCODING
    // Large heap blocks encode the attribute byte inside the object header, so
    // we keep the header pointer instead of a raw attribute pointer.
    union
    {
        byte * m_attributes;
        LargeObjectHeader * m_largeHeapBlockHeader;
    };
    bool isUsingLargeHeapBlock = false; // discriminates the union above
#else
    byte * m_attributes;
#endif
public:
    RecyclerHeapObjectInfo() : m_address(NULL), m_recycler(NULL), m_heapBlock(NULL), m_attributes(NULL) {}
    RecyclerHeapObjectInfo(void* address, Recycler * recycler, HeapBlock* heapBlock, byte * attributes) :
        m_address(address), m_recycler(recycler), m_heapBlock(heapBlock), m_attributes(attributes) { }
    void* GetObjectAddress() const { return m_address; }
    // True if the object carries LeafBit or lives in a leaf block.
    bool IsLeaf() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie) & LeafBit) != 0;
        }
#endif
        return ((*m_attributes & LeafBit) != 0 || this->m_heapBlock->IsLeafBlock());
    }
    bool IsImplicitRoot() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie) & ImplicitRootBit) != 0;
        }
#endif
        return (*m_attributes & ImplicitRootBit) != 0;
    }
    bool IsObjectMarked() const { Assert(m_recycler); return m_recycler->heapBlockMap.IsMarked(m_address); }
    void SetObjectMarked() { Assert(m_recycler); m_recycler->heapBlockMap.SetMark(m_address); }
    ObjectInfoBits GetAttributes() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (ObjectInfoBits)m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        }
#endif
        return (ObjectInfoBits)*m_attributes;
    }
    size_t GetSize() const;
#if LARGEHEAPBLOCK_ENCODING
    // Switch this descriptor to the encoded large-heap-block representation.
    void SetLargeHeapBlockHeader(LargeObjectHeader * largeHeapBlockHeader)
    {
        m_largeHeapBlockHeader = largeHeapBlockHeader;
        isUsingLargeHeapBlock = true;
    }
#endif
    // Mark the object as enumerated by the memory profiler; returns whether it
    // had already been enumerated.
    bool SetMemoryProfilerHasEnumerated()
    {
        Assert(m_heapBlock);
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return SetMemoryProfilerHasEnumeratedForLargeHeapBlock();
        }
#endif
        bool wasMemoryProfilerOldObject = (*m_attributes & MemoryProfilerOldObjectBit) != 0;
        *m_attributes |= MemoryProfilerOldObjectBit;
        return wasMemoryProfilerOldObject;
    }
    // Clear the implicit-root bit; returns whether it was previously set.
    bool ClearImplicitRootBit()
    {
        // This can only be called on the main thread for a non-finalizable block,
        // as a finalizable block requires that the bit not be changed during
        // concurrent mark, since the background thread changes the NewTrackBit.
        Assert(!m_heapBlock->IsAnyFinalizableBlock());
#ifdef RECYCLER_PAGE_HEAP
        Recycler* recycler = this->m_recycler;
        if (recycler->ShouldCapturePageHeapFreeStack())
        {
            Assert(recycler->IsPageHeapEnabled());
            this->m_heapBlock->CapturePageHeapFreeStack();
        }
#endif
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return ClearImplicitRootBitsForLargeHeapBlock();
        }
#endif
        Assert(m_attributes);
        bool wasImplicitRoot = (*m_attributes & ImplicitRootBit) != 0;
        *m_attributes &= ~ImplicitRootBit;
        return wasImplicitRoot;
    }
    // Explicitly free the object via the matching leaf/non-leaf path.
    // NOTE(review): reads *m_attributes directly with no isUsingLargeHeapBlock
    // check - confirm callers only invoke this for small-block objects.
    void ExplicitFree()
    {
        if (*m_attributes == ObjectInfoBits::LeafBit)
        {
            m_recycler->ExplicitFreeLeaf(m_address, GetSize());
        }
        else
        {
            Assert(*m_attributes == ObjectInfoBits::NoBit);
            m_recycler->ExplicitFreeNonLeaf(m_address, GetSize());
        }
    }
#if LARGEHEAPBLOCK_ENCODING
    bool ClearImplicitRootBitsForLargeHeapBlock()
    {
        Assert(m_largeHeapBlockHeader);
        byte attributes = m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        bool wasImplicitRoot = (attributes & ImplicitRootBit) != 0;
        m_largeHeapBlockHeader->SetAttributes(m_recycler->Cookie, attributes & ~ImplicitRootBit);
        return wasImplicitRoot;
    }
    bool SetMemoryProfilerHasEnumeratedForLargeHeapBlock()
    {
        Assert(m_largeHeapBlockHeader);
        byte attributes = m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        bool wasMemoryProfilerOldObject = (attributes & MemoryProfilerOldObjectBit) != 0;
        m_largeHeapBlockHeader->SetAttributes(m_recycler->Cookie, attributes | MemoryProfilerOldObjectBit);
        return wasMemoryProfilerOldObject;
    }
#endif
};
// A fake heap block that replaces the original heap block of a weak reference's
// strong target once that target has been collected, since the original heap
// block may itself have been freed. Every virtual is a stub that asserts:
// no heap-block operation is valid on a collected block.
class CollectedRecyclerWeakRefHeapBlock : public HeapBlock
{
public:
#if DBG
    virtual BOOL IsFreeObject(void* objectAddress) override { Assert(false); return false; }
#endif
    virtual BOOL IsValidObject(void* objectAddress) override { Assert(false); return false; }
    virtual byte* GetRealAddressFromInterior(void* interiorAddress) override { Assert(false); return nullptr; }
    virtual size_t GetObjectSize(void* object) override { Assert(false); return 0; }
    virtual bool FindHeapObject(void* objectAddress, Recycler * recycler, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject) override { Assert(false); return false; }
    virtual bool TestObjectMarkedBit(void* objectAddress) override { Assert(false); return false; }
    virtual void SetObjectMarkedBit(void* objectAddress) override { Assert(false); }
#ifdef RECYCLER_VERIFY_MARK
    virtual void VerifyMark(void * objectAddress) override { Assert(false); }
#endif
#ifdef RECYCLER_PERF_COUNTERS
    virtual void UpdatePerfCountersOnFree() override { Assert(false); }
#endif
#ifdef PROFILE_RECYCLER_ALLOC
    virtual void * GetTrackerData(void * address) override { Assert(false); return nullptr; }
    virtual void SetTrackerData(void * address, void * data) override { Assert(false); }
#endif
    // Shared singleton used for all collected weak-reference targets.
    static CollectedRecyclerWeakRefHeapBlock Instance;
private:
    CollectedRecyclerWeakRefHeapBlock() : HeapBlock(BlockTypeCount) { isPendingConcurrentSweep = false; }
};
  1843. class AutoIdleDecommit
  1844. {
  1845. public:
  1846. AutoIdleDecommit(Recycler * recycler) : recycler(recycler) { recycler->EnterIdleDecommit(); }
  1847. ~AutoIdleDecommit() { recycler->LeaveIdleDecommit(); }
  1848. private:
  1849. Recycler * recycler;
  1850. };
// Registers a small-object allocator for the given size category.
// Pure pass-through to autoHeap, which owns the allocator bookkeeping.
template <typename SmallHeapBlockAllocatorType>
void
Recycler::AddSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
{
autoHeap.AddSmallAllocator(allocator, sizeCat);
}
// Unregisters a small-object allocator previously added via AddSmallAllocator.
// Pure pass-through to autoHeap.
template <typename SmallHeapBlockAllocatorType>
void
Recycler::RemoveSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
{
autoHeap.RemoveSmallAllocator(allocator, sizeCat);
}
// Allocates sizeCat bytes from the given small-block allocator, applying the
// compile-time object attribute bits. Delegates to autoHeap with this recycler.
template <ObjectInfoBits attributes, typename SmallHeapBlockAllocatorType>
char *
Recycler::SmallAllocatorAlloc(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
{
return autoHeap.SmallAllocatorAlloc<attributes>(this, allocator, sizeCat);
}
// Dummy recycler allocator policy classes to choose the allocation function
class _RecyclerLeafPolicy;
class _RecyclerNonLeafPolicy;
// Primary template is intentionally empty; only the two policy
// specializations below provide the alloc/free function selectors, so using
// an unsupported policy fails at compile time.
template <typename Policy>
class _RecyclerAllocatorFunc
{};
// Leaf policy: selects the leaf variants of the recycler's alloc/free
// member functions (leaf allocations are not scanned for GC pointers).
template <>
class _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
public:
typedef char * (Recycler::*AllocFuncType)(size_t);
typedef bool (Recycler::*FreeFuncType)(void*, size_t);
// Uninitialized leaf allocation.
static AllocFuncType GetAllocFunc()
{
return &Recycler::AllocLeaf;
}
// Zero-initialized leaf allocation.
static AllocFuncType GetAllocZeroFunc()
{
return &Recycler::AllocLeafZero;
}
// Explicit (early) free of a leaf allocation.
static FreeFuncType GetFreeFunc()
{
return &Recycler::ExplicitFreeLeaf;
}
};
// Non-leaf policy: selects the regular (scanned) variants of the recycler's
// alloc/free member functions.
template <>
class _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
{
public:
typedef char * (Recycler::*AllocFuncType)(size_t);
typedef bool (Recycler::*FreeFuncType)(void*, size_t);
// Uninitialized non-leaf allocation.
static AllocFuncType GetAllocFunc()
{
return &Recycler::Alloc;
}
// Zero-initialized non-leaf allocation.
static AllocFuncType GetAllocZeroFunc()
{
return &Recycler::AllocZero;
}
// Explicit (early) free of a non-leaf allocation.
static FreeFuncType GetFreeFunc()
{
return &Recycler::ExplicitFreeNonLeaf;
}
};
// This is used by the compiler; when T is NOT a pointer i.e. a value type - it causes leaf allocation
// NOTE(review): presumably safe because a value type is assumed to hold no
// recycler-visible pointers; confirm against the allocator's usage.
template <typename T>
class TypeAllocatorFunc<Recycler, T> : public _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
};
// Partial template specialization; applies to T when it is a pointer
// (pointer members must be scanned, hence the non-leaf policy).
template <typename T>
class TypeAllocatorFunc<Recycler, T *> : public _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
{
};
  1923. template <bool isLeaf>
  1924. class ListTypeAllocatorFunc<Recycler, isLeaf>
  1925. {
  1926. public:
  1927. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  1928. static FreeFuncType GetFreeFunc()
  1929. {
  1930. if (isLeaf)
  1931. {
  1932. return &Recycler::ExplicitFreeLeaf;
  1933. }
  1934. else
  1935. {
  1936. return &Recycler::ExplicitFreeNonLeaf;
  1937. }
  1938. }
  1939. };
// Dummy class to choose the allocation function
class RecyclerLeafAllocator;
class RecyclerNonLeafAllocator;
// Partial template specialization to allocate as non leaf
// (forces scanning regardless of whether T is a pointer type).
template <typename T>
class TypeAllocatorFunc<RecyclerNonLeafAllocator, T> : public _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
{
};
// Forces leaf allocation for every T, mirroring the non-leaf variant above.
template <typename T>
class TypeAllocatorFunc<RecyclerLeafAllocator, T> : public _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
};
// AllocatorInfo maps an allocator tag type to the concrete allocator
// (always Recycler here) and the function selectors used for allocation.
template <typename TAllocType>
struct AllocatorInfo<Recycler, TAllocType>
{
typedef Recycler AllocatorType;
typedef TypeAllocatorFunc<Recycler, TAllocType> AllocatorFunc;
typedef _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy> InstAllocatorFunc; // By default any instance considered non-leaf
};
// Non-leaf tag: both type and instance allocations use the non-leaf selector.
template <typename TAllocType>
struct AllocatorInfo<RecyclerNonLeafAllocator, TAllocType>
{
typedef Recycler AllocatorType;
typedef TypeAllocatorFunc<RecyclerNonLeafAllocator, TAllocType> AllocatorFunc;
typedef TypeAllocatorFunc<RecyclerNonLeafAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
};
// Leaf tag: both type and instance allocations use the leaf selector.
template <typename TAllocType>
struct AllocatorInfo<RecyclerLeafAllocator, TAllocType>
{
typedef Recycler AllocatorType;
typedef TypeAllocatorFunc<RecyclerLeafAllocator, TAllocType> AllocatorFunc;
typedef TypeAllocatorFunc<RecyclerLeafAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
};
// ForceNonLeafAllocator / ForceLeafAllocator rebind a recycler allocator tag
// to its non-leaf / leaf counterpart, letting callers override the default
// policy of an allocator type.
template <>
struct ForceNonLeafAllocator<Recycler>
{
typedef RecyclerNonLeafAllocator AllocatorType;
};
template <>
struct ForceNonLeafAllocator<RecyclerLeafAllocator>
{
typedef RecyclerNonLeafAllocator AllocatorType;
};
template <>
struct ForceLeafAllocator<Recycler>
{
typedef RecyclerLeafAllocator AllocatorType;
};
template <>
struct ForceLeafAllocator<RecyclerNonLeafAllocator>
{
typedef RecyclerLeafAllocator AllocatorType;
};
  1993. #ifdef PROFILE_EXEC
  1994. #define RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase) if (recycler->profiler != nullptr) { recycler->profiler->Begin(phase); }
  1995. #define RECYCLER_PROFILE_EXEC_END(recycler, phase) if (recycler->profiler != nullptr) { recycler->profiler->End(phase); }
  1996. #define RECYCLER_PROFILE_EXEC_BEGIN2(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->Begin(phase1); recycler->profiler->Begin(phase2);}
  1997. #define RECYCLER_PROFILE_EXEC_END2(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->End(phase1); recycler->profiler->End(phase2);}
  1998. #define RECYCLER_PROFILE_EXEC_CHANGE(recydler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->End(phase1); recycler->profiler->Begin(phase2); }
  1999. #define RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase) if (recycler->backgroundProfiler != nullptr) { recycler->backgroundProfiler->Begin(phase); }
  2000. #define RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase) if (recycler->backgroundProfiler != nullptr) { recycler->backgroundProfiler->End(phase); }
  2001. #define RECYCLER_PROFILE_EXEC_THREAD_BEGIN(background, recycler, phase) if (background) { RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase); } else { RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase); }
  2002. #define RECYCLER_PROFILE_EXEC_THREAD_END(background, recycler, phase) if (background) { RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase); } else { RECYCLER_PROFILE_EXEC_END(recycler, phase); }
  2003. #else
  2004. #define RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase)
  2005. #define RECYCLER_PROFILE_EXEC_END(recycler, phase)
  2006. #define RECYCLER_PROFILE_EXEC_BEGIN2(recycler, phase1, phase2)
  2007. #define RECYCLER_PROFILE_EXEC_END2(recycler, phase1, phase2)
  2008. #define RECYCLER_PROFILE_EXEC_CHANGE(recydler, phase1, phase2)
  2009. #define RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase)
  2010. #define RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase)
  2011. #define RECYCLER_PROFILE_EXEC_THREAD_BEGIN(background, recycler, phase)
  2012. #define RECYCLER_PROFILE_EXEC_THREAD_END(background, recycler, phase)
  2013. #endif
  2014. }
// Placement new that allocates byteSize bytes from a specific HeapInfo
// within the recycler.
_Ret_notnull_ inline void * __cdecl
operator new(size_t byteSize, Recycler * alloc, HeapInfo * heapInfo)
{
return alloc->HeapAllocR(heapInfo, byteSize);
}
// Matching placement delete for the HeapInfo placement new above; the
// compiler invokes it only if the constructor throws after allocation.
inline void __cdecl
operator delete(void * obj, Recycler * alloc, HeapInfo * heapInfo)
{
alloc->HeapFree(heapInfo, obj);
}
// Placement new for enum-class objects. Only EnumClass_1_Bit is supported
// (asserted below), and the allocation throws on OOM rather than returning null.
_Ret_notnull_ inline void * __cdecl
operator new(size_t byteSize, Recycler * recycler, ObjectInfoBits enumClassBits)
{
// Caller must be prepared for the OOM exception path.
AssertCanHandleOutOfMemory();
Assert(byteSize != 0);
Assert(enumClassBits == EnumClass_1_Bit);
void * buffer = recycler->AllocEnumClass<EnumClass_1_Bit>(byteSize);
// All of our allocation should throw on out of memory
Assume(buffer != nullptr);
return buffer;
}
// Placement new that allocates with arbitrary compile-time object info bits,
// carried via the InfoBitsWrapper tag type. Throws on OOM.
template<ObjectInfoBits infoBits>
_Ret_notnull_ inline void * __cdecl
operator new(size_t byteSize, Recycler * recycler, const InfoBitsWrapper<infoBits>&)
{
// Caller must be prepared for the OOM exception path.
AssertCanHandleOutOfMemory();
Assert(byteSize != 0);
void * buffer = recycler->AllocWithInfoBits<infoBits>(byteSize);
// All of our allocation should throw on out of memory
Assume(buffer != nullptr);
return buffer;
}