Recycler.h 99 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
7227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307230823092310231123122313231423152316231723182319232023212322232323242325232623272328232923302331233223332334233523362337233823392340234123422343234423452346234723482349235023512352235323542355235623572358235923602361236223632364236523662367236823692370237123722373237423752376237723782379238023812382238323842385238623872388238923902391239223932394239523962397239823992400240124022403240424052406240724082409241024112412241324142415241624172418241924202421242224232424242524262427242824292430243124322433243424352436243724382439244024412442244324442445244624472448244924502451245224532454245524562457245824592460246124622463246424652466246724682469247024712472247324742475247624772478247924802481248224832484248524862487248824892490249124922493249424952496249724982499250025012502250325042505250625072508250925102511251225132514251525162517251825192520252125222523252425252526252725282529253025312532253325342535253625372538253925402541254225432544254525462547254825492550255125522553255425552556255725582559256025612562256325642565256625672568256925702571257225732574257525762577257825792580258125822583258425852586258725882589259025912592259325942595259625972598259926002601260226032604260526062607260826092610261126122613261426152616261726182619262026212622262326242625262626272628262926302631263226332634263526362637263826392640264126422643264426452646264726482649265026512652265326542655265626572658265926602661266226632664266526662667266826692670267126722673267426752676267726782679268026812682268326842685268626872688268926902691269226932694269526962697269826992700270127022703270427052706270727082709271027112712271327142715271627172718271927202721
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. #include "CollectionState.h"
  7. #include "RecyclerTelemetryInfo.h"
  8. #include "RecyclerWaitReason.h"
  9. #include "Common/ObservableValue.h"
  10. #include "CollectionFlags.h"
// Forward declarations -- full definitions are not needed by this header.
namespace Js
{
class Profiler;
enum Phase: unsigned short;
};
namespace JsUtil
{
class ThreadService;
};
#ifdef STACK_BACK_TRACE
class StackBackTraceNode;
#endif
class ScriptEngineBase;
class JavascriptThreadService;
#ifdef PROFILE_MEM
struct RecyclerMemoryData;
#endif
class ThreadContext;
namespace Memory
{
// (namespace Memory continues beyond this section)
template <typename T> class RecyclerRootPtr;
  32. class AutoBooleanToggle
  33. {
  34. public:
  35. AutoBooleanToggle(bool * b, bool value = true, bool valueMayChange = false)
  36. : b(b)
  37. {
  38. Assert(!(*b));
  39. *b = value;
  40. #if DBG
  41. this->value = value;
  42. this->valueMayChange = valueMayChange;
  43. #endif
  44. }
  45. ~AutoBooleanToggle()
  46. {
  47. if (b)
  48. {
  49. Assert(valueMayChange || *b == value);
  50. *b = false;
  51. }
  52. }
  53. void Leave()
  54. {
  55. Assert(valueMayChange || *b == value);
  56. *b = false;
  57. b = nullptr;
  58. }
  59. private:
  60. bool * b;
  61. #if DBG
  62. bool value;
  63. bool valueMayChange;
  64. #endif
  65. };
  66. template <class T>
  67. class AutoRestoreValue
  68. {
  69. public:
  70. AutoRestoreValue(T* var, const T& val):
  71. variable(var)
  72. {
  73. Assert(var);
  74. oldValue = (*variable);
  75. (*variable) = val;
  76. #ifdef DEBUG
  77. debugSetValue = val;
  78. #endif
  79. }
  80. ~AutoRestoreValue()
  81. {
  82. Assert((*variable) == debugSetValue);
  83. (*variable) = oldValue;
  84. }
  85. private:
  86. #ifdef DEBUG
  87. T debugSetValue;
  88. #endif
  89. T* variable;
  90. T oldValue;
  91. };
class Recycler;

// Functor used during root marking: each (obj, byteCount) invocation scans
// that memory range on behalf of the owning Recycler. operator() is declared
// here and defined out of line.
class RecyclerScanMemoryCallback
{
public:
RecyclerScanMemoryCallback(Recycler* recycler) : recycler(recycler) {}
void operator()(void** obj, size_t byteCount);
private:
Recycler* recycler; // non-owning back-pointer to the recycler being marked
};
// Tag type carrying a set of ObjectInfoBits as a template parameter; used by
// the RecyclerNew*WithInfoBits / *EnumClass macros to select the matching
// placement-new overload at compile time.
template<ObjectInfoBits infoBits>
struct InfoBitsWrapper{};
#if ENABLE_WEAK_REFERENCE_REGIONS
// Minimal pointer trait (true only for K*): constrains weak-reference items
// to pointer types via the static_assert below.
template<typename T>
static constexpr bool is_pointer = false;
template<typename K>
static constexpr bool is_pointer<K*> = true;
// One weakly-held slot inside a weak-reference region. The Recycler (friend)
// reads and updates the private fields during collection.
template<typename T>
class RecyclerWeakReferenceRegionItem {
static_assert(is_pointer<T>, "Weak references must be to pointer types");
friend class Recycler;
public:
RecyclerWeakReferenceRegionItem() : ptr(T()), heapBlock(nullptr) {};
operator T() const { return ptr; };
T operator=(T newPtr) {
Assert(ptr == nullptr); // For safety with concurrent marking, only allow setting the pointer to non-null from null
heapBlock = nullptr;
return ptr = newPtr;
};
// Drops both the referent and the cached heap block.
void Clear() { heapBlock = nullptr; ptr = nullptr; };
private:
// Non-copyable (non-const copy ctor deleted).
RecyclerWeakReferenceRegionItem(RecyclerWeakReferenceRegionItem<T>&) = delete;
FieldNoBarrier(T) ptr;
FieldNoBarrier(HeapBlock*) heapBlock; // Note: the low bit of the heapBlock is used for background marking
};
// Descriptor for a contiguous array of weak-reference items plus the heap
// block holding the array itself. Read-only accessors here; the fields are
// populated by the Recycler (friend).
class RecyclerWeakReferenceRegion {
friend class Recycler;
public:
RecyclerWeakReferenceRegionItem<void*>* GetPtr() const { return ptr; }
size_t GetCount() const { return count; }
HeapBlock* GetHeapBlock() const { return arrayHeapBlock; }
private:
FieldNoBarrier(RecyclerWeakReferenceRegionItem<void*>*) ptr;
FieldNoBarrier(size_t) count;
FieldNoBarrier(HeapBlock*) arrayHeapBlock;
};
#endif
// Allocation macro
// RecyclerNew* route through the AllocatorNew* machinery with a specific
// Recycler entry point:
//   ...Z variants      -> AllocZero* entry points
//   ...Plus variants   -> take an extra byte count after the object
//   ...Finalized/Tracked -> allocate via FinalizableObject (cast back to T*)
#define RecyclerNew(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocInlined, T, __VA_ARGS__)
#define RecyclerNewPlus(recycler,size,T,...) AllocatorNewPlus(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusZ(recycler,size,T,...) AllocatorNewPlusZ(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocZeroInlined, T, __VA_ARGS__)
#define RecyclerNewStruct(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocInlined, T)
#define RecyclerNewStructZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocZeroInlined, T)
#define RecyclerNewStructPlus(recycler,size,T) AllocatorNewStructPlus(Recycler, recycler, size, T)
#define RecyclerNewArray(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, Alloc, T, count)
#define RecyclerNewArrayZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZero, T, count)
#define RecyclerNewFinalized(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedInlined, T, __VA_ARGS__)))
#define RecyclerNewFinalizedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalized, size, T, __VA_ARGS__)))
#define RecyclerNewTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedInlined, T, __VA_ARGS__)))
#define RecyclerNewTrackedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocTracked, size, T, __VA_ARGS__)))
// Placement-new with explicit ObjectInfoBits, selected via InfoBitsWrapper.
#define RecyclerNewEnumClass(recycler, enumClass, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<enumClass>()) T(__VA_ARGS__)
#define RecyclerNewWithInfoBits(recycler, infoBits, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<infoBits>()) T(__VA_ARGS__)
#define RecyclerNewFinalizedClientTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedClientTrackedInlined, T, __VA_ARGS__)))
// Write-barrier variants of the allocation macros. When barrier allocation
// is compiled in they route to the Alloc*WithBarrier entry points (or OR
// WithBarrierBit into the info bits); otherwise they alias the plain macros.
#if defined(RECYCLER_WRITE_BARRIER_ALLOC)
#define RecyclerNewWithBarrier(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocWithBarrier, T, __VA_ARGS__)
#define RecyclerNewWithBarrierPlus(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocWithBarrier, size, T, __VA_ARGS__)
#define RecyclerNewWithBarrierPlusZ(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocZeroWithBarrier, size, T, __VA_ARGS__)
#define RecyclerNewWithBarrierZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocZeroWithBarrier, T, __VA_ARGS__)
#define RecyclerNewWithBarrierStruct(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocWithBarrier, T)
#define RecyclerNewWithBarrierStructZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocZeroWithBarrier, T)
#define RecyclerNewWithBarrierStructPlus(recycler,size,T) AllocatorNewStructPlusBase(Recycler, recycler, AllocWithBarrier, size, T)
#define RecyclerNewWithBarrierArray(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocWithBarrier, T, count)
#define RecyclerNewWithBarrierArrayZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZeroWithBarrier, T, count)
#define RecyclerNewWithBarrierFinalized(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedWithBarrierInlined, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierFinalizedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedWithBarrier, size, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedWithBarrierInlined, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierEnumClass(recycler, enumClass, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<(ObjectInfoBits)(enumClass | WithBarrierBit)>()) T(__VA_ARGS__)
#define RecyclerNewWithBarrierWithInfoBits(recycler, infoBits, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<(ObjectInfoBits)(infoBits | WithBarrierBit)>()) T(__VA_ARGS__)
#define RecyclerNewWithBarrierFinalizedClientTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedClientTrackedWithBarrierInlined, T, __VA_ARGS__)))
#endif
#ifndef RECYCLER_WRITE_BARRIER
// No write barrier: the WithBarrier names alias the ordinary allocators.
#define RecyclerNewWithBarrier RecyclerNew
#define RecyclerNewWithBarrierPlus RecyclerNewPlus
#define RecyclerNewWithBarrierPlusZ RecyclerNewPlusZ
#define RecyclerNewWithBarrierZ RecyclerNewZ
#define RecyclerNewWithBarrierStruct RecyclerNewStruct
#define RecyclerNewWithBarrierStructZ RecyclerNewStructZ
#define RecyclerNewWithBarrierStructPlus RecyclerNewStructPlus
#define RecyclerNewWithBarrierArray RecyclerNewArray
#define RecyclerNewWithBarrierArrayZ RecyclerNewArrayZ
#define RecyclerNewWithBarrierFinalized RecyclerNewFinalized
#define RecyclerNewWithBarrierFinalizedPlus RecyclerNewFinalizedPlus
#define RecyclerNewWithBarrierTracked RecyclerNewTracked
#define RecyclerNewWithBarrierEnumClass RecyclerNewEnumClass
#define RecyclerNewWithBarrierWithInfoBits RecyclerNewWithInfoBits
#define RecyclerNewWithBarrierFinalizedClientTracked RecyclerNewFinalizedClientTracked
#endif
// Leaf allocators
// These route to the AllocLeaf* entry points (leaf allocations are
// presumably not scanned for GC pointers -- confirm against the allocator).
#define RecyclerNewLeaf(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafInlined, T, __VA_ARGS__)
#define RecyclerNewLeafZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafZeroInlined, T, __VA_ARGS__)
#define RecyclerNewPlusLeaf(recycler,size,T,...) AllocatorNewPlusLeaf(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusLeafZ(recycler,size,T,...) AllocatorNewPlusLeafZ(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewStructLeaf(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocLeafInlined, T)
#define RecyclerNewStructLeafZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocLeafZeroInlined, T)
#define RecyclerNewArrayLeafZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafZero, T, count)
#define RecyclerNewArrayLeaf(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeaf, T, count)
#define RecyclerNewFinalizedLeaf(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedLeafInlined, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeafPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedLeaf, size, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeaf(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedLeafInlined, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeafPlusZ(recycler,size,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocZeroTrackedLeafInlined, size, T, __VA_ARGS__)))
#ifdef RECYCLER_VISITED_HOST
// We need to track these allocations. The RecyclerVisitedHost* object allocation APIs don't provide us with the type of the objects being allocated. Use the DummyVTableObject type used elsewhere to track the allocations.
#define RecyclerAllocVisitedHostTracedAndFinalized(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<RecyclerVisitedHostTracedFinalizableBits>(size)
#define RecyclerAllocVisitedHostFinalized(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<RecyclerVisitedHostFinalizableBits>(size)
#define RecyclerAllocVisitedHostTraced(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<RecyclerVisitedHostTracedBits>(size)
#define RecyclerAllocLeaf(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<LeafBit>(size)
#endif
#ifdef TRACE_OBJECT_LIFETIME
// Object-lifetime tracing builds: route to the Alloc*Trace entry points.
#define RecyclerNewLeafTrace(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafTrace, T, __VA_ARGS__)
#define RecyclerNewLeafZTrace(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafZeroTrace, T, __VA_ARGS__)
#define RecyclerNewPlusLeafTrace(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocLeafTrace, size, T, __VA_ARGS__)
#define RecyclerNewArrayLeafZTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafZeroTrace, T, count)
#define RecyclerNewArrayTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocTrace, T, count)
#define RecyclerNewArrayZTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZeroTrace, T, count)
#define RecyclerNewArrayLeafTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafTrace, T, count)
#define RecyclerNewFinalizedTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedTrace, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeafTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedLeafTrace, T, __VA_ARGS__)))
#define RecyclerNewFinalizedPlusTrace(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedTrace, size, T, __VA_ARGS__)))
#define RecyclerNewTrackedTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedTrace, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeafTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedLeafTrace, T, __VA_ARGS__)))
#else
// Without TRACE_OBJECT_LIFETIME the *Trace names collapse to the plain macros.
#define RecyclerNewLeafTrace RecyclerNewLeaf
#define RecyclerNewLeafZTrace RecyclerNewLeafZ
#define RecyclerNewPlusLeafTrace RecyclerNewPlusLeaf
#define RecyclerNewArrayLeafZTrace RecyclerNewArrayLeafZ
#define RecyclerNewArrayTrace RecyclerNewArray
#define RecyclerNewArrayZTrace RecyclerNewArrayZ
#define RecyclerNewArrayLeafTrace RecyclerNewArrayLeaf
#define RecyclerNewFinalizedTrace RecyclerNewFinalized
#define RecyclerNewFinalizedLeafTrace RecyclerNewFinalizedLeaf
#define RecyclerNewFinalizedPlusTrace RecyclerNewFinalizedPlus
#define RecyclerNewTrackedTrace RecyclerNewTracked
#define RecyclerNewTrackedLeafTrace RecyclerNewTrackedLeaf
#endif
  236. #ifdef RECYCLER_TRACE
  237. #define RecyclerVerboseTrace(flags, ...) \
  238. if (flags.Verbose && flags.Trace.IsEnabled(Js::RecyclerPhase)) \
  239. { \
  240. Output::Print(__VA_ARGS__); \
  241. }
  242. #define AllocationVerboseTrace(flags, ...) \
  243. if (flags.Verbose && flags.Trace.IsEnabled(Js::MemoryAllocationPhase)) \
  244. { \
  245. Output::Print(__VA_ARGS__); \
  246. }
  247. #define LargeAllocationVerboseTrace(flags, ...) \
  248. if (flags.Verbose && \
  249. (flags.Trace.IsEnabled(Js::MemoryAllocationPhase) || \
  250. flags.Trace.IsEnabled(Js::LargeMemoryAllocationPhase))) \
  251. { \
  252. Output::Print(__VA_ARGS__); \
  253. }
  254. #define PageAllocatorAllocationVerboseTrace(flags, ...) \
  255. if (flags.Verbose && flags.Trace.IsEnabled(Js::PageAllocatorAllocPhase)) \
  256. { \
  257. Output::Print(__VA_ARGS__); \
  258. }
  259. #else
  260. #define RecyclerVerboseTrace(...)
  261. #define AllocationVerboseTrace(...)
  262. #define LargeAllocationVerboseTrace(...)
  263. #endif
// Placement construction against a specific heap info, and the matching free.
#define RecyclerHeapNew(recycler,heapInfo,T,...) new (recycler, heapInfo) T(__VA_ARGS__)
#define RecyclerHeapDelete(recycler,heapInfo,addr) (static_cast<Recycler *>(recycler)->HeapFree(heapInfo,addr))
// Host-provided callback signatures for external root marking and DOM
// wrapper tracing (see RecyclerCollectionWrapper below for how they are used).
typedef void (__cdecl* ExternalRootMarker)(void *);
typedef void (*DOMWrapperTracingCallback)(_In_opt_ void *data);
typedef bool (*DOMWrapperTracingDoneCallback)(_In_opt_ void *data);
typedef void (*DOMWrapperTracingEnterFinalPauseCallback)(_In_opt_ void *data);
  270. class RecyclerCollectionWrapper
  271. {
  272. public:
  273. RecyclerCollectionWrapper() :
  274. _isScriptContextCloseGCPending(FALSE)
  275. { }
  276. typedef BOOL (Recycler::*CollectionFunction)(CollectionFlags flags);
  277. virtual void PreCollectionCallBack(CollectionFlags flags) = 0;
  278. virtual void PreSweepCallback() = 0;
  279. virtual void PreRescanMarkCallback() = 0;
  280. virtual size_t RootMarkCallback(RecyclerScanMemoryCallback& scanMemoryCallback, BOOL * stacksScannedByRuntime) = 0;
  281. virtual void RescanMarkTimeoutCallback() = 0;
  282. virtual void EndMarkCallback() = 0;
  283. virtual void ConcurrentCallback() = 0;
  284. virtual void WaitCollectionCallBack() = 0;
  285. virtual void PostCollectionCallBack() = 0;
  286. virtual BOOL ExecuteRecyclerCollectionFunction(Recycler * recycler, CollectionFunction function, CollectionFlags flags) = 0;
  287. virtual uint GetRandomNumber() = 0;
  288. virtual bool DoSpecialMarkOnScanStack() = 0;
  289. virtual void OnScanStackCallback(void ** stackTop, size_t byteCount, void ** registers, size_t registersByteCount) = 0;
  290. virtual void PostSweepRedeferralCallBack() = 0;
  291. #ifdef FAULT_INJECTION
  292. virtual void DisposeScriptContextByFaultInjectionCallBack() = 0;
  293. #endif
  294. virtual void DisposeObjects(Recycler * recycler) = 0;
  295. virtual void PreDisposeObjectsCallBack() = 0;
  296. #if DBG || defined(PROFILE_EXEC)
  297. virtual bool AsyncHostOperationStart(void *) = 0;
  298. virtual void AsyncHostOperationEnd(bool wasInAsync, void *) = 0;
  299. #endif
  300. #if DBG
  301. virtual void CheckJsReentrancyOnDispose() = 0;
  302. #endif
  303. BOOL GetIsScriptContextCloseGCPending()
  304. {
  305. return _isScriptContextCloseGCPending;
  306. }
  307. void ClearIsScriptContextCloseGCPending()
  308. {
  309. _isScriptContextCloseGCPending = FALSE;
  310. }
  311. void SetIsScriptContextCloseGCPending()
  312. {
  313. _isScriptContextCloseGCPending = TRUE;
  314. }
  315. void SetDOMWrapperTracingCallback(DOMWrapperTracingCallback callback)
  316. {
  317. wrapperTracingCallback = callback;
  318. }
  319. void SetWrapperTracingCallbackState(void * state)
  320. {
  321. wrapperTracingCallbackState = state;
  322. }
  323. void SetDOMWrapperTracingEnterFinalPauseCallback(DOMWrapperTracingEnterFinalPauseCallback callback)
  324. {
  325. wrapperTracingEnterFinalPauseCallback = callback;
  326. }
  327. void SetDOMWrapperTracingDoneCallback(DOMWrapperTracingDoneCallback callback)
  328. {
  329. wrapperTracingDoneCallback = callback;
  330. }
  331. void EndMarkDomWrapperTracingCallback()
  332. {
  333. if (this->wrapperTracingCallback)
  334. {
  335. this->wrapperTracingCallback(this->wrapperTracingCallbackState);
  336. }
  337. }
  338. bool EndMarkDomWrapperTracingDoneCallback()
  339. {
  340. if (this->wrapperTracingDoneCallback)
  341. {
  342. return this->wrapperTracingDoneCallback(this->wrapperTracingCallbackState);
  343. }
  344. return true;
  345. }
  346. void EndMarkDomWrapperTracingEnterFinalPauseCallback()
  347. {
  348. if (this->wrapperTracingEnterFinalPauseCallback)
  349. {
  350. this->wrapperTracingEnterFinalPauseCallback(this->wrapperTracingCallbackState);
  351. }
  352. }
  353. protected:
  354. BOOL _isScriptContextCloseGCPending;
  355. void * wrapperTracingCallbackState;
  356. DOMWrapperTracingCallback wrapperTracingCallback;
  357. DOMWrapperTracingDoneCallback wrapperTracingDoneCallback;
  358. DOMWrapperTracingEnterFinalPauseCallback wrapperTracingEnterFinalPauseCallback;
  359. };
// No-op implementation of RecyclerCollectionWrapper, used when no host
// wrapper is registered; a shared singleton is exposed via Instance.
// ExecuteRecyclerCollectionFunction and DisposeObjects are defined out of
// line.
class DefaultRecyclerCollectionWrapper : public RecyclerCollectionWrapper
{
public:
virtual void PreCollectionCallBack(CollectionFlags flags) override {}
virtual void PreSweepCallback() override {}
virtual void PreRescanMarkCallback() override {}
virtual void RescanMarkTimeoutCallback() override {}
virtual void EndMarkCallback() override {}
// No roots to report, and the runtime did not scan any stacks.
virtual size_t RootMarkCallback(RecyclerScanMemoryCallback& scanMemoryCallback, BOOL * stacksScannedByRuntime) override { *stacksScannedByRuntime = FALSE; return 0; }
virtual void ConcurrentCallback() override {}
virtual void WaitCollectionCallBack() override {}
virtual void PostCollectionCallBack() override {}
virtual BOOL ExecuteRecyclerCollectionFunction(Recycler * recycler, CollectionFunction function, CollectionFlags flags) override;
virtual uint GetRandomNumber() override { return 0; }
virtual bool DoSpecialMarkOnScanStack() override { return false; }
virtual void OnScanStackCallback(void ** stackTop, size_t byteCount, void ** registers, size_t registersByteCount) override {};
virtual void PostSweepRedeferralCallBack() override {}
#ifdef FAULT_INJECTION
virtual void DisposeScriptContextByFaultInjectionCallBack() override {};
#endif
virtual void DisposeObjects(Recycler * recycler) override;
virtual void PreDisposeObjectsCallBack() override {};
#if DBG || defined(PROFILE_EXEC)
virtual bool AsyncHostOperationStart(void *) override { return false; };
virtual void AsyncHostOperationEnd(bool wasInAsync, void *) override {};
#endif
#if DBG
virtual void CheckJsReentrancyOnDispose() override {}
#endif
static DefaultRecyclerCollectionWrapper Instance;
private:
static bool IsCollectionDisabled(Recycler * recycler);
};
  393. #ifdef RECYCLER_STATS
// Per-collection statistics, compiled in only when RECYCLER_STATS is defined.
// Updated through the RECYCLER_STATS_* macros below and reported by the
// Print* helpers declared on Recycler (PrintCollectStats etc.).
struct RecyclerCollectionStats
{
    size_t startCollectAllocBytes;
#if ENABLE_PARTIAL_GC
    size_t startCollectNewPageCount;
#endif
    size_t continueCollectAllocBytes;
    size_t finishCollectTryCount;

    // Heuristic Stats
#if ENABLE_PARTIAL_GC
    size_t rescanRootBytes;
    size_t estimatedPartialReuseBytes;
    size_t uncollectedNewPageCountPartialCollect;
    size_t partialCollectSmallHeapBlockReuseMinFreeBytes;
    double collectEfficacy;
    double collectCost;
#endif

    // Mark stats
    size_t tryMarkCount; // # of pointer try mark (* pointer size to get total number byte looked at)
    size_t tryMarkNullCount;
    size_t tryMarkUnalignedCount;
    size_t tryMarkNonRecyclerMemoryCount;
    size_t tryMarkInteriorCount;
    size_t tryMarkInteriorNullCount;
    size_t tryMarkInteriorNonRecyclerMemoryCount;
    size_t rootCount;
    size_t stackCount;
    size_t remarkCount;
    size_t scanCount; // non-leaf objects marked.
    size_t trackCount;
    size_t finalizeCount;
    size_t markThruNewObjCount;
    size_t markThruFalseNewObjCount;

    // Rescan/mark counters: one instance (markData) for the in-thread mark
    // and, with concurrent GC, one per background repeat-mark iteration.
    struct MarkData
    {
        // Rescan stats
        size_t rescanPageCount;
        size_t rescanObjectCount;
        size_t rescanObjectByteCount;
        size_t rescanLargePageCount;
        size_t rescanLargeObjectCount;
        size_t rescanLargeByteCount;
        size_t markCount; // total number of object marked
        size_t markBytes; // size of all objects marked.
    } markData;
#if ENABLE_CONCURRENT_GC
    MarkData backgroundMarkData[RecyclerHeuristic::MaxBackgroundRepeatMarkCount];
    size_t trackedObjectCount;
#endif
#if ENABLE_PARTIAL_GC
    size_t clientTrackedObjectCount;
#endif

    // Sweep stats (indexed by heap block type where declared as arrays)
    size_t heapBlockCount[HeapBlock::BlockTypeCount]; // number of heap blocks (processed during swept)
    size_t heapBlockFreeCount[HeapBlock::BlockTypeCount]; // number of heap blocks deleted
    size_t heapBlockConcurrentSweptCount[HeapBlock::SmallBlockTypeCount];
    size_t heapBlockSweptCount[HeapBlock::SmallBlockTypeCount]; // number of heap blocks swept
    size_t objectSweptCount; // objects freed (free list + whole page freed)
    size_t objectSweptBytes;
    size_t objectSweptFreeListCount; // objects freed (free list)
    size_t objectSweptFreeListBytes;
    size_t objectSweepScanCount; // number of objects walked for sweeping (exclude whole page freed)
    size_t finalizeSweepCount; // number of objects finalizer/dispose called
#if ENABLE_PARTIAL_GC
    size_t smallNonLeafHeapBlockPartialReuseCount[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialReuseBytes[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialUnusedCount[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialUnusedBytes[HeapBlock::SmallBlockTypeCount];
#endif

    // Memory Stats
    size_t heapBlockFreeByteCount[HeapBlock::BlockTypeCount]; // The remaining usable free byte count
    size_t largeHeapBlockUsedByteCount; // Used byte count
    size_t largeHeapBlockTotalByteCount; // Total byte count

    // Empty/zero heap block stats
    uint numEmptySmallBlocks[HeapBlock::SmallBlockTypeCount];
    uint numZeroedOutSmallBlocks;
};
  471. #define RECYCLER_STATS_INC_IF(cond, r, f) if (cond) { RECYCLER_STATS_INC(r, f); }
  472. #define RECYCLER_STATS_INC(r, f) ++r->collectionStats.f
  473. #define RECYCLER_STATS_INTERLOCKED_INC(r, f) { InterlockedIncrement((LONG *)&r->collectionStats.f); }
  474. #define RECYCLER_STATS_DEC(r, f) --r->collectionStats.f
  475. #define RECYCLER_STATS_ADD(r, f, v) r->collectionStats.f += (v)
  476. #define RECYCLER_STATS_INTERLOCKED_ADD(r, f, v) { InterlockedAdd((LONG *)&r->collectionStats.f, (LONG)(v)); }
  477. #define RECYCLER_STATS_SUB(r, f, v) r->collectionStats.f -= (v)
  478. #define RECYCLER_STATS_SET(r, f, v) r->collectionStats.f = v
#else
// RECYCLER_STATS not defined: every stat-update macro compiles to nothing
// (the use site degenerates to a bare ';'), so non-stats builds pay no cost.
// Note any side effects in the macro arguments are dropped in these builds.
#define RECYCLER_STATS_INC_IF(cond, r, f)
#define RECYCLER_STATS_INC(r, f)
#define RECYCLER_STATS_INTERLOCKED_INC(r, f)
#define RECYCLER_STATS_DEC(r, f)
#define RECYCLER_STATS_ADD(r, f, v)
#define RECYCLER_STATS_INTERLOCKED_ADD(r, f, v)
#define RECYCLER_STATS_SUB(r, f, v)
#define RECYCLER_STATS_SET(r, f, v)
#endif
  489. #ifdef RECYCLER_TRACE
// Snapshot of the parameters/heuristics of one collection request, recorded
// in Recycler::collectionParam when RECYCLER_TRACE builds print GC activity.
struct CollectionParam
{
    CollectionFlags flags;
    bool finishOnly;   // presumably: only finishing a previously started collection — confirm against usage
    bool repeat;
    bool priorityBoostConcurrentSweepOverride;
    bool domCollect;
    int timeDiff;      // tick delta; exact reference point set by the tracing code
    size_t uncollectedAllocBytes;
    size_t uncollectedPinnedObjects;
#if ENABLE_PARTIAL_GC
    size_t uncollectedNewPageCountPartialCollect;
    size_t uncollectedNewPageCount;
    size_t unusedPartialCollectFreeBytes;
    bool inPartialCollectMode;
#endif
};
  507. #endif
  508. #include "RecyclerObjectGraphDumper.h"
  509. #if ENABLE_CONCURRENT_GC
  510. class RecyclerParallelThread
  511. {
  512. friend class ThreadContext;
  513. public:
  514. typedef void (Recycler::* WorkFunc)();
  515. RecyclerParallelThread(Recycler * recycler, WorkFunc workFunc) :
  516. recycler(recycler),
  517. workFunc(workFunc),
  518. concurrentWorkReadyEvent(NULL),
  519. concurrentWorkDoneEvent(NULL),
  520. concurrentThread(NULL)
  521. {
  522. }
  523. ~RecyclerParallelThread()
  524. {
  525. Assert(concurrentThread == NULL);
  526. Assert(concurrentWorkReadyEvent == NULL);
  527. Assert(concurrentWorkDoneEvent == NULL);
  528. }
  529. bool StartConcurrent();
  530. void WaitForConcurrent();
  531. void Shutdown();
  532. bool EnableConcurrent(bool synchronizeOnStartup);
  533. private:
  534. // Static entry point for thread creation
  535. static unsigned int CALLBACK StaticThreadProc(LPVOID lpParameter);
  536. // Static entry point for thread service usage
  537. static void CALLBACK StaticBackgroundWorkCallback(void * callbackData);
  538. private:
  539. WorkFunc workFunc;
  540. Recycler * recycler;
  541. HANDLE concurrentWorkReadyEvent;// main thread uses this event to tell concurrent threads that the work is ready
  542. HANDLE concurrentWorkDoneEvent;// concurrent threads use this event to tell main thread that the work allocated is done
  543. HANDLE concurrentThread;
  544. bool synchronizeOnStartup;
  545. };
  546. #endif
  547. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
// Debug-config RAII helper that page-protects recycler memory for the scope's
// duration (to catch stray writes) and restores it on destruction; Unprotect()
// ends the protection early. NOTE(review): the actual protection logic is in
// the .cpp — isReadOnly appears to record the chosen mode; confirm there.
class AutoProtectPages
{
public:
    AutoProtectPages(Recycler* recycler, bool protectEnabled);
    ~AutoProtectPages();
    void Unprotect();
private:
    Recycler* recycler;
    bool isReadOnly;
};
  558. #endif
  559. class Recycler
  560. {
  561. friend class RecyclerScanMemoryCallback;
  562. friend class RecyclerSweep;
  563. friend class RecyclerSweepManager;
  564. friend class MarkContext;
  565. friend class HeapBlock;
  566. friend class HeapBlockMap32;
  567. #if ENABLE_CONCURRENT_GC
  568. friend class RecyclerParallelThread;
  569. #endif
  570. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  571. friend class AutoProtectPages;
  572. #endif
  573. #ifdef ENABLE_BASIC_TELEMETRY
  574. friend class RecyclerTelemetryInfo;
  575. #endif
  576. template <typename T> friend class RecyclerWeakReference;
  577. template <typename T> friend class WeakReferenceHashTable;
  578. template <typename TBlockType>
  579. friend class SmallHeapBlockAllocator; // Needed for FindHeapBlock
  580. #if defined(RECYCLER_TRACE)
  581. friend class JavascriptThreadService;
  582. #endif
  583. #ifdef HEAP_ENUMERATION_VALIDATION
  584. friend class ActiveScriptProfilerHeapEnum;
  585. #endif
  586. friend class ScriptEngineBase; // This is for disabling GC for certain Host operations.
  587. #if !FLOATVAR
  588. friend class ::CodeGenNumberThreadAllocator;
  589. friend struct ::XProcNumberPageSegmentManager;
  590. #endif
  591. public:
  592. static const uint ConcurrentThreadStackSize = 300000;
  593. static const bool FakeZeroLengthArray = true;
  594. #ifdef RECYCLER_PAGE_HEAP
  595. // Keeping as constant in case we want to tweak the value here
  596. // Set to 0 so that the tool can do the filtering instead of the runtime
  597. #if DBG
  598. static const int s_numFramesToSkipForPageHeapAlloc = 10;
  599. static const int s_numFramesToSkipForPageHeapFree = 0;
  600. static const int s_numFramesToCaptureForPageHeap = 32;
  601. #else
  602. static const int s_numFramesToSkipForPageHeapAlloc = 0;
  603. static const int s_numFramesToSkipForPageHeapFree = 0;
  604. static const int s_numFramesToCaptureForPageHeap = 32;
  605. #endif
  606. #endif
  607. uint Cookie;
// RAII guard that, in DBG builds, flags the recycler as running an
// "external stack-skipping" GC for the guard's lifetime (see the DBG-only
// member isExternalStackSkippingGC). In release builds both the Assert and
// the flag updates compile away, so this is a no-op.
class AutoEnterExternalStackSkippingGCMode
{
public:
    AutoEnterExternalStackSkippingGCMode(Recycler* recycler):
        _recycler(recycler)
    {
        // Setting this in a re-entrant mode is not allowed.
        // (The flag only exists under DBG; Assert expands to nothing in
        // release, so this read is DBG-only as well.)
        Assert(!recycler->isExternalStackSkippingGC);
#if DBG
        _recycler->isExternalStackSkippingGC = true;
#endif
    }
    ~AutoEnterExternalStackSkippingGCMode()
    {
#if DBG
        _recycler->isExternalStackSkippingGC = false;
#endif
    }
private:
    Recycler* _recycler;
};
  629. private:
  630. class AutoSwitchCollectionStates
  631. {
  632. public:
  633. AutoSwitchCollectionStates(Recycler* recycler, CollectionState entryState, CollectionState exitState):
  634. _recycler(recycler),
  635. _exitState(exitState)
  636. {
  637. _recycler->SetCollectionState(entryState);
  638. }
  639. ~AutoSwitchCollectionStates()
  640. {
  641. _recycler->SetCollectionState(_exitState);
  642. }
  643. private:
  644. Recycler* _recycler;
  645. CollectionState _exitState;
  646. };
  647. #if defined(ENABLE_JS_ETW)
  648. ETWEventGCActivationTrigger collectionStartReason;
  649. CollectionFlags collectionStartFlags;
  650. ETWEventGCActivationTrigger collectionFinishReason;
  651. #endif
// Observer attached to the collectionState ObservableValue. When basic
// telemetry is enabled it reports GC pass boundaries to RecyclerTelemetryInfo;
// otherwise ValueChanged is a no-op.
class CollectionStateChangedObserver : public ObservableValueObserver<CollectionState>
{
private:
    Recycler* recycler;
public:
    CollectionStateChangedObserver(Recycler* recycler)
    {
        this->recycler = recycler;
    }
    virtual void ValueChanged(const CollectionState& newVal, const CollectionState& oldVal)
    {
#ifdef ENABLE_BASIC_TELEMETRY
        // A pass starts when leaving the idle state for a real collection
        // state (PreCollection and Exit are treated as bookkeeping states,
        // not passes)...
        if (oldVal == CollectionState::CollectionStateNotCollecting &&
            newVal != CollectionState::CollectionStateNotCollecting &&
            newVal != CollectionState::Collection_PreCollection &&
            newVal != CollectionState::CollectionStateExit)
        {
            this->recycler->GetRecyclerTelemetryInfo().StartPass(newVal);
        }
        // ...and ends when returning to the idle state from a real pass.
        else if (oldVal != CollectionState::CollectionStateNotCollecting &&
            oldVal != CollectionState::Collection_PreCollection &&
            oldVal != CollectionState::CollectionStateExit &&
            newVal == CollectionState::CollectionStateNotCollecting)
        {
            this->recycler->GetRecyclerTelemetryInfo().EndPass(oldVal);
        }
#endif
    }
};
  681. CollectionStateChangedObserver collectionStateChangedObserver;
  682. ObservableValue<CollectionState> collectionState;
// Single choke point for collection-state changes. Assigning through the
// ObservableValue lets registered observers (collectionStateChangedObserver)
// see every transition.
inline void SetCollectionState(CollectionState newState)
{
    this->collectionState = newState;
}
  687. JsUtil::ThreadService *threadService;
  688. #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
  689. bool allowAllocationsDuringConcurrentSweepForCollection;
  690. #endif
  691. HeapBlockMap heapBlockMap;
  692. #if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
// Pinned-object bookkeeping for leak-checking builds. Acts like the plain
// uint ref count that non-leak-check builds typedef below, but can also carry
// the stack back-traces captured when the object was pinned.
struct PinRecord
{
#ifdef STACK_BACK_TRACE
    PinRecord() : refCount(0), stackBackTraces(nullptr) {}
#else
    PinRecord() : refCount(0) {}
#endif
    // Only assignment of 0 (a reset) is supported, and only once the stack
    // trace list has already been released.
    PinRecord& operator=(uint newRefCount)
    {
#ifdef STACK_BACK_TRACE
        Assert(stackBackTraces == nullptr);
#endif
        Assert(newRefCount == 0); refCount = 0; return *this;
    }
    PinRecord& operator++() { ++refCount; return *this; }
    PinRecord& operator--() { --refCount; return *this; }
    // Implicit read of the current pin count.
    operator uint() const { return refCount; }
#ifdef STACK_BACK_TRACE
    // NOTE(review): node type suggests a linked list of captured stacks,
    // presumably one per pin — confirm against the pinning code.
    StackBackTraceNode * stackBackTraces;
#endif
private:
    uint refCount;
};
  716. #else
  717. typedef uint PinRecord;
  718. #endif
  719. typedef SimpleHashTable<void *, PinRecord, HeapAllocator, DefaultComparer, true, PrimePolicy> PinnedObjectHashTable;
  720. PinnedObjectHashTable pinnedObjectMap;
  721. WeakReferenceHashTable<PrimePolicy> weakReferenceMap;
  722. uint weakReferenceCleanupId;
  723. #if ENABLE_WEAK_REFERENCE_REGIONS
  724. SList<RecyclerWeakReferenceRegion, HeapAllocator> weakReferenceRegionList;
  725. #endif
  726. void * transientPinnedObject;
  727. #if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
  728. #ifdef STACK_BACK_TRACE
  729. StackBackTrace * transientPinnedObjectStackBackTrace;
  730. #endif
  731. #endif
// A guest arena: an ordinary ArenaAllocator plus a pendingDelete flag.
// NOTE(review): flag naming and the hasPendingDeleteGuestArena member on
// Recycler suggest deletion can be deferred while the arena is still being
// scanned — confirm against DeleteGuestArena in the .cpp.
struct GuestArenaAllocator : public ArenaAllocator
{
    GuestArenaAllocator(__in_z char16 const* name, PageAllocator * pageAllocator, void (*outOfMemoryFunc)())
        : ArenaAllocator(name, pageAllocator, outOfMemoryFunc), pendingDelete(false)
    {
    }
    bool pendingDelete;
};
  740. DListBase<GuestArenaAllocator> guestArenaList;
  741. DListBase<ArenaData*> externalGuestArenaList; // guest arenas are scanned for roots
  742. #ifdef RECYCLER_PAGE_HEAP
  743. bool isPageHeapEnabled;
  744. bool capturePageHeapAllocStack;
  745. bool capturePageHeapFreeStack;
  746. inline bool IsPageHeapEnabled() const { return isPageHeapEnabled; }
  747. inline bool ShouldCapturePageHeapAllocStack() const { return capturePageHeapAllocStack; }
  748. void VerifyPageHeapFillAfterAlloc(char* memBlock, size_t size, ObjectInfoBits attributes);
  749. #else
  750. inline bool IsPageHeapEnabled() const { return false; }
  751. inline bool ShouldCapturePageHeapAllocStack() const { return false; }
  752. #endif
  753. #ifdef RECYCLER_MARK_TRACK
  754. MarkMap* markMap;
  755. CriticalSection markMapCriticalSection;
  756. void PrintMarkMap();
  757. void ClearMarkMap();
  758. #endif
  759. // Number of pages to reserve for the primary mark stack
  760. // This is the minimum number of pages to guarantee that a single heap block
  761. // can be rescanned in the worst possible case where every object in a heap block
  762. // in the smallest bucket needs to be rescanned
  763. // These many pages being reserved guarantees that in OOM Rescan, we can make progress
  764. // on every rescan iteration
  765. // We add one because there is a small amount of the page reserved for page pool metadata
  766. // so we need to allocate an additional page to be sure
  767. // Currently, this works out to 2 pages on 32-bit and 5 pages on 64-bit
  768. // NOTE: We have reduced the PageCount for small blocks to 1. This brought down the number of pages reserved for x64 from 5 to 2. This has not shown
  769. // any adverse impact.
  770. static const int PrimaryMarkStackReservedPageCount =
  771. ((SmallAllocationBlockAttributes::PageCount * MarkContext::MarkCandidateSize) / SmallAllocationBlockAttributes::MinObjectSize) + 1;
  772. MarkContext markContext;
  773. // Contexts for parallel marking.
  774. // We support up to 4 way parallelism, main context + 3 additional parallel contexts.
  775. MarkContext parallelMarkContext1;
  776. MarkContext parallelMarkContext2;
  777. MarkContext parallelMarkContext3;
  778. // Page pools for above markContexts
  779. PagePool markPagePool;
  780. PagePool parallelMarkPagePool1;
  781. PagePool parallelMarkPagePool2;
  782. PagePool parallelMarkPagePool3;
  783. bool IsMarkStackEmpty();
  784. bool HasPendingMarkObjects() const { return markContext.HasPendingMarkObjects() || parallelMarkContext1.HasPendingMarkObjects() || parallelMarkContext2.HasPendingMarkObjects() || parallelMarkContext3.HasPendingMarkObjects(); }
  785. bool HasPendingTrackObjects() const { return markContext.HasPendingTrackObjects() || parallelMarkContext1.HasPendingTrackObjects() || parallelMarkContext2.HasPendingTrackObjects() || parallelMarkContext3.HasPendingTrackObjects(); }
  786. RecyclerCollectionWrapper * collectionWrapper;
  787. HANDLE mainThreadHandle;
  788. void * stackBase;
  789. class SavedRegisterState
  790. {
  791. public:
  792. #if _M_IX86
  793. static const int NumRegistersToSave = 8;
  794. #elif _M_ARM
  795. static const int NumRegistersToSave = 13;
  796. #elif _M_ARM64
  797. static const int NumRegistersToSave = 27;
  798. #elif _M_AMD64
  799. static const int NumRegistersToSave = 16;
  800. #endif
  801. SavedRegisterState()
  802. {
  803. memset(registers, 0, sizeof(void*) * NumRegistersToSave);
  804. }
  805. void** GetRegisters()
  806. {
  807. return registers;
  808. }
  809. void* GetStackTop()
  810. {
  811. // By convention, our register-saving routine will always
  812. // save the stack pointer as the first item in the array
  813. return registers[0];
  814. }
  815. private:
  816. void* registers[NumRegistersToSave];
  817. };
  818. SavedRegisterState savedThreadContext;
  819. #if __has_feature(address_sanitizer)
  820. void* savedAsanFakeStack;
  821. #define SAVE_THREAD_ASAN_FAKE_STACK() \
  822. this->savedAsanFakeStack = __asan_get_current_fake_stack()
  823. #else
  824. #define SAVE_THREAD_ASAN_FAKE_STACK()
  825. #endif
  826. bool inDispose;
  827. #if DBG || defined RECYCLER_TRACE
  828. uint collectionCount;
  829. #endif
  830. bool allowDispose;
  831. bool inDisposeWrapper;
  832. bool needOOMRescan;
  833. bool needExternalWrapperTracing;
  834. bool hasDisposableObject;
  835. bool hasNativeGCHost;
  836. DWORD tickCountNextDispose;
  837. bool inExhaustiveCollection;
  838. bool hasExhaustiveCandidate;
  839. bool inCacheCleanupCollection;
  840. bool inDecommitNowCollection;
  841. bool isScriptActive;
  842. bool isInScript;
  843. bool isShuttingDown;
  844. bool scanPinnedObjectMap;
  845. bool hasScannedInitialImplicitRoots;
  846. bool hasPendingUnpinnedObject;
  847. bool hasPendingDeleteGuestArena;
  848. bool inEndMarkOnLowMemory;
  849. bool decommitOnFinish;
  850. bool enableScanInteriorPointers;
  851. bool enableScanImplicitRoots;
  852. bool disableCollectOnAllocationHeuristics;
  853. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  854. bool disableCollection;
  855. #endif
  856. #if ENABLE_PARTIAL_GC
  857. bool enablePartialCollect;
  858. bool inPartialCollectMode;
  859. #if ENABLE_CONCURRENT_GC
  860. bool hasBackgroundFinishPartial;
  861. bool partialConcurrentNextCollection;
  862. #endif
  863. #endif
  864. #ifdef RECYCLER_STRESS
  865. bool forcePartialScanStack;
  866. bool recyclerStress;
  867. #if ENABLE_CONCURRENT_GC
  868. bool recyclerBackgroundStress;
  869. bool recyclerConcurrentStress;
  870. bool recyclerConcurrentRepeatStress;
  871. #endif
  872. #if ENABLE_PARTIAL_GC
  873. bool recyclerPartialStress;
  874. #endif
  875. #endif
  876. #if DBG
  877. bool isExternalStackSkippingGC;
  878. #endif
  879. bool skipStack;
  880. #if ENABLE_CONCURRENT_GC
  881. #if DBG
  882. bool isConcurrentGCOnIdle;
  883. bool isFinishGCOnIdle;
  884. #endif
  885. bool queueTrackedObject;
  886. bool hasPendingConcurrentFindRoot;
  887. bool priorityBoost;
  888. bool disableConcurrent;
  889. bool enableConcurrentMark;
  890. bool enableParallelMark;
  891. bool enableConcurrentSweep;
  892. uint maxParallelism; // Max # of total threads to run in parallel
  893. byte backgroundRescanCount; // for ETW events and stats
  894. byte backgroundFinishMarkCount;
  895. size_t backgroundRescanRootBytes;
  896. HANDLE concurrentWorkReadyEvent; // main thread uses this event to tell concurrent threads that the work is ready
  897. HANDLE concurrentWorkDoneEvent; // concurrent threads use this event to tell main thread that the work allocated is done
  898. HANDLE concurrentThread;
  899. template <uint parallelId>
  900. void ParallelWorkFunc();
  901. RecyclerParallelThread parallelThread1;
  902. RecyclerParallelThread parallelThread2;
  903. #if DBG
  904. // Variable indicating if the concurrent thread has exited or not
  905. // If the concurrent thread hasn't started yet, this is set to true
  906. // Once the concurrent thread starts, it sets this to false,
  907. // and when the concurrent thread exits, it sets this to true.
  908. bool concurrentThreadExited;
  909. bool disableConcurrentThreadExitedCheck;
  910. bool isProcessingTrackedObjects;
  911. #endif
  912. uint tickCountStartConcurrent;
  913. bool isAborting;
  914. #endif
  915. #if DBG
  916. bool hasIncompleteDoCollect;
  917. // This is set to true when we begin a Rescan, and set to false when either:
  918. // (1) We finish the final in-thread Rescan and are about to Mark
  919. // (2) We do a conditional ResetWriteWatch and are about to Mark
  920. // When this flag is true, we should not be modifying existing mark-related state,
  921. // including markBits and rescanState.
  922. bool isProcessingRescan;
  923. #endif
  924. Js::ConfigFlagsTable& recyclerFlagsTable;
  925. RecyclerSweepManager recyclerSweepManagerInstance;
  926. RecyclerSweepManager * recyclerSweepManager;
  927. static const uint tickDiffToNextCollect = 300;
  928. #ifdef IDLE_DECOMMIT_ENABLED
  929. HANDLE concurrentIdleDecommitEvent;
  930. LONG needIdleDecommitSignal;
  931. #endif
  932. #if ENABLE_PARTIAL_GC
  933. SListBase<void *> clientTrackedObjectList;
  934. ArenaAllocator clientTrackedObjectAllocator;
  935. size_t partialUncollectedAllocBytes;
  936. // Dynamic Heuristics for partial GC
  937. size_t uncollectedNewPageCountPartialCollect;
  938. #endif
  939. uint tickCountNextCollection;
  940. uint tickCountNextFinishCollection;
  941. void (*outOfMemoryFunc)();
  942. #ifdef RECYCLER_TEST_SUPPORT
  943. BOOL (*checkFn)(char* addr, size_t size);
  944. #endif
  945. ExternalRootMarker externalRootMarker;
  946. void * externalRootMarkerContext;
  947. #ifdef PROFILE_EXEC
  948. Js::Profiler * profiler;
  949. Js::Profiler * backgroundProfiler;
  950. PageAllocator backgroundProfilerPageAllocator;
  951. DListBase<ArenaAllocator> backgroundProfilerArena;
  952. #endif
  953. // destruct autoHeap after backgroundProfilerPageAllocator;
  954. HeapInfoManager autoHeap;
// Heap selection helpers. Every attribute combination currently resolves to
// the single default heap; the template parameter keeps the call sites ready
// for attribute-specific heaps.
template <ObjectInfoBits attributes>
HeapInfo * GetHeapInfoForAllocation()
{
    return this->GetHeapInfo<attributes>();
}
template <ObjectInfoBits attributes>
HeapInfo * GetHeapInfo()
{
    return this->autoHeap.GetDefaultHeap();
}
// Non-template overload: same default heap.
HeapInfo * GetHeapInfo()
{
    return this->autoHeap.GetDefaultHeap();
}
  969. #ifdef PROFILE_MEM
  970. RecyclerMemoryData * memoryData;
  971. #endif
  972. ThreadContextId mainThreadId;
  973. #if DBG
  974. uint heapBlockCount;
  975. bool disableThreadAccessCheck;
  976. #endif
  977. #if DBG || defined(RECYCLER_STATS)
  978. bool isForceSweeping;
  979. #endif
  980. #ifdef NTBUILD
  981. RecyclerWatsonTelemetryBlock localTelemetryBlock;
  982. RecyclerWatsonTelemetryBlock * telemetryBlock;
  983. #endif
  984. #ifdef ENABLE_BASIC_TELEMETRY
  985. private:
  986. RecyclerTelemetryInfo telemetryStats;
  987. GUID recyclerID;
  988. public:
  989. GUID& GetRecyclerID() { return this->recyclerID; }
  990. #endif
  991. public:
  992. bool GetIsInScript() { return this->isInScript; }
  993. bool GetIsScriptActive() { return this->isScriptActive; }
  994. private:
  995. #ifdef RECYCLER_STATS
  996. RecyclerCollectionStats collectionStats;
  997. void PrintHeapBlockStats(char16 const * name, HeapBlock::HeapBlockType type);
  998. void PrintHeapBlockMemoryStats(char16 const * name, HeapBlock::HeapBlockType type);
  999. void PrintCollectStats();
  1000. void PrintHeuristicCollectionStats();
  1001. void PrintMarkCollectionStats();
  1002. void PrintBackgroundCollectionStats();
  1003. void PrintMemoryStats();
  1004. void PrintBackgroundCollectionStat(RecyclerCollectionStats::MarkData const& markData);
  1005. #endif
  1006. #ifdef RECYCLER_TRACE
  1007. CollectionParam collectionParam;
  1008. void PrintBlockStatus(HeapBucket * heapBucket, HeapBlock * heapBlock, char16 const * name);
  1009. #endif
  1010. #ifdef RECYCLER_MEMORY_VERIFY
  1011. uint verifyPad;
  1012. bool verifyEnabled;
  1013. #endif
  1014. #ifdef RECYCLER_DUMP_OBJECT_GRAPH
  1015. friend class RecyclerObjectGraphDumper;
  1016. RecyclerObjectGraphDumper * objectGraphDumper;
  1017. public:
  1018. bool dumpObjectOnceOnCollect;
  1019. #endif
  1020. public:
  1021. Recycler(AllocationPolicyManager * policyManager, IdleDecommitPageAllocator * pageAllocator, void(*outOfMemoryFunc)(), Js::ConfigFlagsTable& flags, RecyclerTelemetryHostInterface* hostInterface);
  1022. ~Recycler();
  1023. void Initialize(const bool forceInThread, JsUtil::ThreadService *threadService, const bool deferThreadStartup = false
  1024. #ifdef RECYCLER_PAGE_HEAP
  1025. , PageHeapMode pageheapmode = PageHeapMode::PageHeapModeOff
  1026. , bool captureAllocCallStack = false
  1027. , bool captureFreeCallStack = false
  1028. #endif
  1029. );
  1030. Js::ConfigFlagsTable& GetRecyclerFlagsTable() const { return this->recyclerFlagsTable; }
  1031. void SetMemProtectMode();
  1032. bool IsMemProtectMode();
  1033. size_t GetUsedBytes();
  1034. void LogMemProtectHeapSize(bool fromGC);
  1035. char* Realloc(void* buffer, DECLSPEC_GUARD_OVERFLOW size_t existingBytes, DECLSPEC_GUARD_OVERFLOW size_t requestedBytes, bool truncate = true);
  1036. #ifdef NTBUILD
  1037. void SetTelemetryBlock(RecyclerWatsonTelemetryBlock * telemetryBlock) { this->telemetryBlock = telemetryBlock; }
  1038. #endif
  1039. uint GetPinnedObjectCount() const { return this->pinnedObjectMap.Count(); }
  1040. void Prime();
  1041. void* GetOwnerContext() { return (void*) this->collectionWrapper; }
  1042. bool NeedOOMRescan() const;
  1043. void SetNeedOOMRescan();
  1044. void ClearNeedOOMRescan();
  1045. BOOL RequestConcurrentWrapperCallback();
  1046. BOOL CollectionInProgress() const;
  1047. BOOL IsExiting() const;
  1048. BOOL IsSweeping() const;
  1049. #ifdef RECYCLER_PAGE_HEAP
  1050. inline bool ShouldCapturePageHeapFreeStack() const { return capturePageHeapFreeStack; }
  1051. #else
  1052. inline bool ShouldCapturePageHeapFreeStack() const { return false; }
  1053. #endif
  1054. void SetIsThreadBound();
  1055. void SetIsScriptActive(bool isScriptActive);
  1056. void SetIsInScript(bool isInScript);
  1057. bool HasNativeGCHost() const;
  1058. void SetHasNativeGCHost();
  1059. void SetNeedExternalWrapperTracing();
  1060. void ClearNeedExternalWrapperTracing();
  1061. bool ShouldIdleCollectOnExit();
  1062. void ScheduleNextCollection();
  1063. BOOL IsShuttingDown() const { return this->isShuttingDown; }
  1064. #if ENABLE_CONCURRENT_GC
  1065. #if DBG
  1066. BOOL IsConcurrentMarkEnabled() const { return enableConcurrentMark; }
  1067. BOOL IsConcurrentSweepEnabled() const { return enableConcurrentSweep; }
  1068. #endif
  1069. template <CollectionFlags flags>
  1070. BOOL FinishConcurrent();
  1071. void ShutdownThread();
  1072. bool EnableConcurrent(JsUtil::ThreadService *threadService, bool startAllThreads);
  1073. void DisableConcurrent();
  1074. void StartQueueTrackedObject();
  1075. bool DoQueueTrackedObject() const;
  1076. void PrepareSweep();
  1077. #endif
  1078. template <CollectionFlags flags>
  1079. void SetupPostCollectionFlags();
  1080. void EnsureNotCollecting();
  1081. #if ENABLE_CONCURRENT_GC
  1082. bool QueueTrackedObject(FinalizableObject * trackableObject);
  1083. #endif
  1084. // FindRoots
  1085. void TryExternalMarkNonInterior(void * candidate);
  1086. void TryMarkNonInterior(void* candidate, void* parentReference = nullptr);
  1087. void TryMarkInterior(void *candidate, void* parentReference = nullptr);
  1088. bool InCacheCleanupCollection() { return inCacheCleanupCollection; }
  1089. void ClearCacheCleanupCollection() { Assert(inCacheCleanupCollection); inCacheCleanupCollection = false; }
  1090. // Finalizer support
  1091. void SetExternalRootMarker(ExternalRootMarker fn, void * context);
  1092. ArenaAllocator * CreateGuestArena(char16 const * name, void (*outOfMemoryFunc)());
  1093. void DeleteGuestArena(ArenaAllocator * arenaAllocator);
// Adds an external guest arena to the list scanned for GC roots. Returns the
// node slot produced by PrependNode; callers can hand it back to the
// UnregisterExternalGuestArena(ArenaData**) overload for direct removal.
ArenaData ** RegisterExternalGuestArena(ArenaData* guestArena)
{
    return externalGuestArenaList.PrependNode(&NoThrowHeapAllocator::Instance, guestArena);
}
// Removes a previously registered external guest arena, locating it in the
// list by value.
void UnregisterExternalGuestArena(ArenaData* guestArena)
{
    externalGuestArenaList.Remove(&NoThrowHeapAllocator::Instance, guestArena);
    // Any time a root is removed during a GC, it indicates that an exhaustive
    // collection is likely going to have work to do so trigger an exhaustive
    // candidate GC to indicate this fact
    this->CollectNow<CollectExhaustiveCandidate>();
}
// Overload taking the node slot returned by RegisterExternalGuestArena,
// removing that exact element rather than searching by value.
void UnregisterExternalGuestArena(ArenaData** guestArena)
{
    externalGuestArenaList.RemoveElement(&NoThrowHeapAllocator::Instance, guestArena);
    // Any time a root is removed during a GC, it indicates that an exhaustive
    // collection is likely going to have work to do so trigger an exhaustive
    // candidate GC to indicate this fact
    this->CollectNow<CollectExhaustiveCandidate>();
}
  1114. #ifdef RECYCLER_TEST_SUPPORT
  1115. void SetCheckFn(BOOL(*checkFn)(char* addr, size_t size));
  1116. #endif
  1117. void SetCollectionWrapper(RecyclerCollectionWrapper * wrapper);
  1118. static size_t GetAlignedSize(size_t size) { return HeapInfo::GetAlignedSize(size); }
  1119. HeapInfo* GetDefaultHeapInfo() { return autoHeap.GetDefaultHeap(); }
  1120. template <CollectionFlags flags>
  1121. BOOL CollectNow();
  1122. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  1123. void DisplayMemStats();
  1124. #endif
  1125. void AddExternalMemoryUsage(size_t size);
  1126. bool NeedDispose() { return this->hasDisposableObject; }
  1127. template <CollectionFlags flags>
  1128. bool FinishDisposeObjectsNow();
  1129. bool RequestExternalMemoryAllocation(size_t size);
  1130. void ReportExternalMemoryFailure(size_t size);
  1131. void ReportExternalMemoryFree(size_t size);
  1132. // ExternalAllocFunc returns true when allocation succeeds
  1133. template <typename ExternalAllocFunc>
  1134. bool DoExternalAllocation(size_t size, ExternalAllocFunc externalAllocFunc);
#ifdef TRACE_OBJECT_LIFETIME
// Generates AllocFunc##Trace, which ORs TraceBit into the attributes so the
// allocation participates in object-lifetime tracing.
#define DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes) \
inline char* AllocFunc##Trace(size_t size) \
{ \
return AllocWithAttributesFunc<(ObjectInfoBits)(attributes | TraceBit), /* nothrow = */ false>(size); \
}
#else
// Tracing disabled: no Trace variant is generated.
#define DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributeFunc, attributes)
#endif
// Generates the throwing allocator pair for a given attribute set:
// AllocFunc (out-of-line) and AllocFunc##Inlined (__forceinline), both
// forwarding to AllocWithAttributesFunc with nothrow = false, plus the
// optional Trace variant above.
#define DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocWithAttributesFunc, attributes) \
inline char * AllocFunc(DECLSPEC_GUARD_OVERFLOW size_t size) \
{ \
return AllocWithAttributesFunc<attributes, /* nothrow = */ false>(size); \
} \
__forceinline char * AllocFunc##Inlined(DECLSPEC_GUARD_OVERFLOW size_t size) \
{ \
return AllocWithAttributesFunc##Inlined<attributes, /* nothrow = */ false>(size); \
} \
DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes);
// Same shape, but generates NoThrow##AllocFunc variants with nothrow = true
// (presumably returning null on failure instead of throwing — confirm in
// AllocWithAttributes).
#define DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocWithAttributesFunc, attributes) \
inline char * NoThrow##AllocFunc(DECLSPEC_GUARD_OVERFLOW size_t size) \
{ \
return AllocWithAttributesFunc<attributes, /* nothrow = */ true>(size); \
} \
inline char * NoThrow##AllocFunc##Inlined(DECLSPEC_GUARD_OVERFLOW size_t size) \
{ \
return AllocWithAttributesFunc##Inlined<attributes, /* nothrow = */ true>(size); \
} \
DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes);
// Convenience wrappers selecting zeroing vs non-zeroing allocation paths.
#define DEFINE_RECYCLER_ALLOC(AllocFunc, attributes) DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocWithAttributes, attributes)
#define DEFINE_RECYCLER_ALLOC_ZERO(AllocFunc, attributes) DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocZeroWithAttributes, attributes)
#define DEFINE_RECYCLER_NOTHROW_ALLOC(AllocFunc, attributes) DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocWithAttributes, attributes)
#define DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocFunc, attributes) DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocZeroWithAttributes, attributes)
  1168. #if GLOBAL_ENABLE_WRITE_BARRIER
  1169. DEFINE_RECYCLER_ALLOC(Alloc, WithBarrierBit);
  1170. DEFINE_RECYCLER_ALLOC_ZERO(AllocZero, WithBarrierBit);
  1171. DEFINE_RECYCLER_ALLOC(AllocFinalized, FinalizableWithBarrierObjectBits);
  1172. DEFINE_RECYCLER_ALLOC(AllocTracked, ClientTrackableObjectWithBarrierBits);
  1173. DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTracked, ClientTrackableObjectWithBarrierBits);
  1174. #else
  1175. DEFINE_RECYCLER_ALLOC(Alloc, NoBit);
  1176. DEFINE_RECYCLER_ALLOC_ZERO(AllocZero, NoBit);
  1177. DEFINE_RECYCLER_ALLOC(AllocFinalized, FinalizableObjectBits);
  1178. DEFINE_RECYCLER_ALLOC(AllocTracked, ClientTrackableObjectBits);
  1179. DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTracked, ClientFinalizableObjectBits);
  1180. #endif
  1181. #ifdef RECYCLER_WRITE_BARRIER_ALLOC
  1182. DEFINE_RECYCLER_ALLOC(AllocWithBarrier, WithBarrierBit);
  1183. DEFINE_RECYCLER_ALLOC_ZERO(AllocZeroWithBarrier, WithBarrierBit);
  1184. DEFINE_RECYCLER_ALLOC(AllocFinalizedWithBarrier, FinalizableWithBarrierObjectBits);
  1185. DEFINE_RECYCLER_ALLOC(AllocTrackedWithBarrier, ClientTrackableObjectWithBarrierBits);
  1186. DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTrackedWithBarrier, ClientFinalizableObjectWithBarrierBits);
  1187. #endif
  1188. DEFINE_RECYCLER_ALLOC(AllocLeaf, LeafBit);
  1189. DEFINE_RECYCLER_ALLOC(AllocFinalizedLeaf, FinalizableLeafBits);
  1190. DEFINE_RECYCLER_ALLOC(AllocTrackedLeaf, ClientTrackableLeafBits);
  1191. DEFINE_RECYCLER_ALLOC_ZERO(AllocLeafZero, LeafBit);
  1192. DEFINE_RECYCLER_ALLOC_ZERO(AllocZeroTrackedLeaf, ClientTrackableLeafBits);
  1193. DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocImplicitRootLeaf, ImplicitRootLeafBits);
  1194. DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocImplicitRoot, ImplicitRootBit);
// Allocate with a caller-supplied enum-class attribute; the attribute must
// include at least one bit from EnumClassMask.
template <ObjectInfoBits enumClass>
char * AllocEnumClass(DECLSPEC_GUARD_OVERFLOW size_t size)
{
Assert((enumClass & EnumClassMask) != 0);
//Assert((enumClass & ~EnumClassMask & ~WithBarrierBit) == 0);
return AllocWithAttributes<(ObjectInfoBits)(enumClass), /* nothrow = */ false>(size);
}
// Throwing allocation with fully caller-specified info bits.
template <ObjectInfoBits infoBits>
char * AllocWithInfoBits(DECLSPEC_GUARD_OVERFLOW size_t size)
{
return AllocWithAttributes<infoBits, /* nothrow = */ false>(size);
}
// Visited-host allocation is nothrow: returns null on failure rather than throwing.
template <ObjectInfoBits infoBits>
char * AllocVisitedHost(DECLSPEC_GUARD_OVERFLOW size_t size)
{
return AllocWithAttributes<infoBits, /* nothrow = */ true>(size);
}
// --- Weak references --------------------------------------------------------
template<typename T>
RecyclerWeakReference<T>* CreateWeakReferenceHandle(T* pStrongReference);
#if ENABLE_WEAK_REFERENCE_REGIONS
template<typename T>
RecyclerWeakReferenceRegionItem<T>* CreateWeakReferenceRegion(size_t count);
#endif
uint GetWeakReferenceCleanupId() const { return weakReferenceCleanupId; }
template<typename T>
bool FindOrCreateWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **ppWeakRef);
template<typename T>
bool TryGetWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **weakReference);
// --- Small-allocator introspection (used for native/JIT-ed allocation) ------
// Return the address of the small-heap-bucket allocator for sizeCat so
// generated code can bump-allocate directly from it.
template <ObjectInfoBits attributes>
char* GetAddressOfAllocator(size_t sizeCat)
{
Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
return (char*)this->GetHeapInfo<attributes>()->template GetBucket<(ObjectInfoBits)(attributes & GetBlockTypeBitMask)>(sizeCat).GetAllocator();
}
// Offset (within the allocator) of the bump-allocation end address.
template <ObjectInfoBits attributes>
uint32 GetEndAddressOffset(size_t sizeCat)
{
Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
return this->GetHeapInfo<attributes>()->template GetBucket<(ObjectInfoBits)(attributes & GetBlockTypeBitMask)>(sizeCat).GetAllocator()->GetEndAddressOffset();
}
// Offset (within the allocator) of the free-object list head.
template <ObjectInfoBits attributes>
uint32 GetFreeObjectListOffset(size_t sizeCat)
{
Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
return this->GetHeapInfo<attributes>()->template GetBucket<(ObjectInfoBits)(attributes & GetBlockTypeBitMask)>(sizeCat).GetAllocator()->GetFreeObjectListOffset();
}
void GetNormalHeapBlockAllocatorInfoForNativeAllocation(size_t sizeCat, void*& allocatorAddress, uint32& endAddressOffset, uint32& freeListOffset, bool allowBumpAllocation, bool isOOPJIT);
static void GetNormalHeapBlockAllocatorInfoForNativeAllocation(void* recyclerAddr, size_t sizeCat, void*& allocatorAddress, uint32& endAddressOffset, uint32& freeListOffset, bool allowBumpAllocation, bool isOOPJIT);
bool AllowNativeCodeBumpAllocation();
static void TrackNativeAllocatedMemoryBlock(Recycler * recycler, void * memBlock, size_t sizeCat);
// Recycler memory is garbage collected; this overload exists to satisfy
// allocator-template contracts and is never expected to be called (asserts).
void Free(void* buffer, size_t size)
{
Assert(false);
}
// Explicit free: return an object to its free list ahead of collection.
bool ExplicitFreeLeaf(void* buffer, size_t size);
bool ExplicitFreeNonLeaf(void* buffer, size_t size);
template <ObjectInfoBits attributes>
bool ExplicitFreeInternalWrapper(void* buffer, size_t allocSize);
template <ObjectInfoBits attributes, typename TBlockAttributes>
bool ExplicitFreeInternal(void* buffer, size_t size, size_t sizeCat);
size_t GetAllocSize(size_t size);
template <typename TBlockAttributes>
void SetExplicitFreeBitOnSmallBlock(HeapBlock* heapBlock, size_t sizeCat, void* buffer, ObjectInfoBits attributes);
// Allocate leaf memory from an explicit HeapInfo (throwing).
char* HeapAllocR(HeapInfo* eHeap, DECLSPEC_GUARD_OVERFLOW size_t size)
{
return RealAlloc<LeafBit, /* nothrow = */ false>(eHeap, size);
}
void HeapFree(HeapInfo* eHeap,void* candidate);
void EnumerateObjects(ObjectInfoBits infoBits, void (*CallBackFunction)(void * address, size_t size));
// Pin / unpin an object as a GC root; 'count' optionally receives the new pin count.
void RootAddRef(void* obj, uint *count = nullptr);
void RootRelease(void* obj, uint *count = nullptr);
template <ObjectInfoBits attributes, bool nothrow>
inline char* RealAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size);
template <ObjectInfoBits attributes, bool isSmallAlloc, bool nothrow>
inline char* RealAllocFromBucket(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size);
void EnterIdleDecommit();
void LeaveIdleDecommit();
void DisposeObjects();
BOOL IsValidObject(void* candidate, size_t minimumSize = 0);
#if DBG
void SetDisableThreadAccessCheck();
void SetDisableConcurrentThreadExitedCheck();
void CheckAllocExternalMark() const;
BOOL IsFreeObject(void * candidate);
BOOL IsReentrantState() const;
#endif
#if DBG_DUMP
void PrintMarkStack();
#endif
#ifdef PROFILE_EXEC
Js::Profiler * GetProfiler() const { return this->profiler; }
ArenaAllocator * AddBackgroundProfilerArena();
void ReleaseBackgroundProfilerArena(ArenaAllocator * arena);
void SetProfiler(Js::Profiler * profiler, Js::Profiler * backgroundProfiler);
#endif
#ifdef RECYCLER_MEMORY_VERIFY
// Memory-verification support: allocations are padded and filled with
// VerifyMemFill so corruption can be detected at collection time.
BOOL VerifyEnabled() const { return verifyEnabled; }
uint GetVerifyPad() const { return verifyPad; }
void Verify(Js::Phase phase);
static void VerifyCheck(BOOL cond, char16 const * msg, void * address, void * corruptedAddress);
static void VerifyCheckFill(void * address, size_t size);
void FillCheckPad(void * address, size_t size, size_t alignedAllocSize, bool objectAlreadyInitialized);
void FillCheckPad(void * address, size_t size, size_t alignedAllocSize)
{
FillCheckPad(address, size, alignedAllocSize, false);
}
static void FillPadNoCheck(void * address, size_t size, size_t alignedAllocSize, bool objectAlreadyInitialized);
void VerifyCheckPad(void * address, size_t size);
void VerifyCheckPadExplicitFreeList(void * address, size_t size);
static const byte VerifyMemFill = 0xCA;
#endif
#ifdef RECYCLER_ZERO_MEM_CHECK
void VerifyZeroFill(void * address, size_t size);
void VerifyLargeAllocZeroFill(void * address, size_t size, ObjectInfoBits attributes);
#endif
#ifdef RECYCLER_DUMP_OBJECT_GRAPH
bool DumpObjectGraph(RecyclerObjectGraphDumper::Param * param = nullptr);
void DumpObjectDescription(void *object);
#endif
#ifdef LEAK_REPORT
void ReportLeaks();
void ReportLeaksOnProcessDetach();
#endif
#ifdef CHECK_MEMORY_LEAK
void CheckLeaks(char16 const * header);
void CheckLeaksOnProcessDetach(char16 const * header);
#endif
#ifdef RECYCLER_TRACE
void SetDomCollect(bool isDomCollect) { collectionParam.domCollect = isDomCollect; }
void CaptureCollectionParam(CollectionFlags flags, bool repeat = false);
#endif
private:
// RecyclerRootPtr has an implicit conversion to raw pointers; these private
// overloads prevent it from being passed to RootAddRef/RootRelease directly.
template <typename T>
void RootAddRef(RecyclerRootPtr<T>& ptr, uint *count = nullptr);
template <typename T>
void RootRelease(RecyclerRootPtr<T>& ptr, uint *count = nullptr);
// Internal collection entry points, layered from CollectNow down to DoCollect.
template <CollectionFlags flags>
BOOL CollectInternal();
template <CollectionFlags flags>
BOOL Collect();
template <CollectionFlags flags>
BOOL CollectWithHeuristic();
template <CollectionFlags flags>
BOOL CollectWithExhaustiveCandidate();
template <CollectionFlags flags>
BOOL GetPartialFlag();
bool NeedExhaustiveRepeatCollect() const;
#if DBG
bool ExpectStackSkip() const;
#endif
// Sentinel for "no scan-root byte count available".
static size_t const InvalidScanRootBytes = (size_t)-1;
// Small Allocator
template <typename SmallHeapBlockAllocatorType>
void AddSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
template <typename SmallHeapBlockAllocatorType>
void RemoveSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
template <ObjectInfoBits attributes, typename SmallHeapBlockAllocatorType>
char * SmallAllocatorAlloc(SmallHeapBlockAllocatorType * allocator, size_t sizeCat, size_t size);
// Allocation
// Out-of-line wrappers over the inlined implementations; the macros above
// generate both the inlined and non-inlined public entry points from these.
template <ObjectInfoBits attributes, bool nothrow>
inline char * AllocWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size);
template <ObjectInfoBits attributes, bool nothrow>
char * AllocWithAttributes(DECLSPEC_GUARD_OVERFLOW size_t size)
{
return AllocWithAttributesInlined<attributes, nothrow>(size);
}
template <ObjectInfoBits attributes, bool nothrow>
inline char* AllocZeroWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size);
template <ObjectInfoBits attributes, bool nothrow>
char* AllocZeroWithAttributes(DECLSPEC_GUARD_OVERFLOW size_t size)
{
return AllocZeroWithAttributesInlined<attributes, nothrow>(size);
}
// Storage for weak-reference handles (throwing).
char* AllocWeakReferenceEntry(DECLSPEC_GUARD_OVERFLOW size_t size)
{
return AllocWithAttributes<WeakReferenceEntryBits, /* nothrow = */ false>(size);
}
  1374. bool NeedDisposeTimed()
  1375. {
  1376. DWORD ticks = ::GetTickCount();
  1377. return (ticks > tickCountNextDispose && this->hasDisposableObject);
  1378. }
// Large-object allocation path (objects too big for the small-block buckets).
char* TryLargeAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size, ObjectInfoBits attributes, bool nothrow);
template <bool nothrow>
char* LargeAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size, ObjectInfoBits attributes);
void OutOfMemory();
// Collection
BOOL DoCollect(CollectionFlags flags);
BOOL DoCollectWrapped(CollectionFlags flags);
BOOL CollectOnAllocatorThread();
#if DBG
void ResetThreadId();
#endif
// --- Root scanning ----------------------------------------------------------
template <bool background>
size_t ScanPinnedObjects();
size_t ScanStack();
size_t ScanArena(ArenaData * alloc, bool background);
void ScanImplicitRoots();
void ScanInitialImplicitRoots();
void ScanNewImplicitRoots();
size_t FindRoots();
size_t TryMarkArenaMemoryBlockList(ArenaMemoryBlock * memoryBlocks);
size_t TryMarkBigBlockList(BigBlock * memoryBlocks);
#if ENABLE_CONCURRENT_GC
#if FALSE // REVIEW: remove this code since not using
size_t TryMarkBigBlockListWithWriteWatch(BigBlock * memoryBlocks);
#endif
#endif
// Mark
void ResetMarks(ResetMarkFlags flags);
void Mark();
bool EndMark();
bool EndMarkCheckOOMRescan();
void EndMarkOnLowMemory();
#if ENABLE_CONCURRENT_GC
void DoParallelMark();
void DoBackgroundParallelMark();
#endif
void FinishWrapperObjectTracing();
size_t RootMark(CollectionState markState);
void ProcessMark(bool background);
void ProcessParallelMark(bool background, MarkContext * markContext);
template <bool parallel, bool interior>
void ProcessMarkContext(MarkContext * markContext);
public:
// Fast mark query via the heap block map.
bool IsObjectMarked(void* candidate) { return this->heapBlockMap.IsMarked(candidate); }
#ifdef RECYCLER_STRESS
bool StressCollectNow();
#endif
private:
HeapBlock* FindHeapBlock(void * candidate);
// One-entry cache for FindHeapBlock: remembers the last (candidate, block)
// pair to short-circuit repeated lookups of the same address.
struct FindBlockCache
{
FindBlockCache():
heapBlock(nullptr),
candidate(nullptr)
{
}
HeapBlock* heapBlock;
void* candidate;
} blockCache;
// Conservative scanning of candidate pointer ranges.
inline void ScanObjectInline(void ** obj, size_t byteCount);
inline void ScanObjectInlineInterior(void ** obj, size_t byteCount);
template <bool doSpecialMark, bool forceInterior = false>
inline void ScanMemoryInline(void ** obj, size_t byteCount
ADDRESS_SANITIZER_APPEND(RecyclerScanMemoryType scanMemoryType = RecyclerScanMemoryType::General));
// Zero-length ranges are skipped before dispatching to ScanMemoryInline.
template <bool doSpecialMark>
void ScanMemory(void ** obj, size_t byteCount) { if (byteCount != 0) { ScanMemoryInline<doSpecialMark>(obj, byteCount); } }
bool AddMark(void * candidate, size_t byteCount) throw();
#ifdef RECYCLER_VISITED_HOST
bool AddPreciselyTracedMark(IRecyclerVisitedObject * candidate) throw();
#endif
// Sweep
#if ENABLE_PARTIAL_GC
bool Sweep(size_t rescanRootBytes = (size_t)-1, bool concurrent = false, bool adjustPartialHeuristics = false);
#else
bool Sweep(bool concurrent = false);
#endif
void SweepWeakReference();
void SweepHeap(bool concurrent, RecyclerSweepManager& recyclerSweepManager);
void FinishSweep(RecyclerSweepManager& recyclerSweepManager);
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
// Two-pass concurrent sweep: allocations are permitted between the passes.
void DoTwoPassConcurrentSweepPreCheck();
void FinishSweepPrep();
void FinishConcurrentSweepPass1();
void FinishConcurrentSweep();
#endif
bool FinishDisposeObjects();
template <CollectionFlags flags>
bool FinishDisposeObjectsWrapped();
// end collection
void FinishCollection();
void FinishCollection(bool needConcurrentSweep);
void EndCollection();
void ResetCollectionState();
void ResetMarkCollectionState();
void ResetHeuristicCounters();
void ResetPartialHeuristicCounters();
BOOL IsMarkState() const;
BOOL IsFindRootsState() const;
BOOL IsInThreadFindRootsState() const;
template <Js::Phase phase>
void CollectionBegin();
template <Js::Phase phase>
void CollectionEnd();
#if ENABLE_PARTIAL_GC
// Partial GC: collect only a subset of the heap based on write-watch data.
void ProcessClientTrackedObjects();
bool PartialCollect(bool concurrent);
void FinishPartialCollect(RecyclerSweepManager * recyclerSweep = nullptr);
void ClearPartialCollect();
#if ENABLE_CONCURRENT_GC
void BackgroundFinishPartialCollect(RecyclerSweepManager * recyclerSweep);
#endif
#endif
size_t RescanMark(DWORD waitTime);
size_t FinishMark(DWORD waitTime);
size_t FinishMarkRescan(bool background);
#if ENABLE_CONCURRENT_GC
void ProcessTrackedObjects();
#endif
// True while in a post-sweep/post-collection callback state where
// allocation is permitted.
BOOL IsAllocatableCallbackState()
{
return (collectionState & (Collection_PostSweepRedeferralCallback | Collection_PostCollectionCallback));
}
#if ENABLE_CONCURRENT_GC
// Concurrent GC
// Any of concurrent mark, parallel mark, or concurrent sweep being enabled
// counts as "concurrent enabled".
BOOL IsConcurrentEnabled() const { return this->enableConcurrentMark || this->enableParallelMark || this->enableConcurrentSweep; }
BOOL IsConcurrentMarkState() const;
BOOL IsConcurrentMarkExecutingState() const;
BOOL IsConcurrentResetMarksState() const;
BOOL IsConcurrentFindRootState() const;
BOOL IsConcurrentExecutingState() const;
BOOL IsConcurrentSweepExecutingState() const;
BOOL IsConcurrentSweepSetupState() const;
BOOL IsConcurrentSweepState() const;
BOOL IsConcurrentState() const;
// True only when all Collection_ConcurrentSweep bits are set in the state.
BOOL InConcurrentSweep()
{
return ((collectionState & Collection_ConcurrentSweep) == Collection_ConcurrentSweep);
}
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
// Per-collection flag: whether allocation may proceed while the concurrent
// sweep is running.
bool AllowAllocationsDuringConcurrentSweep()
{
return this->allowAllocationsDuringConcurrentSweepForCollection;
}
#endif
#if DBG
BOOL IsConcurrentFinishedState() const;
#endif // DBG
// Background-thread lifecycle and hand-off.
bool InitializeConcurrent(JsUtil::ThreadService* threadService);
bool AbortConcurrent(bool restoreState);
void FinalizeConcurrent(bool restoreState);
static unsigned int CALLBACK StaticThreadProc(LPVOID lpParameter);
static int ExceptFilter(LPEXCEPTION_POINTERS pEP);
DWORD ThreadProc();
void DoBackgroundWork(bool forceForeground = false);
static void CALLBACK StaticBackgroundWorkCallback(void * callbackData);
BOOL CollectOnConcurrentThread();
bool StartConcurrent(CollectionState const state);
BOOL StartBackgroundMarkCollect();
BOOL StartSynchronousBackgroundMark();
BOOL StartAsynchronousBackgroundMark();
BOOL StartBackgroundMark(bool foregroundResetMark, bool foregroundFindRoots);
BOOL StartConcurrentSweepCollect();
template <CollectionFlags flags>
BOOL TryFinishConcurrentCollect();
BOOL WaitForConcurrentThread(DWORD waitTime, RecyclerWaitReason caller = RecyclerWaitReason::Other);
void FlushBackgroundPages();
BOOL FinishConcurrentCollect(CollectionFlags flags);
void FinishTransferSwept(CollectionFlags flags);
BOOL FinishConcurrentCollectWrapped(CollectionFlags flags);
// Work executed on the background GC thread.
void BackgroundMark();
void BackgroundMarkWeakRefs();
void BackgroundResetMarks();
void PrepareBackgroundFindRoots();
void RevertPrepareBackgroundFindRoots();
size_t BackgroundFindRoots();
size_t BackgroundScanStack();
size_t BackgroundRepeatMark();
size_t BackgroundRescan(RescanFlags rescanFlags);
void BackgroundResetWriteWatchAll();
size_t BackgroundFinishMark();
char* GetScriptThreadStackTop();
void SweepPendingObjects(RecyclerSweepManager& recyclerSweepManager);
void ConcurrentTransferSweptObjects(RecyclerSweepManager& recyclerSweepManager);
#if ENABLE_PARTIAL_GC
void ConcurrentPartialTransferSweptObjects(RecyclerSweepManager& recyclerSweepManager);
#endif // ENABLE_PARTIAL_GC
#endif // ENABLE_CONCURRENT_GC
bool ForceSweepObject();
// Notify listeners (ETW, trackers) that memory is being freed.
void NotifyFree(__in char * address, size_t size);
template <typename T>
void NotifyFree(T * heapBlock);
void CleanupPendingUnroot();
#ifdef ENABLE_JS_ETW
// Free events are batched into etwFreeRecords and flushed in bulk
// (BulkFreeMemoryCount records per ETW event).
ULONG EventWriteFreeMemoryBlock(HeapBlock* heapBlock);
void FlushFreeRecord();
void AppendFreeMemoryETWRecord(__in char *address, size_t size);
static const uint BulkFreeMemoryCount = 400;
uint bulkFreeMemoryWrittenCount;
struct ETWFreeRecord {
char* memoryAddress;
uint32 objectSize;
};
ETWFreeRecord etwFreeRecords[BulkFreeMemoryCount];
#endif
// Adopt an externally prepared page segment as a recycler heap block.
template <ObjectInfoBits attributes>
bool IntegrateBlock(char * blockAddress, PageSegment * segment, size_t allocSize, size_t objectSize);
// Heap-block and bucket types need access to the recycler's internals.
template <class TBlockAttributes> friend class SmallHeapBlockT;
template <class TBlockAttributes> friend class SmallNormalHeapBlockT;
template <class TBlockAttributes> friend class SmallLeafHeapBlockT;
template <class TBlockAttributes> friend class SmallFinalizableHeapBlockT;
#ifdef RECYCLER_VISITED_HOST
template <class TBlockAttributes> friend class SmallRecyclerVisitedHostHeapBlockT;
#endif
friend class LargeHeapBlock;
friend class HeapInfo;
friend class HeapInfoManager;
friend class LargeHeapBucket;
friend class ThreadContext;
template <typename TBlockType>
friend class HeapBucketT;
template <typename TBlockType>
friend class SmallNormalHeapBucketBase;
template <typename T, ObjectInfoBits attributes>
friend class RecyclerFastAllocator;
#ifdef RECYCLER_TRACE
void PrintCollectTrace(Js::Phase phase, bool finish = false, bool noConcurrentWork = false);
#endif
#ifdef RECYCLER_VERIFY_MARK
// Post-mark verification: re-walk roots/arenas and check mark consistency.
void VerifyMark();
void VerifyMarkRoots();
void VerifyMarkStack();
void VerifyMarkArena(ArenaData * arena);
void VerifyMarkBigBlockList(BigBlock * memoryBlocks);
void VerifyMarkArenaMemoryBlockList(ArenaMemoryBlock * memoryBlocks);
bool VerifyMark(void * objectAddress, void * target);
bool VerifyMark(void * target);
#endif
#if DBG_DUMP
bool forceTraceMark;
#endif
bool isHeapEnumInProgress;
#if DBG
bool allowAllocationDuringHeapEnum;
bool allowAllocationDuringRenentrance;
#endif
// There are two scenarios we allow limited allocation but disallow GC during those allocations:
// in heapenum when we allocate PropertyRecord, and
// in projection ExternalMark allowing allocating VarToDispEx. This is the common flag
// while we have debug only flag for each of the two scenarios.
bool isCollectionDisabled;
#ifdef ENABLE_BASIC_TELEMETRY
RecyclerTelemetryInfo& GetRecyclerTelemetryInfo() { return this->telemetryStats; }
#endif
#ifdef TRACK_ALLOC
public:
// Record type information for the next allocation (fluent: returns this).
Recycler * TrackAllocInfo(TrackAllocData const& data);
void ClearTrackAllocInfo(TrackAllocData* data = NULL);
#ifdef PROFILE_RECYCLER_ALLOC
void PrintAllocStats();
private:
static bool DoProfileAllocTracker();
void InitializeProfileAllocTracker();
void TrackUnallocated(__in char* address, __in char *endAddress, size_t sizeCat);
void TrackAllocCore(void * object, size_t size, const TrackAllocData& trackAllocData, bool traceLifetime = false);
void* TrackAlloc(void * object, size_t size, const TrackAllocData& trackAllocData, bool traceLifetime = false);
void TrackIntegrate(__in_ecount(blockSize) char * blockAddress, size_t blockSize, size_t allocSize, size_t objectSize, const TrackAllocData& trackAllocData);
BOOL TrackFree(const char* address, size_t size);
void TrackAllocWeakRef(RecyclerWeakReferenceBase * weakRef);
void TrackFreeWeakRef(RecyclerWeakReferenceBase * weakRef);
// Per-type allocation statistics, keyed by type_info and array-ness.
struct TrackerData
{
TrackerData(type_info const * typeinfo, bool isArray) : typeinfo(typeinfo), isArray(isArray),
ItemSize(0), ItemCount(0), AllocCount(0), ReqSize(0), AllocSize(0), FreeCount(0), FreeSize(0), TraceLifetime(false)
#ifdef PERF_COUNTERS
, counter(PerfCounter::RecyclerTrackerCounterSet::GetPerfCounter(typeinfo, isArray))
, sizeCounter(PerfCounter::RecyclerTrackerCounterSet::GetPerfSizeCounter(typeinfo, isArray))
#endif
{
}
type_info const * typeinfo;
bool isArray;
#ifdef TRACE_OBJECT_LIFETIME
bool TraceLifetime;
#endif
size_t ItemSize;
size_t ItemCount;
int AllocCount;
int64 ReqSize;     // total bytes requested by callers
int64 AllocSize;   // total bytes actually allocated (after alignment/padding)
int FreeCount;
int64 FreeSize;
#ifdef PERF_COUNTERS
PerfCounter::Counter& counter;
PerfCounter::Counter& sizeCounter;
#endif
// Shared sentinels for "no type info" and explicit-free-list entries.
static TrackerData EmptyData;
static TrackerData ExplicitFreeListObjectData;
};
TrackerData * GetTrackerData(void * address);
void SetTrackerData(void * address, TrackerData * data);
// Pairs the instance and array TrackerData for one type.
struct TrackerItem
{
TrackerItem(type_info const * typeinfo) : instanceData(typeinfo, false), arrayData(typeinfo, true)
#ifdef PERF_COUNTERS
, weakRefCounter(PerfCounter::RecyclerTrackerCounterSet::GetWeakRefPerfCounter(typeinfo))
#endif
{}
TrackerData instanceData;
TrackerData arrayData;
#ifdef PERF_COUNTERS
PerfCounter::Counter& weakRefCounter;
#endif
};
typedef JsUtil::BaseDictionary<type_info const *, TrackerItem *, NoCheckHeapAllocator, PrimeSizePolicy, DefaultComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> TypeInfotoTrackerItemMap;
typedef JsUtil::BaseDictionary<void *, TrackerData *, NoCheckHeapAllocator, PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> PointerToTrackerDataMap;
TypeInfotoTrackerItemMap * trackerDictionary;
// Guards trackerDictionary; tracking can occur from the background GC thread.
CriticalSection * trackerCriticalSection;
#endif
TrackAllocData nextAllocData;
#endif
public:
// Enumeration
// RAII helper that puts the recycler into a non-collecting mark state (e.g.
// for heap enumeration) and restores the previous collection state/stats on
// destruction.
class AutoSetupRecyclerForNonCollectingMark
{
private:
Recycler& m_recycler;
bool m_setupDone;
CollectionState m_previousCollectionState;
#ifdef RECYCLER_STATS
RecyclerCollectionStats m_previousCollectionStats;
#endif
public:
AutoSetupRecyclerForNonCollectingMark(Recycler& recycler, bool setupForHeapEnumeration = false);
~AutoSetupRecyclerForNonCollectingMark();
void DoCommonSetup();
void SetupForHeapEnumeration();
};
friend class RecyclerHeapObjectInfo;
// Resolve a candidate address to a heap object, with various lookup modes.
bool FindImplicitRootObject(void* candidate, RecyclerHeapObjectInfo& heapObject);
bool FindHeapObject(void* candidate, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject);
bool FindHeapObjectWithClearedAllocators(void* candidate, RecyclerHeapObjectInfo& heapObject);
bool IsCollectionDisabled() const { return isCollectionDisabled; }
// Heap enumeration implies collection is disabled (asserted in debug builds).
bool IsHeapEnumInProgress() const { Assert(isHeapEnumInProgress ? isCollectionDisabled : true); return isHeapEnumInProgress; }
#if DBG
// There are limited cases where we have to allow allocation during heap enumeration. GC is explicitly
// disabled during heap enumeration for these limited cases. (See DefaultRecyclerCollectionWrapper)
// The only case of allocation right now is allocating a property record for the string based type handler
// so we can use the propertyId as the relation Id.
// Allocation during enumeration is still frowned upon and should still be avoided if possible.
bool AllowAllocationDuringHeapEnum() const { return allowAllocationDuringHeapEnum; }
class AutoAllowAllocationDuringHeapEnum : public AutoBooleanToggle
{
public:
AutoAllowAllocationDuringHeapEnum(Recycler * recycler) : AutoBooleanToggle(&recycler->allowAllocationDuringHeapEnum) {};
};
#endif
// RAII: temporarily disables collection while allowing allocation during
// reentrant calls into the engine.
class AutoAllowAllocationDuringReentrance : public AutoBooleanToggle
{
public:
AutoAllowAllocationDuringReentrance(Recycler * recycler) :
AutoBooleanToggle(&recycler->isCollectionDisabled)
#if DBG
, allowAllocationDuringRenentrance(&recycler->allowAllocationDuringRenentrance)
#endif
{};
#if DBG
private:
AutoBooleanToggle allowAllocationDuringRenentrance;
#endif
};
#ifdef HEAP_ENUMERATION_VALIDATION
typedef void(*PostHeapEnumScanCallback)(const HeapObject& heapObject, void *data);
PostHeapEnumScanCallback pfPostHeapEnumScanCallback;
void *postHeapEnunScanData; // [sic] "Enun" typo kept; renaming would touch uses elsewhere
void PostHeapEnumScan(PostHeapEnumScanCallback callback, void*data);
bool IsPostEnumHeapValidationInProgress() const { return pfPostHeapEnumScanCallback != NULL; }
#endif
public:
// Map an interior pointer back to the start of its containing object.
void* GetRealAddressFromInterior(void* candidate);
private:
void BeginNonCollectingMark();
void EndNonCollectingMark();
#if defined(RECYCLER_DUMP_OBJECT_GRAPH) || defined(LEAK_REPORT) || defined(CHECK_MEMORY_LEAK)
public:
// Shutdown-context flags used by leak reporting to suppress false positives.
bool IsInDllCanUnloadNow() const { return inDllCanUnloadNow; }
bool IsInDetachProcess() const { return inDetachProcess; }
void SetInDllCanUnloadNow();
void SetInDetachProcess();
private:
bool inDllCanUnloadNow;
bool inDetachProcess;
bool isPrimaryMarkContextInitialized;
#endif
#if defined(LEAK_REPORT) || defined(CHECK_MEMORY_LEAK)
template <class Fn>
void ReportOnProcessDetach(Fn fn);
void PrintPinnedObjectStackTraces();
#endif
public:
typedef void (CALLBACK *ObjectBeforeCollectCallback)(void* object, void* callbackState); // same as jsrt JsObjectBeforeCollectCallback
// same as jsrt JsObjectBeforeCollectCallbackWrapper
typedef void (CALLBACK *ObjectBeforeCollectCallbackWrapper)(ObjectBeforeCollectCallback callback, void* object, void* callbackState, void* threadContext);
// Register a callback to be invoked before 'object' is collected.
void SetObjectBeforeCollectCallback(void* object,
ObjectBeforeCollectCallback callback,
void* callbackState,
ObjectBeforeCollectCallbackWrapper callbackWrapper,
void* threadContext);
void ClearObjectBeforeCollectCallbacks();
void SetDOMWrapperTracingCallback(void * state, DOMWrapperTracingCallback tracingCallback, DOMWrapperTracingDoneCallback tracingDoneCallback, DOMWrapperTracingEnterFinalPauseCallback enterFinalPauseCallback);
void ClearDOMWrapperTracingCallback();
bool IsInObjectBeforeCollectCallback() const { return objectBeforeCollectCallbackState != ObjectBeforeCollectCallback_None; }
private:
// One registered before-collect callback and its invocation context.
struct ObjectBeforeCollectCallbackData
{
void* object;
ObjectBeforeCollectCallback callback;
void* callbackState;
void* threadContext;
ObjectBeforeCollectCallbackWrapper callbackWrapper;
ObjectBeforeCollectCallbackData() {}
ObjectBeforeCollectCallbackData(void* object, ObjectBeforeCollectCallbackWrapper callbackWrapper, ObjectBeforeCollectCallback callback, void* callbackState, void* threadContext) :
object(object), callbackWrapper(callbackWrapper), callback(callback), callbackState(callbackState), threadContext(threadContext) {}
};
typedef SList<ObjectBeforeCollectCallbackData> ObjectBeforeCollectCallbackList;
ObjectBeforeCollectCallbackList* objectBeforeCollectCallbackList;
ArenaAllocator objectBeforeCollectCallbackArena;
enum ObjectBeforeCollectCallbackState
{
ObjectBeforeCollectCallback_None,
ObjectBeforeCollectCallback_Normal, // Normal GC BeforeCollect callback
ObjectBeforeCollectCallback_Shutdown, // At shutdown invoke all BeforeCollect callback
} objectBeforeCollectCallbackState;
bool ProcessObjectBeforeCollectCallbacks(bool atShutdown = false);
  1812. #if GLOBAL_ENABLE_WRITE_BARRIER
  1813. private:
  1814. typedef JsUtil::BaseDictionary<void *, size_t, HeapAllocator, PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::AsymetricResizeLock> PendingWriteBarrierBlockMap;
  1815. PendingWriteBarrierBlockMap pendingWriteBarrierBlockMap;
  1816. public:
  1817. void RegisterPendingWriteBarrierBlock(void* address, size_t bytes);
  1818. void UnRegisterPendingWriteBarrierBlock(void* address);
  1819. #endif
  1820. #if DBG && GLOBAL_ENABLE_WRITE_BARRIER
  1821. private:
  1822. static Recycler* recyclerList;
  1823. static CriticalSection recyclerListLock;
  1824. Recycler* next;
  1825. public:
  1826. static void WBSetBitJIT(char* addr)
  1827. {
  1828. return WBSetBit(addr);
  1829. }
  1830. static void WBSetBit(char* addr);
  1831. static void WBSetBitRange(char* addr, uint length);
  1832. static void WBVerifyBitIsSet(char* addr, char* target);
  1833. static bool WBCheckIsRecyclerAddress(char* addr);
  1834. #endif
  1835. #ifdef RECYCLER_FINALIZE_CHECK
  1836. void VerifyFinalize();
  1837. #endif
  1838. };
// Lightweight descriptor of one recycler-allocated object: its address, the
// owning recycler, its containing heap block, and access to the object's
// attribute bits (which live either in a raw byte or, for large heap blocks,
// encoded in the LargeObjectHeader).
class RecyclerHeapObjectInfo
{
    void* m_address;
    Recycler * m_recycler;
    HeapBlock* m_heapBlock;
#if LARGEHEAPBLOCK_ENCODING
    // Large heap blocks keep attributes in the object header, so the raw
    // attribute pointer and the header pointer share storage; which member
    // is active is tracked by isUsingLargeHeapBlock.
    union
    {
        byte * m_attributes;
        LargeObjectHeader * m_largeHeapBlockHeader;
    };
    bool isUsingLargeHeapBlock = false;
#else
    byte * m_attributes;
#endif
public:
    RecyclerHeapObjectInfo() : m_address(NULL), m_recycler(NULL), m_heapBlock(NULL), m_attributes(NULL) {}
    RecyclerHeapObjectInfo(void* address, Recycler * recycler, HeapBlock* heapBlock, byte * attributes) :
        m_address(address), m_recycler(recycler), m_heapBlock(heapBlock), m_attributes(attributes) { }
    void* GetObjectAddress() const { return m_address; }
#ifdef RECYCLER_PAGE_HEAP
    // True when the object lives in a large heap block running in page-heap mode.
    bool IsPageHeapAlloc() const
    {
        return isUsingLargeHeapBlock && ((LargeHeapBlock*)m_heapBlock)->InPageHeapMode();
    }
    void PageHeapLockPages() const
    {
        Assert(IsPageHeapAlloc());
        ((LargeHeapBlock*)m_heapBlock)->PageHeapLockPages();
    }
#endif
    // Leaf = LeafBit set on the object, or the whole block is a leaf block.
    bool IsLeaf() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie) & LeafBit) != 0;
        }
#endif
        return ((*m_attributes & LeafBit) != 0 || this->m_heapBlock->IsLeafBlock());
    }
    bool IsImplicitRoot() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie) & ImplicitRootBit) != 0;
        }
#endif
        return (*m_attributes & ImplicitRootBit) != 0;
    }
    // Mark state is tracked in the recycler's heap block map, not in the object.
    bool IsObjectMarked() const { Assert(m_recycler); return m_recycler->heapBlockMap.IsMarked(m_address); }
    void SetObjectMarked() { Assert(m_recycler); m_recycler->heapBlockMap.SetMark(m_address); }
    ObjectInfoBits GetAttributes() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (ObjectInfoBits)m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        }
#endif
        return (ObjectInfoBits)*m_attributes;
    }
    size_t GetSize() const;
#if LARGEHEAPBLOCK_ENCODING
    // Switches this info to the large-heap-block attribute encoding.
    void SetLargeHeapBlockHeader(LargeObjectHeader * largeHeapBlockHeader)
    {
        m_largeHeapBlockHeader = largeHeapBlockHeader;
        isUsingLargeHeapBlock = true;
    }
#endif
    // Flags the object as seen by the memory profiler; returns the prior state.
    bool SetMemoryProfilerHasEnumerated()
    {
        Assert(m_heapBlock);
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return SetMemoryProfilerHasEnumeratedForLargeHeapBlock();
        }
#endif
        bool wasMemoryProfilerOldObject = (*m_attributes & MemoryProfilerOldObjectBit) != 0;
        *m_attributes |= MemoryProfilerOldObjectBit;
        return wasMemoryProfilerOldObject;
    }
    // Clears ImplicitRootBit and returns whether it was previously set.
    bool ClearImplicitRootBit()
    {
        // This can only be called on the main thread for non-finalizable block
        // As finalizable block requires that the bit not be change during concurrent mark
        // since the background thread change the NewTrackBit
        Assert(!m_heapBlock->IsAnyFinalizableBlock());
#ifdef RECYCLER_PAGE_HEAP
        Recycler* recycler = this->m_recycler;
        if (recycler->IsPageHeapEnabled() && recycler->ShouldCapturePageHeapFreeStack())
        {
#ifdef STACK_BACK_TRACE
            // Record the free stack for page-heap diagnostics while the block
            // is still known to be a page-heap large block.
            if (this->isUsingLargeHeapBlock)
            {
                LargeHeapBlock* largeHeapBlock = (LargeHeapBlock*)this->m_heapBlock;
                if (largeHeapBlock->InPageHeapMode())
                {
                    largeHeapBlock->CapturePageHeapFreeStack();
                }
            }
#endif
        }
#endif
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return ClearImplicitRootBitsForLargeHeapBlock();
        }
#endif
        Assert(m_attributes);
        bool wasImplicitRoot = (*m_attributes & ImplicitRootBit) != 0;
        *m_attributes &= ~ImplicitRootBit;
        return wasImplicitRoot;
    }
    // Explicitly frees the object via the leaf or non-leaf free path.
    // NOTE(review): dereferences m_attributes directly with no
    // isUsingLargeHeapBlock check, which looks like it assumes a small-block
    // object — confirm callers guarantee that.
    void ExplicitFree()
    {
        if (*m_attributes == ObjectInfoBits::LeafBit)
        {
            m_recycler->ExplicitFreeLeaf(m_address, GetSize());
        }
        else
        {
            Assert(*m_attributes == ObjectInfoBits::NoBit);
            m_recycler->ExplicitFreeNonLeaf(m_address, GetSize());
        }
    }
#if LARGEHEAPBLOCK_ENCODING
    // Large-block variant of ClearImplicitRootBit: attributes live in the header.
    bool ClearImplicitRootBitsForLargeHeapBlock()
    {
        Assert(m_largeHeapBlockHeader);
        byte attributes = m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        bool wasImplicitRoot = (attributes & ImplicitRootBit) != 0;
        m_largeHeapBlockHeader->SetAttributes(m_recycler->Cookie, attributes & ~ImplicitRootBit);
        return wasImplicitRoot;
    }
    // Large-block variant of SetMemoryProfilerHasEnumerated.
    bool SetMemoryProfilerHasEnumeratedForLargeHeapBlock()
    {
        Assert(m_largeHeapBlockHeader);
        byte attributes = m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        bool wasMemoryProfilerOldObject = (attributes & MemoryProfilerOldObjectBit) != 0;
        m_largeHeapBlockHeader->SetAttributes(m_recycler->Cookie, attributes | MemoryProfilerOldObjectBit);
        return wasMemoryProfilerOldObject;
    }
#endif
};
// A fake heap block to replace the original heap block where the strong ref is when it has been collected
// as the original heap block may have been freed
// Every override asserts: no heap-block operation is valid on this sentinel.
class CollectedRecyclerWeakRefHeapBlock : public HeapBlock
{
public:
#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
    virtual void WBVerifyBitIsSet(char* addr) override { Assert(false); }
    virtual void WBSetBit(char* addr) override { Assert(false); }
    virtual void WBSetBitRange(char* addr, uint count) override { Assert(false); }
    virtual void WBClearBit(char* addr) override { Assert(false); }
    virtual void WBClearObject(char* addr) override { Assert(false); }
#endif
#if DBG
    virtual HeapInfo * GetHeapInfo() const override { Assert(false); return nullptr; }
    virtual BOOL IsFreeObject(void* objectAddress) override { Assert(false); return false; }
#endif
    virtual BOOL IsValidObject(void* objectAddress) override { Assert(false); return false; }
    virtual byte* GetRealAddressFromInterior(void* interiorAddress) override { Assert(false); return nullptr; }
    virtual size_t GetObjectSize(void* object) const override { Assert(false); return 0; }
    virtual bool FindHeapObject(void* objectAddress, Recycler * recycler, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject) override { Assert(false); return false; }
    virtual bool TestObjectMarkedBit(void* objectAddress) override { Assert(false); return false; }
    virtual void SetObjectMarkedBit(void* objectAddress) override { Assert(false); }
#ifdef RECYCLER_VERIFY_MARK
    virtual bool VerifyMark(void * objectAddress, void * target) override { Assert(false); return false; }
#endif
#ifdef RECYCLER_PERF_COUNTERS
    virtual void UpdatePerfCountersOnFree() override { Assert(false); }
#endif
#ifdef PROFILE_RECYCLER_ALLOC
    virtual void * GetTrackerData(void * address) override { Assert(false); return nullptr; }
    virtual void SetTrackerData(void * address, void * data) override { Assert(false); }
#endif
    // Shared singleton substituted wherever a collected weak-ref target's
    // block is needed.
    static CollectedRecyclerWeakRefHeapBlock Instance;
private:
    CollectedRecyclerWeakRefHeapBlock() : HeapBlock(BlockTypeCount)
    {
#if ENABLE_CONCURRENT_GC
        isPendingConcurrentSweep = false;
#endif
    }
};
  2028. class AutoIdleDecommit
  2029. {
  2030. public:
  2031. AutoIdleDecommit(Recycler * recycler) : recycler(recycler) { recycler->EnterIdleDecommit(); }
  2032. ~AutoIdleDecommit() { recycler->LeaveIdleDecommit(); }
  2033. private:
  2034. Recycler * recycler;
  2035. };
  2036. template <typename SmallHeapBlockAllocatorType>
  2037. void
  2038. Recycler::AddSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
  2039. {
  2040. this->GetDefaultHeapInfo()->AddSmallAllocator(allocator, sizeCat);
  2041. }
  2042. template <typename SmallHeapBlockAllocatorType>
  2043. void
  2044. Recycler::RemoveSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
  2045. {
  2046. this->GetDefaultHeapInfo()->RemoveSmallAllocator(allocator, sizeCat);
  2047. }
  2048. template <ObjectInfoBits attributes, typename SmallHeapBlockAllocatorType>
  2049. char *
  2050. Recycler::SmallAllocatorAlloc(SmallHeapBlockAllocatorType * allocator, DECLSPEC_GUARD_OVERFLOW size_t sizeCat, size_t size)
  2051. {
  2052. return this->GetDefaultHeapInfo()->SmallAllocatorAlloc<attributes>(this, allocator, sizeCat, size);
  2053. }
// Dummy recycler allocator policy classes to choose the allocation function
class _RecyclerLeafPolicy;
class _RecyclerNonLeafPolicy;
#ifdef RECYCLER_WRITE_BARRIER
class _RecyclerWriteBarrierPolicy;
#endif
// Primary template is intentionally empty; only the policy specializations
// below supply the alloc/free function selectors.
template <typename Policy>
class _RecyclerAllocatorFunc
{};
  2063. template <>
  2064. class _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
  2065. {
  2066. public:
  2067. typedef char * (Recycler::*AllocFuncType)(size_t);
  2068. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  2069. static AllocFuncType GetAllocFunc()
  2070. {
  2071. return &Recycler::AllocLeaf;
  2072. }
  2073. static AllocFuncType GetAllocZeroFunc()
  2074. {
  2075. return &Recycler::AllocLeafZero;
  2076. }
  2077. static FreeFuncType GetFreeFunc()
  2078. {
  2079. return &Recycler::ExplicitFreeLeaf;
  2080. }
  2081. };
  2082. template <>
  2083. class _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
  2084. {
  2085. public:
  2086. typedef char * (Recycler::*AllocFuncType)(size_t);
  2087. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  2088. static AllocFuncType GetAllocFunc()
  2089. {
  2090. return &Recycler::Alloc;
  2091. }
  2092. static AllocFuncType GetAllocZeroFunc()
  2093. {
  2094. return &Recycler::AllocZero;
  2095. }
  2096. static FreeFuncType GetFreeFunc()
  2097. {
  2098. return &Recycler::ExplicitFreeNonLeaf;
  2099. }
  2100. };
  2101. #ifdef RECYCLER_WRITE_BARRIER
  2102. template <>
  2103. class _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
  2104. {
  2105. public:
  2106. typedef char * (Recycler::*AllocFuncType)(size_t);
  2107. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  2108. static AllocFuncType GetAllocFunc()
  2109. {
  2110. return &Recycler::AllocWithBarrier;
  2111. }
  2112. static AllocFuncType GetAllocZeroFunc()
  2113. {
  2114. return &Recycler::AllocZeroWithBarrier;
  2115. }
  2116. static FreeFuncType GetFreeFunc()
  2117. {
  2118. return &Recycler::ExplicitFreeNonLeaf;
  2119. }
  2120. };
  2121. #endif
// This is used by the compiler; when T is NOT a pointer i.e. a value type - it causes leaf allocation
template <typename T>
class TypeAllocatorFunc<Recycler, T> : public _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
};
#if GLOBAL_ENABLE_WRITE_BARRIER
// With the global write barrier on, pointer members allocate through the
// write-barrier policy.
template <typename T>
class TypeAllocatorFunc<Recycler, T *> : public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
{
};
#else
// Partial template specialization; applies to T when it is a pointer
template <typename T>
class TypeAllocatorFunc<Recycler, T *> : public _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
{
};
#endif
// Dummy class to choose the allocation function
class RecyclerLeafAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
// Tag type selecting the non-leaf (scanned) allocation path.
class RecyclerNonLeafAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
// Tag type selecting the write-barrier allocation path.
class RecyclerWriteBarrierAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
// Choose RecyclerLeafAllocator / RecyclerNonLeafAllocator based on "bool isLeaf"
template <bool isLeaf>
struct _RecyclerLeaf { typedef RecyclerLeafAllocator AllocatorType; };
template <>
struct _RecyclerLeaf<false> { typedef RecyclerNonLeafAllocator AllocatorType; };
  2160. template <bool isLeaf>
  2161. class ListTypeAllocatorFunc<Recycler, isLeaf>
  2162. {
  2163. public:
  2164. // RecyclerLeafAllocator / RecyclerNonLeafAllocator based on "bool isLeaf"
  2165. // used by write barrier type traits
  2166. typedef typename _RecyclerLeaf<isLeaf>::AllocatorType EffectiveAllocatorType;
  2167. typedef char * (Recycler::*AllocFuncType)(size_t);
  2168. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  2169. static AllocFuncType GetAllocFunc()
  2170. {
  2171. return isLeaf ? &Recycler::AllocLeaf : &Recycler::Alloc;
  2172. }
  2173. static FreeFuncType GetFreeFunc()
  2174. {
  2175. if (isLeaf)
  2176. {
  2177. return &Recycler::ExplicitFreeLeaf;
  2178. }
  2179. else
  2180. {
  2181. return &Recycler::ExplicitFreeNonLeaf;
  2182. }
  2183. }
  2184. };
// Partial template specialization to allocate as non leaf
template <typename T>
class TypeAllocatorFunc<RecyclerNonLeafAllocator, T> :
#if GLOBAL_ENABLE_WRITE_BARRIER
    public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
#else
    public _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
#endif
{
};
#ifdef RECYCLER_WRITE_BARRIER
template <typename T>
class TypeAllocatorFunc<RecyclerWriteBarrierAllocator, T> : public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
{
};
#endif
template <typename T>
class TypeAllocatorFunc<RecyclerLeafAllocator, T> : public _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
};
// AllocatorInfo maps an allocator tag type to the concrete allocator
// (always Recycler here) and its alloc/free function choosers.
template <typename TAllocType>
struct AllocatorInfo<Recycler, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<Recycler, TAllocType> AllocatorFunc;
    typedef _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy> InstAllocatorFunc; // By default any instance considered non-leaf
};
template <typename TAllocType>
struct AllocatorInfo<RecyclerNonLeafAllocator, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<RecyclerNonLeafAllocator, TAllocType> AllocatorFunc;
    typedef TypeAllocatorFunc<RecyclerNonLeafAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
};
template <typename TAllocType>
struct AllocatorInfo<RecyclerWriteBarrierAllocator, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<RecyclerWriteBarrierAllocator, TAllocType> AllocatorFunc;
    typedef TypeAllocatorFunc<RecyclerWriteBarrierAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
};
template <typename TAllocType>
struct AllocatorInfo<RecyclerLeafAllocator, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<RecyclerLeafAllocator, TAllocType> AllocatorFunc;
    typedef TypeAllocatorFunc<RecyclerLeafAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
};
// ForceNonLeafAllocator / ForceLeafAllocator rebind an allocator selection to
// the requested leafness regardless of what was asked for.
template <>
struct ForceNonLeafAllocator<Recycler>
{
    typedef RecyclerNonLeafAllocator AllocatorType;
};
template <>
struct ForceNonLeafAllocator<RecyclerLeafAllocator>
{
    typedef RecyclerNonLeafAllocator AllocatorType;
};
template <>
struct ForceLeafAllocator<Recycler>
{
    typedef RecyclerLeafAllocator AllocatorType;
};
template <>
struct ForceLeafAllocator<RecyclerNonLeafAllocator>
{
    typedef RecyclerLeafAllocator AllocatorType;
};
// TODO: enable -profile for GC phases.
// access the same profiler object from multiple GC threads which shares one recycler object,
// but profiler object is not thread safe
// The "&& 0" keeps the profiling variants compiled out until the thread-safety
// issue above is resolved; the #else branch defines them all as no-ops.
#if defined(PROFILE_EXEC) && 0
#define RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase) if (recycler->profiler != nullptr) { recycler->profiler->Begin(phase); }
#define RECYCLER_PROFILE_EXEC_END(recycler, phase) if (recycler->profiler != nullptr) { recycler->profiler->End(phase); }
#define RECYCLER_PROFILE_EXEC_BEGIN2(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->Begin(phase1); recycler->profiler->Begin(phase2);}
#define RECYCLER_PROFILE_EXEC_END2(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->End(phase1); recycler->profiler->End(phase2);}
#define RECYCLER_PROFILE_EXEC_CHANGE(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->End(phase1); recycler->profiler->Begin(phase2); }
#define RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase) if (recycler->backgroundProfiler != nullptr) { recycler->backgroundProfiler->Begin(phase); }
#define RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase) if (recycler->backgroundProfiler != nullptr) { recycler->backgroundProfiler->End(phase); }
#define RECYCLER_PROFILE_EXEC_THREAD_BEGIN(background, recycler, phase) if (background) { RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase); } else { RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase); }
#define RECYCLER_PROFILE_EXEC_THREAD_END(background, recycler, phase) if (background) { RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase); } else { RECYCLER_PROFILE_EXEC_END(recycler, phase); }
#else
#define RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase)
#define RECYCLER_PROFILE_EXEC_END(recycler, phase)
#define RECYCLER_PROFILE_EXEC_BEGIN2(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_END2(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_CHANGE(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase)
#define RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase)
#define RECYCLER_PROFILE_EXEC_THREAD_BEGIN(background, recycler, phase)
#define RECYCLER_PROFILE_EXEC_THREAD_END(background, recycler, phase)
#endif
  2277. }
  2278. _Ret_notnull_ inline void * __cdecl
  2279. operator new(DECLSPEC_GUARD_OVERFLOW size_t byteSize, Recycler * alloc, HeapInfo * heapInfo)
  2280. {
  2281. return alloc->HeapAllocR(heapInfo, byteSize);
  2282. }
  2283. inline void __cdecl
  2284. operator delete(void * obj, Recycler * alloc, HeapInfo * heapInfo)
  2285. {
  2286. alloc->HeapFree(heapInfo, obj);
  2287. }
  2288. template<ObjectInfoBits infoBits>
  2289. _Ret_notnull_ inline void * __cdecl
  2290. operator new(DECLSPEC_GUARD_OVERFLOW size_t byteSize, Recycler * recycler, const InfoBitsWrapper<infoBits>&)
  2291. {
  2292. AssertCanHandleOutOfMemory();
  2293. Assert(byteSize != 0);
  2294. void * buffer;
  2295. if (infoBits & EnumClass_1_Bit)
  2296. {
  2297. buffer = recycler->AllocEnumClass<infoBits>(byteSize);
  2298. }
  2299. else
  2300. {
  2301. buffer = recycler->AllocWithInfoBits<infoBits>(byteSize);
  2302. }
  2303. // All of our allocation should throw on out of memory
  2304. Assume(buffer != nullptr);
  2305. return buffer;
  2306. }
#if DBG && defined(RECYCLER_VERIFY_MARK)
// Implemented by the runtime: returns true when the pointer-looking value at
// `offset` inside the object is likely a false reference, so mark
// verification should not flag it.
extern bool IsLikelyRuntimeFalseReference(
    char* objectStartAddress, size_t offset, const char* typeName);
// Grants the verifier access to a class's private members.
#define DECLARE_RECYCLER_VERIFY_MARK_FRIEND() \
    private: \
    friend bool ::IsLikelyRuntimeFalseReference( \
        char* objectStartAddress, size_t offset, const char* typeName);
// Default stub for hosts that have no false-reference heuristic.
#define IMPLEMENT_STUB_IsLikelyRuntimeFalseReference() \
    bool IsLikelyRuntimeFalseReference( \
        char* objectStartAddress, size_t offset, const char* typeName) \
    { return false; }
#else
#define DECLARE_RECYCLER_VERIFY_MARK_FRIEND()
#define IMPLEMENT_STUB_IsLikelyRuntimeFalseReference()
#endif
  2322. template <typename ExternalAllocFunc>
  2323. bool Recycler::DoExternalAllocation(size_t size, ExternalAllocFunc externalAllocFunc)
  2324. {
  2325. // Request external memory allocation
  2326. if (!RequestExternalMemoryAllocation(size))
  2327. {
  2328. // Attempt to free some memory then try again
  2329. CollectNow<CollectOnTypedArrayAllocation>();
  2330. if (!RequestExternalMemoryAllocation(size))
  2331. {
  2332. return false;
  2333. }
  2334. }
  2335. struct AutoExternalAllocation
  2336. {
  2337. bool allocationSucceeded = false;
  2338. Recycler* recycler;
  2339. size_t size;
  2340. AutoExternalAllocation(Recycler* recycler, size_t size): recycler(recycler), size(size) {}
  2341. // In case the externalAllocFunc throws or fails, the destructor will report the failure
  2342. ~AutoExternalAllocation() { if (!allocationSucceeded) recycler->ReportExternalMemoryFailure(size); }
  2343. };
  2344. AutoExternalAllocation externalAllocation(this, size);
  2345. if (externalAllocFunc())
  2346. {
  2347. externalAllocation.allocationSucceeded = true;
  2348. return true;
  2349. }
  2350. return false;
  2351. }