// Recycler.h
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. #include "CollectionState.h"
  7. #include "RecyclerTelemetryInfo.h"
  8. #include "RecyclerWaitReason.h"
  9. #include "Common/ObservableValue.h"
  10. #include "CollectionFlags.h"
  11. namespace Js
  12. {
  13. class Profiler;
  14. enum Phase: unsigned short;
  15. };
  16. namespace JsUtil
  17. {
  18. class ThreadService;
  19. };
  20. #ifdef STACK_BACK_TRACE
  21. class StackBackTraceNode;
  22. #endif
  23. class ScriptEngineBase;
  24. class JavascriptThreadService;
  25. #ifdef PROFILE_MEM
  26. struct RecyclerMemoryData;
  27. #endif
  28. class ThreadContext;
  29. namespace Memory
  30. {
  31. template <typename T> class RecyclerRootPtr;
  32. class AutoBooleanToggle
  33. {
  34. public:
  35. AutoBooleanToggle(bool * b, bool value = true, bool valueMayChange = false)
  36. : b(b)
  37. {
  38. Assert(!(*b));
  39. *b = value;
  40. #if DBG
  41. this->value = value;
  42. this->valueMayChange = valueMayChange;
  43. #endif
  44. }
  45. ~AutoBooleanToggle()
  46. {
  47. if (b)
  48. {
  49. Assert(valueMayChange || *b == value);
  50. *b = false;
  51. }
  52. }
  53. void Leave()
  54. {
  55. Assert(valueMayChange || *b == value);
  56. *b = false;
  57. b = nullptr;
  58. }
  59. private:
  60. bool * b;
  61. #if DBG
  62. bool value;
  63. bool valueMayChange;
  64. #endif
  65. };
  66. template <class T>
  67. class AutoRestoreValue
  68. {
  69. public:
  70. AutoRestoreValue(T* var, const T& val):
  71. variable(var)
  72. {
  73. Assert(var);
  74. oldValue = (*variable);
  75. (*variable) = val;
  76. #ifdef DEBUG
  77. debugSetValue = val;
  78. #endif
  79. }
  80. ~AutoRestoreValue()
  81. {
  82. Assert((*variable) == debugSetValue);
  83. (*variable) = oldValue;
  84. }
  85. private:
  86. #ifdef DEBUG
  87. T debugSetValue;
  88. #endif
  89. T* variable;
  90. T oldValue;
  91. };
  92. class Recycler;
// Functor the recycler hands out for conservative memory scans: operator()
// (defined out of line) reports a span of words to be treated as potential
// recycler pointers.
class RecyclerScanMemoryCallback
{
public:
    RecyclerScanMemoryCallback(Recycler* recycler) : recycler(recycler) {}
    // Scan byteCount bytes starting at obj; implementation lives in the .cpp.
    void operator()(void** obj, size_t byteCount);
private:
    Recycler* recycler; // non-owning back-pointer to the recycler
};
// Empty tag type used to select a placement-new overload by ObjectInfoBits at
// compile time (see RecyclerNewEnumClass / RecyclerNewWithInfoBits below).
template<ObjectInfoBits infoBits>
struct InfoBitsWrapper{};
#if ENABLE_WEAK_REFERENCE_REGIONS
// Minimal pointer trait (true only for K*); used below to restrict weak
// reference region items to pointer types.
template<typename T>
static constexpr bool is_pointer = false;
template<typename K>
static constexpr bool is_pointer<K*> = true;
// One slot in a weak reference region: a weakly-held pointer plus the heap
// block that backs it. The fields are maintained by the Recycler (a friend).
template<typename T>
class RecyclerWeakReferenceRegionItem {
static_assert(is_pointer<T>, "Weak references must be to pointer types");
friend class Recycler;
public:
RecyclerWeakReferenceRegionItem() : ptr(T()), heapBlock(nullptr) {};
// Read the weakly-held pointer (may be null if cleared by the collector).
operator T() const { return ptr; };
T operator=(T newPtr) {
Assert(ptr == nullptr); // For safety with concurrent marking, only allow setting the pointer to non-null from null
heapBlock = nullptr;
return ptr = newPtr;
};
// Drop both the pointer and its backing heap block reference.
void Clear() { heapBlock = nullptr; ptr = nullptr; };
private:
// Non-copyable: items live inside recycler-managed arrays.
RecyclerWeakReferenceRegionItem(RecyclerWeakReferenceRegionItem<T>&) = delete;
FieldNoBarrier(T) ptr;
FieldNoBarrier(HeapBlock*) heapBlock; // Note: the low bit of the heapBlock is used for background marking
};
// A contiguous array of weak reference items plus the heap block holding the
// array itself; populated by the Recycler (a friend).
class RecyclerWeakReferenceRegion {
friend class Recycler;
public:
RecyclerWeakReferenceRegionItem<void*>* GetPtr() const { return ptr; }
size_t GetCount() const { return count; }
HeapBlock* GetHeapBlock() const { return arrayHeapBlock; }
private:
FieldNoBarrier(RecyclerWeakReferenceRegionItem<void*>*) ptr;
FieldNoBarrier(size_t) count;
FieldNoBarrier(HeapBlock*) arrayHeapBlock;
};
#endif
// Allocation macro
// Core RecyclerNew* entry points. Each forwards to an Allocator* helper with
// the Recycler allocator and the appropriate Alloc* method: Z = zero-filled,
// Plus = extra trailing bytes, Struct = no-constructor POD, Array = counted,
// Finalized/Tracked = objects needing finalize/track callbacks (cast through
// FinalizableObject back to T).
#define RecyclerNew(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocInlined, T, __VA_ARGS__)
#define RecyclerNewPlus(recycler,size,T,...) AllocatorNewPlus(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusZ(recycler,size,T,...) AllocatorNewPlusZ(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocZeroInlined, T, __VA_ARGS__)
#define RecyclerNewStruct(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocInlined, T)
#define RecyclerNewStructZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocZeroInlined, T)
#define RecyclerNewStructPlus(recycler,size,T) AllocatorNewStructPlus(Recycler, recycler, size, T)
#define RecyclerNewArray(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, Alloc, T, count)
#define RecyclerNewArrayZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZero, T, count)
#define RecyclerNewFinalized(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedInlined, T, __VA_ARGS__)))
#define RecyclerNewFinalizedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalized, size, T, __VA_ARGS__)))
#define RecyclerNewTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedInlined, T, __VA_ARGS__)))
// These two select the placement-new overload via an InfoBitsWrapper tag so
// the requested ObjectInfoBits are applied at allocation time.
#define RecyclerNewEnumClass(recycler, enumClass, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<enumClass>()) T(__VA_ARGS__)
#define RecyclerNewWithInfoBits(recycler, infoBits, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<infoBits>()) T(__VA_ARGS__)
#define RecyclerNewFinalizedClientTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedClientTrackedInlined, T, __VA_ARGS__)))
// Software write-barrier variants: same shapes as above but routed through
// the *WithBarrier allocation paths (and WithBarrierBit for info-bits forms).
#if defined(RECYCLER_WRITE_BARRIER_ALLOC)
#define RecyclerNewWithBarrier(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocWithBarrier, T, __VA_ARGS__)
#define RecyclerNewWithBarrierPlus(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocWithBarrier, size, T, __VA_ARGS__)
#define RecyclerNewWithBarrierPlusZ(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocZeroWithBarrier, size, T, __VA_ARGS__)
#define RecyclerNewWithBarrierZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocZeroWithBarrier, T, __VA_ARGS__)
#define RecyclerNewWithBarrierStruct(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocWithBarrier, T)
#define RecyclerNewWithBarrierStructZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocZeroWithBarrier, T)
#define RecyclerNewWithBarrierStructPlus(recycler,size,T) AllocatorNewStructPlusBase(Recycler, recycler, AllocWithBarrier, size, T)
#define RecyclerNewWithBarrierArray(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocWithBarrier, T, count)
#define RecyclerNewWithBarrierArrayZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZeroWithBarrier, T, count)
#define RecyclerNewWithBarrierFinalized(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedWithBarrierInlined, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierFinalizedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedWithBarrier, size, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedWithBarrierInlined, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierEnumClass(recycler, enumClass, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<(ObjectInfoBits)(enumClass | WithBarrierBit)>()) T(__VA_ARGS__)
#define RecyclerNewWithBarrierWithInfoBits(recycler, infoBits, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<(ObjectInfoBits)(infoBits | WithBarrierBit)>()) T(__VA_ARGS__)
#define RecyclerNewWithBarrierFinalizedClientTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedClientTrackedWithBarrierInlined, T, __VA_ARGS__)))
#endif
// When the write barrier is compiled out entirely, the *WithBarrier macro
// names degrade to their plain counterparts so call sites need no #ifdefs.
#ifndef RECYCLER_WRITE_BARRIER
#define RecyclerNewWithBarrier RecyclerNew
#define RecyclerNewWithBarrierPlus RecyclerNewPlus
#define RecyclerNewWithBarrierPlusZ RecyclerNewPlusZ
#define RecyclerNewWithBarrierZ RecyclerNewZ
#define RecyclerNewWithBarrierStruct RecyclerNewStruct
#define RecyclerNewWithBarrierStructZ RecyclerNewStructZ
#define RecyclerNewWithBarrierStructPlus RecyclerNewStructPlus
#define RecyclerNewWithBarrierArray RecyclerNewArray
#define RecyclerNewWithBarrierArrayZ RecyclerNewArrayZ
#define RecyclerNewWithBarrierFinalized RecyclerNewFinalized
#define RecyclerNewWithBarrierFinalizedPlus RecyclerNewFinalizedPlus
#define RecyclerNewWithBarrierTracked RecyclerNewTracked
#define RecyclerNewWithBarrierEnumClass RecyclerNewEnumClass
#define RecyclerNewWithBarrierWithInfoBits RecyclerNewWithInfoBits
#define RecyclerNewWithBarrierFinalizedClientTracked RecyclerNewFinalizedClientTracked
#endif
// Leaf allocators
// Leaf objects contain no recycler pointers, so the collector never scans
// their payload; these variants route to the AllocLeaf* paths.
#define RecyclerNewLeaf(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafInlined, T, __VA_ARGS__)
#define RecyclerNewLeafZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafZeroInlined, T, __VA_ARGS__)
#define RecyclerNewPlusLeaf(recycler,size,T,...) AllocatorNewPlusLeaf(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusLeafZ(recycler,size,T,...) AllocatorNewPlusLeafZ(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewStructLeaf(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocLeafInlined, T)
#define RecyclerNewStructLeafZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocLeafZeroInlined, T)
#define RecyclerNewArrayLeafZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafZero, T, count)
#define RecyclerNewArrayLeaf(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeaf, T, count)
#define RecyclerNewFinalizedLeaf(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedLeafInlined, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeafPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedLeaf, size, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeaf(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedLeafInlined, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeafPlusZ(recycler,size,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocZeroTrackedLeafInlined, size, T, __VA_ARGS__)))
#ifdef RECYCLER_VISITED_HOST
// We need to track these allocations. The RecyclerVisitedHost* object allocation APIs don't provide us with the type of the objects being allocated. Use the DummyVTableObject type used elsewhere to track the allocations.
#define RecyclerAllocVisitedHostTracedAndFinalized(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<RecyclerVisitedHostTracedFinalizableBits>(size)
#define RecyclerAllocVisitedHostFinalized(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<RecyclerVisitedHostFinalizableBits>(size)
#define RecyclerAllocVisitedHostTraced(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<RecyclerVisitedHostTracedBits>(size)
#define RecyclerAllocLeaf(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<LeafBit>(size)
#endif
// Object-lifetime tracing builds route through the *Trace allocation paths;
// otherwise the *Trace names alias the plain macros above.
#ifdef TRACE_OBJECT_LIFETIME
#define RecyclerNewLeafTrace(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafTrace, T, __VA_ARGS__)
#define RecyclerNewLeafZTrace(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafZeroTrace, T, __VA_ARGS__)
#define RecyclerNewPlusLeafTrace(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocLeafTrace, size, T, __VA_ARGS__)
#define RecyclerNewArrayLeafZTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafZeroTrace, T, count)
#define RecyclerNewArrayTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocTrace, T, count)
#define RecyclerNewArrayZTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZeroTrace, T, count)
#define RecyclerNewArrayLeafTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafTrace, T, count)
#define RecyclerNewFinalizedTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedTrace, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeafTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedLeafTrace, T, __VA_ARGS__)))
#define RecyclerNewFinalizedPlusTrace(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedTrace, size, T, __VA_ARGS__)))
#define RecyclerNewTrackedTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedTrace, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeafTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedLeafTrace, T, __VA_ARGS__)))
#else
#define RecyclerNewLeafTrace RecyclerNewLeaf
#define RecyclerNewLeafZTrace RecyclerNewLeafZ
#define RecyclerNewPlusLeafTrace RecyclerNewPlusLeaf
#define RecyclerNewArrayLeafZTrace RecyclerNewArrayLeafZ
#define RecyclerNewArrayTrace RecyclerNewArray
#define RecyclerNewArrayZTrace RecyclerNewArrayZ
#define RecyclerNewArrayLeafTrace RecyclerNewArrayLeaf
#define RecyclerNewFinalizedTrace RecyclerNewFinalized
#define RecyclerNewFinalizedLeafTrace RecyclerNewFinalizedLeaf
#define RecyclerNewFinalizedPlusTrace RecyclerNewFinalizedPlus
#define RecyclerNewTrackedTrace RecyclerNewTracked
#define RecyclerNewTrackedLeafTrace RecyclerNewTrackedLeaf
#endif
  235. #ifdef RECYCLER_TRACE
  236. #define RecyclerVerboseTrace(flags, ...) \
  237. if (flags.Verbose && flags.Trace.IsEnabled(Js::RecyclerPhase)) \
  238. { \
  239. Output::Print(__VA_ARGS__); \
  240. }
  241. #define AllocationVerboseTrace(flags, ...) \
  242. if (flags.Verbose && flags.Trace.IsEnabled(Js::MemoryAllocationPhase)) \
  243. { \
  244. Output::Print(__VA_ARGS__); \
  245. }
  246. #define LargeAllocationVerboseTrace(flags, ...) \
  247. if (flags.Verbose && \
  248. (flags.Trace.IsEnabled(Js::MemoryAllocationPhase) || \
  249. flags.Trace.IsEnabled(Js::LargeMemoryAllocationPhase))) \
  250. { \
  251. Output::Print(__VA_ARGS__); \
  252. }
  253. #define PageAllocatorAllocationVerboseTrace(flags, ...) \
  254. if (flags.Verbose && flags.Trace.IsEnabled(Js::PageAllocatorAllocPhase)) \
  255. { \
  256. Output::Print(__VA_ARGS__); \
  257. }
  258. #else
  259. #define RecyclerVerboseTrace(...)
  260. #define AllocationVerboseTrace(...)
  261. #define LargeAllocationVerboseTrace(...)
  262. #endif
// Placement-new into a specific heap info, and the matching explicit free.
#define RecyclerHeapNew(recycler,heapInfo,T,...) new (recycler, heapInfo) T(__VA_ARGS__)
#define RecyclerHeapDelete(recycler,heapInfo,addr) (static_cast<Recycler *>(recycler)->HeapFree(heapInfo,addr))
// Host-supplied callback invoked to mark roots owned by external components.
typedef void (__cdecl* ExternalRootMarker)(void *);
  266. class RecyclerCollectionWrapper
  267. {
  268. public:
  269. RecyclerCollectionWrapper() :
  270. _isScriptContextCloseGCPending(FALSE)
  271. { }
  272. typedef BOOL (Recycler::*CollectionFunction)(CollectionFlags flags);
  273. virtual void PreCollectionCallBack(CollectionFlags flags) = 0;
  274. virtual void PreSweepCallback() = 0;
  275. virtual void PreRescanMarkCallback() = 0;
  276. virtual size_t RootMarkCallback(RecyclerScanMemoryCallback& scanMemoryCallback, BOOL * stacksScannedByRuntime) = 0;
  277. virtual void RescanMarkTimeoutCallback() = 0;
  278. virtual void EndMarkCallback() = 0;
  279. virtual void ConcurrentCallback() = 0;
  280. virtual void WaitCollectionCallBack() = 0;
  281. virtual void PostCollectionCallBack() = 0;
  282. virtual BOOL ExecuteRecyclerCollectionFunction(Recycler * recycler, CollectionFunction function, CollectionFlags flags) = 0;
  283. virtual uint GetRandomNumber() = 0;
  284. virtual bool DoSpecialMarkOnScanStack() = 0;
  285. virtual void OnScanStackCallback(void ** stackTop, size_t byteCount, void ** registers, size_t registersByteCount) = 0;
  286. virtual void PostSweepRedeferralCallBack() = 0;
  287. #ifdef FAULT_INJECTION
  288. virtual void DisposeScriptContextByFaultInjectionCallBack() = 0;
  289. #endif
  290. virtual void DisposeObjects(Recycler * recycler) = 0;
  291. virtual void PreDisposeObjectsCallBack() = 0;
  292. #ifdef ENABLE_PROJECTION
  293. virtual void MarkExternalWeakReferencedObjects(bool inPartialCollect) = 0;
  294. virtual void ResolveExternalWeakReferencedObjects() = 0;
  295. #endif
  296. #if DBG || defined(PROFILE_EXEC)
  297. virtual bool AsyncHostOperationStart(void *) = 0;
  298. virtual void AsyncHostOperationEnd(bool wasInAsync, void *) = 0;
  299. #endif
  300. #if DBG
  301. virtual void CheckJsReentrancyOnDispose() = 0;
  302. #endif
  303. BOOL GetIsScriptContextCloseGCPending()
  304. {
  305. return _isScriptContextCloseGCPending;
  306. }
  307. void ClearIsScriptContextCloseGCPending()
  308. {
  309. _isScriptContextCloseGCPending = FALSE;
  310. }
  311. void SetIsScriptContextCloseGCPending()
  312. {
  313. _isScriptContextCloseGCPending = TRUE;
  314. }
  315. protected:
  316. BOOL _isScriptContextCloseGCPending;
  317. };
// No-op implementation of RecyclerCollectionWrapper used when no host wrapper
// is installed. Every callback is an empty stub except
// ExecuteRecyclerCollectionFunction and DisposeObjects (defined out of line).
class DefaultRecyclerCollectionWrapper : public RecyclerCollectionWrapper
{
public:
    virtual void PreCollectionCallBack(CollectionFlags flags) override {}
    virtual void PreSweepCallback() override {}
    virtual void PreRescanMarkCallback() override {}
    virtual void RescanMarkTimeoutCallback() override {}
    virtual void EndMarkCallback() override {}
    // No host roots: report nothing scanned and that stacks were not handled.
    virtual size_t RootMarkCallback(RecyclerScanMemoryCallback& scanMemoryCallback, BOOL * stacksScannedByRuntime) override { *stacksScannedByRuntime = FALSE; return 0; }
    virtual void ConcurrentCallback() override {}
    virtual void WaitCollectionCallBack() override {}
    virtual void PostCollectionCallBack() override {}
    // Defined in the .cpp; consults IsCollectionDisabled before invoking.
    virtual BOOL ExecuteRecyclerCollectionFunction(Recycler * recycler, CollectionFunction function, CollectionFlags flags) override;
    virtual uint GetRandomNumber() override { return 0; }
    virtual bool DoSpecialMarkOnScanStack() override { return false; }
    virtual void OnScanStackCallback(void ** stackTop, size_t byteCount, void ** registers, size_t registersByteCount) override {};
    virtual void PostSweepRedeferralCallBack() override {}
#ifdef FAULT_INJECTION
    virtual void DisposeScriptContextByFaultInjectionCallBack() override {};
#endif
    // Defined in the .cpp.
    virtual void DisposeObjects(Recycler * recycler) override;
    virtual void PreDisposeObjectsCallBack() override {};
#ifdef ENABLE_PROJECTION
    virtual void MarkExternalWeakReferencedObjects(bool inPartialCollect) override {};
    virtual void ResolveExternalWeakReferencedObjects() override {};
#endif
#if DBG || defined(PROFILE_EXEC)
    virtual bool AsyncHostOperationStart(void *) override { return false; };
    virtual void AsyncHostOperationEnd(bool wasInAsync, void *) override {};
#endif
#if DBG
    virtual void CheckJsReentrancyOnDispose() override {}
#endif
    // Shared singleton used wherever no custom wrapper is provided.
    static DefaultRecyclerCollectionWrapper Instance;
private:
    static bool IsCollectionDisabled(Recycler * recycler);
};
  355. #ifdef RECYCLER_STATS
// Aggregated statistics for one garbage-collection cycle; compiled in only
// for RECYCLER_STATS builds. Counters are updated through the
// RECYCLER_STATS_* macros and dumped by the Print*Stats methods on Recycler.
struct RecyclerCollectionStats
{
    size_t startCollectAllocBytes;      // uncollected alloc bytes at collection start
#if ENABLE_PARTIAL_GC
    size_t startCollectNewPageCount;    // new-page count at collection start
#endif
    size_t continueCollectAllocBytes;
    size_t finishCollectTryCount;

    // Heuristic Stats
#if ENABLE_PARTIAL_GC
    size_t rescanRootBytes;
    size_t estimatedPartialReuseBytes;
    size_t uncollectedNewPageCountPartialCollect;
    size_t partialCollectSmallHeapBlockReuseMinFreeBytes;
    double collectEfficacy;
    double collectCost;
#endif

    // Mark stats
    size_t tryMarkCount; // # of pointer try mark (* pointer size to get total number byte looked at)
    size_t tryMarkNullCount;
    size_t tryMarkUnalignedCount;
    size_t tryMarkNonRecyclerMemoryCount;
    size_t tryMarkInteriorCount;
    size_t tryMarkInteriorNullCount;
    size_t tryMarkInteriorNonRecyclerMemoryCount;
    size_t rootCount;
    size_t stackCount;
    size_t remarkCount;
    size_t scanCount; // non-leaf objects marked.
    size_t trackCount;
    size_t finalizeCount;
    size_t markThruNewObjCount;
    size_t markThruFalseNewObjCount;

    // Per-pass mark/rescan counters; one instance for the in-thread pass and
    // (under concurrent GC) one per background repeat-mark pass.
    struct MarkData
    {
        // Rescan stats
        size_t rescanPageCount;
        size_t rescanObjectCount;
        size_t rescanObjectByteCount;
        size_t rescanLargePageCount;
        size_t rescanLargeObjectCount;
        size_t rescanLargeByteCount;
        size_t markCount; // total number of object marked
        size_t markBytes; // size of all objects marked.
    } markData;
#if ENABLE_CONCURRENT_GC
    MarkData backgroundMarkData[RecyclerHeuristic::MaxBackgroundRepeatMarkCount];
    size_t trackedObjectCount;
#endif
#if ENABLE_PARTIAL_GC
    size_t clientTrackedObjectCount;
#endif

    // Sweep stats
    size_t heapBlockCount[HeapBlock::BlockTypeCount]; // number of heap blocks (processed during swept)
    size_t heapBlockFreeCount[HeapBlock::BlockTypeCount]; // number of heap blocks deleted
    size_t heapBlockConcurrentSweptCount[HeapBlock::SmallBlockTypeCount];
    size_t heapBlockSweptCount[HeapBlock::SmallBlockTypeCount]; // number of heap blocks swept
    size_t objectSweptCount; // objects freed (free list + whole page freed)
    size_t objectSweptBytes;
    size_t objectSweptFreeListCount; // objects freed (free list)
    size_t objectSweptFreeListBytes;
    size_t objectSweepScanCount; // number of objects walked for sweeping (exclude whole page freed)
    size_t finalizeSweepCount; // number of objects finalizer/dispose called
#if ENABLE_PARTIAL_GC
    size_t smallNonLeafHeapBlockPartialReuseCount[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialReuseBytes[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialUnusedCount[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialUnusedBytes[HeapBlock::SmallBlockTypeCount];
#endif

    // Memory Stats
    size_t heapBlockFreeByteCount[HeapBlock::BlockTypeCount]; // The remaining usable free byte count
    size_t largeHeapBlockUsedByteCount; // Used byte count
    size_t largeHeapBlockTotalByteCount; // Total byte count

    // Empty/zero heap block stats
    uint numEmptySmallBlocks[HeapBlock::SmallBlockTypeCount];
    uint numZeroedOutSmallBlocks;
};
// Stat-update helpers. In RECYCLER_STATS builds they mutate the named field
// of r->collectionStats; the INTERLOCKED variants do so atomically so they
// are safe to use from concurrent GC threads. In other builds every macro
// expands to nothing, so callers pay no cost.
#define RECYCLER_STATS_INC_IF(cond, r, f) if (cond) { RECYCLER_STATS_INC(r, f); }
#define RECYCLER_STATS_INC(r, f) ++r->collectionStats.f
#define RECYCLER_STATS_INTERLOCKED_INC(r, f) { InterlockedIncrement((LONG *)&r->collectionStats.f); }
#define RECYCLER_STATS_DEC(r, f) --r->collectionStats.f
#define RECYCLER_STATS_ADD(r, f, v) r->collectionStats.f += (v)
#define RECYCLER_STATS_INTERLOCKED_ADD(r, f, v) { InterlockedAdd((LONG *)&r->collectionStats.f, (LONG)(v)); }
#define RECYCLER_STATS_SUB(r, f, v) r->collectionStats.f -= (v)
#define RECYCLER_STATS_SET(r, f, v) r->collectionStats.f = v
#else
// RECYCLER_STATS disabled: all stat macros are no-ops.
#define RECYCLER_STATS_INC_IF(cond, r, f)
#define RECYCLER_STATS_INC(r, f)
#define RECYCLER_STATS_INTERLOCKED_INC(r, f)
#define RECYCLER_STATS_DEC(r, f)
#define RECYCLER_STATS_ADD(r, f, v)
#define RECYCLER_STATS_INTERLOCKED_ADD(r, f, v)
#define RECYCLER_STATS_SUB(r, f, v)
#define RECYCLER_STATS_SET(r, f, v)
#endif
  451. #ifdef RECYCLER_TRACE
// Snapshot of the inputs to a collection decision, kept only for
// RECYCLER_TRACE builds so trace output can show why a GC was (or was not)
// started with the given flags.
struct CollectionParam
{
    CollectionFlags flags;
    bool finishOnly;
    bool repeat;
    bool priorityBoostConcurrentSweepOverride;
    bool domCollect;
    int timeDiff;
    size_t uncollectedAllocBytes;
    size_t uncollectedPinnedObjects;
#if ENABLE_PARTIAL_GC
    size_t uncollectedNewPageCountPartialCollect;
    size_t uncollectedNewPageCount;
    size_t unusedPartialCollectFreeBytes;
    bool inPartialCollectMode;
#endif
};
  469. #endif
  470. #include "RecyclerObjectGraphDumper.h"
  471. #if ENABLE_CONCURRENT_GC
  472. class RecyclerParallelThread
  473. {
  474. friend class ThreadContext;
  475. public:
  476. typedef void (Recycler::* WorkFunc)();
  477. RecyclerParallelThread(Recycler * recycler, WorkFunc workFunc) :
  478. recycler(recycler),
  479. workFunc(workFunc),
  480. concurrentWorkReadyEvent(NULL),
  481. concurrentWorkDoneEvent(NULL),
  482. concurrentThread(NULL)
  483. {
  484. }
  485. ~RecyclerParallelThread()
  486. {
  487. Assert(concurrentThread == NULL);
  488. Assert(concurrentWorkReadyEvent == NULL);
  489. Assert(concurrentWorkDoneEvent == NULL);
  490. }
  491. bool StartConcurrent();
  492. void WaitForConcurrent();
  493. void Shutdown();
  494. bool EnableConcurrent(bool synchronizeOnStartup);
  495. private:
  496. // Static entry point for thread creation
  497. static unsigned int CALLBACK StaticThreadProc(LPVOID lpParameter);
  498. // Static entry point for thread service usage
  499. static void CALLBACK StaticBackgroundWorkCallback(void * callbackData);
  500. private:
  501. WorkFunc workFunc;
  502. Recycler * recycler;
  503. HANDLE concurrentWorkReadyEvent;// main thread uses this event to tell concurrent threads that the work is ready
  504. HANDLE concurrentWorkDoneEvent;// concurrent threads use this event to tell main thread that the work allocated is done
  505. HANDLE concurrentThread;
  506. bool synchronizeOnStartup;
  507. };
  508. #endif
  509. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
// RAII guard (debug-config builds) around recycler page protection; the
// constructor/destructor bodies live in the .cpp. Unprotect() ends the
// protected span before the destructor would.
class AutoProtectPages
{
public:
    AutoProtectPages(Recycler* recycler, bool protectEnabled);
    ~AutoProtectPages();
    void Unprotect();
private:
    Recycler* recycler;   // target recycler (not owned)
    bool isReadOnly;      // tracks whether pages are currently protected — see .cpp
};
  520. #endif
  521. class Recycler
  522. {
  523. friend class RecyclerScanMemoryCallback;
  524. friend class RecyclerSweep;
  525. friend class RecyclerSweepManager;
  526. friend class MarkContext;
  527. friend class HeapBlock;
  528. friend class HeapBlockMap32;
  529. #if ENABLE_CONCURRENT_GC
  530. friend class RecyclerParallelThread;
  531. #endif
  532. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  533. friend class AutoProtectPages;
  534. #endif
  535. #ifdef ENABLE_BASIC_TELEMETRY
  536. friend class RecyclerTelemetryInfo;
  537. #endif
  538. template <typename T> friend class RecyclerWeakReference;
  539. template <typename T> friend class WeakReferenceHashTable;
  540. template <typename TBlockType>
  541. friend class SmallHeapBlockAllocator; // Needed for FindHeapBlock
  542. #if defined(RECYCLER_TRACE)
  543. friend class JavascriptThreadService;
  544. #endif
  545. #ifdef HEAP_ENUMERATION_VALIDATION
  546. friend class ActiveScriptProfilerHeapEnum;
  547. #endif
  548. friend class ScriptEngineBase; // This is for disabling GC for certain Host operations.
  549. #if !FLOATVAR
  550. friend class ::CodeGenNumberThreadAllocator;
  551. friend struct ::XProcNumberPageSegmentManager;
  552. #endif
  553. public:
  554. static const uint ConcurrentThreadStackSize = 300000;
  555. static const bool FakeZeroLengthArray = true;
  556. #ifdef RECYCLER_PAGE_HEAP
  557. // Keeping as constant in case we want to tweak the value here
  558. // Set to 0 so that the tool can do the filtering instead of the runtime
  559. #if DBG
  560. static const int s_numFramesToSkipForPageHeapAlloc = 10;
  561. static const int s_numFramesToSkipForPageHeapFree = 0;
  562. static const int s_numFramesToCaptureForPageHeap = 32;
  563. #else
  564. static const int s_numFramesToSkipForPageHeapAlloc = 0;
  565. static const int s_numFramesToSkipForPageHeapFree = 0;
  566. static const int s_numFramesToCaptureForPageHeap = 32;
  567. #endif
  568. #endif
  569. uint Cookie;
// RAII marker for an externally requested stack-skipping GC. The flag it
// sets (isExternalStackSkippingGC) exists only in DBG builds, so in release
// builds construction and destruction compile down to nothing (the Assert
// also disappears); its purpose is to let re-entrant use be asserted against.
class AutoEnterExternalStackSkippingGCMode
{
public:
    AutoEnterExternalStackSkippingGCMode(Recycler* recycler):
        _recycler(recycler)
    {
        // Setting this in a re-entrant mode is not allowed
        Assert(!recycler->isExternalStackSkippingGC);
#if DBG
        _recycler->isExternalStackSkippingGC = true;
#endif
    }
    ~AutoEnterExternalStackSkippingGCMode()
    {
#if DBG
        _recycler->isExternalStackSkippingGC = false;
#endif
    }
private:
    Recycler* _recycler; // target recycler (not owned)
};
  591. private:
  592. class AutoSwitchCollectionStates
  593. {
  594. public:
  595. AutoSwitchCollectionStates(Recycler* recycler, CollectionState entryState, CollectionState exitState):
  596. _recycler(recycler),
  597. _exitState(exitState)
  598. {
  599. _recycler->SetCollectionState(entryState);
  600. }
  601. ~AutoSwitchCollectionStates()
  602. {
  603. _recycler->SetCollectionState(_exitState);
  604. }
  605. private:
  606. Recycler* _recycler;
  607. CollectionState _exitState;
  608. };
  609. #if defined(ENABLE_JS_ETW)
  610. ETWEventGCActivationTrigger collectionStartReason;
  611. CollectionFlags collectionStartFlags;
  612. ETWEventGCActivationTrigger collectionFinishReason;
  613. #endif
// Observes every assignment to Recycler::collectionState. Under
// ENABLE_BASIC_TELEMETRY it reports pass boundaries to telemetry: leaving
// NotCollecting for a real collecting state starts a pass, and returning to
// NotCollecting from one ends it. Otherwise the callback is a no-op.
class CollectionStateChangedObserver : public ObservableValueObserver<CollectionState>
{
private:
    Recycler* recycler;
public:
    CollectionStateChangedObserver(Recycler* recycler)
    {
        this->recycler = recycler;
    }
    virtual void ValueChanged(const CollectionState& newVal, const CollectionState& oldVal)
    {
#ifdef ENABLE_BASIC_TELEMETRY
        // Pass starts: transitioned out of NotCollecting into an actual
        // collection state (PreCollection and Exit are not counted as passes).
        if (oldVal == CollectionState::CollectionStateNotCollecting &&
            newVal != CollectionState::CollectionStateNotCollecting &&
            newVal != CollectionState::Collection_PreCollection &&
            newVal != CollectionState::CollectionStateExit)
        {
            this->recycler->GetRecyclerTelemetryInfo().StartPass(newVal);
        }
        // Pass ends: returned to NotCollecting from an actual collection state.
        else if (oldVal != CollectionState::CollectionStateNotCollecting &&
            oldVal != CollectionState::Collection_PreCollection &&
            oldVal != CollectionState::CollectionStateExit &&
            newVal == CollectionState::CollectionStateNotCollecting)
        {
            this->recycler->GetRecyclerTelemetryInfo().EndPass(oldVal);
        }
#endif
    }
};
CollectionStateChangedObserver collectionStateChangedObserver;
// Current position in the GC state machine; assignment notifies the observer above.
ObservableValue<CollectionState> collectionState;
// Single choke point for state transitions so the observer always fires.
inline void SetCollectionState(CollectionState newState)
{
    this->collectionState = newState;
}
  649. JsUtil::ThreadService *threadService;
  650. #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
  651. bool allowAllocationsDuringConcurrentSweepForCollection;
  652. #endif
  653. HeapBlockMap heapBlockMap;
#if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
// Pin count for one pinned object, plus (with STACK_BACK_TRACE) the pin-site
// stack traces used by leak reporting. The operators let it be used
// interchangeably with the plain uint typedef'd in non-leak-tracking builds.
struct PinRecord
{
#ifdef STACK_BACK_TRACE
    PinRecord() : refCount(0), stackBackTraces(nullptr) {}
#else
    PinRecord() : refCount(0) {}
#endif
    // Assignment is only ever used to reset the count to zero; anything else
    // would lose the ref count, so it is asserted against.
    PinRecord& operator=(uint newRefCount)
    {
#ifdef STACK_BACK_TRACE
        Assert(stackBackTraces == nullptr);
#endif
        Assert(newRefCount == 0); refCount = 0; return *this;
    }
    PinRecord& operator++() { ++refCount; return *this; }
    PinRecord& operator--() { --refCount; return *this; }
    operator uint() const { return refCount; }
#ifdef STACK_BACK_TRACE
    StackBackTraceNode * stackBackTraces;
#endif
private:
    uint refCount;
};
#else
// Leak tracking disabled: a pin record is just the raw pin count.
typedef uint PinRecord;
#endif
  681. typedef SimpleHashTable<void *, PinRecord, HeapAllocator, DefaultComparer, true, PrimePolicy> PinnedObjectHashTable;
  682. PinnedObjectHashTable pinnedObjectMap;
  683. WeakReferenceHashTable<PrimePolicy> weakReferenceMap;
  684. uint weakReferenceCleanupId;
  685. #if ENABLE_WEAK_REFERENCE_REGIONS
  686. SList<RecyclerWeakReferenceRegion, HeapAllocator> weakReferenceRegionList;
  687. #endif
  688. void * transientPinnedObject;
  689. #if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
  690. #ifdef STACK_BACK_TRACE
  691. StackBackTrace * transientPinnedObjectStackBackTrace;
  692. #endif
  693. #endif
// Arena tracked in guestArenaList. pendingDelete marks an arena whose
// deletion was requested but deferred — presumably until the GC is no longer
// scanning it for roots (see DeleteGuestArena in the .cpp; confirm there).
struct GuestArenaAllocator : public ArenaAllocator
{
    GuestArenaAllocator(__in_z char16 const* name, PageAllocator * pageAllocator, void (*outOfMemoryFunc)())
        : ArenaAllocator(name, pageAllocator, outOfMemoryFunc), pendingDelete(false)
    {
    }
    bool pendingDelete;
};
  702. DListBase<GuestArenaAllocator> guestArenaList;
  703. DListBase<ArenaData*> externalGuestArenaList; // guest arenas are scanned for roots
#ifdef RECYCLER_PAGE_HEAP
bool isPageHeapEnabled;          // page-heap diagnostics active for this recycler
bool capturePageHeapAllocStack;  // capture a call stack at page-heap alloc
bool capturePageHeapFreeStack;   // capture a call stack at page-heap free
inline bool IsPageHeapEnabled() const { return isPageHeapEnabled; }
inline bool ShouldCapturePageHeapAllocStack() const { return capturePageHeapAllocStack; }
void VerifyPageHeapFillAfterAlloc(char* memBlock, size_t size, ObjectInfoBits attributes);
#else
// Page heap compiled out: queries are constant-false so callers need no #ifdefs.
inline const bool IsPageHeapEnabled() const { return false; }
inline bool ShouldCapturePageHeapAllocStack() const { return false; }
#endif
  715. #ifdef RECYCLER_MARK_TRACK
  716. MarkMap* markMap;
  717. CriticalSection markMapCriticalSection;
  718. void PrintMarkMap();
  719. void ClearMarkMap();
  720. #endif
  721. // Number of pages to reserve for the primary mark stack
  722. // This is the minimum number of pages to guarantee that a single heap block
  723. // can be rescanned in the worst possible case where every object in a heap block
  724. // in the smallest bucket needs to be rescanned
  725. // These many pages being reserved guarantees that in OOM Rescan, we can make progress
  726. // on every rescan iteration
  727. // We add one because there is a small amount of the page reserved for page pool metadata
  728. // so we need to allocate an additional page to be sure
  729. // Currently, this works out to 2 pages on 32-bit and 5 pages on 64-bit
  730. // NOTE: We have reduced the PageCount for small blocks to 1. This brought down the number of pages reserved for x64 from 5 to 2. This has not shown
  731. // any adverse impact.
  732. static const int PrimaryMarkStackReservedPageCount =
  733. ((SmallAllocationBlockAttributes::PageCount * MarkContext::MarkCandidateSize) / SmallAllocationBlockAttributes::MinObjectSize) + 1;
  734. MarkContext markContext;
  735. // Contexts for parallel marking.
  736. // We support up to 4 way parallelism, main context + 3 additional parallel contexts.
  737. MarkContext parallelMarkContext1;
  738. MarkContext parallelMarkContext2;
  739. MarkContext parallelMarkContext3;
  740. // Page pools for above markContexts
  741. PagePool markPagePool;
  742. PagePool parallelMarkPagePool1;
  743. PagePool parallelMarkPagePool2;
  744. PagePool parallelMarkPagePool3;
bool IsMarkStackEmpty();
// True if the main mark context or any of the three parallel contexts still has queued mark work.
bool HasPendingMarkObjects() const { return markContext.HasPendingMarkObjects() || parallelMarkContext1.HasPendingMarkObjects() || parallelMarkContext2.HasPendingMarkObjects() || parallelMarkContext3.HasPendingMarkObjects(); }
// True if any of the four mark contexts still has queued tracked-object work.
bool HasPendingTrackObjects() const { return markContext.HasPendingTrackObjects() || parallelMarkContext1.HasPendingTrackObjects() || parallelMarkContext2.HasPendingTrackObjects() || parallelMarkContext3.HasPendingTrackObjects(); }
  748. RecyclerCollectionWrapper * collectionWrapper;
  749. HANDLE mainThreadHandle;
  750. void * stackBase;
  751. class SavedRegisterState
  752. {
  753. public:
  754. #if _M_IX86
  755. static const int NumRegistersToSave = 8;
  756. #elif _M_ARM
  757. static const int NumRegistersToSave = 13;
  758. #elif _M_ARM64
  759. static const int NumRegistersToSave = 27;
  760. #elif _M_AMD64
  761. static const int NumRegistersToSave = 16;
  762. #endif
  763. SavedRegisterState()
  764. {
  765. memset(registers, 0, sizeof(void*) * NumRegistersToSave);
  766. }
  767. void** GetRegisters()
  768. {
  769. return registers;
  770. }
  771. void* GetStackTop()
  772. {
  773. // By convention, our register-saving routine will always
  774. // save the stack pointer as the first item in the array
  775. return registers[0];
  776. }
  777. private:
  778. void* registers[NumRegistersToSave];
  779. };
  780. SavedRegisterState savedThreadContext;
  781. #if __has_feature(address_sanitizer)
  782. void* savedAsanFakeStack;
  783. #define SAVE_THREAD_ASAN_FAKE_STACK() \
  784. this->savedAsanFakeStack = __asan_get_current_fake_stack()
  785. #else
  786. #define SAVE_THREAD_ASAN_FAKE_STACK()
  787. #endif
  788. bool inDispose;
  789. #if DBG || defined RECYCLER_TRACE
  790. uint collectionCount;
  791. bool inResolveExternalWeakReferences;
  792. #endif
  793. bool allowDispose;
  794. bool inDisposeWrapper;
  795. bool needOOMRescan;
  796. bool hasDisposableObject;
  797. bool hasNativeGCHost;
  798. DWORD tickCountNextDispose;
  799. bool inExhaustiveCollection;
  800. bool hasExhaustiveCandidate;
  801. bool inCacheCleanupCollection;
  802. bool inDecommitNowCollection;
  803. bool isScriptActive;
  804. bool isInScript;
  805. bool isShuttingDown;
  806. bool scanPinnedObjectMap;
  807. bool hasScannedInitialImplicitRoots;
  808. bool hasPendingUnpinnedObject;
  809. bool hasPendingDeleteGuestArena;
  810. bool inEndMarkOnLowMemory;
  811. bool decommitOnFinish;
  812. bool enableScanInteriorPointers;
  813. bool enableScanImplicitRoots;
  814. bool disableCollectOnAllocationHeuristics;
  815. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  816. bool disableCollection;
  817. #endif
  818. #if ENABLE_PARTIAL_GC
  819. bool enablePartialCollect;
  820. bool inPartialCollectMode;
  821. #if ENABLE_CONCURRENT_GC
  822. bool hasBackgroundFinishPartial;
  823. bool partialConcurrentNextCollection;
  824. #endif
  825. #endif
  826. #ifdef RECYCLER_STRESS
  827. bool forcePartialScanStack;
  828. bool recyclerStress;
  829. #if ENABLE_CONCURRENT_GC
  830. bool recyclerBackgroundStress;
  831. bool recyclerConcurrentStress;
  832. bool recyclerConcurrentRepeatStress;
  833. #endif
  834. #if ENABLE_PARTIAL_GC
  835. bool recyclerPartialStress;
  836. #endif
  837. #endif
  838. #if DBG
  839. bool isExternalStackSkippingGC;
  840. #endif
  841. bool skipStack;
  842. #if ENABLE_CONCURRENT_GC
  843. #if DBG
  844. bool isConcurrentGCOnIdle;
  845. bool isFinishGCOnIdle;
  846. #endif
  847. bool queueTrackedObject;
  848. bool hasPendingConcurrentFindRoot;
  849. bool priorityBoost;
  850. bool disableConcurrent;
  851. bool enableConcurrentMark;
  852. bool enableParallelMark;
  853. bool enableConcurrentSweep;
  854. uint maxParallelism; // Max # of total threads to run in parallel
  855. byte backgroundRescanCount; // for ETW events and stats
  856. byte backgroundFinishMarkCount;
  857. size_t backgroundRescanRootBytes;
  858. HANDLE concurrentWorkReadyEvent; // main thread uses this event to tell concurrent threads that the work is ready
  859. HANDLE concurrentWorkDoneEvent; // concurrent threads use this event to tell main thread that the work allocated is done
  860. HANDLE concurrentThread;
  861. template <uint parallelId>
  862. void ParallelWorkFunc();
  863. RecyclerParallelThread parallelThread1;
  864. RecyclerParallelThread parallelThread2;
  865. #if DBG
  866. // Variable indicating if the concurrent thread has exited or not
  867. // If the concurrent thread hasn't started yet, this is set to true
  868. // Once the concurrent thread starts, it sets this to false,
  869. // and when the concurrent thread exits, it sets this to true.
  870. bool concurrentThreadExited;
  871. bool disableConcurrentThreadExitedCheck;
  872. bool isProcessingTrackedObjects;
  873. #endif
  874. uint tickCountStartConcurrent;
  875. bool isAborting;
  876. #endif
  877. #if DBG
  878. bool hasIncompleteDoCollect;
  879. // This is set to true when we begin a Rescan, and set to false when either:
  880. // (1) We finish the final in-thread Rescan and are about to Mark
  881. // (2) We do a conditional ResetWriteWatch and are about to Mark
  882. // When this flag is true, we should not be modifying existing mark-related state,
  883. // including markBits and rescanState.
  884. bool isProcessingRescan;
  885. #endif
  886. Js::ConfigFlagsTable& recyclerFlagsTable;
  887. RecyclerSweepManager recyclerSweepManagerInstance;
  888. RecyclerSweepManager * recyclerSweepManager;
  889. static const uint tickDiffToNextCollect = 300;
  890. #ifdef IDLE_DECOMMIT_ENABLED
  891. HANDLE concurrentIdleDecommitEvent;
  892. LONG needIdleDecommitSignal;
  893. #endif
  894. #if ENABLE_PARTIAL_GC
  895. SListBase<void *> clientTrackedObjectList;
  896. ArenaAllocator clientTrackedObjectAllocator;
  897. size_t partialUncollectedAllocBytes;
  898. // Dynamic Heuristics for partial GC
  899. size_t uncollectedNewPageCountPartialCollect;
  900. #endif
  901. uint tickCountNextCollection;
  902. uint tickCountNextFinishCollection;
  903. void (*outOfMemoryFunc)();
  904. #ifdef RECYCLER_TEST_SUPPORT
  905. BOOL (*checkFn)(char* addr, size_t size);
  906. #endif
  907. ExternalRootMarker externalRootMarker;
  908. void * externalRootMarkerContext;
  909. #ifdef PROFILE_EXEC
  910. Js::Profiler * profiler;
  911. Js::Profiler * backgroundProfiler;
  912. PageAllocator backgroundProfilerPageAllocator;
  913. DListBase<ArenaAllocator> backgroundProfilerArena;
  914. #endif
  915. // destruct autoHeap after backgroundProfilerPageAllocator;
  916. HeapInfoManager autoHeap;
// Selects the HeapInfo for an allocation with the given attribute bits.
// Currently every attribute combination maps to the default heap.
template <ObjectInfoBits attributes>
HeapInfo * GetHeapInfoForAllocation()
{
    return this->GetHeapInfo<attributes>();
}
template <ObjectInfoBits attributes>
HeapInfo * GetHeapInfo()
{
    return this->autoHeap.GetDefaultHeap();
}
// Non-template variant: likewise the default heap.
HeapInfo * GetHeapInfo()
{
    return this->autoHeap.GetDefaultHeap();
}
  931. #ifdef PROFILE_MEM
  932. RecyclerMemoryData * memoryData;
  933. #endif
  934. ThreadContextId mainThreadId;
  935. #if DBG
  936. uint heapBlockCount;
  937. bool disableThreadAccessCheck;
  938. #endif
  939. #if DBG || defined(RECYCLER_STATS)
  940. bool isForceSweeping;
  941. #endif
  942. #ifdef NTBUILD
  943. RecyclerWatsonTelemetryBlock localTelemetryBlock;
  944. RecyclerWatsonTelemetryBlock * telemetryBlock;
  945. #endif
  946. #ifdef ENABLE_BASIC_TELEMETRY
  947. private:
  948. RecyclerTelemetryInfo telemetryStats;
  949. GUID recyclerID;
  950. public:
  951. GUID& GetRecyclerID() { return this->recyclerID; }
  952. #endif
  953. public:
  954. bool GetIsInScript() { return this->isInScript; }
  955. bool GetIsScriptActive() { return this->isScriptActive; }
  956. private:
  957. #ifdef RECYCLER_STATS
  958. RecyclerCollectionStats collectionStats;
  959. void PrintHeapBlockStats(char16 const * name, HeapBlock::HeapBlockType type);
  960. void PrintHeapBlockMemoryStats(char16 const * name, HeapBlock::HeapBlockType type);
  961. void PrintCollectStats();
  962. void PrintHeuristicCollectionStats();
  963. void PrintMarkCollectionStats();
  964. void PrintBackgroundCollectionStats();
  965. void PrintMemoryStats();
  966. void PrintBackgroundCollectionStat(RecyclerCollectionStats::MarkData const& markData);
  967. #endif
  968. #ifdef RECYCLER_TRACE
  969. CollectionParam collectionParam;
  970. void PrintBlockStatus(HeapBucket * heapBucket, HeapBlock * heapBlock, char16 const * name);
  971. #endif
  972. #ifdef RECYCLER_MEMORY_VERIFY
  973. uint verifyPad;
  974. bool verifyEnabled;
  975. #endif
  976. #ifdef RECYCLER_DUMP_OBJECT_GRAPH
  977. friend class RecyclerObjectGraphDumper;
  978. RecyclerObjectGraphDumper * objectGraphDumper;
  979. public:
  980. bool dumpObjectOnceOnCollect;
  981. #endif
  982. public:
  983. Recycler(AllocationPolicyManager * policyManager, IdleDecommitPageAllocator * pageAllocator, void(*outOfMemoryFunc)(), Js::ConfigFlagsTable& flags, RecyclerTelemetryHostInterface* hostInterface);
  984. ~Recycler();
  985. void Initialize(const bool forceInThread, JsUtil::ThreadService *threadService, const bool deferThreadStartup = false
  986. #ifdef RECYCLER_PAGE_HEAP
  987. , PageHeapMode pageheapmode = PageHeapMode::PageHeapModeOff
  988. , bool captureAllocCallStack = false
  989. , bool captureFreeCallStack = false
  990. #endif
  991. );
  992. Js::ConfigFlagsTable& GetRecyclerFlagsTable() const { return this->recyclerFlagsTable; }
  993. void SetMemProtectMode();
  994. bool IsMemProtectMode();
  995. size_t GetUsedBytes();
  996. void LogMemProtectHeapSize(bool fromGC);
  997. char* Realloc(void* buffer, DECLSPEC_GUARD_OVERFLOW size_t existingBytes, DECLSPEC_GUARD_OVERFLOW size_t requestedBytes, bool truncate = true);
  998. #ifdef NTBUILD
  999. void SetTelemetryBlock(RecyclerWatsonTelemetryBlock * telemetryBlock) { this->telemetryBlock = telemetryBlock; }
  1000. #endif
  1001. uint GetPinnedObjectCount() const { return this->pinnedObjectMap.Count(); }
  1002. void Prime();
  1003. void* GetOwnerContext() { return (void*) this->collectionWrapper; }
  1004. bool NeedOOMRescan() const;
  1005. void SetNeedOOMRescan();
  1006. void ClearNeedOOMRescan();
  1007. BOOL RequestConcurrentWrapperCallback();
  1008. BOOL CollectionInProgress() const;
  1009. BOOL IsExiting() const;
  1010. BOOL IsSweeping() const;
  1011. #ifdef RECYCLER_PAGE_HEAP
  1012. inline bool ShouldCapturePageHeapFreeStack() const { return capturePageHeapFreeStack; }
  1013. #else
  1014. inline bool ShouldCapturePageHeapFreeStack() const { return false; }
  1015. #endif
  1016. void SetIsThreadBound();
  1017. void SetIsScriptActive(bool isScriptActive);
  1018. void SetIsInScript(bool isInScript);
  1019. bool HasNativeGCHost() const;
  1020. void SetHasNativeGCHost();
  1021. bool ShouldIdleCollectOnExit();
  1022. void ScheduleNextCollection();
  1023. BOOL IsShuttingDown() const { return this->isShuttingDown; }
  1024. #if ENABLE_CONCURRENT_GC
  1025. #if DBG
  1026. BOOL IsConcurrentMarkEnabled() const { return enableConcurrentMark; }
  1027. BOOL IsConcurrentSweepEnabled() const { return enableConcurrentSweep; }
  1028. #endif
  1029. template <CollectionFlags flags>
  1030. BOOL FinishConcurrent();
  1031. void ShutdownThread();
  1032. bool EnableConcurrent(JsUtil::ThreadService *threadService, bool startAllThreads);
  1033. void DisableConcurrent();
  1034. void StartQueueTrackedObject();
  1035. bool DoQueueTrackedObject() const;
  1036. void PrepareSweep();
  1037. #endif
  1038. template <CollectionFlags flags>
  1039. void SetupPostCollectionFlags();
  1040. void EnsureNotCollecting();
  1041. #if ENABLE_CONCURRENT_GC
  1042. bool QueueTrackedObject(FinalizableObject * trackableObject);
  1043. #endif
  1044. // FindRoots
  1045. void TryMarkNonInterior(void* candidate, void* parentReference = nullptr);
  1046. void TryMarkInterior(void *candidate, void* parentReference = nullptr);
  1047. bool InCacheCleanupCollection() { return inCacheCleanupCollection; }
  1048. void ClearCacheCleanupCollection() { Assert(inCacheCleanupCollection); inCacheCleanupCollection = false; }
  1049. // Finalizer support
  1050. void SetExternalRootMarker(ExternalRootMarker fn, void * context);
  1051. ArenaAllocator * CreateGuestArena(char16 const * name, void (*outOfMemoryFunc)());
  1052. void DeleteGuestArena(ArenaAllocator * arenaAllocator);
// Registers an externally owned arena whose memory will be scanned for GC
// roots; returns the list node so the caller can use the faster pointer
// overload of UnregisterExternalGuestArena.
ArenaData ** RegisterExternalGuestArena(ArenaData* guestArena)
{
    return externalGuestArenaList.PrependNode(&NoThrowHeapAllocator::Instance, guestArena);
}
void UnregisterExternalGuestArena(ArenaData* guestArena)
{
    externalGuestArenaList.Remove(&NoThrowHeapAllocator::Instance, guestArena);
    // Any time a root is removed during a GC, it indicates that an exhaustive
    // collection is likely going to have work to do so trigger an exhaustive
    // candidate GC to indicate this fact
    this->CollectNow<CollectExhaustiveCandidate>();
}
// Overload taking the node returned by RegisterExternalGuestArena.
void UnregisterExternalGuestArena(ArenaData** guestArena)
{
    externalGuestArenaList.RemoveElement(&NoThrowHeapAllocator::Instance, guestArena);
    // Any time a root is removed during a GC, it indicates that an exhaustive
    // collection is likely going to have work to do so trigger an exhaustive
    // candidate GC to indicate this fact
    this->CollectNow<CollectExhaustiveCandidate>();
}
  1073. #ifdef RECYCLER_TEST_SUPPORT
  1074. void SetCheckFn(BOOL(*checkFn)(char* addr, size_t size));
  1075. #endif
  1076. void SetCollectionWrapper(RecyclerCollectionWrapper * wrapper);
  1077. static size_t GetAlignedSize(size_t size) { return HeapInfo::GetAlignedSize(size); }
  1078. HeapInfo* GetDefaultHeapInfo() { return autoHeap.GetDefaultHeap(); }
  1079. template <CollectionFlags flags>
  1080. BOOL CollectNow();
  1081. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  1082. void DisplayMemStats();
  1083. #endif
  1084. void AddExternalMemoryUsage(size_t size);
  1085. bool NeedDispose() { return this->hasDisposableObject; }
  1086. template <CollectionFlags flags>
  1087. bool FinishDisposeObjectsNow();
  1088. bool RequestExternalMemoryAllocation(size_t size);
  1089. void ReportExternalMemoryFailure(size_t size);
  1090. void ReportExternalMemoryFree(size_t size);
  1091. // ExternalAllocFunc returns true when allocation succeeds
  1092. template <typename ExternalAllocFunc>
  1093. bool DoExternalAllocation(size_t size, ExternalAllocFunc externalAllocFunc);
#ifdef TRACE_OBJECT_LIFETIME
// Adds an AllocFunc##Trace variant that ORs TraceBit into the attributes for
// object-lifetime tracing builds.
#define DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes) \
inline char* AllocFunc##Trace(size_t size) \
{ \
return AllocWithAttributesFunc<(ObjectInfoBits)(attributes | TraceBit), /* nothrow = */ false>(size); \
}
#else
#define DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributeFunc, attributes)
#endif
// Declares a throwing AllocFunc plus a force-inlined AllocFunc##Inlined,
// both forwarding to the attribute-templated allocator, plus the optional
// Trace variant above.
#define DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocWithAttributesFunc, attributes) \
inline char * AllocFunc(DECLSPEC_GUARD_OVERFLOW size_t size) \
{ \
return AllocWithAttributesFunc<attributes, /* nothrow = */ false>(size); \
} \
__forceinline char * AllocFunc##Inlined(DECLSPEC_GUARD_OVERFLOW size_t size) \
{ \
return AllocWithAttributesFunc##Inlined<attributes, /* nothrow = */ false>(size); \
} \
DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes);
// Same pair but non-throwing (NoThrow##AllocFunc returns null on failure).
#define DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocWithAttributesFunc, attributes) \
inline char * NoThrow##AllocFunc(DECLSPEC_GUARD_OVERFLOW size_t size) \
{ \
return AllocWithAttributesFunc<attributes, /* nothrow = */ true>(size); \
} \
inline char * NoThrow##AllocFunc##Inlined(DECLSPEC_GUARD_OVERFLOW size_t size) \
{ \
return AllocWithAttributesFunc##Inlined<attributes, /* nothrow = */ true>(size); \
} \
DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes);
// Convenience wrappers: plain vs zero-initializing, throwing vs non-throwing.
#define DEFINE_RECYCLER_ALLOC(AllocFunc, attributes) DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocWithAttributes, attributes)
#define DEFINE_RECYCLER_ALLOC_ZERO(AllocFunc, attributes) DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocZeroWithAttributes, attributes)
#define DEFINE_RECYCLER_NOTHROW_ALLOC(AllocFunc, attributes) DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocWithAttributes, attributes)
#define DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocFunc, attributes) DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocZeroWithAttributes, attributes)
#if GLOBAL_ENABLE_WRITE_BARRIER
// With the global software write barrier enabled, the standard entry points
// fold the barrier bit into their attribute sets.
DEFINE_RECYCLER_ALLOC(Alloc, WithBarrierBit);
DEFINE_RECYCLER_ALLOC_ZERO(AllocZero, WithBarrierBit);
DEFINE_RECYCLER_ALLOC(AllocFinalized, FinalizableWithBarrierObjectBits);
DEFINE_RECYCLER_ALLOC(AllocTracked, ClientTrackableObjectWithBarrierBits);
// NOTE(review): this uses ClientTrackableObjectWithBarrierBits, but the
// non-barrier branch below uses ClientFinalizableObjectBits and the explicit
// WithBarrier variant further down uses ClientFinalizableObjectWithBarrierBits.
// Confirm the asymmetry is intentional and not a copy/paste slip.
DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTracked, ClientTrackableObjectWithBarrierBits);
#else
DEFINE_RECYCLER_ALLOC(Alloc, NoBit);
DEFINE_RECYCLER_ALLOC_ZERO(AllocZero, NoBit);
DEFINE_RECYCLER_ALLOC(AllocFinalized, FinalizableObjectBits);
DEFINE_RECYCLER_ALLOC(AllocTracked, ClientTrackableObjectBits);
DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTracked, ClientFinalizableObjectBits);
#endif
#ifdef RECYCLER_WRITE_BARRIER_ALLOC
// Explicitly-named *WithBarrier entry points, available independently of the
// GLOBAL_ENABLE_WRITE_BARRIER setting above.
DEFINE_RECYCLER_ALLOC(AllocWithBarrier, WithBarrierBit);
DEFINE_RECYCLER_ALLOC_ZERO(AllocZeroWithBarrier, WithBarrierBit);
DEFINE_RECYCLER_ALLOC(AllocFinalizedWithBarrier, FinalizableWithBarrierObjectBits);
DEFINE_RECYCLER_ALLOC(AllocTrackedWithBarrier, ClientTrackableObjectWithBarrierBits);
DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTrackedWithBarrier, ClientFinalizableObjectWithBarrierBits);
#endif
// Leaf (LeafBit) flavors.
DEFINE_RECYCLER_ALLOC(AllocLeaf, LeafBit);
DEFINE_RECYCLER_ALLOC(AllocFinalizedLeaf, FinalizableLeafBits);
DEFINE_RECYCLER_ALLOC(AllocTrackedLeaf, ClientTrackableLeafBits);
DEFINE_RECYCLER_ALLOC_ZERO(AllocLeafZero, LeafBit);
DEFINE_RECYCLER_ALLOC_ZERO(AllocZeroTrackedLeaf, ClientTrackableLeafBits);
// Implicit-root flavors: only nothrow, zero-initialized forms are exposed.
DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocImplicitRootLeaf, ImplicitRootLeafBits);
DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocImplicitRoot, ImplicitRootBit);
// Allocates 'size' bytes tagged with an enum-class attribute set.
// Requires at least one bit of EnumClassMask to be present; throws on OOM
// (nothrow = false).
template <ObjectInfoBits enumClass>
char * AllocEnumClass(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    Assert((enumClass & EnumClassMask) != 0);
    //Assert((enumClass & ~EnumClassMask & ~WithBarrierBit) == 0);
    return AllocWithAttributes<(ObjectInfoBits)(enumClass), /* nothrow = */ false>(size);
}
  1161. template <ObjectInfoBits infoBits>
  1162. char * AllocWithInfoBits(DECLSPEC_GUARD_OVERFLOW size_t size)
  1163. {
  1164. return AllocWithAttributes<infoBits, /* nothrow = */ false>(size);
  1165. }
// Allocation entry point for recycler-visited host objects.
// Unlike AllocWithInfoBits this is nothrow: callers must check for
// allocation failure rather than relying on an OOM throw.
template <ObjectInfoBits infoBits>
char * AllocVisitedHost(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    return AllocWithAttributes<infoBits, /* nothrow = */ true>(size);
}
  1171. template<typename T>
  1172. RecyclerWeakReference<T>* CreateWeakReferenceHandle(T* pStrongReference);
  1173. #if ENABLE_WEAK_REFERENCE_REGIONS
  1174. template<typename T>
  1175. RecyclerWeakReferenceRegionItem<T>* CreateWeakReferenceRegion(size_t count);
  1176. #endif
  1177. uint GetWeakReferenceCleanupId() const { return weakReferenceCleanupId; }
  1178. template<typename T>
  1179. bool FindOrCreateWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **ppWeakRef);
  1180. template<typename T>
  1181. bool TryGetWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **weakReference);
// Returns, as a raw address, the per-bucket allocator serving small
// allocations of 'sizeCat' in the heap selected by 'attributes'.
// 'sizeCat' must already be an aligned small-object size category.
// (Companion of GetEndAddressOffset / GetFreeObjectListOffset and the
// GetNormalHeapBlockAllocatorInfoForNativeAllocation helpers below.)
template <ObjectInfoBits attributes>
char* GetAddressOfAllocator(size_t sizeCat)
{
    Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
    // Only the block-type bits of 'attributes' select the bucket flavor.
    return (char*)this->GetHeapInfo<attributes>()->template GetBucket<(ObjectInfoBits)(attributes & GetBlockTypeBitMask)>(sizeCat).GetAllocator();
}
// Offset of the end-address field within the allocator that serves
// 'sizeCat' (see GetAddressOfAllocator for how the bucket is selected).
template <ObjectInfoBits attributes>
uint32 GetEndAddressOffset(size_t sizeCat)
{
    Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
    return this->GetHeapInfo<attributes>()->template GetBucket<(ObjectInfoBits)(attributes & GetBlockTypeBitMask)>(sizeCat).GetAllocator()->GetEndAddressOffset();
}
// Offset of the free-object-list field within the allocator that serves
// 'sizeCat' (see GetAddressOfAllocator for how the bucket is selected).
template <ObjectInfoBits attributes>
uint32 GetFreeObjectListOffset(size_t sizeCat)
{
    Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
    return this->GetHeapInfo<attributes>()->template GetBucket<(ObjectInfoBits)(attributes & GetBlockTypeBitMask)>(sizeCat).GetAllocator()->GetFreeObjectListOffset();
}
  1200. void GetNormalHeapBlockAllocatorInfoForNativeAllocation(size_t sizeCat, void*& allocatorAddress, uint32& endAddressOffset, uint32& freeListOffset, bool allowBumpAllocation, bool isOOPJIT);
  1201. static void GetNormalHeapBlockAllocatorInfoForNativeAllocation(void* recyclerAddr, size_t sizeCat, void*& allocatorAddress, uint32& endAddressOffset, uint32& freeListOffset, bool allowBumpAllocation, bool isOOPJIT);
  1202. bool AllowNativeCodeBumpAllocation();
  1203. static void TrackNativeAllocatedMemoryBlock(Recycler * recycler, void * memBlock, size_t sizeCat);
// Recycler memory is garbage collected and must never be freed through this
// generic interface; asserts unconditionally. Use ExplicitFreeLeaf /
// ExplicitFreeNonLeaf for the supported explicit-free paths.
void Free(void* buffer, size_t size)
{
    Assert(false);
}
  1208. bool ExplicitFreeLeaf(void* buffer, size_t size);
  1209. bool ExplicitFreeNonLeaf(void* buffer, size_t size);
  1210. template <ObjectInfoBits attributes>
  1211. bool ExplicitFreeInternalWrapper(void* buffer, size_t allocSize);
  1212. template <ObjectInfoBits attributes, typename TBlockAttributes>
  1213. bool ExplicitFreeInternal(void* buffer, size_t size, size_t sizeCat);
  1214. size_t GetAllocSize(size_t size);
  1215. template <typename TBlockAttributes>
  1216. void SetExplicitFreeBitOnSmallBlock(HeapBlock* heapBlock, size_t sizeCat, void* buffer, ObjectInfoBits attributes);
  1217. char* HeapAllocR(HeapInfo* eHeap, DECLSPEC_GUARD_OVERFLOW size_t size)
  1218. {
  1219. return RealAlloc<LeafBit, /* nothrow = */ false>(eHeap, size);
  1220. }
  1221. void HeapFree(HeapInfo* eHeap,void* candidate);
  1222. void EnumerateObjects(ObjectInfoBits infoBits, void (*CallBackFunction)(void * address, size_t size));
  1223. void RootAddRef(void* obj, uint *count = nullptr);
  1224. void RootRelease(void* obj, uint *count = nullptr);
  1225. template <ObjectInfoBits attributes, bool nothrow>
  1226. inline char* RealAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size);
  1227. template <ObjectInfoBits attributes, bool isSmallAlloc, bool nothrow>
  1228. inline char* RealAllocFromBucket(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size);
  1229. void EnterIdleDecommit();
  1230. void LeaveIdleDecommit();
  1231. void DisposeObjects();
  1232. BOOL IsValidObject(void* candidate, size_t minimumSize = 0);
  1233. #if DBG
  1234. void SetDisableThreadAccessCheck();
  1235. void SetDisableConcurrentThreadExitedCheck();
  1236. void CheckAllocExternalMark() const;
  1237. BOOL IsFreeObject(void * candidate);
  1238. BOOL IsReentrantState() const;
  1239. #endif
  1240. #if DBG_DUMP
  1241. void PrintMarkStack();
  1242. #endif
  1243. #ifdef PROFILE_EXEC
  1244. Js::Profiler * GetProfiler() const { return this->profiler; }
  1245. ArenaAllocator * AddBackgroundProfilerArena();
  1246. void ReleaseBackgroundProfilerArena(ArenaAllocator * arena);
  1247. void SetProfiler(Js::Profiler * profiler, Js::Profiler * backgroundProfiler);
  1248. #endif
  1249. #ifdef RECYCLER_MEMORY_VERIFY
  1250. BOOL VerifyEnabled() const { return verifyEnabled; }
  1251. uint GetVerifyPad() const { return verifyPad; }
  1252. void Verify(Js::Phase phase);
  1253. static void VerifyCheck(BOOL cond, char16 const * msg, void * address, void * corruptedAddress);
  1254. static void VerifyCheckFill(void * address, size_t size);
  1255. void FillCheckPad(void * address, size_t size, size_t alignedAllocSize, bool objectAlreadyInitialized);
  1256. void FillCheckPad(void * address, size_t size, size_t alignedAllocSize)
  1257. {
  1258. FillCheckPad(address, size, alignedAllocSize, false);
  1259. }
  1260. static void FillPadNoCheck(void * address, size_t size, size_t alignedAllocSize, bool objectAlreadyInitialized);
  1261. void VerifyCheckPad(void * address, size_t size);
  1262. void VerifyCheckPadExplicitFreeList(void * address, size_t size);
  1263. static const byte VerifyMemFill = 0xCA;
  1264. #endif
  1265. #ifdef RECYCLER_ZERO_MEM_CHECK
  1266. void VerifyZeroFill(void * address, size_t size);
  1267. void VerifyLargeAllocZeroFill(void * address, size_t size, ObjectInfoBits attributes);
  1268. #endif
  1269. #ifdef RECYCLER_DUMP_OBJECT_GRAPH
  1270. bool DumpObjectGraph(RecyclerObjectGraphDumper::Param * param = nullptr);
  1271. void DumpObjectDescription(void *object);
  1272. #endif
  1273. #ifdef LEAK_REPORT
  1274. void ReportLeaks();
  1275. void ReportLeaksOnProcessDetach();
  1276. #endif
  1277. #ifdef CHECK_MEMORY_LEAK
  1278. void CheckLeaks(char16 const * header);
  1279. void CheckLeaksOnProcessDetach(char16 const * header);
  1280. #endif
  1281. #ifdef RECYCLER_TRACE
  1282. void SetDomCollect(bool isDomCollect) { collectionParam.domCollect = isDomCollect; }
  1283. void CaptureCollectionParam(CollectionFlags flags, bool repeat = false);
  1284. #endif
  1285. private:
  1286. // RecyclerRootPtr has implicit conversion to pointers, prevent it to be
  1287. // passed to RootAddRef/RootRelease directly
  1288. template <typename T>
  1289. void RootAddRef(RecyclerRootPtr<T>& ptr, uint *count = nullptr);
  1290. template <typename T>
  1291. void RootRelease(RecyclerRootPtr<T>& ptr, uint *count = nullptr);
  1292. template <CollectionFlags flags>
  1293. BOOL CollectInternal();
  1294. template <CollectionFlags flags>
  1295. BOOL Collect();
  1296. template <CollectionFlags flags>
  1297. BOOL CollectWithHeuristic();
  1298. template <CollectionFlags flags>
  1299. BOOL CollectWithExhaustiveCandidate();
  1300. template <CollectionFlags flags>
  1301. BOOL GetPartialFlag();
  1302. bool NeedExhaustiveRepeatCollect() const;
  1303. #if DBG
  1304. bool ExpectStackSkip() const;
  1305. #endif
  1306. static size_t const InvalidScanRootBytes = (size_t)-1;
  1307. // Small Allocator
  1308. template <typename SmallHeapBlockAllocatorType>
  1309. void AddSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
  1310. template <typename SmallHeapBlockAllocatorType>
  1311. void RemoveSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
  1312. template <ObjectInfoBits attributes, typename SmallHeapBlockAllocatorType>
  1313. char * SmallAllocatorAlloc(SmallHeapBlockAllocatorType * allocator, size_t sizeCat, size_t size);
  1314. // Allocation
  1315. template <ObjectInfoBits attributes, bool nothrow>
  1316. inline char * AllocWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size);
  1317. template <ObjectInfoBits attributes, bool nothrow>
  1318. char * AllocWithAttributes(DECLSPEC_GUARD_OVERFLOW size_t size)
  1319. {
  1320. return AllocWithAttributesInlined<attributes, nothrow>(size);
  1321. }
  1322. template <ObjectInfoBits attributes, bool nothrow>
  1323. inline char* AllocZeroWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size);
  1324. template <ObjectInfoBits attributes, bool nothrow>
  1325. char* AllocZeroWithAttributes(DECLSPEC_GUARD_OVERFLOW size_t size)
  1326. {
  1327. return AllocZeroWithAttributesInlined<attributes, nothrow>(size);
  1328. }
  1329. char* AllocWeakReferenceEntry(DECLSPEC_GUARD_OVERFLOW size_t size)
  1330. {
  1331. return AllocWithAttributes<WeakReferenceEntryBits, /* nothrow = */ false>(size);
  1332. }
// True when disposable objects are pending and the dispose throttle
// deadline (tickCountNextDispose) has passed.
// NOTE(review): GetTickCount wraps every ~49.7 days; a plain '>' compare
// misorders across the wrap. Confirm a missed/early dispose at wraparound
// is acceptable here.
bool NeedDisposeTimed()
{
    DWORD ticks = ::GetTickCount();
    return (ticks > tickCountNextDispose && this->hasDisposableObject);
}
  1338. char* TryLargeAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size, ObjectInfoBits attributes, bool nothrow);
  1339. template <bool nothrow>
  1340. char* LargeAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size, ObjectInfoBits attributes);
  1341. void OutOfMemory();
  1342. // Collection
  1343. BOOL DoCollect(CollectionFlags flags);
  1344. BOOL DoCollectWrapped(CollectionFlags flags);
  1345. BOOL CollectOnAllocatorThread();
  1346. #if DBG
  1347. void ResetThreadId();
  1348. #endif
  1349. template <bool background>
  1350. size_t ScanPinnedObjects();
  1351. size_t ScanStack();
  1352. size_t ScanArena(ArenaData * alloc, bool background);
  1353. void ScanImplicitRoots();
  1354. void ScanInitialImplicitRoots();
  1355. void ScanNewImplicitRoots();
  1356. size_t FindRoots();
  1357. size_t TryMarkArenaMemoryBlockList(ArenaMemoryBlock * memoryBlocks);
  1358. size_t TryMarkBigBlockList(BigBlock * memoryBlocks);
  1359. #if ENABLE_CONCURRENT_GC
  1360. #if FALSE // REVIEW: remove this code since not using
  1361. size_t TryMarkBigBlockListWithWriteWatch(BigBlock * memoryBlocks);
  1362. #endif
  1363. #endif
  1364. // Mark
  1365. void ResetMarks(ResetMarkFlags flags);
  1366. void Mark();
  1367. bool EndMark();
  1368. bool EndMarkCheckOOMRescan();
  1369. void EndMarkOnLowMemory();
  1370. #if ENABLE_CONCURRENT_GC
  1371. void DoParallelMark();
  1372. void DoBackgroundParallelMark();
  1373. #endif
  1374. size_t RootMark(CollectionState markState);
  1375. void ProcessMark(bool background);
  1376. void ProcessParallelMark(bool background, MarkContext * markContext);
  1377. template <bool parallel, bool interior>
  1378. void ProcessMarkContext(MarkContext * markContext);
public:
// Fast mark-bit query against the heap block map; meaningful only while
// mark state from the current/last collection is valid.
bool IsObjectMarked(void* candidate) { return this->heapBlockMap.IsMarked(candidate); }
#ifdef RECYCLER_STRESS
bool StressCollectNow();
#endif
  1384. private:
  1385. HeapBlock* FindHeapBlock(void * candidate);
  1386. struct FindBlockCache
  1387. {
  1388. FindBlockCache():
  1389. heapBlock(nullptr),
  1390. candidate(nullptr)
  1391. {
  1392. }
  1393. HeapBlock* heapBlock;
  1394. void* candidate;
  1395. } blockCache;
  1396. inline void ScanObjectInline(void ** obj, size_t byteCount);
  1397. inline void ScanObjectInlineInterior(void ** obj, size_t byteCount);
  1398. template <bool doSpecialMark, bool forceInterior = false>
  1399. inline void ScanMemoryInline(void ** obj, size_t byteCount
  1400. ADDRESS_SANITIZER_APPEND(RecyclerScanMemoryType scanMemoryType = RecyclerScanMemoryType::General));
  1401. template <bool doSpecialMark>
  1402. void ScanMemory(void ** obj, size_t byteCount) { if (byteCount != 0) { ScanMemoryInline<doSpecialMark>(obj, byteCount); } }
  1403. bool AddMark(void * candidate, size_t byteCount) throw();
  1404. #ifdef RECYCLER_VISITED_HOST
  1405. bool AddPreciselyTracedMark(IRecyclerVisitedObject * candidate) throw();
  1406. #endif
  1407. // Sweep
  1408. #if ENABLE_PARTIAL_GC
  1409. bool Sweep(size_t rescanRootBytes = (size_t)-1, bool concurrent = false, bool adjustPartialHeuristics = false);
  1410. #else
  1411. bool Sweep(bool concurrent = false);
  1412. #endif
  1413. void SweepWeakReference();
  1414. void SweepHeap(bool concurrent, RecyclerSweepManager& recyclerSweepManager);
  1415. void FinishSweep(RecyclerSweepManager& recyclerSweepManager);
  1416. #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
  1417. void DoTwoPassConcurrentSweepPreCheck();
  1418. void FinishSweepPrep();
  1419. void FinishConcurrentSweepPass1();
  1420. void FinishConcurrentSweep();
  1421. #endif
  1422. bool FinishDisposeObjects();
  1423. template <CollectionFlags flags>
  1424. bool FinishDisposeObjectsWrapped();
  1425. // end collection
  1426. void FinishCollection();
  1427. void FinishCollection(bool needConcurrentSweep);
  1428. void EndCollection();
  1429. void ResetCollectionState();
  1430. void ResetMarkCollectionState();
  1431. void ResetHeuristicCounters();
  1432. void ResetPartialHeuristicCounters();
  1433. BOOL IsMarkState() const;
  1434. BOOL IsFindRootsState() const;
  1435. BOOL IsInThreadFindRootsState() const;
  1436. template <Js::Phase phase>
  1437. void CollectionBegin();
  1438. template <Js::Phase phase>
  1439. void CollectionEnd();
  1440. #if ENABLE_PARTIAL_GC
  1441. void ProcessClientTrackedObjects();
  1442. bool PartialCollect(bool concurrent);
  1443. void FinishPartialCollect(RecyclerSweepManager * recyclerSweep = nullptr);
  1444. void ClearPartialCollect();
  1445. #if ENABLE_CONCURRENT_GC
  1446. void BackgroundFinishPartialCollect(RecyclerSweepManager * recyclerSweep);
  1447. #endif
  1448. #endif
  1449. size_t RescanMark(DWORD waitTime);
  1450. size_t FinishMark(DWORD waitTime);
  1451. size_t FinishMarkRescan(bool background);
  1452. #if ENABLE_CONCURRENT_GC
  1453. void ProcessTrackedObjects();
  1454. #endif
// Nonzero when the collector is inside one of the callback phases
// (post-sweep redeferral / post-collection) during which allocation is
// permitted.
BOOL IsAllocatableCallbackState()
{
    return (collectionState & (Collection_PostSweepRedeferralCallback | Collection_PostCollectionCallback));
}
  1459. #if ENABLE_CONCURRENT_GC
  1460. // Concurrent GC
  1461. BOOL IsConcurrentEnabled() const { return this->enableConcurrentMark || this->enableParallelMark || this->enableConcurrentSweep; }
  1462. BOOL IsConcurrentMarkState() const;
  1463. BOOL IsConcurrentMarkExecutingState() const;
  1464. BOOL IsConcurrentResetMarksState() const;
  1465. BOOL IsConcurrentFindRootState() const;
  1466. BOOL IsConcurrentExecutingState() const;
  1467. BOOL IsConcurrentSweepExecutingState() const;
  1468. BOOL IsConcurrentSweepSetupState() const;
  1469. BOOL IsConcurrentSweepState() const;
  1470. BOOL IsConcurrentState() const;
// True when every bit of Collection_ConcurrentSweep is set in the current
// collection state. Note this is an exact-mask test, not an any-bit test.
BOOL InConcurrentSweep()
{
    return ((collectionState & Collection_ConcurrentSweep) == Collection_ConcurrentSweep);
}
  1475. #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
// Per-collection flag: whether allocations may continue to be satisfied
// while this collection's concurrent sweep is in progress.
bool AllowAllocationsDuringConcurrentSweep()
{
    return this->allowAllocationsDuringConcurrentSweepForCollection;
}
  1480. #endif
  1481. #if DBG
  1482. BOOL IsConcurrentFinishedState() const;
  1483. #endif // DBG
  1484. bool InitializeConcurrent(JsUtil::ThreadService* threadService);
  1485. bool AbortConcurrent(bool restoreState);
  1486. void FinalizeConcurrent(bool restoreState);
  1487. static unsigned int CALLBACK StaticThreadProc(LPVOID lpParameter);
  1488. static int ExceptFilter(LPEXCEPTION_POINTERS pEP);
  1489. DWORD ThreadProc();
  1490. void DoBackgroundWork(bool forceForeground = false);
  1491. static void CALLBACK StaticBackgroundWorkCallback(void * callbackData);
  1492. BOOL CollectOnConcurrentThread();
  1493. bool StartConcurrent(CollectionState const state);
  1494. BOOL StartBackgroundMarkCollect();
  1495. BOOL StartSynchronousBackgroundMark();
  1496. BOOL StartAsynchronousBackgroundMark();
  1497. BOOL StartBackgroundMark(bool foregroundResetMark, bool foregroundFindRoots);
  1498. BOOL StartConcurrentSweepCollect();
  1499. template <CollectionFlags flags>
  1500. BOOL TryFinishConcurrentCollect();
  1501. BOOL WaitForConcurrentThread(DWORD waitTime, RecyclerWaitReason caller = RecyclerWaitReason::Other);
  1502. void FlushBackgroundPages();
  1503. BOOL FinishConcurrentCollect(CollectionFlags flags);
  1504. void FinishTransferSwept(CollectionFlags flags);
  1505. BOOL FinishConcurrentCollectWrapped(CollectionFlags flags);
  1506. void BackgroundMark();
  1507. void BackgroundMarkWeakRefs();
  1508. void BackgroundResetMarks();
  1509. void PrepareBackgroundFindRoots();
  1510. void RevertPrepareBackgroundFindRoots();
  1511. size_t BackgroundFindRoots();
  1512. size_t BackgroundScanStack();
  1513. size_t BackgroundRepeatMark();
  1514. size_t BackgroundRescan(RescanFlags rescanFlags);
  1515. void BackgroundResetWriteWatchAll();
  1516. size_t BackgroundFinishMark();
  1517. char* GetScriptThreadStackTop();
  1518. void SweepPendingObjects(RecyclerSweepManager& recyclerSweepManager);
  1519. void ConcurrentTransferSweptObjects(RecyclerSweepManager& recyclerSweepManager);
  1520. #if ENABLE_PARTIAL_GC
  1521. void ConcurrentPartialTransferSweptObjects(RecyclerSweepManager& recyclerSweepManager);
  1522. #endif // ENABLE_PARTIAL_GC
  1523. #endif // ENABLE_CONCURRENT_GC
  1524. bool ForceSweepObject();
  1525. void NotifyFree(__in char * address, size_t size);
  1526. template <typename T>
  1527. void NotifyFree(T * heapBlock);
  1528. void CleanupPendingUnroot();
  1529. #ifdef ENABLE_JS_ETW
  1530. ULONG EventWriteFreeMemoryBlock(HeapBlock* heapBlock);
  1531. void FlushFreeRecord();
  1532. void AppendFreeMemoryETWRecord(__in char *address, size_t size);
  1533. static const uint BulkFreeMemoryCount = 400;
  1534. uint bulkFreeMemoryWrittenCount;
  1535. struct ETWFreeRecord {
  1536. char* memoryAddress;
  1537. uint32 objectSize;
  1538. };
  1539. ETWFreeRecord etwFreeRecords[BulkFreeMemoryCount];
  1540. #endif
  1541. template <ObjectInfoBits attributes>
  1542. bool IntegrateBlock(char * blockAddress, PageSegment * segment, size_t allocSize, size_t objectSize);
  1543. template <class TBlockAttributes> friend class SmallHeapBlockT;
  1544. template <class TBlockAttributes> friend class SmallNormalHeapBlockT;
  1545. template <class TBlockAttributes> friend class SmallLeafHeapBlockT;
  1546. template <class TBlockAttributes> friend class SmallFinalizableHeapBlockT;
  1547. #ifdef RECYCLER_VISITED_HOST
  1548. template <class TBlockAttributes> friend class SmallRecyclerVisitedHostHeapBlockT;
  1549. #endif
  1550. friend class LargeHeapBlock;
  1551. friend class HeapInfo;
  1552. friend class HeapInfoManager;
  1553. friend class LargeHeapBucket;
  1554. friend class ThreadContext;
  1555. template <typename TBlockType>
  1556. friend class HeapBucketT;
  1557. template <typename TBlockType>
  1558. friend class SmallNormalHeapBucketBase;
  1559. template <typename T, ObjectInfoBits attributes>
  1560. friend class RecyclerFastAllocator;
  1561. #ifdef RECYCLER_TRACE
  1562. void PrintCollectTrace(Js::Phase phase, bool finish = false, bool noConcurrentWork = false);
  1563. #endif
  1564. #ifdef RECYCLER_VERIFY_MARK
  1565. void VerifyMark();
  1566. void VerifyMarkRoots();
  1567. void VerifyMarkStack();
  1568. void VerifyMarkArena(ArenaData * arena);
  1569. void VerifyMarkBigBlockList(BigBlock * memoryBlocks);
  1570. void VerifyMarkArenaMemoryBlockList(ArenaMemoryBlock * memoryBlocks);
  1571. bool VerifyMark(void * objectAddress, void * target);
  1572. bool VerifyMark(void * target);
  1573. #endif
  1574. #if DBG_DUMP
  1575. bool forceTraceMark;
  1576. #endif
  1577. bool isHeapEnumInProgress;
  1578. #if DBG
  1579. bool allowAllocationDuringHeapEnum;
  1580. bool allowAllocationDuringRenentrance;
  1581. #ifdef ENABLE_PROJECTION
  1582. bool isInRefCountTrackingForProjection;
  1583. #endif
  1584. #endif
  1585. // There are two scenarios we allow limited allocation but disallow GC during those allocations:
  1586. // in heapenum when we allocate PropertyRecord, and
  1587. // in projection ExternalMark allowing allocating VarToDispEx. This is the common flag
  1588. // while we have debug only flag for each of the two scenarios.
  1589. bool isCollectionDisabled;
  1590. #ifdef ENABLE_BASIC_TELEMETRY
  1591. RecyclerTelemetryInfo& GetRecyclerTelemetryInfo() { return this->telemetryStats; }
  1592. #endif
  1593. #ifdef TRACK_ALLOC
  1594. public:
  1595. Recycler * TrackAllocInfo(TrackAllocData const& data);
  1596. void ClearTrackAllocInfo(TrackAllocData* data = NULL);
  1597. #ifdef PROFILE_RECYCLER_ALLOC
  1598. void PrintAllocStats();
  1599. private:
  1600. static bool DoProfileAllocTracker();
  1601. void InitializeProfileAllocTracker();
  1602. void TrackUnallocated(__in char* address, __in char *endAddress, size_t sizeCat);
  1603. void TrackAllocCore(void * object, size_t size, const TrackAllocData& trackAllocData, bool traceLifetime = false);
  1604. void* TrackAlloc(void * object, size_t size, const TrackAllocData& trackAllocData, bool traceLifetime = false);
  1605. void TrackIntegrate(__in_ecount(blockSize) char * blockAddress, size_t blockSize, size_t allocSize, size_t objectSize, const TrackAllocData& trackAllocData);
  1606. BOOL TrackFree(const char* address, size_t size);
  1607. void TrackAllocWeakRef(RecyclerWeakReferenceBase * weakRef);
  1608. void TrackFreeWeakRef(RecyclerWeakReferenceBase * weakRef);
  1609. struct TrackerData
  1610. {
  1611. TrackerData(type_info const * typeinfo, bool isArray) : typeinfo(typeinfo), isArray(isArray),
  1612. ItemSize(0), ItemCount(0), AllocCount(0), ReqSize(0), AllocSize(0), FreeCount(0), FreeSize(0), TraceLifetime(false)
  1613. #ifdef PERF_COUNTERS
  1614. , counter(PerfCounter::RecyclerTrackerCounterSet::GetPerfCounter(typeinfo, isArray))
  1615. , sizeCounter(PerfCounter::RecyclerTrackerCounterSet::GetPerfSizeCounter(typeinfo, isArray))
  1616. #endif
  1617. {
  1618. }
  1619. type_info const * typeinfo;
  1620. bool isArray;
  1621. #ifdef TRACE_OBJECT_LIFETIME
  1622. bool TraceLifetime;
  1623. #endif
  1624. size_t ItemSize;
  1625. size_t ItemCount;
  1626. int AllocCount;
  1627. int64 ReqSize;
  1628. int64 AllocSize;
  1629. int FreeCount;
  1630. int64 FreeSize;
  1631. #ifdef PERF_COUNTERS
  1632. PerfCounter::Counter& counter;
  1633. PerfCounter::Counter& sizeCounter;
  1634. #endif
  1635. static TrackerData EmptyData;
  1636. static TrackerData ExplicitFreeListObjectData;
  1637. };
  1638. TrackerData * GetTrackerData(void * address);
  1639. void SetTrackerData(void * address, TrackerData * data);
// Pairs the scalar and array TrackerData records for a single tracked type.
struct TrackerItem
{
    TrackerItem(type_info const * typeinfo) : instanceData(typeinfo, false), arrayData(typeinfo, true)
#ifdef PERF_COUNTERS
        , weakRefCounter(PerfCounter::RecyclerTrackerCounterSet::GetWeakRefPerfCounter(typeinfo))
#endif
    {}
    TrackerData instanceData;   // stats for non-array allocations of the type
    TrackerData arrayData;      // stats for array allocations of the type
#ifdef PERF_COUNTERS
    PerfCounter::Counter& weakRefCounter;
#endif
};
  1653. typedef JsUtil::BaseDictionary<type_info const *, TrackerItem *, NoCheckHeapAllocator, PrimeSizePolicy, DefaultComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> TypeInfotoTrackerItemMap;
  1654. typedef JsUtil::BaseDictionary<void *, TrackerData *, NoCheckHeapAllocator, PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> PointerToTrackerDataMap;
  1655. TypeInfotoTrackerItemMap * trackerDictionary;
  1656. CriticalSection * trackerCriticalSection;
  1657. #endif
  1658. TrackAllocData nextAllocData;
  1659. #endif
  1660. public:
  1661. // Enumeration
// RAII helper that configures the recycler for a non-collecting mark
// (optionally for heap enumeration). Captures the current collection state
// (and stats, under RECYCLER_STATS); presumably restored by the destructor --
// definitions live elsewhere.
class AutoSetupRecyclerForNonCollectingMark
{
private:
    Recycler& m_recycler;
    bool m_setupDone;                           // whether setup ran and must be undone
    CollectionState m_previousCollectionState;  // state captured at construction
#ifdef RECYCLER_STATS
    RecyclerCollectionStats m_previousCollectionStats;
#endif
public:
    AutoSetupRecyclerForNonCollectingMark(Recycler& recycler, bool setupForHeapEnumeration = false);
    ~AutoSetupRecyclerForNonCollectingMark();
    void DoCommonSetup();
    void SetupForHeapEnumeration();
};
  1677. friend class RecyclerHeapObjectInfo;
  1678. bool FindImplicitRootObject(void* candidate, RecyclerHeapObjectInfo& heapObject);
  1679. bool FindHeapObject(void* candidate, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject);
  1680. bool FindHeapObjectWithClearedAllocators(void* candidate, RecyclerHeapObjectInfo& heapObject);
  1681. bool IsCollectionDisabled() const { return isCollectionDisabled; }
  1682. bool IsHeapEnumInProgress() const { Assert(isHeapEnumInProgress ? isCollectionDisabled : true); return isHeapEnumInProgress; }
  1683. #if DBG
  1684. // There are limited cases that we have to allow allocation during heap enumeration. GC is explicitly
  1685. // disabled during heap enumeration for these limited cases. (See DefaultRecyclerCollectionWrapper)
  1686. // The only case of allocation right now is allocating property record for string based type handler
  1687. // so we can use the propertyId as the relation Id.
  1688. // Allocation during enumeration is still frown upon and should still be avoid if possible.
  1689. bool AllowAllocationDuringHeapEnum() const { return allowAllocationDuringHeapEnum; }
// Scoped toggle (via AutoBooleanToggle) of allowAllocationDuringHeapEnum,
// for the limited allocations permitted during heap enumeration (see the
// comment above).
class AutoAllowAllocationDuringHeapEnum : public AutoBooleanToggle
{
public:
    AutoAllowAllocationDuringHeapEnum(Recycler * recycler) : AutoBooleanToggle(&recycler->allowAllocationDuringHeapEnum) {};
};
  1695. #ifdef ENABLE_PROJECTION
  1696. bool IsInRefCountTrackingForProjection() const { return isInRefCountTrackingForProjection;}
// Scoped toggle (via AutoBooleanToggle) of isInRefCountTrackingForProjection,
// marking the DBG-only projection ref-count-tracking window.
class AutoIsInRefCountTrackingForProjection : public AutoBooleanToggle
{
public:
    AutoIsInRefCountTrackingForProjection(Recycler * recycler) : AutoBooleanToggle(&recycler->isInRefCountTrackingForProjection) {};
};
  1702. #endif
  1703. #endif
// Scoped toggle of isCollectionDisabled for allocations made while
// re-entering the recycler; in DBG builds it additionally toggles
// allowAllocationDuringRenentrance so asserts can recognize the state.
class AutoAllowAllocationDuringReentrance : public AutoBooleanToggle
{
public:
    AutoAllowAllocationDuringReentrance(Recycler * recycler) :
        AutoBooleanToggle(&recycler->isCollectionDisabled)
#if DBG
        , allowAllocationDuringRenentrance(&recycler->allowAllocationDuringRenentrance)
#endif
    {};
#if DBG
private:
    AutoBooleanToggle allowAllocationDuringRenentrance;
#endif
};
  1718. #ifdef HEAP_ENUMERATION_VALIDATION
  1719. typedef void(*PostHeapEnumScanCallback)(const HeapObject& heapObject, void *data);
  1720. PostHeapEnumScanCallback pfPostHeapEnumScanCallback;
  1721. void *postHeapEnunScanData;
  1722. void PostHeapEnumScan(PostHeapEnumScanCallback callback, void*data);
  1723. bool IsPostEnumHeapValidationInProgress() const { return pfPostHeapEnumScanCallback != NULL; }
  1724. #endif
  1725. public:
  1726. void* GetRealAddressFromInterior(void* candidate);
  1727. private:
  1728. void BeginNonCollectingMark();
  1729. void EndNonCollectingMark();
  1730. #if defined(RECYCLER_DUMP_OBJECT_GRAPH) || defined(LEAK_REPORT) || defined(CHECK_MEMORY_LEAK)
  1731. public:
  1732. bool IsInDllCanUnloadNow() const { return inDllCanUnloadNow; }
  1733. bool IsInDetachProcess() const { return inDetachProcess; }
  1734. void SetInDllCanUnloadNow();
  1735. void SetInDetachProcess();
  1736. private:
  1737. bool inDllCanUnloadNow;
  1738. bool inDetachProcess;
  1739. bool isPrimaryMarkContextInitialized;
  1740. #endif
  1741. #if defined(LEAK_REPORT) || defined(CHECK_MEMORY_LEAK)
  1742. template <class Fn>
  1743. void ReportOnProcessDetach(Fn fn);
  1744. void PrintPinnedObjectStackTraces();
  1745. #endif
  1746. public:
  1747. typedef void (CALLBACK *ObjectBeforeCollectCallback)(void* object, void* callbackState); // same as jsrt JsObjectBeforeCollectCallback
  1748. // same as jsrt JsObjectBeforeCollectCallbackWrapper
  1749. typedef void (CALLBACK *ObjectBeforeCollectCallbackWrapper)(ObjectBeforeCollectCallback callback, void* object, void* callbackState, void* threadContext);
  1750. void SetObjectBeforeCollectCallback(void* object,
  1751. ObjectBeforeCollectCallback callback,
  1752. void* callbackState,
  1753. ObjectBeforeCollectCallbackWrapper callbackWrapper,
  1754. void* threadContext);
  1755. void ClearObjectBeforeCollectCallbacks();
  1756. bool IsInObjectBeforeCollectCallback() const { return objectBeforeCollectCallbackState != ObjectBeforeCollectCallback_None; }
  1757. private:
  1758. struct ObjectBeforeCollectCallbackData
  1759. {
  1760. ObjectBeforeCollectCallback callback;
  1761. void* callbackState;
  1762. void* threadContext;
  1763. ObjectBeforeCollectCallbackWrapper callbackWrapper;
  1764. ObjectBeforeCollectCallbackData() {}
  1765. ObjectBeforeCollectCallbackData(ObjectBeforeCollectCallbackWrapper callbackWrapper, ObjectBeforeCollectCallback callback, void* callbackState, void* threadContext) :
  1766. callbackWrapper(callbackWrapper), callback(callback), callbackState(callbackState), threadContext(threadContext) {}
  1767. };
  1768. typedef JsUtil::BaseDictionary<void*, ObjectBeforeCollectCallbackData, HeapAllocator,
  1769. PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> ObjectBeforeCollectCallbackMap;
  1770. ObjectBeforeCollectCallbackMap* objectBeforeCollectCallbackMap;
  1771. enum ObjectBeforeCollectCallbackState
  1772. {
  1773. ObjectBeforeCollectCallback_None,
  1774. ObjectBeforeCollectCallback_Normal, // Normal GC BeforeCollect callback
  1775. ObjectBeforeCollectCallback_Shutdown, // At shutdown invoke all BeforeCollect callback
  1776. } objectBeforeCollectCallbackState;
  1777. bool ProcessObjectBeforeCollectCallbacks(bool atShutdown = false);
#if GLOBAL_ENABLE_WRITE_BARRIER
private:
// Maps block start address -> byte length for memory regions registered as
// having pending write-barrier work.
typedef JsUtil::BaseDictionary<void *, size_t, HeapAllocator, PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::AsymetricResizeLock> PendingWriteBarrierBlockMap;
PendingWriteBarrierBlockMap pendingWriteBarrierBlockMap;
public:
// Registers/unregisters a [address, address+bytes) region in the pending map.
void RegisterPendingWriteBarrierBlock(void* address, size_t bytes);
void UnRegisterPendingWriteBarrierBlock(void* address);
#endif
#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
private:
// Intrusive global list of live recyclers (linked through 'next'), guarded by
// recyclerListLock; used by the static WB* debug helpers below.
static Recycler* recyclerList;
static CriticalSection recyclerListLock;
Recycler* next; // link in recyclerList
public:
// Thunk with a distinct symbol so JIT-generated code can call WBSetBit.
static void WBSetBitJIT(char* addr)
{
    return WBSetBit(addr);
}
static void WBSetBit(char* addr);
static void WBSetBitRange(char* addr, uint length);
static void WBVerifyBitIsSet(char* addr, char* target);
// True when addr falls inside memory owned by some live recycler.
static bool WBCheckIsRecyclerAddress(char* addr);
#endif
#ifdef RECYCLER_FINALIZE_CHECK
// Debug validation of finalizer bookkeeping.
void VerifyFinalize();
#endif
};
// Lightweight descriptor for one recycler-allocated object: its address, the
// owning recycler, the containing heap block, and where its attribute bits
// live. For large heap blocks (when LARGEHEAPBLOCK_ENCODING is on) attributes
// are stored encoded in the object header, so accessors branch on
// isUsingLargeHeapBlock and decode via the recycler cookie.
class RecyclerHeapObjectInfo
{
    void* m_address;
    Recycler * m_recycler;
    HeapBlock* m_heapBlock;
#if LARGEHEAPBLOCK_ENCODING
    // Either a raw attribute byte pointer (small blocks) or the large-block
    // object header; isUsingLargeHeapBlock discriminates the union.
    union
    {
        byte * m_attributes;
        LargeObjectHeader * m_largeHeapBlockHeader;
    };
    bool isUsingLargeHeapBlock = false;
#else
    byte * m_attributes;
#endif
public:
    RecyclerHeapObjectInfo() : m_address(NULL), m_recycler(NULL), m_heapBlock(NULL), m_attributes(NULL) {}
    RecyclerHeapObjectInfo(void* address, Recycler * recycler, HeapBlock* heapBlock, byte * attributes) :
        m_address(address), m_recycler(recycler), m_heapBlock(heapBlock), m_attributes(attributes) { }
    void* GetObjectAddress() const { return m_address; }
#ifdef RECYCLER_PAGE_HEAP
    // True when the object is a large-block allocation in page-heap mode.
    bool IsPageHeapAlloc() const
    {
        return isUsingLargeHeapBlock && ((LargeHeapBlock*)m_heapBlock)->InPageHeapMode();
    }
    // Protects the object's pages; caller must ensure IsPageHeapAlloc().
    void PageHeapLockPages() const
    {
        Assert(IsPageHeapAlloc());
        ((LargeHeapBlock*)m_heapBlock)->PageHeapLockPages();
    }
#endif
    // True when the object holds no GC references: LeafBit set, or the whole
    // block is a leaf block.
    bool IsLeaf() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie) & LeafBit) != 0;
        }
#endif
        return ((*m_attributes & LeafBit) != 0 || this->m_heapBlock->IsLeafBlock());
    }
    // True when the object is pinned as an implicit GC root.
    bool IsImplicitRoot() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie) & ImplicitRootBit) != 0;
        }
#endif
        return (*m_attributes & ImplicitRootBit) != 0;
    }
    // Mark-bit queries/updates go through the recycler's heap block map.
    bool IsObjectMarked() const { Assert(m_recycler); return m_recycler->heapBlockMap.IsMarked(m_address); }
    void SetObjectMarked() { Assert(m_recycler); m_recycler->heapBlockMap.SetMark(m_address); }
    // Returns the raw attribute bits (decoded for large blocks).
    ObjectInfoBits GetAttributes() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (ObjectInfoBits)m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        }
#endif
        return (ObjectInfoBits)*m_attributes;
    }
    size_t GetSize() const;
#if LARGEHEAPBLOCK_ENCODING
    // Switches this info to large-block mode; the header supersedes m_attributes.
    void SetLargeHeapBlockHeader(LargeObjectHeader * largeHeapBlockHeader)
    {
        m_largeHeapBlockHeader = largeHeapBlockHeader;
        isUsingLargeHeapBlock = true;
    }
#endif
    // Sets the "memory profiler has enumerated" bit; returns its previous value.
    bool SetMemoryProfilerHasEnumerated()
    {
        Assert(m_heapBlock);
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return SetMemoryProfilerHasEnumeratedForLargeHeapBlock();
        }
#endif
        bool wasMemoryProfilerOldObject = (*m_attributes & MemoryProfilerOldObjectBit) != 0;
        *m_attributes |= MemoryProfilerOldObjectBit;
        return wasMemoryProfilerOldObject;
    }
    // Clears the implicit-root bit and returns whether it was set.
    bool ClearImplicitRootBit()
    {
        // This can only be called on the main thread for non-finalizable block
        // As finalizable block requires that the bit not be change during concurrent mark
        // since the background thread change the NewTrackBit
        Assert(!m_heapBlock->IsAnyFinalizableBlock());
#ifdef RECYCLER_PAGE_HEAP
        // In page-heap mode, record the free stack now for later diagnostics.
        Recycler* recycler = this->m_recycler;
        if (recycler->IsPageHeapEnabled() && recycler->ShouldCapturePageHeapFreeStack())
        {
#ifdef STACK_BACK_TRACE
            if (this->isUsingLargeHeapBlock)
            {
                LargeHeapBlock* largeHeapBlock = (LargeHeapBlock*)this->m_heapBlock;
                if (largeHeapBlock->InPageHeapMode())
                {
                    largeHeapBlock->CapturePageHeapFreeStack();
                }
            }
#endif
        }
#endif
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return ClearImplicitRootBitsForLargeHeapBlock();
        }
#endif
        Assert(m_attributes);
        bool wasImplicitRoot = (*m_attributes & ImplicitRootBit) != 0;
        *m_attributes &= ~ImplicitRootBit;
        return wasImplicitRoot;
    }
    // Frees the object back to the recycler via the leaf or non-leaf path.
    // NOTE(review): reads *m_attributes directly, so callers appear to use this
    // only for small-block objects (not the large-block union case) — confirm.
    void ExplicitFree()
    {
        if (*m_attributes == ObjectInfoBits::LeafBit)
        {
            m_recycler->ExplicitFreeLeaf(m_address, GetSize());
        }
        else
        {
            Assert(*m_attributes == ObjectInfoBits::NoBit);
            m_recycler->ExplicitFreeNonLeaf(m_address, GetSize());
        }
    }
#if LARGEHEAPBLOCK_ENCODING
    // Large-block variant: decode attributes via the cookie, clear the bit,
    // re-encode, and report the previous state.
    bool ClearImplicitRootBitsForLargeHeapBlock()
    {
        Assert(m_largeHeapBlockHeader);
        byte attributes = m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        bool wasImplicitRoot = (attributes & ImplicitRootBit) != 0;
        m_largeHeapBlockHeader->SetAttributes(m_recycler->Cookie, attributes & ~ImplicitRootBit);
        return wasImplicitRoot;
    }
    // Large-block variant of SetMemoryProfilerHasEnumerated.
    bool SetMemoryProfilerHasEnumeratedForLargeHeapBlock()
    {
        Assert(m_largeHeapBlockHeader);
        byte attributes = m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        bool wasMemoryProfilerOldObject = (attributes & MemoryProfilerOldObjectBit) != 0;
        m_largeHeapBlockHeader->SetAttributes(m_recycler->Cookie, attributes | MemoryProfilerOldObjectBit);
        return wasMemoryProfilerOldObject;
    }
#endif
};
// A fake heap block to replace the original heap block where the strong ref is when it has been collected
// as the original heap block may have been freed.
// Every virtual asserts: nothing is ever supposed to query this placeholder.
class CollectedRecyclerWeakRefHeapBlock : public HeapBlock
{
public:
#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
    virtual void WBVerifyBitIsSet(char* addr) override { Assert(false); }
    virtual void WBSetBit(char* addr) override { Assert(false); }
    virtual void WBSetBitRange(char* addr, uint count) override { Assert(false); }
    virtual void WBClearBit(char* addr) override { Assert(false); }
    virtual void WBClearObject(char* addr) override { Assert(false); }
#endif
#if DBG
    virtual HeapInfo * GetHeapInfo() const override { Assert(false); return nullptr; }
    virtual BOOL IsFreeObject(void* objectAddress) override { Assert(false); return false; }
#endif
    virtual BOOL IsValidObject(void* objectAddress) override { Assert(false); return false; }
    virtual byte* GetRealAddressFromInterior(void* interiorAddress) override { Assert(false); return nullptr; }
    virtual size_t GetObjectSize(void* object) const override { Assert(false); return 0; }
    virtual bool FindHeapObject(void* objectAddress, Recycler * recycler, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject) override { Assert(false); return false; }
    virtual bool TestObjectMarkedBit(void* objectAddress) override { Assert(false); return false; }
    virtual void SetObjectMarkedBit(void* objectAddress) override { Assert(false); }
#ifdef RECYCLER_VERIFY_MARK
    virtual bool VerifyMark(void * objectAddress, void * target) override { Assert(false); return false; }
#endif
#ifdef RECYCLER_PERF_COUNTERS
    virtual void UpdatePerfCountersOnFree() override { Assert(false); }
#endif
#ifdef PROFILE_RECYCLER_ALLOC
    virtual void * GetTrackerData(void * address) override { Assert(false); return nullptr; }
    virtual void SetTrackerData(void * address, void * data) override { Assert(false); }
#endif
    // Shared singleton substituted for collected weak-ref targets.
    static CollectedRecyclerWeakRefHeapBlock Instance;
private:
    CollectedRecyclerWeakRefHeapBlock() : HeapBlock(BlockTypeCount)
    {
#if ENABLE_CONCURRENT_GC
        isPendingConcurrentSweep = false;
#endif
    }
};
  1994. class AutoIdleDecommit
  1995. {
  1996. public:
  1997. AutoIdleDecommit(Recycler * recycler) : recycler(recycler) { recycler->EnterIdleDecommit(); }
  1998. ~AutoIdleDecommit() { recycler->LeaveIdleDecommit(); }
  1999. private:
  2000. Recycler * recycler;
  2001. };
  2002. template <typename SmallHeapBlockAllocatorType>
  2003. void
  2004. Recycler::AddSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
  2005. {
  2006. this->GetDefaultHeapInfo()->AddSmallAllocator(allocator, sizeCat);
  2007. }
  2008. template <typename SmallHeapBlockAllocatorType>
  2009. void
  2010. Recycler::RemoveSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
  2011. {
  2012. this->GetDefaultHeapInfo()->RemoveSmallAllocator(allocator, sizeCat);
  2013. }
  2014. template <ObjectInfoBits attributes, typename SmallHeapBlockAllocatorType>
  2015. char *
  2016. Recycler::SmallAllocatorAlloc(SmallHeapBlockAllocatorType * allocator, DECLSPEC_GUARD_OVERFLOW size_t sizeCat, size_t size)
  2017. {
  2018. return this->GetDefaultHeapInfo()->SmallAllocatorAlloc<attributes>(this, allocator, sizeCat, size);
  2019. }
// Dummy recycler allocator policy classes to choose the allocation function
class _RecyclerLeafPolicy;
class _RecyclerNonLeafPolicy;
#ifdef RECYCLER_WRITE_BARRIER
class _RecyclerWriteBarrierPolicy;
#endif
// Primary template is intentionally empty; only the per-policy
// specializations below provide the alloc/free function selectors.
template <typename Policy>
class _RecyclerAllocatorFunc
{};
  2029. template <>
  2030. class _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
  2031. {
  2032. public:
  2033. typedef char * (Recycler::*AllocFuncType)(size_t);
  2034. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  2035. static AllocFuncType GetAllocFunc()
  2036. {
  2037. return &Recycler::AllocLeaf;
  2038. }
  2039. static AllocFuncType GetAllocZeroFunc()
  2040. {
  2041. return &Recycler::AllocLeafZero;
  2042. }
  2043. static FreeFuncType GetFreeFunc()
  2044. {
  2045. return &Recycler::ExplicitFreeLeaf;
  2046. }
  2047. };
  2048. template <>
  2049. class _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
  2050. {
  2051. public:
  2052. typedef char * (Recycler::*AllocFuncType)(size_t);
  2053. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  2054. static AllocFuncType GetAllocFunc()
  2055. {
  2056. return &Recycler::Alloc;
  2057. }
  2058. static AllocFuncType GetAllocZeroFunc()
  2059. {
  2060. return &Recycler::AllocZero;
  2061. }
  2062. static FreeFuncType GetFreeFunc()
  2063. {
  2064. return &Recycler::ExplicitFreeNonLeaf;
  2065. }
  2066. };
  2067. #ifdef RECYCLER_WRITE_BARRIER
  2068. template <>
  2069. class _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
  2070. {
  2071. public:
  2072. typedef char * (Recycler::*AllocFuncType)(size_t);
  2073. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  2074. static AllocFuncType GetAllocFunc()
  2075. {
  2076. return &Recycler::AllocWithBarrier;
  2077. }
  2078. static AllocFuncType GetAllocZeroFunc()
  2079. {
  2080. return &Recycler::AllocZeroWithBarrier;
  2081. }
  2082. static FreeFuncType GetFreeFunc()
  2083. {
  2084. return &Recycler::ExplicitFreeNonLeaf;
  2085. }
  2086. };
  2087. #endif
// This is used by the compiler; when T is NOT a pointer i.e. a value type - it causes leaf allocation
template <typename T>
class TypeAllocatorFunc<Recycler, T> : public _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
};
#if GLOBAL_ENABLE_WRITE_BARRIER
// Pointer types: with the global write barrier enabled, allocate through the
// barrier-aware policy instead of the plain non-leaf policy.
template <typename T>
class TypeAllocatorFunc<Recycler, T *> : public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
{
};
#else
// Partial template specialization; applies to T when it is a pointer
template <typename T>
class TypeAllocatorFunc<Recycler, T *> : public _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
{
};
#endif
// Dummy class to choose the allocation function
// These are tag types only: they carry no behavior themselves and are mapped
// to the appropriate policy by the TypeAllocatorFunc specializations below.
class RecyclerLeafAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
class RecyclerNonLeafAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
class RecyclerWriteBarrierAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
// Choose RecyclerLeafAllocator / RecyclerNonLeafAllocator based on "bool isLeaf"
template <bool isLeaf>
struct _RecyclerLeaf { typedef RecyclerLeafAllocator AllocatorType; };
// false -> non-leaf allocator tag.
template <>
struct _RecyclerLeaf<false> { typedef RecyclerNonLeafAllocator AllocatorType; };
  2126. template <bool isLeaf>
  2127. class ListTypeAllocatorFunc<Recycler, isLeaf>
  2128. {
  2129. public:
  2130. // RecyclerLeafAllocator / RecyclerNonLeafAllocator based on "bool isLeaf"
  2131. // used by write barrier type traits
  2132. typedef typename _RecyclerLeaf<isLeaf>::AllocatorType EffectiveAllocatorType;
  2133. typedef char * (Recycler::*AllocFuncType)(size_t);
  2134. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  2135. static AllocFuncType GetAllocFunc()
  2136. {
  2137. return isLeaf ? &Recycler::AllocLeaf : &Recycler::Alloc;
  2138. }
  2139. static FreeFuncType GetFreeFunc()
  2140. {
  2141. if (isLeaf)
  2142. {
  2143. return &Recycler::ExplicitFreeLeaf;
  2144. }
  2145. else
  2146. {
  2147. return &Recycler::ExplicitFreeNonLeaf;
  2148. }
  2149. }
  2150. };
// Partial template specialization to allocate as non leaf
template <typename T>
class TypeAllocatorFunc<RecyclerNonLeafAllocator, T> :
#if GLOBAL_ENABLE_WRITE_BARRIER
    // With the global write barrier, non-leaf allocations go through the
    // barrier-aware policy.
    public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
#else
    public _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
#endif
{
};
#ifdef RECYCLER_WRITE_BARRIER
// Explicit write-barrier tag always maps to the barrier policy.
template <typename T>
class TypeAllocatorFunc<RecyclerWriteBarrierAllocator, T> : public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
{
};
#endif
// Leaf tag maps to the leaf (unscanned) policy.
template <typename T>
class TypeAllocatorFunc<RecyclerLeafAllocator, T> : public _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
};
// AllocatorInfo: resolves an allocator tag to the concrete Recycler allocator
// plus the type-level and instance-level allocation function selectors.
template <typename TAllocType>
struct AllocatorInfo<Recycler, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<Recycler, TAllocType> AllocatorFunc;
    typedef _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy> InstAllocatorFunc; // By default any instance considered non-leaf
};
template <typename TAllocType>
struct AllocatorInfo<RecyclerNonLeafAllocator, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<RecyclerNonLeafAllocator, TAllocType> AllocatorFunc;
    typedef TypeAllocatorFunc<RecyclerNonLeafAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
};
template <typename TAllocType>
struct AllocatorInfo<RecyclerWriteBarrierAllocator, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<RecyclerWriteBarrierAllocator, TAllocType> AllocatorFunc;
    typedef TypeAllocatorFunc<RecyclerWriteBarrierAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
};
template <typename TAllocType>
struct AllocatorInfo<RecyclerLeafAllocator, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<RecyclerLeafAllocator, TAllocType> AllocatorFunc;
    typedef TypeAllocatorFunc<RecyclerLeafAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
};
// ForceNonLeafAllocator / ForceLeafAllocator: rebind an allocator tag to the
// opposite scanning behavior (used where a container must override the default).
template <>
struct ForceNonLeafAllocator<Recycler>
{
    typedef RecyclerNonLeafAllocator AllocatorType;
};
template <>
struct ForceNonLeafAllocator<RecyclerLeafAllocator>
{
    typedef RecyclerNonLeafAllocator AllocatorType;
};
template <>
struct ForceLeafAllocator<Recycler>
{
    typedef RecyclerLeafAllocator AllocatorType;
};
template <>
struct ForceLeafAllocator<RecyclerNonLeafAllocator>
{
    typedef RecyclerLeafAllocator AllocatorType;
};
// TODO: enable -profile for GC phases.
// access the same profiler object from multiple GC threads which shares one recycler object,
// but profiler object is not thread safe
// NOTE: the "&& 0" below deliberately disables these macros even under
// PROFILE_EXEC until the thread-safety issue above is resolved.
#if defined(PROFILE_EXEC) && 0
#define RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase) if (recycler->profiler != nullptr) { recycler->profiler->Begin(phase); }
#define RECYCLER_PROFILE_EXEC_END(recycler, phase) if (recycler->profiler != nullptr) { recycler->profiler->End(phase); }
#define RECYCLER_PROFILE_EXEC_BEGIN2(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->Begin(phase1); recycler->profiler->Begin(phase2);}
#define RECYCLER_PROFILE_EXEC_END2(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->End(phase1); recycler->profiler->End(phase2);}
#define RECYCLER_PROFILE_EXEC_CHANGE(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->End(phase1); recycler->profiler->Begin(phase2); }
#define RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase) if (recycler->backgroundProfiler != nullptr) { recycler->backgroundProfiler->Begin(phase); }
#define RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase) if (recycler->backgroundProfiler != nullptr) { recycler->backgroundProfiler->End(phase); }
#define RECYCLER_PROFILE_EXEC_THREAD_BEGIN(background, recycler, phase) if (background) { RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase); } else { RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase); }
#define RECYCLER_PROFILE_EXEC_THREAD_END(background, recycler, phase) if (background) { RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase); } else { RECYCLER_PROFILE_EXEC_END(recycler, phase); }
#else
// No-op expansions when profiling is disabled.
#define RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase)
#define RECYCLER_PROFILE_EXEC_END(recycler, phase)
#define RECYCLER_PROFILE_EXEC_BEGIN2(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_END2(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_CHANGE(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase)
#define RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase)
#define RECYCLER_PROFILE_EXEC_THREAD_BEGIN(background, recycler, phase)
#define RECYCLER_PROFILE_EXEC_THREAD_END(background, recycler, phase)
#endif
  2243. }
// Placement new: allocates byteSize bytes from the given HeapInfo within the
// recycler. Throws on OOM (never returns null).
_Ret_notnull_ inline void * __cdecl
operator new(DECLSPEC_GUARD_OVERFLOW size_t byteSize, Recycler * alloc, HeapInfo * heapInfo)
{
    return alloc->HeapAllocR(heapInfo, byteSize);
}
// Matching placement delete: invoked only if the corresponding placement new's
// constructor throws; returns the object to its HeapInfo.
inline void __cdecl
operator delete(void * obj, Recycler * alloc, HeapInfo * heapInfo)
{
    alloc->HeapFree(heapInfo, obj);
}
  2254. template<ObjectInfoBits infoBits>
  2255. _Ret_notnull_ inline void * __cdecl
  2256. operator new(DECLSPEC_GUARD_OVERFLOW size_t byteSize, Recycler * recycler, const InfoBitsWrapper<infoBits>&)
  2257. {
  2258. AssertCanHandleOutOfMemory();
  2259. Assert(byteSize != 0);
  2260. void * buffer;
  2261. if (infoBits & EnumClass_1_Bit)
  2262. {
  2263. buffer = recycler->AllocEnumClass<infoBits>(byteSize);
  2264. }
  2265. else
  2266. {
  2267. buffer = recycler->AllocWithInfoBits<infoBits>(byteSize);
  2268. }
  2269. // All of our allocation should throw on out of memory
  2270. Assume(buffer != nullptr);
  2271. return buffer;
  2272. }
#if DBG && defined(RECYCLER_VERIFY_MARK)
// Hook for the runtime to classify an apparent reference found during mark
// verification as a likely false positive for the given type/offset.
extern bool IsLikelyRuntimeFalseReference(
    char* objectStartAddress, size_t offset, const char* typeName);
// Grants the checker access to a class's private layout.
#define DECLARE_RECYCLER_VERIFY_MARK_FRIEND() \
private: \
    friend bool ::IsLikelyRuntimeFalseReference( \
        char* objectStartAddress, size_t offset, const char* typeName);
// Default stub implementation: nothing is treated as a false reference.
#define IMPLEMENT_STUB_IsLikelyRuntimeFalseReference() \
    bool IsLikelyRuntimeFalseReference( \
        char* objectStartAddress, size_t offset, const char* typeName) \
    { return false; }
#else
#define DECLARE_RECYCLER_VERIFY_MARK_FRIEND()
#define IMPLEMENT_STUB_IsLikelyRuntimeFalseReference()
#endif
  2288. template <typename ExternalAllocFunc>
  2289. bool Recycler::DoExternalAllocation(size_t size, ExternalAllocFunc externalAllocFunc)
  2290. {
  2291. // Request external memory allocation
  2292. if (!RequestExternalMemoryAllocation(size))
  2293. {
  2294. // Attempt to free some memory then try again
  2295. CollectNow<CollectOnTypedArrayAllocation>();
  2296. if (!RequestExternalMemoryAllocation(size))
  2297. {
  2298. return false;
  2299. }
  2300. }
  2301. struct AutoExternalAllocation
  2302. {
  2303. bool allocationSucceeded = false;
  2304. Recycler* recycler;
  2305. size_t size;
  2306. AutoExternalAllocation(Recycler* recycler, size_t size): recycler(recycler), size(size) {}
  2307. // In case the externalAllocFunc throws or fails, the destructor will report the failure
  2308. ~AutoExternalAllocation() { if (!allocationSucceeded) recycler->ReportExternalMemoryFailure(size); }
  2309. };
  2310. AutoExternalAllocation externalAllocation(this, size);
  2311. if (externalAllocFunc())
  2312. {
  2313. externalAllocation.allocationSucceeded = true;
  2314. return true;
  2315. }
  2316. return false;
  2317. }