Recycler.h 96 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. #include "CollectionState.h"
  7. namespace Js
  8. {
  9. class Profiler;
  10. enum Phase: unsigned short;
  11. };
  12. namespace JsUtil
  13. {
  14. class ThreadService;
  15. };
  16. #ifdef STACK_BACK_TRACE
  17. class StackBackTraceNode;
  18. #endif
  19. class ScriptEngineBase;
  20. class JavascriptThreadService;
  21. #ifdef PROFILE_MEM
  22. struct RecyclerMemoryData;
  23. #endif
  24. namespace Memory
  25. {
  26. template <typename T> class RecyclerRootPtr;
  27. class AutoBooleanToggle
  28. {
  29. public:
  30. AutoBooleanToggle(bool * b, bool value = true, bool valueMayChange = false)
  31. : b(b)
  32. {
  33. Assert(!(*b));
  34. *b = value;
  35. #if DBG
  36. this->value = value;
  37. this->valueMayChange = valueMayChange;
  38. #endif
  39. }
  40. ~AutoBooleanToggle()
  41. {
  42. if (b)
  43. {
  44. Assert(valueMayChange || *b == value);
  45. *b = false;
  46. }
  47. }
  48. void Leave()
  49. {
  50. Assert(valueMayChange || *b == value);
  51. *b = false;
  52. b = nullptr;
  53. }
  54. private:
  55. bool * b;
  56. #if DBG
  57. bool value;
  58. bool valueMayChange;
  59. #endif
  60. };
  61. template <class T>
  62. class AutoRestoreValue
  63. {
  64. public:
  65. AutoRestoreValue(T* var, const T& val):
  66. variable(var)
  67. {
  68. Assert(var);
  69. oldValue = (*variable);
  70. (*variable) = val;
  71. #ifdef DEBUG
  72. debugSetValue = val;
  73. #endif
  74. }
  75. ~AutoRestoreValue()
  76. {
  77. Assert((*variable) == debugSetValue);
  78. (*variable) = oldValue;
  79. }
  80. private:
  81. #ifdef DEBUG
  82. T debugSetValue;
  83. #endif
  84. T* variable;
  85. T oldValue;
  86. };
  87. class Recycler;
// Functor the Recycler hands to hosts so they can report ranges of memory
// (e.g. runtime stacks) to be scanned for GC roots during root marking.
// operator() receives a base address and a byte count; its implementation
// is not in this header.
class RecyclerScanMemoryCallback
{
public:
    RecyclerScanMemoryCallback(Recycler* recycler) : recycler(recycler) {}
    void operator()(void** obj, size_t byteCount);
private:
    Recycler* recycler;  // non-owning; the recycler the scan reports into
};
// Tag type that carries an ObjectInfoBits value as a template argument so
// placement-new overloads can select an allocation variant at compile time
// (used by the RecyclerNew*EnumClass / *WithInfoBits macros below).
template<ObjectInfoBits infoBits>
struct InfoBitsWrapper{};
// Allocation macros.
// Naming conventions: Z = zero-initialized, Plus = extra trailing bytes,
// Struct = no constructor arguments, Array = count elements, Finalized =
// object implements FinalizableObject and is finalized at sweep, Tracked =
// object participates in tracking, WithBarrier = software write barrier.
#define RecyclerNew(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocInlined, T, __VA_ARGS__)
#define RecyclerNewPlus(recycler,size,T,...) AllocatorNewPlus(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusZ(recycler,size,T,...) AllocatorNewPlusZ(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocZeroInlined, T, __VA_ARGS__)
#define RecyclerNewStruct(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocInlined, T)
#define RecyclerNewStructZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocZeroInlined, T)
#define RecyclerNewStructPlus(recycler,size,T) AllocatorNewStructPlus(Recycler, recycler, size, T)
#define RecyclerNewArray(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, Alloc, T, count)
#define RecyclerNewArrayZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZero, T, count)
// The Finalized/Tracked variants route through FinalizableObject* before the
// cast to T* so the conversion is checked against the FinalizableObject base.
#define RecyclerNewFinalized(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedInlined, T, __VA_ARGS__)))
#define RecyclerNewFinalizedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalized, size, T, __VA_ARGS__)))
#define RecyclerNewTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedInlined, T, __VA_ARGS__)))
// These two pass the info bits through InfoBitsWrapper (declared above) as a
// placement-new tag argument.
#define RecyclerNewEnumClass(recycler, enumClass, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<enumClass>()) T(__VA_ARGS__)
#define RecyclerNewWithInfoBits(recycler, infoBits, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<infoBits>()) T(__VA_ARGS__)
#define RecyclerNewFinalizedClientTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedClientTrackedInlined, T, __VA_ARGS__)))
// Write-barrier variants: same shapes as above but using the *WithBarrier
// allocation entry points.
#if defined(RECYCLER_WRITE_BARRIER_ALLOC)
#define RecyclerNewWithBarrier(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocWithBarrier, T, __VA_ARGS__)
#define RecyclerNewWithBarrierPlus(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocWithBarrier, size, T, __VA_ARGS__)
#define RecyclerNewWithBarrierPlusZ(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocZeroWithBarrier, size, T, __VA_ARGS__)
#define RecyclerNewWithBarrierZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocZeroWithBarrier, T, __VA_ARGS__)
#define RecyclerNewWithBarrierStruct(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocWithBarrier, T)
#define RecyclerNewWithBarrierStructZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocZeroWithBarrier, T)
#define RecyclerNewWithBarrierStructPlus(recycler,size,T) AllocatorNewStructPlusBase(Recycler, recycler, AllocWithBarrier, size, T)
#define RecyclerNewWithBarrierArray(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocWithBarrier, T, count)
#define RecyclerNewWithBarrierArrayZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZeroWithBarrier, T, count)
#define RecyclerNewWithBarrierFinalized(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedWithBarrierInlined, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierFinalizedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedWithBarrier, size, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedWithBarrierInlined, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierEnumClass(recycler, enumClass, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<(ObjectInfoBits)(enumClass | WithBarrierBit)>()) T(__VA_ARGS__)
#define RecyclerNewWithBarrierWithInfoBits(recycler, infoBits, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<(ObjectInfoBits)(infoBits | WithBarrierBit)>()) T(__VA_ARGS__)
#define RecyclerNewWithBarrierFinalizedClientTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedClientTrackedWithBarrierInlined, T, __VA_ARGS__)))
#endif
// When the write barrier is disabled, the WithBarrier names alias the plain
// allocators.
// NOTE(review): this guard tests RECYCLER_WRITE_BARRIER while the block
// above tests RECYCLER_WRITE_BARRIER_ALLOC — presumably the former implies
// the latter; if not, the WithBarrier names would be left undefined.
// Confirm against the build configuration that defines these macros.
#ifndef RECYCLER_WRITE_BARRIER
#define RecyclerNewWithBarrier RecyclerNew
#define RecyclerNewWithBarrierPlus RecyclerNewPlus
#define RecyclerNewWithBarrierPlusZ RecyclerNewPlusZ
#define RecyclerNewWithBarrierZ RecyclerNewZ
#define RecyclerNewWithBarrierStruct RecyclerNewStruct
#define RecyclerNewWithBarrierStructZ RecyclerNewStructZ
#define RecyclerNewWithBarrierStructPlus RecyclerNewStructPlus
#define RecyclerNewWithBarrierArray RecyclerNewArray
#define RecyclerNewWithBarrierArrayZ RecyclerNewArrayZ
#define RecyclerNewWithBarrierFinalized RecyclerNewFinalized
#define RecyclerNewWithBarrierFinalizedPlus RecyclerNewFinalizedPlus
#define RecyclerNewWithBarrierTracked RecyclerNewTracked
#define RecyclerNewWithBarrierEnumClass RecyclerNewEnumClass
#define RecyclerNewWithBarrierWithInfoBits RecyclerNewWithInfoBits
#define RecyclerNewWithBarrierFinalizedClientTracked RecyclerNewFinalizedClientTracked
#endif
// Leaf allocators: objects allocated through these contain no recycler
// pointers, so the GC does not scan their payload.
#define RecyclerNewLeaf(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafInlined, T, __VA_ARGS__)
#define RecyclerNewLeafZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafZeroInlined, T, __VA_ARGS__)
#define RecyclerNewPlusLeaf(recycler,size,T,...) AllocatorNewPlusLeaf(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusLeafZ(recycler,size,T,...) AllocatorNewPlusLeafZ(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewStructLeaf(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocLeafInlined, T)
#define RecyclerNewStructLeafZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocLeafZeroInlined, T)
#define RecyclerNewArrayLeafZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafZero, T, count)
#define RecyclerNewArrayLeaf(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeaf, T, count)
#define RecyclerNewFinalizedLeaf(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedLeafInlined, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeafPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedLeaf, size, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeaf(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedLeafInlined, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeafPlusZ(recycler,size,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocZeroTrackedLeafInlined, size, T, __VA_ARGS__)))
// Object-lifetime tracing variants: when TRACE_OBJECT_LIFETIME is defined,
// these route through the *Trace allocation entry points; otherwise each
// Trace name is just an alias for the corresponding non-trace macro.
#ifdef TRACE_OBJECT_LIFETIME
#define RecyclerNewLeafTrace(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafTrace, T, __VA_ARGS__)
#define RecyclerNewLeafZTrace(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafZeroTrace, T, __VA_ARGS__)
#define RecyclerNewPlusLeafTrace(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocLeafTrace, size, T, __VA_ARGS__)
#define RecyclerNewArrayLeafZTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafZeroTrace, T, count)
#define RecyclerNewArrayTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocTrace, T, count)
#define RecyclerNewArrayZTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZeroTrace, T, count)
#define RecyclerNewArrayLeafTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafTrace, T, count)
#define RecyclerNewFinalizedTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedTrace, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeafTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedLeafTrace, T, __VA_ARGS__)))
#define RecyclerNewFinalizedPlusTrace(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedTrace, size, T, __VA_ARGS__)))
#define RecyclerNewTrackedTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedTrace, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeafTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedLeafTrace, T, __VA_ARGS__)))
#else
#define RecyclerNewLeafTrace RecyclerNewLeaf
#define RecyclerNewLeafZTrace RecyclerNewLeafZ
#define RecyclerNewPlusLeafTrace RecyclerNewPlusLeaf
#define RecyclerNewArrayLeafZTrace RecyclerNewArrayLeafZ
#define RecyclerNewArrayTrace RecyclerNewArray
#define RecyclerNewArrayZTrace RecyclerNewArrayZ
#define RecyclerNewArrayLeafTrace RecyclerNewArrayLeaf
#define RecyclerNewFinalizedTrace RecyclerNewFinalized
#define RecyclerNewFinalizedLeafTrace RecyclerNewFinalizedLeaf
#define RecyclerNewFinalizedPlusTrace RecyclerNewFinalizedPlus
#define RecyclerNewTrackedTrace RecyclerNewTracked
#define RecyclerNewTrackedLeafTrace RecyclerNewTrackedLeaf
#endif
  188. #ifdef RECYCLER_TRACE
  189. #define RecyclerVerboseTrace(flags, ...) \
  190. if (flags.Verbose && flags.Trace.IsEnabled(Js::RecyclerPhase)) \
  191. { \
  192. Output::Print(__VA_ARGS__); \
  193. }
  194. #define AllocationVerboseTrace(flags, ...) \
  195. if (flags.Verbose && flags.Trace.IsEnabled(Js::MemoryAllocationPhase)) \
  196. { \
  197. Output::Print(__VA_ARGS__); \
  198. }
  199. #define LargeAllocationVerboseTrace(flags, ...) \
  200. if (flags.Verbose && \
  201. (flags.Trace.IsEnabled(Js::MemoryAllocationPhase) || \
  202. flags.Trace.IsEnabled(Js::LargeMemoryAllocationPhase))) \
  203. { \
  204. Output::Print(__VA_ARGS__); \
  205. }
  206. #define PageAllocatorAllocationVerboseTrace(flags, ...) \
  207. if (flags.Verbose && flags.Trace.IsEnabled(Js::PageAllocatorAllocPhase)) \
  208. { \
  209. Output::Print(__VA_ARGS__); \
  210. }
  211. #else
  212. #define RecyclerVerboseTrace(...)
  213. #define AllocationVerboseTrace(...)
  214. #define LargeAllocationVerboseTrace(...)
  215. #endif
// Heap-object allocation/free on a specific HeapInfo within the recycler.
#define RecyclerHeapNew(recycler,heapInfo,T,...) new (recycler, heapInfo) T(__VA_ARGS__)
#define RecyclerHeapDelete(recycler,heapInfo,addr) (static_cast<Recycler *>(recycler)->HeapFree(heapInfo,addr))
// Callback the host registers to mark additional (external) GC roots.
typedef void (__cdecl* ExternalRootMarker)(void *);
// Flags controlling how a collection is triggered and performed.
// Layout: low byte = heuristics deciding WHETHER to collect, middle bits =
// per-call overrides, high bits = collection modes. The named combinations
// at the bottom are the entry points used by the rest of the engine.
enum CollectionFlags
{
    // --- Heuristics: decide whether a collection should start ---
    CollectHeuristic_AllocSize          = 0x00000001,
    CollectHeuristic_Time               = 0x00000002,
    CollectHeuristic_TimeIfScriptActive = 0x00000004,
    CollectHeuristic_TimeIfInScript     = 0x00000008,
    CollectHeuristic_Never              = 0x00000080,
    CollectHeuristic_Mask               = 0x000000FF,

    // --- Overrides: per-call behavior tweaks ---
    CollectOverride_FinishConcurrent        = 0x00001000,
    CollectOverride_ExhaustiveCandidate     = 0x00002000,
    CollectOverride_ForceInThread           = 0x00004000,
    CollectOverride_AllowDispose            = 0x00008000,
    CollectOverride_AllowReentrant          = 0x00010000,
    CollectOverride_ForceFinish             = 0x00020000,
    CollectOverride_Explicit                = 0x00040000,
    CollectOverride_DisableIdleFinish       = 0x00080000,
    CollectOverride_BackgroundFinishMark    = 0x00100000,
    CollectOverride_FinishConcurrentTimeout = 0x00200000,
    CollectOverride_NoExhaustiveCollect     = 0x00400000,
    CollectOverride_SkipStack               = 0x01000000,
    CollectOverride_CheckScriptContextClose = 0x02000000,

    // --- Modes: what kind of collection to run ---
    CollectMode_Partial      = 0x08000000,
    CollectMode_Concurrent   = 0x10000000,
    CollectMode_Exhaustive   = 0x20000000,
    CollectMode_DecommitNow  = 0x40000000,
    CollectMode_CacheCleanup = 0x80000000,

    // --- Named combinations used by callers ---
    CollectNowForceInThread                = CollectOverride_ForceInThread,
    CollectNowForceInThreadExternal        = CollectOverride_ForceInThread | CollectOverride_AllowDispose,
    CollectNowForceInThreadExternalNoStack = CollectOverride_ForceInThread | CollectOverride_AllowDispose | CollectOverride_SkipStack,
    CollectNowDefault           = CollectOverride_FinishConcurrent,
    CollectNowDefaultLSCleanup  = CollectOverride_FinishConcurrent | CollectOverride_AllowDispose,
    CollectNowDecommitNowExplicit = CollectNowDefault | CollectMode_DecommitNow | CollectMode_CacheCleanup | CollectOverride_Explicit | CollectOverride_AllowDispose,
    CollectNowConcurrent        = CollectOverride_FinishConcurrent | CollectMode_Concurrent,
    CollectNowExhaustive        = CollectOverride_FinishConcurrent | CollectMode_Exhaustive | CollectOverride_AllowDispose,
    CollectNowPartial           = CollectOverride_FinishConcurrent | CollectMode_Partial,
    CollectNowConcurrentPartial = CollectMode_Concurrent | CollectNowPartial,

    // Heuristic-gated collections triggered from allocation / script events.
    CollectOnAllocation = CollectHeuristic_AllocSize | CollectHeuristic_Time | CollectMode_Concurrent | CollectMode_Partial | CollectOverride_FinishConcurrent | CollectOverride_AllowReentrant | CollectOverride_FinishConcurrentTimeout,
    CollectOnTypedArrayAllocation = CollectHeuristic_AllocSize | CollectHeuristic_Time | CollectMode_Concurrent | CollectMode_Partial | CollectOverride_FinishConcurrent | CollectOverride_AllowReentrant | CollectOverride_FinishConcurrentTimeout | CollectOverride_AllowDispose,
    CollectOnScriptIdle = CollectOverride_CheckScriptContextClose | CollectOverride_FinishConcurrent | CollectMode_Concurrent | CollectMode_CacheCleanup | CollectOverride_SkipStack,
    CollectOnScriptExit = CollectOverride_CheckScriptContextClose | CollectHeuristic_AllocSize | CollectOverride_FinishConcurrent | CollectMode_Concurrent | CollectMode_CacheCleanup,
    CollectExhaustiveCandidate = CollectHeuristic_Never | CollectOverride_ExhaustiveCandidate,
    CollectOnScriptCloseNonPrimary = CollectNowConcurrent | CollectOverride_ExhaustiveCandidate | CollectOverride_AllowDispose,
    CollectOnRecoverFromOutOfMemory = CollectOverride_ForceInThread | CollectMode_DecommitNow,
    CollectOnSuspendCleanup = CollectNowConcurrent | CollectMode_Exhaustive | CollectMode_DecommitNow | CollectOverride_DisableIdleFinish,

    // Finishing an already-running concurrent collection.
    FinishConcurrentOnIdle        = CollectMode_Concurrent | CollectOverride_DisableIdleFinish,
    FinishConcurrentOnIdleAtRoot  = CollectMode_Concurrent | CollectOverride_DisableIdleFinish | CollectOverride_SkipStack,
    FinishConcurrentDefault       = CollectMode_Concurrent | CollectOverride_DisableIdleFinish | CollectOverride_BackgroundFinishMark,
    FinishConcurrentOnExitScript  = FinishConcurrentDefault,
    FinishConcurrentOnEnterScript = FinishConcurrentDefault,
    FinishConcurrentOnAllocation  = FinishConcurrentDefault,
    FinishDispose       = CollectOverride_AllowDispose,
    FinishDisposeTimed  = CollectOverride_AllowDispose | CollectHeuristic_TimeIfScriptActive,
    ForceFinishCollection = CollectOverride_ForceFinish | CollectOverride_ForceInThread,

#ifdef RECYCLER_STRESS
    // Stress-testing configurations.
    CollectStress = CollectNowForceInThread,
#if ENABLE_PARTIAL_GC
    CollectPartialStress = CollectMode_Partial,
#endif
#if ENABLE_CONCURRENT_GC
    CollectBackgroundStress = CollectNowDefault,
    CollectConcurrentStress = CollectNowConcurrent,
#if ENABLE_PARTIAL_GC
    CollectConcurrentPartialStress = CollectConcurrentStress | CollectPartialStress,
#endif
#endif
#endif
#if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
    // Final GC run at shutdown for leak checking/reporting.
    CollectNowFinalGC = CollectNowExhaustive | CollectOverride_ForceInThread | CollectOverride_SkipStack | CollectOverride_Explicit | CollectOverride_AllowDispose,
#endif
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    CollectNowExhaustiveSkipStack = CollectNowExhaustive | CollectOverride_SkipStack, // Used by test
#endif
};
// Host-side callback interface the Recycler drives at well-defined points of
// a collection (pre-collection, rescan, mark, sweep, dispose, ...). The
// script engine implements this to coordinate GC with script execution.
class RecyclerCollectionWrapper
{
public:
    RecyclerCollectionWrapper() :
        _isScriptContextCloseGCPending(FALSE)
    { }

    // Pointer-to-member type for the Recycler collection entry points the
    // host is asked to invoke via ExecuteRecyclerCollectionFunction.
    typedef BOOL (Recycler::*CollectionFunction)(CollectionFlags flags);

    virtual void PreCollectionCallBack(CollectionFlags flags) = 0;
    virtual void PreSweepCallback() = 0;
    virtual void PreRescanMarkCallback() = 0;
    // Lets the host mark its own roots; returns a byte count and reports
    // whether it scanned the stacks itself via *stacksScannedByRuntime.
    virtual size_t RootMarkCallback(RecyclerScanMemoryCallback& scanMemoryCallback, BOOL * stacksScannedByRuntime) = 0;
    virtual void RescanMarkTimeoutCallback() = 0;
    virtual void EndMarkCallback() = 0;
    virtual void ConcurrentCallback() = 0;
    virtual void WaitCollectionCallBack() = 0;
    virtual void PostCollectionCallBack() = 0;
    // Invokes the given Recycler collection function on the host's terms.
    virtual BOOL ExecuteRecyclerCollectionFunction(Recycler * recycler, CollectionFunction function, CollectionFlags flags) = 0;
    virtual uint GetRandomNumber() = 0;
    virtual bool DoSpecialMarkOnScanStack() = 0;
    virtual void PostSweepRedeferralCallBack() = 0;
#ifdef FAULT_INJECTION
    virtual void DisposeScriptContextByFaultInjectionCallBack() = 0;
#endif
    virtual void DisposeObjects(Recycler * recycler) = 0;
    virtual void PreDisposeObjectsCallBack() = 0;
#ifdef ENABLE_PROJECTION
    virtual void MarkExternalWeakReferencedObjects(bool inPartialCollect) = 0;
    virtual void ResolveExternalWeakReferencedObjects() = 0;
#endif
#if DBG || defined(PROFILE_EXEC)
    virtual bool AsyncHostOperationStart(void *) = 0;
    virtual void AsyncHostOperationEnd(bool wasInAsync, void *) = 0;
#endif

    // A script-context-close-triggered GC has been requested but not yet run.
    BOOL GetIsScriptContextCloseGCPending()
    {
        return _isScriptContextCloseGCPending;
    }
    void ClearIsScriptContextCloseGCPending()
    {
        _isScriptContextCloseGCPending = FALSE;
    }
    void SetIsScriptContextCloseGCPending()
    {
        _isScriptContextCloseGCPending = TRUE;
    }

protected:
    BOOL _isScriptContextCloseGCPending;
};
// No-op implementation of RecyclerCollectionWrapper, used when no host
// wrapper is installed. All callbacks do nothing; RootMarkCallback reports
// that no stacks were scanned by the runtime and no bytes were marked.
class DefaultRecyclerCollectionWrapper : public RecyclerCollectionWrapper
{
public:
    virtual void PreCollectionCallBack(CollectionFlags flags) override {}
    virtual void PreSweepCallback() override {}
    virtual void PreRescanMarkCallback() override {}
    virtual void RescanMarkTimeoutCallback() override {}
    virtual void EndMarkCallback() override {}
    virtual size_t RootMarkCallback(RecyclerScanMemoryCallback& scanMemoryCallback, BOOL * stacksScannedByRuntime) override { *stacksScannedByRuntime = FALSE; return 0; }
    virtual void ConcurrentCallback() override {}
    virtual void WaitCollectionCallBack() override {}
    virtual void PostCollectionCallBack() override {}
    // Defined out of line (not in this header).
    virtual BOOL ExecuteRecyclerCollectionFunction(Recycler * recycler, CollectionFunction function, CollectionFlags flags) override;
    virtual uint GetRandomNumber() override { return 0; }
    virtual bool DoSpecialMarkOnScanStack() override { return false; }
    virtual void PostSweepRedeferralCallBack() override {}
#ifdef FAULT_INJECTION
    virtual void DisposeScriptContextByFaultInjectionCallBack() override {};
#endif
    // Defined out of line (not in this header).
    virtual void DisposeObjects(Recycler * recycler) override;
    virtual void PreDisposeObjectsCallBack() override {};
#ifdef ENABLE_PROJECTION
    virtual void MarkExternalWeakReferencedObjects(bool inPartialCollect) override {};
    virtual void ResolveExternalWeakReferencedObjects() override {};
#endif
#if DBG || defined(PROFILE_EXEC)
    virtual bool AsyncHostOperationStart(void *) override { return false; };
    virtual void AsyncHostOperationEnd(bool wasInAsync, void *) override {};
#endif

    // Shared singleton instance.
    static DefaultRecyclerCollectionWrapper Instance;

private:
    static bool IsCollectionDisabled(Recycler * recycler);
};
  373. #ifdef RECYCLER_STATS
// Per-collection statistics gathered when RECYCLER_STATS is enabled.
// Updated via the RECYCLER_STATS_* macros below and dumped by the
// Print*Stats members of Recycler.
struct RecyclerCollectionStats
{
// Allocation bytes outstanding when the collection started.
size_t startCollectAllocBytes;
#if ENABLE_PARTIAL_GC
size_t startCollectNewPageCount;
#endif
size_t continueCollectAllocBytes;
size_t finishCollectTryCount;
// Heuristic Stats
#if ENABLE_PARTIAL_GC
size_t rescanRootBytes;
size_t estimatedPartialReuseBytes;
size_t uncollectedNewPageCountPartialCollect;
size_t partialCollectSmallHeapBlockReuseMinFreeBytes;
double collectEfficacy;
double collectCost;
#endif
// Mark stats
size_t tryMarkCount; // # of pointer try mark (* pointer size to get total number byte looked at)
size_t tryMarkNullCount;
size_t tryMarkUnalignedCount;
size_t tryMarkNonRecyclerMemoryCount;
size_t tryMarkInteriorCount;
size_t tryMarkInteriorNullCount;
size_t tryMarkInteriorNonRecyclerMemoryCount;
size_t rootCount;
size_t stackCount;
size_t remarkCount;
size_t scanCount; // non-leaf objects marked.
size_t trackCount;
size_t finalizeCount;
size_t markThruNewObjCount;
size_t markThruFalseNewObjCount;
// One set of mark/rescan counters; also used per background repeat-mark pass.
struct MarkData
{
// Rescan stats
size_t rescanPageCount;
size_t rescanObjectCount;
size_t rescanObjectByteCount;
size_t rescanLargePageCount;
size_t rescanLargeObjectCount;
size_t rescanLargeByteCount;
size_t markCount; // total number of object marked
size_t markBytes; // size of all objects marked.
} markData;
#if ENABLE_CONCURRENT_GC
// One MarkData per background repeat-mark iteration.
MarkData backgroundMarkData[RecyclerHeuristic::MaxBackgroundRepeatMarkCount];
size_t trackedObjectCount;
#endif
#if ENABLE_PARTIAL_GC
size_t clientTrackedObjectCount;
#endif
// Sweep stats
size_t heapBlockCount[HeapBlock::BlockTypeCount]; // number of heap blocks (processed during swept)
size_t heapBlockFreeCount[HeapBlock::BlockTypeCount]; // number of heap blocks deleted
size_t heapBlockConcurrentSweptCount[HeapBlock::SmallBlockTypeCount];
size_t heapBlockSweptCount[HeapBlock::SmallBlockTypeCount]; // number of heap blocks swept
size_t objectSweptCount; // objects freed (free list + whole page freed)
size_t objectSweptBytes;
size_t objectSweptFreeListCount; // objects freed (free list)
size_t objectSweptFreeListBytes;
size_t objectSweepScanCount; // number of objects walked for sweeping (exclude whole page freed)
size_t finalizeSweepCount; // number of objects finalizer/dispose called
#if ENABLE_PARTIAL_GC
size_t smallNonLeafHeapBlockPartialReuseCount[HeapBlock::SmallBlockTypeCount];
size_t smallNonLeafHeapBlockPartialReuseBytes[HeapBlock::SmallBlockTypeCount];
size_t smallNonLeafHeapBlockPartialUnusedCount[HeapBlock::SmallBlockTypeCount];
size_t smallNonLeafHeapBlockPartialUnusedBytes[HeapBlock::SmallBlockTypeCount];
#endif
// Memory Stats
size_t heapBlockFreeByteCount[HeapBlock::BlockTypeCount]; // The remaining usable free byte count
size_t largeHeapBlockUsedByteCount; // Used byte count
size_t largeHeapBlockTotalByteCount; // Total byte count
// Empty/zero heap block stats
uint numEmptySmallBlocks[HeapBlock::SmallBlockTypeCount];
uint numZeroedOutSmallBlocks;
};
// Accessors for RecyclerCollectionStats fields. The INTERLOCKED variants are
// for counters updated from background GC threads; the plain forms must only
// be used where no concurrent updates can occur.
#define RECYCLER_STATS_INC_IF(cond, r, f) if (cond) { RECYCLER_STATS_INC(r, f); }
#define RECYCLER_STATS_INC(r, f) ++r->collectionStats.f
#define RECYCLER_STATS_INTERLOCKED_INC(r, f) { InterlockedIncrement((LONG *)&r->collectionStats.f); }
#define RECYCLER_STATS_DEC(r, f) --r->collectionStats.f
#define RECYCLER_STATS_ADD(r, f, v) r->collectionStats.f += (v)
#define RECYCLER_STATS_INTERLOCKED_ADD(r, f, v) { InterlockedAdd((LONG *)&r->collectionStats.f, (LONG)(v)); }
#define RECYCLER_STATS_SUB(r, f, v) r->collectionStats.f -= (v)
#define RECYCLER_STATS_SET(r, f, v) r->collectionStats.f = v
#else
// RECYCLER_STATS disabled: all stat updates compile away to nothing.
#define RECYCLER_STATS_INC_IF(cond, r, f)
#define RECYCLER_STATS_INC(r, f)
#define RECYCLER_STATS_INTERLOCKED_INC(r, f)
#define RECYCLER_STATS_DEC(r, f)
#define RECYCLER_STATS_ADD(r, f, v)
#define RECYCLER_STATS_INTERLOCKED_ADD(r, f, v)
#define RECYCLER_STATS_SUB(r, f, v)
#define RECYCLER_STATS_SET(r, f, v)
#endif
  469. #ifdef RECYCLER_TRACE
// Snapshot of the parameters/heuristic inputs of a collection request,
// recorded for RECYCLER_TRACE output (see Recycler::collectionParam).
struct CollectionParam
{
CollectionFlags flags;
bool finishOnly;
bool repeat;
bool priorityBoostConcurrentSweepOverride;
bool domCollect;
int timeDiff;
size_t uncollectedAllocBytes;
size_t uncollectedPinnedObjects;
#if ENABLE_PARTIAL_GC
size_t uncollectedNewPageCountPartialCollect;
size_t uncollectedNewPageCount;
size_t unusedPartialCollectFreeBytes;
bool inPartialCollectMode;
#endif
};
  487. #endif
  488. #include "RecyclerObjectGraphDumper.h"
  489. #if ENABLE_CONCURRENT_GC
  490. class RecyclerParallelThread
  491. {
  492. public:
  493. typedef void (Recycler::* WorkFunc)();
  494. RecyclerParallelThread(Recycler * recycler, WorkFunc workFunc) :
  495. recycler(recycler),
  496. workFunc(workFunc),
  497. concurrentWorkReadyEvent(NULL),
  498. concurrentWorkDoneEvent(NULL),
  499. concurrentThread(NULL)
  500. {
  501. }
  502. ~RecyclerParallelThread()
  503. {
  504. Assert(concurrentThread == NULL);
  505. Assert(concurrentWorkReadyEvent == NULL);
  506. Assert(concurrentWorkDoneEvent == NULL);
  507. }
  508. bool StartConcurrent();
  509. void WaitForConcurrent();
  510. void Shutdown();
  511. bool EnableConcurrent(bool synchronizeOnStartup);
  512. private:
  513. // Static entry point for thread creation
  514. static unsigned int CALLBACK StaticThreadProc(LPVOID lpParameter);
  515. // Static entry point for thread service usage
  516. static void CALLBACK StaticBackgroundWorkCallback(void * callbackData);
  517. private:
  518. WorkFunc workFunc;
  519. Recycler * recycler;
  520. HANDLE concurrentWorkReadyEvent;// main thread uses this event to tell concurrent threads that the work is ready
  521. HANDLE concurrentWorkDoneEvent;// concurrent threads use this event to tell main thread that the work allocated is done
  522. HANDLE concurrentThread;
  523. bool synchronizeOnStartup;
  524. };
  525. #endif
  526. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
// Debug-only RAII helper that changes page protection on the recycler's
// pages for the duration of its scope (see the out-of-line definitions).
// Call Unprotect() to restore access before the destructor runs its course.
class AutoProtectPages
{
public:
AutoProtectPages(Recycler* recycler, bool protectEnabled);
~AutoProtectPages();
void Unprotect();
private:
Recycler* recycler;
bool isReadOnly; // presumably records the protection applied — TODO confirm against the .cpp
};
  537. #endif
  538. class Recycler
  539. {
  540. friend class RecyclerScanMemoryCallback;
  541. friend class RecyclerSweep;
  542. friend class MarkContext;
  543. friend class HeapBlock;
  544. friend class HeapBlockMap32;
  545. #if ENABLE_CONCURRENT_GC
  546. friend class RecyclerParallelThread;
  547. #endif
  548. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  549. friend class AutoProtectPages;
  550. #endif
  551. template <typename T> friend class RecyclerWeakReference;
  552. template <typename T> friend class WeakReferenceHashTable;
  553. template <typename TBlockType>
  554. friend class SmallHeapBlockAllocator; // Needed for FindHeapBlock
  555. #if defined(RECYCLER_TRACE)
  556. friend class JavascriptThreadService;
  557. #endif
  558. #ifdef HEAP_ENUMERATION_VALIDATION
  559. friend class ActiveScriptProfilerHeapEnum;
  560. #endif
  561. friend class ScriptEngineBase; // This is for disabling GC for certain Host operations.
  562. friend class ::CodeGenNumberThreadAllocator;
  563. friend struct ::XProcNumberPageSegmentManager;
  564. public:
  565. static const uint ConcurrentThreadStackSize = 300000;
  566. static const bool FakeZeroLengthArray = true;
  567. #ifdef RECYCLER_PAGE_HEAP
  568. // Keeping as constant in case we want to tweak the value here
  569. // Set to 0 so that the tool can do the filtering instead of the runtime
  570. #if DBG
  571. static const int s_numFramesToSkipForPageHeapAlloc = 10;
  572. static const int s_numFramesToSkipForPageHeapFree = 0;
  573. static const int s_numFramesToCaptureForPageHeap = 32;
  574. #else
  575. static const int s_numFramesToSkipForPageHeapAlloc = 0;
  576. static const int s_numFramesToSkipForPageHeapFree = 0;
  577. static const int s_numFramesToCaptureForPageHeap = 32;
  578. #endif
  579. #endif
  580. uint Cookie;
// RAII scope marker (debug-only state): flags that a host-initiated GC is in
// progress for which the runtime's stack scan is skipped. Re-entrancy is not
// allowed and is asserted against on entry.
// NOTE(review): isExternalStackSkippingGC is only declared under #if DBG;
// the unguarded Assert below relies on Assert() compiling away in non-DBG
// builds — confirm.
class AutoEnterExternalStackSkippingGCMode
{
public:
AutoEnterExternalStackSkippingGCMode(Recycler* recycler):
_recycler(recycler)
{
// Setting this in a re-entrant mode is not allowed
Assert(!recycler->isExternalStackSkippingGC);
#if DBG
_recycler->isExternalStackSkippingGC = true;
#endif
}
~AutoEnterExternalStackSkippingGCMode()
{
#if DBG
_recycler->isExternalStackSkippingGC = false;
#endif
}
private:
Recycler* _recycler;
};
  602. private:
  603. IdleDecommitPageAllocator * threadPageAllocator;
  604. #ifdef RECYCLER_WRITE_BARRIER_ALLOC_SEPARATE_PAGE
  605. RecyclerPageAllocator recyclerWithBarrierPageAllocator;
  606. #endif
  607. RecyclerPageAllocator recyclerPageAllocator;
  608. RecyclerPageAllocator recyclerLargeBlockPageAllocator;
  609. public:
// Applies 'action' to every page allocator owned/used by this recycler:
// the regular and large-block recycler allocators, the write-barrier
// allocator when compiled in, and finally the thread page allocator.
template<typename Action>
void ForEachPageAllocator(Action action)
{
action(&this->recyclerPageAllocator);
action(&this->recyclerLargeBlockPageAllocator);
#ifdef RECYCLER_WRITE_BARRIER_ALLOC_SEPARATE_PAGE
action(&this->recyclerWithBarrierPageAllocator);
#endif
action(threadPageAllocator);
}
  620. private:
  621. class AutoSwitchCollectionStates
  622. {
  623. public:
  624. AutoSwitchCollectionStates(Recycler* recycler, CollectionState entryState, CollectionState exitState):
  625. _recycler(recycler),
  626. _exitState(exitState)
  627. {
  628. _recycler->collectionState = entryState;
  629. }
  630. ~AutoSwitchCollectionStates()
  631. {
  632. _recycler->collectionState = _exitState;
  633. }
  634. private:
  635. Recycler* _recycler;
  636. CollectionState _exitState;
  637. };
  638. CollectionState collectionState;
  639. JsUtil::ThreadService *threadService;
  640. HeapBlockMap heapBlockMap;
#if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
// Pin-count entry for the pinned-object map. In leak-diagnostic builds this
// wraps the ref count so it can also carry the pinning call stacks; the
// operators below keep it usable exactly like the plain uint it replaces.
struct PinRecord
{
#ifdef STACK_BACK_TRACE
PinRecord() : refCount(0), stackBackTraces(nullptr) {}
#else
PinRecord() : refCount(0) {}
#endif
// Only assignment to zero is supported (used when resetting an entry);
// the stack-trace list must already have been released.
PinRecord& operator=(uint newRefCount)
{
#ifdef STACK_BACK_TRACE
Assert(stackBackTraces == nullptr);
#endif
Assert(newRefCount == 0); refCount = 0; return *this;
}
PinRecord& operator++() { ++refCount; return *this; }
PinRecord& operator--() { --refCount; return *this; }
operator uint() const { return refCount; }
#ifdef STACK_BACK_TRACE
// Linked list of stacks captured at each pin — diagnostics only.
StackBackTraceNode * stackBackTraces;
#endif
private:
uint refCount;
};
#else
// Non-diagnostic builds: a pin record is just the reference count.
typedef uint PinRecord;
#endif
  668. typedef SimpleHashTable<void *, PinRecord, HeapAllocator, DefaultComparer, true, PrimePolicy> PinnedObjectHashTable;
  669. PinnedObjectHashTable pinnedObjectMap;
  670. WeakReferenceHashTable<PrimePolicy> weakReferenceMap;
  671. uint weakReferenceCleanupId;
  672. void * transientPinnedObject;
  673. #if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
  674. #ifdef STACK_BACK_TRACE
  675. StackBackTrace * transientPinnedObjectStackBackTrace;
  676. #endif
  677. #endif
// Arena allocator owned by the recycler on behalf of a guest; carries a
// pendingDelete flag so deletion requested during a GC can be deferred
// (see hasPendingDeleteGuestArena).
struct GuestArenaAllocator : public ArenaAllocator
{
GuestArenaAllocator(__in_z char16 const* name, PageAllocator * pageAllocator, void (*outOfMemoryFunc)())
: ArenaAllocator(name, pageAllocator, outOfMemoryFunc), pendingDelete(false)
{
}
bool pendingDelete; // set when deletion must wait until the current GC completes
};
  686. DListBase<GuestArenaAllocator> guestArenaList;
  687. DListBase<ArenaData*> externalGuestArenaList; // guest arenas are scanned for roots
  688. #ifdef RECYCLER_PAGE_HEAP
  689. bool isPageHeapEnabled;
  690. bool capturePageHeapAllocStack;
  691. bool capturePageHeapFreeStack;
  692. inline bool IsPageHeapEnabled() const { return isPageHeapEnabled; }
  693. template<ObjectInfoBits attributes>
  694. bool IsPageHeapEnabled(size_t size);
  695. inline bool ShouldCapturePageHeapAllocStack() const { return capturePageHeapAllocStack; }
  696. void VerifyPageHeapFillAfterAlloc(char* memBlock, size_t size, ObjectInfoBits attributes);
  697. #else
  698. inline const bool IsPageHeapEnabled() const { return false; }
  699. inline bool ShouldCapturePageHeapAllocStack() const { return false; }
  700. #endif
  701. #ifdef RECYCLER_MARK_TRACK
  702. MarkMap* markMap;
  703. CriticalSection markMapCriticalSection;
  704. void PrintMarkMap();
  705. void ClearMarkMap();
  706. #endif
  707. // Number of pages to reserve for the primary mark stack
  708. // This is the minimum number of pages to guarantee that a single heap block
  709. // can be rescanned in the worst possible case where every object in a heap block
  710. // in the smallest bucket needs to be rescanned
  711. // These many pages being reserved guarantees that in OOM Rescan, we can make progress
  712. // on every rescan iteration
  713. // We add one because there is a small amount of the page reserved for page pool metadata
  714. // so we need to allocate an additional page to be sure
  715. // Currently, this works out to 2 pages on 32-bit and 5 pages on 64-bit
  716. static const int PrimaryMarkStackReservedPageCount =
  717. ((SmallAllocationBlockAttributes::PageCount * MarkContext::MarkCandidateSize) / SmallAllocationBlockAttributes::MinObjectSize) + 1;
  718. MarkContext markContext;
  719. // Contexts for parallel marking.
  720. // We support up to 4 way parallelism, main context + 3 additional parallel contexts.
  721. MarkContext parallelMarkContext1;
  722. MarkContext parallelMarkContext2;
  723. MarkContext parallelMarkContext3;
  724. // Page pools for above markContexts
  725. PagePool markPagePool;
  726. PagePool parallelMarkPagePool1;
  727. PagePool parallelMarkPagePool2;
  728. PagePool parallelMarkPagePool3;
  729. bool IsMarkStackEmpty();
  730. bool HasPendingMarkObjects() const { return markContext.HasPendingMarkObjects() || parallelMarkContext1.HasPendingMarkObjects() || parallelMarkContext2.HasPendingMarkObjects() || parallelMarkContext3.HasPendingMarkObjects(); }
  731. bool HasPendingTrackObjects() const { return markContext.HasPendingTrackObjects() || parallelMarkContext1.HasPendingTrackObjects() || parallelMarkContext2.HasPendingTrackObjects() || parallelMarkContext3.HasPendingTrackObjects(); }
  732. RecyclerCollectionWrapper * collectionWrapper;
  733. HANDLE mainThreadHandle;
  734. void * stackBase;
  735. class SavedRegisterState
  736. {
  737. public:
  738. #if _M_IX86
  739. static const int NumRegistersToSave = 8;
  740. #elif _M_ARM
  741. static const int NumRegistersToSave = 13;
  742. #elif _M_ARM64
  743. static const int NumRegistersToSave = 13;
  744. #elif _M_AMD64
  745. static const int NumRegistersToSave = 16;
  746. #endif
  747. SavedRegisterState()
  748. {
  749. memset(registers, 0, sizeof(void*) * NumRegistersToSave);
  750. }
  751. void** GetRegisters()
  752. {
  753. return registers;
  754. }
  755. void* GetStackTop()
  756. {
  757. // By convention, our register-saving routine will always
  758. // save the stack pointer as the first item in the array
  759. return registers[0];
  760. }
  761. private:
  762. void* registers[NumRegistersToSave];
  763. };
  764. SavedRegisterState savedThreadContext;
  765. bool inDispose;
  766. #if DBG
  767. uint collectionCount;
  768. #endif
  769. #if DBG || defined RECYCLER_TRACE
  770. bool inResolveExternalWeakReferences;
  771. #endif
  772. bool allowDispose;
  773. bool inDisposeWrapper;
  774. bool needOOMRescan;
  775. bool hasDisposableObject;
  776. DWORD tickCountNextDispose;
  777. bool hasPendingTransferDisposedObjects;
  778. bool inExhaustiveCollection;
  779. bool hasExhaustiveCandidate;
  780. bool inCacheCleanupCollection;
  781. bool inDecommitNowCollection;
  782. bool isScriptActive;
  783. bool isInScript;
  784. bool isShuttingDown;
  785. bool scanPinnedObjectMap;
  786. bool hasScannedInitialImplicitRoots;
  787. bool hasPendingUnpinnedObject;
  788. bool hasPendingDeleteGuestArena;
  789. bool inEndMarkOnLowMemory;
  790. bool decommitOnFinish;
  791. bool enableScanInteriorPointers;
  792. bool enableScanImplicitRoots;
  793. bool disableCollectOnAllocationHeuristics;
  794. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  795. bool disableCollection;
  796. #endif
  797. #if ENABLE_PARTIAL_GC
  798. bool enablePartialCollect;
  799. bool inPartialCollectMode;
  800. #if ENABLE_CONCURRENT_GC
  801. bool hasBackgroundFinishPartial;
  802. bool partialConcurrentNextCollection;
  803. #endif
  804. #endif
  805. #ifdef RECYCLER_STRESS
  806. bool forcePartialScanStack;
  807. bool recyclerStress;
  808. #if ENABLE_CONCURRENT_GC
  809. bool recyclerBackgroundStress;
  810. bool recyclerConcurrentStress;
  811. bool recyclerConcurrentRepeatStress;
  812. #endif
  813. #if ENABLE_PARTIAL_GC
  814. bool recyclerPartialStress;
  815. #endif
  816. #endif
  817. #if DBG
  818. bool isExternalStackSkippingGC;
  819. #endif
  820. bool skipStack;
  821. #if ENABLE_CONCURRENT_GC
  822. #if DBG
  823. bool isConcurrentGCOnIdle;
  824. bool isFinishGCOnIdle;
  825. #endif
  826. bool queueTrackedObject;
  827. bool hasPendingConcurrentFindRoot;
  828. bool priorityBoost;
  829. bool disableConcurrent;
  830. bool enableConcurrentMark;
  831. bool enableParallelMark;
  832. bool enableConcurrentSweep;
  833. uint maxParallelism; // Max # of total threads to run in parallel
  834. byte backgroundRescanCount; // for ETW events and stats
  835. byte backgroundFinishMarkCount;
  836. size_t backgroundRescanRootBytes;
  837. HANDLE concurrentWorkReadyEvent; // main thread uses this event to tell concurrent threads that the work is ready
  838. HANDLE concurrentWorkDoneEvent; // concurrent threads use this event to tell main thread that the work allocated is done
  839. HANDLE concurrentThread;
  840. template <uint parallelId>
  841. void ParallelWorkFunc();
  842. RecyclerParallelThread parallelThread1;
  843. RecyclerParallelThread parallelThread2;
  844. #if DBG
  845. // Variable indicating if the concurrent thread has exited or not
  846. // If the concurrent thread hasn't started yet, this is set to true
  847. // Once the concurrent thread starts, it sets this to false,
  848. // and when the concurrent thread exits, it sets this to true.
  849. bool concurrentThreadExited;
  850. bool disableConcurrentThreadExitedCheck;
  851. bool isProcessingTrackedObjects;
  852. #endif
  853. uint tickCountStartConcurrent;
  854. bool isAborting;
  855. #endif
  856. #if DBG
  857. bool hasIncompleteDoCollect;
  858. // This is set to true when we begin a Rescan, and set to false when either:
  859. // (1) We finish the final in-thread Rescan and are about to Mark
  860. // (2) We do a conditional ResetWriteWatch and are about to Mark
  861. // When this flag is true, we should not be modifying existing mark-related state,
  862. // including markBits and rescanState.
  863. bool isProcessingRescan;
  864. #endif
  865. Js::ConfigFlagsTable& recyclerFlagsTable;
  866. RecyclerSweep recyclerSweepInstance;
  867. RecyclerSweep * recyclerSweep;
  868. static const uint tickDiffToNextCollect = 300;
  869. #ifdef IDLE_DECOMMIT_ENABLED
  870. HANDLE concurrentIdleDecommitEvent;
  871. LONG needIdleDecommitSignal;
  872. #endif
  873. #if ENABLE_PARTIAL_GC
  874. SListBase<void *> clientTrackedObjectList;
  875. ArenaAllocator clientTrackedObjectAllocator;
  876. size_t partialUncollectedAllocBytes;
  877. // Dynamic Heuristics for partial GC
  878. size_t uncollectedNewPageCountPartialCollect;
  879. #endif
  880. uint tickCountNextCollection;
  881. uint tickCountNextFinishCollection;
  882. void (*outOfMemoryFunc)();
  883. #ifdef RECYCLER_TEST_SUPPORT
  884. BOOL (*checkFn)(char* addr, size_t size);
  885. #endif
  886. ExternalRootMarker externalRootMarker;
  887. void * externalRootMarkerContext;
  888. #ifdef PROFILE_EXEC
  889. Js::Profiler * profiler;
  890. Js::Profiler * backgroundProfiler;
  891. PageAllocator backgroundProfilerPageAllocator;
  892. DListBase<ArenaAllocator> backgroundProfilerArena;
  893. #endif
  894. // destruct autoHeap after backgroundProfilerPageAllocator;
  895. HeapInfo autoHeap;
  896. #ifdef PROFILE_MEM
  897. RecyclerMemoryData * memoryData;
  898. #endif
  899. ThreadContextId mainThreadId;
  900. #if DBG
  901. uint heapBlockCount;
  902. bool disableThreadAccessCheck;
  903. #endif
  904. #if DBG || defined(RECYCLER_STATS)
  905. bool isForceSweeping;
  906. #endif
  907. #ifdef NTBUILD
  908. RecyclerWatsonTelemetryBlock localTelemetryBlock;
  909. RecyclerWatsonTelemetryBlock * telemetryBlock;
  910. #endif
  911. #ifdef RECYCLER_STATS
  912. RecyclerCollectionStats collectionStats;
  913. void PrintHeapBlockStats(char16 const * name, HeapBlock::HeapBlockType type);
  914. void PrintHeapBlockMemoryStats(char16 const * name, HeapBlock::HeapBlockType type);
  915. void PrintCollectStats();
  916. void PrintHeuristicCollectionStats();
  917. void PrintMarkCollectionStats();
  918. void PrintBackgroundCollectionStats();
  919. void PrintMemoryStats();
  920. void PrintBackgroundCollectionStat(RecyclerCollectionStats::MarkData const& markData);
  921. #endif
  922. #ifdef RECYCLER_TRACE
  923. CollectionParam collectionParam;
  924. #endif
  925. #ifdef RECYCLER_MEMORY_VERIFY
  926. uint verifyPad;
  927. bool verifyEnabled;
  928. #endif
  929. #ifdef RECYCLER_DUMP_OBJECT_GRAPH
  930. friend class RecyclerObjectGraphDumper;
  931. RecyclerObjectGraphDumper * objectGraphDumper;
  932. public:
  933. bool dumpObjectOnceOnCollect;
  934. #endif
  935. public:
  936. Recycler(AllocationPolicyManager * policyManager, IdleDecommitPageAllocator * pageAllocator, void(*outOfMemoryFunc)(), Js::ConfigFlagsTable& flags);
  937. ~Recycler();
  938. void Initialize(const bool forceInThread, JsUtil::ThreadService *threadService, const bool deferThreadStartup = false
  939. #ifdef RECYCLER_PAGE_HEAP
  940. , PageHeapMode pageheapmode = PageHeapMode::PageHeapModeOff
  941. , bool captureAllocCallStack = false
  942. , bool captureFreeCallStack = false
  943. #endif
  944. );
  945. Js::ConfigFlagsTable& GetRecyclerFlagsTable() const { return this->recyclerFlagsTable; }
  946. void SetMemProtectMode();
  947. bool IsMemProtectMode();
  948. size_t GetUsedBytes();
  949. void LogMemProtectHeapSize(bool fromGC);
  950. char* Realloc(void* buffer, DECLSPEC_GUARD_OVERFLOW size_t existingBytes, DECLSPEC_GUARD_OVERFLOW size_t requestedBytes, bool truncate = true);
  951. #ifdef NTBUILD
  952. void SetTelemetryBlock(RecyclerWatsonTelemetryBlock * telemetryBlock) { this->telemetryBlock = telemetryBlock; }
  953. #endif
  954. void Prime();
  955. void* GetOwnerContext() { return (void*) this->collectionWrapper; }
  956. PageAllocator * GetPageAllocator() { return threadPageAllocator; }
  957. bool NeedOOMRescan() const;
  958. void SetNeedOOMRescan();
  959. void ClearNeedOOMRescan();
  960. BOOL RequestConcurrentWrapperCallback();
  961. BOOL CollectionInProgress() const;
  962. BOOL IsExiting() const;
  963. BOOL IsSweeping() const;
  964. #ifdef RECYCLER_PAGE_HEAP
  965. inline bool ShouldCapturePageHeapFreeStack() const { return capturePageHeapFreeStack; }
  966. #else
  967. inline bool ShouldCapturePageHeapFreeStack() const { return false; }
  968. #endif
  969. void SetIsThreadBound();
  970. void SetIsScriptActive(bool isScriptActive);
  971. void SetIsInScript(bool isInScript);
  972. bool ShouldIdleCollectOnExit();
  973. void ScheduleNextCollection();
  974. IdleDecommitPageAllocator * GetRecyclerLeafPageAllocator();
  975. IdleDecommitPageAllocator * GetRecyclerPageAllocator();
  976. IdleDecommitPageAllocator * GetRecyclerLargeBlockPageAllocator();
  977. #ifdef RECYCLER_WRITE_BARRIER_ALLOC_SEPARATE_PAGE
  978. IdleDecommitPageAllocator * GetRecyclerWithBarrierPageAllocator();
  979. #endif
  980. BOOL IsShuttingDown() const { return this->isShuttingDown; }
  981. #if ENABLE_CONCURRENT_GC
  982. #if DBG
  983. BOOL IsConcurrentMarkEnabled() const { return enableConcurrentMark; }
  984. BOOL IsConcurrentSweepEnabled() const { return enableConcurrentSweep; }
  985. #endif
  986. template <CollectionFlags flags>
  987. BOOL FinishConcurrent();
  988. void ShutdownThread();
  989. bool EnableConcurrent(JsUtil::ThreadService *threadService, bool startAllThreads);
  990. void DisableConcurrent();
  991. void StartQueueTrackedObject();
  992. bool DoQueueTrackedObject() const;
  993. void PrepareSweep();
  994. #endif
  995. template <CollectionFlags flags>
  996. void SetupPostCollectionFlags();
  997. void EnsureNotCollecting();
  998. #if ENABLE_CONCURRENT_GC
  999. bool QueueTrackedObject(FinalizableObject * trackableObject);
  1000. #endif
  1001. // FindRoots
  1002. void TryMarkNonInterior(void* candidate, void* parentReference = nullptr);
  1003. void TryMarkInterior(void *candidate, void* parentReference = nullptr);
// True while the current collection was initiated for cache cleanup.
bool InCacheCleanupCollection() { return inCacheCleanupCollection; }
// Acknowledges the cache-cleanup collection; must only be called while the flag is set.
void ClearCacheCleanupCollection() { Assert(inCacheCleanupCollection); inCacheCleanupCollection = false; }
  1006. // Finalizer support
  1007. void SetExternalRootMarker(ExternalRootMarker fn, void * context);
  1008. ArenaAllocator * CreateGuestArena(char16 const * name, void (*outOfMemoryFunc)());
  1009. void DeleteGuestArena(ArenaAllocator * arenaAllocator);
// Adds an externally-owned arena to the root-scan list; returns the list
// node so the caller can later unregister via the ArenaData** overload.
ArenaData ** RegisterExternalGuestArena(ArenaData* guestArena)
{
return externalGuestArenaList.PrependNode(&NoThrowHeapAllocator::Instance, guestArena);
}
// Removes an external guest arena (looked up by value) from the root-scan list.
void UnregisterExternalGuestArena(ArenaData* guestArena)
{
externalGuestArenaList.Remove(&NoThrowHeapAllocator::Instance, guestArena);
// Any time a root is removed during a GC, it indicates that an exhaustive
// collection is likely going to have work to do so trigger an exhaustive
// candidate GC to indicate this fact
this->CollectNow<CollectExhaustiveCandidate>();
}
// Removes an external guest arena by its list-node pointer (as returned by
// RegisterExternalGuestArena), avoiding a value search.
void UnregisterExternalGuestArena(ArenaData** guestArena)
{
externalGuestArenaList.RemoveElement(&NoThrowHeapAllocator::Instance, guestArena);
// Any time a root is removed during a GC, it indicates that an exhaustive
// collection is likely going to have work to do so trigger an exhaustive
// candidate GC to indicate this fact
this->CollectNow<CollectExhaustiveCandidate>();
}
  1030. #ifdef RECYCLER_TEST_SUPPORT
  1031. void SetCheckFn(BOOL(*checkFn)(char* addr, size_t size));
  1032. #endif
  1033. void SetCollectionWrapper(RecyclerCollectionWrapper * wrapper);
  1034. static size_t GetAlignedSize(size_t size) { return HeapInfo::GetAlignedSize(size); }
  1035. HeapInfo* GetAutoHeap() { return &autoHeap; }
  1036. template <CollectionFlags flags>
  1037. BOOL CollectNow();
  1038. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  1039. void DisplayMemStats();
  1040. #endif
  1041. void AddExternalMemoryUsage(size_t size);
  1042. bool NeedDispose() { return this->hasDisposableObject; }
  1043. template <CollectionFlags flags>
  1044. bool FinishDisposeObjectsNow();
  1045. bool RequestExternalMemoryAllocation(size_t size);
  1046. void ReportExternalMemoryFailure(size_t size);
  1047. void ReportExternalMemoryFree(size_t size);
  1048. // ExternalAllocFunc returns true when allocation succeeds
  1049. template <typename ExternalAllocFunc>
  1050. bool DoExternalAllocation(size_t size, ExternalAllocFunc externalAllocFunc);
  1051. #ifdef TRACE_OBJECT_LIFETIME
  1052. #define DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes) \
  1053. inline char* AllocFunc##Trace(size_t size) \
  1054. { \
  1055. return AllocWithAttributesFunc<(ObjectInfoBits)(attributes | TraceBit), /* nothrow = */ false>(size); \
  1056. }
  1057. #else
  1058. #define DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributeFunc, attributes)
  1059. #endif
  1060. #define DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocWithAttributesFunc, attributes) \
  1061. inline char * AllocFunc(DECLSPEC_GUARD_OVERFLOW size_t size) \
  1062. { \
  1063. return AllocWithAttributesFunc<attributes, /* nothrow = */ false>(size); \
  1064. } \
  1065. __forceinline char * AllocFunc##Inlined(DECLSPEC_GUARD_OVERFLOW size_t size) \
  1066. { \
  1067. return AllocWithAttributesFunc##Inlined<attributes, /* nothrow = */ false>(size); \
  1068. } \
  1069. DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes);
  1070. #define DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocWithAttributesFunc, attributes) \
  1071. inline char * NoThrow##AllocFunc(DECLSPEC_GUARD_OVERFLOW size_t size) \
  1072. { \
  1073. return AllocWithAttributesFunc<attributes, /* nothrow = */ true>(size); \
  1074. } \
  1075. inline char * NoThrow##AllocFunc##Inlined(DECLSPEC_GUARD_OVERFLOW size_t size) \
  1076. { \
  1077. return AllocWithAttributesFunc##Inlined<attributes, /* nothrow = */ true>(size); \
  1078. } \
  1079. DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes);
  1080. #define DEFINE_RECYCLER_ALLOC(AllocFunc, attributes) DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocWithAttributes, attributes)
  1081. #define DEFINE_RECYCLER_ALLOC_ZERO(AllocFunc, attributes) DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocZeroWithAttributes, attributes)
  1082. #define DEFINE_RECYCLER_NOTHROW_ALLOC(AllocFunc, attributes) DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocWithAttributes, attributes)
  1083. #define DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocFunc, attributes) DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocZeroWithAttributes, attributes)
  1084. #if GLOBAL_ENABLE_WRITE_BARRIER && !defined(_WIN32)
  1085. DEFINE_RECYCLER_ALLOC(Alloc, WithBarrierBit);
  1086. DEFINE_RECYCLER_ALLOC_ZERO(AllocZero, WithBarrierBit);
  1087. DEFINE_RECYCLER_ALLOC(AllocFinalized, FinalizableWithBarrierObjectBits);
  1088. DEFINE_RECYCLER_ALLOC(AllocTracked, ClientTrackableObjectWithBarrierBits);
  1089. DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTracked, ClientTrackableObjectWithBarrierBits);
  1090. #else
  1091. DEFINE_RECYCLER_ALLOC(Alloc, NoBit);
  1092. DEFINE_RECYCLER_ALLOC_ZERO(AllocZero, NoBit);
  1093. DEFINE_RECYCLER_ALLOC(AllocFinalized, FinalizableObjectBits);
  1094. DEFINE_RECYCLER_ALLOC(AllocTracked, ClientTrackableObjectBits);
  1095. DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTracked, ClientFinalizableObjectBits);
  1096. #endif
  1097. #ifdef RECYCLER_WRITE_BARRIER_ALLOC
  1098. DEFINE_RECYCLER_ALLOC(AllocWithBarrier, WithBarrierBit);
  1099. DEFINE_RECYCLER_ALLOC_ZERO(AllocZeroWithBarrier, WithBarrierBit);
  1100. DEFINE_RECYCLER_ALLOC(AllocFinalizedWithBarrier, FinalizableWithBarrierObjectBits);
  1101. DEFINE_RECYCLER_ALLOC(AllocTrackedWithBarrier, ClientTrackableObjectWithBarrierBits);
  1102. DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTrackedWithBarrier, ClientFinalizableObjectWithBarrierBits);
  1103. #endif
  1104. DEFINE_RECYCLER_ALLOC(AllocLeaf, LeafBit);
  1105. DEFINE_RECYCLER_ALLOC(AllocFinalizedLeaf, FinalizableLeafBits);
  1106. DEFINE_RECYCLER_ALLOC(AllocTrackedLeaf, ClientTrackableLeafBits);
  1107. DEFINE_RECYCLER_ALLOC_ZERO(AllocLeafZero, LeafBit);
  1108. DEFINE_RECYCLER_ALLOC_ZERO(AllocZeroTrackedLeaf, ClientTrackableLeafBits);
  1109. DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocImplicitRootLeaf, ImplicitRootLeafBits);
  1110. DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocImplicitRoot, ImplicitRootBit);
// Allocate an object whose attributes include an enum-class tag; asserts that
// at least one bit under EnumClassMask is actually set. Throws on OOM.
template <ObjectInfoBits enumClass>
char * AllocEnumClass(DECLSPEC_GUARD_OVERFLOW size_t size)
{
Assert((enumClass & EnumClassMask) != 0);
//Assert((enumClass & ~EnumClassMask & ~WithBarrierBit) == 0);
return AllocWithAttributes<(ObjectInfoBits)(enumClass), /* nothrow = */ false>(size);
}
// Allocate with caller-supplied attribute bits, unvalidated. Throws on OOM.
template <ObjectInfoBits infoBits>
char * AllocWithInfoBits(DECLSPEC_GUARD_OVERFLOW size_t size)
{
return AllocWithAttributes<infoBits, /* nothrow = */ false>(size);
}
// --- Weak reference support ---
template<typename T>
RecyclerWeakReference<T>* CreateWeakReferenceHandle(T* pStrongReference);
uint GetWeakReferenceCleanupId() const { return weakReferenceCleanupId; }
// Returns true if a handle already existed for pStrongReference (see .cpp for exact contract).
template<typename T>
bool FindOrCreateWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **ppWeakRef);
template<typename T>
bool TryGetWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **weakReference);
// --- Small-allocator layout queries (used by JIT'ed inline allocation) ---
// Address of the per-size-category bump allocator for the given attribute bits.
template <ObjectInfoBits attributes>
char* GetAddressOfAllocator(size_t sizeCat)
{
Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
return (char*)this->autoHeap.GetBucket<attributes>(sizeCat).GetAllocator();
}
// Byte offset of the allocator's end-address field (bump-allocation limit).
template <ObjectInfoBits attributes>
uint32 GetEndAddressOffset(size_t sizeCat)
{
Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
return this->autoHeap.GetBucket<attributes>(sizeCat).GetAllocator()->GetEndAddressOffset();
}
// Byte offset of the allocator's free-list head field.
template <ObjectInfoBits attributes>
uint32 GetFreeObjectListOffset(size_t sizeCat)
{
Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
return this->autoHeap.GetBucket<attributes>(sizeCat).GetAllocator()->GetFreeObjectListOffset();
}
void GetNormalHeapBlockAllocatorInfoForNativeAllocation(size_t sizeCat, void*& allocatorAddress, uint32& endAddressOffset, uint32& freeListOffset, bool allowBumpAllocation, bool isOOPJIT);
static void GetNormalHeapBlockAllocatorInfoForNativeAllocation(void* recyclerAddr, size_t sizeCat, void*& allocatorAddress, uint32& endAddressOffset, uint32& freeListOffset, bool allowBumpAllocation, bool isOOPJIT);
bool AllowNativeCodeBumpAllocation();
static void TrackNativeAllocatedMemoryBlock(Recycler * recycler, void * memBlock, size_t sizeCat);
// Recycler memory is garbage collected, never explicitly freed; this exists
// only to satisfy an allocator-shaped interface and must never be called.
void Free(void* buffer, size_t size)
{
Assert(false);
}
// --- Explicit free (opt-in early reclamation of known-dead objects) ---
bool ExplicitFreeLeaf(void* buffer, size_t size);
bool ExplicitFreeNonLeaf(void* buffer, size_t size);
template <ObjectInfoBits attributes>
bool ExplicitFreeInternalWrapper(void* buffer, size_t allocSize);
template <ObjectInfoBits attributes, typename TBlockAttributes>
bool ExplicitFreeInternal(void* buffer, size_t size, size_t sizeCat);
size_t GetAllocSize(size_t size);
template <typename TBlockAttributes>
void SetExplicitFreeBitOnSmallBlock(HeapBlock* heapBlock, size_t sizeCat, void* buffer, ObjectInfoBits attributes);
// Leaf allocation routed through an explicit HeapInfo. Throws on OOM.
char* HeapAllocR(HeapInfo* eHeap, DECLSPEC_GUARD_OVERFLOW size_t size)
{
return RealAlloc<LeafBit, /* nothrow = */ false>(eHeap, size);
}
void HeapFree(HeapInfo* eHeap,void* candidate);
void EnumerateObjects(ObjectInfoBits infoBits, void (*CallBackFunction)(void * address, size_t size));
// External (pinned) root management; optional count returns the new ref count.
void RootAddRef(void* obj, uint *count = nullptr);
void RootRelease(void* obj, uint *count = nullptr);
template <ObjectInfoBits attributes, bool nothrow>
inline char* RealAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size);
template <ObjectInfoBits attributes, bool isSmallAlloc, bool nothrow>
inline char* RealAllocFromBucket(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size);
void EnterIdleDecommit();
void LeaveIdleDecommit();
void DisposeObjects();
BOOL IsValidObject(void* candidate, size_t minimumSize = 0);
#if DBG
void SetDisableThreadAccessCheck();
void SetDisableConcurrentThreadExitedCheck();
void CheckAllocExternalMark() const;
BOOL IsFreeObject(void * candidate);
BOOL IsReentrantState() const;
#endif
#if DBG_DUMP
void PrintMarkStack();
#endif
#ifdef PROFILE_EXEC
Js::Profiler * GetProfiler() const { return this->profiler; }
ArenaAllocator * AddBackgroundProfilerArena();
void ReleaseBackgroundProfilerArena(ArenaAllocator * arena);
void SetProfiler(Js::Profiler * profiler, Js::Profiler * backgroundProfiler);
#endif
#ifdef RECYCLER_MEMORY_VERIFY
// --- Heap verification: pad allocations and check fill patterns for corruption ---
BOOL VerifyEnabled() const { return verifyEnabled; }
uint GetVerifyPad() const { return verifyPad; }
void Verify(Js::Phase phase);
static void VerifyCheck(BOOL cond, char16 const * msg, void * address, void * corruptedAddress);
static void VerifyCheckFill(void * address, size_t size);
void FillCheckPad(void * address, size_t size, size_t alignedAllocSize, bool objectAlreadyInitialized);
// Convenience overload: object not yet initialized.
void FillCheckPad(void * address, size_t size, size_t alignedAllocSize)
{
FillCheckPad(address, size, alignedAllocSize, false);
}
static void FillPadNoCheck(void * address, size_t size, size_t alignedAllocSize, bool objectAlreadyInitialized);
void VerifyCheckPad(void * address, size_t size);
void VerifyCheckPadExplicitFreeList(void * address, size_t size);
// Fill byte written into pad regions and checked on free.
static const byte VerifyMemFill = 0xCA;
#endif
#ifdef RECYCLER_ZERO_MEM_CHECK
void VerifyZeroFill(void * address, size_t size);
#endif
#ifdef RECYCLER_DUMP_OBJECT_GRAPH
bool DumpObjectGraph(RecyclerObjectGraphDumper::Param * param = nullptr);
void DumpObjectDescription(void *object);
#endif
#ifdef LEAK_REPORT
void ReportLeaks();
void ReportLeaksOnProcessDetach();
#endif
#ifdef CHECK_MEMORY_LEAK
void CheckLeaks(char16 const * header);
void CheckLeaksOnProcessDetach(char16 const * header);
#endif
#ifdef RECYCLER_TRACE
void SetDomCollect(bool isDomCollect) { collectionParam.domCollect = isDomCollect; }
void CaptureCollectionParam(CollectionFlags flags, bool repeat = false);
#endif
private:
// RecyclerRootPtr has an implicit conversion to raw pointers; these private,
// unimplemented overloads prevent it from being passed to RootAddRef/RootRelease
// directly (callers must go through the raw-pointer overloads deliberately).
template <typename T>
void RootAddRef(RecyclerRootPtr<T>& ptr, uint *count = nullptr);
template <typename T>
void RootRelease(RecyclerRootPtr<T>& ptr, uint *count = nullptr);
// Collection entry points, parameterized on the requested CollectionFlags.
template <CollectionFlags flags>
BOOL CollectInternal();
template <CollectionFlags flags>
BOOL Collect();
template <CollectionFlags flags>
BOOL CollectWithHeuristic();
template <CollectionFlags flags>
BOOL CollectWithExhaustiveCandidate();
template <CollectionFlags flags>
BOOL GetPartialFlag();
bool NeedExhaustiveRepeatCollect() const;
#if DBG
bool ExpectStackSkip() const;
#endif
// Sentinel for "no valid root-scan byte count".
static size_t const InvalidScanRootBytes = (size_t)-1;
// Small Allocator
template <typename SmallHeapBlockAllocatorType>
void AddSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
template <typename SmallHeapBlockAllocatorType>
void RemoveSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
template <ObjectInfoBits attributes, typename SmallHeapBlockAllocatorType>
char * SmallAllocatorAlloc(SmallHeapBlockAllocatorType * allocator, size_t sizeCat, size_t size);
// Allocation
template <ObjectInfoBits attributes, bool nothrow>
inline char * AllocWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size);
// Out-of-line shim over the inlined implementation (keeps call sites small).
template <ObjectInfoBits attributes, bool nothrow>
char * AllocWithAttributes(DECLSPEC_GUARD_OVERFLOW size_t size)
{
return AllocWithAttributesInlined<attributes, nothrow>(size);
}
template <ObjectInfoBits attributes, bool nothrow>
inline char* AllocZeroWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size);
// Zero-initializing counterpart of AllocWithAttributes.
template <ObjectInfoBits attributes, bool nothrow>
char* AllocZeroWithAttributes(DECLSPEC_GUARD_OVERFLOW size_t size)
{
return AllocZeroWithAttributesInlined<attributes, nothrow>(size);
}
char* AllocWeakReferenceEntry(DECLSPEC_GUARD_OVERFLOW size_t size)
{
return AllocWithAttributes<WeakReferenceEntryBits, /* nothrow = */ false>(size);
}
// True when the dispose deadline has passed and disposable objects are pending.
// NOTE(review): GetTickCount wraps every ~49.7 days, so the plain '>' compare
// can misfire across a wrap — confirm tickCountNextDispose handles this.
bool NeedDisposeTimed()
{
DWORD ticks = ::GetTickCount();
return (ticks > tickCountNextDispose && this->hasDisposableObject);
}
char* TryLargeAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size, ObjectInfoBits attributes, bool nothrow);
template <bool nothrow>
char* LargeAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size, ObjectInfoBits attributes);
void OutOfMemory();
// Collection
BOOL DoCollect(CollectionFlags flags);
BOOL DoCollectWrapped(CollectionFlags flags);
BOOL CollectOnAllocatorThread();
#if DBG
void ResetThreadId();
#endif
// --- Root finding / scanning ---
template <bool background>
size_t ScanPinnedObjects();
size_t ScanStack();
size_t ScanArena(ArenaData * alloc, bool background);
void ScanImplicitRoots();
void ScanInitialImplicitRoots();
void ScanNewImplicitRoots();
size_t FindRoots();
size_t TryMarkArenaMemoryBlockList(ArenaMemoryBlock * memoryBlocks);
size_t TryMarkBigBlockList(BigBlock * memoryBlocks);
#if ENABLE_CONCURRENT_GC
#if FALSE // REVIEW: remove this code since not using
size_t TryMarkBigBlockListWithWriteWatch(BigBlock * memoryBlocks);
#endif
#endif
// Mark
void ResetMarks(ResetMarkFlags flags);
void Mark();
bool EndMark();
bool EndMarkCheckOOMRescan();
void EndMarkOnLowMemory();
#if ENABLE_CONCURRENT_GC
void DoParallelMark();
void DoBackgroundParallelMark();
#endif
size_t RootMark(CollectionState markState);
void ProcessMark(bool background);
void ProcessParallelMark(bool background, MarkContext * markContext);
template <bool parallel, bool interior>
void ProcessMarkContext(MarkContext * markContext);
public:
// Quick mark query against the heap block map (no block lookup needed).
bool IsObjectMarked(void* candidate) { return this->heapBlockMap.IsMarked(candidate); }
#ifdef RECYCLER_STRESS
bool StressCollectNow();
#endif
private:
HeapBlock* FindHeapBlock(void * candidate);
  1333. struct FindBlockCache
  1334. {
  1335. FindBlockCache():
  1336. heapBlock(nullptr),
  1337. candidate(nullptr)
  1338. {
  1339. }
  1340. HeapBlock* heapBlock;
  1341. void* candidate;
  1342. } blockCache;
// --- Object/memory scanning during mark ---
inline void ScanObjectInline(void ** obj, size_t byteCount);
inline void ScanObjectInlineInterior(void ** obj, size_t byteCount);
template <bool doSpecialMark>
inline void ScanMemoryInline(void ** obj, size_t byteCount);
// Guard against the zero-length case before dispatching to the inline scan.
template <bool doSpecialMark>
void ScanMemory(void ** obj, size_t byteCount) { if (byteCount != 0) { ScanMemoryInline<doSpecialMark>(obj, byteCount); } }
bool AddMark(void * candidate, size_t byteCount);
// Sweep
#if ENABLE_PARTIAL_GC
bool Sweep(size_t rescanRootBytes = (size_t)-1, bool concurrent = false, bool adjustPartialHeuristics = false);
#else
bool Sweep(bool concurrent = false);
#endif
void SweepWeakReference();
void SweepHeap(bool concurrent, RecyclerSweep& recyclerSweep);
void FinishSweep(RecyclerSweep& recyclerSweep);
#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
void FinishConcurrentSweep();
#endif
bool FinishDisposeObjects();
template <CollectionFlags flags>
bool FinishDisposeObjectsWrapped();
// end collection
void FinishCollection();
void FinishCollection(bool needConcurrentSweep);
void EndCollection();
void ResetCollectionState();
void ResetMarkCollectionState();
void ResetHeuristicCounters();
void ResetPartialHeuristicCounters();
// Collection-state predicates (defined in the .cpp against collectionState).
BOOL IsMarkState() const;
BOOL IsFindRootsState() const;
BOOL IsInThreadFindRootsState() const;
template <Js::Phase phase>
void CollectionBegin();
template <Js::Phase phase>
void CollectionEnd();
#if ENABLE_PARTIAL_GC
void ProcessClientTrackedObjects();
bool PartialCollect(bool concurrent);
void FinishPartialCollect(RecyclerSweep * recyclerSweep = nullptr);
void ClearPartialCollect();
#if ENABLE_CONCURRENT_GC
void BackgroundFinishPartialCollect(RecyclerSweep * recyclerSweep);
#endif
#endif
size_t RescanMark(DWORD waitTime);
size_t FinishMark(DWORD waitTime);
size_t FinishMarkRescan(bool background);
#if ENABLE_CONCURRENT_GC
void ProcessTrackedObjects();
#endif
// True while in a callback state where allocation is permitted.
BOOL IsAllocatableCallbackState()
{
return (collectionState & (Collection_PostSweepRedeferralCallback | Collection_PostCollectionCallback));
}
#if ENABLE_CONCURRENT_GC
// Concurrent GC
BOOL IsConcurrentEnabled() const { return this->enableConcurrentMark || this->enableParallelMark || this->enableConcurrentSweep; }
BOOL IsConcurrentMarkState() const;
BOOL IsConcurrentMarkExecutingState() const;
BOOL IsConcurrentResetMarksState() const;
BOOL IsConcurrentFindRootState() const;
BOOL IsConcurrentExecutingState() const;
BOOL IsConcurrentSweepExecutingState() const;
BOOL IsConcurrentSweepSetupState() const;
BOOL IsConcurrentSweepState() const;
BOOL IsConcurrentState() const;
// True only when ALL Collection_ConcurrentSweep bits are set (hence ==, not &).
BOOL InConcurrentSweep()
{
return ((collectionState & Collection_ConcurrentSweep) == Collection_ConcurrentSweep);
}
#if DBG
BOOL IsConcurrentFinishedState() const;
#endif // DBG
bool InitializeConcurrent(JsUtil::ThreadService* threadService);
bool AbortConcurrent(bool restoreState);
void FinalizeConcurrent(bool restoreState);
// Background GC thread entry points and exception filtering.
static unsigned int CALLBACK StaticThreadProc(LPVOID lpParameter);
static int ExceptFilter(LPEXCEPTION_POINTERS pEP);
DWORD ThreadProc();
void DoBackgroundWork(bool forceForeground = false);
static void CALLBACK StaticBackgroundWorkCallback(void * callbackData);
BOOL CollectOnConcurrentThread();
// Kick off / coordinate a concurrent collection phase.
bool StartConcurrent(CollectionState const state);
BOOL StartBackgroundMarkCollect();
BOOL StartSynchronousBackgroundMark();
BOOL StartAsynchronousBackgroundMark();
BOOL StartBackgroundMark(bool foregroundResetMark, bool foregroundFindRoots);
BOOL StartConcurrentSweepCollect();
template <CollectionFlags flags>
BOOL TryFinishConcurrentCollect();
BOOL WaitForConcurrentThread(DWORD waitTime);
void FlushBackgroundPages();
BOOL FinishConcurrentCollect(CollectionFlags flags);
void FinishTransferSwept(CollectionFlags flags);
BOOL FinishConcurrentCollectWrapped(CollectionFlags flags);
// Work performed on the background thread.
void BackgroundMark();
void BackgroundResetMarks();
void PrepareBackgroundFindRoots();
void RevertPrepareBackgroundFindRoots();
size_t BackgroundFindRoots();
size_t BackgroundScanStack();
size_t BackgroundRepeatMark();
size_t BackgroundRescan(RescanFlags rescanFlags);
void BackgroundResetWriteWatchAll();
size_t BackgroundFinishMark();
char* GetScriptThreadStackTop();
void SweepPendingObjects(RecyclerSweep& recyclerSweep);
void ConcurrentTransferSweptObjects(RecyclerSweep& recyclerSweep);
#if ENABLE_PARTIAL_GC
void ConcurrentPartialTransferSweptObjects(RecyclerSweep& recyclerSweep);
#endif // ENABLE_PARTIAL_GC
#endif // ENABLE_CONCURRENT_GC
bool ForceSweepObject();
// Free-notification hooks (feed ETW/leak tracking when memory is released).
void NotifyFree(__in char * address, size_t size);
template <typename T>
void NotifyFree(T * heapBlock);
void CleanupPendingUnroot();
#ifdef ENABLE_JS_ETW
// Batches free-memory events so ETW records are written in bulk.
ULONG EventWriteFreeMemoryBlock(HeapBlock* heapBlock);
void FlushFreeRecord();
void AppendFreeMemoryETWRecord(__in char *address, size_t size);
static const uint BulkFreeMemoryCount = 400;
uint bulkFreeMemoryWrittenCount;
struct ETWFreeRecord {
char* memoryAddress;
uint32 objectSize;
};
ETWFreeRecord etwFreeRecords[BulkFreeMemoryCount];
#endif
template <ObjectInfoBits attributes>
bool IntegrateBlock(char * blockAddress, PageSegment * segment, size_t allocSize, size_t objectSize);
// Heap-block and bucket types need access to the recycler's internals.
template <class TBlockAttributes> friend class SmallHeapBlockT;
template <class TBlockAttributes> friend class SmallNormalHeapBlockT;
template <class TBlockAttributes> friend class SmallLeafHeapBlockT;
template <class TBlockAttributes> friend class SmallFinalizableHeapBlockT;
friend class LargeHeapBlock;
friend class HeapInfo;
friend class LargeHeapBucket;
template <typename TBlockType>
friend class HeapBucketT;
template <typename TBlockType>
friend class SmallNormalHeapBucketBase;
template <typename T, ObjectInfoBits attributes>
friend class RecyclerFastAllocator;
#ifdef RECYCLER_TRACE
void PrintCollectTrace(Js::Phase phase, bool finish = false, bool noConcurrentWork = false);
#endif
#ifdef RECYCLER_VERIFY_MARK
// Post-mark validation: re-walks roots/arenas and checks mark bits.
void VerifyMark();
void VerifyMarkRoots();
void VerifyMarkStack();
void VerifyMarkArena(ArenaData * arena);
void VerifyMarkBigBlockList(BigBlock * memoryBlocks);
void VerifyMarkArenaMemoryBlockList(ArenaMemoryBlock * memoryBlocks);
bool VerifyMark(void * objectAddress, void * target);
bool VerifyMark(void * target);
#endif
#if DBG_DUMP
bool forceTraceMark;
#endif
bool isHeapEnumInProgress;
#if DBG
bool allowAllocationDuringHeapEnum;
bool allowAllocationDuringRenentrance;
#ifdef ENABLE_PROJECTION
bool isInRefCountTrackingForProjection;
#endif
#endif
// There are two scenarios we allow limited allocation but disallow GC during those allocations:
// in heapenum when we allocate PropertyRecord, and
// in projection ExternalMark allowing allocating VarToDispEx. This is the common flag
// while we have debug only flag for each of the two scenarios.
bool isCollectionDisabled;
#ifdef TRACK_ALLOC
public:
// Records type info for the NEXT allocation (fluent: returns this recycler).
Recycler * TrackAllocInfo(TrackAllocData const& data);
void ClearTrackAllocInfo(TrackAllocData* data = NULL);
#ifdef PROFILE_RECYCLER_ALLOC
void PrintAllocStats();
private:
static bool DoProfileAllocTracker();
void InitializeProfileAllocTracker();
void TrackUnallocated(__in char* address, __in char *endAddress, size_t sizeCat);
void TrackAllocCore(void * object, size_t size, const TrackAllocData& trackAllocData, bool traceLifetime = false);
void* TrackAlloc(void * object, size_t size, const TrackAllocData& trackAllocData, bool traceLifetime = false);
void TrackIntegrate(__in_ecount(blockSize) char * blockAddress, size_t blockSize, size_t allocSize, size_t objectSize, const TrackAllocData& trackAllocData);
BOOL TrackFree(const char* address, size_t size);
void TrackAllocWeakRef(RecyclerWeakReferenceBase * weakRef);
void TrackFreeWeakRef(RecyclerWeakReferenceBase * weakRef);
  1534. struct TrackerData
  1535. {
  1536. TrackerData(type_info const * typeinfo, bool isArray) : typeinfo(typeinfo), isArray(isArray),
  1537. ItemSize(0), ItemCount(0), AllocCount(0), ReqSize(0), AllocSize(0), FreeCount(0), FreeSize(0), TraceLifetime(false)
  1538. #ifdef PERF_COUNTERS
  1539. , counter(PerfCounter::RecyclerTrackerCounterSet::GetPerfCounter(typeinfo, isArray))
  1540. , sizeCounter(PerfCounter::RecyclerTrackerCounterSet::GetPerfSizeCounter(typeinfo, isArray))
  1541. #endif
  1542. {
  1543. }
  1544. type_info const * typeinfo;
  1545. bool isArray;
  1546. #ifdef TRACE_OBJECT_LIFETIME
  1547. bool TraceLifetime;
  1548. #endif
  1549. size_t ItemSize;
  1550. size_t ItemCount;
  1551. int AllocCount;
  1552. int64 ReqSize;
  1553. int64 AllocSize;
  1554. int FreeCount;
  1555. int64 FreeSize;
  1556. #ifdef PERF_COUNTERS
  1557. PerfCounter::Counter& counter;
  1558. PerfCounter::Counter& sizeCounter;
  1559. #endif
  1560. static TrackerData EmptyData;
  1561. static TrackerData ExplicitFreeListObjectData;
  1562. };
TrackerData * GetTrackerData(void * address);
void SetTrackerData(void * address, TrackerData * data);
// Pairs the scalar and array statistics for a single type_info.
struct TrackerItem
{
TrackerItem(type_info const * typeinfo) : instanceData(typeinfo, false), arrayData(typeinfo, true)
#ifdef PERF_COUNTERS
, weakRefCounter(PerfCounter::RecyclerTrackerCounterSet::GetWeakRefPerfCounter(typeinfo))
#endif
{}
TrackerData instanceData;
TrackerData arrayData;
#ifdef PERF_COUNTERS
PerfCounter::Counter& weakRefCounter;
#endif
};
// type_info -> TrackerItem, and object address -> TrackerData lookup tables.
typedef JsUtil::BaseDictionary<type_info const *, TrackerItem *, NoCheckHeapAllocator, PrimeSizePolicy, DefaultComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> TypeInfotoTrackerItemMap;
typedef JsUtil::BaseDictionary<void *, TrackerData *, NoCheckHeapAllocator, PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> PointerToTrackerDataMap;
TypeInfotoTrackerItemMap * trackerDictionary;
// Guards trackerDictionary (tracking can occur off the main thread).
CriticalSection * trackerCriticalSection;
#endif
TrackAllocData nextAllocData;
#endif
public:
// Enumeration
// RAII helper: saves collection state, puts the recycler into a non-collecting
// mark mode (optionally configured for heap enumeration), and restores on exit.
class AutoSetupRecyclerForNonCollectingMark
{
private:
Recycler& m_recycler;
bool m_setupDone;
CollectionState m_previousCollectionState;
#ifdef RECYCLER_STATS
RecyclerCollectionStats m_previousCollectionStats;
#endif
public:
AutoSetupRecyclerForNonCollectingMark(Recycler& recycler, bool setupForHeapEnumeration = false);
~AutoSetupRecyclerForNonCollectingMark();
void DoCommonSetup();
void SetupForHeapEnumeration();
};
friend class RecyclerHeapObjectInfo;
// Resolve a candidate pointer to its heap object metadata; returns false if
// the pointer is not a recognized recycler object.
bool FindImplicitRootObject(void* candidate, RecyclerHeapObjectInfo& heapObject);
bool FindHeapObject(void* candidate, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject);
bool FindHeapObjectWithClearedAllocators(void* candidate, RecyclerHeapObjectInfo& heapObject);
bool IsCollectionDisabled() const { return isCollectionDisabled; }
// Heap enumeration implies collection is disabled (asserted).
bool IsHeapEnumInProgress() const { Assert(isHeapEnumInProgress ? isCollectionDisabled : true); return isHeapEnumInProgress; }
#if DBG
// There are limited cases that we have to allow allocation during heap enumeration. GC is explicitly
// disabled during heap enumeration for these limited cases. (See DefaultRecyclerCollectionWrapper)
// The only case of allocation right now is allocating property record for string based type handler
// so we can use the propertyId as the relation Id.
// Allocation during enumeration is still frowned upon and should still be avoided if possible.
bool AllowAllocationDuringHeapEnum() const { return allowAllocationDuringHeapEnum; }
class AutoAllowAllocationDuringHeapEnum : public AutoBooleanToggle
{
public:
AutoAllowAllocationDuringHeapEnum(Recycler * recycler) : AutoBooleanToggle(&recycler->allowAllocationDuringHeapEnum) {};
};
#ifdef ENABLE_PROJECTION
bool IsInRefCountTrackingForProjection() const { return isInRefCountTrackingForProjection;}
class AutoIsInRefCountTrackingForProjection : public AutoBooleanToggle
{
public:
AutoIsInRefCountTrackingForProjection(Recycler * recycler) : AutoBooleanToggle(&recycler->isInRefCountTrackingForProjection) {};
};
#endif
#endif
// Scoped toggle: disables collection (and, in DBG, flags reentrant allocation)
// for the lifetime of the object.
class AutoAllowAllocationDuringReentrance : public AutoBooleanToggle
{
public:
AutoAllowAllocationDuringReentrance(Recycler * recycler) :
AutoBooleanToggle(&recycler->isCollectionDisabled)
#if DBG
, allowAllocationDuringRenentrance(&recycler->allowAllocationDuringRenentrance)
#endif
{};
#if DBG
private:
AutoBooleanToggle allowAllocationDuringRenentrance;
#endif
};
#ifdef HEAP_ENUMERATION_VALIDATION
typedef void(*PostHeapEnumScanCallback)(const HeapObject& heapObject, void *data);
PostHeapEnumScanCallback pfPostHeapEnumScanCallback;
// NOTE(review): member name has a typo ("Enun" for "Enum"); left as-is since
// it may be referenced from other translation units.
void *postHeapEnunScanData;
void PostHeapEnumScan(PostHeapEnumScanCallback callback, void*data);
bool IsPostEnumHeapValidationInProgress() const { return pfPostHeapEnumScanCallback != NULL; }
#endif
private:
// Map an interior pointer back to the start address of its containing object.
void* GetRealAddressFromInterior(void* candidate);
void BeginNonCollectingMark();
void EndNonCollectingMark();
#if defined(RECYCLER_DUMP_OBJECT_GRAPH) || defined(LEAK_REPORT) || defined(CHECK_MEMORY_LEAK)
public:
// Shutdown-path flags used by leak reporting.
bool IsInDllCanUnloadNow() const { return inDllCanUnloadNow; }
bool IsInDetachProcess() const { return inDetachProcess; }
void SetInDllCanUnloadNow();
void SetInDetachProcess();
private:
bool inDllCanUnloadNow;
bool inDetachProcess;
bool isPrimaryMarkContextInitialized;
#endif
#if defined(LEAK_REPORT) || defined(CHECK_MEMORY_LEAK)
template <class Fn>
void ReportOnProcessDetach(Fn fn);
void PrintPinnedObjectStackTraces();
#endif
public:
typedef void (CALLBACK *ObjectBeforeCollectCallback)(void* object, void* callbackState); // same as jsrt JsObjectBeforeCollectCallback
// same as jsrt JsObjectBeforeCollectCallbackWrapper
typedef void (CALLBACK *ObjectBeforeCollectCallbackWrapper)(ObjectBeforeCollectCallback callback, void* object, void* callbackState, void* threadContext);
// Register/clear per-object callbacks invoked before an object is collected.
void SetObjectBeforeCollectCallback(void* object,
ObjectBeforeCollectCallback callback,
void* callbackState,
ObjectBeforeCollectCallbackWrapper callbackWrapper,
void* threadContext);
void ClearObjectBeforeCollectCallbacks();
bool IsInObjectBeforeCollectCallback() const { return objectBeforeCollectCallbackState != ObjectBeforeCollectCallback_None; }
private:
// Everything needed to invoke one registered before-collect callback.
struct ObjectBeforeCollectCallbackData
{
ObjectBeforeCollectCallback callback;
void* callbackState;
void* threadContext;
ObjectBeforeCollectCallbackWrapper callbackWrapper;
ObjectBeforeCollectCallbackData() {}
ObjectBeforeCollectCallbackData(ObjectBeforeCollectCallbackWrapper callbackWrapper, ObjectBeforeCollectCallback callback, void* callbackState, void* threadContext) :
callbackWrapper(callbackWrapper), callback(callback), callbackState(callbackState), threadContext(threadContext) {}
};
typedef JsUtil::BaseDictionary<void*, ObjectBeforeCollectCallbackData, HeapAllocator,
PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> ObjectBeforeCollectCallbackMap;
ObjectBeforeCollectCallbackMap* objectBeforeCollectCallbackMap;
enum ObjectBeforeCollectCallbackState
{
ObjectBeforeCollectCallback_None,
ObjectBeforeCollectCallback_Normal, // Normal GC BeforeCollect callback
ObjectBeforeCollectCallback_Shutdown, // At shutdown invoke all BeforeCollect callback
} objectBeforeCollectCallbackState;
bool ProcessObjectBeforeCollectCallbacks(bool atShutdown = false);
#if GLOBAL_ENABLE_WRITE_BARRIER
private:
typedef JsUtil::BaseDictionary<void *, size_t, HeapAllocator, PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::AsymetricResizeLock> PendingWriteBarrierBlockMap;
PendingWriteBarrierBlockMap pendingWriteBarrierBlockMap;
public:
// Track address ranges whose write-barrier state is pending.
void RegisterPendingWriteBarrierBlock(void* address, size_t bytes);
void UnRegisterPendingWriteBarrierBlock(void* address);
#endif
#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
private:
// Intrusive global list of recyclers, used by the static WB* verification helpers.
static Recycler* recyclerList;
Recycler* next;
public:
// JIT-callable shim; forwarding "return" of a void call is intentional.
static void WBSetBitJIT(char* addr)
{
return WBSetBit(addr);
}
static void WBSetBit(char* addr);
static void WBSetBitRange(char* addr, uint length);
static void WBVerifyBitIsSet(char* addr, char* target);
static bool WBCheckIsRecyclerAddress(char* addr);
#endif
};
// Lightweight handle to one recycler-allocated object: its address, owning
// recycler, containing heap block, and a way to read/write its attribute byte.
// Populated by Recycler::FindHeapObject and friends.
class RecyclerHeapObjectInfo
{
void* m_address;
Recycler * m_recycler;
HeapBlock* m_heapBlock;
#if LARGEHEAPBLOCK_ENCODING
// Large heap blocks encode attributes in the object header (cookie-protected),
// so we hold the header instead of a raw attribute pointer in that case.
union
{
byte * m_attributes;
LargeObjectHeader * m_largeHeapBlockHeader;
};
bool isUsingLargeHeapBlock = false;
#else
byte * m_attributes;
#endif
public:
RecyclerHeapObjectInfo() : m_address(NULL), m_recycler(NULL), m_heapBlock(NULL), m_attributes(NULL) {}
RecyclerHeapObjectInfo(void* address, Recycler * recycler, HeapBlock* heapBlock, byte * attributes) :
m_address(address), m_recycler(recycler), m_heapBlock(heapBlock), m_attributes(attributes) { }
void* GetObjectAddress() const { return m_address; }
#ifdef RECYCLER_PAGE_HEAP
// Page-heap allocations are always large-block allocations in page-heap mode.
bool IsPageHeapAlloc() const
{
return isUsingLargeHeapBlock && ((LargeHeapBlock*)m_heapBlock)->InPageHeapMode();
}
void PageHeapLockPages() const
{
Assert(IsPageHeapAlloc());
((LargeHeapBlock*)m_heapBlock)->PageHeapLockPages();
}
#endif
// True if the object contains no GC pointers: either its own LeafBit is set
// or its entire heap block is a leaf block.
bool IsLeaf() const
{
#if LARGEHEAPBLOCK_ENCODING
if (isUsingLargeHeapBlock)
{
return (m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie) & LeafBit) != 0;
}
#endif
return ((*m_attributes & LeafBit) != 0 || this->m_heapBlock->IsLeafBlock());
}
// True if the object acts as a GC root without explicit pinning.
bool IsImplicitRoot() const
{
#if LARGEHEAPBLOCK_ENCODING
if (isUsingLargeHeapBlock)
{
return (m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie) & ImplicitRootBit) != 0;
}
#endif
return (*m_attributes & ImplicitRootBit) != 0;
}
bool IsObjectMarked() const { Assert(m_recycler); return m_recycler->heapBlockMap.IsMarked(m_address); }
void SetObjectMarked() { Assert(m_recycler); m_recycler->heapBlockMap.SetMark(m_address); }
// Raw attribute bits; decoded via the recycler cookie for large heap blocks.
ObjectInfoBits GetAttributes() const
{
#if LARGEHEAPBLOCK_ENCODING
if (isUsingLargeHeapBlock)
{
return (ObjectInfoBits)m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
}
#endif
return (ObjectInfoBits)*m_attributes;
}
size_t GetSize() const;
#if LARGEHEAPBLOCK_ENCODING
// Switch this handle to large-heap-block mode (attributes come from the header).
void SetLargeHeapBlockHeader(LargeObjectHeader * largeHeapBlockHeader)
{
m_largeHeapBlockHeader = largeHeapBlockHeader;
isUsingLargeHeapBlock = true;
}
#endif
// Marks the object as enumerated by the memory profiler; returns whether it
// had already been enumerated before this call.
bool SetMemoryProfilerHasEnumerated()
{
Assert(m_heapBlock);
#if LARGEHEAPBLOCK_ENCODING
if (isUsingLargeHeapBlock)
{
return SetMemoryProfilerHasEnumeratedForLargeHeapBlock();
}
#endif
bool wasMemoryProfilerOldObject = (*m_attributes & MemoryProfilerOldObjectBit) != 0;
*m_attributes |= MemoryProfilerOldObjectBit;
return wasMemoryProfilerOldObject;
}
  1809. bool ClearImplicitRootBit()
  1810. {
  1811. // This can only be called on the main thread for non-finalizable block
  1812. // As finalizable block requires that the bit not be change during concurrent mark
  1813. // since the background thread change the NewTrackBit
  1814. Assert(!m_heapBlock->IsAnyFinalizableBlock());
  1815. #ifdef RECYCLER_PAGE_HEAP
  1816. Recycler* recycler = this->m_recycler;
  1817. if (recycler->IsPageHeapEnabled() && recycler->ShouldCapturePageHeapFreeStack())
  1818. {
  1819. #ifdef STACK_BACK_TRACE
  1820. if (this->isUsingLargeHeapBlock)
  1821. {
  1822. LargeHeapBlock* largeHeapBlock = (LargeHeapBlock*)this->m_heapBlock;
  1823. if (largeHeapBlock->InPageHeapMode())
  1824. {
  1825. largeHeapBlock->CapturePageHeapFreeStack();
  1826. }
  1827. }
  1828. #endif
  1829. }
  1830. #endif
  1831. #if LARGEHEAPBLOCK_ENCODING
  1832. if (isUsingLargeHeapBlock)
  1833. {
  1834. return ClearImplicitRootBitsForLargeHeapBlock();
  1835. }
  1836. #endif
  1837. Assert(m_attributes);
  1838. bool wasImplicitRoot = (*m_attributes & ImplicitRootBit) != 0;
  1839. *m_attributes &= ~ImplicitRootBit;
  1840. return wasImplicitRoot;
  1841. }
  1842. void ExplicitFree()
  1843. {
  1844. if (*m_attributes == ObjectInfoBits::LeafBit)
  1845. {
  1846. m_recycler->ExplicitFreeLeaf(m_address, GetSize());
  1847. }
  1848. else
  1849. {
  1850. Assert(*m_attributes == ObjectInfoBits::NoBit);
  1851. m_recycler->ExplicitFreeNonLeaf(m_address, GetSize());
  1852. }
  1853. }
  1854. #if LARGEHEAPBLOCK_ENCODING
  1855. bool ClearImplicitRootBitsForLargeHeapBlock()
  1856. {
  1857. Assert(m_largeHeapBlockHeader);
  1858. byte attributes = m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
  1859. bool wasImplicitRoot = (attributes & ImplicitRootBit) != 0;
  1860. m_largeHeapBlockHeader->SetAttributes(m_recycler->Cookie, attributes & ~ImplicitRootBit);
  1861. return wasImplicitRoot;
  1862. }
  1863. bool SetMemoryProfilerHasEnumeratedForLargeHeapBlock()
  1864. {
  1865. Assert(m_largeHeapBlockHeader);
  1866. byte attributes = m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
  1867. bool wasMemoryProfilerOldObject = (attributes & MemoryProfilerOldObjectBit) != 0;
  1868. m_largeHeapBlockHeader->SetAttributes(m_recycler->Cookie, attributes | MemoryProfilerOldObjectBit);
  1869. return wasMemoryProfilerOldObject;
  1870. }
  1871. #endif
  1872. };
// A fake heap block to replace the original heap block where the strong ref is when it has been collected
// as the original heap block may have been freed
// Every virtual override simply asserts: no code path should ever operate on
// this placeholder block; it only gives collected weak refs a non-null block.
class CollectedRecyclerWeakRefHeapBlock : public HeapBlock
{
public:
#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
    virtual void WBVerifyBitIsSet(char* addr) override { Assert(false); }
    virtual void WBSetBit(char* addr) override { Assert(false); }
    virtual void WBSetBitRange(char* addr, uint count) override { Assert(false); }
    virtual void WBClearBit(char* addr) override { Assert(false); }
    virtual void WBClearObject(char* addr) override { Assert(false); }
#endif
#if DBG
    virtual BOOL IsFreeObject(void* objectAddress) override { Assert(false); return false; }
#endif
    virtual BOOL IsValidObject(void* objectAddress) override { Assert(false); return false; }
    virtual byte* GetRealAddressFromInterior(void* interiorAddress) override { Assert(false); return nullptr; }
    virtual size_t GetObjectSize(void* object) const override { Assert(false); return 0; }
    virtual bool FindHeapObject(void* objectAddress, Recycler * recycler, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject) override { Assert(false); return false; }
    virtual bool TestObjectMarkedBit(void* objectAddress) override { Assert(false); return false; }
    virtual void SetObjectMarkedBit(void* objectAddress) override { Assert(false); }
#ifdef RECYCLER_VERIFY_MARK
    virtual bool VerifyMark(void * objectAddress, void * target) override { Assert(false); return false; }
#endif
#ifdef RECYCLER_PERF_COUNTERS
    virtual void UpdatePerfCountersOnFree() override { Assert(false); }
#endif
#ifdef PROFILE_RECYCLER_ALLOC
    virtual void * GetTrackerData(void * address) override { Assert(false); return nullptr; }
    virtual void SetTrackerData(void * address, void * data) override { Assert(false); }
#endif
    // Shared singleton used for every collected weak reference.
    static CollectedRecyclerWeakRefHeapBlock Instance;
private:
    CollectedRecyclerWeakRefHeapBlock() : HeapBlock(BlockTypeCount)
    {
#if ENABLE_CONCURRENT_GC
        isPendingConcurrentSweep = false;
#endif
    }
};
  1913. class AutoIdleDecommit
  1914. {
  1915. public:
  1916. AutoIdleDecommit(Recycler * recycler) : recycler(recycler) { recycler->EnterIdleDecommit(); }
  1917. ~AutoIdleDecommit() { recycler->LeaveIdleDecommit(); }
  1918. private:
  1919. Recycler * recycler;
  1920. };
  1921. template <typename SmallHeapBlockAllocatorType>
  1922. void
  1923. Recycler::AddSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
  1924. {
  1925. autoHeap.AddSmallAllocator(allocator, sizeCat);
  1926. }
  1927. template <typename SmallHeapBlockAllocatorType>
  1928. void
  1929. Recycler::RemoveSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
  1930. {
  1931. autoHeap.RemoveSmallAllocator(allocator, sizeCat);
  1932. }
  1933. template <ObjectInfoBits attributes, typename SmallHeapBlockAllocatorType>
  1934. char *
  1935. Recycler::SmallAllocatorAlloc(SmallHeapBlockAllocatorType * allocator, DECLSPEC_GUARD_OVERFLOW size_t sizeCat, size_t size)
  1936. {
  1937. return autoHeap.SmallAllocatorAlloc<attributes>(this, allocator, sizeCat, size);
  1938. }
// Dummy recycler allocator policy classes to choose the allocation function
class _RecyclerLeafPolicy;
class _RecyclerNonLeafPolicy;
#ifdef RECYCLER_WRITE_BARRIER
class _RecyclerWriteBarrierPolicy;
#endif
// Primary template: intentionally empty — only the policy specializations
// below provide the alloc/free function selectors.
template <typename Policy>
class _RecyclerAllocatorFunc
{};
  1948. template <>
  1949. class _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
  1950. {
  1951. public:
  1952. typedef char * (Recycler::*AllocFuncType)(size_t);
  1953. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  1954. static AllocFuncType GetAllocFunc()
  1955. {
  1956. return &Recycler::AllocLeaf;
  1957. }
  1958. static AllocFuncType GetAllocZeroFunc()
  1959. {
  1960. return &Recycler::AllocLeafZero;
  1961. }
  1962. static FreeFuncType GetFreeFunc()
  1963. {
  1964. return &Recycler::ExplicitFreeLeaf;
  1965. }
  1966. };
  1967. template <>
  1968. class _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
  1969. {
  1970. public:
  1971. typedef char * (Recycler::*AllocFuncType)(size_t);
  1972. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  1973. static AllocFuncType GetAllocFunc()
  1974. {
  1975. return &Recycler::Alloc;
  1976. }
  1977. static AllocFuncType GetAllocZeroFunc()
  1978. {
  1979. return &Recycler::AllocZero;
  1980. }
  1981. static FreeFuncType GetFreeFunc()
  1982. {
  1983. return &Recycler::ExplicitFreeNonLeaf;
  1984. }
  1985. };
  1986. #ifdef RECYCLER_WRITE_BARRIER
  1987. template <>
  1988. class _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
  1989. {
  1990. public:
  1991. typedef char * (Recycler::*AllocFuncType)(size_t);
  1992. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  1993. static AllocFuncType GetAllocFunc()
  1994. {
  1995. return &Recycler::AllocWithBarrier;
  1996. }
  1997. static AllocFuncType GetAllocZeroFunc()
  1998. {
  1999. return &Recycler::AllocZeroWithBarrier;
  2000. }
  2001. static FreeFuncType GetFreeFunc()
  2002. {
  2003. return &Recycler::ExplicitFreeNonLeaf;
  2004. }
  2005. };
  2006. #endif
// This is used by the compiler; when T is NOT a pointer i.e. a value type - it causes leaf allocation
template <typename T>
class TypeAllocatorFunc<Recycler, T> : public _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
};
#if GLOBAL_ENABLE_WRITE_BARRIER
// With the global write barrier enabled, pointer types allocate through the
// write-barrier policy instead of the plain non-leaf one.
template <typename T>
class TypeAllocatorFunc<Recycler, T *> : public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
{
};
#else
// Partial template specialization; applies to T when it is a pointer
template <typename T>
class TypeAllocatorFunc<Recycler, T *> : public _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
{
};
#endif
// Dummy class to choose the allocation function
// These tag types carry no state; they exist only to select a
// TypeAllocatorFunc / AllocatorInfo specialization below.
class RecyclerLeafAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
class RecyclerNonLeafAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
class RecyclerWriteBarrierAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
// Choose RecyclerLeafAllocator / RecyclerNonLeafAllocator based on "bool isLeaf"
// (primary template covers isLeaf == true; the specialization covers false).
template <bool isLeaf>
struct _RecyclerLeaf { typedef RecyclerLeafAllocator AllocatorType; };
template <>
struct _RecyclerLeaf<false> { typedef RecyclerNonLeafAllocator AllocatorType; };
  2045. template <bool isLeaf>
  2046. class ListTypeAllocatorFunc<Recycler, isLeaf>
  2047. {
  2048. public:
  2049. // RecyclerLeafAllocator / RecyclerNonLeafAllocator based on "bool isLeaf"
  2050. // used by write barrier type traits
  2051. typedef typename _RecyclerLeaf<isLeaf>::AllocatorType EffectiveAllocatorType;
  2052. typedef char * (Recycler::*AllocFuncType)(size_t);
  2053. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  2054. static AllocFuncType GetAllocFunc()
  2055. {
  2056. return isLeaf ? &Recycler::AllocLeaf : &Recycler::Alloc;
  2057. }
  2058. static FreeFuncType GetFreeFunc()
  2059. {
  2060. if (isLeaf)
  2061. {
  2062. return &Recycler::ExplicitFreeLeaf;
  2063. }
  2064. else
  2065. {
  2066. return &Recycler::ExplicitFreeNonLeaf;
  2067. }
  2068. }
  2069. };
// Partial template specialization to allocate as non leaf
// (routed through the write-barrier policy when the global write barrier
// build flag is set).
template <typename T>
class TypeAllocatorFunc<RecyclerNonLeafAllocator, T> :
#if GLOBAL_ENABLE_WRITE_BARRIER
    public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
#else
    public _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
#endif
{
};
#ifdef RECYCLER_WRITE_BARRIER
// Explicitly requested write-barrier allocation, regardless of T.
template <typename T>
class TypeAllocatorFunc<RecyclerWriteBarrierAllocator, T> : public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
{
};
#endif
// Leaf allocation regardless of T.
template <typename T>
class TypeAllocatorFunc<RecyclerLeafAllocator, T> : public _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
};
// AllocatorInfo: maps an allocator tag type to the concrete allocator
// (always Recycler here) plus the per-type and per-instance alloc/free
// selector classes.
template <typename TAllocType>
struct AllocatorInfo<Recycler, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<Recycler, TAllocType> AllocatorFunc;
    typedef _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy> InstAllocatorFunc; // By default any instance considered non-leaf
};
template <typename TAllocType>
struct AllocatorInfo<RecyclerNonLeafAllocator, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<RecyclerNonLeafAllocator, TAllocType> AllocatorFunc;
    typedef TypeAllocatorFunc<RecyclerNonLeafAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
};
template <typename TAllocType>
struct AllocatorInfo<RecyclerWriteBarrierAllocator, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<RecyclerWriteBarrierAllocator, TAllocType> AllocatorFunc;
    typedef TypeAllocatorFunc<RecyclerWriteBarrierAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
};
template <typename TAllocType>
struct AllocatorInfo<RecyclerLeafAllocator, TAllocType>
{
    typedef Recycler AllocatorType;
    typedef TypeAllocatorFunc<RecyclerLeafAllocator, TAllocType> AllocatorFunc;
    typedef TypeAllocatorFunc<RecyclerLeafAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
};
// ForceNonLeafAllocator / ForceLeafAllocator: rebind an allocator tag to its
// non-leaf / leaf counterpart.
template <>
struct ForceNonLeafAllocator<Recycler>
{
    typedef RecyclerNonLeafAllocator AllocatorType;
};
template <>
struct ForceNonLeafAllocator<RecyclerLeafAllocator>
{
    typedef RecyclerNonLeafAllocator AllocatorType;
};
template <>
struct ForceLeafAllocator<Recycler>
{
    typedef RecyclerLeafAllocator AllocatorType;
};
template <>
struct ForceLeafAllocator<RecyclerNonLeafAllocator>
{
    typedef RecyclerLeafAllocator AllocatorType;
};
// TODO: enable -profile for GC phases.
// Multiple GC threads sharing one recycler object would access the same
// profiler object, but the profiler object is not thread safe — so these
// macros are currently compiled out (note the "&& 0" below).
#if defined(PROFILE_EXEC) && 0
#define RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase) if (recycler->profiler != nullptr) { recycler->profiler->Begin(phase); }
#define RECYCLER_PROFILE_EXEC_END(recycler, phase) if (recycler->profiler != nullptr) { recycler->profiler->End(phase); }
#define RECYCLER_PROFILE_EXEC_BEGIN2(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->Begin(phase1); recycler->profiler->Begin(phase2);}
#define RECYCLER_PROFILE_EXEC_END2(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->End(phase1); recycler->profiler->End(phase2);}
#define RECYCLER_PROFILE_EXEC_CHANGE(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->End(phase1); recycler->profiler->Begin(phase2); }
#define RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase) if (recycler->backgroundProfiler != nullptr) { recycler->backgroundProfiler->Begin(phase); }
#define RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase) if (recycler->backgroundProfiler != nullptr) { recycler->backgroundProfiler->End(phase); }
#define RECYCLER_PROFILE_EXEC_THREAD_BEGIN(background, recycler, phase) if (background) { RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase); } else { RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase); }
#define RECYCLER_PROFILE_EXEC_THREAD_END(background, recycler, phase) if (background) { RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase); } else { RECYCLER_PROFILE_EXEC_END(recycler, phase); }
#else
// No-op variants used when GC-phase profiling is disabled.
#define RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase)
#define RECYCLER_PROFILE_EXEC_END(recycler, phase)
#define RECYCLER_PROFILE_EXEC_BEGIN2(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_END2(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_CHANGE(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase)
#define RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase)
#define RECYCLER_PROFILE_EXEC_THREAD_BEGIN(background, recycler, phase)
#define RECYCLER_PROFILE_EXEC_THREAD_END(background, recycler, phase)
#endif
} // closes the namespace opened earlier in this file (before this chunk)
  2163. _Ret_notnull_ inline void * __cdecl
  2164. operator new(DECLSPEC_GUARD_OVERFLOW size_t byteSize, Recycler * alloc, HeapInfo * heapInfo)
  2165. {
  2166. return alloc->HeapAllocR(heapInfo, byteSize);
  2167. }
  2168. inline void __cdecl
  2169. operator delete(void * obj, Recycler * alloc, HeapInfo * heapInfo)
  2170. {
  2171. alloc->HeapFree(heapInfo, obj);
  2172. }
  2173. template<ObjectInfoBits infoBits>
  2174. _Ret_notnull_ inline void * __cdecl
  2175. operator new(DECLSPEC_GUARD_OVERFLOW size_t byteSize, Recycler * recycler, const InfoBitsWrapper<infoBits>&)
  2176. {
  2177. AssertCanHandleOutOfMemory();
  2178. Assert(byteSize != 0);
  2179. void * buffer;
  2180. if (infoBits & EnumClass_1_Bit)
  2181. {
  2182. buffer = recycler->AllocEnumClass<infoBits>(byteSize);
  2183. }
  2184. else
  2185. {
  2186. buffer = recycler->AllocWithInfoBits<infoBits>(byteSize);
  2187. }
  2188. // All of our allocation should throw on out of memory
  2189. Assume(buffer != nullptr);
  2190. return buffer;
  2191. }
#if DBG && defined(RECYCLER_VERIFY_MARK)
// Mark-verification hook: reports whether a candidate reference at
// (objectStartAddress + offset) inside an object of the named type is likely
// a false positive.
extern bool IsLikelyRuntimeFalseReference(
    char* objectStartAddress, size_t offset, const char* typeName);
// Grants IsLikelyRuntimeFalseReference access to a class's private members;
// expands to nothing when mark verification is disabled.
#define DECLARE_RECYCLER_VERIFY_MARK_FRIEND() \
    private: \
    friend bool ::IsLikelyRuntimeFalseReference( \
        char* objectStartAddress, size_t offset, const char* typeName);
#else
#define DECLARE_RECYCLER_VERIFY_MARK_FRIEND()
#endif
// Performs an external (outside the recycler heap) allocation of `size`
// bytes via externalAllocFunc while keeping the recycler's external-memory
// accounting consistent. Returns false if the budget cannot be reserved
// (even after a collection) or if externalAllocFunc reports failure.
template <typename ExternalAllocFunc>
bool Recycler::DoExternalAllocation(size_t size, ExternalAllocFunc externalAllocFunc)
{
    // Request external memory allocation
    if (!RequestExternalMemoryAllocation(size))
    {
        // Attempt to free some memory then try again
        CollectNow<CollectOnTypedArrayAllocation>();
        if (!RequestExternalMemoryAllocation(size))
        {
            return false;
        }
    }
    // RAII guard: if externalAllocFunc throws or returns false, hand the
    // reserved budget back via ReportExternalMemoryFailure.
    struct AutoExternalAllocation
    {
        bool allocationSucceeded = false;
        Recycler* recycler;
        size_t size;
        AutoExternalAllocation(Recycler* recycler, size_t size): recycler(recycler), size(size) {}
        // In case the externalAllocFunc throws or fails, the destructor will report the failure
        ~AutoExternalAllocation() { if (!allocationSucceeded) recycler->ReportExternalMemoryFailure(size); }
    };
    AutoExternalAllocation externalAllocation(this, size);
    if (externalAllocFunc())
    {
        this->AddExternalMemoryUsage(size);
        externalAllocation.allocationSucceeded = true;
        return true;
    }
    return false;
}