Recycler.h 98 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387238823892390239123922393239423952396239723982399240024012402240324042405240624072408240924102411241224132414241524162417241824192420242124222423242424252426242724282429243024312432243324342435243624372438243924402441244224432444244524462447244824492450245124522453245424552456245724582459246024612462246324642465246624672468246924702471247224732474247524762477247824792480248124822483248424852486248724882489249024912492249324942495249624972498249925002501250225032504250525062507250825092510251125122513251425152516251725182519252025212522252325242525252625272528252925302531253225332534253525362537253825392540254125422543254425452546254725482549255025512552255325542555255625572558255925602561256225632564256525662567256825692570257125722573257425752576257725782579258025812582258325842585258625872588258925902591259225932594259525962597259825992600260126022603260426052606260726082609261026112612261326142615261626172618261926202621262226232624262526262627262826292630263126322633263426352636263726382639264026412642264326442645264626472648264926502651265226532654265526562657265826592660266126622663266426652666266726682669267026712672267326742675267626772678
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. #include "CollectionState.h"
  7. #include "RecyclerTelemetryInfo.h"
  8. #include "RecyclerWaitReason.h"
  9. #include "Common/ObservableValue.h"
  10. #include "CollectionFlags.h"
  11. namespace Js
  12. {
  13. class Profiler;
  14. enum Phase: unsigned short;
  15. };
  16. namespace JsUtil
  17. {
  18. class ThreadService;
  19. };
  20. #ifdef STACK_BACK_TRACE
  21. class StackBackTraceNode;
  22. #endif
  23. class ScriptEngineBase;
  24. class JavascriptThreadService;
  25. #ifdef PROFILE_MEM
  26. struct RecyclerMemoryData;
  27. #endif
  28. class ThreadContext;
  29. namespace Memory
  30. {
  31. template <typename T> class RecyclerRootPtr;
  32. class AutoBooleanToggle
  33. {
  34. public:
  35. AutoBooleanToggle(bool * b, bool value = true, bool valueMayChange = false)
  36. : b(b)
  37. {
  38. Assert(!(*b));
  39. *b = value;
  40. #if DBG
  41. this->value = value;
  42. this->valueMayChange = valueMayChange;
  43. #endif
  44. }
  45. ~AutoBooleanToggle()
  46. {
  47. if (b)
  48. {
  49. Assert(valueMayChange || *b == value);
  50. *b = false;
  51. }
  52. }
  53. void Leave()
  54. {
  55. Assert(valueMayChange || *b == value);
  56. *b = false;
  57. b = nullptr;
  58. }
  59. private:
  60. bool * b;
  61. #if DBG
  62. bool value;
  63. bool valueMayChange;
  64. #endif
  65. };
  66. template <class T>
  67. class AutoRestoreValue
  68. {
  69. public:
  70. AutoRestoreValue(T* var, const T& val):
  71. variable(var)
  72. {
  73. Assert(var);
  74. oldValue = (*variable);
  75. (*variable) = val;
  76. #ifdef DEBUG
  77. debugSetValue = val;
  78. #endif
  79. }
  80. ~AutoRestoreValue()
  81. {
  82. Assert((*variable) == debugSetValue);
  83. (*variable) = oldValue;
  84. }
  85. private:
  86. #ifdef DEBUG
  87. T debugSetValue;
  88. #endif
  89. T* variable;
  90. T oldValue;
  91. };
  92. class Recycler;
// Functor passed to host root-scanning code: forwards a range of candidate
// pointer slots (obj, byteCount) to the recycler for marking. The call
// operator is defined out of line (it needs the complete Recycler type).
class RecyclerScanMemoryCallback
{
public:
    RecyclerScanMemoryCallback(Recycler* recycler) : recycler(recycler) {}
    void operator()(void** obj, size_t byteCount);
private:
    Recycler* recycler; // not owned here; presumably outlives the callback — confirm at call sites
};
// Empty tag type that carries an ObjectInfoBits value in its template
// argument; used to select placement-new overloads at compile time
// (see the RecyclerNew*WithInfoBits / *EnumClass macros in this file).
template<ObjectInfoBits infoBits>
struct InfoBitsWrapper{};
#if ENABLE_WEAK_REFERENCE_REGIONS
// Minimal local pointer trait: true only for `K*` specializations.
// Mirrors std::is_pointer_v; kept local presumably to avoid pulling
// <type_traits> into this header — TODO confirm.
template<typename T>
static constexpr bool is_pointer = false;
template<typename K>
static constexpr bool is_pointer<K*> = true;

// One slot of a weak-reference region: the weakly-referenced pointer plus
// the heap block bookkeeping the recycler uses while marking.
template<typename T>
class RecyclerWeakReferenceRegionItem {
    static_assert(is_pointer<T>, "Weak references must be to pointer types");
    friend class Recycler;
public:
    // Slots start empty; the recycler fills in heapBlock lazily.
    RecyclerWeakReferenceRegionItem() : ptr(T()), heapBlock(nullptr) {};
    // Implicit read of the (possibly already-cleared) weak pointer.
    operator T() const { return ptr; };
    T operator=(T newPtr) {
        Assert(ptr == nullptr); // For safety with concurrent marking, only allow setting the pointer to non-null from null
        heapBlock = nullptr;
        return ptr = newPtr;
    };
private:
    // Non-copyable: the recycler tracks these slots in place.
    RecyclerWeakReferenceRegionItem(RecyclerWeakReferenceRegionItem<T>&) = delete;
    FieldNoBarrier(T) ptr;
    FieldNoBarrier(HeapBlock*) heapBlock; // Note: the low bit of the heapBlock is used for background marking
};

// A contiguous array of weak-reference slots, plus the heap block that
// backs the array allocation itself. Fields are written by the Recycler
// (friend); this type only exposes read accessors.
class RecyclerWeakReferenceRegion {
    friend class Recycler;
public:
    RecyclerWeakReferenceRegionItem<void*>* GetPtr() const { return ptr; }
    size_t GetCount() const { return count; }
    HeapBlock* GetHeapBlock() const { return arrayHeapBlock; }
private:
    FieldNoBarrier(RecyclerWeakReferenceRegionItem<void*>*) ptr;
    FieldNoBarrier(size_t) count;
    FieldNoBarrier(HeapBlock*) arrayHeapBlock;
};
#endif
// Allocation macro
// Suffix conventions (visible in the Alloc* routine each macro expands to):
//   ...Z / Zero   - zero-initialized allocation (AllocZero* variants)
//   ...Plus       - `size` extra bytes allocated beyond sizeof(T)
//   ...Struct     - struct-style allocation (AllocatorNewStruct* helpers)
//   ...Array      - `count` contiguous elements
//   ...Finalized  - T converts to FinalizableObject (hence the double cast)
//   ...Tracked    - tracked allocation, also routed via FinalizableObject
#define RecyclerNew(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocInlined, T, __VA_ARGS__)
#define RecyclerNewPlus(recycler,size,T,...) AllocatorNewPlus(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusZ(recycler,size,T,...) AllocatorNewPlusZ(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocZeroInlined, T, __VA_ARGS__)
#define RecyclerNewStruct(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocInlined, T)
#define RecyclerNewStructZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocZeroInlined, T)
#define RecyclerNewStructPlus(recycler,size,T) AllocatorNewStructPlus(Recycler, recycler, size, T)
#define RecyclerNewArray(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, Alloc, T, count)
#define RecyclerNewArrayZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZero, T, count)
#define RecyclerNewFinalized(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedInlined, T, __VA_ARGS__)))
#define RecyclerNewFinalizedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalized, size, T, __VA_ARGS__)))
#define RecyclerNewTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedInlined, T, __VA_ARGS__)))
#define RecyclerNewEnumClass(recycler, enumClass, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<enumClass>()) T(__VA_ARGS__)
#define RecyclerNewWithInfoBits(recycler, infoBits, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<infoBits>()) T(__VA_ARGS__)
#define RecyclerNewFinalizedClientTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedClientTrackedInlined, T, __VA_ARGS__)))
// Write-barrier variants: identical shapes, but allocate through the
// AllocWithBarrier / AllocZeroWithBarrier families (or OR WithBarrierBit into
// the info bits) so the software write barrier covers the new object.
#if defined(RECYCLER_WRITE_BARRIER_ALLOC)
#define RecyclerNewWithBarrier(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocWithBarrier, T, __VA_ARGS__)
#define RecyclerNewWithBarrierPlus(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocWithBarrier, size, T, __VA_ARGS__)
#define RecyclerNewWithBarrierPlusZ(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocZeroWithBarrier, size, T, __VA_ARGS__)
#define RecyclerNewWithBarrierZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocZeroWithBarrier, T, __VA_ARGS__)
#define RecyclerNewWithBarrierStruct(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocWithBarrier, T)
#define RecyclerNewWithBarrierStructZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocZeroWithBarrier, T)
#define RecyclerNewWithBarrierStructPlus(recycler,size,T) AllocatorNewStructPlusBase(Recycler, recycler, AllocWithBarrier, size, T)
#define RecyclerNewWithBarrierArray(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocWithBarrier, T, count)
#define RecyclerNewWithBarrierArrayZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZeroWithBarrier, T, count)
#define RecyclerNewWithBarrierFinalized(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedWithBarrierInlined, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierFinalizedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedWithBarrier, size, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedWithBarrierInlined, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierEnumClass(recycler, enumClass, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<(ObjectInfoBits)(enumClass | WithBarrierBit)>()) T(__VA_ARGS__)
#define RecyclerNewWithBarrierWithInfoBits(recycler, infoBits, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<(ObjectInfoBits)(infoBits | WithBarrierBit)>()) T(__VA_ARGS__)
#define RecyclerNewWithBarrierFinalizedClientTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedClientTrackedWithBarrierInlined, T, __VA_ARGS__)))
#endif
// When the recycler write barrier is not compiled in, the *WithBarrier macro
// names simply alias the plain allocators so call sites compile unchanged.
#ifndef RECYCLER_WRITE_BARRIER
#define RecyclerNewWithBarrier RecyclerNew
#define RecyclerNewWithBarrierPlus RecyclerNewPlus
#define RecyclerNewWithBarrierPlusZ RecyclerNewPlusZ
#define RecyclerNewWithBarrierZ RecyclerNewZ
#define RecyclerNewWithBarrierStruct RecyclerNewStruct
#define RecyclerNewWithBarrierStructZ RecyclerNewStructZ
#define RecyclerNewWithBarrierStructPlus RecyclerNewStructPlus
#define RecyclerNewWithBarrierArray RecyclerNewArray
#define RecyclerNewWithBarrierArrayZ RecyclerNewArrayZ
#define RecyclerNewWithBarrierFinalized RecyclerNewFinalized
#define RecyclerNewWithBarrierFinalizedPlus RecyclerNewFinalizedPlus
#define RecyclerNewWithBarrierTracked RecyclerNewTracked
#define RecyclerNewWithBarrierEnumClass RecyclerNewEnumClass
#define RecyclerNewWithBarrierWithInfoBits RecyclerNewWithInfoBits
#define RecyclerNewWithBarrierFinalizedClientTracked RecyclerNewFinalizedClientTracked
#endif
// Leaf allocators
// These route through the AllocLeaf* family. "Leaf" presumably means the
// object contains no recycler pointers and is not scanned during marking —
// confirm against the AllocLeaf implementation.
#define RecyclerNewLeaf(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafInlined, T, __VA_ARGS__)
#define RecyclerNewLeafZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafZeroInlined, T, __VA_ARGS__)
#define RecyclerNewPlusLeaf(recycler,size,T,...) AllocatorNewPlusLeaf(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusLeafZ(recycler,size,T,...) AllocatorNewPlusLeafZ(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewStructLeaf(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocLeafInlined, T)
#define RecyclerNewStructLeafZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocLeafZeroInlined, T)
#define RecyclerNewArrayLeafZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafZero, T, count)
#define RecyclerNewArrayLeaf(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeaf, T, count)
#define RecyclerNewFinalizedLeaf(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedLeafInlined, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeafPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedLeaf, size, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeaf(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedLeafInlined, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeafPlusZ(recycler,size,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocZeroTrackedLeafInlined, size, T, __VA_ARGS__)))
#ifdef RECYCLER_VISITED_HOST
// We need to track these allocations. The RecyclerVisitedHost* object allocation APIs don't provide us with the type of the objects being allocated. Use the DummyVTableObject type used elsewhere to track the allocations.
#define RecyclerAllocVisitedHostTracedAndFinalized(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<RecyclerVisitedHostTracedFinalizableBits>(size)
#define RecyclerAllocVisitedHostFinalized(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<RecyclerVisitedHostFinalizableBits>(size)
#define RecyclerAllocVisitedHostTraced(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<RecyclerVisitedHostTracedBits>(size)
#define RecyclerAllocLeaf(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<LeafBit>(size)
#endif
// Object-lifetime tracing: when TRACE_OBJECT_LIFETIME is defined the *Trace
// macros route through the Alloc*Trace entry points; otherwise each name
// aliases its non-tracing counterpart so call sites need no #ifdefs.
#ifdef TRACE_OBJECT_LIFETIME
#define RecyclerNewLeafTrace(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafTrace, T, __VA_ARGS__)
#define RecyclerNewLeafZTrace(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafZeroTrace, T, __VA_ARGS__)
#define RecyclerNewPlusLeafTrace(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocLeafTrace, size, T, __VA_ARGS__)
#define RecyclerNewArrayLeafZTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafZeroTrace, T, count)
#define RecyclerNewArrayTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocTrace, T, count)
#define RecyclerNewArrayZTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZeroTrace, T, count)
#define RecyclerNewArrayLeafTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafTrace, T, count)
#define RecyclerNewFinalizedTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedTrace, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeafTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedLeafTrace, T, __VA_ARGS__)))
#define RecyclerNewFinalizedPlusTrace(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedTrace, size, T, __VA_ARGS__)))
#define RecyclerNewTrackedTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedTrace, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeafTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedLeafTrace, T, __VA_ARGS__)))
#else
#define RecyclerNewLeafTrace RecyclerNewLeaf
#define RecyclerNewLeafZTrace RecyclerNewLeafZ
#define RecyclerNewPlusLeafTrace RecyclerNewPlusLeaf
#define RecyclerNewArrayLeafZTrace RecyclerNewArrayLeafZ
#define RecyclerNewArrayTrace RecyclerNewArray
#define RecyclerNewArrayZTrace RecyclerNewArrayZ
#define RecyclerNewArrayLeafTrace RecyclerNewArrayLeaf
#define RecyclerNewFinalizedTrace RecyclerNewFinalized
#define RecyclerNewFinalizedLeafTrace RecyclerNewFinalizedLeaf
#define RecyclerNewFinalizedPlusTrace RecyclerNewFinalizedPlus
#define RecyclerNewTrackedTrace RecyclerNewTracked
#define RecyclerNewTrackedLeafTrace RecyclerNewTrackedLeaf
#endif
  234. #ifdef RECYCLER_TRACE
  235. #define RecyclerVerboseTrace(flags, ...) \
  236. if (flags.Verbose && flags.Trace.IsEnabled(Js::RecyclerPhase)) \
  237. { \
  238. Output::Print(__VA_ARGS__); \
  239. }
  240. #define AllocationVerboseTrace(flags, ...) \
  241. if (flags.Verbose && flags.Trace.IsEnabled(Js::MemoryAllocationPhase)) \
  242. { \
  243. Output::Print(__VA_ARGS__); \
  244. }
  245. #define LargeAllocationVerboseTrace(flags, ...) \
  246. if (flags.Verbose && \
  247. (flags.Trace.IsEnabled(Js::MemoryAllocationPhase) || \
  248. flags.Trace.IsEnabled(Js::LargeMemoryAllocationPhase))) \
  249. { \
  250. Output::Print(__VA_ARGS__); \
  251. }
  252. #define PageAllocatorAllocationVerboseTrace(flags, ...) \
  253. if (flags.Verbose && flags.Trace.IsEnabled(Js::PageAllocatorAllocPhase)) \
  254. { \
  255. Output::Print(__VA_ARGS__); \
  256. }
  257. #else
  258. #define RecyclerVerboseTrace(...)
  259. #define AllocationVerboseTrace(...)
  260. #define LargeAllocationVerboseTrace(...)
  261. #endif
// Placement-construct T against a specific HeapInfo, and the matching
// explicit free (HeapFree) for objects allocated that way.
#define RecyclerHeapNew(recycler,heapInfo,T,...) new (recycler, heapInfo) T(__VA_ARGS__)
#define RecyclerHeapDelete(recycler,heapInfo,addr) (static_cast<Recycler *>(recycler)->HeapFree(heapInfo,addr))
// Host-supplied callback invoked with a context pointer to mark external roots.
typedef void (__cdecl* ExternalRootMarker)(void *);
  265. class RecyclerCollectionWrapper
  266. {
  267. public:
  268. RecyclerCollectionWrapper() :
  269. _isScriptContextCloseGCPending(FALSE)
  270. { }
  271. typedef BOOL (Recycler::*CollectionFunction)(CollectionFlags flags);
  272. virtual void PreCollectionCallBack(CollectionFlags flags) = 0;
  273. virtual void PreSweepCallback() = 0;
  274. virtual void PreRescanMarkCallback() = 0;
  275. virtual size_t RootMarkCallback(RecyclerScanMemoryCallback& scanMemoryCallback, BOOL * stacksScannedByRuntime) = 0;
  276. virtual void RescanMarkTimeoutCallback() = 0;
  277. virtual void EndMarkCallback() = 0;
  278. virtual void ConcurrentCallback() = 0;
  279. virtual void WaitCollectionCallBack() = 0;
  280. virtual void PostCollectionCallBack() = 0;
  281. virtual BOOL ExecuteRecyclerCollectionFunction(Recycler * recycler, CollectionFunction function, CollectionFlags flags) = 0;
  282. virtual uint GetRandomNumber() = 0;
  283. virtual bool DoSpecialMarkOnScanStack() = 0;
  284. virtual void OnScanStackCallback(void ** stackTop, size_t byteCount, void ** registers, size_t registersByteCount) = 0;
  285. virtual void PostSweepRedeferralCallBack() = 0;
  286. #ifdef FAULT_INJECTION
  287. virtual void DisposeScriptContextByFaultInjectionCallBack() = 0;
  288. #endif
  289. virtual void DisposeObjects(Recycler * recycler) = 0;
  290. virtual void PreDisposeObjectsCallBack() = 0;
  291. #ifdef ENABLE_PROJECTION
  292. virtual void MarkExternalWeakReferencedObjects(bool inPartialCollect) = 0;
  293. virtual void ResolveExternalWeakReferencedObjects() = 0;
  294. #endif
  295. #if DBG || defined(PROFILE_EXEC)
  296. virtual bool AsyncHostOperationStart(void *) = 0;
  297. virtual void AsyncHostOperationEnd(bool wasInAsync, void *) = 0;
  298. #endif
  299. #if DBG
  300. virtual void CheckJsReentrancyOnDispose() = 0;
  301. #endif
  302. BOOL GetIsScriptContextCloseGCPending()
  303. {
  304. return _isScriptContextCloseGCPending;
  305. }
  306. void ClearIsScriptContextCloseGCPending()
  307. {
  308. _isScriptContextCloseGCPending = FALSE;
  309. }
  310. void SetIsScriptContextCloseGCPending()
  311. {
  312. _isScriptContextCloseGCPending = TRUE;
  313. }
  314. protected:
  315. BOOL _isScriptContextCloseGCPending;
  316. };
  317. class DefaultRecyclerCollectionWrapper : public RecyclerCollectionWrapper
  318. {
  319. public:
  320. virtual void PreCollectionCallBack(CollectionFlags flags) override {}
  321. virtual void PreSweepCallback() override {}
  322. virtual void PreRescanMarkCallback() override {}
  323. virtual void RescanMarkTimeoutCallback() override {}
  324. virtual void EndMarkCallback() override {}
  325. virtual size_t RootMarkCallback(RecyclerScanMemoryCallback& scanMemoryCallback, BOOL * stacksScannedByRuntime) override { *stacksScannedByRuntime = FALSE; return 0; }
  326. virtual void ConcurrentCallback() override {}
  327. virtual void WaitCollectionCallBack() override {}
  328. virtual void PostCollectionCallBack() override {}
  329. virtual BOOL ExecuteRecyclerCollectionFunction(Recycler * recycler, CollectionFunction function, CollectionFlags flags) override;
  330. virtual uint GetRandomNumber() override { return 0; }
  331. virtual bool DoSpecialMarkOnScanStack() override { return false; }
  332. virtual void OnScanStackCallback(void ** stackTop, size_t byteCount, void ** registers, size_t registersByteCount) override {};
  333. virtual void PostSweepRedeferralCallBack() override {}
  334. #ifdef FAULT_INJECTION
  335. virtual void DisposeScriptContextByFaultInjectionCallBack() override {};
  336. #endif
  337. virtual void DisposeObjects(Recycler * recycler) override;
  338. virtual void PreDisposeObjectsCallBack() override {};
  339. #ifdef ENABLE_PROJECTION
  340. virtual void MarkExternalWeakReferencedObjects(bool inPartialCollect) override {};
  341. virtual void ResolveExternalWeakReferencedObjects() override {};
  342. #endif
  343. #if DBG || defined(PROFILE_EXEC)
  344. virtual bool AsyncHostOperationStart(void *) override { return false; };
  345. virtual void AsyncHostOperationEnd(bool wasInAsync, void *) override {};
  346. #endif
  347. #if DBG
  348. virtual void CheckJsReentrancyOnDispose() override {}
  349. #endif
  350. static DefaultRecyclerCollectionWrapper Instance;
  351. private:
  352. static bool IsCollectionDisabled(Recycler * recycler);
  353. };
  354. #ifdef RECYCLER_STATS
  355. struct RecyclerCollectionStats
  356. {
  357. size_t startCollectAllocBytes;
  358. #if ENABLE_PARTIAL_GC
  359. size_t startCollectNewPageCount;
  360. #endif
  361. size_t continueCollectAllocBytes;
  362. size_t finishCollectTryCount;
  363. // Heuristic Stats
  364. #if ENABLE_PARTIAL_GC
  365. size_t rescanRootBytes;
  366. size_t estimatedPartialReuseBytes;
  367. size_t uncollectedNewPageCountPartialCollect;
  368. size_t partialCollectSmallHeapBlockReuseMinFreeBytes;
  369. double collectEfficacy;
  370. double collectCost;
  371. #endif
  372. // Mark stats
  373. size_t tryMarkCount; // # of pointer try mark (* pointer size to get total number byte looked at)
  374. size_t tryMarkNullCount;
  375. size_t tryMarkUnalignedCount;
  376. size_t tryMarkNonRecyclerMemoryCount;
  377. size_t tryMarkInteriorCount;
  378. size_t tryMarkInteriorNullCount;
  379. size_t tryMarkInteriorNonRecyclerMemoryCount;
  380. size_t rootCount;
  381. size_t stackCount;
  382. size_t remarkCount;
  383. size_t scanCount; // non-leaf objects marked.
  384. size_t trackCount;
  385. size_t finalizeCount;
  386. size_t markThruNewObjCount;
  387. size_t markThruFalseNewObjCount;
  388. struct MarkData
  389. {
  390. // Rescan stats
  391. size_t rescanPageCount;
  392. size_t rescanObjectCount;
  393. size_t rescanObjectByteCount;
  394. size_t rescanLargePageCount;
  395. size_t rescanLargeObjectCount;
  396. size_t rescanLargeByteCount;
  397. size_t markCount; // total number of object marked
  398. size_t markBytes; // size of all objects marked.
  399. } markData;
  400. #if ENABLE_CONCURRENT_GC
  401. MarkData backgroundMarkData[RecyclerHeuristic::MaxBackgroundRepeatMarkCount];
  402. size_t trackedObjectCount;
  403. #endif
  404. #if ENABLE_PARTIAL_GC
  405. size_t clientTrackedObjectCount;
  406. #endif
  407. // Sweep stats
  408. size_t heapBlockCount[HeapBlock::BlockTypeCount]; // number of heap blocks (processed during swept)
  409. size_t heapBlockFreeCount[HeapBlock::BlockTypeCount]; // number of heap blocks deleted
  410. size_t heapBlockConcurrentSweptCount[HeapBlock::SmallBlockTypeCount];
  411. size_t heapBlockSweptCount[HeapBlock::SmallBlockTypeCount]; // number of heap blocks swept
  412. size_t objectSweptCount; // objects freed (free list + whole page freed)
  413. size_t objectSweptBytes;
  414. size_t objectSweptFreeListCount; // objects freed (free list)
  415. size_t objectSweptFreeListBytes;
  416. size_t objectSweepScanCount; // number of objects walked for sweeping (exclude whole page freed)
  417. size_t finalizeSweepCount; // number of objects finalizer/dispose called
  418. #if ENABLE_PARTIAL_GC
  419. size_t smallNonLeafHeapBlockPartialReuseCount[HeapBlock::SmallBlockTypeCount];
  420. size_t smallNonLeafHeapBlockPartialReuseBytes[HeapBlock::SmallBlockTypeCount];
  421. size_t smallNonLeafHeapBlockPartialUnusedCount[HeapBlock::SmallBlockTypeCount];
  422. size_t smallNonLeafHeapBlockPartialUnusedBytes[HeapBlock::SmallBlockTypeCount];
  423. #endif
  424. // Memory Stats
  425. size_t heapBlockFreeByteCount[HeapBlock::BlockTypeCount]; // The remaining usable free byte count
  426. size_t largeHeapBlockUsedByteCount; // Used byte count
  427. size_t largeHeapBlockTotalByteCount; // Total byte count
  428. // Empty/zero heap block stats
  429. uint numEmptySmallBlocks[HeapBlock::SmallBlockTypeCount];
  430. uint numZeroedOutSmallBlocks;
  431. };
  432. #define RECYCLER_STATS_INC_IF(cond, r, f) if (cond) { RECYCLER_STATS_INC(r, f); }
  433. #define RECYCLER_STATS_INC(r, f) ++r->collectionStats.f
  434. #define RECYCLER_STATS_INTERLOCKED_INC(r, f) { InterlockedIncrement((LONG *)&r->collectionStats.f); }
  435. #define RECYCLER_STATS_DEC(r, f) --r->collectionStats.f
  436. #define RECYCLER_STATS_ADD(r, f, v) r->collectionStats.f += (v)
  437. #define RECYCLER_STATS_INTERLOCKED_ADD(r, f, v) { InterlockedAdd((LONG *)&r->collectionStats.f, (LONG)(v)); }
  438. #define RECYCLER_STATS_SUB(r, f, v) r->collectionStats.f -= (v)
  439. #define RECYCLER_STATS_SET(r, f, v) r->collectionStats.f = v
  440. #else
  441. #define RECYCLER_STATS_INC_IF(cond, r, f)
  442. #define RECYCLER_STATS_INC(r, f)
  443. #define RECYCLER_STATS_INTERLOCKED_INC(r, f)
  444. #define RECYCLER_STATS_DEC(r, f)
  445. #define RECYCLER_STATS_ADD(r, f, v)
  446. #define RECYCLER_STATS_INTERLOCKED_ADD(r, f, v)
  447. #define RECYCLER_STATS_SUB(r, f, v)
  448. #define RECYCLER_STATS_SET(r, f, v)
  449. #endif
  450. #ifdef RECYCLER_TRACE
// Snapshot of the inputs that triggered a collection, recorded for
// RECYCLER_TRACE diagnostics (see Recycler::collectionParam).
struct CollectionParam
{
CollectionFlags flags; // flags passed to the collect request
bool finishOnly; // only finishing a previously started collection
bool repeat;
bool priorityBoostConcurrentSweepOverride;
bool domCollect;
int timeDiff; // elapsed time delta at the trigger point -- presumably tick counts; confirm at the recording site
size_t uncollectedAllocBytes;
size_t uncollectedPinnedObjects;
#if ENABLE_PARTIAL_GC
size_t uncollectedNewPageCountPartialCollect;
size_t uncollectedNewPageCount;
size_t unusedPartialCollectFreeBytes;
bool inPartialCollectMode;
#endif
};
  468. #endif
  469. #include "RecyclerObjectGraphDumper.h"
  470. #if ENABLE_CONCURRENT_GC
  471. class RecyclerParallelThread
  472. {
  473. friend class ThreadContext;
  474. public:
  475. typedef void (Recycler::* WorkFunc)();
  476. RecyclerParallelThread(Recycler * recycler, WorkFunc workFunc) :
  477. recycler(recycler),
  478. workFunc(workFunc),
  479. concurrentWorkReadyEvent(NULL),
  480. concurrentWorkDoneEvent(NULL),
  481. concurrentThread(NULL)
  482. {
  483. }
  484. ~RecyclerParallelThread()
  485. {
  486. Assert(concurrentThread == NULL);
  487. Assert(concurrentWorkReadyEvent == NULL);
  488. Assert(concurrentWorkDoneEvent == NULL);
  489. }
  490. bool StartConcurrent();
  491. void WaitForConcurrent();
  492. void Shutdown();
  493. bool EnableConcurrent(bool synchronizeOnStartup);
  494. private:
  495. // Static entry point for thread creation
  496. static unsigned int CALLBACK StaticThreadProc(LPVOID lpParameter);
  497. // Static entry point for thread service usage
  498. static void CALLBACK StaticBackgroundWorkCallback(void * callbackData);
  499. private:
  500. WorkFunc workFunc;
  501. Recycler * recycler;
  502. HANDLE concurrentWorkReadyEvent;// main thread uses this event to tell concurrent threads that the work is ready
  503. HANDLE concurrentWorkDoneEvent;// concurrent threads use this event to tell main thread that the work allocated is done
  504. HANDLE concurrentThread;
  505. bool synchronizeOnStartup;
  506. };
  507. #endif
  508. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
// RAII scope that toggles page protection on the recycler's pages for
// debugging (definitions live in the .cpp; exact protection applied --
// see isReadOnly -- should be confirmed there).
class AutoProtectPages
{
public:
// protectEnabled gates whether construction protects anything at all.
AutoProtectPages(Recycler* recycler, bool protectEnabled);
// Restores writability if Unprotect() was not already called.
~AutoProtectPages();
void Unprotect();
private:
Recycler* recycler;
bool isReadOnly;
};
  519. #endif
  520. class Recycler
  521. {
  522. friend class RecyclerScanMemoryCallback;
  523. friend class RecyclerSweep;
  524. friend class RecyclerSweepManager;
  525. friend class MarkContext;
  526. friend class HeapBlock;
  527. friend class HeapBlockMap32;
  528. #if ENABLE_CONCURRENT_GC
  529. friend class RecyclerParallelThread;
  530. #endif
  531. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  532. friend class AutoProtectPages;
  533. #endif
  534. #ifdef ENABLE_BASIC_TELEMETRY
  535. friend class RecyclerTelemetryInfo;
  536. #endif
  537. template <typename T> friend class RecyclerWeakReference;
  538. template <typename T> friend class WeakReferenceHashTable;
  539. template <typename TBlockType>
  540. friend class SmallHeapBlockAllocator; // Needed for FindHeapBlock
  541. #if defined(RECYCLER_TRACE)
  542. friend class JavascriptThreadService;
  543. #endif
  544. #ifdef HEAP_ENUMERATION_VALIDATION
  545. friend class ActiveScriptProfilerHeapEnum;
  546. #endif
  547. friend class ScriptEngineBase; // This is for disabling GC for certain Host operations.
  548. #if !FLOATVAR
  549. friend class ::CodeGenNumberThreadAllocator;
  550. friend struct ::XProcNumberPageSegmentManager;
  551. #endif
  552. public:
  553. static const uint ConcurrentThreadStackSize = 300000;
  554. static const bool FakeZeroLengthArray = true;
  555. #ifdef RECYCLER_PAGE_HEAP
  556. // Keeping as constant in case we want to tweak the value here
  557. // Set to 0 so that the tool can do the filtering instead of the runtime
  558. #if DBG
  559. static const int s_numFramesToSkipForPageHeapAlloc = 10;
  560. static const int s_numFramesToSkipForPageHeapFree = 0;
  561. static const int s_numFramesToCaptureForPageHeap = 32;
  562. #else
  563. static const int s_numFramesToSkipForPageHeapAlloc = 0;
  564. static const int s_numFramesToSkipForPageHeapFree = 0;
  565. static const int s_numFramesToCaptureForPageHeap = 32;
  566. #endif
  567. #endif
  568. uint Cookie;
// RAII scope marking a GC initiated by the external host that must not scan
// the stack. The flag itself (isExternalStackSkippingGC) exists only in DBG
// builds; the Assert below must therefore be a no-op in non-DBG builds or
// this would not compile there.
class AutoEnterExternalStackSkippingGCMode
{
public:
AutoEnterExternalStackSkippingGCMode(Recycler* recycler):
_recycler(recycler)
{
// Setting this in a re-entrant mode is not allowed
Assert(!recycler->isExternalStackSkippingGC);
#if DBG
_recycler->isExternalStackSkippingGC = true;
#endif
}
~AutoEnterExternalStackSkippingGCMode()
{
#if DBG
_recycler->isExternalStackSkippingGC = false;
#endif
}
private:
Recycler* _recycler;
};
  590. private:
  591. class AutoSwitchCollectionStates
  592. {
  593. public:
  594. AutoSwitchCollectionStates(Recycler* recycler, CollectionState entryState, CollectionState exitState):
  595. _recycler(recycler),
  596. _exitState(exitState)
  597. {
  598. _recycler->SetCollectionState(entryState);
  599. }
  600. ~AutoSwitchCollectionStates()
  601. {
  602. _recycler->SetCollectionState(_exitState);
  603. }
  604. private:
  605. Recycler* _recycler;
  606. CollectionState _exitState;
  607. };
  608. #if defined(ENABLE_JS_ETW)
  609. ETWEventGCActivationTrigger collectionStartReason;
  610. CollectionFlags collectionStartFlags;
  611. ETWEventGCActivationTrigger collectionFinishReason;
  612. #endif
  613. class CollectionStateChangedObserver : public ObservableValueObserver<CollectionState>
  614. {
  615. private:
  616. Recycler* recycler;
  617. public:
  618. CollectionStateChangedObserver(Recycler* recycler)
  619. {
  620. this->recycler = recycler;
  621. }
  622. virtual void ValueChanged(const CollectionState& newVal, const CollectionState& oldVal)
  623. {
  624. #ifdef ENABLE_BASIC_TELEMETRY
  625. if (oldVal == CollectionState::CollectionStateNotCollecting &&
  626. newVal != CollectionState::CollectionStateNotCollecting &&
  627. newVal != CollectionState::Collection_PreCollection &&
  628. newVal != CollectionState::CollectionStateExit)
  629. {
  630. this->recycler->GetRecyclerTelemetryInfo().StartPass(newVal);
  631. }
  632. else if (oldVal != CollectionState::CollectionStateNotCollecting &&
  633. oldVal != CollectionState::Collection_PreCollection &&
  634. oldVal != CollectionState::CollectionStateExit &&
  635. newVal == CollectionState::CollectionStateNotCollecting)
  636. {
  637. this->recycler->GetRecyclerTelemetryInfo().EndPass(oldVal);
  638. }
  639. #endif
  640. }
  641. };
  642. CollectionStateChangedObserver collectionStateChangedObserver;
  643. ObservableValue<CollectionState> collectionState;
// Single funnel for collection-state transitions. collectionState is an
// ObservableValue, so this assignment presumably also notifies the registered
// observer (see CollectionStateChangedObserver) -- confirm in ObservableValue.
inline void SetCollectionState(CollectionState newState)
{
this->collectionState = newState;
}
  648. JsUtil::ThreadService *threadService;
  649. #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
  650. bool allowAllocationsDuringConcurrentSweepForCollection;
  651. #endif
  652. HeapBlockMap heapBlockMap;
  653. #if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
// Pin reference count used in leak-tracking builds, optionally carrying the
// stack traces of the pinning call sites. Supports exactly the operations
// the plain `uint` typedef (non-leak-tracking builds) is used with:
// ++, --, assignment of 0, and implicit conversion to uint.
struct PinRecord
{
#ifdef STACK_BACK_TRACE
PinRecord() : refCount(0), stackBackTraces(nullptr) {}
#else
PinRecord() : refCount(0) {}
#endif
// Only reset-to-zero is supported; any recorded stack traces must already
// have been released by the caller.
PinRecord& operator=(uint newRefCount)
{
#ifdef STACK_BACK_TRACE
Assert(stackBackTraces == nullptr);
#endif
Assert(newRefCount == 0); refCount = 0; return *this;
}
PinRecord& operator++() { ++refCount; return *this; }
PinRecord& operator--() { --refCount; return *this; }
operator uint() const { return refCount; }
#ifdef STACK_BACK_TRACE
// Linked list of captured back traces -- presumably one per pin; confirm at the pinning site.
StackBackTraceNode * stackBackTraces;
#endif
private:
uint refCount;
};
  677. #else
  678. typedef uint PinRecord;
  679. #endif
  680. typedef SimpleHashTable<void *, PinRecord, HeapAllocator, DefaultComparer, true, PrimePolicy> PinnedObjectHashTable;
  681. PinnedObjectHashTable pinnedObjectMap;
  682. WeakReferenceHashTable<PrimePolicy> weakReferenceMap;
  683. uint weakReferenceCleanupId;
  684. #if ENABLE_WEAK_REFERENCE_REGIONS
  685. SList<RecyclerWeakReferenceRegion, HeapAllocator> weakReferenceRegionList;
  686. #endif
  687. void * transientPinnedObject;
  688. #if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
  689. #ifdef STACK_BACK_TRACE
  690. StackBackTrace * transientPinnedObjectStackBackTrace;
  691. #endif
  692. #endif
// Arena allocator owned by the recycler on behalf of a guest; guest arenas
// are kept in guestArenaList and scanned for roots during collection.
struct GuestArenaAllocator : public ArenaAllocator
{
GuestArenaAllocator(__in_z char16 const* name, PageAllocator * pageAllocator, void (*outOfMemoryFunc)())
: ArenaAllocator(name, pageAllocator, outOfMemoryFunc), pendingDelete(false)
{
}
// Marks an arena whose deletion was requested but deferred -- presumably
// until it is safe during/after collection; confirm in DeleteGuestArena.
bool pendingDelete;
};
  701. DListBase<GuestArenaAllocator> guestArenaList;
  702. DListBase<ArenaData*> externalGuestArenaList; // guest arenas are scanned for roots
  703. #ifdef RECYCLER_PAGE_HEAP
  704. bool isPageHeapEnabled;
  705. bool capturePageHeapAllocStack;
  706. bool capturePageHeapFreeStack;
  707. inline bool IsPageHeapEnabled() const { return isPageHeapEnabled; }
  708. inline bool ShouldCapturePageHeapAllocStack() const { return capturePageHeapAllocStack; }
  709. void VerifyPageHeapFillAfterAlloc(char* memBlock, size_t size, ObjectInfoBits attributes);
  710. #else
  711. inline const bool IsPageHeapEnabled() const { return false; }
  712. inline bool ShouldCapturePageHeapAllocStack() const { return false; }
  713. #endif
  714. #ifdef RECYCLER_MARK_TRACK
  715. MarkMap* markMap;
  716. CriticalSection markMapCriticalSection;
  717. void PrintMarkMap();
  718. void ClearMarkMap();
  719. #endif
  720. // Number of pages to reserve for the primary mark stack
  721. // This is the minimum number of pages to guarantee that a single heap block
  722. // can be rescanned in the worst possible case where every object in a heap block
  723. // in the smallest bucket needs to be rescanned
  724. // These many pages being reserved guarantees that in OOM Rescan, we can make progress
  725. // on every rescan iteration
  726. // We add one because there is a small amount of the page reserved for page pool metadata
  727. // so we need to allocate an additional page to be sure
  728. // Currently, this works out to 2 pages on 32-bit and 5 pages on 64-bit
  729. // NOTE: We have reduced the PageCount for small blocks to 1. This brought down the number of pages reserved for x64 from 5 to 2. This has not shown
  730. // any adverse impact.
  731. static const int PrimaryMarkStackReservedPageCount =
  732. ((SmallAllocationBlockAttributes::PageCount * MarkContext::MarkCandidateSize) / SmallAllocationBlockAttributes::MinObjectSize) + 1;
  733. MarkContext markContext;
  734. // Contexts for parallel marking.
  735. // We support up to 4 way parallelism, main context + 3 additional parallel contexts.
  736. MarkContext parallelMarkContext1;
  737. MarkContext parallelMarkContext2;
  738. MarkContext parallelMarkContext3;
  739. // Page pools for above markContexts
  740. PagePool markPagePool;
  741. PagePool parallelMarkPagePool1;
  742. PagePool parallelMarkPagePool2;
  743. PagePool parallelMarkPagePool3;
  744. bool IsMarkStackEmpty();
  745. bool HasPendingMarkObjects() const { return markContext.HasPendingMarkObjects() || parallelMarkContext1.HasPendingMarkObjects() || parallelMarkContext2.HasPendingMarkObjects() || parallelMarkContext3.HasPendingMarkObjects(); }
  746. bool HasPendingTrackObjects() const { return markContext.HasPendingTrackObjects() || parallelMarkContext1.HasPendingTrackObjects() || parallelMarkContext2.HasPendingTrackObjects() || parallelMarkContext3.HasPendingTrackObjects(); }
  747. RecyclerCollectionWrapper * collectionWrapper;
  748. HANDLE mainThreadHandle;
  749. void * stackBase;
  750. class SavedRegisterState
  751. {
  752. public:
  753. #if _M_IX86
  754. static const int NumRegistersToSave = 8;
  755. #elif _M_ARM
  756. static const int NumRegistersToSave = 13;
  757. #elif _M_ARM64
  758. static const int NumRegistersToSave = 27;
  759. #elif _M_AMD64
  760. static const int NumRegistersToSave = 16;
  761. #endif
  762. SavedRegisterState()
  763. {
  764. memset(registers, 0, sizeof(void*) * NumRegistersToSave);
  765. }
  766. void** GetRegisters()
  767. {
  768. return registers;
  769. }
  770. void* GetStackTop()
  771. {
  772. // By convention, our register-saving routine will always
  773. // save the stack pointer as the first item in the array
  774. return registers[0];
  775. }
  776. private:
  777. void* registers[NumRegistersToSave];
  778. };
  779. SavedRegisterState savedThreadContext;
  780. #if __has_feature(address_sanitizer)
  781. void* savedAsanFakeStack;
  782. #define SAVE_THREAD_ASAN_FAKE_STACK() \
  783. this->savedAsanFakeStack = __asan_get_current_fake_stack()
  784. #else
  785. #define SAVE_THREAD_ASAN_FAKE_STACK()
  786. #endif
  787. bool inDispose;
  788. #if DBG || defined RECYCLER_TRACE
  789. uint collectionCount;
  790. bool inResolveExternalWeakReferences;
  791. #endif
  792. bool allowDispose;
  793. bool inDisposeWrapper;
  794. bool needOOMRescan;
  795. bool hasDisposableObject;
  796. bool hasNativeGCHost;
  797. DWORD tickCountNextDispose;
  798. bool inExhaustiveCollection;
  799. bool hasExhaustiveCandidate;
  800. bool inCacheCleanupCollection;
  801. bool inDecommitNowCollection;
  802. bool isScriptActive;
  803. bool isInScript;
  804. bool isShuttingDown;
  805. bool scanPinnedObjectMap;
  806. bool hasScannedInitialImplicitRoots;
  807. bool hasPendingUnpinnedObject;
  808. bool hasPendingDeleteGuestArena;
  809. bool inEndMarkOnLowMemory;
  810. bool decommitOnFinish;
  811. bool enableScanInteriorPointers;
  812. bool enableScanImplicitRoots;
  813. bool disableCollectOnAllocationHeuristics;
  814. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  815. bool disableCollection;
  816. #endif
  817. #if ENABLE_PARTIAL_GC
  818. bool enablePartialCollect;
  819. bool inPartialCollectMode;
  820. #if ENABLE_CONCURRENT_GC
  821. bool hasBackgroundFinishPartial;
  822. bool partialConcurrentNextCollection;
  823. #endif
  824. #endif
  825. #ifdef RECYCLER_STRESS
  826. bool forcePartialScanStack;
  827. bool recyclerStress;
  828. #if ENABLE_CONCURRENT_GC
  829. bool recyclerBackgroundStress;
  830. bool recyclerConcurrentStress;
  831. bool recyclerConcurrentRepeatStress;
  832. #endif
  833. #if ENABLE_PARTIAL_GC
  834. bool recyclerPartialStress;
  835. #endif
  836. #endif
  837. #if DBG
  838. bool isExternalStackSkippingGC;
  839. #endif
  840. bool skipStack;
  841. #if ENABLE_CONCURRENT_GC
  842. #if DBG
  843. bool isConcurrentGCOnIdle;
  844. bool isFinishGCOnIdle;
  845. #endif
  846. bool queueTrackedObject;
  847. bool hasPendingConcurrentFindRoot;
  848. bool priorityBoost;
  849. bool disableConcurrent;
  850. bool enableConcurrentMark;
  851. bool enableParallelMark;
  852. bool enableConcurrentSweep;
  853. uint maxParallelism; // Max # of total threads to run in parallel
  854. byte backgroundRescanCount; // for ETW events and stats
  855. byte backgroundFinishMarkCount;
  856. size_t backgroundRescanRootBytes;
  857. HANDLE concurrentWorkReadyEvent; // main thread uses this event to tell concurrent threads that the work is ready
  858. HANDLE concurrentWorkDoneEvent; // concurrent threads use this event to tell main thread that the work allocated is done
  859. HANDLE concurrentThread;
  860. template <uint parallelId>
  861. void ParallelWorkFunc();
  862. RecyclerParallelThread parallelThread1;
  863. RecyclerParallelThread parallelThread2;
  864. #if DBG
  865. // Variable indicating if the concurrent thread has exited or not
  866. // If the concurrent thread hasn't started yet, this is set to true
  867. // Once the concurrent thread starts, it sets this to false,
  868. // and when the concurrent thread exits, it sets this to true.
  869. bool concurrentThreadExited;
  870. bool disableConcurrentThreadExitedCheck;
  871. bool isProcessingTrackedObjects;
  872. #endif
  873. uint tickCountStartConcurrent;
  874. bool isAborting;
  875. #endif
  876. #if DBG
  877. bool hasIncompleteDoCollect;
  878. // This is set to true when we begin a Rescan, and set to false when either:
  879. // (1) We finish the final in-thread Rescan and are about to Mark
  880. // (2) We do a conditional ResetWriteWatch and are about to Mark
  881. // When this flag is true, we should not be modifying existing mark-related state,
  882. // including markBits and rescanState.
  883. bool isProcessingRescan;
  884. #endif
  885. Js::ConfigFlagsTable& recyclerFlagsTable;
  886. RecyclerSweepManager recyclerSweepManagerInstance;
  887. RecyclerSweepManager * recyclerSweepManager;
  888. static const uint tickDiffToNextCollect = 300;
  889. #ifdef IDLE_DECOMMIT_ENABLED
  890. HANDLE concurrentIdleDecommitEvent;
  891. LONG needIdleDecommitSignal;
  892. #endif
  893. #if ENABLE_PARTIAL_GC
  894. SListBase<void *> clientTrackedObjectList;
  895. ArenaAllocator clientTrackedObjectAllocator;
  896. size_t partialUncollectedAllocBytes;
  897. // Dynamic Heuristics for partial GC
  898. size_t uncollectedNewPageCountPartialCollect;
  899. #endif
  900. uint tickCountNextCollection;
  901. uint tickCountNextFinishCollection;
  902. void (*outOfMemoryFunc)();
  903. #ifdef RECYCLER_TEST_SUPPORT
  904. BOOL (*checkFn)(char* addr, size_t size);
  905. #endif
  906. ExternalRootMarker externalRootMarker;
  907. void * externalRootMarkerContext;
  908. #ifdef PROFILE_EXEC
  909. Js::Profiler * profiler;
  910. Js::Profiler * backgroundProfiler;
  911. PageAllocator backgroundProfilerPageAllocator;
  912. DListBase<ArenaAllocator> backgroundProfilerArena;
  913. #endif
  914. // destruct autoHeap after backgroundProfilerPageAllocator;
  915. HeapInfoManager autoHeap;
template <ObjectInfoBits attributes>
HeapInfo * GetHeapInfoForAllocation()
{
// Allocation-path hook; currently just forwards to GetHeapInfo<attributes>().
return this->GetHeapInfo<attributes>();
}
template <ObjectInfoBits attributes>
HeapInfo * GetHeapInfo()
{
// The attributes parameter is currently unused: every attribute class
// resolves to the default heap.
return this->autoHeap.GetDefaultHeap();
}
HeapInfo * GetHeapInfo()
{
// Non-template form; same default heap as the template above.
return this->autoHeap.GetDefaultHeap();
}
  930. #ifdef PROFILE_MEM
  931. RecyclerMemoryData * memoryData;
  932. #endif
  933. ThreadContextId mainThreadId;
  934. #if DBG
  935. uint heapBlockCount;
  936. bool disableThreadAccessCheck;
  937. #endif
  938. #if DBG || defined(RECYCLER_STATS)
  939. bool isForceSweeping;
  940. #endif
  941. #ifdef NTBUILD
  942. RecyclerWatsonTelemetryBlock localTelemetryBlock;
  943. RecyclerWatsonTelemetryBlock * telemetryBlock;
  944. #endif
  945. #ifdef ENABLE_BASIC_TELEMETRY
  946. private:
  947. RecyclerTelemetryInfo telemetryStats;
  948. GUID recyclerID;
  949. public:
  950. GUID& GetRecyclerID() { return this->recyclerID; }
  951. #endif
  952. public:
  953. bool GetIsInScript() { return this->isInScript; }
  954. bool GetIsScriptActive() { return this->isScriptActive; }
  955. private:
  956. #ifdef RECYCLER_STATS
  957. RecyclerCollectionStats collectionStats;
  958. void PrintHeapBlockStats(char16 const * name, HeapBlock::HeapBlockType type);
  959. void PrintHeapBlockMemoryStats(char16 const * name, HeapBlock::HeapBlockType type);
  960. void PrintCollectStats();
  961. void PrintHeuristicCollectionStats();
  962. void PrintMarkCollectionStats();
  963. void PrintBackgroundCollectionStats();
  964. void PrintMemoryStats();
  965. void PrintBackgroundCollectionStat(RecyclerCollectionStats::MarkData const& markData);
  966. #endif
  967. #ifdef RECYCLER_TRACE
  968. CollectionParam collectionParam;
  969. void PrintBlockStatus(HeapBucket * heapBucket, HeapBlock * heapBlock, char16 const * name);
  970. #endif
  971. #ifdef RECYCLER_MEMORY_VERIFY
  972. uint verifyPad;
  973. bool verifyEnabled;
  974. #endif
  975. #ifdef RECYCLER_DUMP_OBJECT_GRAPH
  976. friend class RecyclerObjectGraphDumper;
  977. RecyclerObjectGraphDumper * objectGraphDumper;
  978. public:
  979. bool dumpObjectOnceOnCollect;
  980. #endif
  981. public:
  982. Recycler(AllocationPolicyManager * policyManager, IdleDecommitPageAllocator * pageAllocator, void(*outOfMemoryFunc)(), Js::ConfigFlagsTable& flags, RecyclerTelemetryHostInterface* hostInterface);
  983. ~Recycler();
  984. void Initialize(const bool forceInThread, JsUtil::ThreadService *threadService, const bool deferThreadStartup = false
  985. #ifdef RECYCLER_PAGE_HEAP
  986. , PageHeapMode pageheapmode = PageHeapMode::PageHeapModeOff
  987. , bool captureAllocCallStack = false
  988. , bool captureFreeCallStack = false
  989. #endif
  990. );
  991. Js::ConfigFlagsTable& GetRecyclerFlagsTable() const { return this->recyclerFlagsTable; }
  992. void SetMemProtectMode();
  993. bool IsMemProtectMode();
  994. size_t GetUsedBytes();
  995. void LogMemProtectHeapSize(bool fromGC);
  996. char* Realloc(void* buffer, DECLSPEC_GUARD_OVERFLOW size_t existingBytes, DECLSPEC_GUARD_OVERFLOW size_t requestedBytes, bool truncate = true);
  997. #ifdef NTBUILD
  998. void SetTelemetryBlock(RecyclerWatsonTelemetryBlock * telemetryBlock) { this->telemetryBlock = telemetryBlock; }
  999. #endif
  1000. uint GetPinnedObjectCount() const { return this->pinnedObjectMap.Count(); }
  1001. void Prime();
  1002. void* GetOwnerContext() { return (void*) this->collectionWrapper; }
  1003. bool NeedOOMRescan() const;
  1004. void SetNeedOOMRescan();
  1005. void ClearNeedOOMRescan();
  1006. BOOL RequestConcurrentWrapperCallback();
  1007. BOOL CollectionInProgress() const;
  1008. BOOL IsExiting() const;
  1009. BOOL IsSweeping() const;
  1010. #ifdef RECYCLER_PAGE_HEAP
  1011. inline bool ShouldCapturePageHeapFreeStack() const { return capturePageHeapFreeStack; }
  1012. #else
  1013. inline bool ShouldCapturePageHeapFreeStack() const { return false; }
  1014. #endif
  1015. void SetIsThreadBound();
  1016. void SetIsScriptActive(bool isScriptActive);
  1017. void SetIsInScript(bool isInScript);
  1018. bool HasNativeGCHost() const;
  1019. void SetHasNativeGCHost();
  1020. bool ShouldIdleCollectOnExit();
  1021. void ScheduleNextCollection();
  1022. BOOL IsShuttingDown() const { return this->isShuttingDown; }
  1023. #if ENABLE_CONCURRENT_GC
  1024. #if DBG
  1025. BOOL IsConcurrentMarkEnabled() const { return enableConcurrentMark; }
  1026. BOOL IsConcurrentSweepEnabled() const { return enableConcurrentSweep; }
  1027. #endif
  1028. template <CollectionFlags flags>
  1029. BOOL FinishConcurrent();
  1030. void ShutdownThread();
  1031. bool EnableConcurrent(JsUtil::ThreadService *threadService, bool startAllThreads);
  1032. void DisableConcurrent();
  1033. void StartQueueTrackedObject();
  1034. bool DoQueueTrackedObject() const;
  1035. void PrepareSweep();
  1036. #endif
  1037. template <CollectionFlags flags>
  1038. void SetupPostCollectionFlags();
  1039. void EnsureNotCollecting();
  1040. #if ENABLE_CONCURRENT_GC
  1041. bool QueueTrackedObject(FinalizableObject * trackableObject);
  1042. #endif
  1043. // FindRoots
  1044. void TryMarkNonInterior(void* candidate, void* parentReference = nullptr);
  1045. void TryMarkInterior(void *candidate, void* parentReference = nullptr);
  1046. bool InCacheCleanupCollection() { return inCacheCleanupCollection; }
  1047. void ClearCacheCleanupCollection() { Assert(inCacheCleanupCollection); inCacheCleanupCollection = false; }
  1048. // Finalizer support
  1049. void SetExternalRootMarker(ExternalRootMarker fn, void * context);
  1050. ArenaAllocator * CreateGuestArena(char16 const * name, void (*outOfMemoryFunc)());
  1051. void DeleteGuestArena(ArenaAllocator * arenaAllocator);
// Adds an external guest arena to the root-scan list. Returns the list slot
// so the caller can later unregister via the ArenaData** overload below.
ArenaData ** RegisterExternalGuestArena(ArenaData* guestArena)
{
return externalGuestArenaList.PrependNode(&NoThrowHeapAllocator::Instance, guestArena);
}
// Removes the arena by value (linear removal via Remove).
void UnregisterExternalGuestArena(ArenaData* guestArena)
{
externalGuestArenaList.Remove(&NoThrowHeapAllocator::Instance, guestArena);
// Any time a root is removed during a GC, it indicates that an exhaustive
// collection is likely going to have work to do so trigger an exhaustive
// candidate GC to indicate this fact
this->CollectNow<CollectExhaustiveCandidate>();
}
// Removes the arena by the slot returned from RegisterExternalGuestArena.
void UnregisterExternalGuestArena(ArenaData** guestArena)
{
externalGuestArenaList.RemoveElement(&NoThrowHeapAllocator::Instance, guestArena);
// Any time a root is removed during a GC, it indicates that an exhaustive
// collection is likely going to have work to do so trigger an exhaustive
// candidate GC to indicate this fact
this->CollectNow<CollectExhaustiveCandidate>();
}
  1072. #ifdef RECYCLER_TEST_SUPPORT
  1073. void SetCheckFn(BOOL(*checkFn)(char* addr, size_t size));
  1074. #endif
  1075. void SetCollectionWrapper(RecyclerCollectionWrapper * wrapper);
  1076. static size_t GetAlignedSize(size_t size) { return HeapInfo::GetAlignedSize(size); }
  1077. HeapInfo* GetDefaultHeapInfo() { return autoHeap.GetDefaultHeap(); }
  1078. template <CollectionFlags flags>
  1079. BOOL CollectNow();
  1080. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  1081. void DisplayMemStats();
  1082. #endif
  1083. void AddExternalMemoryUsage(size_t size);
  1084. bool NeedDispose() { return this->hasDisposableObject; }
  1085. template <CollectionFlags flags>
  1086. bool FinishDisposeObjectsNow();
  1087. bool RequestExternalMemoryAllocation(size_t size);
  1088. void ReportExternalMemoryFailure(size_t size);
  1089. void ReportExternalMemoryFree(size_t size);
  1090. // ExternalAllocFunc returns true when allocation succeeds
  1091. template <typename ExternalAllocFunc>
  1092. bool DoExternalAllocation(size_t size, ExternalAllocFunc externalAllocFunc);
  1093. #ifdef TRACE_OBJECT_LIFETIME
  1094. #define DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes) \
  1095. inline char* AllocFunc##Trace(size_t size) \
  1096. { \
  1097. return AllocWithAttributesFunc<(ObjectInfoBits)(attributes | TraceBit), /* nothrow = */ false>(size); \
  1098. }
  1099. #else
  1100. #define DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributeFunc, attributes)
  1101. #endif
  1102. #define DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocWithAttributesFunc, attributes) \
  1103. inline char * AllocFunc(DECLSPEC_GUARD_OVERFLOW size_t size) \
  1104. { \
  1105. return AllocWithAttributesFunc<attributes, /* nothrow = */ false>(size); \
  1106. } \
  1107. __forceinline char * AllocFunc##Inlined(DECLSPEC_GUARD_OVERFLOW size_t size) \
  1108. { \
  1109. return AllocWithAttributesFunc##Inlined<attributes, /* nothrow = */ false>(size); \
  1110. } \
  1111. DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes);
  1112. #define DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocWithAttributesFunc, attributes) \
  1113. inline char * NoThrow##AllocFunc(DECLSPEC_GUARD_OVERFLOW size_t size) \
  1114. { \
  1115. return AllocWithAttributesFunc<attributes, /* nothrow = */ true>(size); \
  1116. } \
  1117. inline char * NoThrow##AllocFunc##Inlined(DECLSPEC_GUARD_OVERFLOW size_t size) \
  1118. { \
  1119. return AllocWithAttributesFunc##Inlined<attributes, /* nothrow = */ true>(size); \
  1120. } \
  1121. DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes);
  1122. #define DEFINE_RECYCLER_ALLOC(AllocFunc, attributes) DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocWithAttributes, attributes)
  1123. #define DEFINE_RECYCLER_ALLOC_ZERO(AllocFunc, attributes) DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocZeroWithAttributes, attributes)
  1124. #define DEFINE_RECYCLER_NOTHROW_ALLOC(AllocFunc, attributes) DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocWithAttributes, attributes)
  1125. #define DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocFunc, attributes) DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocZeroWithAttributes, attributes)
  1126. #if GLOBAL_ENABLE_WRITE_BARRIER
  1127. DEFINE_RECYCLER_ALLOC(Alloc, WithBarrierBit);
  1128. DEFINE_RECYCLER_ALLOC_ZERO(AllocZero, WithBarrierBit);
  1129. DEFINE_RECYCLER_ALLOC(AllocFinalized, FinalizableWithBarrierObjectBits);
  1130. DEFINE_RECYCLER_ALLOC(AllocTracked, ClientTrackableObjectWithBarrierBits);
  1131. DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTracked, ClientTrackableObjectWithBarrierBits);
  1132. #else
  1133. DEFINE_RECYCLER_ALLOC(Alloc, NoBit);
  1134. DEFINE_RECYCLER_ALLOC_ZERO(AllocZero, NoBit);
  1135. DEFINE_RECYCLER_ALLOC(AllocFinalized, FinalizableObjectBits);
  1136. DEFINE_RECYCLER_ALLOC(AllocTracked, ClientTrackableObjectBits);
  1137. DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTracked, ClientFinalizableObjectBits);
  1138. #endif
  1139. #ifdef RECYCLER_WRITE_BARRIER_ALLOC
  1140. DEFINE_RECYCLER_ALLOC(AllocWithBarrier, WithBarrierBit);
  1141. DEFINE_RECYCLER_ALLOC_ZERO(AllocZeroWithBarrier, WithBarrierBit);
  1142. DEFINE_RECYCLER_ALLOC(AllocFinalizedWithBarrier, FinalizableWithBarrierObjectBits);
  1143. DEFINE_RECYCLER_ALLOC(AllocTrackedWithBarrier, ClientTrackableObjectWithBarrierBits);
  1144. DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTrackedWithBarrier, ClientFinalizableObjectWithBarrierBits);
  1145. #endif
  1146. DEFINE_RECYCLER_ALLOC(AllocLeaf, LeafBit);
  1147. DEFINE_RECYCLER_ALLOC(AllocFinalizedLeaf, FinalizableLeafBits);
  1148. DEFINE_RECYCLER_ALLOC(AllocTrackedLeaf, ClientTrackableLeafBits);
  1149. DEFINE_RECYCLER_ALLOC_ZERO(AllocLeafZero, LeafBit);
  1150. DEFINE_RECYCLER_ALLOC_ZERO(AllocZeroTrackedLeaf, ClientTrackableLeafBits);
  1151. DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocImplicitRootLeaf, ImplicitRootLeafBits);
  1152. DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocImplicitRoot, ImplicitRootBit);
// Allocates an object tagged with enum-class attribute bits. The caller
// must supply at least one bit from EnumClassMask; the stricter assert
// (that ONLY enum-class / barrier bits are present) is disabled below.
template <ObjectInfoBits enumClass>
char * AllocEnumClass(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    Assert((enumClass & EnumClassMask) != 0);
    //Assert((enumClass & ~EnumClassMask & ~WithBarrierBit) == 0);
    return AllocWithAttributes<(ObjectInfoBits)(enumClass), /* nothrow = */ false>(size);
}
// Throwing allocation with caller-specified attribute bits (no validation
// of the bit combination is done here).
template <ObjectInfoBits infoBits>
char * AllocWithInfoBits(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    return AllocWithAttributes<infoBits, /* nothrow = */ false>(size);
}
// Allocation entry for visited-host (IRecyclerVisitedObject) objects.
// NOTE(review): unlike AllocWithInfoBits this passes nothrow = true, so it
// returns null on OOM — presumably because the external host cannot handle
// the recycler's OOM exception; confirm against callers.
template <ObjectInfoBits infoBits>
char * AllocVisitedHost(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    return AllocWithAttributes<infoBits, /* nothrow = */ true>(size);
}
// --- Weak reference support ---
// Creates a weak-reference handle to pStrongReference (does not keep the
// target alive).
template<typename T>
RecyclerWeakReference<T>* CreateWeakReferenceHandle(T* pStrongReference);
#if ENABLE_WEAK_REFERENCE_REGIONS
// Allocates a region of 'count' weak-reference slots in one block.
template<typename T>
RecyclerWeakReferenceRegionItem<T>* CreateWeakReferenceRegion(size_t count);
#endif
// Generation counter for weak-reference cleanup passes.
uint GetWeakReferenceCleanupId() const { return weakReferenceCleanupId; }
// Returns an existing handle for the target if one exists, otherwise
// creates one; result is written through ppWeakRef.
template<typename T>
bool FindOrCreateWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **ppWeakRef);
// Lookup-only variant: returns false (without allocating) when no handle
// exists for the target.
template<typename T>
bool TryGetWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **weakReference);
// --- Small-allocator introspection ---
// These expose the address of a size-class allocator plus the offsets of
// its bump-pointer end address and free list; presumably consumed by the
// JIT to emit inline allocation fast paths (see
// GetNormalHeapBlockAllocatorInfoForNativeAllocation below).
template <ObjectInfoBits attributes>
char* GetAddressOfAllocator(size_t sizeCat)
{
    Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
    return (char*)this->GetHeapInfo<attributes>()->template GetBucket<(ObjectInfoBits)(attributes & GetBlockTypeBitMask)>(sizeCat).GetAllocator();
}
// Offset (within the allocator) of the current bump-allocation end address.
template <ObjectInfoBits attributes>
uint32 GetEndAddressOffset(size_t sizeCat)
{
    Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
    return this->GetHeapInfo<attributes>()->template GetBucket<(ObjectInfoBits)(attributes & GetBlockTypeBitMask)>(sizeCat).GetAllocator()->GetEndAddressOffset();
}
// Offset (within the allocator) of the free-object list head.
template <ObjectInfoBits attributes>
uint32 GetFreeObjectListOffset(size_t sizeCat)
{
    Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
    return this->GetHeapInfo<attributes>()->template GetBucket<(ObjectInfoBits)(attributes & GetBlockTypeBitMask)>(sizeCat).GetAllocator()->GetFreeObjectListOffset();
}
// Fills in allocator address/offsets for JIT'ed (native) allocation code;
// the static overload is usable from out-of-process JIT via recyclerAddr.
void GetNormalHeapBlockAllocatorInfoForNativeAllocation(size_t sizeCat, void*& allocatorAddress, uint32& endAddressOffset, uint32& freeListOffset, bool allowBumpAllocation, bool isOOPJIT);
static void GetNormalHeapBlockAllocatorInfoForNativeAllocation(void* recyclerAddr, size_t sizeCat, void*& allocatorAddress, uint32& endAddressOffset, uint32& freeListOffset, bool allowBumpAllocation, bool isOOPJIT);
bool AllowNativeCodeBumpAllocation();
static void TrackNativeAllocatedMemoryBlock(Recycler * recycler, void * memBlock, size_t sizeCat);
// Recycler memory is garbage collected, never explicitly freed; this
// overload exists only to satisfy allocator-shaped interfaces and must
// never actually be called (hence the unconditional assert).
void Free(void* buffer, size_t size)
{
    Assert(false);
}
// Explicit-free support: returns the object to its block's free list
// (leaf vs. non-leaf variants); used for eager reuse, not a true free.
bool ExplicitFreeLeaf(void* buffer, size_t size);
bool ExplicitFreeNonLeaf(void* buffer, size_t size);
template <ObjectInfoBits attributes>
bool ExplicitFreeInternalWrapper(void* buffer, size_t allocSize);
template <ObjectInfoBits attributes, typename TBlockAttributes>
bool ExplicitFreeInternal(void* buffer, size_t size, size_t sizeCat);
size_t GetAllocSize(size_t size);
template <typename TBlockAttributes>
void SetExplicitFreeBitOnSmallBlock(HeapBlock* heapBlock, size_t sizeCat, void* buffer, ObjectInfoBits attributes);
// Allocates from a specific HeapInfo as leaf memory (throwing).
char* HeapAllocR(HeapInfo* eHeap, DECLSPEC_GUARD_OVERFLOW size_t size)
{
    return RealAlloc<LeafBit, /* nothrow = */ false>(eHeap, size);
}
void HeapFree(HeapInfo* eHeap,void* candidate);
// Walks live objects whose info bits match, invoking the callback on each.
void EnumerateObjects(ObjectInfoBits infoBits, void (*CallBackFunction)(void * address, size_t size));
// Pins/unpins an object as an external GC root; optional out-param
// receives the resulting pin count.
void RootAddRef(void* obj, uint *count = nullptr);
void RootRelease(void* obj, uint *count = nullptr);
// Core allocation against an explicit HeapInfo (small or large path).
template <ObjectInfoBits attributes, bool nothrow>
inline char* RealAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size);
template <ObjectInfoBits attributes, bool isSmallAlloc, bool nothrow>
inline char* RealAllocFromBucket(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size);
// Idle-decommit window: page allocators may decommit free pages while idle.
void EnterIdleDecommit();
void LeaveIdleDecommit();
// Runs pending finalizer Dispose() calls.
void DisposeObjects();
BOOL IsValidObject(void* candidate, size_t minimumSize = 0);
#if DBG
void SetDisableThreadAccessCheck();
void SetDisableConcurrentThreadExitedCheck();
void CheckAllocExternalMark() const;
BOOL IsFreeObject(void * candidate);
BOOL IsReentrantState() const;
#endif
#if DBG_DUMP
void PrintMarkStack();
#endif
#ifdef PROFILE_EXEC
Js::Profiler * GetProfiler() const { return this->profiler; }
ArenaAllocator * AddBackgroundProfilerArena();
void ReleaseBackgroundProfilerArena(ArenaAllocator * arena);
void SetProfiler(Js::Profiler * profiler, Js::Profiler * backgroundProfiler);
#endif
#ifdef RECYCLER_MEMORY_VERIFY
// Memory verification: allocations are padded and filled with
// VerifyMemFill so corruption can be detected at sweep/verify time.
BOOL VerifyEnabled() const { return verifyEnabled; }
uint GetVerifyPad() const { return verifyPad; }
void Verify(Js::Phase phase);
static void VerifyCheck(BOOL cond, char16 const * msg, void * address, void * corruptedAddress);
static void VerifyCheckFill(void * address, size_t size);
void FillCheckPad(void * address, size_t size, size_t alignedAllocSize, bool objectAlreadyInitialized);
// Convenience overload: object not yet initialized.
void FillCheckPad(void * address, size_t size, size_t alignedAllocSize)
{
    FillCheckPad(address, size, alignedAllocSize, false);
}
static void FillPadNoCheck(void * address, size_t size, size_t alignedAllocSize, bool objectAlreadyInitialized);
void VerifyCheckPad(void * address, size_t size);
void VerifyCheckPadExplicitFreeList(void * address, size_t size);
static const byte VerifyMemFill = 0xCA;
#endif
#ifdef RECYCLER_ZERO_MEM_CHECK
void VerifyZeroFill(void * address, size_t size);
void VerifyLargeAllocZeroFill(void * address, size_t size, ObjectInfoBits attributes);
#endif
#ifdef RECYCLER_DUMP_OBJECT_GRAPH
bool DumpObjectGraph(RecyclerObjectGraphDumper::Param * param = nullptr);
void DumpObjectDescription(void *object);
#endif
#ifdef LEAK_REPORT
void ReportLeaks();
void ReportLeaksOnProcessDetach();
#endif
#ifdef CHECK_MEMORY_LEAK
void CheckLeaks(char16 const * header);
void CheckLeaksOnProcessDetach(char16 const * header);
#endif
#ifdef RECYCLER_TRACE
void SetDomCollect(bool isDomCollect) { collectionParam.domCollect = isDomCollect; }
void CaptureCollectionParam(CollectionFlags flags, bool repeat = false);
#endif
private:
// RecyclerRootPtr has implicit conversion to pointers, prevent it to be
// passed to RootAddRef/RootRelease directly
// (declared private and left undefined so such calls fail to link/compile).
template <typename T>
void RootAddRef(RecyclerRootPtr<T>& ptr, uint *count = nullptr);
template <typename T>
void RootRelease(RecyclerRootPtr<T>& ptr, uint *count = nullptr);
// Collection entry points, parameterized by the requested CollectionFlags.
template <CollectionFlags flags>
BOOL CollectInternal();
template <CollectionFlags flags>
BOOL Collect();
// Heuristic-gated variant: collects only when allocation counters say so.
template <CollectionFlags flags>
BOOL CollectWithHeuristic();
template <CollectionFlags flags>
BOOL CollectWithExhaustiveCandidate();
template <CollectionFlags flags>
BOOL GetPartialFlag();
bool NeedExhaustiveRepeatCollect() const;
#if DBG
bool ExpectStackSkip() const;
#endif
// Sentinel: "no root-scan byte count available".
static size_t const InvalidScanRootBytes = (size_t)-1;
// Small Allocator
// Registration of per-thread small heap block allocators by size class.
template <typename SmallHeapBlockAllocatorType>
void AddSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
template <typename SmallHeapBlockAllocatorType>
void RemoveSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
template <ObjectInfoBits attributes, typename SmallHeapBlockAllocatorType>
char * SmallAllocatorAlloc(SmallHeapBlockAllocatorType * allocator, size_t sizeCat, size_t size);
// Allocation
// Inlined worker that all typed Alloc* entry points funnel into.
template <ObjectInfoBits attributes, bool nothrow>
inline char * AllocWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size);
// Out-of-line wrapper around the inlined worker (keeps call sites small).
template <ObjectInfoBits attributes, bool nothrow>
char * AllocWithAttributes(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    return AllocWithAttributesInlined<attributes, nothrow>(size);
}
// Zero-filling counterparts of the above.
template <ObjectInfoBits attributes, bool nothrow>
inline char* AllocZeroWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size);
template <ObjectInfoBits attributes, bool nothrow>
char* AllocZeroWithAttributes(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    return AllocZeroWithAttributesInlined<attributes, nothrow>(size);
}
// Allocates the internal storage for a weak-reference entry (throwing).
char* AllocWeakReferenceEntry(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    return AllocWithAttributes<WeakReferenceEntryBits, /* nothrow = */ false>(size);
}
  1332. bool NeedDisposeTimed()
  1333. {
  1334. DWORD ticks = ::GetTickCount();
  1335. return (ticks > tickCountNextDispose && this->hasDisposableObject);
  1336. }
// Large-object allocation path (sizes above the small-block size classes).
char* TryLargeAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size, ObjectInfoBits attributes, bool nothrow);
template <bool nothrow>
char* LargeAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size, ObjectInfoBits attributes);
// Raises the out-of-memory condition (behavior depends on nothrow context
// at the call sites).
void OutOfMemory();
// Collection
BOOL DoCollect(CollectionFlags flags);
BOOL DoCollectWrapped(CollectionFlags flags);
BOOL CollectOnAllocatorThread();
#if DBG
void ResetThreadId();
#endif
// --- Root finding ---
// 'background' variants run on the concurrent GC thread.
template <bool background>
size_t ScanPinnedObjects();
size_t ScanStack();
size_t ScanArena(ArenaData * alloc, bool background);
void ScanImplicitRoots();
void ScanInitialImplicitRoots();
void ScanNewImplicitRoots();
size_t FindRoots();
size_t TryMarkArenaMemoryBlockList(ArenaMemoryBlock * memoryBlocks);
size_t TryMarkBigBlockList(BigBlock * memoryBlocks);
#if ENABLE_CONCURRENT_GC
#if FALSE // REVIEW: remove this code since not using
size_t TryMarkBigBlockListWithWriteWatch(BigBlock * memoryBlocks);
#endif
#endif
// Mark
// Mark phase: reset mark bits, then trace reachable objects from roots.
void ResetMarks(ResetMarkFlags flags);
void Mark();
bool EndMark();
// Returns whether a rescan is needed because marking hit OOM.
bool EndMarkCheckOOMRescan();
void EndMarkOnLowMemory();
#if ENABLE_CONCURRENT_GC
void DoParallelMark();
void DoBackgroundParallelMark();
#endif
size_t RootMark(CollectionState markState);
// Drains the mark stack; 'background' selects the concurrent-thread path.
void ProcessMark(bool background);
void ProcessParallelMark(bool background, MarkContext * markContext);
template <bool parallel, bool interior>
void ProcessMarkContext(MarkContext * markContext);
public:
// True if the candidate address is marked in the heap block map.
bool IsObjectMarked(void* candidate) { return this->heapBlockMap.IsMarked(candidate); }
#ifdef RECYCLER_STRESS
bool StressCollectNow();
#endif
private:
// Resolves an arbitrary address to its containing heap block (or null).
HeapBlock* FindHeapBlock(void * candidate);
  1385. struct FindBlockCache
  1386. {
  1387. FindBlockCache():
  1388. heapBlock(nullptr),
  1389. candidate(nullptr)
  1390. {
  1391. }
  1392. HeapBlock* heapBlock;
  1393. void* candidate;
  1394. } blockCache;
// Conservative scanning of a candidate object's payload for GC pointers;
// the 'Interior' variant also recognizes pointers into object interiors.
inline void ScanObjectInline(void ** obj, size_t byteCount);
inline void ScanObjectInlineInterior(void ** obj, size_t byteCount);
template <bool doSpecialMark, bool forceInterior = false>
inline void ScanMemoryInline(void ** obj, size_t byteCount
    ADDRESS_SANITIZER_APPEND(RecyclerScanMemoryType scanMemoryType = RecyclerScanMemoryType::General));
// Thin wrapper that skips the call entirely for empty ranges.
template <bool doSpecialMark>
void ScanMemory(void ** obj, size_t byteCount) { if (byteCount != 0) { ScanMemoryInline<doSpecialMark>(obj, byteCount); } }
// Marks a candidate; noexcept contract (throw()) because it runs during GC.
bool AddMark(void * candidate, size_t byteCount) throw();
#ifdef RECYCLER_VISITED_HOST
// Host-traced objects report their references precisely via IRecyclerVisitedObject.
bool AddPreciselyTracedMark(IRecyclerVisitedObject * candidate) throw();
#endif
// Sweep
#if ENABLE_PARTIAL_GC
// rescanRootBytes = (size_t)-1 means "unknown" (see InvalidScanRootBytes).
bool Sweep(size_t rescanRootBytes = (size_t)-1, bool concurrent = false, bool adjustPartialHeuristics = false);
#else
bool Sweep(bool concurrent = false);
#endif
// Clears weak-reference handles whose targets died in this collection.
void SweepWeakReference();
void SweepHeap(bool concurrent, RecyclerSweepManager& recyclerSweepManager);
void FinishSweep(RecyclerSweepManager& recyclerSweepManager);
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
// Two-pass concurrent sweep: allocations are allowed between the passes.
void DoTwoPassConcurrentSweepPreCheck();
void FinishSweepPrep();
void FinishConcurrentSweepPass1();
void FinishConcurrentSweep();
#endif
bool FinishDisposeObjects();
template <CollectionFlags flags>
bool FinishDisposeObjectsWrapped();
// end collection
void FinishCollection();
void FinishCollection(bool needConcurrentSweep);
void EndCollection();
void ResetCollectionState();
void ResetMarkCollectionState();
void ResetHeuristicCounters();
void ResetPartialHeuristicCounters();
// Collection-state predicates (collectionState bit tests).
BOOL IsMarkState() const;
BOOL IsFindRootsState() const;
BOOL IsInThreadFindRootsState() const;
// Phase bracketing for profiling/tracing.
template <Js::Phase phase>
void CollectionBegin();
template <Js::Phase phase>
void CollectionEnd();
#if ENABLE_PARTIAL_GC
void ProcessClientTrackedObjects();
bool PartialCollect(bool concurrent);
void FinishPartialCollect(RecyclerSweepManager * recyclerSweep = nullptr);
void ClearPartialCollect();
#if ENABLE_CONCURRENT_GC
void BackgroundFinishPartialCollect(RecyclerSweepManager * recyclerSweep);
#endif
#endif
size_t RescanMark(DWORD waitTime);
size_t FinishMark(DWORD waitTime);
size_t FinishMarkRescan(bool background);
#if ENABLE_CONCURRENT_GC
void ProcessTrackedObjects();
#endif
// True while in a callback state where allocation is permitted.
BOOL IsAllocatableCallbackState()
{
    return (collectionState & (Collection_PostSweepRedeferralCallback | Collection_PostCollectionCallback));
}
#if ENABLE_CONCURRENT_GC
// Concurrent GC
// Concurrent collection is enabled if any of mark/parallel-mark/sweep may
// run off the script thread.
BOOL IsConcurrentEnabled() const { return this->enableConcurrentMark || this->enableParallelMark || this->enableConcurrentSweep; }
// State predicates over collectionState for the concurrent phases.
BOOL IsConcurrentMarkState() const;
BOOL IsConcurrentMarkExecutingState() const;
BOOL IsConcurrentResetMarksState() const;
BOOL IsConcurrentFindRootState() const;
BOOL IsConcurrentExecutingState() const;
BOOL IsConcurrentSweepExecutingState() const;
BOOL IsConcurrentSweepSetupState() const;
BOOL IsConcurrentSweepState() const;
BOOL IsConcurrentState() const;
// True only when ALL Collection_ConcurrentSweep bits are set.
BOOL InConcurrentSweep()
{
    return ((collectionState & Collection_ConcurrentSweep) == Collection_ConcurrentSweep);
}
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
// Per-collection switch: may allocation proceed while sweep is concurrent?
bool AllowAllocationsDuringConcurrentSweep()
{
    return this->allowAllocationsDuringConcurrentSweepForCollection;
}
#endif
#if DBG
BOOL IsConcurrentFinishedState() const;
#endif // DBG
// Lifecycle of the concurrent GC thread (or thread-service callback).
bool InitializeConcurrent(JsUtil::ThreadService* threadService);
bool AbortConcurrent(bool restoreState);
void FinalizeConcurrent(bool restoreState);
static unsigned int CALLBACK StaticThreadProc(LPVOID lpParameter);
static int ExceptFilter(LPEXCEPTION_POINTERS pEP);
DWORD ThreadProc();
void DoBackgroundWork(bool forceForeground = false);
static void CALLBACK StaticBackgroundWorkCallback(void * callbackData);
BOOL CollectOnConcurrentThread();
// Kicks off a concurrent phase identified by 'state'.
bool StartConcurrent(CollectionState const state);
BOOL StartBackgroundMarkCollect();
BOOL StartSynchronousBackgroundMark();
BOOL StartAsynchronousBackgroundMark();
BOOL StartBackgroundMark(bool foregroundResetMark, bool foregroundFindRoots);
BOOL StartConcurrentSweepCollect();
template <CollectionFlags flags>
BOOL TryFinishConcurrentCollect();
BOOL WaitForConcurrentThread(DWORD waitTime, RecyclerWaitReason caller = RecyclerWaitReason::Other);
void FlushBackgroundPages();
BOOL FinishConcurrentCollect(CollectionFlags flags);
void FinishTransferSwept(CollectionFlags flags);
BOOL FinishConcurrentCollectWrapped(CollectionFlags flags);
// Work items executed on the background GC thread.
void BackgroundMark();
void BackgroundMarkWeakRefs();
void BackgroundResetMarks();
void PrepareBackgroundFindRoots();
void RevertPrepareBackgroundFindRoots();
size_t BackgroundFindRoots();
size_t BackgroundScanStack();
size_t BackgroundRepeatMark();
size_t BackgroundRescan(RescanFlags rescanFlags);
void BackgroundResetWriteWatchAll();
size_t BackgroundFinishMark();
char* GetScriptThreadStackTop();
void SweepPendingObjects(RecyclerSweepManager& recyclerSweepManager);
void ConcurrentTransferSweptObjects(RecyclerSweepManager& recyclerSweepManager);
#if ENABLE_PARTIAL_GC
void ConcurrentPartialTransferSweptObjects(RecyclerSweepManager& recyclerSweepManager);
#endif // ENABLE_PARTIAL_GC
#endif // ENABLE_CONCURRENT_GC
bool ForceSweepObject();
// Notifies trackers/ETW that memory at 'address' is being freed.
void NotifyFree(__in char * address, size_t size);
template <typename T>
void NotifyFree(T * heapBlock);
void CleanupPendingUnroot();
#ifdef ENABLE_JS_ETW
// Free events are batched (BulkFreeMemoryCount records) before writing a
// single ETW event, to keep event volume manageable.
ULONG EventWriteFreeMemoryBlock(HeapBlock* heapBlock);
void FlushFreeRecord();
void AppendFreeMemoryETWRecord(__in char *address, size_t size);
static const uint BulkFreeMemoryCount = 400;
uint bulkFreeMemoryWrittenCount;
struct ETWFreeRecord {
    char* memoryAddress;
    uint32 objectSize;
};
ETWFreeRecord etwFreeRecords[BulkFreeMemoryCount];
#endif
// Adopts an externally prepared page-segment block into the heap.
template <ObjectInfoBits attributes>
bool IntegrateBlock(char * blockAddress, PageSegment * segment, size_t allocSize, size_t objectSize);
// Heap block / bucket types need access to the recycler's internals.
template <class TBlockAttributes> friend class SmallHeapBlockT;
template <class TBlockAttributes> friend class SmallNormalHeapBlockT;
template <class TBlockAttributes> friend class SmallLeafHeapBlockT;
template <class TBlockAttributes> friend class SmallFinalizableHeapBlockT;
#ifdef RECYCLER_VISITED_HOST
template <class TBlockAttributes> friend class SmallRecyclerVisitedHostHeapBlockT;
#endif
friend class LargeHeapBlock;
friend class HeapInfo;
friend class HeapInfoManager;
friend class LargeHeapBucket;
friend class ThreadContext;
template <typename TBlockType>
friend class HeapBucketT;
template <typename TBlockType>
friend class SmallNormalHeapBucketBase;
template <typename T, ObjectInfoBits attributes>
friend class RecyclerFastAllocator;
#ifdef RECYCLER_TRACE
void PrintCollectTrace(Js::Phase phase, bool finish = false, bool noConcurrentWork = false);
#endif
#ifdef RECYCLER_VERIFY_MARK
// Post-mark validation: re-traces the heap and checks mark-bit consistency.
void VerifyMark();
void VerifyMarkRoots();
void VerifyMarkStack();
void VerifyMarkArena(ArenaData * arena);
void VerifyMarkBigBlockList(BigBlock * memoryBlocks);
void VerifyMarkArenaMemoryBlockList(ArenaMemoryBlock * memoryBlocks);
bool VerifyMark(void * objectAddress, void * target);
bool VerifyMark(void * target);
#endif
#if DBG_DUMP
bool forceTraceMark;
#endif
// Set while a heap enumeration is walking the heap.
bool isHeapEnumInProgress;
#if DBG
bool allowAllocationDuringHeapEnum;
// NOTE(review): "Renentrance" is a long-standing typo for "Reentrance";
// renaming would touch other uses of this member, so it is kept as-is.
bool allowAllocationDuringRenentrance;
#ifdef ENABLE_PROJECTION
bool isInRefCountTrackingForProjection;
#endif
#endif
// There are two scenarios we allow limited allocation but disallow GC during those allocations:
// in heapenum when we allocate PropertyRecord, and
// in projection ExternalMark allowing allocating VarToDispEx. This is the common flag
// while we have debug only flag for each of the two scenarios.
bool isCollectionDisabled;
#ifdef ENABLE_BASIC_TELEMETRY
RecyclerTelemetryInfo& GetRecyclerTelemetryInfo() { return this->telemetryStats; }
#endif
#ifdef TRACK_ALLOC
public:
// Records type info for the NEXT allocation (returns 'this' for chaining).
Recycler * TrackAllocInfo(TrackAllocData const& data);
void ClearTrackAllocInfo(TrackAllocData* data = NULL);
#ifdef PROFILE_RECYCLER_ALLOC
void PrintAllocStats();
private:
static bool DoProfileAllocTracker();
void InitializeProfileAllocTracker();
void TrackUnallocated(__in char* address, __in char *endAddress, size_t sizeCat);
void TrackAllocCore(void * object, size_t size, const TrackAllocData& trackAllocData, bool traceLifetime = false);
void* TrackAlloc(void * object, size_t size, const TrackAllocData& trackAllocData, bool traceLifetime = false);
void TrackIntegrate(__in_ecount(blockSize) char * blockAddress, size_t blockSize, size_t allocSize, size_t objectSize, const TrackAllocData& trackAllocData);
BOOL TrackFree(const char* address, size_t size);
void TrackAllocWeakRef(RecyclerWeakReferenceBase * weakRef);
void TrackFreeWeakRef(RecyclerWeakReferenceBase * weakRef);
  1608. struct TrackerData
  1609. {
  1610. TrackerData(type_info const * typeinfo, bool isArray) : typeinfo(typeinfo), isArray(isArray),
  1611. ItemSize(0), ItemCount(0), AllocCount(0), ReqSize(0), AllocSize(0), FreeCount(0), FreeSize(0), TraceLifetime(false)
  1612. #ifdef PERF_COUNTERS
  1613. , counter(PerfCounter::RecyclerTrackerCounterSet::GetPerfCounter(typeinfo, isArray))
  1614. , sizeCounter(PerfCounter::RecyclerTrackerCounterSet::GetPerfSizeCounter(typeinfo, isArray))
  1615. #endif
  1616. {
  1617. }
  1618. type_info const * typeinfo;
  1619. bool isArray;
  1620. #ifdef TRACE_OBJECT_LIFETIME
  1621. bool TraceLifetime;
  1622. #endif
  1623. size_t ItemSize;
  1624. size_t ItemCount;
  1625. int AllocCount;
  1626. int64 ReqSize;
  1627. int64 AllocSize;
  1628. int FreeCount;
  1629. int64 FreeSize;
  1630. #ifdef PERF_COUNTERS
  1631. PerfCounter::Counter& counter;
  1632. PerfCounter::Counter& sizeCounter;
  1633. #endif
  1634. static TrackerData EmptyData;
  1635. static TrackerData ExplicitFreeListObjectData;
  1636. };
// Looks up / records the TrackerData associated with an allocation address.
TrackerData * GetTrackerData(void * address);
void SetTrackerData(void * address, TrackerData * data);
// Pairs the scalar and array statistics for one tracked type.
struct TrackerItem
{
    TrackerItem(type_info const * typeinfo) : instanceData(typeinfo, false), arrayData(typeinfo, true)
#ifdef PERF_COUNTERS
        , weakRefCounter(PerfCounter::RecyclerTrackerCounterSet::GetWeakRefPerfCounter(typeinfo))
#endif
    {}
    TrackerData instanceData;
    TrackerData arrayData;
#ifdef PERF_COUNTERS
    PerfCounter::Counter& weakRefCounter;
#endif
};
// type_info -> TrackerItem, and allocation address -> TrackerData.
typedef JsUtil::BaseDictionary<type_info const *, TrackerItem *, NoCheckHeapAllocator, PrimeSizePolicy, DefaultComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> TypeInfotoTrackerItemMap;
typedef JsUtil::BaseDictionary<void *, TrackerData *, NoCheckHeapAllocator, PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> PointerToTrackerDataMap;
TypeInfotoTrackerItemMap * trackerDictionary;
// Guards the tracker dictionary (tracking can happen off-thread).
CriticalSection * trackerCriticalSection;
#endif
// Type info captured by TrackAllocInfo for the next allocation.
TrackAllocData nextAllocData;
#endif
public:
// Enumeration
// RAII helper that puts the recycler into a non-collecting mark state for
// heap enumeration and restores the previous collection state on exit.
class AutoSetupRecyclerForNonCollectingMark
{
private:
    Recycler& m_recycler;
    bool m_setupDone;                               // restore only if setup ran
    CollectionState m_previousCollectionState;      // state to restore
#ifdef RECYCLER_STATS
    RecyclerCollectionStats m_previousCollectionStats;
#endif
public:
    AutoSetupRecyclerForNonCollectingMark(Recycler& recycler, bool setupForHeapEnumeration = false);
    ~AutoSetupRecyclerForNonCollectingMark();
    void DoCommonSetup();
    void SetupForHeapEnumeration();
};
friend class RecyclerHeapObjectInfo;
// Resolves a candidate address to object info; variants differ in which
// blocks/allocators they are willing to consult.
bool FindImplicitRootObject(void* candidate, RecyclerHeapObjectInfo& heapObject);
bool FindHeapObject(void* candidate, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject);
bool FindHeapObjectWithClearedAllocators(void* candidate, RecyclerHeapObjectInfo& heapObject);
bool IsCollectionDisabled() const { return isCollectionDisabled; }
// Invariant: heap enumeration implies collection is disabled.
bool IsHeapEnumInProgress() const { Assert(isHeapEnumInProgress ? isCollectionDisabled : true); return isHeapEnumInProgress; }
#if DBG
// There are limited cases that we have to allow allocation during heap enumeration. GC is explicitly
// disabled during heap enumeration for these limited cases. (See DefaultRecyclerCollectionWrapper)
// The only case of allocation right now is allocating property record for string based type handler
// so we can use the propertyId as the relation Id.
// Allocation during enumeration is still frown upon and should still be avoid if possible.
bool AllowAllocationDuringHeapEnum() const { return allowAllocationDuringHeapEnum; }
// RAII toggle for the above debug-only flag.
class AutoAllowAllocationDuringHeapEnum : public AutoBooleanToggle
{
public:
    AutoAllowAllocationDuringHeapEnum(Recycler * recycler) : AutoBooleanToggle(&recycler->allowAllocationDuringHeapEnum) {};
};
#ifdef ENABLE_PROJECTION
bool IsInRefCountTrackingForProjection() const { return isInRefCountTrackingForProjection;}
// RAII toggle for projection ref-count tracking (debug only).
class AutoIsInRefCountTrackingForProjection : public AutoBooleanToggle
{
public:
    AutoIsInRefCountTrackingForProjection(Recycler * recycler) : AutoBooleanToggle(&recycler->isInRefCountTrackingForProjection) {};
};
#endif
#endif
// RAII scope that disables collection (isCollectionDisabled) during
// reentrant allocation; in debug builds it also flips the matching
// allowAllocationDuringRenentrance check flag.
class AutoAllowAllocationDuringReentrance : public AutoBooleanToggle
{
public:
    AutoAllowAllocationDuringReentrance(Recycler * recycler) :
        AutoBooleanToggle(&recycler->isCollectionDisabled)
#if DBG
        , allowAllocationDuringRenentrance(&recycler->allowAllocationDuringRenentrance)
#endif
    {};
#if DBG
private:
    AutoBooleanToggle allowAllocationDuringRenentrance;
#endif
};
#ifdef HEAP_ENUMERATION_VALIDATION
// Callback invoked per heap object after an enumeration scan, used to
// validate enumeration results.
typedef void(*PostHeapEnumScanCallback)(const HeapObject& heapObject, void *data);
PostHeapEnumScanCallback pfPostHeapEnumScanCallback;
// NOTE(review): "Enun" is a typo for "Enum"; kept since other code in the
// file may reference this member name.
void *postHeapEnunScanData;
void PostHeapEnumScan(PostHeapEnumScanCallback callback, void*data);
bool IsPostEnumHeapValidationInProgress() const { return pfPostHeapEnumScanCallback != NULL; }
#endif
public:
// Maps an interior pointer back to the start of its containing object.
void* GetRealAddressFromInterior(void* candidate);
private:
void BeginNonCollectingMark();
void EndNonCollectingMark();
#if defined(RECYCLER_DUMP_OBJECT_GRAPH) || defined(LEAK_REPORT) || defined(CHECK_MEMORY_LEAK)
public:
// Shutdown-context flags consulted by leak reporting.
bool IsInDllCanUnloadNow() const { return inDllCanUnloadNow; }
bool IsInDetachProcess() const { return inDetachProcess; }
void SetInDllCanUnloadNow();
void SetInDetachProcess();
private:
bool inDllCanUnloadNow;
bool inDetachProcess;
bool isPrimaryMarkContextInitialized;
#endif
#if defined(LEAK_REPORT) || defined(CHECK_MEMORY_LEAK)
template <class Fn>
void ReportOnProcessDetach(Fn fn);
void PrintPinnedObjectStackTraces();
#endif
public:
typedef void (CALLBACK *ObjectBeforeCollectCallback)(void* object, void* callbackState); // same as jsrt JsObjectBeforeCollectCallback
// same as jsrt JsObjectBeforeCollectCallbackWrapper
typedef void (CALLBACK *ObjectBeforeCollectCallbackWrapper)(ObjectBeforeCollectCallback callback, void* object, void* callbackState, void* threadContext);
// Registers (or, presumably with a null callback, updates/clears — confirm
// in the implementation) a per-object before-collect callback.
void SetObjectBeforeCollectCallback(void* object,
    ObjectBeforeCollectCallback callback,
    void* callbackState,
    ObjectBeforeCollectCallbackWrapper callbackWrapper,
    void* threadContext);
void ClearObjectBeforeCollectCallbacks();
bool IsInObjectBeforeCollectCallback() const { return objectBeforeCollectCallbackState != ObjectBeforeCollectCallback_None; }
  1757. struct ObjectBeforeCollectCallbackData
  1758. {
  1759. ObjectBeforeCollectCallback callback;
  1760. void* callbackState;
  1761. void* threadContext;
  1762. ObjectBeforeCollectCallbackWrapper callbackWrapper;
  1763. ObjectBeforeCollectCallbackData() {}
  1764. ObjectBeforeCollectCallbackData(ObjectBeforeCollectCallbackWrapper callbackWrapper, ObjectBeforeCollectCallback callback, void* callbackState, void* threadContext) :
  1765. callbackWrapper(callbackWrapper), callback(callback), callbackState(callbackState), threadContext(threadContext) {}
  1766. };
// object address -> callback registration.
typedef JsUtil::BaseDictionary<void*, ObjectBeforeCollectCallbackData, HeapAllocator,
    PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> ObjectBeforeCollectCallbackMap;
ObjectBeforeCollectCallbackMap* objectBeforeCollectCallbackMap;
// Tracks whether/why before-collect callbacks are currently being invoked.
enum ObjectBeforeCollectCallbackState
{
    ObjectBeforeCollectCallback_None,
    ObjectBeforeCollectCallback_Normal, // Normal GC BeforeCollect callback
    ObjectBeforeCollectCallback_Shutdown, // At shutdown invoke all BeforeCollect callback
} objectBeforeCollectCallbackState;
// Invokes pending callbacks; returns whether any object was revived —
// TODO confirm the return semantics against the implementation.
bool ProcessObjectBeforeCollectCallbacks(bool atShutdown = false);
#if GLOBAL_ENABLE_WRITE_BARRIER
private:
// Blocks registered here have pending software write-barrier state;
// AsymetricResizeLock because writers and readers run on different threads.
typedef JsUtil::BaseDictionary<void *, size_t, HeapAllocator, PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::AsymetricResizeLock> PendingWriteBarrierBlockMap;
PendingWriteBarrierBlockMap pendingWriteBarrierBlockMap;
public:
void RegisterPendingWriteBarrierBlock(void* address, size_t bytes);
void UnRegisterPendingWriteBarrierBlock(void* address);
#endif
#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
private:
// Global list of recyclers (guarded by recyclerListLock) so the static
// barrier helpers can check whether an address belongs to any recycler.
static Recycler* recyclerList;
static CriticalSection recyclerListLock;
Recycler* next;
public:
// Entry point for JIT-generated code; forwards to WBSetBit.
static void WBSetBitJIT(char* addr)
{
    return WBSetBit(addr);
}
static void WBSetBit(char* addr);
static void WBSetBitRange(char* addr, uint length);
static void WBVerifyBitIsSet(char* addr, char* target);
static bool WBCheckIsRecyclerAddress(char* addr);
#endif
#ifdef RECYCLER_FINALIZE_CHECK
void VerifyFinalize();
#endif
  1803. };
// Lightweight, non-owning descriptor of a single recycler-allocated object:
// its start address, owning Recycler, containing HeapBlock, and access to its
// per-object attribute byte. Filled in by HeapBlock::FindHeapObject overrides.
class RecyclerHeapObjectInfo
{
    void* m_address;        // start address of the object
    Recycler * m_recycler;  // recycler that owns the object
    HeapBlock* m_heapBlock; // heap block containing the object
#if LARGEHEAPBLOCK_ENCODING
    // Large heap blocks store the attribute byte cookie-encoded inside the
    // LargeObjectHeader rather than behind a raw byte pointer, so keep both
    // representations in a union and discriminate with isUsingLargeHeapBlock.
    union
    {
        byte * m_attributes;
        LargeObjectHeader * m_largeHeapBlockHeader;
    };
    bool isUsingLargeHeapBlock = false;
#else
    byte * m_attributes;    // direct pointer to the object's attribute byte
#endif
public:
    RecyclerHeapObjectInfo() : m_address(NULL), m_recycler(NULL), m_heapBlock(NULL), m_attributes(NULL) {}
    RecyclerHeapObjectInfo(void* address, Recycler * recycler, HeapBlock* heapBlock, byte * attributes) :
        m_address(address), m_recycler(recycler), m_heapBlock(heapBlock), m_attributes(attributes) { }

    void* GetObjectAddress() const { return m_address; }

#ifdef RECYCLER_PAGE_HEAP
    // True when the object lives in a large heap block operating in page-heap mode.
    bool IsPageHeapAlloc() const
    {
        return isUsingLargeHeapBlock && ((LargeHeapBlock*)m_heapBlock)->InPageHeapMode();
    }
    void PageHeapLockPages() const
    {
        Assert(IsPageHeapAlloc());
        ((LargeHeapBlock*)m_heapBlock)->PageHeapLockPages();
    }
#endif
    // A leaf object is never scanned for GC references: either its own LeafBit
    // is set, or the whole containing block is a leaf block.
    bool IsLeaf() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie) & LeafBit) != 0;
        }
#endif
        return ((*m_attributes & LeafBit) != 0 || this->m_heapBlock->IsLeafBlock());
    }
    bool IsImplicitRoot() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie) & ImplicitRootBit) != 0;
        }
#endif
        return (*m_attributes & ImplicitRootBit) != 0;
    }
    // Mark-bit queries/updates go through the recycler's heap block map.
    bool IsObjectMarked() const { Assert(m_recycler); return m_recycler->heapBlockMap.IsMarked(m_address); }
    void SetObjectMarked() { Assert(m_recycler); m_recycler->heapBlockMap.SetMark(m_address); }
    ObjectInfoBits GetAttributes() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (ObjectInfoBits)m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        }
#endif
        return (ObjectInfoBits)*m_attributes;
    }
    size_t GetSize() const;
#if LARGEHEAPBLOCK_ENCODING
    // Switch this descriptor over to the large-heap-block representation.
    void SetLargeHeapBlockHeader(LargeObjectHeader * largeHeapBlockHeader)
    {
        m_largeHeapBlockHeader = largeHeapBlockHeader;
        isUsingLargeHeapBlock = true;
    }
#endif
    // Sets MemoryProfilerOldObjectBit and returns whether it was already set
    // (i.e. whether the profiler had enumerated this object before).
    bool SetMemoryProfilerHasEnumerated()
    {
        Assert(m_heapBlock);
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return SetMemoryProfilerHasEnumeratedForLargeHeapBlock();
        }
#endif
        bool wasMemoryProfilerOldObject = (*m_attributes & MemoryProfilerOldObjectBit) != 0;
        *m_attributes |= MemoryProfilerOldObjectBit;
        return wasMemoryProfilerOldObject;
    }
    // Clears ImplicitRootBit and returns its previous value.
    bool ClearImplicitRootBit()
    {
        // This can only be called on the main thread for non-finalizable block
        // As finalizable block requires that the bit not be change during concurrent mark
        // since the background thread change the NewTrackBit
        Assert(!m_heapBlock->IsAnyFinalizableBlock());
#ifdef RECYCLER_PAGE_HEAP
        // In page-heap mode, capture the free stack before releasing the root.
        Recycler* recycler = this->m_recycler;
        if (recycler->IsPageHeapEnabled() && recycler->ShouldCapturePageHeapFreeStack())
        {
#ifdef STACK_BACK_TRACE
            if (this->isUsingLargeHeapBlock)
            {
                LargeHeapBlock* largeHeapBlock = (LargeHeapBlock*)this->m_heapBlock;
                if (largeHeapBlock->InPageHeapMode())
                {
                    largeHeapBlock->CapturePageHeapFreeStack();
                }
            }
#endif
        }
#endif
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return ClearImplicitRootBitsForLargeHeapBlock();
        }
#endif
        Assert(m_attributes);
        bool wasImplicitRoot = (*m_attributes & ImplicitRootBit) != 0;
        *m_attributes &= ~ImplicitRootBit;
        return wasImplicitRoot;
    }
    // Explicitly frees the object via the recycler; only plain leaf (LeafBit)
    // or plain non-leaf (NoBit) objects are eligible.
    // NOTE(review): dereferences m_attributes directly, which assumes the
    // small-block representation — presumably never reached for large heap
    // blocks; confirm against callers.
    void ExplicitFree()
    {
        if (*m_attributes == ObjectInfoBits::LeafBit)
        {
            m_recycler->ExplicitFreeLeaf(m_address, GetSize());
        }
        else
        {
            Assert(*m_attributes == ObjectInfoBits::NoBit);
            m_recycler->ExplicitFreeNonLeaf(m_address, GetSize());
        }
    }
#if LARGEHEAPBLOCK_ENCODING
    // Large-block variant of ClearImplicitRootBit: round-trips the attribute
    // byte through the cookie-encoded header.
    bool ClearImplicitRootBitsForLargeHeapBlock()
    {
        Assert(m_largeHeapBlockHeader);
        byte attributes = m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        bool wasImplicitRoot = (attributes & ImplicitRootBit) != 0;
        m_largeHeapBlockHeader->SetAttributes(m_recycler->Cookie, attributes & ~ImplicitRootBit);
        return wasImplicitRoot;
    }
    // Large-block variant of SetMemoryProfilerHasEnumerated.
    bool SetMemoryProfilerHasEnumeratedForLargeHeapBlock()
    {
        Assert(m_largeHeapBlockHeader);
        byte attributes = m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        bool wasMemoryProfilerOldObject = (attributes & MemoryProfilerOldObjectBit) != 0;
        m_largeHeapBlockHeader->SetAttributes(m_recycler->Cookie, attributes | MemoryProfilerOldObjectBit);
        return wasMemoryProfilerOldObject;
    }
#endif
};
// A fake heap block to replace the original heap block where the strong ref is when it has been collected
// as the original heap block may have been freed
// Every virtual override below asserts: nothing should ever query this sentinel;
// it exists only so a weak ref's heapBlock pointer stays dereferenceable.
class CollectedRecyclerWeakRefHeapBlock : public HeapBlock
{
public:
#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
    virtual void WBVerifyBitIsSet(char* addr) override { Assert(false); }
    virtual void WBSetBit(char* addr) override { Assert(false); }
    virtual void WBSetBitRange(char* addr, uint count) override { Assert(false); }
    virtual void WBClearBit(char* addr) override { Assert(false); }
    virtual void WBClearObject(char* addr) override { Assert(false); }
#endif
#if DBG
    virtual HeapInfo * GetHeapInfo() const override { Assert(false); return nullptr; }
    virtual BOOL IsFreeObject(void* objectAddress) override { Assert(false); return false; }
#endif
    virtual BOOL IsValidObject(void* objectAddress) override { Assert(false); return false; }
    virtual byte* GetRealAddressFromInterior(void* interiorAddress) override { Assert(false); return nullptr; }
    virtual size_t GetObjectSize(void* object) const override { Assert(false); return 0; }
    virtual bool FindHeapObject(void* objectAddress, Recycler * recycler, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject) override { Assert(false); return false; }
    virtual bool TestObjectMarkedBit(void* objectAddress) override { Assert(false); return false; }
    virtual void SetObjectMarkedBit(void* objectAddress) override { Assert(false); }
#ifdef RECYCLER_VERIFY_MARK
    virtual bool VerifyMark(void * objectAddress, void * target) override { Assert(false); return false; }
#endif
#ifdef RECYCLER_PERF_COUNTERS
    virtual void UpdatePerfCountersOnFree() override { Assert(false); }
#endif
#ifdef PROFILE_RECYCLER_ALLOC
    virtual void * GetTrackerData(void * address) override { Assert(false); return nullptr; }
    virtual void SetTrackerData(void * address, void * data) override { Assert(false); }
#endif
    // Process-wide singleton; all collected weak refs point at this instance.
    static CollectedRecyclerWeakRefHeapBlock Instance;
private:
    // Private: only the static Instance may exist. BlockTypeCount is used as an
    // out-of-range block type to distinguish the sentinel from real blocks.
    CollectedRecyclerWeakRefHeapBlock() : HeapBlock(BlockTypeCount)
    {
#if ENABLE_CONCURRENT_GC
        isPendingConcurrentSweep = false;
#endif
    }
};
  1993. class AutoIdleDecommit
  1994. {
  1995. public:
  1996. AutoIdleDecommit(Recycler * recycler) : recycler(recycler) { recycler->EnterIdleDecommit(); }
  1997. ~AutoIdleDecommit() { recycler->LeaveIdleDecommit(); }
  1998. private:
  1999. Recycler * recycler;
  2000. };
  2001. template <typename SmallHeapBlockAllocatorType>
  2002. void
  2003. Recycler::AddSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
  2004. {
  2005. this->GetDefaultHeapInfo()->AddSmallAllocator(allocator, sizeCat);
  2006. }
  2007. template <typename SmallHeapBlockAllocatorType>
  2008. void
  2009. Recycler::RemoveSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
  2010. {
  2011. this->GetDefaultHeapInfo()->RemoveSmallAllocator(allocator, sizeCat);
  2012. }
  2013. template <ObjectInfoBits attributes, typename SmallHeapBlockAllocatorType>
  2014. char *
  2015. Recycler::SmallAllocatorAlloc(SmallHeapBlockAllocatorType * allocator, DECLSPEC_GUARD_OVERFLOW size_t sizeCat, size_t size)
  2016. {
  2017. return this->GetDefaultHeapInfo()->SmallAllocatorAlloc<attributes>(this, allocator, sizeCat, size);
  2018. }
// Dummy recycler allocator policy classes to choose the allocation function
class _RecyclerLeafPolicy;
class _RecyclerNonLeafPolicy;
#ifdef RECYCLER_WRITE_BARRIER
class _RecyclerWriteBarrierPolicy;
#endif
// Primary template is intentionally empty; only the policy specializations
// below supply the alloc/free function selectors.
template <typename Policy>
class _RecyclerAllocatorFunc
{};
  2028. template <>
  2029. class _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
  2030. {
  2031. public:
  2032. typedef char * (Recycler::*AllocFuncType)(size_t);
  2033. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  2034. static AllocFuncType GetAllocFunc()
  2035. {
  2036. return &Recycler::AllocLeaf;
  2037. }
  2038. static AllocFuncType GetAllocZeroFunc()
  2039. {
  2040. return &Recycler::AllocLeafZero;
  2041. }
  2042. static FreeFuncType GetFreeFunc()
  2043. {
  2044. return &Recycler::ExplicitFreeLeaf;
  2045. }
  2046. };
  2047. template <>
  2048. class _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
  2049. {
  2050. public:
  2051. typedef char * (Recycler::*AllocFuncType)(size_t);
  2052. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  2053. static AllocFuncType GetAllocFunc()
  2054. {
  2055. return &Recycler::Alloc;
  2056. }
  2057. static AllocFuncType GetAllocZeroFunc()
  2058. {
  2059. return &Recycler::AllocZero;
  2060. }
  2061. static FreeFuncType GetFreeFunc()
  2062. {
  2063. return &Recycler::ExplicitFreeNonLeaf;
  2064. }
  2065. };
  2066. #ifdef RECYCLER_WRITE_BARRIER
  2067. template <>
  2068. class _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
  2069. {
  2070. public:
  2071. typedef char * (Recycler::*AllocFuncType)(size_t);
  2072. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  2073. static AllocFuncType GetAllocFunc()
  2074. {
  2075. return &Recycler::AllocWithBarrier;
  2076. }
  2077. static AllocFuncType GetAllocZeroFunc()
  2078. {
  2079. return &Recycler::AllocZeroWithBarrier;
  2080. }
  2081. static FreeFuncType GetFreeFunc()
  2082. {
  2083. return &Recycler::ExplicitFreeNonLeaf;
  2084. }
  2085. };
  2086. #endif
// This is used by the compiler; when T is NOT a pointer i.e. a value type - it causes leaf allocation
template <typename T>
class TypeAllocatorFunc<Recycler, T> : public _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
};
#if GLOBAL_ENABLE_WRITE_BARRIER
// With the global write barrier enabled, pointer types allocate through the
// write-barrier policy instead of the plain non-leaf policy.
template <typename T>
class TypeAllocatorFunc<Recycler, T *> : public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
{
};
#else
// Partial template specialization; applies to T when it is a pointer
template <typename T>
class TypeAllocatorFunc<Recycler, T *> : public _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
{
};
#endif
// Dummy class to choose the allocation function
class RecyclerLeafAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
// Tag type selecting non-leaf (GC-scanned) allocation.
class RecyclerNonLeafAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
// Tag type selecting write-barrier allocation.
class RecyclerWriteBarrierAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
// Choose RecyclerLeafAllocator / RecyclerNonLeafAllocator based on "bool isLeaf"
template <bool isLeaf>
struct _RecyclerLeaf { typedef RecyclerLeafAllocator AllocatorType; };
// isLeaf == false maps to the non-leaf (scanned) allocator tag.
template <>
struct _RecyclerLeaf<false> { typedef RecyclerNonLeafAllocator AllocatorType; };
  2125. template <bool isLeaf>
  2126. class ListTypeAllocatorFunc<Recycler, isLeaf>
  2127. {
  2128. public:
  2129. // RecyclerLeafAllocator / RecyclerNonLeafAllocator based on "bool isLeaf"
  2130. // used by write barrier type traits
  2131. typedef typename _RecyclerLeaf<isLeaf>::AllocatorType EffectiveAllocatorType;
  2132. typedef char * (Recycler::*AllocFuncType)(size_t);
  2133. typedef bool (Recycler::*FreeFuncType)(void*, size_t);
  2134. static AllocFuncType GetAllocFunc()
  2135. {
  2136. return isLeaf ? &Recycler::AllocLeaf : &Recycler::Alloc;
  2137. }
  2138. static FreeFuncType GetFreeFunc()
  2139. {
  2140. if (isLeaf)
  2141. {
  2142. return &Recycler::ExplicitFreeLeaf;
  2143. }
  2144. else
  2145. {
  2146. return &Recycler::ExplicitFreeNonLeaf;
  2147. }
  2148. }
  2149. };
// Partial template specialization to allocate as non leaf
template <typename T>
class TypeAllocatorFunc<RecyclerNonLeafAllocator, T> :
#if GLOBAL_ENABLE_WRITE_BARRIER
    // Global barrier on: non-leaf allocations route through the barrier policy.
    public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
#else
    public _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
#endif
{
};
#ifdef RECYCLER_WRITE_BARRIER
// Explicit write-barrier allocation, independent of the global barrier setting.
template <typename T>
class TypeAllocatorFunc<RecyclerWriteBarrierAllocator, T> : public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
{
};
#endif
// Leaf allocation: contents never scanned for GC references.
template <typename T>
class TypeAllocatorFunc<RecyclerLeafAllocator, T> : public _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
};
  2170. template <typename TAllocType>
  2171. struct AllocatorInfo<Recycler, TAllocType>
  2172. {
  2173. typedef Recycler AllocatorType;
  2174. typedef TypeAllocatorFunc<Recycler, TAllocType> AllocatorFunc;
  2175. typedef _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy> InstAllocatorFunc; // By default any instance considered non-leaf
  2176. };
  2177. template <typename TAllocType>
  2178. struct AllocatorInfo<RecyclerNonLeafAllocator, TAllocType>
  2179. {
  2180. typedef Recycler AllocatorType;
  2181. typedef TypeAllocatorFunc<RecyclerNonLeafAllocator, TAllocType> AllocatorFunc;
  2182. typedef TypeAllocatorFunc<RecyclerNonLeafAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
  2183. };
  2184. template <typename TAllocType>
  2185. struct AllocatorInfo<RecyclerWriteBarrierAllocator, TAllocType>
  2186. {
  2187. typedef Recycler AllocatorType;
  2188. typedef TypeAllocatorFunc<RecyclerWriteBarrierAllocator, TAllocType> AllocatorFunc;
  2189. typedef TypeAllocatorFunc<RecyclerWriteBarrierAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
  2190. };
  2191. template <typename TAllocType>
  2192. struct AllocatorInfo<RecyclerLeafAllocator, TAllocType>
  2193. {
  2194. typedef Recycler AllocatorType;
  2195. typedef TypeAllocatorFunc<RecyclerLeafAllocator, TAllocType> AllocatorFunc;
  2196. typedef TypeAllocatorFunc<RecyclerLeafAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
  2197. };
  2198. template <>
  2199. struct ForceNonLeafAllocator<Recycler>
  2200. {
  2201. typedef RecyclerNonLeafAllocator AllocatorType;
  2202. };
  2203. template <>
  2204. struct ForceNonLeafAllocator<RecyclerLeafAllocator>
  2205. {
  2206. typedef RecyclerNonLeafAllocator AllocatorType;
  2207. };
  2208. template <>
  2209. struct ForceLeafAllocator<Recycler>
  2210. {
  2211. typedef RecyclerLeafAllocator AllocatorType;
  2212. };
  2213. template <>
  2214. struct ForceLeafAllocator<RecyclerNonLeafAllocator>
  2215. {
  2216. typedef RecyclerLeafAllocator AllocatorType;
  2217. };
// TODO: enable -profile for GC phases.
// access the same profiler object from multiple GC threads which shares one recycler object,
// but profiler object is not thread safe
// The "&& 0" below keeps these macros compiled out until that is fixed; the
// second branch defines them all as no-ops.
#if defined(PROFILE_EXEC) && 0
#define RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase) if (recycler->profiler != nullptr) { recycler->profiler->Begin(phase); }
#define RECYCLER_PROFILE_EXEC_END(recycler, phase) if (recycler->profiler != nullptr) { recycler->profiler->End(phase); }
#define RECYCLER_PROFILE_EXEC_BEGIN2(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->Begin(phase1); recycler->profiler->Begin(phase2);}
#define RECYCLER_PROFILE_EXEC_END2(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->End(phase1); recycler->profiler->End(phase2);}
#define RECYCLER_PROFILE_EXEC_CHANGE(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->End(phase1); recycler->profiler->Begin(phase2); }
#define RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase) if (recycler->backgroundProfiler != nullptr) { recycler->backgroundProfiler->Begin(phase); }
#define RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase) if (recycler->backgroundProfiler != nullptr) { recycler->backgroundProfiler->End(phase); }
#define RECYCLER_PROFILE_EXEC_THREAD_BEGIN(background, recycler, phase) if (background) { RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase); } else { RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase); }
#define RECYCLER_PROFILE_EXEC_THREAD_END(background, recycler, phase) if (background) { RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase); } else { RECYCLER_PROFILE_EXEC_END(recycler, phase); }
#else
// No-op definitions used when execution profiling of GC phases is disabled.
#define RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase)
#define RECYCLER_PROFILE_EXEC_END(recycler, phase)
#define RECYCLER_PROFILE_EXEC_BEGIN2(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_END2(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_CHANGE(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase)
#define RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase)
#define RECYCLER_PROFILE_EXEC_THREAD_BEGIN(background, recycler, phase)
#define RECYCLER_PROFILE_EXEC_THREAD_END(background, recycler, phase)
#endif
  2242. }
  2243. _Ret_notnull_ inline void * __cdecl
  2244. operator new(DECLSPEC_GUARD_OVERFLOW size_t byteSize, Recycler * alloc, HeapInfo * heapInfo)
  2245. {
  2246. return alloc->HeapAllocR(heapInfo, byteSize);
  2247. }
  2248. inline void __cdecl
  2249. operator delete(void * obj, Recycler * alloc, HeapInfo * heapInfo)
  2250. {
  2251. alloc->HeapFree(heapInfo, obj);
  2252. }
  2253. template<ObjectInfoBits infoBits>
  2254. _Ret_notnull_ inline void * __cdecl
  2255. operator new(DECLSPEC_GUARD_OVERFLOW size_t byteSize, Recycler * recycler, const InfoBitsWrapper<infoBits>&)
  2256. {
  2257. AssertCanHandleOutOfMemory();
  2258. Assert(byteSize != 0);
  2259. void * buffer;
  2260. if (infoBits & EnumClass_1_Bit)
  2261. {
  2262. buffer = recycler->AllocEnumClass<infoBits>(byteSize);
  2263. }
  2264. else
  2265. {
  2266. buffer = recycler->AllocWithInfoBits<infoBits>(byteSize);
  2267. }
  2268. // All of our allocation should throw on out of memory
  2269. Assume(buffer != nullptr);
  2270. return buffer;
  2271. }
#if DBG && defined(RECYCLER_VERIFY_MARK)
// Hook the runtime implements to filter out values that look like GC
// references during mark verification but are known false positives.
extern bool IsLikelyRuntimeFalseReference(
    char* objectStartAddress, size_t offset, const char* typeName);
// Grants the free function above access to a class's private members.
#define DECLARE_RECYCLER_VERIFY_MARK_FRIEND() \
    private: \
    friend bool ::IsLikelyRuntimeFalseReference( \
        char* objectStartAddress, size_t offset, const char* typeName);
// Default stub implementation: nothing is treated as a false reference.
#define IMPLEMENT_STUB_IsLikelyRuntimeFalseReference() \
    bool IsLikelyRuntimeFalseReference( \
        char* objectStartAddress, size_t offset, const char* typeName) \
    { return false; }
#else
// Both macros expand to nothing when mark verification is disabled.
#define DECLARE_RECYCLER_VERIFY_MARK_FRIEND()
#define IMPLEMENT_STUB_IsLikelyRuntimeFalseReference()
#endif
  2287. template <typename ExternalAllocFunc>
  2288. bool Recycler::DoExternalAllocation(size_t size, ExternalAllocFunc externalAllocFunc)
  2289. {
  2290. // Request external memory allocation
  2291. if (!RequestExternalMemoryAllocation(size))
  2292. {
  2293. // Attempt to free some memory then try again
  2294. CollectNow<CollectOnTypedArrayAllocation>();
  2295. if (!RequestExternalMemoryAllocation(size))
  2296. {
  2297. return false;
  2298. }
  2299. }
  2300. struct AutoExternalAllocation
  2301. {
  2302. bool allocationSucceeded = false;
  2303. Recycler* recycler;
  2304. size_t size;
  2305. AutoExternalAllocation(Recycler* recycler, size_t size): recycler(recycler), size(size) {}
  2306. // In case the externalAllocFunc throws or fails, the destructor will report the failure
  2307. ~AutoExternalAllocation() { if (!allocationSucceeded) recycler->ReportExternalMemoryFailure(size); }
  2308. };
  2309. AutoExternalAllocation externalAllocation(this, size);
  2310. if (externalAllocFunc())
  2311. {
  2312. externalAllocation.allocationSucceeded = true;
  2313. return true;
  2314. }
  2315. return false;
  2316. }