// RecyclerSweepManager.cpp
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "CommonMemoryPch.h"
  6. #if ENABLE_PARTIAL_GC
  7. #define KILOBYTES * 1024
  8. #define MEGABYTES * 1024 KILOBYTES
  9. #define MEGABYTES_OF_PAGES * 1024 * 1024 / AutoSystemInfo::PageSize;
  10. const uint RecyclerSweepManager::MinPartialUncollectedNewPageCount = 4 MEGABYTES_OF_PAGES;
  11. const uint RecyclerSweepManager::MaxPartialCollectRescanRootBytes = 5 MEGABYTES;
  12. static const uint MinPartialCollectRescanRootBytes = 128 KILOBYTES;
  13. // Maximum unused partial collect free bytes before we get out of partial GC mode
  14. static const uint MaxUnusedPartialCollectFreeBytes = 16 MEGABYTES;
  15. // Have to collected at least 10% before we would partial GC
  16. // CONSIDER: It may be good to do partial with low efficacy once we have concurrent partial
  17. // because old object are not getting collected as well, but without concurrent partial, we will have to mark
  18. // new objects in thread.
  19. static const double MinPartialCollectEfficacy = 0.1;
  20. #endif
  21. bool
  22. RecyclerSweepManager::IsBackground() const
  23. {
  24. return this->background;
  25. }
  26. bool
  27. RecyclerSweepManager::HasSetupBackgroundSweep() const
  28. {
  29. return this->IsBackground() || this->forceForeground;
  30. }
  31. bool
  32. RecyclerSweepManager::IsMemProtectMode() const
  33. {
  34. return recycler->IsMemProtectMode();
  35. }
#if ENABLE_PARTIAL_GC
// Start the sweep phase. rescanRootBytes is the number of root bytes rescanned
// for this collection (feeds the partial-GC heuristics); adjustPartialHeuristics
// controls whether AdjustPartialHeuristics runs later in FinishSweep.
void
RecyclerSweepManager::BeginSweep(Recycler * recycler, size_t rescanRootBytes, bool adjustPartialHeuristics)
#else
void
RecyclerSweepManager::BeginSweep(Recycler * recycler)
#endif
{
    {
        // We are about to sweep, give the runtime a chance to see the now-immutable state of the world.
        // And clean up all the caches not monitored by the GC (e.g. inline caches)
        AUTO_NO_EXCEPTION_REGION;

        // RecyclerSweep may not be initialized till later in this function but
        // GCETW relies on the recycler pointer being correctly set up
        this->recycler = recycler;

        GCETW(GC_PRESWEEPCALLBACK_START, (this));
        recycler->collectionWrapper->PreSweepCallback();
        GCETW(GC_PRESWEEPCALLBACK_STOP, (this));
    }

    Assert(!recycler->IsSweeping());
    Assert(recycler->recyclerSweepManager == nullptr);

    // Reset all per-sweep state. NOTE: this also wipes this->recycler (assigned
    // above for ETW), so it is re-established immediately below.
    memset(this, 0, sizeof(RecyclerSweepManager));
    this->recycler = recycler;
    recycler->recyclerSweepManager = this;
    this->defaultHeapRecyclerSweep.BeginSweep(recycler, this, recycler->autoHeap.GetDefaultHeap());

#if ENABLE_PARTIAL_GC
    Assert(recycler->clientTrackedObjectList.Empty());

    // We should not have partialUncollectedAllocBytes unless we are in partial collect at this point
    Assert(recycler->partialUncollectedAllocBytes == 0 || recycler->inPartialCollectMode);
    Assert(recycler->autoHeap.uncollectedAllocBytes >= recycler->partialUncollectedAllocBytes);

    // if the cost of rescan is too high, we want to disable partial GC starting from the
    // upcoming Sweep. We basically move the check up from AdjustPartialHeuristics to here
    // such that we can have the decision before sweep.
    this->rescanRootBytes = rescanRootBytes;
    RECYCLER_STATS_SET(recycler, rescanRootBytes, rescanRootBytes);

    if (this->DoPartialCollectMode())
    {
        // enable partial collect for sweep & next round of GC
        DebugOnly(this->partial = true);

        // REVIEW: is adjustPartialHeuristicsMode the same as in PartialCollectMode?
        this->adjustPartialHeuristics = adjustPartialHeuristics;
        this->StartPartialCollectMode();
    }
    else
    {
        // disable partial collect
        if (recycler->inPartialCollectMode)
        {
            recycler->FinishPartialCollect();
        }

        Assert(recycler->partialUncollectedAllocBytes == 0);
        Assert(!recycler->inPartialCollectMode);
    }

    if (this->inPartialCollect)
    {
        // We just did a partial collect.
        // We only want to count objects that survived this collect towards the next full GC.
        // Thus, clear out uncollectedAllocBytes here; we will adjust to account for objects that
        // survived this partial collect in EndSweep.
        recycler->ResetHeuristicCounters();
    }
    else
#endif
    {
        Assert(!this->inPartialCollect);

        // We just did a full collect.
        // We reset uncollectedAllocBytes when we kicked off the collection,
        // so don't reset it here (but do reset partial heuristics).
        recycler->ResetPartialHeuristicCounters();
    }
}
// Second half of the sweep: apply the partial-GC heuristics (when in partial
// collect mode), sweep pending objects, and decide whether to stay in or leave
// partial-collect mode for the next GC.
void
RecyclerSweepManager::FinishSweep()
{
#if ENABLE_PARTIAL_GC
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
    // When allocations were allowed during concurrent sweep this runs as Pass2;
    // emit the matching ETW start events for that pass.
    if (recycler->collectionState == CollectionStateConcurrentSweepPass2)
    {
        GCETW_INTERNAL(GC_START, (recycler, ETWEvent_ConcurrentSweep_Pass2));
        GCETW_INTERNAL(GC_START2, (recycler, ETWEvent_ConcurrentSweep_Pass2, recycler->collectionStartReason, recycler->collectionStartFlags));
    }
#endif

    Assert(this->partial == recycler->inPartialCollectMode);

    // Adjust heuristics
    if (recycler->inPartialCollectMode)
    {
        if (this->AdjustPartialHeuristics())
        {
            GCETW(GC_SWEEP_PARTIAL_REUSE_PAGE_START, (recycler));
            // If we are doing a full concurrent GC, all allocated bytes are considered "collected".
            // We only start accumulating uncollected allocate bytes during partial GC.
            // FinishPartialCollect will reset it to 0 if we are not doing a partial GC
            recycler->partialUncollectedAllocBytes = this->InPartialCollect() ? this->nextPartialUncollectedAllocBytes : 0;
#ifdef RECYCLER_TRACE
            if (recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::PartialCollectPhase))
            {
                Output::Print(_u("AdjustPartialHeuristics returned true\n"));
                Output::Print(_u(" partialUncollectedAllocBytes = %d\n"), recycler->partialUncollectedAllocBytes);
                Output::Print(_u(" nextPartialUncollectedAllocBytes = %d\n"), this->nextPartialUncollectedAllocBytes);
            }
#endif
            recycler->autoHeap.SweepPartialReusePages(*this);
            GCETW(GC_SWEEP_PARTIAL_REUSE_PAGE_STOP, (recycler));

#ifdef RECYCLER_WRITE_WATCH
            if (!CONFIG_FLAG(ForceSoftwareWriteBarrier))
            {
                // Reset write watch only on the foreground thread.
                if (!this->IsBackground())
                {
                    RECYCLER_PROFILE_EXEC_BEGIN(recycler, Js::ResetWriteWatchPhase);
                    if (!recycler->autoHeap.ResetWriteWatch())
                    {
                        // Shouldn't happen
                        Assert(false);

                        // Failed to reset write watch: permanently disable partial
                        // collect and fall back to finishing the partial collect now.
                        recycler->enablePartialCollect = false;
                        recycler->FinishPartialCollect(this);
                    }
                    RECYCLER_PROFILE_EXEC_END(recycler, Js::ResetWriteWatchPhase);
                }
            }
#endif
        }
        else
        {
#ifdef RECYCLER_TRACE
            if (recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::PartialCollectPhase))
            {
                Output::Print(_u("AdjustPartialHeuristics returned false\n"));
            }
#endif
            // Heuristics rejected partial GC: finish the partial collect on
            // whichever thread we are on.
#if ENABLE_CONCURRENT_GC
            if (this->IsBackground())
            {
                recycler->BackgroundFinishPartialCollect(this);
            }
            else
#endif
            {
                recycler->FinishPartialCollect(this);
            }
        }
    }
    else
    {
        Assert(!this->adjustPartialHeuristics);

        // Initial value or Sweep should have called FinishPartialCollect to reset these if we are not doing partial
        Assert(recycler->partialUncollectedAllocBytes == 0);
    }

#if ENABLE_CONCURRENT_GC
    recycler->SweepPendingObjects(*this);
#endif

#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
    if (recycler->collectionState == CollectionStateConcurrentSweepPass2)
    {
        GCETW_INTERNAL(GC_STOP, (recycler, ETWEvent_ConcurrentSweep_Pass2));
        GCETW_INTERNAL(GC_STOP2, (recycler, ETWEvent_ConcurrentSweep_Pass2, recycler->collectionStartReason, recycler->collectionStartFlags));
    }
#endif
#endif
}
// Final bookkeeping once sweep completes: restore the surviving-object byte
// count (partial collect only), detach this sweep manager from the recycler,
// and clean up the heap block map.
void
RecyclerSweepManager::EndSweep()
{
#if ENABLE_PARTIAL_GC
    // We cleared out the old uncollectedAllocBytes (ResetHeuristicCounters in
    // BeginSweep); restore it now to get the adjustment for partial.
    // We clear it again after we are done collecting and if we are not in partial collect
    if (this->inPartialCollect)
    {
        recycler->autoHeap.uncollectedAllocBytes += this->nextPartialUncollectedAllocBytes;
#ifdef RECYCLER_TRACE
        if (recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::PartialCollectPhase))
        {
            Output::Print(_u("EndSweep for partial sweep\n"));
            Output::Print(_u(" uncollectedAllocBytes = %d\n"), recycler->autoHeap.uncollectedAllocBytes);
            Output::Print(_u(" nextPartialUncollectedAllocBytes = %d\n"), this->nextPartialUncollectedAllocBytes);
        }
#endif
    }
#endif
    recycler->recyclerSweepManager = nullptr;

    // Clean up the HeapBlockMap.
    // This will release any internal structures that are no longer needed after Sweep.
    recycler->heapBlockMap.Cleanup(!recycler->IsMemProtectMode());
}
  219. void
  220. RecyclerSweepManager::ShutdownCleanup()
  221. {
  222. this->defaultHeapRecyclerSweep.ShutdownCleanup();
  223. }
  224. #if ENABLE_CONCURRENT_GC
// Entry point for the background (concurrent) portion of the sweep.
void
RecyclerSweepManager::BackgroundSweep()
{
    // NOTE: forceForeground here is the member variable, not a parameter — it
    // reflects the state this sweep manager was set up with.
    this->BeginBackground(forceForeground);

    // Finish the concurrent part of the first pass
    this->recycler->autoHeap.SweepSmallNonFinalizable(*this);

#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
    // When allocations during concurrent sweep are enabled and allowed, skip
    // finishing here — FinishSweep runs later (see its Pass2 handling).
    if (!CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) || !this->recycler->AllowAllocationsDuringConcurrentSweep())
#endif
    {
        // Finish the rest of the sweep
        this->FinishSweep();
        this->EndBackground();
    }
}
  240. void
  241. RecyclerSweepManager::BeginBackground(bool forceForeground)
  242. {
  243. Assert(!background);
  244. this->background = !forceForeground;
  245. this->forceForeground = forceForeground;
  246. }
  247. void
  248. RecyclerSweepManager::EndBackground()
  249. {
  250. Assert(this->background || this->forceForeground);
  251. this->background = false;
  252. }
  253. #endif
// Called by prepare sweep to track the new allocated bytes on blocks that are not fully allocated yet.
template <typename TBlockAttributes>
void
RecyclerSweepManager::AddUnaccountedNewObjectAllocBytes(SmallHeapBlockT<TBlockAttributes> * heapBlock)
{
#if ENABLE_PARTIAL_GC
    // Only need to update the unaccounted alloc bytes if we are in partial collect mode
    if (recycler->inPartialCollectMode)
    {
        uint unaccountedAllocBytes = heapBlock->GetAndClearUnaccountedAllocBytes();
        // A block should not have both leftover lastUncollectedAllocBytes and
        // fresh unaccounted bytes at the same time.
        Assert(heapBlock->lastUncollectedAllocBytes == 0 || unaccountedAllocBytes == 0);
        DebugOnly(heapBlock->lastUncollectedAllocBytes += unaccountedAllocBytes);
        // Credit the bytes to both the recycler-wide counter and this sweep's
        // running next-uncollected total.
        recycler->partialUncollectedAllocBytes += unaccountedAllocBytes;
        this->nextPartialUncollectedAllocBytes += unaccountedAllocBytes;
    }
    else
#endif
    {
        // We don't care, clear the unaccounted bytes to start tracking new objects for the next GC
        heapBlock->ClearAllAllocBytes();
    }
}
template void RecyclerSweepManager::AddUnaccountedNewObjectAllocBytes<SmallAllocationBlockAttributes>(SmallHeapBlock * heapBlock);
template void RecyclerSweepManager::AddUnaccountedNewObjectAllocBytes<MediumAllocationBlockAttributes>(MediumHeapBlock * heapBlock);
  278. #if ENABLE_PARTIAL_GC
  279. bool
  280. RecyclerSweepManager::InPartialCollect() const
  281. {
  282. return inPartialCollect;
  283. }
// Enter (or remain in) partial-collect mode and snapshot the allocation
// counters that the partial heuristics are computed from.
void
RecyclerSweepManager::StartPartialCollectMode()
{
    // Save the in-partial-collect flag; the main thread resets it after returning
    // to the script and the background thread still needs it
    this->inPartialCollect = recycler->inPartialCollectMode;
    recycler->inPartialCollectMode = true;

    // Tracks the uncollected alloc bytes for partial GC.
    // Keep a copy of last collection's uncollected allocation bytes, so we can use it to calculate
    // the new objects that were allocated since the last GC
    Assert(recycler->partialUncollectedAllocBytes == 0 || this->inPartialCollect);
    this->lastPartialUncollectedAllocBytes = recycler->partialUncollectedAllocBytes;

    size_t currentUncollectedAllocBytes = recycler->autoHeap.uncollectedAllocBytes;
    Assert(currentUncollectedAllocBytes >= this->lastPartialUncollectedAllocBytes);
    if (!this->inPartialCollect)
    {
        // If we did a full collect, then we need to include lastUncollectedAllocBytes
        // in the partialUncollectedAllocBytes calculation, because all objects allocated
        // since the previous GC are considered new, but we cleared uncollectedAllocBytes
        // when we kicked off the GC.
        currentUncollectedAllocBytes += recycler->autoHeap.lastUncollectedAllocBytes;
    }

    // Initially, the partial uncollected alloc bytes is the current uncollectedAllocBytes
    recycler->partialUncollectedAllocBytes = currentUncollectedAllocBytes;
    this->nextPartialUncollectedAllocBytes = currentUncollectedAllocBytes;

#ifdef RECYCLER_TRACE
    if (recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::PartialCollectPhase))
    {
        Output::Print(_u("StartPartialCollectMode\n"));
        Output::Print(_u(" was inPartialCollectMode = %d\n"), this->inPartialCollect);
        Output::Print(_u(" lastPartialUncollectedAllocBytes = %d\n"), this->lastPartialUncollectedAllocBytes);
        Output::Print(_u(" uncollectedAllocBytes = %d\n"), recycler->autoHeap.uncollectedAllocBytes);
        Output::Print(_u(" nextPartialUncollectedAllocBytes = %d\n"), this->nextPartialUncollectedAllocBytes);
    }
#endif
}
// Remove bytes that this sweep expects to free from the running count of
// new-object bytes that will remain uncollected after the sweep.
void
RecyclerSweepManager::SubtractSweepNewObjectAllocBytes(size_t newObjectExpectSweepByteCount)
{
    Assert(recycler->inPartialCollectMode);
    // We shouldn't free more than we allocated
    Assert(this->nextPartialUncollectedAllocBytes >= newObjectExpectSweepByteCount);
    Assert(this->nextPartialUncollectedAllocBytes >= this->lastPartialUncollectedAllocBytes + newObjectExpectSweepByteCount);
    this->nextPartialUncollectedAllocBytes -= newObjectExpectSweepByteCount;
}
  329. /*--------------------------------------------------------------------------------------------
  330. * Determine we want to go into partial collect mode for the next GC before we sweep,
  331. * based on the number bytes needed to rescan (<= 5MB)
  332. *--------------------------------------------------------------------------------------------*/
  333. bool
  334. RecyclerSweepManager::DoPartialCollectMode()
  335. {
  336. if (!recycler->enablePartialCollect)
  337. {
  338. return false;
  339. }
  340. // If we exceed 16MB of unused memory in partial blocks, get out of partial collect to avoid
  341. // memory fragmentation.
  342. if (recycler->autoHeap.unusedPartialCollectFreeBytes > MaxUnusedPartialCollectFreeBytes)
  343. {
  344. return false;
  345. }
  346. return this->rescanRootBytes <= MaxPartialCollectRescanRootBytes;
  347. }
// Heuristic ratio is ((c * e + (1 - e)) * (1 - p)) + p and we use that to linearly scale between min and max.
// This gives cost (c) / efficacy (e) / pressure (p) equal weight, while each alone can push the result past
// the point where partial GC is not beneficial.
// Returns true to stay in partial collect mode (and sets the partial heuristics),
// false to reject partial GC for the next round.
bool
RecyclerSweepManager::AdjustPartialHeuristics()
{
    Assert(recycler->inPartialCollectMode);
    Assert(this->adjustPartialHeuristics);
    Assert(this->InPartialCollect() || recycler->autoHeap.unusedPartialCollectFreeBytes == 0);

    // DoPartialCollectMode should have rejected these already
    Assert(this->rescanRootBytes <= (size_t)MaxPartialCollectRescanRootBytes);
    Assert(recycler->autoHeap.unusedPartialCollectFreeBytes <= MaxUnusedPartialCollectFreeBytes);

    // Page reuse heuristics: efficacy = fraction of new-object bytes this collect freed.
    double collectEfficacy;
    const size_t allocBytes = this->GetNewObjectAllocBytes();
    if (allocBytes == 0)
    {
        // We may get collections without allocating memory (e.g. unpin heuristics).
        collectEfficacy = 1.0; // assume 100% efficacy
        this->partialCollectSmallHeapBlockReuseMinFreeBytes = 0; // reuse all pages
    }
    else
    {
        const size_t freedBytes = this->GetNewObjectFreeBytes();
        Assert(freedBytes <= allocBytes);
        collectEfficacy = (double)freedBytes / (double)allocBytes;

        // If we collected less than 10% of the memory, let's not do partial GC.
        // CONSIDER: It may be good to do partial with low efficacy once we have concurrent partial
        // because old objects are not getting collected as well, but without concurrent partial, we will have to mark
        // new objects in thread.
        if (collectEfficacy < MinPartialCollectEfficacy)
        {
            return false;
        }

        // Scale the efficacy linearly such that an efficacy of MinPartialCollectEfficacy translates to an adjusted efficacy of
        // 0.0, and an efficacy of 1.0 translates to an adjusted efficacy of 1.0
        collectEfficacy = (collectEfficacy - MinPartialCollectEfficacy) / (1.0 - MinPartialCollectEfficacy);
        Assert(collectEfficacy <= 1.0);
        this->partialCollectSmallHeapBlockReuseMinFreeBytes = (size_t)(AutoSystemInfo::PageSize * collectEfficacy);
    }

#ifdef RECYCLER_STATS
    recycler->collectionStats.collectEfficacy = collectEfficacy;
    recycler->collectionStats.partialCollectSmallHeapBlockReuseMinFreeBytes = this->partialCollectSmallHeapBlockReuseMinFreeBytes;
#endif

    // Blocks which are being reused are likely to be touched again from allocation and contribute to rescan cost.
    // If there are many of these, adjust rescanRootBytes to account for this.
    const size_t estimatedPartialReuseBlocks = (size_t)((double)this->reuseHeapBlockCount * (1.0 - collectEfficacy));
    const size_t estimatedPartialReuseBytes = estimatedPartialReuseBlocks * AutoSystemInfo::PageSize;
    const size_t newRescanRootBytes = max(this->rescanRootBytes, estimatedPartialReuseBytes);
    RECYCLER_STATS_SET(recycler, estimatedPartialReuseBytes, estimatedPartialReuseBytes);

    // Recheck the rescanRootBytes
    if (newRescanRootBytes > MaxPartialCollectRescanRootBytes)
    {
        return false;
    }

    // Cost c in [0, 1]: how close the rescan is to the 5MB budget.
    double collectCost = (double)newRescanRootBytes / MaxPartialCollectRescanRootBytes;
    RECYCLER_STATS_SET(recycler, collectCost, collectCost);

    // Include the efficacy in equal portion, which is related to the cost of marking through new objects.
    // r = c * e + 1 - e;
    const double reuseRatio = 1.0 - collectEfficacy;
    double ratio = collectCost * collectEfficacy + reuseRatio;

    if (this->InPartialCollect())
    {
        // Avoid ratio of uncollectedBytesPressure > 1.0
        if (this->nextPartialUncollectedAllocBytes > RecyclerHeuristic::Instance.MaxUncollectedAllocBytesPartialCollect)
        {
            return false;
        }

        // Only add full collect pressure if we are doing partial collect;
        // account for the amount of uncollected bytes and unused bytes to increase
        // pressure to do a full GC by raising the partial GC new page heuristic
        double uncollectedBytesPressure = (double)this->nextPartialUncollectedAllocBytes / (double)RecyclerHeuristic::Instance.MaxUncollectedAllocBytesPartialCollect;
        double collectFullCollectPressure =
            (double)recycler->autoHeap.unusedPartialCollectFreeBytes / (double)MaxUnusedPartialCollectFreeBytes
            * (1.0 - uncollectedBytesPressure) + uncollectedBytesPressure;

        ratio = ratio * (1.0 - collectFullCollectPressure) + collectFullCollectPressure;
    }
    Assert(0.0 <= ratio && ratio <= 1.0);

    // Linearly scale the partial GC new page heuristic using the ratio calculated
    recycler->uncollectedNewPageCountPartialCollect = MinPartialUncollectedNewPageCount
        + (size_t)((double)(RecyclerHeuristic::Instance.MaxPartialUncollectedNewPageCount - MinPartialUncollectedNewPageCount) * ratio);
    Assert(recycler->uncollectedNewPageCountPartialCollect >= MinPartialUncollectedNewPageCount &&
        recycler->uncollectedNewPageCountPartialCollect <= RecyclerHeuristic::Instance.MaxPartialUncollectedNewPageCount);

    // If the number of new pages to reach the partial heuristics plus the existing uncollectedAllocBytes
    // and the memory we are going to reuse (assume we use it all) is greater than the full GC max size heuristic
    // (with 1M fudge factor), we trigger a full GC anyways, so let's not get into partial GC
    const size_t estimatedPartialReusedFreeByteCount = (size_t)((double)this->reuseByteCount * reuseRatio);
    if (recycler->uncollectedNewPageCountPartialCollect * AutoSystemInfo::PageSize
        + this->nextPartialUncollectedAllocBytes + estimatedPartialReusedFreeByteCount >= RecyclerHeuristic::Instance.MaxUncollectedAllocBytesPartialCollect)
    {
        return false;
    }

#if ENABLE_CONCURRENT_GC
    recycler->partialConcurrentNextCollection = RecyclerHeuristic::PartialConcurrentNextCollection(ratio, recycler->GetRecyclerFlagsTable());
#endif
    return true;
}
  445. size_t
  446. RecyclerSweepManager::GetNewObjectAllocBytes() const
  447. {
  448. Assert(recycler->inPartialCollectMode);
  449. Assert(recycler->partialUncollectedAllocBytes >= this->lastPartialUncollectedAllocBytes);
  450. return recycler->partialUncollectedAllocBytes - this->lastPartialUncollectedAllocBytes;
  451. }
  452. size_t
  453. RecyclerSweepManager::GetNewObjectFreeBytes() const
  454. {
  455. Assert(recycler->inPartialCollectMode);
  456. Assert(recycler->partialUncollectedAllocBytes >= this->nextPartialUncollectedAllocBytes);
  457. return recycler->partialUncollectedAllocBytes - this->nextPartialUncollectedAllocBytes;
  458. }
  459. size_t
  460. RecyclerSweepManager::GetPartialUnusedFreeByteCount() const
  461. {
  462. return partialUnusedFreeByteCount;
  463. }
  464. size_t
  465. RecyclerSweepManager::GetPartialCollectSmallHeapBlockReuseMinFreeBytes() const
  466. {
  467. return partialCollectSmallHeapBlockReuseMinFreeBytes;
  468. }
  469. template <typename TBlockAttributes>
  470. void
  471. RecyclerSweepManager::NotifyAllocableObjects(SmallHeapBlockT<TBlockAttributes> * heapBlock)
  472. {
  473. this->reuseByteCount += heapBlock->GetExpectedFreeBytes();
  474. if (!heapBlock->IsLeafBlock())
  475. {
  476. this->reuseHeapBlockCount++;
  477. }
  478. }
  479. template void RecyclerSweepManager::NotifyAllocableObjects<SmallAllocationBlockAttributes>(SmallHeapBlock* heapBlock);
  480. template void RecyclerSweepManager::NotifyAllocableObjects<MediumAllocationBlockAttributes>(MediumHeapBlock* heapBlock);
  481. void
  482. RecyclerSweepManager::AddUnusedFreeByteCount(uint expectFreeByteCount)
  483. {
  484. this->partialUnusedFreeByteCount += expectFreeByteCount;
  485. }
  486. bool
  487. RecyclerSweepManager::DoAdjustPartialHeuristics() const
  488. {
  489. return this->adjustPartialHeuristics;
  490. }
  491. #endif
  492. #if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
  493. size_t RecyclerSweepManager::GetPendingMergeNewHeapBlockCount(HeapInfo const * heapInfo)
  494. {
  495. return this->defaultHeapRecyclerSweep.GetPendingMergeNewHeapBlockCount(heapInfo);
  496. }
  497. #endif