HeapBlock.cpp 76 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
7227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "CommonMemoryPch.h"
  6. #if defined(__clang__) && !defined(_MSC_VER)
  7. #include <cxxabi.h>
  8. #endif
  9. //========================================================================================================
  10. // HeapBlock
  11. //========================================================================================================
// Checked downcast: view this heap block as a SmallNormalHeapBlockT.
// Debug builds assert the block really is one of the "normal" variants.
template <typename TBlockAttributes>
SmallNormalHeapBlockT<TBlockAttributes> *
HeapBlock::AsNormalBlock()
{
    Assert(IsAnyNormalBlock());
    return static_cast<SmallNormalHeapBlockT<TBlockAttributes> *>(this);
}
// Checked downcast: view this heap block as a SmallLeafHeapBlockT.
// Debug builds assert the block really is a leaf block.
template <typename TBlockAttributes>
SmallLeafHeapBlockT<TBlockAttributes> *
HeapBlock::AsLeafBlock()
{
    Assert(IsLeafBlock());
    return static_cast<SmallLeafHeapBlockT<TBlockAttributes> *>(this);
}
// Checked downcast: view this heap block as a SmallFinalizableHeapBlockT.
// Debug builds assert the block is one of the finalizable variants.
template <typename TBlockAttributes>
SmallFinalizableHeapBlockT<TBlockAttributes> *
HeapBlock::AsFinalizableBlock()
{
    Assert(IsAnyFinalizableBlock());
    return static_cast<SmallFinalizableHeapBlockT<TBlockAttributes> *>(this);
}
#ifdef RECYCLER_VISITED_HOST
// Checked downcast: view this heap block as a SmallRecyclerVisitedHostHeapBlockT.
// Only compiled when the recycler supports host-visited (externally traced) objects.
template <typename TBlockAttributes>
SmallRecyclerVisitedHostHeapBlockT<TBlockAttributes> *
HeapBlock::AsRecyclerVisitedHostBlock()
{
    Assert(IsRecyclerVisitedHostBlock());
    return static_cast<SmallRecyclerVisitedHostHeapBlockT<TBlockAttributes> *>(this);
}
#endif
#ifdef RECYCLER_WRITE_BARRIER
// Checked downcast: view this heap block as a normal block with a software
// write barrier. Debug builds assert the exact block type.
template <typename TBlockAttributes>
SmallNormalWithBarrierHeapBlockT<TBlockAttributes> *
HeapBlock::AsNormalWriteBarrierBlock()
{
    Assert(IsNormalWriteBarrierBlock());
    return static_cast<SmallNormalWithBarrierHeapBlockT<TBlockAttributes> *>(this);
}

// Checked downcast: view this heap block as a finalizable block with a
// software write barrier. Debug builds assert the exact block type.
template <typename TBlockAttributes>
SmallFinalizableWithBarrierHeapBlockT<TBlockAttributes> *
HeapBlock::AsFinalizableWriteBarrierBlock()
{
    Assert(IsFinalizableWriteBarrierBlock());
    return static_cast<SmallFinalizableWithBarrierHeapBlockT<TBlockAttributes> *>(this);
}
#endif
// Flags this block (and the recycler) as needing a rescan after an OOM
// during marking. Leaf blocks hold no traceable pointers and are never
// rescanned, hence the assert.
void
HeapBlock::SetNeedOOMRescan(Recycler * recycler)
{
    Assert(!this->IsLeafBlock());
    this->needOOMRescan = true;
    recycler->SetNeedOOMRescan();
}
  65. //========================================================================================================
  66. // SmallHeapBlock
  67. //========================================================================================================
// Returns the number of extra bytes that must be allocated in front of the
// SmallHeapBlockT header to hold per-object metadata.
// Small Heap Block Layout:
//      TrackerData * [objectCount]  (Optional, profiling only)
//      ObjectInfo    [objectCount]  (In reverse index order)
//      <Small*HeapBlock>
template <class TBlockAttributes>
size_t
SmallHeapBlockT<TBlockAttributes>::GetAllocPlusSize(uint objectCount)
{
    // One ObjectInfo byte per object, rounded up to pointer-size alignment.
    size_t allocPlusSize = Math::Align<size_t>(sizeof(unsigned char) * objectCount, sizeof(size_t));
#ifdef PROFILE_RECYCLER_ALLOC
    // When allocation tracking is enabled, also reserve one TrackerData*
    // slot per object.
    if (Recycler::DoProfileAllocTracker())
    {
        allocPlusSize += objectCount * sizeof(void *);
    }
#endif
    return allocPlusSize;
}
// Constructor logic shared by all small/medium block variants: records the
// owning bucket, initializes free-list/count state via Init, and validates
// that the requested heapBlockType is consistent with TBlockAttributes.
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::ConstructorCommon(HeapBucket * bucket, ushort objectSize, ushort objectCount, HeapBlockType heapBlockType)
{
    this->heapBucket = bucket;
    this->Init(objectSize, objectCount);
    Assert(heapBlockType < HeapBlock::HeapBlockType::SmallAllocBlockTypeCount + HeapBlock::HeapBlockType::MediumAllocBlockTypeCount);
    // The object count must exactly tile the block's pages at this objectSize.
    Assert(objectCount > 1 && objectCount == (this->GetPageCount() * AutoSystemInfo::PageSize) / objectSize);

#if defined(RECYCLER_SLOW_CHECK_ENABLED)
    // Slow-check bookkeeping: per-type block count on the owning heap info.
    heapBucket->heapInfo->heapBlockCount[heapBlockType]++;
#endif

    // Small-attribute blocks use the small range of block types; medium
    // blocks the range between SmallAllocBlockTypeCount and SmallBlockTypeCount.
    if (TBlockAttributes::IsSmallBlock)
    {
        Assert(heapBlockType < HeapBlockType::SmallAllocBlockTypeCount);
    }
    else
    {
        Assert(heapBlockType >= HeapBlockType::SmallAllocBlockTypeCount && heapBlockType < HeapBlockType::SmallBlockTypeCount);
    }
    DebugOnly(lastUncollectedAllocBytes = 0);
}
// Primary constructor (small-attribute blocks): derives the bucket index and
// valid-pointers map from objectSize, then defers to ConstructorCommon.
template <class TBlockAttributes>
SmallHeapBlockT<TBlockAttributes>::SmallHeapBlockT(HeapBucket * bucket, ushort objectSize, ushort objectCount, HeapBlockType heapBlockType)
    : HeapBlock(heapBlockType),
    bucketIndex(HeapInfo::GetBucketIndex(objectSize)),
    validPointers(HeapInfo::smallAllocValidPointersMap.GetValidPointersForIndex(HeapInfo::GetBucketIndex(objectSize))),
    objectSize(objectSize), objectCount(objectCount)
{
    ConstructorCommon(bucket, objectSize, objectCount, heapBlockType);
}
// Specialization for medium blocks: uses the medium bucket index and the
// medium valid-pointers map instead of the small ones.
template <>
SmallHeapBlockT<MediumAllocationBlockAttributes>::SmallHeapBlockT(HeapBucket * bucket, ushort objectSize, ushort objectCount, HeapBlockType heapBlockType)
    : HeapBlock((HeapBlockType)(heapBlockType)),
    bucketIndex(HeapInfo::GetMediumBucketIndex(objectSize)),
    validPointers(HeapInfo::mediumAllocValidPointersMap.GetValidPointersForIndex(HeapInfo::GetMediumBucketIndex(objectSize))),
    objectSize(objectSize), objectCount(objectCount)
{
    ConstructorCommon(bucket, objectSize, objectCount, heapBlockType);
}
// Destructor: a block may only be destroyed once its pages are released
// (segment/address null), or if it is a leaf block, or if the whole page
// allocator is already closed (shutdown path). Undoes the slow-check counts
// taken in ConstructorCommon.
template <class TBlockAttributes>
SmallHeapBlockT<TBlockAttributes>::~SmallHeapBlockT()
{
    Assert((this->segment == nullptr && this->address == nullptr) ||
        (this->IsLeafBlock()) ||
        this->GetPageAllocator()->IsClosed());

#if defined(RECYCLER_SLOW_CHECK_ENABLED)
    heapBucket->heapInfo->heapBlockCount[this->GetHeapBlockType()]--;
#endif
#if defined(RECYCLER_SLOW_CHECK_ENABLED) || ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
    heapBucket->heapBlockCount--;
#endif
}
// Spacing (in bits) between consecutive objects' entries in the block's bit
// vectors, for a given bucket index. For small blocks this is bucketIndex + 1.
template <class TBlockAttributes>
uint
SmallHeapBlockT<TBlockAttributes>::GetObjectBitDeltaForBucketIndex(uint bucketIndex)
{
    return bucketIndex + 1;
}
// Medium-block specialization: the bit delta is the bucket's object size
// expressed in object-granularity units.
template <>
uint
SmallHeapBlockT<MediumAllocationBlockAttributes>::GetObjectBitDeltaForBucketIndex(uint bucketIndex)
{
    return HeapInfo::GetObjectSizeForBucketIndex<MediumAllocationBlockAttributes>(bucketIndex) / HeapConstants::ObjectGranularity;
}
// Number of pages spanned by a block of this attribute class (compile-time
// constant from TBlockAttributes).
template <class TBlockAttributes>
uint
SmallHeapBlockT<TBlockAttributes>::GetPageCount() const
{
    return TBlockAttributes::PageCount;
}
// Medium blocks: number of whole trailing pages that cannot hold a full
// object because objectSize does not evenly divide the block's byte size.
// (For small blocks the base implementation is used; only the medium
// specialization is visible here.)
template <>
uint
SmallHeapBlockT<MediumAllocationBlockAttributes>::GetUnusablePageCount()
{
    return ((MediumAllocationBlockAttributes::PageCount * AutoSystemInfo::PageSize) % this->objectSize) / AutoSystemInfo::PageSize;
}
// Marks the trailing unusable pages of a medium block read-only so stray
// writes to them fault, and clears their write-watch state so they don't
// show up as dirtied during rescan.
template <>
void
SmallHeapBlockT<MediumAllocationBlockAttributes>::ProtectUnusablePages()
{
    size_t count = this->GetUnusablePageCount();
    if (count > 0)
    {
        char* startPage = this->address + (MediumAllocationBlockAttributes::PageCount - count) * AutoSystemInfo::PageSize;
        DWORD oldProtect;
        BOOL ret = ::VirtualProtect(startPage, count * AutoSystemInfo::PageSize, PAGE_READONLY, &oldProtect);
        // Pages are expected to have been read-write before protection.
        Assert(ret && oldProtect == PAGE_READWRITE);

#ifdef RECYCLER_WRITE_WATCH
        if (!CONFIG_FLAG(ForceSoftwareWriteBarrier))
        {
            ::ResetWriteWatch(startPage, count*AutoSystemInfo::PageSize);
        }
#endif
    }
}
// Re-enables read-write access on the trailing unusable pages before the
// block's pages are released back to the page allocator.
template <>
void
SmallHeapBlockT<MediumAllocationBlockAttributes>::RestoreUnusablePages()
{
    size_t count = this->GetUnusablePageCount();
    if (count > 0)
    {
        char* startPage = (char*)this->address + (MediumAllocationBlockAttributes::PageCount - count) * AutoSystemInfo::PageSize;
        DWORD oldProtect;
        BOOL ret = ::VirtualProtect(startPage, count * AutoSystemInfo::PageSize, PAGE_READWRITE, &oldProtect);
#if DBG
        HeapBlock* block = this->heapBucket->heapInfo->recycler->heapBlockMap.GetHeapBlock(this->address);
        // only need to do this after the unusable page is already successfully protected
        // currently we don't have a flag to save that, but it should not fail after it successfully added to blockmap (see SetPage() implementation)
        if (block)
        {
            Assert(block == this);
            Assert(ret && oldProtect == PAGE_READONLY);
        }
#endif
    }
}
// Zeroes the per-object attribute bytes. The ObjectInfo array is laid out
// immediately BEFORE the block header (see GetAllocPlusSize), one byte per
// object, so clearing starts count bytes before `this`.
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::ClearObjectInfoList()
{
    ushort count = this->objectCount;
    // the object info list is prefix to the object
    memset(((byte *)this) - count, 0, count);
}
// Accessor for the attribute byte of the object at `index`.
// See SmallHeapBlockT<TBlockAttributes>::GetAllocPlusSize for layout description:
// the object info list is a prefix to the block header, stored in reverse
// index order, so entry i lives at ((byte*)this) - i - 1.
template <class TBlockAttributes>
byte&
SmallHeapBlockT<TBlockAttributes>::ObjectInfo(uint index)
{
    Assert(index < this->objectCount);
    return *(((byte *)this) - index - 1);
}
// During sweep: number of objects that are expected to end up free, i.e.
// everything that was not marked in the last mark phase.
template <class TBlockAttributes>
ushort
SmallHeapBlockT<TBlockAttributes>::GetExpectedFreeObjectCount() const
{
    Assert(this->GetRecycler()->IsSweeping());
    return objectCount - markCount;
}
  226. template <class TBlockAttributes>
  227. uint
  228. SmallHeapBlockT<TBlockAttributes>::GetExpectedFreeBytes() const
  229. {
  230. return GetExpectedFreeObjectCount() * objectSize;
  231. }
// During sweep: objects that still need to be swept — expected-free objects
// minus those already on the free list.
template <class TBlockAttributes>
ushort
SmallHeapBlockT<TBlockAttributes>::GetExpectedSweepObjectCount() const
{
    return GetExpectedFreeObjectCount() - freeCount;
}
// One-time state initialization shared by the constructors: validates the
// size/count pair and seeds the free counts as if every object were free.
// Asserts document fields expected to be zero/null at construction.
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::Init(ushort objectSize, ushort objectCount)
{
    Assert(objectCount != 0);
    Assert(TBlockAttributes::IsAlignedObjectSize(objectSize));

    Assert(this->next == nullptr);
    Assert(this->freeObjectList == nullptr);
    Assert(this->freeCount == 0);

    // A fresh block starts with every object free.
#if ENABLE_PARTIAL_GC
    this->oldFreeCount = this->lastFreeCount = this->objectCount;
#else
    this->lastFreeCount = this->objectCount;
#endif

#if ENABLE_CONCURRENT_GC
    this->isPendingConcurrentSweep = false;
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
    {
        // This flag is to identify whether this block was made available for allocations during the concurrent sweep and still needs to be swept.
        this->isPendingConcurrentSweepPrep = false;
#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
        this->objectsAllocatedDuringConcurrentSweepCount = 0;
        this->hasFinishedSweepObjects = false;
        this->wasAllocatedFromDuringSweep = false;
        this->lastObjectsAllocatedDuringConcurrentSweepCount = 0;
#endif
    }
#endif
#endif

    Assert(!this->isInAllocator);
    Assert(!this->isClearedFromAllocator);
    Assert(!this->isIntegratedBlock);
}
  272. template <class TBlockAttributes>
  273. BOOL
  274. SmallHeapBlockT<TBlockAttributes>::ReassignPages(Recycler * recycler)
  275. {
  276. Assert(this->address == nullptr);
  277. Assert(this->segment == nullptr);
  278. PageSegment * segment;
  279. auto pageAllocator = this->GetPageAllocator();
  280. uint pagecount = this->GetPageCount();
  281. char * address = pageAllocator->AllocPagesPageAligned(pagecount, &segment);
  282. if (address == NULL)
  283. {
  284. return FALSE;
  285. }
  286. #if ENABLE_PARTIAL_GC
  287. recycler->autoHeap.uncollectedNewPageCount += this->GetPageCount();
  288. #endif
  289. #ifdef RECYCLER_ZERO_MEM_CHECK
  290. if (!this->IsLeafBlock()
  291. #ifdef RECYCLER_WRITE_BARRIER_ALLOC_THREAD_PAGE
  292. && !this->IsWithBarrier()
  293. #endif
  294. )
  295. {
  296. recycler->VerifyZeroFill(address, AutoSystemInfo::PageSize * this->GetPageCount());
  297. }
  298. #endif
  299. if (!this->SetPage(address, segment, recycler))
  300. {
  301. this->GetPageAllocator()->SuspendIdleDecommit();
  302. this->ReleasePages(recycler);
  303. this->GetPageAllocator()->ResumeIdleDecommit();
  304. return FALSE;
  305. }
  306. RECYCLER_PERF_COUNTER_ADD(FreeObjectSize, this->GetPageCount() * AutoSystemInfo::PageSize);
  307. RECYCLER_PERF_COUNTER_ADD(SmallHeapBlockFreeObjectSize, this->GetPageCount() * AutoSystemInfo::PageSize);
  308. return TRUE;
  309. }
// Binds freshly allocated pages to this block: records segment/address,
// registers the block in the recycler's heap block map, caches the mark-bit
// vector pointer, and protects any unusable trailing pages.
// Returns FALSE if heap-block-map registration fails (block state is then
// partially set; caller is expected to release the pages).
template <class TBlockAttributes>
BOOL
SmallHeapBlockT<TBlockAttributes>::SetPage(__in_ecount_pagesize char * baseAddress, PageSegment * pageSegment, Recycler * recycler)
{
    char* address = baseAddress;
    // All pages of the block must fall within one level-2 heap block map chunk.
    Assert(HeapBlockMap32::GetLevel2Id(address) + (TBlockAttributes::PageCount - 1) < 256);
    this->segment = pageSegment;
    this->address = address;

    // Set up the page to have nothing is free
    Assert(this->freeObjectList == nullptr);
    Assert(this->IsFreeBitsValid());
    Assert(this->freeCount == 0);
    Assert(this->freeCount == this->GetFreeBitVector()->Count());
    Assert(this->objectCount == this->lastFreeCount);
    Assert(this->explicitFreeBits.Count() == 0);

#if ENABLE_CONCURRENT_GC
    Assert(recycler->IsConcurrentMarkState() || !recycler->IsMarkState() || recycler->IsCollectionDisabled());
#else
    Assert(!recycler->IsMarkState() || recycler->IsCollectionDisabled());
#endif

    Assert(this->bucketIndex <= 0xFF);
    // We use the block type directly here, without the getter so that we can tell on the heap block map,
    // whether the block is a medium block or not
    if (!recycler->heapBlockMap.SetHeapBlock(this->address, this->GetPageCount() - this->GetUnusablePageCount(), this, this->heapBlockType, (byte)this->bucketIndex))
    {
        return FALSE;
    }

    // Retrieve pointer to mark bits for this block and store it locally.
    // Note, mark bits aren't guaranteed to exist until after we register with HBM.
    this->markBits = recycler->heapBlockMap.GetMarkBitVectorForPages<TBlockAttributes::BitVectorCount>(this->address);
    Assert(this->markBits);

#if defined(_M_ARM32_OR_ARM64)
    // We need to ensure that the above writes to the SmallHeapBlock are visible to the background GC thread.
    // In particular, see Threshold 331596 -- we were seeing an old value for SmallHeapBlockT<TBlockAttributes>::markBits in ResetMarks.
    // which caused the bit vector Copy operation there to AV.
    // See also SmallHeapBlockT<TBlockAttributes>::ResetMarks.
    MemoryBarrier();
#endif

    this->ProtectUnusablePages();
    return TRUE;
}
  351. template <class TBlockAttributes>
  352. void
  353. SmallHeapBlockT<TBlockAttributes>::ReleasePages(Recycler * recycler)
  354. {
  355. Assert(recycler->collectionState != CollectionStateMark);
  356. Assert(segment != nullptr);
  357. Assert(address != nullptr);
  358. #if DBG
  359. if (this->IsLeafBlock())
  360. {
  361. RecyclerVerboseTrace(recycler->GetRecyclerFlagsTable(), _u("Releasing leaf block pages at address 0x%p\n"), address);
  362. }
  363. #endif
  364. char* address = this->address;
  365. #ifdef RECYCLER_FREE_MEM_FILL
  366. memset(address, DbgMemFill, AutoSystemInfo::PageSize * (this->GetPageCount()-this->GetUnusablePageCount()));
  367. #endif
  368. if (this->GetUnusablePageCount() > 0)
  369. {
  370. this->RestoreUnusablePages();
  371. }
  372. this->GetPageAllocator()->ReleasePages(address, this->GetPageCount(), this->GetPageSegment());
  373. this->segment = nullptr;
  374. this->address = nullptr;
  375. }
  376. #if ENABLE_BACKGROUND_PAGE_FREEING
  377. template <class TBlockAttributes>
  378. void
  379. SmallHeapBlockT<TBlockAttributes>::BackgroundReleasePagesSweep(Recycler* recycler)
  380. {
  381. recycler->heapBlockMap.ClearHeapBlock(address, this->GetPageCount() - this->GetUnusablePageCount());
  382. char* address = this->address;
  383. if (this->GetUnusablePageCount() > 0)
  384. {
  385. this->RestoreUnusablePages();
  386. }
  387. this->GetPageAllocator()->BackgroundReleasePages(address, this->GetPageCount(), this->GetPageSegment());
  388. this->address = nullptr;
  389. this->segment = nullptr;
  390. this->Reset();
  391. }
  392. #endif
// Shutdown path: intentionally does NOT release pages (the page allocator
// will tear them down faster in bulk); debug builds only unregister from the
// heap block map and validate the allocator's state.
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::ReleasePagesShutdown(Recycler * recycler)
{
#if DBG
    if (this->IsLeafBlock())
    {
        RecyclerVerboseTrace(recycler->GetRecyclerFlagsTable(), _u("Releasing leaf block pages at address 0x%p\n"), address);
    }
    RemoveFromHeapBlockMap(recycler);
    // Don't release the page in shut down, the page allocator will release them faster
    // Leaf block's allocator need not be closed
    // For non-large normal heap blocks ReleasePagesShutdown could be called during shutdown cleanup when the block is still pending concurrent
    // sweep i.e. it resides in the pendingSweepList of the RecyclerSweep instance. In this case the page allocator may not have been closed yet.
    Assert(this->IsLeafBlock() || this->GetPageAllocator()->IsClosed() || this->isPendingConcurrentSweep);
#endif
}
// Unregisters this block's usable pages from the recycler's heap block map.
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::RemoveFromHeapBlockMap(Recycler* recycler)
{
    recycler->heapBlockMap.ClearHeapBlock(address, this->GetPageCount() - this->GetUnusablePageCount());
}
// Foreground-sweep page release: unregister from the heap block map, then
// return the pages to the page allocator.
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::ReleasePagesSweep(Recycler * recycler)
{
    RemoveFromHeapBlockMap(recycler);
    ReleasePages(recycler);
}
// Restores the block to its just-constructed state (all objects free, no
// free list, no mark bits) so it can be given fresh pages later.
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::Reset()
{
    this->GetFreeBitVector()->ClearAll();
    this->freeCount = 0;
    this->markCount = 0;

#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
    {
#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
        // Clear concurrent-sweep debug/diagnostic counters and flags.
        this->hasFinishedSweepObjects = false;
        this->wasAllocatedFromDuringSweep = false;
        this->objectsMarkedDuringSweep = 0;
        this->objectsAllocatedDuringConcurrentSweepCount = 0;
        this->lastObjectsAllocatedDuringConcurrentSweepCount = 0;
#endif
        this->isPendingConcurrentSweepPrep = false;
    }
#endif

    // As in Init: a reset block counts every object as free.
#if ENABLE_PARTIAL_GC
    this->oldFreeCount = this->lastFreeCount = this->objectCount;
#else
    this->lastFreeCount = this->objectCount;
#endif
    this->freeObjectList = nullptr;
    this->lastFreeObjectHead = nullptr;
    this->ClearObjectInfoList();
    this->isInAllocator = false;

#if DBG || defined(RECYCLER_STATS)
    this->GetDebugFreeBitVector()->ClearAll();
#endif
#if DBG
    this->isClearedFromAllocator = false;
    this->isIntegratedBlock = false;
#endif

    // There is no page associated with this heap block,
    // and therefore we should have no mark bits either
    this->markBits = nullptr;

    Assert(this->explicitFreeBits.Count() == 0);
}
// Map any object address to it's object index within the heap block.
// Returns InvalidAddressBit when the address is not an exact object start.
template <class TBlockAttributes>
ushort
SmallHeapBlockT<TBlockAttributes>::GetAddressIndex(void * objectAddress)
{
    Assert(objectAddress >= address && objectAddress < this->GetEndAddress());
    Assert(HeapInfo::IsAlignedAddress(objectAddress));
    Assert(HeapInfo::IsAlignedAddress(address));

    // Offset within the block, in object-granularity units; the precomputed
    // valid-pointers table maps it to an object index (or InvalidAddressBit).
    unsigned int offset = (unsigned int)((char*)objectAddress - address);
    offset = offset >> HeapConstants::ObjectAllocationShift;

    ushort index = validPointers.GetAddressIndex(offset);
    Assert(index == SmallHeapBlockT<TBlockAttributes>::InvalidAddressBit ||
        index <= TBlockAttributes::MaxAddressBit);
    return index;
}
// Shared precomputed bit vector of positions that can never be valid object
// starts for this block's object size.
template <class TBlockAttributes>
typename SmallHeapBlockT<TBlockAttributes>::SmallHeapBlockBitVector const*
SmallHeapBlockT<TBlockAttributes>::GetInvalidBitVector()
{
    return HeapInfo::GetInvalidBitVector<TBlockAttributes>(objectSize);
}
// Shared precomputed per-size block metadata for this block's object size.
template <class TBlockAttributes>
typename SmallHeapBlockT<TBlockAttributes>::BlockInfo const*
SmallHeapBlockT<TBlockAttributes>::GetBlockInfo()
{
    return HeapInfo::GetBlockInfo<TBlockAttributes>(objectSize);
}
// Like GetAddressIndex, but also accepts interior pointers: maps any address
// inside an object back to that object's index (InvalidAddressBit when the
// address falls in no object).
template <class TBlockAttributes>
ushort
SmallHeapBlockT<TBlockAttributes>::GetInteriorAddressIndex(void * interiorAddress)
{
    Assert(interiorAddress >= address && interiorAddress < this->GetEndAddress());
    Assert(HeapInfo::IsAlignedAddress(address));

    unsigned int offset = (unsigned int)((char*)interiorAddress - address);
    offset = offset >> HeapConstants::ObjectAllocationShift;

    ushort index = validPointers.GetInteriorAddressIndex(offset);
    Assert(index == SmallHeapBlockT<TBlockAttributes>::InvalidAddressBit ||
        index <= TBlockAttributes::MaxAddressBit);
    return index;
}
  504. template <class TBlockAttributes>
  505. BOOL
  506. SmallHeapBlockT<TBlockAttributes>::IsInFreeObjectList(void * objectAddress)
  507. {
  508. FreeObject * freeObject = this->freeObjectList;
  509. while (freeObject != nullptr)
  510. {
  511. if (freeObject == objectAddress)
  512. {
  513. return true;
  514. }
  515. freeObject = freeObject->GetNext();
  516. }
  517. return false;
  518. }
// Resolves objectAddress (optionally an interior pointer) to a live object in
// this block, filling heapObject on success. Returns false when the address
// is not a valid object start or the object is on the free list (unless the
// flags suppress the free-bit check).
template <class TBlockAttributes>
template <typename TBlockType>
bool
SmallHeapBlockT<TBlockAttributes>::FindHeapObjectImpl(void* objectAddress, Recycler * recycler, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject)
{
    if (flags & FindHeapObjectFlags_AllowInterior)
    {
        // Normalize an interior pointer to its object's real start address.
        objectAddress = (void*) this->GetRealAddressFromInterior(objectAddress);
        if (objectAddress == nullptr)
        {
            return false;
        }
    }

    ushort index = GetAddressIndex(objectAddress);
    Assert(index != SmallHeapBlockT<TBlockAttributes>::InvalidAddressBit);
    if (index == SmallHeapBlockT<TBlockAttributes>::InvalidAddressBit)
    {
        return false;
    }

    // If we have pending object, we still need to check the free bit if the caller requested the attribute to be correct
    bool const disableCheck = ((flags & FindHeapObjectFlags_NoFreeBitVerify) != 0) ||
        ((flags & FindHeapObjectFlags_VerifyFreeBitForAttribute) != 0 && !this->HasPendingDisposeObjects());
    if (!disableCheck)
    {
        // REVIEW: Checking if an object if free is strictly not necessary
        // In all case, we should have a valid object, For memory protect heap, this is just to make sure we don't
        // free pointers that are invalid.
#if ENABLE_CONCURRENT_GC
        if (recycler->IsConcurrentSweepExecutingState())
        {
            // TODO: unless we know the state of the heap block, we don't know.
            // skip the check for now.
        }
        else
#endif
        {
            if (flags & FindHeapObjectFlags_ClearedAllocators)
            {
                // Heap enum has some case where it allocates, so we can't assert
                Assert(((HeapBucketT<TBlockType> *)this->heapBucket)->AllocatorsAreEmpty() || recycler->isHeapEnumInProgress);
            }
            else if (this->IsInAllocator())
            {
                // Flush allocator state so the free bit vector reflects reality.
                ((HeapBucketT<TBlockType> *)this->heapBucket)->UpdateAllocators();
            }
            // REVIEW allocation heuristics
            if (this->EnsureFreeBitVector()->Test(this->GetObjectBitDelta() * index))
            {
                // Object is on the free list — not a live object.
                return false;
            }
        }
    }

    byte& attributes = ObjectInfo(index);
    heapObject = RecyclerHeapObjectInfo(objectAddress, recycler, this, &attributes);
    return true;
}
  575. template <class TBlockAttributes>
  576. BOOL
  577. SmallHeapBlockT<TBlockAttributes>::IsValidObject(void* objectAddress)
  578. {
  579. if (objectAddress < this->GetAddress() || objectAddress >= this->GetEndAddress())
  580. {
  581. return false;
  582. }
  583. ushort index = GetAddressIndex(objectAddress);
  584. if (index == SmallHeapBlockT<TBlockAttributes>::InvalidAddressBit)
  585. {
  586. return false;
  587. }
  588. #if DBG
  589. return !this->GetDebugFreeBitVector()->Test(GetAddressBitIndex(objectAddress));
  590. #else
  591. return true;
  592. #endif
  593. }
  594. template <class TBlockAttributes>
  595. bool
  596. SmallHeapBlockT<TBlockAttributes>::IsInAllocator() const
  597. {
  598. return isInAllocator;
  599. }
  600. template <class TBlockAttributes>
  601. bool
  602. SmallHeapBlockT<TBlockAttributes>::HasPendingDisposeObjects()
  603. {
  604. return this->IsAnyFinalizableBlock() && this->AsFinalizableBlock<TBlockAttributes>()->HasPendingDisposeObjects();
  605. }
  606. template <class TBlockAttributes>
  607. bool
  608. SmallHeapBlockT<TBlockAttributes>::HasAnyDisposeObjects()
  609. {
  610. return this->IsAnyFinalizableBlock() && this->AsFinalizableBlock<TBlockAttributes>()->HasAnyDisposeObjects();
  611. }
// Returns the Recycler that owns this block.
// Only meaningful in DBG builds; retail builds keep no back-pointer chain
// here and always return nullptr, so callers must be debug-only code.
template <class TBlockAttributes>
Recycler *
SmallHeapBlockT<TBlockAttributes>::GetRecycler() const
{
#if DBG
    return this->heapBucket->heapInfo->recycler;
#else
    return nullptr;
#endif
}
  622. #if DBG
  623. template <class TBlockAttributes>
  624. HeapInfo *
  625. SmallHeapBlockT<TBlockAttributes>::GetHeapInfo() const
  626. {
  627. return this->heapBucket->heapInfo;
  628. }
  629. template <class TBlockAttributes>
  630. BOOL
  631. SmallHeapBlockT<TBlockAttributes>::IsFreeObject(void * objectAddress)
  632. {
  633. if (objectAddress < this->GetAddress() || objectAddress >= this->GetEndAddress())
  634. {
  635. return false;
  636. }
  637. ushort index = GetAddressIndex(objectAddress);
  638. if (index == SmallHeapBlockT<TBlockAttributes>::InvalidAddressBit)
  639. {
  640. return false;
  641. }
  642. return this->GetDebugFreeBitVector()->Test(GetAddressBitIndex(objectAddress));
  643. }
  644. template <class TBlockAttributes>
  645. void
  646. SmallHeapBlockT<TBlockAttributes>::VerifyMarkBitVector()
  647. {
  648. this->GetRecycler()->heapBlockMap.template VerifyMarkCountForPages<TBlockAttributes::BitVectorCount>(this->address, TBlockAttributes::PageCount);
  649. }
  650. template <class TBlockAttributes>
  651. bool
  652. SmallHeapBlockT<TBlockAttributes>::IsClearedFromAllocator() const
  653. {
  654. return isClearedFromAllocator;
  655. }
  656. template <class TBlockAttributes>
  657. void
  658. SmallHeapBlockT<TBlockAttributes>::SetIsClearedFromAllocator(bool value)
  659. {
  660. isClearedFromAllocator = value;
  661. }
  662. #endif
  663. template <class TBlockAttributes>
  664. byte *
  665. SmallHeapBlockT<TBlockAttributes>::GetRealAddressFromInterior(void * interiorAddress)
  666. {
  667. Assert(interiorAddress >= this->address && interiorAddress < this->address + AutoSystemInfo::PageSize * this->GetPageCount());
  668. ushort index = GetInteriorAddressIndex(interiorAddress);
  669. if (index != SmallHeapBlockT<TBlockAttributes>::InvalidAddressBit)
  670. {
  671. return (byte *)this->address + index * this->GetObjectSize();
  672. }
  673. return nullptr;
  674. }
  675. template <class TBlockAttributes>
  676. bool
  677. SmallHeapBlockT<TBlockAttributes>::TestObjectMarkedBit(void* objectAddress)
  678. {
  679. Assert(this->address != nullptr);
  680. Assert(this->segment != nullptr);
  681. uint bitIndex = GetAddressBitIndex(objectAddress);
  682. Assert(IsValidBitIndex(bitIndex));
  683. return this->GetMarkedBitVector()->Test(bitIndex) != 0;
  684. }
  685. template <class TBlockAttributes>
  686. void
  687. SmallHeapBlockT<TBlockAttributes>::SetObjectMarkedBit(void* objectAddress)
  688. {
  689. Assert(this->address != nullptr);
  690. Assert(this->segment != nullptr);
  691. uint bitIndex = GetAddressBitIndex(objectAddress);
  692. Assert(IsValidBitIndex(bitIndex));
  693. this->GetMarkedBitVector()->Set(bitIndex);
  694. }
  695. #ifdef RECYCLER_MEMORY_VERIFY
  696. template <class TBlockAttributes>
  697. void
  698. SmallHeapBlockT<TBlockAttributes>::SetExplicitFreeBitForObject(void* objectAddress)
  699. {
  700. Assert(this->address != nullptr);
  701. Assert(this->segment != nullptr);
  702. uint bitIndex = GetAddressBitIndex(objectAddress);
  703. Assert(IsValidBitIndex(bitIndex));
  704. BOOLEAN wasSet = this->explicitFreeBits.TestAndSet(bitIndex);
  705. Assert(!wasSet);
  706. }
  707. template <class TBlockAttributes>
  708. void
  709. SmallHeapBlockT<TBlockAttributes>::ClearExplicitFreeBitForObject(void* objectAddress)
  710. {
  711. Assert(this->address != nullptr);
  712. Assert(this->segment != nullptr);
  713. uint bitIndex = GetAddressBitIndex(objectAddress);
  714. Assert(IsValidBitIndex(bitIndex));
  715. BOOLEAN wasSet = this->explicitFreeBits.TestAndClear(bitIndex);
  716. Assert(wasSet);
  717. }
  718. #endif
  719. #ifdef RECYCLER_VERIFY_MARK
  720. #if DBG
  721. void HeapBlock::PrintVerifyMarkFailure(Recycler* recycler, char* objectAddress, char* target)
  722. {
  723. // Due to possible GC mark optimization, the pointers may point to object
  724. // internal and "unaligned". Align them then FindHeapBlock.
  725. HeapBlock* block = recycler->FindHeapBlock(HeapInfo::GetAlignedAddress(objectAddress));
  726. if (block == nullptr)
  727. {
  728. return;
  729. }
  730. HeapBlock* targetBlock = recycler->FindHeapBlock(HeapInfo::GetAlignedAddress(target));
  731. if (targetBlock == nullptr)
  732. {
  733. return;
  734. }
  735. #ifdef TRACK_ALLOC
  736. Recycler::TrackerData* trackerData = nullptr;
  737. Recycler::TrackerData* targetTrackerData = nullptr;
  738. const char* typeName = nullptr;
  739. const char* targetTypeName = nullptr;
  740. uint offset = 0;
  741. uint targetOffset = 0;
  742. char* objectStartAddress = nullptr;
  743. char* targetStartAddress = nullptr;
  744. if (targetBlock->IsLargeHeapBlock())
  745. {
  746. targetOffset = (uint)(target - (char*)((LargeHeapBlock*)targetBlock)->GetRealAddressFromInterior(target));
  747. }
  748. else
  749. {
  750. targetOffset = (uint)(target - targetBlock->GetAddress()) % targetBlock->GetObjectSize(nullptr);
  751. }
  752. if (targetOffset != 0)
  753. {
  754. // "target" points to internal of an object. This is not a GC pointer.
  755. return;
  756. }
  757. if (Recycler::DoProfileAllocTracker())
  758. {
  759. // need CheckMemoryLeak or KeepRecyclerTrackData flag to have the tracker data and show following detailed info
  760. #if defined(__clang__) && !defined(_MSC_VER)
  761. auto getDemangledName = [](const type_info* typeinfo) ->const char*
  762. {
  763. int status;
  764. char buffer[1024];
  765. size_t buflen = 1024;
  766. char* name = abi::__cxa_demangle(typeinfo->name(), buffer, &buflen, &status);
  767. if (status != 0)
  768. {
  769. Output::Print(_u("Demangle failed: result=%d, buflen=%d\n"), status, buflen);
  770. }
  771. char* demangledName = (char*)malloc(buflen);
  772. memcpy(demangledName, name, buflen);
  773. return demangledName;
  774. };
  775. #else
  776. auto getDemangledName = [](const type_info* typeinfo) ->const char*
  777. {
  778. return typeinfo->name();
  779. };
  780. #endif
  781. if (block->IsLargeHeapBlock())
  782. {
  783. offset = (uint)(objectAddress - (char*)((LargeHeapBlock*)block)->GetRealAddressFromInterior(objectAddress));
  784. }
  785. else
  786. {
  787. offset = (uint)(objectAddress - block->address) % block->GetObjectSize(objectAddress);
  788. }
  789. objectStartAddress = objectAddress - offset;
  790. trackerData = (Recycler::TrackerData*)block->GetTrackerData(objectStartAddress);
  791. if (trackerData)
  792. {
  793. typeName = getDemangledName(trackerData->typeinfo);
  794. if (trackerData->isArray)
  795. {
  796. Output::Print(_u("Missing Barrier\nOn array of %S\n"), typeName);
  797. #ifdef STACK_BACK_TRACE
  798. if (CONFIG_FLAG(KeepRecyclerTrackData))
  799. {
  800. Output::Print(_u("Allocation stack:\n"));
  801. ((StackBackTrace*)(trackerData + 1))->Print();
  802. }
  803. #endif
  804. }
  805. else
  806. {
  807. auto dumpFalsePositive = [&]()
  808. {
  809. if (CONFIG_FLAG(Verbose))
  810. {
  811. Output::Print(_u("False Positive: %S+0x%x => 0x%p -> 0x%p\n"), typeName, offset, objectAddress, target);
  812. }
  813. };
  814. if (IsLikelyRuntimeFalseReference(objectStartAddress, offset, typeName))
  815. {
  816. dumpFalsePositive();
  817. return;
  818. }
  819. //TODO: (leish)(swb) analyze pdb to check if the field is a pointer field or not
  820. Output::Print(_u("Missing Barrier\nOn type %S+0x%x\n"), typeName, offset);
  821. }
  822. }
  823. targetStartAddress = target - targetOffset;
  824. targetTrackerData = (Recycler::TrackerData*)targetBlock->GetTrackerData(targetStartAddress);
  825. if (targetTrackerData)
  826. {
  827. targetTypeName = getDemangledName(targetTrackerData->typeinfo);
  828. if (targetTrackerData->isArray)
  829. {
  830. Output::Print(_u("Target type (missing barrier field type) is array item of %S\n"), targetTypeName);
  831. #ifdef STACK_BACK_TRACE
  832. if (CONFIG_FLAG(KeepRecyclerTrackData))
  833. {
  834. Output::Print(_u("Allocation stack:\n"));
  835. ((StackBackTrace*)(targetTrackerData + 1))->Print();
  836. }
  837. #endif
  838. }
  839. else if (targetOffset == 0)
  840. {
  841. Output::Print(_u("Target type (missing barrier field type) is %S\n"), targetTypeName);
  842. }
  843. else
  844. {
  845. Output::Print(_u("Target type (missing barrier field type) is pointing to %S+0x%x\n"), targetTypeName, targetOffset);
  846. }
  847. }
  848. Output::Print(_u("---------------------------------\n"));
  849. }
  850. #endif
  851. Output::Print(_u("Missing barrier on 0x%p, target is 0x%p\n"), objectAddress, target);
  852. AssertMsg(false, "Missing barrier.");
  853. }
  854. #endif // DBG
// (RECYCLER_VERIFY_MARK) For every marked, allocated, valid object in this
// block, scan its payload word by word and verify that everything it points
// to is also marked. Leaf blocks (and, normally, software write-barrier
// blocks) are skipped because their payload words are not traced as pointers.
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::VerifyMark()
{
    Assert(!this->needOOMRescan);
    SmallHeapBlockBitVector * marked = this->GetMarkedBitVector();
    // Build a fresh free bit vector on the stack rather than relying on the
    // cached one.
    SmallHeapBlockBitVector tempFreeBits;
    this->BuildFreeBitVector(&tempFreeBits);
    SmallHeapBlockBitVector * free = &tempFreeBits;
    SmallHeapBlockBitVector const * invalid = this->GetInvalidBitVector();
    uint objectWordCount = this->GetObjectWordCount();
    Recycler * recycler = this->heapBucket->heapInfo->recycler;
    FOREACH_BITSET_IN_FIXEDBV(bitIndex, marked)
    {
        // Only verify objects that are marked AND allocated AND valid.
        if (!free->Test(bitIndex) && !invalid->Test(bitIndex))
        {
            Assert(IsValidBitIndex(bitIndex));
            uint objectIndex = GetObjectIndexFromBitIndex((ushort)bitIndex);
            Assert((this->ObjectInfo(objectIndex) & NewTrackBit) == 0);
            // NOTE: We can't verify mark for software write barrier blocks, because they may have
            // non-pointer updates that don't trigger the write barrier, but still look like a false reference.
            // Thus, when we get here, we'll see a false reference that isn't marked.
            // Since this situation is hard to detect, just don't verify mark for write barrier blocks.
            // We could fix this if we had object layout info.
            if (!this->IsLeafBlock()
#ifdef RECYCLER_WRITE_BARRIER
                && (!this->IsWithBarrier() || CONFIG_FLAG(ForceSoftwareWriteBarrier))
#endif
                )
            {
                if ((ObjectInfo(objectIndex) & LeafBit) == 0)
                {
                    // Treat every pointer-sized word of the object as a
                    // potential GC reference and verify its mark state.
                    char * objectAddress = this->address + objectIndex * objectSize;
                    for (uint i = 0; i < objectWordCount; i++)
                    {
                        void* target = *(void**) objectAddress;
                        if (recycler->VerifyMark(objectAddress, target))
                        {
#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
                            if (CONFIG_FLAG(ForceSoftwareWriteBarrier) && CONFIG_FLAG(VerifyBarrierBit))
                            {
                                this->WBVerifyBitIsSet(objectAddress);
                            }
#endif
                        }
                        objectAddress += sizeof(void *);
                    }
                }
            }
        }
    }
    NEXT_BITSET_IN_FIXEDBV;
}
// (RECYCLER_VERIFY_MARK) Verifies that "target" -- an address in this block,
// referenced from objectAddress -- carries a mark bit. Returns true if it is
// marked; on failure prints diagnostics (DBG builds) or breaks into the
// debugger (other builds).
template <class TBlockAttributes>
bool
SmallHeapBlockT<TBlockAttributes>::VerifyMark(void * objectAddress, void * target)
{
    // Because we mark through new object, we might have a false reference
    // somewhere that we have scanned before this new block is allocated
    // so the object will not be marked even though it looks like a reference
    // Can't verify when the block is new
    if (this->heapBucket->GetRecycler()->heapBlockMap.IsAddressInNewChunk(target))
    {
        return false;
    }
    ushort bitIndex = GetAddressBitIndex(target);
    bool isMarked = this->GetMarkedBitVector()->Test(bitIndex) == TRUE;
#if DBG
    if (!isMarked)
    {
        PrintVerifyMarkFailure(this->GetRecycler(), (char*)objectAddress, (char*)target);
    }
#else
    if (!isMarked)
    {
        DebugBreak();
    }
#endif
    return isMarked;
}
  935. #endif
  936. #ifdef RECYCLER_STRESS
  937. template <class TBlockAttributes>
  938. void
  939. SmallHeapBlockT<TBlockAttributes>::InduceFalsePositive(Recycler * recycler)
  940. {
  941. // Induce a false positive mark by marking the first object on the free list, if any.
  942. // Note that if the block is in the allocator, freeObjectList is not up to date.
  943. // So we may be marking an already-allocated block, but that's okay --
  944. // we call TryMark so that normal processing (including tracked object processing, etc)
  945. // will occur just as if we had a false reference to this object previously.
  946. void * falsePositive = this->freeObjectList;
  947. if (falsePositive != nullptr)
  948. {
  949. recycler->TryMarkNonInterior(falsePositive, nullptr);
  950. }
  951. }
  952. #endif
  953. template <class TBlockAttributes>
  954. void
  955. SmallHeapBlockT<TBlockAttributes>::ClearAllAllocBytes()
  956. {
  957. #if ENABLE_PARTIAL_GC
  958. this->oldFreeCount = this->lastFreeCount = this->freeCount;
  959. #else
  960. this->lastFreeCount = this->freeCount;
  961. #endif
  962. }
  963. #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
  964. #if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
  965. template <class TBlockAttributes>
  966. void
  967. SmallHeapBlockT<TBlockAttributes>::ResetConcurrentSweepAllocationCounts()
  968. {
  969. if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && this->objectsAllocatedDuringConcurrentSweepCount > 0)
  970. {
  971. // Reset the count of objects allocated during this concurrent sweep; so we will start afresh the next time around.
  972. Assert(this->objectsAllocatedDuringConcurrentSweepCount == this->objectsMarkedDuringSweep);
  973. this->lastObjectsAllocatedDuringConcurrentSweepCount = this->objectsAllocatedDuringConcurrentSweepCount;
  974. this->objectsAllocatedDuringConcurrentSweepCount = 0;
  975. this->objectsMarkedDuringSweep = 0;
  976. }
  977. }
  978. #endif
  979. #endif
  980. #if ENABLE_PARTIAL_GC
  981. template <class TBlockAttributes>
  982. bool
  983. SmallHeapBlockT<TBlockAttributes>::DoPartialReusePage(RecyclerSweep const& recyclerSweep, uint& expectFreeByteCount)
  984. {
  985. // Partial GC page reuse heuristic
  986. Assert(recyclerSweep.InPartialCollectMode());
  987. expectFreeByteCount = GetExpectedFreeBytes();
  988. // PartialCollectSmallHeapBlockReuseMinFreeBytes is calculated by dwPageSize* efficacy. If efficacy is
  989. // high (== 1), and dwPageSize % objectSize != 0, all the pages in the bucket will be partial, and that
  990. // could increase in thread sweep time.
  991. // OTOH, if the object size is really large, the calculation below will reduce the chance for a page to be
  992. // partial. we might need to watch out for that.
  993. return (expectFreeByteCount + objectSize >= recyclerSweep.GetManager()->GetPartialCollectSmallHeapBlockReuseMinFreeBytes());
  994. }
  995. #if DBG
// (DBG only) Validation for a partial heap block that we are not going to sweep.
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::SweepVerifyPartialBlock(Recycler * recycler)
{
    // Partial blocks are never leaf blocks.
    Assert(!this->IsLeafBlock());
    // Nothing in the partialHeapBlockList is sweepable: no object in this
    // block should be expected to be swept by the current collection.
    Assert(GetExpectedSweepObjectCount() == 0);
}
  1005. #endif
  1006. template <class TBlockAttributes>
  1007. uint
  1008. SmallHeapBlockT<TBlockAttributes>::GetAndClearUnaccountedAllocBytes()
  1009. {
  1010. Assert(this->lastFreeCount >= this->freeCount);
  1011. const ushort currentFreeCount = this->freeCount;
  1012. uint unaccountedAllocBytes = (this->lastFreeCount - currentFreeCount) * this->objectSize;
  1013. this->lastFreeCount = currentFreeCount;
  1014. return unaccountedAllocBytes;
  1015. }
// Partial GC accounting: estimates how many of the objects about to be swept
// in this block were allocated since the last collection ("new" objects) and
// subtracts those bytes from the sweep manager's new-object allocation tally.
//
// recyclerSweep    - sweep bookkeeping; provides the manager to update.
// expectSweepCount - number of objects expected to be swept from this block.
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::AdjustPartialUncollectedAllocBytes(RecyclerSweep& recyclerSweep, uint const expectSweepCount)
{
    const uint allObjectCount = this->objectCount;
    const ushort currentFreeCount = this->freeCount;
    Assert(this->lastFreeCount == currentFreeCount);
    // Objects allocated since the previous partial baseline.
    uint newAllocatedCount = this->oldFreeCount - currentFreeCount;
    this->oldFreeCount = currentFreeCount;
    uint newObjectExpectSweepCount = expectSweepCount;
#if ENABLE_CONCURRENT_GC
    if (expectSweepCount != 0 && !recyclerSweep.InPartialCollect())
    {
        // We don't know which objects that we are going sweep are old and which object are new
        // So just assume one way or the other by the amount of old vs. new object in the block
        const uint allocatedObjectCount = allObjectCount - currentFreeCount;
        Assert(allocatedObjectCount >= newAllocatedCount);
        const uint oldObjectCount = allocatedObjectCount - newAllocatedCount;
        if (oldObjectCount < newAllocatedCount)
        {
            // count all of the swept object as new, but don't exceed the amount we allocated
            if (newObjectExpectSweepCount > newAllocatedCount)
            {
                newObjectExpectSweepCount = newAllocatedCount;
            }
        }
        else
        {
            // count all of the swept object as old
            newObjectExpectSweepCount = 0;
        }
    }
#endif
    // The page can be old, or it is full (where we set lastFreeCount to 0)
    // Otherwise, the newly allocated count must be bigger then the expect sweep count
    Assert(newAllocatedCount >= newObjectExpectSweepCount);
    Assert(this->lastUncollectedAllocBytes >= newObjectExpectSweepCount * this->objectSize);
    recyclerSweep.GetManager()->SubtractSweepNewObjectAllocBytes(newObjectExpectSweepCount * this->objectSize);
}
#endif // ENABLE_PARTIAL_GC
  1056. template <class TBlockAttributes>
  1057. uint
  1058. SmallHeapBlockT<TBlockAttributes>::GetMarkCountForSweep()
  1059. {
  1060. Assert(IsFreeBitsValid());
  1061. // Make a local copy of mark bits, so we don't modify the actual mark bits.
  1062. SmallHeapBlockBitVector temp;
  1063. temp.Copy(this->GetMarkedBitVector());
  1064. // Remove any invalid bits that may have been set
  1065. temp.Minus(this->GetInvalidBitVector());
  1066. // Remove the mark bit for things that are still free
  1067. if (this->freeCount != 0)
  1068. {
  1069. temp.Minus(this->GetFreeBitVector());
  1070. }
  1071. return temp.Count();
  1072. }
// Sweeps this block after marking and classifies the outcome.
//
// Parameters:
//   recyclerSweep     - per-sweep bookkeeping; gives access to the recycler
//                       and (partial GC) the sweep manager.
//   queuePendingSweep - when true (concurrent GC builds), queue the block for
//                       concurrent sweeping instead of sweeping in thread.
//   allocable         - true if the block was allocated from since the last
//                       GC, so the free bit vector must be refreshed first.
//   finalizeCount     - number of finalizable objects; forces in-thread sweep
//                       so PrepareFinalize can run before any script does.
//   hasPendingDispose - true if the block already has objects awaiting Dispose.
//
// Returns one of: SweepStateEmpty (whole block freed), SweepStateFull,
// SweepStateSwept, SweepStatePendingDispose, or SweepStatePendingSweep.
template <class TBlockAttributes>
SweepState
SmallHeapBlockT<TBlockAttributes>::Sweep(RecyclerSweep& recyclerSweep, bool queuePendingSweep, bool allocable, ushort finalizeCount, bool hasPendingDispose)
{
    Assert(this->address != nullptr);
    Assert(this->segment != nullptr);
#if ENABLE_CONCURRENT_GC
    Assert(!this->isPendingConcurrentSweep);
#endif
#if DBG && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
    // In concurrent sweep pass1, we mark the object directly in the mark bit vector for objects allocated during the sweep to prevent them from getting swept during the ongoing sweep itself.
    // This will make the mark bit vector on the HeapBlockMap out-of-date w.r.t. these newly allocated objects.
    if (!this->wasAllocatedFromDuringSweep)
#endif
    {
        DebugOnly(VerifyMarkBitVector());
    }
    if (allocable)
    {
        // This block has been allocated from since the last GC.
        // We need to update its free bit vector so we can use it below.
        DebugOnly(ushort currentFreeCount = (ushort)this->GetFreeBitVector()->Count());
        Assert(freeCount == currentFreeCount);
#if ENABLE_PARTIAL_GC
        Assert(this->lastFreeCount == 0 || this->oldFreeCount == this->lastFreeCount);
#endif
        this->EnsureFreeBitVector();
        Assert(this->lastFreeCount >= this->freeCount);
#if ENABLE_PARTIAL_GC
        Assert(this->oldFreeCount >= this->freeCount);
#endif
#if ENABLE_PARTIAL_GC
        // Accounting for partial heuristics
        recyclerSweep.GetManager()->AddUnaccountedNewObjectAllocBytes(this);
#endif
    }
    Assert(this->freeCount == this->GetFreeBitVector()->Count());
    RECYCLER_SLOW_CHECK(CheckFreeBitVector(true));
    // markCount counts only valid, non-free marked objects.
    const uint localMarkCount = this->GetMarkCountForSweep();
    this->markCount = (ushort)localMarkCount;
    Assert(markCount <= objectCount - this->freeCount);
    const uint expectFreeCount = objectCount - localMarkCount;
    Assert(expectFreeCount >= this->freeCount);
    // Objects that are neither marked nor already free must be swept.
    uint expectSweepCount = expectFreeCount - this->freeCount;
    Assert(!this->IsLeafBlock() || finalizeCount == 0);
    Recycler * recycler = recyclerSweep.GetRecycler();
    RECYCLER_STATS_INC(recycler, heapBlockCount[this->GetHeapBlockType()]);
#if ENABLE_PARTIAL_GC
    if (recyclerSweep.GetManager()->DoAdjustPartialHeuristics() && allocable)
    {
        this->AdjustPartialUncollectedAllocBytes(recyclerSweep, expectSweepCount);
    }
#endif
    DebugOnly(this->lastUncollectedAllocBytes = 0);
    bool noRealObjectsMarked = (localMarkCount == 0);
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
    {
        Assert(!this->IsAnyFinalizableBlock() || !this->isPendingConcurrentSweepPrep);
        // This heap block is ready to be swept concurrently.
#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
        this->hasFinishedSweepObjects = false;
#endif
        this->isPendingConcurrentSweepPrep = false;
    }
#endif
    // Nothing marked, nothing finalizable, nothing pending dispose: the whole
    // block is garbage and can be released.
    const bool isAllFreed = (finalizeCount == 0 && noRealObjectsMarked && !hasPendingDispose);
    if (isAllFreed)
    {
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
        if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
        {
            AssertMsg(this->objectsAllocatedDuringConcurrentSweepCount == 0, "This block shouldn't be considered EMPTY if we allocated from it during concurrent sweep.");
        }
#endif
        recycler->NotifyFree(this);
        Assert(!this->HasPendingDisposeObjects());
#ifdef RECYCLER_TRACE
        recycler->PrintBlockStatus(this->heapBucket, this, _u("[**26**] ending sweep Pass1, state returned SweepStateEmpty."));
#endif
        return SweepStateEmpty;
    }
    RECYCLER_STATS_ADD(recycler, heapBlockFreeByteCount[this->GetHeapBlockType()], expectFreeCount * this->objectSize);
    Assert(!hasPendingDispose || (this->freeCount != 0));
    SweepState state = SweepStateSwept;
    if (hasPendingDispose)
    {
        state = SweepStatePendingDispose;
    }
    if (expectSweepCount == 0)
    {
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
        if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
        {
            this->ResetConcurrentSweepAllocationCounts();
        }
#endif
#endif
        // nothing has been freed
#ifdef RECYCLER_TRACE
        if (recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::ConcurrentSweepPhase) && CONFIG_FLAG_RELEASE(Verbose))
        {
            SweepState stateReturned = (this->freeCount == 0) ? SweepStateFull : state;
            CollectionState collectionState = recycler->collectionState;
            Output::Print(_u("[GC #%d] [HeapBucket 0x%p] HeapBlock 0x%p %s %d [CollectionState: %d] \n"), recycler->collectionCount, this->heapBucket, this, _u("[**37**] heapBlock swept. State returned:"), stateReturned, collectionState);
        }
#endif
        return (this->freeCount == 0) ? SweepStateFull : state;
    }
    RECYCLER_STATS_INC(recycler, heapBlockSweptCount[this->GetHeapBlockType()]);
    // We need to sweep in thread if there are any finalizable object.
    // So that the PrepareFinalize() can be called before concurrent sweep
    // and other finalizer. This gives the object an opportunity before any
    // other script can be ran to clean up their references/states that are not
    // valid since we determine the object is not live any more.
    //
    // An example is the ITrackable's tracking alias. The reference to the alias
    // object needs to be clear so that the reference will not be given out again
    // in other script during concurrent sweep or finalizer called before.
#if ENABLE_CONCURRENT_GC
    if (queuePendingSweep)
    {
        Assert(finalizeCount == 0);
        Assert(!this->HasPendingDisposeObjects());
        recyclerSweep.SetHasPendingSweepSmallHeapBlocks();
        RECYCLER_STATS_INC(recycler, heapBlockConcurrentSweptCount[this->GetHeapBlockType()]);
        // This heap block has objects that need to be swept concurrently.
        this->isPendingConcurrentSweep = true;
#ifdef RECYCLER_TRACE
        if (recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::ConcurrentSweepPhase))
        {
            recycler->PrintBlockStatus(this->heapBucket, this, _u("[**29**] heapBlock swept. State returned: SweepStatePendingSweep"));
        }
#endif
        return SweepStatePendingSweep;
    }
#else
    Assert(!recyclerSweep.IsBackground());
#endif
#ifdef RECYCLER_TRACE
    recycler->PrintBlockStatus(this->heapBucket, this, _u("[**16**] calling SweepObjects."));
#endif
    SweepObjects<SweepMode_InThread>(recycler);
    if (HasPendingDisposeObjects())
    {
        Assert(finalizeCount != 0);
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
        if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
        {
            AssertMsg(this->objectsAllocatedDuringConcurrentSweepCount == 0, "Allocations during concurrent sweep not supported for finalizable blocks.");
        }
#endif
        return SweepStatePendingDispose;
    }
    // Already swept, no more work to be done. Put it back to the queue.
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && !this->IsAnyFinalizableBlock())
    {
#ifdef RECYCLER_TRACE
        if (recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::ConcurrentSweepPhase) && CONFIG_FLAG_RELEASE(Verbose))
        {
            SweepState stateReturned = (this->freeCount == 0) ? SweepStateFull : state;
            CollectionState collectionState = recycler->collectionState;
            Output::Print(_u("[GC #%d] [HeapBucket 0x%p] HeapBlock 0x%p %s %d [CollectionState: %d] \n"), recycler->collectionCount, this->heapBucket, this, _u("[**38**] heapBlock swept. State returned:"), stateReturned, collectionState);
        }
#endif
        // We always need to check the free count as we may have allocated from this block during concurrent sweep.
        return (this->freeCount == 0) ? SweepStateFull : state;
    }
    else
#endif
    {
        return state;
    }
}
  1249. #if DBG
  1250. template <class TBlockAttributes>
  1251. uint
  1252. SmallHeapBlockT<TBlockAttributes>::GetMarkCountOnHeapBlockMap() const
  1253. {
  1254. uint heapBlockMapMarkCount = 0;
  1255. char* startPage = this->GetAddress();
  1256. char* endPage = this->GetEndAddress();
  1257. const HeapBlockMap& blockMap = this->GetRecycler()->heapBlockMap;
  1258. for (char* page = startPage; page < endPage; page += AutoSystemInfo::PageSize)
  1259. {
  1260. heapBlockMapMarkCount += blockMap.GetPageMarkCount(page);
  1261. }
  1262. return heapBlockMapMarkCount;
  1263. }
  1264. #endif
  1265. template <class TBlockAttributes>
  1266. template <SweepMode mode>
  1267. void
  1268. SmallHeapBlockT<TBlockAttributes>::SweepObjects(Recycler * recycler)
  1269. {
  1270. #if ENABLE_CONCURRENT_GC
  1271. Assert(mode == SweepMode_InThread || this->isPendingConcurrentSweep);
  1272. Assert(mode == SweepMode_InThread || !this->IsAnyFinalizableBlock());
  1273. #else
  1274. Assert(mode == SweepMode_InThread);
  1275. #endif
  1276. Assert(this->IsFreeBitsValid());
  1277. #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
  1278. AssertMsg(!hasFinishedSweepObjects, "Block in SweepObjects more than once during the ongoing sweep.");
  1279. if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
  1280. {
  1281. Assert(this->markCount != 0 || this->objectsAllocatedDuringConcurrentSweepCount > 0 || this->isForceSweeping || this->IsAnyFinalizableBlock());
  1282. }
  1283. else
  1284. #endif
  1285. {
  1286. Assert(this->markCount != 0 || this->isForceSweeping || this->IsAnyFinalizableBlock());
  1287. }
  1288. Assert(this->markCount == this->GetMarkCountForSweep());
  1289. #if DBG && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
  1290. // In concurrent sweep pass1, we mark the object directly in the mark bit vector for objects allocated during the sweep to prevent them from getting swept during the ongoing sweep itself.
  1291. // This will make the mark bit vector on the HeapBlockMap out-of-date w.r.t. these newly allocated objects.
  1292. if (!this->wasAllocatedFromDuringSweep)
  1293. #endif
  1294. {
  1295. DebugOnly(VerifyMarkBitVector());
  1296. }
  1297. SmallHeapBlockBitVector * marked = this->GetMarkedBitVector();
  1298. DebugOnly(uint expectedSweepCount = objectCount - freeCount - markCount);
  1299. #if DBG && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
  1300. if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
  1301. {
  1302. Assert(expectedSweepCount != 0 || this->isForceSweeping || this->objectsAllocatedDuringConcurrentSweepCount != 0);
  1303. }
  1304. else
  1305. #endif
  1306. {
  1307. Assert(expectedSweepCount != 0 || this->isForceSweeping);
  1308. }
  1309. DebugOnly(uint sweepCount = 0);
  1310. const uint localSize = objectSize;
  1311. const uint localObjectCount = objectCount;
  1312. const char* objectAddress = address;
  1313. uint objectBitDelta = this->GetObjectBitDelta();
  1314. for (uint objectIndex = 0, bitIndex = 0; objectIndex < localObjectCount; objectIndex++, bitIndex += objectBitDelta)
  1315. {
  1316. Assert(IsValidBitIndex(bitIndex));
  1317. RECYCLER_STATS_ADD(recycler, objectSweepScanCount, !isForceSweeping);
  1318. if (!marked->Test(bitIndex))
  1319. {
  1320. if (!this->GetFreeBitVector()->Test(bitIndex))
  1321. {
  1322. Assert((this->ObjectInfo(objectIndex) & ImplicitRootBit) == 0);
  1323. FreeObject* addr = (FreeObject*)objectAddress;
  1324. #if ENABLE_PARTIAL_GC && ENABLE_CONCURRENT_GC
  1325. if (mode != SweepMode_ConcurrentPartial)
  1326. #endif
  1327. {
  1328. // Don't call NotifyFree if we are doing a partial sweep.
  1329. // Since we are not actually collecting the object, we will do the NotifyFree later
  1330. // when the object is actually collected in a future Sweep.
  1331. recycler->NotifyFree((char *)addr, this->objectSize);
  1332. }
  1333. #if DBG
  1334. sweepCount++;
  1335. #endif
  1336. SweepObject<mode>(recycler, objectIndex, addr);
  1337. }
  1338. }
  1339. #if DBG
  1340. if (marked->Test(bitIndex))
  1341. {
  1342. Assert((ObjectInfo(objectIndex) & NewTrackBit) == 0);
  1343. }
  1344. #endif
  1345. objectAddress += localSize;
  1346. }
  1347. Assert(sweepCount == expectedSweepCount);
  1348. #if ENABLE_CONCURRENT_GC
  1349. this->isPendingConcurrentSweep = false;
  1350. #endif
  1351. #if ENABLE_PARTIAL_GC && ENABLE_CONCURRENT_GC
  1352. if (mode == SweepMode_ConcurrentPartial)
  1353. {
  1354. Assert(recycler->inPartialCollectMode);
  1355. // We didn't actually collect anything, so the free bit vector should still be valid.
  1356. Assert(IsFreeBitsValid());
  1357. }
  1358. else
  1359. #endif
  1360. {
  1361. // Update the free bit vector
  1362. // Need to update even if there are not swept object because finalizable object are
  1363. // consider freed but not on the free list.
  1364. ushort currentFreeCount = GetExpectedFreeObjectCount();
  1365. this->GetFreeBitVector()->OrComplimented(marked);
  1366. this->GetFreeBitVector()->Minus(this->GetInvalidBitVector());
  1367. #if ENABLE_PARTIAL_GC
  1368. this->oldFreeCount = this->lastFreeCount = this->freeCount = currentFreeCount;
  1369. #else
  1370. this->lastFreeCount = this->freeCount = currentFreeCount;
  1371. #endif
  1372. this->lastFreeObjectHead = this->freeObjectList;
  1373. }
  1374. // While allocations are allowed during concurrent sweep into still unswept blocks the
  1375. // free bit vectors are not valid yet.
  1376. #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && defined(RECYCLER_SLOW_CHECK_ENABLED)
  1377. if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && this->objectsAllocatedDuringConcurrentSweepCount == 0)
  1378. #endif
  1379. {
  1380. RECYCLER_SLOW_CHECK(CheckFreeBitVector(true));
  1381. }
  1382. #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
  1383. #if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
  1384. if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
  1385. {
  1386. this->ResetConcurrentSweepAllocationCounts();
  1387. }
  1388. #endif
  1389. #endif
  1390. // The count of marked, non-free objects should still be the same
  1391. Assert(this->markCount == this->GetMarkCountForSweep());
  1392. #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
  1393. DebugOnly(this->hasFinishedSweepObjects = true);
  1394. #endif
  1395. #ifdef RECYCLER_TRACE
  1396. recycler->PrintBlockStatus(this->heapBucket, this, _u("[**30**] finished SweepObjects, heapblock SWEPT."));
  1397. #endif
  1398. }
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::EnqueueProcessedObject(FreeObject ** list, void* objectAddress, uint index)
{
    // Prepend a single processed (freed) object onto the given free list,
    // scrubbing its memory and clearing its attributes.
    // - list: head of the free list to prepend to (may be the block's own freeObjectList)
    // - objectAddress: start address of the object inside this block
    // - index: the object's index within the block; must agree with objectAddress
    Assert(GetAddressIndex(objectAddress) == index);
    Assert(index != SmallHeapBlockT<TBlockAttributes>::InvalidAddressBit);
    Assert(this->objectCount != 1);
#if DBG || defined(RECYCLER_STATS)
    if (list == &this->freeObjectList)
    {
        // Record the free in the debug bit vector; the object must not
        // already be marked free (double-free check).
        BOOL isSet = this->GetDebugFreeBitVector()->TestAndSet(GetAddressBitIndex(objectAddress));
        Assert(!isSet);
    }
#endif
    // Scrub the freed memory before linking it into the list.
    FillFreeMemory(objectAddress, objectSize);
    FreeObject * freeObject = (FreeObject *)objectAddress;
    freeObject->SetNext(*list);
    *list = freeObject;
#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
    if (CONFIG_FLAG(ForceSoftwareWriteBarrier) && CONFIG_FLAG(RecyclerVerifyMark))
    {
        this->WBClearObject((char*)objectAddress);
    }
#endif
    // clear the attributes so that when we are allocating a leaf, we don't have to set the attribute
    this->ObjectInfo(index) = 0;
}
  1426. template <class TBlockAttributes>
  1427. void
  1428. SmallHeapBlockT<TBlockAttributes>::EnqueueProcessedObject(FreeObject ** list, FreeObject ** tail, void* objectAddress, uint index)
  1429. {
  1430. if (*tail == nullptr)
  1431. {
  1432. Assert(*list == nullptr);
  1433. *tail = (FreeObject *)objectAddress;
  1434. }
  1435. EnqueueProcessedObject(list, objectAddress, index);
  1436. }
//
// This method transfers the list of objects starting at list and ending
// at tail to the free list.
// In debug mode, it also makes sure that none of the objects that are
// being prepended to the free list are already free
//
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::TransferProcessedObjects(FreeObject * list, FreeObject * tail)
{
    Assert(tail != nullptr);
    Assert(list);
#if DBG || defined(RECYCLER_STATS)
    // Make sure the objects being transferred to the free list are not already freed.
    // Terminate the incoming list first so the walk below is bounded.
    tail->SetNext(nullptr);
    FreeObject * freeObject = list;
    while (freeObject != nullptr)
    {
        Assert(!this->IsInFreeObjectList(freeObject));
        // Mark each transferred object in the debug free bit vector; none
        // of them may have its bit set already.
        BOOL isSet = this->GetDebugFreeBitVector()->TestAndSet(GetAddressBitIndex(freeObject));
        Assert(!isSet);
        freeObject = freeObject->GetNext();
    }
#endif
    // Splice the processed list in front of the existing free list.
    tail->SetNext(this->freeObjectList);
    this->freeObjectList = list;
    RECYCLER_SLOW_CHECK(this->CheckDebugFreeBitVector(true));
}
  1465. template <class TBlockAttributes>
  1466. uint
  1467. SmallHeapBlockT<TBlockAttributes>::GetAndClearLastFreeCount()
  1468. {
  1469. uint lastFreeCount = this->lastFreeCount;
  1470. this->lastFreeCount = 0;
  1471. return lastFreeCount;
  1472. }
  1473. #ifdef RECYCLER_SLOW_CHECK_ENABLED
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::Check(bool expectFull, bool expectPending)
{
    // Slow consistency check of the block's free state.
    // - expectFull: caller expects the block to have no available free objects
    // - expectPending: caller expects the block to have pending-dispose objects
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
    // If we allocated from this block during the concurrent sweep the free bit vectors would be invalid.
#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
    if (!this->wasAllocatedFromDuringSweep)
#endif
#endif
    {
        // Verify whichever free-state representation is currently valid.
        if (this->IsFreeBitsValid())
        {
            CheckFreeBitVector(false);
        }
        else
        {
            CheckDebugFreeBitVector(false);
        }
    }
    Assert(expectPending == HasAnyDisposeObjects());
    // As the blocks are added to the SLIST and used from there during concurrent sweep, the expectFull assertion doesn't hold anymore.
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
    if (!CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
#endif
    {
        if (this->isInAllocator || this->isClearedFromAllocator)
        {
            // A block owned by (or just cleared from) an allocator must be
            // expected full and cannot have pending dispose objects.
            Assert(expectFull && !expectPending);
        }
        else
        {
            Assert(expectFull == (!this->HasFreeObject() && !HasAnyDisposeObjects()));
        }
    }
}
  1510. template <class TBlockAttributes>
  1511. template <typename TBlockType>
  1512. bool
  1513. SmallHeapBlockT<TBlockAttributes>::GetFreeObjectListOnAllocatorImpl(FreeObject ** freeObjectList)
  1514. {
  1515. // not during collection, the allocator has the current info
  1516. SmallHeapBlockAllocator<TBlockType> * head =
  1517. &((HeapBucketT<TBlockType> *)this->heapBucket)->allocatorHead;
  1518. SmallHeapBlockAllocator<TBlockType> * current = head;
  1519. do
  1520. {
  1521. if (current->GetHeapBlock() == this)
  1522. {
  1523. if (current->IsFreeListAllocMode())
  1524. {
  1525. *freeObjectList = current->freeObjectList;
  1526. return true;
  1527. }
  1528. return false;
  1529. }
  1530. current = current->GetNext();
  1531. }
  1532. while (current != head);
  1533. return false;
  1534. }
  1535. template <class TBlockAttributes>
  1536. void
  1537. SmallHeapBlockT<TBlockAttributes>::CheckDebugFreeBitVector(bool isCollecting)
  1538. {
  1539. FreeObject * freeObject = this->freeObjectList;
  1540. if (!isCollecting)
  1541. {
  1542. this->GetFreeObjectListOnAllocator(&freeObject);
  1543. }
  1544. uint verifyFreeCount = 0;
  1545. while (freeObject != nullptr)
  1546. {
  1547. uint index = this->GetAddressIndex(freeObject);
  1548. Assert(index != SmallHeapBlockT<TBlockAttributes>::InvalidAddressBit);
  1549. Assert(this->GetDebugFreeBitVector()->Test(GetAddressBitIndex(freeObject)));
  1550. verifyFreeCount++;
  1551. freeObject = freeObject->GetNext();
  1552. }
  1553. Assert(this->GetDebugFreeBitVector()->Count() == verifyFreeCount);
  1554. Assert(verifyFreeCount <= this->lastFreeCount);
  1555. }
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::CheckFreeBitVector(bool isCollecting)
{
    // Slow check: verify the cached free bit vector against the actual free
    // object list (plus pending-dispose/disposed objects on finalizable blocks).
    // during collection, the heap block has the current info when we are verifying
    if (!isCollecting)
    {
        FreeObject * freeObjectList;
        this->GetFreeObjectListOnAllocator(&freeObjectList);
        if (freeObjectList != this->freeObjectList)
        {
            // allocator has the current info and if we have already allocated some memory,
            // the free bit vector isn't really correct, so we can't verify it.
            // Just verify the debug free bit vector
            this->CheckDebugFreeBitVector(false);
            return;
        }
    }
    SmallHeapBlockBitVector * free = this->GetFreeBitVector();
    // Shouldn't be any invalid bits set in the free bit vector
    SmallHeapBlockBitVector temp;
    temp.Copy(free);
    temp.And(this->GetInvalidBitVector());
    Assert(temp.IsAllClear());
    // Every object on the free list must have both its debug free bit and
    // its free bit set.
    uint verifyFreeCount = 0;
    FreeObject * freeObject = this->freeObjectList;
    while (freeObject != nullptr)
    {
        uint bitIndex = GetAddressBitIndex(freeObject);
        Assert(IsValidBitIndex(bitIndex));
        Assert(this->GetDebugFreeBitVector()->Test(bitIndex));
        Assert(free->Test(bitIndex));
        verifyFreeCount++;
        freeObject = freeObject->GetNext();
    }
    Assert(this->GetDebugFreeBitVector()->Count() == verifyFreeCount);
    Assert(this->freeCount == this->GetFreeBitVector()->Count());
    if (this->IsAnyFinalizableBlock())
    {
        auto finalizableBlock = this->AsFinalizableBlock<TBlockAttributes>();
        // Include pending dispose objects
        // (marked free in the free bit vector but not on the free list, so
        // their debug free bit must be clear)
        finalizableBlock->ForEachPendingDisposeObject([&] (uint index) {
            uint bitIndex = ((uint)index) * this->GetObjectBitDelta();
            Assert(IsValidBitIndex(bitIndex));
            Assert(!this->GetDebugFreeBitVector()->Test(bitIndex));
            Assert(free->Test(bitIndex));
            verifyFreeCount++;
        });
        // Include disposed objects
        verifyFreeCount += finalizableBlock->CheckDisposedObjectFreeBitVector();
    }
    Assert(verifyFreeCount == this->freeCount);
    Assert(verifyFreeCount <= this->lastFreeCount);
    Assert(this->IsFreeBitsValid());
}
  1611. #endif
  1612. template <class TBlockAttributes>
  1613. typename SmallHeapBlockT<TBlockAttributes>::SmallHeapBlockBitVector *
  1614. SmallHeapBlockT<TBlockAttributes>::EnsureFreeBitVector(bool isCollecting)
  1615. {
  1616. if (this->IsFreeBitsValid())
  1617. {
  1618. // the free object list hasn't change, so the free vector should be valid
  1619. RECYCLER_SLOW_CHECK(CheckFreeBitVector(isCollecting));
  1620. return this->GetFreeBitVector();
  1621. }
  1622. return BuildFreeBitVector();
  1623. }
  1624. template <class TBlockAttributes>
  1625. typename SmallHeapBlockT<TBlockAttributes>::SmallHeapBlockBitVector *
  1626. SmallHeapBlockT<TBlockAttributes>::BuildFreeBitVector()
  1627. {
  1628. SmallHeapBlockBitVector * free = this->GetFreeBitVector();
  1629. this->freeCount = this->BuildFreeBitVector(free);
  1630. this->lastFreeObjectHead = this->freeObjectList;
  1631. return free;
  1632. }
template <class TBlockAttributes>
ushort
SmallHeapBlockT<TBlockAttributes>::BuildFreeBitVector(SmallHeapBlockBitVector * free)
{
    // Rebuild the given bit vector from the free object list and return the
    // number of free objects found. On finalizable blocks, pending-dispose
    // and disposed objects are also counted as free.
    free->ClearAll();
    ushort freeCount = 0;
    FreeObject * freeObject = this->freeObjectList;
    while (freeObject != nullptr)
    {
        uint bitIndex = GetAddressBitIndex(freeObject);
        Assert(IsValidBitIndex(bitIndex));
        Assert(this->GetDebugFreeBitVector()->Test(bitIndex));
        free->Set(bitIndex);
        freeCount++;
        freeObject = freeObject->GetNext();
    }
    Assert(this->GetDebugFreeBitVector()->Count() == freeCount);
    if (this->IsAnyFinalizableBlock())
    {
        auto finalizableBlock = this->AsFinalizableBlock<TBlockAttributes>();
        // Include pending dispose objects
        // (treated as free even though they are not on the free list yet,
        // so their debug free bit must still be clear)
        finalizableBlock->ForEachPendingDisposeObject([&] (uint index) {
            uint bitIndex = ((uint)index) * this->GetObjectBitDelta();
            Assert(IsValidBitIndex(bitIndex));
            Assert(!this->GetDebugFreeBitVector()->Test(bitIndex));
            free->Set(bitIndex);
            freeCount++;
        });
        // Include disposed objects
        freeCount += finalizableBlock->AddDisposedObjectFreeBitVector(free);
    }
    Assert(freeCount <= this->lastFreeCount);
    return freeCount;
}
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::MarkImplicitRoots()
{
    // Set the mark bit for every object flagged as an implicit root, update
    // the block's mark count and, in DBG builds, the per-page mark counts
    // kept by the heap block map.
    uint localObjectCount = this->GetObjectCount();
    uint localObjectBitDelta = this->GetObjectBitDelta();
    uint localMarkCount = 0;
    SmallHeapBlockBitVector * mark = this->GetMarkedBitVector();
#if DBG
    uint localObjectSize = this->GetObjectSize();
    Assert(localObjectSize <= HeapConstants::MaxMediumObjectSize);
    ushort markCountPerPage[TBlockAttributes::PageCount];
    for (uint i = 0; i < TBlockAttributes::PageCount; i++)
    {
        markCountPerPage[i] = 0;
    }
#endif
    for (uint i = 0; i < localObjectCount; i++)
    {
        // REVIEW: This may include free object. It is okay to mark them and scan them
        // But kind inefficient.
        if (this->ObjectInfo(i) & ImplicitRootBit)
        {
#if DBG
            {
                // Attribute this mark to the page the object starts on.
                int index = (i * localObjectSize) / AutoSystemInfo::PageSize;
                Assert(index < TBlockAttributes::PageCount);
                markCountPerPage[index]++;
            }
#endif
            mark->Set(localObjectBitDelta * i);
            localMarkCount++;
        }
    }
    Assert(mark->Count() == localMarkCount);
    this->markCount = (ushort)localMarkCount;
#if DBG
    // Keep the heap block map's per-page mark counts in sync with what we
    // just marked.
    HeapBlockMap& map = this->GetRecycler()->heapBlockMap;
    for (uint i = 0; i < TBlockAttributes::PageCount; i++)
    {
        map.SetPageMarkCount(this->address + (i * AutoSystemInfo::PageSize), markCountPerPage[i]);
    }
#endif
}
  1711. template <class TBlockAttributes>
  1712. void
  1713. SmallHeapBlockT<TBlockAttributes>::EnumerateObjects(ObjectInfoBits infoBits, void (*CallBackFunction)(void * address, size_t size))
  1714. {
  1715. ForEachAllocatedObject(infoBits, [=](uint index, void * objectAddress)
  1716. {
  1717. CallBackFunction(objectAddress, this->objectSize);
  1718. });
  1719. }
template <class TBlockAttributes>
inline
void SmallHeapBlockT<TBlockAttributes>::FillFreeMemory(__in_bcount(size) void * address, size_t size)
{
    // Scrub freed object memory. With memory verification enabled the verify
    // fill pattern is written instead; leaf blocks (and, when allocation-
    // thread write-barrier pages are enabled, barrier blocks) are skipped
    // entirely, and all other blocks are zeroed.
#ifdef RECYCLER_MEMORY_VERIFY
    if (this->heapBucket->heapInfo->recycler->VerifyEnabled())
    {
        memset(address, Recycler::VerifyMemFill, size);
        return;
    }
#endif
    if (this->IsLeafBlock()
#ifdef RECYCLER_WRITE_BARRIER_ALLOC_THREAD_PAGE
        || this->IsWithBarrier()
#endif
        )
    {
        return;
    }
    // REVIEW: Do DbgMemFill on debug build?
#if defined(_M_IX86)
    // On x86, hand-unroll the zeroing for sizes of 2/4/6 qwords
    // (16/32/48 bytes); anything else falls back to memset.
    uint qwordCount = size / sizeof(uint64);
    switch (qwordCount)
    {
    case 2:
        ((uint64*)address)[0] = 0;
        ((uint64*)address)[1] = 0;
        break;
    case 4:
        ((uint64*)address)[0] = 0;
        ((uint64*)address)[1] = 0;
        ((uint64*)address)[2] = 0;
        ((uint64*)address)[3] = 0;
        break;
    case 6:
        ((uint64*)address)[0] = 0;
        ((uint64*)address)[1] = 0;
        ((uint64*)address)[2] = 0;
        ((uint64*)address)[3] = 0;
        ((uint64*)address)[4] = 0;
        ((uint64*)address)[5] = 0;
        break;
    default:
        memset(address, 0, size);
    }
#else
    memset(address, 0, size);
#endif
}
  1769. #ifdef RECYCLER_MEMORY_VERIFY
  1770. template <class TBlockAttributes>
  1771. void SmallHeapBlockT<TBlockAttributes>::VerifyBumpAllocated(_In_ char * bumpAllocateAddress)
  1772. {
  1773. ushort verifyFinalizeCount = 0;
  1774. Recycler * recycler = this->heapBucket->heapInfo->recycler;
  1775. char * memBlock = this->GetAddress();
  1776. for (uint i = 0; i < objectCount; i++)
  1777. {
  1778. if (memBlock >= bumpAllocateAddress)
  1779. {
  1780. Recycler::VerifyCheckFill(memBlock + sizeof(FreeObject), this->GetObjectSize() - sizeof(FreeObject));
  1781. }
  1782. else
  1783. {
  1784. recycler->VerifyCheckPad(memBlock, this->GetObjectSize());
  1785. if ((this->ObjectInfo(i) & FinalizeBit) != 0)
  1786. {
  1787. if (this->IsAnyFinalizableBlock())
  1788. {
  1789. verifyFinalizeCount++;
  1790. }
  1791. else
  1792. {
  1793. Recycler::VerifyCheck(false, _u("Non-Finalizable block should not have finalizable objects"),
  1794. this->GetAddress(), &this->ObjectInfo(i));
  1795. }
  1796. }
  1797. }
  1798. memBlock += this->GetObjectSize();
  1799. }
  1800. }
template <class TBlockAttributes>
void SmallHeapBlockT<TBlockAttributes>::Verify(bool pendingDispose)
{
    // Full verification pass over every object slot in the block:
    // free slots must still carry the free fill pattern and a sane next
    // pointer, allocated slots must have intact pad bytes, and the
    // finalize count must match the number of objects carrying FinalizeBit.
    ushort verifyFinalizeCount = 0;
    SmallHeapBlockBitVector tempFree;
    SmallHeapBlockBitVector *free = &tempFree;
    SmallHeapBlockBitVector tempPending;
    this->BuildFreeBitVector(free);
    Recycler * recycler = this->heapBucket->heapInfo->recycler;
    char * memBlock = this->GetAddress();
    uint objectBitDelta = this->GetObjectBitDelta();
    Recycler::VerifyCheck(!pendingDispose || this->IsAnyFinalizableBlock(),
        _u("Non-finalizable block shouldn't be disposing. May have corrupted block type."),
        this->GetAddress(), (void *)&this->heapBlockType);
    if (HasPendingDisposeObjects())
    {
        Assert(pendingDispose);
        // Pending object are not free yet, they don't have memory cleared.
        this->AsFinalizableBlock<TBlockAttributes>()->ForEachPendingDisposeObject([&](uint index) {
            uint bitIndex = ((uint)index) * this->GetObjectBitDelta();
            Assert(IsValidBitIndex(bitIndex));
            Assert(!this->GetDebugFreeBitVector()->Test(bitIndex));
            Assert(free->Test(bitIndex));
            tempPending.Set(bitIndex);
            // We are a pending dispose block, so the finalize count hasn't been update yet.
            // Including the pending objects in the finalize count
            verifyFinalizeCount++;
        });
    }
    for (uint i = 0; i < objectCount; i++)
    {
        if (free->Test(i * objectBitDelta))
        {
            // Free slot — skip pending-dispose slots, which still hold
            // object data and have not been scrubbed.
            if (!tempPending.Test(i * objectBitDelta))
            {
                // The free object's next pointer must be null or point to
                // another free object inside this block.
                char * nextFree = (char *)((FreeObject *)memBlock)->GetNext();
                Recycler::VerifyCheck(nextFree == nullptr
                    || (nextFree >= address && nextFree < this->GetEndAddress()
                    && free->Test(GetAddressBitIndex(nextFree))),
                    _u("SmallHeapBlock memory written to after freed"), memBlock, memBlock);
                Recycler::VerifyCheckFill(memBlock + sizeof(FreeObject), this->GetObjectSize() - sizeof(FreeObject));
            }
        }
        else
        {
            if (explicitFreeBits.Test(i * objectBitDelta))
            {
                // Explicitly freed object: its next pointer may also point at
                // an object of the same size in a different heap block.
                char * nextFree = (char *)((FreeObject *)memBlock)->GetNext();
                HeapBlock* nextFreeHeapBlock = this;
                if (nextFree != nullptr)
                {
                    nextFreeHeapBlock = recycler->FindHeapBlock(nextFree);
                }
                Recycler::VerifyCheck(nextFree == nullptr
                    || (nextFree >= address && nextFree < this->GetEndAddress()
                    && explicitFreeBits.Test(GetAddressBitIndex(nextFree)))
                    || nextFreeHeapBlock->GetObjectSize(nextFree) == this->objectSize,
                    _u("SmallHeapBlock memory written to after freed"), memBlock, memBlock);
                recycler->VerifyCheckPadExplicitFreeList(memBlock, this->GetObjectSize());
            }
            else
            {
                // Live object: verify its pad bytes.
                recycler->VerifyCheckPad(memBlock, this->GetObjectSize());
            }
            if ((this->ObjectInfo(i) & FinalizeBit) != 0)
            {
                if (this->IsAnyFinalizableBlock())
                {
                    verifyFinalizeCount++;
                }
                else
                {
                    Recycler::VerifyCheck(false, _u("Non-Finalizable block should not have finalizable objects"),
                        this->GetAddress(), &this->ObjectInfo(i));
                }
            }
        }
        memBlock += this->GetObjectSize();
    }
    if (this->IsAnyFinalizableBlock())
    {
        Recycler::VerifyCheck(this->AsFinalizableBlock<TBlockAttributes>()->finalizeCount == verifyFinalizeCount,
            _u("SmallHeapBlock finalize count mismatch"), this->GetAddress(), &this->AsFinalizableBlock<TBlockAttributes>()->finalizeCount);
    }
    else
    {
        Assert(verifyFinalizeCount == 0);
    }
}
  1890. #endif
  1891. #if ENABLE_MEM_STATS
template <class TBlockAttributes>
void
SmallHeapBlockT<TBlockAttributes>::AggregateBlockStats(HeapBucketStats& stats, bool isAllocatorBlock, FreeObject* freeObjectList, bool isBumpAllocated)
{
    // Accumulate this block's live object/byte usage into the bucket stats.
    // - isAllocatorBlock: whether the caller is enumerating allocator-held blocks
    // - freeObjectList: the allocator's current list head (or bump cursor), if any
    // - isBumpAllocated: freeObjectList is actually the bump-allocation cursor
    if (this->segment == nullptr || this->IsInAllocator() != isAllocatorBlock)
    {
        return; // skip empty blocks, or blocks mismatching isInAllocator to avoid double count
    }
    DUMP_FRAGMENTATION_STATS_ONLY(stats.totalBlockCount++);
    ushort blockObjectCount = this->objectCount;
    BVIndex blockFreeCount = this->GetFreeBitVector()->Count();
    ushort blockObjectSize = this->objectSize;
    // Live-object count for this block (deliberately shadows the member objectCount).
    uint objectCount = 0;
    if (isBumpAllocated)
    {
        // Everything below the bump cursor has been handed out.
        objectCount = static_cast<uint>(((char*) freeObjectList - this->address) / blockObjectSize);
    }
    else
    {
        objectCount = blockObjectCount;
        // If this is an allocator block, remove the free objects on the allocator
        // from this count. Otherwise, remove the free objects found in the free bit vector
        if (freeObjectList)
        {
            Assert(isAllocatorBlock);
            FreeObject* next = freeObjectList->GetNext();
            while (next != nullptr && next != freeObjectList)
            {
                objectCount--;
                next = next->GetNext();
            }
        }
        else
        {
            objectCount -= blockFreeCount;
        }
    }
    DUMP_FRAGMENTATION_STATS_ONLY(stats.objectCount += objectCount);
    stats.objectByteCount += (objectCount * blockObjectSize);
    stats.totalByteCount += this->GetPageCount() * AutoSystemInfo::PageSize;
#ifdef DUMP_FRAGMENTATION_STATS
    if (!isAllocatorBlock)
    {
        if (this->IsAnyFinalizableBlock())
        {
            auto finalizableBlock = this->AsFinalizableBlock<TBlockAttributes>();
            stats.finalizeCount += (finalizableBlock->GetFinalizeCount());
        }
    }
#endif
}
  1943. #endif
  1944. #ifdef RECYCLER_PERF_COUNTERS
  1945. template <class TBlockAttributes>
  1946. void
  1947. SmallHeapBlockT<TBlockAttributes>::UpdatePerfCountersOnFree()
  1948. {
  1949. Assert(markCount == 0);
  1950. Assert(this->IsFreeBitsValid());
  1951. size_t usedCount = (objectCount - freeCount);
  1952. size_t usedBytes = usedCount * objectSize;
  1953. RECYCLER_PERF_COUNTER_SUB(SmallHeapBlockLiveObject, usedCount);
  1954. RECYCLER_PERF_COUNTER_SUB(SmallHeapBlockLiveObjectSize, usedBytes);
  1955. RECYCLER_PERF_COUNTER_SUB(SmallHeapBlockFreeObjectSize, this->GetPageCount() * AutoSystemInfo::PageSize - usedBytes);
  1956. RECYCLER_PERF_COUNTER_SUB(LiveObject, usedCount);
  1957. RECYCLER_PERF_COUNTER_SUB(LiveObjectSize, usedBytes);
  1958. RECYCLER_PERF_COUNTER_SUB(FreeObjectSize, this->GetPageCount() * AutoSystemInfo::PageSize - usedBytes);
  1959. }
  1960. #endif
  1961. #ifdef PROFILE_RECYCLER_ALLOC
  1962. template <class TBlockAttributes>
  1963. void *
  1964. SmallHeapBlockT<TBlockAttributes>::GetTrackerData(void * address)
  1965. {
  1966. Assert(Recycler::DoProfileAllocTracker());
  1967. ushort index = this->GetAddressIndex(address);
  1968. Assert(index != SmallHeapBlockT<TBlockAttributes>::InvalidAddressBit);
  1969. return this->GetTrackerDataArray()[index];
  1970. }
  1971. template <class TBlockAttributes>
  1972. void
  1973. SmallHeapBlockT<TBlockAttributes>::SetTrackerData(void * address, void * data)
  1974. {
  1975. Assert(Recycler::DoProfileAllocTracker());
  1976. ushort index = this->GetAddressIndex(address);
  1977. Assert(index != SmallHeapBlockT<TBlockAttributes>::InvalidAddressBit);
  1978. void* existingTrackerData = this->GetTrackerDataArray()[index];
  1979. Assert((existingTrackerData == nullptr || data == nullptr) ||
  1980. (existingTrackerData == &Recycler::TrackerData::ExplicitFreeListObjectData || data == &Recycler::TrackerData::ExplicitFreeListObjectData));
  1981. this->GetTrackerDataArray()[index] = data;
  1982. }
  1983. template <class TBlockAttributes>
  1984. void **
  1985. SmallHeapBlockT<TBlockAttributes>::GetTrackerDataArray()
  1986. {
  1987. // See SmallHeapBlockT<TBlockAttributes>::GetAllocPlusSize for layout description
  1988. return (void **)((char *)this - SmallHeapBlockT<TBlockAttributes>::GetAllocPlusSize(this->objectCount));
  1989. }
  1990. #endif
  1991. #ifdef RECYCLER_WRITE_BARRIER
  1992. template <class TBlockAttributes>
  1993. bool
  1994. SmallHeapBlockT<TBlockAttributes>::IsWithBarrier() const
  1995. {
  1996. return IsNormalWriteBarrierBlock() || IsFinalizableWriteBarrierBlock();
  1997. }
  1998. #endif
namespace Memory
{
    // Explicitly instantiate the template for both the small and medium
    // allocation block attribute sets so their definitions are emitted here.
    template class SmallHeapBlockT<SmallAllocationBlockAttributes>;
    template class SmallHeapBlockT<MediumAllocationBlockAttributes>;
};
  2005. #define TBlockTypeAttributes SmallAllocationBlockAttributes
  2006. #include "SmallBlockDeclarations.inl"
  2007. #undef TBlockTypeAttributes
  2008. #define TBlockTypeAttributes MediumAllocationBlockAttributes
  2009. #include "SmallBlockDeclarations.inl"
  2010. #undef TBlockTypeAttributes