Recycler.inl 21 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
// Integrate an externally allocated block into the recycler's small-object heap so
// that the objects inside it become GC-managed.
// blockAddress - start of the block to adopt
// segment      - PageSegment the block belongs to
// allocSize    - aligned size of each object slot (must be a small-object size)
// objectSize   - requested object size; only used for allocation tracking
// Returns true if the heap accepted the block.
// NOTE(review): only callable from a pre-collection callback (asserted below).
template <ObjectInfoBits attributes>
bool
Recycler::IntegrateBlock(char * blockAddress, PageSegment * segment, size_t allocSize, size_t objectSize)
{
    // We only support no bit and leaf bit right now, where we don't need to set the object info in either case
    CompileAssert(attributes == NoBit || attributes == LeafBit);

    // Currently only small object is supported
    Assert(HeapInfo::IsSmallObject(allocSize));
    Assert(HeapInfo::GetAlignedSizeNoCheck(allocSize) == allocSize);

    // This should only happen during a pre-collection callback.
    Assert(this->collectionState == Collection_PreCollection);

    bool success = this->GetHeapInfoForAllocation<attributes>()->IntegrateBlock<attributes>(blockAddress, segment, this, allocSize);
#ifdef PROFILE_RECYCLER_ALLOC
    // Record the integrated block in the allocation profiler with empty track data.
    if (success)
    {
        TrackAllocData trackAllocData;
        ClearTrackAllocInfo(&trackAllocData);
        TrackIntegrate(blockAddress, SmallAllocationBlockAttributes::PageCount * AutoSystemInfo::PageSize, allocSize, objectSize, trackAllocData);
    }
#endif
    return success;
}
  28. namespace Memory
  29. {
  30. class DummyVTableObject : public FinalizableObject
  31. {
  32. public:
  33. virtual void Finalize(bool isShutdown) final {}
  34. virtual void Dispose(bool isShutdown) final {}
  35. virtual void Mark(Recycler * recycler) final {}
  36. virtual void Trace(IRecyclerHeapMarkingContext* markingContext) final {}
  37. };
  38. }
// Core allocation path for recycler memory; inlined into the typed Alloc wrappers.
// attributes - compile-time ObjectInfoBits describing the object (leaf/track/barrier/...)
// nothrow    - when true, failure returns nullptr; when false, OOM is raised instead
// size       - requested byte size; must be non-zero
// Returns the allocated memory block, or nullptr only when nothrow is true.
template <ObjectInfoBits attributes, bool nothrow>
inline char *
Recycler::AllocWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    // All tracked objects are client tracked or recycler host visited objects
#ifndef RECYCLER_VISITED_HOST
    CompileAssert((attributes & TrackBit) == 0 || (attributes & ClientTrackedBit) != 0);
#else
    CompileAssert((attributes & TrackBit) == 0 || (attributes & ClientTrackedBit) != 0 || (attributes & RecyclerVisitedHostBit) != 0);
#endif
    Assert(this->enableScanImplicitRoots || (attributes & ImplicitRootBit) == 0);
    AssertMsg(this->disableThreadAccessCheck || this->mainThreadId == GetCurrentThreadContextId(),
        "Allocating from the recycler can only be done on the main thread");
    Assert(size != 0);
    AssertMsg(collectionState != Collection_PreCollection, "we cannot have allocation in precollection callback");

#if ENABLE_CONCURRENT_GC
    // We shouldn't be allocating memory when we are running GC in thread, including finalizers
    Assert(this->IsConcurrentState() || !this->CollectionInProgress() || this->IsAllocatableCallbackState());
#else
    // We shouldn't be allocating memory when we are running GC in thread, including finalizers
    Assert(!this->CollectionInProgress() || this->IsAllocatableCallbackState());
#endif

    // There are some cases where we allow allocation during heap enum that doesn't affect the enumeration
    // Those should be really rare and not rely upon.
    Assert(!isHeapEnumInProgress || allowAllocationDuringHeapEnum);

#ifdef PROFILE_RECYCLER_ALLOC
    TrackAllocData trackAllocData;
    ClearTrackAllocInfo(&trackAllocData);
#endif

    size_t allocSize = size;
#ifdef RECYCLER_MEMORY_VERIFY
    if (this->VerifyEnabled())
    {
        // Reserve space for the verification pad plus a stored size_t.
        allocSize += verifyPad + sizeof(size_t);
        if (allocSize < size)
        {
            // An overflow occurred- if nothrow is false, we can throw here
            // Otherwise, return null
            if (nothrow == false)
            {
                this->OutOfMemory();
            }
            else
            {
                return nullptr;
            }
        }
    }
#endif

    char* memBlock = nullptr;
    HeapInfo * heapInfo = this->GetHeapInfoForAllocation<attributes>();
#if GLOBAL_ENABLE_WRITE_BARRIER
    if (CONFIG_FLAG(ForceSoftwareWriteBarrier))
    {
        if ((attributes & InternalObjectInfoBitMask) != LeafBit)
        {
            // none leaf allocation or Finalizable Leaf allocation, adding WithBarrierBit
            memBlock = RealAlloc<(ObjectInfoBits)((attributes | WithBarrierBit) & InternalObjectInfoBitMask), nothrow>(heapInfo, allocSize);
        }
        else
        {
            // pure Leaf allocation
            memBlock = RealAlloc<(ObjectInfoBits)(attributes & InternalObjectInfoBitMask), nothrow>(heapInfo, allocSize);
        }
    }
    else
#endif
    {
        memBlock = RealAlloc<(ObjectInfoBits)(attributes & InternalObjectInfoBitMask), nothrow>(heapInfo, allocSize);
    }

    if (nothrow)
    {
        // If we aren't allowed to throw, then the memblock returned could be null
        // so we should check for that and bail out early here
        if (memBlock == nullptr)
        {
            return nullptr;
        }
    }

#ifdef PROFILE_RECYCLER_ALLOC
    TrackAlloc(memBlock, size, trackAllocData, (CUSTOM_CONFIG_ISENABLED(GetRecyclerFlagsTable(), Js::TraceObjectAllocationFlag) && (attributes & TraceBit) == TraceBit));
#endif
    RecyclerMemoryTracking::ReportAllocation(this, memBlock, size);

    // Account for the new live object in the perf counters, using the aligned size.
    RECYCLER_PERF_COUNTER_INC(LiveObject);
    RECYCLER_PERF_COUNTER_ADD(LiveObjectSize, HeapInfo::GetAlignedSizeNoCheck(allocSize));
    RECYCLER_PERF_COUNTER_SUB(FreeObjectSize, HeapInfo::GetAlignedSizeNoCheck(allocSize));

    if (HeapInfo::IsSmallBlockAllocation(HeapInfo::GetAlignedSizeNoCheck(allocSize)))
    {
        RECYCLER_PERF_COUNTER_INC(SmallHeapBlockLiveObject);
        RECYCLER_PERF_COUNTER_ADD(SmallHeapBlockLiveObjectSize, HeapInfo::GetAlignedSizeNoCheck(allocSize));
        RECYCLER_PERF_COUNTER_SUB(SmallHeapBlockFreeObjectSize, HeapInfo::GetAlignedSizeNoCheck(allocSize));
    }
    else
    {
        RECYCLER_PERF_COUNTER_INC(LargeHeapBlockLiveObject);
        RECYCLER_PERF_COUNTER_ADD(LargeHeapBlockLiveObjectSize, HeapInfo::GetAlignedSizeNoCheck(allocSize));
        RECYCLER_PERF_COUNTER_SUB(LargeHeapBlockFreeObjectSize, HeapInfo::GetAlignedSizeNoCheck(allocSize));
    }

#ifdef RECYCLER_MEMORY_VERIFY
    size_t alignedSize = HeapInfo::GetAlignedSizeNoCheck(allocSize);
    if (HeapInfo::IsMediumObject(allocSize))
    {
#if SMALLBLOCK_MEDIUM_ALLOC
        alignedSize = HeapInfo::GetMediumObjectAlignedSizeNoCheck(allocSize);
#else
        // Medium objects came from a large heap block here (asserted below); recover
        // the real object size from the large-object header so the check pad is
        // written at the correct offset.
        HeapBlock* heapBlock = this->FindHeapBlock(memBlock);
        Assert(heapBlock->IsLargeHeapBlock());
        LargeHeapBlock* largeHeapBlock = (LargeHeapBlock*) heapBlock;
        LargeObjectHeader* header = nullptr;
        if (largeHeapBlock->GetObjectHeader(memBlock, &header))
        {
            size = header->objectSize - (verifyPad + sizeof(size_t));
            alignedSize = HeapInfo::GetAlignedSizeNoCheck(header->objectSize);
        }
#endif
    }
    this->FillCheckPad(memBlock, size, alignedSize);
#endif

#ifdef RECYCLER_WRITE_BARRIER
    SwbVerboseTrace(this->GetRecyclerFlagsTable(), _u("Allocated SWB memory: 0x%p\n"), memBlock);
#pragma prefast(suppress:6313, "attributes is a template parameter and can be 0")
    if ((attributes & NewTrackBit) &&
#if GLOBAL_ENABLE_WRITE_BARRIER && defined(RECYCLER_STATS)
        true // Trigger WB to force re-mark, to work around old mark false positive
#else
        (attributes & WithBarrierBit)
#endif
        )
    {
        // For objects allocated with NewTrackBit, we need to trigger the write barrier since
        // there could be a GC triggered by an allocation in the constructor, and we'd miss
        // calling track on the partially constructed object. To deal with this, we set the write
        // barrier on all the pages of objects allocated with the NewTrackBit
        RecyclerWriteBarrierManager::WriteBarrier(memBlock, size);
    }
#endif

#if ENABLE_PARTIAL_GC
#pragma prefast(suppress:6313, "attributes is a template parameter and can be 0")
    if (attributes & ClientTrackedBit)
    {
        if (this->inPartialCollectMode)
        {
            // with partial GC, we don't traverse ITrackable
            // So we have to mark all objects that could be in the ITrackable graph
            // This includes JavascriptDispatch and HostVariant
            this->clientTrackedObjectList.Prepend(&this->clientTrackedObjectAllocator, memBlock);
        }
        else
        {
#if ENABLE_CONCURRENT_GC
            Assert(this->hasBackgroundFinishPartial || this->clientTrackedObjectList.Empty());
#else
            Assert(this->clientTrackedObjectList.Empty());
#endif
        }
    }
#endif

#ifdef RECYCLER_PAGE_HEAP
    VerifyPageHeapFillAfterAlloc(memBlock, size, attributes);
#endif
    return memBlock;
}
// Allocate recycler memory and guarantee the caller sees zeroed storage.
// Most recycler memory is already zeroed; this only scrubs the parts that are not
// (the free-list next pointer, or whole small/leaf blocks — see branches below).
// attributes/nothrow/size: same contract as AllocWithAttributesInlined.
// Returns the zeroed block, or nullptr only when nothrow is true.
template <ObjectInfoBits attributes, bool nothrow>
inline char *
Recycler::AllocZeroWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    char* obj = AllocWithAttributesInlined<attributes, nothrow>(size);

    if (nothrow)
    {
        // If we aren't allowed to throw, then the obj returned could be null
        // so we should check for that and bail out early here
        if (obj == nullptr)
        {
            return nullptr;
        }
    }

    // NOTE: the else-if chain below deliberately spans the preprocessor blocks;
    // exactly one of the #ifdef'd conditions participates in any given build.
#ifdef RECYCLER_MEMORY_VERIFY
    if (this->VerifyEnabled())
    {
        // Verification builds fill memory with a pattern; only clear the first
        // pointer-sized slot (the free-list link) so the pattern stays intact.
        memset(obj, 0, min(size, sizeof(FreeObject *)));
    }
    else
#endif
#ifdef RECYCLER_WRITE_BARRIER_ALLOC_THREAD_PAGE
    if ((((attributes & LeafBit) == LeafBit) || ((attributes & WithBarrierBit) == WithBarrierBit)) && HeapInfo::IsSmallObjectAllocation(size))
#else
    if (((attributes & LeafBit) == LeafBit) && HeapInfo::IsSmallBlockAllocation(size))
#endif
    {
        // If this was allocated from the small heap block, it's not
        // guaranteed to be zero so we should zero out here.
        memset((void*) obj, 0, size);
    }
    else
    {
        if (IsPageHeapEnabled())
        {
            // don't corrupt the page heap filled pattern
            memset((void*)obj, 0, min(size, sizeof(void*)));
        }
        else
        {
            // All recycler memory are allocated with zero except for the first word,
            // which store the next pointer for the free list. Just zero that one out
            ((FreeObject *)obj)->ZeroNext();
        }
    }

#ifdef RECYCLER_PAGE_HEAP
    VerifyPageHeapFillAfterAlloc(obj, size, attributes);
#endif

#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
    if (CONFIG_FLAG(ForceSoftwareWriteBarrier) && CONFIG_FLAG(RecyclerVerifyMark))
    {
        // Clear the software write-barrier state for this object in verify-mark mode.
        this->FindHeapBlock(obj)->WBClearObject(obj);
    }
#endif
    return obj;
}
// Allocate from a size-bucketed heap (small or medium buckets).
// isSmallAlloc selects at compile time between the small-object allocator and
// (when BUCKETIZE_MEDIUM_ALLOCATIONS) the medium-object allocator.
// Returns the block; nullptr only when nothrow is true.
template <ObjectInfoBits attributes, bool isSmallAlloc, bool nothrow>
inline char*
Recycler::RealAllocFromBucket(HeapInfo* heap, size_t size)
{
    // Align the size
    Assert(HeapInfo::GetAlignedSizeNoCheck(size) <= UINT_MAX);
    uint sizeCat;
    char * memBlock;
    if (isSmallAlloc)
    {
        // sizeCat is the bucketed (aligned) size category for the request.
        sizeCat = (uint)HeapInfo::GetAlignedSizeNoCheck(size);
        memBlock = heap->RealAlloc<attributes, nothrow>(this, sizeCat, size);
    }
#ifdef BUCKETIZE_MEDIUM_ALLOCATIONS
    else
    {
        sizeCat = (uint)HeapInfo::GetMediumObjectAlignedSizeNoCheck(size);
        memBlock = heap->MediumAlloc<attributes, nothrow>(this, sizeCat, size);
    }
#endif

    // If we are not allowed to throw, then the memory returned here could be null so check for that
    // If we are allowed to throw, then memBlock is not allowed to null so assert that
    if (nothrow)
    {
        if (memBlock == nullptr)
        {
            return nullptr;
        }
    }
    else
    {
        Assert(memBlock != nullptr);
    }

#ifdef RECYCLER_ZERO_MEM_CHECK
    // Don't bother checking leaf allocations for zeroing out- they're not guaranteed to be so
    if ((attributes & LeafBit) == 0
#ifdef RECYCLER_WRITE_BARRIER_ALLOC_THREAD_PAGE
        && (attributes & WithBarrierBit) == 0
#endif
        )
    {
        // TODO: looks the check has been done already
        if (heap->IsPageHeapEnabled<attributes>(size))
        {
            VerifyZeroFill(memBlock, size);
        }
        else
        {
            // Skip the first and last FreeObject-sized slots: the first word holds
            // the free-list link and is not guaranteed zero.
            VerifyZeroFill(memBlock + sizeof(FreeObject), sizeCat - (2 * sizeof(FreeObject)));
        }
    }
#endif

#ifdef PROFILE_MEM
    // Record request count, requested bytes, and bytes lost to bucket alignment.
    if (this->memoryData)
    {
        this->memoryData->requestCount++;
        this->memoryData->requestBytes += size;
        this->memoryData->alignmentBytes += sizeCat - size;
    }
#endif
    return memBlock;
}
// Dispatch an allocation request to the small, medium, or large allocator based
// on the requested size. Also the hook point for stress-GC and fault injection.
// Returns the block; nullptr only when nothrow is true.
template <ObjectInfoBits attributes, bool nothrow>
inline char*
Recycler::RealAlloc(HeapInfo* heap, size_t size)
{
#ifdef RECYCLER_STRESS
    // Stress builds may force a collection on every allocation.
    this->StressCollectNow();
#endif

    // Fault injection uses the throwing/non-throwing variant matching our contract.
    if (nothrow)
    {
        FAULTINJECT_MEMORY_NOTHROW(_u("Recycler"), size);
    }
    else
    {
        FAULTINJECT_MEMORY_THROW(_u("Recycler"), size);
    }

    if (HeapInfo::IsSmallObject(size))
    {
        return RealAllocFromBucket<attributes, /* isSmallAlloc = */ true, nothrow>(heap, size);
    }

#ifdef BUCKETIZE_MEDIUM_ALLOCATIONS
    if (HeapInfo::IsMediumObject(size))
    {
        return RealAllocFromBucket<attributes, /* isSmallAlloc = */ false, nothrow>(heap, size);
    }
#endif

    // Everything else falls through to the large-object allocator.
    return LargeAlloc<nothrow>(heap, size, attributes);
}
// Create (or fetch the existing) weak reference handle for the given strong
// reference. The returned entry is recycler-allocated memory.
// In debug builds with type tracking, records typeid(T) on first creation.
template<typename T>
inline RecyclerWeakReference<T>* Recycler::CreateWeakReferenceHandle(T* pStrongReference)
{
    // Return the weak reference that calling Add on the WR map returns
    // The entry returned is recycler-allocated memory
    RecyclerWeakReference<T>* weakRef = (RecyclerWeakReference<T>*) this->weakReferenceMap.Add((char*) pStrongReference, this);
#if DBG
#if ENABLE_RECYCLER_TYPE_TRACKING
    // A null typeInfo means this handle was just created; stamp the static type
    // for diagnostics (only done once per handle).
    if (weakRef->typeInfo == nullptr)
    {
        weakRef->typeInfo = &typeid(T);
#ifdef TRACK_ALLOC
        TrackAllocWeakRef(weakRef);
#endif
    }
#endif
#endif
    return weakRef;
}
// Ensure the given strong reference has a weak reference handle in the map,
// creating one if needed. Writes the handle through ppWeakRef.
// Returns true when the handle already existed, false when a new one was created
// (the FindOrAdd result is passed through unchanged).
template<typename T>
inline bool Recycler::FindOrCreateWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **ppWeakRef)
{
    // Ensure that the given strong ref has a weak ref in the map.
    // Return a result to indicate whether a new weak ref was created.
    bool ret = this->weakReferenceMap.FindOrAdd((char*) pStrongReference, this, (RecyclerWeakReferenceBase**)ppWeakRef);
#if DBG
    // On the newly-created path, stamp type info for diagnostics (mirrors
    // CreateWeakReferenceHandle).
    if (!ret)
    {
#if ENABLE_RECYCLER_TYPE_TRACKING
        (*ppWeakRef)->typeInfo = &typeid(T);
#ifdef TRACK_ALLOC
        TrackAllocWeakRef(*ppWeakRef);
#endif
#endif
    }
#endif
    return ret;
}
  384. template<typename T>
  385. inline bool Recycler::TryGetWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **weakReference)
  386. {
  387. return this->weakReferenceMap.TryGetValue((char*) pStrongReference, (RecyclerWeakReferenceBase**)weakReference);
  388. }
  389. #if ENABLE_WEAK_REFERENCE_REGIONS
  390. template<typename T>
  391. inline RecyclerWeakReferenceRegionItem<T>* Recycler::CreateWeakReferenceRegion(size_t count)
  392. {
  393. RecyclerWeakReferenceRegionItem<T>* regionArray = RecyclerNewArrayLeafZ(this, RecyclerWeakReferenceRegionItem<T>, count);
  394. RecyclerWeakReferenceRegion region;
  395. region.ptr = reinterpret_cast<RecyclerWeakReferenceRegionItem<void*>*>(regionArray);
  396. region.count = count;
  397. region.arrayHeapBlock = this->FindHeapBlock(regionArray);
  398. this->weakReferenceRegionList.Push(region);
  399. return regionArray;
  400. }
  401. #endif
  402. inline HeapBlock*
  403. Recycler::FindHeapBlock(void* candidate)
  404. {
  405. if ((size_t)candidate < 0x10000)
  406. {
  407. return nullptr;
  408. }
  409. if (!HeapInfo::IsAlignedAddress(candidate))
  410. {
  411. return nullptr;
  412. }
  413. return heapBlockMap.GetHeapBlock(candidate);
  414. }
  415. inline void
  416. Recycler::ScanObjectInline(void ** obj, size_t byteCount)
  417. {
  418. // This is never called during parallel marking
  419. Assert(this->collectionState != CollectionStateParallelMark);
  420. if (this->enableScanInteriorPointers)
  421. {
  422. ScanObjectInlineInterior(obj, byteCount);
  423. }
  424. else
  425. {
  426. markContext.ScanObject<false, false>(obj, byteCount);
  427. }
  428. }
  429. inline void
  430. Recycler::ScanObjectInlineInterior(void ** obj, size_t byteCount)
  431. {
  432. // This is never called during parallel marking
  433. Assert(this->collectionState != CollectionStateParallelMark);
  434. Assert(this->enableScanInteriorPointers);
  435. markContext.ScanObject<false, true>(obj, byteCount);
  436. }
// Scan an arbitrary memory range (e.g. stack or registers) for pointers to mark.
// doSpecialMark - template flag forwarded to MarkContext::ScanMemory
// forceInterior - treat interior pointers as references even if the recycler-wide
//                 interior-scan flag is off
// scanMemoryType (ASAN builds only) - identifies stack scans so the ASAN fake
//                 stack can be consulted.
template <bool doSpecialMark, bool forceInterior>
NO_SANITIZE_ADDRESS
inline void
Recycler::ScanMemoryInline(void ** obj, size_t byteCount
    ADDRESS_SANITIZER_APPEND(RecyclerScanMemoryType scanMemoryType))
{
    // This is never called during parallel marking
    Assert(this->collectionState != CollectionStateParallelMark);

#if __has_feature(address_sanitizer)
    // Only stack scans need the saved ASAN fake stack to translate fake frames.
    void *asanFakeStack =
        scanMemoryType == RecyclerScanMemoryType::Stack ? this->savedAsanFakeStack : nullptr;
#endif

    if (this->enableScanInteriorPointers || forceInterior)
    {
        markContext.ScanMemory<false, true, doSpecialMark>(
            obj, byteCount ADDRESS_SANITIZER_APPEND(asanFakeStack));
    }
    else
    {
        markContext.ScanMemory<false, false, doSpecialMark>(
            obj, byteCount ADDRESS_SANITIZER_APPEND(asanFakeStack));
    }
}
  460. inline bool
  461. Recycler::AddMark(void * candidate, size_t byteCount) throw()
  462. {
  463. // This is never called during parallel marking
  464. Assert(this->collectionState != CollectionStateParallelMark);
  465. return markContext.AddMarkedObject(candidate, byteCount);
  466. }
  467. #ifdef RECYCLER_VISITED_HOST
  468. inline bool
  469. Recycler::AddPreciselyTracedMark(IRecyclerVisitedObject * candidate) throw()
  470. {
  471. // This API cannot be used for parallel marking as we don't have enough information to determine which MarkingContext to use.
  472. Assert((this->collectionState & Collection_Parallel) == 0);
  473. return markContext.AddPreciselyTracedObject(candidate);
  474. }
  475. #endif
// Notify that a heap block is being freed. When ForceSweepObject() is set, sweep
// the block's objects in-thread first (so finalizers run) before it goes away;
// then emit the ETW free event and settle the perf counters.
// T is a heap block type (small or large) providing SweepObjects et al.
template <typename T>
void
Recycler::NotifyFree(T * heapBlock)
{
    bool forceSweepObject = ForceSweepObject();

    if (forceSweepObject)
    {
#if DBG || defined(RECYCLER_STATS)
        // Flag both recycler and block so assertions/stats know this sweep is forced.
        this->isForceSweeping = true;
        heapBlock->isForceSweeping = true;
#endif
#ifdef RECYCLER_TRACE
        this->PrintBlockStatus(nullptr, heapBlock, _u("[**34**] calling SweepObjects during NotifyFree."));
#endif
        heapBlock->template SweepObjects<SweepMode_InThread>(this);
#if DBG || defined(RECYCLER_STATS)
        heapBlock->isForceSweeping = false;
        this->isForceSweeping = false;
#endif
        RECYCLER_STATS_INC(this, heapBlockFreeCount[heapBlock->GetHeapBlockType()]);
    }

    JS_ETW(EventWriteFreeMemoryBlock(heapBlock));

#ifdef RECYCLER_PERF_COUNTERS
    if (forceSweepObject)
    {
        // The forced sweep already adjusted live-object counters; remove the
        // block's pages from the free-object counters here.
        RECYCLER_PERF_COUNTER_SUB(FreeObjectSize, heapBlock->GetPageCount() * AutoSystemInfo::PageSize);
        if (heapBlock->IsLargeHeapBlock())
        {
            RECYCLER_PERF_COUNTER_SUB(LargeHeapBlockFreeObjectSize, heapBlock->GetPageCount() * AutoSystemInfo::PageSize);
        }
        else
        {
            RECYCLER_PERF_COUNTER_SUB(SmallHeapBlockFreeObjectSize, heapBlock->GetPageCount() * AutoSystemInfo::PageSize);
        }
    }
    else
    {
        // No forced sweep: let the block update its own counters on free.
        heapBlock->UpdatePerfCountersOnFree();
    }
#endif
}
  517. template <class TBlockAttributes>
  518. inline ushort
  519. SmallHeapBlockT<TBlockAttributes>::GetObjectBitDelta()
  520. {
  521. return this->objectSize / HeapConstants::ObjectGranularity;
  522. }
  523. // Map any object address to it's bit index in the heap block bit vectors.
  524. // static
  525. template <class TBlockAttributes>
  526. __forceinline ushort
  527. SmallHeapBlockT<TBlockAttributes>::GetAddressBitIndex(void * objectAddress)
  528. {
  529. Assert(HeapInfo::IsAlignedAddress(objectAddress));
  530. ushort offset = (ushort)(::Math::PointerCastToIntegralTruncate<uint>(objectAddress) % (TBlockAttributes::PageCount * AutoSystemInfo::PageSize));
  531. offset = offset >> HeapConstants::ObjectAllocationShift;
  532. Assert(offset <= USHRT_MAX);
  533. Assert(offset <= TBlockAttributes::MaxAddressBit);
  534. return (ushort) offset;
  535. }
  536. template <class TBlockAttributes>
  537. __forceinline ushort
  538. SmallHeapBlockT<TBlockAttributes>::GetObjectIndexFromBitIndex(ushort bitIndex)
  539. {
  540. Assert(bitIndex <= TBlockAttributes::MaxAddressBit);
  541. ushort objectIndex = validPointers.GetAddressIndex(bitIndex);
  542. Assert(objectIndex == SmallHeapBlockT<TBlockAttributes>::InvalidAddressBit ||
  543. objectIndex <= TBlockAttributes::MaxAddressBit);
  544. return objectIndex;
  545. }
// Resolve an interior pointer to the start address of the object containing it.
// interiorAddress - any address inside (or at the start of) a candidate object
// objectSize      - object size for this block's bucket
// bucketIndex     - bucket, used to look up the valid-pointers table
// Returns the object's base address, or nullptr if the interior address does not
// fall inside a valid object of this bucket.
template <class TBlockAttributes>
__forceinline void *
SmallHeapBlockT<TBlockAttributes>::GetRealAddressFromInterior(void * interiorAddress, uint objectSize, byte bucketIndex)
{
    const ValidPointers<TBlockAttributes> validPointers = HeapInfo::GetValidPointersMapForBucket<TBlockAttributes>(bucketIndex);
    size_t rawInteriorAddress = reinterpret_cast<size_t>(interiorAddress);
    // Mask down to the start of the (PageCount * PageSize)-aligned block.
    // NOTE(review): assumes PageCount * PageSize is a power of two — confirm.
    size_t baseAddress = rawInteriorAddress & ~(TBlockAttributes::PageCount * AutoSystemInfo::PageSize - 1);
    ushort offset = (ushort)(rawInteriorAddress - baseAddress);
    // Map the granularity-scaled offset to the owning object's index (or the
    // InvalidAddressBit sentinel when the address is not inside a valid object).
    offset = validPointers.GetInteriorAddressIndex(offset >> HeapConstants::ObjectAllocationShift);
    if (offset == SmallHeapBlockT<TBlockAttributes>::InvalidAddressBit)
    {
        return nullptr;
    }
    return reinterpret_cast<void*>(baseAddress + offset * objectSize);
}