Procházet zdrojové kódy

Part2: Prevent objects allocated during concurrent sweep from getting collected.

Atul Katti před 8 roky
rodič
revize
39fdc561db

+ 2 - 2
lib/Common/Memory/CollectionState.h

@@ -69,9 +69,9 @@ enum CollectionState
 
     CollectionStateSetupConcurrentSweep   = Collection_Sweep | Collection_ConcurrentSweepSetup,               // setting up concurrent sweep
     CollectionStateConcurrentSweep        = Collection_ConcurrentSweep | Collection_ExecutingConcurrent,      // concurrent sweep
-#if  ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
     CollectionStateConcurrentSweepPass1 = Collection_ConcurrentSweep | Collection_ConcurrentSweepPass1 | Collection_ExecutingConcurrent,          // concurrent sweep Pass 1
-    CollectionStateConcurrentSweepPass1Wait = Collection_ConcurrentSweep | Collection_ConcurrentSweepPass1Wait | Collection_ExecutingConcurrent,  // concurrent sweep wait state after Pass 1 has finished
+    CollectionStateConcurrentSweepPass1Wait = Collection_ConcurrentSweep | Collection_ConcurrentSweepPass1Wait /*| Collection_ExecutingConcurrent*/,  // concurrent sweep wait state after Pass 1 has finished
     CollectionStateConcurrentSweepPass2 = Collection_ConcurrentSweep | Collection_ConcurrentSweepPass2 | Collection_ExecutingConcurrent,          // concurrent sweep Pass 2
     CollectionStateConcurrentSweepPass2Wait = Collection_ConcurrentSweep | Collection_ConcurrentSweepPass2Wait | Collection_ExecutingConcurrent,  // concurrent sweep wait state after Pass 2 has finished
 #endif

+ 297 - 62
lib/Common/Memory/HeapBlock.cpp

@@ -333,8 +333,10 @@ ushort
 SmallHeapBlockT<TBlockAttributes>::GetExpectedFreeObjectCount() const
 {
     Assert(this->GetRecycler()->IsSweeping());
+
     return objectCount - markCount;
 }
+
 template <class TBlockAttributes>
 uint
 SmallHeapBlockT<TBlockAttributes>::GetExpectedFreeBytes() const
@@ -369,8 +371,16 @@ SmallHeapBlockT<TBlockAttributes>::Init(ushort objectSize, ushort objectCount)
 #if ENABLE_CONCURRENT_GC
     this->isPendingConcurrentSweep = false;
 #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
-    // This flag is to identify whether this block was made available for allocations during the concurrent sweep and still needs to be swept.
-    this->isPendingConcurrentSweepPrep = false;
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    {
+        // This flag is to identify whether this block was made available for allocations during the concurrent sweep and still needs to be swept.
+        this->isPendingConcurrentSweepPrep = false;
+        this->objectsAllocatedDuringConcurrentSweepCount = 0;
+#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
+        this->hasFinishedSweepObjects = false;
+        this->wasAllocatedFromDuringSweep = false;
+#endif
+    }
 #endif
 #endif
 
@@ -571,6 +581,19 @@ SmallHeapBlockT<TBlockAttributes>::Reset()
 
     this->freeCount = 0;
     this->markCount = 0;
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    {
+#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
+        this->hasFinishedSweepObjects = false;
+        this->wasAllocatedFromDuringSweep = false;
+#endif
+        this->isPendingConcurrentSweepPrep = false;
+        DebugOnly(this->objectsMarkedDuringSweep = 0);
+        this->objectsAllocatedDuringConcurrentSweepCount = 0;
+    }
+#endif
+
 #if ENABLE_PARTIAL_GC
     this->oldFreeCount = this->lastFreeCount = this->objectCount;
 #else
@@ -770,11 +793,11 @@ template <class TBlockAttributes>
 Recycler *
 SmallHeapBlockT<TBlockAttributes>::GetRecycler() const
 {
-#if DBG
+//#if DBG
     return this->heapBucket->heapInfo->recycler;
-#else
-    return nullptr;
-#endif
+//#else
+//    return nullptr;
+//#endif
 }
 
 #if DBG
@@ -1257,7 +1280,12 @@ SmallHeapBlockT<TBlockAttributes>::GetMarkCountForSweep()
     temp.Minus(this->GetInvalidBitVector());
 
     // Remove the mark bit for things that are still free
+    //TODO: akatti: Can this change be removed? We should already have set the mark bits for anything that was allocated during the concurrent sweep.
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP 
+    if (this->freeCount != 0 || this->objectsAllocatedDuringConcurrentSweepCount != 0)
+#else
     if (this->freeCount != 0)
+#endif
     {
         temp.Minus(this->GetFreeBitVector());
     }
@@ -1274,13 +1302,26 @@ SmallHeapBlockT<TBlockAttributes>::Sweep(RecyclerSweep& recyclerSweep, bool queu
 #if ENABLE_CONCURRENT_GC
     Assert(!this->isPendingConcurrentSweep);
 #endif
-    DebugOnly(VerifyMarkBitVector());
+
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    // In concurrent sweep pass1, we mark the object directly in the mark bit vector for objects allocated during the sweep to prevent them from getting swept during the ongoing sweep itself.
+    // This will make the mark bit vector on the HeapBlockMap out-of-date w.r.t. these newly allocated objects.
+    if (this->objectsAllocatedDuringConcurrentSweepCount == 0)
+#endif
+    {
+        DebugOnly(VerifyMarkBitVector());
+    }
 
     if (allocable)
     {
         // This block has been allocated from since the last GC.
         // We need to update its free bit vector so we can use it below.
-        Assert(freeCount == this->GetFreeBitVector()->Count());
+        DebugOnly(ushort currentFreeCount = (ushort)this->GetFreeBitVector()->Count());
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+        Assert(freeCount - this->objectsAllocatedDuringConcurrentSweepCount == currentFreeCount);
+#else
+        Assert(freeCount == currentFreeCount);
+#endif
 #if ENABLE_PARTIAL_GC
         Assert(this->lastFreeCount == 0 || this->oldFreeCount == this->lastFreeCount);
 #endif
@@ -1306,7 +1347,19 @@ SmallHeapBlockT<TBlockAttributes>::Sweep(RecyclerSweep& recyclerSweep, bool queu
     const uint expectFreeCount = objectCount - localMarkCount;
     Assert(expectFreeCount >= this->freeCount);
 
-    const uint expectSweepCount = expectFreeCount - this->freeCount;
+    uint expectSweepCount = expectFreeCount - this->freeCount;
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    {
+        //TODO:akatti: Revisit. Check if we still need this.
+        //if (this->objectsAllocatedDuringConcurrentSweepCount > 0)
+        //{
+        //    Assert(!this->IsAnyFinalizableBlock());
+        //    expectSweepCount -= this->objectsAllocatedDuringConcurrentSweepCount;
+        //}
+    }
+#endif
+
     Assert(!this->IsLeafBlock() || finalizeCount == 0);
 
     Recycler * recycler = recyclerSweep.GetRecycler();
@@ -1323,18 +1376,34 @@ SmallHeapBlockT<TBlockAttributes>::Sweep(RecyclerSweep& recyclerSweep, bool queu
     bool noRealObjectsMarked = (localMarkCount == 0);
 
 #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
-    // This heap block is ready to be swept concurrently.
-    this->isPendingConcurrentSweepPrep = false;
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    {
+        Assert(!this->IsAnyFinalizableBlock() || !this->isPendingConcurrentSweepPrep);
+        // This heap block is ready to be swept concurrently.
+#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
+        this->hasFinishedSweepObjects = false;
+        this->wasAllocatedFromDuringSweep = isPendingConcurrentSweepPrep;
+#endif
+        this->isPendingConcurrentSweepPrep = false;
+    }
 #endif
 
     const bool isAllFreed = (finalizeCount == 0 && noRealObjectsMarked && !hasPendingDispose);
     if (isAllFreed)
     {
-        recycler->NotifyFree(this);
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+        if (!CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) || this->objectsAllocatedDuringConcurrentSweepCount == 0)
+#endif
+        {
+            recycler->NotifyFree(this);
 
-        Assert(!this->HasPendingDisposeObjects());
+            Assert(!this->HasPendingDisposeObjects());
 
-        return SweepStateEmpty;
+#ifdef RECYCLER_TRACE
+            recycler->PrintBlockStatus(this->heapBucket, this, _u("[**26**] ending sweep Pass1, state returned SweepStateEmpty."));
+#endif
+            return SweepStateEmpty;
+        }
     }
 
     RECYCLER_STATS_ADD(recycler, heapBlockFreeByteCount[this->GetHeapBlockType()], expectFreeCount * this->objectSize);
@@ -1349,7 +1418,33 @@ SmallHeapBlockT<TBlockAttributes>::Sweep(RecyclerSweep& recyclerSweep, bool queu
 
     if (expectSweepCount == 0)
     {
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+        if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+        {
+            //TODO:akatti:PERF Do we need to do this for non-debug builds? We might be able to skip
+            // this if this is only needed for passing debug asserts.
+            // If we allocated from this block during concurrent sweep, we must recalculate the
+            // mark and free bits for these blocks.
+            if (this->objectsAllocatedDuringConcurrentSweepCount > 0)
+            {
+                Assert(!this->IsAnyFinalizableBlock());
+
+#ifdef RECYCLER_TRACE
+                recycler->PrintBlockStatus(this->heapBucket, this, _u("[**4**] calling SweepObjects to recalculate mark and free bits ONLY."));
+#endif
+                this->template SweepObjects<SweepMode_InThread>(recycler, true /*onlyRecalculateMarkCountAndFreeBits*/);
+            }
+        }
+#endif
+
         // nothing has been freed
+#ifdef RECYCLER_TRACE
+        if (recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::ConcurrentSweepPhase))
+        {
+            SweepState stateReturned = (this->freeCount == 0) ? SweepStateFull : state;
+            Output::Print(_u("[GC #%d] [HeapBucket 0x%p] HeapBlock 0x%p %s %d [CollectionState: %d] \n"), recycler->collectionCount, this->heapBucket, this, _u("[**37**] heapBlock swept. State returned:"), stateReturned, recycler->collectionState);
+        }
+#endif
         return (this->freeCount == 0) ? SweepStateFull : state;
     }
 
@@ -1375,21 +1470,54 @@ SmallHeapBlockT<TBlockAttributes>::Sweep(RecyclerSweep& recyclerSweep, bool queu
         RECYCLER_STATS_INC(recycler, heapBlockConcurrentSweptCount[this->GetHeapBlockType()]);
         // This heap block has objects that need to be swept concurrently.
         this->isPendingConcurrentSweep = true;
+#ifdef RECYCLER_TRACE
+        if (recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::ConcurrentSweepPhase))
+        {
+            recycler->PrintBlockStatus(this->heapBucket, this, _u("[**29**] heapBlock swept. State returned: SweepStatePendingSweep"));
+    }
+#endif
         return SweepStatePendingSweep;
     }
 #else
     Assert(!recyclerSweep.IsBackground());
 #endif
 
-    SweepObjects<SweepMode_InThread>(recycler);
+#ifdef RECYCLER_TRACE
+    recycler->PrintBlockStatus(this->heapBucket, this, _u("[**16**] calling SweepObjects."));
+#endif
+    SweepObjects<SweepMode_InThread>(recycler, false /*onlyRecalculateMarkCountAndFreeBits*/);
     if (HasPendingDisposeObjects())
     {
         Assert(finalizeCount != 0);
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+        if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+        {
+            AssertMsg(this->objectsAllocatedDuringConcurrentSweepCount == 0, "Allocations during concurrent sweep not supported for finalizable blocks.");
+        }
+#endif
+
         return SweepStatePendingDispose;
     }
 
-    // Already swept, no more work to be done.  Put it back to the queue
-    return state;
+    // Already swept, no more work to be done. Put it back to the queue.
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && !this->IsAnyFinalizableBlock())
+    {
+        // We always need to check the free count as we may have allocated from this block during concurrent sweep.
+#ifdef RECYCLER_TRACE
+        if (recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::ConcurrentSweepPhase))
+        {
+            SweepState stateReturned = (this->freeCount == 0) ? SweepStateFull : state;
+            Output::Print(_u("[GC #%d] [HeapBucket 0x%p] HeapBlock 0x%p %s %d [CollectionState: %d] \n"), recycler->collectionCount, this->heapBucket, this, _u("[**38**] heapBlock swept. State returned:"), stateReturned, recycler->collectionState);
+        }
+#endif
+        return (this->freeCount == 0) ? SweepStateFull : state;
+    }
+    else
+#endif
+    {
+        return state;
+    }
 }
 
 #if DBG
@@ -1412,7 +1540,7 @@ SmallHeapBlockT<TBlockAttributes>::GetMarkCountOnHeapBlockMap() const
 template <class TBlockAttributes>
 template <SweepMode mode>
 void
-SmallHeapBlockT<TBlockAttributes>::SweepObjects(Recycler * recycler)
+SmallHeapBlockT<TBlockAttributes>::SweepObjects(Recycler * recycler, bool onlyRecalculateMarkCountAndFreeBits)
 {
 #if ENABLE_CONCURRENT_GC
     Assert(mode == SweepMode_InThread || this->isPendingConcurrentSweep);
@@ -1421,15 +1549,53 @@ SmallHeapBlockT<TBlockAttributes>::SweepObjects(Recycler * recycler)
     Assert(mode == SweepMode_InThread);
 #endif
     Assert(this->IsFreeBitsValid());
-    Assert(this->markCount != 0 || this->isForceSweeping || this->IsAnyFinalizableBlock());
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    AssertMsg(!hasFinishedSweepObjects, "Block in SweepObjects more than once during the ongoing sweep.");
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    {
+        Assert(this->markCount != 0 || this->objectsAllocatedDuringConcurrentSweepCount > 0 || this->isForceSweeping || this->IsAnyFinalizableBlock());
+    }
+    else
+#endif
+    {
+        Assert(this->markCount != 0 || this->isForceSweeping || this->IsAnyFinalizableBlock());
+    }
+
     Assert(this->markCount == this->GetMarkCountForSweep());
 
-    DebugOnly(VerifyMarkBitVector());
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    // In concurrent sweep pass1, we mark the object directly in the mark bit vector for objects allocated during the sweep to prevent them from getting swept during the ongoing sweep itself.
+    // This will make the mark bit vector on the HeapBlockMap out-of-date w.r.t. these newly allocated objects.
+    if (this->objectsAllocatedDuringConcurrentSweepCount == 0)
+#endif
+    {
+        DebugOnly(VerifyMarkBitVector());
+    }
 
     SmallHeapBlockBitVector * marked = this->GetMarkedBitVector();
 
-    DebugOnly(const uint expectedSweepCount = objectCount - freeCount - markCount);
-    Assert(expectedSweepCount != 0 || this->isForceSweeping);
+    DebugOnly(uint expectedSweepCount = objectCount - freeCount - markCount);
+#if DBG && ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    {
+        Assert(expectedSweepCount != 0 || this->isForceSweeping || this->objectsAllocatedDuringConcurrentSweepCount != 0);
+    }
+    else
+#endif
+    {
+        Assert(expectedSweepCount != 0 || this->isForceSweeping);
+    }
+
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    {
+        if (this->objectsAllocatedDuringConcurrentSweepCount > 0)
+        {
+            AssertMsg(!this->IsAnyFinalizableBlock(), "Allocations during concurrent sweep are not supported for finalizable blocks.");
+        }
+    }
+#endif
+
     DebugOnly(uint sweepCount = 0);
 
     const uint localSize = objectSize;
@@ -1442,26 +1608,33 @@ SmallHeapBlockT<TBlockAttributes>::SweepObjects(Recycler * recycler)
         Assert(IsValidBitIndex(bitIndex));
 
         RECYCLER_STATS_ADD(recycler, objectSweepScanCount, !isForceSweeping);
+
         if (!marked->Test(bitIndex))
         {
-            if (!this->GetFreeBitVector()->Test(bitIndex))
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+            // Skip this if we are only clearing the bit set to prevent object from getting swept as it was allocated during the ongoing concurrent sweep.
+            if (!CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) || !onlyRecalculateMarkCountAndFreeBits)
+#endif
             {
-                Assert((this->ObjectInfo(objectIndex) & ImplicitRootBit) == 0);
-                FreeObject* addr = (FreeObject*)objectAddress;
+                if (!this->GetFreeBitVector()->Test(bitIndex))
+                {
+                    Assert((this->ObjectInfo(objectIndex) & ImplicitRootBit) == 0);
+                    FreeObject* addr = (FreeObject*)objectAddress;
 
 #if ENABLE_PARTIAL_GC && ENABLE_CONCURRENT_GC
-                if (mode != SweepMode_ConcurrentPartial)
+                    if (mode != SweepMode_ConcurrentPartial)
 #endif
-                {
-                    // Don't call NotifyFree if we are doing a partial sweep.
-                    // Since we are not actually collecting the object, we will do the NotifyFree later
-                    // when the object is actually collected in a future Sweep.
-                    recycler->NotifyFree((char *)addr, this->objectSize);
-                }
+                    {
+                        // Don't call NotifyFree if we are doing a partial sweep.
+                        // Since we are not actually collecting the object, we will do the NotifyFree later
+                        // when the object is actually collected in a future Sweep.
+                        recycler->NotifyFree((char *)addr, this->objectSize);
+                    }
 #if DBG
-                sweepCount++;
+                    sweepCount++;
 #endif
-                SweepObject<mode>(recycler, objectIndex, addr);
+                    SweepObject<mode>(recycler, objectIndex, addr);
+                }
             }
         }
 
@@ -1475,41 +1648,95 @@ SmallHeapBlockT<TBlockAttributes>::SweepObjects(Recycler * recycler)
         objectAddress += localSize;
     }
 
-    Assert(sweepCount == expectedSweepCount);
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    // Skip this if we are only clearing the bit set to prevent object from getting swept as it was allocated during the ongoing concurrent sweep.
+    if (!CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) || !onlyRecalculateMarkCountAndFreeBits)
+#endif
+    {
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+        // If allocations happened during concurrent sweep then we will not have the accurate count of expected sweep count as the mark/free information
+        // wasn't valid anymore.
+        Assert(sweepCount == expectedSweepCount || this->objectsAllocatedDuringConcurrentSweepCount > 0);
+#else
+        Assert(sweepCount == expectedSweepCount);
+#endif
 #if ENABLE_CONCURRENT_GC
-    this->isPendingConcurrentSweep = false;
+        this->isPendingConcurrentSweep = false;
 #endif
 
 #if ENABLE_PARTIAL_GC && ENABLE_CONCURRENT_GC
-    if (mode == SweepMode_ConcurrentPartial)
-    {
-        Assert(recycler->inPartialCollectMode);
+        if (mode == SweepMode_ConcurrentPartial)
+        {
+            Assert(recycler->inPartialCollectMode);
 
-        // We didn't actually collect anything, so the free bit vector should still be valid.
-        Assert(IsFreeBitsValid());
-    }
-    else
+            // We didn't actually collect anything, so the free bit vector should still be valid.
+            Assert(IsFreeBitsValid());
+        }
+        else
 #endif
+        {
+            // Update the free bit vector
+            // Need to update even if there are no swept objects because finalizable objects are
+            // considered freed but not on the free list.
+            ushort currentFreeCount = GetExpectedFreeObjectCount();
+
+            this->GetFreeBitVector()->OrComplimented(marked);
+            this->GetFreeBitVector()->Minus(this->GetInvalidBitVector());
+#if ENABLE_PARTIAL_GC
+            this->oldFreeCount = this->lastFreeCount = this->freeCount = currentFreeCount;
+#else
+            this->lastFreeCount = this->freeCount = currentFreeCount;
+#endif
+
+            this->lastFreeObjectHead = this->freeObjectList;
+        }
+
+        // While allocations are allowed during concurrent sweep into still unswept blocks the
+        // free bit vectors are not valid yet.
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+        if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && this->objectsAllocatedDuringConcurrentSweepCount == 0)
+#endif
+        {
+            RECYCLER_SLOW_CHECK(CheckFreeBitVector(true));
+        }
+    }
+
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && this->objectsAllocatedDuringConcurrentSweepCount > 0)
     {
-        // Update the free bit vector
-        // Need to update even if there are not swept object because finalizable object are
-        // consider freed but not on the free list.
-        ushort currentFreeCount = GetExpectedFreeObjectCount();
-        this->GetFreeBitVector()->OrComplimented(marked);
-        this->GetFreeBitVector()->Minus(this->GetInvalidBitVector());
+        Assert(!this->IsAnyFinalizableBlock());
+
+        // Adjust the mark and free bits to account for the objects we have allocated during the ongoing concurrent sweep.
+        this->markCount = (ushort)this->GetMarkCountForSweep();
+        this->EnsureFreeBitVector(true /*isCollecting*/);
 #if ENABLE_PARTIAL_GC
-        this->oldFreeCount = this->lastFreeCount = this->freeCount = currentFreeCount;
+        // Accounting for partial heuristics
+        this->GetRecycler()->recyclerSweep->AddUnaccountedNewObjectAllocBytes(this);
+
+        this->oldFreeCount = this->lastFreeCount = this->freeCount;
 #else
-        this->lastFreeCount = this->freeCount = currentFreeCount;
+        this->lastFreeCount = this->freeCount;
 #endif
 
-        this->lastFreeObjectHead = this->freeObjectList;
+        // Reset the count of objects allocated during this concurrent sweep; so we will start afresh the next time around.
+        Assert(this->objectsAllocatedDuringConcurrentSweepCount == this->objectsMarkedDuringSweep);
+        this->objectsAllocatedDuringConcurrentSweepCount = 0;
+        DebugOnly(this->objectsMarkedDuringSweep = 0);
+    }
+    else
+#endif
+    {
+        // The count of marked, non-free objects should still be the same
+        Assert(this->markCount == this->GetMarkCountForSweep());
     }
 
-    RECYCLER_SLOW_CHECK(CheckFreeBitVector(true));
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    DebugOnly(this->hasFinishedSweepObjects = true);
+#endif
 
-    // The count of marked, non-free objects should still be the same
-    Assert(this->markCount == this->GetMarkCountForSweep());
+#ifdef RECYCLER_TRACE
+    recycler->PrintBlockStatus(this->heapBucket, this, _u("[**30**] finished SweepObjects, heapblock SWEPT."));
+#endif
 }
 
 template <class TBlockAttributes>
@@ -1602,18 +1829,26 @@ template <class TBlockAttributes>
 void
 SmallHeapBlockT<TBlockAttributes>::Check(bool expectFull, bool expectPending)
 {
-    if (this->IsFreeBitsValid())
-    {
-        CheckFreeBitVector(false);
-    }
-    else
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    // If we allocated from this block during the concurrent sweep the free bit vectors would be invalid.
+#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
+    if (!this->wasAllocatedFromDuringSweep)
+#endif
+#endif
     {
-        CheckDebugFreeBitVector(false);
+        if (this->IsFreeBitsValid())
+        {
+            CheckFreeBitVector(false);
+        }
+        else
+        {
+            CheckDebugFreeBitVector(false);
+        }
     }
 
     Assert(expectPending == HasAnyDisposeObjects());
 
-    // As the blocks are added to the SLIST and used from there during concurrent sweep, the exepectFull assertion doesn't hold anymore.
+    // As the blocks are added to the SLIST and used from there during concurrent sweep, the expectFull assertion doesn't hold anymore.
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
     if (!CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
 #endif

+ 30 - 13
lib/Common/Memory/HeapBlock.h

@@ -107,7 +107,7 @@ enum ObjectInfoBits : unsigned short
     EnumClass_1_Bit             = 0x01,    // This can be extended to add more enumerable classes (if we still have bits left)
 
     // Mask for above bits
-    StoredObjectInfoBitMask     = 0xFF,
+    StoredObjectInfoBitMask = 0xFF,
 
     // Bits that implied by the block type, and thus don't need to be stored (for small blocks)
     // Note, LeafBit is used in finalizable blocks, thus is not always implied by the block type
@@ -115,7 +115,7 @@ enum ObjectInfoBits : unsigned short
     // We can move it the upper byte.
 
 #ifdef RECYCLER_WRITE_BARRIER
-    WithBarrierBit              = 0x0100,
+    WithBarrierBit = 0x0100,
 #endif
 
 #ifdef RECYCLER_VISITED_HOST
@@ -133,34 +133,34 @@ enum ObjectInfoBits : unsigned short
     // Additional definitions based on above
 
 #ifdef RECYCLER_STATS
-    NewFinalizeBit              = NewTrackBit,  // Use to detect if the background thread has counted the finalizable object in stats
+    NewFinalizeBit = NewTrackBit,  // Use to detect if the background thread has counted the finalizable object in stats
 #else
-    NewFinalizeBit              = 0x00,
+    NewFinalizeBit = 0x00,
 #endif
 
 #ifdef RECYCLER_WRITE_BARRIER
-    FinalizableWithBarrierBit   = WithBarrierBit | FinalizeBit,
+    FinalizableWithBarrierBit = WithBarrierBit | FinalizeBit,
 #endif
 
     // Allocation bits
-    FinalizableLeafBits         = NewFinalizeBit | FinalizeBit | LeafBit,
-    FinalizableObjectBits       = NewFinalizeBit | FinalizeBit ,
+    FinalizableLeafBits = NewFinalizeBit | FinalizeBit | LeafBit,
+    FinalizableObjectBits = NewFinalizeBit | FinalizeBit,
 #ifdef RECYCLER_WRITE_BARRIER
     FinalizableWithBarrierObjectBits = NewFinalizeBit | FinalizableWithBarrierBit,
 #endif
     ClientFinalizableObjectBits = NewFinalizeBit | ClientTrackedBit | FinalizeBit,
 
-    ClientTrackableLeafBits     = NewTrackBit | ClientTrackedBit | TrackBit | FinalizeBit | LeafBit,
-    ClientTrackableObjectBits   = NewTrackBit | ClientTrackedBit | TrackBit | FinalizeBit,
+    ClientTrackableLeafBits = NewTrackBit | ClientTrackedBit | TrackBit | FinalizeBit | LeafBit,
+    ClientTrackableObjectBits = NewTrackBit | ClientTrackedBit | TrackBit | FinalizeBit,
 
 #ifdef RECYCLER_WRITE_BARRIER
     ClientTrackableObjectWithBarrierBits = ClientTrackableObjectBits | WithBarrierBit,
     ClientFinalizableObjectWithBarrierBits = ClientFinalizableObjectBits | WithBarrierBit,
 #endif
 
-    WeakReferenceEntryBits      = LeafBit,
+    WeakReferenceEntryBits = LeafBit,
 
-    ImplicitRootLeafBits        = LeafBit | ImplicitRootBit,
+    ImplicitRootLeafBits = LeafBit | ImplicitRootBit,
 
     // Pending dispose objects should have LeafBit set and no others
     PendingDisposeObjectBits    = PendingDisposeBit | LeafBit,
@@ -384,6 +384,14 @@ protected:
     // This flag is to identify whether this block was made available for allocations during the concurrent sweep and 
     // still needs to be swept.
     bool isPendingConcurrentSweepPrep;
+#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
+    // This flag ensures a block doesn't get swept more than once during a given sweep.
+    bool hasFinishedSweepObjects;
+
+    // When allocating from a block during concurrent sweep, some checks need to be delayed until
+    // the free and mark bits are rebuilt. This flag helps skip those validations until then.
+    bool wasAllocatedFromDuringSweep;
+#endif
 #endif
 #endif
 
@@ -552,6 +560,14 @@ public:
     ushort freeCount;
     ushort lastFreeCount;
     ushort markCount;
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    ushort objectsAllocatedDuringConcurrentSweepCount;
+#if DBG
+    ushort objectsMarkedDuringSweep;
+    bool blockNotReusedInPartialHeapBlockList;
+    bool blockNotReusedInPendingList;
+#endif
+#endif
 
 #if ENABLE_PARTIAL_GC
     ushort oldFreeCount;
@@ -736,7 +752,7 @@ public:
     uint GetMarkCountForSweep();
     SweepState Sweep(RecyclerSweep& recyclerSweep, bool queuePendingSweep, bool allocable, ushort finalizeCount = 0, bool hasPendingDispose = false);
     template <SweepMode mode>
-    void SweepObjects(Recycler * recycler);
+    void SweepObjects(Recycler * recycler, bool onlyRecalculateMarkCountAndFreeBits);
 
     uint GetAndClearLastFreeCount();
     void ClearAllAllocBytes();      // Reset all unaccounted alloc bytes and the new alloc count
@@ -794,10 +810,11 @@ public:
 protected:
     static size_t GetAllocPlusSize(uint objectCount);
     inline void SetAttributes(void * address, unsigned char attributes);
+    inline void UpdateAttributes(void * address, unsigned char attributes);
+    ushort GetAddressIndex(void * objectAddress);
 
     SmallHeapBlockT(HeapBucket * bucket, ushort objectSize, ushort objectCount, HeapBlockType heapBlockType);
 
-    ushort GetAddressIndex(void * objectAddress);
     ushort GetInteriorAddressIndex(void * interiorAddress);
     ushort GetObjectIndexFromBitIndex(ushort bitIndex);
 

+ 11 - 0
lib/Common/Memory/HeapBlock.inl

@@ -17,6 +17,17 @@ SmallHeapBlockT<TBlockAttributes>::SetAttributes(void * address, unsigned char a
     ObjectInfo(index) = attributes;
 }
 
+template <class TBlockAttributes>
+void
+SmallHeapBlockT<TBlockAttributes>::UpdateAttributes(void * address, unsigned char attributes)
+{
+    Assert(this->address != nullptr);
+    Assert(this->segment != nullptr);
+    ushort index = GetAddressIndex(address);
+    Assert(index != SmallHeapBlockT<TBlockAttributes>::InvalidAddressBit);
+    ObjectInfo(index) = attributes;
+}
+
 inline
 IdleDecommitPageAllocator*
 HeapBlock::GetPageAllocator(Recycler* recycler)

+ 273 - 91
lib/Common/Memory/HeapBucket.cpp

@@ -15,6 +15,10 @@ HeapBucket::HeapBucket() :
     emptyHeapBlockCount = 0;
 #endif
 
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    this->allocationsStartedDuringConcurrentSweep = false;
+#endif
+
 #ifdef RECYCLER_PAGE_HEAP
     isPageHeapEnabled = false;
 #endif
@@ -62,10 +66,6 @@ HeapBucketT<TBlockType>::HeapBucketT() :
     explicitFreeLockBlockList = nullptr;
 #endif
 
-#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
-    this->allocationsStartedDuringConcurrentSweep = false;
-#endif
-
     isAllocationStopped = false;
 }
 
@@ -142,7 +142,11 @@ HeapBucketT<TBlockType>::PushHeapBlockToSList(PSLIST_HEADER list, TBlockType * h
         return false;
     }
 
+    // While in the SLIST the blocks live as standalone, when they come out they
+    // will go into appropriate list and the Next block will be set accordingly.
+    heapBlock->SetNextBlock(nullptr);
     currentBlock->itemHeapBlock = heapBlock;
+
     ::InterlockedPushEntrySList(list, &(currentBlock->itemEntry));
     return true;
 }
@@ -398,9 +402,23 @@ HeapBucketT<TBlockType>::HasPendingDisposeHeapBlocks() const
     return IsFinalizableBucket && ((SmallFinalizableHeapBucketT<typename TBlockType::HeapBlockAttributes> *)this)->pendingDisposeList != nullptr;
 #endif
 }
+
 #endif
 
 #if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
+template <typename TBlockType>
+void
+HeapBucketT<TBlockType>::AssertCheckHeapBlockNotInAnyList(TBlockType * heapBlock)
+{
+    AssertMsg(!HeapBlockList::Contains(heapBlock, heapBlockList), "The heap block already exists in the heapBlockList.");
+    AssertMsg(!HeapBlockList::Contains(heapBlock, fullBlockList), "The heap block already exists in the fullBlockList.");
+    AssertMsg(!HeapBlockList::Contains(heapBlock, emptyBlockList), "The heap block already exists in the emptyBlockList.");
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    AssertMsg(!HeapBlockList::Contains(heapBlock, sweepableHeapBlockList), "The heap block already exists in the sweepableHeapBlockList.");
+    AssertMsg(!HeapBlockList::Contains(heapBlock, pendingSweepPrepHeapBlockList), "The heap block already exists in the pendingSweepPrepHeapBlockList.");
+#endif
+}
+
 template <typename TBlockType>
 size_t
 HeapBucketT<TBlockType>::GetNonEmptyHeapBlockCount(bool checkCount) const
@@ -412,7 +430,7 @@ HeapBucketT<TBlockType>::GetNonEmptyHeapBlockCount(bool checkCount) const
 #if ENABLE_CONCURRENT_GC
 #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
 #if SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
-    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && !this->IsAnyFinalizableBucket())
     {
         allocatingDuringConcurrentSweep = true;
         // This lock is needed only in the debug mode while we verify block counts. Not needed otherwise, as this list is never accessed concurrently.
@@ -435,7 +453,7 @@ HeapBucketT<TBlockType>::GetNonEmptyHeapBlockCount(bool checkCount) const
 #endif
 
     // There is no way to determine the number of item in an SLIST if there are >= 65535 items in the list.
-    RECYCLER_SLOW_CHECK(Assert(!checkCount || heapBlockCount == currentHeapBlockCount || allocatingDuringConcurrentSweep));
+    RECYCLER_SLOW_CHECK(Assert(!checkCount || heapBlockCount == currentHeapBlockCount || (heapBlockCount >= 65535 && allocatingDuringConcurrentSweep)));
 
     return currentHeapBlockCount;
 }
@@ -463,8 +481,10 @@ HeapBucketT<TBlockType>::TryAlloc(Recycler * recycler, TBlockAllocatorType * all
     TBlockType * heapBlock = this->nextAllocableBlockHead;
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP 
 #if SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
-    bool heapBlockInSweepableList = false;
-    if (heapBlock == nullptr && CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    bool heapBlockFromAllocableHeapBlockList = false;
+    DebugOnly(bool heapBlockInPendingSweepPrepList = false);
+
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && heapBlock == nullptr && !this->IsAnyFinalizableBucket())
     {
 #if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
         // This lock is needed only in the debug mode while we verify block counts. Not needed otherwise, as this list is never accessed concurrently.
@@ -475,21 +495,35 @@ HeapBucketT<TBlockType>::TryAlloc(Recycler * recycler, TBlockAllocatorType * all
         heapBlock = PopHeapBlockFromSList(this->allocableHeapBlockListHead);
         if (heapBlock != nullptr)
         {
-            //TODO:akatti:Reenable this
+            DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
             if (heapBlock->isPendingConcurrentSweepPrep)
             {
+#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
+                heapBlock->wasAllocatedFromDuringSweep = true;
+#endif
                 // Put the block in the heap block list that still needs sweep prep (i.e. Pass-1 of sweep) so we don't lose track of it.
                 heapBlock->SetNextBlock(pendingSweepPrepHeapBlockList);
                 pendingSweepPrepHeapBlockList = heapBlock;
+                AssertMsg(heapBlock->objectsAllocatedDuringConcurrentSweepCount == 0, "We just picked up this block for allocations during concurrent sweep, we haven't allocated from it yet.");
+
+#ifdef RECYCLER_TRACE
+                recycler->PrintBlockStatus(this, heapBlock, _u("[**31**] pending Pass1 prep, picked up for allocations during concurrent sweep."));
+#endif
+                DebugOnly(heapBlockInPendingSweepPrepList = true);
             }
             else
             {
-                // Put the block in the sweepable heap block list so we don't lose track of it. The block will eventually be moved to the 
+                // Put the block in the sweepable heap block list so we don't lose track of it. The block will eventually be moved to the
                 // heapBlockList or fullBlockList as appropriate during the next sweep.
+                AssertMsg(!HeapBlockList::Contains(heapBlock, sweepableHeapBlockList), "The heap block already exists in this list.");
                 heapBlock->SetNextBlock(sweepableHeapBlockList);
                 sweepableHeapBlockList = heapBlock;
+
+#ifdef RECYCLER_TRACE
+                recycler->PrintBlockStatus(this, heapBlock, _u("[**32**] picked up for allocations during concurrent sweep."));
+#endif
             }
-            heapBlockInSweepableList = true;
+            heapBlockFromAllocableHeapBlockList = true;
         }
 #if DBG|| defined(RECYCLER_SLOW_CHECK_ENABLED)
         debugSweepableHeapBlockListLock.Leave();
@@ -507,7 +541,7 @@ HeapBucketT<TBlockType>::TryAlloc(Recycler * recycler, TBlockAllocatorType * all
         // interlocked SLIST. During that time, the heap block at the top of the SLIST is always the nextAllocableBlockHead.
         // If the heapBlock was just picked from the SLIST and nextAllocableBlockHead is not NULL then we just resumed normal allocations on the background thread
         // while finishing the concurrent sweep, and the nextAllocableBlockHead is already set properly.
-        if (this->nextAllocableBlockHead != nullptr && !heapBlockInSweepableList)
+        if (this->nextAllocableBlockHead != nullptr && !heapBlockFromAllocableHeapBlockList)
 #endif
         {
             this->nextAllocableBlockHead = heapBlock->GetNextBlock();
@@ -566,6 +600,12 @@ HeapBucket::GetRecycler() const
     return this->heapInfo->recycler;
 }
 
+bool
+HeapBucket::AllocationsStartedDuringConcurrentSweep() const
+{
+    return this->allocationsStartedDuringConcurrentSweep;
+}
+
 #ifdef RECYCLER_PAGE_HEAP
 template <typename TBlockType>
 char *
@@ -602,16 +642,52 @@ HeapBucketT<TBlockType>::SnailAlloc(Recycler * recycler, TBlockAllocatorType * a
     // No free memory, try to collect with allocated bytes and time heuristic, and concurrently
     BOOL collected = recycler->disableCollectOnAllocationHeuristics ? recycler->FinishConcurrent<FinishConcurrentOnAllocation>() :
         recycler->CollectNow<CollectOnAllocation>();
+
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    // If we started allocations during concurrent sweep, we may have found blocks to allocate from; try again.
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && this->AllocationsStartedDuringConcurrentSweep())
+    {
+        memBlock = this->TryAlloc(recycler, allocator, sizeCat, attributes);
+        if (memBlock != nullptr)
+        {
+            return memBlock;
+        }
+    }
+#endif
 #else
     BOOL collected = recycler->disableCollectOnAllocationHeuristics ? FALSE : recycler->CollectNow<CollectOnAllocation>();
 #endif
 
     AllocationVerboseTrace(recycler->GetRecyclerFlagsTable(), _u("TryAlloc failed, forced collection on allocation [Collected: %d]\n"), collected);
+
     if (!collected)
     {
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+        //if (recycler->IsConcurrentSweepExecutingState())
+        //{
+        //    recycler->FinishConcurrent<FinishConcurrentOnAllocation>();
+        //    memBlock = this->TryAlloc(recycler, allocator, sizeCat, attributes);
+        //    if (memBlock != nullptr)
+        //    {
+        //        return memBlock;
+        //    }
+
+            //if (recycler->IsConcurrentSweepExecutingState())
+            //{
+            //    // Concurrent Sweep takes 2 passes now. Try again if we didn't finish sweeping yet.
+            //    recycler->FinishConcurrent<FinishConcurrentOnAllocation>();
+            //    memBlock = this->TryAlloc(recycler, allocator, sizeCat, attributes);
+            //    if (memBlock != nullptr)
+            //    {
+            //        return memBlock;
+            //    }
+            //}
+        //}
+#endif
+
 #if ENABLE_CONCURRENT_GC
-        // wait for background sweeping finish if there are too many pages allocated during background sweeping
 #if ENABLE_PARTIAL_GC
+        // wait for background sweeping finish if there are too many pages allocated during background sweeping
         if (recycler->IsConcurrentSweepExecutingState() && this->heapInfo->uncollectedNewPageCount > (uint)CONFIG_FLAG(NewPagesCapDuringBGSweeping))
 #else
         if (recycler->IsConcurrentSweepExecutingState())
@@ -625,6 +701,7 @@ HeapBucketT<TBlockType>::SnailAlloc(Recycler * recycler, TBlockAllocatorType * a
             }
         }
 #endif
+
         // We didn't collect, try to add a new heap block
         memBlock = TryAllocFromNewHeapBlock(recycler, allocator, sizeCat, size, attributes);
         if (memBlock != nullptr)
@@ -764,7 +841,7 @@ HeapBucketT<TBlockType>::ResetMarks(ResetMarkFlags flags)
         });
 
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
-        if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+        if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && !this->IsAnyFinalizableBucket())
         {
             HeapBlockList::ForEach(sweepableHeapBlockList, [flags](TBlockType * heapBlock)
             {
@@ -788,11 +865,11 @@ HeapBucketT<TBlockType>::ResetMarks(ResetMarkFlags flags)
         // When allocations are enabled for buckets during oncurrent sweep we don't keep track of the nextAllocableBlockHead as it directly
         // comes out of the SLIST. As a result, the below validations can't be performed reliably on a heap block.
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
-        if (!CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+        if (!CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) || this->IsAnyFinalizableBucket())
 #endif
         {
             // Verify that if you are in the heapBlockList, before the nextAllocableBlockHead, we have fully allocated from
-        // the block already, except if we have cleared from the allocator, or it is still in the allocator
+            // the block already, except if we have cleared from the allocator, or it is still in the allocator
             HeapBlockList::ForEach(heapBlockList, nextAllocableBlockHead, [](TBlockType * heapBlock)
             {
                 // If the heap block is in the allocator, then the heap block may or may not have free object still
@@ -820,7 +897,7 @@ HeapBucketT<TBlockType>::ScanNewImplicitRoots(Recycler * recycler)
     });
 
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
-    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && !this->IsAnyFinalizableBucket())
     {
         HeapBlockList::ForEach(sweepableHeapBlockList, [recycler](TBlockType * heapBlock)
         {
@@ -870,9 +947,9 @@ HeapBucketT<TBlockType>::VerifyBlockConsistencyInList(TBlockType * heapBlock, Re
     }
     if (heapBlock->IsClearedFromAllocator())
     {
-        // As the blocks are added to a stack and used from there during concurrent sweep, the exepectFull assertion doesn't hold anymore. 
-        // We could do some work to make this work again but there may be perf hit and it may be fragile.
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+        // As the blocks are added to a SLIST and used from there during concurrent sweep, the expectFull assertion doesn't hold anymore.
+        // We could do some work to make this work again but there may be perf hit and it may be fragile.
         if (!CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
 #endif
         {
@@ -896,10 +973,10 @@ HeapBucketT<TBlockType>::VerifyBlockConsistencyInList(TBlockType * heapBlock, Re
         // However, the exception is if this is the only heap block in this bucket, in which case nextAllocableBlockHead
         // would be null
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+        // As the blocks are added to the SLIST and used from there during concurrent sweep, the expectFull assertion doesn't hold anymore.
         if (!CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
 #endif
         {
-            // As the blocks are added to the SLIST and used from there during concurrent sweep, the exepectFull assertion doesn't hold anymore.
             Assert(*expectFull == (!heapBlock->HasFreeObject() || heapBlock->IsInAllocator()) || nextAllocableBlockHead == nullptr);
         }
     }
@@ -1009,6 +1086,9 @@ HeapBucketT<TBlockType>::SweepHeapBlockList(RecyclerSweep& recyclerSweep, TBlock
         // The whole list need to be consistent
         DebugOnly(VerifyBlockConsistencyInList(heapBlock, recyclerSweep));
 
+#ifdef RECYCLER_TRACE
+        recyclerSweep.GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**1**] starting Sweep Pass1."));
+#endif
         SweepState state = heapBlock->Sweep(recyclerSweep, queuePendingSweep, allocable);
 
         DebugOnly(VerifyBlockConsistencyInList(heapBlock, recyclerSweep, state));
@@ -1022,8 +1102,14 @@ HeapBucketT<TBlockType>::SweepHeapBlockList(RecyclerSweep& recyclerSweep, TBlock
             // blocks that have swept object. Queue up the block for concurrent sweep.
             Assert(queuePendingSweep);
             TBlockType *& pendingSweepList = recyclerSweep.GetPendingSweepBlockList(this);
+            DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
+            AssertMsg(!HeapBlockList::Contains(heapBlock, pendingSweepList), "The heap block already exists in the pendingSweepList.");
+
             heapBlock->SetNextBlock(pendingSweepList);
             pendingSweepList = heapBlock;
+#ifdef RECYCLER_TRACE
+            recyclerSweep.GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**2**] finished Sweep Pass1, heapblock added to pendingSweepList."));
+#endif
 #if ENABLE_PARTIAL_GC
             recyclerSweep.NotifyAllocableObjects(heapBlock);
 #endif
@@ -1038,6 +1124,12 @@ HeapBucketT<TBlockType>::SweepHeapBlockList(RecyclerSweep& recyclerSweep, TBlock
 #else
             Assert(IsFinalizableBucket);
 #endif
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+            if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+            {
+                AssertMsg(!heapBlock->isPendingConcurrentSweepPrep, "Finalizable blocks don't support allocations during concurrent sweep.");
+            }
+#endif
 
             DebugOnly(heapBlock->template AsFinalizableBlock<typename TBlockType::HeapBlockAttributes>()->SetIsPendingDispose());
 
@@ -1048,18 +1140,27 @@ HeapBucketT<TBlockType>::SweepHeapBlockList(RecyclerSweep& recyclerSweep, TBlock
             // finalizable objects, so that we can go through and call the dispose, and then
             // transfer the finalizable object back to the free list.
             SmallFinalizableHeapBucketT<typename TBlockType::HeapBlockAttributes> * finalizableHeapBucket = (SmallFinalizableHeapBucketT<typename TBlockType::HeapBlockAttributes>*)this;
+            DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
+            //AssertMsg(!HeapBlockList::Contains(heapBlock, finalizableHeapBucket->pendingDisposeList), "The heap block already exists in the pendingDisposeList.");
             heapBlock->template AsFinalizableBlock<typename TBlockType::HeapBlockAttributes>()->SetNextBlock(finalizableHeapBucket->pendingDisposeList);
             finalizableHeapBucket->pendingDisposeList = heapBlock->template AsFinalizableBlock<typename TBlockType::HeapBlockAttributes>();
             Assert(!recycler->hasPendingTransferDisposedObjects);
             recycler->hasDisposableObject = true;
+#ifdef RECYCLER_TRACE
+            recyclerSweep.GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**3**] finished Sweep Pass1, heapblock added to pendingDisposeList."));
+#endif
             break;
         }
         case SweepStateSwept:
         {
             Assert(this->nextAllocableBlockHead == nullptr);
             Assert(heapBlock->HasFreeObject());
+            DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
             heapBlock->SetNextBlock(this->heapBlockList);
             this->heapBlockList = heapBlock;
+#ifdef RECYCLER_TRACE
+            recyclerSweep.GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**6**] finished Sweep Pass1, heapblock added to heapBlockList."));
+#endif
 #if ENABLE_PARTIAL_GC
             recyclerSweep.NotifyAllocableObjects(heapBlock);
 #endif
@@ -1068,8 +1169,12 @@ HeapBucketT<TBlockType>::SweepHeapBlockList(RecyclerSweep& recyclerSweep, TBlock
         case SweepStateFull:
         {
             Assert(!heapBlock->HasFreeObject());
+            DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
             heapBlock->SetNextBlock(this->fullBlockList);
             this->fullBlockList = heapBlock;
+#ifdef RECYCLER_TRACE
+            recyclerSweep.GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**7**] finished Sweep Pass1, heapblock FULL added to fullBlockList."));
+#endif
             break;
         }
         case SweepStateEmpty:
@@ -1088,6 +1193,14 @@ HeapBucketT<TBlockType>::SweepHeapBlockList(RecyclerSweep& recyclerSweep, TBlock
 #if ENABLE_CONCURRENT_GC
             // CONCURRENT-TODO: Finalizable block never have background == true and always be processed
             // in thread, so it will not queue up the pages even if we are doing concurrent GC
+            DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+            if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+            {
+                AssertMsg(heapBlock->objectsAllocatedDuringConcurrentSweepCount == 0, "We allocated to this block during concurrent sweep; it's not EMPTY anymore, it should NOT be freed or queued as EMPTY.");
+            }
+#endif
+
             if (recyclerSweep.IsBackground())
             {
 #ifdef RECYCLER_WRITE_BARRIER
@@ -1099,6 +1212,9 @@ HeapBucketT<TBlockType>::SweepHeapBlockList(RecyclerSweep& recyclerSweep, TBlock
                 // the maximum and will get decommitted anyway
                 recyclerSweep.template QueueEmptyHeapBlock<TBlockType>(this, heapBlock);
                 RECYCLER_STATS_INC(recycler, numZeroedOutSmallBlocks);
+#ifdef RECYCLER_TRACE
+                recyclerSweep.GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**8**] finished Sweep Pass1, heapblock EMPTY added to pendingEmptyBlockList."));
+#endif
             }
             else
 #endif
@@ -1107,6 +1223,9 @@ HeapBucketT<TBlockType>::SweepHeapBlockList(RecyclerSweep& recyclerSweep, TBlock
                 heapBlock->ReleasePagesSweep(recycler);
                 FreeHeapBlock(heapBlock);
                 RECYCLER_SLOW_CHECK(this->heapBlockCount--);
+#ifdef RECYCLER_TRACE
+                recyclerSweep.GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**9**] finished Sweep Pass1, heapblock EMPTY, was FREED in-thread."));
+#endif
             }
 
             break;
@@ -1144,21 +1263,34 @@ HeapBucketT<TBlockType>::SweepBucket(RecyclerSweep& recyclerSweep)
 #endif
 
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
-    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && this->AllowAllocationsDuringConcurrentSweep())
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && this->sweepableHeapBlockList != nullptr)
     {
-        // Return the blocks we may have allocated from during a previous concurrent sweep back to the fullBlockList. We need to rebuild the free bit vectors for these blocks.
+        Assert(!this->IsAnyFinalizableBucket());
+
+#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
+        // This lock is needed only in the debug mode while we verify block counts. Not needed otherwise, as this list is never accessed concurrently.
+        // Items are added to it by the allocator when allocations are allowed during concurrent sweep. The list is drained during the next sweep while
+        // allocations are stopped.
+        debugSweepableHeapBlockListLock.Enter();
+#endif
+        // Return the blocks we may have allocated from during the previous concurrent sweep back to the fullBlockList.
+        // We need to rebuild the free bit vectors for these blocks.
         HeapBlockList::ForEachEditing(this->sweepableHeapBlockList, [this](TBlockType * heapBlock)
         {
             heapBlock->BuildFreeBitVector();
+            AssertMsg(!HeapBlockList::Contains(heapBlock, heapBlockList), "The heap block already exists in the heapBlockList.");
+            AssertMsg(!HeapBlockList::Contains(heapBlock, fullBlockList), "The heap block already exists in the fullBlockList.");
+            AssertMsg(!HeapBlockList::Contains(heapBlock, emptyBlockList), "The heap block already exists in the emptyBlockList.");
+            AssertMsg(!HeapBlockList::Contains(heapBlock, pendingSweepPrepHeapBlockList), "The heap block already exists in the pendingSweepPrepHeapBlockList.");
             heapBlock->SetNextBlock(this->fullBlockList);
             this->fullBlockList = heapBlock;
         });
-
         this->sweepableHeapBlockList = nullptr;
+#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
+        debugSweepableHeapBlockListLock.Leave();
+#endif
 
-        // The pendingSweepPrepHeapBlockList should always be empty prior to a sweep as its only used during concurrent sweep.
-        //TODO:akatti: Why is this not true?
-        Assert(this->pendingSweepPrepHeapBlockList == nullptr);
+        AssertMsg(this->pendingSweepPrepHeapBlockList == nullptr, "The pendingSweepPrepHeapBlockList should always be empty prior to a sweep as its only used during concurrent sweep.");
     }
 #endif
 
@@ -1184,12 +1316,13 @@ HeapBucketT<TBlockType>::SweepBucket(RecyclerSweep& recyclerSweep)
     this->fullBlockList = nullptr;
 
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
-    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && this->AllowAllocationsDuringConcurrentSweep())
+    if (this->AllowAllocationsDuringConcurrentSweep())
     {
-        // In order to allow allocations during sweep (Pass-1) we will set aside blocks after nextAllocableBlockHead (inclusive) and allow
+        Assert(!this->IsAnyFinalizableBucket());
+
+        // In order to allow allocations during sweep (Pass-1) we will set aside blocks after nextAllocableBlockHead (exclusive) and allow
         // allocations to these blocks as we know that these blocks are not full yet. These will need to be swept later though before starting Pass-2
         // of the sweep.
-        //TODO:akatti:Reenable this
         this->PrepareForAllocationsDuringConcurrentSweep(currentHeapBlockList);
     }
 #endif
@@ -1266,13 +1399,6 @@ HeapBucketT<TBlockType>::StartAllocationAfterSweep()
     this->nextAllocableBlockHead = this->heapBlockList;
 }
 
-template <typename TBlockType>
-bool
-HeapBucketT<TBlockType>::AllocationsStartedDuringConcurrentSweep() const
-{
-    return this->allocationsStartedDuringConcurrentSweep;
-}
-
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
 template <typename TBlockType>
 void
@@ -1280,6 +1406,7 @@ HeapBucketT<TBlockType>::StartAllocationDuringConcurrentSweep()
 {
     Recycler * recycler = this->GetRecycler();
     Assert(!recycler->recyclerSweep->InPartialCollect());
+    Assert(!this->IsAnyFinalizableBucket());
 
     Assert(this->IsAllocationStopped());
     this->isAllocationStopped = false;
@@ -1304,6 +1431,7 @@ HeapBucketT<TBlockType>::ResumeNormalAllocationAfterConcurrentSweep(TBlockType *
     this->nextAllocableBlockHead = newNextAllocableBlockHead;
 }
 
+//TODO:akatti: Add comments here about the notion of Pass-1 and Pass-2 of the sweep and how blocks are managed during the concurrent sweep.
 template<typename TBlockType>
 void
 HeapBucketT<TBlockType>::PrepareForAllocationsDuringConcurrentSweep(TBlockType * &currentHeapBlockList)
@@ -1311,35 +1439,64 @@ HeapBucketT<TBlockType>::PrepareForAllocationsDuringConcurrentSweep(TBlockType *
 #if SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
     if (this->AllowAllocationsDuringConcurrentSweep())
     {
+        Assert(!this->IsAnyFinalizableBucket());
         Assert(HeapBucketT<TBlockType>::QueryDepthInterlockedSList(this->allocableHeapBlockListHead) == 0);
         Assert(HeapBlockList::Count(this->sweepableHeapBlockList) == 0);
         Assert(HeapBlockList::Count(this->pendingSweepPrepHeapBlockList) == 0);
 
         // TODO:akatti: What if lastKnownNextAllocableBlockHead is NULL?
-        if (this->lastKnownNextAllocableBlockHead != nullptr)
+        TBlockType* startingNextAllocableBlockHead = this->lastKnownNextAllocableBlockHead;
+        bool allocationsStarted = false;
+        if (startingNextAllocableBlockHead != nullptr)
         {
-            HeapBlockList::ForEach(this->lastKnownNextAllocableBlockHead, [this](TBlockType * heapBlock)
-            {
-                // This heap block is NOT ready to be swept concurrently as it hasn't yet been sweep prep (i.e. Pass1 of sweep).
-                heapBlock->isPendingConcurrentSweepPrep = true;
-                HeapBucketT<TBlockType>::PushHeapBlockToSList(this->allocableHeapBlockListHead, heapBlock);
-            });
-
-            TBlockType * previousBlock = HeapBlockList::FindPreviousBlock(currentHeapBlockList, this->lastKnownNextAllocableBlockHead);
-            if (previousBlock != nullptr)
+            // To avoid a race condition between the allocator attempting to allocate from the lastKnownNextAllocableBlockHead and this code
+            // where we are adding it to the SLIST we skip the lastKnownNextAllocableBlockHead and pick up the next block to start with.
+            // Allocations should have stopped by then; so allocator shouldn't pick up the lastKnownNextAllocableBlockHead->Next block.
+            TBlockType* savedNextAllocableBlockHead = startingNextAllocableBlockHead->GetNextBlock();
+            startingNextAllocableBlockHead->SetNextBlock(nullptr);
+            startingNextAllocableBlockHead = savedNextAllocableBlockHead;
+
+            if (startingNextAllocableBlockHead != nullptr)
             {
-                previousBlock->SetNextBlock(nullptr);
+                // The allocable blocks, if any are available, will now be added to the allocable blocks SLIST at this time; start allocations now.
+                this->StartAllocationDuringConcurrentSweep();
+                allocationsStarted = true;
+
+                HeapBlockList::ForEachEditing(startingNextAllocableBlockHead, [this, &allocationsStarted](TBlockType * heapBlock)
+                {
+                    // This heap block is NOT ready to be swept concurrently as it hasn't yet been through sweep prep (i.e. Pass1 of sweep).
+                    heapBlock->isPendingConcurrentSweepPrep = true;
+                    DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
+                    bool blockAddedToSList = HeapBucketT<TBlockType>::PushHeapBlockToSList(this->allocableHeapBlockListHead, heapBlock);
+
+                    // If we encountered OOM while pushing the heapBlock to the SLIST we must add it to the heapBlockList so we don't lose track of it.
+                    if (!blockAddedToSList)
+                    {
+#ifdef RECYCLER_TRACE
+                        this->GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**5**] was being moved to SLIST, but OOM while adding to the SLIST."));
+#endif
+                        this->GetRecycler()->OutOfMemory();
+                    }
+                });
+#ifdef RECYCLER_TRACE
+                if (this->GetRecycler()->GetRecyclerFlagsTable().Trace.IsEnabled(Js::ConcurrentSweepPhase))
+                {
+                    size_t currentHeapBlockCount = QueryDepthInterlockedSList(allocableHeapBlockListHead);
+                    Output::Print(_u("[GC #%d] [HeapBucket 0x%p] Starting allocations during  concurrent sweep with %d blocks. [CollectionState: %d] \n"), this->GetRecycler()->collectionCount, this, currentHeapBlockCount, this->GetRecycler()->collectionState);
+                    Output::Print(_u("[GC #%d] [HeapBucket 0x%p] The heapBlockList has %d blocks. Total heapBlockCount is %d.\n\n"), this->GetRecycler()->collectionCount, this, HeapBlockList::Count(this->heapBlockList), this->heapBlockCount);
+                }
+#endif
             }
+        }
 
-            // All blocks from heapBlockList go to the allocable list.
-            if (this->lastKnownNextAllocableBlockHead == currentHeapBlockList)
-            {
-                currentHeapBlockList = nullptr;
-            }
+        if (!allocationsStarted)
+        {
+            // If we didn't start allocations yet, start them now in anticipation of blocks becoming available later as blocks complete sweep.
+            this->StartAllocationDuringConcurrentSweep();
+            allocationsStarted = true;
         }
 
-        // The allocable blocks, if any are available, will already be in the allocable blocks SLIST at this time; just start allocations.
-        this->StartAllocationDuringConcurrentSweep();
+        Assert(!this->IsAllocationStopped());
     }
 #endif
 }
@@ -1415,18 +1572,44 @@ template <typename TBlockType>
 void
 HeapBucketT<TBlockType>::FinishSweepPrep(RecyclerSweep& recyclerSweep)
 {
-    if (this->AllowAllocationsDuringConcurrentSweep()/* || this->AllocationsStartedDuringConcurrentSweep()*/)
+    if (this->AllocationsStartedDuringConcurrentSweep())
     {
-        this->ClearAllocators();
+        Assert(this->AllowAllocationsDuringConcurrentSweep());
+        Assert(!this->IsAnyFinalizableBucket());
+
         this->StopAllocationBeforeSweep();
+        this->ClearAllocators();
+
+#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
+        // This lock is needed only in the debug mode while we verify block counts. Not needed otherwise, as this list is never accessed concurrently.
+        // Items are added to it by the allocator when allocations are allowed during concurrent sweep. The list is drained during the next sweep while
+        // allocations are stopped.
+        debugSweepableHeapBlockListLock.Enter();
+#endif
+
+        // Move the list locally. We kept this temporary list to track blocks we may have allocated from during the ongoing concurrent sweep. Now these
+        // blocks will move to the appropriate list for finishing the sweep.
+        TBlockType * currentPendingSweepPrepHeapBlockList = this->pendingSweepPrepHeapBlockList;
+        this->pendingSweepPrepHeapBlockList = nullptr;
 
         // Pull the blocks from the allocable SLIST that we didn't use. We need to finish the Pass-1 sweep of these blocks too.
-        TBlockType * unusedPendingSweepPrepHeapBlockList = nullptr;
         TBlockType * heapBlock = PopHeapBlockFromSList(this->allocableHeapBlockListHead);
         while (heapBlock != nullptr)
         {
-            heapBlock->SetNextBlock(unusedPendingSweepPrepHeapBlockList);
-            unusedPendingSweepPrepHeapBlockList = heapBlock;
+            DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
+            AssertMsg(!HeapBlockList::Contains(heapBlock, currentPendingSweepPrepHeapBlockList), "The heap block already exists in the currentPendingSweepPrepHeapBlockList.");
+            if (heapBlock->isPendingConcurrentSweepPrep)
+            {
+                heapBlock->SetNextBlock(currentPendingSweepPrepHeapBlockList);
+                currentPendingSweepPrepHeapBlockList = heapBlock;
+            }
+            else
+            {
+                // Already swept, put it back to the sweepableHeapBlockList list; so it can be processed later.
+                heapBlock->SetNextBlock(this->sweepableHeapBlockList);
+                this->sweepableHeapBlockList = heapBlock;
+            }
+
             heapBlock = PopHeapBlockFromSList(this->allocableHeapBlockListHead);
         }
         Assert(QueryDepthInterlockedSList(this->allocableHeapBlockListHead) == 0);
@@ -1445,34 +1628,12 @@ HeapBucketT<TBlockType>::FinishSweepPrep(RecyclerSweep& recyclerSweep)
             Assert(false);
         }
 #endif
-        this->SweepHeapBlockList(recyclerSweep, unusedPendingSweepPrepHeapBlockList, true /*allocable*/);
 
-        // We need to rebuild the free bit vectors for these blocks as we may have allocated from these during the current concurrent sweep.
-        HeapBlockList::ForEach(this->pendingSweepPrepHeapBlockList, [this](TBlockType * heapBlock)
-        {
-            heapBlock->BuildFreeBitVector();
-        });
-
-        // Move the list locally.  We kept this temporary list to track blocks we may have allocated from during the ongoing concurrent sweep. Now these
-        // blocks will move to the appropriate list for finishing the sweep.
-        TBlockType * currentPendingSweepPrepHeapBlockList = this->pendingSweepPrepHeapBlockList;
-        this->pendingSweepPrepHeapBlockList = nullptr;
+        this->SweepHeapBlockList(recyclerSweep, currentPendingSweepPrepHeapBlockList, true /*allocable*/);
 
-#if DBG
-        if (TBlockType::HeapBlockAttributes::IsSmallBlock)
-        {
-            recyclerSweep.SetupVerifyListConsistencyDataForSmallBlock(nullptr, true, false);
-        }
-        else if (TBlockType::HeapBlockAttributes::IsMediumBlock)
-        {
-            recyclerSweep.SetupVerifyListConsistencyDataForMediumBlock(nullptr, true, false);
-        }
-        else
-        {
-            Assert(false);
-        }
+#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
+        debugSweepableHeapBlockListLock.Leave();
 #endif
-        this->SweepHeapBlockList(recyclerSweep, currentPendingSweepPrepHeapBlockList, true /*allocable*/);
 
         this->StartAllocationDuringConcurrentSweep();
     }
@@ -1483,16 +1644,29 @@ void
 HeapBucketT<TBlockType>::FinishConcurrentSweep()
 {
 #if SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
+    Assert(!this->IsAnyFinalizableBucket());
     Assert(this->allocableHeapBlockListHead != nullptr);
 
+#ifdef RECYCLER_TRACE
+    if (this->GetRecycler()->GetRecyclerFlagsTable().Trace.IsEnabled(Js::ConcurrentSweepPhase))
+    {
+        Output::Print(_u("[GC #%d] [HeapBucket 0x%p] starting FinishConcurrentSweep [CollectionState: %d] \n"), this->GetRecycler()->collectionCount, this, this->GetRecycler()->collectionState);
+    }
+#endif
+
     TBlockType * newNextAllocableBlockHead = nullptr;
     // Put the blocks from the allocable SLIST into the heapBlockList.
     TBlockType * heapBlock = PopHeapBlockFromSList(this->allocableHeapBlockListHead);
     while (heapBlock != nullptr)
     {
+        DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
+        AssertMsg(!heapBlock->isPendingConcurrentSweepPrep, "The blocks in the SLIST at this time should NOT have sweep prep i.e. sweep-Pass1 pending.");
         newNextAllocableBlockHead = heapBlock;
         heapBlock->SetNextBlock(this->heapBlockList);
         this->heapBlockList = heapBlock;
+#ifdef RECYCLER_TRACE
+        this->GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**40**] finished FinishConcurrentSweep, heapblock removed from SLIST and added to heapBlockList."));
+#endif
         heapBlock = PopHeapBlockFromSList(this->allocableHeapBlockListHead);
     }
 
@@ -1509,6 +1683,12 @@ template <typename TBlockType>
 void
 HeapBucketT<TBlockType>::AppendAllocableHeapBlockList(TBlockType * list)
 {
+#ifdef RECYCLER_TRACE
+    if (this->GetRecycler()->GetRecyclerFlagsTable().Trace.IsEnabled(Js::ConcurrentSweepPhase))
+    {
+        Output::Print(_u("[GC #%d] [HeapBucket 0x%p] in AppendAllocableHeapBlockList [CollectionState: %d] \n"), this->GetRecycler()->collectionCount, this, this->GetRecycler()->collectionState);
+    }
+#endif
     // Add the list to the end of the current list
     TBlockType * currentHeapBlockList = this->heapBlockList;
     if (currentHeapBlockList == nullptr)
@@ -1519,7 +1699,7 @@ HeapBucketT<TBlockType>::AppendAllocableHeapBlockList(TBlockType * list)
     }
     else
     {
-        // Find the last block and append the pendingSwpetList
+        // Find the last block and append the list
         TBlockType * tail = HeapBlockList::Tail(currentHeapBlockList);
         Assert(tail != nullptr);
         tail->SetNextBlock(list);
@@ -1540,7 +1720,7 @@ HeapBucketT<TBlockType>::EnumerateObjects(ObjectInfoBits infoBits, void (*CallBa
     UpdateAllocators();
     HeapBucket::EnumerateObjects(fullBlockList, infoBits, CallBackFunction);
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
-    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && !this->IsAnyFinalizableBucket())
     {
         HeapBucket::EnumerateObjects(sweepableHeapBlockList, infoBits, CallBackFunction);
         HeapBucket::EnumerateObjects(pendingSweepPrepHeapBlockList, infoBits, CallBackFunction);
@@ -1569,9 +1749,11 @@ HeapBucketT<TBlockType>::Check(bool checkCount)
     Assert(this->GetRecycler()->recyclerSweep == nullptr);
     UpdateAllocators();
     size_t smallHeapBlockCount = HeapInfo::Check(true, false, this->fullBlockList);
+    bool allocatingDuringConcurrentSweep = false;
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
-    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && !this->IsAnyFinalizableBucket())
     {
+        allocatingDuringConcurrentSweep = true;
         // This lock is needed only in the debug mode while we verify block counts. Not needed otherwise, as this list is never accessed concurrently.
         // Items are added to it by the allocator when allocations are allowed during concurrent sweep. The list is drained during the next sweep while
         // allocations are stopped.
@@ -1583,7 +1765,7 @@ HeapBucketT<TBlockType>::Check(bool checkCount)
 #endif
     smallHeapBlockCount += HeapInfo::Check(true, false, this->heapBlockList, this->nextAllocableBlockHead);
     smallHeapBlockCount += HeapInfo::Check(false, false, this->nextAllocableBlockHead);
-    Assert(!checkCount || this->heapBlockCount == smallHeapBlockCount);
+    Assert(!checkCount || this->heapBlockCount == smallHeapBlockCount || (this->heapBlockCount >= 65535 && allocatingDuringConcurrentSweep));
     return smallHeapBlockCount;
 }
 #endif
@@ -1614,7 +1796,7 @@ HeapBucketT<TBlockType>::AggregateBucketStats()
 
     HeapBlockList::ForEach(fullBlockList, blockStatsAggregator);
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
-    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && !this->IsAnyFinalizableBucket())
     {
         HeapBlockList::ForEach(sweepableHeapBlockList, blockStatsAggregator);
         HeapBlockList::ForEach(pendingSweepPrepHeapBlockList, blockStatsAggregator);
@@ -1654,7 +1836,7 @@ HeapBucketT<TBlockType>::Verify()
     });
 
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
-    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && !this->IsAnyFinalizableBucket())
     {
 #if DBG
         if (TBlockType::HeapBlockAttributes::IsSmallBlock)
@@ -1750,7 +1932,7 @@ HeapBucketT<TBlockType>::VerifyMark()
     });
 
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
-    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && !this->IsAnyFinalizableBucket())
     {
         HeapBlockList::ForEach(this->sweepableHeapBlockList, [](TBlockType * heapBlock)
         {

+ 5 - 3
lib/Common/Memory/HeapBucket.h

@@ -75,6 +75,7 @@ public:
 protected:
     HeapInfo * heapInfo;
     uint sizeCat;
+    bool allocationsStartedDuringConcurrentSweep;
 
 #ifdef RECYCLER_SLOW_CHECK_ENABLED
     size_t heapBlockCount;
@@ -114,6 +115,7 @@ public:
 #endif
 
     Recycler * GetRecycler() const;
+    bool AllocationsStartedDuringConcurrentSweep() const;
 
     template <typename TBlockType>
     friend class SmallHeapBlockAllocator;
@@ -248,7 +250,6 @@ protected:
     void ResumeNormalAllocationAfterConcurrentSweep(TBlockType * newNextAllocableBlockHead = nullptr);
 #endif
 
-    bool AllocationsStartedDuringConcurrentSweep() const;
     bool AllowAllocationsDuringConcurrentSweep();
     void StopAllocationBeforeSweep();
     void StartAllocationAfterSweep();
@@ -263,9 +264,11 @@ protected:
     // Partial/Concurrent GC
     void EnumerateObjects(ObjectInfoBits infoBits, void (*CallBackFunction)(void * address, size_t size));
 
+
 #if DBG
     bool AllocatorsAreEmpty() const;
     bool HasPendingDisposeHeapBlocks() const;
+    void AssertCheckHeapBlockNotInAnyList(TBlockType * heapBlock);
 
     static void VerifyBlockConsistencyInList(TBlockType * heapBlock, RecyclerVerifyListConsistencyData& recyclerSweep);
     static void VerifyBlockConsistencyInList(TBlockType * heapBlock, RecyclerVerifyListConsistencyData const& recyclerSweep, SweepState state);
@@ -303,7 +306,7 @@ protected:
     mutable CriticalSection debugSweepableHeapBlockListLock;
 #endif
     // This is the list of blocks that we allocated from during concurrent sweep. These blocks will eventually get processed during the next sweep and either go into
-    // the heapBlockList or fullBlockList.
+    // the fullBlockList.
     TBlockType * sweepableHeapBlockList;
 
     // This is the list of blocks that we allocated from during concurrent sweep prior to adjusting partial GC heuristics (AdjustPartialHeuristics). These blocks will need to 
@@ -312,7 +315,6 @@ protected:
     TBlockType * pendingSweepPrepHeapBlockList;
 #endif
 #endif
-    bool allocationsStartedDuringConcurrentSweep;
 
     FreeObject* explicitFreeList; // List of objects that have been explicitly freed
     TBlockAllocatorType * lastExplicitFreeListAllocator;

+ 9 - 0
lib/Common/Memory/HeapInfo.cpp

@@ -1647,7 +1647,12 @@ HeapInfo::DisposeObjects()
 
     recycler->hasPendingTransferDisposedObjects = true;
 #if ENABLE_CONCURRENT_GC
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP 
+    // Since we start/stop allocations during concurrent sweep, it is safer to prevent transferring disposed objects altogether.
+    if (!recycler->IsConcurrentExecutingState() /*&& !recycler->IsConcurrentSweepState()*/)
+#else
     if (!recycler->IsConcurrentExecutingState())
+#endif
 #endif
     {
         // Can't transfer disposed object when the background thread is walking the heap block list
@@ -1669,7 +1674,11 @@ HeapInfo::TransferDisposedObjects()
     Recycler * recycler = this->recycler;
     Assert(recycler->hasPendingTransferDisposedObjects);
 #if ENABLE_CONCURRENT_GC
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP 
+    Assert(!recycler->IsConcurrentExecutingState() /*&& !recycler->IsConcurrentSweepState()*/);
+#else
     Assert(!recycler->IsConcurrentExecutingState());
+#endif
 #endif
     recycler->hasPendingTransferDisposedObjects = false;
 

+ 9 - 6
lib/Common/Memory/LargeHeapBlock.cpp

@@ -193,6 +193,9 @@ LargeHeapBlock::LargeHeapBlock(__in char * address, size_t pageCount, Segment *
 #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
     // This flag is to identify whether this block was made available for allocations during the concurrent sweep and still needs to be swept.
     this->isPendingConcurrentSweepPrep = false;
+#if DBG || defined(RECYCLER_SLOW_CHECK_ENABLED)
+    this->wasAllocatedFromDuringSweep = false;
+#endif
 #endif
 #endif
     this->addressEnd = this->address + this->pageCount * AutoSystemInfo::PageSize;
@@ -1548,7 +1551,7 @@ LargeHeapBlock::Sweep(RecyclerSweep& recyclerSweep, bool queuePendingSweep)
         Assert(!queuePendingSweep);
 #endif
 
-        SweepObjects<SweepMode_InThread>(recycler);
+        SweepObjects<SweepMode_InThread>(recycler, false /*onlyRecalculateMarkCountAndFreeBits*/);
         if (TransferSweptObjects())
         {
             return SweepStatePendingDispose;
@@ -1559,7 +1562,7 @@ LargeHeapBlock::Sweep(RecyclerSweep& recyclerSweep, bool queuePendingSweep)
     {
         Assert(expectedSweepCount == 0);
         isForceSweeping = true;
-        SweepObjects<SweepMode_InThread>(recycler);
+        SweepObjects<SweepMode_InThread>(recycler, false /*onlyRecalculateMarkCountAndFreeBits*/);
         isForceSweeping = false;
     }
 #endif
@@ -1725,7 +1728,7 @@ LargeHeapBlock::FinalizeObject(Recycler* recycler, LargeObjectHeader* header)
 }
 
 // Explicitly instantiate all the sweep modes
-template void LargeHeapBlock::SweepObjects<SweepMode_InThread>(Recycler * recycler);
+template void LargeHeapBlock::SweepObjects<SweepMode_InThread>(Recycler * recycler, bool onlyRecalculateMarkCountAndFreeBits);
 #if ENABLE_CONCURRENT_GC
 template <>
 void
@@ -1738,7 +1741,7 @@ LargeHeapBlock::SweepObject<SweepMode_Concurrent>(Recycler * recycler, LargeObje
 }
 
 // Explicitly instantiate all the sweep modes
-template void LargeHeapBlock::SweepObjects<SweepMode_Concurrent>(Recycler * recycler);
+template void LargeHeapBlock::SweepObjects<SweepMode_Concurrent>(Recycler * recycler, bool onlyRecalculateMarkCountAndFreeBits);
 #if ENABLE_PARTIAL_GC
 template <>
 void
@@ -1751,7 +1754,7 @@ LargeHeapBlock::SweepObject<SweepMode_ConcurrentPartial>(Recycler * recycler, La
 }
 
 // Explicitly instantiate all the sweep modes
-template void LargeHeapBlock::SweepObjects<SweepMode_ConcurrentPartial>(Recycler * recycler);
+template void LargeHeapBlock::SweepObjects<SweepMode_ConcurrentPartial>(Recycler * recycler, bool onlyRecalculateMarkCountAndFreeBits);
 #endif
 #endif
 
@@ -1794,7 +1797,7 @@ void LargeHeapBlock::FinalizeObjects(Recycler* recycler)
 
 template <SweepMode mode>
 void
-LargeHeapBlock::SweepObjects(Recycler * recycler)
+LargeHeapBlock::SweepObjects(Recycler * recycler, bool onlyRecalculateMarkCountAndFreeBits)
 {
 #if ENABLE_CONCURRENT_GC
     Assert(mode == SweepMode_InThread || this->isPendingConcurrentSweep);

+ 1 - 1
lib/Common/Memory/LargeHeapBlock.h

@@ -166,7 +166,7 @@ public:
     void ScanNewImplicitRoots(Recycler * recycler);
     SweepState Sweep(RecyclerSweep& recyclerSweep, bool queuePendingSweep);
     template <SweepMode mode>
-    void SweepObjects(Recycler * recycler);
+    void SweepObjects(Recycler * recycler, bool onlyRecalculateMarkCountAndFreeBits);
     bool TransferSweptObjects();
     void DisposeObjects(Recycler * recycler);
     void FinalizeObjects(Recycler* recycler);

+ 10 - 3
lib/Common/Memory/LargeHeapBucket.cpp

@@ -587,7 +587,11 @@ LargeHeapBucket::Sweep(RecyclerSweep& recyclerSweep)
 {
 #if ENABLE_CONCURRENT_GC
     // CONCURRENT-TODO: large buckets are not swept in the background currently.
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    Assert(!recyclerSweep.GetRecycler()->IsConcurrentExecutingState() && !recyclerSweep.GetRecycler()->IsConcurrentSweepState());
+#else
     Assert(!recyclerSweep.GetRecycler()->IsConcurrentExecutingState());
+#endif
 #endif
 
     LargeHeapBlock * currentLargeObjectBlocks = largeBlockList;
@@ -821,7 +825,7 @@ LargeHeapBucket::SweepPendingObjects(RecyclerSweep& recyclerSweep)
             HeapBlockList::ForEach(this->pendingSweepLargeBlockList, [recycler](LargeHeapBlock * heapBlock)
             {
                 // Page heap blocks are never swept concurrently
-                heapBlock->SweepObjects<SweepMode_ConcurrentPartial>(recycler);
+                heapBlock->SweepObjects<SweepMode_ConcurrentPartial>(recycler, false /*onlyRecalculateMarkCountAndFreeBits*/);
             });
         }
         else
@@ -830,7 +834,7 @@ LargeHeapBucket::SweepPendingObjects(RecyclerSweep& recyclerSweep)
             HeapBlockList::ForEach(this->pendingSweepLargeBlockList, [recycler](LargeHeapBlock * heapBlock)
             {
                 // Page heap blocks are never swept concurrently
-                heapBlock->SweepObjects<SweepMode_Concurrent>(recycler);
+                heapBlock->SweepObjects<SweepMode_Concurrent>(recycler, false /*onlyRecalculateMarkCountAndFreeBits*/);
             });
         }
     }
@@ -979,9 +983,12 @@ LargeHeapBucket::TransferDisposedObjects()
 {
 #if ENABLE_CONCURRENT_GC
     Recycler * recycler = this->heapInfo->recycler;
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    Assert(!recycler->IsConcurrentExecutingState() && !recycler->IsConcurrentSweepState());
+#else
     Assert(!recycler->IsConcurrentExecutingState());
 #endif
-
+#endif
     HeapBlockList::ForEachEditing(this->pendingDisposeLargeBlockList, [this](LargeHeapBlock * heapBlock)
     {
         /* GC-TODO: large heap block doesn't support free list yet */

+ 4 - 0
lib/Common/Memory/MarkContext.inl

@@ -196,8 +196,12 @@ void MarkContext::MarkTrackedObject(FinalizableObject * trackedObject)
 {
 #if ENABLE_CONCURRENT_GC
     Assert(!recycler->queueTrackedObject);
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    Assert(!recycler->IsConcurrentExecutingState() && !recycler->IsConcurrentSweepState());
+#else
     Assert(!recycler->IsConcurrentExecutingState());
 #endif
+#endif
 #if ENABLE_PARTIAL_GC
     Assert(!recycler->inPartialCollectMode);
 #endif

+ 100 - 27
lib/Common/Memory/Recycler.cpp

@@ -37,16 +37,18 @@ Recycler::TrackerData Recycler::TrackerData::ExplicitFreeListObjectData(&typeid(
 
 enum ETWEventGCActivationKind : unsigned
 {
-    ETWEvent_GarbageCollect          = 0,      // force in-thread GC
-    ETWEvent_ThreadCollect           = 1,      // thread GC with wait
-    ETWEvent_ConcurrentCollect       = 2,
-    ETWEvent_PartialCollect          = 3,
-
-    ETWEvent_ConcurrentMark          = 11,
-    ETWEvent_ConcurrentRescan        = 12,
-    ETWEvent_ConcurrentSweep         = 13,
-    ETWEvent_ConcurrentTransferSwept = 14,
-    ETWEvent_ConcurrentFinishMark    = 15,
+    ETWEvent_GarbageCollect                        = 0,      // force in-thread GC
+    ETWEvent_ThreadCollect                         = 1,      // thread GC with wait
+    ETWEvent_ConcurrentCollect                     = 2,
+    ETWEvent_PartialCollect                        = 3,
+
+    ETWEvent_ConcurrentMark                        = 11,
+    ETWEvent_ConcurrentRescan                      = 12,
+    ETWEvent_ConcurrentSweep                       = 13,
+    ETWEvent_ConcurrentTransferSwept               = 14,
+    ETWEvent_ConcurrentFinishMark                  = 15,
+    ETWEvent_ConcurrentSweep_FinishSweepPrep       = 16,
+    ETWEvent_ConcurrentSweep_FinishConcurrentSweep = 17,
 };
 
 DefaultRecyclerCollectionWrapper DefaultRecyclerCollectionWrapper::Instance;
@@ -293,13 +295,13 @@ Recycler::Recycler(AllocationPolicyManager * policyManager, IdleDecommitPageAllo
 
 #if DBG
     this->heapBlockCount = 0;
-    this->collectionCount = 0;
     this->disableThreadAccessCheck = false;
 #if ENABLE_CONCURRENT_GC
     this->disableConcurrentThreadExitedCheck = false;
 #endif
 #endif
 #if DBG || defined RECYCLER_TRACE
+    this->collectionCount = 0;
     this->inResolveExternalWeakReferences = false;
 #endif
 #if DBG || defined(RECYCLER_STATS)
@@ -3389,7 +3391,7 @@ Recycler::FinishDisposeObjects()
     if (!this->inDispose && this->hasDisposableObject
         && GetRecyclerFlagsTable().Trace.IsEnabled(Js::RecyclerPhase))
     {
-        Output::Print(_u("%04X> RC(%p): %s\n"), this->mainThreadId, this, _u("Dispose object delayed"));
+        Output::Print(_u("%04X> RC(%p): %s %d\n"), this->mainThreadId, this, _u("Dispose object delayed"), this->collectionState);
     }
 #endif
     return false;
@@ -3778,8 +3780,13 @@ Recycler::DoCollectWrapped(CollectionFlags flags)
     this->allowDispose = (flags & CollectOverride_AllowDispose) == CollectOverride_AllowDispose;
     BOOL collected = collectionWrapper->ExecuteRecyclerCollectionFunction(this, &Recycler::DoCollect, flags);
 
+    //TODO:akatti: Remove this.
 #if ENABLE_CONCURRENT_GC
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    Assert(IsConcurrentExecutingState() || IsConcurrentFinishedState() /*|| IsConcurrentSweepState()*/ || !CollectionInProgress());
+#else
     Assert(IsConcurrentExecutingState() || IsConcurrentFinishedState() || !CollectionInProgress());
+#endif
 #else
     Assert(!CollectionInProgress());
 #endif
@@ -3883,7 +3890,7 @@ Recycler::DoCollect(CollectionFlags flags)
         Assert(this->backgroundFinishMarkCount == 0);
 #endif
 
-#if DBG
+#if DBG || defined RECYCLER_TRACE
         collectionCount++;
 #endif
         collectionState = Collection_PreCollection;
@@ -4392,7 +4399,11 @@ BOOL
 Recycler::RequestConcurrentWrapperCallback()
 {
 #if ENABLE_CONCURRENT_GC
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    Assert(!IsConcurrentExecutingState() && !IsConcurrentSweepState());
+#else
     Assert(!IsConcurrentExecutingState());
+#endif
 
     // Save the original collection state
     CollectionState oldState = this->collectionState;
@@ -4498,7 +4509,12 @@ Recycler::FinishConcurrent()
 
         const BOOL forceFinish = flags & CollectOverride_ForceFinish;
 
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+        // TODO:akatti: Should the concurrent wait states be considered executing states??
+        if (forceFinish || !(IsConcurrentExecutingState() /*|| IsConcurrentSweepState()*/))
+#else
         if (forceFinish || !IsConcurrentExecutingState())
+#endif
         {
 #if ENABLE_BACKGROUND_PAGE_FREEING
             if (CONFIG_FLAG(EnableBGFreeZero))
@@ -4556,7 +4572,12 @@ Recycler::TryFinishConcurrentCollect()
     Assert(!concurrent || !forceInThread);
     if (concurrent && concurrentThread != NULL)
     {
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+        // TODO:akatti: Should the concurrent wait states be considered executing states??
+        if (IsConcurrentExecutingState() /*|| IsConcurrentSweepState()*/)
+#else
         if (IsConcurrentExecutingState())
+#endif
         {
             if (!this->priorityBoost)
             {
@@ -4781,6 +4802,7 @@ bool Recycler::AbortConcurrent(bool restoreState)
             {
                 this->ResetMarkCollectionState();
             }
+            //TODO:akatti: Do we need to handle the Pass1Wait state and finish ConcurrentSweep here??
             else if (collectionState == CollectionStateTransferSweptWait)
             {
                 // Make sure we don't do another GC after finishing this one.
@@ -5726,32 +5748,53 @@ Recycler::FinishConcurrentCollect(CollectionFlags flags)
     else if (collectionState == CollectionStateConcurrentSweepPass1Wait)
     {
         this->FinishSweepPrep();
+        this->collectionState = CollectionStateConcurrentSweepPass2;
 
         if (forceInThread)
         {
-            this->collectionState = CollectionStateConcurrentSweepPass2;
+#ifdef RECYCLER_TRACE
+            if (this->GetRecyclerFlagsTable().Trace.IsEnabled(Js::ConcurrentSweepPhase))
+            {
+                Output::Print(_u("[GC #%d] Finishing Sweep Pass2 in-thread. \n"), this->collectionCount);
+            }
+#endif
             this->recyclerSweep->FinishSweep();
             this->FinishConcurrentSweep();
-            this->collectionState = CollectionStateConcurrentSweepPass2Wait;
+            this->recyclerSweep->EndBackground();
+            //this->collectionState = CollectionStateConcurrentSweepPass2Wait;
 
-            if (this->recyclerSweep != nullptr)
-            {
-                this->recyclerSweep->EndBackground();
-            }
+            uint sweptBytes = 0;
+#ifdef RECYCLER_STATS
+            sweptBytes = (uint)collectionStats.objectSweptBytes;
+#endif
+
+            GCETW(GC_BACKGROUNDSWEEP_STOP, (this, sweptBytes));
+            GCETW_INTERNAL(GC_STOP, (this, ETWEvent_ConcurrentSweep));
+
+            this->collectionState = CollectionStateTransferSweptWait;
+            RECYCLER_PROFILE_EXEC_BACKGROUND_END(this, Js::ConcurrentSweepPhase);
 
-            collectionState = CollectionStateTransferSweptWait;
             FinishTransferSwept(flags);
         }
         else
         {
-            // Signal the background thread to do SweepPendingObjects for all the buckets.
+            // Signal the background thread to finish concurrent sweep Pass2 for all the buckets.
             SetEvent(this->concurrentWorkReadyEvent);
             needConcurrentSweep = true;
         }
     }
+    //else if (collectionState == CollectionStateConcurrentSweepPass2Wait)
+    //{
+    //    // This needs to happen in-thread as we will return the swept blocks from the SLIST to the heapBlockList.
+    //    this->FinishConcurrentSweep();
+
+    //    collectionState = CollectionStateTransferSweptWait;
+    //    FinishTransferSwept(flags);
+    //}
 #endif
     else
     {
+        AssertMsg(this->collectionState == CollectionStateTransferSweptWait, "Do we need to handle this state?");
         FinishTransferSwept(flags);
     }
 
@@ -5953,7 +5996,7 @@ Recycler::DoBackgroundWork(bool forceForeground)
 #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
         if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
         {
-            Assert(this->collectionState == CollectionStateConcurrentSweepPass1);
+            Assert(this->collectionState == CollectionStateConcurrentSweepPass1 || this->collectionState == CollectionStateConcurrentSweepPass2);
         }
         else
 #endif
@@ -6002,10 +6045,28 @@ Recycler::DoBackgroundWork(bool forceForeground)
         {
             if (this->collectionState == CollectionStateConcurrentSweepPass2)
             {
+#ifdef RECYCLER_TRACE
+                if (this->GetRecyclerFlagsTable().Trace.IsEnabled(Js::ConcurrentSweepPhase))
+                {
+                    Output::Print(_u("[GC #%d] Finishing Sweep Pass2 on background thread. \n"), this->collectionCount);
+                }
+#endif
+#if ENABLE_BACKGROUND_PAGE_ZEROING
+                if (CONFIG_FLAG(EnableBGFreeZero))
+                {
+                    // Drain the zero queue again as we might have free more during sweep
+                    // in the background
+                    GCETW(GC_BACKGROUNDZEROPAGE_START, (this));
+                    recyclerPageAllocator.BackgroundZeroQueuedPages();
+#ifdef RECYCLER_WRITE_BARRIER_ALLOC_SEPARATE_PAGE
+                    recyclerWithBarrierPageAllocator.BackgroundZeroQueuedPages();
+#endif
+                    recyclerLargeBlockPageAllocator.BackgroundZeroQueuedPages();
+                    GCETW(GC_BACKGROUNDZEROPAGE_STOP, (this));
+                }
+#endif
                 this->recyclerSweep->FinishSweep();
-
                 this->FinishConcurrentSweep();
-
                 this->recyclerSweep->EndBackground();
 
                 this->collectionState = CollectionStateConcurrentSweepPass2Wait;
@@ -6201,17 +6262,18 @@ Recycler::ThreadProc()
 void
 Recycler::FinishSweepPrep()
 {
+    GCETW_INTERNAL(GC_START, (this, ETWEvent_ConcurrentSweep_FinishSweepPrep));
     this->autoHeap.FinishSweepPrep(this->recyclerSweepInstance);
-
-    // Begin the actual sweep i.e. SweepPendingObjects on the background thead.
-    collectionState = CollectionStateConcurrentSweepPass2;
+    GCETW_INTERNAL(GC_STOP, (this, ETWEvent_ConcurrentSweep_FinishSweepPrep));
 }
 
 void
 Recycler::FinishConcurrentSweep()
 {
 #if SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
+    GCETW_INTERNAL(GC_START, (this, ETWEvent_ConcurrentSweep_FinishConcurrentSweep));
     this->autoHeap.FinishConcurrentSweep();
+    GCETW_INTERNAL(GC_STOP, (this, ETWEvent_ConcurrentSweep_FinishConcurrentSweep));
 #endif
 }
 #endif
@@ -6892,6 +6954,17 @@ Recycler::PrintCollectTrace(Js::Phase phase, bool finish, bool noConcurrentWork)
 }
 #endif
 
+#ifdef RECYCLER_TRACE
+void
+Recycler::PrintBlockStatus(HeapBucket * heapBucket, HeapBlock * heapBlock, char16 const * statusMessage)
+{
+    if (this->GetRecyclerFlagsTable().Trace.IsEnabled(Js::ConcurrentSweepPhase))
+    {
+        Output::Print(_u("[GC #%d] [HeapBucket 0x%p] HeapBlock 0x%p %s [CollectionState: %d] \n"), this->collectionCount, heapBucket, heapBlock, statusMessage, this->collectionState);
+    }
+}
+#endif
+
 #ifdef RECYCLER_STATS
 void
 Recycler::PrintHeapBlockStats(char16 const * name, HeapBlock::HeapBlockType type)

+ 4 - 5
lib/Common/Memory/Recycler.h

@@ -783,8 +783,9 @@ private:
     DListBase<GuestArenaAllocator> guestArenaList;
     DListBase<ArenaData*> externalGuestArenaList;    // guest arenas are scanned for roots
 
-#ifdef RECYCLER_PAGE_HEAP
     bool isPageHeapEnabled;
+
+#ifdef RECYCLER_PAGE_HEAP
     bool capturePageHeapAllocStack;
     bool capturePageHeapFreeStack;
 
@@ -887,10 +888,8 @@ private:
 
     bool inDispose;
 
-#if DBG
-    uint collectionCount;
-#endif
 #if DBG || defined RECYCLER_TRACE
+    uint collectionCount;
     bool inResolveExternalWeakReferences;
 #endif
 
@@ -1068,6 +1067,7 @@ private:
 #endif
 #ifdef RECYCLER_TRACE
     CollectionParam collectionParam;
+    void PrintBlockStatus(HeapBucket * heapBucket, HeapBlock * heapBlock, char16 const * name);
 #endif
 #ifdef RECYCLER_MEMORY_VERIFY
     uint verifyPad;
@@ -2210,7 +2210,6 @@ public:
     virtual bool FindHeapObject(void* objectAddress, Recycler * recycler, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject) override { Assert(false); return false; }
     virtual bool TestObjectMarkedBit(void* objectAddress) override { Assert(false); return false; }
     virtual void SetObjectMarkedBit(void* objectAddress) override { Assert(false); }
-
 #ifdef RECYCLER_VERIFY_MARK
     virtual bool VerifyMark(void * objectAddress, void * target) override { Assert(false); return false; }
 #endif

+ 4 - 1
lib/Common/Memory/Recycler.inl

@@ -549,7 +549,10 @@ Recycler::NotifyFree(T * heapBlock)
         this->isForceSweeping = true;
         heapBlock->isForceSweeping = true;
 #endif
-        heapBlock->template SweepObjects<SweepMode_InThread>(this);
+#ifdef RECYCLER_TRACE
+        this->PrintBlockStatus(nullptr, heapBlock, _u("[**34**] calling SweepObjects during NotifyFree."));
+#endif
+        heapBlock->template SweepObjects<SweepMode_InThread>(this, false /*onlyRecalculateMarkCountAndFreeBits*/);
 #if DBG || defined(RECYCLER_STATS)
         heapBlock->isForceSweeping = false;
         this->isForceSweeping = false;

+ 1 - 1
lib/Common/Memory/RecyclerWeakReference.h

@@ -355,7 +355,7 @@ private:
         entry->weakRefHeapBlock = weakRefHeapBlock;
 
 #ifdef RECYCLER_TRACE_WEAKREF
-        Output::Print(_u("Add 0x%08x to bucket %d\n"), entry, targetBucket);
+        Output::Print(_u("Add WeakRef 0x%08x for StrongRef %p to bucket %d\n"), entry, strongReference, targetBucket);
 #endif
         AddEntry(entry, &buckets[targetBucket]);
         count++;

+ 7 - 3
lib/Common/Memory/SmallBlockDeclarations.inl

@@ -53,7 +53,7 @@ template bool SmallHeapBlockT<TBlockTypeAttributes>::GetFreeObjectListOnAllocato
 // template const SmallHeapBlockT<TBlockTypeAttributes>::SmallHeapBlockBitVector * HeapInfo::ValidPointersMap<TBlockTypeAttributes>::GetInvalidBitVector(uint index) const;
 
 // Explicit instantiate all the sweep mode
-template void SmallHeapBlockT<TBlockTypeAttributes>::SweepObjects<SweepMode_InThread>(Recycler * recycler);
+template void SmallHeapBlockT<TBlockTypeAttributes>::SweepObjects<SweepMode_InThread>(Recycler * recycler, bool onlyRecalculateMarkCountAndFreeBits);
 #if ENABLE_CONCURRENT_GC
 template <>
 template <>
@@ -64,7 +64,7 @@ SmallHeapBlockT<TBlockTypeAttributes>::SweepObject<SweepMode_Concurrent>(Recycle
     EnqueueProcessedObject(&freeObjectList, addr, i);
 }
 // Explicit instantiate all the sweep mode
-template void SmallHeapBlockT<TBlockTypeAttributes>::SweepObjects<SweepMode_Concurrent>(Recycler * recycler);
+template void SmallHeapBlockT<TBlockTypeAttributes>::SweepObjects<SweepMode_Concurrent>(Recycler * recycler, bool onlyRecalculateMarkCountAndFreeBits);
 #if ENABLE_PARTIAL_GC
 template <>
 template <>
@@ -84,7 +84,7 @@ SmallHeapBlockT<TBlockTypeAttributes>::SweepObject<SweepMode_ConcurrentPartial>(
 }
 
 // Explicit instantiate all the sweep mode
-template void SmallHeapBlockT<TBlockTypeAttributes>::SweepObjects<SweepMode_ConcurrentPartial>(Recycler * recycler);
+template void SmallHeapBlockT<TBlockTypeAttributes>::SweepObjects<SweepMode_ConcurrentPartial>(Recycler * recycler, bool onlyRecalculateMarkCountAndFreeBits);
 #endif
 #endif
 
@@ -98,7 +98,11 @@ SmallHeapBlockT<TBlockTypeAttributes>::SweepObject<SweepMode_InThread>(Recycler
         Assert(this->IsAnyFinalizableBlock());
 
 #if ENABLE_CONCURRENT_GC
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+        Assert(!recycler->IsConcurrentExecutingState() && !recycler->IsConcurrentSweepState());
+#else
         Assert(!recycler->IsConcurrentExecutingState());
+#endif
 #endif
 
         // Call prepare finalize to do clean up that needs to be done immediately

+ 7 - 0
lib/Common/Memory/SmallFinalizableHeapBlock.cpp

@@ -134,6 +134,13 @@ SmallFinalizableHeapBlockT<TBlockAttributes>::SetAttributes(void * address, unsi
     __super::SetAttributes(address, attributes);
     finalizeCount++;
 
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    {
+        AssertMsg(!this->isPendingConcurrentSweepPrep, "Finalizable blocks don't support allocations during concurrent sweep.");
+    }
+#endif
+
 #ifdef RECYCLER_FINALIZE_CHECK
     HeapInfo * heapInfo = this->heapBucket->heapInfo;
     heapInfo->liveFinalizableObjectCount++;

+ 7 - 0
lib/Common/Memory/SmallFinalizableHeapBlock.h

@@ -58,6 +58,13 @@ public:
 
     void AddPendingDisposeObject()
     {
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+        if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+        {
+            AssertMsg(!this->isPendingConcurrentSweepPrep, "Finalizable blocks don't support allocations during concurrent sweep.");
+        }
+#endif
+
         this->pendingDisposeCount++;
         Assert(this->pendingDisposeCount <= this->objectCount);
     }

+ 15 - 1
lib/Common/Memory/SmallHeapBlockAllocator.cpp

@@ -27,6 +27,10 @@ SmallHeapBlockAllocator<TBlockType>::Initialize()
 
     this->prev = this;
     this->next = this;
+
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP 
+    DebugOnly(this->isAllocatingFromNewBlock = false);
+#endif
 }
 
 template <typename TBlockType>
@@ -134,7 +138,9 @@ SmallHeapBlockAllocator<TBlockType>::Clear()
 #endif
         this->freeObjectList = nullptr;
     }
-
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP 
+    DebugOnly(this->isAllocatingFromNewBlock = false);
+#endif
 }
 
 template <typename TBlockType>
@@ -155,6 +161,10 @@ SmallHeapBlockAllocator<TBlockType>::SetNew(BlockType * heapBlock)
     this->heapBlock = heapBlock;
     this->freeObjectList = (FreeObject *)heapBlock->GetAddress();
     this->endAddress = heapBlock->GetEndAddress();
+
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP 
+    DebugOnly(this->isAllocatingFromNewBlock = true);
+#endif
 }
 
 template <typename TBlockType>
@@ -175,6 +185,10 @@ SmallHeapBlockAllocator<TBlockType>::Set(BlockType * heapBlock)
     this->heapBlock = heapBlock;
     RECYCLER_SLOW_CHECK(this->heapBlock->CheckDebugFreeBitVector(true));
     this->freeObjectList = this->heapBlock->freeObjectList;
+
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP 
+    DebugOnly(this->isAllocatingFromNewBlock = false);
+#endif
 }
 
 

+ 40 - 0
lib/Common/Memory/SmallHeapBlockAllocator.h

@@ -75,6 +75,11 @@ private:
     char * endAddress;
     FreeObject * freeObjectList;
     TBlockType * heapBlock;
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP 
+#if DBG
+    bool isAllocatingFromNewBlock;
+#endif
+#endif
 
     SmallHeapBlockAllocator * prev;
     SmallHeapBlockAllocator * next;
@@ -193,6 +198,41 @@ SmallHeapBlockAllocator<TBlockType>::InlinedAllocImpl(Recycler * recycler, DECLS
             BOOL isSet = heapBlock->GetDebugFreeBitVector()->TestAndClear(heapBlock->GetAddressBitIndex(memBlock));
             Assert(isSet);
         }
+#endif
+#if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+        if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+        {
+            // If we are allocating during concurrent sweep we must mark the object to prevent it from being swept
+            // in the ongoing sweep.
+            if (heapBlock != nullptr && heapBlock->isPendingConcurrentSweepPrep)
+            {
+                AssertMsg(!this->isAllocatingFromNewBlock, "We shouldn't be tracking allocation to a new block, i.e. bump allocation, during concurrent sweep.");
+                AssertMsg(!heapBlock->IsAnyFinalizableBlock(), "Allocations are not allowed to finalizable blocks during concurrent sweep.");
+                AssertMsg(heapBlock->heapBucket->AllocationsStartedDuringConcurrentSweep(), "We shouldn't be allocating from this block while allocations are disabled.");
+
+                // Explicitly mark this object and also clear the free bit.
+                uint bitIndex = heapBlock->GetAddressBitIndex(memBlock);
+                Assert(heapBlock->IsValidBitIndex(bitIndex));
+
+                heapBlock->GetMarkedBitVector()->Set(bitIndex);
+                heapBlock->GetFreeBitVector()->Clear(bitIndex);
+#if DBG
+                heapBlock->GetDebugFreeBitVector()->Clear(bitIndex);
+#endif
+
+                DebugOnly(heapBlock->objectsMarkedDuringSweep++);
+                // We need to keep track of the number of objects allocated during concurrent sweep, to be
+                // able to make the correct determination about whether a block is EMPTY or FULL when the actual
+                // sweep of this block happens.
+                heapBlock->objectsAllocatedDuringConcurrentSweepCount++;
+#ifdef RECYCLER_TRACE
+                if (recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::ConcurrentSweepPhase) && recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::MemoryAllocationPhase))
+                {
+                    Output::Print(_u("[**33**]FreeListAlloc: Object 0x%p from HeapBlock 0x%p used for allocation during ConcurrentSweep [CollectionState: %d] \n"), memBlock, heapBlock, recycler->collectionState);
+                }
+#endif
+            }
+        }
 #endif
         return memBlock;
     }

+ 160 - 25
lib/Common/Memory/SmallNormalHeapBucket.cpp

@@ -42,7 +42,7 @@ SmallNormalHeapBucketBase<TBlockType>::ScanInitialImplicitRoots(Recycler * recyc
     });
 
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
-    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc) && !this->IsAnyFinalizableBucket())
     {
         HeapBlockList::ForEach(this->sweepableHeapBlockList, [recycler](TBlockType * heapBlock)
         {
@@ -246,20 +246,31 @@ SmallNormalHeapBucketBase<TBlockType>::SweepPendingObjects(RecyclerSweep& recycl
                 // The sweepable objects will be collected in a future Sweep.
 
                 // Note, page heap blocks are never swept concurrently
-                heapBlock->template SweepObjects<SweepMode_ConcurrentPartial>(recycler);
+#ifdef RECYCLER_TRACE
+                recycler->PrintBlockStatus(this, heapBlock, _u("[**17**] calling SweepObjects."));
+#endif
+                heapBlock->template SweepObjects<SweepMode_ConcurrentPartial>(recycler, false /*onlyRecalculateMarkCountAndFreeBits*/);
 
                 // page heap mode should never reach here, so don't check pageheap enabled or not
+                DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
                 if (heapBlock->HasFreeObject())
                 {
+                    AssertMsg(!HeapBlockList::Contains(heapBlock, partialSweptHeapBlockList), "The heap block already exists in the partialSweptHeapBlockList.");
                     // We have pre-existing free objects, so put this in the partialSweptHeapBlockList
                     heapBlock->SetNextBlock(this->partialSweptHeapBlockList);
                     this->partialSweptHeapBlockList = heapBlock;
+#ifdef RECYCLER_TRACE
+                    this->GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**21**] finished SweepPendingObjects, heapblock added to partialSweptHeapBlockList."));
+#endif
                 }
                 else
                 {
                     // No free objects, so put in the fullBlockList
                     heapBlock->SetNextBlock(this->fullBlockList);
                     this->fullBlockList = heapBlock;
+#ifdef RECYCLER_TRACE
+                    this->GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**22**] finished SweepPendingObjects, heapblock FULL added to fullBlockList."));
+#endif
                 }
             });
         }
@@ -274,6 +285,7 @@ SmallNormalHeapBucketBase<TBlockType>::SweepPendingObjects(RecyclerSweep& recycl
             {
                 if (this->AllowAllocationsDuringConcurrentSweep() && !this->AllocationsStartedDuringConcurrentSweep())
                 {
+                    Assert(!this->IsAnyFinalizableBucket());
                     this->StartAllocationDuringConcurrentSweep();
                 }
             }
@@ -309,19 +321,45 @@ SmallNormalHeapBucketBase<TBlockType>::SweepPendingObjects(Recycler * recycler,
     HeapBlockList::ForEachEditing(list, [this, recycler, &tail](TBlockType * heapBlock)
     {
         // Note, page heap blocks are never swept concurrently
-        heapBlock->template SweepObjects<mode>(recycler);
+#ifdef RECYCLER_TRACE
+        recycler->PrintBlockStatus(this, heapBlock, _u("[**18**] calling SweepObjects."));
+#endif
+        heapBlock->template SweepObjects<mode>(recycler, false /*onlyRecalculateMarkCountAndFreeBits*/);
         tail = heapBlock;
 
 #if ENABLE_CONCURRENT_GC && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
         if (this->AllowAllocationsDuringConcurrentSweep())
         {
-            bool blockAddedToSList = HeapBucketT<TBlockType>::PushHeapBlockToSList(this->allocableHeapBlockListHead, heapBlock);
+            Assert(!this->IsAnyFinalizableBucket());
+            DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
+            // If we exhausted the free list during this sweep, we will need to send this block to the FullBlockList.
+            if (heapBlock->HasFreeObject())
+            {
+                bool blockAddedToSList = HeapBucketT<TBlockType>::PushHeapBlockToSList(this->allocableHeapBlockListHead, heapBlock);
 
-            // If we encountered OOM while pushing the heapBlock to the SLIST we must add it to the heapBlockList so we don't lose track of it.
-            if (!blockAddedToSList)
+                // If we encountered OOM while pushing the heapBlock to the SLIST we must add it to the heapBlockList so we don't lose track of it.
+                if (!blockAddedToSList)
+                {
+#ifdef RECYCLER_TRACE
+                    recycler->PrintBlockStatus(this, heapBlock, _u("[**23**] finished SweepPendingObjects, OOM while adding to the SLIST."));
+#endif
+                    recycler->OutOfMemory();
+                }
+#ifdef RECYCLER_TRACE
+                else
+                {
+                    recycler->PrintBlockStatus(this, heapBlock, _u("[**24**] finished SweepPendingObjects, heapblock added to SLIST allocableHeapBlockListHead."));
+                }
+#endif
+            }
+            else
             {
-                heapBlock->SetNextBlock(this->heapBlockList);
-                this->heapBlockList = heapBlock;
+                DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
+                heapBlock->SetNextBlock(this->fullBlockList);
+                this->fullBlockList = heapBlock;
+#ifdef RECYCLER_TRACE
+                recycler->PrintBlockStatus(this, heapBlock, _u("[**25**] finished SweepPendingObjects, heapblock added to fullBlockList."));
+#endif
             }
         }
 #endif
@@ -347,7 +385,7 @@ SmallNormalHeapBucketBase<TBlockType>::SweepPartialReusePages(RecyclerSweep& rec
     TBlockType *& reuseBlocklist, TBlockType *&unusedBlockList, bool allocationsAllowedDuringConcurrentSweep, Fn callback)
 {
     HeapBlockList::ForEachEditing(heapBlockList,
-        [&recyclerSweep, &reuseBlocklist, &unusedBlockList, callback, allocationsAllowedDuringConcurrentSweep](TBlockType * heapBlock)
+        [&recyclerSweep, &reuseBlocklist, &unusedBlockList, callback, allocationsAllowedDuringConcurrentSweep, this](TBlockType * heapBlock)
     {
         uint expectFreeByteCount;
         if (heapBlock->DoPartialReusePage(recyclerSweep, expectFreeByteCount))
@@ -361,6 +399,7 @@ SmallNormalHeapBucketBase<TBlockType>::SweepPartialReusePages(RecyclerSweep& rec
             if(!allocationsAllowedDuringConcurrentSweep)
 #endif
             {
+                DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
                 // Reuse the page
                 heapBlock->SetNextBlock(reuseBlocklist);
                 reuseBlocklist = heapBlock;
@@ -374,6 +413,7 @@ SmallNormalHeapBucketBase<TBlockType>::SweepPartialReusePages(RecyclerSweep& rec
             // Don't not reuse the page if it don't have much free memory.
             callback(heapBlock, false);
 
+            DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
             heapBlock->SetNextBlock(unusedBlockList);
             unusedBlockList = heapBlock;
 
@@ -394,6 +434,7 @@ SmallNormalHeapBucketBase<TBlockType>::SweepPartialReusePages(RecyclerSweep& rec
 #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
     if (this->AllowAllocationsDuringConcurrentSweep() && !this->AllocationsStartedDuringConcurrentSweep())
     {
+        Assert(!this->IsAnyFinalizableBucket());
         this->StartAllocationDuringConcurrentSweep();
     }
 #endif
@@ -407,17 +448,61 @@ SmallNormalHeapBucketBase<TBlockType>::SweepPartialReusePages(RecyclerSweep& rec
 #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
         if (isReused)
         {
-            if (this->AllowAllocationsDuringConcurrentSweep())
-            {
-                bool blockAddedToSList = HeapBucketT<TBlockType>::PushHeapBlockToSList(this->allocableHeapBlockListHead, heapBlock);
+            DebugOnly(heapBlock->blockNotReusedInPartialHeapBlockList = false);
 
-                // If we encountered OOM while pushing the heapBlock to the SLIST we must add it to the heapBlockList so we don't lose track of it.
-                if (!blockAddedToSList)
+            DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
+            if (heapBlock->HasFreeObject())
+            {
+                if (this->AllowAllocationsDuringConcurrentSweep())
                 {
-                    heapBlock->SetNextBlock(this->heapBlockList);
-                    this->heapBlockList = heapBlock;
+                    Assert(!this->IsAnyFinalizableBucket());
+                    bool blockAddedToSList = HeapBucketT<TBlockType>::PushHeapBlockToSList(this->allocableHeapBlockListHead, heapBlock);
+
+                    // If we encountered OOM while pushing the heapBlock to the SLIST we must add it to the heapBlockList so we don't lose track of it.
+                    if (!blockAddedToSList)
+                    {
+#ifdef RECYCLER_TRACE
+                        this->GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**10**] finished SweepPartialReusePages, heapblock REUSED but OOM while adding to the SLIST."));
+#endif
+                        this->GetRecycler()->OutOfMemory();
+                    }
+#ifdef RECYCLER_TRACE
+                    else
+                    {
+                        this->GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**11**] finished SweepPartialReusePages, heapblock REUSED added to SLIST allocableHeapBlockListHead."));
+                    }
+#endif
                 }
             }
+            else
+            {
+                heapBlock->SetNextBlock(this->fullBlockList);
+                this->fullBlockList = heapBlock;
+#ifdef RECYCLER_TRACE
+                this->GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**27**] finished SweepPartialReusePages, heapblock FULL added to fullBlockList."));
+#endif
+            }
+        }
+        else
+        {
+            DebugOnly(heapBlock->blockNotReusedInPartialHeapBlockList = true);
+#ifdef RECYCLER_TRACE
+            this->GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**12**] finished SweepPartialReusePages, heapblock NOT REUSED, added to partialHeapBlockList."));
+#endif
+
+            //TODO:akatti:PERF Do we need to do this for non-debug builds? We might be able to skip
+            // this if this is only needed for passing debug asserts.
+            // If we allocated from this block during concurrent sweep, the block may now have become
+            // full (or almost full) and hence not reusable; in that case we must recalculate the mark
+            // count and rebuild the free bits for this block.
+            if (heapBlock->objectsAllocatedDuringConcurrentSweepCount > 0)
+            {
+                Assert(!this->IsAnyFinalizableBucket());
+#ifdef RECYCLER_TRACE
+                this->GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**19**] calling SweepObjects to recalculate mark and free bits ONLY."));
+#endif
+                heapBlock->template SweepObjects<SweepMode_InThread>(this->GetRecycler(), true /*onlyRecalculateMarkCountAndFreeBits*/);
+            }
         }
 #endif
     });
@@ -436,29 +521,65 @@ SmallNormalHeapBucketBase<TBlockType>::SweepPartialReusePages(RecyclerSweep& rec
         {
             if (isReused)
             {
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+                DebugOnly(heapBlock->blockNotReusedInPendingList = false);
+#endif
                 // Finalizable blocks are always swept in thread, so shouldn't be here
                 Assert(!heapBlock->IsAnyFinalizableBlock());
 
                 // Page heap blocks are never swept concurrently
-                heapBlock->template SweepObjects<SweepMode_InThread>(recycler);
+#ifdef RECYCLER_TRACE
+                recycler->PrintBlockStatus(this, heapBlock, _u("[**20**] calling SweepObjects."));
+#endif
+                heapBlock->template SweepObjects<SweepMode_InThread>(recycler, false /*onlyRecalculateMarkCountAndFreeBits*/);
 #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP && SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
-                if (this->AllowAllocationsDuringConcurrentSweep())
+                DebugOnly(AssertCheckHeapBlockNotInAnyList(heapBlock));
+                if (heapBlock->HasFreeObject())
                 {
-                    bool blockAddedToSList = HeapBucketT<TBlockType>::PushHeapBlockToSList(this->allocableHeapBlockListHead, heapBlock);
-
-                    // If we encountered OOM while pushing the heapBlock to the SLIST we must add it to the heapBlockList so we don't lose track of it.
-                    if (!blockAddedToSList)
+                    if (this->AllowAllocationsDuringConcurrentSweep())
                     {
-                        heapBlock->SetNextBlock(this->heapBlockList);
-                        this->heapBlockList = heapBlock;
+                        Assert(!this->IsAnyFinalizableBucket());
+                        bool blockAddedToSList = HeapBucketT<TBlockType>::PushHeapBlockToSList(this->allocableHeapBlockListHead, heapBlock);
+
+                        // If we encountered OOM while pushing the heapBlock to the SLIST we must add it to the heapBlockList so we don't lose track of it.
+                        if (!blockAddedToSList)
+                        {
+#ifdef RECYCLER_TRACE
+                            this->GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**13**] finished SweepPartialReusePages, heapblock REUSED but OOM while adding to the SLIST."));
+#endif
+                            this->GetRecycler()->OutOfMemory();
+                        }
+#ifdef RECYCLER_TRACE
+                        else
+                        {
+                            this->GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**14**] finished SweepPartialReusePages, heapblock from PendingSweepList REUSED added to SLIST allocableHeapBlockListHead."));
+                        }
+#endif
                     }
                 }
+                else
+                {
+                    heapBlock->SetNextBlock(this->fullBlockList);
+                    this->fullBlockList = heapBlock;
+#ifdef RECYCLER_TRACE
+                    this->GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**28**] finished SweepPartialReusePages, heapblock FULL added to fullBlockList."));
+#endif
+                }
 #endif
 
                 // This block has been counted as concurrently swept, and now we changed our mind
                 // and sweep it in thread. Remove the count
                 RECYCLER_STATS_DEC(recycler, heapBlockConcurrentSweptCount[heapBlock->GetHeapBlockType()]);
             }
+            else
+            {
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+                DebugOnly(heapBlock->blockNotReusedInPendingList = true);
+#endif
+#ifdef RECYCLER_TRACE
+                this->GetRecycler()->PrintBlockStatus(this, heapBlock, _u("[**15**] finished SweepPartialReusePages, heapblock NOT REUSED added to pendingSweepList."));
+#endif
+            }
         }
     );
 #endif
@@ -590,7 +711,19 @@ SmallNormalHeapBucketBase<TBlockType>::GetNonEmptyHeapBlockCount(bool checkCount
 #if ENABLE_CONCURRENT_GC
     currentHeapBlockCount += HeapBlockList::Count(partialSweptHeapBlockList);
 #endif
-    RECYCLER_SLOW_CHECK(Assert(!checkCount || this->heapBlockCount == currentHeapBlockCount));
+    bool allocatingDuringConcurrentSweep = false;
+
+#if ENABLE_CONCURRENT_GC
+#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
+#if SUPPORT_WIN32_SLIST && ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP_USE_SLIST
+    if (CONFIG_FLAG_RELEASE(EnableConcurrentSweepAlloc))
+    {
+        allocatingDuringConcurrentSweep = true;
+    }
+#endif
+#endif
+#endif
+    RECYCLER_SLOW_CHECK(Assert(!checkCount || this->heapBlockCount == currentHeapBlockCount || (this->heapBlockCount >= 65535 && allocatingDuringConcurrentSweep)));
     return currentHeapBlockCount;
 }
 #endif
@@ -607,6 +740,8 @@ SmallNormalHeapBucketBase<TBlockType>::Check(bool checkCount)
     Assert(partialSweptHeapBlockList == nullptr || this->GetRecycler()->inPartialCollectMode);
     smallHeapBlockCount += HeapInfo::Check(false, false, this->partialSweptHeapBlockList);
 #endif
+
+    //TODO:akatti: Can this assert fail when blocks are in the SLIST and there are > 65535 blocks?
     Assert(!checkCount || this->heapBlockCount == smallHeapBlockCount);
     return smallHeapBlockCount;
 }

+ 1 - 1
lib/Common/Memory/SmallNormalHeapBucket.h

@@ -44,7 +44,7 @@ protected:
     ~SmallNormalHeapBucketBase();
 
     template <class Fn>
-    static void SweepPartialReusePages(RecyclerSweep& recyclerSweep, TBlockType * heapBlockList,
+    void SweepPartialReusePages(RecyclerSweep& recyclerSweep, TBlockType * heapBlockList,
         TBlockType *& reuseBlocklist, TBlockType *&unusedBlockList, bool allocationsAllowedDuringConcurrentSweep, Fn callBack);
     void SweepPartialReusePages(RecyclerSweep& recyclerSweep);
     void FinishPartialCollect(RecyclerSweep * recyclerSweep);