Browse Source

Adding notion of weak reference regions, and concurrently marking those regions.

Also adding new weak ref region dictionary to make use of this type.
Jimmy Thomson, 8 years ago
parent
commit
89fc0ac566

+ 3 - 0
lib/Common/CommonDefines.h

@@ -263,6 +263,9 @@
 #define RECYCLER_VISITED_HOST
 #endif
 
+
+#define ENABLE_WEAK_REFERENCE_REGIONS 1
+
 // JIT features
 
 #if DISABLE_JIT

+ 11 - 9
lib/Common/DataStructures/BaseDictionary.h

@@ -102,6 +102,8 @@ namespace JsUtil
         Field(int) freeCount;
         Field(int) modFunctionIndex;
 
+        static const int FreeListSentinel = -2;
+
 #if PROFILE_DICTIONARY
         FieldNoBarrier(DictionaryStats*) stats;
 #endif
@@ -780,9 +782,9 @@ namespace JsUtil
 
         static bool IsFreeEntry(const EntryType &entry)
         {
-            // A free entry's next index will be (-2 - nextIndex), such that it is always <= -2, for fast entry iteration
+            // A free entry's next index will be (FreeListSentinel - nextIndex), such that it is always <= FreeListSentinel, for fast entry iteration
             // allowing for skipping over free entries. -1 is reserved for the end-of-chain marker for a used entry.
-            return entry.next <= -2;
+            return entry.next <= FreeListSentinel;
         }
 
         void SetNextFreeEntryIndex(EntryType &freeEntry, const int nextFreeEntryIndex)
@@ -791,15 +793,15 @@ namespace JsUtil
             Assert(nextFreeEntryIndex >= -1);
             Assert(nextFreeEntryIndex < count);
 
-            // The last entry in the free list chain will have a next of -2 to indicate that it is a free entry. The end of the
+            // The last entry in the free list chain will have a next of FreeListSentinel to indicate that it is a free entry. The end of the
             // free list chain is identified using freeCount.
-            freeEntry.next = nextFreeEntryIndex >= 0 ? -2 - nextFreeEntryIndex : -2;
+            freeEntry.next = nextFreeEntryIndex >= 0 ? FreeListSentinel - nextFreeEntryIndex : FreeListSentinel;
         }
 
         static int GetNextFreeEntryIndex(const EntryType &freeEntry)
         {
             Assert(IsFreeEntry(freeEntry));
-            return -2 - freeEntry.next;
+            return FreeListSentinel - freeEntry.next;
         }
 
         template <typename LookupType>
@@ -1327,8 +1329,8 @@ namespace JsUtil
                 bucketIndex(0u - 1)
             #if DBG
                 ,
-                previousEntryIndexInBucket(-2),
-                indexOfEntryAfterRemovedEntry(-2)
+                previousEntryIndexInBucket(FreeListSentinel),
+                indexOfEntryAfterRemovedEntry(FreeListSentinel)
             #endif
             {
                 if(dictionary.Count() != 0)
@@ -1345,9 +1347,9 @@ namespace JsUtil
                 Assert(this->entryIndex >= -1);
                 Assert(this->entryIndex < dictionary.count);
                 Assert(bucketIndex == 0u - 1 || bucketIndex <= bucketCount);
-                Assert(previousEntryIndexInBucket >= -2);
+                Assert(previousEntryIndexInBucket >= FreeListSentinel);
                 Assert(previousEntryIndexInBucket < dictionary.count);
-                Assert(indexOfEntryAfterRemovedEntry >= -2);
+                Assert(indexOfEntryAfterRemovedEntry >= FreeListSentinel);
                 Assert(indexOfEntryAfterRemovedEntry < dictionary.count);
 
                 return Base::IsValid() && this->entryIndex >= 0;

+ 37 - 0
lib/Common/DataStructures/DictionaryEntry.h

@@ -118,11 +118,23 @@ namespace JsUtil
         TKey const& Key() const  { return key; }
     };
 
    // Key-only entry: reuses ValueEntry's single storage slot to hold the key,
    // so Key() simply returns the stored value. Used by dictionaries that keep
    // their values outside of the entry array (e.g. in a weak reference region).
    template <class TKey>
    class BaseKeyEntry : public ValueEntry<TKey>
    {
    public:
        TKey const& Key() const { return this->value; }
    };
+
     template <class TKey, class TValue>
     class KeyValueEntry : public BaseKeyValueEntry<TKey, TValue>
     {
     };
 
    // Default key-only entry type, mirroring KeyValueEntry's structure so the
    // same entry-template machinery can be used with or without inline values.
    template <class TKey>
    class KeyEntry : public BaseKeyEntry<TKey>
    {
    };
+
     template <class TKey, class TValue>
     class KeyValueEntry<TKey*, TValue> : public BaseKeyValueEntry<TKey*, TValue>
     {
@@ -167,6 +179,28 @@ namespace JsUtil
         }
     };
 
    // Adds hashing/equality helpers to a key-only entry. Unlike CacheHashedEntry,
    // no hash code is cached: it is recomputed from the key on demand, and the
    // hashCode argument to Set is ignored.
    template <class TKey, template <class K> class THashKeyEntry>
    class DefaultHashedKeyEntry : public THashKeyEntry<TKey>
    {
    public:
        // Compares keys only; otherHashCode is unused since no hash is cached.
        template<typename Comparer, typename TLookup>
        inline bool KeyEquals(TLookup const& otherKey, hash_t otherHashCode)
        {
            return Comparer::Equals(this->Key(), otherKey);
        }

        // Recomputes a tagged hash: the low bit is set to 1 so the value cannot
        // be mistaken for a valid recycler address (see the tagging convention
        // used by the weak reference dictionaries).
        template<typename Comparer>
        inline hash_t GetHashCode()
        {
            return ((Comparer::GetHashCode(this->Key()) & 0x7fffffff) << 1) | 1;
        }

        // Stores the key only; hashCode is accepted for signature compatibility
        // with cached-hash entry types but is not stored.
        void Set(TKey const& key, int hashCode)
        {
            __super::Set(key);
        }
    };
+
     template <class TKey, class TValue, template <class K, class V> class THashEntry>
     class CacheHashedEntry : public THashEntry<TKey, TValue>
     {
@@ -234,4 +268,7 @@ namespace JsUtil
             return (weakReference == nullptr || weakReference->Get() == nullptr);
         }
     };
+
    // A KeyEntry with default (non-cached) hashing — the entry type used by
    // weak reference region dictionaries, which store values out-of-line.
    template <class TKey>
    class SimpleDictionaryKeyEntry : public DefaultHashedKeyEntry<TKey, KeyEntry> {};
 }

+ 672 - 0
lib/Common/DataStructures/WeakReferenceDictionary.h

@@ -44,4 +44,676 @@ namespace JsUtil
 
         PREVENT_COPY(WeakReferenceDictionary);
     };
+
+#if ENABLE_WEAK_REFERENCE_REGIONS
+
    // Entry type for WeakReferenceRegionDictionary: only the key (plus the
    // next-chain index) lives in the entry; the TValue lives in a parallel
    // weak reference region owned by the dictionary.
    template <class TKey>
    class WeakRefRegionValueDictionaryEntry : public SimpleDictionaryKeyEntry<TKey>
    {
    public:
        // Resets the stored key to a default-constructed TKey; called when the
        // entry is freed, before the slot is pushed onto the free list.
        void Clear()
        {
            this->value = TKey();
        }
    };
+
+    // TODO: It would be good to adapt WeaklyReferencedKeyDictionary to also use WeakRefRegions
+    //       One possibility is to create a BaseSplitDictionary which has the collections of
+    //       buckets, entries, and RecyclerWeakReferenceRegionItems, and then the entries are
+    //       either value/next or key/next pairs, with the weak ref region storing the keys or
+    //       values in a weak manner. 
    // A hash dictionary that holds its keys strongly and its values weakly.
    //
    // Layout mirrors JsUtil::BaseDictionary (buckets / entries / free list),
    // except that the values are split out of the entries into a parallel
    // array of RecyclerWeakReferenceRegionItem<TValue> allocated from a
    // recycler weak reference region (see AllocateValues). The recycler can
    // therefore clear individual values without the dictionary pinning them,
    // and Cleanup() later prunes the corresponding entries.
    template <
        class TKey,
        class TValue,
        class SizePolicy = PowerOf2SizePolicy,
        template <typename ValueOrKey> class Comparer = DefaultComparer,
        typename Lock = NoResizeLock,
        class AllocType = Recycler // Should always be recycler; this is to sufficiently confuse the RecyclerChecker
    >
    class WeakReferenceRegionDictionary : protected Lock, public IWeakReferenceDictionary
    {
        typedef WeakRefRegionValueDictionaryEntry<TKey> EntryType;
        typedef RecyclerWeakReferenceRegionItem<TValue> ValueType;

        typedef typename AllocatorInfo<Recycler, TValue>::AllocatorFunc EntryAllocatorFuncType;

    private:
        Field(int*) buckets;            // bucketCount buckets; each holds the index of its first chained entry, or -1 if empty
        Field(EntryType*) entries;      // key + next-index entries; parallel to 'values'
        Field(ValueType*) values;       // weak reference region items; values[i] belongs to entries[i]
        FieldNoBarrier(Recycler*) alloc;
        Field(int) size;                // allocated capacity of entries/values
        Field(uint) bucketCount;
        Field(int) count;               // number of entry slots ever used (live + freed)
        Field(int) freeList;            // index of the first free entry; only meaningful when freeCount > 0
        Field(int) freeCount;
        Field(int) modFunctionIndex;    // SizePolicy state used to map hashes to buckets

        static const int FreeListSentinel = -2;

        PREVENT_COPY(WeakReferenceRegionDictionary);

        enum InsertOperations
        {
            Insert_Add,             // FatalInternalError if the item already exists in debug build
            Insert_AddNew,          // Ignore add if the item already exists
            Insert_Item             // Replace the item if it already exists
        };

        // RAII guard bracketing a resize with the Lock policy's Begin/EndResize.
        class AutoDoResize
        {
        public:
            AutoDoResize(Lock& lock) : lock(lock) { lock.BeginResize(); };
            ~AutoDoResize() { lock.EndResize(); };
        private:
            Lock & lock;
        };

    public:

        // IWeakReferenceDictionary: removes every entry whose weak value has
        // been cleared by the recycler (the region item compares equal to nullptr).
        virtual void Cleanup() override
        {
            this->MapAndRemoveIf([](EntryType& entry, ValueType& value)
            {
                return value == nullptr;
            });
        }

        // Constructs an empty dictionary. If capacity <= 0, the arrays are
        // allocated lazily on the first insert.
        WeakReferenceRegionDictionary(Recycler* allocator, int capacity = 0)
            : buckets(nullptr),
            entries(nullptr),
            values(nullptr),
            alloc(allocator),
            size(0),
            bucketCount(0),
            count(0),
            freeList(0),
            freeCount(0),
            modFunctionIndex(UNKNOWN_MOD_INDEX)
        {
            // The IWeakReferenceDictionary subobject must be at offset 0:
            // casting to the interface must not shift the pointer.
            Assert(reinterpret_cast<void*>(this) == reinterpret_cast<void*>((IWeakReferenceDictionary*)this));
            Assert(allocator);

            // If initial capacity is negative or 0, lazy initialization on
            // the first insert operation is performed.
            if (capacity > 0)
            {
                Initialize(capacity);
            }
        }

        inline int Capacity() const
        {
            return size;
        }

        // Number of live entries (used slots minus freed slots).
        inline int Count() const
        {
            return count - freeCount;
        }

        // Returns the value for 'key'; asserts that the key is present.
        TValue Item(const TKey& key)
        {
            int i = FindEntry(key);
            Assert(i >= 0);
            return values[i];
        }

        const TValue Item(const TKey& key) const
        {
            int i = FindEntry(key);
            Assert(i >= 0);
            return values[i];
        }

        // Adds a new key/value pair; asserts (in debug builds) if the key
        // already exists. Returns the entry index.
        int Add(const TKey& key, const TValue& value)
        {
            return Insert<Insert_Add>(key, value);
        }

        // Adds only if the key is absent; returns -1 if it already exists.
        int AddNew(const TKey& key, const TValue& value)
        {
            return Insert<Insert_AddNew>(key, value);
        }

        // Adds or replaces the value for 'key'; returns the entry index.
        int Item(const TKey& key, const TValue& value)
        {
            return Insert<Insert_Item>(key, value);
        }

        // True only if the key is present AND its value matches the pair's value.
        bool Contains(KeyValuePair<TKey, TValue> keyValuePair)
        {
            int i = FindEntry(keyValuePair.Key());
            if (i >= 0 && Comparer<TValue>::Equals(values[i], keyValuePair.Value()))
            {
                return true;
            }
            return false;
        }

        // Removes the entry only if both key and value match; returns whether removed.
        bool Remove(KeyValuePair<TKey, TValue> keyValuePair)
        {
            int i, last;
            uint targetBucket;
            if (FindEntryWithKey(keyValuePair.Key(), &i, &last, &targetBucket))
            {
                const TValue &value = values[i];
                if (Comparer<TValue>::Equals(value, keyValuePair.Value()))
                {
                    RemoveAt(i, last, targetBucket);
                    return true;
                }
            }
            return false;
        }

        // Empties the dictionary without releasing the allocated arrays.
        void Clear()
        {
            if (count > 0)
            {
                memset(buckets, -1, bucketCount * sizeof(buckets[0]));
                memset(entries, 0, sizeof(EntryType) * size);
                memset(values, 0, sizeof(ValueType) * size); // TODO: issues with background threads?
                count = 0;
                freeCount = 0;
            }
        }

        // Drops all state without freeing: the arrays are recycler-allocated
        // and will be reclaimed by the GC once unreferenced.
        void ResetNoDelete()
        {
            this->size = 0;
            this->bucketCount = 0;
            this->buckets = nullptr;
            this->entries = nullptr;
            this->values = nullptr;
            this->count = 0;
            this->freeCount = 0;
            this->modFunctionIndex = UNKNOWN_MOD_INDEX;
        }

        void Reset()
        {
            ResetNoDelete();
        }

        bool ContainsKey(const TKey& key) const
        {
            return FindEntry(key) >= 0;
        }

        // Returns the value for 'key', or 'defaultValue' if the key is absent.
        // NOTE(review): the return type is const TValue&, but values[i] is a
        // ValueType (weak region item); if its conversion to TValue yields a
        // temporary, this returns a dangling reference — confirm that
        // RecyclerWeakReferenceRegionItem exposes a TValue lvalue.
        template <typename TLookup>
        inline const TValue& LookupWithKey(const TLookup& key, const TValue& defaultValue) const
        {
            int i = FindEntryWithKey(key);
            if (i >= 0)
            {
                return values[i];
            }
            return defaultValue;
        }

        inline const TValue& Lookup(const TKey& key, const TValue& defaultValue) const
        {
            return LookupWithKey<TKey>(key, defaultValue);
        }

        // Copies the value into *value and returns true if the key is present.
        template <typename TLookup>
        bool TryGetValue(const TLookup& key, TValue* value) const
        {
            int i = FindEntryWithKey(key);
            if (i >= 0)
            {
                *value = values[i];
                return true;
            }
            return false;
        }


        // Like TryGetValue, but also removes the entry when found.
        bool TryGetValueAndRemove(const TKey& key, TValue* value)
        {
            int i, last;
            uint targetBucket;
            if (FindEntryWithKey(key, &i, &last, &targetBucket))
            {
                *value = values[i];
                RemoveAt(i, last, targetBucket);
                return true;
            }
            return false;
        }

        // Index-based accessors: indices are entry-slot positions as returned
        // by Add/AddNew/Item; a freed slot's contents are not meaningful.
        const TValue& GetValueAt(const int index) const
        {
            Assert(index >= 0);
            Assert(index < count);

            return values[index];
        }

        TKey const& GetKeyAt(const int index) const
        {
            Assert(index >= 0);
            Assert(index < count);

            return entries[index].Key();
        }

        bool TryGetValueAt(int index, TValue * value) const
        {
            if (index >= 0 && index < count)
            {
                *value = values[index];
                return true;
            }
            return false;
        }

        // Removes the entry for 'key'; returns whether anything was removed.
        bool Remove(const TKey& key)
        {
            int i, last;
            uint targetBucket;
            if (FindEntryWithKey(key, &i, &last, &targetBucket))
            {
                RemoveAt(i, last, targetBucket);
                return true;
            }
            return false;
        }

        // Returns whether the dictionary was resized or not
        bool EnsureCapacity()
        {
            if (freeCount == 0 && count == size)
            {
                Resize();
                return true;
            }

            return false;
        }

        // Index the next insert would use: the free-list head if one exists,
        // otherwise the first never-used slot.
        int GetNextIndex()
        {
            if (freeCount != 0)
            {
                Assert(freeCount > 0);
                Assert(freeList >= 0);
                Assert(freeList < count);
                return freeList;
            }

            return count;
        }

        int GetLastIndex()
        {
            return count - 1;
        }

        // Forward resize locking to the Lock policy base class.
        void LockResize()
        {
            __super::LockResize();
        }

        void UnlockResize()
        {
            __super::UnlockResize();
        }

    private:

        template <typename TLookup>
        static hash_t GetHashCodeWithKey(const TLookup& key)
        {
            // Set the last bit to 1 to avoid a false positive where the hash appears to be a valid recycler address.
            // Along the same lines, 0 is used to indicate a non-existing entry.
            return TAGHASH(Comparer<TLookup>::GetHashCode(key));
        }

        static hash_t GetHashCode(const TKey& key)
        {
            return GetHashCodeWithKey<TKey>(key);
        }

        static uint GetBucket(hash_t hashCode, int bucketCount, int modFunctionIndex)
        {
            return SizePolicy::GetBucket(UNTAGHASH(hashCode), bucketCount, modFunctionIndex);
        }

        uint GetBucket(hash_t hashCode) const
        {
            return GetBucket(hashCode, this->bucketCount, modFunctionIndex);
        }

        static bool IsFreeEntry(const EntryType &entry)
        {
            // A free entry's next index will be (FreeListSentinel - nextIndex), such that it is always <= FreeListSentinel, for fast entry iteration
            // allowing for skipping over free entries. -1 is reserved for the end-of-chain marker for a used entry.
            return entry.next <= FreeListSentinel;
        }


        // Links a freed entry into the free-list encoding described above.
        void SetNextFreeEntryIndex(EntryType &freeEntry, const int nextFreeEntryIndex)
        {
            Assert(!IsFreeEntry(freeEntry));
            Assert(nextFreeEntryIndex >= -1);
            Assert(nextFreeEntryIndex < count);

            // The last entry in the free list chain will have a next of FreeListSentinel to indicate that it is a free entry. The end of the
            // free list chain is identified using freeCount.
            freeEntry.next = nextFreeEntryIndex >= 0 ? FreeListSentinel - nextFreeEntryIndex : FreeListSentinel;
        }

        // Decodes the next free index from the (FreeListSentinel - index) encoding.
        static int GetNextFreeEntryIndex(const EntryType &freeEntry)
        {
            Assert(IsFreeEntry(freeEntry));
            return FreeListSentinel - freeEntry.next;
        }

        // Walks the bucket chain for 'key'; returns the entry index or -1.
        template <typename LookupType>
        inline int FindEntryWithKey(const LookupType& key) const
        {
            int * localBuckets = buckets;
            if (localBuckets != nullptr)
            {
                hash_t hashCode = GetHashCodeWithKey<LookupType>(key);
                uint targetBucket = this->GetBucket(hashCode);
                EntryType * localEntries = entries;
                for (int i = localBuckets[targetBucket]; i >= 0; i = localEntries[i].next)
                {
                    if (localEntries[i].template KeyEquals<Comparer<TKey>>(key, hashCode))
                    {
                        return i;
                    }

                }
            }

            return -1;
        }

        inline int FindEntry(const TKey& key) const
        {
            return FindEntryWithKey<TKey>(key);
        }

        // As above, but also reports the entry's index (*i), its predecessor in
        // the bucket chain (*last; -1 if the entry heads the chain), and the
        // bucket index, so the caller can unlink the entry via RemoveAt.
        template <typename LookupType>
        inline bool FindEntryWithKey(const LookupType& key, int *const i, int *const last, uint *const targetBucket)
        {
            int * localBuckets = buckets;
            if (localBuckets != nullptr)
            {
                hash_t hashCode = GetHashCodeWithKey<LookupType>(key);
                *targetBucket = this->GetBucket(hashCode);
                *last = -1;
                EntryType * localEntries = entries;
                for (*i = localBuckets[*targetBucket]; *i >= 0; *last = *i, *i = localEntries[*i].next)
                {
                    if (localEntries[*i].template KeyEquals<Comparer<TKey>>(key, hashCode))
                    {
                        return true;
                    }
                }
            }
            return false;
        }

        // Applies 'fn' to every live entry and removes those for which it
        // returns true; used by Cleanup to prune collected weak values.
        template<class Fn>
        void MapAndRemoveIf(Fn fn)
        {
            for (uint i = 0; i < bucketCount; i++)
            {
                if (buckets[i] != -1)
                {
                    for (int currentIndex = buckets[i], lastIndex = -1; currentIndex != -1;)
                    {
                        // If the predicate says we should remove this item
                        if (fn(entries[currentIndex], values[currentIndex]) == true)
                        {
                            // Capture the chain successor before RemoveAt
                            // overwrites entries[currentIndex].next with the
                            // free-list encoding.
                            const int nextIndex = entries[currentIndex].next;
                            RemoveAt(currentIndex, lastIndex, i);
                            currentIndex = nextIndex;
                        }
                        else
                        {
                            lastIndex = currentIndex;
                            currentIndex = entries[currentIndex].next;
                        }
                    }
                }
            }
        }

        // First-time allocation of buckets/entries/values.
        void Initialize(int capacity)
        {
            // minimum capacity is 4
            int initSize = max(capacity, 4);
            int modIndex = UNKNOWN_MOD_INDEX;
            uint initBucketCount = SizePolicy::GetBucketSize(initSize, &modIndex);
            AssertMsg(initBucketCount > 0, "Size returned by policy should be greater than 0");

            int* newBuckets = nullptr;
            EntryType* newEntries = nullptr;
            ValueType* newValues = nullptr;
            Allocate(&newBuckets, &newEntries, &newValues, initBucketCount, initSize);

            // Allocation can throw - assign only after allocation has succeeded.
            this->buckets = newBuckets;
            this->entries = newEntries;
            this->values = newValues;
            this->bucketCount = initBucketCount;
            this->size = initSize;
            this->modFunctionIndex = modIndex;
            Assert(this->freeCount == 0);

        }


        // Core insert. Behavior when the key already exists depends on 'op':
        // Insert_Add asserts, Insert_AddNew returns -1, Insert_Item replaces.
        // Otherwise returns the index of the new entry.
        template <InsertOperations op>
        int Insert(const TKey& key, const TValue& value)
        {
            int * localBuckets = buckets;
            if (localBuckets == nullptr)
            {
                Initialize(0);
                localBuckets = buckets;
            }

#if DBG
            // Always search and verify
            const bool needSearch = true;
#else
            const bool needSearch = (op != Insert_Add);
#endif
            hash_t hashCode = GetHashCode(key);
            uint targetBucket = this->GetBucket(hashCode);
            if (needSearch)
            {
                EntryType * localEntries = entries;
                for (int i = localBuckets[targetBucket]; i >= 0; i = localEntries[i].next)
                {
                    if (localEntries[i].template KeyEquals<Comparer<TKey>>(key, hashCode))
                    {
                        Assert(op != Insert_Add);
                        if (op == Insert_Item)
                        {
                            values[i] = value;
                            return i;
                        }
                        return -1;
                    }
                }
            }

            // Ideally we'd do cleanup only if weak references have been collected since the last resize
            // but that would require us to use an additional field to store the last recycler cleanup id
            // that we saw
            // We can add that optimization later if we have to.
            if (freeCount == 0 && count == size)
            {
                this->Cleanup();
            }

            int index;
            if (freeCount != 0)
            {
                Assert(freeCount > 0);
                Assert(freeList >= 0);
                Assert(freeList < count);
                index = freeList;
                freeCount--;
                // When freeCount drops to 0 the stale freeList head is never
                // read (all free-list access is guarded by freeCount != 0).
                if (freeCount != 0)
                {
                    freeList = GetNextFreeEntryIndex(entries[index]);
                }
            }
            else
            {
                // If there's nothing free, then in general, we set index to count, and increment count
                // If we resize, we also need to recalculate the target
                // However, if cleanup is supported, then before resize, we should try and clean up and see
                // if something got freed, and if it did, reuse that index
                if (count == size)
                {
                    Resize();
                    targetBucket = this->GetBucket(hashCode);
                    index = count;
                    count++;
                }
                else
                {
                    index = count;
                    count++;
                }

                Assert(count <= size);
                Assert(index < size);
            }

            entries[index].Set(key, hashCode);
            values[index] = value;
            entries[index].next = buckets[targetBucket];
            buckets[targetBucket] = index;

            return index;
        }

        // Grows the arrays per SizePolicy; rehashes only when the bucket count
        // actually changes.
        void Resize()
        {
            AutoDoResize autoDoResize(*this);

            int newSize = SizePolicy::GetNextSize(count);
            int modIndex = UNKNOWN_MOD_INDEX;
            uint newBucketCount = SizePolicy::GetBucketSize(newSize, &modIndex);

            __analysis_assume(newSize > count);
            int* newBuckets = nullptr;
            EntryType* newEntries = nullptr;
            ValueType* newValues = nullptr;
            if (newBucketCount == bucketCount)
            {
                // no need to rehash: bucket heads and entry chains stay valid,
                // so only the entry/value arrays are grown and copied.
                newEntries = AllocateEntries(newSize);
                newValues = AllocateValues(newSize);
                CopyArray<EntryType>(newEntries, newSize, entries, count);
                CopyArray<ValueType>(newValues, newSize, values, count); // TODO: concurrency issues?

                this->entries = newEntries;
                this->values = newValues;
                this->size = newSize;
                this->modFunctionIndex = modIndex;
                return;
            }

            Allocate(&newBuckets, &newEntries, &newValues, newBucketCount, newSize);
            CopyArray<EntryType>(newEntries, newSize, entries, count);
            CopyArray<ValueType>(newValues, newSize, values, count); // TODO: concurrency issues?

            // When TAllocator is of type Recycler, it is possible that the Allocate above causes a collection, which
            // in turn can cause entries in the dictionary to be removed - i.e. the dictionary contains weak references
            // that remove themselves when no longer valid. This means the free list might not be empty anymore.
            this->modFunctionIndex = modIndex;
            for (int i = 0; i < count; i++)
            {
                __analysis_assume(i < newSize);

                if (!IsFreeEntry(newEntries[i]))
                {
                    // Recompute the bucket from the entry's key and relink the
                    // entry at the head of its new chain.
                    hash_t hashCode = newEntries[i].template GetHashCode<Comparer<TKey>>();
                    int bucket = GetBucket(hashCode, newBucketCount, modFunctionIndex);
                    newEntries[i].next = newBuckets[bucket];
                    newBuckets[bucket] = i;
                }
            }

            this->buckets = newBuckets;
            this->entries = newEntries;
            this->values = newValues;
            bucketCount = newBucketCount;
            size = newSize;
        }

        __ecount(bucketCount) int *AllocateBuckets(DECLSPEC_GUARD_OVERFLOW const uint bucketCount)
        {
            return
                AllocateArray<AllocType, int, false>(
                    TRACK_ALLOC_INFO(alloc, int, AllocType, 0, bucketCount),
                    TypeAllocatorFunc<AllocType, int>::GetAllocFunc(),
                    bucketCount);
        }

        __ecount(size) EntryType * AllocateEntries(DECLSPEC_GUARD_OVERFLOW int size, const bool zeroAllocate = true)
        {
            // Note that the choice of leaf/non-leaf node is decided for the EntryType on the basis of TValue. By default, if
            // TValue is a pointer, a non-leaf allocation is done. This behavior can be overridden by specializing
            // TypeAllocatorFunc for TValue.
            // NOTE(review): entries here store keys (not values), yet the
            // allocator is still selected from TValue — confirm this is the
            // intended policy when TKey is a recycler pointer type.
            return
                AllocateArray<Recycler, EntryType, false>(
                    TRACK_ALLOC_INFO(alloc, EntryType, Recycler, 0, size),
                    zeroAllocate ? EntryAllocatorFuncType::GetAllocZeroFunc() : EntryAllocatorFuncType::GetAllocFunc(),
                    size);
        }

        // Values are allocated from a recycler weak reference region rather
        // than as ordinary recycler memory, so the recycler can clear them.
        __ecount(size) ValueType * AllocateValues(DECLSPEC_GUARD_OVERFLOW int size)
        {
            return alloc->CreateWeakReferenceRegion<TValue>(size);
        }


        // Allocates all three arrays and initializes every bucket to empty (-1).
        void Allocate(__deref_out_ecount(bucketCount) int** ppBuckets, __deref_out_ecount(size) EntryType** ppEntries, __deref_out_ecount(size) ValueType** ppValues, DECLSPEC_GUARD_OVERFLOW uint bucketCount, DECLSPEC_GUARD_OVERFLOW int size)
        {
            int *const buckets = AllocateBuckets(bucketCount);
            Assert(buckets); // no-throw allocators are currently not supported

            EntryType *entries;
            entries = AllocateEntries(size);
            Assert(entries); // no-throw allocators are currently not supported

            ValueType * values = AllocateValues(size);

            memset(buckets, -1, bucketCount * sizeof(buckets[0]));

            *ppBuckets = buckets;
            *ppEntries = entries;
            *ppValues = values;
        }

        // Unlinks entry i from its bucket chain ('last' is its predecessor, or
        // < 0 if i heads the chain of 'targetBucket'), clears the key, and
        // pushes the slot onto the free list.
        inline void RemoveAt(const int i, const int last, const uint targetBucket)
        {
            if (last < 0)
            {
                buckets[targetBucket] = entries[i].next;
            }
            else
            {
                entries[last].next = entries[i].next;
            }
            entries[i].Clear();
            SetNextFreeEntryIndex(entries[i], freeCount == 0 ? -1 : freeList);
            freeList = i;
            freeCount++;
        }
    };
+#endif
 };

+ 2 - 2
lib/Common/Memory/Chakra.Common.Memory.vcxproj.filters

@@ -1,4 +1,4 @@
-<?xml version="1.0" encoding="utf-8"?>
+<?xml version="1.0" encoding="utf-8"?>
 <Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
   <ItemGroup>
     <ARMASM Include="$(MSBuildThisFileDirectory)arm\arm_SAVE_REGISTERS.asm">
@@ -153,4 +153,4 @@
       <Filter>amd64</Filter>
     </MASM>
   </ItemGroup>
-</Project>
+</Project>

+ 4 - 0
lib/Common/Memory/CollectionState.h

@@ -52,6 +52,8 @@ enum CollectionState
     Collection_ConcurrentSweepPass2Wait = 0x00800000,
 #endif
 
+    Collection_WeakRefMark          = 0x01000000,
+
     // Actual states
     CollectionStateNotCollecting          = 0,                                                                // not collecting
     CollectionStateResetMarks             = Collection_Mark | Collection_ResetMarks,                          // reset marks
@@ -90,4 +92,6 @@ enum CollectionState
 #endif
     CollectionStatePostSweepRedeferralCallback = Collection_PostSweepRedeferralCallback,
     CollectionStatePostCollectionCallback = Collection_PostCollectionCallback,
+
+    CollectionStateConcurrentMarkWeakRef = Collection_ConcurrentMark | Collection_ExecutingConcurrent | Collection_WeakRefMark,
 };

+ 126 - 5
lib/Common/Memory/Recycler.cpp

@@ -193,6 +193,9 @@ Recycler::Recycler(AllocationPolicyManager * policyManager, IdleDecommitPageAllo
     pinnedObjectMap(1024, HeapAllocator::GetNoMemProtectInstance()),
     weakReferenceMap(1024, HeapAllocator::GetNoMemProtectInstance()),
     weakReferenceCleanupId(0),
+#if ENABLE_WEAK_REFERENCE_REGIONS
+    weakReferenceRegionList(HeapAllocator::GetNoMemProtectInstance()),
+#endif
     collectionWrapper(&DefaultRecyclerCollectionWrapper::Instance),
     isScriptActive(false),
     isInScript(false),
@@ -3096,7 +3099,9 @@ Recycler::SweepWeakReference()
 
     // REVIEW: Clean up the weak reference map concurrently?
     bool hasCleanup = false;
+#if defined(GCETW) && defined(ENABLE_JS_ETW)
     uint scannedCount = weakReferenceMap.Count();
+#endif
 
     weakReferenceMap.Map([&hasCleanup](RecyclerWeakReferenceBase * weakRef) -> bool
     {
@@ -3122,13 +3127,82 @@ Recycler::SweepWeakReference()
 
         return true;
     });
+
+#if defined(GCETW) && defined(ENABLE_JS_ETW)
+    uint regionScannedCount = 0;
+    uint regionClearedCount = 0;
+#endif
+
+#if ENABLE_WEAK_REFERENCE_REGIONS
+
+    auto edIt = this->weakReferenceRegionList.GetEditingIterator();
+    while (edIt.Next())
+    {
+        RecyclerWeakReferenceRegion region = edIt.Data();
+        // We want to see if user code has any reference to the region, if not, we can free the whole thing
+        if (!region.GetHeapBlock()->TestObjectMarkedBit(region.GetPtr()))
+        {
+            edIt.RemoveCurrent();
+            hasCleanup = true;
+#if defined(GCETW) && defined(ENABLE_JS_ETW)
+            regionClearedCount += (uint)region.GetCount();
+#endif
+            continue;
+        }
+
+        // The region is referenced, clean up any stale weak references
+        RecyclerWeakReferenceRegionItem<void*>* refs = region.GetPtr();
+#if defined(GCETW) && defined(ENABLE_JS_ETW)
+        regionScannedCount += (uint)region.GetCount();
+#endif
+        for (size_t i = 0; i < region.GetCount(); ++i)
+        {
+            RecyclerWeakReferenceRegionItem<void*> &ref = refs[i];
+            if (ref.ptr == nullptr)
+            {
+                continue;
+            }
+
+            if (((uintptr_t)ref.heapBlock & 0x1) == 0x1)
+            {
+                // Background thread marked this ref. Unmark it, and keep it
+                ref.heapBlock = (HeapBlock*)((uintptr_t)ref.heapBlock & ~0x1);
+                continue;
+            }
+
+            if (ref.heapBlock == nullptr)
+            {
+                HeapBlock* block = this->FindHeapBlock(ref.ptr);
+                if (block == nullptr)
+                {
+                    // This is not a real reference
+                    AssertMsg(false, "WeakReferenceRegionItems should only contain recycler references");
+                    continue;
+                }
+                else
+                {
+                    ref.heapBlock = block;
+                }
+            }
+
+            if (!ref.heapBlock->TestObjectMarkedBit(ref))
+            {
+                ref.ptr = nullptr;
+                ref.heapBlock = nullptr;
+                hasCleanup = true;
+#if defined(GCETW) && defined(ENABLE_JS_ETW)
+                regionClearedCount++;
+#endif
+            }
+        }
+    }
+#endif
+
     this->weakReferenceCleanupId += hasCleanup;
 
-#ifdef GCETW
+#if defined(GCETW) && defined(ENABLE_JS_ETW)
     const uint keptCount = weakReferenceMap.Count();
-    const uint scannedBytes = scannedCount * sizeof(RecyclerWeakReferenceBase*);
-    const uint cleanedBytes = (scannedCount - keptCount) * sizeof(RecyclerWeakReferenceBase*);
-    GCETW(GC_SWEEP_WEAKREF_STOP, (this, scannedBytes, cleanedBytes));
+    GCETW(GC_SWEEP_WEAKREF_STOP_EX, (this, scannedCount, (scannedCount - keptCount), regionScannedCount, regionClearedCount));
 #endif
 
     RECYCLER_PROFILE_EXEC_END(this, Js::SweepWeakPhase);
@@ -5362,6 +5436,49 @@ Recycler::BackgroundMark()
     }
 }
 
+void
+Recycler::BackgroundMarkWeakRefs()
+{
+#if ENABLE_WEAK_REFERENCE_REGIONS
+    auto iterator = this->weakReferenceRegionList.GetIterator();
+    while (iterator.Next())
+    {
+        RecyclerWeakReferenceRegion region = iterator.Data();
+        RecyclerWeakReferenceRegionItem<void*> *items = region.GetPtr();
+        size_t count = region.GetCount();
+        for (size_t index = 0; index < count; ++index)
+        {
+            RecyclerWeakReferenceRegionItem<void*> &item = items[index];
+            if (item.ptr == nullptr)
+            {
+                continue;
+            }
+
+            if (((uintptr_t)item.heapBlock & 0x1) == 0x1)
+            {
+                // This weak reference is already marked
+                continue;
+            }
+
+            if (item.heapBlock == nullptr)
+            {
+                item.heapBlock = this->FindHeapBlock(item.ptr);
+                if (item.heapBlock == nullptr)
+                {
+                    // This isn't a real weak reference, ignore it
+                    continue;
+                }
+            }
+
+            if (item.heapBlock->TestObjectMarkedBit(item.ptr))
+            {
+                item.heapBlock = (HeapBlock*) ((uintptr_t)item.heapBlock | 0x1);
+            }
+        }
+    }
+#endif
+}
+
 void
 Recycler::BackgroundResetMarks()
 {
@@ -5935,7 +6052,11 @@ Recycler::DoBackgroundWork(bool forceForeground)
             // fall-through
         case CollectionStateConcurrentMark:
             this->BackgroundMark();
-            Assert(this->collectionState == CollectionStateConcurrentMark);
+            this->collectionState = CollectionStateConcurrentMarkWeakRef;
+            // fall-through
+        case CollectionStateConcurrentMarkWeakRef:
+            this->BackgroundMarkWeakRefs();
+            Assert(this->collectionState == CollectionStateConcurrentMarkWeakRef);
             RECORD_TIMESTAMP(concurrentMarkFinishTime);
             break;
         case CollectionStateConcurrentFinishMark:

+ 49 - 0
lib/Common/Memory/Recycler.h

@@ -155,6 +155,46 @@ template<ObjectInfoBits infoBits>
 struct InfoBitsWrapper{};
 
 
+#if ENABLE_WEAK_REFERENCE_REGIONS
+template<typename T>
+static constexpr bool is_pointer = false;
+template<typename K>
+static constexpr bool is_pointer<K*> = true;
+
+template<typename T>
+class RecyclerWeakReferenceRegionItem {
+    static_assert(is_pointer<T>, "Weak references must be to pointer types");
+    friend class Recycler;
+public:
+    RecyclerWeakReferenceRegionItem() : ptr(T()), heapBlock(nullptr) {};
+    operator T() const { return ptr; };
+    T operator=(T newPtr) {
+        Assert(ptr == nullptr); // For safety with concurrent marking, only allow setting the pointer to non-null from null
+        heapBlock = nullptr;
+        return ptr = newPtr;
+    };
+private:
+    RecyclerWeakReferenceRegionItem(RecyclerWeakReferenceRegionItem<T>&) = delete;
+
+    FieldNoBarrier(T) ptr;
+    FieldNoBarrier(HeapBlock*) heapBlock; // Note: the low bit of the heapBlock is used for background marking
+};
+
+class RecyclerWeakReferenceRegion {
+    friend class Recycler;
+public:
+    RecyclerWeakReferenceRegionItem<void*>* GetPtr() const { return ptr; }
+    size_t GetCount() const { return count; }
+    HeapBlock* GetHeapBlock() const { return arrayHeapBlock; }
+private:
+    FieldNoBarrier(RecyclerWeakReferenceRegionItem<void*>*) ptr;
+    FieldNoBarrier(size_t) count;
+    FieldNoBarrier(HeapBlock*) arrayHeapBlock;
+};
+
+#endif
+
+
 // Allocation macro
 
 #define RecyclerNew(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocInlined, T, __VA_ARGS__)
@@ -799,6 +839,9 @@ private:
 
     WeakReferenceHashTable<PrimePolicy> weakReferenceMap;
     uint weakReferenceCleanupId;
+#if ENABLE_WEAK_REFERENCE_REGIONS
+    SList<RecyclerWeakReferenceRegion, HeapAllocator> weakReferenceRegionList;
+#endif
 
     void * transientPinnedObject;
 #if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
@@ -1347,6 +1390,11 @@ public:
 
     template<typename T>
     RecyclerWeakReference<T>* CreateWeakReferenceHandle(T* pStrongReference);
+#if ENABLE_WEAK_REFERENCE_REGIONS
+    template<typename T>
+    RecyclerWeakReferenceRegionItem<T>* CreateWeakReferenceRegion(size_t count);
+#endif
+
     uint GetWeakReferenceCleanupId() const { return weakReferenceCleanupId; }
 
     template<typename T>
@@ -1742,6 +1790,7 @@ private:
     void FinishTransferSwept(CollectionFlags flags);
     BOOL FinishConcurrentCollectWrapped(CollectionFlags flags);
     void BackgroundMark();
+    void BackgroundMarkWeakRefs();
     void BackgroundResetMarks();
     void PrepareBackgroundFindRoots();
     void RevertPrepareBackgroundFindRoots();

+ 16 - 0
lib/Common/Memory/Recycler.inl

@@ -429,6 +429,22 @@ inline bool Recycler::TryGetWeakReferenceHandle(T* pStrongReference, RecyclerWea
     return this->weakReferenceMap.TryGetValue((char*) pStrongReference, (RecyclerWeakReferenceBase**)weakReference);
 }
 
+#if ENABLE_WEAK_REFERENCE_REGIONS
+template<typename T>
+inline RecyclerWeakReferenceRegionItem<T>* Recycler::CreateWeakReferenceRegion(size_t count)
+{
+    RecyclerWeakReferenceRegionItem<T>* regionArray = RecyclerNewArrayLeafZ(this, RecyclerWeakReferenceRegionItem<T>, count);
+    RecyclerWeakReferenceRegion region;
+    region.ptr = reinterpret_cast<RecyclerWeakReferenceRegionItem<void*>*>(regionArray);
+    region.count = count;
+    region.arrayHeapBlock = this->FindHeapBlock(regionArray);
+    this->weakReferenceRegionList.Push(region);
+    return regionArray;
+}
+#endif
+
+
+
 inline HeapBlock*
 Recycler::FindHeapBlock(void* candidate)
 {

+ 12 - 0
lib/Runtime/Base/ScriptContext.cpp

@@ -1690,10 +1690,18 @@ namespace Js
 
         if (propertyMap != nullptr)
         {
+#if ENABLE_WEAK_REFERENCE_REGIONS
+            TProperty* propReference = nullptr;
+#else
             RecyclerWeakReference<TProperty>* propReference = nullptr;
+#endif
             if (propertyMap->TryGetValue(propertyId, &propReference))
             {
+#if ENABLE_WEAK_REFERENCE_REGIONS
+                return propReference;
+#else
                 return propReference->Get();
+#endif
             }
         }
 
@@ -1719,7 +1727,11 @@ namespace Js
         // Library doesn't cache PropertyString instances because we might hold them in the 2-letter cache instead.
         PropertyStringCacheMap* propertyMap = this->GetLibrary()->EnsurePropertyStringMap();
         PropertyString* prop = this->GetLibrary()->CreatePropertyString(propertyRecord);
+#if ENABLE_WEAK_REFERENCE_REGIONS
+        propertyMap->Item(propertyRecord->GetPropertyId(), prop);
+#else
         propertyMap->Item(propertyRecord->GetPropertyId(), recycler->CreateWeakReferenceHandle(prop));
+#endif
         return prop;
     }
 

+ 4 - 0
lib/Runtime/Library/JavascriptLibrary.cpp

@@ -5876,7 +5876,11 @@ namespace Js
         AssertMsg(symbolTypeStatic, "Where's symbolTypeStatic?");
         SymbolCacheMap* symbolMap = this->EnsureSymbolMap();
         JavascriptSymbol* symbol = RecyclerNew(this->GetRecycler(), JavascriptSymbol, propertyRecord, symbolTypeStatic);
+#if ENABLE_WEAK_REFERENCE_REGIONS
+        symbolMap->Item(propertyRecord->GetPropertyId(), symbol);
+#else
         symbolMap->Item(propertyRecord->GetPropertyId(), recycler->CreateWeakReferenceHandle(symbol));
+#endif
         return symbol;
     }
 

+ 4 - 0
lib/Runtime/Runtime.h

@@ -280,7 +280,11 @@ namespace Js
     class AsmJSByteCodeGenerator;
     enum AsmJSMathBuiltinFunction: int;
     //////////////////////////////////////////////////////////////////////////
+#if ENABLE_WEAK_REFERENCE_REGIONS
+    template <typename T> using WeakPropertyIdMap = JsUtil::WeakReferenceRegionDictionary<PropertyId, T*, PrimeSizePolicy>;
+#else
     template <typename T> using WeakPropertyIdMap = JsUtil::WeakReferenceDictionary<PropertyId, T, PrimeSizePolicy>;
+#endif
     typedef WeakPropertyIdMap<PropertyString> PropertyStringCacheMap;
     typedef WeakPropertyIdMap<JavascriptSymbol> SymbolCacheMap;
 

+ 10 - 2
manifests/Microsoft-Scripting-Chakra-Instrumentation.man

@@ -1382,11 +1382,19 @@
                     />
                 <data
                     inType="win:UInt32"
-                    name="ScannedBytes"
+                    name="ScannedCount"
                     />
                 <data
                     inType="win:UInt32"
-                    name="ClearedBytes"
+                    name="ClearedCount"
+                    />
+                <data
+                    inType="win:UInt32"
+                    name="RegionScannedCount"
+                    />
+                <data
+                    inType="win:UInt32"
+                    name="RegionClearedCount"
                     />
                 </template>
                 <template tid="GCClearInlineCacheData">