// WeakReferenceDictionary.h
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. namespace JsUtil
  7. {
// Common base for the weak-reference dictionaries in this file: exposes a
// single Cleanup hook that prunes entries whose weak references have been
// collected by the recycler.
// NOTE(review): no virtual destructor - instances appear to be recycler-managed
// and never deleted through this interface; confirm before deleting via base.
interface IWeakReferenceDictionary
{
    // Remove all entries whose weak references have been reclaimed.
    virtual void Cleanup() = 0;
};
  12. template <
  13. class TKey,
  14. class TValue,
  15. class SizePolicy = PowerOf2SizePolicy,
  16. template <typename ValueOrKey> class Comparer = DefaultComparer
  17. >
  18. class WeakReferenceDictionary: public BaseDictionary<TKey, RecyclerWeakReference<TValue>*, RecyclerNonLeafAllocator, SizePolicy, Comparer, WeakRefValueDictionaryEntry>,
  19. public IWeakReferenceDictionary
  20. {
  21. typedef BaseDictionary<TKey, RecyclerWeakReference<TValue>*, RecyclerNonLeafAllocator, SizePolicy, Comparer, WeakRefValueDictionaryEntry> Base;
  22. typedef typename Base::EntryType EntryType;
  23. public:
  24. WeakReferenceDictionary(Recycler* recycler, int capacity = 0):
  25. Base(recycler, capacity)
  26. {
  27. Assert(reinterpret_cast<void*>(this) == reinterpret_cast<void*>((IWeakReferenceDictionary*) this));
  28. }
  29. virtual void Cleanup() override
  30. {
  31. this->MapAndRemoveIf([](typename Base::EntryType &entry)
  32. {
  33. return (Base::EntryType::NeedsCleanup(entry));
  34. });
  35. }
  36. private:
  37. using Base::Clone;
  38. using Base::Copy;
  39. PREVENT_COPY(WeakReferenceDictionary);
  40. };
  41. #if ENABLE_WEAK_REFERENCE_REGIONS
  42. template<bool, typename T = void>
  43. struct enable_if {};
  44. template<typename T>
  45. struct enable_if<true, T>
  46. {
  47. typedef T type;
  48. };
  49. template <
  50. class TEntry,
  51. class TWeak,
  52. bool keyIsWeak,
  53. class SizePolicy = PowerOf2SizePolicy,
  54. template <typename ValueOrKey> class Comparer = DefaultComparer,
  55. typename Lock = NoResizeLock,
  56. class AllocType = Recycler // Should always be recycler; this is to sufficiently confuse the RecyclerChecker
  57. >
  58. class SplitWeakDictionary : protected Lock, public IWeakReferenceDictionary
  59. {
  60. private:
  61. template <typename T1, typename T2, bool second>
  62. struct Select;
  63. template <typename T1, typename T2>
  64. struct Select<T1, T2, false>
  65. {
  66. typedef T1 type;
  67. };
  68. template <typename T1, typename T2>
  69. struct Select<T1, T2, true>
  70. {
  71. typedef T2 type;
  72. };
  73. typedef typename Select<TEntry, TWeak, keyIsWeak>::type TBKey;
  74. typedef typename Select<TEntry, TWeak, !keyIsWeak>::type TBValue;
  75. typedef typename AllocatorInfo<Recycler, TEntry>::AllocatorFunc EntryAllocatorFuncType;
  76. enum InsertOperations
  77. {
  78. Insert_Add, // FatalInternalError if the item already exist in debug build
  79. Insert_AddNew, // Ignore add if the item already exist
  80. Insert_Item // Replace the item if it already exist
  81. };
  82. protected:
  83. typedef ValueEntry<TEntry> EntryType;
  84. typedef RecyclerWeakReferenceRegionItem<TWeak> WeakType;
  85. Field(int*) buckets;
  86. Field(EntryType*) entries;
  87. Field(WeakType*) weakRefs;
  88. FieldNoBarrier(Recycler*) alloc;
  89. Field(int) size;
  90. Field(uint) bucketCount;
  91. Field(int) count;
  92. Field(int) freeList;
  93. Field(int) freeCount;
  94. Field(int) modFunctionIndex;
  95. private:
  96. static const int FreeListSentinel = -2;
  97. PREVENT_COPY(SplitWeakDictionary);
  98. class AutoDoResize
  99. {
  100. public:
  101. AutoDoResize(Lock& lock) : lock(lock) { lock.BeginResize(); };
  102. ~AutoDoResize() { lock.EndResize(); };
  103. private:
  104. Lock & lock;
  105. };
  106. public:
  107. // Allow SplitWeakDictionary field to be inlined in classes with DEFINE_VTABLE_CTOR_MEMBER_INIT
  108. SplitWeakDictionary(VirtualTableInfoCtorEnum) { }
  109. SplitWeakDictionary(Recycler* allocator, int capacity = 0)
  110. : buckets(nullptr),
  111. entries(nullptr),
  112. weakRefs(nullptr),
  113. alloc(allocator),
  114. size(0),
  115. bucketCount(0),
  116. count(0),
  117. freeList(0),
  118. freeCount(0),
  119. modFunctionIndex(UNKNOWN_MOD_INDEX)
  120. {
  121. Assert(allocator);
  122. Assert(reinterpret_cast<void*>(this) == reinterpret_cast<void*>((IWeakReferenceDictionary*)this));
  123. // If initial capacity is negative or 0, lazy initialization on
  124. // the first insert operation is performed.
  125. if (capacity > 0)
  126. {
  127. Initialize(capacity);
  128. }
  129. }
  130. virtual void Cleanup() override
  131. {
  132. this->MapAndRemoveIf([](EntryType& entry, WeakType& weakRef)
  133. {
  134. return weakRef == nullptr;
  135. });
  136. }
  137. inline int Capacity() const
  138. {
  139. return size;
  140. }
  141. inline int Count() const
  142. {
  143. return count - freeCount;
  144. }
  145. void Clear()
  146. {
  147. if (count > 0)
  148. {
  149. memset(buckets, -1, bucketCount * sizeof(buckets[0]));
  150. memset(entries, 0, sizeof(EntryType) * size);
  151. memset(weakRefs, 0, sizeof(WeakType) * size); // TODO: issues with background threads?
  152. count = 0;
  153. freeCount = 0;
  154. }
  155. }
  156. void ResetNoDelete()
  157. {
  158. this->size = 0;
  159. this->bucketCount = 0;
  160. this->buckets = nullptr;
  161. this->entries = nullptr;
  162. this->weakRefs = nullptr;
  163. this->count = 0;
  164. this->freeCount = 0;
  165. this->modFunctionIndex = UNKNOWN_MOD_INDEX;
  166. }
  167. void Reset()
  168. {
  169. ResetNoDelete();
  170. }
  171. // Returns whether the dictionary was resized or not
  172. bool EnsureCapacity()
  173. {
  174. if (freeCount == 0 && count == size)
  175. {
  176. Resize();
  177. return true;
  178. }
  179. return false;
  180. }
  181. int GetNextIndex()
  182. {
  183. if (freeCount != 0)
  184. {
  185. Assert(freeCount > 0);
  186. Assert(freeList >= 0);
  187. Assert(freeList < count);
  188. return freeList;
  189. }
  190. return count;
  191. }
  192. TBValue Item(const TBKey& key)
  193. {
  194. int i = FindEntry(key);
  195. Assert(i >= 0);
  196. return values[i];
  197. }
  198. const TBValue Item(const TBKey& key) const
  199. {
  200. int i = FindEntry(key);
  201. Assert(i >= 0);
  202. return values[i];
  203. }
  204. int Add(const TBKey& key, const TBValue& value)
  205. {
  206. return Insert<Insert_Add>(key, value);
  207. }
  208. int AddNew(const TBKey& key, const TBValue& value)
  209. {
  210. return Insert<Insert_AddNew>(key, value);
  211. }
  212. int Item(const TBKey& key, const TBValue& value)
  213. {
  214. return Insert<Insert_Item>(key, value);
  215. }
  216. bool Contains(KeyValuePair<TBKey, TBValue> keyValuePair)
  217. {
  218. int i = FindEntry(keyValuePair.Key());
  219. TBValue val = this->GetValue(i);
  220. if (i >= 0 && Comparer<TBValue>::Equals(val, keyValuePair.Value()))
  221. {
  222. return true;
  223. }
  224. return false;
  225. }
  226. bool ContainsKey(const TBKey& key) const
  227. {
  228. return FindEntry(key) >= 0;
  229. }
  230. template <typename TLookup>
  231. inline const TBValue& LookupWithKey(const TLookup& key, const TBValue& defaultValue)
  232. {
  233. int i = FindEntryWithKey(key);
  234. if (i >= 0)
  235. {
  236. return this->GetValue(i);
  237. }
  238. return defaultValue;
  239. }
  240. inline const TBValue& Lookup(const TBKey& key, const TBValue& defaultValue)
  241. {
  242. return LookupWithKey<TBKey>(key, defaultValue);
  243. }
  244. template <typename TLookup>
  245. bool TryGetValue(const TLookup& key, TBValue* value)
  246. {
  247. int i = FindEntryWithKey(key);
  248. if (i >= 0)
  249. {
  250. *value = this->GetValue(i);
  251. return true;
  252. }
  253. return false;
  254. }
  255. bool TryGetValueAndRemove(const TBKey& key, TBValue* value)
  256. {
  257. int i, last;
  258. uint targetBucket;
  259. if (FindEntryWithKey(key, &i, &last, &targetBucket))
  260. {
  261. *value = this->GetValue(i);
  262. RemoveAt(i, last, targetBucket);
  263. return true;
  264. }
  265. return false;
  266. }
  267. bool Remove(const TBKey& key)
  268. {
  269. int i, last;
  270. uint targetBucket;
  271. if (FindEntryWithKey(key, &i, &last, &targetBucket))
  272. {
  273. RemoveAt(i, last, targetBucket);
  274. return true;
  275. }
  276. return false;
  277. }
  278. bool Remove(KeyValuePair<TBKey, TBValue> keyValuePair)
  279. {
  280. int i, last;
  281. uint targetBucket;
  282. if (FindEntryWithKey(keyValuePair.Key(), &i, &last, &targetBucket))
  283. {
  284. const TBValue& value = this->GetValue(i);
  285. if (Comparer<TBValue>::Equals(value, keyValuePair.Value()))
  286. {
  287. RemoveAt(i, last, targetBucket);
  288. return true;
  289. }
  290. }
  291. return false;
  292. }
  293. int GetLastIndex()
  294. {
  295. return count - 1;
  296. }
  297. void LockResize()
  298. {
  299. __super::LockResize();
  300. }
  301. void UnlockResize()
  302. {
  303. __super::UnlockResize();
  304. }
  305. protected:
  306. template<class Fn>
  307. void MapAndRemoveIf(Fn fn)
  308. {
  309. for (uint i = 0; i < bucketCount; i++)
  310. {
  311. if (buckets[i] != -1)
  312. {
  313. for (int currentIndex = buckets[i], lastIndex = -1; currentIndex != -1;)
  314. {
  315. // If the predicate says we should remove this item
  316. if (fn(entries[currentIndex], weakRefs[currentIndex]) == true)
  317. {
  318. const int nextIndex = entries[currentIndex].next;
  319. RemoveAt(currentIndex, lastIndex, i);
  320. currentIndex = nextIndex;
  321. }
  322. else
  323. {
  324. lastIndex = currentIndex;
  325. currentIndex = entries[currentIndex].next;
  326. }
  327. }
  328. }
  329. }
  330. }
  331. template <typename TLookup>
  332. static hash_t GetHashCodeWithKey(const TLookup& key)
  333. {
  334. // set last bit to 1 to avoid false positive to make hash appears to be a valid recycler address.
  335. // In the same line, 0 should be use to indicate a non-existing entry.
  336. return TAGHASH(Comparer<TLookup>::GetHashCode(key));
  337. }
  338. static uint GetBucket(hash_t hashCode, int bucketCount, int modFunctionIndex)
  339. {
  340. return SizePolicy::GetBucket(UNTAGHASH(hashCode), bucketCount, modFunctionIndex);
  341. }
  342. uint GetBucket(hash_t hashCode) const
  343. {
  344. return GetBucket(hashCode, this->bucketCount, modFunctionIndex);
  345. }
  346. static bool IsFreeEntry(const EntryType &entry)
  347. {
  348. // A free entry's next index will be (FreeListSentinel - nextIndex), such that it is always <= FreeListSentinel, for fast entry iteration
  349. // allowing for skipping over free entries. -1 is reserved for the end-of-chain marker for a used entry.
  350. return entry.next <= FreeListSentinel;
  351. }
  352. void SetNextFreeEntryIndex(EntryType &freeEntry, const int nextFreeEntryIndex)
  353. {
  354. Assert(!IsFreeEntry(freeEntry));
  355. Assert(nextFreeEntryIndex >= -1);
  356. Assert(nextFreeEntryIndex < count);
  357. // The last entry in the free list chain will have a next of FreeListSentinel to indicate that it is a free entry. The end of the
  358. // free list chain is identified using freeCount.
  359. freeEntry.next = nextFreeEntryIndex >= 0 ? FreeListSentinel - nextFreeEntryIndex : FreeListSentinel;
  360. }
  361. static int GetNextFreeEntryIndex(const EntryType &freeEntry)
  362. {
  363. Assert(IsFreeEntry(freeEntry));
  364. return FreeListSentinel - freeEntry.next;
  365. }
  366. void Initialize(int capacity)
  367. {
  368. // minimum capacity is 4
  369. int initSize = max(capacity, 4);
  370. int modIndex = UNKNOWN_MOD_INDEX;
  371. uint initBucketCount = SizePolicy::GetBucketSize(initSize, &modIndex);
  372. AssertMsg(initBucketCount > 0, "Size returned by policy should be greater than 0");
  373. int* newBuckets = nullptr;
  374. EntryType* newEntries = nullptr;
  375. WeakType* newWeakRefs = nullptr;
  376. Allocate(&newBuckets, &newEntries, &newWeakRefs, initBucketCount, initSize);
  377. // Allocation can throw - assign only after allocation has succeeded.
  378. this->buckets = newBuckets;
  379. this->entries = newEntries;
  380. this->weakRefs = newWeakRefs;
  381. this->bucketCount = initBucketCount;
  382. this->size = initSize;
  383. this->modFunctionIndex = modIndex;
  384. Assert(this->freeCount == 0);
  385. }
  386. inline void RemoveAt(const int i, const int last, const uint targetBucket)
  387. {
  388. if (last < 0)
  389. {
  390. buckets[targetBucket] = entries[i].next;
  391. }
  392. else
  393. {
  394. entries[last].next = entries[i].next;
  395. }
  396. entries[i].Clear();
  397. weakRefs[i].Clear();
  398. SetNextFreeEntryIndex(entries[i], freeCount == 0 ? -1 : freeList);
  399. freeList = i;
  400. freeCount++;
  401. }
  402. inline int FindEntry(const TBKey& key)
  403. {
  404. return FindEntryWithKey<TBKey>(key);
  405. }
  406. template <typename LookupType>
  407. inline int FindEntryWithKey(const LookupType& key)
  408. {
  409. int * localBuckets = buckets;
  410. if (localBuckets != nullptr)
  411. {
  412. hash_t hashCode = GetHashCodeWithKey<LookupType>(key);
  413. uint targetBucket = this->GetBucket(hashCode);
  414. EntryType * localEntries = entries;
  415. for (int i = localBuckets[targetBucket], last = -1; i >= 0;)
  416. {
  417. TBKey k = this->GetKey(i);
  418. if (this->RemoveIfCollected(k, &i, last, targetBucket))
  419. {
  420. continue;
  421. }
  422. if (Comparer<TBKey>::Equals(k, key))
  423. {
  424. return i;
  425. }
  426. last = i;
  427. i = localEntries[i].next;
  428. }
  429. }
  430. return -1;
  431. }
  432. template <typename LookupType>
  433. inline bool FindEntryWithKey(const LookupType& key, int *const i, int *const last, uint *const targetBucket)
  434. {
  435. int * localBuckets = buckets;
  436. if (localBuckets != nullptr)
  437. {
  438. hash_t hashCode = GetHashCodeWithKey<LookupType>(key);
  439. *targetBucket = this->GetBucket(hashCode);
  440. *last = -1;
  441. EntryType * localEntries = entries;
  442. for (*i = localBuckets[*targetBucket]; *i >= 0;)
  443. {
  444. TBKey k = this->GetKey(*i);
  445. if (this->RemoveIfCollected(k, i, *last, *targetBucket))
  446. {
  447. continue;
  448. }
  449. if (Comparer<TBKey>::Equals(k, key))
  450. {
  451. return true;
  452. }
  453. *last = *i;
  454. *i = localEntries[*i].next;
  455. }
  456. }
  457. return false;
  458. }
  459. private:
  460. void Resize()
  461. {
  462. AutoDoResize autoDoResize(*this);
  463. __analysis_assert(count > 1);
  464. int newSize = SizePolicy::GetNextSize(count);
  465. int modIndex = UNKNOWN_MOD_INDEX;
  466. uint newBucketCount = SizePolicy::GetBucketSize(newSize, &modIndex);
  467. __analysis_assume(newSize > count);
  468. int* newBuckets = nullptr;
  469. EntryType* newEntries = nullptr;
  470. WeakType* newWeakRefs = nullptr;
  471. if (newBucketCount == bucketCount)
  472. {
  473. // no need to rehash
  474. newEntries = AllocateEntries(newSize);
  475. newWeakRefs = AllocateWeakRefs(newSize);
  476. CopyArray<EntryType>(newEntries, newSize, entries, count);
  477. CopyArray<WeakType>(newWeakRefs, newSize, weakRefs, count); // TODO: concurrency issues?
  478. this->entries = newEntries;
  479. this->weakRefs = newWeakRefs;
  480. this->size = newSize;
  481. this->modFunctionIndex = modIndex;
  482. return;
  483. }
  484. Allocate(&newBuckets, &newEntries, &newWeakRefs, newBucketCount, newSize);
  485. this->modFunctionIndex = modIndex;
  486. // Need to re-bucket the entries
  487. // Also need to account for whether the weak refs have been collected
  488. // Have to go in order so we can remove entries as appropriate
  489. this->count = 0;
  490. for (uint i = 0; i < this->bucketCount; ++i)
  491. {
  492. if (this->buckets[i] < 0)
  493. {
  494. continue;
  495. }
  496. for (int currentEntry = this->buckets[i]; currentEntry != -1; )
  497. {
  498. if (IsFreeEntry(entries[currentEntry]))
  499. {
  500. // Entry is free; this shouldn't happen, but stop following the chain
  501. AssertMsg(false, "Following bucket chains should not result in free entries");
  502. break;
  503. }
  504. if (this->weakRefs[currentEntry] == nullptr)
  505. {
  506. // The weak ref has been collected; don't bother bringing it to the new collection
  507. currentEntry = this->entries[currentEntry].next;
  508. continue;
  509. }
  510. hash_t hashCode = GetHashCodeWithKey<TBKey>(this->GetKey(currentEntry));
  511. int bucket = GetBucket(hashCode, newBucketCount, modFunctionIndex);
  512. newEntries[count].next = newBuckets[bucket];
  513. newEntries[count].SetValue(this->entries[currentEntry].Value());
  514. newWeakRefs[count] = this->weakRefs[currentEntry];
  515. newBuckets[bucket] = count;
  516. ++count;
  517. currentEntry = this->entries[currentEntry].next;
  518. }
  519. }
  520. this->freeCount = 0;
  521. this->freeList = 0;
  522. this->buckets = newBuckets;
  523. this->entries = newEntries;
  524. this->weakRefs = newWeakRefs;
  525. this->bucketCount = newBucketCount;
  526. this->size = newSize;
  527. }
  528. template <InsertOperations op>
  529. int Insert(const TBKey& key, const TBValue& value)
  530. {
  531. int * localBuckets = buckets;
  532. if (localBuckets == nullptr)
  533. {
  534. Initialize(0);
  535. localBuckets = buckets;
  536. }
  537. #if DBG
  538. // Always search and verify
  539. const bool needSearch = true;
  540. #else
  541. const bool needSearch = (op != Insert_Add);
  542. #endif
  543. hash_t hashCode = GetHashCodeWithKey<TBKey>(key);
  544. uint targetBucket = this->GetBucket(hashCode);
  545. if (needSearch)
  546. {
  547. EntryType * localEntries = entries;
  548. for (int i = localBuckets[targetBucket], last = -1; i >= 0;)
  549. {
  550. TBKey k = this->GetKey(i);
  551. if (this->RemoveIfCollected(k, &i, last, targetBucket))
  552. {
  553. continue;
  554. }
  555. if (Comparer<TBKey>::Equals(k, key))
  556. {
  557. Assert(op != Insert_Add);
  558. if (op == Insert_Item)
  559. {
  560. SetValue(value, i);
  561. return i;
  562. }
  563. return -1;
  564. }
  565. last = i;
  566. i = localEntries[i].next;
  567. }
  568. }
  569. // Ideally we'd do cleanup only if weak references have been collected since the last resize
  570. // but that would require us to use an additional field to store the last recycler cleanup id
  571. // that we saw
  572. // We can add that optimization later if we have to.
  573. if (freeCount == 0 && count == size)
  574. {
  575. this->Cleanup();
  576. }
  577. int index;
  578. if (freeCount != 0)
  579. {
  580. Assert(freeCount > 0);
  581. Assert(freeList >= 0);
  582. Assert(freeList < count);
  583. index = freeList;
  584. freeCount--;
  585. if (freeCount != 0)
  586. {
  587. freeList = GetNextFreeEntryIndex(entries[index]);
  588. }
  589. }
  590. else
  591. {
  592. // If there's nothing free, then in general, we set index to count, and increment count
  593. // If we resize, we also need to recalculate the target
  594. // However, if cleanup is supported, then before resize, we should try and clean up and see
  595. // if something got freed, and if it did, reuse that index
  596. if (count == size)
  597. {
  598. Resize();
  599. targetBucket = this->GetBucket(hashCode);
  600. index = count;
  601. count++;
  602. }
  603. else
  604. {
  605. index = count;
  606. count++;
  607. }
  608. Assert(count <= size);
  609. Assert(index < size);
  610. }
  611. SetKeyValue(key, value, index);
  612. entries[index].next = buckets[targetBucket];
  613. buckets[targetBucket] = index;
  614. return index;
  615. }
  616. __ecount(bucketCount) int *AllocateBuckets(DECLSPEC_GUARD_OVERFLOW const uint bucketCount)
  617. {
  618. return
  619. AllocateArray<AllocType, int, false>(
  620. TRACK_ALLOC_INFO(alloc, int, AllocType, 0, bucketCount),
  621. TypeAllocatorFunc<AllocType, int>::GetAllocFunc(),
  622. bucketCount);
  623. }
  624. __ecount(size) EntryType * AllocateEntries(DECLSPEC_GUARD_OVERFLOW int size, const bool zeroAllocate = true)
  625. {
  626. // Note that the choice of leaf/non-leaf node is decided for the EntryType on the basis of TValue. By default, if
  627. // TValue is a pointer, a non-leaf allocation is done. This behavior can be overridden by specializing
  628. // TypeAllocatorFunc for TValue.
  629. return
  630. AllocateArray<Recycler, EntryType, false>(
  631. TRACK_ALLOC_INFO(alloc, EntryType, Recycler, 0, size),
  632. zeroAllocate ? EntryAllocatorFuncType::GetAllocZeroFunc() : EntryAllocatorFuncType::GetAllocFunc(),
  633. size);
  634. }
  635. __ecount(size) WeakType * AllocateWeakRefs(DECLSPEC_GUARD_OVERFLOW int size)
  636. {
  637. return alloc->CreateWeakReferenceRegion<TWeak>(size);
  638. }
  639. void Allocate(__deref_out_ecount(bucketCount) int** ppBuckets, __deref_out_ecount(size) EntryType** ppEntries, __deref_out_ecount(size) WeakType** ppWeakRefs, DECLSPEC_GUARD_OVERFLOW uint bucketCount, DECLSPEC_GUARD_OVERFLOW int size)
  640. {
  641. int *const newBuckets = AllocateBuckets(bucketCount);
  642. Assert(newBuckets); // no-throw allocators are currently not supported
  643. EntryType *newEntries = AllocateEntries(size);
  644. Assert(newEntries); // no-throw allocators are currently not supported
  645. WeakType * newWeakRefs = AllocateWeakRefs(size);
  646. memset(newBuckets, -1, bucketCount * sizeof(newBuckets[0]));
  647. *ppBuckets = newBuckets;
  648. *ppEntries = newEntries;
  649. *ppWeakRefs = newWeakRefs;
  650. }
  651. template <bool weakKey = keyIsWeak>
  652. inline typename enable_if<weakKey, TBKey>::type GetKey(int index) const
  653. {
  654. return this->weakRefs[index];
  655. }
  656. template <bool weakKey = keyIsWeak>
  657. inline typename enable_if<!weakKey, TBKey>::type GetKey(int index) const
  658. {
  659. return this->entries[index].Value();
  660. }
  661. template <bool weakKey = keyIsWeak>
  662. inline typename enable_if<weakKey, TBValue>::type GetValue(int index) const
  663. {
  664. return this->entries[index].Value();
  665. }
  666. template <bool weakKey = keyIsWeak>
  667. inline typename enable_if<!weakKey, TBValue>::type GetValue(int index) const
  668. {
  669. return this->weakRefs[index];
  670. }
  671. template <bool weakKey = keyIsWeak>
  672. inline typename enable_if<weakKey, bool>::type RemoveIfCollected(TBKey const key, int* i, int last, int targetBucket)
  673. {
  674. if (key == nullptr)
  675. {
  676. // Key has been collected
  677. int next = entries[*i].next;
  678. RemoveAt(*i, last, targetBucket);
  679. *i = next;
  680. return true;
  681. }
  682. return false;
  683. }
  684. template <bool weakKey = keyIsWeak>
  685. inline typename enable_if<!weakKey, bool>::type RemoveIfCollected(TBKey const key, int* i, int last, int targetBucket)
  686. {
  687. return false;
  688. }
  689. template <bool weakKey = keyIsWeak>
  690. inline typename enable_if<weakKey, void>::type SetKeyValue(const TBKey& key, const TBValue& value, int index)
  691. {
  692. weakRefs[index] = key;
  693. entries[index].SetValue(value);
  694. }
  695. template <bool weakKey = keyIsWeak>
  696. inline typename enable_if<!weakKey, void>::type SetKeyValue(const TBKey& key, const TBValue& value, int index)
  697. {
  698. entries[index].SetValue(key);
  699. weakRefs[index] = value;
  700. }
  701. template <bool weakKey = keyIsWeak>
  702. inline typename enable_if<weakKey, void>::type SetValue(const TBValue& value, int index)
  703. {
  704. this->entries[index].SetValue(value);
  705. }
  706. template <bool weakKey = keyIsWeak>
  707. inline typename enable_if<!weakKey, void>::type SetValue(const TBValue& value, int index)
  708. {
  709. this->weakRefs[index] = value;
  710. }
  711. };
// Weak-VALUE dictionary built on SplitWeakDictionary (keyIsWeak == false):
// keys (TKey) are held strongly in the entry array, values (TValue) live in a
// recycler weak reference region and may be collected at any time.
template <
    class TKey,
    class TValue,
    class SizePolicy = PowerOf2SizePolicy,
    template <typename ValueOrKey> class Comparer = DefaultComparer,
    typename Lock = NoResizeLock
>
class WeakReferenceRegionDictionary : public SplitWeakDictionary<TKey, TValue, false, SizePolicy, Comparer, Lock, Recycler>
{
    typedef SplitWeakDictionary<TKey, TValue, false, SizePolicy, Comparer, Lock, Recycler> Base;
    typedef typename Base::EntryType EntryType;
    // With keyIsWeak == false the weak half holds the values.
    typedef typename Base::WeakType ValueType;

public:
    // capacity <= 0 defers table allocation to the first insert.
    WeakReferenceRegionDictionary(Recycler* allocator, int capacity = 0)
        : Base(allocator, capacity)
    {
    }
};
  730. template <
  731. class TKey,
  732. class TValue,
  733. template <typename ValueOrKey> class Comparer = DefaultComparer,
  734. class SizePolicy = PrimeSizePolicy,
  735. typename Lock = NoResizeLock
  736. >
  737. class WeakReferenceRegionKeyDictionary : public SplitWeakDictionary<TValue, TKey, true, SizePolicy, Comparer, Lock, Recycler>
  738. {
  739. typedef SplitWeakDictionary<TValue, TKey, true, SizePolicy, Comparer, Lock, Recycler> Base;
  740. typedef typename Base::EntryType EntryType;
  741. typedef typename Base::WeakType KeyType;
  742. public:
  743. // Allow WeakReferenceRegionKeyDictionary field to be inlined in classes with DEFINE_VTABLE_CTOR_MEMBER_INIT
  744. WeakReferenceRegionKeyDictionary(VirtualTableInfoCtorEnum v): Base(v) { }
  745. WeakReferenceRegionKeyDictionary(Recycler* allocator, int capacity = 0)
  746. : Base(allocator, capacity)
  747. {
  748. }
  749. template <class Fn>
  750. void Map(Fn fn)
  751. {
  752. this->MapAndRemoveIf([=](EntryType& entry, KeyType& weakRef)
  753. {
  754. if (weakRef == nullptr)
  755. {
  756. return true;
  757. }
  758. fn(weakRef, entry.Value(), weakRef);
  759. return false;
  760. });
  761. }
  762. bool Clean()
  763. {
  764. this->Cleanup();
  765. return this->freeCount > 0;
  766. }
  767. };
  768. #endif // ENABLE_WEAK_REFERENCE_REGIONS
  769. };