// ArenaAllocator.h
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
#ifdef PROFILE_MEM
struct ArenaMemoryData;
#endif
namespace Memory
{
// Arena allocator
// Anew*/Adelete* bind the generic Allocator* construction/destruction macros
// to ArenaAllocator. Naming convention of the underlying macros:
//   Z      - zero-initialized allocation
//   Plus   - allocate `size` extra trailing bytes after the object
//   Struct - construct without constructor arguments
//   Array  - allocate `count` objects of type T
#define Anew(alloc,T,...) AllocatorNew(ArenaAllocator, alloc, T, __VA_ARGS__)
#define AnewZ(alloc,T,...) AllocatorNewZ(ArenaAllocator, alloc, T, __VA_ARGS__)
#define AnewPlus(alloc, size, T, ...) AllocatorNewPlus(ArenaAllocator, alloc, size, T, __VA_ARGS__)
#define AnewPlusZ(alloc, size, T, ...) AllocatorNewPlusZ(ArenaAllocator, alloc, size, T, __VA_ARGS__)
#define AnewStruct(alloc,T) AllocatorNewStruct(ArenaAllocator, alloc, T)
#define AnewStructZ(alloc,T) AllocatorNewStructZ(ArenaAllocator, alloc, T)
#define AnewStructPlus(alloc, size, T) AllocatorNewStructPlus(ArenaAllocator, alloc, size, T)
#define AnewArray(alloc, T, count) AllocatorNewArray(ArenaAllocator, alloc, T, count)
#define AnewArrayZ(alloc, T, count) AllocatorNewArrayZ(ArenaAllocator, alloc, T, count)
#define Adelete(alloc, obj) AllocatorDelete(ArenaAllocator, alloc, obj)
#define AdeletePlus(alloc, size, obj) AllocatorDeletePlus(ArenaAllocator, alloc, size, obj)
#define AdeleteArray(alloc, count, obj) AllocatorDeleteArray(ArenaAllocator, alloc, count, obj)
  23. #define AdeleteUnlessNull(alloc, obj) \
  24. if (obj != nullptr) \
  25. { \
  26. Adelete(alloc, obj); \
  27. obj = nullptr; \
  28. }
// NoThrow variants: allocation failure is reported by returning nullptr
// instead of invoking the allocator's out-of-memory handler (see
// ArenaAllocator::NoThrowAlloc below).
#define AnewNoThrow(alloc,T,...) AllocatorNewNoThrow(ArenaAllocator, alloc, T, __VA_ARGS__)
#define AnewNoThrowZ(alloc,T,...) AllocatorNewNoThrowZ(ArenaAllocator, alloc, T, __VA_ARGS__)
#define AnewNoThrowPlus(alloc, size, T, ...) AllocatorNewNoThrowPlus(ArenaAllocator, alloc, size, T, __VA_ARGS__)
#define AnewNoThrowPlusZ(alloc, size, T, ...) AllocatorNewNoThrowPlusZ(ArenaAllocator, alloc, size, T, __VA_ARGS__)
#define AnewNoThrowStruct(alloc,T) AllocatorNewNoThrowStruct(ArenaAllocator, alloc, T)
#define AnewNoThrowStructZ(alloc,T) AllocatorNewNoThrowStructZ(ArenaAllocator, alloc, T)
#define AnewNoThrowArray(alloc, T, count) AllocatorNewNoThrowArray(ArenaAllocator, alloc, T, count)
#define AnewNoThrowArrayZ(alloc, T, count) AllocatorNewNoThrowArrayZ(ArenaAllocator, alloc, T, count)
// JitAnew*/JitAdelete*: the same shorthands bound to JitArenaAllocator
// (used by the JIT backend; see JitArenaAllocator below).
#define JitAnew(alloc,T,...) AllocatorNew(JitArenaAllocator, alloc, T, __VA_ARGS__)
#define JitAnewZ(alloc,T,...) AllocatorNewZ(JitArenaAllocator, alloc, T, __VA_ARGS__)
#define JitAnewPlus(alloc, size, T, ...) AllocatorNewPlus(JitArenaAllocator, alloc, size, T, __VA_ARGS__)
#define JitAnewPlusZ(alloc, size, T, ...) AllocatorNewPlusZ(JitArenaAllocator, alloc, size, T, __VA_ARGS__)
#define JitAnewStruct(alloc,T) AllocatorNewStruct(JitArenaAllocator, alloc, T)
#define JitAnewStructZ(alloc,T) AllocatorNewStructZ(JitArenaAllocator, alloc, T)
#define JitAnewStructPlus(alloc, size, T) AllocatorNewStructPlus(JitArenaAllocator, alloc, size, T)
#define JitAnewArray(alloc, T, count) AllocatorNewArray(JitArenaAllocator, alloc, T, count)
#define JitAnewArrayZ(alloc, T, count) AllocatorNewArrayZ(JitArenaAllocator, alloc, T, count)
#define JitAdelete(alloc, obj) AllocatorDelete(JitArenaAllocator, alloc, obj)
#define JitAdeletePlus(alloc, size, obj) AllocatorDeletePlus(JitArenaAllocator, alloc, size, obj)
#define JitAdeleteArray(alloc, count, obj) AllocatorDeleteArray(JitArenaAllocator, alloc, count, obj)
#define JitAnewNoThrow(alloc,T,...) AllocatorNewNoThrow(JitArenaAllocator, alloc, T, __VA_ARGS__)
#define JitAnewNoThrowZ(alloc,T,...) AllocatorNewNoThrowZ(JitArenaAllocator, alloc, T, __VA_ARGS__)
#define JitAnewNoThrowPlus(alloc, size, T, ...) AllocatorNewNoThrowPlus(JitArenaAllocator, alloc, size, T, __VA_ARGS__)
#define JitAnewNoThrowPlusZ(alloc, size, T, ...) AllocatorNewNoThrowPlusZ(JitArenaAllocator, alloc, size, T, __VA_ARGS__)
#define JitAnewNoThrowStruct(alloc,T) AllocatorNewNoThrowStruct(JitArenaAllocator, alloc, T)
#define JitAnewNoThrowArray(alloc, T, count) AllocatorNewNoThrowArray(JitArenaAllocator, alloc, T, count)
#define JitAnewNoThrowArrayZ(alloc, T, count) AllocatorNewNoThrowArrayZ(JitArenaAllocator, alloc, T, count)
struct BigBlock;
// Header placed in front of every chunk of memory owned by an arena.
// Blocks form a singly linked list; the usable payload starts immediately
// after this header (see GetBytes).
struct ArenaMemoryBlock
{
    union
    {
        // Next block in a plain (heap-backed) block list.
        ArenaMemoryBlock * next;
        // Same link viewed as a BigBlock when this header fronts a
        // page-backed block (BigBlock derives from this struct).
        BigBlock * nextBigBlock;
    };
    // Payload size in bytes, excluding this header.
    size_t nbytes;
    // Payload begins right after the header; note the offset is
    // sizeof(ArenaMemoryBlock), so derived headers shadow this method.
    char * GetBytes() const
    {
        return ((char *)this) + sizeof(ArenaMemoryBlock);
    }
};
// A page-backed arena block: extends ArenaMemoryBlock with the PageAllocation
// it was carved from. GetBytes deliberately shadows the base version because
// the payload starts after the larger BigBlock header.
struct BigBlock : public ArenaMemoryBlock
{
public:
    // The underlying page allocation this block lives in.
    PageAllocation * allocation;
    // NOTE(review): presumably the bump-allocation position within this
    // block - confirm against the .cpp, it is not read in this header.
    size_t currentByte;
    char * GetBytes() const
    {
        return ((char *)this) + sizeof(BigBlock);
    }
};
// Arena memory is intended to be used from a single thread only; this checks
// thread affinity via the page allocator.
#define ASSERT_THREAD() AssertMsg(this->pageAllocator->ValidThreadAccess(), "Arena allocation should only be used by a single thread")
// Basic data layout of arena allocators. This data should be all that is needed
// to perform operations that traverse all allocated memory in the arena:
// the recycler used this to mark through registered arenas and inline cache
// allocator uses this to zero out allocated memory.
class ArenaData
{
protected:
    ArenaData(PageAllocator * pageAllocator);
protected:
    BigBlock * bigBlocks;            // page-backed blocks (head is the current cache block)
    BigBlock * fullBlocks;           // page-backed blocks with no room left
    ArenaMemoryBlock * mallocBlocks; // heap-backed (non-page) blocks
    PageAllocator * pageAllocator;
    char * cacheBlockCurrent;        // current allocation position in the cache block
    bool lockBlockList;              // when true, the block lists must not be mutated
public:
    BigBlock* GetBigBlocks(bool background)
    {
        // Foreground callers first sync the cached allocation position back
        // into the block; background callers (e.g. concurrent work) must not.
        if (!background)
        {
            UpdateCacheBlock();
        }
        return bigBlocks;
    }
    BigBlock* GetFullBlocks() { return fullBlocks; }
    ArenaMemoryBlock * GetMemoryBlocks() { return mallocBlocks; }
    PageAllocator * GetPageAllocator() const
    {
        return pageAllocator;
    }
    bool IsBlockListLocked() { return lockBlockList; }
    void SetLockBlockList(bool lock) { lockBlockList = lock; }
protected:
    // Writes cacheBlockCurrent back into the current big block (see .cpp).
    void UpdateCacheBlock() const;
};
// Implements most of memory management operations over ArenaData.
// The TFreeListPolicy handles free-listing for "small objects". There
// is no support for free-listing for "large objects".
#if defined(_M_X64_OR_ARM64)
// Some data structures such as jmp_buf expect to be 16 byte aligned on AMD64.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg = 4, bool RequireObjectAlignment = false, size_t MaxObjectSize = 0>
#else
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg = 3, bool RequireObjectAlignment = false, size_t MaxObjectSize = 0>
#endif
class ArenaAllocatorBase : public Allocator, public ArenaData
{
private:
    // cacheBlockEnd may be the start address of actual GC memory, tag it to
    // avoid GC false positive
    TaggedPointer<char> cacheBlockEnd;
    // NOTE(review): appears to track the largest unused gap among retired
    // blocks (asserted 0 in the single-block case below) - confirm in .cpp.
    size_t largestHole;
    uint blockState; // 0 = no block, 1 = one big block, other more then one big block or have malloc blocks
#ifdef PROFILE_MEM
    LPCWSTR name;     // arena name used for memory profiling output
#endif
#ifdef PROFILE_MEM
    struct ArenaMemoryData * memoryData;
#endif
#if DBG
    bool needsDelayFreeList;
#endif
public:
    static const size_t ObjectAlignmentBitShift = ObjectAlignmentBitShiftArg;
    static const size_t ObjectAlignment = 1 << ObjectAlignmentBitShift;
    static const size_t ObjectAlignmentMask = ObjectAlignment - 1;
    static const bool FakeZeroLengthArray = true;
    // Upper bound for the free-list policies' size buckets (see the
    // `buckets` constants in the policy classes below).
    static const size_t MaxSmallObjectSize = 1024;
    ArenaAllocatorBase(__in char16 const* name, PageAllocator * pageAllocator, void (*outOfMemoryFunc)(), void (*recoverMemoryFunc)() = JsUtil::ExternalApi::RecoverUnusedMemory);
    ~ArenaAllocatorBase();
    // Logically frees everything in the arena. Fast path: when the arena
    // holds exactly one big block (blockState == 1), keep the block and just
    // rewind the allocation pointer to its start; otherwise FullReset().
    void Reset()
    {
        ASSERT_THREAD();
        Assert(!lockBlockList);
        freeList = TFreeListPolicy::Reset(freeList);
#ifdef PROFILE_MEM
        LogReset();
#endif
        ArenaMemoryTracking::ReportFreeAll(this);
        if (this->blockState == 1)
        {
            // Single big block, no full/malloc blocks: rewind in place.
            Assert(this->bigBlocks != nullptr && this->fullBlocks == nullptr && this->mallocBlocks == nullptr && this->bigBlocks->nextBigBlock == nullptr);
            Assert(this->largestHole == 0);
            Assert(cacheBlockEnd == bigBlocks->GetBytes() + bigBlocks->nbytes);
            Assert(bigBlocks->GetBytes() <= cacheBlockCurrent && cacheBlockCurrent <= cacheBlockEnd);
            cacheBlockCurrent = bigBlocks->GetBytes();
#ifdef PROFILE_MEM
            LogRealAlloc(bigBlocks->allocation->GetSize() + sizeof(PageAllocation));
#endif
            return;
        }
        FullReset();
    }
    // Transfers srcAllocator's memory into this arena (see .cpp).
    void Move(ArenaAllocatorBase *srcAllocator);
    // Releases all pages and heap blocks and returns the arena to its empty
    // initial state (blockState == 0, all lists null).
    void Clear()
    {
        ASSERT_THREAD();
        Assert(!lockBlockList);
        ArenaMemoryTracking::ReportFreeAll(this);
        freeList = TFreeListPolicy::Reset(freeList);
#ifdef ARENA_ALLOCATOR_FREE_LIST_SIZE
        this->freeListSize = 0;
#endif
        ReleaseMemory();
        this->cacheBlockCurrent = nullptr;
        this->cacheBlockEnd = nullptr;
        this->bigBlocks = nullptr;
        this->fullBlocks = nullptr;
        this->largestHole = 0;
        this->mallocBlocks = nullptr;
        this->blockState = 0;
    }
    size_t AllocatedSize(); // amount of memory allocated
    size_t Size(); // amount of allocated memory is used.
    // Total bytes currently sitting on the free list (0 when the feature
    // is compiled out).
    size_t FreeListSize()
    {
#ifdef ARENA_ALLOCATOR_FREE_LIST_SIZE
        return this->freeListSize;
#else
        return 0;
#endif
    }
    static size_t GetAlignedSize(size_t size) { return AllocSizeMath::Align(size, ArenaAllocatorBase::ObjectAlignment); }
    char * AllocInternal(DECLSPEC_GUARD_OVERFLOW size_t requestedBytes);
    char* Realloc(void* buffer, DECLSPEC_GUARD_OVERFLOW size_t existingBytes, DECLSPEC_GUARD_OVERFLOW size_t requestedBytes);
    void Free(void * buffer, size_t byteSize);
#if DBG
    bool HasDelayFreeList() const
    {
        return needsDelayFreeList;
    }
    void SetNeedsDelayFreeList()
    {
        needsDelayFreeList = true;
    }
    void MergeDelayFreeList();
#endif
#ifdef TRACK_ALLOC
    // Doesn't support tracking information, dummy implementation
    ArenaAllocatorBase * TrackAllocInfo(TrackAllocData const& data) { return this; }
    void ClearTrackAllocInfo(TrackAllocData* data = nullptr) {}
#endif
protected:
    char * RealAlloc(DECLSPEC_GUARD_OVERFLOW size_t nbytes);
    __forceinline char * RealAllocInlined(DECLSPEC_GUARD_OVERFLOW size_t nbytes);
private:
#ifdef PROFILE_MEM
    void LogBegin();
    void LogReset();
    void LogEnd();
    void LogAlloc(size_t requestedBytes, size_t allocateBytes);
    void LogRealAlloc(size_t size);
#endif
    static size_t AllocatedSize(ArenaMemoryBlock * blockList);
    static size_t Size(BigBlock * blockList);
    void FullReset();
    void SetCacheBlock(BigBlock * cacheBlock);
    template <bool DoRecoverMemory> char * AllocFromHeap(DECLSPEC_GUARD_OVERFLOW size_t nbytes);
    void ReleaseMemory();
    void ReleasePageMemory();
    void ReleaseHeapMemory();
    // Slow allocation path taken when the cache block cannot satisfy the
    // request (see .cpp).
    char * SnailAlloc(DECLSPEC_GUARD_OVERFLOW size_t nbytes);
    BigBlock * AddBigBlock(size_t pages);
#ifdef ARENA_ALLOCATOR_FREE_LIST_SIZE
    size_t freeListSize;
#endif
    // Opaque free-list state owned by TFreeListPolicy.
    void * freeList;
#ifdef PROFILE_MEM
    void LogFree(size_t size);
    void LogReuse(size_t size);
#endif
};
// Implements free-listing in-place. Bucketizes allocation sizes, there is a free list
// per bucket. The freed memory is treated as nodes in the lists.
class InPlaceFreeListPolicy
{
private:
    // Free list support; the freed memory itself serves as the list node.
    struct FreeObject
    {
        FreeObject * next;
    };
private:
    // One bucket per ObjectAlignment-sized size class up to MaxSmallObjectSize.
    static const uint buckets =
        ArenaAllocatorBase<InPlaceFreeListPolicy>::MaxSmallObjectSize >> ArenaAllocatorBase<InPlaceFreeListPolicy>::ObjectAlignmentBitShift;
public:
#ifdef DBG
    // NOTE(review): declared under #ifdef DBG but consumed below under
    // #ifdef ARENA_MEMORY_VERIFY - this only compiles if ARENA_MEMORY_VERIFY
    // implies DBG; confirm against the build configuration.
    static const unsigned char DbgFreeMemFill = DbgMemFill;
#endif
    static void * New(ArenaAllocatorBase<InPlaceFreeListPolicy> * allocator);
    static void * Allocate(void * policy, DECLSPEC_GUARD_OVERFLOW size_t size);
    static void * Free(void * policy, void * object, size_t size);
    static void * Reset(void * policy);
#if DBG
    static void MergeDelayFreeList(void * freeList);
#endif
    // Debug-fills a freed object so use-after-free reads are detectable.
    static void PrepareFreeObject(__out_bcount(size) void * object, _In_ size_t size)
    {
#ifdef ARENA_MEMORY_VERIFY
        memset(object, InPlaceFreeListPolicy::DbgFreeMemFill, size);
#endif
    }
#ifdef ARENA_MEMORY_VERIFY
    static void VerifyFreeObjectIsFreeMemFilled(void * object, size_t size);
#endif
    // The in-place policy owns no separate storage, so nothing to release.
    static void Release(void * policy) {}
};
// Implements free-listing in separate memory. Bucketizes allocation sizes, there is a free list
// per bucket. Space for the free lists is allocated from the heap. This is used by
// InlineCacheAllocator to quickly zero out the entire arena w/o loosing the free lists.
class StandAloneFreeListPolicy
{
private:
    // Free-list node kept outside the arena: the freed object's address plus
    // the index of the next entry.
    // NOTE(review): the exact `next` index encoding (0-terminated vs 1-based)
    // is defined in the .cpp - confirm there.
    struct FreeObjectListEntry
    {
        void * object;
        uint next;
    };
    uint allocated;            // capacity of the `entries` array (see GetPlusSize)
    uint used;                 // entries handed out so far
    uint freeList;             // chain of recycled FreeObjectListEntry slots
    uint* freeObjectLists;     // one list head per size bucket
    FreeObjectListEntry* entries;
    static const uint buckets =
        ArenaAllocatorBase<StandAloneFreeListPolicy>::MaxSmallObjectSize >> ArenaAllocatorBase<StandAloneFreeListPolicy>::ObjectAlignmentBitShift;
    static const uint InitialEntries = 64;
    static const uint MaxEntriesGrowth = 1024;
    static StandAloneFreeListPolicy * NewInternal(uint entriesPerBucket);
    static bool TryEnsureFreeListEntry(StandAloneFreeListPolicy *& _this);
    // Extra bytes appended after the policy object to hold the bucket heads
    // and the entry array.
    static uint GetPlusSize(const StandAloneFreeListPolicy * policy)
    {
        return buckets * sizeof(uint) + policy->allocated * sizeof(FreeObjectListEntry);
    }
public:
#ifdef DBG
    // TODO: Consider making DbgFreeMemFill == DbgFill, now that we have ArenaAllocatorBase properly handling filling when free-listing and asserting debug fill at re-allocation.
    // NOTE(review): as with InPlaceFreeListPolicy, this is guarded by DBG but
    // consumed under ARENA_MEMORY_VERIFY below - confirm the build implies it.
    static const char DbgFreeMemFill = 0x0;
#endif
    static void * New(ArenaAllocatorBase<StandAloneFreeListPolicy> * allocator);
    static void * Allocate(void * policy, DECLSPEC_GUARD_OVERFLOW size_t size);
    static void * Free(void * policy, void * object, size_t size);
    static void * Reset(void * policy);
    // Fills a freed object; the fill byte is 0 here so freed inline-cache
    // memory stays zeroed.
    static void PrepareFreeObject(_Out_writes_bytes_all_(size) void * object, _In_ size_t size)
    {
#ifdef ARENA_MEMORY_VERIFY
        memset(object, StandAloneFreeListPolicy::DbgFreeMemFill, size);
#endif
    }
#ifdef ARENA_MEMORY_VERIFY
    static void VerifyFreeObjectIsFreeMemFilled(void * object, size_t size);
#endif
#if DBG
    static void MergeDelayFreeList(void * freeList);
#endif
    // Frees the heap-allocated policy storage (see .cpp).
    static void Release(void * policy);
};
// Memory fault-injection point for arena allocations. Expands in a context
// where `outOfMemoryFunc` is in scope: when an OOM callback is installed the
// injected failure throws; otherwise the non-throwing variant is used.
#define ARENA_FAULTINJECT_MEMORY(name, size) { \
    if (outOfMemoryFunc) \
    { \
        FAULTINJECT_MEMORY_THROW(name, size); \
    } \
    else \
    { \
        FAULTINJECT_MEMORY_NOTHROW(name, size); \
    } \
}
  346. // This allocator by default on OOM makes an attempt to recover memory from Recycler and further throws if that doesn't help the allocation.
  347. class ArenaAllocator : public ArenaAllocatorBase<InPlaceFreeListPolicy>
  348. {
  349. public:
  350. ArenaAllocator(__in LPCWSTR name, PageAllocator * pageAllocator, void (*outOfMemoryFunc)(), void (*recoverMemoryFunc)() = JsUtil::ExternalApi::RecoverUnusedMemory) :
  351. ArenaAllocatorBase<InPlaceFreeListPolicy>(name, pageAllocator, outOfMemoryFunc, recoverMemoryFunc)
  352. {
  353. }
  354. __forceinline
  355. char * Alloc(DECLSPEC_GUARD_OVERFLOW size_t requestedBytes)
  356. {
  357. return AllocInternal(requestedBytes);
  358. }
  359. char * AllocZero(DECLSPEC_GUARD_OVERFLOW size_t nbytes)
  360. {
  361. char * buffer = Alloc(nbytes);
  362. memset(buffer, 0, nbytes);
  363. #if DBG
  364. // Since we successfully allocated, we shouldn't have integer overflow here
  365. memset(buffer + nbytes, 0, Math::Align(nbytes, ArenaAllocatorBase::ObjectAlignment) - nbytes);
  366. #endif
  367. return buffer;
  368. }
  369. char * AllocLeaf(DECLSPEC_GUARD_OVERFLOW size_t requestedBytes)
  370. {
  371. // Leaf allocation is not meaningful here, but needed by Allocator-templatized classes that may call one of the Leaf versions of AllocatorNew
  372. return Alloc(requestedBytes);
  373. }
  374. char * NoThrowAlloc(DECLSPEC_GUARD_OVERFLOW size_t requestedBytes)
  375. {
  376. void (*tempOutOfMemoryFunc)() = outOfMemoryFunc;
  377. outOfMemoryFunc = nullptr;
  378. char * buffer = AllocInternal(requestedBytes);
  379. outOfMemoryFunc = tempOutOfMemoryFunc;
  380. return buffer;
  381. }
  382. char * NoThrowAllocZero(DECLSPEC_GUARD_OVERFLOW size_t requestedBytes)
  383. {
  384. char * buffer = NoThrowAlloc(requestedBytes);
  385. if (buffer != nullptr)
  386. {
  387. memset(buffer, 0, requestedBytes);
  388. }
  389. return buffer;
  390. }
  391. char * NoThrowNoRecoveryAlloc(DECLSPEC_GUARD_OVERFLOW size_t requestedBytes)
  392. {
  393. void (*tempRecoverMemoryFunc)() = recoverMemoryFunc;
  394. recoverMemoryFunc = nullptr;
  395. char * buffer = NoThrowAlloc(requestedBytes);
  396. recoverMemoryFunc = tempRecoverMemoryFunc;
  397. return buffer;
  398. }
  399. char * NoThrowNoRecoveryAllocZero(DECLSPEC_GUARD_OVERFLOW size_t requestedBytes)
  400. {
  401. char * buffer = NoThrowNoRecoveryAlloc(requestedBytes);
  402. if (buffer != nullptr)
  403. {
  404. memset(buffer, 0, requestedBytes);
  405. }
  406. return buffer;
  407. }
  408. };
class JitArenaAllocator : public ArenaAllocator
{
    // The only difference between ArenaAllocator and the JitArenaAllocator is it has fast path of anything of size BVSparseNode (16 bytes)
    // Throughput improvement in the backend is substantial with this freeList.
private:
    typedef BVSparseNode<JitArenaAllocator> BVSparseNode;
    // Dedicated free list of BVSparseNode-sized chunks; freed chunks are
    // relinked through their own storage (see FreeInline).
    BVSparseNode *bvFreeList;
public:
    JitArenaAllocator(__in LPCWSTR name, PageAllocator * pageAllocator, void(*outOfMemoryFunc)(), void(*recoverMemoryFunc)() = JsUtil::ExternalApi::RecoverUnusedMemory) :
        bvFreeList(nullptr), ArenaAllocator(name, pageAllocator, outOfMemoryFunc, recoverMemoryFunc)
    {
    }
    // Allocation: requests of exactly sizeof(BVSparseNode) are served from
    // bvFreeList when possible; everything else uses the normal arena path.
    char * Alloc(DECLSPEC_GUARD_OVERFLOW size_t requestedBytes)
    {
        // Fast path
        if (sizeof(BVSparseNode) == requestedBytes)
        {
            // Fast path for BVSparseNode allocation
            if (bvFreeList)
            {
                BVSparseNode *node = bvFreeList;
                bvFreeList = bvFreeList->next;
                return (char*)node;
            }
            // If the free list is empty, then do the allocation right away for the BVSparseNode size.
            // You could call ArenaAllocator::Alloc here, but direct RealAlloc avoids unnecessary checks.
            return ArenaAllocatorBase::RealAllocInlined(requestedBytes);
        }
        return ArenaAllocator::Alloc(requestedBytes);
    }
    void Free(void * buffer, size_t byteSize)
    {
        return FreeInline(buffer, byteSize);
    }
    // BVSparseNode-sized buffers are pushed onto bvFreeList (reusing the
    // buffer itself as the link node); everything else goes through the
    // regular arena free path.
    __forceinline void FreeInline(void * buffer, size_t byteSize)
    {
        if (sizeof(BVSparseNode) == byteSize)
        {
            //FastPath
            ((BVSparseNode*)buffer)->next = bvFreeList;
            bvFreeList = (BVSparseNode*)buffer;
            return;
        }
        return ArenaAllocator::Free(buffer, byteSize);
    }
    char * AllocZero(DECLSPEC_GUARD_OVERFLOW size_t nbytes)
    {
        return ArenaAllocator::AllocZero(nbytes);
    }
    char * AllocLeaf(DECLSPEC_GUARD_OVERFLOW size_t requestedBytes)
    {
        return ArenaAllocator::AllocLeaf(requestedBytes);
    }
    char * NoThrowAlloc(DECLSPEC_GUARD_OVERFLOW size_t requestedBytes)
    {
        return ArenaAllocator::NoThrowAlloc(requestedBytes);
    }
    char * NoThrowAllocZero(DECLSPEC_GUARD_OVERFLOW size_t requestedBytes)
    {
        return ArenaAllocator::NoThrowAllocZero(requestedBytes);
    }
    // The node free list must be dropped whenever the underlying arena
    // memory is reset/released, or it would point into reclaimed memory.
    void Reset()
    {
        bvFreeList = nullptr;
        ArenaAllocator::Reset();
    }
    void Clear()
    {
        bvFreeList = nullptr;
        ArenaAllocator::Clear();
    }
};
  481. // This allocator by default on OOM does not attempt to recover memory from Recycler, just throws OOM.
  482. class NoRecoverMemoryJitArenaAllocator : public JitArenaAllocator
  483. {
  484. public:
  485. NoRecoverMemoryJitArenaAllocator(__in LPCWSTR name, PageAllocator * pageAllocator, void(*outOfMemoryFunc)()) :
  486. JitArenaAllocator(name, pageAllocator, outOfMemoryFunc, NULL)
  487. {
  488. }
  489. };
  490. // This allocator by default on OOM does not attempt to recover memory from Recycler, just throws OOM.
  491. class NoRecoverMemoryArenaAllocator : public ArenaAllocator
  492. {
  493. public:
  494. NoRecoverMemoryArenaAllocator(__in LPCWSTR name, PageAllocator * pageAllocator, void (*outOfMemoryFunc)()) :
  495. ArenaAllocator(name, pageAllocator, outOfMemoryFunc, NULL)
  496. {
  497. }
  498. };
#define InlineCacheAuxSlotTypeTag 4
#define MinPropertyStringInlineCacheSize 1
#define MinPolymorphicInlineCacheSize 4
#define MaxPolymorphicInlineCacheSize 32
#ifdef PERSISTENT_INLINE_CACHES
// Shared layout constants for the inline cache arena: the cache record
// layout, its free-list overlay, and the alignment derived from them.
class InlineCacheAllocatorInfo
{
public:
    // Layout of one inline cache: three weak slots plus one strong slot.
    struct CacheLayout
    {
        intptr_t weakRefs[3];
        intptr_t strongRef;
    };
    // Free-list overlay of the same storage: the link pointer occupies the
    // strongRef slot, the weak slots are left blank.
    struct FreeObject
    {
        intptr_t blankSlots[3];
        FreeObject * next;
    };
    // A free-listed cache must overlay a live cache exactly.
    CompileAssert(sizeof(CacheLayout) == sizeof(FreeObject));
    CompileAssert(offsetof(CacheLayout, strongRef) == offsetof(FreeObject, next));
#if defined(_M_X64_OR_ARM64)
    CompileAssert(sizeof(CacheLayout) == 32);
    static const size_t ObjectAlignmentBitShift = 5;
#else
    CompileAssert(sizeof(CacheLayout) == 16);
    static const size_t ObjectAlignmentBitShift = 4;
#endif
    // Alignment equals one CacheLayout, so caches never straddle alignment
    // boundaries.
    static const size_t ObjectAlignment = 1 << ObjectAlignmentBitShift;
    static const size_t MaxObjectSize = MaxPolymorphicInlineCacheSize * sizeof(CacheLayout);
};
  529. #define InlineCacheFreeListTag 0x01
  530. #define InlineCacheAllocatorTraits InlineCacheFreeListPolicy, InlineCacheAllocatorInfo::ObjectAlignmentBitShift, true, InlineCacheAllocatorInfo::MaxObjectSize
  531. class InlineCacheFreeListPolicy : public InlineCacheAllocatorInfo
  532. {
  533. public:
  534. #ifdef DBG
  535. static const unsigned char DbgFreeMemFill = DbgMemFill;
  536. #endif
  537. static void * New(ArenaAllocatorBase<InlineCacheAllocatorTraits> * allocator);
  538. static void * Allocate(void * policy, DECLSPEC_GUARD_OVERFLOW size_t size);
  539. static void * Free(void * policy, void * object, size_t size);
  540. static void * Reset(void * policy);
  541. static void Release(void * policy);
  542. static void PrepareFreeObject(_Out_writes_bytes_all_(size) void * object, _In_ size_t size)
  543. {
  544. #ifdef ARENA_MEMORY_VERIFY
  545. // In debug builds if we're verifying arena memory to avoid "use after free" problems, we want to fill the whole object with the debug pattern here.
  546. // There is a very subtle point here. Inline caches can be allocated and freed in batches. This happens commonly when a PolymorphicInlineCache grows,
  547. // frees up its old array of inline caches, and allocates a bigger one. ArenaAllocatorBase::AllocInternal when allocating an object from the free list
  548. // will verify that the entire object - not just its first sizeof(InlineCache) worth of bytes - is filled with the debug pattern.
  549. memset(object, StandAloneFreeListPolicy::DbgFreeMemFill, size);
  550. #else
  551. // On the other hand, in retail builds when we don't do arena memory validation we want to zero out the whole object, so that during every subsequent garbage collection
  552. // we don't try to trace pointers from freed objects (inside ClearCacheIfHasDeadWeakRefs) and check if they are still reachable. Note that in ClearCacheIfHasDeadWeakRefs
  553. // we cannot distinguish between live inline caches and portions of free objects. That's again because inline caches may be allocated and freed in batches, in which case
  554. // only the first cache in the batch gets the free object's next pointer tag. The rest of the batch is indistinguishable from a batch of live caches. Hence, we scan them
  555. // all for pointers to unreachable objects, and it makes good sense to zero these bytes out, to avoid unnecessary Recycler::IsObjectMarked calls.
  556. memset(object, NULL, size);
  557. #endif
  558. }
  559. #if DBG
  560. static void MergeDelayFreeList(void * freeList);
  561. #endif
  562. #ifdef ARENA_MEMORY_VERIFY
  563. static void VerifyFreeObjectIsFreeMemFilled(void * object, size_t size);
  564. #endif
  565. private:
  566. static const uint bucketCount = MaxObjectSize >> ObjectAlignmentBitShift;
  567. FreeObject* freeListBuckets[bucketCount];
  568. static InlineCacheFreeListPolicy * NewInternal();
  569. InlineCacheFreeListPolicy();
  570. bool AreFreeListBucketsEmpty();
  571. };
// Returns whether every one of `size` bytes in `buffer` equals `c`
// (defined in the .cpp).
bool IsAll(byte* buffer, size_t size, byte c);
// Arena dedicated to inline caches (PERSISTENT_INLINE_CACHES build). Debug
// builds can verify the arena is all-zero and lock its pages read-only;
// any subsequent allocation/free/use must Unlock() first.
class InlineCacheAllocator : public InlineCacheAllocatorInfo, public ArenaAllocatorBase<InlineCacheAllocatorTraits>
{
#ifdef POLY_INLINE_CACHE_SIZE_STATS
private:
    size_t polyCacheAllocSize;   // bytes attributed to polymorphic caches
#endif
    bool hasUsedInlineCache;
    bool hasProtoOrStoreFieldInlineCache;
public:
    // Zeroing and freeing w/o leaking is not implemented for large objects
    CompileAssert(MaxObjectSize <= MaxSmallObjectSize);
    InlineCacheAllocator(__in LPCWSTR name, PageAllocator * pageAllocator, void(*outOfMemoryFunc)(), void(*recoverMemoryFunc)() = JsUtil::ExternalApi::RecoverUnusedMemory) :
        ArenaAllocatorBase<InlineCacheAllocatorTraits>(name, pageAllocator, outOfMemoryFunc, recoverMemoryFunc), hasUsedInlineCache(false), hasProtoOrStoreFieldInlineCache(false)
#ifdef POLY_INLINE_CACHE_SIZE_STATS
        , polyCacheAllocSize(0)
#endif
#if DBG
        , verifiedAllZeroAndLockedDown(false)
#endif
    {}
    void SetHasUsedInlineCache(bool value)
    {
        hasUsedInlineCache = value;
#if DBG
        // Using a cache invalidates the "verified all-zero and locked down"
        // state, so make the pages writable again.
        if (hasUsedInlineCache)
        {
            Unlock();
        }
#endif
    }
    bool GetHasUsedInlineCache() { return hasUsedInlineCache; }
    void SetHasProtoOrStoreFieldInlineCache(bool value) { hasProtoOrStoreFieldInlineCache = value; }
    bool GetHasProtoOrStoreFieldInlineCache() { return hasProtoOrStoreFieldInlineCache; }
    char * Alloc(DECLSPEC_GUARD_OVERFLOW size_t requestedBytes)
    {
        DebugOnly(Unlock());
        return AllocInternal(requestedBytes);
    }
    // Allocates and zero-fills; debug builds also zero the alignment padding.
    char * AllocZero(DECLSPEC_GUARD_OVERFLOW size_t nbytes)
    {
        char * buffer = Alloc(nbytes);
        memset(buffer, 0, nbytes);
#if DBG
        // Since we successfully allocated, we shouldn't have integer overflow here
        memset(buffer + nbytes, 0, Math::Align(nbytes, ArenaAllocatorBase::ObjectAlignment) - nbytes);
#endif
        return buffer;
    }
#if DBG
    bool verifiedAllZeroAndLockedDown;
    ~InlineCacheAllocator();
    void CheckIsAllZero(bool lockdown); // lockdown means make the page read only so no need to do the check again
    void Unlock();
    // Debug-only override: unprotect pages before the base Free writes the
    // free-list fill into them. (Retail builds use the base Free directly.)
    void Free(void * buffer, size_t byteSize)
    {
        Unlock();
        __super::Free(buffer, byteSize);
    }
#endif
    void ZeroAll();
    bool IsDeadWeakRef(Recycler* recycler, void* ptr);
    bool CacheHasDeadWeakRefs(Recycler* recycler, CacheLayout* cache);
    bool HasNoDeadWeakRefs(Recycler* recycler);
    void ClearCacheIfHasDeadWeakRefs(Recycler* recycler, CacheLayout* cache);
    void ClearCachesWithDeadWeakRefs(Recycler* recycler);
#ifdef POLY_INLINE_CACHE_SIZE_STATS
    size_t GetPolyInlineCacheSize() { return this->polyCacheAllocSize; }
    void LogPolyCacheAlloc(size_t size) { this->polyCacheAllocSize += size; }
    void LogPolyCacheFree(size_t size) { this->polyCacheAllocSize -= size; }
#endif
};
  644. #else
  645. #define InlineCacheAllocatorTraits StandAloneFreeListPolicy, InlineCacheAllocatorInfo::ObjectAlignmentBitShift, true, InlineCacheAllocatorInfo::MaxObjectSize
  646. class InlineCacheAllocator : public ArenaAllocatorBase<InlineCacheAllocatorTraits>
  647. {
  648. public:
  649. struct CacheLayout
  650. {
  651. intptr_t weakRefs[3];
  652. intptr_t strongRef;
  653. };
  654. #ifdef POLY_INLINE_CACHE_SIZE_STATS
  655. private:
  656. size_t polyCacheAllocSize;
  657. #endif
  658. public:
  659. InlineCacheAllocator(__in LPCWSTR name, PageAllocator * pageAllocator, void(*outOfMemoryFunc)()) :
  660. ArenaAllocatorBase<InlineCacheAllocatorTraits>(name, pageAllocator, outOfMemoryFunc) {}
  661. char * Alloc(DECLSPEC_GUARD_OVERFLOW size_t requestedBytes)
  662. {
  663. return AllocInternal(requestedBytes);
  664. }
  665. char * AllocZero(DECLSPEC_GUARD_OVERFLOW size_t nbytes)
  666. {
  667. char * buffer = Alloc(nbytes);
  668. memset(buffer, 0, nbytes);
  669. #if DBG
  670. // Since we successfully allocated, we shouldn't have integer overflow here
  671. memset(buffer + nbytes, 0, Math::Align(nbytes, ArenaAllocatorBase::ObjectAlignment) - nbytes);
  672. #endif
  673. return buffer;
  674. }
  675. #if DBG
  676. void CheckIsAllZero();
  677. #endif
  678. void ZeroAll();
  679. #ifdef POLY_INLINE_CACHE_SIZE_STATS
  680. size_t GetPolyInlineCacheSize() { return this->polyCacheAllocSize; }
  681. void LogPolyCacheAlloc(size_t size) { this->polyCacheAllocSize += size; }
  682. void LogPolyCacheFree(size_t size) { this->polyCacheAllocSize -= size; }
  683. #endif
  684. };
  685. #endif
  686. #define CacheAllocatorTraits StandAloneFreeListPolicy
  687. class CacheAllocator : public ArenaAllocatorBase<CacheAllocatorTraits>
  688. {
  689. public:
  690. CacheAllocator(__in LPCWSTR name, PageAllocator * pageAllocator, void(*outOfMemoryFunc)()) :
  691. ArenaAllocatorBase<CacheAllocatorTraits>(name, pageAllocator, outOfMemoryFunc)
  692. #if DBG
  693. , verifiedAllZeroAndLockedDown(false)
  694. #endif
  695. {}
  696. char * Alloc(DECLSPEC_GUARD_OVERFLOW size_t requestedBytes)
  697. {
  698. DebugOnly(Unlock());
  699. return AllocInternal(requestedBytes);
  700. }
  701. char * AllocZero(DECLSPEC_GUARD_OVERFLOW size_t nbytes)
  702. {
  703. char * buffer = Alloc(nbytes);
  704. memset(buffer, 0, nbytes);
  705. #if DBG
  706. // Since we successfully allocated, we shouldn't have integer overflow here
  707. memset(buffer + nbytes, 0, Math::Align(nbytes, ArenaAllocatorBase::ObjectAlignment) - nbytes);
  708. #endif
  709. return buffer;
  710. }
  711. #if DBG
  712. bool verifiedAllZeroAndLockedDown;
  713. ~CacheAllocator();
  714. void CheckIsAllZero(bool lockdown);
  715. void Unlock();
  716. void Free(void * buffer, size_t byteSize)
  717. {
  718. Unlock();
  719. __super::Free(buffer, byteSize);
  720. }
  721. #endif
  722. void ZeroAll();
  723. };
  724. #undef ASSERT_THREAD
  725. class RefCounted
  726. {
  727. volatile LONG refCount;
  728. protected:
  729. RefCounted()
  730. : refCount(1)
  731. {
  732. }
  733. virtual ~RefCounted()
  734. {
  735. }
  736. void operator delete(void* p, size_t size)
  737. {
  738. HeapFree(p, size);
  739. }
  740. public:
  741. uint32 AddRef(void)
  742. {
  743. return (uint32)InterlockedIncrement(&refCount);
  744. }
  745. uint32 Release(void)
  746. {
  747. uint32 refs = (uint32)InterlockedDecrement(&refCount);
  748. if (0 == refs)
  749. {
  750. delete this; // invokes overrided operator delete
  751. }
  752. return refs;
  753. }
  754. };
  755. class ReferencedArenaAdapter;
  756. template<class T>
  757. class WeakArenaReference
  758. {
  759. ReferencedArenaAdapter* adapter;
  760. T* p;
  761. public:
  762. WeakArenaReference(ReferencedArenaAdapter* _adapter,T* _p)
  763. : adapter(_adapter),
  764. p(_p)
  765. {
  766. adapter->AddRef();
  767. }
  768. ~WeakArenaReference()
  769. {
  770. adapter->Release();
  771. adapter = NULL;
  772. }
  773. T* GetStrongReference()
  774. {
  775. if(adapter->AddStrongReference())
  776. {
  777. return p;
  778. }
  779. else
  780. {
  781. return NULL;
  782. }
  783. }
  784. void ReleaseStrongReference()
  785. {
  786. adapter->ReleaseStrongReference();
  787. }
  788. };
// This class enables WeakArenaReferences to track whether
// the arena has been deleted, and allows for extending
// the lifetime of the arena for StrongReferences.
// Strong references should be short lived.
//
// The arena is heap-deleted exactly once, on whichever path observes
// "deletion requested and no strong references" first: DeleteArena,
// ReleaseStrongReference, AddStrongReference, or the destructor.
class ReferencedArenaAdapter : public RefCounted
{
    CriticalSection adapterLock;    // guards strongRefCount and arena teardown
    uint32 strongRefCount;          // number of outstanding strong references
    ArenaAllocator* arena;          // owned; nulled out once deleted
    bool deleteFlag;                // set once DeleteArena() has been requested
public:
    ~ReferencedArenaAdapter()
    {
        // Last adapter reference is gone; free the arena if no earlier
        // deletion path already did.
        if (this->arena)
        {
            HeapDelete(this->arena);
        }
    }
    ReferencedArenaAdapter(ArenaAllocator* _arena)
        : RefCounted(),
        strongRefCount(0),
        arena(_arena),
        deleteFlag(false)
    { }
    // Tries to pin the arena. On success, bumps strongRefCount and returns
    // true. Once DeleteArena() has been requested it always returns false,
    // and additionally performs the deferred arena deletion here if no
    // strong references remain.
    bool AddStrongReference()
    {
        bool retval = false;
        adapterLock.Enter();
        if (deleteFlag)
        {
            // Arena exists and is marked deleted, we must fail to acquire a new reference
            if (arena && 0 == strongRefCount)
            {
                // All strong references are gone, delete the arena
                HeapDelete(this->arena);
                this->arena = nullptr;
            }
        }
        else
        {
            // Succeed at acquiring a Strong Reference into the Arena
            strongRefCount++;
            retval = true;
        }
        adapterLock.Leave();
        return retval;
    }
    // Drops one strong reference; performs the deferred arena deletion if
    // this was the last pin and DeleteArena() has already been requested.
    void ReleaseStrongReference()
    {
        adapterLock.Enter();
        strongRefCount--;
        if (deleteFlag && this->arena && 0 == strongRefCount)
        {
            // All strong references are gone, delete the arena
            HeapDelete(this->arena);
            this->arena = NULL;
        }
        adapterLock.Leave();
    }
    // Requests deletion of the arena. If the lock is uncontended and no
    // strong references exist, the arena dies immediately; otherwise
    // deletion is deferred to the release/acquire paths or the destructor.
    // NOTE(review): deleteFlag is written here without holding adapterLock,
    // and Arena() reads it unlocked as well — presumably tolerated by the
    // callers' threading model, but confirm before relying on it.
    void DeleteArena()
    {
        deleteFlag = true;
        if (adapterLock.TryEnter())
        {
            if (0 == strongRefCount)
            {
                // All strong references are gone, delete the arena
                HeapDelete(this->arena);
                this->arena = NULL;
            }
            adapterLock.Leave();
        }
    }
    // Returns the arena, or NULL once deletion has been requested.
    ArenaAllocator* Arena()
    {
        if (!deleteFlag)
        {
            return this->arena;
        }
        return NULL;
    }
};
  871. }
// We don't need these operator delete overloads for the ArenaAllocator.
  873. #if 0
  874. inline void __cdecl
  875. operator delete(void * obj, ArenaAllocator * alloc, char * (ArenaAllocator::*AllocFunc)(size_t))
  876. {
  877. alloc->Free(obj, (size_t)-1);
  878. }
  879. inline void __cdecl
  880. operator delete(void * obj, ArenaAllocator * alloc, char * (ArenaAllocator::*AllocFunc)(size_t), size_t plusSize)
  881. {
  882. alloc->Free(obj, (size_t)-1);
  883. }
  884. #endif