ArenaAllocator.cpp 48 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502150315041505150615071508150915101511
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "CommonMemoryPch.h"
  6. #define ASSERT_THREAD() AssertMsg(this->pageAllocator->ValidThreadAccess(), "Arena allocation should only be used by a single thread")
// Out-of-line definition of the class-scope constant (required pre-C++17 when the constant is ODR-used).
const uint Memory::StandAloneFreeListPolicy::MaxEntriesGrowth;
// Explicit instantiation so the JIT arena's sparse bitvector node lookup is emitted in this translation unit.
template __forceinline BVSparseNode * BVSparse<JitArenaAllocator>::NodeFromIndex(BVIndex i, BVSparseNode *** prevNextFieldOut, bool create);
// ArenaData: the raw block-list state shared by the arena allocator variants.
// All lists start empty; blocks are added lazily by the owning allocator.
ArenaData::ArenaData(PageAllocator * pageAllocator) :
    pageAllocator(pageAllocator),
    bigBlocks(nullptr),         // page-backed blocks with usable space; the head doubles as the cache (bump) block
    mallocBlocks(nullptr),      // heap-backed fallback blocks
    fullBlocks(nullptr),        // page-backed blocks retired with (almost) no space left
    cacheBlockCurrent(nullptr), // bump pointer into the head of bigBlocks
    lockBlockList(false)        // when set, blocks must not migrate between lists
{
}
  18. void ArenaData::UpdateCacheBlock() const
  19. {
  20. if (bigBlocks != nullptr)
  21. {
  22. size_t currentByte = (cacheBlockCurrent - bigBlocks->GetBytes());
  23. // Avoid writing to the page unnecessary, it might be write watched
  24. if (currentByte != bigBlocks->currentByte)
  25. {
  26. bigBlocks->currentByte = currentByte;
  27. }
  28. }
  29. }
// Constructs an empty arena backed by pageAllocator. name identifies the
// arena to the memory profiler/tracker; the optional callbacks hook
// out-of-memory handling and memory recovery (see the Allocator base).
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
ArenaAllocatorBase(__in LPCWSTR name, PageAllocator * pageAllocator, void(*outOfMemoryFunc)(), void(*recoverMemoryFunc)()) :
    Allocator(outOfMemoryFunc, recoverMemoryFunc),
    ArenaData(pageAllocator),
#ifdef ARENA_ALLOCATOR_FREE_LIST_SIZE
    freeListSize(0),    // running total of bytes parked on the free list
#endif
    freeList(nullptr),  // created lazily on first small-object Free
    largestHole(0),     // biggest known tail space among non-cache big blocks
    cacheBlockEnd(nullptr),
    blockState(0)       // bumped per big block; forced to 2 by AllocFromHeap to disable the reset fast path
{
#ifdef PROFILE_MEM
    this->name = name;
    LogBegin();
#endif
    ArenaMemoryTracking::ArenaCreated(this, name);
}
// Releases everything the arena owns. Page memory can only be returned while
// the page allocator is still open; heap-backed blocks are always freed.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
~ArenaAllocatorBase()
{
    Assert(!lockBlockList); // block lists must not be locked at destruction time
    ArenaMemoryTracking::ReportFreeAll(this);
    ArenaMemoryTracking::ArenaDestroyed(this);
    if (!pageAllocator->IsClosed())
    {
        ReleasePageMemory();
    }
    ReleaseHeapMemory();
    // Give the policy a chance to release any free-list storage it owns.
    TFreeListPolicy::Release(this->freeList);
#ifdef PROFILE_MEM
    LogEnd();
#endif
}
// Steals all arena state from srcAllocator into this allocator. Both arenas
// must share the same page allocator and this list must not be locked.
// NOTE(review): AllocatorFieldMove is defined elsewhere — presumably it
// transfers the field value and resets the source; confirm in Allocator.h.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
void
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
Move(ArenaAllocatorBase *srcAllocator)
{
    Assert(!lockBlockList);
    Assert(srcAllocator != nullptr);
    Allocator::Move(srcAllocator);
    Assert(this->pageAllocator == srcAllocator->pageAllocator);
    AllocatorFieldMove(this, srcAllocator, bigBlocks);
    AllocatorFieldMove(this, srcAllocator, largestHole);
    AllocatorFieldMove(this, srcAllocator, cacheBlockCurrent);
    AllocatorFieldMove(this, srcAllocator, cacheBlockEnd);
    AllocatorFieldMove(this, srcAllocator, mallocBlocks);
    AllocatorFieldMove(this, srcAllocator, fullBlocks);
    AllocatorFieldMove(this, srcAllocator, blockState);
    AllocatorFieldMove(this, srcAllocator, freeList);
#ifdef PROFILE_MEM
    // The profiling identity moves too; the source arena stops reporting.
    this->name = srcAllocator->name;
    srcAllocator->name = nullptr;
    AllocatorFieldMove(this, srcAllocator, memoryData);
#endif
}
  89. template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
  90. size_t
  91. ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
  92. AllocatedSize(ArenaMemoryBlock * blockList)
  93. {
  94. ArenaMemoryBlock * memoryBlock = blockList;
  95. size_t totalBytes = 0;
  96. while (memoryBlock != NULL)
  97. {
  98. totalBytes += memoryBlock->nbytes;
  99. memoryBlock = memoryBlock->next;
  100. }
  101. return totalBytes;
  102. }
// Total bytes reserved by this arena across all three block lists (block
// capacity, not bytes handed out). The cache block's bookkeeping is flushed
// first so the head block reports current state.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
size_t
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
AllocatedSize()
{
    UpdateCacheBlock();
    return AllocatedSize(this->fullBlocks) + AllocatedSize(this->bigBlocks) + AllocatedSize(this->mallocBlocks);
}
// Total bytes actually consumed: used bytes (currentByte) of the page-backed
// blocks, plus the full size of heap-backed blocks — those are sized exactly
// to their request (see AllocFromHeap), so capacity equals usage.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
size_t
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
Size()
{
    UpdateCacheBlock();
    return Size(this->fullBlocks) + Size(this->bigBlocks) + AllocatedSize(this->mallocBlocks);
}
  119. template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
  120. size_t
  121. ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
  122. Size(BigBlock * blockList)
  123. {
  124. BigBlock * memoryBlock = blockList;
  125. size_t totalBytes = 0;
  126. while (memoryBlock != NULL)
  127. {
  128. totalBytes += memoryBlock->currentByte;
  129. memoryBlock = (BigBlock *)memoryBlock->next;
  130. }
  131. return totalBytes;
  132. }
// Out-of-line wrapper around the inlined fast-path allocator, for call sites
// that don't want the inlined body.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
char *
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
RealAlloc(size_t nbytes)
{
    return RealAllocInlined(nbytes);
}
// Fast-path allocation of nbytes (caller must pre-align to ObjectAlignment)
// from the cache block; falls back to SnailAlloc when the cache block lacks
// room.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
char *
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
RealAllocInlined(size_t nbytes)
{
    Assert(nbytes != 0);
    Assert((nbytes & (ObjectAlignment - 1)) == 0); // caller aligns the request
#ifdef ARENA_MEMORY_VERIFY
    // Verification mode: force every allocation through the heap path.
    if (Js::Configuration::Global.flags.ArenaUseHeapAlloc)
    {
        return AllocFromHeap<true>(nbytes);
    }
#endif
    Assert(cacheBlockEnd >= cacheBlockCurrent);
    char * p = cacheBlockCurrent;
    if ((size_t)(cacheBlockEnd - p) >= nbytes)
    {
        // Invariants: the cache block is always the head of bigBlocks and the
        // bump pointer stays within it.
        Assert(cacheBlockEnd == bigBlocks->GetBytes() + bigBlocks->nbytes);
        Assert(bigBlocks->GetBytes() <= cacheBlockCurrent && cacheBlockCurrent <= cacheBlockEnd);
        cacheBlockCurrent = p + nbytes; // bump-pointer allocate
        ArenaMemoryTracking::ReportAllocation(this, p, nbytes);
        return(p);
    }
    return SnailAlloc(nbytes); // slow path: search holes or add a block
}
// Makes newCacheBlock the active bump-allocation block (head of bigBlocks).
// The previous cache block first has its bump pointer persisted; if it has
// less than one ObjectAlignment unit left it is retired to fullBlocks
// (unless the block list is locked), otherwise its leftover tail is recorded
// as a candidate hole for SnailAlloc to fill later.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
void
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
SetCacheBlock(BigBlock * newCacheBlock)
{
    if (bigBlocks != nullptr)
    {
        Assert(cacheBlockEnd == bigBlocks->GetBytes() + bigBlocks->nbytes);
        Assert(bigBlocks->GetBytes() <= cacheBlockCurrent && cacheBlockCurrent <= cacheBlockEnd);
        // Persist the bump pointer into the outgoing cache block.
        bigBlocks->currentByte = (cacheBlockCurrent - bigBlocks->GetBytes());
        uint cacheBlockRemainBytes = (uint)(cacheBlockEnd - cacheBlockCurrent);
        if (cacheBlockRemainBytes < ObjectAlignment && !lockBlockList)
        {
            // Nothing useful left: move the old cache block to fullBlocks.
            BigBlock * cacheBlock = bigBlocks;
            bigBlocks = bigBlocks->nextBigBlock;
            cacheBlock->next = fullBlocks;
            fullBlocks = cacheBlock;
        }
        else
        {
            // Keep it on bigBlocks; remember its tail as a potential hole.
            largestHole = max(largestHole, static_cast<size_t>(cacheBlockRemainBytes));
        }
    }
    // Install the new cache block at the head of bigBlocks and re-derive the
    // cached bump-pointer bounds from it.
    cacheBlockCurrent = newCacheBlock->GetBytes() + newCacheBlock->currentByte;
    cacheBlockEnd = newCacheBlock->GetBytes() + newCacheBlock->nbytes;
    newCacheBlock->nextBigBlock = bigBlocks;
    bigBlocks = newCacheBlock;
}
// Slow-path allocation (the cache block was too small). First tries to fill
// the request from a hole in one of the other big blocks; otherwise acquires
// a new big block, falling back to the heap when page allocation fails.
// nbytes is already ObjectAlignment-aligned.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
char *
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
SnailAlloc(size_t nbytes)
{
    BigBlock* blockp = NULL;
    size_t currentLargestHole = 0;
    if (nbytes <= largestHole)
    {
        // largestHole is only set when at least one block beyond the cache
        // block has usable tail space (see SetCacheBlock).
        Assert(bigBlocks != nullptr);
        Assert(cacheBlockEnd == bigBlocks->GetBytes() + bigBlocks->nbytes);
        Assert(bigBlocks->GetBytes() <= cacheBlockCurrent && cacheBlockCurrent <= cacheBlockEnd);
        BigBlock * cacheBlock = bigBlocks;
        BigBlock** pPrev= &(bigBlocks->nextBigBlock);
        blockp = bigBlocks->nextBigBlock;
        // Bound the scan so allocation doesn't become O(block count) on a
        // long chain of nearly-full blocks.
        int giveUpAfter = 10;
        do
        {
            size_t remainingBytes = blockp->nbytes - blockp->currentByte;
            if (remainingBytes >= nbytes)
            {
                char *p = blockp->GetBytes() + blockp->currentByte;
                blockp->currentByte += nbytes;
                // Refresh largestHole if we just consumed the hole it was
                // tracking, or the scan so far found a bigger one than recorded.
                if (remainingBytes == largestHole || currentLargestHole > largestHole)
                {
                    largestHole = currentLargestHole;
                }
                remainingBytes -= nbytes;
                if (remainingBytes > cacheBlock->nbytes - cacheBlock->currentByte)
                {
                    // This block now has more room than the cache block:
                    // unlink it and promote it to cache block.
                    *pPrev = blockp->nextBigBlock;
                    SetCacheBlock(blockp);
                }
                else if (remainingBytes < ObjectAlignment && !lockBlockList)
                {
                    // Too little left to serve another request: retire it.
                    *pPrev = blockp->nextBigBlock;
                    blockp->nextBigBlock = fullBlocks;
                    fullBlocks = blockp;
                }
                ArenaMemoryTracking::ReportAllocation(this, p, nbytes);
                return(p);
            }
            currentLargestHole = max(currentLargestHole, remainingBytes);
            if (--giveUpAfter == 0)
            {
                break;
            }
            pPrev = &(blockp->nextBigBlock);
            blockp = blockp->nextBigBlock;
        }
        while (blockp != nullptr);
    }
    // No suitable hole: grab a fresh page-backed block.
    blockp = AddBigBlock(nbytes);
    if (blockp == nullptr)
    {
        return AllocFromHeap<false>(nbytes); // Passing DoRecoverMemory=false as we already tried recovering memory in AddBigBlock, and it is costly.
    }
    this->blockState++;
    SetCacheBlock(blockp);
    // The fresh block is now the cache block; bump-allocate from it.
    char *p = cacheBlockCurrent;
    Assert(p + nbytes <= cacheBlockEnd);
    cacheBlockCurrent += nbytes;
    ArenaMemoryTracking::ReportAllocation(this, p, nbytes);
    return(p);
}
// Allocates requestBytes from the process heap with an ArenaMemoryBlock
// header prepended; used when the request can't be served from pages.
// DoRecoverMemory selects whether recoverMemoryFunc is tried before giving
// up. On final failure invokes outOfMemoryFunc (expected to throw for
// throwing allocators — see the comment in AllocInternal) and returns nullptr.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
template <bool DoRecoverMemory>
char *
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
AllocFromHeap(size_t requestBytes)
{
    // Overflow-checked total: payload plus the list-link header.
    size_t allocBytes = AllocSizeMath::Add(requestBytes, sizeof(ArenaMemoryBlock));
    ARENA_FAULTINJECT_MEMORY(this->name, requestBytes);
    char * buffer = HeapNewNoThrowArray(char, allocBytes);
    if (buffer == nullptr)
    {
        if (DoRecoverMemory && recoverMemoryFunc)
        {
            // Try to recover some memory and see if after that we can allocate.
            recoverMemoryFunc();
            buffer = HeapNewNoThrowArray(char, allocBytes);
        }
        if (buffer == nullptr)
        {
            if (outOfMemoryFunc)
            {
                outOfMemoryFunc();
            }
            return nullptr;
        }
    }
    // The header lives at the front of the buffer; the payload follows it.
    ArenaMemoryBlock * memoryBlock = (ArenaMemoryBlock *)buffer;
    memoryBlock->nbytes = requestBytes;
    memoryBlock->next = this->mallocBlocks;
    this->mallocBlocks = memoryBlock;
    this->blockState = 2; // set the block state to 2 to disable the reset fast path.
    ArenaMemoryTracking::ReportAllocation(this, buffer + sizeof(ArenaMemoryBlock), requestBytes);
    return buffer + sizeof(ArenaMemoryBlock);
}
// Acquires a new page-backed block large enough for requestBytes plus the
// BigBlock header. Tries recoverMemoryFunc once if the first page request
// fails. Returns nullptr on failure. The block is not linked into any list
// here — callers install it via SetCacheBlock.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
BigBlock *
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
AddBigBlock(size_t requestBytes)
{
    FAULTINJECT_MEMORY_NOTHROW(this->name, requestBytes);
    // Overflow-checked total: payload plus the in-page block header.
    size_t allocBytes = AllocSizeMath::Add(requestBytes, sizeof(BigBlock));
    PageAllocation * allocation = this->GetPageAllocator()->AllocPagesForBytes(allocBytes);
    if (allocation == nullptr)
    {
        // Try to recover some memory and see if after that we can allocate.
        if (recoverMemoryFunc)
        {
            recoverMemoryFunc();
            allocation = this->GetPageAllocator()->AllocPagesForBytes(allocBytes);
        }
        if (allocation == nullptr)
        {
            return nullptr;
        }
    }
    // The BigBlock header lives at the start of the page allocation; usable
    // capacity is whatever the (page-rounded) allocation holds beyond it.
    BigBlock * blockp = (BigBlock *)allocation->GetAddress();
    blockp->allocation = allocation;
    blockp->nbytes = allocation->GetSize() - sizeof(BigBlock);
    blockp->currentByte = 0;
#ifdef PROFILE_MEM
    LogRealAlloc(allocation->GetSize() + sizeof(PageAllocation));
#endif
    return(blockp);
}
// Resets the arena to a single empty big block. The current cache block
// (head of bigBlocks) is detached so Clear() won't release it, then
// reinstalled with its contents discarded.
// NOTE(review): Clear() is defined elsewhere — presumed to release the
// remaining blocks and reset allocator state; confirm before relying on it.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
void
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
FullReset()
{
    BigBlock * initBlock = this->bigBlocks;
    if (initBlock != nullptr)
    {
        this->bigBlocks = initBlock->nextBigBlock;
    }
    Clear();
    if (initBlock != nullptr)
    {
        this->blockState = 1;       // exactly one big block remains
        initBlock->currentByte = 0; // discard its prior contents
        SetCacheBlock(initBlock);
    }
}
// Returns all page-backed and heap-backed memory to their owners.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
void
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
ReleaseMemory()
{
    ReleasePageMemory();
    ReleaseHeapMemory();
}
  348. template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
  349. void
  350. ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
  351. ReleasePageMemory()
  352. {
  353. pageAllocator->SuspendIdleDecommit();
  354. #ifdef ARENA_MEMORY_VERIFY
  355. bool reenableDisablePageReuse = false;
  356. if (Js::Configuration::Global.flags.ArenaNoPageReuse)
  357. {
  358. reenableDisablePageReuse = !pageAllocator->DisablePageReuse();
  359. }
  360. #endif
  361. BigBlock *blockp = bigBlocks;
  362. while (blockp != NULL)
  363. {
  364. PageAllocation * allocation = blockp->allocation;
  365. blockp = blockp->nextBigBlock;
  366. GetPageAllocator()->ReleaseAllocationNoSuspend(allocation);
  367. }
  368. blockp = fullBlocks;
  369. while (blockp != NULL)
  370. {
  371. PageAllocation * allocation = blockp->allocation;
  372. blockp = blockp->nextBigBlock;
  373. GetPageAllocator()->ReleaseAllocationNoSuspend(allocation);
  374. }
  375. #ifdef ARENA_MEMORY_VERIFY
  376. if (reenableDisablePageReuse)
  377. {
  378. pageAllocator->ReenablePageReuse();
  379. }
  380. #endif
  381. pageAllocator->ResumeIdleDecommit();
  382. }
  383. template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
  384. void
  385. ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
  386. ReleaseHeapMemory()
  387. {
  388. ArenaMemoryBlock * memoryBlock = this->mallocBlocks;
  389. while (memoryBlock != nullptr)
  390. {
  391. ArenaMemoryBlock * next = memoryBlock->next;
  392. HeapDeleteArray(memoryBlock->nbytes + sizeof(ArenaMemoryBlock), (char *)memoryBlock);
  393. memoryBlock = next;
  394. }
  395. }
// Explicit instantiations of the hot AllocInternal path for the two
// in-place free list configurations used elsewhere in the codebase
// (ObjectAlignmentBitShift 0 and 3).
template __forceinline char *ArenaAllocatorBase<InPlaceFreeListPolicy, 0, 0, 0>::AllocInternal(size_t requestedBytes);
template __forceinline char *ArenaAllocatorBase<InPlaceFreeListPolicy, 3, 0, 0>::AllocInternal(size_t requestedBytes);
// Main allocation entry point. requestedBytes is the caller's raw size and
// is aligned up to ObjectAlignment here. Small requests are served from the
// free list when one exists; everything else bump-allocates through
// RealAllocInlined.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
char *
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
AllocInternal(size_t requestedBytes)
{
    Assert(requestedBytes != 0);
    if (MaxObjectSize > 0)
    {
        Assert(requestedBytes <= MaxObjectSize);
    }
    if (RequireObjectAlignment)
    {
        Assert(requestedBytes % ObjectAlignment == 0);
    }
    // If out of memory function is set, that means that the caller is a throwing allocation
    // routine, so we can throw from here. Otherwise, we shouldn't throw.
    ARENA_FAULTINJECT_MEMORY(this->name, requestedBytes);
    ASSERT_THREAD();
    size_t nbytes;
    if (freeList != nullptr && requestedBytes > 0 && requestedBytes <= ArenaAllocatorBase::MaxSmallObjectSize)
    {
        // We have checked the size requested, so no integer overflow check
        nbytes = Math::Align(requestedBytes, ArenaAllocator::ObjectAlignment);
        Assert(nbytes <= ArenaAllocator::MaxSmallObjectSize);
#ifdef PROFILE_MEM
        LogAlloc(requestedBytes, nbytes);
#endif
        void * freeObject = TFreeListPolicy::Allocate(this->freeList, nbytes);
        if (freeObject != nullptr)
        {
#ifdef ARENA_MEMORY_VERIFY
            // Recycled memory must still carry the debug free-fill pattern.
            TFreeListPolicy::VerifyFreeObjectIsFreeMemFilled(freeObject, nbytes);
#endif
#ifdef ARENA_ALLOCATOR_FREE_LIST_SIZE
            this->freeListSize -= nbytes;
#endif
#ifdef PROFILE_MEM
            LogReuse(nbytes);
#endif
            ArenaMemoryTracking::ReportAllocation(this, freeObject, nbytes);
            return (char *)freeObject;
        }
    }
    else
    {
        // Large (or pre-free-list) request: overflow-checked alignment.
        nbytes = AllocSizeMath::Align(requestedBytes, ArenaAllocator::ObjectAlignment);
#ifdef PROFILE_MEM
        LogAlloc(requestedBytes, nbytes);
#endif
    }
    // TODO: Support large object free listing
    return ArenaAllocatorBase::RealAllocInlined(nbytes);
}
// Returns a previously allocated buffer of byteSize bytes to the arena.
// Best effort: the most recent bump allocation is rolled back in place;
// small objects go onto the (lazily created) free list; anything else is
// simply abandoned until the arena is reset or destroyed.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
void
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
Free(void * buffer, size_t byteSize)
{
    ASSERT_THREAD();
    Assert(byteSize != 0);
    if (MaxObjectSize > 0)
    {
        Assert(byteSize <= MaxObjectSize);
    }
    if (RequireObjectAlignment)
    {
        Assert(byteSize % ObjectAlignment == 0);
    }
    // Since we successfully allocated, we shouldn't have integer overflow here
    size_t size = Math::Align(byteSize, ArenaAllocator::ObjectAlignment);
    Assert(size >= byteSize);
    ArenaMemoryTracking::ReportFree(this, buffer, byteSize);
#ifdef ARENA_MEMORY_VERIFY
    if (Js::Configuration::Global.flags.ArenaNoFreeList)
    {
        // Verification mode: never recycle freed memory.
        return;
    }
#endif
    if (buffer == cacheBlockCurrent - byteSize)
    {
        // Freeing the most recent bump allocation: just rewind the pointer.
#ifdef PROFILE_MEM
        LogFree(byteSize);
#endif
        cacheBlockCurrent = (char *)buffer;
        return;
    }
    else if (this->pageAllocator->IsClosed())
    {
        // Page allocator already closed: abandon the memory rather than
        // free-listing it.
        return;
    }
    else if (size <= ArenaAllocator::MaxSmallObjectSize)
    {
        // If we plan to free-list this object, we must prepare (typically, debug pattern fill) its memory here, in case we fail to allocate the free list because we're out of memory (see below),
        // and we never get to call TFreeListPolicy::Free.
        TFreeListPolicy::PrepareFreeObject(buffer, size);
        if (freeList == nullptr)
        {
            // Caution: TFreeListPolicy::New may fail silently if we're out of memory.
            freeList = TFreeListPolicy::New(this);
            if (freeList == nullptr)
            {
                return;
            }
        }
        this->freeList = TFreeListPolicy::Free(this->freeList, buffer, size);
#ifdef ARENA_ALLOCATOR_FREE_LIST_SIZE
        this->freeListSize += size;
#endif
#ifdef PROFILE_MEM
        LogFree(byteSize);
#endif
        return;
    }
    // TODO: Free list bigger objects
}
// Resizes an existing allocation. Shrinks in place (the freed tail goes
// back via Free); growth allocates a new buffer, copies the old contents,
// and frees the original. Returns nullptr if growth fails — the original
// buffer is freed regardless, matching the existing contract.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
char *
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
Realloc(void* buffer, size_t existingBytes, size_t requestedBytes)
{
    ASSERT_THREAD();
    if (existingBytes == 0)
    {
        // A zero-size original means there is nothing to copy or free.
        Assert(buffer == nullptr);
        return AllocInternal(requestedBytes);
    }
    if (MaxObjectSize > 0)
    {
        Assert(requestedBytes <= MaxObjectSize);
    }
    if (RequireObjectAlignment)
    {
        Assert(requestedBytes % ObjectAlignment == 0);
    }
    // Overflow-checked alignment for the new size...
    size_t nbytes = AllocSizeMath::Align(requestedBytes, ArenaAllocator::ObjectAlignment);
    // Since we successfully allocated, we shouldn't have integer overflow here
    size_t nbytesExisting = Math::Align(existingBytes, ArenaAllocator::ObjectAlignment);
    Assert(nbytesExisting >= existingBytes);
    if (nbytes == nbytesExisting)
    {
        // Same aligned size: nothing to do.
        return (char *)buffer;
    }
    if (nbytes < nbytesExisting)
    {
        // Shrink in place: release only the tail.
        ArenaMemoryTracking::ReportReallocation(this, buffer, nbytesExisting, nbytes);
        Free(((char *)buffer) + nbytes, nbytesExisting - nbytes);
        return (char *)buffer;
    }
    // Grow: allocate, copy, then free the old buffer.
    char* replacementBuf = nullptr;
    if (requestedBytes > 0)
    {
        replacementBuf = AllocInternal(requestedBytes);
        if (replacementBuf != nullptr)
        {
            js_memcpy_s(replacementBuf, requestedBytes, buffer, existingBytes);
        }
    }
    if (nbytesExisting > 0)
    {
        Free(buffer, nbytesExisting);
    }
    return replacementBuf;
}
  561. #ifdef PROFILE_MEM
// Starts memory profiling for this arena. memoryData accumulates the
// per-arena counters updated by the other Log* helpers.
template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
void
ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
LogBegin()
{
    memoryData = MemoryProfiler::Begin(this->name);
}
  569. template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
  570. void
  571. ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
  572. LogReset()
  573. {
  574. if (memoryData)
  575. {
  576. MemoryProfiler::Reset(this->name, memoryData);
  577. }
  578. }
  579. template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
  580. void
  581. ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
  582. LogEnd()
  583. {
  584. if (memoryData)
  585. {
  586. MemoryProfiler::End(this->name, memoryData);
  587. }
  588. }
  589. template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
  590. void
  591. ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
  592. LogAlloc(size_t requestedBytes, size_t allocateBytes)
  593. {
  594. if (memoryData)
  595. {
  596. memoryData->requestCount++;
  597. memoryData->requestBytes += requestedBytes;
  598. memoryData->alignmentBytes += allocateBytes - requestedBytes;
  599. }
  600. }
  601. template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
  602. void
  603. ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
  604. LogRealAlloc(size_t size)
  605. {
  606. if (memoryData)
  607. {
  608. memoryData->allocatedBytes += size;
  609. }
  610. }
  611. template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
  612. void
  613. ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
  614. LogFree(size_t size)
  615. {
  616. if (memoryData)
  617. {
  618. memoryData->freelistBytes += size;
  619. memoryData->freelistCount++;
  620. }
  621. }
  622. template <class TFreeListPolicy, size_t ObjectAlignmentBitShiftArg, bool RequireObjectAlignment, size_t MaxObjectSize>
  623. void
  624. ArenaAllocatorBase<TFreeListPolicy, ObjectAlignmentBitShiftArg, RequireObjectAlignment, MaxObjectSize>::
  625. LogReuse(size_t size)
  626. {
  627. if (memoryData)
  628. {
  629. memoryData->reuseCount++;
  630. memoryData->reuseBytes += size;
  631. }
  632. }
  633. #endif
// Creates the per-size-bucket array of free list heads, zero-initialized and
// allocated from the arena itself. NoThrow/NoRecovery: returns nullptr on
// OOM (callers handle that — see ArenaAllocatorBase::Free).
void * InPlaceFreeListPolicy::New(ArenaAllocatorBase<InPlaceFreeListPolicy> * allocator)
{
    return AllocatorNewNoThrowNoRecoveryArrayZ(ArenaAllocator, allocator, FreeObject *, buckets);
}
// Pops a free object of exactly `size` bytes (a multiple of ObjectAlignment)
// from the matching bucket, or returns NULL when the bucket is empty.
// "In place" means the list links are stored inside the freed objects.
void * InPlaceFreeListPolicy::Allocate(void * policy, size_t size)
{
    Assert(policy);
    FreeObject ** freeObjectLists = reinterpret_cast<FreeObject **>(policy);
    // Buckets are in ObjectAlignment granules; bucket 0 holds the smallest size.
    size_t index = (size >> ArenaAllocator::ObjectAlignmentBitShift) - 1;
    FreeObject * freeObject = freeObjectLists[index];
    if (NULL != freeObject)
    {
        freeObjectLists[index] = freeObject->next;
#ifdef ARENA_MEMORY_VERIFY
#ifndef _MSC_VER
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wsizeof-pointer-memaccess"
#endif
        // Make sure the next pointer bytes are also DbgFreeMemFill-ed.
        memset(freeObject, DbgFreeMemFill, sizeof(freeObject->next));
#ifndef _MSC_VER
#pragma clang diagnostic pop
#endif
#endif
    }
    return freeObject;
}
  661. void * InPlaceFreeListPolicy::Free(void * policy, void * object, size_t size)
  662. {
  663. Assert(policy);
  664. FreeObject ** freeObjectLists = reinterpret_cast<FreeObject **>(policy);
  665. FreeObject * freeObject = reinterpret_cast<FreeObject *>(object);
  666. size_t index = (size >> ArenaAllocator::ObjectAlignmentBitShift) - 1;
  667. freeObject->next = freeObjectLists[index];
  668. freeObjectLists[index] = freeObject;
  669. return policy;
  670. }
// Resets the policy. In-place free lists live entirely inside arena memory,
// so there is nothing to release; the caller's policy pointer simply becomes
// NULL. The 'policy' argument is intentionally unused.
void * InPlaceFreeListPolicy::Reset(void * policy)
{
    return NULL;
}
  675. #ifdef ARENA_MEMORY_VERIFY
  676. void InPlaceFreeListPolicy::VerifyFreeObjectIsFreeMemFilled(void * object, size_t size)
  677. {
  678. unsigned char * bytes = reinterpret_cast<unsigned char*>(object);
  679. for (size_t i = 0; i < size; i++)
  680. {
  681. Assert(bytes[i] == InPlaceFreeListPolicy::DbgFreeMemFill);
  682. }
  683. }
  684. #endif
// Explicitly instantiate the arena allocator specialization that uses the
// in-place free-list policy, so its member definitions are emitted in this
// translation unit.
namespace Memory
{
    template class ArenaAllocatorBase<InPlaceFreeListPolicy>;
}
// Creates the free-list state for the stand-alone policy with the default
// initial entry capacity. Unlike the in-place policy, all bookkeeping lives
// on the process heap, not in arena memory. Returns NULL on failure.
void * StandAloneFreeListPolicy::New(ArenaAllocatorBase<StandAloneFreeListPolicy> * /*allocator*/)
{
    return NewInternal(InitialEntries);
}
// Pops an object from the size bucket's free list, if any.
// The stand-alone policy keeps its bookkeeping outside arena memory:
// freeObjectLists[bucket] and entry->next hold 1-based indices into the
// 'entries' array (0 means "empty list"), and 'freeList' chains entries that
// are currently unused. On a hit, the bucket's head entry is recycled onto
// 'freeList' and its stored object pointer is returned.
// Returns NULL when the bucket is empty.
void * StandAloneFreeListPolicy::Allocate(void * policy, size_t size)
{
    Assert(policy);
    StandAloneFreeListPolicy * _this = reinterpret_cast<StandAloneFreeListPolicy *>(policy);
    // Buckets are indexed by object size in alignment-granularity units;
    // size is assumed to be a nonzero multiple of the object alignment.
    size_t index = (size >> ArenaAllocator::ObjectAlignmentBitShift) - 1;
    void * object = NULL;
    uint * freeObjectList = &_this->freeObjectLists[index];
    if (0 != *freeObjectList)
    {
        // Unlink the head entry from the bucket's list...
        FreeObjectListEntry * entry = &_this->entries[*freeObjectList - 1];
        uint oldFreeList = _this->freeList;
        // ...and push it onto the unused-entry chain for reuse by Free.
        _this->freeList = *freeObjectList;
        *freeObjectList = entry->next;
        object = entry->object;
        Assert(object != NULL);
        entry->next = oldFreeList;
        entry->object = NULL;
    }
    return object;
}
// Records a freed object in the size bucket's free list.
// TryEnsureFreeListEntry may grow (reallocate) the bookkeeping, updating
// '_this' to point at the new policy object; the possibly-new pointer is
// returned so the caller can store it. If growing fails, the object is
// silently dropped (it remains allocated in the arena until reset) -- safe,
// because free-listing is only an optimization.
void * StandAloneFreeListPolicy::Free(void * policy, void * object, size_t size)
{
    Assert(policy);
    StandAloneFreeListPolicy * _this = reinterpret_cast<StandAloneFreeListPolicy *>(policy);
    size_t index = (size >> ArenaAllocator::ObjectAlignmentBitShift) - 1;
    if (TryEnsureFreeListEntry(_this))
    {
        Assert(_this->freeList != 0);
        uint * freeObjectList = &_this->freeObjectLists[index];
        // Pop an unused entry off the freeList chain and make it the new
        // head of this bucket's list, remembering the freed object in it.
        FreeObjectListEntry * entry = &_this->entries[_this->freeList - 1];
        uint oldFreeObjectList = *freeObjectList;
        *freeObjectList = _this->freeList;
        _this->freeList = entry->next;
        entry->object = object;
        entry->next = oldFreeObjectList;
    }
    return _this;
}
  731. void * StandAloneFreeListPolicy::Reset(void * policy)
  732. {
  733. Assert(policy);
  734. StandAloneFreeListPolicy * _this = reinterpret_cast<StandAloneFreeListPolicy *>(policy);
  735. HeapDeletePlus(GetPlusSize(_this), _this);
  736. return NULL;
  737. }
  738. #ifdef ARENA_MEMORY_VERIFY
  739. void StandAloneFreeListPolicy::VerifyFreeObjectIsFreeMemFilled(void * object, size_t size)
  740. {
  741. char * bytes = reinterpret_cast<char*>(object);
  742. for (size_t i = 0; i < size; i++)
  743. {
  744. Assert(bytes[i] == StandAloneFreeListPolicy::DbgFreeMemFill);
  745. }
  746. }
  747. #endif
  748. void StandAloneFreeListPolicy::Release(void * policy)
  749. {
  750. if (NULL != policy)
  751. {
  752. Reset(policy);
  753. }
  754. }
// Heap-allocates a zero-initialized policy with capacity for 'entries'
// free-list entries. The bucket-head array and the entry array are carved
// out of a single trailing "plus" allocation immediately after the header.
// Returns NULL if the heap allocation fails.
StandAloneFreeListPolicy * StandAloneFreeListPolicy::NewInternal(uint entries)
{
    size_t plusSize = buckets * sizeof(uint) + entries * sizeof(FreeObjectListEntry);
    StandAloneFreeListPolicy * _this = HeapNewNoThrowPlusZ(plusSize, StandAloneFreeListPolicy);
    if (NULL != _this)
    {
        _this->allocated = entries;
        // Bucket heads start right after the header; entries follow them.
        _this->freeObjectLists = (uint *)(_this + 1);
        _this->entries = (FreeObjectListEntry *)(_this->freeObjectLists + buckets);
    }
    return _this;
}
// Ensures _this->freeList refers to at least one unused entry, growing the
// bookkeeping if necessary. On growth the whole policy object is
// reallocated: '_this' (passed by reference) is updated to the new object
// and the old one is deleted. Returns false only when the heap allocation
// for the larger policy fails, in which case '_this' is left untouched.
bool StandAloneFreeListPolicy::TryEnsureFreeListEntry(StandAloneFreeListPolicy *& _this)
{
    if (0 == _this->freeList)
    {
        if (_this->used < _this->allocated)
        {
            // Entry indices are 1-based (0 means "none"); claim the next
            // never-used slot as the head of the unused-entry chain.
            _this->used++;
            _this->freeList = _this->used;
        }
        else
        {
            Assert(_this->used == _this->allocated);
            StandAloneFreeListPolicy * oldThis = _this;
            // Grow by doubling, capped at MaxEntriesGrowth additional entries.
            uint entries = oldThis->allocated + min(oldThis->allocated, MaxEntriesGrowth);
            StandAloneFreeListPolicy * newThis = NewInternal(entries);
            if (NULL != newThis)
            {
                uint sizeInBytes = buckets * sizeof(uint);
                js_memcpy_s(newThis->freeObjectLists, sizeInBytes, oldThis->freeObjectLists, sizeInBytes);
                js_memcpy_s(newThis->entries, newThis->allocated * sizeof(FreeObjectListEntry), oldThis->entries, oldThis->used * sizeof(FreeObjectListEntry));
                // The first slot beyond the copied entries becomes the free one.
                newThis->used = oldThis->used + 1;
                newThis->freeList = newThis->used;
                _this = newThis;
                HeapDeletePlus(GetPlusSize(oldThis), oldThis);
            }
            else
            {
                return false;
            }
        }
    }
    return true;
}
// Explicitly instantiate the arena allocator specialization that uses the
// stand-alone free-list policy in this translation unit.
namespace Memory
{
    template class ArenaAllocatorBase<StandAloneFreeListPolicy>;
}
  804. #ifdef PERSISTENT_INLINE_CACHES
// Creates the free-list state for the inline-cache policy. The 'allocator'
// argument is unused; all state lives on the process heap.
// Returns NULL on allocation failure.
void * InlineCacheFreeListPolicy::New(ArenaAllocatorBase<InlineCacheAllocatorTraits> * allocator)
{
    return NewInternal();
}
  809. InlineCacheFreeListPolicy * InlineCacheFreeListPolicy::NewInternal()
  810. {
  811. InlineCacheFreeListPolicy * _this = HeapNewNoThrowZ(InlineCacheFreeListPolicy);
  812. return _this;
  813. }
// The policy is always created via a zeroing heap allocation; this
// constructor only sanity-checks that the bucket heads really start empty.
InlineCacheFreeListPolicy::InlineCacheFreeListPolicy()
{
    Assert(AreFreeListBucketsEmpty());
}
  818. bool InlineCacheFreeListPolicy::AreFreeListBucketsEmpty()
  819. {
  820. for (int b = 0; b < bucketCount; b++)
  821. {
  822. if (this->freeListBuckets[b] != 0) return false;
  823. }
  824. return true;
  825. }
// Pops a cache from the size bucket's free list, or returns NULL if the
// bucket is empty. Free-listed caches chain through their embedded next
// pointer, which is tagged with InlineCacheFreeListTag (so other code can
// distinguish free-list links from live pointers); the tag is stripped here
// before the link is stored back as the new bucket head.
void * InlineCacheFreeListPolicy::Allocate(void * policy, size_t size)
{
    Assert(policy);
    FreeObject ** freeObjectLists = reinterpret_cast<FreeObject **>(policy);
    size_t index = (size >> InlineCacheAllocatorInfo::ObjectAlignmentBitShift) - 1;
    FreeObject * freeObject = freeObjectLists[index];
    if (NULL != freeObject)
    {
        freeObjectLists[index] = reinterpret_cast<FreeObject *>(reinterpret_cast<intptr_t>(freeObject->next) & ~InlineCacheFreeListTag);
#ifdef ARENA_MEMORY_VERIFY
        // Make sure the next pointer bytes are also DbgFreeMemFill-ed, before we give them out.
        memset(&freeObject->next, DbgFreeMemFill, sizeof(freeObject->next));
#endif
    }
    return freeObject;
}
// Pushes a freed cache onto the size bucket's free list. The link to the
// previous head is tagged with InlineCacheFreeListTag so that ZeroAll (and
// other scans) can tell a free-list link apart from a live strongRef pointer.
// Returns the (unchanged) policy.
void * InlineCacheFreeListPolicy::Free(void * policy, void * object, size_t size)
{
    Assert(policy);
    FreeObject ** freeObjectLists = reinterpret_cast<FreeObject **>(policy);
    FreeObject * freeObject = reinterpret_cast<FreeObject *>(object);
    size_t index = (size >> InlineCacheAllocatorInfo::ObjectAlignmentBitShift) - 1;
    freeObject->next = reinterpret_cast<FreeObject *>(reinterpret_cast<intptr_t>(freeObjectLists[index]) | InlineCacheFreeListTag);
    freeObjectLists[index] = freeObject;
    return policy;
}
  852. void * InlineCacheFreeListPolicy::Reset(void * policy)
  853. {
  854. Assert(policy);
  855. InlineCacheFreeListPolicy * _this = reinterpret_cast<InlineCacheFreeListPolicy *>(policy);
  856. HeapDelete(_this);
  857. return NULL;
  858. }
  859. #ifdef ARENA_MEMORY_VERIFY
// Debug-only: verifies a free-listed cache's bytes are either the debug fill
// pattern or zero. Zero must be tolerated here, unlike in the other
// policies' checks -- see the comment below for why.
void InlineCacheFreeListPolicy::VerifyFreeObjectIsFreeMemFilled(void * object, size_t size)
{
    unsigned char * bytes = reinterpret_cast<unsigned char*>(object);
    for (size_t i = 0; i < size; i++)
    {
        // We must allow for zero-filled free listed objects (at least their weakRefs/blankSlots bytes), because during garbage collection, we may zero out
        // some of the weakRefs (those that have become unreachable), and this is NOT a sign of "use after free" problem. It would be nice if during collection
        // we could reliably distinguish free-listed objects from live caches, but that's not possible because caches can be allocated and freed in batches
        // (see more on that in comments inside InlineCacheFreeListPolicy::PrepareFreeObject).
        Assert(bytes[i] == NULL || bytes[i] == InlineCacheFreeListPolicy::DbgFreeMemFill);
    }
}
  872. #endif
  873. void InlineCacheFreeListPolicy::Release(void * policy)
  874. {
  875. if (NULL != policy)
  876. {
  877. Reset(policy);
  878. }
  879. }
// Explicitly instantiate the arena allocator specialization used for inline
// caches in this translation unit.
namespace Memory
{
    template class ArenaAllocatorBase<InlineCacheAllocatorTraits>;
}
  884. #if DBG
  885. bool InlineCacheAllocator::IsAllZero()
  886. {
  887. UpdateCacheBlock();
  888. // See InlineCacheAllocator::ZeroAll for why we ignore the strongRef slot of the CacheLayout.
  889. BigBlock *bigBlock = this->bigBlocks;
  890. while (bigBlock != NULL)
  891. {
  892. Assert(bigBlock->currentByte % sizeof(CacheLayout) == 0);
  893. CacheLayout* endPtr = (CacheLayout*)(bigBlock->GetBytes() + bigBlock->currentByte);
  894. for (CacheLayout* cache = (CacheLayout*)bigBlock->GetBytes(); cache < endPtr; cache++)
  895. {
  896. unsigned char* weakRefBytes = (unsigned char *)cache->weakRefs;
  897. for (size_t i = 0; i < sizeof(cache->weakRefs); i++)
  898. {
  899. // If we're verifying arena memory (in debug builds) caches on the free list
  900. // will be debug pattern filled (specifically, at least their weak reference slots).
  901. // All other caches must be zeroed out (again, at least their weak reference slots).
  902. #ifdef ARENA_MEMORY_VERIFY
  903. if (weakRefBytes[i] != NULL && weakRefBytes[i] != InlineCacheFreeListPolicy::DbgFreeMemFill)
  904. {
  905. AssertMsg(false, "Inline cache arena is not zeroed!");
  906. return false;
  907. }
  908. #else
  909. if (weakRefBytes[i] != NULL)
  910. {
  911. AssertMsg(false, "Inline cache arena is not zeroed!");
  912. return false;
  913. }
  914. #endif
  915. }
  916. }
  917. bigBlock = bigBlock->nextBigBlock;
  918. }
  919. bigBlock = this->fullBlocks;
  920. while (bigBlock != NULL)
  921. {
  922. Assert(bigBlock->currentByte % sizeof(CacheLayout) == 0);
  923. CacheLayout* endPtr = (CacheLayout*)(bigBlock->GetBytes() + bigBlock->currentByte);
  924. for (CacheLayout* cache = (CacheLayout*)bigBlock->GetBytes(); cache < endPtr; cache++)
  925. {
  926. char* weakRefBytes = (char *)cache->weakRefs;
  927. for (size_t i = 0; i < sizeof(cache->weakRefs); i++)
  928. {
  929. // If we're verifying arena memory (in debug builds) caches on the free list
  930. // will be debug pattern filled (specifically, their weak reference slots).
  931. // All other caches must be zeroed out (again, their weak reference slots).
  932. #ifdef ARENA_MEMORY_VERIFY
  933. if (weakRefBytes[i] != NULL && weakRefBytes[i] != InlineCacheFreeListPolicy::DbgFreeMemFill)
  934. {
  935. AssertMsg(false, "Inline cache arena is not zeroed!");
  936. return false;
  937. }
  938. #else
  939. if (weakRefBytes[i] != NULL)
  940. {
  941. AssertMsg(false, "Inline cache arena is not zeroed!");
  942. return false;
  943. }
  944. #endif
  945. }
  946. }
  947. bigBlock = bigBlock->nextBigBlock;
  948. }
  949. ArenaMemoryBlock * memoryBlock = this->mallocBlocks;
  950. while (memoryBlock != nullptr)
  951. {
  952. Assert(memoryBlock->nbytes % sizeof(CacheLayout) == 0);
  953. ArenaMemoryBlock * next = memoryBlock->next;
  954. CacheLayout* endPtr = (CacheLayout*)(memoryBlock->GetBytes() + memoryBlock->nbytes);
  955. for (CacheLayout* cache = (CacheLayout*)memoryBlock->GetBytes(); cache < endPtr; cache++)
  956. {
  957. unsigned char* weakRefBytes = (unsigned char *)cache->weakRefs;
  958. for (size_t i = 0; i < sizeof(cache->weakRefs); i++)
  959. {
  960. #ifdef ARENA_MEMORY_VERIFY
  961. if (weakRefBytes[i] != NULL && weakRefBytes[i] != InlineCacheFreeListPolicy::DbgFreeMemFill)
  962. {
  963. AssertMsg(false, "Inline cache arena is not zeroed!");
  964. return false;
  965. }
  966. #else
  967. if (weakRefBytes[i] != NULL)
  968. {
  969. AssertMsg(false, "Inline cache arena is not zeroed!");
  970. return false;
  971. }
  972. #endif
  973. }
  974. }
  975. memoryBlock = next;
  976. }
  977. return true;
  978. }
  979. #endif
// Zeroes every cache in the arena while preserving the free list.
void InlineCacheAllocator::ZeroAll()
{
    UpdateCacheBlock();
    // We zero the weakRefs part of each cache in the arena unconditionally. The strongRef slot is zeroed only
    // if it isn't tagged with InlineCacheFreeListTag. That's so we don't lose our free list, which is
    // formed by caches linked via their strongRef slot tagged with InlineCacheFreeListTag. On the other hand,
    // inline caches that require invalidation use the same slot as a pointer (untagged) to the cache's address
    // in the invalidation list. Hence, we must zero the strongRef slot when untagged to ensure the cache
    // doesn't appear registered for invalidation when it's actually blank (which would trigger asserts in InlineCache::VerifyRegistrationForInvalidation).
    // Current (partially filled) big blocks:
    BigBlock *bigBlock = this->bigBlocks;
    while (bigBlock != NULL)
    {
        Assert(bigBlock->currentByte % sizeof(CacheLayout) == 0);
        CacheLayout* endPtr = (CacheLayout*)(bigBlock->GetBytes() + bigBlock->currentByte);
        for (CacheLayout* cache = (CacheLayout*)bigBlock->GetBytes(); cache < endPtr; cache++)
        {
            memset(cache->weakRefs, 0, sizeof(cache->weakRefs));
            // We want to preserve the free list, whose next pointers are tagged with InlineCacheFreeListTag.
            if ((cache->strongRef & InlineCacheFreeListTag) == 0) cache->strongRef = 0;
            // Sanity check: immediately after the memset above the weakRefs
            // must read back as zero; this can only fire if something is
            // scribbling on the arena concurrently.
            if (cache->weakRefs[0] != NULL || cache->weakRefs[1] != NULL || cache->weakRefs[2] != NULL)
            {
                AssertMsg(false, "Inline cache arena is not zeroed!");
            }
        }
        bigBlock = bigBlock->nextBigBlock;
    }
    // Exhausted big blocks:
    bigBlock = this->fullBlocks;
    while (bigBlock != NULL)
    {
        Assert(bigBlock->currentByte % sizeof(CacheLayout) == 0);
        CacheLayout* endPtr = (CacheLayout*)(bigBlock->GetBytes() + bigBlock->currentByte);
        for (CacheLayout* cache = (CacheLayout*)bigBlock->GetBytes(); cache < endPtr; cache++)
        {
            memset(cache->weakRefs, 0, sizeof(cache->weakRefs));
            // We want to preserve the free list, whose next pointers are tagged with InlineCacheFreeListTag.
            if ((cache->strongRef & InlineCacheFreeListTag) == 0) cache->strongRef = 0;
            if (cache->weakRefs[0] != NULL || cache->weakRefs[1] != NULL || cache->weakRefs[2] != NULL)
            {
                AssertMsg(false, "Inline cache arena is not zeroed!");
            }
        }
        bigBlock = bigBlock->nextBigBlock;
    }
    // Oversized allocations that went straight to malloc:
    ArenaMemoryBlock * memoryBlock = this->mallocBlocks;
    while (memoryBlock != nullptr)
    {
        Assert(memoryBlock->nbytes % sizeof(CacheLayout) == 0);
        ArenaMemoryBlock * next = memoryBlock->next;
        CacheLayout* endPtr = (CacheLayout*)(memoryBlock->GetBytes() + memoryBlock->nbytes);
        for (CacheLayout* cache = (CacheLayout*)memoryBlock->GetBytes(); cache < endPtr; cache++)
        {
            memset(cache->weakRefs, 0, sizeof(cache->weakRefs));
            // We want to preserve the free list, whose next pointers are tagged with InlineCacheFreeListTag.
            if ((cache->strongRef & InlineCacheFreeListTag) == 0) cache->strongRef = 0;
            if (cache->weakRefs[0] != NULL || cache->weakRefs[1] != NULL || cache->weakRefs[2] != NULL)
            {
                AssertMsg(false, "Inline cache arena is not zeroed!");
            }
        }
        memoryBlock = next;
    }
}
// Answers whether a single weak-reference pointer is dead.
// NOTE(review): this returns true when the object IS marked (i.e. reachable),
// which appears inverted relative to CacheHasDeadWeakRefs and
// ClearCacheIfHasDeadWeakRefs below, both of which treat
// !recycler->IsObjectMarked(ptr) as dead -- confirm the intended polarity
// against this helper's callers before relying on it.
bool InlineCacheAllocator::IsDeadWeakRef(Recycler* recycler, void* ptr)
{
    return recycler->IsObjectMarked(ptr);
}
// Returns true if any weak-reference slot of the cache points to an object
// the recycler has not marked (i.e. the referent is dead). Slots that are
// zero or that don't look like recycler pointers are skipped.
bool InlineCacheAllocator::CacheHasDeadWeakRefs(Recycler* recycler, CacheLayout* cache)
{
    // Walk the weakRefs slots, which immediately precede strongRef in CacheLayout.
    for (intptr_t* curWeakRefPtr = cache->weakRefs; curWeakRefPtr < &cache->strongRef; curWeakRefPtr++)
    {
        intptr_t curWeakRef = *curWeakRefPtr;
        if (curWeakRef == 0)
        {
            continue;
        }
        // Strip the aux-slot tag bit before treating the value as a pointer.
        curWeakRef &= ~(intptr_t)InlineCacheAuxSlotTypeTag;
        // Values not aligned to the recycler's object granularity cannot be
        // recycler-allocated object pointers; skip them.
        if ((curWeakRef & (HeapConstants::ObjectGranularity - 1)) != 0)
        {
            continue;
        }
        if (!recycler->IsObjectMarked((void*)curWeakRef))
        {
            return true;
        }
    }
    return false;
}
// Debug check: scans every cache in the arena (current big blocks, full
// blocks, and oversized malloc blocks) and returns false as soon as one is
// found holding a dead weak reference; true if none do.
bool InlineCacheAllocator::HasNoDeadWeakRefs(Recycler* recycler)
{
    UpdateCacheBlock();
    BigBlock *bigBlock = this->bigBlocks;
    while (bigBlock != NULL)
    {
        Assert(bigBlock->currentByte % sizeof(CacheLayout) == 0);
        CacheLayout* endPtr = (CacheLayout*)(bigBlock->GetBytes() + bigBlock->currentByte);
        for (CacheLayout* cache = (CacheLayout*)bigBlock->GetBytes(); cache < endPtr; cache++)
        {
            if (CacheHasDeadWeakRefs(recycler, cache))
            {
                return false;
            }
        }
        bigBlock = bigBlock->nextBigBlock;
    }
    bigBlock = this->fullBlocks;
    while (bigBlock != NULL)
    {
        Assert(bigBlock->currentByte % sizeof(CacheLayout) == 0);
        CacheLayout* endPtr = (CacheLayout*)(bigBlock->GetBytes() + bigBlock->currentByte);
        for (CacheLayout* cache = (CacheLayout*)bigBlock->GetBytes(); cache < endPtr; cache++)
        {
            if (CacheHasDeadWeakRefs(recycler, cache))
            {
                return false;
            }
        }
        bigBlock = bigBlock->nextBigBlock;
    }
    ArenaMemoryBlock * memoryBlock = this->mallocBlocks;
    while (memoryBlock != nullptr)
    {
        Assert(memoryBlock->nbytes % sizeof(CacheLayout) == 0);
        ArenaMemoryBlock * next = memoryBlock->next;
        CacheLayout* endPtr = (CacheLayout*)(memoryBlock->GetBytes() + memoryBlock->nbytes);
        for (CacheLayout* cache = (CacheLayout*)memoryBlock->GetBytes(); cache < endPtr; cache++)
        {
            if (CacheHasDeadWeakRefs(recycler, cache))
            {
                return false;
            }
        }
        memoryBlock = next;
    }
    return true;
}
// If any weak-reference slot of the cache points at an unmarked (dead)
// object, invalidates the whole cache by zeroing all three weakRefs slots,
// then stops scanning. The strongRef slot is left untouched (it may be a
// free-list link or an invalidation-list pointer).
void InlineCacheAllocator::ClearCacheIfHasDeadWeakRefs(Recycler* recycler, CacheLayout* cache)
{
    // Walk the weakRefs slots, which immediately precede strongRef in CacheLayout.
    for (intptr_t* curWeakRefPtr = cache->weakRefs; curWeakRefPtr < &cache->strongRef; curWeakRefPtr++)
    {
        intptr_t curWeakRef = *curWeakRefPtr;
        if (curWeakRef == 0)
        {
            continue;
        }
        // Strip the aux-slot tag bit before treating the value as a pointer.
        curWeakRef &= ~(intptr_t)InlineCacheAuxSlotTypeTag;
        // Values not aligned to the recycler's object granularity cannot be
        // recycler-allocated object pointers; skip them.
        if ((curWeakRef & (HeapConstants::ObjectGranularity - 1)) != 0)
        {
            continue;
        }
        if (!recycler->IsObjectMarked((void*)curWeakRef))
        {
            // One dead referent invalidates the whole cache.
            cache->weakRefs[0] = 0;
            cache->weakRefs[1] = 0;
            cache->weakRefs[2] = 0;
            break;
        }
    }
}
// Sweeps every cache in the arena (current big blocks, full blocks, and
// oversized malloc blocks), clearing any cache that holds a dead weak
// reference. Called by the garbage collector after marking.
void InlineCacheAllocator::ClearCachesWithDeadWeakRefs(Recycler* recycler)
{
    UpdateCacheBlock();
    BigBlock *bigBlock = this->bigBlocks;
    while (bigBlock != NULL)
    {
        Assert(bigBlock->currentByte % sizeof(CacheLayout) == 0);
        CacheLayout* endPtr = (CacheLayout*)(bigBlock->GetBytes() + bigBlock->currentByte);
        for (CacheLayout* cache = (CacheLayout*)bigBlock->GetBytes(); cache < endPtr; cache++)
        {
            ClearCacheIfHasDeadWeakRefs(recycler, cache);
        }
        bigBlock = bigBlock->nextBigBlock;
    }
    bigBlock = this->fullBlocks;
    while (bigBlock != NULL)
    {
        Assert(bigBlock->currentByte % sizeof(CacheLayout) == 0);
        CacheLayout* endPtr = (CacheLayout*)(bigBlock->GetBytes() + bigBlock->currentByte);
        for (CacheLayout* cache = (CacheLayout*)bigBlock->GetBytes(); cache < endPtr; cache++)
        {
            ClearCacheIfHasDeadWeakRefs(recycler, cache);
        }
        bigBlock = bigBlock->nextBigBlock;
    }
    ArenaMemoryBlock * memoryBlock = this->mallocBlocks;
    while (memoryBlock != nullptr)
    {
        Assert(memoryBlock->nbytes % sizeof(CacheLayout) == 0);
        ArenaMemoryBlock * next = memoryBlock->next;
        CacheLayout* endPtr = (CacheLayout*)(memoryBlock->GetBytes() + memoryBlock->nbytes);
        for (CacheLayout* cache = (CacheLayout*)memoryBlock->GetBytes(); cache < endPtr; cache++)
        {
            ClearCacheIfHasDeadWeakRefs(recycler, cache);
        }
        memoryBlock = next;
    }
}
  1176. #else
  1177. template class ArenaAllocatorBase<InlineCacheAllocatorTraits>;
  1178. #if DBG
  1179. bool InlineCacheAllocator::IsAllZero()
  1180. {
  1181. UpdateCacheBlock();
  1182. BigBlock *blockp = this->bigBlocks;
  1183. while (blockp != NULL)
  1184. {
  1185. for (size_t i = 0; i < blockp->currentByte; i++)
  1186. {
  1187. if (blockp->GetBytes()[i] != 0)
  1188. {
  1189. return false;
  1190. }
  1191. }
  1192. blockp = blockp->nextBigBlock;
  1193. }
  1194. blockp = this->fullBlocks;
  1195. while (blockp != NULL)
  1196. {
  1197. for (size_t i = 0; i < blockp->currentByte; i++)
  1198. {
  1199. if (blockp->GetBytes()[i] != 0)
  1200. {
  1201. return false;
  1202. }
  1203. }
  1204. blockp = blockp->nextBigBlock;
  1205. }
  1206. ArenaMemoryBlock * memoryBlock = this->mallocBlocks;
  1207. while (memoryBlock != nullptr)
  1208. {
  1209. ArenaMemoryBlock * next = memoryBlock->next;
  1210. for (size_t i = 0; i < memoryBlock->nbytes; i++)
  1211. {
  1212. if (memoryBlock->GetBytes()[i] != 0)
  1213. {
  1214. return false;
  1215. }
  1216. }
  1217. memoryBlock = next;
  1218. }
  1219. return true;
  1220. }
  1221. #endif
  1222. void InlineCacheAllocator::ZeroAll()
  1223. {
  1224. UpdateCacheBlock();
  1225. BigBlock *blockp = this->bigBlocks;
  1226. while (blockp != NULL)
  1227. {
  1228. memset(blockp->GetBytes(), 0, blockp->currentByte);
  1229. blockp = blockp->nextBigBlock;
  1230. }
  1231. blockp = this->fullBlocks;
  1232. while (blockp != NULL)
  1233. {
  1234. memset(blockp->GetBytes(), 0, blockp->currentByte);
  1235. blockp = blockp->nextBigBlock;
  1236. }
  1237. ArenaMemoryBlock * memoryBlock = this->mallocBlocks;
  1238. while (memoryBlock != nullptr)
  1239. {
  1240. ArenaMemoryBlock * next = memoryBlock->next;
  1241. memset(memoryBlock->GetBytes(), 0, memoryBlock->nbytes);
  1242. memoryBlock = next;
  1243. }
  1244. }
  1245. #endif
  1246. #if DBG
// Debug check: scans every byte the isinst-inline-cache allocator has handed
// out (current big blocks, full blocks, and oversized malloc blocks) and
// reports whether all of them are currently zero.
bool IsInstInlineCacheAllocator::IsAllZero()
{
    UpdateCacheBlock();
    BigBlock *blockp = this->bigBlocks;
    while (blockp != NULL)
    {
        for (size_t i = 0; i < blockp->currentByte; i++)
        {
            if (blockp->GetBytes()[i] != 0)
            {
                return false;
            }
        }
        blockp = blockp->nextBigBlock;
    }
    blockp = this->fullBlocks;
    while (blockp != NULL)
    {
        for (size_t i = 0; i < blockp->currentByte; i++)
        {
            if (blockp->GetBytes()[i] != 0)
            {
                return false;
            }
        }
        blockp = blockp->nextBigBlock;
    }
    ArenaMemoryBlock * memoryBlock = this->mallocBlocks;
    while (memoryBlock != nullptr)
    {
        ArenaMemoryBlock * next = memoryBlock->next;
        for (size_t i = 0; i < memoryBlock->nbytes; i++)
        {
            if (memoryBlock->GetBytes()[i] != 0)
            {
                return false;
            }
        }
        memoryBlock = next;
    }
    return true;
}
  1289. #endif
  1290. void IsInstInlineCacheAllocator::ZeroAll()
  1291. {
  1292. UpdateCacheBlock();
  1293. BigBlock *blockp = this->bigBlocks;
  1294. while (blockp != NULL)
  1295. {
  1296. memset(blockp->GetBytes(), 0, blockp->currentByte);
  1297. blockp = blockp->nextBigBlock;
  1298. }
  1299. blockp = this->fullBlocks;
  1300. while (blockp != NULL)
  1301. {
  1302. memset(blockp->GetBytes(), 0, blockp->currentByte);
  1303. blockp = blockp->nextBigBlock;
  1304. }
  1305. ArenaMemoryBlock * memoryBlock = this->mallocBlocks;
  1306. while (memoryBlock != nullptr)
  1307. {
  1308. ArenaMemoryBlock * next = memoryBlock->next;
  1309. memset(memoryBlock->GetBytes(), 0, memoryBlock->nbytes);
  1310. memoryBlock = next;
  1311. }
  1312. }
  1313. #undef ASSERT_TRHEAD