// MarkContext.inl
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. __inline
  6. bool MarkContext::AddMarkedObject(void * objectAddress, size_t objectSize)
  7. {
  8. Assert(objectAddress != nullptr);
  9. Assert(objectSize > 0);
  10. Assert(objectSize % sizeof(void *) == 0);
  11. FAULTINJECT_MEMORY_MARK_NOTHROW(L"AddMarkedObject", objectSize);
  12. #if DBG_DUMP
  13. if (recycler->forceTraceMark || recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::MarkPhase))
  14. {
  15. Output::Print(L" %p", objectAddress);
  16. }
  17. #endif
  18. RECYCLER_STATS_INTERLOCKED_INC(recycler, scanCount);
  19. MarkCandidate markCandidate;
  20. markCandidate.obj = (void **) objectAddress;
  21. markCandidate.byteCount = objectSize;
  22. return markStack.Push(markCandidate);
  23. }
  24. __inline
  25. bool MarkContext::AddTrackedObject(FinalizableObject * obj)
  26. {
  27. Assert(obj != nullptr);
  28. Assert(recycler->DoQueueTrackedObject());
  29. #ifdef PARTIAL_GC_ENABLED
  30. Assert(!recycler->inPartialCollectMode);
  31. #endif
  32. FAULTINJECT_MEMORY_MARK_NOTHROW(L"AddTrackedObject", 0);
  33. return trackStack.Push(obj);
  34. }
  35. template <bool parallel, bool interior>
  36. __inline
  37. void MarkContext::ScanMemory(void ** obj, size_t byteCount)
  38. {
  39. Assert(byteCount != 0);
  40. Assert(byteCount % sizeof(void *) == 0);
  41. void ** objEnd = obj + (byteCount / sizeof(void *));
  42. void * parentObject = (void*)obj;
  43. #if DBG_DUMP
  44. if (recycler->forceTraceMark || recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::MarkPhase))
  45. {
  46. Output::Print(L"Scanning %p(%8d): ", obj, byteCount);
  47. }
  48. #endif
  49. do
  50. {
  51. // We need to ensure that the compiler does not reintroduce reads to the object after inlining.
  52. // This could cause the value to change after the marking checks (e.g., the null/low address check).
  53. // Intrinsics avoid the expensive memory barrier on ARM (due to /volatile:ms).
  54. #if defined(_M_ARM64)
  55. void * candidate = reinterpret_cast<void *>(__iso_volatile_load64(reinterpret_cast<volatile __int64 *>(obj)));
  56. #elif defined(_M_ARM)
  57. void * candidate = reinterpret_cast<void *>(__iso_volatile_load32(reinterpret_cast<volatile __int32 *>(obj)));
  58. #else
  59. void * candidate = *(static_cast<void * volatile *>(obj));
  60. #endif
  61. Mark<parallel, interior>(candidate, parentObject);
  62. obj++;
  63. } while (obj != objEnd);
  64. #if DBG_DUMP
  65. if (recycler->forceTraceMark || recycler->GetRecyclerFlagsTable().Trace.IsEnabled(Js::MarkPhase))
  66. {
  67. Output::Print(L"\n");
  68. Output::Flush();
  69. }
  70. #endif
  71. }
  72. template <bool parallel, bool interior>
  73. __inline
  74. void MarkContext::ScanObject(void ** obj, size_t byteCount)
  75. {
  76. BEGIN_DUMP_OBJECT(recycler, obj);
  77. ScanMemory<parallel, interior>(obj, byteCount);
  78. END_DUMP_OBJECT(recycler);
  79. }
  80. template <bool parallel, bool interior>
  81. __inline
  82. void MarkContext::Mark(void * candidate, void * parentReference)
  83. {
  84. // We should never reach here while we are processing Rescan.
  85. // Otherwise our rescanState could be out of sync with mark state.
  86. Assert(!recycler->isProcessingRescan);
  87. if ((size_t)candidate < 0x10000)
  88. {
  89. RECYCLER_STATS_INTERLOCKED_INC(recycler, tryMarkNullCount);
  90. return;
  91. }
  92. if (interior)
  93. {
  94. Assert(recycler->enableScanInteriorPointers
  95. || (!recycler->IsConcurrentState() && recycler->collectionState != CollectionStateParallelMark));
  96. recycler->heapBlockMap.MarkInterior<parallel>(candidate, this);
  97. return;
  98. }
  99. if (!HeapInfo::IsAlignedAddress(candidate))
  100. {
  101. RECYCLER_STATS_INTERLOCKED_INC(recycler, tryMarkUnalignedCount);
  102. return;
  103. }
  104. recycler->heapBlockMap.Mark<parallel>(candidate, this);
  105. #ifdef RECYCLER_MARK_TRACK
  106. this->OnObjectMarked(candidate, parentReference);
  107. #endif
  108. }
  109. __inline
  110. void MarkContext::MarkTrackedObject(FinalizableObject * trackedObject)
  111. {
  112. Assert(!recycler->queueTrackedObject);
  113. #ifdef PARTIAL_GC_ENABLED
  114. Assert(!recycler->inPartialCollectMode);
  115. #endif
  116. Assert(!recycler->IsConcurrentExecutingState());
  117. Assert(!(recycler->collectionState == CollectionStateParallelMark));
  118. // Mark is not expected to throw.
  119. BEGIN_NO_EXCEPTION
  120. {
  121. trackedObject->Mark(recycler);
  122. }
  123. END_NO_EXCEPTION
  124. }
  125. template <bool parallel, bool interior>
  126. __inline
  127. void MarkContext::ProcessMark()
  128. {
  129. #ifdef RECYCLER_STRESS
  130. if (recycler->GetRecyclerFlagsTable().RecyclerInduceFalsePositives)
  131. {
  132. // InduceFalsePositives logic doesn't support parallel marking
  133. if (!parallel)
  134. {
  135. recycler->heapBlockMap.InduceFalsePositives(recycler);
  136. }
  137. }
  138. #endif
  139. #if defined(_M_IX86) || defined(_M_X64)
  140. MarkCandidate current, next;
  141. while (markStack.Pop(&current))
  142. {
  143. // Process entries and prefetch as we go.
  144. while (markStack.Pop(&next))
  145. {
  146. // Prefetch the next entry so it's ready when we need it.
  147. _mm_prefetch((char *)next.obj, _MM_HINT_T0);
  148. // Process the previously retrieved entry.
  149. ScanObject<parallel, interior>(current.obj, current.byteCount);
  150. current = next;
  151. }
  152. // The stack is empty, but we still have a previously retrieved entry; process it now.
  153. ScanObject<parallel, interior>(current.obj, current.byteCount);
  154. // Processing that entry may have generated more entries in the mark stack, so continue the loop.
  155. }
  156. #else
  157. // _mm_prefetch intrinsic is specific to Intel platforms.
  158. // CONSIDER: There does seem to be a compiler intrinsic for prefetch on ARM,
  159. // however, the information on this is scarce, so for now just don't do prefetch on ARM.
  160. MarkCandidate current;
  161. while (markStack.Pop(&current))
  162. {
  163. ScanObject<parallel, interior>(current.obj, current.byteCount);
  164. }
  165. #endif
  166. Assert(markStack.IsEmpty());
  167. }