| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702
70370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253125412551256125712581259126012611262126312641265126612671268126912701271127212731274127512761
27712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639164016411642164316441645164616471648164916501651165216531654165516561657165816591660166116621663166416651666166716681669167016711672167316741675167616771678167916801681168216831684168516861687168816891690169116921693169416951696169716981699170017011702170317041705170617071708170917101711171217131714171517161717171817191720172117221723172417251726172717281729173017311732173317341735173617371738173917401741174217431744174517461747174817491750175117521753175417551756175717581759176017611762176317641765176617671768176917701771177217731774177517761
77717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762
27722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387238823892390239123922393239423952396239723982399240024012402240324042405240624072408240924102411241224132414241524162417241824192420242124222423242424252426242724282429243024312432243324342435243624372438243924402441244224432444244524462447244824492450245124522453245424552456245724582459246024612462246324642465246624672468246924702471247224732474247524762477247824792480248124822483248424852486248724882489249024912492249324942495249624972498249925002501250225032504250525062507250825092510251125122513251425152516251725182519252025212522252325242525252625272528252925302531253225332534253525362537253825392540254125422543254425452546254725482549255025512552255325542555255625572558255925602561256225632564256525662567256825692570257125722573257425752576257725782579258025812582258325842585258625872588258925902591259225932594259525962597259825992600260126022603260426052606260726082609261026112612261326142615261626172618261926202621262226232624262526262627262826292630263126322633263426352636263726382639264026412642264326442645264626472648264926502651265226532654265526562657265826592660266126622663266426652666266726682669267026712672267326742675267626772678 |
- //-------------------------------------------------------------------------------------------------------
- // Copyright (C) Microsoft. All rights reserved.
- // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
- //-------------------------------------------------------------------------------------------------------
- #pragma once
- #include "CollectionState.h"
- #include "RecyclerTelemetryInfo.h"
- #include "RecyclerWaitReason.h"
- #include "Common/ObservableValue.h"
- #include "CollectionFlags.h"
// Forward declarations for engine types referenced by the Recycler
// interfaces declared below; the full definitions live elsewhere.
namespace Js
{
    class Profiler;
    enum Phase: unsigned short;
};
namespace JsUtil
{
    class ThreadService;
};
#ifdef STACK_BACK_TRACE
class StackBackTraceNode;
#endif
class ScriptEngineBase;
class JavascriptThreadService;
#ifdef PROFILE_MEM
struct RecyclerMemoryData;
#endif
class ThreadContext;
- namespace Memory
- {
- template <typename T> class RecyclerRootPtr;
- class AutoBooleanToggle
- {
- public:
- AutoBooleanToggle(bool * b, bool value = true, bool valueMayChange = false)
- : b(b)
- {
- Assert(!(*b));
- *b = value;
- #if DBG
- this->value = value;
- this->valueMayChange = valueMayChange;
- #endif
- }
- ~AutoBooleanToggle()
- {
- if (b)
- {
- Assert(valueMayChange || *b == value);
- *b = false;
- }
- }
- void Leave()
- {
- Assert(valueMayChange || *b == value);
- *b = false;
- b = nullptr;
- }
- private:
- bool * b;
- #if DBG
- bool value;
- bool valueMayChange;
- #endif
- };
// RAII helper that overwrites *var with val for the duration of a scope
// and restores the previous value on destruction.
//
// Fix: the debug bookkeeping was guarded by #ifdef DEBUG while the rest of
// this file (see AutoBooleanToggle above) and the Assert machinery key off
// DBG. With DBG set but DEBUG unset, the destructor's assert referenced
// the undeclared member debugSetValue. Guard both sides with DBG so the
// member exists exactly when the assert compiles.
template <class T>
class AutoRestoreValue
{
public:
    AutoRestoreValue(T* var, const T& val):
        variable(var)
    {
        Assert(var);
        oldValue = (*variable);
        (*variable) = val;
#if DBG
        debugSetValue = val;
#endif
    }

    ~AutoRestoreValue()
    {
#if DBG
        // Nobody should have changed the variable while we held it.
        Assert((*variable) == debugSetValue);
#endif
        (*variable) = oldValue;
    }

private:
#if DBG
    T debugSetValue; // value we installed; verified at destruction
#endif
    T* variable;     // variable being managed (asserted non-null)
    T oldValue;      // previous value, restored at destruction
};
- class Recycler;
// Small functor that asks the Recycler to scan a range of memory
// ((obj, byteCount)) for heap references; the call operator forwards to
// the captured Recycler (defined out of line).
class RecyclerScanMemoryCallback
{
public:
    RecyclerScanMemoryCallback(Recycler* recycler) : recycler(recycler) {}
    void operator()(void** obj, size_t byteCount);
private:
    Recycler* recycler; // non-owning; must outlive this callback
};
// Empty tag type that carries ObjectInfoBits into placement-new overload
// resolution (used by the RecyclerNew*EnumClass/WithInfoBits macros below).
template<ObjectInfoBits infoBits>
struct InfoBitsWrapper{};
- #if ENABLE_WEAK_REFERENCE_REGIONS
// Minimal local pointer-detection variable template (true only for T*),
// used by the static_assert below.
template<typename T>
static constexpr bool is_pointer = false;
template<typename K>
static constexpr bool is_pointer<K*> = true;

// One weak-reference slot inside a weak-reference region. Reading yields
// the stored pointer; assignment is only permitted from a null slot (see
// the Assert below) so concurrent marking never observes a pointer swap.
template<typename T>
class RecyclerWeakReferenceRegionItem {
    static_assert(is_pointer<T>, "Weak references must be to pointer types");
    friend class Recycler;
public:
    RecyclerWeakReferenceRegionItem() : ptr(T()), heapBlock(nullptr) {};
    operator T() const { return ptr; };
    T operator=(T newPtr) {
        Assert(ptr == nullptr); // For safety with concurrent marking, only allow setting the pointer to non-null from null
        heapBlock = nullptr;
        return ptr = newPtr;
    };
private:
    // Items live in place inside their region; copying is disallowed.
    RecyclerWeakReferenceRegionItem(RecyclerWeakReferenceRegionItem<T>&) = delete;
    FieldNoBarrier(T) ptr;
    FieldNoBarrier(HeapBlock*) heapBlock; // Note: the low bit of the heapBlock is used for background marking
};
// Descriptor for a contiguous array of weak-reference items: the item
// array, its element count, and the heap block backing the array storage.
class RecyclerWeakReferenceRegion {
    friend class Recycler;
public:
    RecyclerWeakReferenceRegionItem<void*>* GetPtr() const { return ptr; }
    size_t GetCount() const { return count; }
    HeapBlock* GetHeapBlock() const { return arrayHeapBlock; }
private:
    FieldNoBarrier(RecyclerWeakReferenceRegionItem<void*>*) ptr;
    FieldNoBarrier(size_t) count;
    FieldNoBarrier(HeapBlock*) arrayHeapBlock;
};
- #endif
// Allocation macro
// RecyclerNew family: placement-new an object of type T out of the
// recycler heap. Variants: Plus = extra trailing bytes, Z = zeroed memory,
// Struct = no constructor arguments, Array = count elements,
// Finalized/Tracked = object is a FinalizableObject (note the double cast),
// EnumClass/WithInfoBits = caller supplies the ObjectInfoBits explicitly.
#define RecyclerNew(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocInlined, T, __VA_ARGS__)
#define RecyclerNewPlus(recycler,size,T,...) AllocatorNewPlus(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusZ(recycler,size,T,...) AllocatorNewPlusZ(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocZeroInlined, T, __VA_ARGS__)
#define RecyclerNewStruct(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocInlined, T)
#define RecyclerNewStructZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocZeroInlined, T)
#define RecyclerNewStructPlus(recycler,size,T) AllocatorNewStructPlus(Recycler, recycler, size, T)
#define RecyclerNewArray(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, Alloc, T, count)
#define RecyclerNewArrayZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZero, T, count)
#define RecyclerNewFinalized(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedInlined, T, __VA_ARGS__)))
#define RecyclerNewFinalizedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalized, size, T, __VA_ARGS__)))
#define RecyclerNewTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedInlined, T, __VA_ARGS__)))
#define RecyclerNewEnumClass(recycler, enumClass, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<enumClass>()) T(__VA_ARGS__)
#define RecyclerNewWithInfoBits(recycler, infoBits, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<infoBits>()) T(__VA_ARGS__)
#define RecyclerNewFinalizedClientTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedClientTrackedInlined, T, __VA_ARGS__)))
// Write-barrier allocator variants. When RECYCLER_WRITE_BARRIER_ALLOC is
// defined they route through the AllocWithBarrier allocators; when
// RECYCLER_WRITE_BARRIER is not defined at all they collapse to the plain
// RecyclerNew* macros above.
#if defined(RECYCLER_WRITE_BARRIER_ALLOC)
#define RecyclerNewWithBarrier(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocWithBarrier, T, __VA_ARGS__)
#define RecyclerNewWithBarrierPlus(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocWithBarrier, size, T, __VA_ARGS__)
#define RecyclerNewWithBarrierPlusZ(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocZeroWithBarrier, size, T, __VA_ARGS__)
#define RecyclerNewWithBarrierZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocZeroWithBarrier, T, __VA_ARGS__)
#define RecyclerNewWithBarrierStruct(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocWithBarrier, T)
#define RecyclerNewWithBarrierStructZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocZeroWithBarrier, T)
#define RecyclerNewWithBarrierStructPlus(recycler,size,T) AllocatorNewStructPlusBase(Recycler, recycler, AllocWithBarrier, size, T)
#define RecyclerNewWithBarrierArray(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocWithBarrier, T, count)
#define RecyclerNewWithBarrierArrayZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZeroWithBarrier, T, count)
#define RecyclerNewWithBarrierFinalized(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedWithBarrierInlined, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierFinalizedPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedWithBarrier, size, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedWithBarrierInlined, T, __VA_ARGS__)))
#define RecyclerNewWithBarrierEnumClass(recycler, enumClass, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<(ObjectInfoBits)(enumClass | WithBarrierBit)>()) T(__VA_ARGS__)
#define RecyclerNewWithBarrierWithInfoBits(recycler, infoBits, T, ...) new (TRACK_ALLOC_INFO(static_cast<Recycler *>(recycler), T, Recycler, 0, (size_t)-1), InfoBitsWrapper<(ObjectInfoBits)(infoBits | WithBarrierBit)>()) T(__VA_ARGS__)
#define RecyclerNewWithBarrierFinalizedClientTracked(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedClientTrackedWithBarrierInlined, T, __VA_ARGS__)))
#endif
#ifndef RECYCLER_WRITE_BARRIER
// No write barrier in this build: alias every barrier macro to its plain
// counterpart so call sites compile unchanged.
#define RecyclerNewWithBarrier RecyclerNew
#define RecyclerNewWithBarrierPlus RecyclerNewPlus
#define RecyclerNewWithBarrierPlusZ RecyclerNewPlusZ
#define RecyclerNewWithBarrierZ RecyclerNewZ
#define RecyclerNewWithBarrierStruct RecyclerNewStruct
#define RecyclerNewWithBarrierStructZ RecyclerNewStructZ
#define RecyclerNewWithBarrierStructPlus RecyclerNewStructPlus
#define RecyclerNewWithBarrierArray RecyclerNewArray
#define RecyclerNewWithBarrierArrayZ RecyclerNewArrayZ
#define RecyclerNewWithBarrierFinalized RecyclerNewFinalized
#define RecyclerNewWithBarrierFinalizedPlus RecyclerNewFinalizedPlus
#define RecyclerNewWithBarrierTracked RecyclerNewTracked
#define RecyclerNewWithBarrierEnumClass RecyclerNewEnumClass
#define RecyclerNewWithBarrierWithInfoBits RecyclerNewWithInfoBits
#define RecyclerNewWithBarrierFinalizedClientTracked RecyclerNewFinalizedClientTracked
#endif
// Leaf allocators
// "Leaf" allocations go through the AllocLeaf* allocators (objects that the
// collector treats as leaves — cf. scanCount below, which counts only
// non-leaf objects).
#define RecyclerNewLeaf(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafInlined, T, __VA_ARGS__)
#define RecyclerNewLeafZ(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafZeroInlined, T, __VA_ARGS__)
#define RecyclerNewPlusLeaf(recycler,size,T,...) AllocatorNewPlusLeaf(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewPlusLeafZ(recycler,size,T,...) AllocatorNewPlusLeafZ(Recycler, recycler, size, T, __VA_ARGS__)
#define RecyclerNewStructLeaf(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocLeafInlined, T)
#define RecyclerNewStructLeafZ(recycler,T) AllocatorNewStructBase(Recycler, recycler, AllocLeafZeroInlined, T)
#define RecyclerNewArrayLeafZ(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafZero, T, count)
#define RecyclerNewArrayLeaf(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeaf, T, count)
#define RecyclerNewFinalizedLeaf(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedLeafInlined, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeafPlus(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedLeaf, size, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeaf(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedLeafInlined, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeafPlusZ(recycler,size,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocZeroTrackedLeafInlined, size, T, __VA_ARGS__)))
#ifdef RECYCLER_VISITED_HOST
// We need to track these allocations. The RecyclerVisitedHost* object allocation APIs don't provide us with the type of the objects being allocated. Use the DummyVTableObject type used elsewhere to track the allocations.
#define RecyclerAllocVisitedHostTracedAndFinalized(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<RecyclerVisitedHostTracedFinalizableBits>(size)
#define RecyclerAllocVisitedHostFinalized(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<RecyclerVisitedHostFinalizableBits>(size)
#define RecyclerAllocVisitedHostTraced(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<RecyclerVisitedHostTracedBits>(size)
#define RecyclerAllocLeaf(recycler,size) (TRACK_ALLOC_INFO(recycler, DummyVTableObject, Recycler, size, (size_t)-1))->AllocVisitedHost<LeafBit>(size)
#endif
// Object-lifetime tracing variants: when TRACE_OBJECT_LIFETIME is defined
// they route through the *Trace allocators; otherwise they alias the
// corresponding non-trace macros above.
#ifdef TRACE_OBJECT_LIFETIME
#define RecyclerNewLeafTrace(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafTrace, T, __VA_ARGS__)
#define RecyclerNewLeafZTrace(recycler,T,...) AllocatorNewBase(Recycler, recycler, AllocLeafZeroTrace, T, __VA_ARGS__)
#define RecyclerNewPlusLeafTrace(recycler,size,T,...) AllocatorNewPlusBase(Recycler, recycler, AllocLeafTrace, size, T, __VA_ARGS__)
#define RecyclerNewArrayLeafZTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafZeroTrace, T, count)
#define RecyclerNewArrayTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocTrace, T, count)
#define RecyclerNewArrayZTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocZeroTrace, T, count)
#define RecyclerNewArrayLeafTrace(recycler,T,count) AllocatorNewArrayBase(Recycler, recycler, AllocLeafTrace, T, count)
#define RecyclerNewFinalizedTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedTrace, T, __VA_ARGS__)))
#define RecyclerNewFinalizedLeafTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocFinalizedLeafTrace, T, __VA_ARGS__)))
#define RecyclerNewFinalizedPlusTrace(recycler, size, T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewPlusBase(Recycler, recycler, AllocFinalizedTrace, size, T, __VA_ARGS__)))
#define RecyclerNewTrackedTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedTrace, T, __VA_ARGS__)))
#define RecyclerNewTrackedLeafTrace(recycler,T,...) static_cast<T *>(static_cast<FinalizableObject *>(AllocatorNewBase(Recycler, recycler, AllocTrackedLeafTrace, T, __VA_ARGS__)))
#else
#define RecyclerNewLeafTrace RecyclerNewLeaf
#define RecyclerNewLeafZTrace RecyclerNewLeafZ
#define RecyclerNewPlusLeafTrace RecyclerNewPlusLeaf
#define RecyclerNewArrayLeafZTrace RecyclerNewArrayLeafZ
#define RecyclerNewArrayTrace RecyclerNewArray
#define RecyclerNewArrayZTrace RecyclerNewArrayZ
#define RecyclerNewArrayLeafTrace RecyclerNewArrayLeaf
#define RecyclerNewFinalizedTrace RecyclerNewFinalized
#define RecyclerNewFinalizedLeafTrace RecyclerNewFinalizedLeaf
#define RecyclerNewFinalizedPlusTrace RecyclerNewFinalizedPlus
#define RecyclerNewTrackedTrace RecyclerNewTracked
#define RecyclerNewTrackedLeafTrace RecyclerNewTrackedLeaf
#endif
// Verbose tracing helpers: print only when the Verbose flag and the
// relevant trace phase are both enabled; all of them expand to nothing
// when RECYCLER_TRACE is off.
#ifdef RECYCLER_TRACE
#define RecyclerVerboseTrace(flags, ...) \
    if (flags.Verbose && flags.Trace.IsEnabled(Js::RecyclerPhase)) \
    { \
        Output::Print(__VA_ARGS__); \
    }
#define AllocationVerboseTrace(flags, ...) \
    if (flags.Verbose && flags.Trace.IsEnabled(Js::MemoryAllocationPhase)) \
    { \
        Output::Print(__VA_ARGS__); \
    }
#define LargeAllocationVerboseTrace(flags, ...) \
    if (flags.Verbose && \
        (flags.Trace.IsEnabled(Js::MemoryAllocationPhase) || \
        flags.Trace.IsEnabled(Js::LargeMemoryAllocationPhase))) \
    { \
        Output::Print(__VA_ARGS__); \
    }
#define PageAllocatorAllocationVerboseTrace(flags, ...) \
    if (flags.Verbose && flags.Trace.IsEnabled(Js::PageAllocatorAllocPhase)) \
    { \
        Output::Print(__VA_ARGS__); \
    }
#else
// Fix: PageAllocatorAllocationVerboseTrace was missing from this branch,
// so any call site compiled only in RECYCLER_TRACE builds. Provide the
// same empty fallback as the other trace macros.
#define RecyclerVerboseTrace(...)
#define AllocationVerboseTrace(...)
#define LargeAllocationVerboseTrace(...)
#define PageAllocatorAllocationVerboseTrace(...)
#endif
// Placement-new / free against an explicit HeapInfo.
#define RecyclerHeapNew(recycler,heapInfo,T,...) new (recycler, heapInfo) T(__VA_ARGS__)
#define RecyclerHeapDelete(recycler,heapInfo,addr) (static_cast<Recycler *>(recycler)->HeapFree(heapInfo,addr))
// Function-pointer type for a host-supplied root-marking callback; the
// void* is an opaque context argument.
typedef void (__cdecl* ExternalRootMarker)(void *);
// Host interface through which the Recycler notifies the embedder at the
// various points of a collection cycle (pre-collection, mark, sweep,
// dispose, post-collection). See DefaultRecyclerCollectionWrapper below
// for the no-op implementation.
class RecyclerCollectionWrapper
{
public:
    RecyclerCollectionWrapper() :
        _isScriptContextCloseGCPending(FALSE)
    { }
    // Pointer-to-member type for the Recycler collection entry points that
    // ExecuteRecyclerCollectionFunction invokes.
    typedef BOOL (Recycler::*CollectionFunction)(CollectionFlags flags);
    virtual void PreCollectionCallBack(CollectionFlags flags) = 0;
    virtual void PreSweepCallback() = 0;
    virtual void PreRescanMarkCallback() = 0;
    // Marks host-held roots; sets *stacksScannedByRuntime if the host
    // scanned thread stacks itself. Returns an implementation-defined count.
    virtual size_t RootMarkCallback(RecyclerScanMemoryCallback& scanMemoryCallback, BOOL * stacksScannedByRuntime) = 0;
    virtual void RescanMarkTimeoutCallback() = 0;
    virtual void EndMarkCallback() = 0;
    virtual void ConcurrentCallback() = 0;
    virtual void WaitCollectionCallBack() = 0;
    virtual void PostCollectionCallBack() = 0;
    // Lets the host wrap the actual collection call (e.g. for reentrancy
    // bookkeeping); implementations invoke (recycler->*function)(flags).
    virtual BOOL ExecuteRecyclerCollectionFunction(Recycler * recycler, CollectionFunction function, CollectionFlags flags) = 0;
    virtual uint GetRandomNumber() = 0;
    virtual bool DoSpecialMarkOnScanStack() = 0;
    virtual void OnScanStackCallback(void ** stackTop, size_t byteCount, void ** registers, size_t registersByteCount) = 0;
    virtual void PostSweepRedeferralCallBack() = 0;
#ifdef FAULT_INJECTION
    virtual void DisposeScriptContextByFaultInjectionCallBack() = 0;
#endif
    virtual void DisposeObjects(Recycler * recycler) = 0;
    virtual void PreDisposeObjectsCallBack() = 0;
#ifdef ENABLE_PROJECTION
    virtual void MarkExternalWeakReferencedObjects(bool inPartialCollect) = 0;
    virtual void ResolveExternalWeakReferencedObjects() = 0;
#endif
#if DBG || defined(PROFILE_EXEC)
    virtual bool AsyncHostOperationStart(void *) = 0;
    virtual void AsyncHostOperationEnd(bool wasInAsync, void *) = 0;
#endif
#if DBG
    virtual void CheckJsReentrancyOnDispose() = 0;
#endif
    // Simple flag: a GC triggered by script-context close is pending.
    BOOL GetIsScriptContextCloseGCPending()
    {
        return _isScriptContextCloseGCPending;
    }
    void ClearIsScriptContextCloseGCPending()
    {
        _isScriptContextCloseGCPending = FALSE;
    }
    void SetIsScriptContextCloseGCPending()
    {
        _isScriptContextCloseGCPending = TRUE;
    }
protected:
    BOOL _isScriptContextCloseGCPending;
};
// No-op implementation of RecyclerCollectionWrapper, used when no host
// wrapper is installed. Only ExecuteRecyclerCollectionFunction and
// DisposeObjects have real (out-of-line) bodies; everything else does
// nothing or returns a neutral value.
class DefaultRecyclerCollectionWrapper : public RecyclerCollectionWrapper
{
public:
    virtual void PreCollectionCallBack(CollectionFlags flags) override {}
    virtual void PreSweepCallback() override {}
    virtual void PreRescanMarkCallback() override {}
    virtual void RescanMarkTimeoutCallback() override {}
    virtual void EndMarkCallback() override {}
    // No host roots: reports that stacks were not scanned by the runtime.
    virtual size_t RootMarkCallback(RecyclerScanMemoryCallback& scanMemoryCallback, BOOL * stacksScannedByRuntime) override { *stacksScannedByRuntime = FALSE; return 0; }
    virtual void ConcurrentCallback() override {}
    virtual void WaitCollectionCallBack() override {}
    virtual void PostCollectionCallBack() override {}
    virtual BOOL ExecuteRecyclerCollectionFunction(Recycler * recycler, CollectionFunction function, CollectionFlags flags) override;
    virtual uint GetRandomNumber() override { return 0; }
    virtual bool DoSpecialMarkOnScanStack() override { return false; }
    virtual void OnScanStackCallback(void ** stackTop, size_t byteCount, void ** registers, size_t registersByteCount) override {};
    virtual void PostSweepRedeferralCallBack() override {}
#ifdef FAULT_INJECTION
    virtual void DisposeScriptContextByFaultInjectionCallBack() override {};
#endif
    virtual void DisposeObjects(Recycler * recycler) override;
    virtual void PreDisposeObjectsCallBack() override {};
#ifdef ENABLE_PROJECTION
    virtual void MarkExternalWeakReferencedObjects(bool inPartialCollect) override {};
    virtual void ResolveExternalWeakReferencedObjects() override {};
#endif
#if DBG || defined(PROFILE_EXEC)
    virtual bool AsyncHostOperationStart(void *) override { return false; };
    virtual void AsyncHostOperationEnd(bool wasInAsync, void *) override {};
#endif
#if DBG
    virtual void CheckJsReentrancyOnDispose() override {}
#endif
    // Shared singleton instance (defined out of line).
    static DefaultRecyclerCollectionWrapper Instance;
private:
    static bool IsCollectionDisabled(Recycler * recycler);
};
- #ifdef RECYCLER_STATS
// Per-collection statistics gathered when RECYCLER_STATS is defined.
// Updated through the RECYCLER_STATS_* macros below; plain data, no
// methods.
struct RecyclerCollectionStats
{
    size_t startCollectAllocBytes;
#if ENABLE_PARTIAL_GC
    size_t startCollectNewPageCount;
#endif
    size_t continueCollectAllocBytes;
    size_t finishCollectTryCount;
    // Heuristic Stats
#if ENABLE_PARTIAL_GC
    size_t rescanRootBytes;
    size_t estimatedPartialReuseBytes;
    size_t uncollectedNewPageCountPartialCollect;
    size_t partialCollectSmallHeapBlockReuseMinFreeBytes;
    double collectEfficacy;
    double collectCost;
#endif
    // Mark stats
    size_t tryMarkCount; // # of pointer try mark (* pointer size to get total number byte looked at)
    size_t tryMarkNullCount;
    size_t tryMarkUnalignedCount;
    size_t tryMarkNonRecyclerMemoryCount;
    size_t tryMarkInteriorCount;
    size_t tryMarkInteriorNullCount;
    size_t tryMarkInteriorNonRecyclerMemoryCount;
    size_t rootCount;
    size_t stackCount;
    size_t remarkCount;
    size_t scanCount; // non-leaf objects marked.
    size_t trackCount;
    size_t finalizeCount;
    size_t markThruNewObjCount;
    size_t markThruFalseNewObjCount;
    // Rescan/mark counters, kept separately per mark pass (see
    // backgroundMarkData below for the concurrent repeat-mark passes).
    struct MarkData
    {
        // Rescan stats
        size_t rescanPageCount;
        size_t rescanObjectCount;
        size_t rescanObjectByteCount;
        size_t rescanLargePageCount;
        size_t rescanLargeObjectCount;
        size_t rescanLargeByteCount;
        size_t markCount; // total number of object marked
        size_t markBytes; // size of all objects marked.
    } markData;
#if ENABLE_CONCURRENT_GC
    MarkData backgroundMarkData[RecyclerHeuristic::MaxBackgroundRepeatMarkCount];
    size_t trackedObjectCount;
#endif
#if ENABLE_PARTIAL_GC
    size_t clientTrackedObjectCount;
#endif
    // Sweep stats
    size_t heapBlockCount[HeapBlock::BlockTypeCount]; // number of heap blocks (processed during swept)
    size_t heapBlockFreeCount[HeapBlock::BlockTypeCount]; // number of heap blocks deleted
    size_t heapBlockConcurrentSweptCount[HeapBlock::SmallBlockTypeCount];
    size_t heapBlockSweptCount[HeapBlock::SmallBlockTypeCount]; // number of heap blocks swept
    size_t objectSweptCount; // objects freed (free list + whole page freed)
    size_t objectSweptBytes;
    size_t objectSweptFreeListCount; // objects freed (free list)
    size_t objectSweptFreeListBytes;
    size_t objectSweepScanCount; // number of objects walked for sweeping (exclude whole page freed)
    size_t finalizeSweepCount; // number of objects finalizer/dispose called
#if ENABLE_PARTIAL_GC
    size_t smallNonLeafHeapBlockPartialReuseCount[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialReuseBytes[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialUnusedCount[HeapBlock::SmallBlockTypeCount];
    size_t smallNonLeafHeapBlockPartialUnusedBytes[HeapBlock::SmallBlockTypeCount];
#endif
    // Memory Stats
    size_t heapBlockFreeByteCount[HeapBlock::BlockTypeCount]; // The remaining usable free byte count
    size_t largeHeapBlockUsedByteCount; // Used byte count
    size_t largeHeapBlockTotalByteCount; // Total byte count
    // Empty/zero heap block stats
    uint numEmptySmallBlocks[HeapBlock::SmallBlockTypeCount];
    uint numZeroedOutSmallBlocks;
};
// Accounting macros for the collection statistics above (this is the enabled
// branch of the RECYCLER_STATS conditional, whose #if is earlier in the file).
// Each macro updates field `f` of r->collectionStats; the INTERLOCKED variants
// are safe to use from background GC threads.
#define RECYCLER_STATS_INC_IF(cond, r, f) if (cond) { RECYCLER_STATS_INC(r, f); }
#define RECYCLER_STATS_INC(r, f) ++r->collectionStats.f
#define RECYCLER_STATS_INTERLOCKED_INC(r, f) { InterlockedIncrement((LONG *)&r->collectionStats.f); }
#define RECYCLER_STATS_DEC(r, f) --r->collectionStats.f
#define RECYCLER_STATS_ADD(r, f, v) r->collectionStats.f += (v)
#define RECYCLER_STATS_INTERLOCKED_ADD(r, f, v) { InterlockedAdd((LONG *)&r->collectionStats.f, (LONG)(v)); }
#define RECYCLER_STATS_SUB(r, f, v) r->collectionStats.f -= (v)
#define RECYCLER_STATS_SET(r, f, v) r->collectionStats.f = v
#else
// Stats disabled: every accounting macro compiles away to nothing.
#define RECYCLER_STATS_INC_IF(cond, r, f)
#define RECYCLER_STATS_INC(r, f)
#define RECYCLER_STATS_INTERLOCKED_INC(r, f)
#define RECYCLER_STATS_DEC(r, f)
#define RECYCLER_STATS_ADD(r, f, v)
#define RECYCLER_STATS_INTERLOCKED_ADD(r, f, v)
#define RECYCLER_STATS_SUB(r, f, v)
#define RECYCLER_STATS_SET(r, f, v)
#endif
#ifdef RECYCLER_TRACE
// Snapshot of the inputs/heuristics in effect when a collection was triggered.
// Captured by Recycler::CaptureCollectionParam for GC trace output.
struct CollectionParam
{
    CollectionFlags flags;          // flags the collection was requested with
    bool finishOnly;                // presumably: only finishing a prior concurrent collection -- TODO confirm against CaptureCollectionParam
    bool repeat;                    // set when CaptureCollectionParam is called with repeat = true
    bool priorityBoostConcurrentSweepOverride;
    bool domCollect;                // set via Recycler::SetDomCollect
    int timeDiff;                   // elapsed time since some reference point -- presumably tick count; verify at capture site
    size_t uncollectedAllocBytes;
    size_t uncollectedPinnedObjects;
#if ENABLE_PARTIAL_GC
    // Partial-GC heuristics captured alongside the request
    size_t uncollectedNewPageCountPartialCollect;
    size_t uncollectedNewPageCount;
    size_t unusedPartialCollectFreeBytes;
    bool inPartialCollectMode;
#endif
};
#endif
- #include "RecyclerObjectGraphDumper.h"
#if ENABLE_CONCURRENT_GC
// Owns one background worker (a dedicated thread or a thread-service work
// item) that executes a single Recycler member function in parallel with the
// main GC thread. The main thread hands work over via the "ready" event and
// waits for completion on the "done" event.
class RecyclerParallelThread
{
    friend class ThreadContext;
public:
    // Signature of the Recycler member function the worker runs.
    typedef void (Recycler::* WorkFunc)();

    // Constructs in the "not started" state; no OS resources are created
    // until EnableConcurrent is called.
    RecyclerParallelThread(Recycler * recycler, WorkFunc workFunc) :
        // NOTE: initializers listed in member declaration order (workFunc is
        // declared before recycler below) so the list matches the order in
        // which members are actually initialized.
        workFunc(workFunc),
        recycler(recycler),
        concurrentWorkReadyEvent(NULL),
        concurrentWorkDoneEvent(NULL),
        concurrentThread(NULL),
        synchronizeOnStartup(false) // was left uninitialized; default to false until EnableConcurrent sets it
    {
    }

    ~RecyclerParallelThread()
    {
        // Shutdown() must have released all OS resources before destruction.
        Assert(concurrentThread == NULL);
        Assert(concurrentWorkReadyEvent == NULL);
        Assert(concurrentWorkDoneEvent == NULL);
    }

    bool StartConcurrent();                             // signal the worker that work is ready
    void WaitForConcurrent();                           // block until the worker reports completion
    void Shutdown();                                    // stop the worker and release events/thread
    bool EnableConcurrent(bool synchronizeOnStartup);   // create the events and worker

private:
    // Static entry point for thread creation
    static unsigned int CALLBACK StaticThreadProc(LPVOID lpParameter);
    // Static entry point for thread service usage
    static void CALLBACK StaticBackgroundWorkCallback(void * callbackData);

private:
    WorkFunc workFunc;
    Recycler * recycler;
    HANDLE concurrentWorkReadyEvent;// main thread uses this event to tell concurrent threads that the work is ready
    HANDLE concurrentWorkDoneEvent;// concurrent threads use this event to tell main thread that the work allocated is done
    HANDLE concurrentThread;
    bool synchronizeOnStartup;
};
#endif
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
// Scope guard that, when protectEnabled is set, protects recycler pages for
// the guard's lifetime; Unprotect() ends the window early. (Only the
// interface is visible here -- exact protection semantics live in the
// implementation; confirm in the .cpp.)
class AutoProtectPages
{
public:
    AutoProtectPages(Recycler* recycler, bool protectEnabled);
    ~AutoProtectPages();
    void Unprotect();
private:
    Recycler* recycler;     // non-owning
    bool isReadOnly;        // presumably tracks whether pages are currently read-only -- TODO confirm
};
#endif
- class Recycler
- {
- friend class RecyclerScanMemoryCallback;
- friend class RecyclerSweep;
- friend class RecyclerSweepManager;
- friend class MarkContext;
- friend class HeapBlock;
- friend class HeapBlockMap32;
- #if ENABLE_CONCURRENT_GC
- friend class RecyclerParallelThread;
- #endif
- #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
- friend class AutoProtectPages;
- #endif
- #ifdef ENABLE_BASIC_TELEMETRY
- friend class RecyclerTelemetryInfo;
- #endif
- template <typename T> friend class RecyclerWeakReference;
- template <typename T> friend class WeakReferenceHashTable;
- template <typename TBlockType>
- friend class SmallHeapBlockAllocator; // Needed for FindHeapBlock
- #if defined(RECYCLER_TRACE)
- friend class JavascriptThreadService;
- #endif
- #ifdef HEAP_ENUMERATION_VALIDATION
- friend class ActiveScriptProfilerHeapEnum;
- #endif
- friend class ScriptEngineBase; // This is for disabling GC for certain Host operations.
- #if !FLOATVAR
- friend class ::CodeGenNumberThreadAllocator;
- friend struct ::XProcNumberPageSegmentManager;
- #endif
- public:
- static const uint ConcurrentThreadStackSize = 300000;
- static const bool FakeZeroLengthArray = true;
- #ifdef RECYCLER_PAGE_HEAP
- // Keeping as constant in case we want to tweak the value here
- // Set to 0 so that the tool can do the filtering instead of the runtime
- #if DBG
- static const int s_numFramesToSkipForPageHeapAlloc = 10;
- static const int s_numFramesToSkipForPageHeapFree = 0;
- static const int s_numFramesToCaptureForPageHeap = 32;
- #else
- static const int s_numFramesToSkipForPageHeapAlloc = 0;
- static const int s_numFramesToSkipForPageHeapFree = 0;
- static const int s_numFramesToCaptureForPageHeap = 32;
- #endif
- #endif
- uint Cookie;
// RAII guard for a GC during which the external host stack is skipped
// (inferred from the name -- confirm with callers). The only visible effect
// is bookkeeping: it raises the DBG-only flag isExternalStackSkippingGC for
// the guard's lifetime so asserts can detect illegal re-entry.
class AutoEnterExternalStackSkippingGCMode
{
public:
    AutoEnterExternalStackSkippingGCMode(Recycler* recycler):
        _recycler(recycler)
    {
        // Setting this in a re-entrant mode is not allowed
        // (both the Assert and the flag compile away outside DBG builds)
        Assert(!recycler->isExternalStackSkippingGC);
#if DBG
        _recycler->isExternalStackSkippingGC = true;
#endif
    }
    ~AutoEnterExternalStackSkippingGCMode()
    {
#if DBG
        _recycler->isExternalStackSkippingGC = false;
#endif
    }
private:
    Recycler* _recycler;    // non-owning
};
- private:
- class AutoSwitchCollectionStates
- {
- public:
- AutoSwitchCollectionStates(Recycler* recycler, CollectionState entryState, CollectionState exitState):
- _recycler(recycler),
- _exitState(exitState)
- {
- _recycler->SetCollectionState(entryState);
- }
- ~AutoSwitchCollectionStates()
- {
- _recycler->SetCollectionState(_exitState);
- }
- private:
- Recycler* _recycler;
- CollectionState _exitState;
- };
- #if defined(ENABLE_JS_ETW)
- ETWEventGCActivationTrigger collectionStartReason;
- CollectionFlags collectionStartFlags;
- ETWEventGCActivationTrigger collectionFinishReason;
- #endif
- class CollectionStateChangedObserver : public ObservableValueObserver<CollectionState>
- {
- private:
- Recycler* recycler;
- public:
- CollectionStateChangedObserver(Recycler* recycler)
- {
- this->recycler = recycler;
- }
- virtual void ValueChanged(const CollectionState& newVal, const CollectionState& oldVal)
- {
- #ifdef ENABLE_BASIC_TELEMETRY
- if (oldVal == CollectionState::CollectionStateNotCollecting &&
- newVal != CollectionState::CollectionStateNotCollecting &&
- newVal != CollectionState::Collection_PreCollection &&
- newVal != CollectionState::CollectionStateExit)
- {
- this->recycler->GetRecyclerTelemetryInfo().StartPass(newVal);
- }
- else if (oldVal != CollectionState::CollectionStateNotCollecting &&
- oldVal != CollectionState::Collection_PreCollection &&
- oldVal != CollectionState::CollectionStateExit &&
- newVal == CollectionState::CollectionStateNotCollecting)
- {
- this->recycler->GetRecyclerTelemetryInfo().EndPass(oldVal);
- }
- #endif
- }
- };
- CollectionStateChangedObserver collectionStateChangedObserver;
- ObservableValue<CollectionState> collectionState;
// Single funnel for all GC state-machine transitions. collectionState is an
// ObservableValue, so this assignment is expected to notify registered
// observers (e.g. CollectionStateChangedObserver for telemetry) -- observer
// registration is not visible in this chunk; confirm in the constructor.
inline void SetCollectionState(CollectionState newState)
{
    this->collectionState = newState;
}
- JsUtil::ThreadService *threadService;
- #if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
- bool allowAllocationsDuringConcurrentSweepForCollection;
- #endif
- HeapBlockMap heapBlockMap;
#if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
// Reference count for a pinned (rooted) object. In leak-tracking builds it
// optionally carries the stack back-traces of the pinning call sites; the
// operator overloads make it usable interchangeably with the plain uint
// typedef in the #else branch below.
struct PinRecord
{
#ifdef STACK_BACK_TRACE
    PinRecord() : refCount(0), stackBackTraces(nullptr) {}
#else
    PinRecord() : refCount(0) {}
#endif
    // Assignment is only ever used to reset the count to zero; any recorded
    // stack traces must already have been released by then.
    PinRecord& operator=(uint newRefCount)
    {
#ifdef STACK_BACK_TRACE
        Assert(stackBackTraces == nullptr);
#endif
        Assert(newRefCount == 0); refCount = 0; return *this;
    }
    PinRecord& operator++() { ++refCount; return *this; }
    PinRecord& operator--() { --refCount; return *this; }
    operator uint() const { return refCount; }
#ifdef STACK_BACK_TRACE
    // Linked list of captured pinning call sites -- presumably one node per
    // RootAddRef; TODO confirm against the pinning implementation.
    StackBackTraceNode * stackBackTraces;
#endif
private:
    uint refCount;
};
#else
// Leak tracking disabled: a pin record is just the reference count.
typedef uint PinRecord;
#endif
- typedef SimpleHashTable<void *, PinRecord, HeapAllocator, DefaultComparer, true, PrimePolicy> PinnedObjectHashTable;
- PinnedObjectHashTable pinnedObjectMap;
- WeakReferenceHashTable<PrimePolicy> weakReferenceMap;
- uint weakReferenceCleanupId;
- #if ENABLE_WEAK_REFERENCE_REGIONS
- SList<RecyclerWeakReferenceRegion, HeapAllocator> weakReferenceRegionList;
- #endif
- void * transientPinnedObject;
- #if defined(CHECK_MEMORY_LEAK) || defined(LEAK_REPORT)
- #ifdef STACK_BACK_TRACE
- StackBackTrace * transientPinnedObjectStackBackTrace;
- #endif
- #endif
// Arena allocator registered as a guest of the recycler; guest arenas are
// scanned for GC roots (see guestArenaList below). pendingDelete marks the
// arena for deferred deletion -- presumably processed alongside the
// hasPendingDeleteGuestArena flag; TODO confirm in DeleteGuestArena.
struct GuestArenaAllocator : public ArenaAllocator
{
    GuestArenaAllocator(__in_z char16 const* name, PageAllocator * pageAllocator, void (*outOfMemoryFunc)())
        : ArenaAllocator(name, pageAllocator, outOfMemoryFunc), pendingDelete(false)
    {
    }
    bool pendingDelete;
};
- DListBase<GuestArenaAllocator> guestArenaList;
- DListBase<ArenaData*> externalGuestArenaList; // guest arenas are scanned for roots
- #ifdef RECYCLER_PAGE_HEAP
- bool isPageHeapEnabled;
- bool capturePageHeapAllocStack;
- bool capturePageHeapFreeStack;
- inline bool IsPageHeapEnabled() const { return isPageHeapEnabled; }
- inline bool ShouldCapturePageHeapAllocStack() const { return capturePageHeapAllocStack; }
- void VerifyPageHeapFillAfterAlloc(char* memBlock, size_t size, ObjectInfoBits attributes);
- #else
- inline const bool IsPageHeapEnabled() const { return false; }
- inline bool ShouldCapturePageHeapAllocStack() const { return false; }
- #endif
- #ifdef RECYCLER_MARK_TRACK
- MarkMap* markMap;
- CriticalSection markMapCriticalSection;
- void PrintMarkMap();
- void ClearMarkMap();
- #endif
- // Number of pages to reserve for the primary mark stack
- // This is the minimum number of pages to guarantee that a single heap block
- // can be rescanned in the worst possible case where every object in a heap block
- // in the smallest bucket needs to be rescanned
- // These many pages being reserved guarantees that in OOM Rescan, we can make progress
- // on every rescan iteration
- // We add one because there is a small amount of the page reserved for page pool metadata
- // so we need to allocate an additional page to be sure
- // Currently, this works out to 2 pages on 32-bit and 5 pages on 64-bit
- // NOTE: We have reduced the PageCount for small blocks to 1. This brought down the number of pages reserved for x64 from 5 to 2. This has not shown
- // any adverse impact.
- static const int PrimaryMarkStackReservedPageCount =
- ((SmallAllocationBlockAttributes::PageCount * MarkContext::MarkCandidateSize) / SmallAllocationBlockAttributes::MinObjectSize) + 1;
- MarkContext markContext;
- // Contexts for parallel marking.
- // We support up to 4 way parallelism, main context + 3 additional parallel contexts.
- MarkContext parallelMarkContext1;
- MarkContext parallelMarkContext2;
- MarkContext parallelMarkContext3;
- // Page pools for above markContexts
- PagePool markPagePool;
- PagePool parallelMarkPagePool1;
- PagePool parallelMarkPagePool2;
- PagePool parallelMarkPagePool3;
- bool IsMarkStackEmpty();
- bool HasPendingMarkObjects() const { return markContext.HasPendingMarkObjects() || parallelMarkContext1.HasPendingMarkObjects() || parallelMarkContext2.HasPendingMarkObjects() || parallelMarkContext3.HasPendingMarkObjects(); }
- bool HasPendingTrackObjects() const { return markContext.HasPendingTrackObjects() || parallelMarkContext1.HasPendingTrackObjects() || parallelMarkContext2.HasPendingTrackObjects() || parallelMarkContext3.HasPendingTrackObjects(); }
- RecyclerCollectionWrapper * collectionWrapper;
- HANDLE mainThreadHandle;
- void * stackBase;
- class SavedRegisterState
- {
- public:
- #if _M_IX86
- static const int NumRegistersToSave = 8;
- #elif _M_ARM
- static const int NumRegistersToSave = 13;
- #elif _M_ARM64
- static const int NumRegistersToSave = 27;
- #elif _M_AMD64
- static const int NumRegistersToSave = 16;
- #endif
- SavedRegisterState()
- {
- memset(registers, 0, sizeof(void*) * NumRegistersToSave);
- }
- void** GetRegisters()
- {
- return registers;
- }
- void* GetStackTop()
- {
- // By convention, our register-saving routine will always
- // save the stack pointer as the first item in the array
- return registers[0];
- }
- private:
- void* registers[NumRegistersToSave];
- };
- SavedRegisterState savedThreadContext;
- #if __has_feature(address_sanitizer)
- void* savedAsanFakeStack;
- #define SAVE_THREAD_ASAN_FAKE_STACK() \
- this->savedAsanFakeStack = __asan_get_current_fake_stack()
- #else
- #define SAVE_THREAD_ASAN_FAKE_STACK()
- #endif
- bool inDispose;
- #if DBG || defined RECYCLER_TRACE
- uint collectionCount;
- bool inResolveExternalWeakReferences;
- #endif
- bool allowDispose;
- bool inDisposeWrapper;
- bool needOOMRescan;
- bool hasDisposableObject;
- bool hasNativeGCHost;
- DWORD tickCountNextDispose;
- bool inExhaustiveCollection;
- bool hasExhaustiveCandidate;
- bool inCacheCleanupCollection;
- bool inDecommitNowCollection;
- bool isScriptActive;
- bool isInScript;
- bool isShuttingDown;
- bool scanPinnedObjectMap;
- bool hasScannedInitialImplicitRoots;
- bool hasPendingUnpinnedObject;
- bool hasPendingDeleteGuestArena;
- bool inEndMarkOnLowMemory;
- bool decommitOnFinish;
- bool enableScanInteriorPointers;
- bool enableScanImplicitRoots;
- bool disableCollectOnAllocationHeuristics;
- #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
- bool disableCollection;
- #endif
- #if ENABLE_PARTIAL_GC
- bool enablePartialCollect;
- bool inPartialCollectMode;
- #if ENABLE_CONCURRENT_GC
- bool hasBackgroundFinishPartial;
- bool partialConcurrentNextCollection;
- #endif
- #endif
- #ifdef RECYCLER_STRESS
- bool forcePartialScanStack;
- bool recyclerStress;
- #if ENABLE_CONCURRENT_GC
- bool recyclerBackgroundStress;
- bool recyclerConcurrentStress;
- bool recyclerConcurrentRepeatStress;
- #endif
- #if ENABLE_PARTIAL_GC
- bool recyclerPartialStress;
- #endif
- #endif
- #if DBG
- bool isExternalStackSkippingGC;
- #endif
- bool skipStack;
- #if ENABLE_CONCURRENT_GC
- #if DBG
- bool isConcurrentGCOnIdle;
- bool isFinishGCOnIdle;
- #endif
- bool queueTrackedObject;
- bool hasPendingConcurrentFindRoot;
- bool priorityBoost;
- bool disableConcurrent;
- bool enableConcurrentMark;
- bool enableParallelMark;
- bool enableConcurrentSweep;
- uint maxParallelism; // Max # of total threads to run in parallel
- byte backgroundRescanCount; // for ETW events and stats
- byte backgroundFinishMarkCount;
- size_t backgroundRescanRootBytes;
- HANDLE concurrentWorkReadyEvent; // main thread uses this event to tell concurrent threads that the work is ready
- HANDLE concurrentWorkDoneEvent; // concurrent threads use this event to tell main thread that the work allocated is done
- HANDLE concurrentThread;
- template <uint parallelId>
- void ParallelWorkFunc();
- RecyclerParallelThread parallelThread1;
- RecyclerParallelThread parallelThread2;
- #if DBG
- // Variable indicating if the concurrent thread has exited or not
- // If the concurrent thread hasn't started yet, this is set to true
- // Once the concurrent thread starts, it sets this to false,
- // and when the concurrent thread exits, it sets this to true.
- bool concurrentThreadExited;
- bool disableConcurrentThreadExitedCheck;
- bool isProcessingTrackedObjects;
- #endif
- uint tickCountStartConcurrent;
- bool isAborting;
- #endif
- #if DBG
- bool hasIncompleteDoCollect;
- // This is set to true when we begin a Rescan, and set to false when either:
- // (1) We finish the final in-thread Rescan and are about to Mark
- // (2) We do a conditional ResetWriteWatch and are about to Mark
- // When this flag is true, we should not be modifying existing mark-related state,
- // including markBits and rescanState.
- bool isProcessingRescan;
- #endif
- Js::ConfigFlagsTable& recyclerFlagsTable;
- RecyclerSweepManager recyclerSweepManagerInstance;
- RecyclerSweepManager * recyclerSweepManager;
- static const uint tickDiffToNextCollect = 300;
- #ifdef IDLE_DECOMMIT_ENABLED
- HANDLE concurrentIdleDecommitEvent;
- LONG needIdleDecommitSignal;
- #endif
- #if ENABLE_PARTIAL_GC
- SListBase<void *> clientTrackedObjectList;
- ArenaAllocator clientTrackedObjectAllocator;
- size_t partialUncollectedAllocBytes;
- // Dynamic Heuristics for partial GC
- size_t uncollectedNewPageCountPartialCollect;
- #endif
- uint tickCountNextCollection;
- uint tickCountNextFinishCollection;
- void (*outOfMemoryFunc)();
- #ifdef RECYCLER_TEST_SUPPORT
- BOOL (*checkFn)(char* addr, size_t size);
- #endif
- ExternalRootMarker externalRootMarker;
- void * externalRootMarkerContext;
- #ifdef PROFILE_EXEC
- Js::Profiler * profiler;
- Js::Profiler * backgroundProfiler;
- PageAllocator backgroundProfilerPageAllocator;
- DListBase<ArenaAllocator> backgroundProfilerArena;
- #endif
- // destruct autoHeap after backgroundProfilerPageAllocator;
- HeapInfoManager autoHeap;
- template <ObjectInfoBits attributes>
- HeapInfo * GetHeapInfoForAllocation()
- {
- return this->GetHeapInfo<attributes>();
- }
- template <ObjectInfoBits attributes>
- HeapInfo * GetHeapInfo()
- {
- return this->autoHeap.GetDefaultHeap();
- }
- HeapInfo * GetHeapInfo()
- {
- return this->autoHeap.GetDefaultHeap();
- }
- #ifdef PROFILE_MEM
- RecyclerMemoryData * memoryData;
- #endif
- ThreadContextId mainThreadId;
- #if DBG
- uint heapBlockCount;
- bool disableThreadAccessCheck;
- #endif
- #if DBG || defined(RECYCLER_STATS)
- bool isForceSweeping;
- #endif
- #ifdef NTBUILD
- RecyclerWatsonTelemetryBlock localTelemetryBlock;
- RecyclerWatsonTelemetryBlock * telemetryBlock;
- #endif
- #ifdef ENABLE_BASIC_TELEMETRY
- private:
- RecyclerTelemetryInfo telemetryStats;
- GUID recyclerID;
- public:
- GUID& GetRecyclerID() { return this->recyclerID; }
- #endif
-
- public:
- bool GetIsInScript() { return this->isInScript; }
- bool GetIsScriptActive() { return this->isScriptActive; }
- private:
- #ifdef RECYCLER_STATS
- RecyclerCollectionStats collectionStats;
- void PrintHeapBlockStats(char16 const * name, HeapBlock::HeapBlockType type);
- void PrintHeapBlockMemoryStats(char16 const * name, HeapBlock::HeapBlockType type);
- void PrintCollectStats();
- void PrintHeuristicCollectionStats();
- void PrintMarkCollectionStats();
- void PrintBackgroundCollectionStats();
- void PrintMemoryStats();
- void PrintBackgroundCollectionStat(RecyclerCollectionStats::MarkData const& markData);
- #endif
- #ifdef RECYCLER_TRACE
- CollectionParam collectionParam;
- void PrintBlockStatus(HeapBucket * heapBucket, HeapBlock * heapBlock, char16 const * name);
- #endif
- #ifdef RECYCLER_MEMORY_VERIFY
- uint verifyPad;
- bool verifyEnabled;
- #endif
- #ifdef RECYCLER_DUMP_OBJECT_GRAPH
- friend class RecyclerObjectGraphDumper;
- RecyclerObjectGraphDumper * objectGraphDumper;
- public:
- bool dumpObjectOnceOnCollect;
- #endif
- public:
- Recycler(AllocationPolicyManager * policyManager, IdleDecommitPageAllocator * pageAllocator, void(*outOfMemoryFunc)(), Js::ConfigFlagsTable& flags, RecyclerTelemetryHostInterface* hostInterface);
- ~Recycler();
- void Initialize(const bool forceInThread, JsUtil::ThreadService *threadService, const bool deferThreadStartup = false
- #ifdef RECYCLER_PAGE_HEAP
- , PageHeapMode pageheapmode = PageHeapMode::PageHeapModeOff
- , bool captureAllocCallStack = false
- , bool captureFreeCallStack = false
- #endif
- );
- Js::ConfigFlagsTable& GetRecyclerFlagsTable() const { return this->recyclerFlagsTable; }
- void SetMemProtectMode();
- bool IsMemProtectMode();
- size_t GetUsedBytes();
- void LogMemProtectHeapSize(bool fromGC);
- char* Realloc(void* buffer, DECLSPEC_GUARD_OVERFLOW size_t existingBytes, DECLSPEC_GUARD_OVERFLOW size_t requestedBytes, bool truncate = true);
- #ifdef NTBUILD
- void SetTelemetryBlock(RecyclerWatsonTelemetryBlock * telemetryBlock) { this->telemetryBlock = telemetryBlock; }
- #endif
- uint GetPinnedObjectCount() const { return this->pinnedObjectMap.Count(); }
- void Prime();
- void* GetOwnerContext() { return (void*) this->collectionWrapper; }
- bool NeedOOMRescan() const;
- void SetNeedOOMRescan();
- void ClearNeedOOMRescan();
- BOOL RequestConcurrentWrapperCallback();
- BOOL CollectionInProgress() const;
- BOOL IsExiting() const;
- BOOL IsSweeping() const;
- #ifdef RECYCLER_PAGE_HEAP
- inline bool ShouldCapturePageHeapFreeStack() const { return capturePageHeapFreeStack; }
- #else
- inline bool ShouldCapturePageHeapFreeStack() const { return false; }
- #endif
- void SetIsThreadBound();
- void SetIsScriptActive(bool isScriptActive);
- void SetIsInScript(bool isInScript);
- bool HasNativeGCHost() const;
- void SetHasNativeGCHost();
- bool ShouldIdleCollectOnExit();
- void ScheduleNextCollection();
- BOOL IsShuttingDown() const { return this->isShuttingDown; }
- #if ENABLE_CONCURRENT_GC
- #if DBG
- BOOL IsConcurrentMarkEnabled() const { return enableConcurrentMark; }
- BOOL IsConcurrentSweepEnabled() const { return enableConcurrentSweep; }
- #endif
- template <CollectionFlags flags>
- BOOL FinishConcurrent();
- void ShutdownThread();
- bool EnableConcurrent(JsUtil::ThreadService *threadService, bool startAllThreads);
- void DisableConcurrent();
- void StartQueueTrackedObject();
- bool DoQueueTrackedObject() const;
- void PrepareSweep();
- #endif
- template <CollectionFlags flags>
- void SetupPostCollectionFlags();
- void EnsureNotCollecting();
- #if ENABLE_CONCURRENT_GC
- bool QueueTrackedObject(FinalizableObject * trackableObject);
- #endif
- // FindRoots
- void TryMarkNonInterior(void* candidate, void* parentReference = nullptr);
- void TryMarkInterior(void *candidate, void* parentReference = nullptr);
- bool InCacheCleanupCollection() { return inCacheCleanupCollection; }
- void ClearCacheCleanupCollection() { Assert(inCacheCleanupCollection); inCacheCleanupCollection = false; }
- // Finalizer support
- void SetExternalRootMarker(ExternalRootMarker fn, void * context);
- ArenaAllocator * CreateGuestArena(char16 const * name, void (*outOfMemoryFunc)());
- void DeleteGuestArena(ArenaAllocator * arenaAllocator);
// Registers an externally owned arena whose memory will be scanned for GC
// roots; returns the address of the new list slot so the caller can later
// unregister by element.
ArenaData ** RegisterExternalGuestArena(ArenaData* guestArena)
{
    return externalGuestArenaList.PrependNode(&NoThrowHeapAllocator::Instance, guestArena);
}
// Unregister by value: removes the matching arena from the root-scan list.
void UnregisterExternalGuestArena(ArenaData* guestArena)
{
    externalGuestArenaList.Remove(&NoThrowHeapAllocator::Instance, guestArena);
    // Any time a root is removed during a GC, it indicates that an exhaustive
    // collection is likely going to have work to do so trigger an exhaustive
    // candidate GC to indicate this fact
    this->CollectNow<CollectExhaustiveCandidate>();
}
// Unregister by element: removes the exact slot previously returned by
// RegisterExternalGuestArena.
void UnregisterExternalGuestArena(ArenaData** guestArena)
{
    externalGuestArenaList.RemoveElement(&NoThrowHeapAllocator::Instance, guestArena);
    // Any time a root is removed during a GC, it indicates that an exhaustive
    // collection is likely going to have work to do so trigger an exhaustive
    // candidate GC to indicate this fact
    this->CollectNow<CollectExhaustiveCandidate>();
}
- #ifdef RECYCLER_TEST_SUPPORT
- void SetCheckFn(BOOL(*checkFn)(char* addr, size_t size));
- #endif
- void SetCollectionWrapper(RecyclerCollectionWrapper * wrapper);
- static size_t GetAlignedSize(size_t size) { return HeapInfo::GetAlignedSize(size); }
- HeapInfo* GetDefaultHeapInfo() { return autoHeap.GetDefaultHeap(); }
- template <CollectionFlags flags>
- BOOL CollectNow();
- #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
- void DisplayMemStats();
- #endif
- void AddExternalMemoryUsage(size_t size);
- bool NeedDispose() { return this->hasDisposableObject; }
- template <CollectionFlags flags>
- bool FinishDisposeObjectsNow();
- bool RequestExternalMemoryAllocation(size_t size);
- void ReportExternalMemoryFailure(size_t size);
- void ReportExternalMemoryFree(size_t size);
- // ExternalAllocFunc returns true when allocation succeeds
- template <typename ExternalAllocFunc>
- bool DoExternalAllocation(size_t size, ExternalAllocFunc externalAllocFunc);
- #ifdef TRACE_OBJECT_LIFETIME
- #define DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes) \
- inline char* AllocFunc##Trace(size_t size) \
- { \
- return AllocWithAttributesFunc<(ObjectInfoBits)(attributes | TraceBit), /* nothrow = */ false>(size); \
- }
- #else
- #define DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributeFunc, attributes)
- #endif
- #define DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocWithAttributesFunc, attributes) \
- inline char * AllocFunc(DECLSPEC_GUARD_OVERFLOW size_t size) \
- { \
- return AllocWithAttributesFunc<attributes, /* nothrow = */ false>(size); \
- } \
- __forceinline char * AllocFunc##Inlined(DECLSPEC_GUARD_OVERFLOW size_t size) \
- { \
- return AllocWithAttributesFunc##Inlined<attributes, /* nothrow = */ false>(size); \
- } \
- DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes);
- #define DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocWithAttributesFunc, attributes) \
- inline char * NoThrow##AllocFunc(DECLSPEC_GUARD_OVERFLOW size_t size) \
- { \
- return AllocWithAttributesFunc<attributes, /* nothrow = */ true>(size); \
- } \
- inline char * NoThrow##AllocFunc##Inlined(DECLSPEC_GUARD_OVERFLOW size_t size) \
- { \
- return AllocWithAttributesFunc##Inlined<attributes, /* nothrow = */ true>(size); \
- } \
- DEFINE_RECYCLER_ALLOC_TRACE(AllocFunc, AllocWithAttributesFunc, attributes);
- #define DEFINE_RECYCLER_ALLOC(AllocFunc, attributes) DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocWithAttributes, attributes)
- #define DEFINE_RECYCLER_ALLOC_ZERO(AllocFunc, attributes) DEFINE_RECYCLER_ALLOC_BASE(AllocFunc, AllocZeroWithAttributes, attributes)
- #define DEFINE_RECYCLER_NOTHROW_ALLOC(AllocFunc, attributes) DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocWithAttributes, attributes)
- #define DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocFunc, attributes) DEFINE_RECYCLER_NOTHROW_ALLOC_BASE(AllocFunc, AllocZeroWithAttributes, attributes)
- #if GLOBAL_ENABLE_WRITE_BARRIER
- DEFINE_RECYCLER_ALLOC(Alloc, WithBarrierBit);
- DEFINE_RECYCLER_ALLOC_ZERO(AllocZero, WithBarrierBit);
- DEFINE_RECYCLER_ALLOC(AllocFinalized, FinalizableWithBarrierObjectBits);
- DEFINE_RECYCLER_ALLOC(AllocTracked, ClientTrackableObjectWithBarrierBits);
- DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTracked, ClientTrackableObjectWithBarrierBits);
- #else
- DEFINE_RECYCLER_ALLOC(Alloc, NoBit);
- DEFINE_RECYCLER_ALLOC_ZERO(AllocZero, NoBit);
- DEFINE_RECYCLER_ALLOC(AllocFinalized, FinalizableObjectBits);
- DEFINE_RECYCLER_ALLOC(AllocTracked, ClientTrackableObjectBits);
- DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTracked, ClientFinalizableObjectBits);
- #endif
- #ifdef RECYCLER_WRITE_BARRIER_ALLOC
- DEFINE_RECYCLER_ALLOC(AllocWithBarrier, WithBarrierBit);
- DEFINE_RECYCLER_ALLOC_ZERO(AllocZeroWithBarrier, WithBarrierBit);
- DEFINE_RECYCLER_ALLOC(AllocFinalizedWithBarrier, FinalizableWithBarrierObjectBits);
- DEFINE_RECYCLER_ALLOC(AllocTrackedWithBarrier, ClientTrackableObjectWithBarrierBits);
- DEFINE_RECYCLER_ALLOC(AllocFinalizedClientTrackedWithBarrier, ClientFinalizableObjectWithBarrierBits);
- #endif
- DEFINE_RECYCLER_ALLOC(AllocLeaf, LeafBit);
- DEFINE_RECYCLER_ALLOC(AllocFinalizedLeaf, FinalizableLeafBits);
- DEFINE_RECYCLER_ALLOC(AllocTrackedLeaf, ClientTrackableLeafBits);
- DEFINE_RECYCLER_ALLOC_ZERO(AllocLeafZero, LeafBit);
- DEFINE_RECYCLER_ALLOC_ZERO(AllocZeroTrackedLeaf, ClientTrackableLeafBits);
- DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocImplicitRootLeaf, ImplicitRootLeafBits);
- DEFINE_RECYCLER_NOTHROW_ALLOC_ZERO(AllocImplicitRoot, ImplicitRootBit);
// Throwing allocation tagged with enum-class attribute bits; the attributes
// must include at least one bit from EnumClassMask.
template <ObjectInfoBits enumClass>
char * AllocEnumClass(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    Assert((enumClass & EnumClassMask) != 0);
    //Assert((enumClass & ~EnumClassMask & ~WithBarrierBit) == 0);
    return AllocWithAttributes<(ObjectInfoBits)(enumClass), /* nothrow = */ false>(size);
}
// Throwing allocation with arbitrary object-info bits.
template <ObjectInfoBits infoBits>
char * AllocWithInfoBits(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    return AllocWithAttributes<infoBits, /* nothrow = */ false>(size);
}
// Non-throwing variant for visited-host allocations (nothrow = true, so
// failure presumably yields null rather than raising OOM -- confirm in
// AllocWithAttributes).
template <ObjectInfoBits infoBits>
char * AllocVisitedHost(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    return AllocWithAttributes<infoBits, /* nothrow = */ true>(size);
}
- template<typename T>
- RecyclerWeakReference<T>* CreateWeakReferenceHandle(T* pStrongReference);
- #if ENABLE_WEAK_REFERENCE_REGIONS
- template<typename T>
- RecyclerWeakReferenceRegionItem<T>* CreateWeakReferenceRegion(size_t count);
- #endif
- uint GetWeakReferenceCleanupId() const { return weakReferenceCleanupId; }
- template<typename T>
- bool FindOrCreateWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **ppWeakRef);
- template<typename T>
- bool TryGetWeakReferenceHandle(T* pStrongReference, RecyclerWeakReference<T> **weakReference);
// Address of the small-heap-block allocator for sizeCat under the given
// attributes -- presumably consumed by JIT/native allocation fast paths
// (cf. GetNormalHeapBlockAllocatorInfoForNativeAllocation below); confirm.
template <ObjectInfoBits attributes>
char* GetAddressOfAllocator(size_t sizeCat)
{
    Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
    return (char*)this->GetHeapInfo<attributes>()->template GetBucket<(ObjectInfoBits)(attributes & GetBlockTypeBitMask)>(sizeCat).GetAllocator();
}
// Byte offset of the allocator's end-address field within the allocator.
template <ObjectInfoBits attributes>
uint32 GetEndAddressOffset(size_t sizeCat)
{
    Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
    return this->GetHeapInfo<attributes>()->template GetBucket<(ObjectInfoBits)(attributes & GetBlockTypeBitMask)>(sizeCat).GetAllocator()->GetEndAddressOffset();
}
// Byte offset of the allocator's free-object-list field within the allocator.
template <ObjectInfoBits attributes>
uint32 GetFreeObjectListOffset(size_t sizeCat)
{
    Assert(HeapInfo::IsAlignedSmallObjectSize(sizeCat));
    return this->GetHeapInfo<attributes>()->template GetBucket<(ObjectInfoBits)(attributes & GetBlockTypeBitMask)>(sizeCat).GetAllocator()->GetFreeObjectListOffset();
}
- void GetNormalHeapBlockAllocatorInfoForNativeAllocation(size_t sizeCat, void*& allocatorAddress, uint32& endAddressOffset, uint32& freeListOffset, bool allowBumpAllocation, bool isOOPJIT);
- static void GetNormalHeapBlockAllocatorInfoForNativeAllocation(void* recyclerAddr, size_t sizeCat, void*& allocatorAddress, uint32& endAddressOffset, uint32& freeListOffset, bool allowBumpAllocation, bool isOOPJIT);
- bool AllowNativeCodeBumpAllocation();
- static void TrackNativeAllocatedMemoryBlock(Recycler * recycler, void * memBlock, size_t sizeCat);
// Recycler memory is garbage collected, not freed: this overload exists only
// to trap accidental calls in debug builds. Use ExplicitFreeLeaf /
// ExplicitFreeNonLeaf for the supported explicit-free paths.
void Free(void* buffer, size_t size)
{
    Assert(false);
}
- bool ExplicitFreeLeaf(void* buffer, size_t size);
- bool ExplicitFreeNonLeaf(void* buffer, size_t size);
- template <ObjectInfoBits attributes>
- bool ExplicitFreeInternalWrapper(void* buffer, size_t allocSize);
- template <ObjectInfoBits attributes, typename TBlockAttributes>
- bool ExplicitFreeInternal(void* buffer, size_t size, size_t sizeCat);
- size_t GetAllocSize(size_t size);
- template <typename TBlockAttributes>
- void SetExplicitFreeBitOnSmallBlock(HeapBlock* heapBlock, size_t sizeCat, void* buffer, ObjectInfoBits attributes);
// Allocates leaf (LeafBit) memory from an explicitly supplied HeapInfo
// rather than the default heap; nothrow = false, so OOM follows the throwing
// RealAlloc path.
char* HeapAllocR(HeapInfo* eHeap, DECLSPEC_GUARD_OVERFLOW size_t size)
{
    return RealAlloc<LeafBit, /* nothrow = */ false>(eHeap, size);
}
- void HeapFree(HeapInfo* eHeap,void* candidate);
- void EnumerateObjects(ObjectInfoBits infoBits, void (*CallBackFunction)(void * address, size_t size));
- void RootAddRef(void* obj, uint *count = nullptr);
- void RootRelease(void* obj, uint *count = nullptr);
- template <ObjectInfoBits attributes, bool nothrow>
- inline char* RealAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size);
- template <ObjectInfoBits attributes, bool isSmallAlloc, bool nothrow>
- inline char* RealAllocFromBucket(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size);
- void EnterIdleDecommit();
- void LeaveIdleDecommit();
- void DisposeObjects();
- BOOL IsValidObject(void* candidate, size_t minimumSize = 0);
- #if DBG
- void SetDisableThreadAccessCheck();
- void SetDisableConcurrentThreadExitedCheck();
- void CheckAllocExternalMark() const;
- BOOL IsFreeObject(void * candidate);
- BOOL IsReentrantState() const;
- #endif
- #if DBG_DUMP
- void PrintMarkStack();
- #endif
#ifdef PROFILE_EXEC
// Execution profiler hooks (main-thread and background-thread profilers).
Js::Profiler * GetProfiler() const { return this->profiler; }
ArenaAllocator * AddBackgroundProfilerArena();
void ReleaseBackgroundProfilerArena(ArenaAllocator * arena);
void SetProfiler(Js::Profiler * profiler, Js::Profiler * backgroundProfiler);
#endif
#ifdef RECYCLER_MEMORY_VERIFY
// Memory verification: allocations are padded and filled with VerifyMemFill
// so corruption can be detected when the pattern is re-checked.
BOOL VerifyEnabled() const { return verifyEnabled; }
uint GetVerifyPad() const { return verifyPad; }
void Verify(Js::Phase phase);
static void VerifyCheck(BOOL cond, char16 const * msg, void * address, void * corruptedAddress);
static void VerifyCheckFill(void * address, size_t size);
void FillCheckPad(void * address, size_t size, size_t alignedAllocSize, bool objectAlreadyInitialized);
void FillCheckPad(void * address, size_t size, size_t alignedAllocSize)
{
    // Overload default: object is treated as not yet initialized.
    FillCheckPad(address, size, alignedAllocSize, false);
}
static void FillPadNoCheck(void * address, size_t size, size_t alignedAllocSize, bool objectAlreadyInitialized);
void VerifyCheckPad(void * address, size_t size);
void VerifyCheckPadExplicitFreeList(void * address, size_t size);
// Fill byte used for the verification pad pattern.
static const byte VerifyMemFill = 0xCA;
#endif
#ifdef RECYCLER_ZERO_MEM_CHECK
void VerifyZeroFill(void * address, size_t size);
void VerifyLargeAllocZeroFill(void * address, size_t size, ObjectInfoBits attributes);
#endif
#ifdef RECYCLER_DUMP_OBJECT_GRAPH
bool DumpObjectGraph(RecyclerObjectGraphDumper::Param * param = nullptr);
void DumpObjectDescription(void *object);
#endif
#ifdef LEAK_REPORT
void ReportLeaks();
void ReportLeaksOnProcessDetach();
#endif
#ifdef CHECK_MEMORY_LEAK
void CheckLeaks(char16 const * header);
void CheckLeaksOnProcessDetach(char16 const * header);
#endif
#ifdef RECYCLER_TRACE
void SetDomCollect(bool isDomCollect) { collectionParam.domCollect = isDomCollect; }
void CaptureCollectionParam(CollectionFlags flags, bool repeat = false);
#endif
private:
// RecyclerRootPtr has implicit conversion to pointers, prevent it to be
// passed to RootAddRef/RootRelease directly
template <typename T>
void RootAddRef(RecyclerRootPtr<T>& ptr, uint *count = nullptr);
template <typename T>
void RootRelease(RecyclerRootPtr<T>& ptr, uint *count = nullptr);

// Collection entry points, parameterized on the requested CollectionFlags.
template <CollectionFlags flags>
BOOL CollectInternal();
template <CollectionFlags flags>
BOOL Collect();
template <CollectionFlags flags>
BOOL CollectWithHeuristic();
template <CollectionFlags flags>
BOOL CollectWithExhaustiveCandidate();
template <CollectionFlags flags>
BOOL GetPartialFlag();

bool NeedExhaustiveRepeatCollect() const;

#if DBG
bool ExpectStackSkip() const;
#endif

// Sentinel meaning "scanned-root byte count is unavailable".
static size_t const InvalidScanRootBytes = (size_t)-1;

// Small Allocator
template <typename SmallHeapBlockAllocatorType>
void AddSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
template <typename SmallHeapBlockAllocatorType>
void RemoveSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat);
template <ObjectInfoBits attributes, typename SmallHeapBlockAllocatorType>
char * SmallAllocatorAlloc(SmallHeapBlockAllocatorType * allocator, size_t sizeCat, size_t size);

// Allocation
template <ObjectInfoBits attributes, bool nothrow>
inline char * AllocWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size);
template <ObjectInfoBits attributes, bool nothrow>
char * AllocWithAttributes(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    return AllocWithAttributesInlined<attributes, nothrow>(size);
}
template <ObjectInfoBits attributes, bool nothrow>
inline char* AllocZeroWithAttributesInlined(DECLSPEC_GUARD_OVERFLOW size_t size);
template <ObjectInfoBits attributes, bool nothrow>
char* AllocZeroWithAttributes(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    return AllocZeroWithAttributesInlined<attributes, nothrow>(size);
}
char* AllocWeakReferenceEntry(DECLSPEC_GUARD_OVERFLOW size_t size)
{
    return AllocWithAttributes<WeakReferenceEntryBits, /* nothrow = */ false>(size);
}
// True when disposable objects exist and the dispose deadline has passed.
// NOTE(review): GetTickCount wraps after ~49.7 days; the '>' comparison is
// not wrap-safe — confirm this is acceptable for the dispose heuristic.
bool NeedDisposeTimed()
{
    DWORD ticks = ::GetTickCount();
    return (ticks > tickCountNextDispose && this->hasDisposableObject);
}
char* TryLargeAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size, ObjectInfoBits attributes, bool nothrow);
template <bool nothrow>
char* LargeAlloc(HeapInfo* heap, DECLSPEC_GUARD_OVERFLOW size_t size, ObjectInfoBits attributes);
void OutOfMemory();

// Collection
BOOL DoCollect(CollectionFlags flags);
BOOL DoCollectWrapped(CollectionFlags flags);
BOOL CollectOnAllocatorThread();
#if DBG
void ResetThreadId();
#endif

// Root scanning: pinned objects, thread stack, arenas, and implicit roots.
template <bool background>
size_t ScanPinnedObjects();
size_t ScanStack();
size_t ScanArena(ArenaData * alloc, bool background);
void ScanImplicitRoots();
void ScanInitialImplicitRoots();
void ScanNewImplicitRoots();
size_t FindRoots();
size_t TryMarkArenaMemoryBlockList(ArenaMemoryBlock * memoryBlocks);
size_t TryMarkBigBlockList(BigBlock * memoryBlocks);
#if ENABLE_CONCURRENT_GC
#if FALSE // REVIEW: remove this code since not using
size_t TryMarkBigBlockListWithWriteWatch(BigBlock * memoryBlocks);
#endif
#endif

// Mark
void ResetMarks(ResetMarkFlags flags);
void Mark();
bool EndMark();
bool EndMarkCheckOOMRescan();
void EndMarkOnLowMemory();
#if ENABLE_CONCURRENT_GC
void DoParallelMark();
void DoBackgroundParallelMark();
#endif
size_t RootMark(CollectionState markState);
void ProcessMark(bool background);
void ProcessParallelMark(bool background, MarkContext * markContext);
template <bool parallel, bool interior>
void ProcessMarkContext(MarkContext * markContext);

public:
bool IsObjectMarked(void* candidate) { return this->heapBlockMap.IsMarked(candidate); }
#ifdef RECYCLER_STRESS
bool StressCollectNow();
#endif
private:
HeapBlock* FindHeapBlock(void * candidate);

// One-entry cache of the most recent FindHeapBlock lookup.
struct FindBlockCache
{
    FindBlockCache():
        heapBlock(nullptr),
        candidate(nullptr)
    {
    }
    HeapBlock* heapBlock;
    void* candidate;
} blockCache;

inline void ScanObjectInline(void ** obj, size_t byteCount);
inline void ScanObjectInlineInterior(void ** obj, size_t byteCount);
template <bool doSpecialMark, bool forceInterior = false>
inline void ScanMemoryInline(void ** obj, size_t byteCount
    ADDRESS_SANITIZER_APPEND(RecyclerScanMemoryType scanMemoryType = RecyclerScanMemoryType::General));
// Zero-byte ranges are skipped before dispatching to ScanMemoryInline.
template <bool doSpecialMark>
void ScanMemory(void ** obj, size_t byteCount) { if (byteCount != 0) { ScanMemoryInline<doSpecialMark>(obj, byteCount); } }
bool AddMark(void * candidate, size_t byteCount) throw();
#ifdef RECYCLER_VISITED_HOST
bool AddPreciselyTracedMark(IRecyclerVisitedObject * candidate) throw();
#endif
// Sweep
#if ENABLE_PARTIAL_GC
bool Sweep(size_t rescanRootBytes = (size_t)-1, bool concurrent = false, bool adjustPartialHeuristics = false);
#else
bool Sweep(bool concurrent = false);
#endif
void SweepWeakReference();
void SweepHeap(bool concurrent, RecyclerSweepManager& recyclerSweepManager);
void FinishSweep(RecyclerSweepManager& recyclerSweepManager);
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
// Two-pass concurrent sweep support (allocation allowed during sweep).
void DoTwoPassConcurrentSweepPreCheck();
void FinishSweepPrep();
void FinishConcurrentSweepPass1();
void FinishConcurrentSweep();
#endif
bool FinishDisposeObjects();
template <CollectionFlags flags>
bool FinishDisposeObjectsWrapped();

// end collection
void FinishCollection();
void FinishCollection(bool needConcurrentSweep);
void EndCollection();

void ResetCollectionState();
void ResetMarkCollectionState();
void ResetHeuristicCounters();
void ResetPartialHeuristicCounters();
BOOL IsMarkState() const;
BOOL IsFindRootsState() const;
BOOL IsInThreadFindRootsState() const;

template <Js::Phase phase>
void CollectionBegin();
template <Js::Phase phase>
void CollectionEnd();

#if ENABLE_PARTIAL_GC
void ProcessClientTrackedObjects();
bool PartialCollect(bool concurrent);
void FinishPartialCollect(RecyclerSweepManager * recyclerSweep = nullptr);
void ClearPartialCollect();
#if ENABLE_CONCURRENT_GC
void BackgroundFinishPartialCollect(RecyclerSweepManager * recyclerSweep);
#endif
#endif

size_t RescanMark(DWORD waitTime);
size_t FinishMark(DWORD waitTime);
size_t FinishMarkRescan(bool background);
#if ENABLE_CONCURRENT_GC
void ProcessTrackedObjects();
#endif

// True while collectionState is one of the callback states in which
// allocation is permitted.
BOOL IsAllocatableCallbackState()
{
    return (collectionState & (Collection_PostSweepRedeferralCallback | Collection_PostCollectionCallback));
}
#if ENABLE_CONCURRENT_GC
// Concurrent GC
BOOL IsConcurrentEnabled() const { return this->enableConcurrentMark || this->enableParallelMark || this->enableConcurrentSweep; }
BOOL IsConcurrentMarkState() const;
BOOL IsConcurrentMarkExecutingState() const;
BOOL IsConcurrentResetMarksState() const;
BOOL IsConcurrentFindRootState() const;
BOOL IsConcurrentExecutingState() const;
BOOL IsConcurrentSweepExecutingState() const;
BOOL IsConcurrentSweepSetupState() const;
BOOL IsConcurrentSweepState() const;
BOOL IsConcurrentState() const;
BOOL InConcurrentSweep()
{
    return ((collectionState & Collection_ConcurrentSweep) == Collection_ConcurrentSweep);
}
#if ENABLE_ALLOCATIONS_DURING_CONCURRENT_SWEEP
bool AllowAllocationsDuringConcurrentSweep()
{
    return this->allowAllocationsDuringConcurrentSweepForCollection;
}
#endif
#if DBG
BOOL IsConcurrentFinishedState() const;
#endif // DBG

bool InitializeConcurrent(JsUtil::ThreadService* threadService);
bool AbortConcurrent(bool restoreState);
void FinalizeConcurrent(bool restoreState);

// Background GC thread entry points and work dispatch.
static unsigned int CALLBACK StaticThreadProc(LPVOID lpParameter);
static int ExceptFilter(LPEXCEPTION_POINTERS pEP);
DWORD ThreadProc();
void DoBackgroundWork(bool forceForeground = false);
static void CALLBACK StaticBackgroundWorkCallback(void * callbackData);

BOOL CollectOnConcurrentThread();
bool StartConcurrent(CollectionState const state);
BOOL StartBackgroundMarkCollect();
BOOL StartSynchronousBackgroundMark();
BOOL StartAsynchronousBackgroundMark();
BOOL StartBackgroundMark(bool foregroundResetMark, bool foregroundFindRoots);
BOOL StartConcurrentSweepCollect();
template <CollectionFlags flags>
BOOL TryFinishConcurrentCollect();
BOOL WaitForConcurrentThread(DWORD waitTime, RecyclerWaitReason caller = RecyclerWaitReason::Other);
void FlushBackgroundPages();
BOOL FinishConcurrentCollect(CollectionFlags flags);
void FinishTransferSwept(CollectionFlags flags);
BOOL FinishConcurrentCollectWrapped(CollectionFlags flags);

// Work performed on the background thread.
void BackgroundMark();
void BackgroundMarkWeakRefs();
void BackgroundResetMarks();
void PrepareBackgroundFindRoots();
void RevertPrepareBackgroundFindRoots();
size_t BackgroundFindRoots();
size_t BackgroundScanStack();
size_t BackgroundRepeatMark();
size_t BackgroundRescan(RescanFlags rescanFlags);
void BackgroundResetWriteWatchAll();
size_t BackgroundFinishMark();

char* GetScriptThreadStackTop();

void SweepPendingObjects(RecyclerSweepManager& recyclerSweepManager);
void ConcurrentTransferSweptObjects(RecyclerSweepManager& recyclerSweepManager);
#if ENABLE_PARTIAL_GC
void ConcurrentPartialTransferSweptObjects(RecyclerSweepManager& recyclerSweepManager);
#endif // ENABLE_PARTIAL_GC
#endif // ENABLE_CONCURRENT_GC
bool ForceSweepObject();
void NotifyFree(__in char * address, size_t size);
template <typename T>
void NotifyFree(T * heapBlock);
void CleanupPendingUnroot();

#ifdef ENABLE_JS_ETW
// ETW free-memory events are batched: records accumulate in etwFreeRecords
// and are flushed when BulkFreeMemoryCount entries have been written.
ULONG EventWriteFreeMemoryBlock(HeapBlock* heapBlock);
void FlushFreeRecord();
void AppendFreeMemoryETWRecord(__in char *address, size_t size);
static const uint BulkFreeMemoryCount = 400;
uint bulkFreeMemoryWrittenCount;
struct ETWFreeRecord {
    char* memoryAddress;
    uint32 objectSize;
};
ETWFreeRecord etwFreeRecords[BulkFreeMemoryCount];
#endif

template <ObjectInfoBits attributes>
bool IntegrateBlock(char * blockAddress, PageSegment * segment, size_t allocSize, size_t objectSize);

// Heap block / bucket / context types need access to recycler internals.
template <class TBlockAttributes> friend class SmallHeapBlockT;
template <class TBlockAttributes> friend class SmallNormalHeapBlockT;
template <class TBlockAttributes> friend class SmallLeafHeapBlockT;
template <class TBlockAttributes> friend class SmallFinalizableHeapBlockT;
#ifdef RECYCLER_VISITED_HOST
template <class TBlockAttributes> friend class SmallRecyclerVisitedHostHeapBlockT;
#endif
friend class LargeHeapBlock;
friend class HeapInfo;
friend class HeapInfoManager;
friend class LargeHeapBucket;
friend class ThreadContext;
template <typename TBlockType>
friend class HeapBucketT;
template <typename TBlockType>
friend class SmallNormalHeapBucketBase;
template <typename T, ObjectInfoBits attributes>
friend class RecyclerFastAllocator;

#ifdef RECYCLER_TRACE
void PrintCollectTrace(Js::Phase phase, bool finish = false, bool noConcurrentWork = false);
#endif
#ifdef RECYCLER_VERIFY_MARK
void VerifyMark();
void VerifyMarkRoots();
void VerifyMarkStack();
void VerifyMarkArena(ArenaData * arena);
void VerifyMarkBigBlockList(BigBlock * memoryBlocks);
void VerifyMarkArenaMemoryBlockList(ArenaMemoryBlock * memoryBlocks);
bool VerifyMark(void * objectAddress, void * target);
bool VerifyMark(void * target);
#endif
#if DBG_DUMP
bool forceTraceMark;
#endif
bool isHeapEnumInProgress;
#if DBG
bool allowAllocationDuringHeapEnum;
bool allowAllocationDuringRenentrance;
#ifdef ENABLE_PROJECTION
bool isInRefCountTrackingForProjection;
#endif
#endif
// There are two scenarios we allow limited allocation but disallow GC during those allocations:
// in heapenum when we allocate PropertyRecord, and
// in projection ExternalMark allowing allocating VarToDispEx. This is the common flag
// while we have debug only flag for each of the two scenarios.
bool isCollectionDisabled;
#ifdef ENABLE_BASIC_TELEMETRY
RecyclerTelemetryInfo& GetRecyclerTelemetryInfo() { return this->telemetryStats; }
#endif
#ifdef TRACK_ALLOC
public:
// Record type information for the next allocation (allocation tracking).
Recycler * TrackAllocInfo(TrackAllocData const& data);
void ClearTrackAllocInfo(TrackAllocData* data = NULL);
#ifdef PROFILE_RECYCLER_ALLOC
void PrintAllocStats();
private:
static bool DoProfileAllocTracker();
void InitializeProfileAllocTracker();
void TrackUnallocated(__in char* address, __in char *endAddress, size_t sizeCat);
void TrackAllocCore(void * object, size_t size, const TrackAllocData& trackAllocData, bool traceLifetime = false);
void* TrackAlloc(void * object, size_t size, const TrackAllocData& trackAllocData, bool traceLifetime = false);
void TrackIntegrate(__in_ecount(blockSize) char * blockAddress, size_t blockSize, size_t allocSize, size_t objectSize, const TrackAllocData& trackAllocData);
BOOL TrackFree(const char* address, size_t size);
void TrackAllocWeakRef(RecyclerWeakReferenceBase * weakRef);
void TrackFreeWeakRef(RecyclerWeakReferenceBase * weakRef);

// Per-type allocation statistics.
// NOTE(review): the initializer list sets TraceLifetime(false)
// unconditionally, but the member is only declared under
// TRACE_OBJECT_LIFETIME — verify this builds with the macro undefined.
struct TrackerData
{
    TrackerData(type_info const * typeinfo, bool isArray) : typeinfo(typeinfo), isArray(isArray),
        ItemSize(0), ItemCount(0), AllocCount(0), ReqSize(0), AllocSize(0), FreeCount(0), FreeSize(0), TraceLifetime(false)
#ifdef PERF_COUNTERS
        , counter(PerfCounter::RecyclerTrackerCounterSet::GetPerfCounter(typeinfo, isArray))
        , sizeCounter(PerfCounter::RecyclerTrackerCounterSet::GetPerfSizeCounter(typeinfo, isArray))
#endif
    {
    }

    type_info const * typeinfo;
    bool isArray;
#ifdef TRACE_OBJECT_LIFETIME
    bool TraceLifetime;
#endif
    size_t ItemSize;
    size_t ItemCount;
    int AllocCount;
    int64 ReqSize;
    int64 AllocSize;
    int FreeCount;
    int64 FreeSize;
#ifdef PERF_COUNTERS
    PerfCounter::Counter& counter;
    PerfCounter::Counter& sizeCounter;
#endif
    static TrackerData EmptyData;
    static TrackerData ExplicitFreeListObjectData;
};
TrackerData * GetTrackerData(void * address);
void SetTrackerData(void * address, TrackerData * data);

// Pairs instance and array statistics for a single type.
struct TrackerItem
{
    TrackerItem(type_info const * typeinfo) : instanceData(typeinfo, false), arrayData(typeinfo, true)
#ifdef PERF_COUNTERS
        , weakRefCounter(PerfCounter::RecyclerTrackerCounterSet::GetWeakRefPerfCounter(typeinfo))
#endif
    {}
    TrackerData instanceData;
    TrackerData arrayData;
#ifdef PERF_COUNTERS
    PerfCounter::Counter& weakRefCounter;
#endif
};
typedef JsUtil::BaseDictionary<type_info const *, TrackerItem *, NoCheckHeapAllocator, PrimeSizePolicy, DefaultComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> TypeInfotoTrackerItemMap;
typedef JsUtil::BaseDictionary<void *, TrackerData *, NoCheckHeapAllocator, PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> PointerToTrackerDataMap;
TypeInfotoTrackerItemMap * trackerDictionary;
CriticalSection * trackerCriticalSection;
#endif
TrackAllocData nextAllocData;
#endif
public:
// Enumeration
// RAII helper that puts the recycler into a non-collecting mark state
// (e.g. for heap enumeration) and restores the previous state on exit.
class AutoSetupRecyclerForNonCollectingMark
{
private:
    Recycler& m_recycler;
    bool m_setupDone;
    CollectionState m_previousCollectionState;
#ifdef RECYCLER_STATS
    RecyclerCollectionStats m_previousCollectionStats;
#endif
public:
    AutoSetupRecyclerForNonCollectingMark(Recycler& recycler, bool setupForHeapEnumeration = false);
    ~AutoSetupRecyclerForNonCollectingMark();
    void DoCommonSetup();
    void SetupForHeapEnumeration();
};

friend class RecyclerHeapObjectInfo;
bool FindImplicitRootObject(void* candidate, RecyclerHeapObjectInfo& heapObject);
bool FindHeapObject(void* candidate, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject);
bool FindHeapObjectWithClearedAllocators(void* candidate, RecyclerHeapObjectInfo& heapObject);

bool IsCollectionDisabled() const { return isCollectionDisabled; }
// Heap enumeration implies collection is disabled (asserted in DBG builds).
bool IsHeapEnumInProgress() const { Assert(isHeapEnumInProgress ? isCollectionDisabled : true); return isHeapEnumInProgress; }
#if DBG
// There are limited cases that we have to allow allocation during heap enumeration. GC is explicitly
// disabled during heap enumeration for these limited cases. (See DefaultRecyclerCollectionWrapper)
// The only case of allocation right now is allocating property record for string based type handler
// so we can use the propertyId as the relation Id.
// Allocation during enumeration is still frown upon and should still be avoid if possible.
bool AllowAllocationDuringHeapEnum() const { return allowAllocationDuringHeapEnum; }
class AutoAllowAllocationDuringHeapEnum : public AutoBooleanToggle
{
public:
    AutoAllowAllocationDuringHeapEnum(Recycler * recycler) : AutoBooleanToggle(&recycler->allowAllocationDuringHeapEnum) {};
};
#ifdef ENABLE_PROJECTION
bool IsInRefCountTrackingForProjection() const { return isInRefCountTrackingForProjection;}
class AutoIsInRefCountTrackingForProjection : public AutoBooleanToggle
{
public:
    AutoIsInRefCountTrackingForProjection(Recycler * recycler) : AutoBooleanToggle(&recycler->isInRefCountTrackingForProjection) {};
};
#endif
#endif
// Scope that temporarily permits allocation while re-entering the recycler;
// toggles isCollectionDisabled (and the DBG-only tracking flag).
class AutoAllowAllocationDuringReentrance : public AutoBooleanToggle
{
public:
    AutoAllowAllocationDuringReentrance(Recycler * recycler) :
        AutoBooleanToggle(&recycler->isCollectionDisabled)
#if DBG
        , allowAllocationDuringRenentrance(&recycler->allowAllocationDuringRenentrance)
#endif
    {};
#if DBG
private:
    AutoBooleanToggle allowAllocationDuringRenentrance;
#endif
};
#ifdef HEAP_ENUMERATION_VALIDATION
typedef void(*PostHeapEnumScanCallback)(const HeapObject& heapObject, void *data);
PostHeapEnumScanCallback pfPostHeapEnumScanCallback;
void *postHeapEnunScanData;
void PostHeapEnumScan(PostHeapEnumScanCallback callback, void*data);
bool IsPostEnumHeapValidationInProgress() const { return pfPostHeapEnumScanCallback != NULL; }
#endif
public:
void* GetRealAddressFromInterior(void* candidate);
private:
void BeginNonCollectingMark();
void EndNonCollectingMark();
#if defined(RECYCLER_DUMP_OBJECT_GRAPH) || defined(LEAK_REPORT) || defined(CHECK_MEMORY_LEAK)
public:
bool IsInDllCanUnloadNow() const { return inDllCanUnloadNow; }
bool IsInDetachProcess() const { return inDetachProcess; }
void SetInDllCanUnloadNow();
void SetInDetachProcess();
private:
bool inDllCanUnloadNow;
bool inDetachProcess;
bool isPrimaryMarkContextInitialized;
#endif
#if defined(LEAK_REPORT) || defined(CHECK_MEMORY_LEAK)
template <class Fn>
void ReportOnProcessDetach(Fn fn);
void PrintPinnedObjectStackTraces();
#endif
public:
// Per-object callbacks invoked before an object is collected.
typedef void (CALLBACK *ObjectBeforeCollectCallback)(void* object, void* callbackState); // same as jsrt JsObjectBeforeCollectCallback
// same as jsrt JsObjectBeforeCollectCallbackWrapper
typedef void (CALLBACK *ObjectBeforeCollectCallbackWrapper)(ObjectBeforeCollectCallback callback, void* object, void* callbackState, void* threadContext);
void SetObjectBeforeCollectCallback(void* object,
    ObjectBeforeCollectCallback callback,
    void* callbackState,
    ObjectBeforeCollectCallbackWrapper callbackWrapper,
    void* threadContext);
void ClearObjectBeforeCollectCallbacks();
bool IsInObjectBeforeCollectCallback() const { return objectBeforeCollectCallbackState != ObjectBeforeCollectCallback_None; }
private:
struct ObjectBeforeCollectCallbackData
{
    ObjectBeforeCollectCallback callback;
    void* callbackState;
    void* threadContext;
    ObjectBeforeCollectCallbackWrapper callbackWrapper;
    ObjectBeforeCollectCallbackData() {}
    ObjectBeforeCollectCallbackData(ObjectBeforeCollectCallbackWrapper callbackWrapper, ObjectBeforeCollectCallback callback, void* callbackState, void* threadContext) :
        callbackWrapper(callbackWrapper), callback(callback), callbackState(callbackState), threadContext(threadContext) {}
};
typedef JsUtil::BaseDictionary<void*, ObjectBeforeCollectCallbackData, HeapAllocator,
    PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::NoResizeLock> ObjectBeforeCollectCallbackMap;
ObjectBeforeCollectCallbackMap* objectBeforeCollectCallbackMap;
enum ObjectBeforeCollectCallbackState
{
    ObjectBeforeCollectCallback_None,
    ObjectBeforeCollectCallback_Normal, // Normal GC BeforeCollect callback
    ObjectBeforeCollectCallback_Shutdown, // At shutdown invoke all BeforeCollect callback
} objectBeforeCollectCallbackState;
bool ProcessObjectBeforeCollectCallbacks(bool atShutdown = false);
#if GLOBAL_ENABLE_WRITE_BARRIER
private:
typedef JsUtil::BaseDictionary<void *, size_t, HeapAllocator, PrimeSizePolicy, RecyclerPointerComparer, JsUtil::SimpleDictionaryEntry, JsUtil::AsymetricResizeLock> PendingWriteBarrierBlockMap;
PendingWriteBarrierBlockMap pendingWriteBarrierBlockMap;
public:
void RegisterPendingWriteBarrierBlock(void* address, size_t bytes);
void UnRegisterPendingWriteBarrierBlock(void* address);
#endif
#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
private:
// Global list of live recyclers, used by the static WB* debug checks.
static Recycler* recyclerList;
static CriticalSection recyclerListLock;
Recycler* next;
public:
static void WBSetBitJIT(char* addr)
{
    return WBSetBit(addr);
}
static void WBSetBit(char* addr);
static void WBSetBitRange(char* addr, uint length);
static void WBVerifyBitIsSet(char* addr, char* target);
static bool WBCheckIsRecyclerAddress(char* addr);
#endif
#ifdef RECYCLER_FINALIZE_CHECK
void VerifyFinalize();
#endif
};
// Lightweight descriptor for an object located in the recycler heap:
// bundles the object address, owning recycler, heap block, and a pointer
// to (or large-block encoding of) its attribute byte.
class RecyclerHeapObjectInfo
{
    void* m_address;
    Recycler * m_recycler;
    HeapBlock* m_heapBlock;
#if LARGEHEAPBLOCK_ENCODING
    // For large heap blocks the attributes are stored (cookie-encoded) in
    // the object header, so the two representations share storage.
    union
    {
        byte * m_attributes;
        LargeObjectHeader * m_largeHeapBlockHeader;
    };
    bool isUsingLargeHeapBlock = false;
#else
    byte * m_attributes;
#endif
public:
    RecyclerHeapObjectInfo() : m_address(NULL), m_recycler(NULL), m_heapBlock(NULL), m_attributes(NULL) {}
    RecyclerHeapObjectInfo(void* address, Recycler * recycler, HeapBlock* heapBlock, byte * attributes) :
        m_address(address), m_recycler(recycler), m_heapBlock(heapBlock), m_attributes(attributes) { }

    void* GetObjectAddress() const { return m_address; }

#ifdef RECYCLER_PAGE_HEAP
    // Page-heap allocations only exist in large heap blocks.
    bool IsPageHeapAlloc() const
    {
        return isUsingLargeHeapBlock && ((LargeHeapBlock*)m_heapBlock)->InPageHeapMode();
    }
    void PageHeapLockPages() const
    {
        Assert(IsPageHeapAlloc());
        ((LargeHeapBlock*)m_heapBlock)->PageHeapLockPages();
    }
#endif

    // Leaf objects carry no GC-scannable pointers (LeafBit set or the
    // whole block is a leaf block).
    bool IsLeaf() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie) & LeafBit) != 0;
        }
#endif
        return ((*m_attributes & LeafBit) != 0 || this->m_heapBlock->IsLeafBlock());
    }
    bool IsImplicitRoot() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie) & ImplicitRootBit) != 0;
        }
#endif
        return (*m_attributes & ImplicitRootBit) != 0;
    }
    bool IsObjectMarked() const { Assert(m_recycler); return m_recycler->heapBlockMap.IsMarked(m_address); }
    void SetObjectMarked() { Assert(m_recycler); m_recycler->heapBlockMap.SetMark(m_address); }
    ObjectInfoBits GetAttributes() const
    {
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return (ObjectInfoBits)m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        }
#endif
        return (ObjectInfoBits)*m_attributes;
    }
    size_t GetSize() const;

#if LARGEHEAPBLOCK_ENCODING
    void SetLargeHeapBlockHeader(LargeObjectHeader * largeHeapBlockHeader)
    {
        m_largeHeapBlockHeader = largeHeapBlockHeader;
        isUsingLargeHeapBlock = true;
    }
#endif

    // Marks the object as enumerated by the memory profiler; returns the
    // previous value of the bit.
    bool SetMemoryProfilerHasEnumerated()
    {
        Assert(m_heapBlock);
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return SetMemoryProfilerHasEnumeratedForLargeHeapBlock();
        }
#endif
        bool wasMemoryProfilerOldObject = (*m_attributes & MemoryProfilerOldObjectBit) != 0;
        *m_attributes |= MemoryProfilerOldObjectBit;
        return wasMemoryProfilerOldObject;
    }

    // Clears the implicit-root bit; returns whether it was previously set.
    bool ClearImplicitRootBit()
    {
        // This can only be called on the main thread for non-finalizable block
        // As finalizable block requires that the bit not be change during concurrent mark
        // since the background thread change the NewTrackBit
        Assert(!m_heapBlock->IsAnyFinalizableBlock());
#ifdef RECYCLER_PAGE_HEAP
        Recycler* recycler = this->m_recycler;
        if (recycler->IsPageHeapEnabled() && recycler->ShouldCapturePageHeapFreeStack())
        {
#ifdef STACK_BACK_TRACE
            if (this->isUsingLargeHeapBlock)
            {
                LargeHeapBlock* largeHeapBlock = (LargeHeapBlock*)this->m_heapBlock;
                if (largeHeapBlock->InPageHeapMode())
                {
                    // Record the free stack so page-heap diagnostics can
                    // report who released the object.
                    largeHeapBlock->CapturePageHeapFreeStack();
                }
            }
#endif
        }
#endif
#if LARGEHEAPBLOCK_ENCODING
        if (isUsingLargeHeapBlock)
        {
            return ClearImplicitRootBitsForLargeHeapBlock();
        }
#endif
        Assert(m_attributes);
        bool wasImplicitRoot = (*m_attributes & ImplicitRootBit) != 0;
        *m_attributes &= ~ImplicitRootBit;
        return wasImplicitRoot;
    }

    // Explicitly frees the object through the recycler.
    // NOTE(review): unlike the other accessors, this dereferences
    // m_attributes without an isUsingLargeHeapBlock guard — verify callers
    // never invoke it for large-heap-block objects.
    void ExplicitFree()
    {
        if (*m_attributes == ObjectInfoBits::LeafBit)
        {
            m_recycler->ExplicitFreeLeaf(m_address, GetSize());
        }
        else
        {
            Assert(*m_attributes == ObjectInfoBits::NoBit);
            m_recycler->ExplicitFreeNonLeaf(m_address, GetSize());
        }
    }

#if LARGEHEAPBLOCK_ENCODING
    bool ClearImplicitRootBitsForLargeHeapBlock()
    {
        Assert(m_largeHeapBlockHeader);
        byte attributes = m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        bool wasImplicitRoot = (attributes & ImplicitRootBit) != 0;
        m_largeHeapBlockHeader->SetAttributes(m_recycler->Cookie, attributes & ~ImplicitRootBit);
        return wasImplicitRoot;
    }
    bool SetMemoryProfilerHasEnumeratedForLargeHeapBlock()
    {
        Assert(m_largeHeapBlockHeader);
        byte attributes = m_largeHeapBlockHeader->GetAttributes(m_recycler->Cookie);
        bool wasMemoryProfilerOldObject = (attributes & MemoryProfilerOldObjectBit) != 0;
        m_largeHeapBlockHeader->SetAttributes(m_recycler->Cookie, attributes | MemoryProfilerOldObjectBit);
        return wasMemoryProfilerOldObject;
    }
#endif
};
// A fake heap block to replace the original heap block where the strong ref is when it has been collected
// as the original heap block may have been freed
// Every virtual asserts: no heap-block operation is valid on a collected block.
class CollectedRecyclerWeakRefHeapBlock : public HeapBlock
{
public:
#if DBG && GLOBAL_ENABLE_WRITE_BARRIER
    virtual void WBVerifyBitIsSet(char* addr) override { Assert(false); }
    virtual void WBSetBit(char* addr) override { Assert(false); }
    virtual void WBSetBitRange(char* addr, uint count) override { Assert(false); }
    virtual void WBClearBit(char* addr) override { Assert(false); }
    virtual void WBClearObject(char* addr) override { Assert(false); }
#endif
#if DBG
    virtual HeapInfo * GetHeapInfo() const override { Assert(false); return nullptr; }
    virtual BOOL IsFreeObject(void* objectAddress) override { Assert(false); return false; }
#endif
    virtual BOOL IsValidObject(void* objectAddress) override { Assert(false); return false; }
    virtual byte* GetRealAddressFromInterior(void* interiorAddress) override { Assert(false); return nullptr; }
    virtual size_t GetObjectSize(void* object) const override { Assert(false); return 0; }
    virtual bool FindHeapObject(void* objectAddress, Recycler * recycler, FindHeapObjectFlags flags, RecyclerHeapObjectInfo& heapObject) override { Assert(false); return false; }
    virtual bool TestObjectMarkedBit(void* objectAddress) override { Assert(false); return false; }
    virtual void SetObjectMarkedBit(void* objectAddress) override { Assert(false); }
#ifdef RECYCLER_VERIFY_MARK
    virtual bool VerifyMark(void * objectAddress, void * target) override { Assert(false); return false; }
#endif
#ifdef RECYCLER_PERF_COUNTERS
    virtual void UpdatePerfCountersOnFree() override { Assert(false); }
#endif
#ifdef PROFILE_RECYCLER_ALLOC
    virtual void * GetTrackerData(void * address) override { Assert(false); return nullptr; }
    virtual void SetTrackerData(void * address, void * data) override { Assert(false); }
#endif
    // Shared singleton; the block carries no per-instance state.
    static CollectedRecyclerWeakRefHeapBlock Instance;
private:
    CollectedRecyclerWeakRefHeapBlock() : HeapBlock(BlockTypeCount)
    {
#if ENABLE_CONCURRENT_GC
        isPendingConcurrentSweep = false;
#endif
    }
};
// RAII scope for the recycler's idle-decommit region: enters on
// construction, leaves on destruction.
class AutoIdleDecommit
{
public:
    AutoIdleDecommit(Recycler * recycler) : recycler(recycler) { recycler->EnterIdleDecommit(); }
    ~AutoIdleDecommit() { recycler->LeaveIdleDecommit(); }
private:
    Recycler * recycler;
};
- template <typename SmallHeapBlockAllocatorType>
- void
- Recycler::AddSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
- {
- this->GetDefaultHeapInfo()->AddSmallAllocator(allocator, sizeCat);
- }
- template <typename SmallHeapBlockAllocatorType>
- void
- Recycler::RemoveSmallAllocator(SmallHeapBlockAllocatorType * allocator, size_t sizeCat)
- {
- this->GetDefaultHeapInfo()->RemoveSmallAllocator(allocator, sizeCat);
- }
- template <ObjectInfoBits attributes, typename SmallHeapBlockAllocatorType>
- char *
- Recycler::SmallAllocatorAlloc(SmallHeapBlockAllocatorType * allocator, DECLSPEC_GUARD_OVERFLOW size_t sizeCat, size_t size)
- {
- return this->GetDefaultHeapInfo()->SmallAllocatorAlloc<attributes>(this, allocator, sizeCat, size);
- }
// Dummy recycler allocator policy classes to choose the allocation function
class _RecyclerLeafPolicy;
class _RecyclerNonLeafPolicy;
#ifdef RECYCLER_WRITE_BARRIER
class _RecyclerWriteBarrierPolicy;
#endif

// Primary template is intentionally empty; only the policy
// specializations below provide the alloc/free selectors.
template <typename Policy>
class _RecyclerAllocatorFunc
{};
- template <>
- class _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
- {
- public:
- typedef char * (Recycler::*AllocFuncType)(size_t);
- typedef bool (Recycler::*FreeFuncType)(void*, size_t);
- static AllocFuncType GetAllocFunc()
- {
- return &Recycler::AllocLeaf;
- }
- static AllocFuncType GetAllocZeroFunc()
- {
- return &Recycler::AllocLeafZero;
- }
- static FreeFuncType GetFreeFunc()
- {
- return &Recycler::ExplicitFreeLeaf;
- }
- };
- template <>
- class _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
- {
- public:
- typedef char * (Recycler::*AllocFuncType)(size_t);
- typedef bool (Recycler::*FreeFuncType)(void*, size_t);
- static AllocFuncType GetAllocFunc()
- {
- return &Recycler::Alloc;
- }
- static AllocFuncType GetAllocZeroFunc()
- {
- return &Recycler::AllocZero;
- }
- static FreeFuncType GetFreeFunc()
- {
- return &Recycler::ExplicitFreeNonLeaf;
- }
- };
#ifdef RECYCLER_WRITE_BARRIER
// Write-barrier policy: allocation goes through the barriered entry
// points; freeing is the same as the non-leaf policy.
template <>
class _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
{
public:
    using AllocFuncType = char * (Recycler::*)(size_t);
    using FreeFuncType = bool (Recycler::*)(void*, size_t);

    static AllocFuncType GetAllocFunc() { return &Recycler::AllocWithBarrier; }
    static AllocFuncType GetAllocZeroFunc() { return &Recycler::AllocZeroWithBarrier; }
    static FreeFuncType GetFreeFunc() { return &Recycler::ExplicitFreeNonLeaf; }
};
#endif
// This is used by the compiler; when T is NOT a pointer i.e. a value type - it causes leaf allocation
template <typename T>
class TypeAllocatorFunc<Recycler, T> : public _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
};
#if GLOBAL_ENABLE_WRITE_BARRIER
// When the global write barrier is enabled, pointer members allocate through
// the write-barrier policy instead of the plain non-leaf policy.
template <typename T>
class TypeAllocatorFunc<Recycler, T *> : public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
{
};
#else
// Partial template specialization; applies to T when it is a pointer
template <typename T>
class TypeAllocatorFunc<Recycler, T *> : public _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
{
};
#endif
// Dummy class to choose the allocation function.
// These are tag types only - allocation still goes through Recycler; the tags
// select the TypeAllocatorFunc / AllocatorInfo specializations below.
class RecyclerLeafAllocator
{
public:
    // NOTE(review): name suggests zero-length array allocations are given a
    // nonzero placeholder size - confirm against the array allocation helpers.
    static const bool FakeZeroLengthArray = true;
};
class RecyclerNonLeafAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
class RecyclerWriteBarrierAllocator
{
public:
    static const bool FakeZeroLengthArray = true;
};
- // Choose RecyclerLeafAllocator / RecyclerNonLeafAllocator based on "bool isLeaf"
- template <bool isLeaf>
- struct _RecyclerLeaf { typedef RecyclerLeafAllocator AllocatorType; };
- template <>
- struct _RecyclerLeaf<false> { typedef RecyclerNonLeafAllocator AllocatorType; };
- template <bool isLeaf>
- class ListTypeAllocatorFunc<Recycler, isLeaf>
- {
- public:
- // RecyclerLeafAllocator / RecyclerNonLeafAllocator based on "bool isLeaf"
- // used by write barrier type traits
- typedef typename _RecyclerLeaf<isLeaf>::AllocatorType EffectiveAllocatorType;
- typedef char * (Recycler::*AllocFuncType)(size_t);
- typedef bool (Recycler::*FreeFuncType)(void*, size_t);
- static AllocFuncType GetAllocFunc()
- {
- return isLeaf ? &Recycler::AllocLeaf : &Recycler::Alloc;
- }
- static FreeFuncType GetFreeFunc()
- {
- if (isLeaf)
- {
- return &Recycler::ExplicitFreeLeaf;
- }
- else
- {
- return &Recycler::ExplicitFreeNonLeaf;
- }
- }
- };
// Partial template specialization to allocate as non leaf.
// With the global write barrier enabled, "non-leaf" allocations go through the
// write-barrier policy; otherwise through the plain non-leaf policy.
template <typename T>
class TypeAllocatorFunc<RecyclerNonLeafAllocator, T> :
#if GLOBAL_ENABLE_WRITE_BARRIER
    public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
#else
    public _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy>
#endif
{
};
#ifdef RECYCLER_WRITE_BARRIER
// Explicitly requested write-barrier allocation, independent of the global flag.
template <typename T>
class TypeAllocatorFunc<RecyclerWriteBarrierAllocator, T> : public _RecyclerAllocatorFunc<_RecyclerWriteBarrierPolicy>
{
};
#endif
// Explicitly requested leaf allocation for any T.
template <typename T>
class TypeAllocatorFunc<RecyclerLeafAllocator, T> : public _RecyclerAllocatorFunc<_RecyclerLeafPolicy>
{
};
- template <typename TAllocType>
- struct AllocatorInfo<Recycler, TAllocType>
- {
- typedef Recycler AllocatorType;
- typedef TypeAllocatorFunc<Recycler, TAllocType> AllocatorFunc;
- typedef _RecyclerAllocatorFunc<_RecyclerNonLeafPolicy> InstAllocatorFunc; // By default any instance considered non-leaf
- };
- template <typename TAllocType>
- struct AllocatorInfo<RecyclerNonLeafAllocator, TAllocType>
- {
- typedef Recycler AllocatorType;
- typedef TypeAllocatorFunc<RecyclerNonLeafAllocator, TAllocType> AllocatorFunc;
- typedef TypeAllocatorFunc<RecyclerNonLeafAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
- };
- template <typename TAllocType>
- struct AllocatorInfo<RecyclerWriteBarrierAllocator, TAllocType>
- {
- typedef Recycler AllocatorType;
- typedef TypeAllocatorFunc<RecyclerWriteBarrierAllocator, TAllocType> AllocatorFunc;
- typedef TypeAllocatorFunc<RecyclerWriteBarrierAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
- };
- template <typename TAllocType>
- struct AllocatorInfo<RecyclerLeafAllocator, TAllocType>
- {
- typedef Recycler AllocatorType;
- typedef TypeAllocatorFunc<RecyclerLeafAllocator, TAllocType> AllocatorFunc;
- typedef TypeAllocatorFunc<RecyclerLeafAllocator, TAllocType> InstAllocatorFunc; // Same as TypeAllocatorFunc
- };
- template <>
- struct ForceNonLeafAllocator<Recycler>
- {
- typedef RecyclerNonLeafAllocator AllocatorType;
- };
- template <>
- struct ForceNonLeafAllocator<RecyclerLeafAllocator>
- {
- typedef RecyclerNonLeafAllocator AllocatorType;
- };
- template <>
- struct ForceLeafAllocator<Recycler>
- {
- typedef RecyclerLeafAllocator AllocatorType;
- };
- template <>
- struct ForceLeafAllocator<RecyclerNonLeafAllocator>
- {
- typedef RecyclerLeafAllocator AllocatorType;
- };
// TODO: enable -profile for GC phases.
// access the same profiler object from multiple GC threads which shares one recycler object,
// but profiler object is not thread safe
// NOTE: the "&& 0" below keeps the profiling macros compiled out even when
// PROFILE_EXEC is defined, because of the thread-safety issue described above.
#if defined(PROFILE_EXEC) && 0
// Foreground profiler hooks: bracket a phase (or a pair of nested phases).
#define RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase) if (recycler->profiler != nullptr) { recycler->profiler->Begin(phase); }
#define RECYCLER_PROFILE_EXEC_END(recycler, phase) if (recycler->profiler != nullptr) { recycler->profiler->End(phase); }
#define RECYCLER_PROFILE_EXEC_BEGIN2(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->Begin(phase1); recycler->profiler->Begin(phase2);}
#define RECYCLER_PROFILE_EXEC_END2(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->End(phase1); recycler->profiler->End(phase2);}
// Ends phase1 and immediately begins phase2 on the foreground profiler.
#define RECYCLER_PROFILE_EXEC_CHANGE(recycler, phase1, phase2) if (recycler->profiler != nullptr) { recycler->profiler->End(phase1); recycler->profiler->Begin(phase2); }
// Background-thread variants use the separate backgroundProfiler object.
#define RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase) if (recycler->backgroundProfiler != nullptr) { recycler->backgroundProfiler->Begin(phase); }
#define RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase) if (recycler->backgroundProfiler != nullptr) { recycler->backgroundProfiler->End(phase); }
// Dispatch to the background or foreground variant based on the runtime flag.
#define RECYCLER_PROFILE_EXEC_THREAD_BEGIN(background, recycler, phase) if (background) { RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase); } else { RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase); }
#define RECYCLER_PROFILE_EXEC_THREAD_END(background, recycler, phase) if (background) { RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase); } else { RECYCLER_PROFILE_EXEC_END(recycler, phase); }
#else
// Profiling disabled: all hooks expand to nothing.
#define RECYCLER_PROFILE_EXEC_BEGIN(recycler, phase)
#define RECYCLER_PROFILE_EXEC_END(recycler, phase)
#define RECYCLER_PROFILE_EXEC_BEGIN2(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_END2(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_CHANGE(recycler, phase1, phase2)
#define RECYCLER_PROFILE_EXEC_BACKGROUND_BEGIN(recycler, phase)
#define RECYCLER_PROFILE_EXEC_BACKGROUND_END(recycler, phase)
#define RECYCLER_PROFILE_EXEC_THREAD_BEGIN(background, recycler, phase)
#define RECYCLER_PROFILE_EXEC_THREAD_END(background, recycler, phase)
#endif
- }
- _Ret_notnull_ inline void * __cdecl
- operator new(DECLSPEC_GUARD_OVERFLOW size_t byteSize, Recycler * alloc, HeapInfo * heapInfo)
- {
- return alloc->HeapAllocR(heapInfo, byteSize);
- }
// Matching placement operator delete: the compiler invokes this automatically
// if a constructor throws during a `new (recycler, heapInfo) T(...)`
// expression, returning the memory via HeapFree.
inline void __cdecl
operator delete(void * obj, Recycler * alloc, HeapInfo * heapInfo)
{
    alloc->HeapFree(heapInfo, obj);
}
- template<ObjectInfoBits infoBits>
- _Ret_notnull_ inline void * __cdecl
- operator new(DECLSPEC_GUARD_OVERFLOW size_t byteSize, Recycler * recycler, const InfoBitsWrapper<infoBits>&)
- {
- AssertCanHandleOutOfMemory();
- Assert(byteSize != 0);
- void * buffer;
- if (infoBits & EnumClass_1_Bit)
- {
- buffer = recycler->AllocEnumClass<infoBits>(byteSize);
- }
- else
- {
- buffer = recycler->AllocWithInfoBits<infoBits>(byteSize);
- }
- // All of our allocation should throw on out of memory
- Assume(buffer != nullptr);
- return buffer;
- }
#if DBG && defined(RECYCLER_VERIFY_MARK)
// Hook implemented by the runtime: consulted during mark verification for a
// candidate pointer found at objectStartAddress + offset in an object of the
// given type name.
// NOTE(review): exact semantics live with the runtime's implementation; the
// stub below simply answers "not a false reference".
extern bool IsLikelyRuntimeFalseReference(
    char* objectStartAddress, size_t offset, const char* typeName);
// Grants the hook access to a class's private members so it can inspect layout.
#define DECLARE_RECYCLER_VERIFY_MARK_FRIEND() \
    private: \
    friend bool ::IsLikelyRuntimeFalseReference( \
        char* objectStartAddress, size_t offset, const char* typeName);
// Default stub for hosts that do not provide their own implementation.
#define IMPLEMENT_STUB_IsLikelyRuntimeFalseReference() \
    bool IsLikelyRuntimeFalseReference( \
        char* objectStartAddress, size_t offset, const char* typeName) \
    { return false; }
#else
// Mark verification disabled: both macros expand to nothing.
#define DECLARE_RECYCLER_VERIFY_MARK_FRIEND()
#define IMPLEMENT_STUB_IsLikelyRuntimeFalseReference()
#endif
- template <typename ExternalAllocFunc>
- bool Recycler::DoExternalAllocation(size_t size, ExternalAllocFunc externalAllocFunc)
- {
- // Request external memory allocation
- if (!RequestExternalMemoryAllocation(size))
- {
- // Attempt to free some memory then try again
- CollectNow<CollectOnTypedArrayAllocation>();
- if (!RequestExternalMemoryAllocation(size))
- {
- return false;
- }
- }
- struct AutoExternalAllocation
- {
- bool allocationSucceeded = false;
- Recycler* recycler;
- size_t size;
- AutoExternalAllocation(Recycler* recycler, size_t size): recycler(recycler), size(size) {}
- // In case the externalAllocFunc throws or fails, the destructor will report the failure
- ~AutoExternalAllocation() { if (!allocationSucceeded) recycler->ReportExternalMemoryFailure(size); }
- };
- AutoExternalAllocation externalAllocation(this, size);
- if (externalAllocFunc())
- {
- externalAllocation.allocationSucceeded = true;
- return true;
- }
- return false;
- }
|