ソースを参照

swb: fix AllocateArray annotations

The RecyclerChecker plugin was not able to detect (write-barriered) types
allocated through AllocateArray because AllocateArray was an inlined
function. The plugin works by checking "new" operators.

Fixed by enhancing the plugin to check AllocateArray. (Originally
attempted to revert AllocateArray back to a macro, but ran into prefast
failures.)

Fixed a plugin bug introduced in the last commit that resulted in
over-annotating. I was checking the wrong bits and ended up requiring that
any Recycler allocation, including leaf allocations, be annotated. Fixed the bits.

Annotated newly discovered types from AllocateArray.
Jianchun Xu 9 年 前
コミット
77dfebad40

+ 2 - 1
build.sh

@@ -413,9 +413,10 @@ else
 fi
 
 echo Generating $BUILD_TYPE makefiles
+# -DCMAKE_EXPORT_COMPILE_COMMANDS=ON useful for clang-query tool
 cmake $CMAKE_GEN $CC_PREFIX $ICU_PATH $LTO $STATIC_LIBRARY $ARCH \
     -DCMAKE_BUILD_TYPE=$BUILD_TYPE $SANITIZE $NO_JIT $WITHOUT_FEATURES \
-    $WB_FLAG $WB_ARGS \
+    $WB_FLAG $WB_ARGS -DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
     ../..
 
 _RET=$?

+ 1 - 1
lib/Backend/Encoder.cpp

@@ -490,7 +490,7 @@ Encoder::Encode()
                 equivalentTypeGuardOffsets->guards[i].cache.record.propertyOffset = NativeCodeData::GetDataTotalOffset(cache->record.properties);
                 for (int j = 0; j < EQUIVALENT_TYPE_CACHE_SIZE; j++)
                 {
-                    equivalentTypeGuardOffsets->guards[i].cache.types[j] = (intptr_t)cache->types[j];
+                    equivalentTypeGuardOffsets->guards[i].cache.types[j] = (intptr_t)PointerValue(cache->types[j]);
                 }
                 i++;
             });

+ 1 - 8
lib/Backend/GlobOpt.cpp

@@ -2900,8 +2900,6 @@ BOOL GlobOpt::PreloadPRECandidate(Loop *loop, GlobHashBucket* candidate)
     IR::Instr * ldInstr = this->prePassInstrMap->Lookup(propertySym->m_id, nullptr);
     Assert(ldInstr);
 
-    JITTypeHolder propertyType(nullptr);
-
     // Create instr to put in landing pad for compensation
     Assert(IsPREInstrCandidateLoad(ldInstr->m_opcode));
     IR::SymOpnd *ldSrc = ldInstr->GetSrc1()->AsSymOpnd();
@@ -2928,11 +2926,6 @@ BOOL GlobOpt::PreloadPRECandidate(Loop *loop, GlobHashBucket* candidate)
         IR::PropertySymOpnd *propSymOpnd = ldSrc->AsPropertySymOpnd();
         IR::PropertySymOpnd *newPropSymOpnd;
 
-        if (propSymOpnd->IsMonoObjTypeSpecCandidate())
-        {
-            propertyType = propSymOpnd->GetType();
-        }
-
         newPropSymOpnd = propSymOpnd->AsPropertySymOpnd()->CopyWithoutFlowSensitiveInfo(this->func);
         ldInstr->ReplaceSrc1(newPropSymOpnd);
     }
@@ -4191,7 +4184,7 @@ GlobOpt::OptArguments(IR::Instr *instr)
         {
             instr->usesStackArgumentsObject = true;
         }
-        
+
         break;
     }
 

+ 1 - 1
lib/Backend/JITObjTypeSpecFldInfo.cpp

@@ -302,7 +302,7 @@ JITObjTypeSpecFldInfo::BuildObjTypeSpecFldInfoArray(
         Js::FixedFieldInfo * ffInfo = objTypeSpecInfo[i]->GetFixedFieldInfoArray();
         for (uint16 j = 0; j < jitData[i].fixedFieldInfoArraySize; ++j)
         {
-            jitData[i].fixedFieldInfoArray[j].fieldValue = (intptr_t)ffInfo[j].fieldValue;
+            jitData[i].fixedFieldInfoArray[j].fieldValue = (intptr_t)PointerValue(ffInfo[j].fieldValue);
             jitData[i].fixedFieldInfoArray[j].nextHasSameFixedField = ffInfo[j].nextHasSameFixedField;
             if (ffInfo[j].fieldValue != nullptr && Js::JavascriptFunction::Is(ffInfo[j].fieldValue))
             {

+ 1 - 1
lib/Backend/JITTimeConstructorCache.cpp

@@ -32,7 +32,7 @@ JITTimeConstructorCache::JITTimeConstructorCache(const JITTimeConstructorCache*
     Assert(other->GetRuntimeCacheAddr() != 0);
     m_data.runtimeCacheAddr = other->GetRuntimeCacheAddr();
     m_data.runtimeCacheGuardAddr = other->GetRuntimeCacheGuardAddr();
-    m_data.type = *(TypeIDL*)other->GetType().t;
+    m_data.type = *(TypeIDL*)PointerValue(other->GetType().t);
     m_data.slotCount = other->GetSlotCount();
     m_data.inlineSlotCount = other->GetInlineSlotCount();
     m_data.skipNewScObject = other->SkipNewScObject();

+ 3 - 1
lib/Backend/JITType.h

@@ -32,7 +32,9 @@ private:
 class JITTypeHolder
 {
 public:
-    JITType * t;
+    // SWB-TODO: Fix this. JITTypeHolder is used both as GC object and also
+    // background JIT stack object. The latter cannot use write barrier currently.
+    FieldNoBarrier(JITType *) t;
 
     JITTypeHolder();
     JITTypeHolder(JITType * t);

+ 3 - 3
lib/Common/Memory/RecyclerPointers.h

@@ -495,7 +495,7 @@ template <class Policy>
 struct _QuickSortImpl
 {
     template<class T, class Comparer>
-    static void qsort_s(T* arr, size_t count, const Comparer& comparer, void* context)
+    static void Sort(T* arr, size_t count, const Comparer& comparer, void* context)
     {
         // by default use system qsort_s
         ::qsort_s(arr, count, sizeof(T), comparer, context);
@@ -505,7 +505,7 @@ template <>
 struct _QuickSortImpl<_write_barrier_policy>
 {
     template<class T, class Comparer>
-    static void qsort_s(T* arr, size_t count, const Comparer& comparer, void* context)
+    static void Sort(T* arr, size_t count, const Comparer& comparer, void* context)
     {
         // Use custom implementation if policy needs write barrier
         JsUtil::QuickSort<T, Comparer>::Sort(arr, arr + count - 1, comparer, context);
@@ -517,7 +517,7 @@ void qsort_s(T* arr, size_t count, const Comparer& comparer, void* context)
 {
     // Note use of "_ArrayItemWriteBarrierPolicy".
     typedef typename _ArrayItemWriteBarrierPolicy<PolicyType>::Policy Policy;
-    _QuickSortImpl<Policy>::qsort_s(arr, count, comparer, context);
+    _QuickSortImpl<Policy>::Sort(arr, count, comparer, context);
 }
 template<class T, class Comparer>
 void qsort_s(WriteBarrierPtr<T>* _Base, size_t _NumOfElements, size_t _SizeOfElements,

+ 3 - 3
lib/JITIDL/JITTypes.h

@@ -632,11 +632,11 @@ typedef struct XProcNumberPageSegment
 
 typedef struct PolymorphicInlineCacheIDL
 {
-    unsigned short size;
+    IDL_Field(unsigned short) size;
     IDL_PAD2(0)
     X64_PAD4(1)
-    CHAKRA_PTR addr;
-    CHAKRA_PTR inlineCachesAddr;
+    IDL_Field(CHAKRA_PTR) addr;
+    IDL_Field(CHAKRA_PTR) inlineCachesAddr;
 } PolymorphicInlineCacheIDL;
 
 typedef struct PolymorphicInlineCacheInfoIDL

+ 5 - 5
lib/Parser/Parse.cpp

@@ -31,11 +31,11 @@ bool Parser::IsES6DestructuringEnabled() const
 
 struct DeferredFunctionStub
 {
-    RestorePoint restorePoint;
-    uint fncFlags;
-    uint nestedCount;
-    DeferredFunctionStub *deferredStubs;
-    charcount_t ichMin;
+    Field(RestorePoint) restorePoint;
+    Field(uint) fncFlags;
+    Field(uint) nestedCount;
+    Field(DeferredFunctionStub *) deferredStubs;
+    Field(charcount_t) ichMin;
 };
 
 struct StmtNest

+ 9 - 9
lib/Parser/Scan.h

@@ -326,17 +326,17 @@ typedef HRESULT (*CommentCallback)(void *data, OLECHAR firstChar, OLECHAR second
 // Restore point defined using a relative offset rather than a pointer.
 struct RestorePoint
 {
-    charcount_t m_ichMinTok;
-    charcount_t m_ichMinLine;
-    size_t m_cMinTokMultiUnits;
-    size_t m_cMinLineMultiUnits;
-    charcount_t m_line;
-    uint functionIdIncrement;
-    size_t lengthDecr;
-    BOOL m_fHadEol;
+    Field(charcount_t) m_ichMinTok;
+    Field(charcount_t) m_ichMinLine;
+    Field(size_t) m_cMinTokMultiUnits;
+    Field(size_t) m_cMinLineMultiUnits;
+    Field(charcount_t) m_line;
+    Field(uint) functionIdIncrement;
+    Field(size_t) lengthDecr;
+    Field(BOOL) m_fHadEol;
 
 #ifdef DEBUG
-    size_t m_cMultiUnits;
+    Field(size_t) m_cMultiUnits;
 #endif
 
     RestorePoint()

+ 9 - 9
lib/Runtime/Base/FunctionBody.h

@@ -1179,19 +1179,19 @@ namespace Js
     struct LoopHeader
     {
     private:
-        LoopEntryPointList* entryPoints;
+        Field(LoopEntryPointList*) entryPoints;
 
     public:
-        uint startOffset;
-        uint endOffset;
-        uint interpretCount;
-        uint profiledLoopCounter;
-        bool isNested;
-        bool isInTry;
-        FunctionBody * functionBody;
+        Field(uint) startOffset;
+        Field(uint) endOffset;
+        Field(uint) interpretCount;
+        Field(uint) profiledLoopCounter;
+        Field(bool) isNested;
+        Field(bool) isInTry;
+        Field(FunctionBody *) functionBody;
 
 #if DBG_DUMP
-        uint nativeCount;
+        Field(uint) nativeCount;
 #endif
         static const uint NoLoop = (uint)-1;
 

+ 18 - 17
lib/Runtime/Language/AsmJsModule.h

@@ -349,31 +349,32 @@ namespace Js {
         /// proxy of asmjs module
         struct ModuleVar
         {
-            RegSlot location;
-            AsmJsVarType::Which type;
-            union
+            Field(RegSlot) location;
+            Field(AsmJsVarType::Which) type;
+            union InitialiserType
             {
-                int intInit;
-                float floatInit;
-                double doubleInit;
-                AsmJsSIMDValue simdInit;
-            } initialiser;
-            bool isMutable;
+                Field(int) intInit;
+                Field(float) floatInit;
+                Field(double) doubleInit;
+                Field(AsmJsSIMDValue) simdInit;
+            };
+            Field(InitialiserType) initialiser;
+            Field(bool) isMutable;
         };
         struct ModuleVarImport
         {
-            RegSlot location;
-            AsmJsVarType::Which type;
-            PropertyId field;
+            Field(RegSlot) location;
+            Field(AsmJsVarType::Which) type;
+            Field(PropertyId) field;
         };
         struct ModuleFunctionImport
         {
-            RegSlot location;
-            PropertyId field;
+            Field(RegSlot) location;
+            Field(PropertyId) field;
         };
         struct ModuleFunction
         {
-            RegSlot location;
+            Field(RegSlot) location;
         };
         struct ModuleExport
         {
@@ -382,8 +383,8 @@ namespace Js {
         };
         struct ModuleFunctionTable
         {
-            uint size;
-            RegSlot* moduleFunctionIndex;
+            Field(uint) size;
+            Field(RegSlot*) moduleFunctionIndex;
         };
 
         typedef JsUtil::BaseDictionary<PropertyId, AsmJsSlot*, Memory::Recycler> AsmJsSlotMap;

+ 3 - 3
lib/Runtime/Language/ObjTypeSpecFldInfo.h

@@ -17,9 +17,9 @@ namespace Js
 
     struct FixedFieldInfo
     {
-        Var fieldValue;
-        Type* type;
-        bool nextHasSameFixedField; // set to true if the next entry in the FixedFieldInfo array on ObjTypeSpecFldInfo has the same type
+        Field(Var) fieldValue;
+        Field(Type*) type;
+        Field(bool) nextHasSameFixedField; // set to true if the next entry in the FixedFieldInfo array on ObjTypeSpecFldInfo has the same type
     };
 
     // Union with uint16 flags for fast default initialization

+ 5 - 5
lib/Runtime/Library/JavascriptObject.cpp

@@ -1668,9 +1668,9 @@ namespace Js
         size_t descCount = 0;
         struct DescriptorMap
         {
-            PropertyRecord const * propRecord;
-            PropertyDescriptor descriptor;
-            Var originalVar;
+            Field(PropertyRecord const *) propRecord;
+            Field(PropertyDescriptor) descriptor;
+            Field(Var) originalVar;
         };
 
         JavascriptStaticEnumerator enumerator;
@@ -1764,8 +1764,8 @@ namespace Js
         size_t descCount = 0;
         struct DescriptorMap
         {
-            PropertyRecord const * propRecord;
-            PropertyDescriptor descriptor;
+            Field(PropertyRecord const *) propRecord;
+            Field(PropertyDescriptor) descriptor;
         };
 
         //3.  Let keys be props.[[OwnPropertyKeys]]().

+ 88 - 50
tools/RecyclerChecker/RecyclerChecker.cpp

@@ -88,6 +88,11 @@ void MainVisitor::ProcessUnbarriedFields(CXXRecordDecl* recordDecl,
                                          const PushFieldType& pushFieldType)
 {
     std::string typeName = recordDecl->getQualifiedNameAsString();
+    if (typeName == "Memory::WriteBarrierPtr")
+    {
+        return;  // Skip WriteBarrierPtr itself
+    }
+
     const auto& sourceMgr = _compilerInstance.getSourceManager();
     DiagnosticsEngine& diagEngine = _context.getDiagnostics();
     const unsigned diagID = diagEngine.getCustomDiagID(
@@ -444,75 +449,108 @@ static AllocationTypes CheckAllocationType(const CXXStaticCastExpr* castNode)
     }
 }
 
-bool CheckAllocationsInFunctionVisitor::VisitCXXNewExpr(CXXNewExpr* newExpr)
+template <class A0, class A1, class T>
+void CheckAllocationsInFunctionVisitor::VisitAllocate(
+    const A0& getArg0, const A1& getArg1, const T& getAllocType)
 {
-    if (newExpr->getNumPlacementArgs() > 1)
+    const Expr* firstArgNode = getArg0();
+
+    // Check if the first argument (to new or AllocateArray) is a static cast
+    // AllocatorNew/AllocateArray in Chakra always does a static_cast to the AllocatorType
+    const CXXStaticCastExpr* castNode = nullptr;
+    if (firstArgNode != nullptr &&
+        (castNode = dyn_cast<CXXStaticCastExpr>(firstArgNode)))
     {
-        const Expr* firstArgNode = newExpr->getPlacementArg(0);
+        QualType allocatedType = getAllocType();
+        string allocatedTypeStr = allocatedType.getAsString();
 
-        // Check if the first argument to new is a static cast
-        // AllocatorNew in Chakra always does a static_cast to the AllocatorType
-        CXXStaticCastExpr* castNode = nullptr;
-        if (firstArgNode != nullptr &&
-            (castNode = const_cast<CXXStaticCastExpr*>(dyn_cast<CXXStaticCastExpr>(firstArgNode))))
+        auto allocationType = CheckAllocationType(castNode);
+        if (allocationType == AllocationTypes::Recycler)  // Recycler allocation
         {
-            QualType allocatedType = newExpr->getAllocatedType();
-            string allocatedTypeStr = allocatedType.getAsString();
+            const Expr* secondArgNode = getArg1();
 
-            auto allocationType = CheckAllocationType(castNode);
-            if (allocationType == AllocationTypes::Recycler)  // Recycler allocation
+            // Chakra has two types of allocating functions- throwing and non-throwing
+            // However, recycler allocations are always throwing, so the second parameter
+            // should be the address of the allocator function
+            auto unaryNode = cast<UnaryOperator>(secondArgNode);
+            if (unaryNode != nullptr && unaryNode->getOpcode() == UnaryOperatorKind::UO_AddrOf)
             {
-                const Expr* secondArgNode = newExpr->getPlacementArg(1);
-
-                // Chakra has two types of allocating functions- throwing and non-throwing
-                // However, recycler allocations are always throwing, so the second parameter
-                // should be the address of the allocator function
-                auto unaryNode = cast<UnaryOperator>(secondArgNode);
-                if (unaryNode != nullptr && unaryNode->getOpcode() == UnaryOperatorKind::UO_AddrOf)
+                Expr* subExpr = unaryNode->getSubExpr();
+                if (DeclRefExpr* declRef = cast<DeclRefExpr>(subExpr))
                 {
-                    Expr* subExpr = unaryNode->getSubExpr();
-                    if (DeclRefExpr* declRef = cast<DeclRefExpr>(subExpr))
-                    {
-                        auto declNameInfo = declRef->getNameInfo();
-                        auto allocationFunctionStr = declNameInfo.getName().getAsString();
-                        _mainVisitor->RecordRecyclerAllocation(allocationFunctionStr, allocatedTypeStr);
-
-                        if (Contains(allocationFunctionStr, "WithBarrier"))
-                        {
-                            // Recycler write barrier allocation
-                            allocationType = AllocationTypes::WriteBarrier;
-                        }
-                    }
-                    else
+                    auto declNameInfo = declRef->getNameInfo();
+                    auto allocationFunctionStr = declNameInfo.getName().getAsString();
+                    _mainVisitor->RecordRecyclerAllocation(allocationFunctionStr, allocatedTypeStr);
+
+                    if (Contains(allocationFunctionStr, "WithBarrier"))
                     {
-                        Log::errs() << "ERROR: (internal) Expected DeclRefExpr:\n";
-                        subExpr->dump();
+                        // Recycler write barrier allocation
+                        allocationType = AllocationTypes::RecyclerWriteBarrier;
                     }
                 }
-                else if (auto mExpr = cast<MaterializeTemporaryExpr>(secondArgNode))
+                else
                 {
-                    auto name = mExpr->GetTemporaryExpr()->IgnoreImpCasts()->getType().getAsString();
-                    if (StartsWith(name, "InfoBitsWrapper<") && Contains(name, "WithBarrierBit"))
-                    {
-                        // RecyclerNewWithBarrierEnumClass, RecyclerNewWithBarrierWithInfoBits
-                        allocationType = AllocationTypes::WriteBarrier;
-                    }
+                    Log::errs() << "ERROR: (internal) Expected DeclRefExpr:\n";
+                    subExpr->dump();
                 }
-                else
+            }
+            else if (auto mExpr = cast<MaterializeTemporaryExpr>(secondArgNode))
+            {
+                auto name = mExpr->GetTemporaryExpr()->IgnoreImpCasts()->getType().getAsString();
+                if (StartsWith(name, "InfoBitsWrapper<") && Contains(name, "WithBarrierBit"))
                 {
-                    Log::errs() << "ERROR: (internal) Expected unary node or MaterializeTemporaryExpr:\n";
-                    secondArgNode->dump();
+                    // RecyclerNewWithBarrierEnumClass, RecyclerNewWithBarrierWithInfoBits
+                    allocationType = AllocationTypes::RecyclerWriteBarrier;
                 }
             }
-
-            if (allocationType == AllocationTypes::WriteBarrier)
+            else
             {
-                Log::outs() << "In \"" << _functionDecl->getQualifiedNameAsString() << "\"\n";
-                Log::outs() << "  Allocating \"" << allocatedTypeStr << "\" in write barriered memory\n";
+                Log::errs() << "ERROR: (internal) Expected unary node or MaterializeTemporaryExpr:\n";
+                secondArgNode->dump();
             }
+        }
 
-            _mainVisitor->RecordAllocation(allocatedType, allocationType);
+        if (allocationType & AllocationTypes::WriteBarrier)
+        {
+            Log::outs() << "In \"" << _functionDecl->getQualifiedNameAsString() << "\"\n";
+            Log::outs() << "  Allocating \"" << allocatedTypeStr << "\" in write barriered memory\n";
         }
+
+        _mainVisitor->RecordAllocation(allocatedType, allocationType);
+    }
+}
+
+bool CheckAllocationsInFunctionVisitor::VisitCXXNewExpr(CXXNewExpr* newExpr)
+{
+    if (newExpr->getNumPlacementArgs() > 1)
+    {
+        VisitAllocate(
+            [=]() { return newExpr->getPlacementArg(0); },
+            [=]() { return newExpr->getPlacementArg(1); },
+            [=]() { return newExpr->getAllocatedType(); }
+        );
+    }
+
+    return true;
+}
+
+bool CheckAllocationsInFunctionVisitor::VisitCallExpr(CallExpr* callExpr)
+{
+    // Check callExpr for AllocateArray
+    auto callee = callExpr->getDirectCallee();
+    if (callExpr->getNumArgs() == 3 &&
+        callee &&
+        callee->getName().equals("AllocateArray"))
+    {
+        VisitAllocate(
+            [=]() { return callExpr->getArg(0); },
+            [=]() { return callExpr->getArg(1); },
+            [=]()
+            {
+                auto retType = callExpr->getCallReturnType(_mainVisitor->getContext());
+                return QualType(retType->getAs<PointerType>()->getPointeeType());
+            }
+        );
     }
 
     return true;

+ 9 - 1
tools/RecyclerChecker/RecyclerChecker.h

@@ -25,7 +25,9 @@ enum AllocationTypes
     Unknown = 0x0,                  // e.g. template dependent
     NonRecycler = 0x1,              // Heap, Arena, JitArena, ...
     Recycler = 0x2,                 // Recycler
-    WriteBarrier = 0x4 | Recycler,  // Recycler write barrier
+    WriteBarrier = 0x4,             // Recycler write barrier
+
+    RecyclerWriteBarrier = Recycler | WriteBarrier,
 };
 
 class MainVisitor:
@@ -48,6 +50,8 @@ private:
 public:
     MainVisitor(CompilerInstance& compilerInstance, ASTContext& context, bool fix);
 
+    const ASTContext& getContext() const { return _context; }
+
     bool VisitCXXRecordDecl(CXXRecordDecl* recordDecl);
     bool VisitFunctionDecl(FunctionDecl* functionDecl);
 
@@ -81,10 +85,14 @@ public:
     {}
 
     bool VisitCXXNewExpr(CXXNewExpr* newExpression);
+    bool VisitCallExpr(CallExpr* callExpr);
 
 private:
     MainVisitor* _mainVisitor;
     FunctionDecl* _functionDecl;
+
+    template <class A0, class A1, class T>
+    void VisitAllocate(const A0& getArg0, const A1& getArg1, const T& getAllocType);
 };
 
 class RecyclerCheckerConsumer: public ASTConsumer