Răsfoiți Sursa

swb: some annotation fixes

Enhance diagnostics util class StackBackTraceRing a bit.

Update IDL_PADx to pass write barrier plugin checker.

GetParseableFunctionInfoRef() returns the raw address of functionBodyImpl,
which is used and modified by GenerateByteCode(), bypassing the write barrier.
Removed this dangerous API and replaced it with SetParseableFunctionInfo().

Enhanced m_constTable annotation and removed some casts.

Corrected a test file name case.

Tagged the StackTraceLimitOOS test, which times out under verifyMark on xplat
because Linux defaults to an 8MB stack (Windows: 1MB), causing significantly
more stack frames to mark and verify. The tag can be used to exclude these
tests to reduce noise.

Update the plugin to work with the recent FieldWithBarrier macro change
(removed "typename" to support legacy compilers).
Jianchun Xu 9 ani în urmă
părinte
comite
732a5e3352

+ 56 - 13
lib/Common/Core/StackBackTrace.h

@@ -119,6 +119,22 @@ private:
     StackBackTraceNode * next;
 };
 
+//
+// A buffer of requested "size", dynamically allocated or statically embedded.
+//
+template <ULONG size, bool useStatic>
+struct _SimpleBuffer
+{
+    BYTE* _buf;
+    _SimpleBuffer() { _buf = new BYTE[size]; }
+    ~_SimpleBuffer() { delete[] _buf; }
+};
+template <ULONG size>
+struct _SimpleBuffer<size, true>
+{
+    BYTE _buf[size];
+};
+
 //
 // Capture multiple call stack traces using an in-memory ring buffer. Useful for instrumenting source
 // code to track calls.
@@ -128,7 +144,12 @@ private:
 //  HEADER:     Number of pointer-sized data reserved in the header of each trace. You can save runtime
 //              data in the header of each trace to record runtime state of the stack trace.
 //  FRAMES:     Number of stack frames for each trace.
-//  SKIPFRAMES: Top frames to skip for each capture. e.g., at least StackBackTraceRing::Capture frame is useless.
+//              This can be 0, only captures header data without stack.
+//  SKIPFRAMES: Top frames to skip for each capture. e.g., at least the "StackBackTraceRing::Capture"
+//              frame is useless.
+//  USE_STATIC_BUFFER:
+//              Use embedded buffer instead of dynamically allocate. This may be helpful to avoid
+//              initialization problem when global static StackBackTraceRing.
 //
 //  Usage: Following captures the last 100 stacks that changes scriptContext->debuggerMode:
 //      Declare an instance:                            StackBackTraceRing<100> s_debuggerMode;
@@ -140,38 +161,37 @@ private:
 //      Inspect trace N:            dds [buf]+0n32*4*N
 //      Inspect last trace:         dds [buf]+0n32*4*[cur-1]
 //
-template <ULONG BUFFERS, ULONG HEADER = 2, ULONG FRAMES = 30, ULONG SKIPFRAMES = 1>
+template <ULONG BUFFERS, ULONG HEADER = 2, ULONG FRAMES = 30, ULONG SKIPFRAMES = 1,
+          bool USE_STATIC_BUFFER = false>
 class StackBackTraceRing
 {
     static const ULONG ONE_TRACE = HEADER + FRAMES;
 
-private:
-    LPVOID* buf;
+protected:
+    _SimpleBuffer<sizeof(LPVOID) * ONE_TRACE * BUFFERS, USE_STATIC_BUFFER> _simple_buf;
     ULONG cur;
 
 public:
     StackBackTraceRing()
     {
-        buf = new LPVOID[ONE_TRACE * BUFFERS];
         cur = 0;
     }
 
-    ~StackBackTraceRing()
-    {
-        delete[] buf;
-    }
-
     template <class HeaderFunc>
     void CaptureWithHeader(HeaderFunc writeHeader)
     {
-        LPVOID* buffer = &buf[ONE_TRACE * cur++];
+        cur = cur % BUFFERS;
+        LPVOID* buffer = reinterpret_cast<LPVOID*>(_simple_buf._buf) + ONE_TRACE * cur++;
         cur = cur % BUFFERS;
 
         memset(buffer, 0, sizeof(LPVOID) * ONE_TRACE);
         writeHeader(buffer);
 
-        LPVOID* frames = &buffer[HEADER];
-        CaptureStackBackTrace(SKIPFRAMES, FRAMES, frames, nullptr);
+        if (FRAMES > 0)
+        {
+            LPVOID* frames = &buffer[HEADER];
+            CaptureStackBackTrace(SKIPFRAMES, FRAMES, frames, nullptr);
+        }
     }
 
     // Capture a stack trace
@@ -206,6 +226,29 @@ public:
             buffer[1] = reinterpret_cast<LPVOID>(data1);
         });
     }
+
+    template <class T0, class T1, class T2>
+    void Capture(T0 data0, T1 data1, T2 data2) {
+      C_ASSERT(HEADER >= 3);
+
+      CaptureWithHeader([=](_Out_writes_(HEADER) LPVOID* buffer) {
+        buffer[0] = reinterpret_cast<LPVOID>(data0);
+        buffer[1] = reinterpret_cast<LPVOID>(data1);
+        buffer[2] = reinterpret_cast<LPVOID>(data2);
+      });
+    }
+
+    template <class T0, class T1, class T2, class T3>
+    void Capture(T0 data0, T1 data1, T2 data2, T3 data3) {
+      C_ASSERT(HEADER >= 4);
+
+      CaptureWithHeader([=](_Out_writes_(HEADER) LPVOID* buffer) {
+        buffer[0] = reinterpret_cast<LPVOID>(data0);
+        buffer[1] = reinterpret_cast<LPVOID>(data1);
+        buffer[2] = reinterpret_cast<LPVOID>(data2);
+        buffer[3] = reinterpret_cast<LPVOID>(data3);
+      });
+    }
 };
 
 #endif

+ 5 - 5
lib/JITIDL/JITTypes.h

@@ -33,18 +33,18 @@ import "wtypes.idl";
 #define IDL_DEF(def)
 #endif
 
-#define IDL_PAD1(num) byte struct_pad_##num;
-#define IDL_PAD2(num) short struct_pad_##num;
-#define IDL_PAD4(num) int struct_pad_##num;
+#define IDL_PAD1(num) IDL_Field(byte) struct_pad_##num;
+#define IDL_PAD2(num) IDL_Field(short) struct_pad_##num;
+#define IDL_PAD4(num) IDL_Field(int) struct_pad_##num;
 
 #if defined(_M_X64) || defined(_M_ARM64)
-#define X64_PAD4(num) int struct_pad_##num;
+#define X64_PAD4(num) IDL_Field(int) struct_pad_##num;
 #else
 #define X64_PAD4(num)
 #endif
 
 #if defined(_M_IX86) || defined(_M_ARM)
-#define X86_PAD4(num) int struct_pad_##num;
+#define X86_PAD4(num) IDL_Field(int) struct_pad_##num;
 #else
 #define X86_PAD4(num)
 #endif

+ 5 - 2
lib/Runtime/Base/FunctionBody.cpp

@@ -2316,9 +2316,12 @@ namespace Js
                         Js::AutoDynamicCodeReference dynamicFunctionReference(m_scriptContext);
 
                         bool forceNoNative = isDebugOrAsmJsReparse ? this->GetScriptContext()->IsInterpreted() : false;
+
+                        ParseableFunctionInfo* rootFunc = funcBody->GetParseableFunctionInfo();
                         hrParseCodeGen = GenerateByteCode(parseTree, grfscr, m_scriptContext,
-                            funcBody->GetParseableFunctionInfoRef(), funcBody->GetSourceIndex(),
+                            &rootFunc, funcBody->GetSourceIndex(),
                             forceNoNative, &ps, &se, funcBody->GetScopeInfo(), functionRef);
+                        funcBody->SetParseableFunctionInfo(rootFunc);
 
                         if (se.ei.scode == JSERR_AsmJsCompileError)
                         {
@@ -4077,7 +4080,7 @@ namespace Js
         Assert(this->GetConstTable() == nullptr);
         Assert(GetConstantCount() > FirstRegSlot);
 
-        this->SetConstTable(RecyclerNewArrayZ(this->m_scriptContext->GetRecycler(), Var, GetConstantCount()));
+        this->SetConstTable(RecyclerNewArrayZ(this->m_scriptContext->GetRecycler(), Field(Var), GetConstantCount()));
 
         // Initialize with the root object, which will always be recorded here.
         Js::RootObjectBase * rootObject = this->LoadRootObject();

+ 7 - 7
lib/Runtime/Base/FunctionBody.h

@@ -1391,7 +1391,7 @@ namespace Js
         void SetLocalFunctionId(LocalFunctionId functionId);
 
         ParseableFunctionInfo* GetParseableFunctionInfo() const;
-        ParseableFunctionInfo** GetParseableFunctionInfoRef() const;
+        void SetParseableFunctionInfo(ParseableFunctionInfo* func);
         DeferDeserializeFunctionInfo* GetDeferDeserializeFunctionInfo() const;
         FunctionBody * GetFunctionBody() const;
 
@@ -1684,11 +1684,11 @@ namespace Js
         GetFunctionInfo()->SetAttributes(attributes);
     }
 
-    inline ParseableFunctionInfo** FunctionProxy::GetParseableFunctionInfoRef() const
+    inline void FunctionProxy::SetParseableFunctionInfo(ParseableFunctionInfo* func)
     {
         Assert(GetFunctionInfo());
         Assert(GetFunctionInfo()->GetFunctionProxy() == this);
-        return GetFunctionInfo()->GetParseableFunctionInfoRef();
+        GetFunctionInfo()->SetParseableFunctionInfo(func);
     }
 
     inline bool FunctionProxy::IsLambda() const
@@ -2377,7 +2377,7 @@ namespace Js
     private:
         FieldWithBarrier(ByteBlock*) byteCodeBlock;                // Function byte-code for script functions
         FieldWithBarrier(FunctionEntryPointList*) entryPoints;
-        FieldWithBarrier(Var*) m_constTable;
+        FieldWithBarrier(Field(Var)*) m_constTable;
         FieldWithBarrier(void**) inlineCaches;
         FieldWithBarrier(InlineCachePointerArray<PolymorphicInlineCache>) polymorphicInlineCaches; // Contains the latest polymorphic inline caches
         FieldWithBarrier(PropertyId*) cacheIdToPropertyIdMap;
@@ -2431,7 +2431,7 @@ namespace Js
 #define CURRENT_ACCESS_MODIFIER public:
 #include "SerializableFunctionFields.h"
 
-    private:        
+    private:
         FieldWithBarrier(uint) inactiveCount;
 
         // aligned with 8
@@ -3323,8 +3323,8 @@ namespace Js
         void RecordStrictNullDisplayConstant(RegSlot location);
         void InitConstantSlots(Var *dstSlots);
         Var GetConstantVar(RegSlot location);
-        Field(Js::Var)* GetConstTable() const { return (Field(Js::Var)*)PointerValue(this->m_constTable); }
-        void SetConstTable(Js::Var* constTable) { this->m_constTable = constTable; }
+        Field(Js::Var)* GetConstTable() const { return this->m_constTable; }
+        void SetConstTable(Field(Js::Var)* constTable) { this->m_constTable = constTable; }
 
         void MarkScript(ByteBlock * pblkByteCode, ByteBlock * pblkAuxiliaryData, ByteBlock* auxContextBlock,
             uint byteCodeCount, uint byteCodeInLoopCount, uint byteCodeWithoutLDACount);

+ 4 - 4
lib/Runtime/Base/FunctionInfo.h

@@ -92,12 +92,12 @@ namespace Js
         ParseableFunctionInfo* GetParseableFunctionInfo() const
         {
             Assert(functionBodyImpl == nullptr || !IsDeferredDeserializeFunction());
-            return (ParseableFunctionInfo*)PointerValue(functionBodyImpl);
+            return (ParseableFunctionInfo*)GetFunctionProxy();
         }
-        ParseableFunctionInfo** GetParseableFunctionInfoRef() const
+        void SetParseableFunctionInfo(ParseableFunctionInfo* func)
         {
-            Assert(functionBodyImpl == NULL || !IsDeferredDeserializeFunction());
-            return (ParseableFunctionInfo**)&functionBodyImpl;
+            Assert(functionBodyImpl == nullptr || !IsDeferredDeserializeFunction());
+            SetFunctionProxy((FunctionProxy*)func);
         }
         DeferDeserializeFunctionInfo* GetDeferDeserializeFunctionInfo() const
         {

+ 2 - 2
lib/Runtime/Library/SparseArraySegment.inl

@@ -241,7 +241,7 @@ namespace Js
             }
             for (i = (start + step-1)/step; i < (size/step); i++)
             {
-                ((Var*)(this->elements))[i] = fill;
+                ((Var*)(this->elements))[i] = fill; // swb: no write barrier, set to non-GC pointer
             }
             if ((i *= step) < size)
             {
@@ -257,7 +257,7 @@ namespace Js
 
             for (uint i = start; i < size * step; i++)
             {
-                ((Var*)(this->elements))[i] = fill;
+                ((Var*)(this->elements))[i] = fill; // swb: no write barrier, set to non-GC pointer
             }
         }
     }

+ 1 - 1
test/Object/rlexe.xml

@@ -422,7 +422,7 @@
   </test>
   <test>
     <default>
-      <files>ForInEnumCacheBuiltIn.js</files>
+      <files>forinenumcachebuiltin.js</files>
       <baseline />
     </default>
   </test>

+ 2 - 2
test/StackTrace/rlexe.xml

@@ -74,7 +74,7 @@
   </test>
   <test>
     <default>
-      <tags>StackTrace</tags>
+      <tags>StackTrace,xplat_verifymark_slow</tags>
       <files>StackTraceLimitOOS.js</files>
       <baseline>StackTraceLimitOOS.baseline</baseline>
       <compile-flags>-ExtendedErrorStackForTestHost</compile-flags>
@@ -82,7 +82,7 @@
   </test>
   <test>
     <default>
-      <tags>StackTrace,exclude_ship</tags>
+      <tags>StackTrace,xplat_verifymark_slow,exclude_ship</tags>
       <files>StackTraceLimitOOS.js</files>
       <baseline>StackTraceLimitOOS.baseline</baseline>
       <compile-flags>-ExtendedErrorStackForTestHost -on:interruptprobe</compile-flags>

+ 2 - 0
tools/RecyclerChecker/RecyclerChecker.cpp

@@ -112,7 +112,9 @@ void MainVisitor::ProcessUnbarriedFields(CXXRecordDecl* recordDecl,
         // If an annotated field type is struct/class/union (RecordType), the
         // field type in turn should likely be annoatated.
         if (StartsWith(fieldTypeName, "typename WriteBarrierFieldTypeTraits") ||
+            StartsWith(fieldTypeName, "WriteBarrierFieldTypeTraits") ||
             StartsWith(fieldTypeName, "const typename WriteBarrierFieldTypeTraits") ||
+            StartsWith(fieldTypeName, "const WriteBarrierFieldTypeTraits") ||
             fieldName.length() == 0) // anonymous union/struct
         {
             // Do not track down FieldNoBarrier types