Ver Fonte

[1.8>1.9] [MERGE #4676 @pleath] ChakraCore 2018-02 security updates

Merge pull request #4676 from pleath:1802-1.8
Paul Leathers há 8 anos
pai
commit
e3e1a360e5
71 ficheiros alterados com 1038 adições e 874 exclusões
  1. 3 0
      lib/Backend/FlowGraph.cpp
  2. 6 7
      lib/Backend/FunctionJITTimeInfo.cpp
  3. 42 1
      lib/Backend/GlobOpt.cpp
  4. 32 24
      lib/Backend/IRBuilder.cpp
  5. 1 1
      lib/Backend/IRBuilder.h
  6. 8 8
      lib/Backend/IRBuilderAsmJs.cpp
  7. 0 2
      lib/Backend/IRBuilderAsmJs.h
  8. 9 2
      lib/Backend/Inline.cpp
  9. 14 14
      lib/Backend/JITTimeFunctionBody.cpp
  10. 12 10
      lib/Backend/JITTimeProfileInfo.cpp
  11. 430 45
      lib/Backend/Lower.cpp
  12. 5 1
      lib/Backend/Lower.h
  13. 27 196
      lib/Backend/LowerMDShared.cpp
  14. 1 2
      lib/Backend/LowerMDShared.h
  15. 1 0
      lib/Backend/Opnd.h
  16. 1 1
      lib/Backend/ServerScriptContext.cpp
  17. 0 185
      lib/Backend/arm/LowerMD.cpp
  18. 0 5
      lib/Backend/arm/LowerMD.h
  19. 8 0
      lib/Backend/arm64/EncoderMD.cpp
  20. 28 189
      lib/Backend/arm64/LowerMD.cpp
  21. 2 4
      lib/Backend/arm64/LowerMD.h
  22. 3 1
      lib/Backend/arm64/MdOpCodes.h
  23. 18 0
      lib/Common/ConfigFlagsList.h
  24. 21 0
      lib/Common/Core/ConfigParser.cpp
  25. 1 1
      lib/Common/Memory/Allocator.h
  26. 2 2
      lib/Common/Memory/HeapBlockMap.h
  27. 32 10
      lib/Common/Memory/HeapBlockMap.inl
  28. 1 1
      lib/Common/Memory/MarkContext.inl
  29. 46 9
      lib/Common/Memory/Recycler.cpp
  30. 1 1
      lib/Common/Memory/Recycler.h
  31. 2 2
      lib/Common/Memory/Recycler.inl
  32. 8 0
      lib/Common/Memory/WriteBarrierMacros.h
  33. 5 4
      lib/Parser/Parse.cpp
  34. 3 2
      lib/Runtime/Base/FunctionBody.h
  35. 40 10
      lib/Runtime/Base/ThreadContext.h
  36. 1 1
      lib/Runtime/ByteCode/AsmJsByteCodeDumper.cpp
  37. 1 1
      lib/Runtime/Debug/DiagObjectModel.cpp
  38. 3 2
      lib/Runtime/Debug/DiagObjectModel.h
  39. 2 2
      lib/Runtime/Debug/TTInflateMap.cpp
  40. 4 4
      lib/Runtime/Debug/TTInflateMap.h
  41. 1 1
      lib/Runtime/Debug/TTSnapObjects.cpp
  42. 4 4
      lib/Runtime/Debug/TTSnapValues.cpp
  43. 1 1
      lib/Runtime/Debug/TTSnapValues.h
  44. 1 1
      lib/Runtime/Debug/TTSnapshot.cpp
  45. 3 3
      lib/Runtime/Debug/TTSnapshotExtractor.cpp
  46. 1 1
      lib/Runtime/Debug/TTSnapshotExtractor.h
  47. 1 1
      lib/Runtime/Language/AsmJsModule.cpp
  48. 4 3
      lib/Runtime/Language/InterpreterStackFrame.cpp
  49. 1 1
      lib/Runtime/Language/InterpreterStackFrame.h
  50. 2 2
      lib/Runtime/Language/JavascriptOperators.cpp
  51. 4 3
      lib/Runtime/Library/BoundFunction.cpp
  52. 2 1
      lib/Runtime/Library/BoundFunction.h
  53. 3 1
      lib/Runtime/Library/ConcatString.cpp
  54. 19 37
      lib/Runtime/Library/JavascriptArray.cpp
  55. 3 4
      lib/Runtime/Library/JavascriptArray.h
  56. 2 2
      lib/Runtime/Library/JavascriptGeneratorFunction.cpp
  57. 2 1
      lib/Runtime/Library/JavascriptLibrary.cpp
  58. 2 1
      lib/Runtime/Library/JavascriptLibrary.h
  59. 4 4
      lib/Runtime/Library/RegexHelper.cpp
  60. 6 4
      lib/Runtime/Library/ScriptFunction.cpp
  61. 5 0
      lib/Runtime/Library/SparseArraySegment.h
  62. 17 15
      lib/Runtime/Library/StackScriptFunction.cpp
  63. 1 1
      lib/Runtime/Library/StackScriptFunction.h
  64. 1 1
      lib/Runtime/Library/WebAssemblyEnvironment.cpp
  65. 1 1
      lib/Runtime/Library/WebAssemblyEnvironment.h
  66. 2 2
      lib/Runtime/Library/WebAssemblyInstance.cpp
  67. 4 3
      lib/Runtime/Library/WebAssemblyTable.cpp
  68. 2 1
      lib/Runtime/Library/WebAssemblyTable.h
  69. 1 1
      lib/Runtime/Types/DynamicObjectPropertyEnumerator.cpp
  70. 89 23
      tools/RecyclerChecker/RecyclerChecker.cpp
  71. 25 0
      tools/RecyclerChecker/RecyclerChecker.h

+ 3 - 0
lib/Backend/FlowGraph.cpp

@@ -293,6 +293,7 @@ FlowGraph::Build(void)
         case Js::OpCode::TryCatch:
             if (this->catchLabelStack)
             {
+                AssertOrFailFast(!this->catchLabelStack->Empty());
                 this->catchLabelStack->Pop();
             }
             break;
@@ -300,6 +301,7 @@ FlowGraph::Build(void)
         case Js::OpCode::TryFinally:
             if (this->finallyLabelStack)
             {
+                AssertOrFailFast(!this->finallyLabelStack->Empty());
                 this->finallyLabelStack->Pop();
             }
             break;
@@ -497,6 +499,7 @@ FlowGraph::Build(void)
             }
             else if (instr->m_opcode == Js::OpCode::Finally)
             {
+                AssertOrFailFast(!this->finallyLabelStack->Empty());
                 this->finallyLabelStack->Pop();
             }
         }

+ 6 - 7
lib/Backend/FunctionJITTimeInfo.cpp

@@ -112,7 +112,7 @@ FunctionJITTimeInfo::BuildJITTimeData(
         if(objTypeSpecInfo)
         {
             jitData->objTypeSpecFldInfoCount = jitData->bodyData->inlineCacheCount;
-            jitData->objTypeSpecFldInfoArray = (ObjTypeSpecFldIDL**)objTypeSpecInfo;
+            jitData->objTypeSpecFldInfoArray = unsafe_write_barrier_cast<ObjTypeSpecFldIDL**>(objTypeSpecInfo);
         }
         for (Js::InlineCacheIndex i = 0; i < jitData->bodyData->inlineCacheCount; ++i)
         {
@@ -131,7 +131,7 @@ FunctionJITTimeInfo::BuildJITTimeData(
         Assert(globObjTypeSpecInfo != nullptr);
 
         jitData->globalObjTypeSpecFldInfoCount = codeGenData->GetGlobalObjTypeSpecFldInfoCount();
-        jitData->globalObjTypeSpecFldInfoArray = (ObjTypeSpecFldIDL**)globObjTypeSpecInfo;
+        jitData->globalObjTypeSpecFldInfoArray = unsafe_write_barrier_cast<ObjTypeSpecFldIDL**>(globObjTypeSpecInfo);
     }
     const Js::FunctionCodeGenJitTimeData * nextJITData = codeGenData->GetNext();
     if (nextJITData != nullptr)
@@ -259,7 +259,7 @@ FunctionJITTimeInfo::GetRuntimeInfo() const
 ObjTypeSpecFldInfo *
 FunctionJITTimeInfo::GetObjTypeSpecFldInfo(uint index) const
 {
-    Assert(index < GetBody()->GetInlineCacheCount());
+    AssertOrFailFast(index < GetBody()->GetInlineCacheCount());
     if (m_data.objTypeSpecFldInfoArray == nullptr)
     {
         return nullptr;
@@ -271,7 +271,7 @@ FunctionJITTimeInfo::GetObjTypeSpecFldInfo(uint index) const
 ObjTypeSpecFldInfo *
 FunctionJITTimeInfo::GetGlobalObjTypeSpecFldInfo(uint index) const
 {
-    Assert(index < m_data.globalObjTypeSpecFldInfoCount);
+    AssertOrFailFast(index < m_data.globalObjTypeSpecFldInfoCount);
 
     return reinterpret_cast<ObjTypeSpecFldInfo *>(m_data.globalObjTypeSpecFldInfoArray[index]);
 }
@@ -298,8 +298,7 @@ FunctionJITTimeInfo::GetLdFldInlinee(Js::InlineCacheIndex inlineCacheIndex) cons
     {
         return nullptr;
     }
-    Assert(inlineCacheIndex < m_data.ldFldInlineeCount);
-
+    AssertOrFailFast(inlineCacheIndex < m_data.ldFldInlineeCount);
 
     return reinterpret_cast<const FunctionJITTimeInfo*>(m_data.ldFldInlinees[inlineCacheIndex]);
 }
@@ -312,7 +311,7 @@ FunctionJITTimeInfo::GetInlinee(Js::ProfileId profileId) const
     {
         return nullptr;
     }
-    Assert(profileId < m_data.inlineeCount);
+    AssertOrFailFast(profileId < m_data.inlineeCount);
 
     auto inlinee = reinterpret_cast<const FunctionJITTimeInfo *>(m_data.inlinees[profileId]);
     if (inlinee == nullptr && m_data.inlineesRecursionFlags[profileId])

+ 42 - 1
lib/Backend/GlobOpt.cpp

@@ -5191,7 +5191,7 @@ GlobOpt::ValueNumberDst(IR::Instr **pInstr, Value *src1Val, Value *src2Val)
         if (!PHASE_OFF(Js::OptTagChecksPhase, this->func) &&
             (src1ValueInfo == nullptr || src1ValueInfo->IsUninitialized()))
         {
-            return this->NewGenericValue(ValueType::GetObject(ObjectType::Object), dst);
+            return this->NewGenericValue(ValueType::GetObject(ObjectType::Object).ToLikely().SetCanBeTaggedValue(false), dst);
         }
         break;
 
@@ -15399,6 +15399,47 @@ GlobOpt::CheckJsArrayKills(IR::Instr *const instr)
             }
             break;
         }
+
+        case Js::OpCode::InitProto:
+        {
+            // Find the 'this' parameter and check if it's possible for it to be an array
+            IR::Opnd *const arrayOpnd = instr->GetSrc1();
+            Assert(arrayOpnd);
+            const ValueType arrayValueType(arrayOpnd->GetValueType());
+            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
+            {
+                break;
+            }
+
+            if(doNativeArrayTypeSpec && !(useValueTypes && arrayValueType.IsNotNativeArray()))
+            {
+                kills.SetKillsNativeArrays();
+            }
+            break;
+        }            
+
+        case Js::OpCode::InitClass:
+            Assert(instr->GetSrc1());
+            if (instr->GetSrc2() == nullptr)
+            {
+                // No extends operand, so the InitClass will not make something into a prototype
+                break;
+            }
+
+            if(doNativeArrayTypeSpec)
+            {
+                // Class/object construction can make something a prototype
+                kills.SetKillsNativeArrays();
+            }
+            break;
+
+        case Js::OpCode::NewScObjectNoCtor:
+            if(doNativeArrayTypeSpec)
+            {
+                // Class/object construction can make something a prototype
+                kills.SetKillsNativeArrays();
+            }
+            break;
     }
 
     return kills;

+ 32 - 24
lib/Backend/IRBuilder.cpp

@@ -460,9 +460,7 @@ IRBuilder::Build()
         this->m_loopBodyLocalsStartSlot = (Js::PropertyId)(localsOffset / sizeof(Js::Var));
     }
 
-#if DBG
     m_offsetToInstructionCount = offsetToInstructionCount;
-#endif
     m_offsetToInstruction = JitAnewArrayZ(m_tempAlloc, IR::Instr *, offsetToInstructionCount);
 
 #ifdef BYTECODE_BRANCH_ISLAND
@@ -820,7 +818,8 @@ IRBuilder::Build()
                     m_lastInstr->m_opcode == Js::OpCode::RuntimeTypeError)
                 {
                     uint32 lastInstrOffset = m_lastInstr->GetByteCodeOffset();
-                    Assert(lastInstrOffset < m_offsetToInstructionCount);
+
+                    AssertOrFailFast(lastInstrOffset < m_offsetToInstructionCount);
 #if DBG
                     __analysis_assume(lastInstrOffset < this->m_offsetToInstructionCount);
 #endif
@@ -1069,7 +1068,7 @@ IRBuilder::CreateLabel(IR::BranchInstr * branchInstr, uint& offset)
 
     for (;;)
     {
-        Assert(offset < m_offsetToInstructionCount);
+        AssertOrFailFast(offset < m_offsetToInstructionCount);
         targetInstr = this->m_offsetToInstruction[offset];
         if (targetInstr != nullptr)
         {
@@ -1118,7 +1117,7 @@ IRBuilder::CreateLabel(IR::BranchInstr * branchInstr, uint& offset)
 
 void IRBuilder::InsertInstr(IR::Instr *instr, IR::Instr* insertBeforeInstr)
 {
-    Assert(insertBeforeInstr->GetByteCodeOffset() < m_offsetToInstructionCount);
+    AssertOrFailFast(insertBeforeInstr->GetByteCodeOffset() < m_offsetToInstructionCount);
     instr->SetByteCodeOffset(insertBeforeInstr);
     uint32 offset = insertBeforeInstr->GetByteCodeOffset();
     if (m_offsetToInstruction[offset] == insertBeforeInstr)
@@ -1150,7 +1149,7 @@ IRBuilder::AddInstr(IR::Instr *instr, uint32 offset)
     m_lastInstr->InsertAfter(instr);
     if (offset != Js::Constants::NoByteCodeOffset)
     {
-        Assert(offset < m_offsetToInstructionCount);
+        AssertOrFailFast(offset < m_offsetToInstructionCount);
         if (m_offsetToInstruction[offset] == nullptr)
         {
             m_offsetToInstruction[offset] = instr;
@@ -1213,6 +1212,7 @@ IRBuilder::BuildIndirOpnd(IR::RegOpnd *baseReg, uint32 offset, const char16 *des
 IR::SymOpnd *
 IRBuilder::BuildFieldOpnd(Js::OpCode newOpcode, Js::RegSlot reg, Js::PropertyId propertyId, Js::PropertyIdIndexType propertyIdIndex, PropertyKind propertyKind, uint inlineCacheIndex)
 {
+    AssertOrFailFast(inlineCacheIndex < m_func->GetJITFunctionBody()->GetInlineCacheCount() || inlineCacheIndex == Js::Constants::NoInlineCacheIndex);
     PropertySym * propertySym = BuildFieldSym(reg, propertyId, propertyIdIndex, inlineCacheIndex, propertyKind);
     IR::SymOpnd * symOpnd;
 
@@ -1798,7 +1798,8 @@ IRBuilder::BuildReg1(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0)
     case Js::OpCode::Catch:
         if (this->handlerOffsetStack)
         {
-            Assert(this->handlerOffsetStack->Top().Second() == true);
+            AssertOrFailFast(!this->handlerOffsetStack->Empty());
+            AssertOrFailFast(this->handlerOffsetStack->Top().Second() == true);
             this->handlerOffsetStack->Pop();
         }
         dstIsCatchObject = true;
@@ -6125,19 +6126,24 @@ IRBuilder::BuildProfiledCallI(Js::OpCode opcode, uint32 offset, Js::RegSlot retu
                 if(this->m_func->GetWorkItem()->GetJITTimeInfo())
                 {
                     const FunctionJITTimeInfo *inlinerData = this->m_func->GetWorkItem()->GetJITTimeInfo();
-                    if(!(this->IsLoopBody() && PHASE_OFF(Js::InlineInJitLoopBodyPhase, this->m_func)) && 
-                        inlinerData && inlinerData->GetInlineesBV() && (!inlinerData->GetInlineesBV()->Test(profileId)
+                    if (!(this->IsLoopBody() && PHASE_OFF(Js::InlineInJitLoopBodyPhase, this->m_func))
+                        && inlinerData && inlinerData->GetInlineesBV())
+                    {
+                        AssertOrFailFast(profileId < inlinerData->GetInlineesBV()->Length());
+                        if (!inlinerData->GetInlineesBV()->Test(profileId)
 #if DBG
-                        || (PHASE_STRESS(Js::BailOnNoProfilePhase, this->m_func->GetTopFunc()) &&
-                            (CONFIG_FLAG(SkipFuncCountForBailOnNoProfile) < 0 ||
-                            this->m_func->m_callSiteCount >= (uint)CONFIG_FLAG(SkipFuncCountForBailOnNoProfile)))
+                            || (PHASE_STRESS(Js::BailOnNoProfilePhase, this->m_func->GetTopFunc())
+                                && (CONFIG_FLAG(SkipFuncCountForBailOnNoProfile) < 0
+                                    || this->m_func->m_callSiteCount >= (uint)CONFIG_FLAG(SkipFuncCountForBailOnNoProfile)))
 #endif
-                        ))
-                    {
-                        this->InsertBailOnNoProfile(offset);
-                        isProtectedByNoProfileBailout = true;
+                            )
+                        {
+                            this->InsertBailOnNoProfile(offset);
+                            isProtectedByNoProfileBailout = true;
+                        }
                     }
-                    else
+
+                    if (!isProtectedByNoProfileBailout)
                     {
                         this->callTreeHasSomeProfileInfo = true;
                     }
@@ -6398,10 +6404,10 @@ IRBuilder::BuildCallCommon(IR::Instr * instr, StackSym * symDst, Js::ArgSlot arg
 #endif
 
     // Link all the args of this call by creating a def/use chain through the src2.
-
-    for (argInstr = this->m_argStack->Pop();
-        argInstr && argInstr->m_opcode != Js::OpCode::StartCall;
-        argInstr = this->m_argStack->Pop())
+    AssertOrFailFast(!m_argStack->Empty());
+    for (argInstr = m_argStack->Pop();
+        argInstr && !m_argStack->Empty() && argInstr->m_opcode != Js::OpCode::StartCall;
+        argInstr = m_argStack->Pop())
     {
         prevInstr->SetSrc2(argInstr->GetDst());
         prevInstr = argInstr;
@@ -6409,8 +6415,9 @@ IRBuilder::BuildCallCommon(IR::Instr * instr, StackSym * symDst, Js::ArgSlot arg
         count++;
 #endif
     }
+    AssertOrFailFast(argInstr == nullptr || argInstr->m_opcode == Js::OpCode::StartCall);
 
-    if (this->m_argStack->Empty())
+    if (m_argStack->Empty())
     {
         this->callTreeHasSomeProfileInfo = false;
     }
@@ -6736,7 +6743,8 @@ IRBuilder::BuildEmpty(Js::OpCode newOpcode, uint32 offset)
     case Js::OpCode::Finally:
         if (this->handlerOffsetStack)
         {
-            Assert(this->handlerOffsetStack->Top().Second() == false);
+            AssertOrFailFast(!this->handlerOffsetStack->Empty());
+            AssertOrFailFast(this->handlerOffsetStack->Top().Second() == false);
             this->handlerOffsetStack->Pop();
         }
         finallyBlockLevel++;
@@ -6971,7 +6979,6 @@ IRBuilder::BuildBr(Js::OpCode newOpcode, uint32 offset)
     IR::BranchInstr * branchInstr;
     const unaligned   Js::OpLayoutBr *branchInsn = m_jnReader.Br();
     unsigned int      targetOffset = m_jnReader.GetCurrentOffset() + branchInsn->RelativeJumpOffset;
-
 #ifdef BYTECODE_BRANCH_ISLAND
     bool isLongBranchIsland = (m_jnReader.PeekOp() == Js::OpCode::BrLong);
     if (isLongBranchIsland)
@@ -7154,6 +7161,7 @@ IRBuilder::BuildBrEnvProperty(Js::OpCode newOpcode, uint32 offset)
 BranchReloc *
 IRBuilder::AddBranchInstr(IR::BranchInstr * branchInstr, uint32 offset, uint32 targetOffset)
 {
+    AssertOrFailFast(targetOffset <= m_func->GetJITFunctionBody()->GetByteCodeLength());
     //
     // Loop jitting would be done only till the LoopEnd
     // Any branches beyond that offset are for the return stmt

+ 1 - 1
lib/Backend/IRBuilder.h

@@ -333,6 +333,7 @@ private:
     Func *              m_func;
     IR::Instr *         m_lastInstr;
     IR::Instr **        m_offsetToInstruction;
+    uint32              m_offsetToInstructionCount;
     uint32              m_functionStartOffset;
     Js::ByteCodeReader  m_jnReader;
     Js::StatementReader<Js::FunctionBody::ArenaStatementMapList> m_statementReader;
@@ -363,7 +364,6 @@ private:
     // used to estimate how much stack we should probe for at the
     // beginning of a JITted function.
 #if DBG
-    uint32              m_offsetToInstructionCount;
     uint32              m_callsOnStack;
 #endif
     uint32              m_argsOnStack;

+ 8 - 8
lib/Backend/IRBuilderAsmJs.cpp

@@ -112,9 +112,7 @@ IRBuilderAsmJs::Build()
         offsetToInstructionCount = lastOffset + 2;
     }
 
-#if DBG
     m_offsetToInstructionCount = offsetToInstructionCount;
-#endif
     m_offsetToInstruction = JitAnewArrayZ(m_tempAlloc, IR::Instr *, offsetToInstructionCount);
 
     LoadNativeCodeData();
@@ -220,7 +218,7 @@ IRBuilderAsmJs::AddInstr(IR::Instr * instr, uint32 offset)
     m_lastInstr->InsertAfter(instr);
     if (offset != Js::Constants::NoByteCodeOffset)
     {
-        Assert(offset < m_offsetToInstructionCount);
+        AssertOrFailFast(offset < m_offsetToInstructionCount);
         if (m_offsetToInstruction[offset] == nullptr)
         {
             m_offsetToInstruction[offset] = instr;
@@ -670,6 +668,7 @@ IRBuilderAsmJs::RegIsConstant(Js::RegSlot reg)
 BranchReloc *
 IRBuilderAsmJs::AddBranchInstr(IR::BranchInstr * branchInstr, uint32 offset, uint32 targetOffset)
 {
+    AssertOrFailFast(targetOffset <= m_func->GetJITFunctionBody()->GetByteCodeLength());
     //
     // Loop jitting would be done only till the LoopEnd
     // Any branches beyond that offset are for the return statement
@@ -1042,8 +1041,8 @@ IRBuilderAsmJs::CreateLabel(IR::BranchInstr * branchInstr, uint & offset)
     IR::Instr * targetInstr = nullptr;
     while (targetInstr == nullptr)
     {
+        AssertOrFailFast(offset < m_offsetToInstructionCount);
         targetInstr = m_offsetToInstruction[offset];
-        Assert(offset < m_offsetToInstructionCount);
         ++offset;
     }
 
@@ -1789,7 +1788,7 @@ IRBuilderAsmJs::BuildAsmCall(Js::OpCodeAsmJs newOpcode, uint32 offset, Js::ArgSl
                 instr->AsProfiledInstr()->u.profileId = profileId;
             }
         }
-
+        AssertOrFailFast(!this->m_argOffsetStack->Empty());
         argOffset = m_argOffsetStack->Pop();
         argOffset -= MachPtr;
         break;
@@ -1811,7 +1810,8 @@ IRBuilderAsmJs::BuildAsmCall(Js::OpCodeAsmJs newOpcode, uint32 offset, Js::ArgSl
     IR::Instr * argInstr = nullptr;
     IR::Instr * prevInstr = instr;
 
-    for (argInstr = m_argStack->Pop(); argInstr->m_opcode != Js::OpCode::StartCall; argInstr = m_argStack->Pop())
+    AssertOrFailFast(!this->m_argStack->Empty());
+    for (argInstr = m_argStack->Pop(); !m_argStack->Empty() && argInstr->m_opcode != Js::OpCode::StartCall; argInstr = m_argStack->Pop())
     {
         if (newOpcode == Js::OpCodeAsmJs::I_Call || newOpcode == Js::OpCodeAsmJs::ProfiledI_Call)
         {
@@ -1841,10 +1841,10 @@ IRBuilderAsmJs::BuildAsmCall(Js::OpCodeAsmJs newOpcode, uint32 offset, Js::ArgSl
         count++;
     }
 
-    Assert(argInstr->m_opcode == Js::OpCode::StartCall);
+    AssertOrFailFast(argInstr->m_opcode == Js::OpCode::StartCall);
     argInstr->SetSrc1(IR::IntConstOpnd::New(count, TyUint16, m_func));
 
-    Assert(argOffset == 0);
+    AssertOrFailFast(argOffset == 0);
     prevInstr->SetSrc2(argInstr->GetDst());
 
 #ifdef ENABLE_SIMDJS

+ 0 - 2
lib/Backend/IRBuilderAsmJs.h

@@ -252,9 +252,7 @@ private:
     JitLoopBodyData*        m_jitLoopBodyData = nullptr;
     IRBuilderAsmJsSwitchAdapter m_switchAdapter;
     SwitchIRBuilder         m_switchBuilder;
-#if DBG
     uint32                  m_offsetToInstructionCount;
-#endif
 
 #define BUILD_LAYOUT_DEF(layout, ...) void Build##layout (Js::OpCodeAsmJs, uint32, __VA_ARGS__);
 #define Reg_Type Js::RegSlot

+ 9 - 2
lib/Backend/Inline.cpp

@@ -292,6 +292,7 @@ Inline::Optimize(Func *func, __in_ecount_opt(callerArgOutCount) IR::Instr *calle
                             break;
                         }
 
+                        bool hasDstUsedBuiltInReturnType = false;
                         if(!inlineeData->HasBody())
                         {
                             Assert(builtInInlineCandidateOpCode != 0);
@@ -303,11 +304,12 @@ Inline::Optimize(Func *func, __in_ecount_opt(callerArgOutCount) IR::Instr *calle
                                 break;
                             }
 
-                            // This built-in function is going to be inlined, so reset the destination's value type
+                            // This built-in function should be inlined, so reset the destination's value type
                             if(!builtInReturnType.IsUninitialized())
                             {
                                 if(instr->GetDst())
                                 {
+                                    hasDstUsedBuiltInReturnType = true;
                                     instr->GetDst()->SetValueType(builtInReturnType);
                                     if(builtInReturnType.IsDefinite())
                                     {
@@ -367,7 +369,12 @@ Inline::Optimize(Func *func, __in_ecount_opt(callerArgOutCount) IR::Instr *calle
                         instrNext = builtInInlineCandidateOpCode != 0 ?
                             this->InlineBuiltInFunction(instr, inlineeData, builtInInlineCandidateOpCode, inlinerData, symThis, &isInlined, profileId, recursiveInlineDepth) :
                             this->InlineScriptFunction(instr, inlineeData, symThis, profileId, &isInlined, recursiveInlineDepth);
-
+                         if (!isInlined && hasDstUsedBuiltInReturnType)
+                        {
+                            // We haven't actually inlined the builtin, we need to revert the value type to likely
+                            instr->GetDst()->UnsetValueTypeFixed();
+                            instr->GetDst()->SetValueType(instr->GetDst()->GetValueType().ToLikely());
+                        }
                     }
                     if(++this->inlineesProcessed == inlinerData->GetInlineeCount())
                     {

+ 14 - 14
lib/Backend/JITTimeFunctionBody.cpp

@@ -25,7 +25,7 @@ JITTimeFunctionBody::InitializeJITFunctionData(
     jitBody->constCount = numConstants;
     if (numConstants > 0)
     {
-        jitBody->constTable = (intptr_t *)PointerValue(functionBody->GetConstTable());
+        jitBody->constTable = unsafe_write_barrier_cast<intptr_t *>(functionBody->GetConstTable());
         if (!functionBody->GetIsAsmJsFunction())
         {
             jitBody->constTableContent = AnewStructZ(arena, ConstTableContentIDL);
@@ -242,7 +242,7 @@ JITTimeFunctionBody::InitializeJITFunctionData(
     jitBody->displayName = (char16 *)functionBody->GetDisplayName();
     jitBody->objectLiteralTypesAddr = (intptr_t)functionBody->GetObjectLiteralTypesWithLock();
     jitBody->literalRegexCount = functionBody->GetLiteralRegexCount();
-    jitBody->literalRegexes = (intptr_t*)functionBody->GetLiteralRegexesWithLock();
+    jitBody->literalRegexes = unsafe_write_barrier_cast<intptr_t*>(functionBody->GetLiteralRegexesWithLock());
 
     Js::AuxArray<uint32> * slotIdInCachedScopeToNestedIndexArray = functionBody->GetSlotIdInCachedScopeToNestedIndexArrayWithLock();
     if (slotIdInCachedScopeToNestedIndexArray)
@@ -490,7 +490,7 @@ Js::PropertyId
 JITTimeFunctionBody::GetPropertyIdFromCacheId(uint cacheId) const
 {
     Assert(m_bodyData.cacheIdToPropertyIdMap);
-    Assert(cacheId < GetInlineCacheCount());
+    AssertOrFailFast(cacheId < GetInlineCacheCount());
     return static_cast<Js::PropertyId>(m_bodyData.cacheIdToPropertyIdMap[cacheId]);
 }
 
@@ -504,7 +504,7 @@ JITTimeFunctionBody::GetReferencedPropertyId(uint index) const
     uint mapIndex = index - TotalNumberOfBuiltInProperties;
 
     Assert(m_bodyData.referencedPropertyIdMap != nullptr);
-    Assert(mapIndex < m_bodyData.referencedPropertyIdCount);
+    AssertOrFailFast(mapIndex < m_bodyData.referencedPropertyIdCount);
 
     return m_bodyData.referencedPropertyIdMap[mapIndex];
 }
@@ -852,7 +852,7 @@ intptr_t
 JITTimeFunctionBody::GetConstantVar(Js::RegSlot location) const
 {
     Assert(m_bodyData.constTable != nullptr);
-    Assert(location < GetConstCount());
+    AssertOrFailFast(location < GetConstCount());
     Assert(location != 0);
 
     return static_cast<intptr_t>(m_bodyData.constTable[location - Js::FunctionBody::FirstRegSlot]);
@@ -863,7 +863,7 @@ JITTimeFunctionBody::GetConstantContent(Js::RegSlot location) const
 {
     Assert(m_bodyData.constTableContent != nullptr);
     Assert(m_bodyData.constTableContent->content != nullptr);
-    Assert(location < GetConstCount());
+    AssertOrFailFast(location < GetConstCount());
     Assert(location != 0);
 
     JITRecyclableObject * obj = (JITRecyclableObject *)m_bodyData.constTableContent->content[location - Js::FunctionBody::FirstRegSlot];
@@ -888,7 +888,7 @@ intptr_t
 JITTimeFunctionBody::GetInlineCache(uint index) const
 {
     Assert(m_bodyData.inlineCaches != nullptr);
-    Assert(index < GetInlineCacheCount());
+    AssertOrFailFast(index < GetInlineCacheCount());
 #if 0 // TODO: michhol OOP JIT, add these asserts
     Assert(this->m_inlineCacheTypes[index] == InlineCacheTypeNone ||
         this->m_inlineCacheTypes[index] == InlineCacheTypeInlineCache);
@@ -901,7 +901,7 @@ intptr_t
 JITTimeFunctionBody::GetIsInstInlineCache(uint index) const
 {
     Assert(m_bodyData.inlineCaches != nullptr);
-    Assert(index < m_bodyData.isInstInlineCacheCount);
+    AssertOrFailFast(index < m_bodyData.isInstInlineCacheCount);
     index += GetInlineCacheCount();
 #if 0 // TODO: michhol OOP JIT, add these asserts
     Assert(this->m_inlineCacheTypes[index] == InlineCacheTypeNone ||
@@ -916,7 +916,7 @@ JITTimeFunctionBody::GetConstantType(Js::RegSlot location) const
 {
     Assert(m_bodyData.constTable != nullptr);
     Assert(m_bodyData.constTableContent != nullptr);
-    Assert(location < GetConstCount());
+    AssertOrFailFast(location < GetConstCount());
     Assert(location != 0);
     auto obj = m_bodyData.constTableContent->content[location - Js::FunctionBody::FirstRegSlot];
 
@@ -939,7 +939,7 @@ JITTimeFunctionBody::GetConstantType(Js::RegSlot location) const
 intptr_t
 JITTimeFunctionBody::GetLiteralRegexAddr(uint index) const
 {
-    Assert(index < m_bodyData.literalRegexCount);
+    AssertOrFailFast(index < m_bodyData.literalRegexCount);
 
     return m_bodyData.literalRegexes[index];
 }
@@ -968,7 +968,7 @@ JITTimeFunctionBody::GetRootObject() const
 Js::FunctionInfoPtrPtr
 JITTimeFunctionBody::GetNestedFuncRef(uint index) const
 {
-    Assert(index < GetNestedCount());
+    AssertOrFailFast(index < GetNestedCount());
     Js::FunctionInfoPtrPtr baseAddr = (Js::FunctionInfoPtrPtr)m_bodyData.nestedFuncArrayAddr;
     return baseAddr + index;
 }
@@ -976,7 +976,7 @@ JITTimeFunctionBody::GetNestedFuncRef(uint index) const
 intptr_t
 JITTimeFunctionBody::GetLoopHeaderAddr(uint loopNum) const
 {
-    Assert(loopNum < GetLoopCount());
+    AssertOrFailFast(loopNum < GetLoopCount());
     intptr_t baseAddr = m_bodyData.loopHeaderArrayAddr;
     return baseAddr + (loopNum * sizeof(Js::LoopHeader));
 }
@@ -984,7 +984,7 @@ JITTimeFunctionBody::GetLoopHeaderAddr(uint loopNum) const
 const JITLoopHeaderIDL *
 JITTimeFunctionBody::GetLoopHeaderData(uint loopNum) const
 {
-    Assert(loopNum < GetLoopCount());
+    AssertOrFailFast(loopNum < GetLoopCount());
     return &m_bodyData.loopHeaders[loopNum];
 }
 
@@ -1021,7 +1021,7 @@ JITTimeFunctionBody::HasPropIdToFormalsMap() const
 bool
 JITTimeFunctionBody::IsRegSlotFormal(Js::RegSlot reg) const
 {
-    Assert(reg < m_bodyData.propertyIdsForRegSlotsCount);
+    AssertOrFailFast(reg < m_bodyData.propertyIdsForRegSlotsCount);
     Js::PropertyId propId = (Js::PropertyId)m_bodyData.propertyIdsForRegSlots[reg];
     Js::PropertyIdArray * formalProps = GetFormalsPropIdArray();
     for (uint32 i = 0; i < formalProps->count; i++)

+ 12 - 10
lib/Backend/JITTimeProfileInfo.cpp

@@ -141,33 +141,35 @@ JITTimeProfileInfo::InitializeJITProfileData(
 const Js::LdElemInfo *
 JITTimeProfileInfo::GetLdElemInfo(Js::ProfileId ldElemId) const
 {
+    AssertOrFailFast(ldElemId < m_profileData.profiledLdElemCount);
     return &(reinterpret_cast<Js::LdElemInfo*>(m_profileData.ldElemData)[ldElemId]);
 }
 
 const Js::StElemInfo *
 JITTimeProfileInfo::GetStElemInfo(Js::ProfileId stElemId) const
 {
+    AssertOrFailFast(stElemId < m_profileData.profiledStElemCount);
     return &(reinterpret_cast<Js::StElemInfo*>(m_profileData.stElemData)[stElemId]);
 }
 
 Js::ArrayCallSiteInfo *
 JITTimeProfileInfo::GetArrayCallSiteInfo(Js::ProfileId index) const
 {
-    Assert(index < GetProfiledArrayCallSiteCount());
+    AssertOrFailFast(index < GetProfiledArrayCallSiteCount());
     return &(reinterpret_cast<Js::ArrayCallSiteInfo*>(m_profileData.arrayCallSiteData)[index]);
 }
 
 intptr_t
 JITTimeProfileInfo::GetArrayCallSiteInfoAddr(Js::ProfileId index) const
 {
-    Assert(index < GetProfiledArrayCallSiteCount());
+    AssertOrFailFast(index < GetProfiledArrayCallSiteCount());
     return m_profileData.arrayCallSiteDataAddr + index * sizeof(ArrayCallSiteIDL);
 }
 
 Js::FldInfo *
 JITTimeProfileInfo::GetFldInfo(uint fieldAccessId) const
 {
-    Assert(fieldAccessId < GetProfiledFldCount());
+    AssertOrFailFast(fieldAccessId < GetProfiledFldCount());
     return &(reinterpret_cast<Js::FldInfo*>(m_profileData.fldData)[fieldAccessId]);
 }
 
@@ -181,7 +183,7 @@ JITTimeProfileInfo::GetFldInfoAddr(uint fieldAccessId) const
 ValueType
 JITTimeProfileInfo::GetSlotLoad(Js::ProfileId slotLoadId) const
 {
-    Assert(slotLoadId < GetProfiledSlotCount());
+    AssertOrFailFast(slotLoadId < GetProfiledSlotCount());
     return reinterpret_cast<ValueType*>(m_profileData.slotData)[slotLoadId];
 }
 
@@ -197,32 +199,32 @@ JITTimeProfileInfo::GetReturnType(Js::OpCode opcode, Js::ProfileId callSiteId) c
     if (opcode < Js::OpCode::ProfiledReturnTypeCallI || (opcode > Js::OpCode::ProfiledReturnTypeCallIFlags && opcode < Js::OpCode::ProfiledReturnTypeCallIExtended) || opcode > Js::OpCode::ProfiledReturnTypeCallIExtendedFlags)
     {
         Assert(Js::DynamicProfileInfo::IsProfiledCallOp(opcode));
-        Assert(callSiteId < GetProfiledCallSiteCount());
+        AssertOrFailFast(callSiteId < GetProfiledCallSiteCount());
         return GetCallSiteInfo()[callSiteId].returnType;
     }
     Assert(Js::DynamicProfileInfo::IsProfiledReturnTypeOp(opcode));
-    Assert(callSiteId < GetProfiledReturnTypeCount());
+    AssertOrFailFast(callSiteId < GetProfiledReturnTypeCount());
     return reinterpret_cast<ValueType*>(m_profileData.returnTypeData)[callSiteId];
 }
 
 ValueType
 JITTimeProfileInfo::GetDivProfileInfo(Js::ProfileId divideId) const
 {
-    Assert(divideId < GetProfiledDivOrRemCount());
+    AssertOrFailFast(divideId < GetProfiledDivOrRemCount());
     return reinterpret_cast<ValueType*>(m_profileData.divideTypeInfo)[divideId];
 }
 
 ValueType
 JITTimeProfileInfo::GetSwitchProfileInfo(Js::ProfileId switchId) const
 {
-    Assert(switchId < GetProfiledSwitchCount());
+    AssertOrFailFast(switchId < GetProfiledSwitchCount());
     return reinterpret_cast<ValueType*>(m_profileData.switchTypeInfo)[switchId];
 }
 
 ValueType
 JITTimeProfileInfo::GetParameterInfo(Js::ArgSlot index) const
 {
-    Assert(index < GetProfiledInParamsCount());
+    AssertOrFailFast(index < GetProfiledInParamsCount());
     return reinterpret_cast<ValueType*>(m_profileData.parameterInfo)[index];
 }
 
@@ -231,7 +233,7 @@ JITTimeProfileInfo::GetLoopImplicitCallFlags(uint loopNum) const
 {
     // TODO: michhol OOP JIT, investigate vaibility of reenabling this assert
     // Assert(Js::DynamicProfileInfo::EnableImplicitCallFlags(functionBody));
-    Assert(loopNum < GetLoopCount());
+    AssertOrFailFast(loopNum < GetLoopCount());
 
     // Mask out the dispose implicit call. We would bailout on reentrant dispose,
     // but it shouldn't affect optimization.

+ 430 - 45
lib/Backend/Lower.cpp

@@ -5096,16 +5096,18 @@ Lowerer::LowerUpdateNewScObjectCache(IR::Instr * insertInstr, IR::Opnd *dst, IR:
     // Check if constructor is a function if we don't already know it.
     if (!isCtorFunction)
     {
+        IR::RegOpnd* src1RegOpnd = src1->AsRegOpnd();
         //  MOV r1, [src1 + offset(type)]       -- check base TypeIds_Function
         IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
-        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1->AsRegOpnd(), Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
+        IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(src1RegOpnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
         Lowerer::InsertMove(r1, indirOpnd, insertInstr);
 
         // CMP [r1 + offset(typeId)], TypeIds_Function
         // JNE $fallThru
         indirOpnd = IR::IndirOpnd::New(r1, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
         IR::IntConstOpnd *intOpnd = IR::IntConstOpnd::New(Js::TypeIds_Function, TyInt32, this->m_func, true);
-        InsertCompareBranch(indirOpnd, intOpnd, Js::OpCode::BrNeq_A, labelFallThru, insertInstr);
+        IR::BranchInstr* branchInstr = InsertCompareBranch(indirOpnd, intOpnd, Js::OpCode::BrNeq_A, labelFallThru, insertInstr);
+        InsertObjectPoison(src1RegOpnd, branchInstr, insertInstr);
     }
 
     // Every function has a constructor cache, even if only the default blank one.
@@ -7190,7 +7192,9 @@ Lowerer::GenerateCachedTypeCheck(IR::Instr *instrChk, IR::PropertySymOpnd *prope
         // to try the equivalent type cache, miss it and do the slow comparison. This may be as easy as sticking a null on the main
         // type in the equivalent type cache.
         IR::LabelInstr* labelCheckEquivalentType = IR::LabelInstr::New(Js::OpCode::Label, func, true);
-        InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelCheckEquivalentType, instrChk);
+        IR::BranchInstr* branchInstr = InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelCheckEquivalentType, instrChk);
+
+        InsertObjectPoison(regOpnd, branchInstr, instrChk);
 
         IR::LabelInstr *labelTypeCheckSucceeded = IR::LabelInstr::New(Js::OpCode::Label, func, false);
         InsertBranch(Js::OpCode::Br, labelTypeCheckSucceeded, instrChk);
@@ -7240,7 +7244,8 @@ Lowerer::GenerateCachedTypeCheck(IR::Instr *instrChk, IR::PropertySymOpnd *prope
     }
     else
     {
-        InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelSecondChance != nullptr ? labelSecondChance : labelTypeCheckFailed, instrChk);
+        IR::BranchInstr* branchInstr = InsertCompareBranch(typeOpnd, expectedTypeOpnd, Js::OpCode::BrNeq_A, labelSecondChance != nullptr ? labelSecondChance : labelTypeCheckFailed, instrChk);
+        InsertObjectPoison(regOpnd, branchInstr, instrChk);
     }
 
     // Don't pin the type for polymorphic operations. The code can successfully execute even if this type is no longer referenced by any objects,
@@ -7255,6 +7260,14 @@ Lowerer::GenerateCachedTypeCheck(IR::Instr *instrChk, IR::PropertySymOpnd *prope
     return typeOpnd;
 }
 
+void
+Lowerer::InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchInstr, IR::Instr* insertInstr)
+{
+#ifndef _M_ARM
+    LowererMD::InsertObjectPoison(poisonedOpnd, branchInstr, insertInstr);
+#endif
+}
+
 void
 Lowerer::PinTypeRef(JITTypeHolder type, void* typeRef, IR::Instr* instr, Js::PropertyId propertyId)
 {
@@ -7623,7 +7636,9 @@ Lowerer::GeneratePropertyGuardCheck(IR::Instr *insertPointInstr, IR::PropertySym
         Assert(Js::PropertyGuard::GetSizeOfValue() == static_cast<size_t>(TySize[TyMachPtr]));
         IR::AddrOpnd* zeroOpnd = IR::AddrOpnd::NewNull(this->m_func);
         IR::MemRefOpnd* guardOpnd = IR::MemRefOpnd::New(guard, TyMachPtr, this->m_func, IR::AddrOpndKindDynamicGuardValueRef);
-        InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, labelBailOut, insertPointInstr);
+        IR::BranchInstr *branchInstr = InsertCompareBranch(guardOpnd, zeroOpnd, Js::OpCode::BrEq_A, labelBailOut, insertPointInstr);
+        IR::RegOpnd *objPtrReg = IR::RegOpnd::New(propertySymOpnd->GetObjectSym(), TyMachPtr, m_func);
+        InsertObjectPoison(objPtrReg, branchInstr, insertPointInstr);
     }
     else
     {
@@ -8337,13 +8352,15 @@ Lowerer::LowerAddLeftDeadForString(IR::Instr *instr)
         this->m_lowererMD.GenerateObjectTest(opndLeft->AsRegOpnd(), insertBeforeInstr, labelHelper);
     }
 
-    InsertCompareBranch(
+    IR::BranchInstr* branchInstr = InsertCompareBranch(
         IR::IndirOpnd::New(opndLeft->AsRegOpnd(), 0, TyMachPtr, m_func),
         this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableCompoundString),
         Js::OpCode::BrNeq_A,
         labelHelper,
         insertBeforeInstr);
 
+    InsertObjectPoison(opndLeft->AsRegOpnd(), branchInstr, insertBeforeInstr);
+
     GenerateStringTest(opndRight->AsRegOpnd(), insertBeforeInstr, labelHelper);
 
     // left->m_charLength <= JavascriptArray::MaxCharLength
@@ -11006,7 +11023,7 @@ Lowerer::InlineBuiltInLibraryCall(IR::Instr *callInstr)
 
         case Js::BuiltinFunction::JavascriptString_CharCodeAt:
         case Js::BuiltinFunction::JavascriptString_CharAt:
-            success = this->m_lowererMD.GenerateFastCharAt(index, callInstr->GetDst(), argsOpnd[0], argsOpnd[1],
+            success = GenerateFastCharAt(index, callInstr->GetDst(), argsOpnd[0], argsOpnd[1],
                 callInstr, labelHelper, labelHelper, doneLabel);
             break;
 
@@ -13833,12 +13850,14 @@ void Lowerer::GenerateObjectTypeTest(IR::RegOpnd *srcReg, IR::Instr *instrInsert
 
     // CMP [srcReg], Js::DynamicObject::`vtable'
     // JNE $helper
-    InsertCompareBranch(
+    IR::BranchInstr *branchInstr = InsertCompareBranch(
         IR::IndirOpnd::New(srcReg, 0, TyMachPtr, m_func),
         LoadVTableValueOpnd(instrInsert, VTableValue::VtableDynamicObject),
         Js::OpCode::BrNeq_A,
         labelHelper,
         instrInsert);
+
+    InsertObjectPoison(srcReg, branchInstr, instrInsert);
 }
 
 const VTableValue Lowerer::VtableAddresses[static_cast<ValueType::TSize>(ObjectType::Count)] =
@@ -14328,13 +14347,18 @@ IR::RegOpnd *Lowerer::GenerateArrayTest(
         // We expect a native float array. If we get native int instead, convert it on the spot and bail out afterward.
         const auto goodArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
 
-        InsertCompareBranch(
+        IR::BranchInstr* branchInstr = InsertCompareBranch(
             IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
             vtableOpnd,
             Js::OpCode::BrEq_A,
             goodArrayLabel,
             insertBeforeInstr);
 
+        if (!isStore)
+        {
+            InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr);
+        }
+
         IR::LabelInstr *notFloatArrayLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
         insertBeforeInstr->InsertBefore(notFloatArrayLabel);
 
@@ -14350,13 +14374,18 @@ IR::RegOpnd *Lowerer::GenerateArrayTest(
             vtableOpnd = LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableJavascriptNativeIntArray);
         }
 
-        InsertCompareBranch(
+        branchInstr = InsertCompareBranch(
             IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
             vtableOpnd,
             Js::OpCode::BrNeq_A,
             isNotArrayLabel,
             insertBeforeInstr);
 
+        if (!isStore)
+        {
+            InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr);
+        }
+
         m_lowererMD.LoadHelperArgument(insertBeforeInstr, arrayOpnd);
 
         IR::Instr *helperInstr = IR::Instr::New(Js::OpCode::Call, m_func);
@@ -14383,22 +14412,32 @@ IR::RegOpnd *Lowerer::GenerateArrayTest(
             Assert(virtualVtableAddress);
             vtableVirtualOpnd = LoadVTableValueOpnd(insertBeforeInstr, virtualVtableAddress);
             Assert(vtableVirtualOpnd);
-            InsertCompareBranch(
+            IR::BranchInstr* branchInstr = InsertCompareBranch(
                 IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                 vtableVirtualOpnd,
                 Js::OpCode::BrNeq_A,
                 isNotArrayLabel,
                 insertBeforeInstr);
+
+            if (!isStore)
+            {
+                InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr);
+            }
             insertBeforeInstr->InsertBefore(goodArrayLabel);
         }
         else
         {
-            InsertCompareBranch(
+            IR::BranchInstr *branchInstr = InsertCompareBranch(
                 IR::IndirOpnd::New(arrayOpnd, 0, TyMachPtr, func),
                 vtableOpnd,
                 Js::OpCode::BrNeq_A,
                 isNotArrayLabel,
                 insertBeforeInstr);
+
+            if (!isStore)
+            {
+                InsertObjectPoison(arrayOpnd, branchInstr, insertBeforeInstr);
+            }
         }
 
     }
@@ -15124,6 +15163,7 @@ Lowerer::GenerateFastElemICommon(
     bool * pIsTypedArrayElement,
     bool * pIsStringIndex,
     bool *emitBailoutRef,
+    IR::Opnd** maskOpnd,
     IR::LabelInstr **pLabelSegmentLengthIncreased /*= nullptr*/,
     bool checkArrayLengthOverflow /*= true*/,
     bool forceGenerateFastPath /* = false */,
@@ -15180,6 +15220,7 @@ Lowerer::GenerateFastElemICommon(
             emitBailoutRef,
             pLabelSegmentLengthIncreased,
             checkArrayLengthOverflow,
+            maskOpnd,
             false,
             returnLength,
             bailOutLabelInstr,
@@ -15245,19 +15286,29 @@ Lowerer::GenerateFastElemIStringIndexCommon(IR::Instr * instrInsert, bool isStor
 
     IR::LabelInstr * notPropStrLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func, true);
     IR::LabelInstr * propStrLoadedLabel = IR::LabelInstr::New(Js::OpCode::Label, m_func);
-    InsertCompareBranch(
+    IR::BranchInstr *branchInstr = InsertCompareBranch(
         IR::IndirOpnd::New(indexOpnd, 0, TyMachPtr, m_func),
         LoadVTableValueOpnd(instrInsert, VTableValue::VtablePropertyString),
         Js::OpCode::BrNeq_A, notPropStrLabel, instrInsert);
     InsertBranch(Js::OpCode::Br, propStrLoadedLabel, instrInsert);
 
+    if (!isStore)
+    {
+        InsertObjectPoison(indexOpnd, branchInstr, instrInsert);
+    }
+
     instrInsert->InsertBefore(notPropStrLabel);
 
-    InsertCompareBranch(
+    branchInstr = InsertCompareBranch(
         IR::IndirOpnd::New(indexOpnd, 0, TyMachPtr, m_func),
         LoadVTableValueOpnd(instrInsert, VTableValue::VtableLiteralStringWithPropertyStringPtr),
         Js::OpCode::BrNeq_A, labelHelper, instrInsert);
 
+    if (!isStore)
+    {
+        InsertObjectPoison(indexOpnd, branchInstr, instrInsert);
+    }
+
     IR::IndirOpnd * propStrOpnd = IR::IndirOpnd::New(indexOpnd, Js::LiteralStringWithPropertyStringPtr::GetOffsetOfPropertyString(), TyMachPtr, m_func);
     InsertCompareBranch(propStrOpnd, IR::IntConstOpnd::New(NULL, TyMachPtr, m_func), Js::OpCode::BrNeq_A, labelHelper, instrInsert);
 
@@ -15320,6 +15371,7 @@ Lowerer::GenerateFastElemIIntIndexCommon(
     bool *emitBailoutRef,
     IR::LabelInstr **pLabelSegmentLengthIncreased,
     bool checkArrayLengthOverflow /*= true*/,
+    IR::Opnd** maskOpnd,
     bool forceGenerateFastPath /* = false */,
     bool returnLength,
     IR::LabelInstr *bailOutLabelInstr /* = nullptr*/,
@@ -16085,6 +16137,29 @@ Lowerer::GenerateFastElemIIntIndexCommon(
         }
     } // #endif
 
+    bool shouldPoisonLoad = maskOpnd != nullptr
+        && (baseValueType.IsLikelyTypedArray()
+            ? CONFIG_FLAG_RELEASE(PoisonTypedArrayLoad)
+            : ((indirType == TyVar && CONFIG_FLAG_RELEASE(PoisonVarArrayLoad))
+                || (IRType_IsNativeInt(indirType) && CONFIG_FLAG_RELEASE(PoisonIntArrayLoad))
+                || (IRType_IsFloat(indirType) && CONFIG_FLAG_RELEASE(PoisonFloatArrayLoad))));
+#if TARGET_32
+    if (shouldPoisonLoad)
+    {
+        // Prevent index from being negative, which would break the poisoning
+        if (indexValueOpnd->IsIntConstOpnd())
+        {
+            indexValueOpnd = IR::IntConstOpnd::New(value & INT32_MAX, TyUint32, m_func);
+        }
+        else
+        {
+            IR::RegOpnd* newIndexValueOpnd = IR::RegOpnd::New(TyUint32, m_func);
+            InsertAnd(newIndexValueOpnd, indexValueOpnd, IR::IntConstOpnd::New(INT32_MAX, TyUint32, m_func), instr);
+            indexValueOpnd = newIndexValueOpnd;
+        }
+    }
+#endif
+
     if (baseValueType.IsLikelyTypedArray())
     {
         if(!headSegmentOpnd)
@@ -16124,32 +16199,88 @@ Lowerer::GenerateFastElemIIntIndexCommon(
         indirOpnd->SetOffset(offsetof(Js::SparseArraySegment<Js::Var>, elements));
     }
 
-    return indirOpnd;
-}
-
-void
-Lowerer::GenerateTypeIdCheck(Js::TypeId typeId, IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateObjectCheck)
-{
-    if (generateObjectCheck && !opnd->IsNotTaggedValue())
+    if (shouldPoisonLoad)
     {
-        m_lowererMD.GenerateObjectTest(opnd, insertBeforeInstr, labelFail);
+        // Use a mask to prevent arbitrary speculative reads
+        if (!headSegmentLengthOpnd)
+        {
+            if (baseValueType.IsLikelyTypedArray())
+            {
+                int lengthOffset;
+                lengthOffset = Js::Float64Array::GetOffsetOfLength();
+                headSegmentLengthOpnd = IR::IndirOpnd::New(arrayOpnd, lengthOffset, TyUint32, m_func);
+                autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
+            }
+            else
+            {
+                headSegmentLengthOpnd =
+                    IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
+                autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
+            }
+        }
+        IR::RegOpnd* localMaskOpnd = nullptr;
+#if TARGET_64
+        IR::RegOpnd* headSegmentLengthRegOpnd = IR::RegOpnd::New(headSegmentLengthOpnd->GetType(), m_func);
+        IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, headSegmentLengthRegOpnd, headSegmentLengthOpnd, m_func);
+        instr->InsertBefore(instrMov);
+        LowererMD::Legalize(instrMov);
+
+        if (headSegmentLengthRegOpnd->GetSize() != MachPtr)
+        {
+            headSegmentLengthRegOpnd = headSegmentLengthRegOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
     }
 
     //  MOV r1, [opnd + offset(type)]
-    IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
-    const IR::AutoReuseOpnd autoReuseR1(r1, m_func);
-    IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(opnd, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, this->m_func);
-    InsertMove(r1, indirOpnd, insertBeforeInstr);
+        IR::RegOpnd* indexValueRegOpnd = IR::RegOpnd::New(indexValueOpnd->GetType(), m_func);
 
-    //  CMP [r1 + offset(typeId)], typeid -- check src isString
-    //  JNE $fail
-    indirOpnd = IR::IndirOpnd::New(r1, Js::Type::GetOffsetOfTypeId(), TyInt32, this->m_func);
-    InsertCompareBranch(
-        indirOpnd,
-        IR::IntConstOpnd::New(typeId, TyInt32, this->m_func),
-        Js::OpCode::BrNeq_A,
-        labelFail,
-        insertBeforeInstr);
+        instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, indexValueRegOpnd, indexValueOpnd, m_func);
+        instr->InsertBefore(instrMov);
+        LowererMD::Legalize(instrMov);
+
+        if (indexValueRegOpnd->GetSize() != MachPtr)
+        {
+            indexValueRegOpnd = indexValueRegOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
+    }
+
+        localMaskOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
+        InsertSub(false, localMaskOpnd, indexValueRegOpnd, headSegmentLengthRegOpnd, instr);
+        InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(63, TyInt8, m_func), instr);
+#else
+        localMaskOpnd = IR::RegOpnd::New(TyInt32, m_func);
+        InsertSub(false, localMaskOpnd, indexValueOpnd, headSegmentLengthOpnd, instr);
+        InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), instr);
+#endif
+
+        if (IRType_IsNativeInt(indirType) || indirType == TyVar)
+        {
+            *maskOpnd = localMaskOpnd;
+        }
+        else
+        {
+            // for float values, do the poisoning before the load to avoid needing slow floating point conversions
+            IR::RegOpnd* loadAddr = IR::RegOpnd::New(TyMachPtr, m_func);
+
+#if _M_ARM32_OR_ARM64
+            if (indirOpnd->GetIndexOpnd() != nullptr && indirOpnd->GetScale() > 0)
+            {
+                // We don't support encoding for LEA with scale on ARM/ARM64, so do the scale calculation as a separate instruction
+                IR::RegOpnd* fullIndexOpnd = IR::RegOpnd::New(indirOpnd->GetIndexOpnd()->GetType(), m_func);
+                InsertShift(Js::OpCode::Shl_A, false, fullIndexOpnd, indirOpnd->GetIndexOpnd(), IR::IntConstOpnd::New(indirOpnd->GetScale(), TyInt8, m_func), instr);
+                IR::IndirOpnd* newIndir = IR::IndirOpnd::New(indirOpnd->GetBaseOpnd(), fullIndexOpnd, indirType, m_func);
+                if (indirOpnd->GetOffset() != 0)
+                {
+                    newIndir->SetOffset(indirOpnd->GetOffset());
+                }
+                indirOpnd = newIndir;
+            }
+#endif
+
+            InsertLea(loadAddr, indirOpnd, instr);
+            InsertAnd(loadAddr, loadAddr, localMaskOpnd, instr);
+            indirOpnd = IR::IndirOpnd::New(loadAddr, 0, indirType, m_func);
+        }
+    }
+    return indirOpnd;
 }
 
 IR::RegOpnd *
@@ -16216,8 +16347,9 @@ Lowerer::GenerateFastStringLdElem(IR::Instr * ldElem, IR::LabelInstr * labelHelp
     //  CMP [baseOpnd + offset(length)], indexOpnd     --  string length
     //  JBE $helper                                    -- unsigned compare, and string length are at most INT_MAX - 1
     //                                                 -- so that even if we have a negative index, this will fail
-    InsertCompareBranch(IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func)
-        , index32CmpOpnd, Js::OpCode::BrLe_A, true, labelHelper, ldElem);
+    IR::RegOpnd* lengthOpnd = IR::RegOpnd::New(TyUint32, m_func);
+    InsertMove(lengthOpnd, IR::IndirOpnd::New(baseOpnd, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func), ldElem);
+    InsertCompareBranch(lengthOpnd, index32CmpOpnd, Js::OpCode::BrLe_A, true, labelHelper, ldElem);
 
     // Load the string buffer and make sure it is not null
     //  MOV bufferOpnd, [baseOpnd + offset(m_pszValue)]
@@ -16228,6 +16360,25 @@ Lowerer::GenerateFastStringLdElem(IR::Instr * ldElem, IR::LabelInstr * labelHelp
     InsertMove(bufferOpnd, indirOpnd, ldElem);
     GenerateNotZeroTest(bufferOpnd, labelHelper, ldElem);
 
+    IR::RegOpnd* maskOpnd = nullptr;
+    if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
+    {
+        // Mask off the sign before loading so that poisoning will work for negative indices
+        if (index32CmpOpnd->IsIntConstOpnd())
+        {
+            charIndirOpnd->SetOffset((index32CmpOpnd->AsIntConstOpnd()->AsUint32() & INT32_MAX) * sizeof(char16));
+        }
+        else
+        {
+            InsertAnd(index32CmpOpnd, index32CmpOpnd, IR::IntConstOpnd::New(INT32_MAX, TyInt32, m_func), ldElem);
+        }
+
+        // All bits in mask will be 1 for a valid index or 0 for an OOB index
+        maskOpnd = IR::RegOpnd::New(TyInt32, m_func);
+        InsertSub(false, maskOpnd, index32CmpOpnd, lengthOpnd, ldElem);
+        InsertShift(Js::OpCode::Shr_A, false, maskOpnd, maskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), ldElem);
+    }
+
     // Load the character and check if it is 7bit ASCI (which we have the cache for)
     //  MOV charOpnd, [bufferOpnd + index32Opnd]
     //  CMP charOpnd, 0x80
@@ -16235,6 +16386,12 @@ Lowerer::GenerateFastStringLdElem(IR::Instr * ldElem, IR::LabelInstr * labelHelp
     IR::RegOpnd * charOpnd = IR::RegOpnd::New(TyUint32, this->m_func);
     const IR::AutoReuseOpnd autoReuseCharOpnd(charOpnd, m_func);
     InsertMove(charOpnd, charIndirOpnd, ldElem);
+
+    if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
+    {
+        InsertAnd(charOpnd, charOpnd, maskOpnd, ldElem);
+    }
+
     InsertCompareBranch(charOpnd, IR::IntConstOpnd::New(Js::CharStringCache::CharStringCacheSize, TyUint16, this->m_func),
         Js::OpCode::BrGe_A, true, labelHelper, ldElem);
 
@@ -16344,6 +16501,7 @@ Lowerer::GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef)
         }
 
         bool isTypedArrayElement, isStringIndex, indirOpndOverflowed = false;
+        IR::Opnd* maskOpnd = nullptr;
         indirOpnd =
             GenerateFastElemICommon(
                 ldElem,
@@ -16355,6 +16513,7 @@ Lowerer::GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef)
                 &isTypedArrayElement,
                 &isStringIndex,
                 &emitBailout,
+                &maskOpnd,
                 nullptr,    /* pLabelSegmentLengthIncreased */
                 true,       /* checkArrayLengthOverflow */
                 false,      /* forceGenerateFastPath */
@@ -16578,7 +16737,17 @@ Lowerer::GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef)
                 }
 
                 // MOV/MOVZX/MOVSX dst/reg.int32, IndirOpnd.type
-                IR::Instr *const instrMov = InsertMove(reg, indirOpnd, ldElem);
+                IR::Instr* instrMov = InsertMove(reg, indirOpnd, ldElem);
+                if (maskOpnd)
+                {
+#if TARGET_64
+                    if (maskOpnd->GetSize() != reg->GetType())
+                    {
+                        maskOpnd = maskOpnd->UseWithNewType(reg->GetType(), m_func)->AsRegOpnd();
+                    }
+#endif
+                    instrMov = InsertAnd(reg, reg, maskOpnd, ldElem);
+                }
 
                 if (dstType == TyInt32)
                 {
@@ -16636,6 +16805,16 @@ Lowerer::GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef)
         {
             // MOV dst, indirOpnd
             InsertMove(dst, indirOpnd, ldElem);
+            if (maskOpnd)
+            {
+#if TARGET_64
+                if (maskOpnd->GetSize() != dst->GetType())
+                {
+                    maskOpnd = maskOpnd->UseWithNewType(dst->GetType(), m_func)->AsRegOpnd();
+                }
+#endif
+                InsertAnd(dst, dst, maskOpnd, ldElem);
+            }
 
             // The string index fast path does not operate on index properties (we don't get a PropertyString in that case), so
             // we don't need to do any further checks in that case
@@ -16888,6 +17067,7 @@ Lowerer::GenerateFastStElemI(IR::Instr *& stElem, bool *instrIsInHelperBlockRef)
             &isTypedArrayElement,
             &isStringIndex,
             &emitBailout,
+            nullptr,
             &labelSegmentLengthIncreased,
             true,       /* checkArrayLengthOverflow */
             false,      /* forceGenerateFastPath */
@@ -18950,7 +19130,7 @@ Lowerer::GenerateFastInlineStringCharCodeAt(IR::Instr * instr, Js::BuiltinFuncti
 
     IR::LabelInstr *labelHelper = IR::LabelInstr::New(Js::OpCode::Label, this->m_func, true);
 
-    bool success = this->m_lowererMD.GenerateFastCharAt(index, instr->GetDst(), argsOpnd[0], argsOpnd[1],
+    bool success = GenerateFastCharAt(index, instr->GetDst(), argsOpnd[0], argsOpnd[1],
             instr, instr, labelHelper, doneLabel);
 
     instr->InsertBefore(labelHelper);
@@ -19275,6 +19455,7 @@ bool Lowerer::GenerateFastPop(IR::Opnd *baseOpndParam, IR::Instr *callInstr, IR:
             &isTypedArrayElement,
             &isStringIndex,
             nullptr,
+            nullptr,
             nullptr /*pLabelSegmentLengthIncreased*/,
             true /*checkArrayLengthOverflow*/,
             true /* forceGenerateFastPath */,
@@ -19357,6 +19538,7 @@ bool Lowerer::GenerateFastPush(IR::Opnd *baseOpndParam, IR::Opnd *src, IR::Instr
             &isTypedArrayElement,
             &isStringIndex,
             nullptr,
+            nullptr,
             nullptr /*pLabelSegmentLengthIncreased*/,
             false /*checkArrayLengthOverflow*/,
             true /* forceGenerateFastPath */,
@@ -19372,6 +19554,202 @@ bool Lowerer::GenerateFastPush(IR::Opnd *baseOpndParam, IR::Opnd *src, IR::Instr
     return true;
 }
 
+bool
+Lowerer::GenerateFastCharAt(Js::BuiltinFunction index, IR::Opnd *dst, IR::Opnd *srcStr, IR::Opnd *srcIndex, IR::Instr *callInstr,
+    IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
+{
+    //  if regSrcStr is not object, JMP $helper
+    //  CMP [regSrcStr + offset(type)] , static string type   -- check base string type
+    //  JNE $helper
+    //  MOV r1, [regSrcStr + offset(m_pszValue)]
+    //  TEST r1, r1
+    //  JEQ $helper
+    //  MOV r2, srcIndex
+    //  If r2 is not int, JMP $helper
+    //  Convert r2 to int
+    //  CMP [regSrcStr + offsetof(length)], r2
+    //  JBE $helper
+    //  MOVZX r2, [r1 + r2 * 2]
+    //  if (charAt)
+    //      PUSH r1
+    //      PUSH scriptContext
+    //      CALL GetStringFromChar
+    //      MOV dst, EAX
+    //  else (charCodeAt)
+    //      if (codePointAt)
+    //          Lowerer.GenerateFastCodePointAt -- Common inline functions
+    //      Convert r2 to Var
+    //      MOV dst, r2
+    bool isInt = false;
+    bool isNotTaggedValue = false;
+    IR::RegOpnd *regSrcStr;
+
+    if (srcStr->IsRegOpnd())
+    {
+        if (srcStr->AsRegOpnd()->IsTaggedInt())
+        {
+            isInt = true;
+
+        }
+        else if (srcStr->AsRegOpnd()->IsNotTaggedValue())
+        {
+            isNotTaggedValue = true;
+        }
+    }
+
+    if (srcStr->IsRegOpnd() == false)
+    {
+        IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyVar, this->m_func);
+        InsertMove(regOpnd, srcStr, insertInstr);
+        regSrcStr = regOpnd;
+    }
+    else
+    {
+        regSrcStr = srcStr->AsRegOpnd();
+    }
+
+    if (!isNotTaggedValue)
+    {
+        if (!isInt)
+        {
+            m_lowererMD.GenerateObjectTest(regSrcStr, insertInstr, labelHelper);
+        }
+        else
+        {
+            // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
+            IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
+            insertInstr->InsertBefore(fakeBr);
+
+            InsertBranch(Js::OpCode::Br, labelHelper, insertInstr);
+        }
+    }
+
+    // Bail out if index a constant and is less than zero.
+    if (srcIndex->IsAddrOpnd() && Js::TaggedInt::ToInt32(srcIndex->AsAddrOpnd()->m_address) < 0)
+    {
+        labelHelper->isOpHelper = false;
+        InsertBranch(Js::OpCode::Br, labelHelper, insertInstr);
+        return false;
+    }
+
+    GenerateStringTest(regSrcStr, insertInstr, labelHelper, nullptr, false);
+
+    // r1 contains the value of the char16* pointer inside JavascriptString.
+    // MOV r1, [regSrcStr + offset(m_pszValue)]
+    IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
+    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(regSrcStr->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, this->m_func);
+    InsertMove(r1, indirOpnd, insertInstr);
+
+    // TEST r1, r1 -- Null pointer test
+    // JEQ $helper
+    InsertTestBranch(r1, r1, Js::OpCode::BrEq_A, labelHelper, insertInstr);
+
+    IR::RegOpnd *strLength = IR::RegOpnd::New(TyUint32, m_func);
+    InsertMove(strLength, IR::IndirOpnd::New(regSrcStr, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func), insertInstr);
+    IR::Opnd* indexOpnd = nullptr;
+    if (srcIndex->IsAddrOpnd())
+    {
+        uint32 indexValue = Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address);
+        // CMP [regSrcStr + offsetof(length)], index
+        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
+        // JBE $helper
+        InsertCompareBranch(strLength, IR::IntConstOpnd::New(indexValue, TyUint32, m_func), Js::OpCode::BrLe_A, true, labelHelper, insertInstr);
+
+        // Mask off the sign so that poisoning will work for negative indices
+#if TARGET_32
+        uint32 maskedIndex = CONFIG_FLAG_RELEASE(PoisonStringLoad) ? (indexValue & INT32_MAX) : indexValue;
+#else
+        uint32 maskedIndex = indexValue;
+#endif
+        indirOpnd = IR::IndirOpnd::New(r1, maskedIndex * sizeof(char16), TyUint16, this->m_func);
+        indexOpnd = IR::IntConstOpnd::New(maskedIndex, TyMachPtr, m_func);
+    }
+    else
+    {
+        IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
+        // MOV r2, srcIndex
+        InsertMove(r2, srcIndex, insertInstr);
+
+        r2 = GenerateUntagVar(r2, labelHelper, insertInstr);
+
+        // CMP [regSrcStr + offsetof(length)], r2
+        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
+        // JBE $helper
+        InsertCompareBranch(strLength, r2, Js::OpCode::BrLe_A, true, labelHelper, insertInstr);
+
+#if TARGET_32
+        if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
+        {
+            // Mask off the sign so that poisoning will work for negative indices
+            InsertAnd(r2, r2, IR::IntConstOpnd::New(INT32_MAX, TyInt32, m_func), insertInstr);
+        }
+#endif
+
+        if (r2->GetSize() != MachPtr)
+        {
+            r2 = r2->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
+        }
+        indexOpnd = r2;
+
+        indirOpnd = IR::IndirOpnd::New(r1, r2, 1, TyUint16, this->m_func);
+    }
+    IR::RegOpnd* maskOpnd = nullptr;
+    if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
+    {
+        maskOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
+        if (strLength->GetSize() != MachPtr)
+        {
+            strLength = strLength->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
+        }
+        InsertSub(false, maskOpnd, indexOpnd, strLength, insertInstr);
+        InsertShift(Js::OpCode::Shr_A, false, maskOpnd, maskOpnd, IR::IntConstOpnd::New(MachRegInt * 8 - 1, TyInt8, m_func), insertInstr);
+
+        if (maskOpnd->GetSize() != TyUint32)
+        {
+            maskOpnd = maskOpnd->UseWithNewType(TyUint32, this->m_func)->AsRegOpnd();
+        }
+    }
+    // MOVZX charReg, [r1 + r2 * 2]  -- this is the value of the char
+    IR::RegOpnd *charReg = IR::RegOpnd::New(TyUint32, this->m_func);
+    InsertMove(charReg, indirOpnd, insertInstr);
+    if (CONFIG_FLAG_RELEASE(PoisonStringLoad))
+    {
+        InsertAnd(charReg, charReg, maskOpnd, insertInstr);
+    }
+    if (index == Js::BuiltinFunction::JavascriptString_CharAt)
+    {
+        IR::Opnd *resultOpnd;
+        if (dst->IsEqual(srcStr))
+        {
+            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
+        }
+        else
+        {
+            resultOpnd = dst;
+        }
+        GenerateGetSingleCharString(charReg, resultOpnd, labelHelper, doneLabel, insertInstr, false);
+    }
+    else
+    {
+        Assert(index == Js::BuiltinFunction::JavascriptString_CharCodeAt || index == Js::BuiltinFunction::JavascriptString_CodePointAt);
+
+        if (index == Js::BuiltinFunction::JavascriptString_CodePointAt)
+        {
+            GenerateFastInlineStringCodePointAt(insertInstr, this->m_func, strLength, srcIndex, charReg, r1);
+        }
+
+        if (charReg->GetSize() != MachPtr)
+        {
+            charReg = charReg->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
+        }
+        m_lowererMD.GenerateInt32ToVarConversion(charReg, insertInstr);
+
+        // MOV dst, charReg
+        InsertMove(dst, charReg, insertInstr);
+    }
+    return true;
+}
+
 IR::Opnd*
 Lowerer::GenerateArgOutForInlineeStackArgs(IR::Instr* callInstr, IR::Instr* stackArgsInstr)
 {
@@ -22720,7 +23098,7 @@ Lowerer::TryGenerateFastBrOrCmTypeOf(IR::Instr *instr, IR::Instr **prev, bool is
                 instr->InsertBefore(doneLabel);
 #else
                 InsertCompare(instrSrc1, instrSrc2, instr);
-                m_lowererMD.InsertCmovCC(isNeqOp ? Js::OpCode::CMOVE : Js::OpCode::CMOVNE, instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
+                LowererMD::InsertCmovCC(isNeqOp ? Js::OpCode::CMOVE : Js::OpCode::CMOVNE, instr->GetDst(), LoadLibraryValueOpnd(instr, LibraryValue::ValueFalse), instr);
 #endif
             }
             instr->Remove();
@@ -23202,9 +23580,11 @@ Lowerer::GenerateLdHomeObj(IR::Instr* instr)
     Lowerer::InsertMove(instanceRegOpnd, functionObjOpnd, instr);
 
     IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(instr, VTableValue::VtableStackScriptFunction);
-    InsertCompareBranch(IR::IndirOpnd::New(instanceRegOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
+    IR::BranchInstr* branchInstr = InsertCompareBranch(IR::IndirOpnd::New(instanceRegOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
         Js::OpCode::BrEq_A, true, labelDone, instr);
 
+    InsertObjectPoison(instanceRegOpnd, branchInstr, instr);
+
     IR::IndirOpnd *indirOpnd = IR::IndirOpnd::New(instanceRegOpnd, Js::ScriptFunction::GetOffsetOfHomeObj(), TyMachPtr, func);
     Lowerer::InsertMove(instanceRegOpnd, indirOpnd, instr);
 
@@ -23375,9 +23755,11 @@ Lowerer::GenerateSetHomeObj(IR::Instr* instrInsert)
         Js::OpCode::BrEq_A, true, labelForGeneratorScriptFunction, instrInsert);
 
     vtableAddressOpnd = this->LoadVTableValueOpnd(instrInsert, VTableValue::VtableJavascriptAsyncFunction);
-    InsertCompareBranch(IR::IndirOpnd::New(funcObjRegOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
+    IR::BranchInstr *branchInstr = InsertCompareBranch(IR::IndirOpnd::New(funcObjRegOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
         Js::OpCode::BrNeq_A, true, labelScriptFunction, instrInsert);
 
+    InsertObjectPoison(funcObjRegOpnd, branchInstr, instrInsert);
+
     instrInsert->InsertBefore(labelForGeneratorScriptFunction);
 
     indirOpnd = IR::IndirOpnd::New(funcObjRegOpnd, Js::JavascriptGeneratorFunction::GetOffsetOfScriptFunction(), TyMachPtr, func);
@@ -23462,8 +23844,9 @@ Lowerer::GenerateGetCurrentFunctionObject(IR::Instr * instr)
     IR::RegOpnd * functionObjectOpnd = instr->GetDst()->AsRegOpnd();
     IR::Opnd * vtableAddressOpnd = this->LoadVTableValueOpnd(insertBeforeInstr, VTableValue::VtableStackScriptFunction);
     IR::LabelInstr * labelDone = IR::LabelInstr::New(Js::OpCode::Label, func, false);
-    InsertCompareBranch(IR::IndirOpnd::New(functionObjectOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
+    IR::BranchInstr *branchInstr = InsertCompareBranch(IR::IndirOpnd::New(functionObjectOpnd, 0, TyMachPtr, func), vtableAddressOpnd,
         Js::OpCode::BrNeq_A, true, labelDone, insertBeforeInstr);
+    InsertObjectPoison(functionObjectOpnd, branchInstr, insertBeforeInstr);
     IR::RegOpnd * boxedFunctionObjectOpnd = IR::RegOpnd::New(TyMachPtr, func);
     InsertMove(boxedFunctionObjectOpnd, IR::IndirOpnd::New(functionObjectOpnd,
         Js::StackScriptFunction::GetOffsetOfBoxedScriptFunction(), TyMachPtr, func), insertBeforeInstr);
@@ -25778,14 +26161,16 @@ Lowerer::GenerateStringTest(IR::RegOpnd *srcReg, IR::Instr *insertInstr, IR::Lab
         // BrEq/BrNeq labelHelper.
         IR::IndirOpnd * src1 = IR::IndirOpnd::New(srcReg, Js::RecyclableObject::GetOffsetOfType(), TyMachReg, m_func);
         IR::Opnd * src2 = this->LoadLibraryValueOpnd(insertInstr, LibraryValue::ValueStringTypeStatic);
+        IR::BranchInstr* branchInstr = nullptr;
         if (continueLabel)
         {
-            InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, continueLabel, insertInstr);
+            branchInstr = InsertCompareBranch(src1, src2, Js::OpCode::BrEq_A, continueLabel, insertInstr);
         }
         else
         {
-            InsertCompareBranch(src1, src2, Js::OpCode::BrNeq_A, labelHelper, insertInstr);
+            branchInstr = InsertCompareBranch(src1, src2, Js::OpCode::BrNeq_A, labelHelper, insertInstr);
         }
+        InsertObjectPoison(srcReg, branchInstr, insertInstr);
     }
 }
 

+ 5 - 1
lib/Backend/Lower.h

@@ -318,7 +318,6 @@ private:
     void            GenerateIsEnabledArraySetElementFastPathCheck(IR::LabelInstr * isDisabledLabel, IR::Instr * const insertBeforeInstr);
     void            GenerateIsEnabledIntArraySetElementFastPathCheck(IR::LabelInstr * isDisabledLabel, IR::Instr * const insertBeforeInstr);
     void            GenerateIsEnabledFloatArraySetElementFastPathCheck(IR::LabelInstr * isDisabledLabel, IR::Instr * const insertBeforeInstr);
-    void            GenerateTypeIdCheck(Js::TypeId typeId, IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateObjectCheck = true);
     void            GenerateStringTest(IR::RegOpnd *srcReg, IR::Instr *instrInsert, IR::LabelInstr * failLabel, IR::LabelInstr * succeedLabel = nullptr, bool generateObjectCheck = true);
     IR::RegOpnd *   GenerateUntagVar(IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateTagCheck = true);
     void            GenerateNotZeroTest( IR::Opnd * opndSrc, IR::LabelInstr * labelZero, IR::Instr * instrInsert);
@@ -397,6 +396,7 @@ private:
         bool * pIsTypedArrayElement,
         bool * pIsStringIndex,
         bool *emitBailoutRef,
+        IR::Opnd** maskOpnd,
         IR::LabelInstr **pLabelSegmentLengthIncreased = nullptr,
         bool checkArrayLengthOverflow = true,
         bool forceGenerateFastPath = false,
@@ -415,6 +415,7 @@ private:
         bool *emitBailoutRef,
         IR::LabelInstr **pLabelSegmentLengthIncreased,
         bool checkArrayLengthOverflow,
+        IR::Opnd** maskOpnd,
         bool forceGenerateFastPath = false,
         bool returnLength = false,
         IR::LabelInstr *bailOutLabelInstr = nullptr,
@@ -424,6 +425,8 @@ private:
     bool            GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef);
     bool            GenerateFastStElemI(IR::Instr *& StElem, bool *instrIsInHelperBlockRef);
     bool            GenerateFastLdLen(IR::Instr *ldLen, bool *instrIsInHelperBlockRef);
+    bool            GenerateFastCharAt(Js::BuiltinFunction index, IR::Opnd *dst, IR::Opnd *srcStr, IR::Opnd *srcIndex, IR::Instr *callInstr, IR::Instr *insertInstr,
+        IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel);
     bool            GenerateFastInlineGlobalObjectParseInt(IR::Instr *instr);
     bool            GenerateFastInlineStringFromCharCode(IR::Instr* instr);
     bool            GenerateFastInlineStringFromCodePoint(IR::Instr* instr);
@@ -658,6 +661,7 @@ private:
     IR::Instr *     LowerSlotArrayCheck(IR::Instr * instr);
     void            InsertSlotArrayCheck(IR::Instr * instr, StackSym * dstSym, uint32 slotId);
     void            InsertFrameDisplayCheck(IR::Instr * instr, StackSym * dstSym, FrameDisplayCheckRecord * record);
+    static void     InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchInstr, IR::Instr* insertInstr);
 
     IR::RegOpnd *   LoadIndexFromLikelyFloat(IR::RegOpnd *indexOpnd, const bool skipNegativeCheck, IR::LabelInstr *const notTaggedIntLabel, IR::LabelInstr *const negativeLabel, IR::Instr *const insertBeforeInstr);
 

+ 27 - 196
lib/Backend/LowerMDShared.cpp

@@ -1187,7 +1187,7 @@ void LowererMD::ChangeToShift(IR::Instr *const instr, const bool needFlags)
     {
         // Only values between 0-31 mean anything
         IntConstType value = instr->GetSrc2()->AsIntConstOpnd()->GetValue();
-        value &= 0x1f;
+        value &= TySize[instr->GetDst()->GetType()] == 8 ? 63 : 31;
         instr->GetSrc2()->AsIntConstOpnd()->SetValue(value);
     }
 }
@@ -4836,199 +4836,6 @@ IR::Instr * LowererMD::GenerateFloatAbs(IR::RegOpnd * regOpnd, IR::Instr * inser
     return instr;
 }
 
-bool LowererMD::GenerateFastCharAt(Js::BuiltinFunction index, IR::Opnd *dst, IR::Opnd *srcStr, IR::Opnd *srcIndex, IR::Instr *callInstr,
-                                  IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
-{
-    //  if regSrcStr is not object, JMP $helper
-    //  CMP [regSrcStr + offset(type)] , static string type   -- check base string type
-    //  JNE $helper
-    //  MOV r1, [regSrcStr + offset(m_pszValue)]
-    //  TEST r1, r1
-    //  JEQ $helper
-    //  MOV r2, srcIndex
-    //  If r2 is not int, JMP $helper
-    //  Convert r2 to int
-    //  CMP [regSrcStr + offsetof(length)], r2
-    //  JBE $helper
-    //  MOVZX r2, [r1 + r2 * 2]
-    //  if (charAt)
-    //      PUSH r1
-    //      PUSH scriptContext
-    //      CALL GetStringFromChar
-    //      MOV dst, EAX
-    //  else (charCodeAt)
-    //      if (codePointAt)
-    //          Lowerer.GenerateFastCodePointAt -- Common inline functions
-    //      Convert r2 to Var
-    //      MOV dst, r2
-    bool isInt = false;
-    bool isNotTaggedValue = false;
-    IR::Instr *instr;
-    IR::RegOpnd *regSrcStr;
-
-    if (srcStr->IsRegOpnd())
-    {
-        if (srcStr->AsRegOpnd()->IsTaggedInt())
-        {
-            isInt = true;
-
-        }
-        else if (srcStr->AsRegOpnd()->IsNotTaggedValue())
-        {
-            isNotTaggedValue = true;
-        }
-    }
-
-    if (srcStr->IsRegOpnd() == false)
-    {
-        IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyVar, this->m_func);
-        instr = IR::Instr::New(Js::OpCode::MOV, regOpnd, srcStr, this->m_func);
-        insertInstr->InsertBefore(instr);
-        regSrcStr = regOpnd;
-    }
-    else
-    {
-        regSrcStr = srcStr->AsRegOpnd();
-    }
-
-    if (!isNotTaggedValue)
-    {
-        if (!isInt)
-        {
-            GenerateObjectTest(regSrcStr, insertInstr, labelHelper);
-        }
-        else
-        {
-            // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
-            IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
-            insertInstr->InsertBefore(fakeBr);
-
-            instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
-            insertInstr->InsertBefore(instr);
-        }
-    }
-
-    // Bail out if index a constant and is less than zero.
-    if (srcIndex->IsAddrOpnd() && Js::TaggedInt::ToInt32(srcIndex->AsAddrOpnd()->m_address) < 0)
-    {
-        labelHelper->isOpHelper = false;
-        instr = IR::BranchInstr::New(Js::OpCode::JMP, labelHelper, this->m_func);
-        insertInstr->InsertBefore(instr);
-        return false;
-    }
-
-    this->m_lowerer->GenerateStringTest(regSrcStr, insertInstr, labelHelper, nullptr, false);
-
-    // r1 contains the value of the char16* pointer inside JavascriptString.
-    // MOV r1, [regSrcStr + offset(m_pszValue)]
-    IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
-    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(regSrcStr->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, this->m_func);
-    instr = IR::Instr::New(Js::OpCode::MOV, r1, indirOpnd, this->m_func);
-    insertInstr->InsertBefore(instr);
-
-    // TEST r1, r1 -- Null pointer test
-    instr = IR::Instr::New(Js::OpCode::TEST, this->m_func);
-    instr->SetSrc1(r1);
-    instr->SetSrc2(r1);
-    insertInstr->InsertBefore(instr);
-
-    // JEQ $helper
-    instr = IR::BranchInstr::New(Js::OpCode::JEQ, labelHelper, this->m_func);
-    insertInstr->InsertBefore(instr);
-
-    IR::IndirOpnd *strLength = IR::IndirOpnd::New(regSrcStr, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func);
-    if (srcIndex->IsAddrOpnd())
-    {
-        // CMP [regSrcStr + offsetof(length)], index
-        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
-        instr->SetSrc1(strLength);
-        instr->SetSrc2(IR::IntConstOpnd::New(Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address), TyUint32, this->m_func));
-        insertInstr->InsertBefore(instr);
-
-        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
-        // JBE $helper
-        instr = IR::BranchInstr::New(Js::OpCode::JBE, labelHelper, this->m_func);
-        insertInstr->InsertBefore(instr);
-
-        indirOpnd = IR::IndirOpnd::New(r1, Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address) * sizeof(char16), TyInt16, this->m_func);
-    }
-    else
-    {
-        IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
-        // MOV r2, srcIndex
-        instr = IR::Instr::New(Js::OpCode::MOV, r2, srcIndex, this->m_func);
-        insertInstr->InsertBefore(instr);
-
-        if (!srcIndex->IsRegOpnd() || !srcIndex->AsRegOpnd()->IsTaggedInt())
-        {
-            GenerateSmIntTest(r2, insertInstr, labelHelper);
-        }
-#if INT32VAR
-        // Remove the tag
-        // MOV r2, [32-bit] r2
-        IR::Opnd * r2_32 = r2->UseWithNewType(TyInt32, this->m_func);
-        instr = IR::Instr::New(Js::OpCode::MOVSXD, r2, r2_32, this->m_func);
-        insertInstr->InsertBefore(instr);
-        r2 = r2_32->AsRegOpnd();
-#else
-        // r2 = SAR r2, VarTag_Shift
-        instr = IR::Instr::New(Js::OpCode::SAR, r2, r2, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
-        insertInstr->InsertBefore(instr);
-#endif
-
-        // CMP [regSrcStr + offsetof(length)], r2
-        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
-        instr->SetSrc1(strLength);
-        instr->SetSrc2(r2);
-        insertInstr->InsertBefore(instr);
-
-        if (r2->GetSize() != MachPtr)
-        {
-            r2 = r2->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
-        }
-
-        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
-        // JBE $helper
-        instr = IR::BranchInstr::New(Js::OpCode::JBE, labelHelper, this->m_func);
-        insertInstr->InsertBefore(instr);
-
-        indirOpnd = IR::IndirOpnd::New(r1, r2, 1, TyInt16, this->m_func);
-    }
-    // MOVZX charReg, [r1 + r2 * 2]  -- this is the value of the char
-    IR::RegOpnd *charReg = IR::RegOpnd::New(TyMachReg, this->m_func);
-    instr = IR::Instr::New(Js::OpCode::MOVZXW, charReg, indirOpnd, this->m_func);
-    insertInstr->InsertBefore(instr);
-    if (index == Js::BuiltinFunction::JavascriptString_CharAt)
-    {
-        IR::Opnd *resultOpnd;
-        if (dst->IsEqual(srcStr))
-        {
-            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
-        }
-        else
-        {
-            resultOpnd = dst;
-        }
-        this->m_lowerer->GenerateGetSingleCharString(charReg, resultOpnd, labelHelper, doneLabel, insertInstr, false);
-    }
-    else
-    {
-        Assert(index == Js::BuiltinFunction::JavascriptString_CharCodeAt || index == Js::BuiltinFunction::JavascriptString_CodePointAt);
-
-        if (index == Js::BuiltinFunction::JavascriptString_CodePointAt)
-        {
-            this->m_lowerer->GenerateFastInlineStringCodePointAt(insertInstr, this->m_func, strLength, srcIndex, charReg, r1);
-        }
-
-        GenerateInt32ToVarConversion(charReg, insertInstr);
-
-        // MOV dst, charReg
-        instr = IR::Instr::New(Js::OpCode::MOV, dst, charReg, this->m_func);
-        insertInstr->InsertBefore(instr);
-    }
-    return true;
-}
-
 IR::RegOpnd* LowererMD::MaterializeDoubleConstFromInt(intptr_t constAddr, IR::Instr* instr)
 {
     IR::Opnd* constVal = IR::MemRefOpnd::New(constAddr, IRType::TyFloat64, this->m_func);
@@ -6874,8 +6681,9 @@ bool LowererMD::GenerateObjectTest(IR::Opnd * opndSrc, IR::Instr * insertInstr,
     else
     {
         // JNE $labelHelper
-        instr = IR::BranchInstr::New(Js::OpCode::JNE, labelTarget, this->m_func);
-        insertInstr->InsertBefore(instr);
+        IR::BranchInstr* branchInstr = IR::BranchInstr::New(Js::OpCode::JNE, labelTarget, this->m_func);
+        insertInstr->InsertBefore(branchInstr);
+        InsertObjectPoison(opndSrc, branchInstr, insertInstr);
     }
     return true;
 }
@@ -8861,6 +8669,29 @@ LowererMD::LowerTypeof(IR::Instr * typeOfInstr)
     m_lowerer->LowerUnaryHelperMem(typeOfInstr, IR::HelperOp_Typeof);
 }
 
+void
+LowererMD::InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchInstr, IR::Instr* insertInstr)
+{
+    if (CONFIG_FLAG_RELEASE(PoisonObjects))
+    {
+        Js::OpCode opcode;
+        if (branchInstr->m_opcode == Js::OpCode::JNE)
+        {
+            opcode = Js::OpCode::CMOVNE;
+        }
+        else
+        {
+            AssertOrFailFast(branchInstr->m_opcode == Js::OpCode::JEQ);
+            opcode = Js::OpCode::CMOVE;
+        }
+        AssertOrFailFast(branchInstr->m_prev->m_opcode == Js::OpCode::CMP || branchInstr->m_prev->m_opcode == Js::OpCode::TEST);
+
+        IR::RegOpnd* regZero = IR::RegOpnd::New(TyMachPtr, insertInstr->m_func);
+        Lowerer::InsertMove(regZero, IR::IntConstOpnd::New(0, TyMachPtr, insertInstr->m_func), branchInstr->m_prev);
+        InsertCmovCC(opcode, poisonedOpnd, regZero, insertInstr);
+    }
+}
+
 IR::Instr*
 LowererMD::InsertCmovCC(const Js::OpCode opCode, IR::Opnd * dst, IR::Opnd* src1, IR::Instr* insertBeforeInstr, bool postRegAlloc)
 {

+ 1 - 2
lib/Backend/LowerMDShared.h

@@ -160,8 +160,6 @@ public:
             IR::Instr *     GenerateFastScopedStFld(IR::Instr * instrStFld);
             void            GenerateFastAbs(IR::Opnd *dst, IR::Opnd *src, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel);
             IR::Instr *     GenerateFloatAbs(IR::RegOpnd * regOpnd, IR::Instr * insertInstr);
-            bool            GenerateFastCharAt(Js::BuiltinFunction index, IR::Opnd *dst, IR::Opnd *srcStr, IR::Opnd *srcIndex, IR::Instr *callInstr, IR::Instr *insertInstr,
-                IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel);
             void            GenerateClz(IR::Instr * instr);
             void            GenerateCtz(IR::Instr * instr);
             void            GeneratePopCnt(IR::Instr * instr);
@@ -238,6 +236,7 @@ public:
             void            GenerateIsJsObjectTest(IR::RegOpnd* instanceReg, IR::Instr* insertInstr, IR::LabelInstr* labelHelper);
             void            LowerTypeof(IR::Instr * typeOfInstr);
 
+     static void            InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchInstr, IR::Instr* insertInstr);
 public:
             //
             // These methods are simply forwarded to lowererMDArch

+ 1 - 0
lib/Backend/Opnd.h

@@ -275,6 +275,7 @@ public:
 
     bool                IsValueTypeFixed() const { return m_isValueTypeFixed; }
     void                SetValueTypeFixed() { m_isValueTypeFixed = true; }
+    void                UnsetValueTypeFixed() { m_isValueTypeFixed = false; }
     IR::RegOpnd *       FindRegUse(IR::RegOpnd *regOpnd);
     bool                IsArgumentsObject();
 

+ 1 - 1
lib/Backend/ServerScriptContext.cpp

@@ -384,7 +384,7 @@ ServerScriptContext::Release()
 Field(Js::Var)*
 ServerScriptContext::GetModuleExportSlotArrayAddress(uint moduleIndex, uint slotIndex)
 {
-    Assert(m_moduleRecords.ContainsKey(moduleIndex));
+    AssertOrFailFast(m_moduleRecords.ContainsKey(moduleIndex));
     auto record = m_moduleRecords.Item(moduleIndex);
     return record->localExportSlotsAddr;
 }

+ 0 - 185
lib/Backend/arm/LowerMD.cpp

@@ -5616,191 +5616,6 @@ LowererMD::GenerateFastAbs(IR::Opnd *dst, IR::Opnd *src, IR::Instr *callInstr, I
      }
 }
 
-bool LowererMD::GenerateFastCharAt(Js::BuiltinFunction index, IR::Opnd *dst, IR::Opnd *srcStr, IR::Opnd *srcIndex, IR::Instr *callInstr,
-                                  IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *labelDone)
-{
-    //                  TST regSrc, AtomTag
-    //                  BNE $helper
-    // type         =   LDR [regSrc + offset(type)]
-    // typeid       =   LDR [type + offset(typeid)]
-    //                  CMP typeid, TypeIds_String
-    //                  BNE $helper
-    // psz          =   LDR [regSrc + offset(m_pszValue)]
-    //                  CMP psz, 0
-    //                  BEQ $helper
-    // index32      =   ASRS srcIndex, VarShift
-    //                  BCC $helper
-    // length       =   LDR [regSrc + offset(length)]
-    //                  CMP length, index32
-    //                  BLS $helper
-    // char         =   LDRH [regSrc + index32, LSL #1]
-    //
-    // if (charAt)
-    // (r1)         =   MOV char
-    // (r0)         =   LDIMM scriptContext
-    // dst          =   CALL GetStringFromChar
-    //
-    // else
-    //      if (codePointAt)
-    //                  Lowerer.GenerateFastCodePointAt -- Common inline functions
-    //
-    // char         =   LSL char, VarShift
-    // dst          =   ADD char, AtomTag
-
-    bool isInt = false;
-    IR::Instr *instr;
-    IR::IndirOpnd *indirOpnd;
-    IR::RegOpnd *regSrcStr;
-
-    if (srcStr->IsRegOpnd())
-    {
-        if (srcStr->AsRegOpnd()->IsTaggedInt())
-        {
-            isInt = true;
-        }
-    }
-
-    if (isInt)
-    {
-        // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
-        IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
-        insertInstr->InsertBefore(fakeBr);
-
-        // The "string" is an int. Just bail out.
-        instr = IR::BranchInstr::New(Js::OpCode::B, labelHelper, this->m_func);
-        insertInstr->InsertBefore(instr);
-        return false;
-    }
-
-    // Bail out if index a constant and is less than zero.
-    if (srcIndex->IsImmediateOpnd() && srcIndex->GetImmediateValue(this->m_func) < 0)
-    {
-        instr = IR::BranchInstr::New(Js::OpCode::B, labelHelper, this->m_func);
-        insertInstr->InsertBefore(instr);
-        return false;
-    }
-
-    // Force the string into a reg at the top. Otherwise we'll be loading it over and over...
-    if (srcStr->IsRegOpnd())
-    {
-        regSrcStr = srcStr->AsRegOpnd();
-    }
-    else
-    {
-        regSrcStr = IR::RegOpnd::New(TyMachReg, this->m_func);
-        Lowerer::InsertMove(regSrcStr, srcStr, insertInstr);
-    }
-
-    this->m_lowerer->GenerateStringTest(regSrcStr, insertInstr, labelHelper);
-
-    // psz = LDR [regSrc + offset(m_pszValue)]
-    IR::RegOpnd *psz = IR::RegOpnd::New(TyMachPtr, this->m_func);
-    indirOpnd = IR::IndirOpnd::New(regSrcStr, Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, this->m_func);
-    Lowerer::InsertMove(psz, indirOpnd, insertInstr);
-
-    //      CMP psz, 0
-    instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
-    instr->SetSrc1(psz);
-    instr->SetSrc2(IR::IntConstOpnd::New(0, TyMachPtr, this->m_func));
-    insertInstr->InsertBefore(instr);
-
-    //      BEQ $helper
-    instr = IR::BranchInstr::New(Js::OpCode::BEQ, labelHelper, this->m_func);
-    insertInstr->InsertBefore(instr);
-
-    // Arm should change to Uint32 for the length
-    // length = LDR [regSrcStr + offsetof(length)]
-    IR::RegOpnd *length = IR::RegOpnd::New(TyMachReg, this->m_func);
-    indirOpnd = IR::IndirOpnd::New(regSrcStr, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func);
-    Lowerer::InsertMove(length, indirOpnd, insertInstr);
-
-    if (srcIndex->IsAddrOpnd())
-    {
-        // The index is a constant, so just use it.
-        uint32 constIndex = Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address);
-
-        // CMP length, index32
-        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
-        instr->SetSrc1(length);
-        instr->SetSrc2(IR::IntConstOpnd::New(constIndex, TyUint32, this->m_func));
-        insertInstr->InsertBefore(instr);
-        LegalizeMD::LegalizeInstr(instr, false);
-
-        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
-        // BLS $helper
-        instr = IR::BranchInstr::New(Js::OpCode::BLS, labelHelper, this->m_func);
-        insertInstr->InsertBefore(instr);
-
-        // indir = [psz + index32 * 2]
-        indirOpnd = IR::IndirOpnd::New(psz, constIndex * sizeof(char16), TyUint16, this->m_func);
-    }
-    else
-    {
-        // index32 = ASRS srcIndex, VarShift
-        IR::RegOpnd *index32 = IR::RegOpnd::New(TyMachReg, this->m_func);
-        instr = IR::Instr::New(Js::OpCode::ASRS, index32, srcIndex, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
-        insertInstr->InsertBefore(instr);
-
-        if (!srcIndex->IsRegOpnd() || !srcIndex->AsRegOpnd()->IsTaggedInt())
-        {
-            // BCC $helper
-            instr = IR::BranchInstr::New(Js::OpCode::BCC, labelHelper, this->m_func);
-            insertInstr->InsertBefore(instr);
-        }
-
-        // CMP length, index32
-        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
-        instr->SetSrc1(length);
-        instr->SetSrc2(index32);
-        insertInstr->InsertBefore(instr);
-
-        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
-        // BLS $helper
-        instr = IR::BranchInstr::New(Js::OpCode::BLS, labelHelper, this->m_func);
-        insertInstr->InsertBefore(instr);
-
-        // indir = [psz + index32 * 2]
-        indirOpnd = IR::IndirOpnd::New(psz, index32, (byte)Math::Log2(sizeof(char16)), TyUint16, this->m_func);
-    }
-
-    // char = LDRH [regSrc + index32, LSL #1]
-    IR::RegOpnd *charResult = IR::RegOpnd::New(TyUint32, this->m_func);
-    Lowerer::InsertMove(charResult, indirOpnd, insertInstr);
-
-    if (index == Js::BuiltinFunction::JavascriptString_CharAt)
-    {
-        IR::Opnd *resultOpnd;
-        if (dst->IsEqual(srcStr))
-        {
-            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
-        }
-        else
-        {
-            resultOpnd = dst;
-        }
-        this->m_lowerer->GenerateGetSingleCharString(charResult, resultOpnd, labelHelper, labelDone, insertInstr, false);
-    }
-    else
-    {
-        Assert(index == Js::BuiltinFunction::JavascriptString_CharCodeAt || index == Js::BuiltinFunction::JavascriptString_CodePointAt);
-
-        if (index == Js::BuiltinFunction::JavascriptString_CodePointAt)
-        {
-            this->m_lowerer->GenerateFastInlineStringCodePointAt(insertInstr, this->m_func, length, srcIndex, charResult, psz);
-        }
-        // result = LSL result, VarShift
-        instr = IR::Instr::New(Js::OpCode::LSL, charResult, charResult, IR::IntConstOpnd::New(Js::VarTag_Shift, TyInt8, this->m_func), this->m_func);
-        insertInstr->InsertBefore(instr);
-
-        // dst = ADD result, AtomTag
-        instr = IR::Instr::New(Js::OpCode::ADD, dst, charResult, IR::IntConstOpnd::New(Js::AtomTag, TyMachReg, this->m_func), this->m_func);
-        insertInstr->InsertBefore(instr);
-        LegalizeMD::LegalizeInstr(instr, false);
-    }
-
-    return true;
-}
-
 void
 LowererMD::EmitInt4Instr(IR::Instr *instr)
 {

+ 0 - 5
lib/Backend/arm/LowerMD.h

@@ -126,8 +126,6 @@ public:
             IR::Instr *     GenerateFastScopedLdFld(IR::Instr * instrLdFld);
             IR::Instr *     GenerateFastScopedStFld(IR::Instr * instrStFld);
             void            GenerateFastAbs(IR::Opnd *dst, IR::Opnd *src, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel);
-            bool            GenerateFastCharAt(Js::BuiltinFunction index, IR::Opnd *dst, IR::Opnd *srcStr, IR::Opnd *srcIndex, IR::Instr *callInstr, IR::Instr *insertInstr,
-                IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel);
             bool            TryGenerateFastMulAdd(IR::Instr * instrAdd, IR::Instr ** pInstrPrev);
             void            GenerateFloatTest(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper, const bool checkForNullInLoopBody = false);
 
@@ -246,9 +244,6 @@ public:
             void                LowerInlineSpreadArgOutLoop(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd);
             void                LowerTypeof(IR::Instr * typeOfInstr);
             void                GenerateMemInit(IR::RegOpnd * opnd, int32 offset, size_t value, IR::Instr * insertBeforeInstr, bool isZeroed = false);
-
-public:
-    static IR::Instr * InsertCmovCC(const Js::OpCode opCode, IR::Opnd * dst, IR::Opnd* src1, IR::Instr* insertBeforeInstr, bool postRegAlloc);
 private:
     IR::Opnd* IsOpndNegZero(IR::Opnd* opnd, IR::Instr* instr);
 

+ 8 - 0
lib/Backend/arm64/EncoderMD.cpp

@@ -921,6 +921,14 @@ EncoderMD::GenerateEncoding(IR::Instr* instr, BYTE *pc)
         Assert(false);
         break;
 
+    case Js::OpCode::CSELEQ:
+        bytes = this->EmitConditionalSelect(Emitter, instr, COND_EQ, EmitCsel, EmitCsel64);
+        break;
+
+    case Js::OpCode::CSELNE:
+        bytes = this->EmitConditionalSelect(Emitter, instr, COND_NE, EmitCsel, EmitCsel64);
+        break;
+
     case Js::OpCode::CSELLT:
         bytes = this->EmitConditionalSelect(Emitter, instr, COND_LT, EmitCsel, EmitCsel64);
         break;

+ 28 - 189
lib/Backend/arm64/LowerMD.cpp

@@ -2306,10 +2306,11 @@ void LowererMD::ChangeToShift(IR::Instr *const instr, const bool needFlags)
     }
 
     // Javascript requires the ShiftCount is masked to the bottom 5 bits.
-    if(instr->GetSrc2()->IsIntConstOpnd())
+    uint8 mask = TySize[instr->GetDst()->GetType()] == 8 ? 63 : 31;
+    if (instr->GetSrc2()->IsIntConstOpnd())
     {
         // In the constant case, do the mask manually.
-        IntConstType immed = instr->GetSrc2()->AsIntConstOpnd()->GetValue() & 0x1f;
+        IntConstType immed = instr->GetSrc2()->AsIntConstOpnd()->GetValue() & mask;
         if (immed == 0)
         {
             // Shift by zero is just a move, and the shift-right instructions
@@ -2326,9 +2327,9 @@ void LowererMD::ChangeToShift(IR::Instr *const instr, const bool needFlags)
     {
         // In the variable case, generate code to do the mask.
         IR::Opnd *const src2 = instr->UnlinkSrc2();
-        instr->SetSrc2(IR::RegOpnd::New(TyInt32, func));
+        instr->SetSrc2(IR::RegOpnd::New(src2->GetType(), func));
         IR::Instr *const newInstr = IR::Instr::New(
-            Js::OpCode::AND, instr->GetSrc2(), src2, IR::IntConstOpnd::New(0x1f, TyInt8, func), func);
+            Js::OpCode::AND, instr->GetSrc2(), src2, IR::IntConstOpnd::New(mask, TyInt8, func), func);
         instr->InsertBefore(newInstr);
     }
 }
@@ -5125,191 +5126,6 @@ LowererMD::GenerateFastAbs(IR::Opnd *dst, IR::Opnd *src, IR::Instr *callInstr, I
     }
 }
 
-bool LowererMD::GenerateFastCharAt(Js::BuiltinFunction index, IR::Opnd *dst, IR::Opnd *srcStr, IR::Opnd *srcIndex, IR::Instr *callInstr,
-                                   IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel)
-{
-    //  if regSrcStr is not object, JMP $helper
-    //  CMP [regSrcStr + offset(type)] , static string type   -- check base string type
-    //  BNE $helper
-    //  MOV r1, [regSrcStr + offset(m_pszValue)]
-    //  CBZ r1, $helper
-    //  MOV r2, srcIndex
-    //  If r2 is not int, B $helper
-    //  Convert r2 to int
-    //  CMP [regSrcStr + offsetof(length)], r2
-    //  BLS $helper
-    //  LDRH r2, [r1 + r2 * 2]
-    //  if (charAt)
-    //      MOV x0, r1
-    //      MOV x1, scriptContext
-    //      BL GetStringFromChar
-    //      MOV dst, x0
-    //  else (charCodeAt)
-    //      if (codePointAt)
-    //          Lowerer.GenerateFastCodePointAt -- Common inline functions
-    //      Convert r2 to Var
-    //      MOV dst, r2
-    bool isInt = false;
-    bool isNotTaggedValue = false;
-    IR::Instr *instr;
-    IR::RegOpnd *regSrcStr;
-
-    if (srcStr->IsRegOpnd())
-    {
-        if (srcStr->AsRegOpnd()->IsTaggedInt())
-        {
-            isInt = true;
-
-        }
-        else if (srcStr->AsRegOpnd()->IsNotTaggedValue())
-        {
-            isNotTaggedValue = true;
-        }
-    }
-
-    if (srcStr->IsRegOpnd() == false)
-    {
-        IR::RegOpnd *regOpnd = IR::RegOpnd::New(TyVar, this->m_func);
-        instr = IR::Instr::New(Js::OpCode::MOV, regOpnd, srcStr, this->m_func);
-        insertInstr->InsertBefore(instr);
-        regSrcStr = regOpnd;
-    }
-    else
-    {
-        regSrcStr = srcStr->AsRegOpnd();
-    }
-
-    if (!isNotTaggedValue)
-    {
-        if (!isInt)
-        {
-            GenerateObjectTest(regSrcStr, insertInstr, labelHelper);
-        }
-        else
-        {
-            // Insert delete branch opcode to tell the dbChecks not to assert on this helper label
-            IR::Instr *fakeBr = IR::PragmaInstr::New(Js::OpCode::DeletedNonHelperBranch, 0, this->m_func);
-            insertInstr->InsertBefore(fakeBr);
-
-            instr = IR::BranchInstr::New(Js::OpCode::B, labelHelper, this->m_func);
-            insertInstr->InsertBefore(instr);
-        }
-    }
-
-    // Bail out if index a constant and is less than zero.
-    if (srcIndex->IsAddrOpnd() && Js::TaggedInt::ToInt32(srcIndex->AsAddrOpnd()->m_address) < 0)
-    {
-        labelHelper->isOpHelper = false;
-        instr = IR::BranchInstr::New(Js::OpCode::B, labelHelper, this->m_func);
-        insertInstr->InsertBefore(instr);
-        return false;
-    }
-
-    this->m_lowerer->GenerateStringTest(regSrcStr, insertInstr, labelHelper, nullptr, false);
-
-    // r1 contains the value of the char16* pointer inside JavascriptString.
-    // MOV r1, [regSrcStr + offset(m_pszValue)]
-    IR::RegOpnd *r1 = IR::RegOpnd::New(TyMachReg, this->m_func);
-    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(regSrcStr->AsRegOpnd(), Js::JavascriptString::GetOffsetOfpszValue(), TyMachPtr, this->m_func);
-    instr = IR::Instr::New(Js::OpCode::MOV, r1, indirOpnd, this->m_func);
-    insertInstr->InsertBefore(instr);
-    Legalize(instr);
-
-    // CBZ r1, $helper -- Null pointer test
-    instr = IR::BranchInstr::New(Js::OpCode::CBZ, labelHelper, this->m_func);
-    instr->SetSrc1(r1);
-    insertInstr->InsertBefore(instr);
-
-    IR::IndirOpnd *strLength = IR::IndirOpnd::New(regSrcStr, offsetof(Js::JavascriptString, m_charLength), TyUint32, this->m_func);
-    if (srcIndex->IsAddrOpnd())
-    {
-        // CMP [regSrcStr + offsetof(length)], index
-        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
-        instr->SetSrc1(strLength);
-        instr->SetSrc2(IR::IntConstOpnd::New(Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address), TyUint32, this->m_func));
-        insertInstr->InsertBefore(instr);
-        Legalize(instr);
-
-        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
-        // BLS $helper
-        instr = IR::BranchInstr::New(Js::OpCode::BLS, labelHelper, this->m_func);
-        insertInstr->InsertBefore(instr);
-
-        indirOpnd = IR::IndirOpnd::New(r1, Js::TaggedInt::ToUInt32(srcIndex->AsAddrOpnd()->m_address) * sizeof(char16), TyUint16, this->m_func);
-    }
-    else
-    {
-        IR::RegOpnd *r2 = IR::RegOpnd::New(TyVar, this->m_func);
-        // MOV r2, srcIndex
-        instr = IR::Instr::New(Js::OpCode::MOV, r2, srcIndex, this->m_func);
-        insertInstr->InsertBefore(instr);
-
-        if (!srcIndex->IsRegOpnd() || !srcIndex->AsRegOpnd()->IsTaggedInt())
-        {
-            GenerateSmIntTest(r2, insertInstr, labelHelper);
-        }
-
-        // Remove the tag
-        // MOV r2, r2 [32-bit move zeros upper 32 bits and thus the tag]
-        IR::Opnd * r2_32 = r2->UseWithNewType(TyInt32, this->m_func);
-        instr = IR::Instr::New(Js::OpCode::MOV_TRUNC, r2_32, r2_32, this->m_func);
-        insertInstr->InsertBefore(instr);
-        r2 = r2_32->AsRegOpnd();
-
-        // CMP [regSrcStr + offsetof(length)], r2
-        instr = IR::Instr::New(Js::OpCode::CMP, this->m_func);
-        instr->SetSrc1(strLength);
-        instr->SetSrc2(r2);
-        insertInstr->InsertBefore(instr);
-        Legalize(instr);
-
-        if (r2->GetSize() != MachPtr)
-        {
-            r2 = r2->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
-        }
-
-        // Use unsigned compare, this should handle negative indexes as well (they become > INT_MAX)
-        // BLS $helper
-        instr = IR::BranchInstr::New(Js::OpCode::BLS, labelHelper, this->m_func);
-        insertInstr->InsertBefore(instr);
-
-        indirOpnd = IR::IndirOpnd::New(r1, r2, 1, TyUint16, this->m_func);
-    }
-    // LDRH charReg, [r1 + r2 * 2]  -- this is the value of the char
-    IR::RegOpnd *charReg = IR::RegOpnd::New(TyMachReg, this->m_func);
-    Lowerer::InsertMove(charReg, indirOpnd, insertInstr);
-
-    if (index == Js::BuiltinFunction::JavascriptString_CharAt)
-    {
-        IR::Opnd *resultOpnd;
-        if (dst->IsEqual(srcStr))
-        {
-            resultOpnd = IR::RegOpnd::New(TyVar, this->m_func);
-        }
-        else
-        {
-            resultOpnd = dst;
-        }
-        this->m_lowerer->GenerateGetSingleCharString(charReg, resultOpnd, labelHelper, doneLabel, insertInstr, false);
-    }
-    else
-    {
-        Assert(index == Js::BuiltinFunction::JavascriptString_CharCodeAt || index == Js::BuiltinFunction::JavascriptString_CodePointAt);
-
-        if (index == Js::BuiltinFunction::JavascriptString_CodePointAt)
-        {
-            this->m_lowerer->GenerateFastInlineStringCodePointAt(insertInstr, this->m_func, strLength, srcIndex, charReg, r1);
-        }
-
-        GenerateInt32ToVarConversion(charReg, insertInstr);
-
-        // MOV dst, charReg
-        instr = IR::Instr::New(Js::OpCode::MOV, dst, charReg, this->m_func);
-        insertInstr->InsertBefore(instr);
-    }
-    return true;
-}
-
 void
 LowererMD::EmitInt4Instr(IR::Instr *instr)
 {
@@ -7270,6 +7086,29 @@ LowererMD::LowerTypeof(IR::Instr* typeOfInstr)
     m_lowerer->LowerUnaryHelperMem(typeOfInstr, IR::HelperOp_Typeof);
 }
 
+void
+LowererMD::InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchInstr, IR::Instr* insertInstr)
+{
+    if (CONFIG_FLAG_RELEASE(PoisonObjects))
+    {
+        Js::OpCode opcode;
+        if (branchInstr->m_opcode == Js::OpCode::BNE)
+        {
+            opcode = Js::OpCode::CSELEQ;
+        }
+        else
+        {
+            AssertOrFailFast(branchInstr->m_opcode == Js::OpCode::BEQ);
+            opcode = Js::OpCode::CSELNE;
+        }
+        AssertOrFailFast(branchInstr->m_prev->m_opcode == Js::OpCode::SUBS || branchInstr->m_prev->m_opcode == Js::OpCode::ANDS);
+
+        IR::RegOpnd* regZero = IR::RegOpnd::New(nullptr, RegZR, TyMachPtr, insertInstr->m_func);
+        IR::Instr* csel = IR::Instr::New(opcode, poisonedOpnd, poisonedOpnd, regZero, insertInstr->m_func);
+        insertInstr->InsertBefore(csel);
+    }
+}
+
 #if DBG
 //
 // Helps in debugging of fast paths.

+ 2 - 4
lib/Backend/arm64/LowerMD.h

@@ -124,8 +124,6 @@ public:
             IR::Instr *     GenerateFastScopedLdFld(IR::Instr * instrLdFld);
             IR::Instr *     GenerateFastScopedStFld(IR::Instr * instrStFld);
             void            GenerateFastAbs(IR::Opnd *dst, IR::Opnd *src, IR::Instr *callInstr, IR::Instr *insertInstr, IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel);
-            bool            GenerateFastCharAt(Js::BuiltinFunction index, IR::Opnd *dst, IR::Opnd *srcStr, IR::Opnd *srcIndex, IR::Instr *callInstr, IR::Instr *insertInstr,
-                IR::LabelInstr *labelHelper, IR::LabelInstr *doneLabel);
             bool            TryGenerateFastMulAdd(IR::Instr * instrAdd, IR::Instr ** pInstrPrev);
             void            GenerateFloatTest(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper, const bool checkForNullInLoopBody = false);
             IR::RegOpnd*    CheckFloatAndUntag(IR::RegOpnd * opndSrc, IR::Instr * insertInstr, IR::LabelInstr* labelHelper);
@@ -246,8 +244,8 @@ public:
             void                LowerTypeof(IR::Instr * typeOfInstr);
 
             void                GenerateMemInit(IR::RegOpnd * opnd, int32 offset, size_t value, IR::Instr * insertBeforeInstr, bool isZeroed = false);
-public:
-    static IR::Instr * InsertCmovCC(const Js::OpCode opCode, IR::Opnd * dst, IR::Opnd* src1, IR::Instr* insertBeforeInstr, bool postRegAlloc);
+
+            static void            InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchInstr, IR::Instr* insertInstr);
 private:
     static  IR::Instr *     ChangeToAssign(IR::Instr * instr, IRType destType);
 

+ 3 - 1
lib/Backend/arm64/MdOpCodes.h

@@ -49,7 +49,9 @@ MACRO(CMP,        Reg1,       OpSideEffect,   UNUSED,   LEGAL_PSEUDO,   UNUSED,
 MACRO(CMN,        Reg1,       OpSideEffect,   UNUSED,   LEGAL_PSEUDO,   UNUSED,   D__S)
 // CMP src1, src2, SXTW -- used in multiply overflow checks
 MACRO(CMP_SXTW,   Reg1,       OpSideEffect,   UNUSED,   LEGAL_REG3_ND,  UNUSED,   D__S)
-// CSELLT src1, src2 -- select src1 if LT or src2 if not; used in integer min/max
+// CSELcc src1, src2 -- select src1 if cc or src2 if not
+MACRO(CSELEQ,     Reg3,       0,              UNUSED,   LEGAL_REG3,     UNUSED,   D___)
+MACRO(CSELNE,     Reg3,       0,              UNUSED,   LEGAL_REG3,     UNUSED,   D___)
 MACRO(CSELLT,     Reg3,       0,              UNUSED,   LEGAL_REG3,     UNUSED,   D___)
 // CSNEGPL src1, src2 -- select src1 if PL or -src1 if not; used in integer absolute value
 MACRO(CSNEGPL,    Reg3,       0,              UNUSED,   LEGAL_REG3,     UNUSED,   D___)

+ 18 - 0
lib/Common/ConfigFlagsList.h

@@ -489,6 +489,15 @@ PHASE(All)
 #define DEFAULT_CONFIG_MaxJitThreadCount        (2)
 #define DEFAULT_CONFIG_ForceMaxJitThreadCount   (false)
 
+#define DEFAULT_CONFIG_MitigateSpectre (true)
+
+#define DEFAULT_CONFIG_PoisonVarArrayLoad (true)
+#define DEFAULT_CONFIG_PoisonIntArrayLoad (true)
+#define DEFAULT_CONFIG_PoisonFloatArrayLoad (true)
+#define DEFAULT_CONFIG_PoisonTypedArrayLoad (true)
+#define DEFAULT_CONFIG_PoisonStringLoad (true)
+#define DEFAULT_CONFIG_PoisonObjects (true)
+
 #ifdef RECYCLER_PAGE_HEAP
 #define DEFAULT_CONFIG_PageHeap             ((Js::Number) PageHeapMode::PageHeapModeOff)
 #define DEFAULT_CONFIG_PageHeapAllocStack   (false)
@@ -1233,6 +1242,15 @@ FLAGNR(Number,  LoopBodySizeThresholdToDisableOpts, "Minimum bytecode size of a
 FLAGNR(Number,  MaxJitThreadCount     , "Number of maximum allowed parallel jit threads (actual number is factor of number of processors and other heuristics)", DEFAULT_CONFIG_MaxJitThreadCount)
 FLAGNR(Boolean, ForceMaxJitThreadCount, "Force the number of parallel jit threads as specified by MaxJitThreadCount flag (creation guaranteed)", DEFAULT_CONFIG_ForceMaxJitThreadCount)
 
+FLAGR(Boolean, MitigateSpectre, "Use mitigations for Spectre", DEFAULT_CONFIG_MitigateSpectre)
+
+FLAGPR(Boolean, MitigateSpectre, PoisonVarArrayLoad, "Poison loads from Var arrays", DEFAULT_CONFIG_PoisonVarArrayLoad)
+FLAGPR(Boolean, MitigateSpectre, PoisonIntArrayLoad, "Poison loads from Int arrays", DEFAULT_CONFIG_PoisonIntArrayLoad)
+FLAGPR(Boolean, MitigateSpectre, PoisonFloatArrayLoad, "Poison loads from Float arrays", DEFAULT_CONFIG_PoisonFloatArrayLoad)
+FLAGPR(Boolean, MitigateSpectre, PoisonTypedArrayLoad, "Poison loads from TypedArrays", DEFAULT_CONFIG_PoisonTypedArrayLoad)
+FLAGPR(Boolean, MitigateSpectre, PoisonStringLoad, "Poison indexed loads from strings", DEFAULT_CONFIG_PoisonStringLoad)
+FLAGPR(Boolean, MitigateSpectre, PoisonObjects, "Poison objects after type checks", DEFAULT_CONFIG_PoisonObjects)
+
 FLAGNR(Number,  MinInterpretCount     , "Minimum number of times a function must be interpreted", 0)
 FLAGNR(Number,  MinSimpleJitRunCount  , "Minimum number of times a function must be run in simple jit", 0)
 FLAGNRA(Number, MaxInterpretCount     , Mic, "Maximum number of times a function can be interpreted", 0)

+ 21 - 0
lib/Common/Core/ConfigParser.cpp

@@ -303,6 +303,27 @@ void ConfigParser::ParseRegistryKey(HKEY hk, CmdLineArgsParser &parser)
             Js::Configuration::Global.flags.Asmjs = true;
         }
     }
+
+    // Spectre mitigation feature control
+    // This setting allows enabling/disabling Spectre mitigations
+    //     0 - Disable Spectre mitigations
+    //     1 - Enable Spectre mitigations - Also default behavior
+    dwValue = 0;
+    dwSize = sizeof(dwValue);
+    if (NOERROR == RegGetValueW(hk, nullptr, _u("MitigateSpectre"), RRF_RT_DWORD, nullptr, (LPBYTE)&dwValue, &dwSize))
+    {
+        Js::ConfigFlagsTable &configFlags = Js::Configuration::Global.flags;
+        configFlags.Enable(Js::MitigateSpectreFlag);
+        if (dwValue == 0)
+        {
+            configFlags.SetAsBoolean(Js::MitigateSpectreFlag, false);
+        }
+        else if (dwValue == 1)
+        {
+            configFlags.SetAsBoolean(Js::MitigateSpectreFlag, true);
+        }
+    }
+
 #endif // _WIN32
 }
 

+ 1 - 1
lib/Common/Memory/Allocator.h

@@ -174,7 +174,7 @@ inline T* PostAllocationCallback(const type_info& objType, T *obj)
 
 // Free routine where we don't care about following C++ semantics (e.g. calling the destructor)
 #define AllocatorFree(alloc, freeFunc, obj, size) \
-        (alloc->*freeFunc)((void*)obj, size)
+        (alloc->*freeFunc)(obj, size)
 
 // default type allocator implementation
 template <typename TAllocator, typename T>

+ 2 - 2
lib/Common/Memory/HeapBlockMap.h

@@ -56,7 +56,7 @@ public:
     uint GetMarkCount(void* address, uint pageCount);
     template <bool interlocked, bool doSpecialMark>
     void Mark(void * candidate, MarkContext * markContext);
-    template <bool interlocked>
+    template <bool interlocked, bool doSpecialMark>
     void MarkInterior(void * candidate, MarkContext * markContext);
 
     bool IsMarked(void * address) const;
@@ -250,7 +250,7 @@ public:
     uint GetMarkCount(void* address, uint pageCount);
     template <bool interlocked, bool doSpecialMark>
     void Mark(void * candidate, MarkContext * markContext);
-    template <bool interlocked>
+    template <bool interlocked, bool doSpecialMark>
     void MarkInterior(void * candidate, MarkContext * markContext);
 
     bool IsMarked(void * address) const;

+ 32 - 10
lib/Common/Memory/HeapBlockMap.inl

@@ -287,7 +287,7 @@ HeapBlockMap32::MarkInteriorInternal(MarkContext * markContext, L2MapChunk *& ch
     return MarkInternal<interlocked>(chunk, realCandidate);
 }
 
-template <bool interlocked>
+template <bool interlocked, bool doSpecialMark>
 inline
 void
 HeapBlockMap32::MarkInterior(void * candidate, MarkContext * markContext)
@@ -305,6 +305,11 @@ HeapBlockMap32::MarkInterior(void * candidate, MarkContext * markContext)
 
     if (MarkInternal<interlocked>(chunk, candidate))
     {
+        if (doSpecialMark)
+        {
+            this->OnSpecialMark(chunk, candidate);
+        }
+
         // Already marked (mark internal-then-actual first)
         return;
     }
@@ -320,10 +325,16 @@ HeapBlockMap32::MarkInterior(void * candidate, MarkContext * markContext)
         break;
 
     case HeapBlock::HeapBlockType::SmallLeafBlockType:
-    case HeapBlock::HeapBlockType::MediumLeafBlockType:
-        // Leaf blocks don't need to be scanned.  Do nothing.
-        break;
+        {
+            // We want to scan leaf blocks for preventing UAFs due to interior pointers on stack.
+            byte bucketIndex = chunk->blockInfo[id2].bucketIndex;
+            uint objectSize = HeapInfo::GetObjectSizeForBucketIndex<SmallAllocationBlockAttributes>(bucketIndex);
+            void * realCandidate = SmallLeafHeapBlock::GetRealAddressFromInterior(candidate, objectSize, bucketIndex);
+            MarkInteriorInternal<interlocked, false>(markContext, chunk, candidate, realCandidate);
 
+            // Leaf object doesn't need to be added to the mark stack.
+        }
+        break;
     case HeapBlock::HeapBlockType::SmallNormalBlockType:
 #ifdef RECYCLER_WRITE_BARRIER
     case HeapBlock::HeapBlockType::SmallNormalBlockWithBarrierType:
@@ -344,6 +355,17 @@ HeapBlockMap32::MarkInterior(void * candidate, MarkContext * markContext)
             }
         }
         break;
+    case HeapBlock::HeapBlockType::MediumLeafBlockType:
+        {
+            // We want to scan leaf blocks for preventing UAFs due to interior pointers on stack.
+            byte bucketIndex = chunk->blockInfo[id2].bucketIndex;
+            uint objectSize = HeapInfo::GetObjectSizeForBucketIndex<MediumAllocationBlockAttributes>(bucketIndex);
+            void * realCandidate = MediumLeafHeapBlock::GetRealAddressFromInterior(candidate, objectSize, bucketIndex);
+            MarkInteriorInternal<interlocked, false>(markContext, chunk, candidate, realCandidate);
+
+            // Leaf object doesn't need to be added to the mark stack.
+        }
+        break;
     case HeapBlock::HeapBlockType::MediumNormalBlockType:
 #ifdef RECYCLER_WRITE_BARRIER
     case HeapBlock::HeapBlockType::MediumNormalBlockWithBarrierType:
@@ -375,7 +397,7 @@ HeapBlockMap32::MarkInterior(void * candidate, MarkContext * markContext)
                 break;
             }
 
-            ((SmallFinalizableHeapBlock*)chunk->map[id2])->ProcessMarkedObject<false>(realCandidate, markContext);
+            ((SmallFinalizableHeapBlock*)chunk->map[id2])->ProcessMarkedObject<doSpecialMark>(realCandidate, markContext);
         }
         break;
     case HeapBlock::HeapBlockType::MediumFinalizableBlockType:
@@ -389,8 +411,8 @@ HeapBlockMap32::MarkInterior(void * candidate, MarkContext * markContext)
                 break;
             }
 
-            ((MediumFinalizableHeapBlock*)chunk->map[id2])->ProcessMarkedObject<false>(realCandidate, markContext);
-        }
+            ((MediumFinalizableHeapBlock*)chunk->map[id2])->ProcessMarkedObject<doSpecialMark>(realCandidate, markContext);
+    }
         break;
 #ifdef RECYCLER_VISITED_HOST
     case HeapBlock::HeapBlockType::SmallRecyclerVisitedHostBlockType:
@@ -424,7 +446,7 @@ HeapBlockMap32::MarkInterior(void * candidate, MarkContext * markContext)
                 break;
             }
 
-            ((LargeHeapBlock*)chunk->map[GetLevel2Id(realCandidate)])->Mark<false>(realCandidate, markContext);
+            ((LargeHeapBlock*)chunk->map[GetLevel2Id(realCandidate)])->Mark<doSpecialMark>(realCandidate, markContext);
         }
         break;
 
@@ -474,7 +496,7 @@ HeapBlockMap64::Mark(void * candidate, MarkContext * markContext)
     // No Node found; must be an invalid reference. Do nothing.
 }
 
-template <bool interlocked>
+template <bool interlocked, bool doSpecialMark>
 inline
 void
 HeapBlockMap64::MarkInterior(void * candidate, MarkContext * markContext)
@@ -492,7 +514,7 @@ HeapBlockMap64::MarkInterior(void * candidate, MarkContext * markContext)
         {
             // Found the correct Node.
             // Process the mark and return.
-            node->map.MarkInterior<interlocked>(candidate, markContext);
+            node->map.MarkInterior<interlocked, doSpecialMark>(candidate, markContext);
             return;
         }
 

+ 1 - 1
lib/Common/Memory/MarkContext.inl

@@ -172,7 +172,7 @@ void MarkContext::Mark(void * candidate, void * parentReference)
 
     if (interior)
     {
-        recycler->heapBlockMap.MarkInterior<parallel>(candidate, this);
+        recycler->heapBlockMap.MarkInterior<parallel, doSpecialMark>(candidate, this);
         return;
     }
 

+ 46 - 9
lib/Common/Memory/Recycler.cpp

@@ -1734,30 +1734,67 @@ Recycler::ScanStack()
     bool doSpecialMark = collectionWrapper->DoSpecialMarkOnScanStack();
 
     BEGIN_DUMP_OBJECT(this, _u("Registers"));
-    if (doSpecialMark)
+    // We will not scan interior pointers on stack if we are not in script or we are in mem-protect mode.
+    if (!this->isInScript || this->IsMemProtectMode())
     {
+        if (doSpecialMark)
+        {
         ScanMemoryInline<true>(
             this->savedThreadContext.GetRegisters(), sizeof(void*) * SavedRegisterState::NumRegistersToSave
             ADDRESS_SANITIZER_APPEND(RecyclerScanMemoryType::Stack));
-    }
-    else
-    {
+        }
+        else
+        {
         ScanMemoryInline<false>(
             this->savedThreadContext.GetRegisters(), sizeof(void*) * SavedRegisterState::NumRegistersToSave
             ADDRESS_SANITIZER_APPEND(RecyclerScanMemoryType::Stack));
+        }
+    }
+    else
+    {
+        // We may have interior pointers on the stack such as pointers in the middle of the character buffers backing a JavascriptString or SubString object.
+        // To prevent UAFs of these buffers after the GC we will always do MarkInterior for the pointers on stack. This is necessary only when we are doing a
+        // GC while running a script as that is when the possibility of a UAF after GC exists.
+        if (doSpecialMark)
+        {
+            ScanMemoryInline<true, true /* forceInterior */>(this->savedThreadContext.GetRegisters(), sizeof(void*) * SavedRegisterState::NumRegistersToSave);
+        }
+        else
+        {
+            ScanMemoryInline<false, true /* forceInterior */>(this->savedThreadContext.GetRegisters(), sizeof(void*) * SavedRegisterState::NumRegistersToSave);
+        }
     }
     END_DUMP_OBJECT(this);
 
     BEGIN_DUMP_OBJECT(this, _u("Stack"));
-    if (doSpecialMark)
+    // We will not scan interior pointers on stack if we are not in script or we are in mem-protect mode.
+    if (!this->isInScript || this->IsMemProtectMode())
     {
-        ScanMemoryInline<true>((void**) stackTop, stackScanned
-            ADDRESS_SANITIZER_APPEND(RecyclerScanMemoryType::Stack));
+        if (doSpecialMark)
+        {
+            ScanMemoryInline<true>((void**) stackTop, stackScanned
+                ADDRESS_SANITIZER_APPEND(RecyclerScanMemoryType::Stack));
+        }
+        else
+        {
+            ScanMemoryInline<false>((void**) stackTop, stackScanned
+                ADDRESS_SANITIZER_APPEND(RecyclerScanMemoryType::Stack));
     }
     else
     {
-        ScanMemoryInline<false>((void**) stackTop, stackScanned
-            ADDRESS_SANITIZER_APPEND(RecyclerScanMemoryType::Stack));
+        // We may have interior pointers on the stack such as pointers in the middle of the character buffers backing a JavascriptString or SubString object.
+        // To prevent UAFs of these buffers after the GC we will always do MarkInterior for the pointers on stack. This is necessary only when we are doing a
+        // GC while running a script as that is when the possibility of a UAF after GC exists.
+        if (doSpecialMark)
+        {
+            ScanMemoryInline<true, true /* forceInterior */>((void**)stackTop, stackScanned
+                ADDRESS_SANITIZER_APPEND(RecyclerScanMemoryType::Stack));
+        }
+        else
+        {
+            ScanMemoryInline<false, true /* forceInterior */>((void**)stackTop, stackScanned
+                ADDRESS_SANITIZER_APPEND(RecyclerScanMemoryType::Stack));
+        }
     }
     END_DUMP_OBJECT(this);
 

+ 1 - 1
lib/Common/Memory/Recycler.h

@@ -1605,7 +1605,7 @@ private:
     inline void ScanObjectInline(void ** obj, size_t byteCount);
     inline void ScanObjectInlineInterior(void ** obj, size_t byteCount);
 
-    template <bool doSpecialMark>
+    template <bool doSpecialMark, bool forceInterior = false>
     inline void ScanMemoryInline(void ** obj, size_t byteCount
         ADDRESS_SANITIZER_APPEND(RecyclerScanMemoryType scanMemoryType = RecyclerScanMemoryType::General));
 

+ 2 - 2
lib/Common/Memory/Recycler.inl

@@ -493,7 +493,7 @@ Recycler::ScanObjectInlineInterior(void ** obj, size_t byteCount)
     markContext.ScanObject<false, true>(obj, byteCount);
 }
 
-template <bool doSpecialMark>
+template <bool doSpecialMark, bool forceInterior>
 NO_SANITIZE_ADDRESS
 inline void
 Recycler::ScanMemoryInline(void ** obj, size_t byteCount
@@ -507,7 +507,7 @@ Recycler::ScanMemoryInline(void ** obj, size_t byteCount
         scanMemoryType == RecyclerScanMemoryType::Stack ? this->savedAsanFakeStack : nullptr;
 #endif
 
-    if (this->enableScanInteriorPointers)
+    if (this->enableScanInteriorPointers || forceInterior)
     {
         markContext.ScanMemory<false, true, doSpecialMark>(
                 obj, byteCount ADDRESS_SANITIZER_APPEND(asanFakeStack));

+ 8 - 0
lib/Common/Memory/WriteBarrierMacros.h

@@ -41,3 +41,11 @@
 
 // use with FieldWithBarrier structs
 #define FORCE_NO_WRITE_BARRIER_TAG(arg) arg, _no_write_barrier_tag()
+
+// Unsafely cast a typical "Field() *" type. Only use in rare cases where we
+// understand the underlying memory usage.
+template <class T, class U>
+inline T unsafe_write_barrier_cast(U p)
+{
+    return (T)p;
+}

+ 5 - 4
lib/Parser/Parse.cpp

@@ -9477,10 +9477,6 @@ ParseNodePtr Parser::ParseVariableDeclaration(
                 if (pnodeThis && pnodeThis->sxVar.pnodeInit != nullptr)
                 {
                     pnodeThis->sxVar.sym->PromoteAssignmentState();
-                    if (m_currentNodeFunc && pnodeThis->sxVar.sym->GetIsFormal())
-                    {
-                        m_currentNodeFunc->sxFnc.SetHasAnyWriteToFormals(true);
-                    }
                 }
             }
             else if (declarationType == tkCONST /*pnodeThis->nop == knopConstDecl*/
@@ -9489,6 +9485,11 @@ ParseNodePtr Parser::ParseVariableDeclaration(
             {
                 Error(ERRUninitializedConst);
             }
+
+            if (m_currentNodeFunc && pnodeThis && pnodeThis->sxVar.sym->GetIsFormal())
+            {
+                m_currentNodeFunc->sxFnc.SetHasAnyWriteToFormals(true);
+            }
         }
 
         if (singleDefOnly)

+ 3 - 2
lib/Runtime/Base/FunctionBody.h

@@ -3598,8 +3598,9 @@ namespace Js
         static uint const EncodedSlotCountSlotIndex = 0;
         static uint const ScopeMetadataSlotIndex = 1;    // Either a FunctionBody* or DebuggerScope*
         static uint const FirstSlotIndex = 2;
+
     public:
-        ScopeSlots(Var* slotArray) : slotArray((Field(Var)*)slotArray)
+        ScopeSlots(Field(Var)* slotArray) : slotArray(slotArray)
         {
         }
 
@@ -3710,7 +3711,7 @@ namespace Js
         bool   GetStrictMode() const { return strictMode; }
         void   SetStrictMode(bool flag) { this->strictMode = flag; }
 
-        void** GetDataAddress() { return (void**)&this->scopes; }
+        Field(void*)* GetDataAddress() { return this->scopes; }
         static uint32 GetOffsetOfStrictMode() { return offsetof(FrameDisplay, strictMode); }
         static uint32 GetOffsetOfLength() { return offsetof(FrameDisplay, length); }
         static uint32 GetOffsetOfScopes() { return offsetof(FrameDisplay, scopes); }

+ 40 - 10
lib/Runtime/Base/ThreadContext.h

@@ -1590,10 +1590,6 @@ public:
     template <class Fn>
     inline Js::Var ExecuteImplicitCall(Js::RecyclableObject * function, Js::ImplicitCallFlags flags, Fn implicitCall)
     {
-        // For now, we will not allow Function that is marked as HasNoSideEffect to be called, and we will just bailout.
-        // These function may still throw exceptions, so we will need to add checks with RecordImplicitException
-        // so that we don't throw exception when disableImplicitCall is set before we allow these function to be called
-        // as an optimization.  (These functions are valueOf and toString calls for built-in non primitive types)
 
         Js::FunctionInfo::Attributes attributes = Js::FunctionInfo::GetAttributes(function);
 
@@ -1603,7 +1599,16 @@ public:
         {
             // Has no side effect means the function does not change global value or
             // will check for implicit call flags
-            return implicitCall();
+            Js::Var result = implicitCall();
+
+            // If the value is on stack we need to bailout so that it can be boxed.
+            // Instead of putting this in valueOf (or other builtins which have no side effect) adding
+            // the check here to cover any other scenario we might miss.
+            if (IsOnStack(result))
+            {
+                AddImplicitCallFlags(flags);
+            }
+            return result;
         }
 
         // Don't call the implicit call if disable implicit call
@@ -1619,15 +1624,40 @@ public:
         {
             // Has no side effect means the function does not change global value or
             // will check for implicit call flags
-            return implicitCall();
+            Js::Var result = implicitCall();
+
+            // If the value is on stack we need to bailout so that it can be boxed.
+            // Instead of putting this in valueOf (or other builtins which have no side effect) adding
+            // the check here to cover any other scenario we might miss.
+            if (IsOnStack(result))
+            {
+                AddImplicitCallFlags(flags);
+            }
+            return result;
         }
 
         // Save and restore implicit flags around the implicit call
+        struct RestoreFlags
+        {
+            ThreadContext * const ctx;
+            const Js::ImplicitCallFlags flags;
+            const Js::ImplicitCallFlags savedFlags;
+
+            RestoreFlags(ThreadContext *ctx, Js::ImplicitCallFlags flags) :
+                ctx(ctx),
+                flags(flags),
+                savedFlags(ctx->GetImplicitCallFlags())
+            {
+            }
+
+            ~RestoreFlags()
+            {
+                ctx->SetImplicitCallFlags(static_cast<Js::ImplicitCallFlags>(savedFlags | flags));
+            }
+        };
 
-        Js::ImplicitCallFlags saveImplicitCallFlags = this->GetImplicitCallFlags();
-        Js::Var result = implicitCall();
-        this->SetImplicitCallFlags((Js::ImplicitCallFlags)(saveImplicitCallFlags | flags));
-        return result;
+        RestoreFlags restoreFlags(this, flags);
+        return implicitCall();
     }
     bool HasNoSideEffect(Js::RecyclableObject * function) const;
     bool HasNoSideEffect(Js::RecyclableObject * function, Js::FunctionInfo::Attributes attr) const;

+ 1 - 1
lib/Runtime/ByteCode/AsmJsByteCodeDumper.cpp

@@ -220,7 +220,7 @@ namespace Js
 
     void AsmJsByteCodeDumper::DumpConstants(AsmJsFunc* func, FunctionBody* body)
     {
-        byte* table = (byte*)((Var*)body->GetConstTable());
+        byte* table = (byte*)body->GetConstTable();
         auto constSrcInfos = func->GetTypedRegisterAllocator().GetConstSourceInfos();
         for (int i = 0; i < WAsmJs::LIMIT; ++i)
         {

+ 1 - 1
lib/Runtime/Debug/DiagObjectModel.cpp

@@ -4084,7 +4084,7 @@ namespace Js
         else
         {
             // The scope is defined by a slot array object so grab the function body out to get the function name.
-            ScopeSlots slotArray = ScopeSlots(reinterpret_cast<Var*>(instance));
+            ScopeSlots slotArray = ScopeSlots(reinterpret_cast<Field(Var)*>(instance));
 
             if(slotArray.IsDebuggerScopeSlotArray())
             {

+ 3 - 2
lib/Runtime/Debug/DiagObjectModel.h

@@ -251,8 +251,9 @@ namespace Js
         virtual void PopulateMembers() override;
         virtual IDiagObjectAddress * GetObjectAddress(int index) override;
 
-        ScopeSlots GetSlotArray() {
-            Var *slotArray = (Var *) instance;
+        ScopeSlots GetSlotArray()
+        {
+            Field(Var) *slotArray = (Field(Var) *) instance;
             Assert(slotArray != nullptr);
             return ScopeSlots(slotArray);
         }

+ 2 - 2
lib/Runtime/Debug/TTInflateMap.cpp

@@ -191,7 +191,7 @@ namespace TTD
         return this->m_environmentMap.LookupKnownItem(envid);
     }
 
-    Js::Var* InflateMap::LookupSlotArray(TTD_PTR_ID slotid) const
+    Field(Js::Var)* InflateMap::LookupSlotArray(TTD_PTR_ID slotid) const
     {
         return this->m_slotArrayMap.LookupKnownItem(slotid);
     }
@@ -238,7 +238,7 @@ namespace TTD
         this->m_environmentPinSet->AddNew(value);
     }
 
-    void InflateMap::AddSlotArray(TTD_PTR_ID slotId, Js::Var* value)
+    void InflateMap::AddSlotArray(TTD_PTR_ID slotId, Field(Js::Var)* value)
     {
         this->m_slotArrayMap.AddItem(slotId, value);
         this->m_slotArrayPinSet->AddNew(value);

+ 4 - 4
lib/Runtime/Debug/TTInflateMap.h

@@ -15,14 +15,14 @@ namespace TTD
         TTDIdentifierDictionary<TTD_PTR_ID, Js::DynamicTypeHandler*> m_handlerMap;
         TTDIdentifierDictionary<TTD_PTR_ID, Js::Type*> m_typeMap;
 
-        //The maps for script contexts and objects 
+        //The maps for script contexts and objects
         TTDIdentifierDictionary<TTD_LOG_PTR_ID, Js::GlobalObject*> m_tagToGlobalObjectMap; //get the script context from here
         TTDIdentifierDictionary<TTD_PTR_ID, Js::RecyclableObject*> m_objectMap;
 
         //The maps for inflated function bodies
         TTDIdentifierDictionary<TTD_PTR_ID, Js::FunctionBody*> m_functionBodyMap;
         TTDIdentifierDictionary<TTD_PTR_ID, Js::FrameDisplay*> m_environmentMap;
-        TTDIdentifierDictionary<TTD_PTR_ID, Js::Var*> m_slotArrayMap;
+        TTDIdentifierDictionary<TTD_PTR_ID, Field(Js::Var)*> m_slotArrayMap;
 
         //The maps for resolving debug scopes
         TTDIdentifierDictionary<TTD_PTR_ID, Js::FunctionBody*> m_debuggerScopeHomeBodyMap;
@@ -72,7 +72,7 @@ namespace TTD
 
         Js::FunctionBody* LookupFunctionBody(TTD_PTR_ID functionId) const;
         Js::FrameDisplay* LookupEnvironment(TTD_PTR_ID envid) const;
-        Js::Var* LookupSlotArray(TTD_PTR_ID slotid) const;
+        Field(Js::Var)* LookupSlotArray(TTD_PTR_ID slotid) const;
 
         void LookupInfoForDebugScope(TTD_PTR_ID dbgScopeId, Js::FunctionBody** homeBody, int32* chainIndex) const;
 
@@ -86,7 +86,7 @@ namespace TTD
 
         void AddInflationFunctionBody(TTD_PTR_ID functionId, Js::FunctionBody* value);
         void AddEnvironment(TTD_PTR_ID envId, Js::FrameDisplay* value);
-        void AddSlotArray(TTD_PTR_ID slotId, Js::Var* value);
+        void AddSlotArray(TTD_PTR_ID slotId, Field(Js::Var)* value);
 
         void UpdateFBScopes(const NSSnapValues::SnapFunctionBodyScopeChain& scopeChainInfo, Js::FunctionBody* fb);
 

+ 1 - 1
lib/Runtime/Debug/TTSnapObjects.cpp

@@ -934,7 +934,7 @@ namespace TTD
                 }
             }
 
-            return ctx->GetLibrary()->CreateBoundFunction_TTD(bFunction, bThis, snapBoundInfo->ArgCount, (Js::Var*)bArgs);
+            return ctx->GetLibrary()->CreateBoundFunction_TTD(bFunction, bThis, snapBoundInfo->ArgCount, bArgs);
         }
 
         void EmitAddtlInfo_SnapBoundFunctionInfo(const SnapObject* snpObject, FileWriter* writer)

+ 4 - 4
lib/Runtime/Debug/TTSnapValues.cpp

@@ -556,12 +556,12 @@ namespace TTD
 
         //////////////////
 
-        Js::Var* InflateSlotArrayInfo(const SlotArrayInfo* slotInfo, InflateMap* inflator)
+        Field(Js::Var)* InflateSlotArrayInfo(const SlotArrayInfo* slotInfo, InflateMap* inflator)
         {
             Js::ScriptContext* ctx = inflator->LookupScriptContext(slotInfo->ScriptContextLogId);
             Field(Js::Var)* slotArray = RecyclerNewArray(ctx->GetRecycler(), Field(Js::Var), slotInfo->SlotCount + Js::ScopeSlots::FirstSlotIndex);
 
-            Js::ScopeSlots scopeSlots((Js::Var*)slotArray);
+            Js::ScopeSlots scopeSlots(slotArray);
             scopeSlots.SetCount(slotInfo->SlotCount);
 
             Js::Var undef = ctx->GetLibrary()->GetUndefined();
@@ -625,7 +625,7 @@ namespace TTD
                 }
             }
 
-            return (Js::Var*)slotArray;
+            return slotArray;
         }
 
         void EmitSlotArrayInfo(const SlotArrayInfo* slotInfo, FileWriter* writer, NSTokens::Separator separator)
@@ -782,7 +782,7 @@ namespace TTD
                 }
                 case Js::ScopeType::ScopeType_SlotArray:
                 {
-                    Js::Var* saval = inflator->LookupSlotArray(scp.IDValue);
+                    Field(Js::Var)* saval = inflator->LookupSlotArray(scp.IDValue);
                     environment->SetItem(i, saval);
                     break;
                 }

+ 1 - 1
lib/Runtime/Debug/TTSnapValues.h

@@ -153,7 +153,7 @@ namespace TTD
             TTD_WELLKNOWN_TOKEN OptWellKnownDbgScope;
         };
 
-        Js::Var* InflateSlotArrayInfo(const SlotArrayInfo* slotInfo, InflateMap* inflator);
+        Field(Js::Var)* InflateSlotArrayInfo(const SlotArrayInfo* slotInfo, InflateMap* inflator);
 
         void EmitSlotArrayInfo(const SlotArrayInfo* slotInfo, FileWriter* writer, NSTokens::Separator separator);
         void ParseSlotArrayInfo(SlotArrayInfo* slotInfo, bool readSeparator, FileReader* reader, SlabAllocator& alloc);

+ 1 - 1
lib/Runtime/Debug/TTSnapshot.cpp

@@ -581,7 +581,7 @@ namespace TTD
         for(auto iter = this->m_slotArrayEntries.GetIterator(); iter.IsValid(); iter.MoveNext())
         {
             const NSSnapValues::SlotArrayInfo* sai = iter.Current();
-            Js::Var* slots = NSSnapValues::InflateSlotArrayInfo(sai, inflator);
+            Field(Js::Var)* slots = NSSnapValues::InflateSlotArrayInfo(sai, inflator);
 
             inflator->AddSlotArray(sai->SlotId, slots);
         }

+ 3 - 3
lib/Runtime/Debug/TTSnapshotExtractor.cpp

@@ -88,7 +88,7 @@ namespace TTD
         }
     }
 
-    void SnapshotExtractor::ExtractSlotArrayIfNeeded(Js::ScriptContext* ctx, Js::Var* scope)
+    void SnapshotExtractor::ExtractSlotArrayIfNeeded(Js::ScriptContext* ctx, Field(Js::Var)* scope)
     {
         if(this->m_marks.IsMarked(scope))
         {
@@ -177,7 +177,7 @@ namespace TTD
                     break;
                 case Js::ScopeType::ScopeType_SlotArray:
                 {
-                    this->ExtractSlotArrayIfNeeded(ctx, (Js::Var*)scope);
+                    this->ExtractSlotArrayIfNeeded(ctx, (Field(Js::Var)*)scope);
 
                     entryInfo->IDValue = TTD_CONVERT_SLOTARRAY_TO_PTR_ID((Js::Var*)scope);
                     break;
@@ -322,7 +322,7 @@ namespace TTD
                 {
                     if(this->m_marks.MarkAndTestAddr<MarkTableTag::SlotArrayTag>(scope))
                     {
-                        Js::ScopeSlots slotArray = (Js::Var*)scope;
+                        Js::ScopeSlots slotArray = (Field(Js::Var)*)scope;
                         uint slotArrayCount = static_cast<uint>(slotArray.GetCount());
                         if(!slotArray.IsDebuggerScopeSlotArray())
                         {

+ 1 - 1
lib/Runtime/Debug/TTSnapshotExtractor.h

@@ -39,7 +39,7 @@ namespace TTD
         void ExtractTypeIfNeeded(Js::Type* jstype, ThreadContext* threadContext);
 
         //Ensure that a slot/scope has been extracted
-        void ExtractSlotArrayIfNeeded(Js::ScriptContext* ctx, Js::Var* scope);
+        void ExtractSlotArrayIfNeeded(Js::ScriptContext* ctx, Field(Js::Var)* scope);
         void ExtractScopeIfNeeded(Js::ScriptContext* ctx, Js::FrameDisplay* environment);
         void ExtractScriptFunctionEnvironmentIfNeeded(Js::ScriptFunction* function);
 

+ 1 - 1
lib/Runtime/Language/AsmJsModule.cpp

@@ -2501,7 +2501,7 @@ namespace Js
 #else
         Field(Var) * slotArray = RecyclerNewArray(scriptContext->GetRecycler(), Field(Var), moduleBody->scopeSlotArraySize + ScopeSlots::FirstSlotIndex);
 #endif
-        ScopeSlots scopeSlots((Js::Var*)slotArray);
+        ScopeSlots scopeSlots(slotArray);
         scopeSlots.SetCount(moduleBody->scopeSlotArraySize);
         scopeSlots.SetScopeMetadata(moduleBody->GetFunctionInfo());
 

+ 4 - 3
lib/Runtime/Language/InterpreterStackFrame.cpp

@@ -1254,7 +1254,7 @@ namespace Js
                 {
                     uint32 scopeSlots = this->executeFunction->scopeSlotArraySize;
                     Assert(scopeSlots != 0);
-                    ScopeSlots((Var*)nextAllocBytes).SetCount(0); // Start with count as 0. It will get set in NewScopeSlots
+                    ScopeSlots((Field(Var)*)nextAllocBytes).SetCount(0); // Start with count as 0. It will get set in NewScopeSlots
                     newInstance->localClosure = nextAllocBytes;
                     nextAllocBytes += (scopeSlots + ScopeSlots::FirstSlotIndex) * sizeof(Var);
                 }
@@ -2766,7 +2766,7 @@ namespace Js
 
                 scriptFuncObj->GetDynamicType()->SetEntryPoint(AsmJsExternalEntryPoint);
                 scriptFuncObj->GetFunctionBody()->GetAsmJsFunctionInfo()->SetModuleFunctionBody(asmJsModuleFunctionBody);
-                scriptFuncObj->SetModuleEnvironment((Field(Var)*)moduleMemoryPtr);
+                scriptFuncObj->SetModuleEnvironment(moduleMemoryPtr);
                 if (!info->IsRuntimeProcessed())
                 {
                     // don't reset entrypoint upon relinking
@@ -3865,6 +3865,7 @@ namespace Js
 #endif
 
         if (playout->Return == Js::Constants::NoRegister)
+        
         {
             Arguments args(CallInfo(CallFlags_NotUsed, playout->ArgCount), m_outParams);
             JavascriptFunction::CallFunction<true>(function, function->GetEntryPoint(), args);
@@ -7459,7 +7460,7 @@ const byte * InterpreterStackFrame::OP_ProfiledLoopBodyStart(uint loopId)
         slotArray = (Field(Var)*)this->GetLocalClosure();
         Assert(slotArray != nullptr);
 
-        ScopeSlots scopeSlots((Js::Var*)slotArray);
+        ScopeSlots scopeSlots(slotArray);
         scopeSlots.SetCount(scopeSlotCount);
         scopeSlots.SetScopeMetadata((Var)functionBody->GetFunctionInfo());
         Var undef = functionBody->GetScriptContext()->GetLibrary()->GetUndefined();

+ 1 - 1
lib/Runtime/Language/InterpreterStackFrame.h

@@ -831,7 +831,7 @@ namespace Js
             {
                 interpreterFrame->returnAddress = returnAddress; // Ensure these are set before pushing to interpreter frame list
                 interpreterFrame->addressOfReturnAddress = addressOfReturnAddress;
-                if (interpreterFrame->GetFunctionBody()->GetIsAsmJsFunction())
+                if (interpreterFrame->GetFunctionBody()->GetIsAsmjsMode())
                 {
                     m_isHiddenFrame = true;
                 }

+ 2 - 2
lib/Runtime/Language/JavascriptOperators.cpp

@@ -6969,7 +6969,7 @@ SetElementIHelper_INDEX_TYPE_IS_NUMBER:
         Assert(size > ScopeSlots::FirstSlotIndex); // Should never see empty slot array
         Field(Var)* slotArray = RecyclerNewArray(scriptContext->GetRecycler(), Field(Var), size); // last initialized slot contains reference to array of propertyIds, correspondent to objects in previous slots
         uint count = size - ScopeSlots::FirstSlotIndex;
-        ScopeSlots slots((Js::Var*)slotArray);
+        ScopeSlots slots(slotArray);
         slots.SetCount(count);
         AssertMsg(!FunctionBody::Is(scope), "Scope should only be FunctionInfo or DebuggerScope, not FunctionBody");
         slots.SetScopeMetadata(scope);
@@ -6996,7 +6996,7 @@ SetElementIHelper_INDEX_TYPE_IS_NUMBER:
 
     Field(Var)* JavascriptOperators::OP_CloneScopeSlots(Field(Var) *slotArray, ScriptContext *scriptContext)
     {
-        ScopeSlots slots((Js::Var*)slotArray);
+        ScopeSlots slots(slotArray);
         uint size = ScopeSlots::FirstSlotIndex + static_cast<uint>(slots.GetCount());
 
         Field(Var)* slotArrayClone = RecyclerNewArray(scriptContext->GetRecycler(), Field(Var), size);

+ 4 - 3
lib/Runtime/Library/BoundFunction.cpp

@@ -188,7 +188,7 @@ namespace Js
                 newValues[index++] = args[i];
             }
 
-            actualArgs = Arguments(args.Info, (Var*)newValues);
+            actualArgs = Arguments(args.Info, unsafe_write_barrier_cast<Var*>(newValues));
             actualArgs.Info.Count = boundFunction->count + argCount;
         }
         else
@@ -521,13 +521,14 @@ namespace Js
         TTD::NSSnapObjects::StdExtractSetKindSpecificInfo<TTD::NSSnapObjects::SnapBoundFunctionInfo*, TTD::NSSnapObjects::SnapObjectType::SnapBoundFunctionObject>(objData, bfi, alloc, depCount, depArray);
     }
 
-    BoundFunction* BoundFunction::InflateBoundFunction(ScriptContext* ctx, RecyclableObject* function, Var bThis, uint32 ct, Var* args)
+    BoundFunction* BoundFunction::InflateBoundFunction(
+        ScriptContext* ctx, RecyclableObject* function, Var bThis, uint32 ct, Field(Var)* args)
     {
         BoundFunction* res = RecyclerNew(ctx->GetRecycler(), BoundFunction, ctx->GetLibrary()->GetBoundFunctionType());
 
         res->boundThis = bThis;
         res->count = ct;
-        res->boundArgs = (Field(Var)*)args;
+        res->boundArgs = args;
 
         res->targetFunction = function;
 

+ 2 - 1
lib/Runtime/Library/BoundFunction.h

@@ -63,7 +63,8 @@ namespace Js
         virtual TTD::NSSnapObjects::SnapObjectType GetSnapTag_TTD() const override;
         virtual void ExtractSnapObjectDataInto(TTD::NSSnapObjects::SnapObject* objData, TTD::SlabAllocator& alloc) override;
 
-        static BoundFunction* InflateBoundFunction(ScriptContext* ctx, RecyclableObject* function, Var bThis, uint32 ct, Var* args);
+        static BoundFunction* InflateBoundFunction(
+            ScriptContext* ctx, RecyclableObject* function, Var bThis, uint32 ct, Field(Var)* args);
 #endif
 
     private:

+ 3 - 1
lib/Runtime/Library/ConcatString.cpp

@@ -138,8 +138,10 @@ namespace Js
 
         if (this->propertyRecord == nullptr)
         {
+            Js::PropertyRecord const * propertyRecord = nullptr;
             scriptContext->GetOrAddPropertyRecord(this->GetSz(), static_cast<int>(this->GetLength()),
-                (Js::PropertyRecord const **)&(this->propertyRecord));
+                &propertyRecord);
+            this->propertyRecord = propertyRecord;
         }
 
         this->propertyString = scriptContext->GetPropertyString(propertyRecord->GetPropertyId());

+ 19 - 37
lib/Runtime/Library/JavascriptArray.cpp

@@ -6878,38 +6878,8 @@ Case0:
                 Js::Throw::FatalInternalError();
             }
 
-            // Maintain nativity of the array only for the following cases (To favor inplace conversions - keeps the conversion cost less):
-            // -    int cases for X86 and
-            // -    FloatArray for AMD64
-            // We convert the entire array back and forth once here O(n), rather than doing the costly conversion down the call stack which is O(nlogn)
-
-#if defined(TARGET_64)
-            if(compFn && JavascriptNativeFloatArray::Is(arr))
-            {
-                arr = JavascriptNativeFloatArray::ConvertToVarArray((JavascriptNativeFloatArray*)arr);
-                JS_REENTRANT(jsReentLock, arr->Sort(compFn));
-                arr = arr->ConvertToNativeArrayInPlace<JavascriptNativeFloatArray, double>(arr);
-            }
-            else
-            {
-                EnsureNonNativeArray(arr);
-                JS_REENTRANT(jsReentLock, arr->Sort(compFn));
-            }
-#else
-            if(compFn && JavascriptNativeIntArray::Is(arr))
-            {
-                //EnsureNonNativeArray(arr);
-                arr = JavascriptNativeIntArray::ConvertToVarArray((JavascriptNativeIntArray*)arr);
-                JS_REENTRANT(jsReentLock, arr->Sort(compFn));
-                arr = arr->ConvertToNativeArrayInPlace<JavascriptNativeIntArray, int32>(arr);
-            }
-            else
-            {
-                EnsureNonNativeArray(arr);
-                JS_REENTRANT(jsReentLock, arr->Sort(compFn));
-            }
-#endif
-
+            EnsureNonNativeArray(arr);
+            JS_REENTRANT(jsReentLock, arr->Sort(compFn));
         }
         else
         {
@@ -7046,7 +7016,8 @@ Case0:
     }
 
     template<typename T>
-    void JavascriptArray::ArraySegmentSpliceHelper(JavascriptArray *pnewArr, SparseArraySegment<T> *seg, SparseArraySegment<T> **prev,
+    void JavascriptArray::ArraySegmentSpliceHelper(
+        JavascriptArray *pnewArr, SparseArraySegment<T> *seg, Field(SparseArraySegment<T>*) *prev,
                                                     uint32 start, uint32 deleteLen, Var* insertArgs, uint32 insertLen, Recycler *recycler)
     {
         // book keeping variables
@@ -7168,7 +7139,9 @@ Case0:
                 // All splice happens in one segment.
                 SparseArraySegmentBase *nextSeg = startSeg->next;
                 // Splice the segment first, which might OOM throw but the array would be intact.
-                JavascriptArray::ArraySegmentSpliceHelper(pnewArr, (SparseArraySegment<T>*)startSeg, (SparseArraySegment<T>**)prevSeg, start, deleteLen, insertArgs, insertLen, recycler);
+                JavascriptArray::ArraySegmentSpliceHelper(
+                    pnewArr, startSeg, SparseArraySegment<T>::AddressFrom(prevSeg),
+                    start, deleteLen, insertArgs, insertLen, recycler);
                 while (nextSeg)
                 {
                     // adjust next segments left
@@ -7506,15 +7479,24 @@ Case0:
                 bool isInlineSegment = JavascriptArray::IsInlineSegment(oldHead, pArr);
                 if (isIntArray)
                 {
-                    ArraySegmentSpliceHelper<int32>(newArr, SparseArraySegment<int32>::From(pArr->head), (SparseArraySegment<int32>**)&pArr->head, start, deleteLen, insertArgs, insertLen, recycler);
+                    ArraySegmentSpliceHelper<int32>(newArr,
+                        SparseArraySegment<int32>::From(pArr->head),
+                        SparseArraySegment<int32>::AddressFrom(&pArr->head),
+                        start, deleteLen, insertArgs, insertLen, recycler);
                 }
                 else if (isFloatArray)
                 {
-                    ArraySegmentSpliceHelper<double>(newArr, SparseArraySegment<double>::From(pArr->head), (SparseArraySegment<double>**)&pArr->head, start, deleteLen, insertArgs, insertLen, recycler);
+                    ArraySegmentSpliceHelper<double>(newArr,
+                        SparseArraySegment<double>::From(pArr->head),
+                        SparseArraySegment<double>::AddressFrom(&pArr->head),
+                        start, deleteLen, insertArgs, insertLen, recycler);
                 }
                 else
                 {
-                    ArraySegmentSpliceHelper<Var>(newArr, SparseArraySegment<Var>::From(pArr->head), (SparseArraySegment<Var>**)&pArr->head, start, deleteLen, insertArgs, insertLen, recycler);
+                    ArraySegmentSpliceHelper<Var>(newArr,
+                        SparseArraySegment<Var>::From(pArr->head),
+                        SparseArraySegment<Var>::AddressFrom(&pArr->head),
+                        start, deleteLen, insertArgs, insertLen, recycler);
                 }
 
                 if (isInlineSegment && oldHead != pArr->head)

+ 3 - 4
lib/Runtime/Library/JavascriptArray.h

@@ -105,8 +105,6 @@ namespace Js
     protected:
         DEFINE_VTABLE_CTOR(JavascriptArray, ArrayObject);
         DEFINE_MARSHAL_OBJECT_TO_SCRIPT_CONTEXT(JavascriptArray);
-    private:
-        Field(bool) isInitialized;
     protected:
         Field(SparseArraySegmentBase*) head;
         union SegmentUnionType
@@ -595,8 +593,9 @@ namespace Js
         static void ArraySpliceHelper(JavascriptArray* pNewArr, JavascriptArray* pArr, uint32 start, uint32 deleteLen,
                                                     Var* insertArgs, uint32 insertLen, ScriptContext *scriptContext);
         template<typename T>
-        static void ArraySegmentSpliceHelper(JavascriptArray *pnewArr, SparseArraySegment<T> *seg, SparseArraySegment<T> **prev, uint32 start, uint32 deleteLen,
-                                                    Var* insertArgs, uint32 insertLen, Recycler *recycler);
+        static void ArraySegmentSpliceHelper(
+            JavascriptArray *pnewArr, SparseArraySegment<T> *seg, Field(SparseArraySegment<T>*) *prev,
+            uint32 start, uint32 deleteLen, Var* insertArgs, uint32 insertLen, Recycler *recycler);
         template<typename T>
         static RecyclableObject* ObjectSpliceHelper(RecyclableObject* pObj, T len, T start, T deleteLen,
                                                     Var* insertArgs, uint32 insertLen, ScriptContext *scriptContext, RecyclableObject* pNewObj = nullptr);

+ 2 - 2
lib/Runtime/Library/JavascriptGeneratorFunction.cpp

@@ -137,7 +137,7 @@ namespace Js
         // and use that buffer for this InterpreterStackFrame.
         Field(Var)* argsHeapCopy = RecyclerNewArray(scriptContext->GetRecycler(), Field(Var), stackArgs.Info.Count);
         CopyArray(argsHeapCopy, stackArgs.Info.Count, stackArgs.Values, stackArgs.Info.Count);
-        Arguments heapArgs(callInfo, (Var*)argsHeapCopy);
+        Arguments heapArgs(callInfo, unsafe_write_barrier_cast<Var*>(argsHeapCopy));
 
         DynamicObject* prototype = scriptContext->GetLibrary()->CreateGeneratorConstructorPrototypeObject();
         JavascriptGenerator* generator = scriptContext->GetLibrary()->CreateGenerator(heapArgs, generatorFunction->scriptFunction, prototype);
@@ -163,7 +163,7 @@ namespace Js
         // and use that buffer for this InterpreterStackFrame.
         Field(Var)* argsHeapCopy = RecyclerNewArray(scriptContext->GetRecycler(), Field(Var), stackArgs.Info.Count);
         CopyArray(argsHeapCopy, stackArgs.Info.Count, stackArgs.Values, stackArgs.Info.Count);
-        Arguments heapArgs(callInfo, (Var*)argsHeapCopy);
+        Arguments heapArgs(callInfo, unsafe_write_barrier_cast<Var*>(argsHeapCopy));
 
         JavascriptExceptionObject* e = nullptr;
         JavascriptPromiseResolveOrRejectFunction* resolve;

+ 2 - 1
lib/Runtime/Library/JavascriptLibrary.cpp

@@ -5744,7 +5744,8 @@ namespace Js
         return function;
     }
 
-    Js::RecyclableObject* JavascriptLibrary::CreateBoundFunction_TTD(RecyclableObject* function, Var bThis, uint32 ct, Var* args)
+    Js::RecyclableObject* JavascriptLibrary::CreateBoundFunction_TTD(
+        RecyclableObject* function, Var bThis, uint32 ct, Field(Var)* args)
     {
         return BoundFunction::InflateBoundFunction(this->scriptContext, function, bThis, ct, args);
     }

+ 2 - 1
lib/Runtime/Library/JavascriptLibrary.h

@@ -735,7 +735,8 @@ namespace Js
         static void AddWeakMapElementInflate_TTD(Js::JavascriptWeakMap* map, Var key, Var value);
 
         Js::RecyclableObject* CreateExternalFunction_TTD(Js::Var fname);
-        Js::RecyclableObject* CreateBoundFunction_TTD(RecyclableObject* function, Var bThis, uint32 ct, Var* args);
+        Js::RecyclableObject* CreateBoundFunction_TTD(
+                RecyclableObject* function, Var bThis, uint32 ct, Field(Var)* args);
 
         Js::RecyclableObject* CreateProxy_TTD(RecyclableObject* handler, RecyclableObject* target);
         Js::RecyclableObject* CreateRevokeFunction_TTD(RecyclableObject* proxy);

+ 4 - 4
lib/Runtime/Library/RegexHelper.cpp

@@ -915,7 +915,7 @@ namespace Js
             ArenaAllocator* tempAlloc,
             JavascriptString* matchStr,
             int numberOfCaptures,
-            Var* captures,
+            Field(Var)* captures,
             CharCount position)
         {
             CharCount* substitutionOffsets = nullptr;
@@ -925,7 +925,7 @@ namespace Js
                 tempAlloc,
                 &substitutionOffsets);
             auto getGroup = [&](int captureIndex, Var nonMatchValue) {
-                return captureIndex <= numberOfCaptures ? captures[captureIndex] : nonMatchValue;
+                return captureIndex <= numberOfCaptures ? PointerValue(captures[captureIndex]) : nonMatchValue;
             };
             UnifiedRegex::GroupInfo match(position, matchStr->GetLength());
             int numGroups = numberOfCaptures + 1; // Take group 0 into account.
@@ -951,7 +951,7 @@ namespace Js
             ArenaAllocator* tempAlloc,
             JavascriptString* matchStr,
             int numberOfCaptures,
-            Var* captures,
+            Field(Var)* captures,
             CharCount position)
         {
             // replaceFn Arguments:
@@ -1088,7 +1088,7 @@ namespace Js
                     CharCount substringLength = position - nextSourcePosition;
                     accumulatedResultBuilder.Append(input, nextSourcePosition, substringLength);
 
-                    appendReplacement(accumulatedResultBuilder, tempAlloc, matchStr, (int) numberOfCapturesToKeep, (Var*)captures, position);
+                    appendReplacement(accumulatedResultBuilder, tempAlloc, matchStr, (int) numberOfCapturesToKeep, captures, position);
 
                     nextSourcePosition = JavascriptRegExp::AddIndex(position, matchStr->GetLength());
                 }

+ 6 - 4
lib/Runtime/Library/ScriptFunction.cpp

@@ -490,7 +490,7 @@ namespace Js
             BufferStringBuilder builder(cch, scriptContext);
             utf8::DecodeOptions options = pFuncBody->GetUtf8SourceInfo()->IsCesu8() ? utf8::doAllowThreeByteSurrogates : utf8::doDefault;
             size_t decodedCount = utf8::DecodeUnitsInto(builder.DangerousGetWritableBuffer(), pbStart, pbStart + cbLength, options);
-            
+
             if (decodedCount != cch)
             {
                 AssertMsg(false, "Decoded incorrect number of characters for function body");
@@ -574,7 +574,7 @@ namespace Js
             }
             case Js::ScopeType::ScopeType_SlotArray:
             {
-                Js::ScopeSlots slotArray = (Js::Var*)scope;
+                Js::ScopeSlots slotArray = (Field(Js::Var)*)scope;
                 uint slotArrayCount = static_cast<uint>(slotArray.GetCount());
 
                 //get the function body associated with the scope
@@ -716,7 +716,8 @@ namespace Js
     JavascriptArrayBuffer* AsmJsScriptFunction::GetAsmJsArrayBuffer() const
     {
 #ifdef ASMJS_PLAT
-        return *(JavascriptArrayBuffer**)(this->GetModuleEnvironment() + AsmJsModuleMemory::MemoryTableBeginOffset);
+        return (JavascriptArrayBuffer*)PointerValue(
+            *(this->GetModuleEnvironment() + AsmJsModuleMemory::MemoryTableBeginOffset));
 #else
         Assert(UNREACHED);
         return nullptr;
@@ -751,7 +752,8 @@ namespace Js
 
     WebAssemblyMemory* WasmScriptFunction::GetWebAssemblyMemory() const
     {
-        return *(WebAssemblyMemory**)(this->GetModuleEnvironment() + AsmJsModuleMemory::MemoryTableBeginOffset);
+        return (WebAssemblyMemory*)PointerValue(
+            *(this->GetModuleEnvironment() + AsmJsModuleMemory::MemoryTableBeginOffset));
     }
 #endif
 

+ 5 - 0
lib/Runtime/Library/SparseArraySegment.h

@@ -104,6 +104,11 @@ namespace Js
             return static_cast<SparseArraySegment*>(seg);
         }
 
+        static inline Field(SparseArraySegment*)* AddressFrom(Field(SparseArraySegmentBase*) *addr)
+        {
+            return reinterpret_cast<Field(SparseArraySegment*)*>(addr);
+        }
+
     private:
         template<bool isLeaf>
         static SparseArraySegment<T>* Allocate(Recycler* recycler, uint32 left, uint32 length, uint32 size, uint32 fillStart = 0);

+ 17 - 15
lib/Runtime/Library/StackScriptFunction.cpp

@@ -229,12 +229,12 @@ namespace Js
                     }
                     if (callerFunctionBody->DoStackScopeSlots() && interpreterFrame->IsClosureInitDone())
                     {
-                        Var* stackScopeSlots = (Var*)interpreterFrame->GetLocalClosure();
+                        Field(Var)* stackScopeSlots = (Field(Var)*)interpreterFrame->GetLocalClosure();
                         if (stackScopeSlots)
                         {
                             // Scope slot pointer may be null if bailout didn't restore it, which means we don't need it.
-                            Var* boxedScopeSlots = this->BoxScopeSlots(stackScopeSlots, static_cast<uint>(ScopeSlots(stackScopeSlots).GetCount()));
-                            interpreterFrame->SetLocalClosure((Var)boxedScopeSlots);
+                            Field(Var)* boxedScopeSlots = this->BoxScopeSlots(stackScopeSlots, static_cast<uint>(ScopeSlots(stackScopeSlots).GetCount()));
+                            interpreterFrame->SetLocalClosure(boxedScopeSlots);
                         }
                     }
 
@@ -304,7 +304,7 @@ namespace Js
                         }
                         if (callerFunctionBody->DoStackScopeSlots())
                         {
-                            Var* stackScopeSlots = this->GetScopeSlotsFromNativeFrame(walker, callerFunctionBody);
+                            Field(Var)* stackScopeSlots = (Field(Var)*)this->GetScopeSlotsFromNativeFrame(walker, callerFunctionBody);
                             if (stackScopeSlots)
                             {
                                 // Scope slot pointer may be null if bailout didn't restore it, which means we don't need it.
@@ -356,7 +356,7 @@ namespace Js
                     int i;
                     for (i = 0; i < frameDisplay->GetLength(); i++)
                     {
-                        Var *slotArray = (Var*)frameDisplay->GetItem(i);
+                        Field(Var) *slotArray = (Field(Var)*)frameDisplay->GetItem(i);
 
                         if (ScopeSlots::Is(slotArray))
                         {
@@ -373,10 +373,10 @@ namespace Js
                     }
                     for (; i < frameDisplay->GetLength(); i++)
                     {
-                        Var *pScope = (Var*)frameDisplay->GetItem(i);
+                        Field(Var) *pScope = (Field(Var)*)frameDisplay->GetItem(i);
                         if (ScopeSlots::Is(pScope))
                         {
-                            Var *boxedSlots = this->BoxScopeSlots(pScope, static_cast<uint>(ScopeSlots(pScope).GetCount()));
+                            Field(Var) *boxedSlots = this->BoxScopeSlots(pScope, static_cast<uint>(ScopeSlots(pScope).GetCount()));
                             frameDisplay->SetItem(i, boxedSlots);
                         }
                     }
@@ -652,7 +652,7 @@ namespace Js
         for (uint16 i = 0; i < length; i++)
         {
             // TODO: Once we allocate the slots on the stack, we can only look those slots
-            Var * pScope = (Var *)frameDisplay->GetItem(i);
+            Field(Var) * pScope = (Field(Var) *)frameDisplay->GetItem(i);
             // We don't do stack slots if we exceed max encoded slot count
             if (ScopeSlots::Is(pScope))
             {
@@ -664,19 +664,21 @@ namespace Js
         return boxedFrameDisplay;
     }
 
-    Var * StackScriptFunction::BoxState::BoxScopeSlots(Var * slotArray, uint count)
+    Field(Var) * StackScriptFunction::BoxState::BoxScopeSlots(Field(Var) * slotArray, uint count)
     {
         Assert(slotArray != nullptr);
         Assert(count != 0);
-        Field(Var) * boxedSlotArray = nullptr;
-        if (boxedValues.TryGetValue(slotArray, (void **)&boxedSlotArray))
+
+        void * tmp = nullptr;
+        if (boxedValues.TryGetValue(slotArray, &tmp))
         {
-            return (Var*)boxedSlotArray;
+            return (Field(Var) *)tmp;
         }
 
+        Field(Var) * boxedSlotArray = nullptr;
         if (!ThreadContext::IsOnStack(slotArray))
         {
-            boxedSlotArray = (Field(Var)*)slotArray;
+            boxedSlotArray = slotArray;
         }
         else
         {
@@ -686,7 +688,7 @@ namespace Js
         boxedValues.Add(slotArray, boxedSlotArray);
 
         ScopeSlots scopeSlots(slotArray);
-        ScopeSlots boxedScopeSlots((Js::Var*)boxedSlotArray);
+        ScopeSlots boxedScopeSlots(boxedSlotArray);
 
         boxedScopeSlots.SetCount(count);
         boxedScopeSlots.SetScopeMetadata(scopeSlots.GetScopeMetadataRaw());
@@ -702,7 +704,7 @@ namespace Js
             }
             boxedScopeSlots.Set(i, slotValue);
         }
-        return (Var*)boxedSlotArray;
+        return boxedSlotArray;
     }
 
     ScriptFunction * StackScriptFunction::BoxState::BoxStackFunction(ScriptFunction * scriptFunction)

+ 1 - 1
lib/Runtime/Library/StackScriptFunction.h

@@ -56,7 +56,7 @@ namespace Js
             ScriptContext * scriptContext;
             void * returnAddress;
 
-            Var * BoxScopeSlots(Var * scopeSlots, uint count);
+            Field(Var) * BoxScopeSlots(Field(Var) * scopeSlots, uint count);
             bool NeedBoxFrame(FunctionBody * functionBody);
             bool NeedBoxScriptFunction(ScriptFunction * scriptFunction);
             ScriptFunction * BoxStackFunction(ScriptFunction * scriptFunction);

+ 1 - 1
lib/Runtime/Library/WebAssemblyEnvironment.cpp

@@ -82,7 +82,7 @@ void WebAssemblyEnvironment::SetVarElement(Field(Var)* ptr, T* val, uint32 index
 
     Field(Var)* dst = ptr + index;
     CheckPtrIsValid<Var>((intptr_t)dst);
-    AssertMsg(*(T**)dst == nullptr, "We shouldn't overwrite anything on the environment once it is set");
+    AssertMsg(*dst == nullptr, "We shouldn't overwrite anything on the environment once it is set");
     *dst = val;
 }
 

+ 1 - 1
lib/Runtime/Library/WebAssemblyEnvironment.h

@@ -20,7 +20,7 @@ namespace Js
     public:
         WebAssemblyEnvironment(WebAssemblyModule* module);
 
-        Var* GetStartPtr() const { return (Var*)PointerValue(start); }
+        Field(Var)* GetStartPtr() const { return start; }
 
         WasmScriptFunction* GetWasmFunction(uint32 index) const;
         void SetWasmFunction(uint32 index, WasmScriptFunction* func);

+ 2 - 2
lib/Runtime/Library/WebAssemblyInstance.cpp

@@ -183,7 +183,7 @@ void WebAssemblyInstance::CreateWasmFunctions(WebAssemblyModule * wasmModule, Sc
         Wasm::WasmFunctionInfo* wasmFuncInfo = wasmModule->GetWasmFunctionInfo(i);
         FunctionBody* body = wasmFuncInfo->GetBody();
         WasmScriptFunction* funcObj = ctx->GetLibrary()->CreateWasmScriptFunction(body);
-        funcObj->SetModuleEnvironment((Field(Var)*)env->GetStartPtr());
+        funcObj->SetModuleEnvironment(env->GetStartPtr());
         funcObj->SetSignature(body->GetAsmJsFunctionInfo()->GetWasmSignature());
         funcObj->SetEnvironment(frameDisplay);
 
@@ -440,7 +440,7 @@ void WebAssemblyInstance::InitialGlobals(WebAssemblyModule * wasmModule, ScriptC
             {
                 JavascriptError::ThrowTypeError(ctx, WASMERR_InvalidGlobalRef);
             }
-            
+
             if (sourceGlobal->GetType() != global->GetType())
             {
                 JavascriptError::ThrowTypeError(ctx, WASMERR_InvalidTypeConversion);

+ 4 - 3
lib/Runtime/Library/WebAssemblyTable.cpp

@@ -11,9 +11,10 @@
 namespace Js
 {
 
-WebAssemblyTable::WebAssemblyTable(Var * values, uint32 currentLength, uint32 initialLength, uint32 maxLength, DynamicType * type) :
+WebAssemblyTable::WebAssemblyTable(
+        Field(Var) * values, uint32 currentLength, uint32 initialLength, uint32 maxLength, DynamicType * type) :
     DynamicObject(type),
-    m_values((Field(Var)*)values),
+    m_values(values),
     m_currentLength(currentLength),
     m_initialLength(initialLength),
     m_maxLength(maxLength)
@@ -229,7 +230,7 @@ WebAssemblyTable::Create(uint32 initial, uint32 maximum, ScriptContext * scriptC
     {
         values = RecyclerNewArrayZ(scriptContext->GetRecycler(), Field(Var), initial);
     }
-    return RecyclerNew(scriptContext->GetRecycler(), WebAssemblyTable, (Var*)values, initial, initial, maximum, scriptContext->GetLibrary()->GetWebAssemblyTableType());
+    return RecyclerNew(scriptContext->GetRecycler(), WebAssemblyTable, values, initial, initial, maximum, scriptContext->GetLibrary()->GetWebAssemblyTableType());
 }
 
 void

+ 2 - 1
lib/Runtime/Library/WebAssemblyTable.h

@@ -23,7 +23,8 @@ namespace Js
             static FunctionInfo Get;
             static FunctionInfo Set;
         };
-        WebAssemblyTable(Var * values, uint32 currentLength, uint32 initialLength, uint32 maxLength, DynamicType * type);
+        WebAssemblyTable(
+            Field(Var) * values, uint32 currentLength, uint32 initialLength, uint32 maxLength, DynamicType * type);
         static Var NewInstance(RecyclableObject* function, CallInfo callInfo, ...);
         static Var EntryGetterLength(RecyclableObject* function, CallInfo callInfo, ...);
         static Var EntryGrow(RecyclableObject* function, CallInfo callInfo, ...);

+ 1 - 1
lib/Runtime/Types/DynamicObjectPropertyEnumerator.cpp

@@ -117,7 +117,7 @@ namespace Js
         data->cachedCount = 0;
         data->propertyCount = propertyCount;
         data->strings = reinterpret_cast<Field(PropertyString*)*>(data + 1);
-        data->indexes = (BigPropertyIndex *)(data->strings + propertyCount);
+        data->indexes = unsafe_write_barrier_cast<BigPropertyIndex *>(data->strings + propertyCount);
         data->attributes = (PropertyAttributes*)(data->indexes + propertyCount);
         data->completed = false;
         data->enumNonEnumerable = GetEnumNonEnumerable();

+ 89 - 23
tools/RecyclerChecker/RecyclerChecker.cpp

@@ -7,20 +7,47 @@
 MainVisitor::MainVisitor(
         CompilerInstance& compilerInstance, ASTContext& context, bool fix)
     : _compilerInstance(compilerInstance), _context(context),
-     _fix(fix), _fixed(false), _barrierTypeDefined(false)
+     _fix(fix), _fixed(false), _diagEngine(context.getDiagnostics()),
+     _barrierTypeDefined(false)
 {
     if (_fix)
     {
         _rewriter.setSourceMgr(compilerInstance.getSourceManager(),
                                compilerInstance.getLangOpts());
     }
+
+#define SWB_WIKI \
+    "https://github.com/microsoft/ChakraCore/wiki/Software-Write-Barrier#coding-rules"
+
+    _diagUnbarrieredField = _diagEngine.getCustomDiagID(
+        DiagnosticsEngine::Error,
+        "Unbarriered field, see " SWB_WIKI);
+   _diagIllegalBarrierCast = _diagEngine.getCustomDiagID(
+        DiagnosticsEngine::Error,
+        "Illegal casting away of write barrier, see " SWB_WIKI);
+#undef SWB_WIKI
+}
+
+void MainVisitor::ReportUnbarriedField(SourceLocation location)
+{
+    DiagReport(location, _diagUnbarrieredField);
+}
+
+void MainVisitor::ReportIllegalBarrierCast(SourceLocation location)
+{
+    DiagReport(location, _diagIllegalBarrierCast);
+}
+
+void MainVisitor::DiagReport(SourceLocation location, unsigned diagId)
+{
+    _diagEngine.Report(location, diagId);
 }
 
 bool MainVisitor::VisitCXXRecordDecl(CXXRecordDecl* recordDecl)
 {
-    if (Log::GetLevel() < Log::LogLevel::Info)
+    if (Log::GetLevel() < Log::LogLevel::Verbose)
     {
-        return true; // At least Info level, otherwise this not needed
+        return true; // At least Verbose level, otherwise this not needed
     }
 
     std::string typeName = recordDecl->getQualifiedNameAsString();
@@ -99,11 +126,6 @@ void MainVisitor::ProcessUnbarrieredFields(
     }
 
     const auto& sourceMgr = _compilerInstance.getSourceManager();
-    DiagnosticsEngine& diagEngine = _context.getDiagnostics();
-    const unsigned diagID = diagEngine.getCustomDiagID(
-        DiagnosticsEngine::Error,
-        "Unbarriered field, see "
-        "https://github.com/microsoft/ChakraCore/wiki/Software-Write-Barrier#coding-rules");
 
     for (auto field : recordDecl->fields())
     {
@@ -141,7 +163,7 @@ void MainVisitor::ProcessUnbarrieredFields(
             {
                 if (pushFieldType(originalType))
                 {
-                    Log::outs() << "Queue field type: " << originalTypeName
+                    Log::verbose() << "Queue field type: " << originalTypeName
                         << " (" << typeName << "::" << fieldName << ")\n";
                 }
             }
@@ -168,7 +190,7 @@ void MainVisitor::ProcessUnbarrieredFields(
                             << fieldName << "\n";
             }
 
-            diagEngine.Report(location, diagID);
+            ReportUnbarriedField(location);
         }
     }
 }
@@ -351,14 +373,14 @@ void MainVisitor::RecordRecyclerAllocation(const string& allocationFunction, con
 template <class Set, class DumpItemFunc>
 void MainVisitor::dump(const char* name, const Set& set, const DumpItemFunc& func)
 {
-    Log::outs() << "-------------------------\n\n";
-    Log::outs() << name << "\n";
-    Log::outs() << "-------------------------\n\n";
+    Log::verbose() << "-------------------------\n\n";
+    Log::verbose() << name << "\n";
+    Log::verbose() << "-------------------------\n\n";
     for (auto item : set)
     {
-        func(Log::outs(), item);
+        func(Log::verbose(), item);
     }
-    Log::outs() << "-------------------------\n\n";
+    Log::verbose() << "-------------------------\n\n";
 }
 
 template <class Item>
@@ -384,7 +406,7 @@ void MainVisitor::Inspect()
     Dump(pointerClasses);
     Dump(barrieredClasses);
 
-    Log::outs() << "Recycler allocations\n";
+    Log::verbose() << "Recycler allocations\n";
     for (auto item : _allocatorTypeMap)
     {
         dump(item.first.c_str(), item.second);
@@ -428,7 +450,7 @@ void MainVisitor::Inspect()
             {
                 if (pushBarrierType(base.getType().getTypePtr()))
                 {
-                    Log::outs() << "Queue base type: " << base.getType().getAsString()
+                    Log::verbose() << "Queue base type: " << base.getType().getAsString()
                         << " (base of " << typeName << ")\n";
                 }
             }
@@ -521,8 +543,8 @@ void CheckAllocationsInFunctionVisitor::VisitAllocate(
 
         if (allocationType & AllocationTypes::WriteBarrier)
         {
-            Log::outs() << "In \"" << _functionDecl->getQualifiedNameAsString() << "\"\n";
-            Log::outs() << "  Allocating \"" << allocatedTypeStr << "\" in write barriered memory\n";
+            Log::verbose() << "In \"" << _functionDecl->getQualifiedNameAsString() << "\"\n";
+            Log::verbose() << "  Allocating \"" << allocatedTypeStr << "\" in write barriered memory\n";
         }
 
         _mainVisitor->RecordAllocation(allocatedType, allocationType);
@@ -565,6 +587,45 @@ bool CheckAllocationsInFunctionVisitor::VisitCallExpr(CallExpr* callExpr)
     return true;
 }
 
+// Check if type is a "Field() *" pointer type, or alternatively a pointer to
+// any type in "alt" if provided.
+bool CheckAllocationsInFunctionVisitor::IsFieldPointer(
+    const QualType& qtype, const char* alt)
+{
+    if (qtype->isPointerType())
+    {
+        auto name = qtype->getPointeeType()
+            .getDesugaredType(_mainVisitor->getContext()).getAsString();
+        return StartsWith(name, "class Memory::WriteBarrierPtr<")
+            || StartsWith(name, "typename WriteBarrierFieldTypeTraits<")
+            || (alt && strstr(alt, name.c_str()));
+    }
+
+    return false;
+}
+
+bool CheckAllocationsInFunctionVisitor::CommonVisitCastExpr(CastExpr *cast)
+{
+    if (IsFieldPointer(cast->getSubExpr()->getType()) &&  // from Field() *
+        cast->getType()->isPointerType() &&     // to a pointer type
+        !IsFieldPointer(cast->getType(),        // not another Field() *
+            "int|float|double|unsigned char"))  // not int/float/double/byte *
+    {
+        _mainVisitor->ReportIllegalBarrierCast(cast->getLocStart());
+
+        if (Log::GetLevel() >= Log::LogLevel::Info)
+        {
+            cast->dumpColor();
+            cast->getSubExpr()->getType()->getPointeeType()
+                .getDesugaredType(_mainVisitor->getContext()).dump("CAST_FROM");
+            cast->getType()->getPointeeType()
+                .getDesugaredType(_mainVisitor->getContext()).dump("CAST_TO");
+        }
+    }
+
+    return true;
+}
+
 void RecyclerCheckerConsumer::HandleTranslationUnit(ASTContext& context)
 {
     MainVisitor mainVisitor(_compilerInstance, context, _fix);
@@ -585,13 +646,17 @@ bool RecyclerCheckerAction::ParseArgs(
 {
     for (auto i = args.begin(); i != args.end(); i++)
     {
-        if (*i == "-verbose")
+        if (*i == "-fix")
         {
-            Log::SetLevel(Log::LogLevel::Verbose);
+            this->_fix = true;
         }
-        else if (*i == "-fix")
+        else if (*i == "-info")
         {
-            this->_fix = true;
+            Log::SetLevel(Log::LogLevel::Info);
+        }
+        else if (*i == "-verbose")
+        {
+            Log::SetLevel(Log::LogLevel::Verbose);
         }
         else
         {
@@ -599,6 +664,7 @@ bool RecyclerCheckerAction::ParseArgs(
                 << "ERROR: Unrecognized check-recycler option: " << *i << "\n"
                 << "Supported options:\n"
                 << "  -fix          Fix missing write barrier annotations"
+                << "  -info         Log info messages\n"
                 << "  -verbose      Log verbose messages\n";
             return false;
         }

+ 25 - 0
tools/RecyclerChecker/RecyclerChecker.h

@@ -40,6 +40,11 @@ private:
     bool _fix;      // whether user requested to fix missing annotations
     bool _fixed;    // whether this plugin committed any annotation fixes
 
+    // For emitting checker errors
+    DiagnosticsEngine& _diagEngine;
+    unsigned _diagUnbarrieredField;
+    unsigned _diagIllegalBarrierCast;
+
     bool _barrierTypeDefined;
     map<string, set<string>> _allocatorTypeMap;
     set<string> _pointerClasses;
@@ -51,6 +56,7 @@ public:
     MainVisitor(CompilerInstance& compilerInstance, ASTContext& context, bool fix);
 
     const ASTContext& getContext() const { return _context; }
+    const CompilerInstance& getCompilerInstance() const { return _compilerInstance; }
 
     bool VisitCXXRecordDecl(CXXRecordDecl* recordDecl);
     bool VisitFunctionDecl(FunctionDecl* functionDecl);
@@ -61,6 +67,9 @@ public:
     void Inspect();
     bool ApplyFix();
 
+    void ReportUnbarriedField(SourceLocation location);
+    void ReportIllegalBarrierCast(SourceLocation location);
+
 private:
     template <class Set, class DumpItemFunc>
     void dump(const char* name, const Set& set, const DumpItemFunc& func);
@@ -73,6 +82,8 @@ private:
 
     bool MatchType(const string& type, const char* source, const char** pSourceEnd);
     const char* GetFieldTypeAnnotation(QualType qtype);
+
+    void DiagReport(SourceLocation location, unsigned diagId);
 };
 
 class CheckAllocationsInFunctionVisitor:
@@ -87,12 +98,26 @@ public:
     bool VisitCXXNewExpr(CXXNewExpr* newExpression);
     bool VisitCallExpr(CallExpr* callExpr);
 
+#define IMPLEMENT_VISIT_CAST(Expr) \
+    bool Visit##Expr(Expr *cast) { return CommonVisitCastExpr(cast); }
+
+    IMPLEMENT_VISIT_CAST(CStyleCastExpr)
+    IMPLEMENT_VISIT_CAST(CXXFunctionalCastExpr)
+    IMPLEMENT_VISIT_CAST(CXXConstCastExpr)
+    IMPLEMENT_VISIT_CAST(CXXDynamicCastExpr)
+    IMPLEMENT_VISIT_CAST(CXXReinterpretCastExpr)
+    IMPLEMENT_VISIT_CAST(CXXStaticCastExpr)
+#undef IMPLEMENT_VISIT_CAST
+
 private:
     MainVisitor* _mainVisitor;
     FunctionDecl* _functionDecl;
 
     template <class A0, class A1, class T>
     void VisitAllocate(const A0& getArg0, const A1& getArg1, const T& getAllocType);
+
+    bool IsFieldPointer(const QualType& qtype, const char* alt = nullptr);
+    bool CommonVisitCastExpr(CastExpr *cast);
 };
 
 class RecyclerCheckerConsumer: public ASTConsumer