Răsfoiți Sursa

Enable globopt for generator functions

Nhat Nguyen 6 ani în urmă
părinte
comite
55ac99ffd4

+ 75 - 13
lib/Backend/BackwardPass.cpp

@@ -90,10 +90,9 @@ BackwardPass::DoMarkTempNumbers() const
 bool
 BackwardPass::SatisfyMarkTempObjectsConditions() const {
     return !PHASE_OFF(Js::MarkTempPhase, this->func) &&
-           !PHASE_OFF(Js::MarkTempObjectPhase, this->func) &&
-           func->DoGlobOpt() && func->GetHasTempObjectProducingInstr() &&
-           !func->IsJitInDebugMode() &&
-           func->DoGlobOptsForGeneratorFunc();
+        !PHASE_OFF(Js::MarkTempObjectPhase, this->func) &&
+        func->DoGlobOpt() && func->GetHasTempObjectProducingInstr() &&
+        !func->IsJitInDebugMode();
 
     // Why MarkTempObject is disabled under debugger:
     //   We add 'identified so far dead non-temp locals' to byteCodeUpwardExposedUsed in ProcessBailOutInfo,
@@ -156,8 +155,7 @@ BackwardPass::DoDeadStore(Func* func, StackSym* sym)
     // Dead store is disabled under debugger for non-temp local vars.
     return
         DoDeadStore(func) &&
-        !(func->IsJitInDebugMode() && sym->HasByteCodeRegSlot() && func->IsNonTempLocalVar(sym->GetByteCodeRegSlot())) &&
-        func->DoGlobOptsForGeneratorFunc();
+        !(func->IsJitInDebugMode() && sym->HasByteCodeRegSlot() && func->IsNonTempLocalVar(sym->GetByteCodeRegSlot()));
 }
 
 bool
@@ -168,8 +166,7 @@ BackwardPass::DoTrackNegativeZero() const
         !PHASE_OFF(Js::TrackNegativeZeroPhase, func) &&
         func->DoGlobOpt() &&
         !IsPrePass() &&
-        !func->IsJitInDebugMode() &&
-        func->DoGlobOptsForGeneratorFunc();
+        !func->IsJitInDebugMode();
 }
 
 bool
@@ -181,8 +178,7 @@ BackwardPass::DoTrackBitOpsOrNumber() const
         tag == Js::BackwardPhase &&
         func->DoGlobOpt() &&
         !IsPrePass() &&
-        !func->IsJitInDebugMode() &&
-        func->DoGlobOptsForGeneratorFunc();
+        !func->IsJitInDebugMode();
 #else
     return false;
 #endif
@@ -197,8 +193,7 @@ BackwardPass::DoTrackIntOverflow() const
         tag == Js::BackwardPhase &&
         !IsPrePass() &&
         globOpt->DoLossyIntTypeSpec() &&
-        !func->IsJitInDebugMode() &&
-        func->DoGlobOptsForGeneratorFunc();
+        !func->IsJitInDebugMode();
 }
 
 bool
@@ -2565,6 +2560,30 @@ BackwardPass::NeedBailOutOnImplicitCallsForTypedArrayStore(IR::Instr* instr)
     return false;
 }
 
+IR::Instr*
+BackwardPass::ProcessPendingPreOpBailOutInfoForYield(IR::Instr* const currentInstr)
+{
+    Assert(currentInstr->m_opcode == Js::OpCode::Yield);
+    IR::GeneratorBailInInstr* bailInInstr = currentInstr->m_next->m_next->AsGeneratorBailInInstr();
+
+    BailOutInfo* bailOutInfo = currentInstr->GetBailOutInfo();
+
+    // Make a copy of all detected constant values before we actually process
+    // the bailout info since we will then remove any values that don't need
+    // to be restored for the normal bailout cases. As for yields, we still
+    // need them for our bailin code.
+    bailInInstr->SetConstantValues(bailOutInfo->capturedValues->constantValues);
+
+    IR::Instr* ret = this->ProcessPendingPreOpBailOutInfo(currentInstr);
+
+    // We will need list of symbols that have been copy-prop'd to map the correct
+    // symbols to restore during bail-in. Since this list is cleared during
+    // FillBailOutRecord, make a copy of it now.
+    bailInInstr->SetCopyPropSyms(bailOutInfo->usedCapturedValues->copyPropSyms);
+
+    return ret;
+}
+
 IR::Instr*
 BackwardPass::ProcessPendingPreOpBailOutInfo(IR::Instr *const currentInstr)
 {
@@ -2993,6 +3012,11 @@ BackwardPass::ProcessBlock(BasicBlock * block)
         this->currentInstr = instr;
         this->currentRegion = this->currentBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
 
+        if (instr->m_opcode == Js::OpCode::Yield && !this->IsCollectionPass())
+        {
+            this->DisallowMarkTempAcrossYield(this->currentBlock->byteCodeUpwardExposedUsed);
+        }
+
         IR::Instr * insertedInstr = TryChangeInstrForStackArgOpt();
         if (insertedInstr != nullptr)
         {
@@ -3855,7 +3879,24 @@ BackwardPass::ProcessBlock(BasicBlock * block)
             }
         }
 #endif
-        instrPrev = ProcessPendingPreOpBailOutInfo(instr);
+
+        // Make a copy of upwardExposedUses for our bail-in code, note that we have
+        // to do it at the bail-in instruction (right after yield) and not at the yield point
+        // since the yield instruction might use some symbols as operands that we don't need when
+        // bailing in
+        if (instr->IsGeneratorBailInInstr() && this->currentBlock->upwardExposedUses)
+        {
+            instr->AsGeneratorBailInInstr()->SetUpwardExposedUses(*this->currentBlock->upwardExposedUses);
+        }
+
+        if (instr->m_opcode == Js::OpCode::Yield)
+        {
+            instrPrev = ProcessPendingPreOpBailOutInfoForYield(instr);
+        }
+        else
+        {
+            instrPrev = ProcessPendingPreOpBailOutInfo(instr);
+        }
 
 #if DBG_DUMP
         TraceInstrUses(block, instr, false);
@@ -6313,6 +6354,27 @@ BackwardPass::ProcessPropertySymUse(PropertySym *propertySym)
     return isLive;
 }
 
+void
+BackwardPass::DisallowMarkTempAcrossYield(BVSparse<JitArenaAllocator>* bytecodeUpwardExposed)
+{
+    Assert(!this->IsCollectionPass());
+    BasicBlock* block = this->currentBlock;
+    if (this->DoMarkTempNumbers())
+    {
+        block->tempNumberTracker->DisallowMarkTempAcrossYield(bytecodeUpwardExposed);
+    }
+    if (this->DoMarkTempObjects())
+    {
+        block->tempObjectTracker->DisallowMarkTempAcrossYield(bytecodeUpwardExposed);
+    }
+#if DBG
+    if (this->DoMarkTempObjectVerify())
+    {
+        block->tempObjectVerifyTracker->DisallowMarkTempAcrossYield(bytecodeUpwardExposed);
+    }
+#endif
+}
+
 void
 BackwardPass::MarkTemp(StackSym * sym)
 {

+ 2 - 0
lib/Backend/BackwardPass.h

@@ -45,6 +45,7 @@ private:
     bool ProcessByteCodeUsesInstr(IR::Instr * instr);
     bool ProcessBailOutInfo(IR::Instr * instr);
     void ProcessBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo);
+    IR::Instr* ProcessPendingPreOpBailOutInfoForYield(IR::Instr* const currentInstr);
     IR::Instr* ProcessPendingPreOpBailOutInfo(IR::Instr *const currentInstr);
     void ClearDstUseForPostOpLazyBailOut(IR::Instr *instr);
     void ProcessBailOutArgObj(BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed);
@@ -56,6 +57,7 @@ private:
     void ProcessPropertySymOpndUse(IR::PropertySymOpnd *opnd);
     bool ProcessPropertySymUse(PropertySym *propertySym);
     void ProcessNewScObject(IR::Instr* instr);
+    void DisallowMarkTempAcrossYield(BVSparse<JitArenaAllocator>* bytecodeUpwardExposed);
     void MarkTemp(StackSym * sym);
     bool ProcessInlineeStart(IR::Instr* instr);
     void ProcessInlineeEnd(IR::Instr* instr);

+ 1 - 8
lib/Backend/Func.cpp

@@ -138,7 +138,7 @@ Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
     , vtableMap(nullptr)
 #endif
     , m_yieldOffsetResumeLabelList(nullptr)
-    , m_bailOutNoSaveLabel(nullptr)
+    , m_bailOutForElidedYieldInsertionPoint(nullptr)
     , constantAddressRegOpnd(alloc)
     , lastConstantAddressRegLoadInstr(nullptr)
     , m_totalJumpTableSizeInBytesForSwitchStatements(0)
@@ -866,13 +866,6 @@ Func::AjustLocalVarSlotOffset()
 }
 #endif
 
-bool
-Func::DoGlobOptsForGeneratorFunc() const
-{
-    // Disable GlobOpt optimizations for generators initially. Will visit and enable each one by one.
-    return !GetJITFunctionBody()->IsCoroutine();
-}
-
 bool
 Func::DoSimpleJitDynamicProfile() const
 {

+ 1 - 3
lib/Backend/Func.h

@@ -329,8 +329,6 @@ public:
     void AjustLocalVarSlotOffset();
 #endif
 
-    bool DoGlobOptsForGeneratorFunc() const;
-
     static int32 AdjustOffsetValue(int32 offset);
 
     static inline uint32 GetDiagLocalSlotSize()
@@ -997,7 +995,7 @@ public:
 
     uint32 m_inlineeId;
 
-    IR::LabelInstr *    m_bailOutNoSaveLabel;
+    IR::Instr *    m_bailOutForElidedYieldInsertionPoint;
 
 private:
     Js::EntryPointInfo* m_entryPointInfo; // for in-proc JIT only

+ 19 - 7
lib/Backend/GlobHashTable.h

@@ -31,18 +31,30 @@ public:
     static uint Get(ExprHash hash)  { return static_cast<uint>(hash); }
 };
 
-#define FOREACH_GLOBHASHTABLE_ENTRY(bucket, hashTable) \
+#define FOREACH_VALUEHASHTABLE_ENTRY(BucketType, bucket, hashTable) \
     for (uint _iterHash = 0; _iterHash < (hashTable)->tableSize; _iterHash++)  \
     {   \
-        FOREACH_SLISTBASE_ENTRY(GlobHashBucket, bucket, &(hashTable)->table[_iterHash]) \
+        FOREACH_SLISTBASE_ENTRY(BucketType, bucket, &(hashTable)->table[_iterHash]) \
         {
 
 
-#define NEXT_GLOBHASHTABLE_ENTRY \
+#define NEXT_VALUEHASHTABLE_ENTRY \
         } \
         NEXT_SLISTBASE_ENTRY; \
     }
 
+#define FOREACH_VALUEHASHTABLE_ENTRY_EDITING(BucketType, bucket, hashTable, iter) \
+    for (uint _iterHash = 0; _iterHash < (hashTable)->tableSize; _iterHash++)  \
+    {   \
+        FOREACH_SLISTBASE_ENTRY_EDITING(BucketType, bucket, &(hashTable)->table[_iterHash], iter) \
+        {
+
+
+#define NEXT_VALUEHASHTABLE_ENTRY_EDITING \
+        } \
+        NEXT_SLISTBASE_ENTRY_EDITING; \
+    }
+
 template<typename TData, typename TElement>
 class ValueHashTable
 {
@@ -390,7 +402,7 @@ public:
 #if DBG_DUMP
     void Dump()
     {
-        FOREACH_GLOBHASHTABLE_ENTRY(bucket, this)
+        FOREACH_VALUEHASHTABLE_ENTRY(HashBucket, bucket, this)
         {
 
             Output::Print(_u("%4d  =>  "), bucket.value);
@@ -398,20 +410,20 @@ public:
             Output::Print(_u("\n"));
             Output::Print(_u("\n"));
         }
-        NEXT_GLOBHASHTABLE_ENTRY;
+        NEXT_VALUEHASHTABLE_ENTRY;
     }
 
     void Dump(void (*valueDump)(TData))
     {
         Output::Print(_u("\n-------------------------------------------------------------------------------------------------\n"));
-        FOREACH_GLOBHASHTABLE_ENTRY(bucket, this)
+        FOREACH_VALUEHASHTABLE_ENTRY(HashBucket, bucket, this)
         {
             valueDump(bucket.value);
             Output::Print(_u("  =>  "), bucket.value);
             bucket.element->Dump();
             Output::Print(_u("\n"));
         }
-        NEXT_GLOBHASHTABLE_ENTRY;
+        NEXT_VALUEHASHTABLE_ENTRY;
     }
 #endif
 

+ 22 - 22
lib/Backend/GlobOpt.cpp

@@ -838,14 +838,19 @@ GlobOpt::TryTailDup(IR::BranchInstr *tailBranch)
 }
 
 void
-GlobOpt::ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block)
+GlobOpt::ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, IR::Instr* insertBeforeInstr /* = nullptr */)
 {
     FOREACH_BITSET_IN_SPARSEBV(id, bv)
     {
         StackSym *stackSym = this->func->m_symTable->FindStackSym(id);
         IR::RegOpnd *newOpnd = IR::RegOpnd::New(stackSym, TyVar, this->func);
-        IR::Instr *lastInstr = block->GetLastInstr();
-        if (lastInstr->IsBranchInstr() || lastInstr->m_opcode == Js::OpCode::BailTarget)
+        IR::Instr* lastInstr = block->GetLastInstr();
+
+        if (insertBeforeInstr != nullptr)
+        {
+            this->ToVar(insertBeforeInstr, newOpnd, block, nullptr, false);
+        }
+        else if (lastInstr->IsBranchInstr() || lastInstr->m_opcode == Js::OpCode::BailTarget)
         {
             // If branch is using this symbol, hoist the operand as the ToVar load will get
             // inserted right before the branch.
@@ -2427,15 +2432,15 @@ GlobOpt::OptInstr(IR::Instr *&instr, bool* isInstrRemoved)
         return instrNext;
     }
 
-    if (!instr->IsRealInstr() || instr->IsByteCodeUsesInstr() || instr->m_opcode == Js::OpCode::Conv_Bool)
+    if (instr->m_opcode == Js::OpCode::Yield)
     {
-        return instrNext;
+        // TODO[generators][ianhall]: Can this and the FillBailOutInfo call below be moved to after Src1 and Src2 so that Yield can be optimized right up to the actual yield?
+        this->ProcessKills(instr);
     }
 
-    if (instr->m_opcode == Js::OpCode::Yield)
+    if (!instr->IsRealInstr() || instr->IsByteCodeUsesInstr() || instr->m_opcode == Js::OpCode::Conv_Bool)
     {
-        // TODO[generators][ianhall]: Can this and the FillBailOutInfo call below be moved to after Src1 and Src2 so that Yield can be optimized right up to the actual yield?
-        CurrentBlockData()->KillStateForGeneratorYield();
+        return instrNext;
     }
 
     if (!IsLoopPrePass())
@@ -3033,13 +3038,13 @@ GlobOpt::OptDst(
         else if (dstVal)
         {
             opnd->SetValueType(dstVal->GetValueInfo()->Type());
-
+#if 0
             if(currentBlock->loop &&
                 !IsLoopPrePass() &&
                 (instr->m_opcode == Js::OpCode::Ld_A || instr->m_opcode == Js::OpCode::Ld_I4) &&
                 instr->GetSrc1()->IsRegOpnd() &&
                 !func->IsJitInDebugMode() &&
-                func->DoGlobOptsForGeneratorFunc())
+                this->GetJITFunctionBody()->IsCoroutine())
             {
                 // Look for the following patterns:
                 //
@@ -3103,6 +3108,7 @@ GlobOpt::OptDst(
                     this->SetSymStoreDirect(dstVal->GetValueInfo(), dstVarSym);
                 } while(false);
             }
+#endif
         }
 
         this->ValueNumberObjectType(opnd, instr);
@@ -3658,15 +3664,6 @@ GlobOpt::CopyProp(IR::Opnd *opnd, IR::Instr *instr, Value *val, IR::IndirOpnd *p
         return opnd;
     }
 
-    if (!this->func->DoGlobOptsForGeneratorFunc())
-    {
-        // Don't copy prop in generator functions because non-bytecode temps that span a yield
-        // cannot be saved and restored by the current bail-out mechanics utilized by generator
-        // yield/resume.
-        // TODO[generators][ianhall]: Enable copy-prop at least for in between yields.
-        return opnd;
-    }
-
     if (instr->m_opcode == Js::OpCode::CheckFixedFld || instr->m_opcode == Js::OpCode::CheckPropertyGuardAndLoadType)
     {
         // Don't copy prop into CheckFixedFld or CheckPropertyGuardAndLoadType
@@ -14555,6 +14552,10 @@ swap_srcs:
 void
 GlobOpt::ProcessKills(IR::Instr *instr)
 {
+    if (instr->m_opcode == Js::OpCode::Yield)
+    {
+        this->CurrentBlockData()->KillStateForGeneratorYield(instr);
+    }
     this->ProcessFieldKills(instr);
     this->ProcessValueKills(instr);
     this->ProcessArrayValueKills(instr);
@@ -15695,7 +15696,7 @@ GlobOpt::DoConstFold() const
 bool
 GlobOpt::IsTypeSpecPhaseOff(Func const *func)
 {
-    return PHASE_OFF(Js::TypeSpecPhase, func) || func->IsJitInDebugMode() || !func->DoGlobOptsForGeneratorFunc();
+    return PHASE_OFF(Js::TypeSpecPhase, func) || func->IsJitInDebugMode();
 }
 
 bool
@@ -15802,8 +15803,7 @@ GlobOpt::DoArrayCheckHoist(Func const * const func)
     return
         !PHASE_OFF(Js::ArrayCheckHoistPhase, func) &&
         !func->IsArrayCheckHoistDisabled() &&
-        !func->IsJitInDebugMode() && // StElemI fast path is not allowed when in debug mode, so it cannot have bailout
-        func->DoGlobOptsForGeneratorFunc();
+        !func->IsJitInDebugMode(); // StElemI fast path is not allowed when in debug mode, so it cannot have bailout
 }
 
 bool

+ 1 - 1
lib/Backend/GlobOpt.h

@@ -742,7 +742,7 @@ private:
     void                    InsertCloneStrs(BasicBlock *toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData);
     void                    InsertValueCompensation(BasicBlock *const predecessor, BasicBlock *const successor, const SymToValueInfoMap *symsRequiringCompensationToMergedValueInfoMap);
     IR::Instr *             ToVarUses(IR::Instr *instr, IR::Opnd *opnd, bool isDst, Value *val);
-    void                    ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block);
+    void                    ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, IR::Instr* insertBeforeInstr = nullptr);
     IR::Instr *             ToVar(IR::Instr *instr, IR::RegOpnd *regOpnd, BasicBlock *block, Value *val, bool needsUpdate);
     void                    ToInt32(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, bool lossy, IR::Instr *insertBeforeInstr = nullptr);
     void                    ToFloat64(BVSparse<JitArenaAllocator> *bv, BasicBlock *block);

+ 2 - 10
lib/Backend/GlobOptBailOut.cpp

@@ -31,7 +31,7 @@ GlobOpt::CaptureValuesFromScratch(BasicBlock * block,
 
     block->globOptData.changedSyms->ClearAll();
 
-    FOREACH_GLOBHASHTABLE_ENTRY(bucket, block->globOptData.symToValueMap)
+    FOREACH_VALUEHASHTABLE_ENTRY(GlobHashBucket, bucket, block->globOptData.symToValueMap)
     {
         value = bucket.element;
         valueInfo = value->GetValueInfo();
@@ -48,7 +48,7 @@ GlobOpt::CaptureValuesFromScratch(BasicBlock * block,
         }
         block->globOptData.changedSyms->Set(sym->m_id);
     }
-    NEXT_GLOBHASHTABLE_ENTRY;
+    NEXT_VALUEHASHTABLE_ENTRY;
 
     if (argsToCapture)
     {
@@ -239,14 +239,6 @@ GlobOpt::CaptureValuesIncremental(BasicBlock * block,
 void
 GlobOpt::CaptureValues(BasicBlock *block, BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator>* argsToCapture)
 {
-    if (!this->func->DoGlobOptsForGeneratorFunc())
-    {
-        // TODO[generators][ianhall]: Enable constprop and copyprop for generator functions; see GlobOpt::CopyProp()
-        // Even though CopyProp is disabled for generator functions we must also not put the copy-prop sym into the
-        // bailOutInfo so that the bailOutInfo keeps track of the key sym in its byteCodeUpwardExposed list.
-        return;
-    }
-
     CapturedValues capturedValues;
     SListBase<ConstantStackSymValue>::EditingIterator bailOutConstValuesIter(&capturedValues.constantValues);
     SListBase<CopyPropSyms>::EditingIterator bailOutCopySymsIter(&capturedValues.copyPropSyms);

+ 65 - 43
lib/Backend/GlobOptBlockData.cpp

@@ -1654,60 +1654,82 @@ GlobOptBlockData::IsFloat64TypeSpecialized(Sym const * sym) const
 }
 
 void
-GlobOptBlockData::KillStateForGeneratorYield()
+GlobOptBlockData::KillStateForGeneratorYield(IR::Instr* yieldInstr)
 {
-    /*
-    TODO[generators][ianhall]: Do a ToVar on any typespec'd syms before the bailout so that we can enable typespec in generators without bailin having to restore typespec'd values
-    FOREACH_BITSET_IN_SPARSEBV(symId, this->liveInt32Syms)
-    {
-        this->ToVar(instr, , this->globOpt->currentBlock, , );
-    }
-    NEXT_BITSET_IN_SPARSEBV;
-
-    FOREACH_BITSET_IN_SPARSEBV(symId, this->liveInt32Syms)
-    {
-        this->ToVar(instr, , this->globOpt->currentBlock, , );
-    }
-    NEXT_BITSET_IN_SPARSEBV;
-    */
+    this->liveInt32Syms->Minus(this->liveVarSyms);
+    this->globOpt->ToVar(liveInt32Syms, this->globOpt->currentBlock, yieldInstr /* insertBeforeInstr */);
+    this->liveInt32Syms->ClearAll();
 
-    FOREACH_GLOBHASHTABLE_ENTRY(bucket, this->symToValueMap)
-    {
-        ValueType type = bucket.element->GetValueInfo()->Type().ToLikely();
-        bucket.element = this->globOpt->NewGenericValue(type);
-    }
-    NEXT_GLOBHASHTABLE_ENTRY;
+    this->liveFloat64Syms->Minus(this->liveVarSyms);
+    this->globOpt->ToVar(liveFloat64Syms, this->globOpt->currentBlock, yieldInstr /* insertBeforeInstr */);
+    this->liveFloat64Syms->ClearAll();
 
-    this->exprToValueMap->ClearAll();
-    this->liveFields->ClearAll();
-    this->liveArrayValues->ClearAll();
-    if (this->maybeWrittenTypeSyms)
-    {
-        this->maybeWrittenTypeSyms->ClearAll();
-    }
-    this->isTempSrc->ClearAll();
-    this->liveInt32Syms->ClearAll();
     this->liveLossyInt32Syms->ClearAll();
-    this->liveFloat64Syms->ClearAll();
     // Keep this->liveVarSyms as is
     // Keep this->argObjSyms as is
 
-    // MarkTemp should be disabled for generator functions for now
-    Assert(this->maybeTempObjectSyms == nullptr || this->maybeTempObjectSyms->IsEmpty());
-    Assert(this->canStoreTempObjectSyms == nullptr || this->canStoreTempObjectSyms->IsEmpty());
+    this->hasCSECandidates = false;
+
+    // No need to clear `isTempSrc` (used for in-place string concat)
 
-    this->valuesToKillOnCalls->Clear();
-    if (this->inductionVariables)
-    {
-        this->inductionVariables->Clear();
+    this->exprToValueMap->ClearAll();
+
+    this->KillSymToValueMapForGeneratorYield();
+}
+
+void
+GlobOptBlockData::KillSymToValueMapForGeneratorYield()
+{
+    // Remove illegal symToValueMap entries whose symstores don't have bytecode registers
+    // Hash table bucket key-value visualization: { bucket.value: bucket.element }
+    //
+    // Idea:
+    // Multiple symbols can map to the same value which has a symstore
+    // (multiple keys map to same value).
+    // Since the symstore might not have a bytecode register, our first pass
+    // through the map attemps to use the symbol (key) as a symstore for that value.
+    // This allows us to still retain such entries.
+    // After the first pass, any symToValueMap entries whose symstores don't have
+    // bytecode registers will be cleared.
+    FOREACH_VALUEHASHTABLE_ENTRY(GlobHashBucket, bucket, this->symToValueMap)
+    {
+        if (bucket.element == nullptr)
+        {
+            continue;
+        }
+
+        Sym* symStore = bucket.element->GetValueInfo()->GetSymStore();
+        if (symStore != nullptr && symStore->IsStackSym() && symStore->AsStackSym()->HasByteCodeRegSlot())
+        {
+            continue;
+        }
+
+        Sym* sym = bucket.value;
+        if (sym != nullptr && sym->IsStackSym() && sym->AsStackSym()->HasByteCodeRegSlot())
+        {
+            bucket.element->GetValueInfo()->SetSymStore(sym);
+        }
     }
-    if (this->availableIntBoundChecks)
+    NEXT_VALUEHASHTABLE_ENTRY;
+
+    // Remove illegal entries
+    FOREACH_VALUEHASHTABLE_ENTRY_EDITING(GlobHashBucket, bucket, this->symToValueMap, iter)
     {
-        this->availableIntBoundChecks->Clear();
+        Value* value = bucket.element;
+        if (value == nullptr)
+        {
+            iter.RemoveCurrent(this->symToValueMap->alloc);
+        }
+        else
+        {
+            Sym* symStore = value->GetValueInfo()->GetSymStore();
+            if (symStore == nullptr || !symStore->IsStackSym() || !symStore->AsStackSym()->HasByteCodeRegSlot())
+            {
+                iter.RemoveCurrent(this->symToValueMap->alloc);
+            }
+        }
     }
-
-    // Keep bailout data as is
-    this->hasCSECandidates = false;
+    NEXT_VALUEHASHTABLE_ENTRY_EDITING;
 }
 
 #if DBG_DUMP

+ 2 - 1
lib/Backend/GlobOptBlockData.h

@@ -320,8 +320,9 @@ public:
 private:
 
     // Other
+    void                    KillSymToValueMapForGeneratorYield();
 public:
-    void                    KillStateForGeneratorYield();
+    void                    KillStateForGeneratorYield(IR::Instr *yieldInstr);
 
     // Debug
 public:

+ 1 - 2
lib/Backend/IRBuilder.cpp

@@ -1890,13 +1890,12 @@ IRBuilder::BuildReg2(Js::OpCode newOpcode, uint32 offset, Js::RegSlot R0, Js::Re
         this->m_lastInstr = instr->ConvertToBailOutInstr(instr, IR::BailOutForGeneratorYield);
 
         // This label indicates the bail-in section that we will jump to from the generator jump table
-        IR::LabelInstr* bailInLabel = IR::LabelInstr::New(Js::OpCode::GeneratorBailInLabel, m_func);
+        IR::LabelInstr* bailInLabel = IR::GeneratorBailInInstr::New(this->m_lastInstr /* yieldInstr */, m_func);
         bailInLabel->m_hasNonBranchRef = true;              // set to true so that we don't move this label around
         LABELNAMESET(bailInLabel, "GeneratorBailInLabel");
         this->AddInstr(bailInLabel, offset);
         this->m_func->AddYieldOffsetResumeLabel(nextOffset, bailInLabel);
 
-
 #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
         if (PHASE_TRACE(Js::Phase::BailInPhase, this->m_func))
         {

+ 184 - 72
lib/Backend/LinearScan.cpp

@@ -146,8 +146,6 @@ LinearScan::RegAlloc()
     }
 
     m_bailOutRecordCount = 0;
-    IR::Instr * insertBailInAfter = nullptr;
-    BailOutInfo * bailOutInfoForBailIn = nullptr;
     bool endOfBasicBlock = true;
     FOREACH_INSTR_EDITING(instr, instrNext, currentInstr)
     {
@@ -249,21 +247,6 @@ LinearScan::RegAlloc()
             }
 
             this->FillBailOutRecord(instr);
-            if (instr->GetBailOutKind() == IR::BailOutForGeneratorYield)
-            {
-                Assert(insertBailInAfter == nullptr);
-                bailOutInfoForBailIn = instr->GetBailOutInfo();
-                insertBailInAfter = instr->m_next;
-
-                // Insert right after the GeneratorBailInLabel
-                // The register allocator might insert some compensation code between
-                // the BailOutForGeneratorYield and the GeneratorBailInLabel, so our
-                // bail-in insertion point is not necessarily always the next instruction.
-                while (insertBailInAfter != nullptr && insertBailInAfter->m_opcode != Js::OpCode::GeneratorBailInLabel)
-                {
-                    insertBailInAfter = insertBailInAfter->m_next;
-                }
-            }
         }
 
         this->SetSrcRegs(instr);
@@ -304,11 +287,9 @@ LinearScan::RegAlloc()
             endOfBasicBlock = true;
         }
 
-        if (insertBailInAfter == instr)
+        if (instr->IsGeneratorBailInInstr())
         {
-            instrNext = this->bailIn.GenerateBailIn(instr, bailOutInfoForBailIn);
-            insertBailInAfter = nullptr;
-            bailOutInfoForBailIn = nullptr;
+            instrNext = this->bailIn.GenerateBailIn(instr->AsGeneratorBailInInstr());
         }
     } NEXT_INSTR_EDITING;
 
@@ -1485,7 +1466,6 @@ LinearScan::FillBailOutRecord(IR::Instr * instr)
     // Fill in the constants
     FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, value, &bailOutInfo->usedCapturedValues->constantValues, constantValuesIterator)
     {
-        AssertMsg(bailOutInfo->bailOutRecord->bailOutKind != IR::BailOutForGeneratorYield, "constant prop syms unexpected for bail-in for generator yield");
         StackSym * stackSym = value.Key();
         if(stackSym->HasArgSlotNum())
         {
@@ -1528,7 +1508,6 @@ LinearScan::FillBailOutRecord(IR::Instr * instr)
     // Fill in the copy prop syms
     FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSyms, &bailOutInfo->usedCapturedValues->copyPropSyms, copyPropSymsIter)
     {
-        AssertMsg(bailOutInfo->bailOutRecord->bailOutKind != IR::BailOutForGeneratorYield, "copy prop syms unexpected for bail-in for generator yield");
         StackSym * stackSym = copyPropSyms.Key();
         if(stackSym->HasArgSlotNum())
         {
@@ -4967,6 +4946,15 @@ LinearScan::GeneratorBailIn::GeneratorBailIn(Func* func, LinearScan* linearScan)
 
     // The environment is loaded before the resume jump table, no need to restore either.
     this->initializedRegs.Set(this->jitFnBody->GetEnvReg());
+
+    this->bailInSymbols = JitAnew(this->func->m_alloc, SListBase<BailInSymbol>);
+}
+
+LinearScan::GeneratorBailIn::~GeneratorBailIn()
+{
+    this->bailInSymbols->Clear(this->func->m_alloc);
+    this->bailInSymbols->Reset();
+    JitAdelete(this->func->m_alloc, this->bailInSymbols);
 }
 
 void LinearScan::GeneratorBailIn::SpillRegsForBailIn()
@@ -4998,14 +4986,16 @@ void LinearScan::GeneratorBailIn::SpillRegsForBailIn()
 //
 //   MOV sym(register), [rax + regslot offset]
 //
-IR::Instr* LinearScan::GeneratorBailIn::GenerateBailIn(IR::Instr* resumeLabelInstr, BailOutInfo* bailOutInfo)
+IR::Instr* LinearScan::GeneratorBailIn::GenerateBailIn(IR::GeneratorBailInInstr* bailInInstr)
 {
+    BailOutInfo* bailOutInfo = bailInInstr->GetYieldInstr()->GetBailOutInfo();
+
     Assert(!bailOutInfo->capturedValues || bailOutInfo->capturedValues->constantValues.Empty());
     Assert(!bailOutInfo->capturedValues || bailOutInfo->capturedValues->copyPropSyms.Empty());
     Assert(!bailOutInfo->liveLosslessInt32Syms || bailOutInfo->liveLosslessInt32Syms->IsEmpty());
     Assert(!bailOutInfo->liveFloat64Syms || bailOutInfo->liveFloat64Syms->IsEmpty());
 
-    IR::Instr* instrAfter = resumeLabelInstr->m_next;
+    IR::Instr* instrAfter = bailInInstr->m_next;
 
     // 1) Load the generator object that was passed as one of the arguments to the jitted frame
     LinearScan::InsertMove(this->interpreterFrameRegOpnd, this->CreateGeneratorObjectOpnd(), instrAfter);
@@ -5036,7 +5026,19 @@ IR::Instr* LinearScan::GeneratorBailIn::GenerateBailIn(IR::Instr* resumeLabelIns
     // this->InsertRestoreSymbols(bailOutInfo->capturedValues->argObjSyms, insertionPoint, saveInitializedReg);
     // 
     // - We move all argout symbols right before the call so we don't need to restore argouts either
-    this->InsertRestoreSymbols(bailOutInfo->byteCodeUpwardExposedUsed, insertionPoint);
+
+    this->BuildBailInSymbolList(
+        *bailOutInfo->byteCodeUpwardExposedUsed,
+        bailInInstr->GetUpwardExposedUses(),
+        bailInInstr->GetCapturedValues()
+    );
+
+    this->InsertRestoreSymbols(
+        *bailOutInfo->byteCodeUpwardExposedUsed,
+        bailInInstr->GetUpwardExposedUses(),
+        bailInInstr->GetCapturedValues(),
+        insertionPoint
+    );
     Assert(!this->func->IsStackArgsEnabled());
 
 #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
@@ -5051,36 +5053,139 @@ IR::Instr* LinearScan::GeneratorBailIn::GenerateBailIn(IR::Instr* resumeLabelIns
     return instrAfter;
 }
 
-void LinearScan::GeneratorBailIn::InsertRestoreSymbols(BVSparse<JitArenaAllocator>* symbols, BailInInsertionPoint& insertionPoint)
+void LinearScan::GeneratorBailIn::BuildBailInSymbolList(
+    const BVSparse<JitArenaAllocator>& byteCodeUpwardExposedUses,
+    const BVSparse<JitArenaAllocator>& upwardExposedUses,
+    const CapturedValues& capturedValues
+)
 {
-    if (symbols == nullptr)
+    this->bailInSymbols->Clear(this->func->m_alloc);
+
+    // Assume all symbols cannot be restored
+    BVSparse<JitArenaAllocator> unrestorableSymbols(this->func->m_alloc);
+    unrestorableSymbols.Or(&upwardExposedUses);
+
+    unrestorableSymbols.Minus(&this->initializedRegs);
+
+    // Symbols in byteCodeUpwardExposedUses are restorable
+    FOREACH_BITSET_IN_SPARSEBV(symId, &byteCodeUpwardExposedUses)
     {
-        return;
+        StackSym* stackSym = this->func->m_symTable->FindStackSym(symId);
+        Assert(stackSym);
+        unrestorableSymbols.Clear(symId);
+        if (this->NeedsReloadingSymWhenBailingIn(stackSym))
+        {
+            BailInSymbol bailInSym(symId /* fromByteCodeRegSlot */, symId /* toBackendId */);
+            bailInSymbols->PrependNode(this->func->m_alloc, bailInSym);
+        }
     }
+    NEXT_BITSET_IN_SPARSEBV;
 
-    FOREACH_BITSET_IN_SPARSEBV(symId, symbols)
+    // Symbols that were copy-prop'd
+    FOREACH_SLISTBASE_ENTRY(CopyPropSyms, copyPropSym, &capturedValues.copyPropSyms)
+    {
+        Sym* key = copyPropSym.Key();
+        Sym* value = copyPropSym.Value();
+        if (unrestorableSymbols.Test(value->m_id))
+        {
+            Assert(key->IsStackSym() && (key->AsStackSym()->HasByteCodeRegSlot() || key->AsStackSym()->IsFromByteCodeConstantTable()));
+            unrestorableSymbols.Clear(value->m_id);
+            if (this->NeedsReloadingSymWhenBailingIn(copyPropSym.Key()))
+            {
+                BailInSymbol bailInSym(key->m_id /* fromByteCodeRegSlot */, value->m_id /* toBackendId */);
+                bailInSymbols->PrependNode(this->func->m_alloc, bailInSym);
+            }
+        }
+    }
+    NEXT_SLISTBASE_ENTRY;
+
+    // Used constant values
+    FOREACH_SLISTBASE_ENTRY(ConstantStackSymValue, entry, &capturedValues.constantValues)
+    {
+        SymID symId = entry.Key()->m_id;
+        if (unrestorableSymbols.TestAndClear(symId))
+        {
+            StackSym* stackSym = this->func->m_symTable->FindStackSym(symId);
+            Assert(stackSym);
+            if (this->NeedsReloadingSymWhenBailingIn(stackSym))
+            {
+                BailoutConstantValue constValue = entry.Value();
+                Js::Var varValue = constValue.ToVar(this->func);
+                Assert(!stackSym->IsFromByteCodeConstantTable());
+                BailInSymbol bailInSym(
+                    symId /* fromByteCodeRegSlot */,
+                    symId /* toBackendId */,
+                    true /* restoreConstDirectly */,
+                    varValue
+                );
+                bailInSymbols->PrependNode(this->func->m_alloc, bailInSym);
+            }
+        }
+    }
+    NEXT_SLISTBASE_ENTRY;
+
+    FOREACH_BITSET_IN_SPARSEBV_EDITING(symId, &unrestorableSymbols)
     {
         StackSym* stackSym = this->func->m_symTable->FindStackSym(symId);
+        Assert(stackSym);
         Lifetime* lifetime = stackSym->scratch.linearScan.lifetime;
-
-        if (!this->NeedsReloadingValueWhenBailIn(stackSym, lifetime))
+        if (
+            // Special backend symbols that don't need to be restored
+            (!stackSym->HasByteCodeRegSlot() && !this->NeedsReloadingBackendSymWhenBailingIn(stackSym)) ||
+            // Symbols already in the constant table don't need to be restored either
+            stackSym->IsFromByteCodeConstantTable() ||
+            // Symbols having no lifetimes
+            lifetime == nullptr
+        )
         {
-            continue;
+            unrestorableSymbols.Clear(stackSym->m_id);
         }
+    }
+    NEXT_BITSET_IN_SPARSEBV_EDITING;
 
-        Js::RegSlot regSlot = stackSym->GetByteCodeRegSlot();
-        IR::Opnd* srcOpnd = IR::IndirOpnd::New(
-            this->interpreterFrameRegOpnd,
-            this->GetOffsetFromInterpreterStackFrame(regSlot),
-            stackSym->GetType(),
-            this->func
-        );
+    AssertMsg(unrestorableSymbols.IsEmpty(), "There are unrestorable backend-only symbols across yield points");
+}
+
+void LinearScan::GeneratorBailIn::InsertRestoreSymbols(
+    const BVSparse<JitArenaAllocator>& byteCodeUpwardExposedUses,
+    const BVSparse<JitArenaAllocator>& upwardExposedUses,
+    const CapturedValues& capturedValues,
+    BailInInsertionPoint& insertionPoint
+)
+{
+    FOREACH_SLISTBASE_ENTRY(BailInSymbol, bailInSymbol, this->bailInSymbols)
+    {
+        StackSym* dstSym = this->func->m_symTable->FindStackSym(bailInSymbol.toBackendId);
+        Lifetime* lifetime = dstSym->scratch.linearScan.lifetime;
+        Assert(lifetime);
+
+        StackSym* copyPropStackSym = this->func->m_symTable->FindStackSym(bailInSymbol.fromByteCodeRegSlot);
+        Js::RegSlot regSlot = copyPropStackSym->GetByteCodeRegSlot();
+        IR::Opnd* srcOpnd;
+        
+        if (bailInSymbol.restoreConstDirectly)
+        {
+            srcOpnd = IR::AddrOpnd::New(bailInSymbol.constValue, IR::AddrOpndKind::AddrOpndKindDynamicVar, this->func);
+        }
+        else
+        {
+            srcOpnd = IR::IndirOpnd::New(
+                this->interpreterFrameRegOpnd,
+                this->GetOffsetFromInterpreterStackFrame(regSlot),
+                copyPropStackSym->GetType(),
+                this->func
+            );
+        }
 
         if (lifetime->isSpilled)
         {
-            Assert(!stackSym->IsConst());
+#if DBG
+            AssertMsg(!dstSym->IsConst(), "We don't need to restore constant symbol that has already been spilled");
+            // Suppress assert in DbCheckPostLower
+            dstSym->m_allocated = true;
+#endif
             // Stack restores require an extra register since we can't move an indir directly to an indir on amd64
-            IR::SymOpnd* dstOpnd = IR::SymOpnd::New(stackSym, stackSym->GetType(), this->func);
+            IR::SymOpnd* dstOpnd = IR::SymOpnd::New(dstSym, dstSym->GetType(), this->func);
             LinearScan::InsertMove(this->tempRegOpnd, srcOpnd, insertionPoint.instrInsertStackSym);
             LinearScan::InsertMove(dstOpnd, this->tempRegOpnd, insertionPoint.instrInsertStackSym);
         }
@@ -5093,13 +5198,13 @@ void LinearScan::GeneratorBailIn::InsertRestoreSymbols(BVSparse<JitArenaAllocato
 
             IR::Instr* instr;
 
-            if (stackSym->IsConst())
+            if (dstSym->IsConst())
             {
-                instr = this->linearScan->InsertLoad(insertionPoint.instrInsertRegSym, stackSym, lifetime->reg);
+                instr = this->linearScan->InsertLoad(insertionPoint.instrInsertRegSym, dstSym, lifetime->reg);
             }
             else
             {
-                IR::RegOpnd* dstRegOpnd = IR::RegOpnd::New(stackSym, stackSym->GetType(), this->func);
+                IR::RegOpnd* dstRegOpnd = IR::RegOpnd::New(dstSym, dstSym->GetType(), this->func);
                 dstRegOpnd->SetReg(lifetime->reg);
                 instr = LinearScan::InsertMove(dstRegOpnd, srcOpnd, insertionPoint.instrInsertRegSym);
             }
@@ -5121,8 +5226,7 @@ void LinearScan::GeneratorBailIn::InsertRestoreSymbols(BVSparse<JitArenaAllocato
 
                 if (insertionPoint.raxRestoreInstr != nullptr)
                 {
-                    AssertMsg(false, "this is unexpected until copy prop is enabled");
-                    // rax was mapped to multiple bytecode registers.  Obviously only the first
+                    // rax was mapped to multiple bytecode registers. Obviously only the first
                     // restore we do will work so change all following stores to `mov rax, rax`.
                     // We still need to keep them around for RecordDef in case the corresponding
                     // dst sym is spilled later on.
@@ -5136,33 +5240,11 @@ void LinearScan::GeneratorBailIn::InsertRestoreSymbols(BVSparse<JitArenaAllocato
             this->linearScan->RecordDef(lifetime, instr, 0);
         }
     }
-    NEXT_BITSET_IN_SPARSEBV;
+    NEXT_SLISTBASE_ENTRY;
 }
 
-bool LinearScan::GeneratorBailIn::NeedsReloadingValueWhenBailIn(StackSym* sym, Lifetime* lifetime) const
+bool LinearScan::GeneratorBailIn::NeedsReloadingBackendSymWhenBailingIn(StackSym* sym) const
 {
-    if (sym->IsConst())
-    {
-        if (this->func->GetJITFunctionBody()->RegIsConstant(sym->GetByteCodeRegSlot()))
-        {
-            // Resume jump table is inserted after we load symbols in the constant table,
-            // so at bail-in point, we can have two scenarios:
-            //  1) the symbols are still in registers
-            //  2) the symbols have already been "spilled"
-            // Since we don't save/restore constant symbols and simply insert loads of their
-            // values before use, in either case, there is no need to reload the values
-            return false;
-        }
-        else
-        {
-            // Again, for all other constant symbols, if they are bytecodeUpwardExposed and they have
-            // already been "spilled", which means that the register allocator will automatically
-            // insert the load of their values later before use, we don't need to restore.
-            // Only restore symbols that are still in registers
-            return !lifetime->isSpilled;
-        }
-    }
-
     // If we have for-in in the generator, don't need to reload the symbol again as it is done
     // during the resume jump table
     if (this->func->GetForInEnumeratorSymForGeneratorSym() && this->func->GetForInEnumeratorSymForGeneratorSym()->m_id == sym->m_id)
@@ -5170,6 +5252,36 @@ bool LinearScan::GeneratorBailIn::NeedsReloadingValueWhenBailIn(StackSym* sym, L
         return false;
     }
 
+    return true;
+}
+
+bool LinearScan::GeneratorBailIn::NeedsReloadingSymWhenBailingIn(StackSym* sym) const
+{
+    if (sym->IsFromByteCodeConstantTable())
+    {
+        // Resume jump table is inserted after we load symbols in the constant table,
+        // so at bail-in point, we can have two scenarios:
+        //  1) the symbols are still in registers
+        //  2) the symbols have already been "spilled"
+        // Since we don't save/restore constant symbols and simply insert loads of their
+        // values before use, in either case, there is no need to reload the values
+        return false;
+    }
+
+    if (!sym->HasByteCodeRegSlot())
+    {
+        return this->NeedsReloadingBackendSymWhenBailingIn(sym);
+    }
+
+    if (sym->IsConst())
+    {
+        // For all other constant symbols, if they are bytecodeUpwardExposed and they have
+        // already been "spilled", which means that the register allocator will automatically
+        // insert the load of their values later before use, we don't need to restore.
+        // Only restore symbols that are still in registers
+        return !sym->scratch.linearScan.lifetime->isSpilled;
+    }
+
     // Check for other special registers that are already initialized
     return !this->initializedRegs.Test(sym->GetByteCodeRegSlot());
 }
@@ -5223,7 +5335,7 @@ void LinearScan::GeneratorBailIn::InsertBailInTrace(BVSparse<JitArenaAllocator>*
         StackSym* stackSym = this->func->m_symTable->FindStackSym(symId);
         Lifetime* lifetime = stackSym->scratch.linearScan.lifetime;
 
-        if (!this->NeedsReloadingValueWhenBailIn(stackSym, lifetime))
+        if (!this->NeedsReloadingSymWhenBailingIn(stackSym))
         {
             continue;
         }

+ 30 - 3
lib/Backend/LinearScan.h

@@ -256,28 +256,55 @@ private:
             IR::Instr* instrInsertRegSym;
         };
 
+        struct BailInSymbol
+        {
+            const SymID fromByteCodeRegSlot;
+            const SymID toBackendId;
+            const bool restoreConstDirectly : 1;
+            const Js::Var constValue;
+            BailInSymbol(SymID fromByteCodeRegSlot, SymID toBackendId, bool restoreConstDirectly = false, Js::Var constValue = nullptr):
+                fromByteCodeRegSlot(fromByteCodeRegSlot),
+                toBackendId(toBackendId),
+                restoreConstDirectly(restoreConstDirectly),
+                constValue(constValue) {}
+        };
+
         Func* const func;
         LinearScan* const linearScan;
         const JITTimeFunctionBody* const jitFnBody;
         BVSparse<JitArenaAllocator> initializedRegs;
+        SListBase<BailInSymbol>* bailInSymbols;
 
         static constexpr int regNum = 2;
         const RegNum regs[regNum];
         IR::RegOpnd* const interpreterFrameRegOpnd;
         IR::RegOpnd* const tempRegOpnd;
 
-        bool NeedsReloadingValueWhenBailIn(StackSym* sym, Lifetime* lifetime) const;
+        bool NeedsReloadingBackendSymWhenBailingIn(StackSym* sym) const;
+        bool NeedsReloadingSymWhenBailingIn(StackSym* sym) const;
         uint32 GetOffsetFromInterpreterStackFrame(Js::RegSlot regSlot) const;
         IR::SymOpnd* CreateGeneratorObjectOpnd() const;
 
-        void InsertRestoreSymbols(BVSparse<JitArenaAllocator>* symbols, BailInInsertionPoint& insertionPoint);
+        void InsertRestoreSymbols(
+            const BVSparse<JitArenaAllocator>& byteCodeUpwardExposedUses,
+            const BVSparse<JitArenaAllocator>& upwardExposedUses,
+            const CapturedValues& capturedValues,
+            BailInInsertionPoint& insertionPoint
+        );
+
+        void BuildBailInSymbolList(
+            const BVSparse<JitArenaAllocator>& byteCodeUpwardExposedUses,
+            const BVSparse<JitArenaAllocator>& upwardExposedUses,
+            const CapturedValues& capturedValues
+        );
 
 #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
         void InsertBailInTrace(BVSparse<JitArenaAllocator>* symbols, IR::Instr* insertBeforeInstr);
 #endif
     public:
         GeneratorBailIn(Func* func, LinearScan* linearScan);
-        IR::Instr* GenerateBailIn(IR::Instr* resumeLabelInstr, BailOutInfo* bailOutInfo);
+        ~GeneratorBailIn();
+        IR::Instr* GenerateBailIn(IR::GeneratorBailInInstr* bailInInstr);
         void SpillRegsForBailIn();
     };
 

+ 14 - 1
lib/Backend/TempTracker.cpp

@@ -289,6 +289,16 @@ TempTracker<T>::ProcessUse(StackSym * sym, BackwardPass * backwardPass)
     }
 };
 
+template <typename T>
+void
+TempTracker<T>::DisallowMarkTempAcrossYield(BVSparse<JitArenaAllocator>* bytecodeUpwardExposed)
+{
+    if (bytecodeUpwardExposed != nullptr)
+    {
+        this->nonTempSyms.Or(bytecodeUpwardExposed);
+    }
+}
+
 template <typename T>
 void
 TempTracker<T>::MarkTemp(StackSym * sym, BackwardPass * backwardPass)
@@ -1421,7 +1431,10 @@ ObjectTempVerify::SetDstIsTemp(bool dstIsTemp, bool dstIsTempTransferred, IR::In
                     Output::Flush();
                 }
 #endif
-                Assert(!instr->dstIsTempObject);
+                // In a generator function, we don't allow marking temp across yields. Since this assert makes
+                // sure that all instructions whose destinations produce temps are marked, it is not
+                // applicable for generators
+                Assert(instr->m_func->GetJITFunctionBody()->IsCoroutine() || !instr->dstIsTempObject);
             }
         }
     }

+ 1 - 0
lib/Backend/TempTracker.h

@@ -44,6 +44,7 @@ public:
 
     // Actual mark temp algorithm that are shared, but have different condition based
     // on the type of tracker as the template parameter
+    void DisallowMarkTempAcrossYield(BVSparse<JitArenaAllocator>* bytecodeUpwardExposed);
     void ProcessUse(StackSym * sym, BackwardPass * backwardPass);
     void MarkTemp(StackSym * sym, BackwardPass * backwardPass);