diff --git a/Build/NuGet/.pack-version b/Build/NuGet/.pack-version
index a52b6ed9087..45333f34e4b 100644
--- a/Build/NuGet/.pack-version
+++ b/Build/NuGet/.pack-version
@@ -1 +1 @@
-1.11.21
+1.11.22
diff --git a/lib/Backend/BackwardPass.cpp b/lib/Backend/BackwardPass.cpp
index e64a68c2cad..2cdeac4c9ca 100644
--- a/lib/Backend/BackwardPass.cpp
+++ b/lib/Backend/BackwardPass.cpp
@@ -8648,20 +8648,7 @@ BackwardPass::SetWriteThroughSymbolsSetForRegion(BasicBlock * catchOrFinallyBloc
 bool
 BackwardPass::CheckWriteThroughSymInRegion(Region* region, StackSym* sym)
 {
-    if (region->GetType() == RegionTypeRoot)
-    {
-        return false;
-    }
-
-    // if the current region is a try region, check in its write-through set,
-    // otherwise (current = catch region) look in the first try ancestor's write-through set
-    Region * selfOrFirstTryAncestor = region->GetSelfOrFirstTryAncestor();
-    if (!selfOrFirstTryAncestor)
-    {
-        return false;
-    }
-    Assert(selfOrFirstTryAncestor->GetType() == RegionTypeTry);
-    return selfOrFirstTryAncestor->writeThroughSymbolsSet && selfOrFirstTryAncestor->writeThroughSymbolsSet->Test(sym->m_id);
+    return region->CheckWriteThroughSym(sym);
 }
 
 #if DBG
diff --git a/lib/Backend/Func.cpp b/lib/Backend/Func.cpp
index 898d054fa7f..2958a1300d8 100644
--- a/lib/Backend/Func.cpp
+++ b/lib/Backend/Func.cpp
@@ -55,6 +55,7 @@ Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem,
     m_localClosureSym(nullptr),
     m_paramClosureSym(nullptr),
     m_localFrameDisplaySym(nullptr),
+    m_inlineeFrameDisplaySyms(nullptr),
     m_bailoutReturnValueSym(nullptr),
     m_hasBailedOutSym(nullptr),
     m_inlineeFrameStartSym(nullptr),
@@ -1124,6 +1125,16 @@ void Func::InitLocalClosureSyms()
     }
 }
 
+void
+Func::AddInlineeFrameDisplaySym(StackSym *inlineeFrameDisplaySym)
+{
+    if (m_inlineeFrameDisplaySyms == nullptr)
+    {
+        m_inlineeFrameDisplaySyms = JitAnew(this->m_alloc, SList<StackSym*>, this->m_alloc);
+    }
+    m_inlineeFrameDisplaySyms->Prepend(inlineeFrameDisplaySym);
+}
+
 bool
 Func::IsTrackCompoundedIntOverflowDisabled() const
 {
diff --git a/lib/Backend/Func.h b/lib/Backend/Func.h
index 03eedb5f448..f376dd4d5dc 100644
--- a/lib/Backend/Func.h
+++ b/lib/Backend/Func.h
@@ -441,6 +441,7 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
     StackSym *GetLocalFrameDisplaySym() const { return m_localFrameDisplaySym; }
     void SetLocalFrameDisplaySym(StackSym *sym) { m_localFrameDisplaySym = sym; }
+    void AddInlineeFrameDisplaySym(StackSym *sym);
 
     intptr_t GetJittedLoopIterationsSinceLastBailoutAddress() const;
 
     void EnsurePinnedTypeRefs();
@@ -678,6 +679,7 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece;
     StackSym *             m_paramClosureSym;
     StackSym *             m_localFrameDisplaySym;
     StackSym *             m_bailoutReturnValueSym;
+    SList<StackSym*> *     m_inlineeFrameDisplaySyms;
     StackSym *             m_hasBailedOutSym;
     uint                   m_forInLoopMaxDepth;
     uint                   m_forInLoopBaseDepth;
diff --git a/lib/Backend/GlobOpt.h b/lib/Backend/GlobOpt.h
index 725ac0ea975..a53b5aea1f1 100644
--- a/lib/Backend/GlobOpt.h
+++ b/lib/Backend/GlobOpt.h
@@ -960,6 +960,8 @@ class GlobOpt
     void                    KillAllObjectTypes(BVSparse<JitArenaAllocator>* liveFields = nullptr);
     void                    EndFieldLifetime(IR::SymOpnd *symOpnd);
     PropertySym *           CopyPropPropertySymObj(IR::SymOpnd *opnd, IR::Instr *instr);
+    void                    OnCopyPropInPrePass(StackSym * copySym, IR::Instr * instr, BasicBlock * block);
+    void                    UpdateUpwardExposedUses(StackSym * sym, IR::Instr * instrLast, BasicBlock * block, JsUtil::Stack<BasicBlock*> *blockStack, BVSparse<JitArenaAllocator>* blocksProcessed);
     static bool             NeedsTypeCheckBailOut(const IR::Instr *instr, IR::PropertySymOpnd *propertySymOpnd, bool isStore, bool* pIsTypeCheckProtected, IR::BailOutKind *pBailOutKind);
     IR::Instr *             PreOptPeep(IR::Instr *instr);
     IR::Instr *             OptPeep(IR::Instr *instr, Value *src1Val, Value *src2Val);
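
For context, the write-through test removed from BackwardPass above (and re-added as Region::CheckWriteThroughSym later in this patch) behaves like the following self-contained sketch. SimpleRegion, the parent pointer, and the bitset are simplified stand-ins for ChakraCore's Region, StackSym, and writeThroughSymbolsSet, and the ancestor walk is an assumption about what GetSelfOrFirstTryAncestor does (return the region itself if it is a try, otherwise the nearest enclosing try):

    #include <bitset>
    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    enum class RegionType { Root, Try, Catch, Finally };

    struct SimpleRegion
    {
        RegionType type;
        SimpleRegion* parent = nullptr;
        std::bitset<1024> writeThroughSyms;     // stand-in for writeThroughSymbolsSet

        // Assumed behavior of GetSelfOrFirstTryAncestor: this region if it is a try,
        // otherwise the nearest enclosing try region (nullptr if there is none).
        SimpleRegion* SelfOrFirstTryAncestor()
        {
            SimpleRegion* r = this;
            while (r != nullptr && r->type != RegionType::Try)
            {
                r = r->parent;
            }
            return r;
        }

        // Mirrors the shape of the check in the patch: root regions have no
        // write-through syms; try/catch regions consult the nearest try's set.
        bool CheckWriteThroughSym(uint32_t symId)
        {
            if (type == RegionType::Root)
            {
                return false;
            }
            SimpleRegion* tryRegion = SelfOrFirstTryAncestor();
            if (tryRegion == nullptr)
            {
                return false;
            }
            assert(tryRegion->type == RegionType::Try);
            return tryRegion->writeThroughSyms.test(symId);
        }
    };

    int main()
    {
        SimpleRegion root{ RegionType::Root };
        SimpleRegion tryRegion{ RegionType::Try, &root };
        SimpleRegion catchRegion{ RegionType::Catch, &tryRegion };

        tryRegion.writeThroughSyms.set(7);      // sym 7 must survive into the handler

        printf("root: %d, try: %d, catch: %d\n",
               root.CheckWriteThroughSym(7),
               tryRegion.CheckWriteThroughSym(7),
               catchRegion.CheckWriteThroughSym(7));   // prints "root: 0, try: 1, catch: 1"
        return 0;
    }
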
diff --git a/lib/Backend/GlobOptFields.cpp b/lib/Backend/GlobOptFields.cpp
index 48d60c26693..fcb524b246c 100644
--- a/lib/Backend/GlobOptFields.cpp
+++ b/lib/Backend/GlobOptFields.cpp
@@ -1917,6 +1917,11 @@ GlobOpt::CopyPropPropertySymObj(IR::SymOpnd *symOpnd, IR::Instr *instr)
             symOpnd->m_sym = newProp;
             symOpnd->SetIsJITOptimizedReg(true);
 
+            if (this->IsLoopPrePass())
+            {
+                this->OnCopyPropInPrePass(copySym, instr, this->currentBlock);
+            }
+
             if (symOpnd->IsPropertySymOpnd())
             {
                 IR::PropertySymOpnd *propertySymOpnd = symOpnd->AsPropertySymOpnd();
@@ -1959,6 +1964,77 @@ GlobOpt::CopyPropPropertySymObj(IR::SymOpnd *symOpnd, IR::Instr *instr)
     return propertySym;
 }
 
+void
+GlobOpt::OnCopyPropInPrePass(StackSym * copySym, IR::Instr * instr, BasicBlock * block)
+{
+    // Copy prop in the prepass may make upwardExposedUses out of date. Update it now.
+
+    if (block->upwardExposedUses->Test(copySym->m_id))
+    {
+        // Nothing to do
+        return;
+    }
+
+    // Use a to-do stack to avoid recursion and a bv to avoid repeated work
+    JsUtil::Stack<BasicBlock*> blockStack(this->tempAlloc);
+    BVSparse<JitArenaAllocator> blocksProcessed(this->tempAlloc);
+
+    blocksProcessed.Set(block->GetBlockNum());
+    this->UpdateUpwardExposedUses(copySym, instr, block, &blockStack, &blocksProcessed);
+
+    while (!blockStack.Empty())
+    {
+        block = blockStack.Pop();
+        Assert(blocksProcessed.Test(block->GetBlockNum()));
+        this->UpdateUpwardExposedUses(copySym, block->GetLastInstr(), block, &blockStack, &blocksProcessed);
+    }
+}
+
+void
+GlobOpt::UpdateUpwardExposedUses(StackSym * sym, IR::Instr * instrLast, BasicBlock * block, JsUtil::Stack<BasicBlock*> *blockStack, BVSparse<JitArenaAllocator>* blocksProcessed)
+{
+    Assert(blocksProcessed->Test(block->GetBlockNum()));
+    Assert(!block->upwardExposedUses->Test(sym->m_id));
+
+    // Walk the block backward looking for a def. If the sym is write-through in this block, though,
+    // treat it as upward-exposed regardless of the presence of defs.
+    IR::LabelInstr * instrFirst = block->GetFirstInstr()->AsLabelInstr();
+    Region * region = instrFirst->GetRegion();
+    if (region == nullptr || !region->CheckWriteThroughSym(sym))
+    {
+        FOREACH_INSTR_BACKWARD_IN_RANGE(instr, instrLast, instrFirst)
+        {
+            // If the instr defines the sym, quit without setting upwardExposedUses
+            IR::Opnd * dst = instr->GetDst();
+            if (dst != nullptr && dst->GetStackSym() == sym)
+            {
+                return;
+            }
+            Assert(dst == nullptr || dst->GetStackSym() == nullptr || dst->GetStackSym()->m_id != sym->m_id);
+        }
+        NEXT_INSTR_BACKWARD_IN_RANGE;
+    }
+
+    // The sym is upward exposed at this block. Now update the to-do set with the predecessors.
+
+    Assert(block->GetBlockNum() != 0);
+    block->upwardExposedUses->Set(sym->m_id);
+    FOREACH_PREDECESSOR_BLOCK(blockPred, block)
+    {
+        if (blockPred->upwardExposedUses == nullptr || blockPred->upwardExposedUses->Test(sym->m_id))
+        {
+            // If the bv is null, that means the main pass is done with this block
+            // and so done with its predecessors
+            continue;
+        }
+        if (!blocksProcessed->TestAndSet(blockPred->GetBlockNum()))
+        {
+            blockStack->Push(blockPred);
+        }
+    }
+    NEXT_PREDECESSOR_BLOCK;
+}
+
 void
 GlobOpt::UpdateObjPtrValueType(IR::Opnd * opnd, IR::Instr * instr)
 {
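
A compressed model of the control-flow walk that OnCopyPropInPrePass and UpdateUpwardExposedUses perform above: once the loop prepass copy-props copySym into a use, the sym must become upward-exposed in every block from that use back up to its defs. The Block type, the per-block def summary, and std::stack/std::set below are simplified stand-ins for BasicBlock, the backward instruction walk, JsUtil::Stack, and BVSparse; the write-through special case is only noted in a comment.

    #include <cstdint>
    #include <cstdio>
    #include <set>
    #include <stack>
    #include <vector>

    struct Block
    {
        int num;
        std::vector<Block*> preds;
        std::set<uint32_t> defs;                // syms defined in this block (stand-in for the instr walk)
        std::set<uint32_t> upwardExposedUses;
    };

    // Mark symId upward-exposed at useBlock and flow the mark into predecessors until
    // a defining block is reached, using an explicit to-do stack plus a processed set.
    void MarkUpwardExposed(Block* useBlock, uint32_t symId)
    {
        if (useBlock->upwardExposedUses.count(symId))
        {
            return;     // already recorded; nothing to do
        }

        std::stack<Block*> todo;                // like the JsUtil::Stack "to-do stack"
        std::set<int> processed;                // like the BVSparse "blocksProcessed"
        processed.insert(useBlock->num);
        todo.push(useBlock);

        while (!todo.empty())
        {
            Block* block = todo.top();
            todo.pop();

            if (block->defs.count(symId))
            {
                // A def here satisfies the use, so stop at this block. (The real code
                // walks instructions backward from the use point instead of consulting
                // a def summary, and keeps propagating for write-through syms in try
                // regions even past a def.)
                continue;
            }

            block->upwardExposedUses.insert(symId);
            for (Block* pred : block->preds)
            {
                // Skip predecessors that already expose the sym or were queued before.
                if (!pred->upwardExposedUses.count(symId) && processed.insert(pred->num).second)
                {
                    todo.push(pred);
                }
            }
        }
    }

    int main()
    {
        Block a{ 1 }, b{ 2 }, c{ 3 };
        b.preds = { &a };
        c.preds = { &b };
        a.defs.insert(42);                      // sym 42 is defined in block a

        MarkUpwardExposed(&c, 42);              // a use of sym 42 appears in block c

        printf("exposed in a:%zu b:%zu c:%zu\n",
               a.upwardExposedUses.count(42), b.upwardExposedUses.count(42), c.upwardExposedUses.count(42));
        // prints "exposed in a:0 b:1 c:1"
        return 0;
    }
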
diff --git a/lib/Backend/IRBuilder.cpp b/lib/Backend/IRBuilder.cpp
index 75ee5c7f245..72dd33731bf 100644
--- a/lib/Backend/IRBuilder.cpp
+++ b/lib/Backend/IRBuilder.cpp
@@ -661,6 +661,11 @@ IRBuilder::Build()
                         dstOpnd, m_func), offset);
                 }
+                else if (m_func->GetTopFunc()->DoStackFrameDisplay())
+                {
+                    Assert(m_func->GetLocalFrameDisplaySym() != nullptr);
+                    m_func->GetTopFunc()->AddInlineeFrameDisplaySym(m_func->GetLocalFrameDisplaySym());
+                }
             }
         }
     }
diff --git a/lib/Backend/Lower.cpp b/lib/Backend/Lower.cpp
index daea39941e7..813227fe3e0 100644
--- a/lib/Backend/Lower.cpp
+++ b/lib/Backend/Lower.cpp
@@ -49,9 +49,11 @@ Lowerer::Lower()
     {
         EnsureStackFunctionListStackSym();
     }
+    StackSym * symInlineeFrameDisplayEnd = nullptr;
     if (m_func->DoStackFrameDisplay() && !m_func->IsLoopBody())
     {
-        AllocStackClosure();
+        symInlineeFrameDisplayEnd = StackSym::New(m_func);
+        AllocStackClosure(symInlineeFrameDisplayEnd);
     }
 
     AllocStackForInObjectEnumeratorArray();
@@ -140,6 +142,11 @@ Lowerer::Lower()
         EnsureZeroLastStackFunctionNext();
     }
 
+    if (symInlineeFrameDisplayEnd != nullptr)
+    {
+        InitializeInlineeFrameDisplays(symInlineeFrameDisplayEnd);
+    }
+
     if (!m_func->IsSimpleJit())
     {
 #if 0 // TODO michhol oop jit, reenable assert
@@ -6755,10 +6762,40 @@ Lowerer::EnsureStackFunctionListStackSym()
 }
 
 void
-Lowerer::AllocStackClosure()
+Lowerer::AllocStackClosure(StackSym * symInlineeFrameDisplayEnd)
 {
     m_func->StackAllocate(m_func->GetLocalFrameDisplaySym(), sizeof(Js::Var));
     m_func->StackAllocate(m_func->GetLocalClosureSym(), sizeof(Js::Var));
+
+    if (m_func->m_inlineeFrameDisplaySyms != nullptr)
+    {
+        FOREACH_SLIST_ENTRY(StackSym*, sym, m_func->m_inlineeFrameDisplaySyms)
+        {
+            m_func->StackAllocate(sym, sizeof(Js::Var));
+        }
+        NEXT_SLIST_ENTRY;
+    }
+    m_func->StackAllocate(symInlineeFrameDisplayEnd, sizeof(Js::Var));
+}
+
+void
+Lowerer::InitializeInlineeFrameDisplays(StackSym * symInlineeFrameDisplayEnd)
+{
+    if (m_func->m_inlineeFrameDisplaySyms != nullptr)
+    {
+        FOREACH_SLIST_ENTRY(StackSym*, sym, m_func->m_inlineeFrameDisplaySyms)
+        {
+            Assert(sym->IsAllocated());
+            InsertMove(IR::SymOpnd::New(sym, TyMachReg, m_func),
+                       IR::AddrOpnd::New(m_func->GetThreadContextInfo()->GetNullFrameDisplayAddr(), IR::AddrOpndKindDynamicMisc, m_func),
+                       m_func->GetFunctionEntryInsertionPoint());
+        }
+        NEXT_SLIST_ENTRY;
+    }
+    Assert(symInlineeFrameDisplayEnd->IsAllocated());
+    InsertMove(IR::SymOpnd::New(symInlineeFrameDisplayEnd, TyMachReg, m_func),
+               IR::AddrOpnd::New((void*)0, IR::AddrOpndKindDynamicMisc, m_func),
+               m_func->GetFunctionEntryInsertionPoint());
 }
 
 void
@@ -27112,6 +27149,12 @@ void Lowerer::LowerLdFrameDisplay(IR::Instr *instr, bool doStackFrameDisplay)
     else
     {
         GenerateRecyclerAlloc(IR::HelperAllocMemForFrameDisplay, allocSize, dstOpnd, instr);
+        if (instr->m_func != this->m_func && this->m_func->DoStackFrameDisplay())
+        {
+            StackSym * inlineeFrameDisplaySym = instr->m_func->GetLocalFrameDisplaySym();
+            Assert(inlineeFrameDisplaySym->IsAllocated());
+            InsertMove(IR::SymOpnd::New(inlineeFrameDisplaySym, TyMachReg, m_func), dstOpnd, instr);
+        }
     }
 
     // Copy contents of environment
diff --git a/lib/Backend/Lower.h b/lib/Backend/Lower.h
index 9bbafce2076..276a87c6270 100644
--- a/lib/Backend/Lower.h
+++ b/lib/Backend/Lower.h
@@ -196,7 +196,8 @@ class Lowerer
     void                EnsureStackFunctionListStackSym();
     void                EnsureZeroLastStackFunctionNext();
 
-    void                AllocStackClosure();
+    void                AllocStackClosure(StackSym * symInlineeFrameDisplayEnd);
+    void                InitializeInlineeFrameDisplays(StackSym * symInlineeFrameDisplayEnd);
     IR::Instr *         GenerateNewStackScFunc(IR::Instr * newScFuncInstr, IR::RegOpnd ** ppEnvOpnd);
     void                GenerateStackScriptFunctionInit(StackSym * stackSym, Js::FunctionInfoPtrPtr nestedInfo);
     void                GenerateScriptFunctionInit(IR::RegOpnd * regOpnd, IR::Opnd * vtableAddressOpnd,
diff --git a/lib/Backend/Region.cpp b/lib/Backend/Region.cpp
index 6b04be61318..39c3fbfd8ed 100644
--- a/lib/Backend/Region.cpp
+++ b/lib/Backend/Region.cpp
@@ -103,3 +103,21 @@ Region::GetFirstAncestorOfNonExceptingFinally()
     return ancestor;
 }
 
+bool
+Region::CheckWriteThroughSym(StackSym * sym)
+{
+    if (this->GetType() == RegionTypeRoot)
+    {
+        return false;
+    }
+
+    // if the current region is a try region, check in its write-through set,
+    // otherwise (current = catch region) look in the first try ancestor's write-through set
+    Region * selfOrFirstTryAncestor = this->GetSelfOrFirstTryAncestor();
+    if (!selfOrFirstTryAncestor)
+    {
+        return false;
+    }
+    Assert(selfOrFirstTryAncestor->GetType() == RegionTypeTry);
+    return selfOrFirstTryAncestor->writeThroughSymbolsSet && selfOrFirstTryAncestor->writeThroughSymbolsSet->Test(sym->m_id);
+}
diff --git a/lib/Backend/Region.h b/lib/Backend/Region.h
index c5353242f6a..8542a72c37d 100644
--- a/lib/Backend/Region.h
+++ b/lib/Backend/Region.h
@@ -68,6 +68,7 @@ class Region
     Region * GetSelfOrFirstTryAncestor();
     Region * GetFirstAncestorOfNonExceptingFinallyParent();
     Region * GetFirstAncestorOfNonExceptingFinally();
+    bool CheckWriteThroughSym(StackSym * sym);
 
 private:
     RegionType type;
diff --git a/lib/Common/ChakraCoreVersion.h b/lib/Common/ChakraCoreVersion.h
index f2d0e024c3b..e718f44cfc6 100644
--- a/lib/Common/ChakraCoreVersion.h
+++ b/lib/Common/ChakraCoreVersion.h
@@ -17,7 +17,7 @@
 // ChakraCore version number definitions (used in ChakraCore binary metadata)
 #define CHAKRA_CORE_MAJOR_VERSION 1
 #define CHAKRA_CORE_MINOR_VERSION 11
-#define CHAKRA_CORE_PATCH_VERSION 21
+#define CHAKRA_CORE_PATCH_VERSION 22
 #define CHAKRA_CORE_VERSION_RELEASE_QFE 0 // Redundant with PATCH_VERSION. Keep this value set to 0.
 
 // -------------
diff --git a/lib/Parser/Scan.h b/lib/Parser/Scan.h
index b34e163b374..575954a6cb6 100644
--- a/lib/Parser/Scan.h
+++ b/lib/Parser/Scan.h
@@ -241,6 +241,7 @@ class UTF8EncodingPolicyBase
     OLECHAR ReadRest(OLECHAR ch, EncodedCharPtr &p, EncodedCharPtr last)
     {
         EncodedCharPtr s;
+        utf8::DecodeOptions decodeOptions = m_decodeOptions;
         if (bScan)
         {
             s = p;
@@ -248,8 +249,16 @@
         OLECHAR result = utf8::DecodeTail(ch, p, last, m_decodeOptions);
         if (bScan)
        {
-            // If we are scanning, update m_cMultiUnits counter.
-            m_cMultiUnits += p - s;
+            if ((decodeOptions & utf8::doSecondSurrogatePair) && (p - s > 2))
+            {
+                // 4 byte utf8 chars equals 2 utf16 chars + 2 multi-unit chars only (refer to case4: in utf8::DecodeTail()).
+                m_cMultiUnits += 2;
+            }
+            else
+            {
+                // If we are scanning, update m_cMultiUnits counter.
+                m_cMultiUnits += p - s;
+            }
         }
         return result;
     }
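
The arithmetic behind the Scan.h change above, shown standalone: a 4-byte UTF-8 sequence decodes to a surrogate pair, i.e. two UTF-16 code units, so the counter that reconciles UTF-8 unit positions with UTF-16 unit positions should grow by 4 - 2 = 2 rather than by the full tail length. This is plain C++ doing the textbook conversion, not ChakraCore's utf8::DecodeTail:

    #include <cstdint>
    #include <cstdio>

    int main()
    {
        // U+1F600 encoded in UTF-8: 4 code units.
        const unsigned char utf8[] = { 0xF0, 0x9F, 0x98, 0x80 };

        // Decode the 4-byte sequence to a code point.
        uint32_t cp = ((utf8[0] & 0x07u) << 18) |
                      ((utf8[1] & 0x3Fu) << 12) |
                      ((utf8[2] & 0x3Fu) << 6)  |
                       (utf8[3] & 0x3Fu);

        // Encode the code point as a UTF-16 surrogate pair: 2 code units.
        uint16_t hi = static_cast<uint16_t>(0xD800 + ((cp - 0x10000) >> 10));
        uint16_t lo = static_cast<uint16_t>(0xDC00 + ((cp - 0x10000) & 0x3FF));

        printf("code point U+%X -> UTF-16 %04X %04X\n", cp, hi, lo);
        printf("UTF-8 units: 4, UTF-16 units: 2, extra units to record: %d\n", 4 - 2);
        return 0;
    }
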
diff --git a/lib/Runtime/Base/Constants.h b/lib/Runtime/Base/Constants.h
index 017bdc2640c..0016bebacbf 100644
--- a/lib/Runtime/Base/Constants.h
+++ b/lib/Runtime/Base/Constants.h
@@ -132,10 +132,12 @@ namespace Js
         static const int StackNestedFuncList = 2;
         static const int StackFrameDisplay = 3;
         static const int StackScopeSlots = 4;
+        static const int InlineeFrameDisplays = 5;
 #if _M_IX86 || _M_AMD64
         static const int StackNestedFuncListWithNoArg = 1;
         static const int StackFrameDisplayWithNoArg = 2;
         static const int StackScopeSlotsWithNoArg = 3;
+        static const int InlineeFrameDisplaysWithNoArg = 4;
 #endif
 
         static const DWORD NonWebWorkerContextId = 0;
diff --git a/lib/Runtime/Language/JavascriptFunctionArgIndex.h b/lib/Runtime/Language/JavascriptFunctionArgIndex.h
index 25f68842e86..13eab3f2e35 100644
--- a/lib/Runtime/Language/JavascriptFunctionArgIndex.h
+++ b/lib/Runtime/Language/JavascriptFunctionArgIndex.h
@@ -13,10 +13,12 @@ namespace Js
         JavascriptFunctionArgIndex_StackNestedFuncListWithNoArg = JavascriptFunctionArgIndex_Frame - Js::Constants::StackNestedFuncListWithNoArg,
         JavascriptFunctionArgIndex_StackFrameDisplayNoArg = JavascriptFunctionArgIndex_Frame - Js::Constants::StackFrameDisplayWithNoArg,
         JavascriptFunctionArgIndex_StackScopeSlotsNoArg = JavascriptFunctionArgIndex_Frame - Js::Constants::StackScopeSlotsWithNoArg,
+        JavascriptFunctionArgIndex_InlineeFrameDisplaysNoArg = JavascriptFunctionArgIndex_Frame - Js::Constants::InlineeFrameDisplaysWithNoArg,
 #endif
         JavascriptFunctionArgIndex_StackNestedFuncList = JavascriptFunctionArgIndex_Frame - Js::Constants::StackNestedFuncList,
         JavascriptFunctionArgIndex_StackFrameDisplay = JavascriptFunctionArgIndex_Frame - Js::Constants::StackFrameDisplay,
         JavascriptFunctionArgIndex_StackScopeSlots = JavascriptFunctionArgIndex_Frame - Js::Constants::StackScopeSlots,
+        JavascriptFunctionArgIndex_InlineeFrameDisplays = JavascriptFunctionArgIndex_Frame - Js::Constants::InlineeFrameDisplays,
         JavascriptFunctionArgIndex_Function = 0,
         JavascriptFunctionArgIndex_CallInfo = 1,
         JavascriptFunctionArgIndex_This = 2, /* (hidden) first script arg */
diff --git a/lib/Runtime/Library/StackScriptFunction.cpp b/lib/Runtime/Library/StackScriptFunction.cpp
index 52f9a199417..5121f408a57 100644
--- a/lib/Runtime/Library/StackScriptFunction.cpp
+++ b/lib/Runtime/Library/StackScriptFunction.cpp
@@ -247,7 +247,6 @@ namespace Js
                     StackScriptFunction * stackFunction = interpreterFrame->GetStackNestedFunction(i);
                     ScriptFunction * boxedFunction = this->BoxStackFunction(stackFunction);
                     Assert(stackFunction->boxedScriptFunction == boxedFunction);
-                    this->UpdateFrameDisplay(stackFunction);
                 }
 
                 if (walker.IsBailedOutFromInlinee())
@@ -265,16 +264,6 @@ namespace Js
                         // Walk native frame that was bailed out as well.
                         // The stack walker is pointing to the native frame already.
                         this->BoxNativeFrame(walker, callerFunctionBody);
-
-                        // We don't need to box this frame, but we may still need to box the scope slot references
-                        // within nested frame displays if the slots they refer to have been boxed.
-                        if (callerFunctionBody->GetNestedCount() != 0)
-                        {
-                            this->ForEachStackNestedFunctionNative(walker, callerFunctionBody, [&](ScriptFunction *nestedFunc)
-                            {
-                                this->UpdateFrameDisplay(nestedFunc);
-                            });
-                        }
                     }
                 }
                 else
@@ -314,16 +303,6 @@ namespace Js
 
                     // walk native frame
                     this->BoxNativeFrame(walker, callerFunctionBody);
-
-                    // We don't need to box this frame, but we may still need to box the scope slot references
-                    // within nested frame displays if the slots they refer to have been boxed.
-                    if (callerFunctionBody->GetNestedCount() != 0)
-                    {
-                        this->ForEachStackNestedFunctionNative(walker, callerFunctionBody, [&](ScriptFunction *nestedFunc)
-                        {
-                            this->UpdateFrameDisplay(nestedFunc);
-                        });
-                    }
                 }
             }
         }
@@ -399,18 +378,18 @@ namespace Js
                 {
                     interpreterFrame->SetExecutingStackFunction(boxedCaller);
                 }
-
-                // We don't need to box this frame, but we may still need to box the scope slot references
-                // within nested frame displays if the slots they refer to have been boxed.
-                if (callerFunctionBody->GetNestedCount() != 0)
-                {
-                    this->ForEachStackNestedFunction(walker, callerFunctionBody, [&](ScriptFunction *nestedFunc)
-                    {
-                        this->UpdateFrameDisplay(nestedFunc);
-                    });
-                }
             }
         }
+
+        // We don't need to box this frame, but we may still need to box the scope slot references
+        // within nested frame displays if the slots they refer to have been boxed.
+        if (callerFunctionBody->GetNestedCount() != 0)
+        {
+            this->ForEachStackNestedFunction(walker, callerFunctionBody, [&](ScriptFunction *nestedFunc)
+            {
+                this->UpdateFrameDisplay(nestedFunc);
+            });
+        }
     }
 
     Assert(!hasInlineeToBox);
@@ -436,6 +415,11 @@
             }
         }
 
+        UpdateFrameDisplay(frameDisplay);
+    }
+
+    void StackScriptFunction::BoxState::UpdateFrameDisplay(FrameDisplay *frameDisplay)
+    {
         for (uint i = 0; i < frameDisplay->GetLength(); i++)
         {
             Var* stackScopeSlots = (Var*)frameDisplay->GetItem(i);
@@ -475,6 +459,20 @@
         }
     }
 
+    uintptr_t StackScriptFunction::BoxState::GetInlineeFrameDisplaysIndex(FunctionBody * functionBody)
+    {
+#if _M_IX86 || _M_AMD64
+        if (functionBody->GetInParamsCount() == 0)
+        {
+            return (uintptr_t)JavascriptFunctionArgIndex_InlineeFrameDisplaysNoArg;
+        }
+        else
+#endif
+        {
+            return (uintptr_t)JavascriptFunctionArgIndex_InlineeFrameDisplays;
+        }
+    }
+
     FrameDisplay * StackScriptFunction::BoxState::GetFrameDisplayFromNativeFrame(JavascriptStackWalker const& walker, FunctionBody * callerFunctionBody)
     {
         uintptr_t frameDisplayIndex = GetNativeFrameDisplayIndex(callerFunctionBody);
@@ -489,6 +487,13 @@
         return (Var*)argv[scopeSlotsIndex];
     }
 
+    FrameDisplay * StackScriptFunction::BoxState::GetInlineeFrameDisplaysFromNativeFrame(JavascriptStackWalker const& walker, FunctionBody * callerFunctionBody)
+    {
+        uintptr_t inlineeFrameDisplaysIndex = GetInlineeFrameDisplaysIndex(callerFunctionBody);
+        void **argv = walker.GetCurrentArgv();
+        return (FrameDisplay*)argv[inlineeFrameDisplaysIndex];
+    }
+
     void StackScriptFunction::BoxState::SetFrameDisplayFromNativeFrame(JavascriptStackWalker const& walker, FunctionBody * callerFunctionBody, FrameDisplay * frameDisplay)
     {
         uintptr_t frameDisplayIndex = GetNativeFrameDisplayIndex(callerFunctionBody);
@@ -541,6 +546,13 @@
                callerFunctionBody->GetScriptContext()->GetThreadContext()->AddImplicitCallFlags(ImplicitCall_Accessor);
             }
         }
+
+        this->ForEachInlineeFrameDisplay(walker, callerFunctionBody, [&](FrameDisplay *frameDisplay)
+        {
+            // Update all the inlinee frame displays, which are not stack-allocated but may refer to scopes on the stack.
+            // This is only necessary in a native frame that does stack frame displays
+            this->UpdateFrameDisplay(frameDisplay);
+        });
     }
 
     template <class Fn>
@@ -615,6 +627,37 @@
         }
     }
 
+    template <class Fn>
+    void StackScriptFunction::BoxState::ForEachInlineeFrameDisplay(
+        JavascriptStackWalker const& walker,
+        FunctionBody *callerFunctionBody,
+        Fn fn)
+    {
+        if (!callerFunctionBody->DoStackFrameDisplay() || walker.GetCurrentInterpreterFrame() != nullptr || walker.IsInlineFrame())
+        {
+            return;
+        }
+
+#ifdef MD_GROW_LOCALS_AREA_UP
+        // Stack closures not supported for layouts like ARM. We shouldn't get here.
+        AssertOrFailFast(0);
+#endif
+
+        void **argv = walker.GetCurrentArgv();
+        FrameDisplay ** curr = (FrameDisplay**)(
+#if _M_IX86 || _M_AMD64
+            callerFunctionBody->GetInParamsCount() == 0?
+            &argv[JavascriptFunctionArgIndex_InlineeFrameDisplaysNoArg]:
+#endif
+            &argv[JavascriptFunctionArgIndex_InlineeFrameDisplays]);
+
+        while (*curr != nullptr)
+        {
+            fn(*curr);
+            curr--;
+        }
+    }
+
     void StackScriptFunction::BoxState::Finish()
     {
         frameToBox.Map([](FunctionBody * body)
diff --git a/lib/Runtime/Library/StackScriptFunction.h b/lib/Runtime/Library/StackScriptFunction.h
index 8ed9339938d..f1db912be40 100644
--- a/lib/Runtime/Library/StackScriptFunction.h
+++ b/lib/Runtime/Library/StackScriptFunction.h
@@ -62,11 +62,13 @@ namespace Js
             ScriptFunction * BoxStackFunction(ScriptFunction * scriptFunction);
             FrameDisplay * BoxFrameDisplay(FrameDisplay * frameDisplay);
             FrameDisplay * GetFrameDisplayFromNativeFrame(JavascriptStackWalker const& walker, FunctionBody * callerFunctionBody);
+            FrameDisplay * GetInlineeFrameDisplaysFromNativeFrame(JavascriptStackWalker const& walker, FunctionBody * callerFunctionBody);
             Var * GetScopeSlotsFromNativeFrame(JavascriptStackWalker const& walker, FunctionBody * callerFunctionBody);
             void SetFrameDisplayFromNativeFrame(JavascriptStackWalker const& walker, FunctionBody * callerFunctionBody, FrameDisplay * frameDisplay);
             void SetScopeSlotsFromNativeFrame(JavascriptStackWalker const& walker, FunctionBody * callerFunctionBody, Var * scopeSlots);
             void BoxNativeFrame(JavascriptStackWalker const& walker, FunctionBody * callerFunctionBody);
             void UpdateFrameDisplay(ScriptFunction *nestedFunc);
+            void UpdateFrameDisplay(FrameDisplay *frameDisplay);
             void Finish();
 
             template <class Fn>
@@ -75,9 +77,12 @@ namespace Js
             void ForEachStackNestedFunctionInterpreted(InterpreterStackFrame *interpreterFrame, FunctionBody * callerFunctionBody, Fn fn);
             template <class Fn>
             void ForEachStackNestedFunctionNative(JavascriptStackWalker const& walker, FunctionBody * callerFunctionBody, Fn fn);
+            template <class Fn>
+            void ForEachInlineeFrameDisplay(JavascriptStackWalker const& walker, FunctionBody * callerFunctionBody, Fn fn);
 
             static uintptr_t GetNativeFrameDisplayIndex(FunctionBody * functionBody);
             static uintptr_t GetNativeScopeSlotsIndex(FunctionBody * functionBody);
+            static uintptr_t GetInlineeFrameDisplaysIndex(FunctionBody * functionBody);
         };
 
         ScriptFunction * boxedScriptFunction;
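
Taken together, the lowerer changes earlier in the patch and ForEachInlineeFrameDisplay above amount to a null-terminated scan over per-inlinee frame-display slots: one machine-word slot per inlinee plus a terminator slot that is zeroed at function entry, each inlinee slot starting out as a placeholder (the NullFrameDisplay address) and later overwritten with the heap-allocated display. The sketch below models those slots with a plain array and walks it forward; the real code walks fixed stack offsets downward (curr--), and FrameDisplay here is an opaque stand-in rather than the runtime type:

    #include <cstdio>

    struct FrameDisplay { int depth; };   // opaque stand-in for Js::FrameDisplay

    template <class Fn>
    void ForEachDisplay(FrameDisplay* const* firstSlot, Fn fn)
    {
        // Scan from the first slot until the null terminator. In the JIT the slots
        // sit at fixed frame offsets and the scan moves toward lower addresses; a
        // simple forward walk over an array models the same null-terminated scan.
        for (FrameDisplay* const* curr = firstSlot; *curr != nullptr; ++curr)
        {
            fn(*curr);
        }
    }

    int main()
    {
        FrameDisplay a{ 1 }, b{ 2 };

        // Slots as they would look after lowering: one entry per inlinee frame
        // display, then the terminator slot that was zeroed at function entry.
        FrameDisplay* slots[] = { &a, &b, nullptr };

        ForEachDisplay(slots, [](FrameDisplay* fd) { printf("display depth %d\n", fd->depth); });
        return 0;
    }
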