Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
100 changes: 52 additions & 48 deletions src/coreclr/jit/objectalloc.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -373,7 +373,7 @@ void ObjectAllocator::PrepareAnalysis()
//
// If conditional escape analysis is enabled, we reserve the range [N...N+M-1]
// for locals allocated during the conditional escape analysis expansions,
// where N is the maximum number of pseudos.
// where M is the maximum number of pseudos.
//
// We reserve the range [N+M ... N+2M-1] for pseudos.
//
Expand Down Expand Up @@ -614,47 +614,51 @@ void ObjectAllocator::MarkEscapingVarsAndBuildConnGraph()
unsigned const lclNum = tree->AsLclVarCommon()->GetLclNum();
LclVarDsc* const lclDsc = m_compiler->lvaGetDesc(lclNum);

// If this local already escapes, no need to look further.
// Are we tracking this local?
//
if (m_allocator->CanLclVarEscape(lclNum))
if (!m_allocator->IsTrackedLocal(lclNum))
{
return Compiler::fgWalkResult::WALK_CONTINUE;
}

bool lclEscapes = true;
const unsigned lclIndex = m_allocator->LocalToIndex(lclNum);

// If this local already escapes, no need to look further.
//
if (m_allocator->CanIndexEscape(lclIndex))
{
return Compiler::fgWalkResult::WALK_CONTINUE;
}

if (tree->OperIsLocalStore())
{
lclEscapes = false;
m_allocator->CheckForGuardedAllocationOrCopy(m_block, m_stmt, use, lclNum);
}
else if (tree->OperIs(GT_LCL_VAR) && m_allocator->IsTrackedLocal(lclNum))
else if (tree->OperIs(GT_LCL_VAR))
{
assert(tree == m_ancestors.Top());
if (!m_allocator->CanLclVarEscapeViaParentStack(&m_ancestors, lclNum, m_block))
{
lclEscapes = false;
}
m_allocator->AnalyzeParentStack(&m_ancestors, lclIndex, m_block);
}
else if (tree->OperIs(GT_LCL_ADDR) && (lclDsc->TypeGet() == TYP_STRUCT) &&
m_allocator->IsTrackedLocal(lclNum))
else if (tree->OperIs(GT_LCL_ADDR) && (lclDsc->TypeGet() == TYP_STRUCT))
{
assert(tree == m_ancestors.Top());
if (!m_allocator->CanLclVarEscapeViaParentStack(&m_ancestors, lclNum, m_block))
{
lclEscapes = false;
}
m_allocator->AnalyzeParentStack(&m_ancestors, lclIndex, m_block);
}

if (lclEscapes)
else if (tree->OperIs(GT_LCL_FLD))
{
if (!m_allocator->CanLclVarEscape(lclNum))
{
JITDUMP("V%02u first escapes via [%06u]\n", lclNum, m_compiler->dspTreeID(tree));
}
// We generally don't see these in early IR. Bail for now.
//
JITDUMP("V%02u local field at [%06u]\n", lclNum, m_compiler->dspTreeID(tree));
m_allocator->MarkLclVarAsEscaping(lclNum);
}
else
{
assert((tree->OperIs(GT_LCL_ADDR) && (lclDsc->TypeGet() != TYP_STRUCT)));
JITDUMP("V%02u address taken at [%06u]\n", lclNum, m_compiler->dspTreeID(tree));
m_allocator->MarkLclVarAsEscaping(lclNum);
}
else if (!tree->OperIsLocalStore())

if (!m_allocator->CanIndexEscape(lclIndex) && !tree->OperIsLocalStore())
{
// Note uses of variables of interest to conditional escape analysis.
//
Expand Down Expand Up @@ -1606,29 +1610,22 @@ unsigned int ObjectAllocator::MorphAllocObjNodeIntoStackAlloc(GenTreeAllocObj* a
}

//------------------------------------------------------------------------
// CanLclVarEscapeViaParentStack: Check if the local variable escapes via the given parent stack.
// AnalyzeParentStack: Check if the local variable escapes via the given parent stack.
// Update the connection graph as necessary.
//
// Arguments:
// parentStack - Parent stack of the current visit
// lclNum - Local variable number
// lclIndex - Index for a tracked, unescaped local referenced at the top of the stack
// block - basic block holding the trees
//
// Return Value:
// true if the local can escape via the parent stack; false otherwise
//
// Notes:
// The method currently treats all locals assigned to a field as escaping.
// These can potentially be tracked by special field edges in the connection graph.
//
bool ObjectAllocator::CanLclVarEscapeViaParentStack(ArrayStack<GenTree*>* parentStack,
unsigned int lclNum,
BasicBlock* block)
void ObjectAllocator::AnalyzeParentStack(ArrayStack<GenTree*>* parentStack, unsigned int lclIndex, BasicBlock* block)
{
assert(parentStack != nullptr);
Copy link
Preview

Copilot AI May 5, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

[nitpick] Consider adding a clarifying comment explaining why it is asserted that the local index must not already be marked as escaping at this point.

Suggested change
assert(parentStack != nullptr);
assert(parentStack != nullptr);
// At this point, it is guaranteed that the local index (lclIndex) has not been marked as escaping.
// This is because AnalyzeParentStack is only called for tracked, unescaped locals, and the logic
// leading to this point ensures that no escaping has occurred. The assertion acts as a safeguard
// to verify this invariant.

Copilot uses AI. Check for mistakes.

int parentIndex = 1;
assert(!CanIndexEscape(lclIndex));

LclVarDsc* const lclDsc = comp->lvaGetDesc(lclNum);
int parentIndex = 1;
const unsigned lclNum = IndexToLocal(lclIndex);
LclVarDsc* const lclDsc = comp->lvaGetDesc(lclNum);

bool keepChecking = true;
bool canLclVarEscapeViaParentStack = true;
Expand Down Expand Up @@ -1675,18 +1672,19 @@ bool ObjectAllocator::CanLclVarEscapeViaParentStack(ArrayStack<GenTree*>* parent
break;
}

// Add an edge to the connection graph.
const unsigned int srcLclNum = lclNum;
const unsigned dstIndex = LocalToIndex(dstLclNum);

AddConnGraphEdge(dstLclNum, srcLclNum);
// Add an edge to the connection graph.
//
AddConnGraphEdgeIndex(dstIndex, lclIndex);
canLclVarEscapeViaParentStack = false;

// If the source of this store is an enumerator local,
// then the dest also becomes an enumerator local.
//
if (isCopy)
{
CheckForEnumeratorUse(srcLclNum, dstLclNum);
CheckForEnumeratorUse(lclNum, dstLclNum);
}

// Note that we modelled this store in the connection graph
Expand Down Expand Up @@ -1788,14 +1786,15 @@ bool ObjectAllocator::CanLclVarEscapeViaParentStack(ArrayStack<GenTree*>* parent

if (base->OperIs(GT_LCL_ADDR))
{
unsigned const dstLclNum = base->AsLclVarCommon()->GetLclNum();
LclVarDsc* const dstDsc = comp->lvaGetDesc(dstLclNum);
unsigned const dstLclNum = base->AsLclVarCommon()->GetLclNum();

if (IsTrackedLocal(dstLclNum))
{
JITDUMP("... local.field store\n");
const unsigned dstIndex = LocalToIndex(dstLclNum);
// Add an edge to the connection graph.
AddConnGraphEdge(dstLclNum, lclNum);
//
AddConnGraphEdgeIndex(dstIndex, lclIndex);
canLclVarEscapeViaParentStack = false;
}
}
Expand Down Expand Up @@ -1902,7 +1901,12 @@ bool ObjectAllocator::CanLclVarEscapeViaParentStack(ArrayStack<GenTree*>* parent
}
}

return canLclVarEscapeViaParentStack;
if (canLclVarEscapeViaParentStack)
{
JITDUMP("V%02u first escapes via [%06u]...[%06u]\n", lclNum, comp->dspTreeID(parentStack->Top()),
comp->dspTreeID(parentStack->Top(parentIndex)));
MarkLclVarAsEscaping(lclNum);
}
}

//------------------------------------------------------------------------
Expand Down Expand Up @@ -2510,11 +2514,11 @@ bool ObjectAllocator::AnalyzeIfCloningCanPreventEscape(BitVecTraits* bitVecTrait

// See what locals were "assigned" to the pseudo.
//
BitVec PseudoAdjacencies = m_ConnGraphAdjacencyMatrix[pseudoIndex];
BitVec pseudoAdjacencies = m_ConnGraphAdjacencyMatrix[pseudoIndex];

// If we found an allocation but didn't find any conditionally escaping uses, then cloning is of no use
//
if (BitVecOps::IsEmpty(bitVecTraits, PseudoAdjacencies))
if (BitVecOps::IsEmpty(bitVecTraits, pseudoAdjacencies))
{
JITDUMP(" No conditionally escaping uses under");
JITDUMPEXEC(DumpIndex(pseudoIndex));
Expand All @@ -2525,7 +2529,7 @@ bool ObjectAllocator::AnalyzeIfCloningCanPreventEscape(BitVecTraits* bitVecTrait

// Check if each conditionally escaping local escapes on its own; if so cloning is of no use
//
BitVecOps::Iter iterator(bitVecTraits, PseudoAdjacencies);
BitVecOps::Iter iterator(bitVecTraits, pseudoAdjacencies);
unsigned lclNumIndex = BAD_VAR_NUM;
while (canClone && iterator.NextElem(&lclNumIndex))
{
Expand Down
21 changes: 18 additions & 3 deletions src/coreclr/jit/objectalloc.h
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,7 @@ class ObjectAllocator final : public Phase
unsigned LocalToIndex(unsigned lclNum);
unsigned IndexToLocal(unsigned bvIndex);
bool CanLclVarEscape(unsigned int lclNum);
bool CanIndexEscape(unsigned int index);
void MarkLclVarAsPossiblyStackPointing(unsigned int lclNum);
void MarkIndexAsPossiblyStackPointing(unsigned int index);
void MarkLclVarAsDefinitelyStackPointing(unsigned int lclNum);
Expand Down Expand Up @@ -204,7 +205,7 @@ class ObjectAllocator final : public Phase
BasicBlock* block,
Statement* stmt);
struct BuildConnGraphVisitorCallbackData;
bool CanLclVarEscapeViaParentStack(ArrayStack<GenTree*>* parentStack, unsigned int lclNum, BasicBlock* block);
void AnalyzeParentStack(ArrayStack<GenTree*>* parentStack, unsigned int lclNum, BasicBlock* block);
void UpdateAncestorTypes(GenTree* tree, ArrayStack<GenTree*>* parentStack, var_types newType, bool retypeFields);
ObjectAllocationType AllocationKind(GenTree* tree);

Expand Down Expand Up @@ -263,6 +264,21 @@ inline void ObjectAllocator::EnableObjectStackAllocation()
m_IsObjectStackAllocationEnabled = true;
}

//------------------------------------------------------------------------
// CanIndexEscape: Check whether the resource tracked under the given
//    bit vector index may potentially escape from the method.
//
// Arguments:
//    index - bv index of a tracked resource
//
// Return Value:
//    true if the indexed resource can escape; false otherwise

inline bool ObjectAllocator::CanIndexEscape(unsigned int index)
{
    // An index escapes iff it is a member of the escaping-pointers set.
    const bool isEscaping = BitVecOps::IsMember(&m_bitVecTraits, m_EscapingPointers, index);
    return isEscaping;
}

//------------------------------------------------------------------------
// CanLclVarEscape: Returns true iff local variable can
// potentially escape from the method
Expand All @@ -280,8 +296,7 @@ inline bool ObjectAllocator::CanLclVarEscape(unsigned int lclNum)
return true;
}

const unsigned bvIndex = LocalToIndex(lclNum);
return BitVecOps::IsMember(&m_bitVecTraits, m_EscapingPointers, bvIndex);
return CanIndexEscape(LocalToIndex(lclNum));
}

//------------------------------------------------------------------------
Expand Down
Loading