Skip to content

Commit 5dbbc81

Browse files
committed
JIT: enhance escape analysis to understand Span<T> capture
Implement a very simplistic "field sensitive" analysis for `Span<T>` where we model the span as simply its byref field. If the span is only consumed locally, and the array does not otherwise escape, then the array does not escape. This is a subset of dotnet#112543 that does not try to reason interprocedurally. Contributes to dotnet#104936 / dotnet#108913
1 parent b4f8d4f commit 5dbbc81

File tree

3 files changed

+238
-29
lines changed

3 files changed

+238
-29
lines changed

src/coreclr/jit/compiler.h

+7
Original file line number | Diff line number | Diff line change
@@ -1048,6 +1048,13 @@ class LclVarDsc
10481048
m_layout = layout;
10491049
}
10501050

1051+
// Change the layout to one that may not be compatible.
1052+
// NOTE(review): the comment implies a sibling setter exists that requires
// layout compatibility (not visible in this hunk) — this variant deliberately
// skips any such check, so the caller is responsible for ensuring the
// retyped layout is safe for this local.
void ChangeLayout(ClassLayout* layout)
1053+
{
1054+
// Only struct-typed locals carry a ClassLayout.
assert(varTypeIsStruct(lvType));
1055+
m_layout = layout;
1056+
}
1057+
10511058
// Grow the size of a block layout local.
10521059
void GrowBlockLayout(ClassLayout* layout)
10531060
{

src/coreclr/jit/objectalloc.cpp

+94-29
Original file line number | Diff line number | Diff line change
@@ -233,7 +233,7 @@ void ObjectAllocator::MarkEscapingVarsAndBuildConnGraph()
233233
lclEscapes = false;
234234
m_allocator->CheckForGuardedAllocationOrCopy(m_block, m_stmt, use, lclNum);
235235
}
236-
else if (tree->OperIs(GT_LCL_VAR) && tree->TypeIs(TYP_REF, TYP_BYREF, TYP_I_IMPL))
236+
else if (tree->OperIs(GT_LCL_VAR, GT_LCL_ADDR) && tree->TypeIs(TYP_REF, TYP_BYREF, TYP_I_IMPL, TYP_STRUCT))
237237
{
238238
assert(tree == m_ancestors.Top());
239239
if (!m_allocator->CanLclVarEscapeViaParentStack(&m_ancestors, lclNum, m_block))
@@ -265,7 +265,7 @@ void ObjectAllocator::MarkEscapingVarsAndBuildConnGraph()
265265
{
266266
var_types type = comp->lvaTable[lclNum].TypeGet();
267267

268-
if (type == TYP_REF || genActualType(type) == TYP_I_IMPL || type == TYP_BYREF)
268+
if (type == TYP_REF || genActualType(type) == TYP_I_IMPL || type == TYP_BYREF || type == TYP_STRUCT)
269269
{
270270
m_ConnGraphAdjacencyMatrix[lclNum] = BitVecOps::MakeEmpty(&m_bitVecTraits);
271271

@@ -1023,10 +1023,13 @@ bool ObjectAllocator::CanLclVarEscapeViaParentStack(ArrayStack<GenTree*>* parent
10231023
assert(parentStack != nullptr);
10241024
int parentIndex = 1;
10251025

1026-
bool keepChecking = true;
1027-
bool canLclVarEscapeViaParentStack = true;
1028-
bool isCopy = true;
1029-
bool isEnumeratorLocal = comp->lvaGetDesc(lclNum)->lvIsEnumerator;
1026+
LclVarDsc* const lclDsc = comp->lvaGetDesc(lclNum);
1027+
1028+
bool keepChecking = true;
1029+
bool canLclVarEscapeViaParentStack = true;
1030+
bool isCopy = true;
1031+
bool const isEnumeratorLocal = lclDsc->lvIsEnumerator;
1032+
bool const isSpanLocal = lclDsc->IsSpan();
10301033

10311034
while (keepChecking)
10321035
{
@@ -1093,11 +1096,21 @@ bool ObjectAllocator::CanLclVarEscapeViaParentStack(ArrayStack<GenTree*>* parent
10931096
case GT_ADD:
10941097
case GT_SUB:
10951098
case GT_FIELD_ADDR:
1096-
// Check whether the local escapes via its grandparent.
1099+
// Check whether the local escapes higher up
10971100
++parentIndex;
10981101
keepChecking = true;
10991102
break;
11001103

1104+
case GT_LCL_ADDR:
1105+
if (isSpanLocal)
1106+
{
1107+
// Check whether the local escapes higher up
1108+
++parentIndex;
1109+
keepChecking = true;
1110+
JITDUMP("... i'm good thanks\n");
1111+
}
1112+
break;
1113+
11011114
case GT_BOX:
11021115
isCopy = wasCopy;
11031116
++parentIndex;
@@ -1119,11 +1132,43 @@ bool ObjectAllocator::CanLclVarEscapeViaParentStack(ArrayStack<GenTree*>* parent
11191132
case GT_STOREIND:
11201133
case GT_STORE_BLK:
11211134
case GT_BLK:
1122-
if (tree != parent->AsIndir()->Addr())
1135+
{
1136+
GenTree* const addr = parent->AsIndir()->Addr();
1137+
if (tree != addr)
11231138
{
1124-
// TODO-ObjectStackAllocation: track stores to fields.
1139+
JITDUMP("... tree != addr\n");
1140+
1141+
// Is this an array element address store to (the pointer) field of a span?
1142+
// (note we can't yet handle cases where a span captures an object)
1143+
//
1144+
if (parent->OperIs(GT_STOREIND) && addr->OperIs(GT_FIELD_ADDR) && tree->OperIs(GT_INDEX_ADDR))
1145+
{
1146+
// Todo: mark the span pointer field addr like we mark IsSpanLength?
1147+
// (for now we don't worry which field we store to)
1148+
//
1149+
GenTree* const base = addr->AsOp()->gtGetOp1();
1150+
1151+
if (base->OperIs(GT_LCL_ADDR))
1152+
{
1153+
unsigned const dstLclNum = base->AsLclVarCommon()->GetLclNum();
1154+
LclVarDsc* const dstLclDsc = comp->lvaGetDesc(dstLclNum);
1155+
1156+
if (dstLclDsc->IsSpan())
1157+
{
1158+
JITDUMP("... span ptr store\n");
1159+
// Add an edge to the connection graph.
1160+
AddConnGraphEdge(dstLclNum, lclNum);
1161+
canLclVarEscapeViaParentStack = false;
1162+
}
1163+
}
1164+
}
11251165
break;
11261166
}
1167+
else
1168+
{
1169+
JITDUMP("... tree == addr\n");
1170+
}
1171+
}
11271172
FALLTHROUGH;
11281173
case GT_IND:
11291174
// Address of the field/ind is not taken so the local doesn't escape.
@@ -1132,20 +1177,20 @@ bool ObjectAllocator::CanLclVarEscapeViaParentStack(ArrayStack<GenTree*>* parent
11321177

11331178
case GT_CALL:
11341179
{
1135-
GenTreeCall* const asCall = parent->AsCall();
1180+
GenTreeCall* const call = parent->AsCall();
11361181

1137-
if (asCall->IsHelperCall())
1182+
if (call->IsHelperCall())
11381183
{
11391184
canLclVarEscapeViaParentStack =
1140-
!Compiler::s_helperCallProperties.IsNoEscape(comp->eeGetHelperNum(asCall->gtCallMethHnd));
1185+
!Compiler::s_helperCallProperties.IsNoEscape(comp->eeGetHelperNum(call->gtCallMethHnd));
11411186
}
1142-
else if (asCall->IsSpecialIntrinsic())
1187+
else if (call->IsSpecialIntrinsic())
11431188
{
11441189
// Some known special intrinsics don't escape. At this moment, only the ones accepting byrefs
11451190
// are supported. In order to support more intrinsics accepting objects, we need extra work
11461191
// on the VM side which is not ready for that yet.
11471192
//
1148-
switch (comp->lookupNamedIntrinsic(asCall->gtCallMethHnd))
1193+
switch (comp->lookupNamedIntrinsic(call->gtCallMethHnd))
11491194
{
11501195
case NI_System_SpanHelpers_ClearWithoutReferences:
11511196
case NI_System_SpanHelpers_Fill:
@@ -1246,6 +1291,7 @@ void ObjectAllocator::UpdateAncestorTypes(GenTree* tree, ArrayStack<GenTree*>* p
12461291
case GT_SUB:
12471292
case GT_FIELD_ADDR:
12481293
case GT_INDEX_ADDR:
1294+
case GT_LCL_ADDR:
12491295
if (parent->TypeGet() == TYP_REF)
12501296
{
12511297
parent->ChangeType(newType);
@@ -1283,17 +1329,18 @@ void ObjectAllocator::UpdateAncestorTypes(GenTree* tree, ArrayStack<GenTree*>* p
12831329
case GT_STOREIND:
12841330
case GT_STORE_BLK:
12851331
case GT_BLK:
1286-
assert(tree == parent->AsIndir()->Addr());
1287-
1288-
// The new target could be *not* on the heap.
1289-
parent->gtFlags &= ~GTF_IND_TGT_HEAP;
1290-
1291-
if (newType != TYP_BYREF)
1332+
if (tree == parent->AsIndir()->Addr())
12921333
{
1293-
// This indicates that a write barrier is not needed when writing
1294-
// to this field/indirection since the address is not pointing to the heap.
1295-
// It's either null or points to inside a stack-allocated object.
1296-
parent->gtFlags |= GTF_IND_TGT_NOT_HEAP;
1334+
// The new target could be *not* on the heap.
1335+
parent->gtFlags &= ~GTF_IND_TGT_HEAP;
1336+
1337+
if (newType != TYP_BYREF)
1338+
{
1339+
// This indicates that a write barrier is not needed when writing
1340+
// to this field/indirection since the address is not pointing to the heap.
1341+
// It's either null or points to inside a stack-allocated object.
1342+
parent->gtFlags |= GTF_IND_TGT_NOT_HEAP;
1343+
}
12971344
}
12981345
break;
12991346

@@ -1354,10 +1401,7 @@ void ObjectAllocator::RewriteUses()
13541401
if ((lclNum < BitVecTraits::GetSize(&m_allocator->m_bitVecTraits)) &&
13551402
m_allocator->MayLclVarPointToStack(lclNum))
13561403
{
1357-
// Analysis does not handle indirect access to pointer locals.
1358-
assert(tree->OperIsScalarLocal());
1359-
1360-
var_types newType;
1404+
var_types newType = TYP_UNDEF;
13611405
if (m_allocator->m_HeapLocalToStackLocalMap.TryGetValue(lclNum, &newLclNum))
13621406
{
13631407
assert(tree->OperIs(GT_LCL_VAR)); // Must be a use.
@@ -1374,12 +1418,33 @@ void ObjectAllocator::RewriteUses()
13741418
}
13751419
}
13761420

1377-
if (lclVarDsc->lvType != newType)
1421+
// For local structs, retype the GC fields.
1422+
//
1423+
if (lclVarDsc->lvType == TYP_STRUCT)
1424+
{
1425+
// We should only see spans here.
1426+
//
1427+
assert(lclVarDsc->IsSpan());
1428+
ClassLayout* const layout = lclVarDsc->GetLayout();
1429+
ClassLayout* newLayout = nullptr;
1430+
1431+
if (newType == TYP_I_IMPL)
1432+
{
1433+
// No GC refs remain, so now a block layout
1434+
newLayout = m_compiler->typGetBlkLayout(layout->GetSize());
1435+
JITDUMP("Changing layout of span V%02u to block\n", lclNum);
1436+
lclVarDsc->ChangeLayout(newLayout);
1437+
}
1438+
}
1439+
// For locals, retype the local
1440+
//
1441+
else if (lclVarDsc->lvType != newType)
13781442
{
13791443
JITDUMP("Changing the type of V%02u from %s to %s\n", lclNum, varTypeName(lclVarDsc->lvType),
13801444
varTypeName(newType));
13811445
lclVarDsc->lvType = newType;
13821446
}
1447+
13831448
m_allocator->UpdateAncestorTypes(tree, &m_ancestors, newType);
13841449

13851450
if (newLclNum != BAD_VAR_NUM)

0 commit comments

Comments (0)