JIT: Add a stress mode that poisons implicit byrefs #80691

Merged · 7 commits · Jan 18, 2023
Changes from all commits
7 changes: 7 additions & 0 deletions src/coreclr/jit/compiler.cpp
@@ -1775,6 +1775,8 @@ void Compiler::compInit(ArenaAllocator* pAlloc,

// set this early so we can use it without relying on random memory values
verbose = compIsForInlining() ? impInlineInfo->InlinerCompiler->verbose : false;

compPoisoningAnyImplicitByrefs = false;
#endif

#if defined(DEBUG) || defined(LATE_DISASM) || DUMP_FLOWGRAPHS || DUMP_GC_TABLES
@@ -10179,6 +10181,10 @@ void Compiler::EnregisterStats::RecordLocal(const LclVarDsc* varDsc)
m_dispatchRetBuf++;
break;

case AddressExposedReason::STRESS_POISON_IMPLICIT_BYREFS:
m_stressPoisonImplicitByrefs++;
break;

default:
unreached();
break;
@@ -10274,5 +10280,6 @@ void Compiler::EnregisterStats::Dump(FILE* fout) const
PRINT_STATS(m_osrExposed, m_addrExposed);
PRINT_STATS(m_stressLclFld, m_addrExposed);
PRINT_STATS(m_dispatchRetBuf, m_addrExposed);
PRINT_STATS(m_stressPoisonImplicitByrefs, m_addrExposed);
}
#endif // TRACK_ENREG_STATS
22 changes: 14 additions & 8 deletions src/coreclr/jit/compiler.h
@@ -467,13 +467,14 @@ enum class DoNotEnregisterReason
enum class AddressExposedReason
{
NONE,
PARENT_EXPOSED, // This is a promoted field but the parent is exposed.
TOO_CONSERVATIVE, // Were marked as exposed to be conservative, fix these places.
ESCAPE_ADDRESS, // The address is escaping, for example, passed as call argument.
WIDE_INDIR, // We access via indirection with wider type.
OSR_EXPOSED, // It was exposed in the original method, osr has to repeat it.
STRESS_LCL_FLD, // Stress mode replaces localVar with localFld and makes them addrExposed.
DISPATCH_RET_BUF // Caller return buffer dispatch.
PARENT_EXPOSED, // This is a promoted field but the parent is exposed.
TOO_CONSERVATIVE, // Were marked as exposed to be conservative, fix these places.
ESCAPE_ADDRESS, // The address is escaping, for example, passed as call argument.
WIDE_INDIR, // We access via indirection with wider type.
OSR_EXPOSED, // It was exposed in the original method, osr has to repeat it.
STRESS_LCL_FLD, // Stress mode replaces localVar with localFld and makes them addrExposed.
DISPATCH_RET_BUF, // Caller return buffer dispatch.
STRESS_POISON_IMPLICIT_BYREFS, // This is an implicit byref we want to poison.
};

#endif // DEBUG
@@ -4183,6 +4184,7 @@ class Compiler
void impLoadArg(unsigned ilArgNum, IL_OFFSET offset);
void impLoadLoc(unsigned ilLclNum, IL_OFFSET offset);
bool impReturnInstruction(int prefixFlags, OPCODE& opcode);
void impPoisonImplicitByrefsBeforeReturn();

// A free list of linked list nodes used to represent to-do stacks of basic blocks.
struct BlockListNode
@@ -9080,7 +9082,9 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
bool fgNormalizeEHDone; // Has the flowgraph EH normalization phase been done?
size_t compSizeEstimate; // The estimated size of the method as per `gtSetEvalOrder`.
size_t compCycleEstimate; // The estimated cycle count of the method as per `gtSetEvalOrder`
#endif // DEBUG
bool compPoisoningAnyImplicitByrefs; // Importer inserted IR before returns to poison implicit byrefs

#endif // DEBUG

bool fgLocalVarLivenessDone; // Note that this one is used outside of debug.
bool fgLocalVarLivenessChanged;
@@ -9600,6 +9604,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
STRESS_MODE(GENERIC_CHECK) \
STRESS_MODE(IF_CONVERSION_COST) \
STRESS_MODE(IF_CONVERSION_INNER_LOOPS) \
STRESS_MODE(POISON_IMPLICIT_BYREFS) \
STRESS_MODE(COUNT)

enum compStressArea
@@ -10136,6 +10141,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
unsigned m_stressLclFld;
unsigned m_dispatchRetBuf;
unsigned m_wideIndir;
unsigned m_stressPoisonImplicitByrefs;

public:
void RecordLocal(const LclVarDsc* varDsc);
90 changes: 90 additions & 0 deletions src/coreclr/jit/importer.cpp
@@ -10887,6 +10887,12 @@ bool Compiler::impReturnInstruction(int prefixFlags, OPCODE& opcode)
{
assert(lvaInlineeReturnSpillTemp != BAD_VAR_NUM);
}

if (!compIsForInlining() && ((prefixFlags & (PREFIX_TAILCALL_EXPLICIT | PREFIX_TAILCALL_STRESS)) == 0) &&
compStressCompile(STRESS_POISON_IMPLICIT_BYREFS, 25))
{
impPoisonImplicitByrefsBeforeReturn();
}
#endif // DEBUG

GenTree* op2 = nullptr;
@@ -11204,6 +11210,90 @@ bool Compiler::impReturnInstruction(int prefixFlags, OPCODE& opcode)
return true;
}

#ifdef DEBUG
//------------------------------------------------------------------------
// impPoisonImplicitByrefsBeforeReturn:
// Spill the stack and insert IR that poisons all implicit byrefs.
//
// Remarks:
// The memory pointed to by implicit byrefs is owned by the callee but
// usually exists on the caller's frame (or on the heap for some reflection
// invoke scenarios). This function helps catch situations where the caller
// reads from the memory after the invocation, for example due to a bug in
// the JIT's own last-use copy elision for implicit byrefs.
//
void Compiler::impPoisonImplicitByrefsBeforeReturn()
{
bool spilled = false;
for (unsigned lclNum = 0; lclNum < info.compArgsCount; lclNum++)
{
if (!lvaIsImplicitByRefLocal(lclNum))
{
continue;
}

compPoisoningAnyImplicitByrefs = true;

if (!spilled)
{
for (unsigned level = 0; level < verCurrentState.esStackDepth; level++)
{
impSpillStackEntry(level, BAD_VAR_NUM DEBUGARG(true) DEBUGARG("Stress poisoning byrefs before return"));
}

spilled = true;
}

LclVarDsc* dsc = lvaGetDesc(lclNum);
// Be conservative about this local to ensure we do not eliminate the poisoning.
lvaSetVarAddrExposed(lclNum, AddressExposedReason::STRESS_POISON_IMPLICIT_BYREFS);

assert(varTypeIsStruct(dsc));
ClassLayout* layout = dsc->GetLayout();
assert(layout != nullptr);

auto poisonBlock = [this, lclNum](unsigned start, unsigned count) {
if (count <= 0)
{
return;
}

GenTreeLclFld* lhs =
new (this, GT_LCL_FLD) GenTreeLclFld(GT_LCL_FLD, TYP_STRUCT, lclNum, start, typGetBlkLayout(count));
lhs->gtFlags |= GTF_GLOB_REF;

GenTree* asg = gtNewAssignNode(lhs, gtNewOperNode(GT_INIT_VAL, TYP_INT, gtNewIconNode(0xcd)));
impAppendTree(asg, CHECK_SPILL_NONE, DebugInfo());
};

unsigned startOffs = 0;
unsigned numSlots = layout->GetSlotCount();
for (unsigned curSlot = 0; curSlot < numSlots; curSlot++)
{
unsigned offs = curSlot * TARGET_POINTER_SIZE;
var_types gcPtr = layout->GetGCPtrType(curSlot);
if (!varTypeIsGC(gcPtr))
{
continue;
}

poisonBlock(startOffs, offs - startOffs);

GenTree* gcField = gtNewLclFldNode(lclNum, gcPtr, offs);
gcField->gtFlags |= GTF_GLOB_REF;

GenTree* zeroField = gtNewAssignNode(gcField, gtNewZeroConNode(gcPtr));
impAppendTree(zeroField, CHECK_SPILL_NONE, DebugInfo());

startOffs = offs + TARGET_POINTER_SIZE;
}

assert(startOffs <= lvaLclExactSize(lclNum));
poisonBlock(startOffs, lvaLclExactSize(lclNum) - startOffs);
}
}
#endif

/*****************************************************************************
* Mark the block as unimported.
* Note that the caller is responsible for calling impImportBlockPending(),
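Note (not from the diff): the Remarks above describe the poisoning in terms of JIT IR. Below is a minimal standalone C++ sketch of the same slot walk, assuming a flat byte buffer and a per-slot GC map in place of the JIT's ClassLayout; kPointerSize, PoisonStruct, and slotIsGC are illustrative names, not JIT APIs.

// Minimal sketch (assumed names, not JIT code): mirrors the slot walk in
// impPoisonImplicitByrefsBeforeReturn. Non-GC byte ranges get the 0xCD poison
// value; GC-pointer slots are zeroed so the GC never sees a garbage reference.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

constexpr size_t kPointerSize = 8; // stand-in for TARGET_POINTER_SIZE

static void PoisonStruct(uint8_t* mem, size_t exactSize, const std::vector<bool>& slotIsGC)
{
    auto poisonRange = [&](size_t start, size_t count) {
        if (count == 0)
        {
            return;
        }
        std::memset(mem + start, 0xCD, count); // GT_INIT_VAL with 0xcd in the real IR
    };

    size_t startOffs = 0;
    for (size_t slot = 0; slot < slotIsGC.size(); slot++)
    {
        size_t offs = slot * kPointerSize;
        if (!slotIsGC[slot]) // non-GC slot: covered by a later poisonRange call
        {
            continue;
        }

        poisonRange(startOffs, offs - startOffs);  // poison the bytes preceding the GC slot
        std::memset(mem + offs, 0, kPointerSize);  // zero the GC slot (gtNewZeroConNode)
        startOffs = offs + kPointerSize;
    }

    poisonRange(startOffs, exactSize - startOffs); // trailing non-GC bytes
}

int main()
{
    // Three-slot struct whose middle slot is a GC reference.
    std::vector<bool> slotIsGC = {false, true, false};
    uint8_t mem[3 * kPointerSize];
    PoisonStruct(mem, sizeof(mem), slotIsGC);

    for (uint8_t b : mem)
    {
        std::printf("%02x ", b);
    }
    std::printf("\n");
    return 0;
}

For a three-slot struct whose middle slot holds a GC reference, this prints eight 0xcd bytes, eight zero bytes, then eight 0xcd bytes, which is the shape of the stores the importer appends before the return.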
6 changes: 6 additions & 0 deletions src/coreclr/jit/lclvars.cpp
@@ -1988,6 +1988,12 @@ bool Compiler::StructPromotionHelper::CanPromoteStructVar(unsigned lclNum)
return false;
}

if (varDsc->IsAddressExposed())
{
JITDUMP(" struct promotion of V%02u is disabled because it has already been marked address exposed\n", lclNum);
return false;
}

CORINFO_CLASS_HANDLE typeHnd = varDsc->GetStructHnd();
assert(typeHnd != NO_CLASS_HANDLE);

16 changes: 14 additions & 2 deletions src/coreclr/jit/morph.cpp
@@ -4810,7 +4810,7 @@ GenTree* Compiler::fgMorphExpandImplicitByRefArg(GenTreeLclVarCommon* lclNode)
{
if (argNodeType == TYP_STRUCT)
{
newArgNode = gtNewObjNode(argNodeLayout, addrNode);
newArgNode = gtNewStructVal(argNodeLayout, addrNode);
}
else
{
@@ -6199,7 +6199,7 @@ GenTree* Compiler::fgMorphPotentialTailCall(GenTreeCall* call)
#ifdef DEBUG
if (opts.compGcChecks && (info.compRetType == TYP_REF))
{
failTailCall("COMPlus_JitGCChecks or stress might have interposed a call to CORINFO_HELP_CHECK_OBJ, "
failTailCall("DOTNET_JitGCChecks or stress might have interposed a call to CORINFO_HELP_CHECK_OBJ, "
"invalidating tailcall opportunity");
return nullptr;
}
@@ -6250,6 +6250,18 @@ GenTree* Compiler::fgMorphPotentialTailCall(GenTreeCall* call)
return nullptr;
}

#ifdef DEBUG
// For explicit tailcalls the importer will avoid inserting stress
// poisoning after them. However, implicit tailcalls are marked earlier and
// we must filter those out here if we ended up adding any poisoning IR
// after them.
if (isImplicitOrStressTailCall && compPoisoningAnyImplicitByrefs)
{
failTailCall("STRESS_POISON_IMPLICIT_BYREFS has introduced IR after tailcall opportunity, invalidating");
return nullptr;
}
#endif

bool hasStructParam = false;
for (unsigned varNum = 0; varNum < lvaCount; varNum++)
{
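Aside (not from the diff): the new morph.cpp check relies on a tail call only being possible when the call is the caller's final action. A rough standalone C++ analogy is sketched below; Callee, CanBeTailCall, and PoisonedReturn are made-up names used only for illustration.

// Illustration only (hypothetical names, not JIT code): why stores appended
// after a call site disqualify it from being dispatched as a tail call.
#include <cstddef>
#include <cstdint>

static int Callee(int x)
{
    return x + 1;
}

// The call is the last action; the compiler may emit it as a jump that reuses
// the caller's frame, i.e. a tail call.
int CanBeTailCall(int x)
{
    return Callee(x);
}

// The stress mode appends poison stores to the implicit byref's memory between
// the call and the return. Work remains after the call, so the call can no
// longer be the final control transfer, and morph bails out of the implicit
// tail call in this situation.
int PoisonedReturn(int x, uint8_t* implicitByrefMem, size_t size)
{
    int result = Callee(x);
    for (size_t i = 0; i < size; i++)
    {
        implicitByrefMem[i] = 0xCD; // work that must run after the call returns
    }
    return result;
}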