diff --git a/src/coreclr/jit/compiler.cpp b/src/coreclr/jit/compiler.cpp
index f86e1923f89dc..065d0aaf6ae27 100644
--- a/src/coreclr/jit/compiler.cpp
+++ b/src/coreclr/jit/compiler.cpp
@@ -1775,6 +1775,8 @@ void Compiler::compInit(ArenaAllocator* pAlloc,
     // set this early so we can use it without relying on random memory values
     verbose = compIsForInlining() ? impInlineInfo->InlinerCompiler->verbose : false;
+
+    compPoisoningAnyImplicitByrefs = false;
 #endif
 
 #if defined(DEBUG) || defined(LATE_DISASM) || DUMP_FLOWGRAPHS || DUMP_GC_TABLES
@@ -10179,6 +10181,10 @@ void Compiler::EnregisterStats::RecordLocal(const LclVarDsc* varDsc)
                 m_dispatchRetBuf++;
                 break;
 
+            case AddressExposedReason::STRESS_POISON_IMPLICIT_BYREFS:
+                m_stressPoisonImplicitByrefs++;
+                break;
+
             default:
                 unreached();
                 break;
@@ -10274,5 +10280,6 @@ void Compiler::EnregisterStats::Dump(FILE* fout) const
     PRINT_STATS(m_osrExposed, m_addrExposed);
     PRINT_STATS(m_stressLclFld, m_addrExposed);
     PRINT_STATS(m_dispatchRetBuf, m_addrExposed);
+    PRINT_STATS(m_stressPoisonImplicitByrefs, m_addrExposed);
 }
 #endif // TRACK_ENREG_STATS
diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h
index 46413b54eb59f..cbd5c85d2f812 100644
--- a/src/coreclr/jit/compiler.h
+++ b/src/coreclr/jit/compiler.h
@@ -467,13 +467,14 @@ enum class DoNotEnregisterReason
 enum class AddressExposedReason
 {
     NONE,
-    PARENT_EXPOSED,   // This is a promoted field but the parent is exposed.
-    TOO_CONSERVATIVE, // Were marked as exposed to be conservative, fix these places.
-    ESCAPE_ADDRESS,   // The address is escaping, for example, passed as call argument.
-    WIDE_INDIR,       // We access via indirection with wider type.
-    OSR_EXPOSED,      // It was exposed in the original method, osr has to repeat it.
-    STRESS_LCL_FLD,   // Stress mode replaces localVar with localFld and makes them addrExposed.
-    DISPATCH_RET_BUF  // Caller return buffer dispatch.
+    PARENT_EXPOSED,                // This is a promoted field but the parent is exposed.
+    TOO_CONSERVATIVE,              // Were marked as exposed to be conservative, fix these places.
+    ESCAPE_ADDRESS,                // The address is escaping, for example, passed as call argument.
+    WIDE_INDIR,                    // We access via indirection with wider type.
+    OSR_EXPOSED,                   // It was exposed in the original method, osr has to repeat it.
+    STRESS_LCL_FLD,                // Stress mode replaces localVar with localFld and makes them addrExposed.
+    DISPATCH_RET_BUF,              // Caller return buffer dispatch.
+    STRESS_POISON_IMPLICIT_BYREFS, // This is an implicit byref we want to poison.
 };
 
 #endif // DEBUG
@@ -4183,6 +4184,7 @@ class Compiler
     void impLoadArg(unsigned ilArgNum, IL_OFFSET offset);
     void impLoadLoc(unsigned ilLclNum, IL_OFFSET offset);
     bool impReturnInstruction(int prefixFlags, OPCODE& opcode);
+    void impPoisonImplicitByrefsBeforeReturn();
 
     // A free list of linked list nodes used to represent to-do stacks of basic blocks.
     struct BlockListNode
@@ -9080,7 +9082,9 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
     bool   fgNormalizeEHDone;  // Has the flowgraph EH normalization phase been done?
     size_t compSizeEstimate;   // The estimated size of the method as per `gtSetEvalOrder`.
     size_t compCycleEstimate;  // The estimated cycle count of the method as per `gtSetEvalOrder`
-#endif // DEBUG
+    bool   compPoisoningAnyImplicitByrefs; // Importer inserted IR before returns to poison implicit byrefs
+
+#endif // DEBUG
 
     bool fgLocalVarLivenessDone; // Note that this one is used outside of debug.
     bool fgLocalVarLivenessChanged;
@@ -9600,6 +9604,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
     STRESS_MODE(GENERIC_CHECK)                                                                  \
     STRESS_MODE(IF_CONVERSION_COST)                                                             \
     STRESS_MODE(IF_CONVERSION_INNER_LOOPS)                                                      \
+    STRESS_MODE(POISON_IMPLICIT_BYREFS)                                                         \
     STRESS_MODE(COUNT)
 
     enum compStressArea
@@ -10136,6 +10141,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
         unsigned m_stressLclFld;
         unsigned m_dispatchRetBuf;
         unsigned m_wideIndir;
+        unsigned m_stressPoisonImplicitByrefs;
 
     public:
         void RecordLocal(const LclVarDsc* varDsc);
diff --git a/src/coreclr/jit/importer.cpp b/src/coreclr/jit/importer.cpp
index a09f831ceb270..489174745547a 100644
--- a/src/coreclr/jit/importer.cpp
+++ b/src/coreclr/jit/importer.cpp
@@ -10887,6 +10887,12 @@ bool Compiler::impReturnInstruction(int prefixFlags, OPCODE& opcode)
     {
         assert(lvaInlineeReturnSpillTemp != BAD_VAR_NUM);
     }
+
+    if (!compIsForInlining() && ((prefixFlags & (PREFIX_TAILCALL_EXPLICIT | PREFIX_TAILCALL_STRESS)) == 0) &&
+        compStressCompile(STRESS_POISON_IMPLICIT_BYREFS, 25))
+    {
+        impPoisonImplicitByrefsBeforeReturn();
+    }
 #endif // DEBUG
 
     GenTree* op2 = nullptr;
@@ -11204,6 +11210,90 @@ bool Compiler::impReturnInstruction(int prefixFlags, OPCODE& opcode)
     return true;
 }
 
+#ifdef DEBUG
+//------------------------------------------------------------------------
+// impPoisonImplicitByrefsBeforeReturn:
+//   Spill the stack and insert IR that poisons all implicit byrefs.
+//
+// Remarks:
+//   The memory pointed to by implicit byrefs is owned by the callee but
+//   usually exists on the caller's frame (or on the heap for some reflection
+//   invoke scenarios). This function helps catch situations where the caller
+//   reads from the memory after the invocation, for example due to a bug in
+//   the JIT's own last-use copy elision for implicit byrefs.
+//
+void Compiler::impPoisonImplicitByrefsBeforeReturn()
+{
+    bool spilled = false;
+    for (unsigned lclNum = 0; lclNum < info.compArgsCount; lclNum++)
+    {
+        if (!lvaIsImplicitByRefLocal(lclNum))
+        {
+            continue;
+        }
+
+        compPoisoningAnyImplicitByrefs = true;
+
+        if (!spilled)
+        {
+            for (unsigned level = 0; level < verCurrentState.esStackDepth; level++)
+            {
+                impSpillStackEntry(level, BAD_VAR_NUM DEBUGARG(true) DEBUGARG("Stress poisoning byrefs before return"));
+            }
+
+            spilled = true;
+        }
+
+        LclVarDsc* dsc = lvaGetDesc(lclNum);
+
+        // Be conservative about this local to ensure we do not eliminate the poisoning.
+        lvaSetVarAddrExposed(lclNum, AddressExposedReason::STRESS_POISON_IMPLICIT_BYREFS);
+
+        assert(varTypeIsStruct(dsc));
+        ClassLayout* layout = dsc->GetLayout();
+        assert(layout != nullptr);
+
+        auto poisonBlock = [this, lclNum](unsigned start, unsigned count) {
+            if (count <= 0)
+            {
+                return;
+            }
+
+            GenTreeLclFld* lhs =
+                new (this, GT_LCL_FLD) GenTreeLclFld(GT_LCL_FLD, TYP_STRUCT, lclNum, start, typGetBlkLayout(count));
+            lhs->gtFlags |= GTF_GLOB_REF;
+
+            GenTree* asg = gtNewAssignNode(lhs, gtNewOperNode(GT_INIT_VAL, TYP_INT, gtNewIconNode(0xcd)));
+            impAppendTree(asg, CHECK_SPILL_NONE, DebugInfo());
+        };
+
+        unsigned startOffs = 0;
+        unsigned numSlots  = layout->GetSlotCount();
+        for (unsigned curSlot = 0; curSlot < numSlots; curSlot++)
+        {
+            unsigned  offs  = curSlot * TARGET_POINTER_SIZE;
+            var_types gcPtr = layout->GetGCPtrType(curSlot);
+            if (!varTypeIsGC(gcPtr))
+            {
+                continue;
+            }
+
+            poisonBlock(startOffs, offs - startOffs);
+
+            GenTree* gcField = gtNewLclFldNode(lclNum, gcPtr, offs);
+            gcField->gtFlags |= GTF_GLOB_REF;
+
+            GenTree* zeroField = gtNewAssignNode(gcField, gtNewZeroConNode(gcPtr));
+            impAppendTree(zeroField, CHECK_SPILL_NONE, DebugInfo());
+
+            startOffs = offs + TARGET_POINTER_SIZE;
+        }
+
+        assert(startOffs <= lvaLclExactSize(lclNum));
+        poisonBlock(startOffs, lvaLclExactSize(lclNum) - startOffs);
+    }
+}
+#endif
+
 /*****************************************************************************
  *  Mark the block as unimported.
  *  Note that the caller is responsible for calling impImportBlockPending(),
diff --git a/src/coreclr/jit/lclvars.cpp b/src/coreclr/jit/lclvars.cpp
index 04411de16624c..eec8e0d1d800b 100644
--- a/src/coreclr/jit/lclvars.cpp
+++ b/src/coreclr/jit/lclvars.cpp
@@ -1988,6 +1988,12 @@ bool Compiler::StructPromotionHelper::CanPromoteStructVar(unsigned lclNum)
         return false;
     }
 
+    if (varDsc->IsAddressExposed())
+    {
+        JITDUMP(" struct promotion of V%02u is disabled because it has already been marked address exposed\n", lclNum);
+        return false;
+    }
+
     CORINFO_CLASS_HANDLE typeHnd = varDsc->GetStructHnd();
     assert(typeHnd != NO_CLASS_HANDLE);
diff --git a/src/coreclr/jit/morph.cpp b/src/coreclr/jit/morph.cpp
index a969f9502a0fd..c57ca7143000a 100644
--- a/src/coreclr/jit/morph.cpp
+++ b/src/coreclr/jit/morph.cpp
@@ -4810,7 +4810,7 @@ GenTree* Compiler::fgMorphExpandImplicitByRefArg(GenTreeLclVarCommon* lclNode)
     {
         if (argNodeType == TYP_STRUCT)
         {
-            newArgNode = gtNewObjNode(argNodeLayout, addrNode);
+            newArgNode = gtNewStructVal(argNodeLayout, addrNode);
         }
         else
         {
@@ -6199,7 +6199,7 @@ GenTree* Compiler::fgMorphPotentialTailCall(GenTreeCall* call)
 #ifdef DEBUG
     if (opts.compGcChecks && (info.compRetType == TYP_REF))
     {
-        failTailCall("COMPlus_JitGCChecks or stress might have interposed a call to CORINFO_HELP_CHECK_OBJ, "
+        failTailCall("DOTNET_JitGCChecks or stress might have interposed a call to CORINFO_HELP_CHECK_OBJ, "
                      "invalidating tailcall opportunity");
         return nullptr;
     }
@@ -6250,6 +6250,18 @@ GenTree* Compiler::fgMorphPotentialTailCall(GenTreeCall* call)
         return nullptr;
     }
 
+#ifdef DEBUG
+    // For explicit tailcalls the importer will avoid inserting stress
+    // poisoning after them. However, implicit tailcalls are marked earlier and
+    // we must filter those out here if we ended up adding any poisoning IR
+    // after them.
+    if (isImplicitOrStressTailCall && compPoisoningAnyImplicitByrefs)
+    {
+        failTailCall("STRESS_POISON_IMPLICIT_BYREFS has introduced IR after tailcall opportunity, invalidating");
+        return nullptr;
+    }
+#endif
+
     bool hasStructParam = false;
     for (unsigned varNum = 0; varNum < lvaCount; varNum++)
     {
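
Note on the poisoning scheme used by impPoisonImplicitByrefsBeforeReturn above: the struct layout is walked slot by slot, contiguous non-GC byte ranges are filled with 0xCD, and GC slots are zeroed instead so the GC never observes a poisoned pointer. The standalone C++ sketch below mirrors that slot walk outside the JIT; PoisonStruct, the gcSlots bitmap, and the fixed 8-byte slot size are illustrative stand-ins (not JIT APIs) for the ClassLayout::GetGCPtrType query and TARGET_POINTER_SIZE.

// Minimal sketch (not JIT code): poison non-GC bytes with 0xCD, zero GC slots.
#include <cstdint>
#include <cstdio>
#include <vector>

constexpr unsigned kSlotSize = 8; // stand-in for TARGET_POINTER_SIZE

static void PoisonStruct(uint8_t* dst, unsigned exactSize, const std::vector<bool>& gcSlots)
{
    // Fill the byte range [start, start + count) with the 0xCD poison pattern.
    auto poisonRange = [&](unsigned start, unsigned count) {
        for (unsigned i = 0; i < count; i++)
        {
            dst[start + i] = 0xCD;
        }
    };

    unsigned startOffs = 0;
    for (unsigned slot = 0; slot < (unsigned)gcSlots.size(); slot++)
    {
        if (!gcSlots[slot])
        {
            continue; // non-GC slot: folded into the surrounding poison range
        }

        unsigned offs = slot * kSlotSize;
        poisonRange(startOffs, offs - startOffs); // poison bytes preceding the GC slot

        for (unsigned i = 0; i < kSlotSize; i++) // zero the GC slot itself
        {
            dst[offs + i] = 0;
        }

        startOffs = offs + kSlotSize;
    }

    poisonRange(startOffs, exactSize - startOffs); // trailing non-GC bytes
}

int main()
{
    // 32-byte struct; the second slot (offsets 8-15) holds a GC reference.
    uint8_t buf[32] = {};
    PoisonStruct(buf, sizeof(buf), {false, true, false, false});

    for (unsigned i = 0; i < sizeof(buf); i++)
    {
        printf("%02x%s", (unsigned)buf[i], ((i + 1) % kSlotSize == 0) ? "\n" : " ");
    }
    return 0;
}

Running the sketch prints 0xcd for every byte except offsets 8-15, which stay zero; this is the same shape as the GT_INIT_VAL stores and GC-slot zeroing the importer appends before each return under this stress mode.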