diff --git a/src/coreclr/ToolBox/superpmi/superpmi-shared/lwmlist.h b/src/coreclr/ToolBox/superpmi/superpmi-shared/lwmlist.h index 4bffe52ecb4f6..056904894d1f0 100644 --- a/src/coreclr/ToolBox/superpmi/superpmi-shared/lwmlist.h +++ b/src/coreclr/ToolBox/superpmi/superpmi-shared/lwmlist.h @@ -128,6 +128,7 @@ LWM(GetSharedCCtorHelper, DWORDLONG, DWORD) LWM(GetStringConfigValue, DWORD, DWORD) LWM(GetSystemVAmd64PassStructInRegisterDescriptor, DWORDLONG, Agnostic_GetSystemVAmd64PassStructInRegisterDescriptor) LWM(GetTailCallHelpers, Agnostic_GetTailCallHelpers, Agnostic_CORINFO_TAILCALL_HELPERS) +LWM(UpdateEntryPointForTailCall, Agnostic_CORINFO_CONST_LOOKUP, Agnostic_CORINFO_CONST_LOOKUP) LWM(GetThreadTLSIndex, DWORD, DLD) LWM(GetTokenTypeAsHandle, GetTokenTypeAsHandleValue, DWORDLONG) LWM(GetTypeForBox, DWORDLONG, DWORDLONG) diff --git a/src/coreclr/ToolBox/superpmi/superpmi-shared/methodcontext.cpp b/src/coreclr/ToolBox/superpmi/superpmi-shared/methodcontext.cpp index 999ad17a87054..b9195e4bbd1d5 100644 --- a/src/coreclr/ToolBox/superpmi/superpmi-shared/methodcontext.cpp +++ b/src/coreclr/ToolBox/superpmi/superpmi-shared/methodcontext.cpp @@ -6608,6 +6608,41 @@ bool MethodContext::repGetTailCallHelpers( return true; } +void MethodContext::recUpdateEntryPointForTailCall( + const CORINFO_CONST_LOOKUP& origEntryPoint, + const CORINFO_CONST_LOOKUP& newEntryPoint) +{ + if (UpdateEntryPointForTailCall == nullptr) + UpdateEntryPointForTailCall = new LightWeightMap(); + + Agnostic_CORINFO_CONST_LOOKUP key = SpmiRecordsHelper::StoreAgnostic_CORINFO_CONST_LOOKUP(&origEntryPoint); + Agnostic_CORINFO_CONST_LOOKUP value = SpmiRecordsHelper::StoreAgnostic_CORINFO_CONST_LOOKUP(&newEntryPoint); + UpdateEntryPointForTailCall->Add(key, value); + DEBUG_REC(dmpUpdateEntryPointForTailCall(key, value)); +} + +void MethodContext::dmpUpdateEntryPointForTailCall( + const Agnostic_CORINFO_CONST_LOOKUP& origEntryPoint, + const Agnostic_CORINFO_CONST_LOOKUP& newEntryPoint) +{ + printf("UpdateEntryPointForTailcall orig=%s new=%s", + SpmiDumpHelper::DumpAgnostic_CORINFO_CONST_LOOKUP(origEntryPoint).c_str(), + SpmiDumpHelper::DumpAgnostic_CORINFO_CONST_LOOKUP(newEntryPoint).c_str()); +} + +void MethodContext::repUpdateEntryPointForTailCall(CORINFO_CONST_LOOKUP* entryPoint) +{ + AssertMapExistsNoMessage(UpdateEntryPointForTailCall); + + Agnostic_CORINFO_CONST_LOOKUP key = SpmiRecordsHelper::StoreAgnostic_CORINFO_CONST_LOOKUP(entryPoint); + AssertKeyExistsNoMessage(UpdateEntryPointForTailCall, key); + + Agnostic_CORINFO_CONST_LOOKUP value = UpdateEntryPointForTailCall->Get(key); + DEBUG_REP(dmpUpdateEntryPointForTailCall(key, value)); + + *entryPoint = SpmiRecordsHelper::RestoreCORINFO_CONST_LOOKUP(value); +} + void MethodContext::recGetMethodDefFromMethod(CORINFO_METHOD_HANDLE hMethod, mdMethodDef result) { if (GetMethodDefFromMethod == nullptr) diff --git a/src/coreclr/ToolBox/superpmi/superpmi-shared/methodcontext.h b/src/coreclr/ToolBox/superpmi/superpmi-shared/methodcontext.h index f209a4b5d0ad7..f1b8e405761d5 100644 --- a/src/coreclr/ToolBox/superpmi/superpmi-shared/methodcontext.h +++ b/src/coreclr/ToolBox/superpmi/superpmi-shared/methodcontext.h @@ -806,6 +806,10 @@ class MethodContext CORINFO_GET_TAILCALL_HELPERS_FLAGS flags, CORINFO_TAILCALL_HELPERS* pResult); + void recUpdateEntryPointForTailCall(const CORINFO_CONST_LOOKUP& origEntryPoint, const CORINFO_CONST_LOOKUP& newEntryPoint); + void dmpUpdateEntryPointForTailCall(const Agnostic_CORINFO_CONST_LOOKUP& origEntryPoint, const Agnostic_CORINFO_CONST_LOOKUP& 
newEntryPoint); + void repUpdateEntryPointForTailCall(CORINFO_CONST_LOOKUP* entryPoint); + void recGetMethodDefFromMethod(CORINFO_METHOD_HANDLE hMethod, mdMethodDef result); void dmpGetMethodDefFromMethod(DWORDLONG key, DWORD value); mdMethodDef repGetMethodDefFromMethod(CORINFO_METHOD_HANDLE hMethod); @@ -904,203 +908,199 @@ class MethodContext } }; -// ********************* Please keep this up-to-date to ease adding more *************** -// Highest packet number: 192 -// ************************************************************************************* enum mcPackets { - Packet_AllocMethodBlockCounts = 131, // retired 1/4/2021 - Packet_AppendClassName = 149, // Added 8/6/2014 - needed for SIMD - Packet_AreTypesEquivalent = 1, - Packet_AsCorInfoType = 2, - Packet_CanAccessClass = 3, - Packet_CanAccessFamily = 4, - Packet_CanCast = 5, - Retired8 = 6, - Packet_GetLazyStringLiteralHelper = 147, // Added 12/20/2013 - as a replacement for CanEmbedModuleHandleForHelper + Packet_AreTypesEquivalent = 1, + Packet_AsCorInfoType = 2, + Packet_CanAccessClass = 3, + Packet_CanAccessFamily = 4, + Packet_CanCast = 5, Packet_CanGetCookieForPInvokeCalliSig = 7, - Packet_CanGetVarArgsHandle = 8, - Packet_CanInline = 9, - Packet_CanInlineTypeCheck = 173, // Added 11/15/2018 as a replacement for CanInlineTypeCheckWithObjectVTable - Packet_CanInlineTypeCheckWithObjectVTable = 10, - Packet_CanSkipMethodVerification = 11, // Retired 2/18/2020 - Packet_CanTailCall = 12, - Retired4 = 13, - Packet_CheckMethodModifier = 142, // retired as 13 on 2013/07/04 - Retired3 = 14, - Retired5 = 141, // retired as 14 on 2013/07/03 - Packet_CompareTypesForCast = 163, // Added 10/4/17 - Packet_CompareTypesForEquality = 164, // Added 10/4/17 - Packet_CompileMethod = 143, // retired as 141 on 2013/07/09 - Packet_ConstructStringLiteral = 15, - Packet_ConvertPInvokeCalliToCall = 169, // Added 4/29/18 - Packet_EmbedClassHandle = 16, - Packet_EmbedFieldHandle = 17, - Packet_EmbedGenericHandle = 18, - Packet_EmbedMethodHandle = 19, - Packet_EmbedModuleHandle = 20, - Packet_EmptyStringLiteral = 21, - Retired9 = 136, - Packet_ErrorList = 22, - Packet_FilterException = 134, - Packet_FindCallSiteSig = 23, - Retired7 = 24, - Packet_FindNameOfToken = 145, // Added 7/19/2013 - adjusted members to proper types - Packet_GetSystemVAmd64PassStructInRegisterDescriptor = 156, // Added 2/17/2016 - Packet_FindSig = 25, - Packet_GetAddressOfPInvokeFixup = 26, // Retired 2/18/2020 - Packet_GetAddressOfPInvokeTarget = 153, // Added 2/3/2016 - Packet_GetAddrOfCaptureThreadGlobal = 27, - Retired1 = 28, - Packet_GetArgClass = 139, // retired as 28 on 2013/07/03 - Packet_GetHFAType = 159, - Packet_GetArgNext = 29, - Retired2 = 30, - Packet_GetArgType = 140, // retired as 30 on 2013/07/03 - Packet_GetArrayInitializationData = 31, - Packet_GetArrayRank = 32, - Packet_GetMethodBlockCounts = 33, - Packet_GetBoundaries = 34, - Packet_GetBoxHelper = 35, - Packet_GetBuiltinClass = 36, - Packet_GetCallInfo = 37, - Packet_GetCastingHelper = 38, - Packet_GetChildType = 39, - Packet_GetClassAlignmentRequirement = 40, - Packet_GetClassAttribs = 41, - Packet_GetClassDomainID = 42, - Packet_GetClassGClayout = 43, - Packet_GetClassModuleIdForStatics = 44, - Packet_GetClassName = 45, - Packet_GetClassNameFromMetadata = 166, // Added 12/4/17 - Packet_GetTypeInstantiationArgument = 167, // Added 12/4/17 - Packet_GetClassNumInstanceFields = 46, - Packet_GetClassSize = 47, - Packet_GetHeapClassSize = 170, // Added 10/5/2018 - Packet_CanAllocateOnStack = 171, // Added 
10/5/2018 - Packet_GetIntConfigValue = 151, // Added 2/12/2015 - Packet_GetStringConfigValue = 152, // Added 2/12/2015 - Packet_GetCookieForPInvokeCalliSig = 48, - Packet_GetDefaultComparerClass = 188, // Added 2/10/2021 - Packet_GetDefaultEqualityComparerClass = 162, // Added 9/24/2017 - Packet_GetDelegateCtor = 49, - Packet_GetEEInfo = 50, - Packet_GetEHinfo = 51, - Packet_GetFieldAddress = 52, - Packet_GetStaticFieldCurrentClass = 172, // Added 11/7/2018 - Packet_GetFieldClass = 53, - Packet_GetFieldInClass = 54, - Packet_GetFieldInfo = 55, - Packet_GetFieldName = 56, - Packet_GetFieldOffset = 57, - Packet_GetFieldThreadLocalStoreID = 58, - Packet_GetFieldType = 59, - Packet_GetFunctionEntryPoint = 60, - Packet_GetFunctionFixedEntryPoint = 61, - Packet_GetGSCookie = 62, - Packet_GetHelperFtn = 63, - Packet_GetHelperName = 64, - Packet_GetInlinedCallFrameVptr = 65, - Packet_GetIntrinsicID = 66, - Packet_GetJitFlags = 154, // Added 2/3/2016 - Packet_GetJitTimeLogFilename = 67, - Packet_GetJustMyCodeHandle = 68, - Retired10 = 182, // Added 9/27/2020 // was Packet_GetLikelyClass - Packet_GetLocationOfThisType = 69, - Packet_IsJitIntrinsic = 192, - Packet_GetMethodAttribs = 70, - Packet_GetMethodClass = 71, - Packet_GetMethodModule = 181, // Added 11/20/2020 - Packet_GetMethodDefFromMethod = 72, - Packet_GetMethodHash = 73, - Packet_GetMethodInfo = 74, - Packet_GetMethodName = 75, - Packet_GetMethodNameFromMetadata = 161, // Added 9/6/17 - Packet_GetMethodSig = 76, - Packet_GetMethodSync = 77, - Packet_GetMethodVTableOffset = 78, - Packet_GetNewArrHelper = 79, - Packet_GetNewHelper = 80, - Packet_GetOSRInfo = 177, // Added 3/5/2020 - Packet_GetParentType = 81, - Packet_GetPInvokeUnmanagedTarget = 82, // Retired 2/18/2020 - Packet_GetProfilingHandle = 83, - Packet_GetRelocTypeHint = 84, - Packet_GetExpectedTargetArchitecture = 183, // Added 12/18/2020 - Packet_GetSecurityPrologHelper = 85, // Retired 2/18/2020 - Packet_GetSharedCCtorHelper = 86, - Packet_GetTailCallCopyArgsThunk = 87, // Retired 4/27/2020 - Packet_GetTailCallHelpers = 178, // Added 3/18/2020 - Packet_GetThreadTLSIndex = 88, - Packet_GetTokenTypeAsHandle = 89, - Packet_GetTypeForBox = 90, - Packet_GetTypeForPrimitiveValueClass = 91, - Packet_GetTypeForPrimitiveNumericClass = 168, // Added 12/7/2017 - Packet_GetUnboxedEntry = 165, // Added 10/26/17 - Packet_GetUnBoxHelper = 92, - Packet_GetReadyToRunHelper = 150, // Added 10/10/2014 - Packet_GetReadyToRunDelegateCtorHelper = 157, // Added 3/30/2016 - Packet_GetUnmanagedCallConv = 94, - Packet_GetVarArgsHandle = 95, - Packet_GetVars = 96, - Packet_HandleException = 135, // Retired 7/19/2021 - Packet_InitClass = 97, - Packet_InitConstraintsForVerification = 98, // Retired 2/18/2020 - Packet_IsCompatibleDelegate = 99, - Packet_IsDelegateCreationAllowed = 155, - Packet_IsFieldStatic = 137, // Added 4/9/2013 - needed for 4.5.1 - Packet_IsIntrinsicType = 148, // Added 10/26/2019 - SIMD support - Packet_IsInstantiationOfVerifiedGeneric = 100, // Retired 2/18/2020 - Packet_IsSDArray = 101, - Packet_IsStructRequiringStackAllocRetBuf = 102, - Packet_IsValidStringRef = 103, - Packet_GetStringLiteral = 175, // Added 1/7/2020 - Retired6 = 104, - Packet_IsValidToken = 144, // Added 7/19/2013 - adjusted members to proper types - Packet_IsValueClass = 105, - Packet_IsWriteBarrierHelperRequired = 106, // Retired 2/18/2020 - Packet_MergeClasses = 107, - Packet_IsMoreSpecificType = 174, // Added 2/14/2019 - Packet_PInvokeMarshalingRequired = 108, - Packet_ResolveToken = 109, - 
Packet_ResolveVirtualMethod = 160, // Added 2/13/17 - Packet_TryResolveToken = 158, // Added 4/26/2016 - Packet_SatisfiesClassConstraints = 110, - Packet_SatisfiesMethodConstraints = 111, - Packet_DoesFieldBelongToClass = 112, // Added 8/12/2021 - Packet_SigInstHandleMap = 184, - Packet_AllocPgoInstrumentationBySchema = 186, // Added 1/4/2021 - Packet_GetPgoInstrumentationResults = 187, // Added 1/4/2021 - Packet_GetClassModule = 189, // Added 2/19/2021 - Packet_GetModuleAssembly = 190, // Added 2/19/2021 - Packet_GetAssemblyName = 191, // Added 2/19/2021 - - PacketCR_AddressMap = 113, - PacketCR_AllocGCInfo = 114, - PacketCR_AllocMem = 115, - PacketCR_AllocUnwindInfo = 132, - PacketCR_AssertLog = 138, // Added 6/10/2013 - added to nicely support ilgen - PacketCR_CallLog = 116, - PacketCR_ClassMustBeLoadedBeforeCodeIsRun = 117, - PacketCR_CompileMethod = 118, - PacketCR_MessageLog = 119, + Packet_CanGetVarArgsHandle = 8, + Packet_CanInline = 9, + //Packet_CanInlineTypeCheckWithObjectVTable = 10, + //Packet_CanSkipMethodVerification = 11, + Packet_CanTailCall = 12, + //Retired4 = 13, + //Retired3 = 14, + Packet_ConstructStringLiteral = 15, + Packet_EmbedClassHandle = 16, + Packet_EmbedFieldHandle = 17, + Packet_EmbedGenericHandle = 18, + Packet_EmbedMethodHandle = 19, + Packet_EmbedModuleHandle = 20, + Packet_EmptyStringLiteral = 21, + Packet_ErrorList = 22, + Packet_FindCallSiteSig = 23, + //Retired7 = 24, + Packet_FindSig = 25, + Packet_GetAddressOfPInvokeFixup = 26, + Packet_GetAddrOfCaptureThreadGlobal = 27, + //Retired1 = 28, + Packet_GetArgNext = 29, + //Retired2 = 30, + Packet_GetArrayInitializationData = 31, + Packet_GetArrayRank = 32, + //Packet_GetMethodBlockCounts = 33, + Packet_GetBoundaries = 34, + Packet_GetBoxHelper = 35, + Packet_GetBuiltinClass = 36, + Packet_GetCallInfo = 37, + Packet_GetCastingHelper = 38, + Packet_GetChildType = 39, + Packet_GetClassAlignmentRequirement = 40, + Packet_GetClassAttribs = 41, + Packet_GetClassDomainID = 42, + Packet_GetClassGClayout = 43, + Packet_GetClassModuleIdForStatics = 44, + Packet_GetClassName = 45, + Packet_GetClassNumInstanceFields = 46, + Packet_GetClassSize = 47, + Packet_GetCookieForPInvokeCalliSig = 48, + Packet_GetDelegateCtor = 49, + Packet_GetEEInfo = 50, + Packet_GetEHinfo = 51, + Packet_GetFieldAddress = 52, + Packet_GetFieldClass = 53, + Packet_GetFieldInClass = 54, + Packet_GetFieldInfo = 55, + Packet_GetFieldName = 56, + Packet_GetFieldOffset = 57, + Packet_GetFieldThreadLocalStoreID = 58, + Packet_GetFieldType = 59, + Packet_GetFunctionEntryPoint = 60, + Packet_GetFunctionFixedEntryPoint = 61, + Packet_GetGSCookie = 62, + Packet_GetHelperFtn = 63, + Packet_GetHelperName = 64, + Packet_GetInlinedCallFrameVptr = 65, + Packet_GetIntrinsicID = 66, + Packet_GetJitTimeLogFilename = 67, + Packet_GetJustMyCodeHandle = 68, + Packet_GetLocationOfThisType = 69, + Packet_GetMethodAttribs = 70, + Packet_GetMethodClass = 71, + Packet_GetMethodDefFromMethod = 72, + Packet_GetMethodHash = 73, + Packet_GetMethodInfo = 74, + Packet_GetMethodName = 75, + Packet_GetMethodSig = 76, + Packet_GetMethodSync = 77, + Packet_GetMethodVTableOffset = 78, + Packet_GetNewArrHelper = 79, + Packet_GetNewHelper = 80, + Packet_GetParentType = 81, + //Packet_GetPInvokeUnmanagedTarget = 82, + Packet_GetProfilingHandle = 83, + Packet_GetRelocTypeHint = 84, + //Packet_GetSecurityPrologHelper = 85, + Packet_GetSharedCCtorHelper = 86, + //Packet_GetTailCallCopyArgsThunk = 87, + Packet_GetThreadTLSIndex = 88, + Packet_GetTokenTypeAsHandle = 89, + 
Packet_GetTypeForBox = 90, + Packet_GetTypeForPrimitiveValueClass = 91, + Packet_GetUnBoxHelper = 92, + Packet_GetUnmanagedCallConv = 94, + Packet_GetVarArgsHandle = 95, + Packet_GetVars = 96, + Packet_InitClass = 97, + //Packet_InitConstraintsForVerification = 98, + Packet_IsCompatibleDelegate = 99, + //Packet_IsInstantiationOfVerifiedGeneric = 100, + Packet_IsSDArray = 101, + Packet_IsStructRequiringStackAllocRetBuf = 102, + Packet_IsValidStringRef = 103, + //Retired6 = 104, + Packet_IsValueClass = 105, + //Packet_IsWriteBarrierHelperRequired = 106, + Packet_MergeClasses = 107, + Packet_PInvokeMarshalingRequired = 108, + Packet_ResolveToken = 109, + Packet_SatisfiesClassConstraints = 110, + Packet_SatisfiesMethodConstraints = 111, + Packet_DoesFieldBelongToClass = 112, + PacketCR_AddressMap = 113, + PacketCR_AllocGCInfo = 114, + PacketCR_AllocMem = 115, + PacketCR_CallLog = 116, + PacketCR_ClassMustBeLoadedBeforeCodeIsRun = 117, + PacketCR_CompileMethod = 118, + PacketCR_MessageLog = 119, PacketCR_MethodMustBeLoadedBeforeCodeIsRun = 120, - PacketCR_ProcessName = 121, - PacketCR_RecordRelocation = 122, - PacketCR_ReportFatalError = 123, - PacketCR_ReportInliningDecision = 124, - PacketCR_ReportTailCallDecision = 125, - PacketCR_ReserveUnwindInfo = 133, - PacketCR_SetBoundaries = 126, - PacketCR_SetEHcount = 127, - PacketCR_SetEHinfo = 128, - PacketCR_SetMethodAttribs = 129, - PacketCR_SetVars = 130, - PacketCR_SetPatchpointInfo = 176, // added 8/5/2019 - PacketCR_RecordCallSite = 146, // Retired 9/13/2020 - PacketCR_RecordCallSiteWithSignature = 179, // Added 9/13/2020 - PacketCR_RecordCallSiteWithoutSignature = 180, // Added 9/13/2020 - PacketCR_CrSigInstHandleMap = 185, + PacketCR_ProcessName = 121, + PacketCR_RecordRelocation = 122, + PacketCR_ReportFatalError = 123, + PacketCR_ReportInliningDecision = 124, + PacketCR_ReportTailCallDecision = 125, + PacketCR_SetBoundaries = 126, + PacketCR_SetEHcount = 127, + PacketCR_SetEHinfo = 128, + PacketCR_SetMethodAttribs = 129, + PacketCR_SetVars = 130, + //Packet_AllocMethodBlockCounts = 131, + PacketCR_AllocUnwindInfo = 132, + PacketCR_ReserveUnwindInfo = 133, + Packet_FilterException = 134, + //Packet_HandleException = 135, + //Retired9 = 136, + Packet_IsFieldStatic = 137, + PacketCR_AssertLog = 138, + Packet_GetArgClass = 139, + Packet_GetArgType = 140, + //Retired5 = 141, + Packet_CheckMethodModifier = 142, + Packet_CompileMethod = 143, + Packet_IsValidToken = 144, + Packet_FindNameOfToken = 145, + //PacketCR_RecordCallSite = 146, + Packet_GetLazyStringLiteralHelper = 147, + Packet_IsIntrinsicType = 148, + Packet_AppendClassName = 149, + Packet_GetReadyToRunHelper = 150, + Packet_GetIntConfigValue = 151, + Packet_GetStringConfigValue = 152, + Packet_GetAddressOfPInvokeTarget = 153, + Packet_GetJitFlags = 154, + Packet_IsDelegateCreationAllowed = 155, + Packet_GetSystemVAmd64PassStructInRegisterDescriptor = 156, + Packet_GetReadyToRunDelegateCtorHelper = 157, + Packet_TryResolveToken = 158, + Packet_GetHFAType = 159, + Packet_ResolveVirtualMethod = 160, + Packet_GetMethodNameFromMetadata = 161, + Packet_GetDefaultEqualityComparerClass = 162, + Packet_CompareTypesForCast = 163, + Packet_CompareTypesForEquality = 164, + Packet_GetUnboxedEntry = 165, + Packet_GetClassNameFromMetadata = 166, + Packet_GetTypeInstantiationArgument = 167, + Packet_GetTypeForPrimitiveNumericClass = 168, + Packet_ConvertPInvokeCalliToCall = 169, + Packet_GetHeapClassSize = 170, + Packet_CanAllocateOnStack = 171, + Packet_GetStaticFieldCurrentClass = 172, + 
Packet_CanInlineTypeCheck = 173, + Packet_IsMoreSpecificType = 174, + Packet_GetStringLiteral = 175, + PacketCR_SetPatchpointInfo = 176, + Packet_GetOSRInfo = 177, + Packet_GetTailCallHelpers = 178, + PacketCR_RecordCallSiteWithSignature = 179, + PacketCR_RecordCallSiteWithoutSignature = 180, + Packet_GetMethodModule = 181, + //Retired10 = 182, + Packet_GetExpectedTargetArchitecture = 183, + Packet_SigInstHandleMap = 184, + PacketCR_CrSigInstHandleMap = 185, + Packet_AllocPgoInstrumentationBySchema = 186, + Packet_GetPgoInstrumentationResults = 187, + Packet_GetDefaultComparerClass = 188, + Packet_GetClassModule = 189, + Packet_GetModuleAssembly = 190, + Packet_GetAssemblyName = 191, + Packet_IsJitIntrinsic = 192, + Packet_UpdateEntryPointForTailCall = 193, }; void SetDebugDumpVariables(); diff --git a/src/coreclr/ToolBox/superpmi/superpmi-shared/spmirecordhelper.h b/src/coreclr/ToolBox/superpmi/superpmi-shared/spmirecordhelper.h index d5316b94db17b..17476cb9ba69a 100644 --- a/src/coreclr/ToolBox/superpmi/superpmi-shared/spmirecordhelper.h +++ b/src/coreclr/ToolBox/superpmi/superpmi-shared/spmirecordhelper.h @@ -97,9 +97,9 @@ class SpmiRecordsHelper static CORINFO_LOOKUP_KIND RestoreCORINFO_LOOKUP_KIND(Agnostic_CORINFO_LOOKUP_KIND& lookupKind); static Agnostic_CORINFO_CONST_LOOKUP StoreAgnostic_CORINFO_CONST_LOOKUP( - CORINFO_CONST_LOOKUP* pLookup); + const CORINFO_CONST_LOOKUP* pLookup); - static CORINFO_CONST_LOOKUP RestoreCORINFO_CONST_LOOKUP(Agnostic_CORINFO_CONST_LOOKUP& lookup); + static CORINFO_CONST_LOOKUP RestoreCORINFO_CONST_LOOKUP(const Agnostic_CORINFO_CONST_LOOKUP& lookup); static Agnostic_CORINFO_RUNTIME_LOOKUP StoreAgnostic_CORINFO_RUNTIME_LOOKUP( CORINFO_RUNTIME_LOOKUP* pLookup); @@ -459,7 +459,7 @@ inline CORINFO_LOOKUP_KIND SpmiRecordsHelper::RestoreCORINFO_LOOKUP_KIND( } inline Agnostic_CORINFO_CONST_LOOKUP SpmiRecordsHelper::StoreAgnostic_CORINFO_CONST_LOOKUP( - CORINFO_CONST_LOOKUP* pLookup) + const CORINFO_CONST_LOOKUP* pLookup) { Agnostic_CORINFO_CONST_LOOKUP constLookup; ZeroMemory(&constLookup, sizeof(constLookup)); @@ -469,7 +469,7 @@ inline Agnostic_CORINFO_CONST_LOOKUP SpmiRecordsHelper::StoreAgnostic_CORINFO_CO } inline CORINFO_CONST_LOOKUP SpmiRecordsHelper::RestoreCORINFO_CONST_LOOKUP( - Agnostic_CORINFO_CONST_LOOKUP& lookup) + const Agnostic_CORINFO_CONST_LOOKUP& lookup) { CORINFO_CONST_LOOKUP constLookup; constLookup.accessType = (InfoAccessType)lookup.accessType; diff --git a/src/coreclr/ToolBox/superpmi/superpmi-shim-collector/icorjitinfo.cpp b/src/coreclr/ToolBox/superpmi/superpmi-shim-collector/icorjitinfo.cpp index 2b802b3eacc31..0d0acb208b416 100644 --- a/src/coreclr/ToolBox/superpmi/superpmi-shim-collector/icorjitinfo.cpp +++ b/src/coreclr/ToolBox/superpmi/superpmi-shim-collector/icorjitinfo.cpp @@ -1776,6 +1776,14 @@ bool interceptor_ICJI::getTailCallHelpers( return result; } +void interceptor_ICJI::updateEntryPointForTailCall(CORINFO_CONST_LOOKUP* entryPoint) +{ + mc->cr->AddCall("updateEntryPointForTailCall"); + CORINFO_CONST_LOOKUP origEntryPoint = *entryPoint; + original_ICorJitInfo->updateEntryPointForTailCall(entryPoint); + mc->recUpdateEntryPointForTailCall(origEntryPoint, *entryPoint); +} + // Stuff directly on ICorJitInfo // Returns extended flags for a particular compilation instance. 
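The SuperPMI changes above wire updateEntryPointForTailCall into the usual record/replay scheme: the collector shim snapshots the incoming CORINFO_CONST_LOOKUP, forwards the call to the EE, and stores the original/updated pair, while replay keys on the original value and rewrites the caller's lookup in place. Below is a minimal, self-contained C++ sketch of that keying scheme, using a simplified lookup struct and a plain std::map in place of SuperPMI's LightWeightMap; the names SimpleConstLookup, recordUpdateEntryPointForTailCall and replayUpdateEntryPointForTailCall are illustrative only and do not appear in the patch.

#include <cstdint>
#include <cstdio>
#include <map>

// Simplified stand-in for CORINFO_CONST_LOOKUP: an access kind plus an address.
enum SimpleAccessType { IAT_VALUE, IAT_PVALUE, IAT_PPVALUE };

struct SimpleConstLookup
{
    SimpleAccessType accessType;
    uint64_t         addr;

    bool operator<(const SimpleConstLookup& other) const
    {
        return (accessType != other.accessType) ? (accessType < other.accessType)
                                                : (addr < other.addr);
    }
};

// Collection side: key on the original entry point, store the updated one.
static std::map<SimpleConstLookup, SimpleConstLookup> g_updateEntryPointMap;

void recordUpdateEntryPointForTailCall(const SimpleConstLookup& origEntryPoint,
                                       const SimpleConstLookup& newEntryPoint)
{
    g_updateEntryPointMap[origEntryPoint] = newEntryPoint;
}

// Replay side: look the original value up and overwrite the caller's lookup in place,
// mirroring what repUpdateEntryPointForTailCall does in the patch above.
void replayUpdateEntryPointForTailCall(SimpleConstLookup* entryPoint)
{
    auto it = g_updateEntryPointMap.find(*entryPoint);
    if (it != g_updateEntryPointMap.end())
    {
        *entryPoint = it->second;
    }
}

int main()
{
    SimpleConstLookup orig{IAT_PVALUE, 0x1000};
    SimpleConstLookup updated{IAT_PVALUE, 0x2000};
    recordUpdateEntryPointForTailCall(orig, updated);

    SimpleConstLookup ep = orig;
    replayUpdateEntryPointForTailCall(&ep);
    printf("replayed addr: 0x%llx\n", (unsigned long long)ep.addr); // prints 0x2000
    return 0;
}

Keying on the whole (accessType, addr) pair mirrors the Agnostic_CORINFO_CONST_LOOKUP key used by the real map, so a replayed compilation resolves exactly the entry point that was rewritten during collection.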
diff --git a/src/coreclr/ToolBox/superpmi/superpmi-shim-counter/icorjitinfo.cpp b/src/coreclr/ToolBox/superpmi/superpmi-shim-counter/icorjitinfo.cpp index e967975ae244a..94325c48f208d 100644 --- a/src/coreclr/ToolBox/superpmi/superpmi-shim-counter/icorjitinfo.cpp +++ b/src/coreclr/ToolBox/superpmi/superpmi-shim-counter/icorjitinfo.cpp @@ -1250,6 +1250,13 @@ bool interceptor_ICJI::notifyInstructionSetUsage( return original_ICorJitInfo->notifyInstructionSetUsage(instructionSet, supportEnabled); } +void interceptor_ICJI::updateEntryPointForTailCall( + CORINFO_CONST_LOOKUP* entryPoint) +{ + mcs->AddCall("updateEntryPointForTailCall"); + original_ICorJitInfo->updateEntryPointForTailCall(entryPoint); +} + void interceptor_ICJI::allocMem( AllocMemArgs* pArgs) { diff --git a/src/coreclr/ToolBox/superpmi/superpmi-shim-simple/icorjitinfo.cpp b/src/coreclr/ToolBox/superpmi/superpmi-shim-simple/icorjitinfo.cpp index 8c982934239fe..a06ac0857ce30 100644 --- a/src/coreclr/ToolBox/superpmi/superpmi-shim-simple/icorjitinfo.cpp +++ b/src/coreclr/ToolBox/superpmi/superpmi-shim-simple/icorjitinfo.cpp @@ -1094,6 +1094,12 @@ bool interceptor_ICJI::notifyInstructionSetUsage( return original_ICorJitInfo->notifyInstructionSetUsage(instructionSet, supportEnabled); } +void interceptor_ICJI::updateEntryPointForTailCall( + CORINFO_CONST_LOOKUP* entryPoint) +{ + original_ICorJitInfo->updateEntryPointForTailCall(entryPoint); +} + void interceptor_ICJI::allocMem( AllocMemArgs* pArgs) { diff --git a/src/coreclr/ToolBox/superpmi/superpmi/icorjitinfo.cpp b/src/coreclr/ToolBox/superpmi/superpmi/icorjitinfo.cpp index cac9ce2b0f871..29ed9e03df363 100644 --- a/src/coreclr/ToolBox/superpmi/superpmi/icorjitinfo.cpp +++ b/src/coreclr/ToolBox/superpmi/superpmi/icorjitinfo.cpp @@ -1556,6 +1556,12 @@ bool MyICJI::notifyInstructionSetUsage(CORINFO_InstructionSet instructionSet, bo return supported; } +void MyICJI::updateEntryPointForTailCall(CORINFO_CONST_LOOKUP* entryPoint) +{ + jitInstance->mc->cr->AddCall("updateEntryPointForTailCall"); + jitInstance->mc->repUpdateEntryPointForTailCall(entryPoint); +} + // Stuff directly on ICorJitInfo // Returns extended flags for a particular compilation instance. diff --git a/src/coreclr/inc/corinfo.h b/src/coreclr/inc/corinfo.h index 0db622f21adb4..1abb2077ab762 100644 --- a/src/coreclr/inc/corinfo.h +++ b/src/coreclr/inc/corinfo.h @@ -3164,6 +3164,13 @@ class ICorDynamicInfo : public ICorStaticInfo CORINFO_InstructionSet instructionSet, bool supportEnabled ) = 0; + + // Notify EE that JIT needs an entry-point that is tail-callable. + // This is used for AOT on x64 to support delay loaded fast tailcalls. + // Normally the indirection cell is retrieved from the return address, + // but for tailcalls, the contract is that JIT leaves the indirection cell in + // a register during tailcall. 
+ virtual void updateEntryPointForTailCall(CORINFO_CONST_LOOKUP* entryPoint) = 0; }; /**********************************************************************************/ diff --git a/src/coreclr/inc/icorjitinfoimpl_generated.h b/src/coreclr/inc/icorjitinfoimpl_generated.h index 1c470cf1fdd51..4cb3e3f012d92 100644 --- a/src/coreclr/inc/icorjitinfoimpl_generated.h +++ b/src/coreclr/inc/icorjitinfoimpl_generated.h @@ -635,6 +635,9 @@ bool notifyInstructionSetUsage( CORINFO_InstructionSet instructionSet, bool supportEnabled) override; +void updateEntryPointForTailCall( + CORINFO_CONST_LOOKUP* entryPoint) override; + void allocMem( AllocMemArgs* pArgs) override; diff --git a/src/coreclr/inc/jiteeversionguid.h b/src/coreclr/inc/jiteeversionguid.h index 8f99d9b4bb860..a7763f7835b27 100644 --- a/src/coreclr/inc/jiteeversionguid.h +++ b/src/coreclr/inc/jiteeversionguid.h @@ -43,12 +43,12 @@ typedef const GUID *LPCGUID; #define GUID_DEFINED #endif // !GUID_DEFINED -constexpr GUID JITEEVersionIdentifier = { /* 017b4b2e-80e1-41eb-afc3-f6f643df6bbc */ - 0x017b4b2e, - 0x80e1, - 0x41eb, - {0xaf, 0xc3, 0xf6, 0xf6, 0x43, 0xdf, 0x6b, 0xbc} -}; +constexpr GUID JITEEVersionIdentifier = { /* 7374274c-5cb5-4c41-872e-01f438ac1548 */ + 0x7374274c, + 0x5cb5, + 0x4c41, + { 0x87, 0x2e, 0x1, 0xf4, 0x38, 0xac, 0x15, 0x48 } + }; ////////////////////////////////////////////////////////////////////////////////////////////////////////// // diff --git a/src/coreclr/jit/ICorJitInfo_API_names.h b/src/coreclr/jit/ICorJitInfo_API_names.h index fec125b78ae06..39e74fc1949f1 100644 --- a/src/coreclr/jit/ICorJitInfo_API_names.h +++ b/src/coreclr/jit/ICorJitInfo_API_names.h @@ -160,6 +160,7 @@ DEF_CLR_API(MethodCompileComplete) DEF_CLR_API(getTailCallHelpers) DEF_CLR_API(convertPInvokeCalliToCall) DEF_CLR_API(notifyInstructionSetUsage) +DEF_CLR_API(updateEntryPointForTailCall) DEF_CLR_API(allocMem) DEF_CLR_API(reserveUnwindInfo) DEF_CLR_API(allocUnwindInfo) diff --git a/src/coreclr/jit/ICorJitInfo_API_wrapper.hpp b/src/coreclr/jit/ICorJitInfo_API_wrapper.hpp index f902b3b99ecd6..0fade44a4fef3 100644 --- a/src/coreclr/jit/ICorJitInfo_API_wrapper.hpp +++ b/src/coreclr/jit/ICorJitInfo_API_wrapper.hpp @@ -1527,6 +1527,14 @@ bool WrapICorJitInfo::notifyInstructionSetUsage( return temp; } +void WrapICorJitInfo::updateEntryPointForTailCall( + CORINFO_CONST_LOOKUP* entryPoint) +{ + API_ENTER(updateEntryPointForTailCall); + wrapHnd->updateEntryPointForTailCall(entryPoint); + API_LEAVE(updateEntryPointForTailCall); +} + void WrapICorJitInfo::allocMem( AllocMemArgs* pArgs) { diff --git a/src/coreclr/jit/codegenarmarch.cpp b/src/coreclr/jit/codegenarmarch.cpp index d540c522af52f..7f1ea836140ee 100644 --- a/src/coreclr/jit/codegenarmarch.cpp +++ b/src/coreclr/jit/codegenarmarch.cpp @@ -2352,6 +2352,24 @@ void CodeGen::genCall(GenTreeCall* call) // Indirect fast tail calls materialize call target either in gtControlExpr or in gtCallAddr. genConsumeReg(target); } +#ifdef FEATURE_READYTORUN + else if (call->IsR2ROrVirtualStubRelativeIndir()) + { + assert(((call->IsR2RRelativeIndir()) && (call->gtEntryPoint.accessType == IAT_PVALUE)) || + ((call->IsVirtualStubRelativeIndir()) && (call->gtEntryPoint.accessType == IAT_VALUE))); + assert(call->gtControlExpr == nullptr); + + regNumber tmpReg = call->GetSingleTempReg(); + // Register where we save call address in should not be overridden by epilog. + assert((tmpReg & (RBM_INT_CALLEE_TRASH & ~RBM_LR)) == tmpReg); + + regNumber callAddrReg = + call->IsVirtualStubRelativeIndir() ? 
compiler->virtualStubParamInfo->GetReg() : REG_R2R_INDIRECT_PARAM; + GetEmitter()->emitIns_R_R(ins_Load(TYP_I_IMPL), emitActualTypeSize(TYP_I_IMPL), tmpReg, callAddrReg); + // We will use this again when emitting the jump in genCallInstruction in the epilog + call->gtRsvdRegs |= genRegMask(tmpReg); + } +#endif return; } @@ -2558,12 +2576,20 @@ void CodeGen::genCallInstruction(GenTreeCall* call) ((call->IsVirtualStubRelativeIndir()) && (call->gtEntryPoint.accessType == IAT_VALUE))); #endif // FEATURE_READYTORUN assert(call->gtControlExpr == nullptr); - assert(!call->IsTailCall()); regNumber tmpReg = call->GetSingleTempReg(); - regNumber callAddrReg = - call->IsVirtualStubRelativeIndir() ? compiler->virtualStubParamInfo->GetReg() : REG_R2R_INDIRECT_PARAM; - GetEmitter()->emitIns_R_R(ins_Load(TYP_I_IMPL), emitActualTypeSize(TYP_I_IMPL), tmpReg, callAddrReg); + // For fast tailcalls we have already loaded the call target when processing the call node. + if (!call->IsFastTailCall()) + { + regNumber callAddrReg = + call->IsVirtualStubRelativeIndir() ? compiler->virtualStubParamInfo->GetReg() : REG_R2R_INDIRECT_PARAM; + GetEmitter()->emitIns_R_R(ins_Load(TYP_I_IMPL), emitActualTypeSize(TYP_I_IMPL), tmpReg, callAddrReg); + } + else + { + // Register where we save call address in should not be overridden by epilog. + assert((tmpReg & (RBM_INT_CALLEE_TRASH & ~RBM_LR)) == tmpReg); + } // We have now generated code for gtControlExpr evaluating it into `tmpReg`. // We just need to emit "call tmpReg" in this case. diff --git a/src/coreclr/jit/codegenxarch.cpp b/src/coreclr/jit/codegenxarch.cpp index 6fcbf0da75c48..52f59f3d99593 100644 --- a/src/coreclr/jit/codegenxarch.cpp +++ b/src/coreclr/jit/codegenxarch.cpp @@ -5702,18 +5702,40 @@ void CodeGen::genCallInstruction(GenTreeCall* call X86_ARG(target_ssize_t stackA { emitter::EmitCallType type = (call->gtEntryPoint.accessType == IAT_VALUE) ? emitter::EC_FUNC_TOKEN : emitter::EC_FUNC_TOKEN_INDIR; - // clang-format off - genEmitCall(type, - methHnd, - INDEBUG_LDISASM_COMMA(sigInfo) - (void*)call->gtEntryPoint.addr - X86_ARG(argSizeForEmitter), - retSize - MULTIREG_HAS_SECOND_GC_RET_ONLY_ARG(secondRetSize), - ilOffset, - REG_NA, - call->IsFastTailCall()); - // clang-format on + if (call->IsFastTailCall() && (type == emitter::EC_FUNC_TOKEN_INDIR)) + { + // For fast tailcall with func token indir we already have the indirection cell in REG_R2R_INDIRECT_PARAM, + // so get it from there. 
+ // clang-format off + GetEmitter()->emitIns_Call( + emitter::EC_INDIR_ARD, + methHnd, + INDEBUG_LDISASM_COMMA(sigInfo) + nullptr, + 0, + retSize + MULTIREG_HAS_SECOND_GC_RET_ONLY_ARG(secondRetSize), + gcInfo.gcVarPtrSetCur, + gcInfo.gcRegGCrefSetCur, + gcInfo.gcRegByrefSetCur, + ilOffset, REG_R2R_INDIRECT_PARAM, REG_NA, 0, 0, true); + // clang-format on + } + else + { + // clang-format off + genEmitCall(type, + methHnd, + INDEBUG_LDISASM_COMMA(sigInfo) + (void*)call->gtEntryPoint.addr + X86_ARG(argSizeForEmitter), + retSize + MULTIREG_HAS_SECOND_GC_RET_ONLY_ARG(secondRetSize), + ilOffset, + REG_NA, + call->IsFastTailCall()); + // clang-format on + } } #endif else diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h index 3758c56da45f5..61c8da1088073 100644 --- a/src/coreclr/jit/compiler.h +++ b/src/coreclr/jit/compiler.h @@ -6329,9 +6329,11 @@ class Compiler GenTree* fgMorphPotentialTailCall(GenTreeCall* call); GenTree* fgGetStubAddrArg(GenTreeCall* call); + unsigned fgGetArgTabEntryParameterLclNum(GenTreeCall* call, fgArgTabEntry* argTabEntry); void fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCall* recursiveTailCall); Statement* fgAssignRecursiveCallArgToCallerParam(GenTree* arg, fgArgTabEntry* argTabEntry, + unsigned lclParamNum, BasicBlock* block, IL_OFFSETX callILOffset, Statement* tmpAssignmentInsertionPoint, diff --git a/src/coreclr/jit/gentree.h b/src/coreclr/jit/gentree.h index 53f73dbf7d190..a3c147f82f01d 100644 --- a/src/coreclr/jit/gentree.h +++ b/src/coreclr/jit/gentree.h @@ -4638,11 +4638,15 @@ struct GenTreeCall final : public GenTree return (gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT) != 0; } -#ifdef FEATURE_READYTORUN bool IsR2RRelativeIndir() const { +#ifdef FEATURE_READYTORUN return (gtCallMoreFlags & GTF_CALL_M_R2R_REL_INDIRECT) != 0; +#else + return false; +#endif } +#ifdef FEATURE_READYTORUN void setEntryPoint(const CORINFO_CONST_LOOKUP& entryPoint) { gtEntryPoint = entryPoint; diff --git a/src/coreclr/jit/lower.cpp b/src/coreclr/jit/lower.cpp index 92388eb0ea886..200095708b1e9 100644 --- a/src/coreclr/jit/lower.cpp +++ b/src/coreclr/jit/lower.cpp @@ -3759,15 +3759,19 @@ GenTree* Lowering::LowerDirectCall(GenTreeCall* call) case IAT_PVALUE: { - bool isR2RRelativeIndir = false; -#if defined(FEATURE_READYTORUN) && defined(TARGET_ARMARCH) + bool hasIndirectionCell = false; +#if defined(TARGET_ARMARCH) // Skip inserting the indirection node to load the address that is already // computed in REG_R2R_INDIRECT_PARAM as a hidden parameter. Instead during the // codegen, just load the call target from REG_R2R_INDIRECT_PARAM. - isR2RRelativeIndir = call->IsR2RRelativeIndir(); -#endif // FEATURE_READYTORUN && TARGET_ARMARCH + hasIndirectionCell = call->IsR2RRelativeIndir(); +#elif defined(TARGET_XARCH) + // For xarch we usually get the indirection cell from the return address, + // except for fast tailcalls where we do the same as ARM. + hasIndirectionCell = call->IsR2RRelativeIndir() && call->IsFastTailCall(); +#endif - if (!isR2RRelativeIndir) + if (!hasIndirectionCell) { // Non-virtual direct calls to addresses accessed by // a single indirection. @@ -4834,15 +4838,12 @@ GenTree* Lowering::LowerVirtualStubCall(GenTreeCall* call) } else { - bool shouldOptimizeVirtualStubCall = false; #if defined(FEATURE_READYTORUN) && defined(TARGET_ARMARCH) // Skip inserting the indirection node to load the address that is already // computed in REG_R2R_INDIRECT_PARAM as a hidden parameter. 
Instead during the // codegen, just load the call target from REG_R2R_INDIRECT_PARAM. - // However, for tail calls, the call target is always computed in RBM_FASTTAILCALL_TARGET - // and so do not optimize virtual stub calls for such cases. - shouldOptimizeVirtualStubCall = !call->IsTailCall(); + shouldOptimizeVirtualStubCall = true; #endif // FEATURE_READYTORUN && TARGET_ARMARCH if (!shouldOptimizeVirtualStubCall) diff --git a/src/coreclr/jit/lsraarmarch.cpp b/src/coreclr/jit/lsraarmarch.cpp index 1b439f0a4b783..c37e4a328d731 100644 --- a/src/coreclr/jit/lsraarmarch.cpp +++ b/src/coreclr/jit/lsraarmarch.cpp @@ -178,12 +178,22 @@ int LinearScan::BuildCall(GenTreeCall* call) { // Fast tail call - make sure that call target is always computed in volatile registers // that will not be overridden by epilog sequence. - ctrlExprCandidates = RBM_INT_CALLEE_TRASH; + ctrlExprCandidates = allRegs(TYP_INT) & RBM_INT_CALLEE_TRASH; + assert(ctrlExprCandidates != RBM_NONE); } } else if (call->IsR2ROrVirtualStubRelativeIndir()) { - buildInternalIntRegisterDefForNode(call); + // For R2R and VSD we have stub address in REG_R2R_INDIRECT_PARAM + // and will load call address into the temp register from this register. + regMaskTP candidates = RBM_NONE; + if (call->IsFastTailCall()) + { + candidates = allRegs(TYP_INT) & RBM_INT_CALLEE_TRASH; + assert(candidates != RBM_NONE); + } + + buildInternalIntRegisterDefForNode(call, candidates); } #ifdef TARGET_ARM else diff --git a/src/coreclr/jit/lsraxarch.cpp b/src/coreclr/jit/lsraxarch.cpp index 06e46b94e45e6..cda91bdd6a1f1 100644 --- a/src/coreclr/jit/lsraxarch.cpp +++ b/src/coreclr/jit/lsraxarch.cpp @@ -1237,7 +1237,7 @@ int LinearScan::BuildCall(GenTreeCall* call) // If it is a fast tail call, it is already preferenced to use RAX. // Therefore, no need set src candidates on call tgt again. - if (compFeatureVarArg() && call->IsVarargs() && callHasFloatRegArgs && !call->IsFastTailCall()) + if (compFeatureVarArg() && call->IsVarargs() && callHasFloatRegArgs && (ctrlExprCandidates == RBM_NONE)) { // Don't assign the call target to any of the argument registers because // we will use them to also pass floating point arguments as required diff --git a/src/coreclr/jit/morph.cpp b/src/coreclr/jit/morph.cpp index a5fab4626e2e5..d1be328073577 100644 --- a/src/coreclr/jit/morph.cpp +++ b/src/coreclr/jit/morph.cpp @@ -2546,10 +2546,21 @@ void Compiler::fgInitArgInfo(GenTreeCall* call) call->gtCallType = CT_HELPER; call->gtCallMethHnd = eeFindHelper(CORINFO_HELP_PINVOKE_CALLI); } -#if defined(FEATURE_READYTORUN) && defined(TARGET_ARMARCH) - // For arm, we dispatch code same as VSD using virtualStubParamInfo->GetReg() +#if defined(FEATURE_READYTORUN) + // For arm/arm64, we dispatch code same as VSD using virtualStubParamInfo->GetReg() // for indirection cell address, which ZapIndirectHelperThunk expects. - if (call->IsR2RRelativeIndir()) + // For x64/x86 we use return address to get the indirection cell by disassembling the call site. + // That is not possible for fast tailcalls, so we only need this logic for fast tailcalls on xarch. + // Note that we call this before we know if something will be a fast tailcall or not. + // That's ok; after making something a tailcall, we will invalidate this information + // and reconstruct it if necessary. The tailcalling decision does not change since + // this is a non-standard arg in a register. 
+ bool needsIndirectionCell = call->IsR2RRelativeIndir() && !call->IsDelegateInvoke(); +#if defined(TARGET_XARCH) + needsIndirectionCell &= call->IsFastTailCall(); +#endif + + if (needsIndirectionCell) { assert(call->gtEntryPoint.addr != nullptr); @@ -2574,8 +2585,7 @@ void Compiler::fgInitArgInfo(GenTreeCall* call) nonStandardArgs.Add(indirectCellAddress, indirectCellAddress->GetRegNum(), NonStandardArgKind::R2RIndirectionCell); } - -#endif // FEATURE_READYTORUN && TARGET_ARMARCH +#endif // Allocate the fgArgInfo for the call node; // @@ -7421,6 +7431,21 @@ GenTree* Compiler::fgMorphPotentialTailCall(GenTreeCall* call) } #endif + // For R2R we might need a different entry point for this call if we are doing a tailcall. + // The reason is that the normal delay load helper uses the return address to find the indirection + // cell in xarch, but now the JIT is expected to leave the indirection cell in REG_R2R_INDIRECT_PARAM: + // We optimize delegate invocations manually in the JIT so skip this for those. + if (call->IsR2RRelativeIndir() && canFastTailCall && !fastTailCallToLoop && !call->IsDelegateInvoke()) + { + info.compCompHnd->updateEntryPointForTailCall(&call->gtEntryPoint); + +#ifdef TARGET_XARCH + // We have already computed arg info to make the fast tailcall decision, but on X64 we now + // have to pass the indirection cell, so redo arg info. + call->ResetArgInfo(); +#endif + } + // If this block has a flow successor, make suitable updates. // BasicBlock* const nextBlock = compCurBB->GetUniqueSucc(); @@ -8587,6 +8612,39 @@ GenTree* Compiler::fgGetStubAddrArg(GenTreeCall* call) return stubAddrArg; } +//------------------------------------------------------------------------------ +// fgGetArgTabEntryParameterLclNum : Get the lcl num for the parameter that +// corresponds to the argument to a recursive call. +// +// Notes: +// Due to non-standard args this is not just fgArgTabEntry::argNum. +// For example, in R2R compilations we will have added a non-standard +// arg for the R2R indirection cell. +// +// Arguments: +// argTabEntry - the arg +// +unsigned Compiler::fgGetArgTabEntryParameterLclNum(GenTreeCall* call, fgArgTabEntry* argTabEntry) +{ + fgArgInfo* argInfo = call->fgArgInfo; + unsigned argCount = argInfo->ArgCount(); + fgArgTabEntry** argTable = argInfo->ArgTable(); + + unsigned numToRemove = 0; + for (unsigned i = 0; i < argCount; i++) + { + fgArgTabEntry* arg = argTable[i]; + // Late added args add extra args that do not map to IL parameters and that we should not reassign. + if (!arg->isNonStandard() || !arg->isNonStandardArgAddedLate()) + continue; + + if (arg->argNum < argTabEntry->argNum) + numToRemove++; + } + + return argTabEntry->argNum - numToRemove; +} + //------------------------------------------------------------------------------ // fgMorphRecursiveFastTailCallIntoLoop : Transform a recursive fast tail call into a loop. // @@ -8678,13 +8736,20 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa { // This is an actual argument that needs to be assigned to the corresponding caller parameter. 
fgArgTabEntry* curArgTabEntry = gtArgEntryByArgNum(recursiveTailCall, earlyArgIndex); - Statement* paramAssignStmt = - fgAssignRecursiveCallArgToCallerParam(earlyArg, curArgTabEntry, block, callILOffset, - tmpAssignmentInsertionPoint, paramAssignmentInsertionPoint); - if ((tmpAssignmentInsertionPoint == lastStmt) && (paramAssignStmt != nullptr)) + // Late-added non-standard args are extra args that are not passed as locals, so skip those + if (!curArgTabEntry->isNonStandard() || !curArgTabEntry->isNonStandardArgAddedLate()) { - // All temp assignments will happen before the first param assignment. - tmpAssignmentInsertionPoint = paramAssignStmt; + Statement* paramAssignStmt = + fgAssignRecursiveCallArgToCallerParam(earlyArg, curArgTabEntry, + fgGetArgTabEntryParameterLclNum(recursiveTailCall, + curArgTabEntry), + block, callILOffset, tmpAssignmentInsertionPoint, + paramAssignmentInsertionPoint); + if ((tmpAssignmentInsertionPoint == lastStmt) && (paramAssignStmt != nullptr)) + { + // All temp assignments will happen before the first param assignment. + tmpAssignmentInsertionPoint = paramAssignStmt; + } } } } @@ -8698,14 +8763,21 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa // A late argument is an actual argument that needs to be assigned to the corresponding caller's parameter. GenTree* lateArg = use.GetNode(); fgArgTabEntry* curArgTabEntry = gtArgEntryByLateArgIndex(recursiveTailCall, lateArgIndex); - Statement* paramAssignStmt = - fgAssignRecursiveCallArgToCallerParam(lateArg, curArgTabEntry, block, callILOffset, - tmpAssignmentInsertionPoint, paramAssignmentInsertionPoint); - - if ((tmpAssignmentInsertionPoint == lastStmt) && (paramAssignStmt != nullptr)) + // Late-added non-standard args are extra args that are not passed as locals, so skip those + if (!curArgTabEntry->isNonStandard() || !curArgTabEntry->isNonStandardArgAddedLate()) { - // All temp assignments will happen before the first param assignment. - tmpAssignmentInsertionPoint = paramAssignStmt; + Statement* paramAssignStmt = + fgAssignRecursiveCallArgToCallerParam(lateArg, curArgTabEntry, + fgGetArgTabEntryParameterLclNum(recursiveTailCall, + curArgTabEntry), + block, callILOffset, tmpAssignmentInsertionPoint, + paramAssignmentInsertionPoint); + + if ((tmpAssignmentInsertionPoint == lastStmt) && (paramAssignStmt != nullptr)) + { + // All temp assignments will happen before the first param assignment. + tmpAssignmentInsertionPoint = paramAssignStmt; + } } lateArgIndex++; } @@ -8801,6 +8873,7 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa // Arguments: // arg - argument to assign // argTabEntry - argument table entry corresponding to arg +// lclParamNum - the lcl num of the parameter // block --- basic block the call is in // callILOffset - IL offset of the call // tmpAssignmentInsertionPoint - tree before which temp assignment should be inserted (if necessary) @@ -8811,6 +8884,7 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa Statement* Compiler::fgAssignRecursiveCallArgToCallerParam(GenTree* arg, fgArgTabEntry* argTabEntry, + unsigned lclParamNum, BasicBlock* block, IL_OFFSETX callILOffset, Statement* tmpAssignmentInsertionPoint, @@ -8820,7 +8894,6 @@ Statement* Compiler::fgAssignRecursiveCallArgToCallerParam(GenTree* arg, // some argument trees may reference parameters directly. 
GenTree* argInTemp = nullptr; - unsigned originalArgNum = argTabEntry->argNum; bool needToAssignParameter = true; // TODO-CQ: enable calls with struct arguments passed in registers. @@ -8840,7 +8913,7 @@ Statement* Compiler::fgAssignRecursiveCallArgToCallerParam(GenTree* arg, // The argument is a non-parameter local so it doesn't need to be assigned to a temp. argInTemp = arg; } - else if (lclNum == originalArgNum) + else if (lclNum == lclParamNum) { // The argument is the same parameter local that we were about to assign so // we can skip the assignment. @@ -8871,9 +8944,9 @@ Statement* Compiler::fgAssignRecursiveCallArgToCallerParam(GenTree* arg, } // Now assign the temp to the parameter. - LclVarDsc* paramDsc = lvaTable + originalArgNum; + LclVarDsc* paramDsc = lvaTable + lclParamNum; assert(paramDsc->lvIsParam); - GenTree* paramDest = gtNewLclvNode(originalArgNum, paramDsc->lvType); + GenTree* paramDest = gtNewLclvNode(lclParamNum, paramDsc->lvType); GenTree* paramAssignNode = gtNewAssignNode(paramDest, argInTemp); paramAssignStmt = gtNewStmt(paramAssignNode, callILOffset); @@ -18078,7 +18151,7 @@ bool Compiler::fgCheckStmtAfterTailCall() // bool Compiler::fgCanTailCallViaJitHelper() { -#if !defined(TARGET_X86) || defined(UNIX_X86_ABI) +#if !defined(TARGET_X86) || defined(UNIX_X86_ABI) || defined(FEATURE_READYTORUN) // On anything except windows X86 we have no faster mechanism available. return false; #else diff --git a/src/coreclr/jit/targetamd64.h b/src/coreclr/jit/targetamd64.h index 76d4d903884a7..77df2d8547355 100644 --- a/src/coreclr/jit/targetamd64.h +++ b/src/coreclr/jit/targetamd64.h @@ -222,6 +222,10 @@ #define REG_DEFAULT_HELPER_CALL_TARGET REG_RAX #define RBM_DEFAULT_HELPER_CALL_TARGET RBM_RAX + #define REG_R2R_INDIRECT_PARAM REG_RAX // Indirection cell for R2R fast tailcall + // See ImportThunk.Kind.DelayLoadHelperWithExistingIndirectionCell in crossgen2. + #define RBM_R2R_INDIRECT_PARAM RBM_RAX + // GenericPInvokeCalliHelper VASigCookie Parameter #define REG_PINVOKE_COOKIE_PARAM REG_R11 #define RBM_PINVOKE_COOKIE_PARAM RBM_R11 diff --git a/src/coreclr/jit/targetx86.h b/src/coreclr/jit/targetx86.h index 63c7f69a45c2d..776c9deece36e 100644 --- a/src/coreclr/jit/targetx86.h +++ b/src/coreclr/jit/targetx86.h @@ -163,6 +163,9 @@ #define REG_JUMP_THUNK_PARAM REG_EAX #define RBM_JUMP_THUNK_PARAM RBM_EAX + #define REG_R2R_INDIRECT_PARAM REG_EAX // Indirection cell for R2R fast tailcall, not currently used in x86. 
+ #define RBM_R2R_INDIRECT_PARAM RBM_EAX + #if NOGC_WRITE_BARRIERS #define REG_WRITE_BARRIER REG_EDX #define RBM_WRITE_BARRIER RBM_EDX diff --git a/src/coreclr/tools/Common/JitInterface/CorInfoBase.cs b/src/coreclr/tools/Common/JitInterface/CorInfoBase.cs index 6fd7ca33bfb8d..5e5c89774b7ca 100644 --- a/src/coreclr/tools/Common/JitInterface/CorInfoBase.cs +++ b/src/coreclr/tools/Common/JitInterface/CorInfoBase.cs @@ -2317,6 +2317,20 @@ static byte _notifyInstructionSetUsage(IntPtr thisHandle, IntPtr* ppException, I } } + [UnmanagedCallersOnly] + static void _updateEntryPointForTailCall(IntPtr thisHandle, IntPtr* ppException, CORINFO_CONST_LOOKUP* entryPoint) + { + var _this = GetThis(thisHandle); + try + { + _this.updateEntryPointForTailCall(ref *entryPoint); + } + catch (Exception ex) + { + *ppException = _this.AllocException(ex); + } + } + [UnmanagedCallersOnly] static void _allocMem(IntPtr thisHandle, IntPtr* ppException, AllocMemArgs* pArgs) { @@ -2567,7 +2581,7 @@ static byte _doesFieldBelongToClass(IntPtr thisHandle, IntPtr* ppException, CORI static IntPtr GetUnmanagedCallbacks() { - void** callbacks = (void**)Marshal.AllocCoTaskMem(sizeof(IntPtr) * 173); + void** callbacks = (void**)Marshal.AllocCoTaskMem(sizeof(IntPtr) * 174); callbacks[0] = (delegate* unmanaged)&_isJitIntrinsic; callbacks[1] = (delegate* unmanaged)&_getMethodAttribs; @@ -2725,23 +2739,24 @@ static IntPtr GetUnmanagedCallbacks() callbacks[153] = (delegate* unmanaged)&_getTailCallHelpers; callbacks[154] = (delegate* unmanaged)&_convertPInvokeCalliToCall; callbacks[155] = (delegate* unmanaged)&_notifyInstructionSetUsage; - callbacks[156] = (delegate* unmanaged)&_allocMem; - callbacks[157] = (delegate* unmanaged)&_reserveUnwindInfo; - callbacks[158] = (delegate* unmanaged)&_allocUnwindInfo; - callbacks[159] = (delegate* unmanaged)&_allocGCInfo; - callbacks[160] = (delegate* unmanaged)&_setEHcount; - callbacks[161] = (delegate* unmanaged)&_setEHinfo; - callbacks[162] = (delegate* unmanaged)&_logMsg; - callbacks[163] = (delegate* unmanaged)&_doAssert; - callbacks[164] = (delegate* unmanaged)&_reportFatalError; - callbacks[165] = (delegate* unmanaged)&_getPgoInstrumentationResults; - callbacks[166] = (delegate* unmanaged)&_allocPgoInstrumentationBySchema; - callbacks[167] = (delegate* unmanaged)&_recordCallSite; - callbacks[168] = (delegate* unmanaged)&_recordRelocation; - callbacks[169] = (delegate* unmanaged)&_getRelocTypeHint; - callbacks[170] = (delegate* unmanaged)&_getExpectedTargetArchitecture; - callbacks[171] = (delegate* unmanaged)&_getJitFlags; - callbacks[172] = (delegate* unmanaged)&_doesFieldBelongToClass; + callbacks[156] = (delegate* unmanaged)&_updateEntryPointForTailCall; + callbacks[157] = (delegate* unmanaged)&_allocMem; + callbacks[158] = (delegate* unmanaged)&_reserveUnwindInfo; + callbacks[159] = (delegate* unmanaged)&_allocUnwindInfo; + callbacks[160] = (delegate* unmanaged)&_allocGCInfo; + callbacks[161] = (delegate* unmanaged)&_setEHcount; + callbacks[162] = (delegate* unmanaged)&_setEHinfo; + callbacks[163] = (delegate* unmanaged)&_logMsg; + callbacks[164] = (delegate* unmanaged)&_doAssert; + callbacks[165] = (delegate* unmanaged)&_reportFatalError; + callbacks[166] = (delegate* unmanaged)&_getPgoInstrumentationResults; + callbacks[167] = (delegate* unmanaged)&_allocPgoInstrumentationBySchema; + callbacks[168] = (delegate* unmanaged)&_recordCallSite; + callbacks[169] = (delegate* unmanaged)&_recordRelocation; + callbacks[170] = (delegate* unmanaged)&_getRelocTypeHint; + callbacks[171] = 
(delegate* unmanaged)&_getExpectedTargetArchitecture; + callbacks[172] = (delegate* unmanaged)&_getJitFlags; + callbacks[173] = (delegate* unmanaged)&_doesFieldBelongToClass; return (IntPtr)callbacks; } diff --git a/src/coreclr/tools/Common/JitInterface/CorInfoImpl.cs b/src/coreclr/tools/Common/JitInterface/CorInfoImpl.cs index 7bdfabc0f5c29..84ee78fb0f243 100644 --- a/src/coreclr/tools/Common/JitInterface/CorInfoImpl.cs +++ b/src/coreclr/tools/Common/JitInterface/CorInfoImpl.cs @@ -3216,7 +3216,7 @@ private bool getTailCallHelpers(ref CORINFO_RESOLVED_TOKEN callToken, CORINFO_SI // Slow tailcalls are not supported yet // https://github.com/dotnet/runtime/issues/35423 #if READYTORUN - throw new NotImplementedException(nameof(getTailCallHelpers)); + throw new RequiresRuntimeJitException(nameof(getTailCallHelpers)); #else return false; #endif diff --git a/src/coreclr/tools/Common/JitInterface/ThunkGenerator/ThunkInput.txt b/src/coreclr/tools/Common/JitInterface/ThunkGenerator/ThunkInput.txt index da802c872bb28..b981056c834ba 100644 --- a/src/coreclr/tools/Common/JitInterface/ThunkGenerator/ThunkInput.txt +++ b/src/coreclr/tools/Common/JitInterface/ThunkGenerator/ThunkInput.txt @@ -309,6 +309,7 @@ FUNCTIONS bool getTailCallHelpers(CORINFO_RESOLVED_TOKEN* callToken, CORINFO_SIG_INFO* sig, CORINFO_GET_TAILCALL_HELPERS_FLAGS flags, CORINFO_TAILCALL_HELPERS* pResult); bool convertPInvokeCalliToCall(CORINFO_RESOLVED_TOKEN * pResolvedToken, bool mustConvert); bool notifyInstructionSetUsage(CORINFO_InstructionSet instructionSet,bool supportEnabled); + void updateEntryPointForTailCall(CORINFO_CONST_LOOKUP* entryPoint); void allocMem(AllocMemArgs* pArgs); void reserveUnwindInfo(bool isFunclet, bool isColdCode, uint32_t unwindSize) void allocUnwindInfo(uint8_t* pHotCode, uint8_t* pColdCode, uint32_t startOffset, uint32_t endOffset, uint32_t unwindSize, uint8_t* pUnwindBlock, CorJitFuncKind funcKind) diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/DelayLoadHelperImport.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/DelayLoadHelperImport.cs index d921697870cce..1b2674ab009bf 100644 --- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/DelayLoadHelperImport.cs +++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/DelayLoadHelperImport.cs @@ -17,6 +17,7 @@ public class DelayLoadHelperImport : Import private readonly ReadyToRunHelper _helper; private readonly bool _useVirtualCall; + private readonly bool _useJumpableStub; private readonly ImportThunk _delayLoadHelper; @@ -26,12 +27,14 @@ public DelayLoadHelperImport( ReadyToRunHelper helper, Signature instanceSignature, bool useVirtualCall = false, + bool useJumpableStub = false, MethodDesc callingMethod = null) : base(importSectionNode, instanceSignature, callingMethod) { _helper = helper; _useVirtualCall = useVirtualCall; - _delayLoadHelper = factory.ImportThunk(helper, importSectionNode, useVirtualCall); + _useJumpableStub = useJumpableStub; + _delayLoadHelper = factory.ImportThunk(helper, importSectionNode, useVirtualCall, useJumpableStub); } public override void AppendMangledName(NameMangler nameMangler, Utf8StringBuilder sb) @@ -41,6 +44,10 @@ public override void AppendMangledName(NameMangler nameMangler, Utf8StringBuilde { sb.Append("[VSD] "); } + if (_useJumpableStub) + { + sb.Append("[JMP] "); + } sb.Append(_helper.ToString()); sb.Append(") -> "); 
ImportSignature.AppendMangledName(nameMangler, sb); @@ -79,7 +86,11 @@ public override IEnumerable GetStaticDependencies(NodeFacto public override int CompareToImpl(ISortableNode other, CompilerComparer comparer) { DelayLoadHelperImport otherNode = (DelayLoadHelperImport)other; - int result = _useVirtualCall.CompareTo(otherNode._useVirtualCall); + int result = _useJumpableStub.CompareTo(otherNode._useJumpableStub); + if (result != 0) + return result; + + result = _useVirtualCall.CompareTo(otherNode._useVirtualCall); if (result != 0) return result; diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/DelayLoadHelperMethodImport.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/DelayLoadHelperMethodImport.cs index 3bf386801c68e..d034ba56d87b2 100644 --- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/DelayLoadHelperMethodImport.cs +++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/DelayLoadHelperMethodImport.cs @@ -30,7 +30,7 @@ public DelayLoadHelperMethodImport( bool useInstantiatingStub, Signature instanceSignature, MethodDesc callingMethod = null) - : base(factory, importSectionNode, helper, instanceSignature, useVirtualCall, callingMethod) + : base(factory, importSectionNode, helper, instanceSignature, useVirtualCall, useJumpableStub: false, callingMethod) { _method = method; _useInstantiatingStub = useInstantiatingStub; diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/DelayLoadMethodImport.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/DelayLoadMethodImport.cs index 47451cadd92f7..dd0d49c8db19c 100644 --- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/DelayLoadMethodImport.cs +++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/DelayLoadMethodImport.cs @@ -12,14 +12,14 @@ namespace ILCompiler.DependencyAnalysis.ReadyToRun public class DelayLoadMethodImport : DelayLoadHelperImport, IMethodNode { private readonly MethodWithGCInfo _localMethod; - private readonly MethodWithToken _method; public DelayLoadMethodImport( NodeFactory factory, ReadyToRunFixupKind fixupKind, MethodWithToken method, MethodWithGCInfo localMethod, - bool isInstantiatingStub) + bool isInstantiatingStub, + bool isJump) : base( factory, factory.MethodImports, @@ -27,13 +27,15 @@ public DelayLoadMethodImport( factory.MethodSignature( fixupKind, method, - isInstantiatingStub)) + isInstantiatingStub), + useJumpableStub: isJump) { _localMethod = localMethod; - _method = method; + MethodWithToken = method; } - public MethodDesc Method => _method.Method; + public MethodWithToken MethodWithToken { get; } + public MethodDesc Method => MethodWithToken.Method; public MethodWithGCInfo MethodCodeNode => _localMethod; public override int ClassCode => 459923351; diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/ImportThunk.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/ImportThunk.cs index 44525421f9b28..3895244c00197 100644 --- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/ImportThunk.cs +++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/ImportThunk.cs @@ -18,6 +18,7 @@ enum Kind Eager, Lazy, DelayLoadHelper, + DelayLoadHelperWithExistingIndirectionCell, 
VirtualStubDispatch, } @@ -31,7 +32,7 @@ enum Kind /// Import thunks are used to call a runtime-provided helper which fixes up an indirection cell in a particular /// import section. Optionally they may also contain a relocation for a specific indirection cell to fix up. /// - public ImportThunk(NodeFactory factory, ReadyToRunHelper helperId, ImportSectionNode containingImportSection, bool useVirtualCall) + public ImportThunk(NodeFactory factory, ReadyToRunHelper helperId, ImportSectionNode containingImportSection, bool useVirtualCall, bool useJumpableStub) { _helperCell = factory.GetReadyToRunHelperCell(helperId); _containingImportSection = containingImportSection; @@ -40,6 +41,10 @@ public ImportThunk(NodeFactory factory, ReadyToRunHelper helperId, ImportSection { _thunkKind = Kind.VirtualStubDispatch; } + else if (useJumpableStub) + { + _thunkKind = Kind.DelayLoadHelperWithExistingIndirectionCell; + } else if (helperId == ReadyToRunHelper.GetString) { _thunkKind = Kind.Lazy; @@ -61,7 +66,7 @@ public override void AppendMangledName(NameMangler nameMangler, Utf8StringBuilde { sb.Append("DelayLoadHelper->"); _helperCell.AppendMangledName(nameMangler, sb); - sb.Append($"(ImportSection:{_containingImportSection.Name})"); + sb.Append($"(ImportSection:{_containingImportSection.Name},Kind:{_thunkKind})"); } protected override string GetName(NodeFactory factory) diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/MethodFixupSignature.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/MethodFixupSignature.cs index eaeddd907ea5a..1de6cfcfa9251 100644 --- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/MethodFixupSignature.cs +++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/MethodFixupSignature.cs @@ -21,8 +21,6 @@ public class MethodFixupSignature : Signature private readonly MethodWithToken _method; - private readonly bool _isInstantiatingStub; - public MethodFixupSignature( ReadyToRunFixupKind fixupKind, MethodWithToken method, @@ -30,7 +28,7 @@ public MethodFixupSignature( { _fixupKind = fixupKind; _method = method; - _isInstantiatingStub = isInstantiatingStub; + IsInstantiatingStub = isInstantiatingStub; // Ensure types in signature are loadable and resolvable, otherwise we'll fail later while emitting the signature CompilerTypeSystemContext compilerContext = (CompilerTypeSystemContext)method.Method.Context; @@ -42,6 +40,7 @@ public MethodFixupSignature( } public MethodDesc Method => _method.Method; + public bool IsInstantiatingStub { get; } public override int ClassCode => 150063499; @@ -61,7 +60,7 @@ public override ObjectData GetData(NodeFactory factory, bool relocsOnly = false) // Optimize some of the fixups into a more compact form ReadyToRunFixupKind fixupKind = _fixupKind; bool optimized = false; - if (!_method.Unboxing && !_isInstantiatingStub && _method.ConstrainedType == null && + if (!_method.Unboxing && !IsInstantiatingStub && _method.ConstrainedType == null && fixupKind == ReadyToRunFixupKind.MethodEntry) { if (!_method.Method.HasInstantiation && !_method.Method.OwningType.HasInstantiation && !_method.Method.OwningType.IsArray) @@ -111,7 +110,7 @@ public override ObjectData GetData(NodeFactory factory, bool relocsOnly = false) } else { - dataBuilder.EmitMethodSignature(method, enforceDefEncoding: false, enforceOwningType: false, innerContext, _isInstantiatingStub); + dataBuilder.EmitMethodSignature(method, enforceDefEncoding: 
diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/MethodFixupSignature.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/MethodFixupSignature.cs
index eaeddd907ea5a..1de6cfcfa9251 100644
--- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/MethodFixupSignature.cs
+++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/MethodFixupSignature.cs
@@ -21,8 +21,6 @@ public class MethodFixupSignature : Signature
 
         private readonly MethodWithToken _method;
 
-        private readonly bool _isInstantiatingStub;
-
         public MethodFixupSignature(
             ReadyToRunFixupKind fixupKind,
             MethodWithToken method,
@@ -30,7 +28,7 @@ public MethodFixupSignature(
         {
             _fixupKind = fixupKind;
             _method = method;
-            _isInstantiatingStub = isInstantiatingStub;
+            IsInstantiatingStub = isInstantiatingStub;
 
             // Ensure types in signature are loadable and resolvable, otherwise we'll fail later while emitting the signature
             CompilerTypeSystemContext compilerContext = (CompilerTypeSystemContext)method.Method.Context;
@@ -42,6 +40,7 @@ public MethodFixupSignature(
         }
 
         public MethodDesc Method => _method.Method;
+        public bool IsInstantiatingStub { get; }
 
         public override int ClassCode => 150063499;
 
@@ -61,7 +60,7 @@ public override ObjectData GetData(NodeFactory factory, bool relocsOnly = false)
             // Optimize some of the fixups into a more compact form
             ReadyToRunFixupKind fixupKind = _fixupKind;
             bool optimized = false;
-            if (!_method.Unboxing && !_isInstantiatingStub && _method.ConstrainedType == null &&
+            if (!_method.Unboxing && !IsInstantiatingStub && _method.ConstrainedType == null &&
                 fixupKind == ReadyToRunFixupKind.MethodEntry)
             {
                 if (!_method.Method.HasInstantiation && !_method.Method.OwningType.HasInstantiation && !_method.Method.OwningType.IsArray)
@@ -111,7 +110,7 @@ public override ObjectData GetData(NodeFactory factory, bool relocsOnly = false)
             }
             else
             {
-                dataBuilder.EmitMethodSignature(method, enforceDefEncoding: false, enforceOwningType: false, innerContext, _isInstantiatingStub);
+                dataBuilder.EmitMethodSignature(method, enforceDefEncoding: false, enforceOwningType: false, innerContext, IsInstantiatingStub);
             }
 
             return dataBuilder.ToObjectData();
@@ -122,7 +121,7 @@ public override void AppendMangledName(NameMangler nameMangler, Utf8StringBuilde
             sb.Append(nameMangler.CompilationUnitPrefix);
             sb.Append($@"MethodFixupSignature(");
             sb.Append(_fixupKind.ToString());
-            if (_isInstantiatingStub)
+            if (IsInstantiatingStub)
             {
                 sb.Append(" [INST]");
             }
@@ -137,7 +136,7 @@ public override int CompareToImpl(ISortableNode other, CompilerComparer comparer
             if (result != 0)
                 return result;
 
-            result = _isInstantiatingStub.CompareTo(otherNode._isInstantiatingStub);
+            result = IsInstantiatingStub.CompareTo(otherNode.IsInstantiatingStub);
             if (result != 0)
                 return result;
 
diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_ARM/ImportThunk.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_ARM/ImportThunk.cs
index ffe0835729ac8..178e29f30848e 100644
--- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_ARM/ImportThunk.cs
+++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_ARM/ImportThunk.cs
@@ -30,6 +30,7 @@ protected override void EmitCode(NodeFactory factory, ref ARM.ARMEmitter instruc
 
                 case Kind.DelayLoadHelper:
                 case Kind.VirtualStubDispatch:
+                case Kind.DelayLoadHelperWithExistingIndirectionCell:
                     // r4 contains indirection cell
                     // push r4
                     instructionEncoder.EmitPUSH(Register.R4);
diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_ARM64/ImportThunk.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_ARM64/ImportThunk.cs
index a3be3e0b3e5cd..ba619129a989b 100644
--- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_ARM64/ImportThunk.cs
+++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_ARM64/ImportThunk.cs
@@ -22,6 +22,7 @@ protected override void EmitCode(NodeFactory factory, ref ARM64Emitter instructi
                     break;
 
                 case Kind.DelayLoadHelper:
+                case Kind.DelayLoadHelperWithExistingIndirectionCell:
                 case Kind.VirtualStubDispatch:
                     // x11 contains indirection cell
                     // Do nothing x11 contains our first param
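
Note (not part of the patch): the per-target hunks above and below agree on which register carries the indirection cell into the delay-load helper: r4 on ARM, x11 on ARM64, and, for the new jumpable kind on x64, rax (see the comment in the X64 hunk below). A tiny lookup sketch, purely illustrative and using the register names stated in those comments:

```csharp
using System;

// Illustrative only: summarizes the register conventions stated in the comments of the
// per-target ImportThunk.EmitCode hunks. x86 is omitted; the jumpable kind is not emitted there.
static class IndirectionCellRegister
{
    public static string For(string architecture) => architecture switch
    {
        "ARM"   => "r4",
        "ARM64" => "x11",
        "X64"   => "rax",
        _       => throw new NotSupportedException(architecture),
    };
}
```
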
diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_X64/ImportThunk.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_X64/ImportThunk.cs
index 439a022549993..4c34b97edcb63 100644
--- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_X64/ImportThunk.cs
+++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_X64/ImportThunk.cs
@@ -37,6 +37,20 @@ protected override void EmitCode(NodeFactory factory, ref X64Emitter instruction
 
                     break;
 
+                case Kind.DelayLoadHelperWithExistingIndirectionCell:
+                    // Indirection cell is already in rax which will be first arg. Used for fast tailcalls.
+
+                    if (!relocsOnly)
+                    {
+                        // push table index
+                        instructionEncoder.EmitPUSH((sbyte)_containingImportSection.IndexFromBeginningOfArray);
+                    }
+
+                    // push [module]
+                    instructionEncoder.EmitPUSH(factory.ModuleImport);
+
+                    break;
+
                 case Kind.VirtualStubDispatch:
                     // mov rax, r11 - this is the most general case as the value of R11 also propagates
                     // to the new method after the indirection cell has been updated so the cell content
diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_X86/ImportThunk.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_X86/ImportThunk.cs
index 94f5bcb5858c1..5ae8df415f54f 100644
--- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_X86/ImportThunk.cs
+++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/Target_X86/ImportThunk.cs
@@ -41,6 +41,9 @@ protected override void EmitCode(NodeFactory factory, ref X86Emitter instruction
                     // mov edx, [module]
                     instructionEncoder.EmitMOV(Register.EDX, factory.ModuleImport);
                     break;
+
+                default:
+                    throw new NotSupportedException(_thunkKind.ToString() + " is not supported");
             }
             instructionEncoder.EmitJMP(_helperCell);
         }
diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRunCodegenNodeFactory.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRunCodegenNodeFactory.cs
index da9c4592b549a..4fd1f958d9c3b 100644
--- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRunCodegenNodeFactory.cs
+++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRunCodegenNodeFactory.cs
@@ -219,7 +219,7 @@ private void CreateNodeCaches()
             _importThunks = new NodeCache(key =>
             {
-                return new ImportThunk(this, key.Helper, key.ContainingImportSection, key.UseVirtualCall);
+                return new ImportThunk(this, key.Helper, key.ContainingImportSection, key.UseVirtualCall, key.UseJumpableStub);
             });
 
             _importMethods = new NodeCache(CreateMethodEntrypoint);
 
@@ -377,6 +377,7 @@ private IMethodNode CreateMethodEntrypoint(TypeAndMethod key)
 
             if (isPrecodeImportRequired)
             {
+                Debug.Assert(!key.IsJumpableImportRequired);
                 return new PrecodeMethodImport(
                     this,
                     ReadyToRunFixupKind.MethodEntry,
@@ -391,13 +392,15 @@ private IMethodNode CreateMethodEntrypoint(TypeAndMethod key)
                     ReadyToRunFixupKind.MethodEntry,
                     method,
                     methodWithGCInfo,
-                    isInstantiatingStub);
+                    isInstantiatingStub,
+                    isJump: key.IsJumpableImportRequired);
             }
         }
 
-        public IMethodNode MethodEntrypoint(MethodWithToken method, bool isInstantiatingStub, bool isPrecodeImportRequired)
+        public IMethodNode MethodEntrypoint(MethodWithToken method, bool isInstantiatingStub, bool isPrecodeImportRequired, bool isJumpableImportRequired)
         {
-            TypeAndMethod key = new TypeAndMethod(method.ConstrainedType, method, isInstantiatingStub, isPrecodeImportRequired);
+            Debug.Assert(!isJumpableImportRequired || !isPrecodeImportRequired);
+            TypeAndMethod key = new TypeAndMethod(method.ConstrainedType, method, isInstantiatingStub, isPrecodeImportRequired, isJumpableImportRequired);
             return _importMethods.GetOrAdd(key);
         }
 
@@ -416,7 +419,7 @@ public IEnumerable EnumerateCompiledMethods(EcmaModule moduleT
                     EcmaModule module = ((EcmaMethod)method.GetTypicalMethodDefinition()).Module;
                     ModuleToken moduleToken = Resolver.GetModuleTokenForMethod(method, throwIfNotFound: true);
 
-                    IMethodNode methodNodeDebug = MethodEntrypoint(new MethodWithToken(method, moduleToken, constrainedType: null, unboxing: false, context: null), false, false);
+                    IMethodNode methodNodeDebug = MethodEntrypoint(new MethodWithToken(method, moduleToken, constrainedType: null, unboxing: false, context: null), false, false, false);
                     MethodWithGCInfo methodCodeNodeDebug = methodNodeDebug as MethodWithGCInfo;
                     if (methodCodeNodeDebug == null && methodNodeDebug is DelayLoadMethodImport DelayLoadMethodImport)
                     {
@@ -470,7 +473,7 @@ public MethodFixupSignature MethodSignature(
             MethodWithToken method,
             bool isInstantiatingStub)
         {
-            TypeAndMethod key = new TypeAndMethod(method.ConstrainedType, method, isInstantiatingStub, false);
+            TypeAndMethod key = new TypeAndMethod(method.ConstrainedType, method, isInstantiatingStub, false, false);
             return _methodSignatures.GetOrAdd(new MethodFixupKey(fixupKind, key));
         }
 
@@ -558,19 +561,22 @@ private struct ImportThunkKey : IEquatable
             public readonly ReadyToRunHelper Helper;
             public readonly ImportSectionNode ContainingImportSection;
             public readonly bool UseVirtualCall;
+            public readonly bool UseJumpableStub;
 
-            public ImportThunkKey(ReadyToRunHelper helper, ImportSectionNode containingImportSection, bool useVirtualCall)
+            public ImportThunkKey(ReadyToRunHelper helper, ImportSectionNode containingImportSection, bool useVirtualCall, bool useJumpableStub)
             {
                 Helper = helper;
                 ContainingImportSection = containingImportSection;
                 UseVirtualCall = useVirtualCall;
+                UseJumpableStub = useJumpableStub;
             }
 
             public bool Equals(ImportThunkKey other)
             {
                 return Helper == other.Helper &&
                     ContainingImportSection == other.ContainingImportSection &&
-                    UseVirtualCall == other.UseVirtualCall;
+                    UseVirtualCall == other.UseVirtualCall &&
+                    UseJumpableStub == other.UseJumpableStub;
             }
 
             public override bool Equals(object obj)
@@ -582,15 +588,16 @@ public override int GetHashCode()
             {
                 return unchecked(31 * Helper.GetHashCode() +
                     31 * ContainingImportSection.GetHashCode() +
-                    31 * UseVirtualCall.GetHashCode());
+                    31 * UseVirtualCall.GetHashCode() +
+                    31 * UseJumpableStub.GetHashCode());
             }
         }
 
         private NodeCache _importThunks;
 
-        public ImportThunk ImportThunk(ReadyToRunHelper helper, ImportSectionNode containingImportSection, bool useVirtualCall)
+        public ImportThunk ImportThunk(ReadyToRunHelper helper, ImportSectionNode containingImportSection, bool useVirtualCall, bool useJumpableStub)
         {
-            ImportThunkKey thunkKey = new ImportThunkKey(helper, containingImportSection, useVirtualCall);
+            ImportThunkKey thunkKey = new ImportThunkKey(helper, containingImportSection, useVirtualCall, useJumpableStub);
             return _importThunks.GetOrAdd(thunkKey);
         }
 
@@ -693,13 +700,13 @@ public void AttachToDependencyGraph(DependencyAnalyzerBase graph)
             Import personalityRoutineImport = new Import(EagerImports, new ReadyToRunHelperSignature(
                 ReadyToRunHelper.PersonalityRoutine));
             PersonalityRoutine = new ImportThunk(this,
-                ReadyToRunHelper.PersonalityRoutine, EagerImports, useVirtualCall: false);
+                ReadyToRunHelper.PersonalityRoutine, EagerImports, useVirtualCall: false, useJumpableStub: false);
             graph.AddRoot(PersonalityRoutine, "Personality routine is faster to root early rather than referencing it from each unwind info");
 
             Import filterFuncletPersonalityRoutineImport = new Import(EagerImports, new ReadyToRunHelperSignature(
                 ReadyToRunHelper.PersonalityRoutineFilterFunclet));
             FilterFuncletPersonalityRoutine = new ImportThunk(this,
-                ReadyToRunHelper.PersonalityRoutineFilterFunclet, EagerImports, useVirtualCall: false);
+                ReadyToRunHelper.PersonalityRoutineFilterFunclet, EagerImports, useVirtualCall: false, useJumpableStub: false);
             graph.AddRoot(FilterFuncletPersonalityRoutine, "Filter funclet personality routine is faster to root early rather than referencing it from each unwind info");
         }
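
Note (not part of the patch): UseJumpableStub participates in ImportThunkKey.Equals and GetHashCode above because the factory memoizes thunks per key; if the flag were left out, a request for a jumpable thunk could silently return a previously cached non-jumpable one for the same helper and import section. A self-contained sketch of that memoization pattern, with stand-in types rather than ILCompiler's:

```csharp
using System;
using System.Collections.Concurrent;

// Sketch only: shows why the new flag must be part of both Equals and GetHashCode
// when the key is used to memoize node creation. Types here are stand-ins.
readonly struct ThunkKey : IEquatable<ThunkKey>
{
    public readonly string Helper;        // stands in for ReadyToRunHelper
    public readonly bool UseVirtualCall;
    public readonly bool UseJumpableStub;

    public ThunkKey(string helper, bool useVirtualCall, bool useJumpableStub)
    {
        Helper = helper;
        UseVirtualCall = useVirtualCall;
        UseJumpableStub = useJumpableStub;
    }

    public bool Equals(ThunkKey other) =>
        Helper == other.Helper &&
        UseVirtualCall == other.UseVirtualCall &&
        UseJumpableStub == other.UseJumpableStub;

    public override bool Equals(object obj) => obj is ThunkKey key && Equals(key);

    public override int GetHashCode() =>
        unchecked(31 * (Helper?.GetHashCode() ?? 0) +
                  31 * UseVirtualCall.GetHashCode() +
                  31 * UseJumpableStub.GetHashCode());
}

static class ThunkCacheDemo
{
    private static readonly ConcurrentDictionary<ThunkKey, object> s_cache = new();

    public static object GetOrAdd(ThunkKey key) => s_cache.GetOrAdd(key, _ => new object());

    public static void Main()
    {
        object plain    = GetOrAdd(new ThunkKey("SomeHelper", useVirtualCall: false, useJumpableStub: false));
        object jumpable = GetOrAdd(new ThunkKey("SomeHelper", useVirtualCall: false, useJumpableStub: true));

        // Distinct keys -> distinct cached nodes. Without UseJumpableStub in the key,
        // both requests would collapse onto whichever thunk was created first.
        Console.WriteLine(ReferenceEquals(plain, jumpable)); // False
    }
}
```
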
diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRunSymbolNodeFactory.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRunSymbolNodeFactory.cs
index 38b1893f53162..02cc4d4edada9 100644
--- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRunSymbolNodeFactory.cs
+++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRunSymbolNodeFactory.cs
@@ -118,7 +118,8 @@ private void CreateNodeCaches()
                 IMethodNode targetMethodNode = _codegenNodeFactory.MethodEntrypoint(
                     ctorKey.Method,
                     isInstantiatingStub: ctorKey.Method.Method.HasInstantiation,
-                    isPrecodeImportRequired: false);
+                    isPrecodeImportRequired: false,
+                    isJumpableImportRequired: false);
 
                 return new DelayLoadHelperImport(
                     _codegenNodeFactory,
@@ -435,7 +436,8 @@ public ISymbolNode DelegateCtor(TypeDesc delegateType, MethodWithToken method)
                 delegateType,
                 method,
                 isInstantiatingStub: false,
-                isPrecodeImportRequired: false);
+                isPrecodeImportRequired: false,
+                isJumpableImportRequired: false);
             return _delegateCtors.GetOrAdd(ctorKey);
         }
 
diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/TypeAndMethod.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/TypeAndMethod.cs
index 7b92613a2ab50..16843d70599a6 100644
--- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/TypeAndMethod.cs
+++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/TypeAndMethod.cs
@@ -16,13 +16,15 @@ internal struct TypeAndMethod : IEquatable
         public readonly MethodWithToken Method;
         public readonly bool IsInstantiatingStub;
         public readonly bool IsPrecodeImportRequired;
+        public readonly bool IsJumpableImportRequired;
 
-        public TypeAndMethod(TypeDesc type, MethodWithToken method, bool isInstantiatingStub, bool isPrecodeImportRequired)
+        public TypeAndMethod(TypeDesc type, MethodWithToken method, bool isInstantiatingStub, bool isPrecodeImportRequired, bool isJumpableImportRequired)
         {
            Type = type;
            Method = method;
            IsInstantiatingStub = isInstantiatingStub;
            IsPrecodeImportRequired = isPrecodeImportRequired;
+           IsJumpableImportRequired = isJumpableImportRequired;
        }
 
        public bool Equals(TypeAndMethod other)
@@ -30,7 +32,8 @@ public bool Equals(TypeAndMethod other)
            return Type == other.Type &&
                Method.Equals(other.Method) &&
                IsInstantiatingStub == other.IsInstantiatingStub &&
-               IsPrecodeImportRequired == other.IsPrecodeImportRequired;
+               IsPrecodeImportRequired == other.IsPrecodeImportRequired &&
+               IsJumpableImportRequired == other.IsJumpableImportRequired;
        }
 
        public override bool Equals(object obj)
@@ -43,7 +46,8 @@ public override int GetHashCode()
            return (Type?.GetHashCode() ?? 0) ^
                unchecked(Method.GetHashCode() * 31) ^
                (IsInstantiatingStub ? 0x40000000 : 0) ^
-               (IsPrecodeImportRequired ? 0x20000000 : 0);
+               (IsPrecodeImportRequired ? 0x20000000 : 0) ^
+               (IsJumpableImportRequired ? 0x10000000 : 0);
        }
    }
 }
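
Note (not part of the patch): TypeAndMethod.GetHashCode above folds each bool into the hash with its own mask (0x40000000, 0x20000000, and now 0x10000000), so every combination of the three flags perturbs the hash differently. A quick, purely illustrative sketch verifying that property:

```csharp
using System;
using System.Collections.Generic;

// Sketch only: checks that XOR-ing a distinct mask per flag yields a unique contribution
// for each of the 2^3 flag combinations, mirroring TypeAndMethod.GetHashCode above.
static class FlagHashDemo
{
    static int Combine(bool isInstantiatingStub, bool isPrecodeImportRequired, bool isJumpableImportRequired) =>
        (isInstantiatingStub ? 0x40000000 : 0) ^
        (isPrecodeImportRequired ? 0x20000000 : 0) ^
        (isJumpableImportRequired ? 0x10000000 : 0);

    public static void Main()
    {
        var seen = new HashSet<int>();
        foreach (bool a in new[] { false, true })
            foreach (bool b in new[] { false, true })
                foreach (bool c in new[] { false, true })
                    seen.Add(Combine(a, b, c));

        Console.WriteLine(seen.Count); // 8 — all flag combinations stay distinguishable
    }
}
```
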
diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/JitInterface/CorInfoImpl.ReadyToRun.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/JitInterface/CorInfoImpl.ReadyToRun.cs
index 2bedb907e3c9b..542565a5128c9 100644
--- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/JitInterface/CorInfoImpl.ReadyToRun.cs
+++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/JitInterface/CorInfoImpl.ReadyToRun.cs
@@ -905,6 +905,8 @@ private ISymbolNode GetHelperFtnUncached(CorInfoHelpFunc ftnNum)
                 case CorInfoHelpFunc.CORINFO_HELP_GETREFANY:
                 // For Vector256.Create and similar cases
                 case CorInfoHelpFunc.CORINFO_HELP_THROW_NOT_IMPLEMENTED:
+                // For x86 tailcall where helper is required we need runtime JIT.
+                case CorInfoHelpFunc.CORINFO_HELP_TAILCALL:
                     throw new RequiresRuntimeJitException(ftnNum.ToString());
 
                 default:
@@ -921,13 +923,36 @@ private void getFunctionEntryPoint(CORINFO_METHOD_STRUCT_* ftn, ref CORINFO_CONS
 
         private bool canTailCall(CORINFO_METHOD_STRUCT_* callerHnd, CORINFO_METHOD_STRUCT_* declaredCalleeHnd, CORINFO_METHOD_STRUCT_* exactCalleeHnd, bool fIsTailPrefix)
         {
-            if (fIsTailPrefix)
+            if (!fIsTailPrefix)
             {
-                // FUTURE: Delay load fixups for tailcalls
-                throw new RequiresRuntimeJitException(nameof(fIsTailPrefix));
+                MethodDesc caller = HandleToObject(callerHnd);
+
+                // Do not tailcall out of the entry point as it results in a confusing debugger experience.
+                if (caller is EcmaMethod em && em.Module.EntryPoint == caller)
+                {
+                    return false;
+                }
+
+                // Do not tailcall from methods that are marked as noinline (people often use no-inline
+                // to mean "I want to always see this method in stacktrace")
+                if (caller.IsNoInlining)
+                {
+                    return false;
+                }
+
+                // Methods with StackCrawlMark depend on finding their caller on the stack.
+                // If we tail call one of these guys, they get confused. For lack of
+                // a better way of identifying them, we use DynamicSecurity attribute to identify
+                // them.
+                //
+                MethodDesc callee = exactCalleeHnd == null ? null : HandleToObject(exactCalleeHnd);
+                if (callee != null && callee.RequireSecObject)
+                {
+                    return false;
+                }
             }
 
-            return false;
+            return true;
         }
 
         private MethodWithToken ComputeMethodWithToken(MethodDesc method, ref CORINFO_RESOLVED_TOKEN pResolvedToken, TypeDesc constrainedType, bool unboxing)
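
Note (not part of the patch): canTailCall above now answers true by default. Explicit `tail.`-prefixed calls, which the old code rejected with RequiresRuntimeJitException, are always allowed through, while implicit tailcalls are vetoed in three caller/callee situations. A standalone sketch of that policy using simplified stand-in types, not the ILCompiler ones:

```csharp
// Sketch only: the policy encoded in canTailCall above. Explicit "tail." prefixed calls are
// always allowed; implicit tailcalls are rejected for the module entry point, for
// [MethodImpl(NoInlining)] callers, and for callees that rely on stack crawling.
sealed class MethodInfoStub
{
    public bool IsModuleEntryPoint;   // stands in for "caller is the module's entry point"
    public bool IsNoInlining;         // stands in for MethodDesc.IsNoInlining
    public bool RequireSecObject;     // stands in for MethodDesc.RequireSecObject
}

static class TailCallPolicy
{
    public static bool CanTailCall(MethodInfoStub caller, MethodInfoStub callee, bool isTailPrefix)
    {
        if (!isTailPrefix)
        {
            // Keep the entry point on the stack for a sane debugging experience.
            if (caller.IsModuleEntryPoint)
                return false;

            // NoInlining is commonly used to keep a frame visible in stack traces.
            if (caller.IsNoInlining)
                return false;

            // Callees that crawl the stack for their caller must not be reached via a tailcall.
            if (callee != null && callee.RequireSecObject)
                return false;
        }

        return true;
    }
}
```
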
@@ -1955,7 +1980,8 @@ private void getCallInfo(ref CORINFO_RESOLVED_TOKEN pResolvedToken, CORINFO_RESO
                         _compilation.NodeFactory.MethodEntrypoint(
                             ComputeMethodWithToken(nonUnboxingMethod, ref pResolvedToken, constrainedType, unboxing: isUnboxingStub),
                             isInstantiatingStub: useInstantiatingStub,
-                            isPrecodeImportRequired: (flags & CORINFO_CALLINFO_FLAGS.CORINFO_CALLINFO_LDFTN) != 0));
+                            isPrecodeImportRequired: (flags & CORINFO_CALLINFO_FLAGS.CORINFO_CALLINFO_LDFTN) != 0,
+                            isJumpableImportRequired: false));
                     }
 
                     // If the abi of the method isn't stable, this will cause a usage of the RequiresRuntimeJitSymbol, which will trigger a RequiresRuntimeJitException
@@ -2637,5 +2663,27 @@ private void reportInliningDecision(CORINFO_METHOD_STRUCT_* inlinerHnd, CORINFO_
                 _inlinedMethods.Add(inlinee);
             }
         }
+
+        private void updateEntryPointForTailCall(ref CORINFO_CONST_LOOKUP entryPoint)
+        {
+            // In x64 we normally use a return address to find the indirection cell for delay load.
+            // For tailcalls we instead expect the JIT to leave the indirection in rax.
+            if (_compilation.TypeSystemContext.Target.Architecture != TargetArchitecture.X64)
+                return;
+
+            object node = HandleToObject((IntPtr)entryPoint.addr);
+            if (node is not DelayLoadMethodImport imp)
+                return;
+
+            Debug.Assert(imp.GetType() == typeof(DelayLoadMethodImport));
+            IMethodNode newEntryPoint =
+                _compilation.NodeFactory.MethodEntrypoint(
+                    imp.MethodWithToken,
+                    ((MethodFixupSignature)imp.ImportSignature.Target).IsInstantiatingStub,
+                    isPrecodeImportRequired: false,
+                    isJumpableImportRequired: true);
+
+            entryPoint = CreateConstLookupToSymbol(newEntryPoint);
+        }
     }
 }
diff --git a/src/coreclr/tools/aot/jitinterface/jitinterface.h b/src/coreclr/tools/aot/jitinterface/jitinterface.h
index 68493f0bdc1e1..aa33a3fc58318 100644
--- a/src/coreclr/tools/aot/jitinterface/jitinterface.h
+++ b/src/coreclr/tools/aot/jitinterface/jitinterface.h
@@ -167,6 +167,7 @@ struct JitInterfaceCallbacks
     bool (* getTailCallHelpers)(void * thisHandle, CorInfoExceptionClass** ppException, CORINFO_RESOLVED_TOKEN* callToken, CORINFO_SIG_INFO* sig, CORINFO_GET_TAILCALL_HELPERS_FLAGS flags, CORINFO_TAILCALL_HELPERS* pResult);
     bool (* convertPInvokeCalliToCall)(void * thisHandle, CorInfoExceptionClass** ppException, CORINFO_RESOLVED_TOKEN* pResolvedToken, bool mustConvert);
     bool (* notifyInstructionSetUsage)(void * thisHandle, CorInfoExceptionClass** ppException, CORINFO_InstructionSet instructionSet, bool supportEnabled);
+    void (* updateEntryPointForTailCall)(void * thisHandle, CorInfoExceptionClass** ppException, CORINFO_CONST_LOOKUP* entryPoint);
     void (* allocMem)(void * thisHandle, CorInfoExceptionClass** ppException, AllocMemArgs* pArgs);
     void (* reserveUnwindInfo)(void * thisHandle, CorInfoExceptionClass** ppException, bool isFunclet, bool isColdCode, uint32_t unwindSize);
     void (* allocUnwindInfo)(void * thisHandle, CorInfoExceptionClass** ppException, uint8_t* pHotCode, uint8_t* pColdCode, uint32_t startOffset, uint32_t endOffset, uint32_t unwindSize, uint8_t* pUnwindBlock, CorJitFuncKind funcKind);
@@ -1696,6 +1697,14 @@ class JitInterfaceWrapper : public ICorJitInfo
         return temp;
     }
 
+    virtual void updateEntryPointForTailCall(
+          CORINFO_CONST_LOOKUP* entryPoint)
+{
+    CorInfoExceptionClass* pException = nullptr;
+    _callbacks->updateEntryPointForTailCall(_thisHandle, &pException, entryPoint);
+    if (pException != nullptr) throw pException;
+}
+
     virtual void allocMem(
         AllocMemArgs* pArgs)
     {
diff --git a/src/coreclr/vm/jitinterface.cpp b/src/coreclr/vm/jitinterface.cpp
index 71f197444c9b6..05e7666c46e73 100644
--- a/src/coreclr/vm/jitinterface.cpp
+++ b/src/coreclr/vm/jitinterface.cpp
@@ -13977,6 +13977,11 @@ bool CEEInfo::convertPInvokeCalliToCall(CORINFO_RESOLVED_TOKEN * pResolvedToken,
     return false;
 }
 
+void CEEInfo::updateEntryPointForTailCall(CORINFO_CONST_LOOKUP* entryPoint)
+{
+    // No update necessary, all entry points are tail callable in runtime.
+}
+
 void CEEInfo::allocMem (AllocMemArgs *pArgs)
 {
     LIMITED_METHOD_CONTRACT;
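
Note (not part of the patch): putting the pieces together, the crossgen2 side (CorInfoImpl.ReadyToRun.cs) handles updateEntryPointForTailCall by re-requesting the method entry point with isJumpableImportRequired: true whenever the target is a DelayLoadMethodImport on x64, so the tailcall goes through the new rax-based thunk; the VM side (CEEInfo) is a no-op because runtime-JIT entry points are already tail callable. A simplified, hypothetical sketch of that redirection flow with stand-in types, not the real JIT-EE interface:

```csharp
using System;

// Hypothetical stand-ins for illustration only; the real flow uses CORINFO_CONST_LOOKUP,
// DelayLoadMethodImport and NodeFactory.MethodEntrypoint as shown in the hunks above.
sealed class EntryPointNode
{
    public bool IsDelayLoadMethodImport;
    public bool IsInstantiatingStub;
}

static class TailCallEntryPointUpdate
{
    // createEntryPoint stands in for NodeFactory.MethodEntrypoint(method, isInstantiatingStub,
    // isPrecodeImportRequired, isJumpableImportRequired).
    public static EntryPointNode Update(
        EntryPointNode current,
        bool targetIsX64,
        Func<bool /*isInstantiatingStub*/, bool /*isJumpableImportRequired*/, EntryPointNode> createEntryPoint)
    {
        // Only x64 gets the rax-based jumpable thunk in this change; other targets keep the entry point as-is.
        if (!targetIsX64)
            return current;

        // Only delay-load method imports need redirecting; anything else is left alone.
        if (!current.IsDelayLoadMethodImport)
            return current;

        // Re-create the entry point as a jumpable import for the same method.
        return createEntryPoint(current.IsInstantiatingStub, true);
    }
}
```
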