diff --git a/src/coreclr/debug/daccess/dacdbiimpl.cpp b/src/coreclr/debug/daccess/dacdbiimpl.cpp index fb31abdc764359805d9e2ea9e582250f47f07aca..161d2a72613678fa69c226b662cd5ad42a2f0d07 100644 --- a/src/coreclr/debug/daccess/dacdbiimpl.cpp +++ b/src/coreclr/debug/daccess/dacdbiimpl.cpp @@ -4519,7 +4519,9 @@ void DacDbiInterfaceImpl::EnumerateModulesInAssembly( // Debugger isn't notified of Resource / Inspection-only modules. if (pDomainAssembly->GetModule()->IsVisibleToDebugger()) { - _ASSERTE(pDomainAssembly->IsLoaded()); + // If domain assembly isn't yet loaded, just return + if (!pDomainAssembly->IsLoaded()) + return; VMPTR_DomainAssembly vmDomainAssembly = VMPTR_DomainAssembly::NullPtr(); vmDomainAssembly.SetHostPtr(pDomainAssembly); diff --git a/src/coreclr/dlls/mscorrc/mscorrc.rc b/src/coreclr/dlls/mscorrc/mscorrc.rc index 21784fce2e8cf1fb9f6ca8559e72a5840e7ca88d..4516daeda00656cf133b5f80d56f515dd80e70ad 100644 --- a/src/coreclr/dlls/mscorrc/mscorrc.rc +++ b/src/coreclr/dlls/mscorrc/mscorrc.rc @@ -314,6 +314,8 @@ BEGIN IDS_CLASSLOAD_INLINE_ARRAY_LENGTH "InlineArrayAttribute requires that the length argument is greater than 0. Type: '%1'. Assembly: '%2'." IDS_CLASSLOAD_INLINE_ARRAY_EXPLICIT "InlineArrayAttribute cannot be applied to a type with explicit layout. Type: '%1'. Assembly: '%2'." + IDS_INVALID_RECURSIVE_GENERIC_FIELD_LOAD "Could not load type '%1' from assembly '%2' because of an invalid self-referential generic field." + #if FEATURE_COMINTEROP IDS_EE_CANNOTCAST_NOMARSHAL "The Windows Runtime Object can only be used in the threading context where it was created, because it implements INoMarshal or has MarshalingBehaviorAttribute(MarshalingType.None) set." #endif diff --git a/src/coreclr/dlls/mscorrc/resource.h b/src/coreclr/dlls/mscorrc/resource.h index b65318ca02ad080e00edf2e3582a815cf75bb832..9942b1ae4429ee8412c8129ccff8eaec6683581f 100644 --- a/src/coreclr/dlls/mscorrc/resource.h +++ b/src/coreclr/dlls/mscorrc/resource.h @@ -166,6 +166,7 @@ #define IDS_CLASSLOAD_MI_BADRETURNTYPE 0x17a8 #define IDS_CLASSLOAD_STATICVIRTUAL_NOTIMPL 0x17a9 +#define IDS_INVALID_RECURSIVE_GENERIC_FIELD_LOAD 0x17aa #define IDS_CLASSLOAD_TOOMANYGENERICARGS 0x17ab #define IDS_CLASSLOAD_INLINE_ARRAY_FIELD_COUNT 0x17ac diff --git a/src/coreclr/vm/class.h b/src/coreclr/vm/class.h index 827212f0ac391fa80fbb2c6ea32e6a837bb7a6a4..8515765826a483579f5224e5ba27278428672d75 100644 --- a/src/coreclr/vm/class.h +++ b/src/coreclr/vm/class.h @@ -2230,6 +2230,11 @@ private: SUPPORTS_DAC; return m_totalFields - m_currField - 1; } + int GetValueClassCacheIndex() + { + LIMITED_METHOD_CONTRACT; + return m_currField; + } }; // diff --git a/src/coreclr/vm/classlayoutinfo.cpp b/src/coreclr/vm/classlayoutinfo.cpp index c326a3e26e15ba1cd271e02cbafe89ca31afba6a..1a3a90431bd8e981bba53aca553ba31de22fed33 100644 --- a/src/coreclr/vm/classlayoutinfo.cpp +++ b/src/coreclr/vm/classlayoutinfo.cpp @@ -461,6 +461,7 @@ namespace IMDInternalImport* pInternalImport, HENUMInternal* phEnumField, Module* pModule, + mdTypeDef cl, ParseNativeTypeFlags nativeTypeFlags, const SigTypeContext* pTypeContext, BOOL* fDisqualifyFromManagedSequential, @@ -477,6 +478,8 @@ namespace #endif ) { + STANDARD_VM_CONTRACT; + HRESULT hr; mdFieldDef fd; ULONG maxRid = pInternalImport->GetCountWithTokenKind(mdtFieldDef); @@ -533,20 +536,45 @@ namespace } #endif MetaSig fsig(pCOMSignature, cbCOMSignature, pModule, pTypeContext, MetaSig::sigField); - CorElementType corElemType = fsig.NextArgNormalized(); + CorElementType corElemType = fsig.NextArg(); + 
TypeHandle typeHandleMaybe; if (corElemType == ELEMENT_TYPE_VALUETYPE) // Only look up the next element in the signature if it is a value type to avoid causing recursive type loads in valid scenarios. { - typeHandleMaybe = fsig.GetLastTypeHandleThrowing(ClassLoader::LoadTypes, - CLASS_LOAD_APPROXPARENTS, - TRUE); + SigPointer::HandleRecursiveGenericsForFieldLayoutLoad recursiveControl; + recursiveControl.pModuleWithTokenToAvoidIfPossible = pModule; + recursiveControl.tkTypeDefToAvoidIfPossible = cl; + typeHandleMaybe = fsig.GetArgProps().GetTypeHandleThrowing(pModule, + pTypeContext, + ClassLoader::LoadTypes, + CLASS_LOAD_APPROXPARENTS, + TRUE, NULL, NULL, NULL, + &recursiveControl); + + if (typeHandleMaybe.IsNull()) + { + // Everett C++ compiler can generate a TypeRef with RS=0 + // without respective TypeDef for unmanaged valuetypes, + // referenced only by pointers to them. + // In such case, GetTypeHandleThrowing returns null handle, + // and we return E_T_VOID + typeHandleMaybe = TypeHandle(CoreLibBinder::GetElementType(ELEMENT_TYPE_VOID)); + } + corElemType = typeHandleMaybe.AsMethodTable()->GetInternalCorElementType(); + if (corElemType != ELEMENT_TYPE_VALUETYPE) + typeHandleMaybe = TypeHandle(); + } + else if (corElemType == ELEMENT_TYPE_TYPEDBYREF) + { + typeHandleMaybe = TypeHandle(g_TypedReferenceMT); } + pFieldInfoArrayOut->m_placement = GetFieldPlacementInfo(corElemType, typeHandleMaybe); *fDisqualifyFromManagedSequential |= TypeHasGCPointers(corElemType, typeHandleMaybe); *fHasAutoLayoutField |= TypeHasAutoLayoutField(corElemType, typeHandleMaybe); *fHasInt128Field |= TypeHasInt128Field(corElemType, typeHandleMaybe); - if (!IsFieldBlittable(pModule, fd, fsig.GetArgProps(), pTypeContext, nativeTypeFlags)) + if (!IsFieldBlittable(pModule, fd, corElemType, typeHandleMaybe, nativeTypeFlags)) *pIsBlittableOut = FALSE; (*cInstanceFields)++; @@ -705,6 +733,7 @@ VOID EEClassLayoutInfo::CollectLayoutFieldMetadataThrowing( pInternalImport, phEnumField, pModule, + cl, nativeTypeFlags, pTypeContext, &fDisqualifyFromManagedSequential, diff --git a/src/coreclr/vm/field.cpp b/src/coreclr/vm/field.cpp index 5e4de36cf92f2537b2e5427860666f4bbca55d12..aad74715d9a3bc34df6ae8f425ea096e8cd7b71e 100644 --- a/src/coreclr/vm/field.cpp +++ b/src/coreclr/vm/field.cpp @@ -672,6 +672,32 @@ UINT FieldDesc::LoadSize() return size; } +UINT FieldDesc::GetSize(MethodTable *pMTOfValueTypeField) +{ + CONTRACTL + { + INSTANCE_CHECK; + NOTHROW; + GC_NOTRIGGER; + MODE_ANY; + FORBID_FAULT; + } + CONTRACTL_END + + CorElementType type = GetFieldType(); + UINT size = GetSizeForCorElementType(type); + if (size == (UINT) -1) + { + LOG((LF_CLASSLOADER, LL_INFO10000, "FieldDesc::GetSize %s::%s\n", GetApproxEnclosingMethodTable()->GetDebugClassName(), m_debugName)); + CONSISTENCY_CHECK(GetFieldType() == ELEMENT_TYPE_VALUETYPE); + TypeHandle t = (pMTOfValueTypeField != NULL) ? TypeHandle(pMTOfValueTypeField) : LookupApproxFieldTypeHandle(); + _ASSERTE(!t.IsNull()); + size = t.GetMethodTable()->GetNumInstanceFieldBytes(); + } + + return size; +} + UINT FieldDesc::GetSize() { CONTRACTL diff --git a/src/coreclr/vm/field.h b/src/coreclr/vm/field.h index ea10741db585c41028111e9f1d9029c96d364f03..8e4f8539a3d138c2c320447b1a09740ccc8953be 100644 --- a/src/coreclr/vm/field.h +++ b/src/coreclr/vm/field.h @@ -324,6 +324,9 @@ public: // Return -1 if the type isn't loaded yet (i.e. 
if LookupFieldTypeHandle() would return null) UINT GetSize(); + // If the field is a valuetype, then either pMTOfValueTypeField must not be NULL or LookupFieldTypeHandle() must not return null + UINT GetSize(MethodTable *pMTOfValueTypeField); + // These routines encapsulate the operation of getting and setting // fields. void GetInstanceField(OBJECTREF o, VOID * pOutVal); diff --git a/src/coreclr/vm/fieldmarshaler.cpp b/src/coreclr/vm/fieldmarshaler.cpp index ea0bbb871cf4fc8762795ee4b7dec44a0b1c6e8b..296924bb1115609a5e3c62bc22ea6796a6f0dc49 100644 --- a/src/coreclr/vm/fieldmarshaler.cpp +++ b/src/coreclr/vm/fieldmarshaler.cpp @@ -201,11 +201,13 @@ VOID ParseNativeType(Module* pModule, bool IsFieldBlittable( Module* pModule, mdFieldDef fd, - SigPointer fieldSig, - const SigTypeContext* pTypeContext, + CorElementType corElemType, + TypeHandle valueTypeHandle, ParseNativeTypeFlags flags ) { + STANDARD_VM_CONTRACT; + PCCOR_SIGNATURE marshalInfoSig; ULONG marshalInfoSigLength; @@ -218,75 +220,63 @@ bool IsFieldBlittable( bool isBlittable = false; - EX_TRY + switch (corElemType) { - TypeHandle valueTypeHandle; - CorElementType corElemType = fieldSig.PeekElemTypeNormalized(pModule, pTypeContext, &valueTypeHandle); - - switch (corElemType) + case ELEMENT_TYPE_CHAR: + isBlittable = (nativeType == NATIVE_TYPE_DEFAULT && flags != ParseNativeTypeFlags::IsAnsi) || (nativeType == NATIVE_TYPE_I2) || (nativeType == NATIVE_TYPE_U2); + break; + case ELEMENT_TYPE_I1: + case ELEMENT_TYPE_U1: + isBlittable = (nativeType == NATIVE_TYPE_DEFAULT) || (nativeType == NATIVE_TYPE_I1) || (nativeType == NATIVE_TYPE_U1); + break; + case ELEMENT_TYPE_I2: + case ELEMENT_TYPE_U2: + isBlittable = (nativeType == NATIVE_TYPE_DEFAULT) || (nativeType == NATIVE_TYPE_I2) || (nativeType == NATIVE_TYPE_U2); + break; + case ELEMENT_TYPE_I4: + case ELEMENT_TYPE_U4: + isBlittable = (nativeType == NATIVE_TYPE_DEFAULT) || (nativeType == NATIVE_TYPE_I4) || (nativeType == NATIVE_TYPE_U4) || (nativeType == NATIVE_TYPE_ERROR); + break; + case ELEMENT_TYPE_I8: + case ELEMENT_TYPE_U8: + isBlittable = (nativeType == NATIVE_TYPE_DEFAULT) || (nativeType == NATIVE_TYPE_I8) || (nativeType == NATIVE_TYPE_U8); + break; + case ELEMENT_TYPE_R4: + isBlittable = (nativeType == NATIVE_TYPE_DEFAULT) || (nativeType == NATIVE_TYPE_R4); + break; + case ELEMENT_TYPE_R8: + isBlittable = (nativeType == NATIVE_TYPE_DEFAULT) || (nativeType == NATIVE_TYPE_R8); + break; + case ELEMENT_TYPE_I: + case ELEMENT_TYPE_U: + isBlittable = (nativeType == NATIVE_TYPE_DEFAULT) || (nativeType == NATIVE_TYPE_INT) || (nativeType == NATIVE_TYPE_UINT); + break; + case ELEMENT_TYPE_PTR: + isBlittable = nativeType == NATIVE_TYPE_DEFAULT; + break; + case ELEMENT_TYPE_FNPTR: + isBlittable = nativeType == NATIVE_TYPE_DEFAULT || nativeType == NATIVE_TYPE_FUNC; + break; + case ELEMENT_TYPE_VALUETYPE: + if (nativeType != NATIVE_TYPE_DEFAULT && nativeType != NATIVE_TYPE_STRUCT) { - case ELEMENT_TYPE_CHAR: - isBlittable = (nativeType == NATIVE_TYPE_DEFAULT && flags != ParseNativeTypeFlags::IsAnsi) || (nativeType == NATIVE_TYPE_I2) || (nativeType == NATIVE_TYPE_U2); - break; - case ELEMENT_TYPE_I1: - case ELEMENT_TYPE_U1: - isBlittable = (nativeType == NATIVE_TYPE_DEFAULT) || (nativeType == NATIVE_TYPE_I1) || (nativeType == NATIVE_TYPE_U1); - break; - case ELEMENT_TYPE_I2: - case ELEMENT_TYPE_U2: - isBlittable = (nativeType == NATIVE_TYPE_DEFAULT) || (nativeType == NATIVE_TYPE_I2) || (nativeType == NATIVE_TYPE_U2); - break; - case ELEMENT_TYPE_I4: - case ELEMENT_TYPE_U4: - isBlittable = 
(nativeType == NATIVE_TYPE_DEFAULT) || (nativeType == NATIVE_TYPE_I4) || (nativeType == NATIVE_TYPE_U4) || (nativeType == NATIVE_TYPE_ERROR); - break; - case ELEMENT_TYPE_I8: - case ELEMENT_TYPE_U8: - isBlittable = (nativeType == NATIVE_TYPE_DEFAULT) || (nativeType == NATIVE_TYPE_I8) || (nativeType == NATIVE_TYPE_U8); - break; - case ELEMENT_TYPE_R4: - isBlittable = (nativeType == NATIVE_TYPE_DEFAULT) || (nativeType == NATIVE_TYPE_R4); - break; - case ELEMENT_TYPE_R8: - isBlittable = (nativeType == NATIVE_TYPE_DEFAULT) || (nativeType == NATIVE_TYPE_R8); - break; - case ELEMENT_TYPE_I: - case ELEMENT_TYPE_U: - isBlittable = (nativeType == NATIVE_TYPE_DEFAULT) || (nativeType == NATIVE_TYPE_INT) || (nativeType == NATIVE_TYPE_UINT); - break; - case ELEMENT_TYPE_PTR: - isBlittable = nativeType == NATIVE_TYPE_DEFAULT; - break; - case ELEMENT_TYPE_FNPTR: - isBlittable = nativeType == NATIVE_TYPE_DEFAULT || nativeType == NATIVE_TYPE_FUNC; - break; - case ELEMENT_TYPE_VALUETYPE: - if (nativeType != NATIVE_TYPE_DEFAULT && nativeType != NATIVE_TYPE_STRUCT) - { - isBlittable = false; - } - else if (valueTypeHandle.GetMethodTable() == CoreLibBinder::GetClass(CLASS__DECIMAL)) - { - // The alignment requirements of the managed System.Decimal type do not match the native DECIMAL type. - // As a result, a field of type System.Decimal can't be blittable. - isBlittable = false; - } - else - { - isBlittable = valueTypeHandle.GetMethodTable()->IsBlittable(); - } - break; - default: isBlittable = false; - break; } + else if (valueTypeHandle.GetMethodTable() == CoreLibBinder::GetClass(CLASS__DECIMAL)) + { + // The alignment requirements of the managed System.Decimal type do not match the native DECIMAL type. + // As a result, a field of type System.Decimal can't be blittable. + isBlittable = false; + } + else + { + isBlittable = valueTypeHandle.GetMethodTable()->IsBlittable(); + } + break; + default: + isBlittable = false; + break; } - EX_CATCH - { - // We were unable to determine the native type, likely because there is a mutually recursive type reference - // in this field's type. A mutually recursive object would never be blittable, so we don't need to do anything. - } - EX_END_CATCH(RethrowTerminalExceptions); return isBlittable; } diff --git a/src/coreclr/vm/fieldmarshaler.h b/src/coreclr/vm/fieldmarshaler.h index 83b2c79fb4f05503befdcd2d61e2aa912a4f5164..f085aaa378768380a3af0f616531dcb206b248a0 100644 --- a/src/coreclr/vm/fieldmarshaler.h +++ b/src/coreclr/vm/fieldmarshaler.h @@ -52,8 +52,8 @@ BOOL IsStructMarshalable(TypeHandle th); bool IsFieldBlittable( Module* pModule, mdFieldDef fd, - SigPointer fieldSig, - const SigTypeContext* pTypeContext, + CorElementType corElemType, + TypeHandle valueTypeHandle, ParseNativeTypeFlags flags ); diff --git a/src/coreclr/vm/methodtable.cpp b/src/coreclr/vm/methodtable.cpp index 0897c0d2b43120a86d3099f67da2b484ab1ae54e..0c2a61ccb9f9155e4850702250a002831fa728ef 100644 --- a/src/coreclr/vm/methodtable.cpp +++ b/src/coreclr/vm/methodtable.cpp @@ -2177,15 +2177,16 @@ const char* GetSystemVClassificationTypeName(SystemVClassificationType t) #endif // _DEBUG && LOGGING // Returns 'true' if the struct is passed in registers, 'false' otherwise. 
-bool MethodTable::ClassifyEightBytes(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel, unsigned int startOffsetOfStruct, bool useNativeLayout) +bool MethodTable::ClassifyEightBytes(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel, unsigned int startOffsetOfStruct, bool useNativeLayout, MethodTable** pByValueClassCache) { if (useNativeLayout) { + _ASSERTE(pByValueClassCache == NULL); return ClassifyEightBytesWithNativeLayout(helperPtr, nestingLevel, startOffsetOfStruct, GetNativeLayoutInfo()); } else { - return ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel, startOffsetOfStruct, useNativeLayout); + return ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel, startOffsetOfStruct, useNativeLayout, pByValueClassCache); } } @@ -2238,11 +2239,21 @@ static SystemVClassificationType ReClassifyField(SystemVClassificationType origi } } +static MethodTable* ByValueClassCacheLookup(MethodTable** pByValueClassCache, unsigned index) +{ + LIMITED_METHOD_CONTRACT; + if (pByValueClassCache == NULL) + return NULL; + else + return pByValueClassCache[index]; +} + // Returns 'true' if the struct is passed in registers, 'false' otherwise. bool MethodTable::ClassifyEightBytesWithManagedLayout(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel, unsigned int startOffsetOfStruct, - bool useNativeLayout) + bool useNativeLayout, + MethodTable** pByValueClassCache) { CONTRACTL { @@ -2308,22 +2319,24 @@ bool MethodTable::ClassifyEightBytesWithManagedLayout(SystemVStructRegisterPassi || firstFieldElementType == ELEMENT_TYPE_VALUETYPE) && (pFieldStart->GetOffset() == 0) && HasLayout() - && (GetNumInstanceFieldBytes() % pFieldStart->GetSize() == 0); + && (GetNumInstanceFieldBytes() % pFieldStart->GetSize(ByValueClassCacheLookup(pByValueClassCache, 0)) == 0); if (isFixedBuffer) { - numIntroducedFields = GetNumInstanceFieldBytes() / pFieldStart->GetSize(); + numIntroducedFields = GetNumInstanceFieldBytes() / pFieldStart->GetSize(ByValueClassCacheLookup(pByValueClassCache, 0)); } for (unsigned int fieldIndex = 0; fieldIndex < numIntroducedFields; fieldIndex++) { FieldDesc* pField; DWORD fieldOffset; + unsigned int fieldIndexForSize = fieldIndex; if (isFixedBuffer) { pField = pFieldStart; - fieldOffset = fieldIndex * pField->GetSize(); + fieldIndexForSize = 0; + fieldOffset = fieldIndex * pField->GetSize(ByValueClassCacheLookup(pByValueClassCache, fieldIndexForSize)); } else { @@ -2333,7 +2346,7 @@ bool MethodTable::ClassifyEightBytesWithManagedLayout(SystemVStructRegisterPassi unsigned int normalizedFieldOffset = fieldOffset + startOffsetOfStruct; - unsigned int fieldSize = pField->GetSize(); + unsigned int fieldSize = pField->GetSize(ByValueClassCacheLookup(pByValueClassCache, fieldIndexForSize)); _ASSERTE(fieldSize != (unsigned int)-1); // The field can't span past the end of the struct. 
@@ -2352,7 +2365,11 @@ bool MethodTable::ClassifyEightBytesWithManagedLayout(SystemVStructRegisterPassi #endif // _DEBUG if (fieldClassificationType == SystemVClassificationTypeStruct) { - TypeHandle th = pField->GetApproxFieldTypeHandleThrowing(); + TypeHandle th; + if (pByValueClassCache != NULL) + th = TypeHandle(pByValueClassCache[fieldIndex]); + else + th = pField->GetApproxFieldTypeHandleThrowing(); _ASSERTE(!th.IsNull()); MethodTable* pFieldMT = th.GetMethodTable(); @@ -2368,7 +2385,7 @@ bool MethodTable::ClassifyEightBytesWithManagedLayout(SystemVStructRegisterPassi } else { - structRet = pFieldMT->ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel + 1, normalizedFieldOffset, useNativeLayout); + structRet = pFieldMT->ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel + 1, normalizedFieldOffset, useNativeLayout, NULL); } helperPtr->inEmbeddedStruct = inEmbeddedStructPrev; @@ -2479,7 +2496,7 @@ bool MethodTable::ClassifyEightBytesWithNativeLayout(SystemVStructRegisterPassin if (!HasLayout()) { // If there is no native layout for this struct use the managed layout instead. - return ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel, startOffsetOfStruct, true); + return ClassifyEightBytesWithManagedLayout(helperPtr, nestingLevel, startOffsetOfStruct, true, NULL); } const NativeFieldDescriptor *pNativeFieldDescs = pNativeLayoutInfo->GetNativeFieldDescriptors(); diff --git a/src/coreclr/vm/methodtable.h b/src/coreclr/vm/methodtable.h index c78fbfb83ed58945164a269cd14a50fc1470de54..20fa2b091629e4a4304f81c9896175a823076a61 100644 --- a/src/coreclr/vm/methodtable.h +++ b/src/coreclr/vm/methodtable.h @@ -754,7 +754,7 @@ public: #if defined(UNIX_AMD64_ABI_ITF) // Builds the internal data structures and classifies struct eightbytes for Amd System V calling convention. - bool ClassifyEightBytes(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel, unsigned int startOffsetOfStruct, bool isNativeStruct); + bool ClassifyEightBytes(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel, unsigned int startOffsetOfStruct, bool isNativeStruct, MethodTable** pByValueClassCache = NULL); bool ClassifyEightBytesWithNativeLayout(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel, unsigned int startOffsetOfStruct, EEClassNativeLayoutInfo const* nativeLayoutInfo); #endif // defined(UNIX_AMD64_ABI_ITF) @@ -794,7 +794,7 @@ private: #if defined(UNIX_AMD64_ABI_ITF) void AssignClassifiedEightByteTypes(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel) const; // Builds the internal data structures and classifies struct eightbytes for Amd System V calling convention. 
- bool ClassifyEightBytesWithManagedLayout(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel, unsigned int startOffsetOfStruct, bool isNativeStruct); + bool ClassifyEightBytesWithManagedLayout(SystemVStructRegisterPassingHelperPtr helperPtr, unsigned int nestingLevel, unsigned int startOffsetOfStruct, bool isNativeStruct, MethodTable** pByValueClassCache); #endif // defined(UNIX_AMD64_ABI_ITF) DWORD GetClassIndexFromToken(mdTypeDef typeToken) diff --git a/src/coreclr/vm/methodtablebuilder.cpp b/src/coreclr/vm/methodtablebuilder.cpp index 831b43f442ef52297e6c4ca12c55718cec207bec..bd867cddbacb68537bd8f1586cdc9bdcfab0409a 100644 --- a/src/coreclr/vm/methodtablebuilder.cpp +++ b/src/coreclr/vm/methodtablebuilder.cpp @@ -1841,7 +1841,7 @@ MethodTableBuilder::BuildMethodTableThrowing( #ifdef FEATURE_HFA #error "Can't have FEATURE_HFA and UNIX_AMD64_ABI defined at the same time." #endif // FEATURE_HFA - SystemVAmd64CheckForPassStructInRegister(); + SystemVAmd64CheckForPassStructInRegister(pByValueClassCache); #endif // UNIX_AMD64_ABI } @@ -1968,7 +1968,8 @@ MethodTableBuilder::BuildMethodTableThrowing( CONSISTENCY_CHECK(!current->IsStatic()); if (current->GetFieldType() == ELEMENT_TYPE_VALUETYPE) { - TypeHandle th = current->LookupApproxFieldTypeHandle(); + _ASSERTE((size_t)fields.GetValueClassCacheIndex() < bmtEnumFields->dwNumInstanceFields); + TypeHandle th = TypeHandle(pByValueClassCache[fields.GetValueClassCacheIndex()]); CONSISTENCY_CHECK(!th.IsNull()); if (th.AsMethodTable()->GetClass()->IsUnsafeValueClass()) { @@ -3787,6 +3788,11 @@ VOID MethodTableBuilder::InitializeFieldDescs(FieldDesc *pFieldDescList, // Track whether any field in this type requires 8-byte alignment BOOL fFieldRequiresAlign8 = HasParent() ? GetParentMethodTable()->RequiresAlign8() : FALSE; #endif +#if defined(EnC_SUPPORTED) + bool isEnCField = pFieldDescList != NULL && pFieldDescList->IsEnCNew(); +#else + bool isEnCField = false; +#endif // EnC_SUPPORTED for (i = 0; i < bmtMetaData->cFields; i++) { @@ -4091,14 +4097,30 @@ IS_VALUETYPE: { // Loading a non-self-ref valuetype field. OVERRIDE_TYPE_LOAD_LEVEL_LIMIT(CLASS_LOAD_APPROXPARENTS); - // We load the approximate type of the field to avoid recursion problems. - // MethodTable::DoFullyLoad() will later load it fully - pByValueClass = fsig.GetArgProps().GetTypeHandleThrowing(GetModule(), + if (isEnCField || fIsStatic) + { + // EnCFieldDescs are not created at normal MethodTableBuilder time, and don't need to avoid recursive generic instantiation + pByValueClass = fsig.GetArgProps().GetTypeHandleThrowing(GetModule(), &bmtGenerics->typeContext, ClassLoader::LoadTypes, CLASS_LOAD_APPROXPARENTS, TRUE ).GetMethodTable(); + } + else + { + // We load the approximate type of the field to avoid recursion problems. + // MethodTable::DoFullyLoad() will later load it fully + SigPointer::HandleRecursiveGenericsForFieldLayoutLoad recursiveControl; + recursiveControl.pModuleWithTokenToAvoidIfPossible = GetModule(); + recursiveControl.tkTypeDefToAvoidIfPossible = GetCl(); + pByValueClass = fsig.GetArgProps().GetTypeHandleThrowing(GetModule(), + &bmtGenerics->typeContext, + ClassLoader::LoadTypes, + CLASS_LOAD_APPROXPARENTS, + TRUE, NULL, NULL, NULL, + &recursiveControl).GetMethodTable(); + } } // #FieldDescTypeMorph IF it is an enum, strip it down to its underlying type @@ -8428,7 +8450,7 @@ DWORD MethodTableBuilder::GetFieldSize(FieldDesc *pFD) #ifdef UNIX_AMD64_ABI // checks whether the struct is enregisterable. 
-void MethodTableBuilder::SystemVAmd64CheckForPassStructInRegister() +void MethodTableBuilder::SystemVAmd64CheckForPassStructInRegister(MethodTable** pByValueClassCache) { STANDARD_VM_CONTRACT; @@ -8457,7 +8479,7 @@ void MethodTableBuilder::SystemVAmd64CheckForPassStructInRegister() const bool useNativeLayout = false; // Iterate through the fields and make sure they meet requirements to pass in registers SystemVStructRegisterPassingHelper helper((unsigned int)totalStructSize); - if (GetHalfBakedMethodTable()->ClassifyEightBytes(&helper, 0, 0, useNativeLayout)) + if (GetHalfBakedMethodTable()->ClassifyEightBytes(&helper, 0, 0, useNativeLayout, pByValueClassCache)) { LOG((LF_JIT, LL_EVERYTHING, "**** SystemVAmd64CheckForPassStructInRegister: struct %s is enregisterable\n", this->GetDebugClassName())); diff --git a/src/coreclr/vm/methodtablebuilder.h b/src/coreclr/vm/methodtablebuilder.h index b04106177297704a986e1981878547915ec29fdc..8e91efe45ad4721fd400ec9b50d9610298b8709c 100644 --- a/src/coreclr/vm/methodtablebuilder.h +++ b/src/coreclr/vm/methodtablebuilder.h @@ -2925,7 +2925,7 @@ private: #ifdef UNIX_AMD64_ABI // checks whether the struct is enregisterable. - void SystemVAmd64CheckForPassStructInRegister(); + void SystemVAmd64CheckForPassStructInRegister(MethodTable** pByValueClassCache); // Store the eightbyte classification into the EEClass void StoreEightByteClassification(SystemVStructRegisterPassingHelper* helper); diff --git a/src/coreclr/vm/siginfo.cpp b/src/coreclr/vm/siginfo.cpp index 7eb28f7095fc59589ee2583eb2dce5f0740f9795..23185646df61a40af0ebb9e1f7b93f3cc9483199 100644 --- a/src/coreclr/vm/siginfo.cpp +++ b/src/coreclr/vm/siginfo.cpp @@ -981,7 +981,8 @@ TypeHandle SigPointer::GetTypeHandleThrowing( const Substitution * pSubst/*=NULL*/, // ZapSigContext is only set when decoding zapsigs const ZapSig::Context * pZapSigContext, - MethodTable * pMTInterfaceMapOwner) const + MethodTable * pMTInterfaceMapOwner, + HandleRecursiveGenericsForFieldLayoutLoad *pRecursiveFieldGenericHandling) const { CONTRACT(TypeHandle) { @@ -998,6 +999,25 @@ TypeHandle SigPointer::GetTypeHandleThrowing( } CONTRACT_END + _ASSERTE(!pRecursiveFieldGenericHandling || dropGenericArgumentLevel); // pRecursiveFieldGenericHandling can only be set if dropGenericArgumentLevel is set + if (pRecursiveFieldGenericHandling != NULL) + { + // if pRecursiveFieldGenericHandling is set, we must allow loading types + _ASSERTE(fLoadTypes == ClassLoader::LoadTypes); + // if pRecursiveFieldGenericHandling is set, then substitutions must not be enabled. + _ASSERTE(pSubst == NULL); + // FORBIDGC_LOADER_USE_ENABLED must not be enabled + _ASSERTE(!FORBIDGC_LOADER_USE_ENABLED()); + // Zap sig context must be NULL, as this can only happen in the type loader itself + _ASSERTE(pZapSigContext == NULL); + // Similarly with the pMTInterfaceMapOwner logic + _ASSERTE(pMTInterfaceMapOwner == NULL); + + // This may throw an exception using the FullModule + _ASSERTE(pModule->IsFullModule()); + } + + // We have an invariant that before we call a method, we must have loaded all of the valuetype parameters of that // method visible from the signature of the method. Normally we do this via type loading before the method is called // by walking the signature of the callee method at jit time, and loading all of the valuetype arguments at that time. 
@@ -1329,110 +1349,224 @@ TypeHandle SigPointer::GetTypeHandleThrowing( TypeHandle *thisinst = (TypeHandle*) _alloca(dwAllocaSize); - // Finally we gather up the type arguments themselves, loading at the level specified for generic arguments - for (unsigned i = 0; i < ntypars; i++) - { - ClassLoadLevel argLevel = level; - TypeHandle typeHnd = TypeHandle(); - BOOL argDrop = FALSE; + bool handlingRecursiveGenericFieldScenario = false; + SigPointer psigCopy = psig; - if (dropGenericArgumentLevel) + // For the recursive field handling system, we instantiate over __Canon first, then over Byte and if the + // types end up with the same GC layout, we can use the __Canon variant to replace instantiations over the specified type + for (int iRecursiveGenericFieldHandlingPass = 0; handlingRecursiveGenericFieldScenario || iRecursiveGenericFieldHandlingPass == 0 ; iRecursiveGenericFieldHandlingPass++) + { + // Finally we gather up the type arguments themselves, loading at the level specified for generic arguments + for (unsigned i = 0; i < ntypars; i++) { - if (level == CLASS_LOAD_APPROXPARENTS) - { - SigPointer tempsig = psig; + ClassLoadLevel argLevel = level; + TypeHandle typeHnd = TypeHandle(); + BOOL argDrop = FALSE; - CorElementType elemType = ELEMENT_TYPE_END; - IfFailThrowBF(tempsig.GetElemType(&elemType), BFA_BAD_SIGNATURE, pOrigModule); - - if (elemType == (CorElementType) ELEMENT_TYPE_MODULE_ZAPSIG) - { - // Skip over the module index - IfFailThrowBF(tempsig.GetData(NULL), BFA_BAD_SIGNATURE, pModule); - // Read the next elemType - IfFailThrowBF(tempsig.GetElemType(&elemType), BFA_BAD_SIGNATURE, pModule); - } - - if (elemType == ELEMENT_TYPE_GENERICINST) + if (dropGenericArgumentLevel) + { + if (level == CLASS_LOAD_APPROXPARENTS) { - CorElementType tmpEType = ELEMENT_TYPE_END; - IfFailThrowBF(tempsig.PeekElemType(&tmpEType), BFA_BAD_SIGNATURE, pOrigModule); - - if (tmpEType == ELEMENT_TYPE_CLASS) + SigPointer tempsig = psig; + bool checkTokenForRecursion = false; + + CorElementType elemType = ELEMENT_TYPE_END; + IfFailThrowBF(tempsig.GetElemType(&elemType), BFA_BAD_SIGNATURE, pOrigModule); + + if (elemType == (CorElementType) ELEMENT_TYPE_MODULE_ZAPSIG) + { + // Skip over the module index + IfFailThrowBF(tempsig.GetData(NULL), BFA_BAD_SIGNATURE, pModule); + // Read the next elemType + IfFailThrowBF(tempsig.GetElemType(&elemType), BFA_BAD_SIGNATURE, pModule); + } + + if (elemType == ELEMENT_TYPE_GENERICINST) + { + CorElementType tmpEType = ELEMENT_TYPE_END; + IfFailThrowBF(tempsig.GetElemType(&tmpEType), BFA_BAD_SIGNATURE, pOrigModule); + + if (tmpEType == ELEMENT_TYPE_CLASS) + typeHnd = TypeHandle(g_pCanonMethodTableClass); + else if ((pRecursiveFieldGenericHandling != NULL) && (tmpEType == ELEMENT_TYPE_VALUETYPE)) + checkTokenForRecursion = true; + } + else if ((elemType == (CorElementType)ELEMENT_TYPE_CANON_ZAPSIG) || + (CorTypeInfo::GetGCType_NoThrow(elemType) == TYPE_GC_REF)) + { typeHnd = TypeHandle(g_pCanonMethodTableClass); + } + else if ((elemType == ELEMENT_TYPE_VALUETYPE) && (pRecursiveFieldGenericHandling != NULL)) + { + checkTokenForRecursion = true; + } + + if (checkTokenForRecursion) + { + mdToken valueTypeToken = mdTypeDefNil; + IfFailThrowBF(tempsig.GetToken(&valueTypeToken), BFA_BAD_SIGNATURE, pOrigModule); + if (valueTypeToken == pRecursiveFieldGenericHandling->tkTypeDefToAvoidIfPossible && pOrigModule == pRecursiveFieldGenericHandling->pModuleWithTokenToAvoidIfPossible) + { + bool exactSelfRecursionDetected = true; + + if (elemType == ELEMENT_TYPE_GENERICINST) + { + // 
Check to ensure that the type variables in use are for an exact self-referential generic. + // Other cases are possible, but this logic is scoped to exactly self-referential generics. + uint32_t instantiationCount; + IfFailThrowBF(tempsig.GetData(&instantiationCount), BFA_BAD_SIGNATURE, pModule); + for (uint32_t iInstantiation = 0; iInstantiation < instantiationCount; iInstantiation++) + { + IfFailThrowBF(tempsig.GetElemType(&elemType), BFA_BAD_SIGNATURE, pOrigModule); + if (elemType != ELEMENT_TYPE_VAR) + { + exactSelfRecursionDetected = false; + break; + } + + uint32_t varIndex; + IfFailThrowBF(tempsig.GetData(&varIndex), BFA_BAD_SIGNATURE, pModule); + if (varIndex != iInstantiation) + { + exactSelfRecursionDetected = false; + break; + } + } + } + if (exactSelfRecursionDetected) + { + handlingRecursiveGenericFieldScenario = true; + if (iRecursiveGenericFieldHandlingPass == 0) + { + typeHnd = TypeHandle(g_pCanonMethodTableClass); + } + else + { + typeHnd = TypeHandle(CoreLibBinder::GetClass(CLASS__BYTE)); + } + } + } + } + argDrop = TRUE; } - else if ((elemType == (CorElementType)ELEMENT_TYPE_CANON_ZAPSIG) || - (CorTypeInfo::GetGCType_NoThrow(elemType) == TYPE_GC_REF)) + else + // We need to make sure that typekey is always restored. Otherwise, we may run into unrestored typehandles while using + // the typekey for lookups. It is safe to not drop the levels for initial NGen-specific loading levels since there cannot + // be cycles in typekeys. + if (level > CLASS_LOAD_APPROXPARENTS) { - typeHnd = TypeHandle(g_pCanonMethodTableClass); + argLevel = (ClassLoadLevel) (level-1); } - - argDrop = TRUE; } - else - // We need to make sure that typekey is always restored. Otherwise, we may run into unrestored typehandles while using - // the typekey for lookups. It is safe to not drop the levels for initial NGen-specific loading levels since there cannot - // be cycles in typekeys. - if (level > CLASS_LOAD_APPROXPARENTS) + + if (typeHnd.IsNull()) { - argLevel = (ClassLoadLevel) (level-1); + typeHnd = psig.GetTypeHandleThrowing(pOrigModule, + pTypeContext, + fLoadTypes, + argLevel, + argDrop, + pSubst, + pZapSigContext, + NULL, + pRecursiveFieldGenericHandling); + if (typeHnd.IsNull()) + { + // Indicate failure by setting thisinst to NULL + thisinst = NULL; + break; + } + + if (dropGenericArgumentLevel && level == CLASS_LOAD_APPROXPARENTS) + { + typeHnd = ClassLoader::CanonicalizeGenericArg(typeHnd); + } } + thisinst[i] = typeHnd; + IfFailThrowBF(psig.SkipExactlyOne(), BFA_BAD_SIGNATURE, pOrigModule); } - if (typeHnd.IsNull()) + // If we failed to get all of the instantiation type arguments then we return the null type handle + if (thisinst == NULL) { - typeHnd = psig.GetTypeHandleThrowing(pOrigModule, - pTypeContext, - fLoadTypes, - argLevel, - argDrop, - pSubst, - pZapSigContext); - if (typeHnd.IsNull()) + thRet = TypeHandle(); + break; + } + + Instantiation genericLoadInst(thisinst, ntypars); + + if (pMTInterfaceMapOwner != NULL && genericLoadInst.ContainsAllOneType(pMTInterfaceMapOwner)) + { + thRet = ClassLoader::LoadTypeDefThrowing(pGenericTypeModule, tkGenericType, ClassLoader::ThrowIfNotFound, ClassLoader::PermitUninstDefOrRef, 0, level); + } + else + { + // Group together the current signature type context and substitution chain, which + // we may later use to instantiate constraints of type arguments that turn out to be + // typespecs, i.e. generic types. 
+                InstantiationContext instContext(pTypeContext, pSubst);
+
+                // Now make the instantiated type
+                // The class loader will check the arity
+                // When we know it was correctly computed at NGen time, we ask the class loader to skip that check.
+                TypeHandle thFound = (ClassLoader::LoadGenericInstantiationThrowing(pGenericTypeModule,
+                                                                    tkGenericType,
+                                                                    genericLoadInst,
+                                                                    fLoadTypes, level,
+                                                                    &instContext,
+                                                                    pZapSigContext && pZapSigContext->externalTokens == ZapSig::NormalTokens));
+
+                if (!handlingRecursiveGenericFieldScenario)
                 {
-                    // Indicate failure by setting thisinst to NULL
-                    thisinst = NULL;
+                    thRet = thFound;
                     break;
                 }
-
-                if (dropGenericArgumentLevel && level == CLASS_LOAD_APPROXPARENTS)
+                else
                 {
-                    typeHnd = ClassLoader::CanonicalizeGenericArg(typeHnd);
-                }
-            }
-            thisinst[i] = typeHnd;
-            IfFailThrowBF(psig.SkipExactlyOne(), BFA_BAD_SIGNATURE, pOrigModule);
-        }
+                    if (iRecursiveGenericFieldHandlingPass == 0)
+                    {
+                        // This is the instantiation over __Canon. If we find that the recursion does not affect type layout, we will return this type.
+                        thRet = thFound;
+                        // Restart with the same sig as we had for the first pass
+                        psig = psigCopy;

-        // If we failed to get all of the instantiation type arguments then we return the null type handle
-        if (thisinst == NULL)
-        {
-            thRet = TypeHandle();
-            break;
-        }
+                    }
+                    else
+                    {
+                        // At this point thFound is the instantiation over Byte and thRet is set to the instantiation over __Canon.
+                        // If the two have the same GC layout, then the field layout is not affected by the type parameters, and the type load can continue
+                        // with just using the __Canon variant.
+                        // To simplify the calculation, all we really need to compute is the number of GC pointers in the representation and the Base size.
+                        // If the type parameter is used in field layout, there will be at least 1 more pointer in the __Canon instantiation as compared to the Byte instantiation.
+
+                        SIZE_T objectSizeCanonInstantiation = thRet.AsMethodTable()->GetBaseSize();
+                        SIZE_T objectSizeByteInstantiation = thFound.AsMethodTable()->GetBaseSize();
+
+                        bool failedLayoutCompare = objectSizeCanonInstantiation != objectSizeByteInstantiation;
+                        if (!failedLayoutCompare)
+                        {
+#ifndef DACCESS_COMPILE
+                            failedLayoutCompare = CGCDesc::GetNumPointers(thRet.AsMethodTable(), objectSizeCanonInstantiation, 0) !=
+                                                  CGCDesc::GetNumPointers(thFound.AsMethodTable(), objectSizeCanonInstantiation, 0);
+#else
+                            DacNotImpl();
+#endif
+                        }

-        Instantiation genericLoadInst(thisinst, ntypars);
+                        if (failedLayoutCompare)
+                        {
+#ifndef DACCESS_COMPILE
+                            static_cast<Module*>(pOrigModule)->ThrowTypeLoadException(pOrigModule->GetMDImport(), pRecursiveFieldGenericHandling->tkTypeDefToAvoidIfPossible, IDS_INVALID_RECURSIVE_GENERIC_FIELD_LOAD);
+#else
+                            DacNotImpl();
+#endif
+                        }

-        if (pMTInterfaceMapOwner != NULL && genericLoadInst.ContainsAllOneType(pMTInterfaceMapOwner))
-        {
-            thRet = ClassLoader::LoadTypeDefThrowing(pGenericTypeModule, tkGenericType, ClassLoader::ThrowIfNotFound, ClassLoader::PermitUninstDefOrRef, 0, level);
-        }
-        else
-        {
-            // Group together the current signature type context and substitution chain, which
-            // we may later use to instantiate constraints of type arguments that turn out to be
-            // typespecs, i.e. generic types.
-            InstantiationContext instContext(pTypeContext, pSubst);
-
-            // Now make the instantiated type
-            // The class loader will check the arity
-            // When we know it was correctly computed at NGen time, we ask the class loader to skip that check.
- thRet = (ClassLoader::LoadGenericInstantiationThrowing(pGenericTypeModule, - tkGenericType, - genericLoadInst, - fLoadTypes, level, - &instContext, - pZapSigContext && pZapSigContext->externalTokens == ZapSig::NormalTokens)); + // Runtime successfully found a type with the desired layout, return + break; + } + } + } } break; } diff --git a/src/coreclr/vm/siginfo.hpp b/src/coreclr/vm/siginfo.hpp index 585fdb2071ef19b0217effb4e74f34e3d7dfa081..13f3fb4de9a729d19bea447f6ca24039ed0919a3 100644 --- a/src/coreclr/vm/siginfo.hpp +++ b/src/coreclr/vm/siginfo.hpp @@ -213,6 +213,12 @@ public: TypeHandle GetTypeHandleNT(Module* pModule, const SigTypeContext *pTypeContext) const; + struct HandleRecursiveGenericsForFieldLayoutLoad + { + Module* pModuleWithTokenToAvoidIfPossible; + mdToken tkTypeDefToAvoidIfPossible; + }; + // pTypeContext indicates how to instantiate any generic type parameters we come // However, first we implicitly apply the substitution pSubst to the metadata if pSubst is supplied. // That is, if the metadata contains a type variable "!0" then we first look up @@ -240,7 +246,9 @@ public: BOOL dropGenericArgumentLevel = FALSE, const Substitution *pSubst = NULL, const ZapSig::Context *pZapSigContext = NULL, - MethodTable *pMTInterfaceMapOwner = NULL) const; + MethodTable *pMTInterfaceMapOwner = NULL, + HandleRecursiveGenericsForFieldLayoutLoad *pRecursiveFieldGenericHandling = NULL + ) const; public: //------------------------------------------------------------------------ diff --git a/src/tests/Loader/classloader/generics/Layout/Specific/SelfRecursiveGenerics.cs b/src/tests/Loader/classloader/generics/Layout/Specific/SelfRecursiveGenerics.cs new file mode 100644 index 0000000000000000000000000000000000000000..c010b02ed181489cdbab1b5b3425e8c7c2729b91 --- /dev/null +++ b/src/tests/Loader/classloader/generics/Layout/Specific/SelfRecursiveGenerics.cs @@ -0,0 +1,118 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ +using System; +using System.Runtime.InteropServices; +using System.Runtime.CompilerServices; + +class SelfRecursiveGenerics +{ + + [MethodImpl(MethodImplOptions.NoInlining)] + static void WillFailOnCoreCLRDueToLimitationsInTypeLoader() + { + Console.WriteLine(new SelfReferentialGenericStructWithNoFieldsAutoNonLoadable()); + } + static int Main() + { + Console.WriteLine(new SelfReferentialStructWithNoFieldsAuto()); + Console.WriteLine(new SelfReferentialStructWithNoFieldsSequential()); + Console.WriteLine(new SelfReferentialStructWithStringFieldSequential()); + Console.WriteLine(new SelfReferentialStructWithExplicitLayout()); + + Console.WriteLine(new SelfReferentialGenericStructWithNoFieldsAuto()); + Console.WriteLine(new SelfReferentialGenericStructWithNoFieldsSequential()); + Console.WriteLine(new SelfReferentialGenericStructWithStringFieldSequential()); + + Console.WriteLine(new SelfReferentialGenericStructWithNoFieldsAuto()); + Console.WriteLine(new SelfReferentialGenericStructWithNoFieldsSequential()); + Console.WriteLine(new SelfReferentialGenericStructWithStringFieldSequential()); + + Console.WriteLine(typeof(MyNodeAuto).FullName); + Console.WriteLine(typeof(MyNodeSequential).FullName); + + try + { + WillFailOnCoreCLRDueToLimitationsInTypeLoader(); + } + catch (TypeLoadException tle) + { + Console.WriteLine("Hit TLE" + tle.ToString()); + } + + return 100; + } + + public class Container { + public struct Nested { } + } + + [StructLayout(LayoutKind.Auto)] + public struct SelfReferentialStructWithNoFieldsAuto { + public Container.Nested Nested; + } + [StructLayout(LayoutKind.Sequential)] + public struct SelfReferentialStructWithNoFieldsSequential { + public Container.Nested Nested; + } + [StructLayout(LayoutKind.Sequential)] + public struct SelfReferentialStructWithStringFieldSequential { + public Container.Nested Nested; + public string String; + } + + [StructLayout(LayoutKind.Explicit)] + public struct SelfReferentialStructWithExplicitLayout { + [FieldOffset(1)] + public Container.Nested Nested; + [FieldOffset(0)] + public int Fld1; + [FieldOffset(4)] + public int Fld2; + } + + [StructLayout(LayoutKind.Auto)] + public struct SelfReferentialGenericStructWithNoFieldsAutoNonLoadable { + public Container>.Nested Nested; + } + + [StructLayout(LayoutKind.Auto)] + public struct SelfReferentialGenericStructWithNoFieldsAuto { + public Container>.Nested Nested; + } + [StructLayout(LayoutKind.Sequential)] + public struct SelfReferentialGenericStructWithNoFieldsSequential { + public Container>.Nested Nested; + } + [StructLayout(LayoutKind.Sequential)] + public struct SelfReferentialGenericStructWithStringFieldSequential { + public Container>.Nested Nested; + public string String; + } + + + /// + /// List of T expressed as a value type + /// + public struct ValueList + { + private T[] _arr; + private int _count; + } + + [StructLayout(LayoutKind.Auto)] + public struct MyNodeAuto + { + public int NodeData; + + public ValueList Nodes; + } + + [StructLayout(LayoutKind.Sequential)] + public struct MyNodeSequential + { + public int NodeData; + + public ValueList Nodes; + } +} diff --git a/src/tests/Loader/classloader/generics/Layout/Specific/SelfRecursiveGenerics.csproj b/src/tests/Loader/classloader/generics/Layout/Specific/SelfRecursiveGenerics.csproj new file mode 100644 index 0000000000000000000000000000000000000000..17cd91c32ddab955551aa7df18a9d4e2f8fb047f --- /dev/null +++ b/src/tests/Loader/classloader/generics/Layout/Specific/SelfRecursiveGenerics.csproj @@ -0,0 +1,10 @@ + + + true + Exe + 0 
+  </PropertyGroup>
+  <ItemGroup>
+    <Compile Include="$(MSBuildProjectName).cs" />
+  </ItemGroup>
+</Project>
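
Reviewer note (illustrative, not part of the patch): the pattern this change makes loadable is a value type whose field is a generic value type instantiated over the enclosing type itself, provided the field's layout does not actually depend on that self-reference. The C# sketch below mirrors the shapes used in the new test (ValueList, MyNodeAuto/MyNodeSequential, NodeData), but it is a minimal reconstruction; the exact type names and generic arguments here are assumptions, not copied from the test file.

    // Minimal sketch of the now-loadable self-referential layout pattern.
    using System;

    // A list stored as a value type. Its layout is independent of T:
    // T only appears behind an array reference, so trial instantiations
    // over __Canon and over Byte produce the same GC layout.
    public struct ValueList<T>
    {
        private T[] _arr;
        private int _count;
    }

    public struct MyNode
    {
        public int NodeData;

        // Self-referential field: laying out MyNode requires ValueList<MyNode>,
        // which refers back to MyNode. The loader now substitutes a placeholder
        // for the self-reference, verifies that the __Canon and Byte trial
        // instantiations agree on base size and GC pointer count, and continues.
        public ValueList<MyNode> Nodes;
    }

    public static class Program
    {
        public static void Main()
        {
            // Forces MyNode (and therefore ValueList<MyNode>) to be loaded.
            Console.WriteLine(new MyNode());
            Console.WriteLine(typeof(ValueList<MyNode>));
        }
    }

If the two trial instantiations disagree on layout (that is, the self-referential type parameter feeds into the field layout), the load now fails up front with a TypeLoadException carrying the new IDS_INVALID_RECURSIVE_GENERIC_FIELD_LOAD message instead of recursing inside the type loader.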