https://github.com/therealcoochieman created https://github.com/llvm/llvm-project/pull/130182
Aims to fix #61702.

During my testing I ran into an interesting behavior: the Itanium record layout builder can emit -Wpadded-bitfield warnings with the wrong amount of padding when it mimics the Microsoft struct layout. For the following struct:

```cpp
struct __attribute__((ms_struct)) Foo {
  long long x;
  char y : 1;
  long long yy : 1;
};
```

my understanding is that `yy` should be reported as padded by 63 bits, but the warning says there are 7 bytes of padding. Which one is correct? I am not sure this question is within the scope of this PR, but I am still shooting my shot here; I can always open a separate issue if needed. (A standalone reproduction sketch is appended at the very end of this message, after the patches.)

>From 3a9bc908a791669c82f288ff745664411bf285ac Mon Sep 17 00:00:00 2001
From: Theo de Magalhaes <theodemagalh...@icloud.com>
Date: Thu, 6 Mar 2025 22:26:37 +0100
Subject: [PATCH 1/2] chore(clang-format): formatted clang/lib/AST/RecordLayoutBuilder.cpp

---
 clang/lib/AST/RecordLayoutBuilder.cpp | 199 ++++++++++++--------------
 1 file changed, 94 insertions(+), 105 deletions(-)

diff --git a/clang/lib/AST/RecordLayoutBuilder.cpp b/clang/lib/AST/RecordLayoutBuilder.cpp index b8600e6a344a4..695771f67868d 100644 --- a/clang/lib/AST/RecordLayoutBuilder.cpp +++ b/clang/lib/AST/RecordLayoutBuilder.cpp @@ -43,7 +43,7 @@ struct BaseSubobjectInfo { bool IsVirtual; /// Bases - Information about the base subobjects. - SmallVector<BaseSubobjectInfo*, 4> Bases; + SmallVector<BaseSubobjectInfo *, 4> Bases; /// PrimaryVirtualBaseInfo - Holds the base info for the primary virtual base /// of this base info (if one exists). @@ -77,8 +77,7 @@ struct ExternalLayout { /// Get the offset of the given field. The external source must provide /// entries for all fields in the record. uint64_t getExternalFieldOffset(const FieldDecl *FD) { - assert(FieldOffsets.count(FD) && - "Field does not have an external offset"); + assert(FieldOffsets.count(FD) && "Field does not have an external offset"); return FieldOffsets[FD]; } @@ -167,16 +166,15 @@ class EmptySubobjectMap { CharUnits SizeOfLargestEmptySubobject; EmptySubobjectMap(const ASTContext &Context, const CXXRecordDecl *Class) - : Context(Context), CharWidth(Context.getCharWidth()), Class(Class) { - ComputeEmptySubobjectSizes(); + : Context(Context), CharWidth(Context.getCharWidth()), Class(Class) { + ComputeEmptySubobjectSizes(); } /// CanPlaceBaseAtOffset - Return whether the given base class can be placed /// at the given offset. /// Returns false if placing the record will result in two components /// (direct or indirect) of the same type having the same offset. - bool CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info, - CharUnits Offset); + bool CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info, CharUnits Offset); /// CanPlaceFieldAtOffset - Return whether a field can be placed at the given /// offset. @@ -227,9 +225,8 @@ void EmptySubobjectMap::ComputeEmptySubobjectSizes() { } } -bool -EmptySubobjectMap::CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD, - CharUnits Offset) const { +bool EmptySubobjectMap::CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD, + CharUnits Offset) const { // We only need to check empty bases.
if (!RD->isEmpty()) return true; @@ -265,9 +262,8 @@ void EmptySubobjectMap::AddSubobjectAtOffset(const CXXRecordDecl *RD, MaxEmptyClassOffset = Offset; } -bool -EmptySubobjectMap::CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info, - CharUnits Offset) { +bool EmptySubobjectMap::CanPlaceBaseSubobjectAtOffset( + const BaseSubobjectInfo *Info, CharUnits Offset) { // We don't have to keep looking past the maximum offset that's known to // contain an empty class. if (!AnyEmptySubobjectsBeyondOffset(Offset)) @@ -368,10 +364,9 @@ bool EmptySubobjectMap::CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info, return true; } -bool -EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD, - const CXXRecordDecl *Class, - CharUnits Offset) const { +bool EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset( + const CXXRecordDecl *RD, const CXXRecordDecl *Class, + CharUnits Offset) const { // We don't have to keep looking past the maximum offset that's known to // contain an empty class. if (!AnyEmptySubobjectsBeyondOffset(Offset)) @@ -418,9 +413,8 @@ EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD, return true; } -bool -EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD, - CharUnits Offset) const { +bool EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD, + CharUnits Offset) const { // We don't have to keep looking past the maximum offset that's known to // contain an empty class. if (!AnyEmptySubobjectsBeyondOffset(Offset)) @@ -560,7 +554,7 @@ void EmptySubobjectMap::UpdateEmptyFieldSubobjects( } } -typedef llvm::SmallPtrSet<const CXXRecordDecl*, 4> ClassSetTy; +typedef llvm::SmallPtrSet<const CXXRecordDecl *, 4> ClassSetTy; class ItaniumRecordLayoutBuilder { protected: @@ -712,15 +706,13 @@ class ItaniumRecordLayoutBuilder { bool FieldPacked, const FieldDecl *D); void LayoutBitField(const FieldDecl *D); - TargetCXXABI getCXXABI() const { - return Context.getTargetInfo().getCXXABI(); - } + TargetCXXABI getCXXABI() const { return Context.getTargetInfo().getCXXABI(); } /// BaseSubobjectInfoAllocator - Allocator for BaseSubobjectInfo objects. llvm::SpecificBumpPtrAllocator<BaseSubobjectInfo> BaseSubobjectInfoAllocator; typedef llvm::DenseMap<const CXXRecordDecl *, BaseSubobjectInfo *> - BaseSubobjectInfoMapTy; + BaseSubobjectInfoMapTy; /// VirtualBaseInfo - Map from all the (direct or indirect) virtual bases /// of the class we're laying out to their base subobject info. @@ -793,8 +785,8 @@ class ItaniumRecordLayoutBuilder { uint64_t ComputedOffset); void CheckFieldPadding(uint64_t Offset, uint64_t UnpaddedOffset, - uint64_t UnpackedOffset, unsigned UnpackedAlign, - bool isPacked, const FieldDecl *D); + uint64_t UnpackedOffset, unsigned UnpackedAlign, + bool isPacked, const FieldDecl *D); DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID); @@ -965,8 +957,7 @@ BaseSubobjectInfo *ItaniumRecordLayoutBuilder::ComputeBaseSubobjectInfo( // Traversing the bases must have created the base info for our primary // virtual base. PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase); - assert(PrimaryVirtualBaseInfo && - "Did not create a primary virtual base!"); + assert(PrimaryVirtualBaseInfo && "Did not create a primary virtual base!"); // Claim the primary virtual base as our primary virtual base. 
Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo; @@ -984,13 +975,12 @@ void ItaniumRecordLayoutBuilder::ComputeBaseSubobjectInfo( const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl(); // Compute the base subobject info for this base. - BaseSubobjectInfo *Info = ComputeBaseSubobjectInfo(BaseDecl, IsVirtual, - nullptr); + BaseSubobjectInfo *Info = + ComputeBaseSubobjectInfo(BaseDecl, IsVirtual, nullptr); if (IsVirtual) { // ComputeBaseInfo has already added this base for us. - assert(VirtualBaseInfo.count(BaseDecl) && - "Did not add virtual base!"); + assert(VirtualBaseInfo.count(BaseDecl) && "Did not add virtual base!"); } else { // Add the base info to the map of non-virtual bases. assert(!NonVirtualBaseInfo.count(BaseDecl) && @@ -1043,15 +1033,15 @@ void ItaniumRecordLayoutBuilder::LayoutNonVirtualBases( LayoutVirtualBase(PrimaryBaseInfo); } else { BaseSubobjectInfo *PrimaryBaseInfo = - NonVirtualBaseInfo.lookup(PrimaryBase); + NonVirtualBaseInfo.lookup(PrimaryBase); assert(PrimaryBaseInfo && "Did not find base info for non-virtual primary base!"); LayoutNonVirtualBase(PrimaryBaseInfo); } - // If this class needs a vtable/vf-table and didn't get one from a - // primary base, add it in now. + // If this class needs a vtable/vf-table and didn't get one from a + // primary base, add it in now. } else if (RD->isDynamicClass()) { assert(DataSize == 0 && "Vtable pointer must be at offset zero!"); CharUnits PtrWidth = Context.toCharUnitsFromBits( @@ -1191,8 +1181,8 @@ void ItaniumRecordLayoutBuilder::LayoutVirtualBase( // Add its base class offset. assert(!VBases.count(Base->Class) && "vbase offset already exists!"); - VBases.insert(std::make_pair(Base->Class, - ASTRecordLayout::VBaseInfo(Offset, false))); + VBases.insert( + std::make_pair(Base->Class, ASTRecordLayout::VBaseInfo(Offset, false))); AddPrimaryVirtualBaseOffsets(Base, Offset); } @@ -1450,9 +1440,8 @@ void ItaniumRecordLayoutBuilder::LayoutFields(const RecordDecl *D) { } // Rounds the specified size to have it a multiple of the char size. -static uint64_t -roundUpSizeToCharAlignment(uint64_t Size, - const ASTContext &Context) { +static uint64_t roundUpSizeToCharAlignment(uint64_t Size, + const ASTContext &Context) { uint64_t CharAlignment = Context.getTargetInfo().getCharAlign(); return llvm::alignTo(Size, CharAlignment); } @@ -1497,8 +1486,7 @@ void ItaniumRecordLayoutBuilder::LayoutWideBitField(uint64_t FieldSize, uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit; if (IsUnion) { - uint64_t RoundedFieldSize = roundUpSizeToCharAlignment(FieldSize, - Context); + uint64_t RoundedFieldSize = roundUpSizeToCharAlignment(FieldSize, Context); setDataSize(std::max(getDataSizeInBits(), RoundedFieldSize)); FieldOffset = 0; } else { @@ -1648,7 +1636,7 @@ void ItaniumRecordLayoutBuilder::LayoutBitField(const FieldDecl *D) { // Compute the next available bit offset. uint64_t FieldOffset = - IsUnion ? 0 : (getDataSizeInBits() - UnfilledBitsInLastUnit); + IsUnion ? 0 : (getDataSizeInBits() - UnfilledBitsInLastUnit); // Handle targets that don't honor bitfield type alignment. if (!IsMsStruct && !Context.getTargetInfo().useBitFieldTypeAlignment()) { @@ -1666,7 +1654,7 @@ void ItaniumRecordLayoutBuilder::LayoutBitField(const FieldDecl *D) { Context.getTargetInfo().getZeroLengthBitfieldBoundary(); FieldAlign = std::max(FieldAlign, ZeroLengthBitfieldBoundary); } - // If that doesn't apply, just ignore the field alignment. + // If that doesn't apply, just ignore the field alignment. 
} else { FieldAlign = 1; } @@ -1810,8 +1798,8 @@ void ItaniumRecordLayoutBuilder::LayoutBitField(const FieldDecl *D) { } setDataSize(std::max(getDataSizeInBits(), RoundedFieldSize)); - // For non-zero-width bitfields in ms_struct structs, allocate a new - // storage unit if necessary. + // For non-zero-width bitfields in ms_struct structs, allocate a new + // storage unit if necessary. } else if (IsMsStruct && FieldSize) { // We should have cleared UnfilledBitsInLastUnit in every case // where we changed storage units. @@ -1960,13 +1948,13 @@ void ItaniumRecordLayoutBuilder::LayoutField(const FieldDecl *D, } } - bool FieldPacked = (Packed && (!FieldClass || FieldClass->isPOD() || - FieldClass->hasAttr<PackedAttr>() || - Context.getLangOpts().getClangABICompat() <= - LangOptions::ClangABI::Ver15 || - Target.isPS() || Target.isOSDarwin() || - Target.isOSAIX())) || - D->hasAttr<PackedAttr>(); + bool FieldPacked = + (Packed && (!FieldClass || FieldClass->isPOD() || + FieldClass->hasAttr<PackedAttr>() || + Context.getLangOpts().getClangABICompat() <= + LangOptions::ClangABI::Ver15 || + Target.isPS() || Target.isOSDarwin() || Target.isOSAIX())) || + D->hasAttr<PackedAttr>(); // When used as part of a typedef, or together with a 'packed' attribute, the // 'aligned' attribute can be used to decrease alignment. In that case, it @@ -2036,7 +2024,6 @@ void ItaniumRecordLayoutBuilder::LayoutField(const FieldDecl *D, UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignment); } - if (!FieldPacked) FieldAlign = UnpackedFieldAlign; if (DefaultsToAIXPowerAlignment) @@ -2142,8 +2129,7 @@ void ItaniumRecordLayoutBuilder::FinishLayout(const NamedDecl *D) { // array of zero-length, remains of Size 0 if (RD->isEmpty()) setSize(CharUnits::One()); - } - else + } else setSize(CharUnits::One()); } @@ -2190,8 +2176,7 @@ void ItaniumRecordLayoutBuilder::FinishLayout(const NamedDecl *D) { InBits = false; } Diag(RD->getLocation(), diag::warn_padded_struct_size) - << Context.getTypeDeclType(RD) - << PadSize + << Context.getTypeDeclType(RD) << PadSize << (InBits ? 1 : 0); // (byte|bit) } @@ -2270,7 +2255,8 @@ static unsigned getPaddingDiagFromTagKind(TagTypeKind Tag) { return 1; case TagTypeKind::Class: return 2; - default: llvm_unreachable("Invalid tag kind for field padding diagnostic!"); + default: + llvm_unreachable("Invalid tag kind for field padding diagnostic!"); } } @@ -2313,10 +2299,10 @@ void ItaniumRecordLayoutBuilder::CheckFieldPadding( << Context.getTypeDeclType(D->getParent()) << PadSize << (InBits ? 1 : 0); // (byte|bit) } - } - if (isPacked && Offset != UnpackedOffset) { - HasPackedField = true; - } + } + if (isPacked && Offset != UnpackedOffset) { + HasPackedField = true; + } } static const CXXMethodDecl *computeKeyFunction(ASTContext &Context, @@ -2340,7 +2326,7 @@ static const CXXMethodDecl *computeKeyFunction(ASTContext &Context, return nullptr; bool allowInlineFunctions = - Context.getTargetInfo().getCXXABI().canKeyFunctionBeInline(); + Context.getTargetInfo().getCXXABI().canKeyFunctionBeInline(); for (const CXXMethodDecl *MD : RD->methods()) { if (!MD->isVirtual()) @@ -2560,6 +2546,7 @@ struct MicrosoftRecordLayoutBuilder { private: MicrosoftRecordLayoutBuilder(const MicrosoftRecordLayoutBuilder &) = delete; void operator=(const MicrosoftRecordLayoutBuilder &) = delete; + public: void layout(const RecordDecl *RD); void cxxLayout(const CXXRecordDecl *RD); @@ -2684,15 +2671,15 @@ MicrosoftRecordLayoutBuilder::getAdjustedElementInfo( // the alignment in the case of pragma pack. 
Note that the required alignment // doesn't actually apply to the struct alignment at this point. Alignment = std::max(Alignment, Info.Alignment); - RequiredAlignment = std::max(RequiredAlignment, Layout.getRequiredAlignment()); + RequiredAlignment = + std::max(RequiredAlignment, Layout.getRequiredAlignment()); Info.Alignment = std::max(Info.Alignment, Layout.getRequiredAlignment()); Info.Size = Layout.getNonVirtualSize(); return Info; } MicrosoftRecordLayoutBuilder::ElementInfo -MicrosoftRecordLayoutBuilder::getAdjustedElementInfo( - const FieldDecl *FD) { +MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(const FieldDecl *FD) { // Get the alignment of the field type's natural alignment, ignore any // alignment attributes. auto TInfo = @@ -2715,8 +2702,8 @@ MicrosoftRecordLayoutBuilder::getAdjustedElementInfo( FD->getType()->getBaseElementTypeUnsafe()->getAs<RecordType>()) { auto const &Layout = Context.getASTRecordLayout(RT->getDecl()); EndsWithZeroSizedObject = Layout.endsWithZeroSizedObject(); - FieldRequiredAlignment = std::max(FieldRequiredAlignment, - Layout.getRequiredAlignment()); + FieldRequiredAlignment = + std::max(FieldRequiredAlignment, Layout.getRequiredAlignment()); } // Capture required alignment as a side-effect. RequiredAlignment = std::max(RequiredAlignment, FieldRequiredAlignment); @@ -2778,10 +2765,11 @@ void MicrosoftRecordLayoutBuilder::initializeLayout(const RecordDecl *RD) { MaxFieldAlignment = CharUnits::Zero(); // Honor the default struct packing maximum alignment flag. if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct) - MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment); + MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment); // Honor the packing attribute. The MS-ABI ignores pragma pack if its larger // than the pointer size. - if (const MaxFieldAlignmentAttr *MFAA = RD->getAttr<MaxFieldAlignmentAttr>()){ + if (const MaxFieldAlignmentAttr *MFAA = + RD->getAttr<MaxFieldAlignmentAttr>()) { unsigned PackedAlignment = MFAA->getAlignment(); if (PackedAlignment <= Context.getTargetInfo().getPointerWidth(LangAS::Default)) @@ -2799,8 +2787,8 @@ void MicrosoftRecordLayoutBuilder::initializeLayout(const RecordDecl *RD) { External.BaseOffsets, External.VirtualBaseOffsets); } -void -MicrosoftRecordLayoutBuilder::initializeCXXLayout(const CXXRecordDecl *RD) { +void MicrosoftRecordLayoutBuilder::initializeCXXLayout( + const CXXRecordDecl *RD) { EndsWithZeroSizedObject = false; LeadsWithZeroSizedBase = false; HasOwnVFPtr = false; @@ -2818,8 +2806,8 @@ MicrosoftRecordLayoutBuilder::initializeCXXLayout(const CXXRecordDecl *RD) { PointerInfo.Alignment = std::min(PointerInfo.Alignment, MaxFieldAlignment); } -void -MicrosoftRecordLayoutBuilder::layoutNonVirtualBases(const CXXRecordDecl *RD) { +void MicrosoftRecordLayoutBuilder::layoutNonVirtualBases( + const CXXRecordDecl *RD) { // The MS-ABI lays out all bases that contain leading vfptrs before it lays // out any bases that do not contain vfptrs. We implement this as two passes // over the bases. This approach guarantees that the primary base is laid out @@ -3057,8 +3045,8 @@ void MicrosoftRecordLayoutBuilder::layoutBitField(const FieldDecl *FD) { DataSize = Size; } -void -MicrosoftRecordLayoutBuilder::layoutZeroWidthBitField(const FieldDecl *FD) { +void MicrosoftRecordLayoutBuilder::layoutZeroWidthBitField( + const FieldDecl *FD) { // Zero-width bitfields are ignored unless they follow a non-zero-width // bitfield. 
if (!LastFieldIsNonZeroWidthBitfield) { @@ -3195,8 +3183,8 @@ void MicrosoftRecordLayoutBuilder::layoutVirtualBases(const CXXRecordDecl *RD) { assert(BaseOffset >= Size && "base offset already allocated"); - VBases.insert(std::make_pair(BaseDecl, - ASTRecordLayout::VBaseInfo(BaseOffset, HasVtordisp))); + VBases.insert(std::make_pair( + BaseDecl, ASTRecordLayout::VBaseInfo(BaseOffset, HasVtordisp))); Size = BaseOffset + BaseLayout.getNonVirtualSize(); PreviousBaseLayout = &BaseLayout; } @@ -3236,10 +3224,9 @@ void MicrosoftRecordLayoutBuilder::finalizeLayout(const RecordDecl *RD) { // Recursively walks the non-virtual bases of a class and determines if any of // them are in the bases with overridden methods set. -static bool -RequiresVtordisp(const llvm::SmallPtrSetImpl<const CXXRecordDecl *> & - BasesWithOverriddenMethods, - const CXXRecordDecl *RD) { +static bool RequiresVtordisp(const llvm::SmallPtrSetImpl<const CXXRecordDecl *> + &BasesWithOverriddenMethods, + const CXXRecordDecl *RD) { if (BasesWithOverriddenMethods.count(RD)) return true; // If any of a virtual bases non-virtual bases (recursively) requires a @@ -3329,7 +3316,7 @@ ASTContext::getASTRecordLayout(const RecordDecl *D) const { // until we *finish* parsing the definition. if (D->hasExternalLexicalStorage() && !D->getDefinition()) - getExternalSource()->CompleteType(const_cast<RecordDecl*>(D)); + getExternalSource()->CompleteType(const_cast<RecordDecl *>(D)); // Complete the redecl chain (if necessary). (void)D->getMostRecentDecl(); @@ -3342,7 +3329,8 @@ ASTContext::getASTRecordLayout(const RecordDecl *D) const { // Note that we can't save a reference to the entry because this function // is recursive. const ASTRecordLayout *Entry = ASTRecordLayouts[D]; - if (Entry) return *Entry; + if (Entry) + return *Entry; const ASTRecordLayout *NewEntry = nullptr; @@ -3418,7 +3406,8 @@ ASTContext::getASTRecordLayout(const RecordDecl *D) const { return *NewEntry; } -const CXXMethodDecl *ASTContext::getCurrentKeyFunction(const CXXRecordDecl *RD) { +const CXXMethodDecl * +ASTContext::getCurrentKeyFunction(const CXXRecordDecl *RD) { if (!getTargetInfo().getCXXABI().hasKeyFunctions()) return nullptr; @@ -3436,7 +3425,7 @@ const CXXMethodDecl *ASTContext::getCurrentKeyFunction(const CXXRecordDecl *RD) // Store it back if it changed. if (Entry.isOffset() || Entry.isValid() != bool(Result)) - KeyFunctions[RD] = const_cast<Decl*>(Result); + KeyFunctions[RD] = const_cast<Decl *>(Result); return cast_or_null<CXXMethodDecl>(Result); } @@ -3452,7 +3441,8 @@ void ASTContext::setNonKeyFunction(const CXXMethodDecl *Method) { auto I = Map.find(Method->getParent()); // If it's not cached, there's nothing to do. - if (I == Map.end()) return; + if (I == Map.end()) + return; // If it is cached, check whether it's the target method, and if so, // remove it from the cache. Note, the call to 'get' might invalidate @@ -3497,8 +3487,8 @@ uint64_t ASTContext::lookupFieldBitOffset(const ObjCInterfaceDecl *OID, // directly. 
unsigned Index = 0; - for (const ObjCIvarDecl *IVD = Container->all_declared_ivar_begin(); - IVD; IVD = IVD->getNextIvar()) { + for (const ObjCIvarDecl *IVD = Container->all_declared_ivar_begin(); IVD; + IVD = IVD->getNextIvar()) { if (Ivar == IVD) break; ++Index; @@ -3517,7 +3507,7 @@ const ASTRecordLayout & ASTContext::getObjCLayout(const ObjCInterfaceDecl *D) const { // Retrieve the definition if (D->hasExternalLexicalStorage() && !D->getDefinition()) - getExternalSource()->CompleteType(const_cast<ObjCInterfaceDecl*>(D)); + getExternalSource()->CompleteType(const_cast<ObjCInterfaceDecl *>(D)); D = D->getDefinition(); assert(D && !D->isInvalidDecl() && D->isThisDeclarationADefinition() && "Invalid interface decl!"); @@ -3540,8 +3530,8 @@ ASTContext::getObjCLayout(const ObjCInterfaceDecl *D) const { return *NewEntry; } -static void PrintOffset(raw_ostream &OS, - CharUnits Offset, unsigned IndentLevel) { +static void PrintOffset(raw_ostream &OS, CharUnits Offset, + unsigned IndentLevel) { OS << llvm::format("%10" PRId64 " | ", (int64_t)Offset.getQuantity()); OS.indent(IndentLevel * 2); } @@ -3570,12 +3560,9 @@ static void PrintIndentNoOffset(raw_ostream &OS, unsigned IndentLevel) { } static void DumpRecordLayout(raw_ostream &OS, const RecordDecl *RD, - const ASTContext &C, - CharUnits Offset, - unsigned IndentLevel, - const char* Description, - bool PrintSizeInfo, - bool IncludeVirtualBases) { + const ASTContext &C, CharUnits Offset, + unsigned IndentLevel, const char *Description, + bool PrintSizeInfo, bool IncludeVirtualBases) { const ASTRecordLayout &Layout = C.getASTRecordLayout(RD); auto CXXRD = dyn_cast<CXXRecordDecl>(RD); @@ -3641,7 +3628,7 @@ static void DumpRecordLayout(raw_ostream &OS, const RecordDecl *RD, uint64_t LocalFieldOffsetInBits = Layout.getFieldOffset(Field->getFieldIndex()); CharUnits FieldOffset = - Offset + C.toCharUnitsFromBits(LocalFieldOffsetInBits); + Offset + C.toCharUnitsFromBits(LocalFieldOffsetInBits); // Recursively dump fields of record type. if (auto RT = Field->getType()->getAs<RecordType>()) { @@ -3669,7 +3656,7 @@ static void DumpRecordLayout(raw_ostream &OS, const RecordDecl *RD, // Dump virtual bases. if (CXXRD && IncludeVirtualBases) { const ASTRecordLayout::VBaseOffsetsMapTy &VtorDisps = - Layout.getVBaseOffsetsMap(); + Layout.getVBaseOffsetsMap(); for (const CXXBaseSpecifier &Base : CXXRD->vbases()) { assert(Base.isVirtual() && "Found non-virtual class!"); @@ -3683,14 +3670,16 @@ static void DumpRecordLayout(raw_ostream &OS, const RecordDecl *RD, } DumpRecordLayout(OS, VBase, C, VBaseOffset, IndentLevel, - VBase == Layout.getPrimaryBase() ? - "(primary virtual base)" : "(virtual base)", + VBase == Layout.getPrimaryBase() + ? 
"(primary virtual base)" + : "(virtual base)", /*PrintSizeInfo=*/false, /*IncludeVirtualBases=*/false); } } - if (!PrintSizeInfo) return; + if (!PrintSizeInfo) + return; PrintIndentNoOffset(OS, IndentLevel - 1); OS << "[sizeof=" << Layout.getSize().getQuantity(); >From fb06091d1f542e6942286146df434c01e4e8eb0c Mon Sep 17 00:00:00 2001 From: Theo de Magalhaes <theodemagalh...@icloud.com> Date: Thu, 6 Mar 2025 22:48:49 +0100 Subject: [PATCH 2/2] fix(clang-cl): added -Wpadded and -Wpadded-bitfield warning --- clang/lib/AST/RecordLayoutBuilder.cpp | 86 ++++++++++++++++++++++++++- 1 file changed, 84 insertions(+), 2 deletions(-) diff --git a/clang/lib/AST/RecordLayoutBuilder.cpp b/clang/lib/AST/RecordLayoutBuilder.cpp index 695771f67868d..fe8e29df9e271 100644 --- a/clang/lib/AST/RecordLayoutBuilder.cpp +++ b/clang/lib/AST/RecordLayoutBuilder.cpp @@ -2592,6 +2592,8 @@ struct MicrosoftRecordLayoutBuilder { void computeVtorDispSet( llvm::SmallPtrSetImpl<const CXXRecordDecl *> &HasVtorDispSet, const CXXRecordDecl *RD) const; + void CheckFieldPadding(uint64_t Offset, uint64_t UnpaddedOffset, + const FieldDecl *D); const ASTContext &Context; EmptySubobjectMap *EmptySubobjects; @@ -2629,8 +2631,6 @@ struct MicrosoftRecordLayoutBuilder { /// virtual base classes and their offsets in the record. ASTRecordLayout::VBaseOffsetsMapTy VBases; /// The number of remaining bits in our last bitfield allocation. - /// This value isn't meaningful unless LastFieldIsNonZeroWidthBitfield is - /// true. unsigned RemainingBitsInField; bool IsUnion : 1; /// True if the last field laid out was a bitfield and was not 0 @@ -2992,6 +2992,15 @@ void MicrosoftRecordLayoutBuilder::layoutField(const FieldDecl *FD) { } else { FieldOffset = Size.alignTo(Info.Alignment); } + + uint64_t UnpaddedFielddOffsetInBits = + Context.toBits(DataSize) - RemainingBitsInField; + + CheckFieldPadding(Context.toBits(FieldOffset), UnpaddedFielddOffsetInBits, + FD); + + RemainingBitsInField = 0; + placeFieldAtOffset(FieldOffset); if (!IsOverlappingEmptyField) @@ -3037,10 +3046,14 @@ void MicrosoftRecordLayoutBuilder::layoutBitField(const FieldDecl *FD) { } else { // Allocate a new block of memory and place the bitfield in it. CharUnits FieldOffset = Size.alignTo(Info.Alignment); + uint64_t UnpaddedFieldOffsetInBits = + Context.toBits(DataSize) - RemainingBitsInField; placeFieldAtOffset(FieldOffset); Size = FieldOffset + Info.Size; Alignment = std::max(Alignment, Info.Alignment); RemainingBitsInField = Context.toBits(Info.Size) - Width; + CheckFieldPadding(Context.toBits(FieldOffset), UnpaddedFieldOffsetInBits, + FD); } DataSize = Size; } @@ -3064,9 +3077,14 @@ void MicrosoftRecordLayoutBuilder::layoutZeroWidthBitField( } else { // Round up the current record size to the field's alignment boundary. CharUnits FieldOffset = Size.alignTo(Info.Alignment); + uint64_t UnpaddedFieldOffsetInBits = + Context.toBits(DataSize) - RemainingBitsInField; placeFieldAtOffset(FieldOffset); + RemainingBitsInField = 0; Size = FieldOffset; Alignment = std::max(Alignment, Info.Alignment); + CheckFieldPadding(Context.toBits(FieldOffset), UnpaddedFieldOffsetInBits, + FD); } DataSize = Size; } @@ -3189,8 +3207,56 @@ void MicrosoftRecordLayoutBuilder::layoutVirtualBases(const CXXRecordDecl *RD) { PreviousBaseLayout = &BaseLayout; } } +// Function based on ItaniumRecordLayoutBuilder::CheckFieldPadding. 
+void MicrosoftRecordLayoutBuilder::CheckFieldPadding(uint64_t Offset, + uint64_t UnpaddedOffset, + const FieldDecl *D) { + + // We let objc ivars without warning, objc interfaces generally are not used + // for padding tricks. + if (isa<ObjCIvarDecl>(D)) + return; + + // Don't warn about structs created without a SourceLocation. This can + // be done by clients of the AST, such as codegen. + if (D->getLocation().isInvalid()) + return; + + unsigned CharBitNum = Context.getTargetInfo().getCharWidth(); + + // Warn if padding was introduced to the struct/class. + if (!IsUnion && Offset > UnpaddedOffset) { + unsigned PadSize = Offset - UnpaddedOffset; + bool InBits = true; + if (PadSize % CharBitNum == 0) { + PadSize = PadSize / CharBitNum; + InBits = false; + } + if (D->getIdentifier()) { + auto Diagnostic = D->isBitField() ? diag::warn_padded_struct_bitfield + : diag::warn_padded_struct_field; + Context.getDiagnostics().Report(D->getLocation(), + Diagnostic) + << getPaddingDiagFromTagKind(D->getParent()->getTagKind()) + << Context.getTypeDeclType(D->getParent()) << PadSize + << (InBits ? 1 : 0) // (byte|bit) + << D->getIdentifier(); + } else { + auto Diagnostic = D->isBitField() ? diag::warn_padded_struct_anon_bitfield + : diag::warn_padded_struct_anon_field; + Context.getDiagnostics().Report(D->getLocation(), + Diagnostic) + << getPaddingDiagFromTagKind(D->getParent()->getTagKind()) + << Context.getTypeDeclType(D->getParent()) << PadSize + << (InBits ? 1 : 0); // (byte|bit) + } + } +} void MicrosoftRecordLayoutBuilder::finalizeLayout(const RecordDecl *RD) { + uint64_t UnpaddedSizeInBits = Context.toBits(DataSize); + UnpaddedSizeInBits -= RemainingBitsInField; + // Respect required alignment. Note that in 32-bit mode Required alignment // may be 0 and cause size not to be updated. DataSize = Size; @@ -3219,6 +3285,22 @@ void MicrosoftRecordLayoutBuilder::finalizeLayout(const RecordDecl *RD) { Size = Context.toCharUnitsFromBits(External.Size); if (External.Align) Alignment = Context.toCharUnitsFromBits(External.Align); + return; + } + unsigned CharBitNum = Context.getTargetInfo().getCharWidth(); + uint64_t SizeInBits = Context.toBits(Size); + if (SizeInBits > UnpaddedSizeInBits) { + unsigned int PadSize = SizeInBits - UnpaddedSizeInBits; + bool InBits = true; + if (PadSize % CharBitNum == 0) { + PadSize = PadSize / CharBitNum; + InBits = false; + } + + Context.getDiagnostics().Report(RD->getLocation(), + diag::warn_padded_struct_size) + << Context.getTypeDeclType(RD) << PadSize + << (InBits ? 1 : 0); // (byte|bit) + } }
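For reference, here is a minimal standalone reproduction of the -Wpadded-bitfield question from the top of this message. It is only a sketch: it assumes an x86_64 target where `char` is 8 bits and `long long` is 64 bits wide, the file name is made up, and the expectations in the comments paraphrase the diagnostics rather than quote clang's exact output.

```cpp
// padded-bitfield-repro.cpp -- reproduction sketch.
// Check the diagnostic with something like:
//   clang -fsyntax-only -Wpadded-bitfield padded-bitfield-repro.cpp
// and inspect the resulting layout with:
//   clang -fsyntax-only -Xclang -fdump-record-layouts padded-bitfield-repro.cpp
struct __attribute__((ms_struct)) Foo {
  long long x;      // occupies bits [0, 64)
  char y : 1;       // ms_struct: starts a new char-sized storage unit at byte 8
  long long yy : 1; // different declared type, so a new 64-bit storage unit is
                    // allocated at the next 8-byte boundary (byte 16)
};
// The open question: should the padding in front of 'yy' be reported as
// 63 bits (everything between the end of 'y' and the start of 'yy'), or as
// 7 bytes (only the whole bytes that were skipped)? The current warning
// reports 7 bytes.
```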
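And a sketch of the kinds of layouts the new MicrosoftRecordLayoutBuilder checks are meant to diagnose. The expected messages in the comments assume the new code ends up mirroring the existing Itanium -Wpadded/-Wpadded-bitfield wording, and the command lines and file name are illustrative only.

```cpp
// padded-ms-abi.cpp -- illustrative cases; possible invocations:
//   clang-cl /c -Wpadded -Wpadded-bitfield padded-ms-abi.cpp
//   clang --target=x86_64-pc-windows-msvc -fsyntax-only -Wpadded padded-ms-abi.cpp
struct FieldPad {
  char c; // 1 byte
  int i;  // expect: 3 bytes of padding before 'i' (warn_padded_struct_field)
};

struct TailPad {
  int i;
  char c; // size is rounded up from 5 to 8 bytes:
          // expect: 3 bytes of tail padding (warn_padded_struct_size)
};

struct BitPad {
  int a : 3;
  long long b : 50; // different storage type, so the MS ABI starts a new
                    // 8-byte-aligned unit for 'b'; expect a -Wpadded-bitfield
                    // warning (warn_padded_struct_bitfield) for the skipped bits
};
```

As in the Itanium builder, the new check skips unions (`!IsUnion`) and fields without a valid SourceLocation.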