idea, but unfortunately necessary.
- Default to using 4 bytes for the LSDA pointer encoding to agree with the
encoded value in the CIE.
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@93753 91177308-0d34-0410-b5e6-96231b3b80d8
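For context (not part of the patch): the CIE's augmentation data advertises a one-byte DW_EH_PE_* value describing how the LSDA pointer in each FDE is encoded, and the FDE must emit the pointer with exactly that width. A minimal sketch of the correspondence, using the standard DWARF EH constants; the helper function is hypothetical:

#include <cstdint>

// Standard DWARF exception-handling pointer-encoding constants.
enum : uint8_t {
  DW_EH_PE_absptr = 0x00, // native pointer size
  DW_EH_PE_udata4 = 0x03, // unsigned 4-byte value
  DW_EH_PE_udata8 = 0x04  // unsigned 8-byte value
};

// Hypothetical helper: the encoding byte the CIE would advertise for a
// 4-byte vs. 8-byte LSDA pointer (ignoring pc-relative modifiers).
uint8_t lsdaEncodingByte(bool fourByte) {
  return fourByte ? DW_EH_PE_udata4 : DW_EH_PE_udata8;
}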
/// getLSDAEncoding - Returns the LSDA pointer encoding. The choices are
/// 4-byte, 8-byte, and target default.
+ /// FIXME: This call-back isn't good! We should be using the correct encoding
+ /// regardless of the system. However, there are some systems which have bugs
+ /// that prevent this from occurring.
virtual DwarfLSDAEncoding::Encoding getLSDAEncoding() const {
return DwarfLSDAEncoding::Default;
}
if (MMI->getPersonalities()[0] != NULL) {
bool is4Byte = TD->getPointerSize() == sizeof(int32_t);
- if (Asm->TM.getLSDAEncoding() == DwarfLSDAEncoding::FourByte) {
+ if (Asm->TM.getLSDAEncoding() != DwarfLSDAEncoding::EightByte) {
Asm->EmitULEB128Bytes(4);
Asm->EOL("Augmentation size");
}
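Why the constant 4 is correct here: with a "z"-style CIE augmentation, each FDE carries a ULEB128 "Augmentation size" followed by the augmentation data, and when the LSDA pointer is the only datum, that size is simply the pointer's encoded width. A hedged sketch of the computation the hunk above hard-codes (the helper name is hypothetical):

#include <cstddef>
#include <cstdint>

// Hypothetical helper: byte count of the FDE augmentation data when it
// consists solely of the LSDA pointer. Mirrors the inverted test above:
// anything that isn't an explicit 8-byte request is emitted as 4 bytes.
size_t fdeAugmentationSize(bool wantEightByte) {
  return wantEightByte ? sizeof(uint64_t) : sizeof(uint32_t);
}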
DwarfLSDAEncoding::Encoding X86TargetMachine::getLSDAEncoding() const {
- if (Subtarget.isTargetDarwin() && Subtarget.getDarwinVers() > 10)
+ if (Subtarget.isTargetDarwin() && Subtarget.getDarwinVers() != 10)
return DwarfLSDAEncoding::FourByte;
- return DwarfLSDAEncoding::Default;
+ return DwarfLSDAEncoding::EightByte;
}
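The effect of inverting both tests is to make the 4-byte encoding the rule rather than the exception: every Darwin version except 10 (Snow Leopard, presumably one of the buggy systems the FIXME above alludes to) now gets the 4-byte LSDA pointer that agrees with the value encoded in the CIE, and only Darwin 10 keeps the 8-byte form.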