- Add a comment to the callback indicating that it's *extremely* not a good
  idea, but unfortunately necessary.
- Default to using 4-bytes for the LSDA pointer encoding to agree with the
  encoded value in the CIE.


git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@93753 91177308-0d34-0410-b5e6-96231b3b80d8
This commit is contained in:
Bill Wendling 2010-01-18 19:36:27 +00:00
parent 38812dff7d
commit a8c18890da
3 changed files with 6 additions and 3 deletions

View File

@@ -202,6 +202,9 @@ public:
/// getLSDAEncoding - Returns the LSDA pointer encoding. The choices are
/// 4-byte, 8-byte, and target default.
/// FIXME: This call-back isn't good! We should be using the correct encoding
/// regardless of the system. However, there are some systems which have bugs
/// that prevent this from occurring.
virtual DwarfLSDAEncoding::Encoding getLSDAEncoding() const {
return DwarfLSDAEncoding::Default;
}

View File

@@ -283,7 +283,7 @@ void DwarfException::EmitFDE(const FunctionEHFrameInfo &EHFrameInfo) {
if (MMI->getPersonalities()[0] != NULL) {
bool is4Byte = TD->getPointerSize() == sizeof(int32_t);
if (Asm->TM.getLSDAEncoding() == DwarfLSDAEncoding::FourByte) {
if (Asm->TM.getLSDAEncoding() != DwarfLSDAEncoding::EightByte) {
Asm->EmitULEB128Bytes(4);
Asm->EOL("Augmentation size");

View File

@@ -251,8 +251,8 @@ void X86TargetMachine::setCodeModelForJIT() {
}
/// getLSDAEncoding - Returns the LSDA pointer encoding for X86.
///
/// Per this commit's message, the default is the 4-byte encoding so the FDE's
/// LSDA pointer agrees with the value encoded in the CIE. The Darwin-10 case
/// is the buggy system the FIXME in the header refers to and gets 8 bytes.
/// NOTE(review): the extracted diff interleaved the removed lines ("> 10",
/// "return Default") with the added ones; this body is the post-change state
/// of the hunk — confirm against upstream r93753.
DwarfLSDAEncoding::Encoding X86TargetMachine::getLSDAEncoding() const {
  if (Subtarget.isTargetDarwin() && Subtarget.getDarwinVers() != 10)
    return DwarfLSDAEncoding::FourByte;

  return DwarfLSDAEncoding::EightByte;
}