Temporary: make R12 available in ARM mode if RegScavenger is being used.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@34709 91177308-0d34-0410-b5e6-96231b3b80d8
Evan Cheng 2007-02-28 00:22:44 +00:00
parent 1b051fc6a4
commit 41a4d562f7
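
Note: the diff below only consults MRegisterInfo::requiresRegisterScavenging(); the hook itself is not part of this change. As a rough sketch only, an ARM-side override gating the scavenger behind a hidden command-line option (the flag name here is hypothetical) could look like:

    #include "ARMRegisterInfo.h"
    #include "llvm/Support/CommandLine.h"
    using namespace llvm;

    // Hypothetical flag: the scavenger stays off unless explicitly enabled,
    // matching the "Temporary" nature of this commit.
    static cl::opt<bool>
    EnableScavenging("enable-arm-reg-scavenging", cl::Hidden,
                     cl::desc("Enable register scavenging on ARM"));

    // Called through the MRegisterInfo pointer in the allocation-order code below.
    bool ARMRegisterInfo::requiresRegisterScavenging() const {
      return EnableScavenging;
    }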

@@ -123,6 +123,32 @@ def GPR : RegisterClass<"ARM", [i32], 32, [R0, R1, R2, R3, R4, R5, R6,
       ARM::R4, ARM::R5, ARM::R6, ARM::R8,
       ARM::R10,ARM::R11,
       ARM::LR, ARM::R7 };
+    // FP is R11, R9 is available, R12 is available.
+    static const unsigned ARM_GPR_AO_5[] = {
+      ARM::R3, ARM::R2, ARM::R1, ARM::R0,
+      ARM::R4, ARM::R5, ARM::R6, ARM::R7,
+      ARM::R8, ARM::R9, ARM::R10,ARM::R12,
+      ARM::LR, ARM::R11 };
+    // FP is R11, R9 is not available, R12 is available.
+    static const unsigned ARM_GPR_AO_6[] = {
+      ARM::R3, ARM::R2, ARM::R1, ARM::R0,
+      ARM::R4, ARM::R5, ARM::R6, ARM::R7,
+      ARM::R8, ARM::R10,ARM::R12,
+      ARM::LR, ARM::R11 };
+    // FP is R7, R9 is available, R12 is available.
+    static const unsigned ARM_GPR_AO_7[] = {
+      ARM::R3, ARM::R2, ARM::R1, ARM::R0,
+      ARM::R4, ARM::R5, ARM::R6, ARM::R8,
+      ARM::R9, ARM::R10,ARM::R11,ARM::R12,
+      ARM::LR, ARM::R7 };
+    // FP is R7, R9 is not available, R12 is available.
+    static const unsigned ARM_GPR_AO_8[] = {
+      ARM::R3, ARM::R2, ARM::R1, ARM::R0,
+      ARM::R4, ARM::R5, ARM::R6, ARM::R8,
+      ARM::R10,ARM::R11,ARM::R12,
+      ARM::LR, ARM::R7 };
     // FP is R7, only low registers available.
     static const unsigned THUMB_GPR_AO[] = {
       ARM::R2, ARM::R1, ARM::R0,
@@ -131,19 +157,20 @@ def GPR : RegisterClass<"ARM", [i32], 32, [R0, R1, R2, R3, R4, R5, R6,
     GPRClass::iterator
     GPRClass::allocation_order_begin(const MachineFunction &MF) const {
       const TargetMachine &TM = MF.getTarget();
+      const MRegisterInfo *RI = TM.getRegisterInfo();
       const ARMSubtarget &Subtarget = TM.getSubtarget<ARMSubtarget>();
       if (Subtarget.isThumb())
         return THUMB_GPR_AO;
       if (Subtarget.useThumbBacktraces()) {
         if (Subtarget.isR9Reserved())
-          return ARM_GPR_AO_4;
+          return RI->requiresRegisterScavenging() ? ARM_GPR_AO_8 : ARM_GPR_AO_4;
         else
-          return ARM_GPR_AO_3;
+          return RI->requiresRegisterScavenging() ? ARM_GPR_AO_7 : ARM_GPR_AO_3;
       } else {
         if (Subtarget.isR9Reserved())
-          return ARM_GPR_AO_2;
+          return RI->requiresRegisterScavenging() ? ARM_GPR_AO_6 : ARM_GPR_AO_2;
         else
-          return ARM_GPR_AO_1;
+          return RI->requiresRegisterScavenging() ? ARM_GPR_AO_5 : ARM_GPR_AO_1;
       }
     }
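
For context only (not part of this change): clients walk the range from allocation_order_begin(MF) to allocation_order_end(MF) and try physical registers in that sequence, so putting R12 ahead of LR and the frame pointer in ARM_GPR_AO_5 through ARM_GPR_AO_8 makes it a preferred scratch register once scavenging is enabled. A minimal sketch, assuming the 2007-era headers where TargetRegisterClass is declared in llvm/Target/MRegisterInfo.h:

    #include "llvm/CodeGen/MachineFunction.h"
    #include "llvm/Target/MRegisterInfo.h"
    using namespace llvm;

    // Return the most-preferred physical register of a class, i.e. the first
    // entry of the order produced by allocation_order_begin() above.  A real
    // allocator would keep scanning past registers that are already live.
    static unsigned firstPreferredReg(const TargetRegisterClass *RC,
                                      const MachineFunction &MF) {
      TargetRegisterClass::iterator I = RC->allocation_order_begin(MF);
      TargetRegisterClass::iterator E = RC->allocation_order_end(MF);
      return (I != E) ? *I : 0;   // 0 == no allocatable register in this class
    }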
@@ -156,15 +183,29 @@ def GPR : RegisterClass<"ARM", [i32], 32, [R0, R1, R2, R3, R4, R5, R6,
       if (Subtarget.isThumb())
         I = THUMB_GPR_AO + (sizeof(THUMB_GPR_AO)/sizeof(unsigned));
       else if (Subtarget.useThumbBacktraces()) {
-        if (Subtarget.isR9Reserved())
-          I = ARM_GPR_AO_4 + (sizeof(ARM_GPR_AO_4)/sizeof(unsigned));
-        else
-          I = ARM_GPR_AO_3 + (sizeof(ARM_GPR_AO_3)/sizeof(unsigned));
+        if (Subtarget.isR9Reserved()) {
+          if (RI->requiresRegisterScavenging())
+            I = ARM_GPR_AO_8 + (sizeof(ARM_GPR_AO_8)/sizeof(unsigned));
+          else
+            I = ARM_GPR_AO_4 + (sizeof(ARM_GPR_AO_4)/sizeof(unsigned));
+        } else {
+          if (RI->requiresRegisterScavenging())
+            I = ARM_GPR_AO_7 + (sizeof(ARM_GPR_AO_7)/sizeof(unsigned));
+          else
+            I = ARM_GPR_AO_3 + (sizeof(ARM_GPR_AO_3)/sizeof(unsigned));
+        }
       } else {
-        if (Subtarget.isR9Reserved())
-          I = ARM_GPR_AO_2 + (sizeof(ARM_GPR_AO_2)/sizeof(unsigned));
-        else
-          I = ARM_GPR_AO_1 + (sizeof(ARM_GPR_AO_1)/sizeof(unsigned));
+        if (Subtarget.isR9Reserved()) {
+          if (RI->requiresRegisterScavenging())
+            I = ARM_GPR_AO_6 + (sizeof(ARM_GPR_AO_6)/sizeof(unsigned));
+          else
+            I = ARM_GPR_AO_2 + (sizeof(ARM_GPR_AO_2)/sizeof(unsigned));
+        } else {
+          if (RI->requiresRegisterScavenging())
+            I = ARM_GPR_AO_5 + (sizeof(ARM_GPR_AO_5)/sizeof(unsigned));
+          else
+            I = ARM_GPR_AO_1 + (sizeof(ARM_GPR_AO_1)/sizeof(unsigned));
+        }
       }
       // Mac OS X requires FP not to be clobbered for backtracing purpose.
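
The excerpt ends mid-function here. Every order above deliberately keeps the frame pointer as its final entry, which suggests the end iterator can simply be backed off when FP (per the Mac OS X note) must not be clobbered. Purely as an illustration of that design choice, not code from this patch, such a trim amounts to:

    // Illustration only: with FP last in the order, "reserving" it is just a
    // matter of backing the end iterator off by one entry.
    static const unsigned *trimFramePointer(const unsigned *Begin,
                                            const unsigned *End,
                                            bool MustPreserveFP) {
      return (MustPreserveFP && End != Begin) ? End - 1 : End;
    }

Keeping FP at the tail of each table is what makes such a constant-time trim possible, and the new scavenging orders AO_5 through AO_8 preserve that convention.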