New optimizer steps to restore some boolean optimizations possibly lost due to the previous fix.
This commit is contained in:
parent f321bb16e5
commit 70549e868e
@@ -132,8 +132,10 @@ static OptFunc DOptCmp7 = { OptCmp7, "OptCmp7", 85, 0,
static OptFunc DOptCmp8 = { OptCmp8, "OptCmp8", 50, 0, 0, 0, 0, 0 };
static OptFunc DOptCmp9 = { OptCmp9, "OptCmp9", 85, 0, 0, 0, 0, 0 };
static OptFunc DOptComplAX1 = { OptComplAX1, "OptComplAX1", 65, 0, 0, 0, 0, 0 };
static OptFunc DOptCondBranches1= { OptCondBranches1,"OptCondBranches1", 80, 0, 0, 0, 0, 0 };
static OptFunc DOptCondBranches2= { OptCondBranches2,"OptCondBranches2", 0, 0, 0, 0, 0, 0 };
static OptFunc DOptCondBranch1 = { OptCondBranch1, "OptCondBranch1", 80, 0, 0, 0, 0, 0 };
static OptFunc DOptCondBranch2 = { OptCondBranch2, "OptCondBranch2", 40, 0, 0, 0, 0, 0 };
static OptFunc DOptCondBranch3 = { OptCondBranch3, "OptCondBranch3", 40, 0, 0, 0, 0, 0 };
static OptFunc DOptCondBranchC = { OptCondBranchC, "OptCondBranchC", 0, 0, 0, 0, 0, 0 };
static OptFunc DOptDeadCode = { OptDeadCode, "OptDeadCode", 100, 0, 0, 0, 0, 0 };
static OptFunc DOptDeadJumps = { OptDeadJumps, "OptDeadJumps", 100, 0, 0, 0, 0, 0 };
static OptFunc DOptDecouple = { OptDecouple, "OptDecouple", 100, 0, 0, 0, 0, 0 };
@@ -241,8 +243,10 @@ static OptFunc* OptFuncs[] = {
    &DOptCmp8,
    &DOptCmp9,
    &DOptComplAX1,
    &DOptCondBranches1,
    &DOptCondBranches2,
    &DOptCondBranch1,
    &DOptCondBranch2,
    &DOptCondBranch3,
    &DOptCondBranchC,
    &DOptDeadCode,
    &DOptDeadJumps,
    &DOptDecouple,
@@ -619,9 +623,6 @@ static unsigned RunOptGroup1 (CodeSeg* S)
    Changes += RunOptFunc (S, &DOptPtrLoad15, 1);
    Changes += RunOptFunc (S, &DOptPtrLoad16, 1);
    Changes += RunOptFunc (S, &DOptPtrLoad17, 1);
    Changes += RunOptFunc (S, &DOptBNegAX2, 1);
    Changes += RunOptFunc (S, &DOptBNegAX3, 1);
    Changes += RunOptFunc (S, &DOptBNegAX4, 1);
    Changes += RunOptFunc (S, &DOptAdd1, 1);
    Changes += RunOptFunc (S, &DOptAdd2, 1);
    Changes += RunOptFunc (S, &DOptAdd4, 1);
@@ -674,8 +675,6 @@ static unsigned RunOptGroup3 (CodeSeg* S)
    do {
        C = 0;

        C += RunOptFunc (S, &DOptBNegA1, 1);
        C += RunOptFunc (S, &DOptBNegA2, 1);
        C += RunOptFunc (S, &DOptNegAX1, 1);
        C += RunOptFunc (S, &DOptNegAX2, 1);
        C += RunOptFunc (S, &DOptStackOps, 3);      /* Before OptBoolUnary1 */
@@ -683,6 +682,7 @@ static unsigned RunOptGroup3 (CodeSeg* S)
        C += RunOptFunc (S, &DOptBoolUnary1, 3);
        C += RunOptFunc (S, &DOptBoolUnary2, 3);
        C += RunOptFunc (S, &DOptBoolUnary3, 1);
        C += RunOptFunc (S, &DOptBNegA1, 1);
        C += RunOptFunc (S, &DOptBNegAX1, 1);       /* After OptBoolUnary2 */
        C += RunOptFunc (S, &DOptShift1, 1);
        C += RunOptFunc (S, &DOptShift4, 1);
@@ -695,16 +695,22 @@ static unsigned RunOptGroup3 (CodeSeg* S)
        C += RunOptFunc (S, &DOptJumpCascades, 1);
        C += RunOptFunc (S, &DOptDeadJumps, 1);
        C += RunOptFunc (S, &DOptDeadCode, 1);
        C += RunOptFunc (S, &DOptBoolCmp, 1);
        C += RunOptFunc (S, &DOptBoolTrans, 1);
        C += RunOptFunc (S, &DOptJumpTarget1, 1);
        C += RunOptFunc (S, &DOptJumpTarget2, 1);
        C += RunOptFunc (S, &DOptCondBranches1, 1);
        C += RunOptFunc (S, &DOptCondBranches2, 1);
        C += RunOptFunc (S, &DOptCondBranch1, 1);
        C += RunOptFunc (S, &DOptCondBranch2, 1);
        C += RunOptFunc (S, &DOptCondBranch3, 1);
        C += RunOptFunc (S, &DOptCondBranchC, 1);
        C += RunOptFunc (S, &DOptRTSJumps1, 1);
        C += RunOptFunc (S, &DOptBoolCmp, 1);
        C += RunOptFunc (S, &DOptBoolTrans, 1);
        C += RunOptFunc (S, &DOptBNegA2, 1);        /* After OptCondBranch's */
        C += RunOptFunc (S, &DOptBNegAX2, 1);       /* After OptCondBranch's */
        C += RunOptFunc (S, &DOptBNegAX3, 1);       /* After OptCondBranch's */
        C += RunOptFunc (S, &DOptBNegAX4, 1);       /* After OptCondBranch's */
        C += RunOptFunc (S, &DOptCmp1, 1);
        C += RunOptFunc (S, &DOptCmp2, 1);
        C += RunOptFunc (S, &DOptCmp8, 1);          /* Must run before OptCmp3 */
        C += RunOptFunc (S, &DOptCmp3, 1);
        C += RunOptFunc (S, &DOptCmp4, 1);
        C += RunOptFunc (S, &DOptCmp5, 1);
@@ -712,7 +718,7 @@ static unsigned RunOptGroup3 (CodeSeg* S)
        C += RunOptFunc (S, &DOptCmp9, 1);
        C += RunOptFunc (S, &DOptTest1, 1);
        C += RunOptFunc (S, &DOptLoad1, 1);
        C += RunOptFunc (S, &DOptJumpTarget3, 1);   /* After OptCondBranches2 */
        C += RunOptFunc (S, &DOptUnusedLoads, 1);
        C += RunOptFunc (S, &DOptUnusedStores, 1);
        C += RunOptFunc (S, &DOptDupLoads, 1);
@@ -167,7 +167,7 @@ static void ReplaceBranchCond (CodeSeg* S, unsigned I, cmp_t Cond)


/*****************************************************************************/
/* Optimize bool comparison and transformer subroutines */
/* Optimize bool comparison and transformer subroutines with branches */
/*****************************************************************************/
@@ -291,6 +291,62 @@ unsigned OptBoolTrans (CodeSeg* S)



unsigned OptBoolUnary (CodeSeg* S)
/* Try to remove the call to a bcastax/bnegax routine where the call is
** not really needed and change the following branch condition accordingly.
*/
{
    unsigned Changes = 0;

    /* Walk over the entries */
    unsigned I = 0;
    while (I < CS_GetEntryCount (S)) {

        CodeEntry* N;
        cmp_t Cond;

        /* Get next entry */
        CodeEntry* E = CS_GetEntry (S, I);

        /* Check for a boolean transformer */
        if (E->OPC == OP65_JSR &&
            (Cond = FindBoolCmpCond (E->Arg)) != CMP_INV &&
            (N = CS_GetNextEntry (S, I)) != 0 &&
            (N->Info & OF_ZBRA) != 0) {

            /* Make the boolean transformer unnecessary by changing the
            ** conditional jump to evaluate the condition flags that are
            ** set after the compare directly. Note: jeq jumps if the
            ** condition is not met, jne jumps if the condition is met.
            ** Invert the code if we jump on condition not met.
            */
            if (GetBranchCond (N->OPC) == BC_EQ) {
                /* Jumps if condition false, invert condition */
                Cond = CmpInvertTab [Cond];
            }

            /* Check if we can replace the code by something better */
            ReplaceBranchCond (S, I+1, Cond);

            /* Remove the call to the bool transformer */
            CS_DelEntry (S, I);

            /* Remember, we had changes */
            ++Changes;

        }

        /* Next entry */
        ++I;

    }

    /* Return the number of changes made */
    return Changes;
}
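For illustration of the rewrite OptBoolUnary performs, here is a small stand-alone C sketch of the same idea on a toy model; the booleq routine name, the two-value cmp_t and the string-based opcodes are assumptions made for the example and are not the real cc65 CodeEntry/FindBoolCmpCond interface. A jsr to a boolean transformer that is directly followed by a zero-branch is dropped, and the branch is rewritten to test the compare flags, inverting the condition when the branch was a jeq (branch on condition not met).

/* Toy sketch of the OptBoolUnary rewrite (illustrative names only).
** Before:
**        cmp  #$05
**        jsr  booleq      ; A = 1 if the compare found equality
**        jne  L1          ; branch if the boolean value is true
** After (the jsr is removed, the branch tests the compare flags directly):
**        cmp  #$05
**        jeq  L1
*/
#include <stdio.h>
#include <string.h>

typedef enum { CMP_EQ, CMP_NE } cmp_t;          /* toy condition codes */

int main (void)
{
    cmp_t       Cond   = CMP_EQ;    /* condition materialized by booleq  */
    const char* Branch = "jne";     /* zero-branch following the jsr     */

    if (strcmp (Branch, "jeq") == 0) {
        /* jeq after the transformer means "jump if condition not met",
        ** so the condition must be inverted.
        */
        Cond = (Cond == CMP_EQ)? CMP_NE : CMP_EQ;
    }

    /* Emit the branch that evaluates the compare flags directly */
    printf ("        cmp  #$05\n        %s  L1\n",
            (Cond == CMP_EQ)? "jeq" : "jne");
    return 0;
}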


/*****************************************************************************/
/* Remove calls to the boolean cast/negation subroutines */
/*****************************************************************************/
@@ -898,17 +898,13 @@ unsigned OptJumpTarget3 (CodeSeg* S)



unsigned OptCondBranches1 (CodeSeg* S)
/* Performs several optimization steps:
**
unsigned OptCondBranch1 (CodeSeg* S)
/* Performs some optimization steps:
** - If an immediate load of a register is followed by a conditional jump that
**   is never taken because the load of the register sets the flags in such a
**   manner, remove the conditional branch.
** - If the conditional branch is always taken because of the register load,
**   replace it by a jmp.
** - If a conditional branch jumps around an unconditional branch, remove the
**   conditional branch and make the jump a conditional branch with the
**   inverse condition of the first one.
*/
{
    unsigned Changes = 0;
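As a rough, stand-alone illustration of the first two cases in the OptCondBranch1 comment above (toy values only; the real pass inspects CodeEntry flags rather than strings): an immediate register load fixes the Z flag, so a following jeq/jne is either never taken and can be dropped, or always taken and can become a jmp.

#include <stdio.h>
#include <string.h>

int main (void)
{
    unsigned char LoadValue = 0x00;     /* lda #$00  ->  Z flag is set  */
    const char*   Branch    = "jne";    /* taken only when Z is clear   */

    int ZeroFlag   = (LoadValue == 0);
    int BranchOnEq = (strcmp (Branch, "jeq") == 0);

    if (BranchOnEq == ZeroFlag) {
        /* The condition is known to hold: replace the branch by a jmp */
        puts ("        lda  #$00");
        puts ("        jmp  L1");
    } else {
        /* The condition can never hold: the conditional branch is dead */
        puts ("        lda  #$00");
    }
    return 0;
}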
@@ -918,7 +914,6 @@ unsigned OptCondBranches1 (CodeSeg* S)
    while (I < CS_GetEntryCount (S)) {

        CodeEntry* N;
        CodeLabel* L;

        /* Get next entry */
        CodeEntry* E = CS_GetEntry (S, I);
@@ -960,6 +955,35 @@ unsigned OptCondBranches1 (CodeSeg* S)

        }

        /* Next entry */
        ++I;

    }

    /* Return the number of changes made */
    return Changes;
}



unsigned OptCondBranch2 (CodeSeg* S)
/* If a conditional branch jumps around an unconditional branch, remove the
** conditional branch and make the jump a conditional branch with the inverse
** condition of the first one.
*/
{
    unsigned Changes = 0;

    /* Walk over the entries */
    unsigned I = 0;
    while (I < CS_GetEntryCount (S)) {

        CodeEntry* N;
        CodeLabel* L;

        /* Get next entry */
        CodeEntry* E = CS_GetEntry (S, I);

        if ((E->Info & OF_CBRA) != 0 &&         /* It's a conditional branch */
            (L = E->JumpTo) != 0 &&             /* ..referencing a local label */
            (N = CS_GetNextEntry (S, I)) != 0 && /* There is a following entry */
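The case handled by OptCondBranch2 above can be pictured with a small self-contained C model; the Insn struct and the InverseBranch helper are hypothetical stand-ins for the real CodeEntry/GetBranchCond/GetInverseCond machinery, and the mnemonics are purely illustrative.

/* Before:                     After:
**        jeq  L1                     jne  L2
**        jmp  L2              L1:
** L1:
*/
#include <stdio.h>
#include <string.h>

typedef struct { const char* OPC; const char* Arg; } Insn;   /* toy entry */

static const char* InverseBranch (const char* OPC)
/* Toy stand-in for GetInverseCond/GetBranchCond: invert jeq <-> jne */
{
    return (strcmp (OPC, "jeq") == 0)? "jne" : "jeq";
}

int main (void)
{
    Insn Code[] = { { "jeq", "L1" }, { "jmp", "L2" }, { "L1:", "" } };
    unsigned Count = 3;
    unsigned I;

    /* The conditional branch only skips the jmp (its target is the entry
    ** right behind the jmp) and the jmp carries no label, so the pair can
    ** be folded into one branch with the inverse condition.
    */
    Code[0].OPC = InverseBranch (Code[0].OPC);   /* jeq -> jne       */
    Code[0].Arg = Code[1].Arg;                   /* retarget to L2   */
    Code[1] = Code[2];                           /* delete the jmp   */
    --Count;

    for (I = 0; I < Count; ++I) {
        printf ("        %s  %s\n", Code[I].OPC, Code[I].Arg);
    }
    return 0;
}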
@@ -991,7 +1015,51 @@ unsigned OptCondBranches1 (CodeSeg* S)



unsigned OptCondBranches2 (CodeSeg* S)
unsigned OptCondBranch3 (CodeSeg* S)
/* If the conditional branch is always taken because it follows an inverse
** conditional branch, replace it by a jmp.
*/
{
    unsigned Changes = 0;

    /* Walk over the entries */
    unsigned I = 0;
    while (I < CS_GetEntryCount (S)) {

        CodeEntry* N;

        /* Get next entry */
        CodeEntry* E = CS_GetEntry (S, I);

        /* Check if it's a conditional branch */
        if ((E->Info & OF_CBRA) != 0 &&          /* It's a conditional branch */
            (N = CS_GetNextEntry (S, I)) != 0 && /* There is a following entry */
            (N->Info & OF_CBRA) != 0 &&          /* ..which is a conditional branch */
            !CE_HasLabel (N)) {                  /* ..and does not have a label */

            /* Check if the branch conditions are inverse of each other */
            if (GetInverseCond (GetBranchCond (N->OPC)) == GetBranchCond (E->OPC)) {
                /* The branch is always taken, replace it by a jump */
                CE_ReplaceOPC (N, OP65_JMP);

                /* Remember, we had changes */
                ++Changes;
            }

        }

        /* Next entry */
        ++I;

    }

    /* Return the number of changes made */
    return Changes;
}



unsigned OptCondBranchC (CodeSeg* S)
/* If on entry to a "rol a" instruction the accu is zero, and a beq/bne follows,
** we can remove the rol and branch on the state of the carry flag.
*/
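The OptCondBranchC comment above rests on a small piece of 6502 flag arithmetic; the stand-alone C sketch below simply checks that reasoning by modelling the instruction semantics directly (no cc65 API involved): when the accu is known to be zero, "rol a" leaves it equal to the old carry, so a following beq is taken exactly when a bcc would be, and bne corresponds to bcs.

#include <stdio.h>

int main (void)
{
    unsigned Carry;

    for (Carry = 0; Carry <= 1; ++Carry) {
        unsigned A      = 0x00;                       /* accu known zero  */
        unsigned Rolled = ((A << 1) | Carry) & 0xFF;  /* rol a            */
        int      Zero   = (Rolled == 0);              /* Z after the rol  */

        /* "beq" after the rol is taken exactly when the old carry was
        ** clear, which is what "bcc" tests without the rol.
        */
        printf ("carry=%u: beq taken=%d  bcc taken=%d\n",
                Carry, Zero, Carry == 0);
    }
    return 0;
}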
@@ -101,13 +101,27 @@ unsigned OptJumpTarget3 (CodeSeg* S);
** done.
*/

unsigned OptCondBranches1 (CodeSeg* S);
/* If an immediate load of a register is followed by a conditional jump that
** is never taken because the load of the register sets the flags in such a
** manner, remove the conditional branch.
unsigned OptCondBranch1 (CodeSeg* S);
/* Performs some optimization steps:
** - If an immediate load of a register is followed by a conditional jump that
**   is never taken because the load of the register sets the flags in such a
**   manner, remove the conditional branch.
** - If the conditional branch is always taken because of the register load,
**   replace it by a jmp.
*/

unsigned OptCondBranches2 (CodeSeg* S);
unsigned OptCondBranch2 (CodeSeg* S);
/* If a conditional branch jumps around an unconditional branch, remove the
** conditional branch and make the jump a conditional branch with the inverse
** condition of the first one.
*/

unsigned OptCondBranch3 (CodeSeg* S);
/* If the conditional branch is always taken because it follows an inverse
** conditional branch, replace it by a jmp.
*/

unsigned OptCondBranchC (CodeSeg* S);
/* If on entry to a "rol a" instruction the accu is zero, and a beq/bne follows,
** we can remove the rol and branch on the state of the carry.
*/