
Fixed various issues in the usage-tracking code.

Added some utility functions.
acqn authored 2020-04-16 17:17:08 +08:00, committed by Oliver Schmidt
parent d379affc4b
commit f3771a465d
3 changed files with 2509 additions and 278 deletions

File diff suppressed because it is too large.


@@ -75,8 +75,8 @@ struct LoadRegInfo {
CodeEntry* LoadEntry; /* The actual entry, 0 if invalid */
int LoadYIndex; /* Index of Y-load insn, -1 if invalid */
CodeEntry* LoadYEntry; /* The actual Y-load entry, 0 if invalid */
int XferIndex; /* Index of transfer insn */
CodeEntry* XferEntry; /* The actual transfer entry */
int ChgIndex; /* Index of last change */
CodeEntry* ChgEntry; /* The actual change entry */
int Offs; /* Stack offset if data is on stack */
};
@@ -90,7 +90,6 @@ struct LoadInfo {
/* Structure forward decl */
typedef struct StackOpData StackOpData;
typedef struct OptFuncDesc OptFuncDesc;
/* Structure that holds the needed data */
struct StackOpData {
@@ -98,7 +97,7 @@ struct StackOpData {
unsigned Flags; /* Flags to remember things */
/* Pointer to optimizer subfunction description */
const OptFuncDesc* OptFunc;
const void* OptFunc;
/* ZP register usage inside the sequence */
unsigned ZPUsage;
@@ -110,9 +109,10 @@ struct StackOpData {
/* Whether the rhs is changed multiple times */
int RhsMultiChg;
/* Register load information for lhs and rhs */
/* Register load information for lhs, rhs and rv */
LoadInfo Lhs;
LoadInfo Rhs;
LoadInfo Rv;
/* Several indices of insns in the code segment */
int PushIndex; /* Index of call to pushax in codeseg */
@@ -166,7 +166,16 @@ void AdjustLoadInfo (LoadInfo* LI, int Index, int Change);
RegInfo* GetLastChangedRegInfo (StackOpData* D, LoadRegInfo* Reg);
/* Get RegInfo of the last insn entry that changed the reg */
unsigned int TrackLoads (LoadInfo* LI, LoadInfo* LLI, CodeSeg* S, int I);
void PrepairLoadRegInfoForArgCheck (CodeSeg* S, LoadRegInfo* LRI, CodeEntry* E);
/* Set the load src flags and remember to check for load src change if necessary */
void SetIfOperandSrcAffected (LoadInfo* LLI, CodeEntry* E);
/* Check and flag operand src that may be affected */
void SetIfOperandLoadUnremovable (LoadInfo* LI, unsigned Used);
/* Check and flag operand load that may be unremovable */
unsigned int TrackLoads (LoadInfo* LI, CodeSeg* S, int I);
/* Track loads for a code entry.
** Return used registers.
*/
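
To show how the tracking helpers above are meant to work together, here is a minimal sketch modeled on the loop in OptStackOps further down in this commit; the wrapper SketchTrackLhs is hypothetical, and Data is assumed to be an already initialized StackOpData.

static void SketchTrackLhs (StackOpData* Data, CodeSeg* S, int I)
{
    CodeEntry* E = CS_GetEntry (S, I);
    unsigned   Used;

    if (CE_IsCallTo (E, "pushax")) {
        /* Loads feeding registers still used after the push must stay */
        SetIfOperandLoadUnremovable (&Data->Lhs, Data->UsedRegs);
        /* The Lhs regs also serve as the default Rhs until changed */
        CopyLoadInfo (&Data->Rhs, &Data->Lhs);
    } else {
        /* Track the load insn and update the used-register estimate */
        Used = TrackLoads (&Data->Lhs, S, I);
        Data->UsedRegs &= ~E->Chg;
        Data->UsedRegs |= Used;
    }
}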
@@ -252,6 +261,190 @@ int HarmlessCall (const char* Name);
** the pushax/op sequence when encountered.
*/
/*****************************************************************************/
/* Helpers */
/*****************************************************************************/
/* Backup location types */
#define BU_UNKNOWN 0x00000000U /* Unknown */
#define BU_IMM 0x00000000U /* Immediate */
#define BU_REG 0x01000000U /* In register */
#define BU_ZP 0x02000000U /* On ZP */
#define BU_SP6502 0x04000000U /* On 6502 stack */
#define BU_SP 0x08000000U /* On CC65 stack */
#define BU_B8 0x00000000U /* Size of 8-bit */
#define BU_B16 0x10000000U /* Size of 16-bit */
#define BU_B24 0x20000000U /* Size of 24-bit */
#define BU_B32 0x30000000U /* Size of 32-bit */
#define BU_TYPE_MASK 0x0F000000U /* Type mask */
#define BU_SIZE_MASK 0xF0000000U /* Size mask */
typedef struct {
unsigned Type; /* Backup location type and size */
unsigned ZPUsage; /* ZP unusable for backup */
union {
unsigned Where; /* Backup location */
unsigned Imm; /* Backed-up value */
unsigned char* Bytes; /* Pointer to backed-up value */
};
} BackupInfo;
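
As a small illustration of the encoding above (a sketch only; SketchBackupType is a hypothetical helper), a Type value combines one BU_* location with one BU_B* size, and the two masks split it apart again.

static void SketchBackupType (void)
{
    BackupInfo B;

    B.Type    = BU_ZP | BU_B16;     /* 16-bit value to be backed up on ZP */
    B.ZPUsage = 0;                  /* No ZP locations are off-limits */

    if ((B.Type & BU_TYPE_MASK) == BU_ZP &&
        (B.Type & BU_SIZE_MASK) == BU_B16) {
        /* B.Where would then name the chosen ZP backup location */
    }
}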
const char* GetZPName (unsigned ZPLoc);
/* Get the name strings of certain known ZP Regs */
unsigned FindAvailableBackupLoc (BackupInfo* B, unsigned Type);
/* Find a ZP loc for storing the backup and fill in the info.
** The allowed types are specified with the Type parameter.
** For convenience, all types are allowed if none is specified.
** Return the type of the found loc.
*/
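
A rough usage sketch for FindAvailableBackupLoc (the helper SketchFindBackupLoc is hypothetical; it also assumes a BU_UNKNOWN result means no suitable location was found, which the header does not spell out).

static void SketchFindBackupLoc (StackOpData* D)
{
    BackupInfo B;

    B.Type    = BU_UNKNOWN;
    B.ZPUsage = D->ZPUsage;         /* ZP locations the sequence already touches */

    /* Ask for any 8-bit location; a mask like BU_REG | BU_ZP would restrict the choice */
    if (FindAvailableBackupLoc (&B, BU_B8) != BU_UNKNOWN) {
        /* B.Type now says where the backup will live */
    }
}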
void AdjustEntryIndices (Collection* Indices, int Index, int Change);
/* Adjust the insn indices remembered in the collection after deleting or
** inserting successive entries at the given index.
*/
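
The rule AdjustEntryIndices implements is sketched below on a plain array of indices (the real function works on a Collection and may handle more cases): after inserting Change entries at Index, or deleting with Change negative, every remembered index at or behind that point shifts by Change.

static void SketchAdjustIndices (int* Idx, unsigned Count, int Index, int Change)
{
    unsigned I;

    for (I = 0; I < Count; ++I) {
        if (Idx[I] >= Index) {
            Idx[I] += Change;   /* Shift indices at or behind the edit point */
        }
    }
}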
void DelEntryIdx (CodeSeg* S, int Idx, Collection* Indices);
/* Delete an entry and adjust Indices if necessary */
void DelEntriesIdx (CodeSeg* S, int Idx, int Count, Collection* Indices);
/* Delete entries and adjust Indices if necessary */
void RemoveFlaggedRegLoads (CodeSeg* S, LoadRegInfo* LRI, Collection* Indices);
/* Remove flagged register load insns */
void RemoveFlaggedLoads (CodeSeg* S, LoadInfo* LI, Collection* Indices);
/* Remove flagged load insns */
int BackupABefore (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Backup the content of A before the specified index Idx */
int BackupXBefore (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Backup the content of X before the specified index Idx */
int BackupYBefore (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Backup the content of Y before the specified index Idx */
int BackupAXBefore (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Backup the content of AX before the specified index Idx */
int BackupAXYBefore (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Backup the content of AXY before the specified index Idx.
** This doesn't allow separating the backup of Y from that of AX for now.
*/
int BackupAAfter (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Backup the content of A after the specified index Idx */
int BackupXAfter (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Backup the content of X after the specified index Idx */
int BackupYAfter (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Backup the content of Y after the specified index Idx */
int BackupAXAfter (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Backup the content of AX after the specified index Idx */
int BackupAXYAfter (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Backup the content of AXY after the specified index Idx.
** This doesn't allow separating the backup of Y from that of AX for now.
*/
int RestoreABefore (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Restore the content of A before the specified index Idx */
int RestoreXBefore (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Restore the content of X before the specified index Idx */
int RestoreYBefore (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Restore the content of Y before the specified index Idx */
int RestoreAXBefore (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Restore the content of AX before the specified index Idx */
int RestoreAXYBefore (CodeSeg* S, BackupInfo* B, int Idx, Collection* Indices);
/* Restore the content of AXY before the specified index Idx.
** This only allows restore from compacted AXY backup for now.
*/
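
How the backup/restore pairs above are typically combined around a pushax/op sequence, as a hedged sketch (SketchBackupRestoreAX is hypothetical; it assumes the Indices collection holds pointers to insn indices that must stay in sync, and that a non-zero return means success).

static void SketchBackupRestoreAX (StackOpData* D)
{
    BackupInfo B;
    Collection Indices = AUTO_COLLECTION_INITIALIZER;

    /* Keep D->OpIndex valid while entries are inserted */
    CollAppend (&Indices, &D->OpIndex);

    B.Type    = BU_UNKNOWN;
    B.ZPUsage = D->ZPUsage;

    /* Save AX right after the pushax call ... */
    if (BackupAXAfter (D->Code, &B, D->PushIndex, &Indices)) {
        /* ... and bring it back immediately before the operation */
        RestoreAXBefore (D->Code, &B, D->OpIndex, &Indices);
    }

    DoneCollection (&Indices);
}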
int BackupArgAfter (CodeSeg* S, BackupInfo* B, int Idx, const CodeEntry* E, Collection* Indices);
/* Backup the content of the opc arg of the entry E after the specified index Idx.
** Reg A/Y will be used to transfer the content from one memory location to
** another, regardless of whether they are currently in use.
*/
int LoadABefore (CodeSeg* S, int Idx, const LoadRegInfo* LRI, Collection* Indices);
/* Reload into A the same arg according to LoadRegInfo at Idx */
int LoadXBefore (CodeSeg* S, int Idx, const LoadRegInfo* LRI, Collection* Indices);
/* Reload into X the same arg according to LoadRegInfo at Idx */
int LoadYBefore (CodeSeg* S, int Idx, const LoadRegInfo* LRI, Collection* Indices);
/* Reload into Y the same arg according to LoadRegInfo at Idx */
int LoadAAfter (CodeSeg* S, int Idx, const LoadRegInfo* LRI, Collection* Indices);
/* Reload into A the same arg according to LoadRegInfo after Idx */
int LoadXAfter (CodeSeg* S, int Idx, const LoadRegInfo* LRI, Collection* Indices);
/* Reload into X the same arg according to LoadRegInfo after Idx */
int LoadYAfter (CodeSeg* S, int Idx, const LoadRegInfo* LRI, Collection* Indices);
/* Reload into Y the same arg according to LoadRegInfo after Idx */
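
A short sketch of the reload helpers (SketchReloadLhsA is hypothetical): if the Lhs value in A came from a directly reloadable source, the same load insn can simply be re-emitted in front of the operation.

static void SketchReloadLhsA (StackOpData* D)
{
    Collection Indices = AUTO_COLLECTION_INITIALIZER;

    CollAppend (&Indices, &D->OpIndex);

    if ((D->Lhs.A.Flags & LI_DIRECT) != 0) {
        /* Re-emit the insn that originally loaded A for the Lhs */
        LoadABefore (D->Code, D->OpIndex, &D->Lhs.A, &Indices);
    }

    DoneCollection (&Indices);
}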
unsigned GetRegAccessedInOpenRange (CodeSeg* S, int First, int Last);
/* Get what ZPs, registers or processor states are used or changed in the range
** (First, Last).
** The code block must be basic without any jump backwards.
*/
unsigned GetRegUsageInOpenRange (CodeSeg* S, int First, int Last, unsigned* Use, unsigned* Chg);
/* Get what ZPs, registers or processor states are used or changed in the range
** (First, Last) in output parameters Use and Chg.
** Return what ZP regs are used before changed in this range.
** The code block must be basic without any jump backwards.
*/
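
A typical query using GetRegUsageInOpenRange, sketched as the hypothetical SketchYUnusedBetweenPushAndOp: decide whether inserted code may clobber Y between the push and the operation.

static int SketchYUnusedBetweenPushAndOp (StackOpData* D)
{
    unsigned Use, Chg;

    /* Collect what the open range (PushIndex, OpIndex) uses and changes */
    GetRegUsageInOpenRange (D->Code, D->PushIndex, D->OpIndex, &Use, &Chg);

    /* Y may be clobbered by inserted code if nothing in the range reads it */
    return (Use & REG_Y) == 0;
}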
int FindArgFirstChangeInOpenRange (CodeSeg* S, int First, int Last, CodeEntry* E);
/* Find the first possible spot where the loaded arg of E might be changed in
** the range (First, Last). The code block in the range must be basic without
** any jump backwards.
** Return the index of the found entry, or Last if not found.
*/
int FindRegFirstChangeInOpenRange (CodeSeg* S, int First, int Last, unsigned what);
/* Find the first possible spot where the queried ZPs, registers and/or processor
** states might be changed in the range (First, Last). The code block in the
** range must be basic without any jump backwards.
** Return the index of the found entry, or Last if not found.
*/
int FindRegFirstUseInOpenRange (CodeSeg* S, int First, int Last, unsigned what);
/* Find the first possible spot where the queried ZPs, registers and/or processor
** states might be used in the range (First, Last). The code block in the range
** must be basic without any jump backwards.
** Return the index of the found entry, or Last if not found.
*/
int FindRegLastChangeInOpenRange (CodeSeg* S, int First, int Last, unsigned what);
/* Find the last possible spot where the queried ZPs, registers and/or processor
** states might be changed in the range (First, Last). The code block in the
** range must be basic without any jump backwards.
** Return the index of the found entry, or -1 if not found.
*/
int FindRegLastUseInOpenRange (CodeSeg* S, int First, int Last, unsigned what);
/* Find the last possible spot where the queried ZPs, registers and/or processor
** states might be used in the range (First, Last). The code block in the range
** must be basic without any jump backwards.
** Return the index of the found entry, or -1 if not found.
*/
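
Finally, a sketch of the Find*InOpenRange helpers (SketchLhsArgStillValidAtOp is hypothetical): check whether the arg loaded into A for the Lhs survives untouched up to the operation, so it could be reloaded there instead of being backed up.

static int SketchLhsArgStillValidAtOp (StackOpData* D)
{
    int Chg;

    if (D->Lhs.A.LoadEntry == 0) {
        return 0;               /* No valid load insn recorded */
    }

    /* "Not found" is reported as Last, i.e. OpIndex */
    Chg = FindArgFirstChangeInOpenRange (D->Code, D->PushIndex, D->OpIndex,
                                         D->Lhs.A.LoadEntry);
    return Chg >= D->OpIndex;
}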
/* End of codeoptutil.h */
#endif


@@ -100,10 +100,10 @@ static int SameRegAValue (StackOpData* D)
RegInfo* LRI = GetLastChangedRegInfo (D, &D->Lhs.A);
RegInfo* RRI = GetLastChangedRegInfo (D, &D->Rhs.A);
/* RHS can have a -1 LoadIndex only if it is carried over from LHS */
/* RHS can have a -1 ChgIndex only if it is carried over from LHS */
if (RRI == 0 ||
(D->Rhs.A.LoadIndex >= 0 &&
D->Rhs.A.LoadIndex == D->Lhs.A.LoadIndex) ||
(D->Rhs.A.ChgIndex >= 0 &&
D->Rhs.A.ChgIndex == D->Lhs.A.ChgIndex) ||
(LRI != 0 &&
RegValIsKnown (LRI->Out.RegA) &&
RegValIsKnown (RRI->Out.RegA) &&
@@ -125,8 +125,8 @@ static int SameRegXValue (StackOpData* D)
RegInfo* RRI = GetLastChangedRegInfo (D, &D->Rhs.X);
if (RRI == 0 ||
(D->Rhs.X.LoadIndex >= 0 &&
D->Rhs.X.LoadIndex == D->Lhs.X.LoadIndex) ||
(D->Rhs.X.ChgIndex >= 0 &&
D->Rhs.X.ChgIndex == D->Lhs.X.ChgIndex) ||
(LRI != 0 &&
RegValIsKnown (LRI->Out.RegX) &&
RegValIsKnown (RRI->Out.RegX) &&
@@ -1204,7 +1204,7 @@ static unsigned Opt_a_tosicmp (StackOpData* D)
if (!SameRegAValue (D)) {
/* Because of SameRegAValue */
CHECK (D->Rhs.A.LoadIndex >= 0);
CHECK (D->Rhs.A.ChgIndex >= 0);
/* Store LHS in ZP and reload it before op */
X = NewCodeEntry (OP65_STA, AM65_ZP, D->ZPLo, 0, D->PushEntry->LI);
@@ -1217,7 +1217,7 @@ static unsigned Opt_a_tosicmp (StackOpData* D)
if ((D->Rhs.A.Flags & LI_DIRECT) == 0) {
/* RHS src is not directly comparable */
X = NewCodeEntry (OP65_STA, AM65_ZP, D->ZPHi, 0, D->OpEntry->LI);
InsertEntry (D, X, D->Rhs.A.LoadIndex + 1);
InsertEntry (D, X, D->Rhs.A.ChgIndex + 1);
/* Cmp with stored RHS */
X = NewCodeEntry (OP65_CMP, AM65_ZP, D->ZPHi, 0, D->OpEntry->LI);
@@ -1431,7 +1431,8 @@ static int PreCondOk (StackOpData* D)
int Passed = 0;
/* Check the flags */
unsigned UnusedRegs = D->OptFunc->UnusedRegs;
const OptFuncDesc* Desc = D->OptFunc;
unsigned UnusedRegs = Desc->UnusedRegs;
if (UnusedRegs != REG_NONE &&
(GetRegInfo (D->Code, D->OpIndex+1, UnusedRegs) & UnusedRegs) != 0) {
/* Cannot optimize */
@@ -1442,15 +1443,15 @@ static int PreCondOk (StackOpData* D)
LoVal = D->OpEntry->RI->In.RegA;
HiVal = D->OpEntry->RI->In.RegX;
/* Check normally first, then interchange A/X and check again if necessary */
for (I = (D->OptFunc->Flags & OP_AX_INTERCHANGE ? 0 : 1); !Passed && I < 2; ++I) {
for (I = (Desc->Flags & OP_AX_INTERCHANGE ? 0 : 1); !Passed && I < 2; ++I) {
do {
if ((D->OptFunc->Flags & OP_A_KNOWN) != 0 &&
if ((Desc->Flags & OP_A_KNOWN) != 0 &&
RegValIsUnknown (LoVal)) {
/* Cannot optimize */
break;
}
if ((D->OptFunc->Flags & OP_X_ZERO) != 0 &&
if ((Desc->Flags & OP_X_ZERO) != 0 &&
HiVal != 0) {
/* Cannot optimize */
break;
@@ -1471,7 +1472,7 @@ static int PreCondOk (StackOpData* D)
Lhs = &D->Lhs;
Rhs = &D->Rhs;
/* Check normally first, then interchange LHS/RHS and check again if necessary */
for (I = (D->OptFunc->Flags & OP_LR_INTERCHANGE ? 0 : 1); !Passed && I < 2; ++I) {
for (I = (Desc->Flags & OP_LR_INTERCHANGE ? 0 : 1); !Passed && I < 2; ++I) {
do {
LhsLo = &Lhs->A;
@@ -1482,48 +1483,48 @@ static int PreCondOk (StackOpData* D)
** so we don't need to check twice for now.
*/
if ((D->OptFunc->Flags & OP_LHS_LOAD) != 0) {
if ((Desc->Flags & OP_LHS_LOAD) != 0) {
if ((LhsLo->Flags & LhsHi->Flags & LI_LOAD_INSN) == 0) {
/* Cannot optimize */
break;
} else if ((D->OptFunc->Flags & OP_LHS_LOAD_DIRECT) != 0) {
} else if ((Desc->Flags & OP_LHS_LOAD_DIRECT) != 0) {
if ((LhsLo->Flags & LhsHi->Flags & LI_DIRECT) == 0) {
/* Cannot optimize */
break;
}
}
}
if ((D->OptFunc->Flags & OP_RHS_LOAD) != 0) {
if ((Desc->Flags & OP_RHS_LOAD) != 0) {
if ((RhsLo->Flags & RhsHi->Flags & LI_LOAD_INSN) == 0) {
/* Cannot optimize */
break;
} else if ((D->OptFunc->Flags & OP_RHS_LOAD_DIRECT) != 0) {
} else if ((Desc->Flags & OP_RHS_LOAD_DIRECT) != 0) {
if ((RhsLo->Flags & RhsHi->Flags & LI_DIRECT) == 0) {
/* Cannot optimize */
break;
}
}
}
if ((D->OptFunc->Flags & OP_LHS_REMOVE) != 0) {
if ((Desc->Flags & OP_LHS_REMOVE) != 0) {
/* Check if the load entries cannot be removed */
if ((LhsLo->LoadEntry != 0 && (LhsLo->LoadEntry->Flags & CEF_DONT_REMOVE) != 0) ||
(LhsHi->LoadEntry != 0 && (LhsHi->LoadEntry->Flags & CEF_DONT_REMOVE) != 0)) {
if ((D->OptFunc->Flags & OP_LHS_REMOVE_DIRECT) != 0) {
if ((Desc->Flags & OP_LHS_REMOVE_DIRECT) != 0) {
/* Cannot optimize */
break;
}
}
}
if ((D->OptFunc->Flags & OP_RHS_REMOVE) != 0) {
if ((Desc->Flags & OP_RHS_REMOVE) != 0) {
if ((RhsLo->LoadEntry != 0 && (RhsLo->LoadEntry->Flags & CEF_DONT_REMOVE) != 0) ||
(RhsHi->LoadEntry != 0 && (RhsHi->LoadEntry->Flags & CEF_DONT_REMOVE) != 0)) {
if ((D->OptFunc->Flags & OP_RHS_REMOVE_DIRECT) != 0) {
if ((Desc->Flags & OP_RHS_REMOVE_DIRECT) != 0) {
/* Cannot optimize */
break;
}
}
}
if (D->RhsMultiChg && (D->OptFunc->Flags & OP_RHS_REMOVE_DIRECT) != 0) {
if (D->RhsMultiChg && (Desc->Flags & OP_RHS_REMOVE_DIRECT) != 0) {
/* Cannot optimize */
break;
}
@@ -1578,7 +1579,8 @@ static int RegAPreCondOk (StackOpData* D)
int Passed = 0;
/* Check the flags */
unsigned UnusedRegs = D->OptFunc->UnusedRegs;
const OptFuncDesc* Desc = D->OptFunc;
unsigned UnusedRegs = Desc->UnusedRegs;
if (UnusedRegs != REG_NONE &&
(GetRegInfo (D->Code, D->OpIndex+1, UnusedRegs) & UnusedRegs) != 0) {
/* Cannot optimize */
@@ -1591,19 +1593,19 @@ static int RegAPreCondOk (StackOpData* D)
RhsLoVal = D->OpEntry->RI->In.RegA;
RhsHiVal = D->OpEntry->RI->In.RegX;
/* Check normally first, then interchange A/X and check again if necessary */
for (I = (D->OptFunc->Flags & OP_AX_INTERCHANGE ? 0 : 1); !Passed && I < 2; ++I) {
for (I = (Desc->Flags & OP_AX_INTERCHANGE ? 0 : 1); !Passed && I < 2; ++I) {
do {
if (LhsHiVal != RhsHiVal) {
/* Cannot optimize */
break;
}
if ((D->OptFunc->Flags & OP_A_KNOWN) != 0 &&
if ((Desc->Flags & OP_A_KNOWN) != 0 &&
RegValIsUnknown (LhsLoVal)) {
/* Cannot optimize */
break;
}
if ((D->OptFunc->Flags & OP_X_ZERO) != 0 &&
if ((Desc->Flags & OP_X_ZERO) != 0 &&
LhsHiVal != 0) {
/* Cannot optimize */
break;
@@ -1629,7 +1631,7 @@ static int RegAPreCondOk (StackOpData* D)
Lhs = &D->Lhs;
Rhs = &D->Rhs;
/* Check normally first, then interchange LHS/RHS and check again if necessary */
for (I = (D->OptFunc->Flags & OP_LR_INTERCHANGE ? 0 : 1); !Passed && I < 2; ++I) {
for (I = (Desc->Flags & OP_LR_INTERCHANGE ? 0 : 1); !Passed && I < 2; ++I) {
do {
LhsLo = &Lhs->A;
@@ -1638,46 +1640,46 @@ static int RegAPreCondOk (StackOpData* D)
** so we don't need to check twice for now.
*/
if ((D->OptFunc->Flags & OP_LHS_LOAD) != 0) {
if ((Desc->Flags & OP_LHS_LOAD) != 0) {
if ((LhsLo->Flags & LI_LOAD_INSN) == 0) {
/* Cannot optimize */
break;
} else if ((D->OptFunc->Flags & OP_LHS_LOAD_DIRECT) != 0) {
} else if ((Desc->Flags & OP_LHS_LOAD_DIRECT) != 0) {
if ((LhsLo->Flags & LI_DIRECT) == 0) {
/* Cannot optimize */
break;
}
}
}
if ((D->OptFunc->Flags & OP_RHS_LOAD) != 0) {
if ((Desc->Flags & OP_RHS_LOAD) != 0) {
if ((RhsLo->Flags & LI_LOAD_INSN) == 0) {
/* Cannot optimize */
break;
} else if ((D->OptFunc->Flags & OP_RHS_LOAD_DIRECT) != 0) {
} else if ((Desc->Flags & OP_RHS_LOAD_DIRECT) != 0) {
if ((RhsLo->Flags & LI_DIRECT) == 0) {
/* Cannot optimize */
break;
}
}
}
if ((D->OptFunc->Flags & OP_LHS_REMOVE) != 0) {
if ((Desc->Flags & OP_LHS_REMOVE) != 0) {
/* Check if the load entries cannot be removed */
if ((LhsLo->LoadEntry != 0 && (LhsLo->LoadEntry->Flags & CEF_DONT_REMOVE) != 0)) {
if ((D->OptFunc->Flags & OP_LHS_REMOVE_DIRECT) != 0) {
if ((Desc->Flags & OP_LHS_REMOVE_DIRECT) != 0) {
/* Cannot optimize */
break;
}
}
}
if ((D->OptFunc->Flags & OP_RHS_REMOVE) != 0) {
if ((Desc->Flags & OP_RHS_REMOVE) != 0) {
if ((RhsLo->LoadEntry != 0 && (RhsLo->LoadEntry->Flags & CEF_DONT_REMOVE) != 0)) {
if ((D->OptFunc->Flags & OP_RHS_REMOVE_DIRECT) != 0) {
if ((Desc->Flags & OP_RHS_REMOVE_DIRECT) != 0) {
/* Cannot optimize */
break;
}
}
}
if (D->RhsMultiChg && (D->OptFunc->Flags & OP_RHS_REMOVE_DIRECT) != 0) {
if (D->RhsMultiChg && (Desc->Flags & OP_RHS_REMOVE_DIRECT) != 0) {
/* Cannot optimize */
break;
}
@@ -1731,8 +1733,8 @@ unsigned OptStackOps (CodeSeg* S)
int OldEntryCount; /* Old number of entries */
unsigned Used; /* What registers would be used */
unsigned PushedRegs; /* Track if the same regs are used after the push */
int RhsALoadIndex; /* Track if rhs is changed more than once */
int RhsXLoadIndex; /* Track if rhs is changed more than once */
int RhsAChgIndex; /* Track if rhs is changed more than once */
int RhsXChgIndex; /* Track if rhs is changed more than once */
int IsRegAOptFunc = 0; /* Whether to use the RegA-only optimizations */
enum {
@@ -1783,28 +1785,19 @@ unsigned OptStackOps (CodeSeg* S)
*/
if (CE_HasLabel (E)) {
/* Currently we don't track across branches.
** Remember this as an indirect load.
** Treat this as a change to all regs.
*/
ClearLoadInfo (&Data.Lhs);
Data.Lhs.A.LoadIndex = I;
Data.Lhs.X.LoadIndex = I;
Data.Lhs.Y.LoadIndex = I;
Data.Lhs.A.ChgIndex = I;
Data.Lhs.X.ChgIndex = I;
Data.Lhs.Y.ChgIndex = I;
}
if (CE_IsCallTo (E, "pushax")) {
/* Disallow removing the loads if the registers are used */
if (Data.UsedRegs & REG_A) {
Data.Lhs.A.Flags |= LI_DONT_REMOVE;
}
if (Data.UsedRegs & REG_X) {
Data.Lhs.X.Flags |= LI_DONT_REMOVE;
}
if (Data.UsedRegs & REG_Y) {
Data.Lhs.Y.Flags |= LI_DONT_REMOVE;
}
/* Disallow removing Lhs loads if the registers are used */
SetIfOperandLoadUnremovable (&Data.Lhs, Data.UsedRegs);
/* The LHS regs are also used as the default RHS until changed */
/* The Lhs regs are also used as the default Rhs until changed */
PushedRegs = REG_AXY;
Data.UsedRegs = REG_AXY;
CopyLoadInfo (&Data.Rhs, &Data.Lhs);
Data.PushIndex = I;
@@ -1812,7 +1805,7 @@ unsigned OptStackOps (CodeSeg* S)
State = FoundPush;
} else {
/* Track load insns */
Used = TrackLoads (&Data.Lhs, 0, Data.Code, I);
Used = TrackLoads (&Data.Lhs, S, I);
Data.UsedRegs &= ~E->Chg;
Data.UsedRegs |= Used;
}
@@ -1825,15 +1818,14 @@ unsigned OptStackOps (CodeSeg* S)
*/
if (CE_HasLabel (E)) {
/* Currently we don't track across branches.
** Remember this as an indirect load.
** Treat this as a change to all regs.
*/
ClearLoadInfo (&Data.Rhs);
Data.Rhs.A.LoadIndex = I;
Data.Rhs.X.LoadIndex = I;
Data.Rhs.Y.LoadIndex = I;
Data.Rhs.A.ChgIndex = I;
Data.Rhs.X.ChgIndex = I;
Data.Rhs.Y.ChgIndex = I;
}
if (E->OPC == OP65_JSR) {
/* Subroutine call: Check if this is one of the functions,
** we're going to replace.
*/
@@ -1846,16 +1838,9 @@ unsigned OptStackOps (CodeSeg* S)
IsRegAOptFunc = 0;
}
if (Data.OptFunc) {
/* Disallow removing the loads if the registers are used */
if (Data.UsedRegs & REG_A) {
Data.Rhs.A.Flags |= LI_DONT_REMOVE;
}
if (Data.UsedRegs & REG_X) {
Data.Rhs.X.Flags |= LI_DONT_REMOVE;
}
if (Data.UsedRegs & REG_Y) {
Data.Rhs.Y.Flags |= LI_DONT_REMOVE;
}
/* Disallow removing Rhs loads if the registers are used */
SetIfOperandLoadUnremovable (&Data.Rhs, Data.UsedRegs);
/* Remember the op index and go on */
Data.OpIndex = I;
Data.OpEntry = E;
@@ -1891,11 +1876,15 @@ unsigned OptStackOps (CodeSeg* S)
}
/* Memorize the old rhs load indices before refreshing them */
RhsALoadIndex = Data.Rhs.A.LoadIndex;
RhsXLoadIndex = Data.Rhs.X.LoadIndex;
RhsAChgIndex = Data.Rhs.A.ChgIndex;
RhsXChgIndex = Data.Rhs.X.ChgIndex;
/* Keep tracking Lhs src if necessary */
SetIfOperandSrcAffected (&Data.Lhs, E);
/* Track register usage */
Used = TrackLoads (&Data.Rhs, &Data.Lhs, Data.Code, I);
Used = TrackLoads (&Data.Rhs, S, I);
Data.ZPUsage |= (E->Use | E->Chg);
/* The changes could depend on the use */
Data.UsedRegs &= ~E->Chg;
@@ -1905,23 +1894,15 @@ unsigned OptStackOps (CodeSeg* S)
/* Check if any parts of Lhs are used again before overwritten */
if (PushedRegs != 0) {
if ((PushedRegs & E->Use) != 0) {
if ((PushedRegs & E->Use & REG_A) != 0) {
Data.Lhs.A.Flags |= LI_DONT_REMOVE;
}
if ((PushedRegs & E->Use & REG_X) != 0) {
Data.Lhs.X.Flags |= LI_DONT_REMOVE;
}
if ((PushedRegs & E->Use & REG_Y) != 0) {
Data.Lhs.Y.Flags |= LI_DONT_REMOVE;
}
SetIfOperandLoadUnremovable (&Data.Lhs, PushedRegs & E->Use);
}
PushedRegs &= ~E->Chg;
}
/* Check if rhs is changed again after the push */
if ((RhsALoadIndex != Data.Lhs.A.LoadIndex &&
RhsALoadIndex != Data.Rhs.A.LoadIndex) ||
(RhsXLoadIndex != Data.Lhs.X.LoadIndex &&
RhsXLoadIndex != Data.Rhs.X.LoadIndex)) {
if ((RhsAChgIndex != Data.Lhs.A.ChgIndex &&
RhsAChgIndex != Data.Rhs.A.ChgIndex) ||
(RhsXChgIndex != Data.Lhs.X.ChgIndex &&
RhsXChgIndex != Data.Rhs.X.ChgIndex)) {
/* This will disable those sub-opts that require removing
** the rhs as they can't handle such cases correctly.
*/
@@ -1993,7 +1974,8 @@ unsigned OptStackOps (CodeSeg* S)
CS_GenRegInfo (S);
/* Call the optimizer function */
Changes += Data.OptFunc->Func (&Data);
const OptFuncDesc* Desc = Data.OptFunc;
Changes += Desc->Func (&Data);
/* Unflag entries that can't be removed */
ResetDontRemoveEntryFlags (&Data);