6502: use identity page for maths

Karol Stasiak 2023-01-27 18:14:50 +01:00
parent 29c1e3f2a6
commit f4d2fdd370
9 changed files with 157 additions and 108 deletions
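
What the diff below does, in brief: the "identity page" is a 256-byte, page-aligned array identity$ whose n-th entry is n (see the Environment change near the end of this commit). With such a table, an instruction like ORA var can be rewritten as ORA identity$,X whenever var's value is currently cached in X (or Y), so the register allocator can keep byte variables in index registers even for opcodes that have no register-to-register form. Previously the table was only emitted under OptimizeForSonicSpeed; this commit gives it a dedicated identity_page flag and threads its address through OptimizationContext, AssemblyMatchingContext and FeaturesForIndexRegisters. A minimal standalone sketch of the idea, in plain Scala with made-up names (not Millfork internals):

object IdentityPageIdea {
  // A 256-entry table t with t(i) == i; the generated identity$ array plays this role.
  val identityPage: IndexedSeq[Int] = IndexedSeq.tabulate(256)(identity)

  def main(args: Array[String]): Unit = {
    val a = 0x5A // accumulator
    val x = 0x0F // X currently caches the value of some byte variable
    // ORA var (with var's value equal to x) computes the same result as ORA identity$,X,
    // because identityPage(x) == x.
    assert((a | x) == (a | identityPage(x)))
  }
}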

View File

@ -575,7 +575,7 @@ object CompilationFlag extends Enumeration {
EmitIllegals, DecimalMode, LenientTextEncoding, LineNumbersInAssembly, SourceInAssembly, EnableBreakpoints,
// compilation options for MOS:
EmitCmosOpcodes, EmitCmosNopOpcodes, EmitSC02Opcodes, EmitRockwellOpcodes, EmitWdcOpcodes, EmitHudsonOpcodes, Emit65CE02Opcodes, EmitEmulation65816Opcodes, EmitNative65816Opcodes,
PreventJmpIndirectBug, LargeCode, ReturnWordsViaAccumulator, SoftwareStack,
PreventJmpIndirectBug, LargeCode, ReturnWordsViaAccumulator, SoftwareStack, IdentityPage,
// compilation options for I80
EmitIntel8080Opcodes, EmitIntel8085Opcodes, EmitExtended80Opcodes, EmitZ80Opcodes, EmitR800Opcodes, EmitEZ80Opcodes, EmitSharpOpcodes, EmitZ80NextOpcodes,
UseShadowRegistersForInterrupts,
@ -654,6 +654,7 @@ object CompilationFlag extends Enumeration {
"u_stack" -> UseUForStack,
"y_stack" -> UseYForStack,
"software_stack" -> SoftwareStack,
"identity_page" -> IdentityPage,
"use_shadow_registers_for_irq" -> UseShadowRegistersForInterrupts,
"output_intel_syntax" -> UseIntelSyntaxForOutput,
"input_intel_syntax" -> UseIntelSyntaxForInput,

View File

@ -2,7 +2,7 @@ package millfork.assembly
import millfork.{CompilationFlag, CompilationOptions}
import millfork.compiler.LabelGenerator
import millfork.env.{NormalFunction, ThingInMemory}
import millfork.env.{Constant, NormalFunction, ThingInMemory}
import millfork.error.Logger
import millfork.node.NiceFunctionProperty
@ -12,6 +12,7 @@ import millfork.node.NiceFunctionProperty
case class OptimizationContext(options: CompilationOptions,
labelMap: Map[String, (String, Int)],
zreg: Option[ThingInMemory],
identityPage: Constant,
niceFunctionProperties: Set[(NiceFunctionProperty, String)]) {
@inline
def log: Logger = options.log

View File

@ -995,16 +995,10 @@ object AlwaysGoodOptimizations {
val ConstantFlowAnalysis = new RuleBasedAssemblyOptimization("Constant flow analysis",
needsFlowInfo = FlowInfoRequirement.ForwardFlow,
(MatchX(0) & HasAddrMode(AbsoluteX) & SupportsAbsolute & Elidable & HasParameterWhere({
case MemoryAddressConstant(th) => th.name == "identity$"
case _ => false
})) ~~> { (code, ctx) =>
(MatchX(0) & HasAddrMode(AbsoluteX) & SupportsAbsolute & Not(HasOpcode(BIT)) & Elidable & HasIdentityPageParameter) ~~> { (code, ctx) =>
code.map(l => l.copy(addrMode = Immediate, parameter = NumericConstant(ctx.get[Int](0), 1)))
},
(MatchY(0) & HasAddrMode(AbsoluteY) & SupportsAbsolute & Elidable & HasParameterWhere({
case MemoryAddressConstant(th) => th.name == "identity$"
case _ => false
})) ~~> { (code, ctx) =>
(MatchY(0) & HasAddrMode(AbsoluteY) & SupportsAbsolute & Elidable & HasIdentityPageParameter) ~~> { (code, ctx) =>
code.map(l => l.copy(addrMode = Immediate, parameter = NumericConstant(ctx.get[Int](0), 1)))
},
(MatchY(0) & HasAddrMode(AbsoluteY) & SupportsAbsolute & Elidable) ~~> { (code, ctx) =>
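
The hunk above swaps the inline HasParameterWhere checks for identity$ for the new HasIdentityPageParameter matcher defined later in this commit; the X variant also excludes BIT, presumably because BIT with an immediate operand either does not exist (NMOS 6502) or does not update N and V (65C02), so the conversion would change flag behaviour. The rewrite itself is unchanged: once forward flow analysis pins the index register to a known constant, a read through the identity page is that constant, so the operand can become an immediate. A standalone illustration with simplified, made-up types (not compiler code):

object ConstantFoldSketch {
  sealed trait AddrMode; case object AbsoluteX extends AddrMode; case object Immediate extends AddrMode
  final case class Line(opcode: String, addrMode: AddrMode, parameter: Int)

  // Mirrors the rule's action: replace an identity-page AbsoluteX operand with an
  // immediate holding the value that flow analysis proved X to contain.
  def fold(line: Line, knownX: Int): Line =
    line.copy(addrMode = Immediate, parameter = knownX)

  def main(args: Array[String]): Unit = {
    val before = Line("LDA", AbsoluteX, parameter = 0xC000) // 0xC000 standing in for identity$'s address
    assert(fold(before, knownX = 7) == Line("LDA", Immediate, 7))
  }
}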

View File

@ -20,7 +20,7 @@ object DangerousOptimizations {
(HasOpcode(TAY) & DoesntMatterWhatItDoesWith(State.N, State.Z, State.A)) ~
(Linear & Not(ConcernsY)).*
).capture(1) ~
(Elidable & HasAddrMode(AbsoluteY) & DoesntMatterWhatItDoesWith(State.Y)) ~~> { (code, ctx) =>
(Elidable & HasAddrMode(AbsoluteY) & DoesntMatterWhatItDoesWith(State.Y) & HasSmallParameter) ~~> { (code, ctx) =>
val last = code.last
ctx.get[List[AssemblyLine]](1) :+ last.copy(parameter = last.parameter.+(ctx.get[Constant](0)).quickSimplify)
},
@ -30,27 +30,27 @@ object DangerousOptimizations {
(HasOpcode(TAX) & DoesntMatterWhatItDoesWith(State.N, State.Z, State.A)) ~
(Linear & Not(ConcernsX)).*
).capture(1) ~
(Elidable & HasAddrMode(AbsoluteX) & DoesntMatterWhatItDoesWith(State.X)) ~~> { (code, ctx) =>
(Elidable & HasAddrMode(AbsoluteX) & DoesntMatterWhatItDoesWith(State.X) & HasSmallParameter) ~~> { (code, ctx) =>
val last = code.last
ctx.get[List[AssemblyLine]](1) :+ last.copy(parameter = last.parameter.+(ctx.get[Constant](0)).quickSimplify)
},
(Elidable & HasOpcode(INY) & DoesntMatterWhatItDoesWith(State.N, State.Z)) ~
(Elidable & HasAddrMode(AbsoluteY) & DoesntMatterWhatItDoesWith(State.Y)) ~~> { (code, ctx) =>
(Elidable & HasAddrMode(AbsoluteY) & DoesntMatterWhatItDoesWith(State.Y) & HasSmallParameter) ~~> { (code, ctx) =>
val last = code.last
List(last.copy(parameter = last.parameter.+(1).quickSimplify))
},
(Elidable & HasOpcode(DEY) & DoesntMatterWhatItDoesWith(State.N, State.Z)) ~
(Elidable & HasAddrMode(AbsoluteY) & DoesntMatterWhatItDoesWith(State.Y)) ~~> { (code, ctx) =>
(Elidable & HasAddrMode(AbsoluteY) & DoesntMatterWhatItDoesWith(State.Y) & HasSmallParameter) ~~> { (code, ctx) =>
val last = code.last
List(last.copy(parameter = last.parameter.+(-1).quickSimplify))
},
(Elidable & HasOpcode(INX) & DoesntMatterWhatItDoesWith(State.N, State.Z)) ~
(Elidable & HasAddrMode(AbsoluteX) & DoesntMatterWhatItDoesWith(State.X)) ~~> { (code, ctx) =>
(Elidable & HasAddrMode(AbsoluteX) & DoesntMatterWhatItDoesWith(State.X) & HasSmallParameter) ~~> { (code, ctx) =>
val last = code.last
List(last.copy(parameter = last.parameter.+(1).quickSimplify))
},
(Elidable & HasOpcode(DEX) & DoesntMatterWhatItDoesWith(State.N, State.Z)) ~
(Elidable & HasAddrMode(AbsoluteX) & DoesntMatterWhatItDoesWith(State.X)) ~~> { (code, ctx) =>
(Elidable & HasAddrMode(AbsoluteX) & DoesntMatterWhatItDoesWith(State.X) & HasSmallParameter) ~~> { (code, ctx) =>
val last = code.last
List(last.copy(parameter = last.parameter.+(-1).quickSimplify))
},
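
Each rule above folds an index adjustment (a captured constant after TAX/TAY, or a single INX/INY/DEX/DEY) into the absolute operand of the following indexed instruction. All of them now also require HasSmallParameter, which only matches operands pointing at variables or arrays smaller than 255 bytes, presumably so the shifted operand still targets (or stays right next to) the object it originally referred to. A standalone illustration of the folded access, not compiler code:

object IndexFoldSketch {
  def main(args: Array[String]): Unit = {
    val memory = Array.tabulate(512)(i => (i * 7) & 0xff) // stand-in RAM
    val arr = 0x0100                                      // base address of some small array
    val y = 42
    val before = memory(arr + (y + 1)) // INY ; LDA arr,Y
    val after  = memory((arr + 1) + y) // LDA arr+1,Y -- the INY folded into the operand
    assert(before == after)
  }
}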

View File

@ -121,6 +121,7 @@ class RuleBasedAssemblyOptimization(val name: String, val needsFlowInfo: FlowInf
class AssemblyMatchingContext(val compilationOptions: CompilationOptions,
val labelMap: Map[String, (String, Int)],
val zeropageRegister: Option[ThingInMemory],
val identityPage: Constant,
val niceFunctionProperties: Set[(NiceFunctionProperty, String)],
val labelUseCount: String => Int) {
@inline
@ -1514,6 +1515,27 @@ case class HasParameterWhere(predicate: Constant => Boolean) extends TrivialAsse
override def hitRate: Double = 0.332
}
case object HasSmallParameter extends TrivialAssemblyLinePattern {
override def apply(line: AssemblyLine): Boolean = line.parameter match {
case MemoryAddressConstant(th : VariableInMemory) => th.typ.size < 255
case CompoundConstant(MathOperator.Plus, MemoryAddressConstant(th : VariableInMemory), NumericConstant(_, 1)) => th.typ.size < 255
case MemoryAddressConstant(th : MfArray) => th.sizeInBytes < 255
case CompoundConstant(MathOperator.Plus, MemoryAddressConstant(th : MfArray), NumericConstant(_, 1)) => th.sizeInBytes < 255
case _ => false
}
override def hitRate: Double = 0.332
}
case object HasIdentityPageParameter extends TrivialAssemblyLinePattern {
override def apply(line: AssemblyLine): Boolean = line.parameter match {
case MemoryAddressConstant(th) => th.name == "identity$"
case _ => false
}
override def hitRate: Double = 0.04
}
case class MatchObject(i: Int, f: Function[AssemblyLine, Any]) extends AssemblyLinePattern {
override def matchLineTo(ctx: AssemblyMatchingContext, flowInfo: FlowInfo, line: AssemblyLine): Boolean =
ctx.addObject(i, f(line))

View File

@ -30,14 +30,14 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
}
case class FeaturesForIndexRegisters(
blastProcessing: Boolean,
izIsAlwaysZero: Boolean,
indexRegisterTransfers: Boolean,
functionsSafeForX: Set[String],
functionsSafeForY: Set[String],
functionsSafeForZ: Set[String],
identityArray: Constant,
log: Logger)
useIdentityPage: Boolean,
izIsAlwaysZero: Boolean,
indexRegisterTransfers: Boolean,
functionsSafeForX: Set[String],
functionsSafeForY: Set[String],
functionsSafeForZ: Set[String],
identityArray: Constant,
log: Logger)
case class FeaturesForAccumulator(
cmos: Boolean,
@ -178,11 +178,11 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
val removeVariablesForReal = !options.flag(CompilationFlag.InternalCurrentlyOptimizingForMeasurement)
val costFunction: CyclesAndBytes => Int = if (options.flag(CompilationFlag.OptimizeForSpeed)) _.cycles else _.bytes
val importances = ReverseFlowAnalyzer.analyze(f, code, optimizationContext)
val blastProcessing = options.flag(CompilationFlag.OptimizeForSonicSpeed)
val identityArray = f.environment.maybeGet[ThingInMemory]("identity$").map(MemoryAddressConstant).getOrElse(Constant.Zero)
val useIdentityPage = options.flag(CompilationFlag.IdentityPage)
val identityArray = f.environment.identityPage
val izIsAlwaysZero = !options.flag(CompilationFlag.Emit65CE02Opcodes)
val featuresForIndices = FeaturesForIndexRegisters(
blastProcessing = blastProcessing,
useIdentityPage = useIdentityPage,
izIsAlwaysZero = izIsAlwaysZero,
indexRegisterTransfers = options.flag(CompilationFlag.EmitEmulation65816Opcodes),
functionsSafeForX = optimizationContext.niceFunctionProperties.filter(x => x._1 == MosNiceFunctionProperty.DoesntChangeX).map(_._2),
@ -385,14 +385,26 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
val vx = xCandidate.getOrElse("-")
val vy = yCandidate.getOrElse("-")
val vz = zCandidate.getOrElse("-")
val accessingX = lines match {
case (AssemblyLine0(_, _, MemoryAddressConstant(th)), _) :: _ => th.name == vx
case _ => false
}
val accessingY = lines match {
case (AssemblyLine0(_, _, MemoryAddressConstant(th)), _) :: _ => th.name == vy
case _ => false
}
val accessingZ = lines match {
case (AssemblyLine0(_, _, MemoryAddressConstant(th)), _) :: _ => th.name == vz
case _ => false
}
lines match {
case (AssemblyLine0(_, Immediate, MemoryAddressConstant(th)), _) :: xs
if th.name == vx || th.name == vy || th.name == vz =>
if accessingX || accessingY || accessingZ =>
// if an address of a variable is used, then that variable cannot be assigned to a register
None
case (AssemblyLine0(_, Immediate, SubbyteConstant(MemoryAddressConstant(th), _)), _) :: xs
if th.name == vx || th.name == vy || th.name == vz =>
if accessingX || accessingY || accessingZ =>
// if an address of a variable is used, then that variable cannot be assigned to a register
None
@ -412,7 +424,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
Indirect | LongIndirect |
AbsoluteIndexedX, MemoryAddressConstant(th)), _) :: xs =>
// if a variable is used as an array or a pointer, then it cannot be assigned to a register
if (th.name == vx || th.name == vy || th.name == vz) {
if (accessingX || accessingY || accessingZ) {
None
} else {
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs)
@ -424,56 +436,56 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
case (AssemblyLine0(SEP | REP, _, _), _) :: xs => None
case (AssemblyLine0(STY | LDY, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs if th.name == vx && (features.indexRegisterTransfers) =>
case (AssemblyLine0(STY | LDY, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs if accessingX && (features.indexRegisterTransfers) =>
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 2, cycles = 2))
case (AssemblyLine0(STX | LDX, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs if th.name == vy && (features.indexRegisterTransfers) =>
case (AssemblyLine0(STX | LDX, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs if accessingY && (features.indexRegisterTransfers) =>
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 2, cycles = 2))
case (AssemblyLine0(LDY, Absolute | ZeroPage, MemoryAddressConstant(th)), _) ::
(AssemblyLine0(LDA | STA | ADC | SBC | ORA | EOR | AND | CMP, AbsoluteY, _), f) :: xs if th.name == vx && f.y == Unimportant =>
(AssemblyLine0(LDA | STA | ADC | SBC | ORA | EOR | AND | CMP, AbsoluteY, _), f) :: xs if accessingX && f.y == Unimportant =>
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 2, cycles = 2))
case (AssemblyLine0(LDX, Absolute | ZeroPage, MemoryAddressConstant(th)), _) ::
(AssemblyLine0(LDA | STA | ADC | SBC | ORA | EOR | AND | CMP, AbsoluteX, _), f) :: xs if th.name == vy && f.x == Unimportant =>
(AssemblyLine0(LDA | STA | ADC | SBC | ORA | EOR | AND | CMP, AbsoluteX, _), f) :: xs if accessingY && f.x == Unimportant =>
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 2, cycles = 2))
case (AssemblyLine0(LDY, Absolute | ZeroPage, MemoryAddressConstant(th)), _) ::
(AssemblyLine0(SEC | CLC, _, _), _) ::
(AssemblyLine0(LDA | STA | ADC | SBC | ORA | EOR | AND | CMP, AbsoluteY, _), f) :: xs if th.name == vx && f.y == Unimportant =>
(AssemblyLine0(LDA | STA | ADC | SBC | ORA | EOR | AND | CMP, AbsoluteY, _), f) :: xs if accessingX && f.y == Unimportant =>
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 2, cycles = 2))
case (AssemblyLine0(LDX, Absolute | ZeroPage, MemoryAddressConstant(th)), _) ::
(AssemblyLine0(SEC | CLC, _, _), _) ::
(AssemblyLine0(LDA | STA | ADC | SBC | ORA | EOR | AND | CMP, AbsoluteX, _), f) :: xs if th.name == vy && f.x == Unimportant =>
(AssemblyLine0(LDA | STA | ADC | SBC | ORA | EOR | AND | CMP, AbsoluteX, _), f) :: xs if accessingY && f.x == Unimportant =>
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 2, cycles = 2))
case (AssemblyLine0(STY | LDY, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs if th.name == vx => None
case (AssemblyLine0(STY | LDY, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs if accessingX => None
case (AssemblyLine0(STX | LDX, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs if th.name == vy => None
case (AssemblyLine0(STX | LDX, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs if accessingY => None
case (AssemblyLine(op, Absolute | ZeroPage, MemoryAddressConstant(th), elidability, _), _) :: xs
if opcodesIdentityTable(op) && features.blastProcessing =>
if (th.name == vx || th.name == vy) {
if opcodesIdentityTable(op) && features.useIdentityPage =>
if (accessingX || accessingY) {
if (elidability == Elidability.Elidable) canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 0, cycles = -1))
else None
} else {
if (th.name == vz) None
if (accessingZ) None
else canBeInlined(xCandidate, yCandidate, zCandidate, features, xs)
}
case (AssemblyLine0(opcode, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs
if th.name == vx && (opcode == LDY || opcode == LDZ || opcodesThatCannotBeUsedWithIndexRegistersAsParameters(opcode)) =>
if accessingX && (opcode == LDY || opcode == LDZ || opcodesThatCannotBeUsedWithIndexRegistersAsParameters(opcode)) =>
// if a variable is used by some opcodes, then it cannot be assigned to a register
None
case (AssemblyLine0(opcode, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs
if th.name == vy && (opcode == LDX || opcode == LAX || opcode == LDZ || opcodesThatCannotBeUsedWithIndexRegistersAsParameters(opcode)) =>
if accessingY && (opcode == LDX || opcode == LAX || opcode == LDZ || opcodesThatCannotBeUsedWithIndexRegistersAsParameters(opcode)) =>
// if a variable is used by some opcodes, then it cannot be assigned to a register
None
case (AssemblyLine0(opcode, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs
if th.name == vz && (opcode == LDX || opcode == LDY || opcodesThatCannotBeUsedWithIndexRegistersAsParameters(opcode)) =>
if accessingZ && (opcode == LDX || opcode == LDY || opcodesThatCannotBeUsedWithIndexRegistersAsParameters(opcode)) =>
// if a variable is used by some opcodes, then it cannot be assigned to a register
None
@ -481,7 +493,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
if xCandidate.isDefined =>
// if a register is populated with a different variable, then this variable cannot be assigned to that register
// removing LDX saves 3 cycles
if (elidability == Elidability.Elidable && th.name == vx) {
if (elidability == Elidability.Elidable && accessingX) {
if (imp.z == Unimportant && imp.n == Unimportant) {
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 2, cycles = 2))
} else {
@ -495,7 +507,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
if xCandidate.isDefined =>
// LAX = LDX-LDA, and since LDX simplifies to nothing and LDA simplifies to TXA,
// LAX simplifies to TXA, saving two bytes
if (elidability == Elidability.Elidable && th.name == vx) {
if (elidability == Elidability.Elidable && accessingX) {
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 2, cycles = 2))
} else {
None
@ -505,7 +517,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
// if a register is populated with a different variable, then this variable cannot be assigned to that register
// removing LDX saves 3 bytes
// sometimes that LDX has to be converted into CPX#0
if (elidability == Elidability.Elidable && th.name == vy) {
if (elidability == Elidability.Elidable && accessingY) {
if (imp.z == Unimportant && imp.n == Unimportant) {
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 3, cycles = 4))
} else {
@ -516,7 +528,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
}
case (AssemblyLine(LDZ, Absolute | ZeroPage, MemoryAddressConstant(th), elidability, _), imp) :: xs if zCandidate.isDefined =>
if (elidability == Elidability.Elidable && th.name == vz) {
if (elidability == Elidability.Elidable && accessingZ) {
if (imp.z == Unimportant && imp.n == Unimportant) {
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 3, cycles = 4))
} else {
@ -555,7 +567,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
if (elidability == Elidability.Elidable && elidability2 == Elidability.Elidable) canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 2, cycles = 2))
else None
} else {
if (th.name == vz) None
if (accessingZ) None
else canBeInlined(xCandidate, yCandidate, zCandidate, features, xs)
}
@ -563,7 +575,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
if xCandidate.isDefined =>
// a variable cannot be inlined if there is TAX not after LDA of that variable
// but LDA-TAX can be simplified to TXA
if (elidability == Elidability.Elidable && elidability2 == Elidability.Elidable && th.name == vx) {
if (elidability == Elidability.Elidable && elidability2 == Elidability.Elidable && accessingX) {
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 3, cycles = 4))
} else {
None
@ -573,7 +585,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
if yCandidate.isDefined =>
// a variable cannot be inlined if there is TAY not after LDA of that variable
// but LDA-TAY can be simplified to TYA
if (elidability == Elidability.Elidable && elidability2 == Elidability.Elidable && th.name == vy) {
if (elidability == Elidability.Elidable && elidability2 == Elidability.Elidable && accessingY) {
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 3, cycles = 4))
} else {
None
@ -583,7 +595,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
if zCandidate.isDefined =>
// a variable cannot be inlined if there is TAZ not after LDA of that variable
// but LDA-TAZ can be simplified to TZA
if (elidability == Elidability.Elidable && elidability2 == Elidability.Elidable && th.name == vy) {
if (elidability == Elidability.Elidable && elidability2 == Elidability.Elidable && accessingY) {
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 3, cycles = 4))
} else {
None
@ -591,7 +603,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
case (AssemblyLine(LDA | STA, Absolute | ZeroPage, MemoryAddressConstant(th), elidability, _), _) :: xs =>
// changing LDA->TXA, STA->TAX, INC->INX, DEC->DEX saves 2 bytes
if (th.name == vy || th.name == vx || th.name == vz) {
if (accessingY || accessingX || accessingZ) {
if (elidability == Elidability.Elidable) {
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 2, cycles = 2))
} else {
@ -603,7 +615,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
case (AssemblyLine(INC | DEC, Absolute | ZeroPage, MemoryAddressConstant(th), elidability, _), _) :: xs =>
// changing LDA->TXA, STA->TAX, INC->INX, DEC->DEX saves 2 bytes
if (th.name == vy || th.name == vx || th.name == vz) {
if (accessingY || accessingX || accessingZ) {
if (elidability == Elidability.Elidable) {
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 2, cycles = 4))
} else {
@ -615,7 +627,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
case (AssemblyLine(STZ, Absolute | ZeroPage, MemoryAddressConstant(th), elidability, _), _) :: xs =>
// changing STZ->LDX saves 1 byte
if (th.name == vy || th.name == vx) {
if (accessingY || accessingX) {
if (elidability == Elidability.Elidable && features.izIsAlwaysZero) {
canBeInlined(xCandidate, yCandidate, zCandidate, features, xs).map(_ + CyclesAndBytes(bytes = 1, cycles = 2))
} else {
@ -890,37 +902,53 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
val vy = yCandidate.getOrElse("-")
val vz = zCandidate.getOrElse("-")
val va = aCandidate.getOrElse("-")
val accessingX = lines match {
case (AssemblyLine0(_, _, MemoryAddressConstant(th)), _) :: _ => th.name == vx
case _ => false
}
val accessingY = lines match {
case (AssemblyLine0(_, _, MemoryAddressConstant(th)), _) :: _ => th.name == vy
case _ => false
}
val accessingZ = lines match {
case (AssemblyLine0(_, _, MemoryAddressConstant(th)), _) :: _ => th.name == vz
case _ => false
}
val accessingA = lines match {
case (AssemblyLine0(_, _, MemoryAddressConstant(th)), _) :: _ => th.name == va
case _ => false
}
lines match {
case (AssemblyLine(INC, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vx =>
if accessingX =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map( AssemblyLine.implied(INX).pos(s) :: _)
case (AssemblyLine(INC, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vy =>
if accessingY =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(INY).pos(s) :: _)
case (AssemblyLine(INC, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vz =>
if accessingZ =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(INZ).pos(s) :: _)
case (AssemblyLine(DEC, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vx =>
if accessingX =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(DEX).pos(s) :: _)
case (AssemblyLine(DEC, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vy =>
if accessingY =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(DEY).pos(s) :: _)
case (AssemblyLine(DEC, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vz =>
if accessingZ =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(DEZ).pos(s) :: _)
case (AssemblyLine(opcode@(DEC | INC | ROL | ROR | ASL | LSR), Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == va =>
if accessingA =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(opcode).pos(s) :: _)
case (AssemblyLine(LDX, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), imp) :: xs
if th.name == vx =>
if accessingX =>
if (imp.z == Unimportant && imp.n == Unimportant) {
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs))
} else {
@ -928,22 +956,22 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
}
case (AssemblyLine(LAX, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vx =>
if accessingX =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TXA).pos(s) :: _)
case (l@AssemblyLine0(op, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs
if opcodesIdentityTable(op) && th.name == vx =>
if opcodesIdentityTable(op) && accessingX =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(l.copy(addrMode = AbsoluteX, parameter = features.identityArray) :: _)
case (l@AssemblyLine0(op, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs
if opcodesIdentityTable(op) && th.name == vy =>
if opcodesIdentityTable(op) && accessingY =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(l.copy(addrMode = AbsoluteY, parameter = features.identityArray) :: _)
case (l@AssemblyLine0(LDA | TYA | TXA | TZA | CLA, _, _), _) :: xs if aCandidate.isDefined && isReturn(xs) =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(l :: _)
case (l@AssemblyLine0(LDA, _, _), _) :: (AssemblyLine0(op, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs
if opcodesCommutative(op) && th.name == va =>
if opcodesCommutative(op) && accessingA =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(l.copy(opcode = op) :: _)
case (l@AssemblyLine0(LDA, _, _), _) :: (clc@AssemblyLine0(CLC, _, _), _) :: (AssemblyLine0(op, Absolute | ZeroPage, MemoryAddressConstant(th)), _) :: xs
@ -967,7 +995,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TYA).pos(s) :: clc :: l.copy(opcode = op) :: _)
case (AssemblyLine(LDA | STA, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), imp) :: xs
if th.name == va =>
if accessingA =>
if (imp.z == Unimportant && imp.n == Unimportant) {
inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)
} else {
@ -975,11 +1003,11 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
}
case (AssemblyLine(LAX, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == va =>
if accessingA =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TAX).pos(s) :: _)
case (AssemblyLine(LDY, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), imp) :: xs
if th.name == vy =>
if accessingY =>
if (imp.z == Unimportant && imp.n == Unimportant) {
inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)
} else {
@ -987,7 +1015,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
}
case (AssemblyLine(LDZ, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), imp) :: xs
if th.name == vz =>
if accessingZ =>
if (imp.z == Unimportant && imp.n == Unimportant) {
inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)
} else {
@ -995,64 +1023,64 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
}
case (AssemblyLine(LDA, Absolute | ZeroPage, MemoryAddressConstant(th), Elidability.Elidable, s), _) :: (AssemblyLine(TAX, _, _, Elidability.Elidable, _), _) :: xs
if th.name == vx =>
if accessingX =>
// these TXA's may get optimized away by a different optimization
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TXA).pos(s) :: _)
case (AssemblyLine(LDA, Absolute | ZeroPage, MemoryAddressConstant(th), Elidability.Elidable, s), _) :: (AssemblyLine(TAY, _, _, Elidability.Elidable, _), _) :: xs
if th.name == vy =>
if accessingY =>
// these TYA's may get optimized away by a different optimization
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TYA).pos(s) :: _)
case (AssemblyLine(LDA, Absolute | ZeroPage, MemoryAddressConstant(th), Elidability.Elidable, s), _) :: (AssemblyLine(TAZ, _, _, Elidability.Elidable, _), _) :: xs
if th.name == vz =>
if accessingZ =>
// these TZA's may get optimized away by a different optimization
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TZA).pos(s) :: _)
case (AssemblyLine(LDX, Absolute | ZeroPage, MemoryAddressConstant(th), Elidability.Elidable, s), _) :: (AssemblyLine(TXA, _, _, Elidability.Elidable, _), _) :: xs
if th.name == va =>
if accessingA =>
// these TAX's may get optimized away by a different optimization
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TAX).pos(s) :: _)
case (AssemblyLine(LDY, Absolute | ZeroPage, MemoryAddressConstant(th), Elidability.Elidable, _), _) :: (AssemblyLine(TYA, _, _, Elidability.Elidable, _), _) :: xs
if th.name == va =>
if accessingA =>
// these TAY's may get optimized away by a different optimization
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TAY) :: _)
case (AssemblyLine(LDA, Absolute | ZeroPage, MemoryAddressConstant(th), _, s1), _) :: (AssemblyLine(CMP, am, param, Elidability.Elidable, s2), _) :: xs
if th.name == vx && CpxyzAddrModes(am) && isNot(vx, param) =>
if accessingX && CpxyzAddrModes(am) && isNot(vx, param) =>
// ditto
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TXA).pos(s1) :: AssemblyLine(CPX, am, param).pos(s2) :: _)
case (AssemblyLine(LDA, Absolute | ZeroPage, MemoryAddressConstant(th), _, s1), _) :: (AssemblyLine(CMP, am, param, Elidability.Elidable, s2), _) :: xs
if th.name == vy && CpxyzAddrModes(am) && isNot(vx, param) =>
if accessingY && CpxyzAddrModes(am) && isNot(vx, param) =>
// ditto
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TYA).pos(s1) :: AssemblyLine(CPY, am, param).pos(s2) :: _)
case (AssemblyLine(LDA, Absolute | ZeroPage, MemoryAddressConstant(th), _, s1), _) :: (AssemblyLine(CMP, am, param, Elidability.Elidable, s2), _) :: xs
if th.name == vy && CpxyzAddrModes(am) && isNot(vx, param) =>
if accessingY && CpxyzAddrModes(am) && isNot(vx, param) =>
// ditto
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TZA).pos(s1) :: AssemblyLine(CPZ, am, param).pos(s2) :: _)
case (AssemblyLine(LDA, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vx =>
if accessingX =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TXA).pos(s) :: _)
case (AssemblyLine(LDA, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vy =>
if accessingY =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TYA).pos(s) :: _)
case (AssemblyLine(LDY, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vx && features.indexRegisterTransfers =>
if accessingX && features.indexRegisterTransfers =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TXY).pos(s) :: _)
case (AssemblyLine(LDX, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vy && features.indexRegisterTransfers =>
if accessingY && features.indexRegisterTransfers =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TYX).pos(s) :: _)
case (l0@AssemblyLine0(LDY, Absolute | ZeroPage, MemoryAddressConstant(th)), _) ::
(l1@AssemblyLine0(LDA | STA | ADC | SBC | AND | ORA | EOR | CMP, AbsoluteY, _), f):: xs
if th.name == vx =>
if accessingX =>
if (l1.opcode != STA || f.n == Unimportant && f.z == Unimportant) {
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(l1.copy(addrMode = AbsoluteX) :: _)
} else if (f.c == Unimportant) {
@ -1064,7 +1092,7 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
case (l0@AssemblyLine0(LDX, Absolute | ZeroPage, MemoryAddressConstant(th)), _) ::
(l1@AssemblyLine0(LDA | STA | ADC | SBC | AND | ORA | EOR | CMP, AbsoluteX, _), f):: xs
if th.name == vy =>
if accessingY =>
if (l1.opcode != STA || f.n == Unimportant && f.z == Unimportant) {
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(l1.copy(addrMode = AbsoluteY) :: _)
} else if (f.c == Unimportant) {
@ -1076,31 +1104,31 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
case (l0@AssemblyLine0(LDY, Absolute | ZeroPage, MemoryAddressConstant(th)), _) ::
(l5@AssemblyLine0(SEC | CLC, _, _), _) ::
(l1@AssemblyLine0(LDA | STA | ADC | SBC | AND | ORA | EOR | CMP, AbsoluteY, _), _):: xs
if th.name == vx =>
if accessingX =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(l5 :: l1.copy(addrMode = AbsoluteX) :: _)
case (l0@AssemblyLine0(LDX, Absolute | ZeroPage, MemoryAddressConstant(th)), _) ::
(l5@AssemblyLine0(SEC | CLC, _, _), _) ::
(l1@AssemblyLine0(LDA | STA | ADC | SBC | AND | ORA | EOR | CMP, AbsoluteX, _), _):: xs
if th.name == vy =>
if accessingY =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(l5 :: l1.copy(addrMode = AbsoluteY) :: _)
case (AssemblyLine(LDY, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vx => features.log.fatal("Unexpected LDY")
if accessingX => features.log.fatal("Unexpected LDY")
case (AssemblyLine(LDX, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vy => features.log.fatal("Unexpected LDX")
if accessingY => features.log.fatal("Unexpected LDX")
case (AssemblyLine(LDA, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vz =>
if accessingZ =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TZA).pos(s) :: _)
case (AssemblyLine(LDX, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == va =>
if accessingA =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TAX).pos(s) :: _)
case (AssemblyLine(LDY, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == va =>
if accessingA =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TAY).pos(s) :: _)
case (AssemblyLine(LDA, am, param, Elidability.Elidable, s1), _) :: (AssemblyLine(STA, Absolute | ZeroPage, MemoryAddressConstant(th), Elidability.Elidable, s2), _) :: xs
@ -1119,51 +1147,51 @@ object VariableToRegisterOptimization extends AssemblyOptimization[AssemblyLine]
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine(LDZ, am, param).pos(s1, s2) :: AssemblyLine.implied(TZA).pos(s1, s2) :: _)
case (AssemblyLine(STA, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vx =>
if accessingX =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TAX).pos(s) :: _)
case (AssemblyLine(STA, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vy =>
if accessingY =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TAY).pos(s) :: _)
case (AssemblyLine(STA, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vz =>
if accessingZ =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TAZ).pos(s) :: _)
case (AssemblyLine(STX, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == va =>
if accessingA =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TXA).pos(s) :: _)
case (AssemblyLine(STY, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == va =>
if accessingA =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TYA).pos(s) :: _)
case (AssemblyLine(STX, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vy && features.indexRegisterTransfers =>
if accessingY && features.indexRegisterTransfers =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TXY).pos(s) :: _)
case (AssemblyLine(STY, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vx && features.indexRegisterTransfers =>
if accessingX && features.indexRegisterTransfers =>
tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TYX).pos(s) :: _)
case (AssemblyLine(STX, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vy => features.log.fatal("Unexpected STX")
if accessingY => features.log.fatal("Unexpected STX")
case (AssemblyLine(STY, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vx => features.log.fatal("Unexpected STY")
if accessingX => features.log.fatal("Unexpected STY")
case (AssemblyLine(STZ, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vx =>
if accessingX =>
if (features.izIsAlwaysZero) tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.immediate(LDX, 0).pos(s) :: _)
else features.log.fatal("Unexpected STZ")
case (AssemblyLine(STZ, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == vy =>
if accessingY =>
if (features.izIsAlwaysZero) tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.immediate(LDY, 0).pos(s) :: _)
else features.log.fatal("Unexpected STZ")
case (AssemblyLine(STZ, Absolute | ZeroPage, MemoryAddressConstant(th), _, s), _) :: xs
if th.name == va =>
if accessingA =>
if (features.izIsAlwaysZero) tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.immediate(LDA, 0).pos(s) :: _)
else tailcall(inlineVars(xCandidate, yCandidate, zCandidate, aCandidate, features, xs)).map(AssemblyLine.implied(TZA).pos(s) :: _)
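
Most of the edits in this file hoist the repeated th.name == vx / vy / vz / va checks into accessingX / accessingY / accessingZ / accessingA values computed once from the head of the line list, and gate the identity-table rewrite on the new useIdentityPage feature instead of blastProcessing. The rewrite itself is sketched below with simplified, made-up types (not compiler code): an absolute access to a variable that now lives in X or Y keeps its opcode but is re-addressed through the identity page.

object InlineViaIdentitySketch {
  sealed trait AddrMode; case object Absolute extends AddrMode; case object AbsoluteX extends AddrMode
  final case class Line(opcode: String, addrMode: AddrMode, parameter: String)

  // Mirrors l.copy(addrMode = AbsoluteX, parameter = features.identityArray): the variable's
  // address is dropped and the access goes through identity$, indexed by X.
  def rewrite(l: Line, identityArray: String): Line =
    l.copy(addrMode = AbsoluteX, parameter = identityArray)

  def main(args: Array[String]): Unit = {
    val before = Line("ORA", Absolute, "some_var") // some_var's value is cached in X
    assert(rewrite(before, "identity$") == Line("ORA", AbsoluteX, "identity$"))
  }
}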

View File

@ -393,6 +393,9 @@ class Environment(val parent: Option[Environment], val prefix: String, val cpuFa
}
}
@inline
final def identityPage: Constant = maybeGet[MfArray]("identity$").fold(Constant.Zero)(_.toAddress)
def getPointy(name: String): Pointy = {
InitializedMemoryVariable
UninitializedMemoryVariable
@ -2618,7 +2621,7 @@ class Environment(val parent: Option[Environment], val prefix: String, val cpuFa
def collectDeclarations(program: Program, options: CompilationOptions): Unit = {
val b = get[VariableType]("byte")
val v = get[Type]("void")
if (options.flag(CompilationFlag.OptimizeForSonicSpeed)) {
if (options.flag(CompilationFlag.IdentityPage)) {
addThing(InitializedArray("identity$", None, IndexedSeq.tabulate(256)(n => LiteralExpression(n, 1)), declaredBank = None, b, b, readOnly = true, Set.empty, defaultArrayAlignment(options, 256)), None)
}
program.declarations.foreach {
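
On the environment side, identity$ (256 entries, entry n holding n, declared with defaultArrayAlignment(options, 256), which for a 256-byte table means it occupies a single page) is now emitted whenever the IdentityPage flag is set instead of only under OptimizeForSonicSpeed, and the new identityPage accessor hands its address to the optimizers, falling back to Constant.Zero when the table has not been declared (the Z80 assembler passes Constant.Zero unconditionally). A rough standalone model of that fallback, with simplified, made-up types:

final case class Addr(value: Int)

object EnvironmentSketch {
  val Zero = Addr(0)

  // Stands in for maybeGet[MfArray]("identity$").fold(Constant.Zero)(_.toAddress),
  // with Option[Int] replacing the optional array and Addr replacing Constant.
  def identityPage(identityArrayAddress: Option[Int]): Addr =
    identityArrayAddress.fold(Zero)(Addr(_))

  def main(args: Array[String]): Unit = {
    assert(identityPage(None) == Zero)                 // flag off: no table declared
    assert(identityPage(Some(0xC000)) == Addr(0xC000)) // flag on: the table's (page-aligned) address
  }
}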

View File

@ -22,7 +22,7 @@ class MosAssembler(program: Program,
override def performFinalOptimizationPass(f: NormalFunction, actuallyOptimize: Boolean, options: CompilationOptions, code: List[AssemblyLine]):List[AssemblyLine] = {
val optimizationContext = OptimizationContext(options, Map(), f.environment.maybeGet[ThingInMemory]("__reg"), Set())
val optimizationContext = OptimizationContext(options, Map(), f.environment.maybeGet[ThingInMemory]("__reg"), f.environment.identityPage, Set())
if (actuallyOptimize) {
val finalCode = if (options.flag(CompilationFlag.EmitHudsonOpcodes)) HudsonOptimizations.removeLoadZero(f, code, optimizationContext) else code
JumpShortening(f, JumpShortening(f, JumpFixing(f, JumpFollowing(options, finalCode), options), optimizationContext), optimizationContext)
@ -167,7 +167,7 @@ class MosAssembler(program: Program,
case AssemblyLine0(BRK | RTI, _, _) => true
case _ => false
}) return
val optimizationContext = OptimizationContext(options, Map(), function.environment.maybeGet[ThingInMemory]("__reg"), Set())
val optimizationContext = OptimizationContext(options, Map(), function.environment.maybeGet[ThingInMemory]("__reg"), function.environment.identityPage, Set())
val flow = CoarseFlowAnalyzer.analyze(function, code, optimizationContext)
def rtsPropertyScan[T](extractor: CpuStatus => Status[T])(niceFunctionProperty: Status[T] => Option[NiceFunctionProperty]): Unit = {
val statuses = code.zipWithIndex.flatMap{

View File

@ -865,7 +865,7 @@ class Z80Assembler(program: Program,
case ZLine0(RST, _, _) => true
case _ => false
}) return
val optimizationContext = OptimizationContext(options, Map(), None, Set())
val optimizationContext = OptimizationContext(options, Map(), None, Constant.Zero, Set())
val flow = CoarseFlowAnalyzer.analyze(function, code, optimizationContext)
def retPropertyScan[T](extractor: CpuStatus => Status[T])(niceFunctionProperty: Status[T] => Option[NiceFunctionProperty]): Unit = {
val statuses = code.zipWithIndex.flatMap{