Mirror of https://github.com/irmen/prog8.git (synced 2024-12-24 01:29:28 +00:00)

Commit c12bf991b3 (parent 78a097585d): reintegrate into existing IR optimizer
@@ -7,7 +7,6 @@ import prog8.code.SymbolTable
 import prog8.code.ast.*
 import prog8.code.core.*
 import prog8.intermediate.*
-import prog8.iroptimizer.IROptimizer
 import kotlin.io.path.readBytes
 import kotlin.math.pow
 
@@ -67,12 +66,6 @@ class IRCodeGen(
             irProg.linkChunks() // re-link
         }
 
-        if(options.optimize) {
-            // TODO integrate into peephole optimizer above
-            val opt = IROptimizer(irProg)
-            opt.optimize()
-        }
-
         irProg.validate()
         return irProg
     }

@@ -119,8 +119,8 @@ internal class IRPeepholeOptimizer(private val irprog: IRProgram) {
         indexedInstructions.reversed().forEach { (idx, ins) ->
             if(ins.opcode== Opcode.PUSH) {
                 if(idx < chunk.instructions.size-1) {
-                    val insAfter = chunk.instructions[idx+1] as? IRInstruction
-                    if(insAfter!=null && insAfter.opcode == Opcode.POP) {
+                    val insAfter = chunk.instructions[idx+1]
+                    if(insAfter.opcode == Opcode.POP) {
                         if(ins.reg1==insAfter.reg1) {
                             chunk.instructions.removeAt(idx)
                             chunk.instructions.removeAt(idx)

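A note on the push/pop rule above: the two consecutive chunk.instructions.removeAt(idx) calls are intentional. Removing the PUSH shifts the matching POP down into slot idx, so the second call removes the POP as well. Below is a minimal standalone sketch of the same pattern, using a hypothetical mini-IR (Op/Ins) instead of prog8's actual Opcode/IRInstruction types:

enum class Op { PUSH, POP, LOAD, RETURN }
data class Ins(val op: Op, val reg: Int = 0)

fun removePushPopPairs(instructions: MutableList<Ins>): Boolean {
    var changed = false
    // walk backwards so earlier removals never invalidate indices still to be visited
    for (idx in instructions.indices.reversed()) {
        if (idx >= instructions.size - 1) continue      // nothing follows this instruction
        val ins = instructions[idx]
        val insAfter = instructions[idx + 1]
        if (ins.op == Op.PUSH && insAfter.op == Op.POP && ins.reg == insAfter.reg) {
            instructions.removeAt(idx)   // removes the PUSH
            instructions.removeAt(idx)   // the POP has shifted into idx, remove it too
            changed = true
        }
    }
    return changed
}

fun main() {
    val code = mutableListOf(Ins(Op.LOAD, 1), Ins(Op.PUSH, 1), Ins(Op.POP, 1), Ins(Op.RETURN))
    removePushPopPairs(code)
    println(code)   // [Ins(op=LOAD, reg=1), Ins(op=RETURN, reg=0)]
}
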
@@ -143,16 +143,16 @@ internal class IRPeepholeOptimizer(private val irprog: IRProgram) {
         indexedInstructions.reversed().forEach { (idx, ins) ->
             if(ins.opcode== Opcode.SEC || ins.opcode== Opcode.CLC) {
                 if(idx < chunk.instructions.size-1) {
-                    val insAfter = chunk.instructions[idx+1] as? IRInstruction
-                    if(insAfter?.opcode == ins.opcode) {
+                    val insAfter = chunk.instructions[idx+1]
+                    if(insAfter.opcode == ins.opcode) {
                         chunk.instructions.removeAt(idx)
                         changed = true
                     }
-                    else if(ins.opcode== Opcode.SEC && insAfter?.opcode== Opcode.CLC) {
+                    else if(ins.opcode== Opcode.SEC && insAfter.opcode== Opcode.CLC) {
                         chunk.instructions.removeAt(idx)
                         changed = true
                     }
-                    else if(ins.opcode== Opcode.CLC && insAfter?.opcode== Opcode.SEC) {
+                    else if(ins.opcode== Opcode.CLC && insAfter.opcode== Opcode.SEC) {
                         chunk.instructions.removeAt(idx)
                         changed = true
                     }

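The SEC/CLC rule now reads the next instruction directly instead of going through a nullable cast, and it folds redundant carry-flag operations: a SEC or CLC that is immediately repeated, or immediately overridden by its opposite, is dropped. Illustrated on a hypothetical instruction sequence (not taken from real prog8 output):

    SEC
    SEC        -->        CLC
    CLC

The reversed scan first removes the second SEC (the CLC right after it overrides it), then removes the first SEC for the same reason, leaving only the CLC.
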
@@ -174,10 +174,30 @@ internal class IRPeepholeOptimizer(private val irprog: IRProgram) {
                     changed = true
                 }
             }
 
             // remove useless RETURN
             if(idx>0 && (ins.opcode == Opcode.RETURN || ins.opcode==Opcode.RETURNREG)) {
-                val previous = chunk.instructions[idx-1] as? IRInstruction
-                if(previous?.opcode in OpcodesThatJump) {
+                val previous = chunk.instructions[idx-1]
+                if(previous.opcode in OpcodesThatJump) {
                     chunk.instructions.removeAt(idx)
                     changed = true
                 }
             }
+
+            // replace subsequent opcodes that jump by just the first
+            if(idx>0 && (ins.opcode in OpcodesThatJump)) {
+                val previous = chunk.instructions[idx-1]
+                if(previous.opcode in OpcodesThatJump) {
+                    chunk.instructions.removeAt(idx)
+                    changed = true
+                }
+            }
+
+            // replace call + return --> jump
+            if(idx>0 && ins.opcode==Opcode.RETURN) {
+                val previous = chunk.instructions[idx-1]
+                if(previous.opcode==Opcode.CALL || previous.opcode==Opcode.CALLRVAL) {
+                    chunk.instructions[idx-1] = IRInstruction(Opcode.JUMP, value=previous.value, labelSymbol = previous.labelSymbol, branchTarget = previous.branchTarget)
+                    chunk.instructions.removeAt(idx)
+                    changed = true
+                }

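The last new rule is a tail-call style rewrite: a CALL (or CALLRVAL) that is immediately followed by a RETURN becomes a plain JUMP to the same target, so the callee's own RETURN goes straight back to our caller. A rough standalone sketch of the idea on a simplified instruction model, not prog8's real IRInstruction (register values, CALLRVAL and branch-target objects are left out):

enum class MiniOp { LOAD, CALL, RETURN, JUMP }
data class MiniIns(val op: MiniOp, val target: String? = null)

fun replaceCallReturnWithJump(instructions: MutableList<MiniIns>): Boolean {
    var changed = false
    for (idx in instructions.indices.reversed()) {
        if (idx == 0) continue                       // need a preceding instruction
        val ins = instructions[idx]
        val previous = instructions[idx - 1]
        if (ins.op == MiniOp.RETURN && previous.op == MiniOp.CALL) {
            // turn the call into a jump and drop the now useless return
            instructions[idx - 1] = MiniIns(MiniOp.JUMP, target = previous.target)
            instructions.removeAt(idx)
            changed = true
        }
    }
    return changed
}

fun main() {
    val code = mutableListOf(
        MiniIns(MiniOp.LOAD),
        MiniIns(MiniOp.CALL, "some.subroutine"),
        MiniIns(MiniOp.RETURN),
    )
    replaceCallReturnWithJump(code)
    println(code)  // [MiniIns(op=LOAD, target=null), MiniIns(op=JUMP, target=some.subroutine)]
}
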
@@ -1,64 +0,0 @@
-package prog8.iroptimizer
-
-import prog8.intermediate.*
-
-// TODO integrate into peephole optimizer
-
-internal class IROptimizer(val program: IRProgram) {
-    fun optimize() {
-        program.blocks.forEach { block ->
-            block.children.forEach { elt ->
-                process(elt)
-            }
-        }
-    }
-
-    private fun process(elt: IIRBlockElement) {
-        when(elt) {
-            is IRCodeChunkBase -> {
-                optimizeInstructions(elt)
-                // TODO renumber registers that are only used within the code chunk
-                // val used = elt.usedRegisters()
-            }
-            is IRAsmSubroutine -> {
-                if(elt.asmChunk.isIR) {
-                    optimizeInstructions(elt.asmChunk)
-                }
-                // TODO renumber registers that are only used within the code chunk
-                // val used = elt.usedRegisters()
-            }
-            is IRSubroutine -> {
-                elt.chunks.forEach { process(it) }
-            }
-        }
-    }
-
-    private fun optimizeInstructions(elt: IRCodeChunkBase) {
-        elt.instructions.withIndex().windowed(2).forEach {(first, second) ->
-            val i1 = first.value
-            val i2 = second.value
-            // replace call + return --> jump
-            if((i1.opcode==Opcode.CALL || i1.opcode==Opcode.CALLRVAL) && i2.opcode==Opcode.RETURN) {
-                elt.instructions[first.index] = IRInstruction(Opcode.JUMP, value=i1.value, labelSymbol = i1.labelSymbol, branchTarget = i1.branchTarget)
-                elt.instructions[second.index] = IRInstruction(Opcode.NOP)
-                if(second.index==elt.instructions.size-1) {
-                    // it was the last instruction, so the link to the next chunk needs to be cleared
-                    elt.next = null
-                }
-            }
-
-            // replace subsequent opcodes that jump by just the first
-            if(i1.opcode in OpcodesThatJump && i2.opcode in OpcodesThatJump) {
-                elt.instructions[second.index] = IRInstruction(Opcode.NOP)
-            }
-        }
-
-        // remove nops
-        elt.instructions.withIndex()
-            .filter { it.value.opcode==Opcode.NOP }
-            .reversed()
-            .forEach {
-                elt.instructions.removeAt(it.index)
-            }
-    }
-}
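For comparison: the deleted IROptimizer above performed the same two rewrites, but on windowed(2) pairs. It overwrote dead instructions with NOP placeholders and stripped the NOPs in a second, reversed pass, so that removals would not shift the indices of entries still to be visited; the surviving peephole optimizer achieves the same effect by scanning indexedInstructions.reversed() and removing in place. A tiny plain-Kotlin illustration of why that removal has to run back to front (independent of prog8's types):

fun main() {
    val items = mutableListOf("a", "NOP", "b", "NOP", "c")
    items.withIndex()
        .filter { it.value == "NOP" }
        .reversed()                               // delete from the back to the front
        .forEach { items.removeAt(it.index) }
    println(items)   // [a, b, c]
}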