mirror of https://github.com/michaelcmartin/Ophis.git
synced 2025-01-17 21:32:44 +00:00

Major formatting fixes:

* No more tabs
* Fix copyright notices to point at right files and name the license right

commit 1df8ad465d (parent d5ec7bdacd)
@@ -9,9 +9,9 @@
6510 compatibility and deprecation are handled in Ophis.Main."""

-# Copyright 2002 Michael C. Martin.
-# You may use, modify, and distribute this file under the BSD
-# license: See LICENSE.txt for details.
+# Copyright 2002-2012 Michael C. Martin and additional contributors.
+# You may use, modify, and distribute this file under the MIT
+# license: See README for details.

verbose = 1;
@@ -1,13 +1,10 @@
"""Core pragmas

-Provides the core assembler directives. It does not guarantee
-compatibility with older versions of P65-Perl."""
+Provides the core assembler directives."""

-# Copyright 2002 Michael C. Martin.
-# You may use, modify, and distribute this file under the BSD
-# license: See LICENSE.txt for details.
+# Copyright 2002-2012 Michael C. Martin and additional contributors.
+# You may use, modify, and distribute this file under the MIT
+# license: See README for details.

-from __future__ import nested_scopes
-
import Ophis.IR as IR
import Ophis.Frontend as FE
@@ -18,193 +15,193 @@ basecharmap = "".join([chr(x) for x in range(256)])
currentcharmap = basecharmap

def reset():
    global loadedfiles, currentcharmap, basecharmap
    loadedfiles={}
    currentcharmap = basecharmap

def pragmaInclude(ppt, line, result):
    "Includes a source file"
    filename = line.expect("STRING").value
    line.expect("EOL")
    if type(filename)==str: result.append(FE.parse_file(ppt, filename))

def pragmaRequire(ppt, line, result):
    "Includes a source file at most one time"
    filename = line.expect("STRING").value
    line.expect("EOL")
    if type(filename)==str:
        global loadedfiles
        if filename not in loadedfiles:
            loadedfiles[filename]=1
            result.append(FE.parse_file(ppt, filename))

def pragmaIncbin(ppt, line, result):
    "Includes a binary file"
    filename = line.expect("STRING").value
    line.expect("EOL")
    if type(filename)==str:
        try:
            f = file(filename, "rb")
            bytes = f.read()
            f.close()
        except IOError:
            Err.log("Could not read "+filename)
            return
        bytes = [IR.ConstantExpr(ord(x)) for x in bytes]
        result.append(IR.Node(ppt, "Byte", *bytes))

def pragmaCharmap(ppt, line, result):
    "Modify the character map."
    global currentcharmap, basecharmap
    bytes = readData(line)
    if len(bytes) == 0:
        currentcharmap = basecharmap
    else:
        try:
            base = bytes[0].data
            newsubstr = "".join([chr(x.data) for x in bytes[1:]])
            currentcharmap = currentcharmap[:base] + newsubstr + currentcharmap[base+len(newsubstr):]
            if len(currentcharmap) != 256 or base < 0 or base > 255:
                Err.log("Charmap replacement out of range")
                currentcharmap = currentcharmap[:256]
        except ValueError:
            Err.log("Illegal character in .charmap directive")

def pragmaCharmapbin(ppt, line, result):
    "Load a new character map from a file"
    global currentcharmap
    filename = line.expect("STRING").value
    line.expect("EOL")
    if type(filename)==str:
        try:
            f = file(filename, "rb")
            bytes = f.read()
            f.close()
        except IOError:
            Err.log("Could not read "+filename)
            return
        if len(bytes)==256:
            currentcharmap = bytes
        else:
            Err.log("Character map "+filename+" not 256 bytes long")

def pragmaOrg(ppt, line, result):
    "Relocates the PC with no output"
    newPC = FE.parse_expr(line)
    line.expect("EOL")
    result.append(IR.Node(ppt, "SetPC", newPC))

def pragmaAdvance(ppt, line, result):
    "Outputs filler until reaching the target PC"
    newPC = FE.parse_expr(line)
    line.expect("EOL")
    result.append(IR.Node(ppt, "Advance", newPC))

def pragmaCheckpc(ppt, line, result):
    "Enforces that the PC has not exceeded a certain point"
    target = FE.parse_expr(line)
    line.expect("EOL")
    result.append(IR.Node(ppt, "CheckPC", target))

def pragmaAlias(ppt, line, result):
    "Assigns an arbitrary label"
    lbl = line.expect("LABEL").value
    target = FE.parse_expr(line)
    result.append(IR.Node(ppt, "Label", lbl, target))

def pragmaSpace(ppt, line, result):
    "Reserves space in a data segment for a variable"
    lbl = line.expect("LABEL").value
    size = line.expect("NUM").value
    line.expect("EOL")
    result.append(IR.Node(ppt, "Label", lbl, IR.PCExpr()))
    result.append(IR.Node(ppt, "SetPC", IR.SequenceExpr([IR.PCExpr(), "+", IR.ConstantExpr(size)])))

def pragmaText(ppt, line, result):
    "Switches to a text segment"
    next = line.expect("LABEL", "EOL")
    if next.type == "LABEL":
        line.expect("EOL")
        segment = next.value
    else:
        segment = "*text-default*"
    result.append(IR.Node(ppt, "TextSegment", segment))

def pragmaData(ppt, line, result):
    "Switches to a data segment (no output allowed)"
    next = line.expect("LABEL", "EOL")
    if next.type == "LABEL":
        line.expect("EOL")
        segment = next.value
    else:
        segment = "*data-default*"
    result.append(IR.Node(ppt, "DataSegment", segment))

def readData(line):
    "Read raw data from a comma-separated list"
    if line.lookahead(0).type == "STRING":
        data = [IR.ConstantExpr(ord(x)) for x in line.expect("STRING").value.translate(currentcharmap)]
    else:
        data = [FE.parse_expr(line)]
    next = line.expect(',', 'EOL').type
    while next == ',':
        if line.lookahead(0).type == "STRING":
            data.extend([IR.ConstantExpr(ord(x)) for x in line.expect("STRING").value])
        else:
            data.append(FE.parse_expr(line))
        next = line.expect(',', 'EOL').type
    return data

def pragmaByte(ppt, line, result):
    "Raw data, a byte at a time"
    bytes = readData(line)
    result.append(IR.Node(ppt, "Byte", *bytes))

def pragmaWord(ppt, line, result):
    "Raw data, a word at a time, little-endian"
    words = readData(line)
    result.append(IR.Node(ppt, "Word", *words))

def pragmaDword(ppt, line, result):
    "Raw data, a double-word at a time, little-endian"
    dwords = readData(line)
    result.append(IR.Node(ppt, "Dword", *dwords))

def pragmaWordbe(ppt, line, result):
    "Raw data, a word at a time, big-endian"
    words = readData(line)
    result.append(IR.Node(ppt, "WordBE", *words))

def pragmaDwordbe(ppt, line, result):
    "Raw data, a dword at a time, big-endian"
    dwords = readData(line)
    result.append(IR.Node(ppt, "DwordBE", *dwords))

def pragmaScope(ppt, line, result):
    "Create a new lexical scoping block"
    line.expect("EOL")
    result.append(IR.Node(ppt, "ScopeBegin"))

def pragmaScend(ppt, line, result):
    "End the innermost lexical scoping block"
    line.expect("EOL")
    result.append(IR.Node(ppt, "ScopeEnd"))

def pragmaMacro(ppt, line, result):
    "Begin a macro definition"
    lbl = line.expect("LABEL").value
    line.expect("EOL")
    result.append(IR.Node(ppt, "MacroBegin", lbl))

def pragmaMacend(ppt, line, result):
    "End a macro definition"
    line.expect("EOL")
    result.append(IR.Node(ppt, "MacroEnd"))

def pragmaInvoke(ppt, line, result):
    macro = line.expect("LABEL").value
    if line.lookahead(0).type == "EOL":
        args = []
    else:
        args = readData(line)
    result.append(IR.Node(ppt, "MacroInvoke", macro, *args))
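These handlers are not called directly; Frontend.parse_line looks up "pragma" plus the directive name in whatever modules have been appended to Frontend.pragma_modules (the driver in Ophis.Main normally registers this module there). A minimal sketch of that dispatch, assuming the Ophis package from this tree is importable and that this file is Ophis.CorePragmas as in the repository layout; the program point string "demo.oph:1" is invented for the example:

import Ophis.Frontend as FE
import Ophis.CorePragmas as CorePragmas

FE.pragma_modules.append(CorePragmas)        # normally done by the driver, not by hand
tokens = FE.lex("demo.oph:1", ".org $0800")  # '.', LABEL 'org', NUM 2048, EOL
node = FE.parse_line("demo.oph:1", tokens)   # dispatches to pragmaOrg above
print node                                   # demo.oph:1: SetPC - 2048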
@@ -1,75 +1,74 @@
-"""Symbol tables and environments for P65.
+"""Symbol tables and environments for Ophis.

Implements the symbol lookup, through nested environments -
any non-temporary variable is stored at the top level."""

-# Copyright 2002 Michael C. Martin.
-# You may use, modify, and distribute this file under the BSD
-# license: See LICENSE.txt for details.
+# Copyright 2002-2012 Michael C. Martin and additional contributors.
+# You may use, modify, and distribute this file under the MIT
+# license: See README for details.

-from __future__ import nested_scopes
import Ophis.Errors as Err

class Environment:
    """Environment class.
    Controls the various scopes and global abstract execution variables."""
    def __init__(self):
        self.dicts = [{}]
        self.stack = [0]
        self.pc = 0
        self.segmentdict = {}
        self.segment = "*text-default*"
        self.scopecount = 0
    def __contains__(self, item):
        if item[0] == '_':
            for dict in [self.dicts[i] for i in self.stack]:
                if item in dict: return 1
            return 0
        return item in self.dicts[0]
    def __getitem__(self, item):
        if item[0] == '_':
            for dict in [self.dicts[i] for i in self.stack]:
                if item in dict: return dict[item]
        else:
            if item in self.dicts[0]: return self.dicts[0][item]
        Err.log("Unknown label '%s'" % item)
        return 0
    def __setitem__(self, item, value):
        if item[0] == '_':
            self.dicts[self.stack[0]][item] = value
        else:
            self.dicts[0][item] = value
    def __str__(self):
        return str(self.dicts)
    def getPC(self):
        return self.pc
    def setPC(self, value):
        self.pc = value
    def incPC(self, amount):
        self.pc += amount
    def getsegment(self):
        return self.segment
    def setsegment(self, segment):
        self.segmentdict[self.segment] = self.pc
        self.segment = segment
        self.pc = self.segmentdict.get(segment, 0)
    def reset(self):
        "Clears out program counter, segment, and scoping information"
        self.pc = 0
        self.segmentdict = {}
        self.segment = "*text-default*"
        self.scopecount = 0
        if len(self.stack) > 1:
            Err.log("Unmatched .scope")
        self.stack = [0]
    def newscope(self):
        "Enters a new scope for temporary labels."
        self.scopecount += 1
        self.stack.insert(0, self.scopecount)
        if len(self.dicts) <= self.scopecount: self.dicts.append({})
    def endscope(self):
        "Leaves a scope."
        if len(self.stack) == 1:
            Err.log("Unmatched .scend")
        self.stack.pop(0)
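A small sketch of how the scoping rules above behave: underscore-prefixed names are temporary and live only in the current scope stack, everything else goes to the top level. The label names and addresses are invented for illustration:

import Ophis.Environment as Environment

env = Environment.Environment()
env["main"] = 0x0800      # no leading underscore: stored at the top level
env.newscope()
env["_loop"] = 0x0810     # temporary label: stored in the innermost scope
print "main" in env, "_loop" in env   # True True
env.endscope()
print "_loop" in env                  # False: the temporary label went out of scope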
@@ -1,24 +1,24 @@
"""Error logging

Keeps track of the number of errors inflicted so far, and
where in the assembly the errors are occurring."""

-# Copyright 2002 Michael C. Martin.
-# You may use, modify, and distribute this file under the BSD
-# license: See LICENSE.txt for details.
+# Copyright 2002-2012 Michael C. Martin and additional contributors.
+# You may use, modify, and distribute this file under the MIT
+# license: See README for details.

count = 0
currentpoint = "<Top Level>"

def log(err):
    """Reports an error at the current program point, and increases
    the global error count."""
    global count
    count = count+1
    print currentpoint+": "+err

def report():
    "Print out the number of errors."
    if count == 0: print "No errors"
    elif count == 1: print "1 error"
    else: print str(count)+" errors"
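Usage is as simple as the module looks; a quick sketch with an invented program point:

import Ophis.Errors as Err

Err.currentpoint = "demo.oph:3"
Err.log("Branch out of range")   # prints "demo.oph:3: Branch out of range"
Err.report()                     # prints "1 error"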
@@ -1,34 +1,32 @@
"""Lexer and Parser

Constructs a list of IR nodes from a list of input strings."""

-from __future__ import nested_scopes
-
import Ophis.Errors as Err
import Ophis.Opcodes as Ops
import Ophis.IR as IR
import Ophis.CmdLine as Cmd
import os

-# Copyright 2002 Michael C. Martin.
-# You may use, modify, and distribute this file under the BSD
-# license: See LICENSE.txt for details.
+# Copyright 2002-2012 Michael C. Martin and additional contributors.
+# You may use, modify, and distribute this file under the MIT
+# license: See README for details.


class Lexeme:
    "Class for lexer tokens. Used by lexer and parser."
    def __init__(self, type="UNKNOWN", value=None):
        self.type = type.upper()
        self.value = value
    def __str__(self):
        if self.value == None:
            return self.type
        else:
            return self.type+":"+str(self.value)
    def __repr__(self):
        return "Lexeme("+`self.type`+", "+`self.value`+")"
    def matches(self, other):
        "1 if Lexemes a and b have the same type."
        return self.type == other.type

bases = {"$":("hexadecimal", 16),
         "%":("binary", 2),
@@ -37,297 +35,297 @@ bases = {"$":("hexadecimal", 16),
punctuation = "#,`<>():.+-*/&|^[]"

def lex(point, line):
    """Turns a line of source into a sequence of lexemes."""
    Err.currentpoint = point
    result = []
    def is_opcode(op):
        "Tests whether a string is an opcode or an identifier"
        return op in Ops.opcodes
    def add_token(token):
        "Converts a substring into a single lexeme"
        if token == "":
            return
        if token == "0":
            result.append(Lexeme("NUM", 0))
            return
        firstchar = token[0]
        rest = token[1:]
        if firstchar == '"':
            result.append(Lexeme("STRING", rest))
            return
        elif firstchar in bases:
            try:
                result.append(Lexeme("NUM", long(rest, bases[firstchar][1])))
                return
            except ValueError:
                Err.log('Invalid '+bases[firstchar][0]+' constant: '+rest)
                result.append(Lexeme("NUM", 0))
                return
        elif firstchar.isdigit():
            try:
                result.append(Lexeme("NUM", long(token)))
            except ValueError:
                Err.log('Identifiers may not begin with a number')
                result.append(Lexeme("LABEL", "ERROR"))
            return
        elif firstchar == "'":
            if len(rest) == 1:
                result.append(Lexeme("NUM", ord(rest)))
            else:
                Err.log("Invalid character constant '"+rest+"'")
                result.append(Lexeme("NUM", 0))
            return
        elif firstchar in punctuation:
            if rest != "":
                Err.log("Internal lexer error! '"+token+"' can't happen!")
            result.append(Lexeme(firstchar))
            return
        else:  # Label, opcode, or index register
            id = token.lower()
            if is_opcode(id):
                result.append(Lexeme("OPCODE", id))
            elif id == "x":
                result.append(Lexeme("X"))
            elif id == "y":
                result.append(Lexeme("Y"))
            else:
                result.append(Lexeme("LABEL", id))
            return
        # should never reach here
        Err.log("Internal lexer error: add_token fall-through")
    def add_EOL():
        "Adds an end-of-line lexeme"
        result.append(Lexeme("EOL"))
    # Actual routine begins here
    value = ""
    quotemode = 0
    backslashmode = 0
    for c in line.strip():
        if backslashmode:
            backslashmode = 0
            value = value + c
        elif c == "\\":
            backslashmode = 1
        elif quotemode:
            if c == '"':
                quotemode = 0
            else:
                value = value + c
        elif c == ';':
            add_token(value)
            value = ""
            break
        elif c.isspace():
            add_token(value)
            value = ""
        elif c in punctuation:
            add_token(value)
            add_token(c)
            value = ""
        elif c == '"':
            add_token(value)
            value = '"'
            quotemode = 1
        else:
            value = value + c
    if backslashmode:
        Err.log("Backslashed newline")
    if quotemode:
        Err.log("Unterminated string constant")
    add_token(value)
    add_EOL()
    return result
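As a quick illustration of the tokenizer above, lexing a typical source line yields an opcode token, punctuation, a numeric literal, and the closing EOL (the output lines come from Lexeme.__str__; the file name is invented):

import Ophis.Frontend as FE

for tok in FE.lex("demo.oph:1", "lda #$10  ; load the accumulator"):
    print tok
# OPCODE:lda
# #
# NUM:16
# EOL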

class ParseLine:
    "Maintains the parse state of a line of code. Enables arbitrary lookahead."
    def __init__(self, lexemes):
        self.lexemes = lexemes
        self.location = 0
    def lookahead(self, i):
        """Returns the token i units ahead in the parse.
        lookahead(0) returns the next token; trying to read off the end of
        the sequence returns the last token in the sequence (usually EOL)."""
        target = self.location+i
        if target >= len(self.lexemes): target = -1
        return self.lexemes[target]
    def pop(self):
        "Returns and removes the next element in the line."
        old = self.location
        if self.location < len(self.lexemes)-1: self.location += 1
        return self.lexemes[old]
    def expect(self, *tokens):
        """Reads a token from the ParseLine line and returns it if it's of a type
        in the sequence tokens. Otherwise, it logs an error."""
        token = self.pop()
        if token.type not in tokens:
            Err.log('Expected: "'+'", "'.join(tokens)+'"')
        return token

pragma_modules = []

def parse_expr(line):
    "Parses an Ophis arithmetic expression."
    def atom():
        "Parses lowest-priority expression components."
        next = line.lookahead(0).type
        if next == "NUM":
            return IR.ConstantExpr(line.expect("NUM").value)
        elif next == "LABEL":
            return IR.LabelExpr(line.expect("LABEL").value)
        elif next == "^":
            line.expect("^")
            return IR.PCExpr()
        elif next == "[":
            line.expect("[")
            result = parse_expr(line)
            line.expect("]")
            return result
        elif next == "+":
            offset = 0
            while next == "+":
                offset += 1
                line.expect("+")
                next = line.lookahead(0).type
            return IR.LabelExpr("*"+str(templabelcount+offset))
        elif next == "-":
            offset = 1
            while next == "-":
                offset -= 1
                line.expect("-")
                next = line.lookahead(0).type
            return IR.LabelExpr("*"+str(templabelcount+offset))
        elif next == ">":
            line.expect(">")
            return IR.HighByteExpr(atom())
        elif next == "<":
            line.expect("<")
            return IR.LowByteExpr(atom())
        else:
            Err.log('Expected: expression')
    def precedence_read(constructor, reader, separators):
        """Handles precedence. The reader argument is a function that returns
        expressions that bind more tightly than these; separators is a list
        of strings naming the operators at this precedence level. The
        constructor argument is a class, indicating what node type holds
        objects of this precedence level.

        Returns a list of Expr objects with separator strings between them."""
        result = [reader()]  # first object
        nextop = line.lookahead(0).type
        while (nextop in separators):
            line.expect(nextop)
            result.append(nextop)
            result.append(reader())
            nextop = line.lookahead(0).type
        if len(result) == 1: return result[0]
        return constructor(result)
    def term():
        "Parses * and /"
        return precedence_read(IR.SequenceExpr, atom, ["*", "/"])
    def arith():
        "Parses + and -"
        return precedence_read(IR.SequenceExpr, term, ["+", "-"])
    def bits():
        "Parses &, |, and ^"
        return precedence_read(IR.SequenceExpr, arith, ["&", "|", "^"])
    return bits()

def parse_line(ppt, lexemelist):
    "Turn a line of source into an IR Node."
    Err.currentpoint = ppt
    result = []
    line = ParseLine(lexemelist)
    def aux():
        "Accumulates all IR nodes defined by this line."
        if line.lookahead(0).type == "EOL":
            pass
        elif line.lookahead(1).type == ":":
            newlabel=line.expect("LABEL").value
            line.expect(":")
            result.append(IR.Node(ppt, "Label", newlabel, IR.PCExpr()))
            aux()
        elif line.lookahead(0).type == "*":
            global templabelcount
            templabelcount = templabelcount + 1
            result.append(IR.Node(ppt, "Label", "*"+str(templabelcount), IR.PCExpr()))
            line.expect("*")
            aux()
        elif line.lookahead(0).type == "." or line.lookahead(0).type == "`":
            which = line.expect(".", "`").type
            if (which == "."): pragma = line.expect("LABEL").value
            else: pragma = "invoke"
            pragmaFunction = "pragma"+pragma.title()
            for mod in pragma_modules:
                if hasattr(mod, pragmaFunction):
                    getattr(mod, pragmaFunction)(ppt, line, result)
                    break
            else:
                Err.log("Unknown pragma "+pragma)

        else:  # Instruction
            opcode = line.expect("OPCODE").value
            if line.lookahead(0).type == "#":
                mode = "Immediate"
                line.expect("#")
                arg = parse_expr(line)
                line.expect("EOL")
            elif line.lookahead(0).type == "(":
                line.expect("(")
                arg = parse_expr(line)
                if line.lookahead(0).type == ",":
                    mode = "PointerX"
                    line.expect(",")
                    line.expect("X")
                    line.expect(")")
                    line.expect("EOL")
                else:
                    line.expect(")")
                    tok = line.expect(",", "EOL").type
                    if tok == "EOL":
                        mode = "Pointer"
                    else:
                        mode = "PointerY"
                        line.expect("Y")
                        line.expect("EOL")
            elif line.lookahead(0).type == "EOL":
                mode = "Implied"
                arg = None
            else:
                arg = parse_expr(line)
                tok = line.expect("EOL", ",").type
                if tok == ",":
                    tok = line.expect("X", "Y").type
                    if tok == "X": mode = "MemoryX"
                    else: mode = "MemoryY"
                    line.expect("EOL")
                else: mode = "Memory"
            result.append(IR.Node(ppt, mode, opcode, arg))
    aux()
    result = [node for node in result if node is not IR.NullNode]
    if len(result) == 0: return IR.NullNode
    if len(result) == 1: return result[0]
    return IR.SequenceNode(ppt, result)

def parse_file(ppt, filename):
    "Loads a .P65 source file, and returns an IR list."
    Err.currentpoint = ppt
    if Cmd.verbose > 0: print "Loading "+filename
    try:
        f = file(filename)
        linelist = f.readlines()
        f.close()
        pptlist = ["%s:%d" % (filename, i+1) for i in range(len(linelist))]
        lexlist = map(lex, pptlist, linelist)
        IRlist = map(parse_line, pptlist, lexlist)
        IRlist = [node for node in IRlist if node is not IR.NullNode]
        return IR.SequenceNode(ppt, IRlist)
    except IOError:
        Err.log("Could not read "+filename)
        return IR.NullNode

def parse(filename):
    "Top level parsing routine, taking a source file name and returning an IR list."
    global templabelcount
    templabelcount = 0
    return parse_file("<Top Level>", filename)
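A short sketch of the expression parser's precedence ladder (bits over arith over term over atom), evaluated with the IR expression classes that follow in this commit; the program point is invented:

import Ophis.Frontend as FE

line = FE.ParseLine(FE.lex("demo.oph:2", "2+3*4"))
expr = FE.parse_expr(line)
print expr           # [2 + [3 * 4]] -- * binds more tightly than +
print expr.value()   # 14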
src/Ophis/IR.py
@@ -1,161 +1,160 @@
-"""P65 Intermediate Representation
+"""Ophis Intermediate Representation

Classes for representing the Intermediate nodes upon which the
assembler passes operate."""

-# Copyright 2002 Michael C. Martin.
-# You may use, modify, and distribute this file under the BSD
-# license: See LICENSE.txt for details.
+# Copyright 2002-2012 Michael C. Martin and additional contributors.
+# You may use, modify, and distribute this file under the MIT
+# license: See README for details.

-from __future__ import nested_scopes
import Ophis.Errors as Err

class Node:
    """The default IR Node
    Instances of Node always have the three fields ppt(Program Point),
    nodetype(a string), and data (a list)."""
    def __init__(self, ppt, nodetype, *data):
        self.ppt = ppt
        self.nodetype = nodetype
        self.data = list(data)
    def accept(self, asmpass, env=None):
        """Implements the Visitor pattern for an assembler pass.
        Calls the routine 'asmpass.visitTYPE(self, env)' where
        TYPE is the value of self.nodetype."""
        Err.currentpoint = self.ppt
        routine = getattr(asmpass, "visit"+self.nodetype, asmpass.visitUnknown)
        routine(self, env)
    def __str__(self):
        if self.nodetype != "SEQUENCE":
            return str(self.ppt)+": "+self.nodetype+" - "+" ".join(map(str, self.data))
        else:
            return "\n".join(map(str, self.data))
    def __repr__(self):
        args = [self.ppt, self.nodetype] + self.data
        return "Node(" + ", ".join(map(repr, args)) + ")"

NullNode = Node("<none>", "None")

def SequenceNode(ppt, nodelist):
    return Node(ppt, "SEQUENCE", *nodelist)
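The accept method above is the hook every assembler pass uses: a pass is just an object with visitTYPE methods plus a visitUnknown fallback. A minimal, hypothetical pass to show the dispatch (not one of the real passes in Ophis.Passes):

import Ophis.IR as IR

class PrintSetPC:
    "Toy pass: reacts only to SetPC nodes and ignores everything else."
    def visitSetPC(self, node, env):
        print "PC set to", node.data[0]
    def visitUnknown(self, node, env):
        pass

IR.Node("demo.oph:1", "SetPC", IR.ConstantExpr(0x0800)).accept(PrintSetPC())
# PC set to 2048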

class Expr:
    """Base class for P65 expressions
    All expressions have a field called "data" and a boolean field
    called "hardcoded". An expression is hardcoded if it has no
    symbolic values in it."""
    def __init__(self, data):
        self.data = data
        self.hardcoded = 0
    def __str__(self):
        return "<UNKNOWN: "+`self.data`+">"
    def valid(self, env=None, PCvalid=0):
        """Returns true if the expression can be successfully
        evaluated in the specified environment."""
        return 0
    def value(self, env=None):
        "Evaluates this expression in the given environment."
        return None

class ConstantExpr(Expr):
    "Represents a numeric constant"
    def __init__(self, data):
        self.data = data
        self.hardcoded = 1
    def __str__(self):
        return str(self.data)
    def valid(self, env=None, PCvalid=0):
        return 1
    def value(self, env=None):
        return self.data

class LabelExpr(Expr):
    "Represents a symbolic constant"
    def __init__(self, data):
        self.data = data
        self.hardcoded = 0
    def __str__(self):
        return self.data
    def valid(self, env=None, PCvalid=0):
        return (env is not None) and self.data in env
    def value(self, env=None):
        return env[self.data]

class PCExpr(Expr):
    "Represents the current program counter: ^"
    def __init__(self):
        self.hardcoded = 0
    def __str__(self):
        return "^"
    def valid(self, env=None, PCvalid=0):
        return env is not None and PCvalid
    def value(self, env=None):
        return env.getPC()

class HighByteExpr(Expr):
    "Represents the expression >{data}"
    def __init__(self, data):
        self.data = data
        self.hardcoded = data.hardcoded
    def __str__(self):
        return ">"+str(self.data)
    def valid(self, env=None, PCvalid=0):
        return self.data.valid(env, PCvalid)
    def value(self, env=None):
        val = self.data.value(env)
        return (val >> 8) & 0xff

class LowByteExpr(Expr):
    "Represents the expression <{data}"
    def __init__(self, data):
        self.data = data
        self.hardcoded = data.hardcoded
    def __str__(self):
        return "<"+str(self.data)
    def valid(self, env=None, PCvalid=0):
        return self.data.valid(env, PCvalid)
    def value(self, env=None):
        val = self.data.value(env)
        return val & 0xff

class SequenceExpr(Expr):
    """Represents an interleaving of operands (of type Expr) and
    operators (of type String). Subclasses must provide a routine
    operate(self, firstarg, op, secondarg) that evaluates the
    operator."""
    def __init__(self, data):
        """Constructor for Sequence Expressions. Results will be
        screwy if the data input isn't a list with types
        [Expr, str, Expr, str, Expr, str, ... Expr, str, Expr]."""
        self.data = data
        self.operands = [x for x in data if isinstance(x, Expr)]
        self.operators = [x for x in data if type(x)==str]
        for i in self.operands:
            if not i.hardcoded:
                self.hardcoded = 0
                break
        else:
            self.hardcoded = 1
    def __str__(self):
        return "["+" ".join(map(str, self.data))+"]"
    def valid(self, env=None, PCvalid=0):
        for i in self.operands:
            if not i.valid(env, PCvalid):
                return 0
        return 1
    def value(self, env=None):
        subs = map((lambda x: x.value(env)), self.operands)
        result = subs[0]
        index = 1
        for op in self.operators:
            result = self.operate(result, op, subs[index])
            index += 1
        return result
    def operate(self, start, op, other):
        if op=="*": return start * other
        if op=="/": return start // other
        if op=="+": return start + other
        if op=="-": return start - other
        if op=="&": return start & other
        if op=="|": return start | other
        if op=="^": return start ^ other
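A brief sketch tying the expression classes to the Environment defined earlier in this commit; the label name and values are invented for the example:

import Ophis.IR as IR
import Ophis.Environment as Environment

env = Environment.Environment()
env["buffer_len"] = 40
expr = IR.SequenceExpr([IR.ConstantExpr(2), "*", IR.LabelExpr("buffer_len")])
print expr             # [2 * buffer_len]
print expr.hardcoded   # 0 -- a label is a symbolic value
print expr.valid(env)  # 1
print expr.value(env)  # 80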
@ -1,8 +1,12 @@
|
|||||||
"""Macro support for P65.
|
"""Macro support for Ophis.
|
||||||
|
|
||||||
P65 Macros are cached SequenceNodes with arguments
|
Ophis Macros are cached SequenceNodes with arguments
|
set via .alias commands and prevented from escaping
with .scope and .scend commands."""

-# Copyright 2002 Michael C. Martin.
-# You may use, modify, and distribute this file under the BSD
-# license: See LICENSE.txt for details.
+# Copyright 2002-2012 Michael C. Martin and additional contributors.
+# You may use, modify, and distribute this file under the MIT
+# license: See README for details.

import sys

@@ -15,48 +19,48 @@ currentname = None
currentbody = None

def newMacro(name):
    "Start creating a new macro with the specified name."
    global currentname
    global currentbody
    global macros
    if currentname is not None:
        Err.log("Internal error! Nested macro attempt!")
    else:
        if name in macros:
            Err.log("Duplicate macro definition '%s'" % name)
        currentname = name
        currentbody = []

def registerNode(node):
    global currentbody
    currentbody.append(IR.Node(node.ppt, node.nodetype, *node.data))

def endMacro():
    global currentname
    global currentbody
    global macros
    if currentname is None:
        Err.log("Internal error! Ended a non-existent macro!")
    else:
        macros[currentname] = currentbody
        currentname = None
        currentbody = None

def expandMacro(ppt, name, arglist):
    global macros
    if name not in macros:
        Err.log("Undefined macro '%s'" % name)
        return IR.NullNode
    argexprs = [IR.Node(ppt, "Label", "_*%d" % i, arg) for (i, arg) in zip(xrange(1, sys.maxint), arglist)]
    bindexprs = [IR.Node(ppt, "Label", "_%d" % i, IR.LabelExpr("_*%d" % i)) for i in range(1, len(arglist)+1)]
    body = [IR.Node("%s->%s" % (ppt, node.ppt), node.nodetype, *node.data) for node in macros[name]]
    invocation = [IR.Node(ppt, "ScopeBegin")] + argexprs + [IR.Node(ppt, "ScopeBegin")] + bindexprs + body + [IR.Node(ppt, "ScopeEnd"), IR.Node(ppt, "ScopeEnd")]
    return IR.SequenceNode(ppt, invocation)

def dump():
    global macros
    for mac in macros:
        body = macros[mac]
        print "Macro: "+mac
        for node in body: print node
        print ""
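
The nested ScopeBegin/ScopeEnd pairs built by expandMacro above are what keep macro arguments from leaking: the outer scope holds each evaluated argument under the hidden name `_*n`, and the inner scope binds the `_n` names the macro body actually uses to those slots. A stand-alone sketch of that construction, with plain tuples standing in for IR.Node objects (the function name and tuple layout here are illustrative only, not part of the assembler):

# Sketch of the invocation sequence expandMacro() builds; tuples stand in
# for IR nodes, so this runs without the rest of the assembler.
def expand_invocation(body, arglist):
    argexprs = [("Label", "_*%d" % i, arg) for i, arg in enumerate(arglist, 1)]
    bindexprs = [("Label", "_%d" % i, "_*%d" % i) for i in range(1, len(arglist) + 1)]
    return ([("ScopeBegin",)] + argexprs +      # outer scope: evaluated arguments
            [("ScopeBegin",)] + bindexprs +     # inner scope: _1.._n aliases
            list(body) +
            [("ScopeEnd",), ("ScopeEnd",)])

print expand_invocation([("Absolute", "sta", "_1")], ["$10", "$20"])
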
@@ -1,13 +1,12 @@
-"""Main controller routines for the P65 assembler.
+"""Main controller routines for the Ophis assembler.

When invoked as main, interprets its command line and goes from there.
Otherwise, use run_all to interpret a file set."""

-# Copyright 2002 Michael C. Martin.
-# You may use, modify, and distribute this file under the BSD
-# license: See LICENSE.txt for details.
+# Copyright 2002-2012 Michael C. Martin and additional contributors.
+# You may use, modify, and distribute this file under the MIT
+# license: See README for details.

-from __future__ import nested_scopes
import sys
import Ophis.Frontend
import Ophis.IR

@@ -21,104 +20,104 @@ import Ophis.Opcodes


def usage():
    "Prints a usage message and quits."
    print "Usage:"
    print "\tOphis [options] infile outfile"
    print ""
    print "Options:"
    print "\t-6510 Allow 6510 undocumented opcodes"
    print "\t-65c02 Enable 65c02 extensions"
    print "\t-d Allow deprecated pragmas"
    print "\t-v n Set verbosity to n (0-4, 1=default)"
    sys.exit(1)

def run_all(infile, outfile):
    "Transforms the source infile to a binary outfile."
    Err.count = 0
    z = Ophis.Frontend.parse(infile)
    env = Ophis.Environment.Environment()

    m = Ophis.Passes.ExpandMacros()
    i = Ophis.Passes.InitLabels()
    l_basic = Ophis.Passes.UpdateLabels()
    l = Ophis.Passes.FixPoint("label update", [l_basic], lambda: l_basic.changed == 0)
    c = Ophis.Passes.Collapse()
    a = Ophis.Passes.Assembler()

    passes = []
    passes.append(Ophis.Passes.DefineMacros())
    passes.append(Ophis.Passes.FixPoint("macro expansion", [m], lambda: m.changed == 0))
    passes.append(Ophis.Passes.FixPoint("label initialization", [i], lambda: i.changed == 0))
    passes.extend([Ophis.Passes.CircularityCheck(), Ophis.Passes.CheckExprs(), Ophis.Passes.EasyModes()])
    passes.append(Ophis.Passes.FixPoint("instruction selection", [l, c], lambda: c.collapsed == 0))
    passes.extend([Ophis.Passes.NormalizeModes(), Ophis.Passes.UpdateLabels(), a])

    for p in passes: p.go(z, env)

    if Err.count == 0:
        try:
            output = file(outfile, 'wb')
            output.write("".join(map(chr, a.output)))
        except IOError:
            print "Could not write to "+outfile
    else:
        Err.report()

def run_ophis():
    infile = None
    outfile = None

    p65_compatibility_mode = 0
    chip_extension = None

    reading_arg = 0

    for x in sys.argv[1:]:
        if reading_arg:
            try:
                Ophis.CmdLine.verbose = int(x)
                reading_arg = 0
            except ValueError:
                print "FATAL: Non-integer passed as argument to -v"
                usage()
        elif x[0] == '-':
            if x == '-v':
                reading_arg = 1
            elif x == '-6510':
                chip_extension = Ophis.Opcodes.undocops
            elif x == '-65c02':
                chip_extension = Ophis.Opcodes.c02extensions
            elif x == '-d':
                p65_compatibility_mode = 1
            else:
                print "FATAL: Unknown option "+x
                usage()
        elif infile == None:
            infile = x
        elif outfile == None:
            outfile = x
        else:
            print "FATAL: Too many files specified"
            usage()

    if infile is None:
        print "FATAL: No files specified"
        usage()

    if outfile is None:
        print "FATAL: No output file specified"
        usage()

    Ophis.Frontend.pragma_modules.append(Ophis.CorePragmas)

    if p65_compatibility_mode:
        Ophis.Frontend.pragma_modules.append(Ophis.OldPragmas)

    if chip_extension is not None:
        Ophis.Opcodes.opcodes.update(chip_extension)

    Ophis.CorePragmas.reset()
    run_all(infile, outfile)

if __name__ == '__main__':
    run_ophis()
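
run_all above strings the passes together with FixPoint wrappers, each of which reruns its passes until the wrapped pass stops reporting changes. A stand-alone sketch of that driver pattern, with plain callables in place of Ophis pass objects (run_to_fixpoint and its arguments are illustrative names, not part of the assembler):

# Sketch of the convergence loop that FixPoint provides to run_all.
def run_to_fixpoint(name, passes, converged, limit=100):
    for i in xrange(limit):
        for p in passes:
            p()                          # run one pass over the IR
        if converged():                  # e.g. lambda: update_pass.changed == 0
            return
    raise RuntimeError("%s failed to converge after %d iterations" % (name, limit))
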
@@ -1,16 +1,16 @@
"""P65-Perl compatibility pragmas

Additional assembler directives to permit assembly of
old P65-Perl sources. This is not, in itself, sufficient,
as the precedence of < and > vs. + and - has changed
-between P65-Perl and P65-Ophis.
+between P65-Perl and Ophis.

Supported pragmas are: .ascii (byte), .address (word),
.segment (text), .code (text), and .link."""

-# Copyright 2002 Michael C. Martin.
-# You may use, modify, and distribute this file under the BSD
-# license: See LICENSE.txt for details.
+# Copyright 2002-2012 Michael C. Martin and additional contributors.
+# You may use, modify, and distribute this file under the MIT
+# license: See README for details.

import Ophis.CorePragmas as core

@@ -20,9 +20,9 @@ pragmaSegment = core.pragmaText
pragmaCode = core.pragmaText

def pragmaLink(ppt, line, result):
    "Load a file in a precise memory location."
    filename = line.expect("STRING").value
    newPC = FE.parse_expr(line)
    line.expect("EOL")
    result.append(IR.Node(ppt, "SetPC", newPC))
    if type(filename)==str: result.append(FE.parse_file(ppt, filename))
@@ -1,28 +1,28 @@
"""Opcodes file.

Tables for the assembly of 6502-family instructions, mapping
opcodes and addressing modes to binary instructions."""

-# Copyright 2002 Michael C. Martin.
-# You may use, modify, and distribute this file under the BSD
-# license: See LICENSE.txt for details.
+# Copyright 2002-2012 Michael C. Martin and additional contributors.
+# You may use, modify, and distribute this file under the MIT
+# license: See README for details.

# Names of addressing modes
modes = ["Implied",         # 0
         "Immediate",       # 1
         "Zero Page",       # 2
         "Zero Page, X",    # 3
         "Zero Page, Y",    # 4
         "Absolute",        # 5
         "Absolute, X",     # 6
         "Absolute, Y",     # 7
         "(Absolute)",      # 8
         "(Absolute, X)",   # 9
         "(Absolute), Y",   # 10
         "(Zero Page)",     # 11
         "(Zero Page, X)",  # 12
         "(Zero Page), Y",  # 13
         "Relative"]        # 14

# Lengths of the argument
lengths = [0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1]
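
The indices in the modes and lengths tables above are the same ones the passes use to pick an entry out of a per-opcode row in Ops.opcodes (Ops.opcodes[name][mode] in Passes.py below). A hedged sketch of such a lookup, using the standard 6502 encodings for LDA; the row here is illustrative and is not copied from Ophis.Opcodes:

# Entry i is the binary opcode for addressing mode i (None = mode unsupported),
# following the index order of the `modes` list above.
lda_row = [None, 0xA9, 0xA5, 0xB5, None, 0xAD, 0xBD, 0xB9,
           None, None, None, None, 0xA1, 0xB1, None]

def encode(row, mode, operand_bytes):
    if row[mode] is None:
        raise ValueError("unsupported addressing mode")
    assert len(operand_bytes) == lengths[mode]   # operand size from the table above
    return [row[mode]] + list(operand_bytes)

print encode(lda_row, 5, [0x00, 0x02])   # LDA $0200: opcode $AD plus little-endian operand
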
@@ -1,18 +1,16 @@
-"""The P65 Assembler passes
+"""The Ophis Assembler passes

-P65's design philosophy is to build the IR once, then run a great
+Ophis's design philosophy is to build the IR once, then run a great
many assembler passes over the result. Thus, each pass does a
single, specialized job. When strung together, the full
translation occurs. This structure also makes the assembler
very extensible; additional analyses or optimizations may be
added as new subclasses of Pass."""

-# Copyright 2002 Michael C. Martin.
-# You may use, modify, and distribute this file under the BSD
-# license: See LICENSE.txt for details.
+# Copyright 2002-2012 Michael C. Martin and additional contributors.
+# You may use, modify, and distribute this file under the MIT
+# license: See README for details.

-from __future__ import nested_scopes
import Ophis.Errors as Err
import Ophis.IR as IR
import Ophis.Opcodes as Ops
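
The docstring above says new analyses can be added as Pass subclasses; the base class in the next hunk supplies the name attribute, the prePass/postPass hooks, and visitUnknown. A sketch of a minimal analysis pass written against that interface (the class and what it counts are invented for illustration, and it assumes the module's Pass and Cmd names):

# Illustrative only: counts Label nodes and accepts every other node type.
class CountLabels(Pass):
    name = "Label counting pass"
    def prePass(self):
        self.seen = 0
    def visitLabel(self, node, env):
        self.seen += 1
    def visitUnknown(self, node, env):
        pass
    def postPass(self):
        if Cmd.verbose > 1: print "Labels seen: %d" % self.seen
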
@@ -22,497 +20,497 @@ import Ophis.Macro as Macro
# The passes themselves

class Pass:
    """Superclass for all assembler passes. Automatically handles IR
    types that modify the environent's structure, and by default
    raises an error on anything else. Override visitUnknown in your
    extension pass to produce a pass that accepts everything."""
    name = "Default Pass"
    def __init__(self):
        self.writeOK = 1
    def visitNone(self, node, env):
        pass
    def visitSEQUENCE(self, node, env):
        Err.currentpoint = node.ppt
        for n in node.data:
            n.accept(self, env)
    def visitDataSegment(self, node, env):
        self.writeOK = 0
        env.setsegment(node.data[0])
    def visitTextSegment(self, node, env):
        self.writeOK = 1
        env.setsegment(node.data[0])
    def visitScopeBegin(self, node, env):
        env.newscope()
    def visitScopeEnd(self, node, env):
        env.endscope()
    def visitUnknown(self, node, env):
        Err.log("Internal error! "+self.name+" cannot understand node type "+node.nodetype)
    def prePass(self):
        pass
    def postPass(self):
        pass
    def go(self, node, env):
        """Prepares the environment and runs this pass, possibly
        printing debugging information."""
        if Err.count == 0:
            if Cmd.verbose > 1: print "Running: "+self.name
            env.reset()
            self.prePass()
            node.accept(self, env)
            self.postPass()
            env.reset()
            if Cmd.verbose > 3:
                print "Current labels:"
                print env
            if Cmd.verbose > 2:
                print "Current IR:"
                print node

class FixPoint:
    """A specialized class that is not a pass but can be run like one.
    This class takes a list of passes and a "fixpoint" function."""
    def __init__(self, name, passes, fixpoint):
        self.name = name
        self.passes = passes
        self.fixpoint = fixpoint
    def go(self, node, env):
        """Runs this FixPoint's passes, in order, until the fixpoint
        is true. Always runs the passes at least once."""
        for i in xrange(100):
            if Err.count != 0: break
            for p in self.passes:
                p.go(node, env)
                if Err.count != 0: break
            if self.fixpoint(): break
            if Cmd.verbose > 1: print "Fixpoint failed, looping back"
        else:
            Err.log("Can't make %s converge! Maybe there's a recursive dependency somewhere?" % self.name)

class DefineMacros(Pass):
    "Extract macro definitions and remove them from the IR"
    name = "Macro definition pass"
    def prePass(self):
        self.inDef = 0
        self.nestedError = 0
    def postPass(self):
        if self.inDef:
            Err.log("Unmatched .macro")
        elif Cmd.verbose > 2:
            print "Macro definitions:"
            Macro.dump()
    def visitMacroBegin(self, node, env):
        if self.inDef:
            Err.log("Nested macro definition")
            self.nestedError = 1
        else:
            Macro.newMacro(node.data[0])
            node.nodetype = "None"
            node.data = []
            self.inDef = 1
    def visitMacroEnd(self, node, env):
        if self.inDef:
            Macro.endMacro()
            node.nodetype = "None"
            node.data = []
            self.inDef = 0
        elif not self.nestedError:
            Err.log("Unmatched .macend")
    def visitUnknown(self, node, env):
        if self.inDef:
            Macro.registerNode(node)
            node.nodetype = "None"
            node.data = []


class ExpandMacros(Pass):
    "Replace macro invocations with the appropriate text"
    name = "Macro expansion pass"
    def prePass(self):
        self.changed = 0
    def visitMacroInvoke(self, node, env):
        replacement = Macro.expandMacro(node.ppt, node.data[0], node.data[1:])
        node.nodetype = replacement.nodetype
        node.data = replacement.data
        self.changed = 1
    def visitUnknown(self, node, env):
        pass

class InitLabels(Pass):
    "Finds all reachable labels"
    name = "Label initialization pass"
    def __init__(self):
        Pass.__init__(self)
        self.labelmap = {}
    def prePass(self):
        self.changed = 0
        self.PCvalid = 1
    def visitAdvance(self, node, env):
        self.PCvalid=node.data[0].valid(env, self.PCvalid)
    def visitSetPC(self, node, env):
        self.PCvalid=node.data[0].valid(env, self.PCvalid)
    def visitLabel(self, node, env):
        (label, val) = node.data
        fulllabel = "%d:%s" % (env.stack[0], label)
        if fulllabel in self.labelmap and self.labelmap[fulllabel] is not node:
            Err.log("Duplicate label definition '%s'" % label)
        if fulllabel not in self.labelmap:
            self.labelmap[fulllabel] = node
        if val.valid(env, self.PCvalid) and label not in env:
            env[label]=0
            self.changed=1
    def visitUnknown(self, node, env):
        pass

class CircularityCheck(Pass):
    "Checks for circular label dependencies"
    name = "Circularity check pass"
    def prePass(self):
        self.changed=0
        self.PCvalid=1
    def visitAdvance(self, node, env):
        PCvalid = self.PCvalid
        self.PCvalid=node.data[0].valid(env, self.PCvalid)
        if not node.data[0].valid(env, PCvalid):
            Err.log("Undefined or circular reference on .advance")
    def visitSetPC(self, node, env):
        PCvalid = self.PCvalid
        self.PCvalid=node.data[0].valid(env, self.PCvalid)
        if not node.data[0].valid(env, PCvalid):
            Err.log("Undefined or circular reference on program counter set")
    def visitCheckPC(self, node, env):
        if not node.data[0].valid(env, self.PCvalid):
            Err.log("Undefined or circular reference on program counter check")
    def visitLabel(self, node, env):
        (label, val) = node.data
        if not val.valid(env, self.PCvalid):
            Err.log("Undefined or circular dependency for label '%s'" % label)
    def visitUnknown(self, node, env):
        pass

class CheckExprs(Pass):
    "Ensures all expressions can resolve"
    name = "Expression checking pass"
    def visitUnknown(self, node, env):
        for i in [x for x in node.data if isinstance(x, IR.Expr)]:
            i.value(env) # Throw away result, just confirm validity of all expressions

class EasyModes(Pass):
    "Assigns address modes to hardcoded and branch instructions"
    name = "Easy addressing modes pass"
    def visitMemory(self, node, env):
        if Ops.opcodes[node.data[0]][14] is not None:
            node.nodetype = "Relative"
            return
        if node.data[1].hardcoded:
            if not collapse_no_index(node, env):
                node.nodetype = "Absolute"
    def visitMemoryX(self, node, env):
        if node.data[1].hardcoded:
            if not collapse_x(node, env):
                node.nodetype = "AbsoluteX"
    def visitMemoryY(self, node, env):
        if node.data[1].hardcoded:
            if not collapse_y(node, env):
                node.nodetype = "AbsoluteY"
    def visitPointer(self, node, env):
        if node.data[1].hardcoded:
            if not collapse_no_index_ind(node, env):
                node.nodetype = "Indirect"
    def visitPointerX(self, node, env):
        if node.data[1].hardcoded:
            if not collapse_x_ind(node, env):
                node.nodetype = "AbsIndX"
    def visitPointerY(self, node, env):
        if node.data[1].hardcoded:
            if not collapse_y_ind(node, env):
                node.nodetype = "AbsIndY"
    def visitUnknown(self, node, env):
        pass

class UpdateLabels(Pass):
    "Computes the new values for all entries in the symbol table"
    name = "Label Update Pass"
    def prePass(self):
        self.changed = 0
    def visitSetPC(self, node, env): env.setPC(node.data[0].value(env))
    def visitAdvance(self, node, env): env.setPC(node.data[0].value(env))
    def visitImplied(self, node, env): env.incPC(1)
    def visitImmediate(self, node, env): env.incPC(2)
    def visitIndirectX(self, node, env): env.incPC(2)
    def visitIndirectY(self, node, env): env.incPC(2)
    def visitZPIndirect(self, node, env): env.incPC(2)
    def visitZeroPage(self, node, env): env.incPC(2)
    def visitZeroPageX(self, node, env): env.incPC(2)
    def visitZeroPageY(self, node, env): env.incPC(2)
    def visitRelative(self, node, env): env.incPC(2)
    def visitIndirect(self, node, env): env.incPC(3)
    def visitAbsolute(self, node, env): env.incPC(3)
    def visitAbsoluteX(self, node, env): env.incPC(3)
    def visitAbsoluteY(self, node, env): env.incPC(3)
    def visitAbsIndX(self, node, env): env.incPC(3)
    def visitAbsIndY(self, node, env): env.incPC(3)
    def visitMemory(self, node, env): env.incPC(3)
    def visitMemoryX(self, node, env): env.incPC(3)
    def visitMemoryY(self, node, env): env.incPC(3)
    def visitPointer(self, node, env): env.incPC(3)
    def visitPointerX(self, node, env): env.incPC(3)
    def visitPointerY(self, node, env): env.incPC(3)
    def visitCheckPC(self, node, env): pass
    def visitLabel(self, node, env):
        (label, val) = node.data
        old = env[label]
        env[label] = val.value(env)
        if old != env[label]:
            self.changed = 1
    def visitByte(self, node, env): env.incPC(len(node.data))
    def visitWord(self, node, env): env.incPC(len(node.data)*2)
    def visitDword(self, node, env): env.incPC(len(node.data)*4)
    def visitWordBE(self, node, env): env.incPC(len(node.data)*2)
    def visitDwordBE(self, node, env): env.incPC(len(node.data)*4)

class Collapse(Pass):
    """Selects as many zero-page instructions to convert as
    possible, and tracks how many instructions have been
    converted this pass."""
    name = "Instruction Collapse Pass"
    def prePass(self):
        self.collapsed = 0
    def visitMemory(self, node, env):
        if collapse_no_index(node, env): self.collapsed += 1
    def visitMemoryX(self, node, env):
        if collapse_x(node, env): self.collapsed += 1
    def visitMemoryY(self, node, env):
        if collapse_y(node, env): self.collapsed += 1
    def visitPointer(self, node, env):
        if collapse_no_index_ind(node, env): self.collapsed += 1
    def visitPointerX(self, node, env):
        if collapse_x_ind(node, env): self.collapsed += 1
    def visitPointerY(self, node, env):
        if collapse_y_ind(node, env): self.collapsed += 1
    def visitUnknown(self, node, env):
        pass

def collapse_no_index(node, env):
    """Transforms a Memory node into a ZeroPage one if possible.
    Returns 1 if it made the collapse, false otherwise."""
    if node.data[1].value(env) < 0x100 and Ops.opcodes[node.data[0]][2] is not None:
        node.nodetype = "ZeroPage"
        return 1
    else:
        return 0

def collapse_x(node, env):
    """Transforms a MemoryX node into a ZeroPageX one if possible.
    Returns 1 if it made the collapse, false otherwise."""
    if node.data[1].value(env) < 0x100 and Ops.opcodes[node.data[0]][3] is not None:
        node.nodetype = "ZeroPageX"
        return 1
    else:
        return 0

def collapse_y(node, env):
    """Transforms a MemoryY node into a ZeroPageY one if possible.
    Returns 1 if it made the collapse, false otherwise."""
    if node.data[1].value(env) < 0x100 and Ops.opcodes[node.data[0]][4] is not None:
        node.nodetype = "ZeroPageY"
        return 1
    else:
        return 0

def collapse_no_index_ind(node, env):
    """Transforms a Pointer node into a ZPIndirect one if possible.
    Returns 1 if it made the collapse, false otherwise."""
    if node.data[1].value(env) < 0x100 and Ops.opcodes[node.data[0]][11] is not None:
        node.nodetype = "ZPIndirect"
        return 1
    else:
        return 0

def collapse_x_ind(node, env):
    """Transforms a PointerX node into an IndirectX one if possible.
    Returns 1 if it made the collapse, false otherwise."""
    if node.data[1].value(env) < 0x100 and Ops.opcodes[node.data[0]][12] is not None:
        node.nodetype = "IndirectX"
        return 1
    else:
        return 0

def collapse_y_ind(node, env):
    """Transforms a PointerY node into an IndirectY one if possible.
    Returns 1 if it made the collapse, false otherwise."""
    if node.data[1].value(env) < 0x100 and Ops.opcodes[node.data[0]][13] is not None:
        node.nodetype = "IndirectY"
        return 1
    else:
        return 0


class NormalizeModes(Pass):
    """Eliminates the intermediate "Memory" and "Pointer" nodes,
    converting them to "Absolute"."""
    name = "Mode Normalization pass"
    def visitMemory(self, node, env): node.nodetype = "Absolute"
    def visitMemoryX(self, node, env): node.nodetype = "AbsoluteX"
    def visitMemoryY(self, node, env): node.nodetype = "AbsoluteY"
    def visitPointer(self, node, env): node.nodetype = "Indirect"
    def visitPointerX(self, node, env): node.nodetype = "AbsIndX"
    # If we ever hit a PointerY by this point, we have a bug.
    def visitPointerY(self, node, env): node.nodetype = "AbsIndY"
    def visitUnknown(self, node, env): pass

class Assembler(Pass):
    """Converts the IR into a list of bytes, suitable for writing to
    a file."""
    name = "Assembler"

    def prePass(self):
        self.output = []
        self.code = 0
        self.data = 0
        self.filler = 0

    def postPass(self):
        if Cmd.verbose > 0 and Err.count == 0:
            print "Assembly complete: %s bytes output (%s code, %s data, %s filler)" \
                % (len(self.output), self.code, self.data, self.filler)

    def outputbyte(self, expr, env):
        'Outputs a byte, with range checking'
        if self.writeOK:
            val = expr.value(env)
            if val < 0x00 or val > 0xff:
                Err.log("Byte constant "+str(expr)+" out of range")
                val = 0
            self.output.append(int(val))
        else:
            Err.log("Attempt to write to data segment")
    def outputword(self, expr, env):
        'Outputs a little-endian word, with range checking'
        if self.writeOK:
            val = expr.value(env)
            if val < 0x0000 or val > 0xFFFF:
                Err.log("Word constant "+str(expr)+" out of range")
                val = 0
            self.output.append(int(val & 0xFF))
            self.output.append(int((val >> 8) & 0xFF))
        else:
            Err.log("Attempt to write to data segment")
    def outputdword(self, expr, env):
        'Outputs a little-endian dword, with range checking'
        if self.writeOK:
            val = expr.value(env)
            if val < 0x00000000 or val > 0xFFFFFFFFL:
                Err.log("DWord constant "+str(expr)+" out of range")
                val = 0
            self.output.append(int(val & 0xFF))
            self.output.append(int((val >> 8) & 0xFF))
            self.output.append(int((val >> 16) & 0xFF))
            self.output.append(int((val >> 24) & 0xFF))
        else:
            Err.log("Attempt to write to data segment")

    def outputword_be(self, expr, env):
        'Outputs a big-endian word, with range checking'
        if self.writeOK:
            val = expr.value(env)
            if val < 0x0000 or val > 0xFFFF:
                Err.log("Word constant "+str(expr)+" out of range")
                val = 0
            self.output.append(int((val >> 8) & 0xFF))
            self.output.append(int(val & 0xFF))
        else:
            Err.log("Attempt to write to data segment")
    def outputdword_be(self, expr, env):
        'Outputs a big-endian dword, with range checking'
        if self.writeOK:
            val = expr.value(env)
            if val < 0x00000000 or val > 0xFFFFFFFFL:
                Err.log("DWord constant "+str(expr)+" out of range")
                val = 0
            self.output.append(int((val >> 24) & 0xFF))
            self.output.append(int((val >> 16) & 0xFF))
            self.output.append(int((val >> 8) & 0xFF))
            self.output.append(int(val & 0xFF))
        else:
            Err.log("Attempt to write to data segment")

    def assemble(self, node, mode, env):
        "A generic instruction called by the visitor methods themselves"
        (opcode, expr) = node.data
        bin_op = Ops.opcodes[opcode][mode]
        if bin_op is None:
            Err.log('%s does not have mode "%s"' % (opcode.upper(), Ops.modes[mode]))
            return
        self.outputbyte(IR.ConstantExpr(bin_op), env)
        arglen = Ops.lengths[mode]
        if mode == 14: # Special handling for relative mode
            arg = expr.value(env)
            arg = arg-(env.getPC()+2)
            if arg < -128 or arg > 127:
                Err.log("Branch target out of bounds")
                arg = 0
            if arg < 0: arg += 256
            expr = IR.ConstantExpr(arg)
        if arglen == 1: self.outputbyte(expr, env)
        if arglen == 2: self.outputword(expr, env)
        env.incPC(1+arglen)
        self.code += 1+arglen

    def visitImplied(self, node, env): self.assemble(node, 0, env)
    def visitImmediate(self, node, env): self.assemble(node, 1, env)
    def visitZeroPage(self, node, env): self.assemble(node, 2, env)
    def visitZeroPageX(self, node, env): self.assemble(node, 3, env)
    def visitZeroPageY(self, node, env): self.assemble(node, 4, env)
    def visitAbsolute(self, node, env): self.assemble(node, 5, env)
    def visitAbsoluteX(self, node, env): self.assemble(node, 6, env)
    def visitAbsoluteY(self, node, env): self.assemble(node, 7, env)
    def visitIndirect(self, node, env): self.assemble(node, 8, env)
    def visitAbsIndX(self, node, env): self.assemble(node, 9, env)
    def visitAbsIndY(self, node, env): self.assemble(node, 10, env)
    def visitZPIndirect(self, node, env): self.assemble(node, 11, env)
    def visitIndirectX(self, node, env): self.assemble(node, 12, env)
    def visitIndirectY(self, node, env): self.assemble(node, 13, env)
    def visitRelative(self, node, env): self.assemble(node, 14, env)
    def visitLabel(self, node, env): pass
    def visitByte(self, node, env):
        for expr in node.data:
            self.outputbyte(expr, env)
        env.incPC(len(node.data))
        self.data += len(node.data)
    def visitWord(self, node, env):
        for expr in node.data:
            self.outputword(expr, env)
        env.incPC(len(node.data)*2)
        self.data += len(node.data)*2
    def visitDword(self, node, env):
        for expr in node.data:
            self.outputdword(expr, env)
        env.incPC(len(node.data)*4)
        self.data += len(node.data)*4
    def visitWordBE(self, node, env):
        for expr in node.data:
            self.outputword_be(expr, env)
        env.incPC(len(node.data)*2)
        self.data += len(node.data)*2
    def visitDwordBE(self, node, env):
        for expr in node.data:
            self.outputdword_be(expr, env)
        env.incPC(len(node.data)*4)
        self.data += len(node.data)*4
    def visitSetPC(self, node, env):
        env.setPC(node.data[0].value(env))
    def visitCheckPC(self, node, env):
        pc = env.getPC()
        target = node.data[0].value(env)
        if (pc > target):
            Err.log(".checkpc assertion failed: $%x > $%x" % (pc, target))
    def visitAdvance(self, node, env):
        pc = env.getPC()
        target = node.data[0].value(env)
        if (pc > target):
            Err.log("Attempted to .advance backwards: $%x to $%x" % (pc, target))
        else:
            zero = IR.ConstantExpr(0)
            for i in xrange(target-pc): self.outputbyte(zero, env)
            self.filler += target-pc
        env.setPC(target)
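
The mode == 14 branch in Assembler.assemble() above is the one place an operand is rewritten before output: the absolute branch target becomes a signed 8-bit displacement measured from the byte after the 2-byte branch instruction, then wrapped into an unsigned byte. A stand-alone sketch of just that arithmetic (the function name is illustrative):

# Sketch of the relative-mode math from assemble() (mode == 14).
def branch_offset(target, pc):
    arg = target - (pc + 2)          # displacement from the end of the branch
    if arg < -128 or arg > 127:
        raise ValueError("Branch target out of bounds")
    if arg < 0:
        arg += 256                   # two's-complement wrap into one byte
    return arg

print "$%02x" % branch_offset(0x2000, 0x2010)   # backward branch -> $ee
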
@@ -1,5 +1,5 @@
-"P65 - a cross-assembler for the 6502 series of chips"
+"Ophis - a cross-assembler for the 6502 series of chips"

-# Copyright 2002 Michael C. Martin.
-# You may use, modify, and distribute this file under the BSD
-# license: See LICENSE.txt for details.
+# Copyright 2002-2012 Michael C. Martin and additional contributors.
+# You may use, modify, and distribute this file under the MIT
+# license: See README for details.