Major formatting fixes:

* No more tabs
* Fix copyright notices to point at right files and name the license right
Michael C. Martin 2012-05-06 20:06:28 -07:00
parent d5ec7bdacd
commit 1df8ad465d
12 changed files with 1508 additions and 1514 deletions

View File

@ -1,17 +1,17 @@
"""Command line options data. """Command line options data.
verbose: verbose:
0: Only report errors 0: Only report errors
1: Announce each file as it is read, and data count (default) 1: Announce each file as it is read, and data count (default)
2: As above, but also announce each pass. 2: As above, but also announce each pass.
3: As above, but print the IR after each pass. 3: As above, but print the IR after each pass.
4: As above, but print the labels after each pass. 4: As above, but print the labels after each pass.
6510 compatibility and deprecation are handled in Ophis.Main.""" 6510 compatibility and deprecation are handled in Ophis.Main."""
# Copyright 2002 Michael C. Martin. # Copyright 2002-2012 Michael C. Martin and additional contributors.
# You may use, modify, and distribute this file under the BSD # You may use, modify, and distribute this file under the MIT
# license: See LICENSE.txt for details. # license: See README for details.
verbose = 1; verbose = 1;
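
For reference (not part of this commit), the rest of the assembler reads this flag directly, so raising the verbosity from another module is a one-line change:

import Ophis.CmdLine as Cmd

Cmd.verbose = 3   # print the IR after each pass, per the scale documented above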

View File

@ -1,13 +1,10 @@
"""Core pragmas """Core pragmas
Provides the core assembler directives. It does not guarantee Provides the core assembler directives."""
compatibility with older versions of P65-Perl."""
# Copyright 2002 Michael C. Martin. # Copyright 2002-2012 Michael C. Martin and additional contributors.
# You may use, modify, and distribute this file under the BSD # You may use, modify, and distribute this file under the MIT
# license: See LICENSE.txt for details. # license: See README for details.
from __future__ import nested_scopes
import Ophis.IR as IR import Ophis.IR as IR
import Ophis.Frontend as FE import Ophis.Frontend as FE
@ -18,193 +15,193 @@ basecharmap = "".join([chr(x) for x in range(256)])
currentcharmap = basecharmap

def reset():
    global loadedfiles, currentcharmap, basecharmap
    loadedfiles={}
    currentcharmap = basecharmap

def pragmaInclude(ppt, line, result):
    "Includes a source file"
    filename = line.expect("STRING").value
    line.expect("EOL")
    if type(filename)==str: result.append(FE.parse_file(ppt, filename))

def pragmaRequire(ppt, line, result):
    "Includes a source file at most one time"
    filename = line.expect("STRING").value
    line.expect("EOL")
    if type(filename)==str:
        global loadedfiles
        if filename not in loadedfiles:
            loadedfiles[filename]=1
            result.append(FE.parse_file(ppt, filename))

def pragmaIncbin(ppt, line, result):
    "Includes a binary file"
    filename = line.expect("STRING").value
    line.expect("EOL")
    if type(filename)==str:
        try:
            f = file(filename, "rb")
            bytes = f.read()
            f.close()
        except IOError:
            Err.log ("Could not read "+filename)
            return
        bytes = [IR.ConstantExpr(ord(x)) for x in bytes]
        result.append(IR.Node(ppt, "Byte", *bytes))

def pragmaCharmap(ppt, line, result):
    "Modify the character map."
    global currentcharmap, basecharmap
    bytes = readData(line)
    if len(bytes) == 0:
        currentcharmap = basecharmap
    else:
        try:
            base = bytes[0].data
            newsubstr = "".join([chr(x.data) for x in bytes[1:]])
            currentcharmap = currentcharmap[:base] + newsubstr + currentcharmap[base+len(newsubstr):]
            if len(currentcharmap) != 256 or base < 0 or base > 255:
                Err.log("Charmap replacement out of range")
                currentcharmap = currentcharmap[:256]
        except ValueError:
            Err.log("Illegal character in .charmap directive")

def pragmaCharmapbin(ppt, line, result):
    "Load a new character map from a file"
    global currentcharmap
    filename = line.expect("STRING").value
    line.expect("EOL")
    if type(filename)==str:
        try:
            f = file(filename, "rb")
            bytes = f.read()
            f.close()
        except IOError:
            Err.log ("Could not read "+filename)
            return
        if len(bytes)==256:
            currentcharmap = bytes
        else:
            Err.log("Character map "+filename+" not 256 bytes long")

def pragmaOrg(ppt, line, result):
    "Relocates the PC with no output"
    newPC = FE.parse_expr(line)
    line.expect("EOL")
    result.append(IR.Node(ppt, "SetPC", newPC))

def pragmaAdvance(ppt, line, result):
    "Outputs filler until reaching the target PC"
    newPC = FE.parse_expr(line)
    line.expect("EOL")
    result.append(IR.Node(ppt, "Advance", newPC))

def pragmaCheckpc(ppt, line, result):
    "Enforces that the PC has not exceeded a certain point"
    target = FE.parse_expr(line)
    line.expect("EOL")
    result.append(IR.Node(ppt, "CheckPC", target))

def pragmaAlias(ppt, line, result):
    "Assigns an arbitrary label"
    lbl = line.expect("LABEL").value
    target = FE.parse_expr(line)
    result.append(IR.Node(ppt, "Label", lbl, target))

def pragmaSpace(ppt, line, result):
    "Reserves space in a data segment for a variable"
    lbl = line.expect("LABEL").value
    size = line.expect("NUM").value
    line.expect("EOL")
    result.append(IR.Node(ppt, "Label", lbl, IR.PCExpr()))
    result.append(IR.Node(ppt, "SetPC", IR.SequenceExpr([IR.PCExpr(), "+", IR.ConstantExpr(size)])))

def pragmaText(ppt, line, result):
    "Switches to a text segment"
    next = line.expect("LABEL", "EOL")
    if next.type == "LABEL":
        line.expect("EOL")
        segment = next.value
    else:
        segment = "*text-default*"
    result.append(IR.Node(ppt, "TextSegment", segment))

def pragmaData(ppt, line, result):
    "Switches to a data segment (no output allowed)"
    next = line.expect("LABEL", "EOL")
    if next.type == "LABEL":
        line.expect("EOL")
        segment = next.value
    else:
        segment = "*data-default*"
    result.append(IR.Node(ppt, "DataSegment", segment))

def readData(line):
    "Read raw data from a comma-separated list"
    if line.lookahead(0).type == "STRING":
        data = [IR.ConstantExpr(ord(x)) for x in line.expect("STRING").value.translate(currentcharmap)]
    else:
        data = [FE.parse_expr(line)]
    next = line.expect(',', 'EOL').type
    while next == ',':
        if line.lookahead(0).type == "STRING":
            data.extend([IR.ConstantExpr(ord(x)) for x in line.expect("STRING").value])
        else:
            data.append(FE.parse_expr(line))
        next = line.expect(',', 'EOL').type
    return data

def pragmaByte(ppt, line, result):
    "Raw data, a byte at a time"
    bytes = readData(line)
    result.append(IR.Node(ppt, "Byte", *bytes))

def pragmaWord(ppt, line, result):
    "Raw data, a word at a time, little-endian"
    words = readData(line)
    result.append(IR.Node(ppt, "Word", *words))

def pragmaDword(ppt, line, result):
    "Raw data, a double-word at a time, little-endian"
    dwords = readData(line)
    result.append(IR.Node(ppt, "Dword", *dwords))

def pragmaWordbe(ppt, line, result):
    "Raw data, a word at a time, big-endian"
    words = readData(line)
    result.append(IR.Node(ppt, "WordBE", *words))

def pragmaDwordbe(ppt, line, result):
    "Raw data, a dword at a time, big-endian"
    dwords = readData(line)
    result.append(IR.Node(ppt, "DwordBE", *dwords))

def pragmaScope(ppt, line, result):
    "Create a new lexical scoping block"
    line.expect("EOL")
    result.append(IR.Node(ppt, "ScopeBegin"))

def pragmaScend(ppt, line, result):
    "End the innermost lexical scoping block"
    line.expect("EOL")
    result.append(IR.Node(ppt, "ScopeEnd"))

def pragmaMacro(ppt, line, result):
    "Begin a macro definition"
    lbl = line.expect("LABEL").value
    line.expect("EOL")
    result.append(IR.Node(ppt, "MacroBegin", lbl))

def pragmaMacend(ppt, line, result):
    "End a macro definition"
    line.expect("EOL")
    result.append(IR.Node(ppt, "MacroEnd"))

def pragmaInvoke(ppt, line, result):
    macro = line.expect("LABEL").value
    if line.lookahead(0).type == "EOL":
        args = []
    else:
        args = readData(line)
    result.append(IR.Node(ppt, "MacroInvoke", macro, *args))

View File

@ -1,75 +1,74 @@
"""Symbol tables and environments for P65. """Symbol tables and environments for Ophis.
Implements the symbol lookup, through nested environments - Implements the symbol lookup, through nested environments -
any non-temporary variable is stored at the top level.""" any non-temporary variable is stored at the top level."""
# Copyright 2002 Michael C. Martin. # Copyright 2002-2012 Michael C. Martin and additional contributors.
# You may use, modify, and distribute this file under the BSD # You may use, modify, and distribute this file under the MIT
# license: See LICENSE.txt for details. # license: See README for details.
from __future__ import nested_scopes import Ophis.Errors as Err
import Ophis.Errors as Err
class Environment:
class Environment: """Environment class.
"""Environment class. Controls the various scopes and global abstract execution variables."""
Controls the various scopes and global abstract execution variables.""" def __init__(self):
def __init__(self): self.dicts = [{}]
self.dicts = [{}] self.stack = [0]
self.stack = [0] self.pc = 0
self.pc = 0 self.segmentdict = {}
self.segmentdict = {} self.segment = "*text-default*"
self.segment = "*text-default*" self.scopecount = 0
self.scopecount = 0 def __contains__(self, item):
def __contains__(self, item): if item[0] == '_':
if item[0] == '_': for dict in [self.dicts[i] for i in self.stack]:
for dict in [self.dicts[i] for i in self.stack]: if item in dict: return 1
if item in dict: return 1 return 0
return 0 return item in self.dicts[0]
return item in self.dicts[0] def __getitem__(self, item):
def __getitem__(self, item): if item[0] == '_':
if item[0] == '_': for dict in [self.dicts[i] for i in self.stack]:
for dict in [self.dicts[i] for i in self.stack]: if item in dict: return dict[item]
if item in dict: return dict[item] else:
else: if item in self.dicts[0]: return self.dicts[0][item]
if item in self.dicts[0]: return self.dicts[0][item] Err.log("Unknown label '%s'" % item)
Err.log("Unknown label '%s'" % item) return 0
return 0 def __setitem__(self, item, value):
def __setitem__(self, item, value): if item[0] == '_':
if item[0] == '_': self.dicts[self.stack[0]][item] = value
self.dicts[self.stack[0]][item] = value else:
else: self.dicts[0][item] = value
self.dicts[0][item] = value def __str__(self):
def __str__(self): return str(self.dicts)
return str(self.dicts) def getPC(self):
def getPC(self): return self.pc
return self.pc def setPC(self, value):
def setPC(self, value): self.pc = value
self.pc = value def incPC(self, amount):
def incPC(self, amount): self.pc += amount
self.pc += amount def getsegment(self):
def getsegment(self): return self.segment
return self.segment def setsegment(self, segment):
def setsegment(self, segment): self.segmentdict[self.segment] = self.pc
self.segmentdict[self.segment] = self.pc self.segment = segment
self.segment = segment self.pc = self.segmentdict.get(segment, 0)
self.pc = self.segmentdict.get(segment, 0) def reset(self):
def reset(self): "Clears out program counter, segment, and scoping information"
"Clears out program counter, segment, and scoping information" self.pc = 0
self.pc = 0 self.segmentdict = {}
self.segmentdict = {} self.segment = "*text-default*"
self.segment = "*text-default*" self.scopecount = 0
self.scopecount = 0 if len(self.stack) > 1:
if len(self.stack) > 1: Err.log("Unmatched .scope")
Err.log("Unmatched .scope") self.stack = [0]
self.stack = [0] def newscope(self):
def newscope(self): "Enters a new scope for temporary labels."
"Enters a new scope for temporary labels." self.scopecount += 1
self.scopecount += 1 self.stack.insert(0, self.scopecount)
self.stack.insert(0, self.scopecount) if len(self.dicts) <= self.scopecount: self.dicts.append({})
if len(self.dicts) <= self.scopecount: self.dicts.append({}) def endscope(self):
def endscope(self): "Leaves a scope."
"Leaves a scope." if len(self.stack) == 1:
if len(self.stack) == 1: Err.log("Unmatched .scend")
Err.log("Unmatched .scend") self.stack.pop(0)
self.stack.pop(0)
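
A small sketch of the scoping rules above (illustrative only, not part of this commit; the label names and addresses are made up): labels beginning with "_" are temporary and resolved through the scope stack, while everything else lives in the single top-level dictionary.

import Ophis.Environment

env = Ophis.Environment.Environment()
env["start"] = 0x0800        # global label, always visible
env.newscope()
env["_loop"] = 0x0810        # temporary label, visible only inside this scope
print "_loop" in env         # True
env.endscope()
print "_loop" in env         # False: the temporary label went out of scope
print env["start"]           # 2048: globals survive scope changes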

View File

@ -1,24 +1,24 @@
"""Error logging """Error logging
Keeps track of the number of errors inflicted so far, and Keeps track of the number of errors inflicted so far, and
where in the assembly the errors are occurring.""" where in the assembly the errors are occurring."""
# Copyright 2002 Michael C. Martin. # Copyright 2002-2012 Michael C. Martin and additional contributors.
# You may use, modify, and distribute this file under the BSD # You may use, modify, and distribute this file under the MIT
# license: See LICENSE.txt for details. # license: See README for details.
count = 0 count = 0
currentpoint = "<Top Level>" currentpoint = "<Top Level>"
def log(err): def log(err):
"""Reports an error at the current program point, and increases """Reports an error at the current program point, and increases
the global error count.""" the global error count."""
global count global count
count = count+1 count = count+1
print currentpoint+": "+err print currentpoint+": "+err
def report(): def report():
"Print out the number of errors." "Print out the number of errors."
if count == 0: print "No errors" if count == 0: print "No errors"
elif count == 1: print "1 error" elif count == 1: print "1 error"
else: print str(count)+" errors" else: print str(count)+" errors"
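
Illustrative use only (not part of this commit): callers set currentpoint to the program point they are processing, so that logged messages carry a location prefix.

import Ophis.Errors as Err

Err.currentpoint = "example.oph:12"        # hypothetical file and line
Err.log("Branch out of range")             # prints "example.oph:12: Branch out of range"
Err.report()                               # prints "1 error"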

View File

@ -1,333 +1,331 @@
"""Lexer and Parser """Lexer and Parser
Constructs a list of IR nodes from a list of input strings.""" Constructs a list of IR nodes from a list of input strings."""
from __future__ import nested_scopes import Ophis.Errors as Err
import Ophis.Errors as Err import Ophis.Opcodes as Ops
import Ophis.Opcodes as Ops import Ophis.IR as IR
import Ophis.IR as IR import Ophis.CmdLine as Cmd
import Ophis.CmdLine as Cmd import os
import os
# Copyright 2002-2012 Michael C. Martin and additional contributors.
# Copyright 2002 Michael C. Martin. # You may use, modify, and distribute this file under the MIT
# You may use, modify, and distribute this file under the BSD # license: See README for details.
# license: See LICENSE.txt for details.
class Lexeme:
"Class for lexer tokens. Used by lexer and parser."
class Lexeme: def __init__(self, type="UNKNOWN", value=None):
"Class for lexer tokens. Used by lexer and parser." self.type = type.upper()
def __init__(self, type="UNKNOWN", value=None): self.value = value
self.type = type.upper() def __str__(self):
self.value = value if self.value == None:
def __str__(self): return self.type
if self.value == None: else:
return self.type return self.type+":"+str(self.value)
else: def __repr__(self):
return self.type+":"+str(self.value) return "Lexeme("+`self.type`+", "+`self.value`+")"
def __repr__(self): def matches(self, other):
return "Lexeme("+`self.type`+", "+`self.value`+")" "1 if Lexemes a and b have the same type."
def matches(self, other): return self.type == other.type
"1 if Lexemes a and b have the same type."
return self.type == other.type bases = {"$":("hexadecimal", 16),
"%":("binary", 2),
bases = {"$":("hexadecimal", 16), "0":("octal", 8)}
"%":("binary", 2),
"0":("octal", 8)} punctuation = "#,`<>():.+-*/&|^[]"
punctuation = "#,`<>():.+-*/&|^[]" def lex(point, line):
"""Turns a line of source into a sequence of lexemes."""
def lex(point, line): Err.currentpoint = point
"""Turns a line of source into a sequence of lexemes.""" result = []
Err.currentpoint = point def is_opcode(op):
result = [] "Tests whether a string is an opcode or an identifier"
def is_opcode(op): return op in Ops.opcodes
"Tests whether a string is an opcode or an identifier" def add_token(token):
return op in Ops.opcodes "Converts a substring into a single lexeme"
def add_token(token): if token == "":
"Converts a substring into a single lexeme" return
if token == "": if token == "0":
return result.append(Lexeme("NUM", 0))
if token == "0": return
result.append(Lexeme("NUM", 0)) firstchar = token[0]
return rest = token[1:]
firstchar = token[0] if firstchar == '"':
rest = token[1:] result.append(Lexeme("STRING", rest))
if firstchar == '"': return
result.append(Lexeme("STRING", rest)) elif firstchar in bases:
return try:
elif firstchar in bases: result.append(Lexeme("NUM", long(rest, bases[firstchar][1])))
try: return
result.append(Lexeme("NUM", long(rest, bases[firstchar][1]))) except ValueError:
return Err.log('Invalid '+bases[firstchar][0]+' constant: '+rest)
except ValueError: result.append(Lexeme("NUM", 0))
Err.log('Invalid '+bases[firstchar][0]+' constant: '+rest) return
result.append(Lexeme("NUM", 0)) elif firstchar.isdigit():
return try:
elif firstchar.isdigit(): result.append(Lexeme("NUM", long(token)))
try: except ValueError:
result.append(Lexeme("NUM", long(token))) Err.log('Identifiers may not begin with a number')
except ValueError: result.append(Lexeme("LABEL", "ERROR"))
Err.log('Identifiers may not begin with a number') return
result.append(Lexeme("LABEL", "ERROR")) elif firstchar == "'":
return if len(rest) == 1:
elif firstchar == "'": result.append(Lexeme("NUM", ord(rest)))
if len(rest) == 1: else:
result.append(Lexeme("NUM", ord(rest))) Err.log("Invalid character constant '"+rest+"'")
else: result.append(Lexeme("NUM", 0))
Err.log("Invalid character constant '"+rest+"'") return
result.append(Lexeme("NUM", 0)) elif firstchar in punctuation:
return if rest != "":
elif firstchar in punctuation: Err.log("Internal lexer error! '"+token+"' can't happen!")
if rest != "": result.append(Lexeme(firstchar))
Err.log("Internal lexer error! '"+token+"' can't happen!") return
result.append(Lexeme(firstchar)) else: # Label, opcode, or index register
return id = token.lower()
else: # Label, opcode, or index register if is_opcode(id):
id = token.lower() result.append(Lexeme("OPCODE", id))
if is_opcode(id): elif id == "x":
result.append(Lexeme("OPCODE", id)) result.append(Lexeme("X"))
elif id == "x": elif id == "y":
result.append(Lexeme("X")) result.append(Lexeme("Y"))
elif id == "y": else:
result.append(Lexeme("Y")) result.append(Lexeme("LABEL", id))
else: return
result.append(Lexeme("LABEL", id)) # should never reach here
return Err.log("Internal lexer error: add_token fall-through")
# should never reach here def add_EOL():
Err.log("Internal lexer error: add_token fall-through") "Adds an end-of-line lexeme"
def add_EOL(): result.append(Lexeme("EOL"))
"Adds an end-of-line lexeme" # Actual routine begins here
result.append(Lexeme("EOL")) value = ""
# Actual routine begins here quotemode = 0
value = "" backslashmode = 0
quotemode = 0 for c in line.strip():
backslashmode = 0 if backslashmode:
for c in line.strip(): backslashmode = 0
if backslashmode: value = value + c
backslashmode = 0 elif c == "\\":
value = value + c backslashmode = 1
elif c == "\\": elif quotemode:
backslashmode = 1 if c == '"':
elif quotemode: quotemode = 0
if c == '"': else:
quotemode = 0 value = value + c
else: elif c == ';':
value = value + c add_token(value)
elif c == ';': value = ""
add_token(value) break
value = "" elif c.isspace():
break add_token(value)
elif c.isspace(): value = ""
add_token(value) elif c in punctuation:
value = "" add_token(value)
elif c in punctuation: add_token(c)
add_token(value) value = ""
add_token(c) elif c == '"':
value = "" add_token(value)
elif c == '"': value = '"'
add_token(value) quotemode = 1
value = '"' else:
quotemode = 1 value = value + c
else: if backslashmode:
value = value + c Err.log("Backslashed newline")
if backslashmode: if quotemode:
Err.log("Backslashed newline") Err.log("Unterminated string constant")
if quotemode: add_token(value)
Err.log("Unterminated string constant") add_EOL()
add_token(value) return result
add_EOL()
return result class ParseLine:
"Maintains the parse state of a line of code. Enables arbitrary lookahead."
class ParseLine: def __init__(self, lexemes):
"Maintains the parse state of a line of code. Enables arbitrary lookahead." self.lexemes = lexemes
def __init__(self, lexemes): self.location = 0
self.lexemes = lexemes def lookahead(self, i):
self.location = 0 """Returns the token i units ahead in the parse.
def lookahead(self, i): lookahead(0) returns the next token; trying to read off the end of
"""Returns the token i units ahead in the parse. the sequence returns the last token in the sequence (usually EOL)."""
lookahead(0) returns the next token; trying to read off the end of target = self.location+i
the sequence returns the last token in the sequence (usually EOL).""" if target >= len(self.lexemes): target = -1
target = self.location+i return self.lexemes[target]
if target >= len(self.lexemes): target = -1 def pop(self):
return self.lexemes[target] "Returns and removes the next element in the line."
def pop(self): old = self.location
"Returns and removes the next element in the line." if self.location < len(self.lexemes)-1: self.location += 1
old = self.location return self.lexemes[old]
if self.location < len(self.lexemes)-1: self.location += 1 def expect(self, *tokens):
return self.lexemes[old] """Reads a token from the ParseLine line and returns it if it's of a type
def expect(self, *tokens): in the sequence tokens. Otherwise, it logs an error."""
"""Reads a token from the ParseLine line and returns it if it's of a type token = self.pop()
in the sequence tokens. Otherwise, it logs an error.""" if token.type not in tokens:
token = self.pop() Err.log('Expected: "'+'", "'.join(tokens)+'"')
if token.type not in tokens: return token
Err.log('Expected: "'+'", "'.join(tokens)+'"')
return token pragma_modules = []
pragma_modules = [] def parse_expr(line):
"Parses an Ophis arithmetic expression."
def parse_expr(line): def atom():
"Parses an Ophis arithmetic expression." "Parses lowest-priority expression components."
def atom(): next = line.lookahead(0).type
"Parses lowest-priority expression components." if next == "NUM":
next = line.lookahead(0).type return IR.ConstantExpr(line.expect("NUM").value)
if next == "NUM": elif next == "LABEL":
return IR.ConstantExpr(line.expect("NUM").value) return IR.LabelExpr(line.expect("LABEL").value)
elif next == "LABEL": elif next == "^":
return IR.LabelExpr(line.expect("LABEL").value) line.expect("^")
elif next == "^": return IR.PCExpr()
line.expect("^") elif next == "[":
return IR.PCExpr() line.expect("[")
elif next == "[": result = parse_expr(line)
line.expect("[") line.expect("]")
result = parse_expr(line) return result
line.expect("]") elif next == "+":
return result offset = 0
elif next == "+": while next == "+":
offset = 0 offset += 1
while next == "+": line.expect("+")
offset += 1 next = line.lookahead(0).type
line.expect("+") return IR.LabelExpr("*"+str(templabelcount+offset))
next = line.lookahead(0).type elif next == "-":
return IR.LabelExpr("*"+str(templabelcount+offset)) offset = 1
elif next == "-": while next == "-":
offset = 1 offset -= 1
while next == "-": line.expect("-")
offset -= 1 next = line.lookahead(0).type
line.expect("-") return IR.LabelExpr("*"+str(templabelcount+offset))
next = line.lookahead(0).type elif next == ">":
return IR.LabelExpr("*"+str(templabelcount+offset)) line.expect(">")
elif next == ">": return IR.HighByteExpr(atom())
line.expect(">") elif next == "<":
return IR.HighByteExpr(atom()) line.expect("<")
elif next == "<": return IR.LowByteExpr(atom())
line.expect("<") else:
return IR.LowByteExpr(atom()) Err.log('Expected: expression')
else: def precedence_read(constructor, reader, separators):
Err.log('Expected: expression') """Handles precedence. The reader argument is a function that returns
def precedence_read(constructor, reader, separators): expressions that bind more tightly than these; separators is a list
"""Handles precedence. The reader argument is a function that returns of strings naming the operators at this precedence level. The
expressions that bind more tightly than these; separators is a list constructor argument is a class, indicating what node type holds
of strings naming the operators at this precedence level. The objects of this precedence level.
constructor argument is a class, indicating what node type holds
objects of this precedence level. Returns a list of Expr objects with separator strings between them."""
result = [reader()] # first object
Returns a list of Expr objects with separator strings between them.""" nextop = line.lookahead(0).type
result = [reader()] # first object while (nextop in separators):
nextop = line.lookahead(0).type line.expect(nextop)
while (nextop in separators): result.append(nextop)
line.expect(nextop) result.append(reader())
result.append(nextop) nextop = line.lookahead(0).type
result.append(reader()) if len(result) == 1: return result[0]
nextop = line.lookahead(0).type return constructor(result)
if len(result) == 1: return result[0] def term():
return constructor(result) "Parses * and /"
def term(): return precedence_read(IR.SequenceExpr, atom, ["*", "/"])
"Parses * and /" def arith():
return precedence_read(IR.SequenceExpr, atom, ["*", "/"]) "Parses + and -"
def arith(): return precedence_read(IR.SequenceExpr, term, ["+", "-"])
"Parses + and -" def bits():
return precedence_read(IR.SequenceExpr, term, ["+", "-"]) "Parses &, |, and ^"
def bits(): return precedence_read(IR.SequenceExpr, arith, ["&", "|", "^"])
"Parses &, |, and ^" return bits()
return precedence_read(IR.SequenceExpr, arith, ["&", "|", "^"])
return bits() def parse_line(ppt, lexemelist):
"Turn a line of source into an IR Node."
def parse_line(ppt, lexemelist): Err.currentpoint = ppt
"Turn a line of source into an IR Node." result = []
Err.currentpoint = ppt line = ParseLine(lexemelist)
result = [] def aux():
line = ParseLine(lexemelist) "Accumulates all IR nodes defined by this line."
def aux(): if line.lookahead(0).type == "EOL":
"Accumulates all IR nodes defined by this line." pass
if line.lookahead(0).type == "EOL": elif line.lookahead(1).type == ":":
pass newlabel=line.expect("LABEL").value
elif line.lookahead(1).type == ":": line.expect(":")
newlabel=line.expect("LABEL").value result.append(IR.Node(ppt, "Label", newlabel, IR.PCExpr()))
line.expect(":") aux()
result.append(IR.Node(ppt, "Label", newlabel, IR.PCExpr())) elif line.lookahead(0).type == "*":
aux() global templabelcount
elif line.lookahead(0).type == "*": templabelcount = templabelcount + 1
global templabelcount result.append(IR.Node(ppt, "Label", "*"+str(templabelcount), IR.PCExpr()))
templabelcount = templabelcount + 1 line.expect("*")
result.append(IR.Node(ppt, "Label", "*"+str(templabelcount), IR.PCExpr())) aux()
line.expect("*") elif line.lookahead(0).type == "." or line.lookahead(0).type == "`":
aux() which = line.expect(".", "`").type
elif line.lookahead(0).type == "." or line.lookahead(0).type == "`": if (which == "."): pragma = line.expect("LABEL").value
which = line.expect(".", "`").type else: pragma = "invoke"
if (which == "."): pragma = line.expect("LABEL").value pragmaFunction = "pragma"+pragma.title()
else: pragma = "invoke" for mod in pragma_modules:
pragmaFunction = "pragma"+pragma.title() if hasattr(mod, pragmaFunction):
for mod in pragma_modules: getattr(mod, pragmaFunction)(ppt, line, result)
if hasattr(mod, pragmaFunction): break
getattr(mod, pragmaFunction)(ppt, line, result) else:
break Err.log("Unknown pragma "+pragma)
else:
Err.log("Unknown pragma "+pragma) else: # Instruction
opcode = line.expect("OPCODE").value
else: # Instruction if line.lookahead(0).type == "#":
opcode = line.expect("OPCODE").value mode = "Immediate"
if line.lookahead(0).type == "#": line.expect("#")
mode = "Immediate" arg = parse_expr(line)
line.expect("#") line.expect("EOL")
arg = parse_expr(line) elif line.lookahead(0).type == "(":
line.expect("EOL") line.expect("(")
elif line.lookahead(0).type == "(": arg = parse_expr(line)
line.expect("(") if line.lookahead(0).type == ",":
arg = parse_expr(line) mode = "PointerX"
if line.lookahead(0).type == ",": line.expect(",")
mode = "PointerX" line.expect("X")
line.expect(",") line.expect(")")
line.expect("X") line.expect("EOL")
line.expect(")") else:
line.expect("EOL") line.expect(")")
else: tok = line.expect(",", "EOL").type
line.expect(")") if tok == "EOL":
tok = line.expect(",", "EOL").type mode = "Pointer"
if tok == "EOL": else:
mode = "Pointer" mode = "PointerY"
else: line.expect("Y")
mode = "PointerY" line.expect("EOL")
line.expect("Y") elif line.lookahead(0).type == "EOL":
line.expect("EOL") mode = "Implied"
elif line.lookahead(0).type == "EOL": arg = None
mode = "Implied" else:
arg = None arg = parse_expr(line)
else: tok = line.expect("EOL", ",").type
arg = parse_expr(line) if tok == ",":
tok = line.expect("EOL", ",").type tok = line.expect("X", "Y").type
if tok == ",": if tok == "X": mode = "MemoryX"
tok = line.expect("X", "Y").type else: mode = "MemoryY"
if tok == "X": mode = "MemoryX" line.expect("EOL")
else: mode = "MemoryY" else: mode = "Memory"
line.expect("EOL") result.append(IR.Node(ppt, mode, opcode, arg))
else: mode = "Memory" aux()
result.append(IR.Node(ppt, mode, opcode, arg)) result = [node for node in result if node is not IR.NullNode]
aux() if len(result) == 0: return IR.NullNode
result = [node for node in result if node is not IR.NullNode] if len(result) == 1: return result[0]
if len(result) == 0: return IR.NullNode return IR.SequenceNode(ppt, result)
if len(result) == 1: return result[0]
return IR.SequenceNode(ppt, result) def parse_file(ppt, filename):
"Loads a .P65 source file, and returns an IR list."
def parse_file(ppt, filename): Err.currentpoint = ppt
"Loads a .P65 source file, and returns an IR list." if Cmd.verbose > 0: print "Loading "+filename
Err.currentpoint = ppt try:
if Cmd.verbose > 0: print "Loading "+filename f = file(filename)
try: linelist = f.readlines()
f = file(filename) f.close()
linelist = f.readlines() pptlist = ["%s:%d" % (filename, i+1) for i in range(len(linelist))]
f.close() lexlist = map(lex, pptlist, linelist)
pptlist = ["%s:%d" % (filename, i+1) for i in range(len(linelist))] IRlist = map(parse_line, pptlist, lexlist)
lexlist = map(lex, pptlist, linelist) IRlist = [node for node in IRlist if node is not IR.NullNode]
IRlist = map(parse_line, pptlist, lexlist) return IR.SequenceNode(ppt, IRlist)
IRlist = [node for node in IRlist if node is not IR.NullNode] except IOError:
return IR.SequenceNode(ppt, IRlist) Err.log ("Could not read "+filename)
except IOError: return IR.NullNode
Err.log ("Could not read "+filename)
return IR.NullNode def parse(filename):
"Top level parsing routine, taking a source file name and returning an IR list."
def parse(filename): global templabelcount
"Top level parsing routine, taking a source file name and returning an IR list." templabelcount = 0
global templabelcount return parse_file("<Top Level>", filename)
templabelcount = 0
return parse_file("<Top Level>", filename)

View File

@ -1,161 +1,160 @@
"""P65 Intermediate Representation """Ophis Intermediate Representation
Classes for representing the Intermediate nodes upon which the Classes for representing the Intermediate nodes upon which the
assembler passes operate.""" assembler passes operate."""
# Copyright 2002 Michael C. Martin. # Copyright 2002-2012 Michael C. Martin and additional contributors.
# You may use, modify, and distribute this file under the BSD # You may use, modify, and distribute this file under the MIT
# license: See LICENSE.txt for details. # license: See README for details.
from __future__ import nested_scopes import Ophis.Errors as Err
import Ophis.Errors as Err
class Node:
class Node: """The default IR Node
"""The default IR Node Instances of Node always have the three fields ppt(Program Point),
Instances of Node always have the three fields ppt(Program Point), nodetype(a string), and data (a list)."""
nodetype(a string), and data (a list).""" def __init__(self, ppt, nodetype, *data):
def __init__(self, ppt, nodetype, *data): self.ppt = ppt
self.ppt = ppt self.nodetype = nodetype
self.nodetype = nodetype self.data = list(data)
self.data = list(data) def accept(self, asmpass, env=None):
def accept(self, asmpass, env=None): """Implements the Visitor pattern for an assembler pass.
"""Implements the Visitor pattern for an assembler pass. Calls the routine 'asmpass.visitTYPE(self, env)' where
Calls the routine 'asmpass.visitTYPE(self, env)' where TYPE is the value of self.nodetype."""
TYPE is the value of self.nodetype.""" Err.currentpoint = self.ppt
Err.currentpoint = self.ppt routine = getattr(asmpass, "visit"+self.nodetype, asmpass.visitUnknown)
routine = getattr(asmpass, "visit"+self.nodetype, asmpass.visitUnknown) routine(self, env)
routine(self, env) def __str__(self):
def __str__(self): if self.nodetype != "SEQUENCE":
if self.nodetype != "SEQUENCE": return str(self.ppt)+": "+self.nodetype+" - "+" ".join(map(str, self.data))
return str(self.ppt)+": "+self.nodetype+" - "+" ".join(map(str, self.data)) else:
else: return "\n".join(map(str, self.data))
return "\n".join(map(str, self.data)) def __repr__(self):
def __repr__(self): args = [self.ppt, self.nodetype] + self.data
args = [self.ppt, self.nodetype] + self.data return "Node(" + ", ".join(map(repr, args)) + ")"
return "Node(" + ", ".join(map(repr, args)) + ")"
NullNode = Node("<none>", "None")
NullNode = Node("<none>", "None")
def SequenceNode(ppt, nodelist):
def SequenceNode(ppt, nodelist): return Node(ppt, "SEQUENCE", *nodelist)
return Node(ppt, "SEQUENCE", *nodelist)
class Expr:
class Expr: """Base class for P65 expressions
"""Base class for P65 expressions All expressions have a field called "data" and a boolean field
All expressions have a field called "data" and a boolean field called "hardcoded". An expression is hardcoded if it has no
called "hardcoded". An expression is hardcoded if it has no symbolic values in it."""
symbolic values in it.""" def __init__(self, data):
def __init__(self, data): self.data = data
self.data = data self.hardcoded = 0
self.hardcoded = 0 def __str__(self):
def __str__(self): return "<UNKNOWN: "+`self.data`+">"
return "<UNKNOWN: "+`self.data`+">" def valid(self, env=None, PCvalid=0):
def valid(self, env=None, PCvalid=0): """Returns true if the the expression can be successfully
"""Returns true if the the expression can be successfully evaluated in the specified environment."""
evaluated in the specified environment.""" return 0
return 0 def value(self, env=None):
def value(self, env=None): "Evaluates this expression in the given environment."
"Evaluates this expression in the given environment." return None
return None
class ConstantExpr(Expr):
class ConstantExpr(Expr): "Represents a numeric constant"
"Represents a numeric constant" def __init__(self, data):
def __init__(self, data): self.data = data
self.data = data self.hardcoded = 1
self.hardcoded = 1 def __str__(self):
def __str__(self): return str(self.data)
return str(self.data) def valid(self, env=None, PCvalid=0):
def valid(self, env=None, PCvalid=0): return 1
return 1 def value(self, env=None):
def value(self, env=None): return self.data
return self.data
class LabelExpr(Expr):
class LabelExpr(Expr): "Represents a symbolic constant"
"Represents a symbolic constant" def __init__(self, data):
def __init__(self, data): self.data = data
self.data = data self.hardcoded = 0
self.hardcoded = 0 def __str__(self):
def __str__(self): return self.data
return self.data def valid(self, env=None, PCvalid=0):
def valid(self, env=None, PCvalid=0): return (env is not None) and self.data in env
return (env is not None) and self.data in env def value(self, env=None):
def value(self, env=None): return env[self.data]
return env[self.data]
class PCExpr(Expr):
class PCExpr(Expr): "Represents the current program counter: ^"
"Represents the current program counter: ^" def __init__(self):
def __init__(self): self.hardcoded = 0
self.hardcoded = 0 def __str__(self):
def __str__(self): return "^"
return "^" def valid(self, env=None, PCvalid=0):
def valid(self, env=None, PCvalid=0): return env is not None and PCvalid
return env is not None and PCvalid def value(self, env=None):
def value(self, env=None): return env.getPC()
return env.getPC()
class HighByteExpr(Expr):
class HighByteExpr(Expr): "Represents the expression >{data}"
"Represents the expression >{data}" def __init__(self, data):
def __init__(self, data): self.data = data
self.data = data self.hardcoded = data.hardcoded
self.hardcoded = data.hardcoded def __str__(self):
def __str__(self): return ">"+str(self.data)
return ">"+str(self.data) def valid(self, env=None, PCvalid=0):
def valid(self, env=None, PCvalid=0): return self.data.valid(env, PCvalid)
return self.data.valid(env, PCvalid) def value(self, env=None):
def value(self, env=None): val = self.data.value(env)
val = self.data.value(env) return (val >> 8) & 0xff
return (val >> 8) & 0xff
class LowByteExpr(Expr):
class LowByteExpr(Expr): "Represents the expression <{data}"
"Represents the expression <{data}" def __init__(self, data):
def __init__(self, data): self.data = data
self.data = data self.hardcoded = data.hardcoded
self.hardcoded = data.hardcoded def __str__(self):
def __str__(self): return "<"+str(self.data)
return "<"+str(self.data) def valid(self, env=None, PCvalid=0):
def valid(self, env=None, PCvalid=0): return self.data.valid(env, PCvalid)
return self.data.valid(env, PCvalid) def value(self, env=None):
def value(self, env=None): val = self.data.value(env)
val = self.data.value(env) return val & 0xff
return val & 0xff
class SequenceExpr(Expr):
class SequenceExpr(Expr): """Represents an interleaving of operands (of type Expr) and
"""Represents an interleaving of operands (of type Expr) and operators (of type String). Subclasses must provide a routine
operators (of type String). Subclasses must provide a routine operate(self, firstarg, op, secondarg) that evaluates the
operate(self, firstarg, op, secondarg) that evaluates the operator."""
operator.""" def __init__(self, data):
def __init__(self, data): """Constructor for Sequence Expressions. Results will be
"""Constructor for Sequence Expressions. Results will be screwy if the data inpot isn't a list with types
screwy if the data inpot isn't a list with types [Expr, str, Expr, str, Expr, str, ... Expr, str, Expr]."""
[Expr, str, Expr, str, Expr, str, ... Expr, str, Expr].""" self.data = data
self.data = data self.operands = [x for x in data if isinstance(x, Expr)]
self.operands = [x for x in data if isinstance(x, Expr)] self.operators = [x for x in data if type(x)==str]
self.operators = [x for x in data if type(x)==str] for i in self.operands:
for i in self.operands: if not i.hardcoded:
if not i.hardcoded: self.hardcoded = 0
self.hardcoded = 0 break
break else:
else: self.hardcoded = 1
self.hardcoded = 1 def __str__(self):
def __str__(self): return "["+" ".join(map(str, self.data))+"]"
return "["+" ".join(map(str, self.data))+"]" def valid(self, env=None, PCvalid=0):
def valid(self, env=None, PCvalid=0): for i in self.operands:
for i in self.operands: if not i.valid(env, PCvalid):
if not i.valid(env, PCvalid): return 0
return 0 return 1
return 1 def value(self, env=None):
def value(self, env=None): subs = map((lambda x: x.value(env)), self.operands)
subs = map((lambda x: x.value(env)), self.operands) result = subs[0]
result = subs[0] index = 1
index = 1 for op in self.operators:
for op in self.operators: result = self.operate(result, op, subs[index])
result = self.operate(result, op, subs[index]) index += 1
index += 1 return result
return result def operate(self, start, op, other):
def operate(self, start, op, other): if op=="*": return start * other
if op=="*": return start * other if op=="/": return start // other
if op=="/": return start // other if op=="+": return start + other
if op=="+": return start + other if op=="-": return start - other
if op=="-": return start - other if op=="&": return start & other
if op=="&": return start & other if op=="|": return start | other
if op=="|": return start | other if op=="^": return start ^ other
if op=="^": return start ^ other

View File

@ -1,62 +1,66 @@
"""Macro support for P65. """Macro support for Ophis.
P65 Macros are cached SequenceNodes with arguments Ophis Macros are cached SequenceNodes with arguments
set via .alias commands and prevented from escaping set via .alias commands and prevented from escaping
with .scope and .scend commands.""" with .scope and .scend commands."""
import sys # Copyright 2002-2012 Michael C. Martin and additional contributors.
# You may use, modify, and distribute this file under the MIT
import Ophis.IR as IR # license: See README for details.
import Ophis.CmdLine as Cmd
import Ophis.Errors as Err import sys
macros = {} import Ophis.IR as IR
currentname = None import Ophis.CmdLine as Cmd
currentbody = None import Ophis.Errors as Err
def newMacro(name): macros = {}
"Start creating a new macro with the specified name." currentname = None
global currentname currentbody = None
global currentbody
global macros def newMacro(name):
if currentname is not None: "Start creating a new macro with the specified name."
Err.log("Internal error! Nested macro attempt!") global currentname
else: global currentbody
if name in macros: global macros
Err.log("Duplicate macro definition '%s'" % name) if currentname is not None:
currentname = name Err.log("Internal error! Nested macro attempt!")
currentbody = [] else:
if name in macros:
def registerNode(node): Err.log("Duplicate macro definition '%s'" % name)
global currentbody currentname = name
currentbody.append(IR.Node(node.ppt, node.nodetype, *node.data)) currentbody = []
def endMacro(): def registerNode(node):
global currentname global currentbody
global currentbody currentbody.append(IR.Node(node.ppt, node.nodetype, *node.data))
global macros
if currentname is None: def endMacro():
Err.log("Internal error! Ended a non-existent macro!") global currentname
else: global currentbody
macros[currentname] = currentbody global macros
currentname = None if currentname is None:
currentbody = None Err.log("Internal error! Ended a non-existent macro!")
else:
def expandMacro(ppt, name, arglist): macros[currentname] = currentbody
global macros currentname = None
if name not in macros: currentbody = None
Err.log("Undefined macro '%s'" % name)
return IR.NullNode def expandMacro(ppt, name, arglist):
argexprs = [IR.Node(ppt, "Label", "_*%d" % i, arg) for (i, arg) in zip(xrange(1, sys.maxint), arglist)] global macros
bindexprs = [IR.Node(ppt, "Label", "_%d" % i, IR.LabelExpr("_*%d" % i)) for i in range(1, len(arglist)+1)] if name not in macros:
body = [IR.Node("%s->%s" % (ppt, node.ppt), node.nodetype, *node.data) for node in macros[name]] Err.log("Undefined macro '%s'" % name)
invocation = [IR.Node(ppt, "ScopeBegin")] + argexprs + [IR.Node(ppt, "ScopeBegin")] + bindexprs + body + [IR.Node(ppt, "ScopeEnd"), IR.Node(ppt, "ScopeEnd")] return IR.NullNode
return IR.SequenceNode(ppt, invocation) argexprs = [IR.Node(ppt, "Label", "_*%d" % i, arg) for (i, arg) in zip(xrange(1, sys.maxint), arglist)]
bindexprs = [IR.Node(ppt, "Label", "_%d" % i, IR.LabelExpr("_*%d" % i)) for i in range(1, len(arglist)+1)]
def dump(): body = [IR.Node("%s->%s" % (ppt, node.ppt), node.nodetype, *node.data) for node in macros[name]]
global macros invocation = [IR.Node(ppt, "ScopeBegin")] + argexprs + [IR.Node(ppt, "ScopeBegin")] + bindexprs + body + [IR.Node(ppt, "ScopeEnd"), IR.Node(ppt, "ScopeEnd")]
for mac in macros: return IR.SequenceNode(ppt, invocation)
body = macros[mac]
print "Macro: "+mac def dump():
for node in body: print node global macros
print "" for mac in macros:
body = macros[mac]
print "Macro: "+mac
for node in body: print node
print ""

View File

@ -1,124 +1,123 @@
"""Main controller routines for the P65 assembler. """Main controller routines for the Ophis assembler.
When invoked as main, interprets its command line and goes from there. When invoked as main, interprets its command line and goes from there.
Otherwise, use run_all to interpret a file set.""" Otherwise, use run_all to interpret a file set."""
# Copyright 2002 Michael C. Martin. # Copyright 2002-2012 Michael C. Martin and additional contributors.
# You may use, modify, and distribute this file under the BSD # You may use, modify, and distribute this file under the MIT
# license: See LICENSE.txt for details. # license: See README for details.
from __future__ import nested_scopes import sys
import sys import Ophis.Frontend
import Ophis.Frontend import Ophis.IR
import Ophis.IR import Ophis.CorePragmas
import Ophis.CorePragmas import Ophis.OldPragmas
import Ophis.OldPragmas import Ophis.Passes
import Ophis.Passes import Ophis.Errors as Err
import Ophis.Errors as Err import Ophis.Environment
import Ophis.Environment import Ophis.CmdLine
import Ophis.CmdLine import Ophis.Opcodes
import Ophis.Opcodes
def usage():
def usage(): "Prints a usage message and quits."
"Prints a usage message and quits." print "Usage:"
print "Usage:" print "\tOphis [options] infile outfile"
print "\tOphis [options] infile outfile" print ""
print "" print "Options:"
print "Options:" print "\t-6510 Allow 6510 undocumented opcodes"
print "\t-6510 Allow 6510 undocumented opcodes" print "\t-65c02 Enable 65c02 extensions"
print "\t-65c02 Enable 65c02 extensions" print "\t-d Allow deprecated pragmas"
print "\t-d Allow deprecated pragmas" print "\t-v n Set verbosity to n (0-4, 1=default)"
print "\t-v n Set verbosity to n (0-4, 1=default)" sys.exit(1)
sys.exit(1)
def run_all(infile, outfile):
def run_all(infile, outfile): "Transforms the source infile to a binary outfile."
"Transforms the source infile to a binary outfile." Err.count = 0
Err.count = 0 z = Ophis.Frontend.parse(infile)
z = Ophis.Frontend.parse(infile) env = Ophis.Environment.Environment()
env = Ophis.Environment.Environment()
m = Ophis.Passes.ExpandMacros()
m = Ophis.Passes.ExpandMacros() i = Ophis.Passes.InitLabels()
i = Ophis.Passes.InitLabels() l_basic = Ophis.Passes.UpdateLabels()
l_basic = Ophis.Passes.UpdateLabels() l = Ophis.Passes.FixPoint("label update", [l_basic], lambda: l_basic.changed == 0)
l = Ophis.Passes.FixPoint("label update", [l_basic], lambda: l_basic.changed == 0) c = Ophis.Passes.Collapse()
c = Ophis.Passes.Collapse() a = Ophis.Passes.Assembler()
a = Ophis.Passes.Assembler()
passes = []
passes = [] passes.append(Ophis.Passes.DefineMacros())
passes.append(Ophis.Passes.DefineMacros()) passes.append(Ophis.Passes.FixPoint("macro expansion", [m], lambda: m.changed == 0))
passes.append(Ophis.Passes.FixPoint("macro expansion", [m], lambda: m.changed == 0)) passes.append(Ophis.Passes.FixPoint("label initialization", [i], lambda: i.changed == 0))
passes.append(Ophis.Passes.FixPoint("label initialization", [i], lambda: i.changed == 0)) passes.extend([Ophis.Passes.CircularityCheck(), Ophis.Passes.CheckExprs(), Ophis.Passes.EasyModes()])
passes.extend([Ophis.Passes.CircularityCheck(), Ophis.Passes.CheckExprs(), Ophis.Passes.EasyModes()]) passes.append(Ophis.Passes.FixPoint("instruction selection", [l, c], lambda: c.collapsed == 0))
passes.append(Ophis.Passes.FixPoint("instruction selection", [l, c], lambda: c.collapsed == 0)) passes.extend([Ophis.Passes.NormalizeModes(), Ophis.Passes.UpdateLabels(), a])
passes.extend([Ophis.Passes.NormalizeModes(), Ophis.Passes.UpdateLabels(), a])
for p in passes: p.go(z, env)
for p in passes: p.go(z, env)
if Err.count == 0:
if Err.count == 0: try:
try: output = file(outfile, 'wb')
output = file(outfile, 'wb') output.write("".join(map(chr, a.output)))
output.write("".join(map(chr, a.output))) except IOError:
except IOError: print "Could not write to "+outfile
print "Could not write to "+outfile else:
else: Err.report()
Err.report()
def run_ophis():
def run_ophis(): infile = None
infile = None outfile = None
outfile = None
p65_compatibility_mode = 0
p65_compatibility_mode = 0 chip_extension = None
chip_extension = None
reading_arg = 0
reading_arg = 0
for x in sys.argv[1:]:
for x in sys.argv[1:]: if reading_arg:
if reading_arg: try:
try: Ophis.CmdLine.verbose = int(x)
Ophis.CmdLine.verbose = int(x) reading_arg = 0
reading_arg = 0 except ValueError:
except ValueError: print "FATAL: Non-integer passed as argument to -v"
print "FATAL: Non-integer passed as argument to -v" usage()
usage() elif x[0] == '-':
elif x[0] == '-': if x == '-v':
if x == '-v': reading_arg = 1
reading_arg = 1 elif x == '-6510':
elif x == '-6510': chip_extension = Ophis.Opcodes.undocops
chip_extension = Ophis.Opcodes.undocops elif x == '-65c02':
elif x == '-65c02': chip_extension = Ophis.Opcodes.c02extensions
chip_extension = Ophis.Opcodes.c02extensions elif x == '-d':
elif x == '-d': p65_compatibility_mode = 1
p65_compatibility_mode = 1 else:
else: print "FATAL: Unknown option "+x
print "FATAL: Unknown option "+x usage()
usage() elif infile == None:
elif infile == None: infile = x
infile = x elif outfile == None:
elif outfile == None: outfile = x
outfile = x else:
else: print "FATAL: Too many files specified"
print "FATAL: Too many files specified" usage()
usage()
if infile is None:
if infile is None: print "FATAL: No files specified"
print "FATAL: No files specified" usage()
usage()
if outfile is None:
if outfile is None: print "FATAL: No output file specified"
print "FATAL: No output file specified" usage()
usage()
Ophis.Frontend.pragma_modules.append(Ophis.CorePragmas)
Ophis.Frontend.pragma_modules.append(Ophis.CorePragmas)
if p65_compatibility_mode:
if p65_compatibility_mode: Ophis.Frontend.pragma_modules.append(Ophis.OldPragmas)
Ophis.Frontend.pragma_modules.append(Ophis.OldPragmas)
if chip_extension is not None:
if chip_extension is not None: Ophis.Opcodes.opcodes.update(chip_extension)
Ophis.Opcodes.opcodes.update(chip_extension)
Ophis.CorePragmas.reset()
Ophis.CorePragmas.reset() run_all(infile, outfile)
run_all(infile, outfile)
if __name__ == '__main__':
if __name__ == '__main__': run_ophis()
run_ophis()
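
For reference, a sketch of driving the assembler from Python instead of the command line (illustrative only, not part of this commit; the file names are made up). It mirrors the setup run_ophis() performs before handing off to run_all():

import Ophis.Frontend
import Ophis.CorePragmas
import Ophis.Main

Ophis.Frontend.pragma_modules.append(Ophis.CorePragmas)
Ophis.CorePragmas.reset()
Ophis.Main.run_all("hello.oph", "hello.bin")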

View File

@ -1,28 +1,28 @@
"""P65-Perl compatibility pragmas """P65-Perl compatibility pragmas
Additional assembler directives to permit assembly of Additional assembler directives to permit assembly of
old P65-Perl sources. This is not, in itself, sufficient, old P65-Perl sources. This is not, in itself, sufficient,
as the precedence of < and > vs. + and - has changed as the precedence of < and > vs. + and - has changed
between P65-Perl and P65-Ophis. between P65-Perl and Ophis.
Supported pragmas are: .ascii (byte), .address (word), Supported pragmas are: .ascii (byte), .address (word),
.segment (text), .code (text), and .link.""" .segment (text), .code (text), and .link."""
# Copyright 2002 Michael C. Martin. # Copyright 2002-2012 Michael C. Martin and additional contributors.
# You may use, modify, and distribute this file under the BSD # You may use, modify, and distribute this file under the MIT
# license: See LICENSE.txt for details. # license: See README for details.
import Ophis.CorePragmas as core import Ophis.CorePragmas as core
pragmaAscii = core.pragmaByte pragmaAscii = core.pragmaByte
pragmaAddress = core.pragmaWord pragmaAddress = core.pragmaWord
pragmaSegment = core.pragmaText pragmaSegment = core.pragmaText
pragmaCode = core.pragmaText pragmaCode = core.pragmaText
def pragmaLink(ppt, line, result): def pragmaLink(ppt, line, result):
"Load a file in a precise memory location." "Load a file in a precise memory location."
filename = line.expect("STRING").value filename = line.expect("STRING").value
newPC = FE.parse_expr(line) newPC = FE.parse_expr(line)
line.expect("EOL") line.expect("EOL")
result.append(IR.Node(ppt, "SetPC", newPC)) result.append(IR.Node(ppt, "SetPC", newPC))
if type(filename)==str: result.append(FE.parse_file(ppt, filename)) if type(filename)==str: result.append(FE.parse_file(ppt, filename))

View File

@ -1,28 +1,28 @@
"""Opcodes file. """Opcodes file.
Tables for the assembly of 6502-family instructions, mapping Tables for the assembly of 6502-family instructions, mapping
opcodes and addressing modes to binary instructions.""" opcodes and addressing modes to binary instructions."""
# Copyright 2002 Michael C. Martin. # Copyright 2002-2012 Michael C. Martin and additional contributors.
# You may use, modify, and distribute this file under the BSD # You may use, modify, and distribute this file under the MIT
# license: See LICENSE.txt for details. # license: See README for details.
# Names of addressing modes # Names of addressing modes
modes = ["Implied", # 0 modes = ["Implied", # 0
"Immediate", # 1 "Immediate", # 1
"Zero Page", # 2 "Zero Page", # 2
"Zero Page, X", # 3 "Zero Page, X", # 3
"Zero Page, Y", # 4 "Zero Page, Y", # 4
"Absolute", # 5 "Absolute", # 5
"Absolute, X", # 6 "Absolute, X", # 6
"Absolute, Y", # 7 "Absolute, Y", # 7
"(Absolute)", # 8 "(Absolute)", # 8
"(Absolute, X)", # 9 "(Absolute, X)", # 9
"(Absolute), Y", # 10 "(Absolute), Y", # 10
"(Zero Page)", # 11 "(Zero Page)", # 11
"(Zero Page, X)", # 12 "(Zero Page, X)", # 12
"(Zero Page), Y", # 13 "(Zero Page), Y", # 13
"Relative"] # 14 "Relative"] # 14
# Lengths of the argument # Lengths of the argument
lengths = [0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1] lengths = [0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1]
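
Illustrative only (not part of this commit): the index of a mode name in the modes list is the same index used in lengths, so the argument size for an addressing mode can be looked up directly.

import Ophis.Opcodes as Ops

mode = "Absolute, X"
arg_bytes = Ops.lengths[Ops.modes.index(mode)]
print mode, "takes a", arg_bytes, "byte argument"   # 2-byte argument here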

File diff suppressed because it is too large

View File

@ -1,5 +1,5 @@
"P65 - a cross-assembler for the 6502 series of chips" "Ophis - a cross-assembler for the 6502 series of chips"
# Copyright 2002 Michael C. Martin. # Copyright 2002-2012 Michael C. Martin and additional contributors.
# You may use, modify, and distribute this file under the BSD # You may use, modify, and distribute this file under the MIT
# license: See LICENSE.txt for details. # license: See README for details.