Added samepage/crosspage to the lexer.

g012 2017-09-09 16:49:10 +02:00
parent d991d818c1
commit 80085a969e
3 changed files with 82 additions and 35 deletions


@ -2,6 +2,7 @@ local M = {}
local symbols={} M.symbols=symbols
local locations={} M.locations=locations
local stats={} M.stats=stats
local byte_normalize = function(v)
if v < -128 or v > 255 then error("value out of byte range: " .. v) end
@ -20,13 +21,14 @@ local byte_emit = function(v, bin)
end
local word_emit = function(v, bin)
assert(v >=0 and v <= 0xffff)
bin[#bin+1] = 0xff & (v % 0x100)
bin[#bin+1] = 0xff & (v // 0x100)
bin[#bin+1] = v & 0xff
bin[#bin+1] = (v>>8) & 0xff
end
M.link = function()
assert(not location.unused, "can't link twice")
assert(not stats.unused, "can't link twice")
stats.unused = 0
for _,location in ipairs(locations) do
local sections = location.sections
@ -124,13 +126,14 @@ M.link = function()
local unused = 0
for _,chunk in ipairs(location.chunks) do unused = unused + chunk.size - chunk.start end
location.unused = unused
stats.unused = stats.unused + unused
end
end
M.genbin = function(filler)
if not filler then filler = 0xff end
if not location.unused then M.link() end
if not stats.unused then M.link() end
local bin = {}
local ins = table.insert
table.sort(locations, function(a,b) return a.start < b.start end)
@ -206,6 +209,13 @@ M.section = function(t)
end
end
M.samepage = function()
end
M.crosspage = function()
end
M.endpage = function()
end
M.byte = function(...)
local data = {...}
for _,v in ipairs(data) do byte_emit(byte_normalize(v)) end
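For reference, the word_emit change above replaces the modulo/floor-division split with a plain mask and shift. A minimal standalone sketch of the new little-endian emission, assuming Lua 5.3+ (required for the & and >> operators):

-- Sketch mirroring the new word_emit: the 16-bit value is appended low byte first.
local bin = {}
local function word_emit(v, bin)
    assert(v >= 0 and v <= 0xffff)
    bin[#bin+1] = v & 0xff        -- low byte
    bin[#bin+1] = (v >> 8) & 0xff -- high byte
end
word_emit(0xbeef, bin)
print(string.format("%02x %02x", bin[1], bin[2])) --> ef be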


@ -29,6 +29,7 @@ section "waitForIntim" --alt short syntax when no other option: @@waitForIntim ?
ldx #0b1101
ldy #0xAB - 16 + 0b11011 & 3 | 6 ~ 0xf >> ~3 << 1 // 5
samepage
lda #0xac
lda #INTIM
lda 0xbeef
@ -38,6 +39,7 @@ section "waitForIntim" --alt short syntax when no other option: @@waitForIntim ?
lda INTIM,y
lda (INTIM,x)
lda (INTIM),y
end
asl
asl INTIM
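As exercised by the test change above, the new keywords open a block that is closed with end. A hypothetical usage sketch (not taken from the commit; the store addresses are placeholders). Note that the runtime handlers M.samepage, M.crosspage and M.endpage are still empty stubs at this point, so the blocks parse but do not yet enforce any page-boundary behaviour:

samepage
    lda INTIM
    sta 0x80        -- placeholder zero-page address
end
crosspage
    lda (INTIM),y
    sta 0x81        -- placeholder zero-page address
end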

l65.lua

@ -33,7 +33,12 @@ local Keywords = lookupify{
};
-- 6502 opcodes
local Keywords_control = {
-- control keywords
'samepage', 'crosspage',
}
local Keywords_6502 = {
-- opcodes
'adc', 'and', 'asl', 'bcc', 'bcs', 'beq', 'bit', 'bmi',
'bne', 'bpl', 'brk', 'bvc', 'bvs', 'clc', 'cld', 'cli',
'clv', 'cmp', 'cpx', 'cpy', 'dec', 'dex', 'dey', 'eor',
@ -50,6 +55,7 @@ local Keywords_6502 = {
local syntax6502_on
local function syntax6502(on)
syntax6502_on = on
lookupify(Keywords_control, Keywords, not on)
lookupify(Keywords_6502, Keywords, not on)
end
syntax6502(true)
@ -245,7 +251,7 @@ local function LexLua(src)
while true do
--check for eof
if peek() == '' then
generateError("Expected `]"..string.rep('=', equalsCount).."]` near <eof>.", 3)
generateError("Expected ']"..string.rep('=', equalsCount).."]' near <eof>.", 3)
end
--check for the end
@ -543,7 +549,7 @@ local function LexLua(src)
if contents then
toEmit = {Type = 'String', Data = all, Constant = contents}
else
generateError("Unexpected Symbol `"..c.."` in source.", 2)
generateError("Unexpected Symbol '"..c.."' in source.", 2)
end
end
@ -683,7 +689,7 @@ local function ParseLua(src)
if line:sub(-1,-1) == '\n' then line = line:sub(1,-2) end
lineNum = lineNum+1
if lineNum == tok:Peek().Line then
err = err..">> `"..line:gsub('\t',' ').."`\n"
err = err..">> '"..line:gsub('\t',' ').."'\n"
for i = 1, tok:Peek().Char do
local c = line:sub(i,i)
if c == '\t' then
@ -717,7 +723,7 @@ local function ParseLua(src)
local function ParseFunctionArgsAndBody(scope, tokenList)
local funcScope = CreateScope(scope)
if not tok:ConsumeSymbol('(', tokenList) then
return false, GenerateError("`(` expected.")
return false, GenerateError("'(' expected.")
end
--arg list
@ -731,17 +737,17 @@ local function ParseLua(src)
if tok:ConsumeSymbol(')', tokenList) then
break
else
return false, GenerateError("`)` expected.")
return false, GenerateError("')' expected.")
end
end
elseif tok:ConsumeSymbol('...', tokenList) then
isVarArg = true
if not tok:ConsumeSymbol(')', tokenList) then
return false, GenerateError("`...` must be the last argument of a function.")
return false, GenerateError("'...' must be the last argument of a function.")
end
break
else
return false, GenerateError("Argument name or `...` expected")
return false, GenerateError("Argument name or '...' expected")
end
end
@ -751,7 +757,7 @@ local function ParseLua(src)
--end
if not tok:ConsumeKeyword('end', tokenList) then
return false, GenerateError("`end` expected after function body")
return false, GenerateError("'end' expected after function body")
end
local nodeFunc = {}
nodeFunc.AstType = 'Function'
@ -772,7 +778,7 @@ local function ParseLua(src)
local st, ex = ParseExpr(scope)
if not st then return false, ex end
if not tok:ConsumeSymbol(')', tokenList) then
return false, GenerateError("`)` Expected.")
return false, GenerateError("')' Expected.")
end
if false then
--save the information about parenthesized expressions somewhere
@ -835,7 +841,7 @@ local function ParseLua(src)
local st, ex = ParseExpr(scope)
if not st then return false, ex end
if not tok:ConsumeSymbol(']', tokenList) then
return false, GenerateError("`]` expected.")
return false, GenerateError("']' expected.")
end
local nodeIndex = {}
nodeIndex.AstType = 'IndexExpr'
@ -855,7 +861,7 @@ local function ParseLua(src)
if tok:ConsumeSymbol(')', tokenList) then
break
else
return false, GenerateError("`)` Expected.")
return false, GenerateError("')' Expected.")
end
end
end
@ -949,10 +955,10 @@ local function ParseLua(src)
return false, GenerateError("Key Expression Expected")
end
if not tok:ConsumeSymbol(']', tokenList) then
return false, GenerateError("`]` Expected")
return false, GenerateError("']' Expected")
end
if not tok:ConsumeSymbol('=', tokenList) then
return false, GenerateError("`=` Expected")
return false, GenerateError("'=' Expected")
end
local st, value = ParseExpr(scope)
if not st then
@ -971,7 +977,7 @@ local function ParseLua(src)
--we are a key
local key = tok:Get(tokenList)
if not tok:ConsumeSymbol('=', tokenList) then
return false, GenerateError("`=` Expected")
return false, GenerateError("'=' Expected")
end
local st, value = ParseExpr(scope)
if not st then
@ -1015,7 +1021,7 @@ local function ParseLua(src)
elseif tok:ConsumeSymbol('}', tokenList) then
break
else
return false, GenerateError("`}` or table entry Expected")
return false, GenerateError("'}' or table entry Expected")
end
end
v.Tokens = tokenList
@ -1115,11 +1121,12 @@ local function ParseLua(src)
local tokenList = {}
local opcode_tok
local function emit_call(func_name, args_expr)
local function emit_call(func_name, args_expr, white)
if not white then white = opcode_tok.LeadingWhite end
local c,l = opcode_tok.Char, opcode_tok.Line
local op_var = {
AstType = 'VarExpr', Name = func_name, Variable = { IsGlobal=true, Name=func_name, Scope=CreateScope(scope) }, Tokens = {
{ LeadingWhite = opcode_tok.LeadingWhite, Type='Ident', Data=func_name, Char=c, Line=l, Print=function() return '<Ident '..func_name..' >' end },
{ LeadingWhite = white, Type='Ident', Data=func_name, Char=c, Line=l, Print=function() return '<Ident '..func_name..' >' end },
}
}
local exp_call = {
@ -1156,6 +1163,34 @@ local function ParseLua(src)
stat = emit_call(is_local and 'label_local' or 'label', label_name)
end end
-- new statements
if not stat then
local pagestat = function(fpage)
opcode_tok = tokenList[1]
local st, nodeBlock = ParseStatementList(scope)
if not st then return false, nodeBlock end
if not tok:ConsumeKeyword('end', tokenList) then
return false, GenerateError("'end' expected.")
end
local nodeDoStat = {}
nodeDoStat.AstType = 'DoStatement'
nodeDoStat.Body = nodeBlock
nodeDoStat.Tokens = tokenList
stat = nodeDoStat
tokenList[1].Data = 'do'
local space = {{ Char=opcode_tok.Char, Line=opcode_tok.Line, Data=' ', Type='Whitespace' }}
local opencall,closecall = emit_call(fpage,nil,space),emit_call('endpage',nil,space)
table.insert(nodeBlock.Body, 1, opencall)
table.insert(nodeBlock.Body, closecall)
end
if tok:ConsumeKeyword('samepage', tokenList) then pagestat('samepage')
elseif tok:ConsumeKeyword('crosspage', tokenList) then pagestat('crosspage')
end
end
-- 6502 opcodes
if not stat then
for _,op in pairs(Keywords_6502) do
@ -1237,7 +1272,7 @@ local function ParseLua(src)
local st, nodeCond = ParseExpr(scope)
if not st then return false, nodeCond end
if not tok:ConsumeKeyword('then', tokenList) then
return false, GenerateError("`then` expected.")
return false, GenerateError("'then' expected.")
end
local st, nodeBody = ParseStatementList(scope)
if not st then return false, nodeBody end
@ -1258,7 +1293,7 @@ local function ParseLua(src)
--end
if not tok:ConsumeKeyword('end', tokenList) then
return false, GenerateError("`end` expected.")
return false, GenerateError("'end' expected.")
end
nodeIfStat.Tokens = tokenList
@ -1275,7 +1310,7 @@ local function ParseLua(src)
--do
if not tok:ConsumeKeyword('do', tokenList) then
return false, GenerateError("`do` expected.")
return false, GenerateError("'do' expected.")
end
--body
@ -1284,7 +1319,7 @@ local function ParseLua(src)
--end
if not tok:ConsumeKeyword('end', tokenList) then
return false, GenerateError("`end` expected.")
return false, GenerateError("'end' expected.")
end
--return
@ -1298,7 +1333,7 @@ local function ParseLua(src)
local st, nodeBlock = ParseStatementList(scope)
if not st then return false, nodeBlock end
if not tok:ConsumeKeyword('end', tokenList) then
return false, GenerateError("`end` expected.")
return false, GenerateError("'end' expected.")
end
local nodeDoStat = {}
@ -1321,7 +1356,7 @@ local function ParseLua(src)
local st, startEx = ParseExpr(scope)
if not st then return false, startEx end
if not tok:ConsumeSymbol(',', tokenList) then
return false, GenerateError("`,` Expected")
return false, GenerateError("',' Expected")
end
local st, endEx = ParseExpr(scope)
if not st then return false, endEx end
@ -1331,13 +1366,13 @@ local function ParseLua(src)
if not st then return false, stepEx end
end
if not tok:ConsumeKeyword('do', tokenList) then
return false, GenerateError("`do` expected")
return false, GenerateError("'do' expected")
end
--
local st, body = ParseStatementList(forScope)
if not st then return false, body end
if not tok:ConsumeKeyword('end', tokenList) then
return false, GenerateError("`end` expected")
return false, GenerateError("'end' expected")
end
--
local nodeFor = {}
@ -1362,7 +1397,7 @@ local function ParseLua(src)
varList[#varList+1] = forScope:CreateLocal(tok:Get(tokenList).Data)
end
if not tok:ConsumeKeyword('in', tokenList) then
return false, GenerateError("`in` expected.")
return false, GenerateError("'in' expected.")
end
local generators = {}
local st, firstGenerator = ParseExpr(scope)
@ -1374,12 +1409,12 @@ local function ParseLua(src)
generators[#generators+1] = gen
end
if not tok:ConsumeKeyword('do', tokenList) then
return false, GenerateError("`do` expected.")
return false, GenerateError("'do' expected.")
end
local st, body = ParseStatementList(forScope)
if not st then return false, body end
if not tok:ConsumeKeyword('end', tokenList) then
return false, GenerateError("`end` expected.")
return false, GenerateError("'end' expected.")
end
--
local nodeFor = {}
@ -1397,7 +1432,7 @@ local function ParseLua(src)
if not st then return false, body end
--
if not tok:ConsumeKeyword('until', tokenList) then
return false, GenerateError("`until` expected.")
return false, GenerateError("'until' expected.")
end
-- FIX: Used to parse in parent scope
-- Now parses in repeat scope
@ -1483,7 +1518,7 @@ local function ParseLua(src)
end
local label = tok:Get(tokenList).Data
if not tok:ConsumeSymbol('::', tokenList) then
return false, GenerateError("`::` expected")
return false, GenerateError("'::' expected")
end
local nodeLabel = {}
nodeLabel.AstType = 'LabelStatement'
@ -1551,7 +1586,7 @@ local function ParseLua(src)
--equals
if not tok:ConsumeSymbol('=', tokenList) then
return false, GenerateError("`=` Expected.")
return false, GenerateError("'=' Expected.")
end
--rhs
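To make the parser addition concrete: pagestat re-labels the opening keyword token as 'do', parses the enclosed statement list, and injects calls at both ends of the resulting block, so the new statements are effectively sugar over an ordinary do-block. A rough sketch of the rewrite (illustrative, not literal tool output):

-- l65 source:
samepage
    lda INTIM
end

-- is parsed roughly as:
do
    samepage()
    lda INTIM
    endpage()
end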