1
0
mirror of https://github.com/sehugg/8bitworkshop.git synced 2026-03-10 21:25:31 +00:00

Merge pull request #221 from fredsa/keywords

Improve Z80/6502 syntax highlighting
This commit is contained in:
Steven Hugg
2026-03-02 09:12:59 +01:00
committed by GitHub
4 changed files with 49 additions and 14 deletions

View File

@@ -9,6 +9,7 @@ Line {
Statement {
Instruction |
Directive |
HexDirective |
MacroDef |
MacEnd |
ControlOp |
@@ -22,7 +23,7 @@ Instruction {
}
Register {
@specialize<Identifier, "X" | "Y" | "A" | "x" | "y" | "a">
@specialize<Identifier, "a" | "x" | "y" | "A" | "X" | "Y">
}
Directive {
@@ -31,15 +32,27 @@ Directive {
PseudoOp {
@specialize<Identifier,
"ORG" | "EQU" | "END" | "org" | "equ" | "end" |
"org" | "equ" | "end" | ".end" |
"ORG" | "EQU" | "END" | ".END" |
"ds" | "ds.b" | "ds.w" | "dc" | "dc.b" | "dc.w" | "seg" | "seg.u" |
"subroutine" |
"DS" | "DS.B" | "DS.W" | "DC" | "DC.B" | "DC.W" | "SEG" | "SEG.U" |
".byte" | ".word" |
".BYTE" | ".WORD" |
"subroutine" | "SUBROUTINE" |
"processor" | "PROCESSOR" |
"echo" | "repeat" | "repend" | "set" |
"processor" |
".WORD" | ".word" | ".BYTE" | ".byte" | ".END" | ".end"
"ECHO" | "REPEAT" | "REPEND" | "SET"
>
}
HexOp { @specialize<Identifier, "hex" | "HEX"> }
HexDirective {
HexOp HexByte*
}
@external tokens hexTokenizer from "../../src/parser/tokens-6502" { HexByte }
Mac { @specialize<Identifier, "mac"> }
MacEnd { @specialize<Identifier, "endm"> }
@@ -56,20 +69,20 @@ CurrentAddress {
Opcode {
@specialize<Identifier,
"ADC" | "AND" | "ASL" | "BCC" | "BCS" | "BEQ" | "BIT" | "BMI" |
"BNE" | "BPL" | "BRK" | "BVC" | "BVS" | "CLC" | "CLD" | "CLI" |
"CLV" | "CMP" | "CPX" | "CPY" | "DEC" | "DEX" | "DEY" | "EOR" |
"INC" | "INX" | "INY" | "JMP" | "JSR" | "LDA" | "LDX" | "LDY" |
"LSR" | "NOP" | "ORA" | "PHA" | "PHP" | "PLA" | "PLP" | "ROL" |
"ROR" | "RTI" | "RTS" | "SBC" | "SEC" | "SED" | "SEI" | "STA" |
"STX" | "STY" | "TAX" | "TAY" | "TSX" | "TXA" | "TXS" | "TYA" |
"adc" | "and" | "asl" | "bcc" | "bcs" | "beq" | "bit" | "bmi" |
"bne" | "bpl" | "brk" | "bvc" | "bvs" | "clc" | "cld" | "cli" |
"clv" | "cmp" | "cpx" | "cpy" | "dec" | "dex" | "dey" | "eor" |
"inc" | "inx" | "iny" | "jmp" | "jsr" | "lda" | "ldx" | "ldy" |
"lsr" | "nop" | "ora" | "pha" | "php" | "pla" | "plp" | "rol" |
"ror" | "rti" | "rts" | "sbc" | "sec" | "sed" | "sei" | "sta" |
"stx" | "sty" | "tax" | "tay" | "tsx" | "txa" | "txs" | "tya"
"stx" | "sty" | "tax" | "tay" | "tsx" | "txa" | "txs" | "tya" |
"ADC" | "AND" | "ASL" | "BCC" | "BCS" | "BEQ" | "BIT" | "BMI" |
"BNE" | "BPL" | "BRK" | "BVC" | "BVS" | "CLC" | "CLD" | "CLI" |
"CLV" | "CMP" | "CPX" | "CPY" | "DEC" | "DEX" | "DEY" | "EOR" |
"INC" | "INX" | "INY" | "JMP" | "JSR" | "LDA" | "LDX" | "LDY" |
"LSR" | "NOP" | "ORA" | "PHA" | "PHP" | "PLA" | "PLP" | "ROL" |
"ROR" | "RTI" | "RTS" | "SBC" | "SEC" | "SED" | "SEI" | "STA" |
"STX" | "STY" | "TAX" | "TAY" | "TSX" | "TXA" | "TXS" | "TYA"
>
}

View File

@@ -35,6 +35,8 @@ export const Lezer6502: LRLanguage = LRLanguage.define({
BinaryGt: t.compareOperator,
UnaryLt: t.arithmeticOperator,
UnaryGt: t.arithmeticOperator,
HexOp: t.definition(t.variableName),
HexByte: t.number,
Mac: t.definitionKeyword,
MacEnd: t.definitionKeyword,
"MacroDef/Identifier": t.macroName,

View File

@@ -22,7 +22,12 @@ Directive {
}
PseudoOp {
@specialize<Identifier, "org" | "equ" | "end" | "public" | "ORG" | "EQU" | "END" | "PUBLIC">
@specialize<Identifier,
"defb" | "defw" | "defm" |
"DEFB" | "DEFW" | "DEFM" |
"org" | "equ" | "end" | "public" |
"ORG" | "EQU" | "END" | "PUBLIC"
>
}
Condition {

15
src/parser/tokens-6502.ts Normal file
View File

@@ -0,0 +1,15 @@
import { ExternalTokenizer } from "@lezer/lr"
import { HexByte } from "../../gen/parser/lang-6502.grammar.terms"
/** True when `ch` is the char code of an ASCII hex digit (0-9, A-F, a-f). */
function isHexDigit(ch: number) {
  if (ch >= 48 && ch <= 57) return true // '0'..'9'
  const folded = ch | 0x20               // fold 'A'..'F' (65-70) onto 'a'..'f' (97-102)
  return folded >= 97 && folded <= 102
}
/**
 * External tokenizer wired into the 6502 grammar's HexDirective rule
 * (`@external tokens hexTokenizer ... { HexByte }`).
 *
 * Emits one HexByte token covering a maximal contiguous run of hex digits,
 * but only when the run length is even (i.e. it splits into whole bytes).
 * Odd-length runs produce no token here.
 */
export const hexTokenizer = new ExternalTokenizer((input) => {
  // A token needs at least one complete byte: two hex digits up front.
  if (!(isHexDigit(input.peek(0)) && isHexDigit(input.peek(1)))) return
  // Extend across the entire digit run.
  let end = 2
  for (; isHexDigit(input.peek(end)); end++) {}
  if ((end & 1) === 0) input.acceptToken(HexByte, end)
})