diff --git a/README.md b/README.md index 80326e3..52f2037 100644 --- a/README.md +++ b/README.md @@ -13,8 +13,8 @@ CORE DISK * common.lib: Common, useful subroutines: memory swaps, etc. MATH DISK -* math.lib: Integer Math Library. -* fpmath.lib: Floating-point Math Library. +* math8.lib: 8-bit Integer Math Library. +* math16.lib: 16-bit Integer Math Library. IO DISK * fileio.lib: File Input/Output Library. @@ -43,6 +43,6 @@ MISC DISK UTIL DISK * various useful utilities -COMPACT DISK 1 (and maybe a second one) -* every routine, separately packaged, without comments. +FPMATH DISK 1 (and maybe a second one) +* fpmath.lib: Floating-point math library diff --git a/README.md~ b/README.md~ index 0ae7212..80326e3 100644 --- a/README.md~ +++ b/README.md~ @@ -13,7 +13,6 @@ CORE DISK * common.lib: Common, useful subroutines: memory swaps, etc. MATH DISK - * math.lib: Integer Math Library. * fpmath.lib: Floating-point Math Library. @@ -42,4 +41,8 @@ MISC DISK * applesoft.lib: Library for interfacing ASM programs with Applesoft BASIC. UTIL DISK -* various useful utilities \ No newline at end of file +* various useful utilities + +COMPACT DISK 1 (and maybe a second one) +* every routine, separately packaged, without comments. 
+ diff --git a/disks/disk2_math8_math16/MATH.DEMO b/disks/disk2_math8_math16/MATH.DEMO new file mode 100644 index 0000000..e17f0b6 --- /dev/null +++ b/disks/disk2_math8_math16/MATH.DEMO @@ -0,0 +1,558 @@ +6000- A9 03 LDA #$03 +6002- 48 PHA +6003- A9 E8 LDA #$E8 +6005- 48 PHA +6006- A9 07 LDA #$07 +6008- 48 PHA +6009- A9 D0 LDA #$D0 +600B- 48 PHA +600C- 20 9C 60 JSR $609C +600F- A9 07 LDA #$07 +6011- 48 PHA +6012- A9 D0 LDA #$D0 +6014- 48 PHA +6015- A9 03 LDA #$03 +6017- 48 PHA +6018- A9 E8 LDA #$E8 +601A- 48 PHA +601B- 20 D2 60 JSR $60D2 +601E- A9 03 LDA #$03 +6020- 48 PHA +6021- A9 E8 LDA #$E8 +6023- 48 PHA +6024- A9 00 LDA #$00 +6026- 48 PHA +6027- A9 03 LDA #$03 +6029- 48 PHA +602A- 20 08 61 JSR $6108 +602D- A9 75 LDA #$75 +602F- 48 PHA +6030- A9 30 LDA #$30 +6032- 48 PHA +6033- A9 0B LDA #$0B +6035- 48 PHA +6036- A9 B8 LDA #$B8 +6038- 48 PHA +6039- 20 73 61 JSR $6173 +603C- A9 FC LDA #$FC +603E- 48 PHA +603F- A9 18 LDA #$18 +6041- 48 PHA +6042- A9 00 LDA #$00 +6044- 48 PHA +6045- A9 0A LDA #$0A +6047- 48 PHA +6048- 20 9F 61 JSR $619F +604B- A9 FC LDA #$FC +604D- 48 PHA +604E- A9 18 LDA #$18 +6050- 48 PHA +6051- A9 00 LDA #$00 +6053- 48 PHA +6054- A9 0D LDA #$0D +6056- 48 PHA +6057- 20 A3 61 JSR $61A3 +605A- A9 03 LDA #$03 +605C- 48 PHA +605D- A9 E8 LDA #$E8 +605F- 48 PHA +6060- A9 00 LDA #$00 +6062- 48 PHA +6063- A9 0D LDA #$0D +6065- 48 PHA +6066- 20 77 61 JSR $6177 +6069- A9 03 LDA #$03 +606B- 48 PHA +606C- A9 FF LDA #$FF +606E- 48 PHA +606F- A9 00 LDA #$00 +6071- 48 PHA +6072- A9 7B LDA #$7B +6074- 48 PHA +6075- 20 A3 62 JSR $62A3 +6078- 20 28 63 JSR $6328 +607B- A9 32 LDA #$32 +607D- 48 PHA +607E- A9 64 LDA #$64 +6080- 48 PHA +6081- 20 3B 63 JSR $633B +6084- A9 64 LDA #$64 +6086- 48 PHA +6087- A9 C8 LDA #$C8 +6089- 48 PHA +608A- 20 D5 63 JSR $63D5 +608D- A9 C8 LDA #$C8 +608F- 48 PHA +6090- A9 0A LDA #$0A +6092- 48 PHA +6093- 20 13 64 JSR $6413 +6096- 20 EA 62 JSR $62EA +6099- 4C D0 03 JMP $03D0 +609C- 68 PLA +609D- 8D CC 60 STA $60CC +60A0- 68 PLA +60A1- 8D 
CD 60 STA $60CD +60A4- 68 PLA +60A5- 8D D0 60 STA $60D0 +60A8- 68 PLA +60A9- 8D D1 60 STA $60D1 +60AC- 68 PLA +60AD- 8D CE 60 STA $60CE +60B0- 68 PLA +60B1- 8D CF 60 STA $60CF +60B4- AD CE 60 LDA $60CE +60B7- 18 CLC +60B8- 6D D0 60 ADC $60D0 +60BB- A8 TAY +60BC- AD CF 60 LDA $60CF +60BF- 6D D1 60 ADC $60D1 +60C2- AA TAX +60C3- AD CD 60 LDA $60CD +60C6- 48 PHA +60C7- AD CC 60 LDA $60CC +60CA- 48 PHA +60CB- 60 RTS +60CC- 00 BRK +60CD- 00 BRK +60CE- 00 BRK +60CF- 00 BRK +60D0- 00 BRK +60D1- 00 BRK +60D2- 68 PLA +60D3- 8D 02 61 STA $6102 +60D6- 68 PLA +60D7- 8D 03 61 STA $6103 +60DA- 68 PLA +60DB- 8D 06 61 STA $6106 +60DE- 68 PLA +60DF- 8D 07 61 STA $6107 +60E2- 68 PLA +60E3- 8D 04 61 STA $6104 +60E6- 68 PLA +60E7- 8D 05 61 STA $6105 +60EA- AD 04 61 LDA $6104 +60ED- 38 SEC +60EE- ED 06 61 SBC $6106 +60F1- A8 TAY +60F2- AD 05 61 LDA $6105 +60F5- ED 07 61 SBC $6107 +60F8- AA TAX +60F9- AD 03 61 LDA $6103 +60FC- 48 PHA +60FD- AD 02 61 LDA $6102 +6100- 48 PHA +6101- 60 RTS +6102- 00 BRK +6103- 00 BRK +6104- 00 BRK +6105- 00 BRK +6106- 00 BRK +6107- 00 BRK +6108- 68 PLA +6109- 8D 71 61 STA $6171 +610C- 68 PLA +610D- 8D 72 61 STA $6172 +6110- 68 PLA +6111- 8D 6C 61 STA $616C +6114- 68 PLA +6115- 8D 6D 61 STA $616D +6118- 68 PLA +6119- 8D 69 61 STA $6169 +611C- 68 PLA +611D- 8D 6A 61 STA $616A +6120- A9 00 LDA #$00 +6122- 8D 6F 61 STA $616F +6125- 8D 70 61 STA $6170 +6128- A2 11 LDX #$11 +612A- 18 CLC +612B- 6E 70 61 ROR $6170 +612E- 6E 6F 61 ROR $616F +6131- 6E 6D 61 ROR $616D +6134- 6E 6C 61 ROR $616C +6137- 90 13 BCC $614C +6139- 18 CLC +613A- AD 69 61 LDA $6169 +613D- 6D 6F 61 ADC $616F +6140- 8D 6F 61 STA $616F +6143- AD 6A 61 LDA $616A +6146- 6D 70 61 ADC $6170 +6149- 8D 70 61 STA $6170 +614C- CA DEX +614D- D0 DC BNE $612B +614F- AC 6F 61 LDY $616F +6152- AE 70 61 LDX $6170 +6155- AD 6D 61 LDA $616D +6158- AA TAX +6159- AD 6C 61 LDA $616C +615C- A8 TAY +615D- AD 72 61 LDA $6172 +6160- 48 PHA +6161- AD 71 61 LDA $6171 +6164- 48 PHA +6165- AD 6F 61 LDA $616F +6168- 60 RTS 
+6169- 00 BRK +616A- 00 BRK +616B- 00 BRK +616C- 00 BRK +616D- 00 BRK +616E- 00 BRK +616F- 00 BRK +6170- 00 BRK +6171- 00 BRK +6172- 00 BRK +6173- A9 00 LDA #$00 +6175- F0 02 BEQ $6179 +6177- A9 02 LDA #$02 +6179- 8D A2 62 STA $62A2 +617C- 68 PLA +617D- 8D 9E 62 STA $629E +6180- 68 PLA +6181- 8D 9F 62 STA $629F +6184- 68 PLA +6185- 8D 98 62 STA $6298 +6188- 68 PLA +6189- 8D 99 62 STA $6299 +618C- 68 PLA +618D- 8D 9A 62 STA $629A +6190- 68 PLA +6191- 8D 9B 62 STA $629B +6194- 20 57 62 JSR $6257 +6197- 90 03 BCC $619C +6199- 4C 31 62 JMP $6231 +619C- 4C 42 62 JMP $6242 +619F- A9 00 LDA #$00 +61A1- F0 04 BEQ $61A7 +61A3- A9 02 LDA #$02 +61A5- D0 00 BNE $61A7 +61A7- 8D A2 62 STA $62A2 +61AA- 68 PLA +61AB- 8D 9E 62 STA $629E +61AE- 68 PLA +61AF- 8D 9F 62 STA $629F +61B2- 68 PLA +61B3- 8D 98 62 STA $6298 +61B6- 68 PLA +61B7- 8D 99 62 STA $6299 +61BA- 68 PLA +61BB- 8D 9A 62 STA $629A +61BE- 68 PLA +61BF- 8D 9B 62 STA $629B +61C2- AD 9B 62 LDA $629B +61C5- 4D 99 62 EOR $6299 +61C8- 8D A0 62 STA $62A0 +61CB- AD 9B 62 LDA $629B +61CE- 8D A1 62 STA $62A1 +61D1- AD 99 62 LDA $6299 +61D4- 10 11 BPL $61E7 +61D6- A9 00 LDA #$00 +61D8- 38 SEC +61D9- ED 98 62 SBC $6298 +61DC- 8D 98 62 STA $6298 +61DF- A9 00 LDA #$00 +61E1- ED 99 62 SBC $6299 +61E4- 8D 99 62 STA $6299 +61E7- AD 9B 62 LDA $629B +61EA- 10 11 BPL $61FD +61EC- A9 00 LDA #$00 +61EE- 38 SEC +61EF- ED 9A 62 SBC $629A +61F2- 8D 9A 62 STA $629A +61F5- A9 00 LDA #$00 +61F7- ED 9B 62 SBC $629B +61FA- 8D 9B 62 STA $629B +61FD- 20 57 62 JSR $6257 +6200- B0 2F BCS $6231 +6202- AD A0 62 LDA $62A0 +6205- 10 11 BPL $6218 +6207- A9 00 LDA #$00 +6209- 38 SEC +620A- ED 9A 62 SBC $629A +620D- 8D 9A 62 STA $629A +6210- A9 00 LDA #$00 +6212- ED 9B 62 SBC $629B +6215- 8D 9B 62 STA $629B +6218- AD A1 62 LDA $62A1 +621B- 10 25 BPL $6242 +621D- A9 00 LDA #$00 +621F- 38 SEC +6220- ED 9C 62 SBC $629C +6223- 8D 9C 62 STA $629C +6226- A9 00 LDA #$00 +6228- ED 9D 62 SBC $629D +622B- 8D 9D 62 STA $629D +622E- 4C 42 62 JMP $6242 +6231- A9 00 LDA 
#$00 +6233- 8D 9A 62 STA $629A +6236- 8D 9B 62 STA $629B +6239- 8D 9C 62 STA $629C +623C- 8D 9D 62 STA $629D +623F- 38 SEC +6240- B0 01 BCS $6243 +6242- 18 CLC +6243- AE A2 62 LDX $62A2 +6246- BD 9A 62 LDA $629A,X +6249- A8 TAY +624A- BD 9B 62 LDA $629B,X +624D- AA TAX +624E- AD 9F 62 LDA $629F +6251- 48 PHA +6252- AD 9E 62 LDA $629E +6255- 48 PHA +6256- 60 RTS +6257- A9 00 LDA #$00 +6259- 8D 9C 62 STA $629C +625C- 8D 9D 62 STA $629D +625F- AD 98 62 LDA $6298 +6262- 0D 99 62 ORA $6299 +6265- D0 02 BNE $6269 +6267- 38 SEC +6268- 60 RTS +6269- A2 10 LDX #$10 +626B- 2E 9A 62 ROL $629A +626E- 2E 9B 62 ROL $629B +6271- 2E 9C 62 ROL $629C +6274- 2E 9D 62 ROL $629D +6277- 38 SEC +6278- AD 9C 62 LDA $629C +627B- ED 98 62 SBC $6298 +627E- A8 TAY +627F- AD 9D 62 LDA $629D +6282- ED 99 62 SBC $6299 +6285- 90 06 BCC $628D +6287- 8C 9C 62 STY $629C +628A- 8D 9D 62 STA $629D +628D- CA DEX +628E- D0 DB BNE $626B +6290- 2E 9A 62 ROL $629A +6293- 2E 9B 62 ROL $629B +6296- 18 CLC +6297- 60 RTS +6298- 00 BRK +6299- 00 BRK +629A- 00 BRK +629B- 00 BRK +629C- 00 BRK +629D- 00 BRK +629E- 00 BRK +629F- 00 BRK +62A0- 00 BRK +62A1- 00 BRK +62A2- 00 BRK +62A3- 68 PLA +62A4- 8D E8 62 STA $62E8 +62A7- 68 PLA +62A8- 8D E9 62 STA $62E9 +62AB- 68 PLA +62AC- 8D E6 62 STA $62E6 +62AF- 68 PLA +62B0- 8D E7 62 STA $62E7 +62B3- 68 PLA +62B4- 8D E4 62 STA $62E4 +62B7- 68 PLA +62B8- 8D E5 62 STA $62E5 +62BB- AD E9 62 LDA $62E9 +62BE- 48 PHA +62BF- AD E8 62 LDA $62E8 +62C2- 48 PHA +62C3- AD E4 62 LDA $62E4 +62C6- CD E6 62 CMP $62E6 +62C9- F0 0B BEQ $62D6 +62CB- AD E5 62 LDA $62E5 +62CE- ED E7 62 SBC $62E7 +62D1- 09 01 ORA #$01 +62D3- 70 0A BVS $62DF +62D5- 60 RTS +62D6- AD E5 62 LDA $62E5 +62D9- ED E7 62 SBC $62E7 +62DC- 70 01 BVS $62DF +62DE- 60 RTS +62DF- 49 80 EOR #$80 +62E1- 09 01 ORA #$01 +62E3- 60 RTS +62E4- 00 BRK +62E5- 00 BRK +62E6- 00 BRK +62E7- 00 BRK +62E8- 00 BRK +62E9- 00 BRK +62EA- A5 4E LDA $4E +62EC- 8D 26 63 STA $6326 +62EF- A5 4F LDA $4F +62F1- 8D 27 63 STA $6327 +62F4- AD 26 63 LDA 
$6326 +62F7- F0 19 BEQ $6312 +62F9- 0E 26 63 ASL $6326 +62FC- AD 27 63 LDA $6327 +62FF- 2A ROL +6300- 90 1A BCC $631C +6302- 49 03 EOR #$03 +6304- 8D 27 63 STA $6327 +6307- AD 26 63 LDA $6326 +630A- 49 69 EOR #$69 +630C- 8D 26 63 STA $6326 +630F- 4C 1F 63 JMP $631F +6312- AD 27 63 LDA $6327 +6315- F0 EB BEQ $6302 +6317- 0A ASL +6318- F0 02 BEQ $631C +631A- B0 E6 BCS $6302 +631C- 8D 27 63 STA $6327 +631F- AE 27 63 LDX $6327 +6322- AC 26 63 LDY $6326 +6325- 60 RTS +6326- 00 BRK +6327- 00 BRK +6328- A2 08 LDX #$08 +632A- A5 4E LDA $4E +632C- 0A ASL +632D- 26 4F ROL $4F +632F- 90 02 BCC $6333 +6331- 49 2D EOR #$2D +6333- CA DEX +6334- D0 F6 BNE $632C +6336- 85 4E STA $4E +6338- C9 00 CMP #$00 +633A- 60 RTS +633B- 68 PLA +633C- 8D C9 63 STA $63C9 +633F- 68 PLA +6340- 8D CA 63 STA $63CA +6343- 68 PLA +6344- 8D CD 63 STA $63CD +6347- 68 PLA +6348- 8D CF 63 STA $63CF +634B- AD CD 63 LDA $63CD +634E- 18 CLC +634F- 69 02 ADC #$02 +6351- 8D CD 63 STA $63CD +6354- AD CD 63 LDA $63CD +6357- 38 SEC +6358- ED CF 63 SBC $63CF +635B- 8D D1 63 STA $63D1 +635E- 20 28 63 JSR $6328 +6361- 8D C7 63 STA $63C7 +6364- A2 00 LDX #$00 +6366- 8E C8 63 STX $63C8 +6369- 8E D2 63 STX $63D2 +636C- AD C8 63 LDA $63C8 +636F- 48 PHA +6370- AD C7 63 LDA $63C7 +6373- 48 PHA +6374- AD D2 63 LDA $63D2 +6377- 48 PHA +6378- AD D1 63 LDA $63D1 +637B- 48 PHA +637C- 20 73 61 JSR $6173 +637F- 68 PLA +6380- 8D D3 63 STA $63D3 +6383- 68 PLA +6384- 8D D4 63 STA $63D4 +6387- AD C7 63 LDA $63C7 +638A- 38 SEC +638B- ED D3 63 SBC $63D3 +638E- 8D D3 63 STA $63D3 +6391- A9 00 LDA #$00 +6393- 8D D4 63 STA $63D4 +6396- AD D2 63 LDA $63D2 +6399- 48 PHA +639A- AD D1 63 LDA $63D1 +639D- 48 PHA +639E- AD D4 63 LDA $63D4 +63A1- 48 PHA +63A2- AD D3 63 LDA $63D3 +63A5- 48 PHA +63A6- 20 08 61 JSR $6108 +63A9- 68 PLA +63AA- 8D CB 63 STA $63CB +63AD- 68 PLA +63AE- 8D CC 63 STA $63CC +63B1- 18 CLC +63B2- 6D CF 63 ADC $63CF +63B5- 8D CB 63 STA $63CB +63B8- AE CC 63 LDX $63CC +63BB- AD CA 63 LDA $63CA +63BE- 48 PHA +63BF- AD C9 63 
LDA $63C9 +63C2- 48 PHA +63C3- AD CB 63 LDA $63CB +63C6- 60 RTS +63C7- 00 BRK +63C8- 00 BRK +63C9- 00 BRK +63CA- 00 BRK +63CB- 00 BRK +63CC- 00 BRK +63CD- 00 BRK +63CE- 00 BRK +63CF- 00 BRK +63D0- 00 BRK +63D1- 00 BRK +63D2- 00 BRK +63D3- 00 BRK +63D4- 00 BRK +63D5- 68 PLA +63D6- 8D 10 64 STA $6410 +63D9- 68 PLA +63DA- 8D 11 64 STA $6411 +63DD- 68 PLA +63DE- 8D 0E 64 STA $640E +63E1- 68 PLA +63E2- 8D 0F 64 STA $640F +63E5- A9 00 LDA #$00 +63E7- AA TAX +63E8- 8E 12 64 STX $6412 +63EB- F0 11 BEQ $63FE +63ED- 18 CLC +63EE- 6D 0E 64 ADC $640E +63F1- A8 TAY +63F2- 8A TXA +63F3- 6D 12 64 ADC $6412 +63F6- AA TAX +63F7- 98 TYA +63F8- 0E 0E 64 ASL $640E +63FB- 2E 12 64 ROL $6412 +63FE- 4E 0F 64 LSR $640F +6401- B0 EA BCS $63ED +6403- D0 F3 BNE $63F8 +6405- AD 11 64 LDA $6411 +6408- 48 PHA +6409- AD 10 64 LDA $6410 +640C- 48 PHA +640D- 60 RTS +640E- 00 BRK +640F- 00 BRK +6410- 00 BRK +6411- 00 BRK +6412- 00 BRK +6413- 68 PLA +6414- 8D 4E 64 STA $644E +6417- 68 PLA +6418- 8D 4F 64 STA $644F +641B- 68 PLA +641C- 8D 4C 64 STA $644C +641F- 68 PLA +6420- 8D 4D 64 STA $644D +6423- A9 00 LDA #$00 +6425- A2 08 LDX #$08 +6427- 0E 4D 64 ASL $644D +642A- 2A ROL +642B- CD 4C 64 CMP $644C +642E- 90 03 BCC $6433 +6430- ED 4C 64 SBC $644C +6433- 2E 4D 64 ROL $644D +6436- CA DEX +6437- D0 F1 BNE $642A +6439- 8D 4B 64 STA $644B +643C- AD 4F 64 LDA $644F +643F- 48 PHA +6440- AD 4E 64 LDA $644E +6443- 48 PHA +6444- AE 4B 64 LDX $644B +6447- AD 4D 64 LDA $644D +644A- 60 RTS +644B- 00 BRK +644C- 00 BRK +644D- 00 BRK +644E- 00 BRK +644F- 00 BRK diff --git a/disks/disk2_math8_math16/MATH.DEMO.S b/disks/disk2_math8_math16/MATH.DEMO.S new file mode 100644 index 0000000..b27c0c2 --- /dev/null +++ b/disks/disk2_math8_math16/MATH.DEMO.S @@ -0,0 +1,2078 @@ +0901- AA TAX +0902- 8D AA AA STA $AAAA +0905- AA TAX +0906- AA TAX +0907- AA TAX +0908- AA TAX +0909- AA TAX +090A- AA TAX +090B- AA TAX +090C- AA TAX +090D- AA TAX +090E- AA TAX +090F- AA TAX +0910- AA TAX +0911- AA TAX +0912- AA TAX +0913- AA TAX 
+0914- AA TAX +0915- AA TAX +0916- AA TAX +0917- AA TAX +0918- AA TAX +0919- AA TAX +091A- AA TAX +091B- AA TAX +091C- AA TAX +091D- AA TAX +091E- AA TAX +091F- AA TAX +0920- AA TAX +0921- AA TAX +0922- AA TAX +0923- 8D AA 20 STA $20AA +0926- 20 20 20 JSR $2020 +0929- 20 20 20 JSR $2020 +092C- 20 20 20 JSR $2020 +092F- 20 20 20 JSR $2020 +0932- 20 20 20 JSR $2020 +0935- 20 20 20 JSR $2020 +0938- 20 20 20 JSR $2020 +093B- 20 20 20 JSR $2020 +093E- 20 20 20 JSR $2020 +0941- 20 20 AA JSR $AA20 +0944- 8D AA 20 STA $20AA +0947- 20 20 20 JSR $2020 +094A- 20 20 AD JSR $AD20 +094D- BC 20 CD LDY $CD20,X +0950- C1 D4 CMP ($D4,X) +0952- C8 INY +0953- 20 20 C4 JSR $C420 +0956- C5 CD CMP $CD +0958- CF ??? +0959- 20 BE AD JSR $ADBE +095C- 20 20 20 JSR $2020 +095F- 20 20 20 JSR $2020 +0962- 20 20 AA JSR $AA20 +0965- 8D AA 20 STA $20AA +0968- 20 20 20 JSR $2020 +096B- 20 20 20 JSR $2020 +096E- 20 20 20 JSR $2020 +0971- 20 20 20 JSR $2020 +0974- 20 20 20 JSR $2020 +0977- 20 20 20 JSR $2020 +097A- 20 20 20 JSR $2020 +097D- 20 20 20 JSR $2020 +0980- 20 20 20 JSR $2020 +0983- 20 20 AA JSR $AA20 +0986- 8D AA 20 STA $20AA +0989- 20 20 20 JSR $2020 +098C- 20 20 20 JSR $2020 +098F- D6 C5 DEC $C5,X +0991- D2 D3 CMP ($D3) +0993- C9 CF CMP #$CF +0995- CE 20 B0 DEC $B020 +0998- B0 AE BCS $0948 +099A- B0 B0 BCS $094C +099C- AE B0 B1 LDX $B1B0 +099F- 20 20 20 JSR $2020 +09A2- 20 20 20 JSR $2020 +09A5- 20 AA 8D JSR $8DAA +09A8- AA TAX +09A9- 20 20 20 JSR $2020 +09AC- 20 20 20 JSR $2020 +09AF- 20 20 20 JSR $2020 +09B2- 20 20 20 JSR $2020 +09B5- 20 20 20 JSR $2020 +09B8- 20 20 20 JSR $2020 +09BB- 20 20 20 JSR $2020 +09BE- 20 20 20 JSR $2020 +09C1- 20 20 20 JSR $2020 +09C4- 20 20 20 JSR $2020 +09C7- AA TAX +09C8- 8D AA 20 STA $20AA +09CB- 20 20 20 JSR $2020 +09CE- 20 20 20 JSR $2020 +09D1- 20 20 B0 JSR $B020 +09D4- B3 ??? +09D5- AF ??? +09D6- B0 B4 BCS $098C +09D8- AF ??? 
+09D9- B1 B9 LDA ($B9),Y +09DB- B8 CLV +09DC- B0 20 BCS $09FE +09DE- 20 20 20 JSR $2020 +09E1- 20 20 20 JSR $2020 +09E4- 20 20 20 JSR $2020 +09E7- 20 AA 8D JSR $8DAA +09EA- AA TAX +09EB- 20 20 20 JSR $2020 +09EE- 20 20 20 JSR $2020 +09F1- 20 20 20 JSR $2020 +09F4- 20 20 20 JSR $2020 +09F7- 20 20 20 JSR $2020 +09FA- 20 20 20 JSR $2020 +09FD- 20 20 20 JSR $2020 +0A00- 20 20 20 JSR $2020 +0A03- 20 20 20 JSR $2020 +0A06- 20 20 20 JSR $2020 +0A09- AA TAX +0A0A- 8D AA AA STA $AAAA +0A0D- AA TAX +0A0E- AA TAX +0A0F- AA TAX +0A10- AA TAX +0A11- AA TAX +0A12- AA TAX +0A13- AA TAX +0A14- AA TAX +0A15- AA TAX +0A16- AA TAX +0A17- AA TAX +0A18- AA TAX +0A19- AA TAX +0A1A- AA TAX +0A1B- AA TAX +0A1C- AA TAX +0A1D- AA TAX +0A1E- AA TAX +0A1F- AA TAX +0A20- AA TAX +0A21- AA TAX +0A22- AA TAX +0A23- AA TAX +0A24- AA TAX +0A25- AA TAX +0A26- AA TAX +0A27- AA TAX +0A28- AA TAX +0A29- AA TAX +0A2A- AA TAX +0A2B- 8D AA 20 STA $20AA +0A2E- 20 20 20 JSR $2020 +0A31- 20 20 20 JSR $2020 +0A34- 20 20 20 JSR $2020 +0A37- 20 20 20 JSR $2020 +0A3A- 20 20 20 JSR $2020 +0A3D- 20 20 20 JSR $2020 +0A40- 20 20 20 JSR $2020 +0A43- 20 20 20 JSR $2020 +0A46- 20 20 20 JSR $2020 +0A49- 20 20 AA JSR $AA20 +0A4C- 8D AA 20 STA $20AA +0A4F- 20 20 20 JSR $2020 +0A52- 20 20 20 JSR $2020 +0A55- CE C1 D4 DEC $D4C1 +0A58- C8 INY +0A59- C1 CE CMP ($CE,X) +0A5B- 20 C4 AE JSR $AEC4 +0A5E- 20 D2 C9 JSR $C9D2 +0A61- C7 ??? +0A62- C7 ??? +0A63- D3 ??? +0A64- 20 20 20 JSR $2020 +0A67- 20 20 20 JSR $2020 +0A6A- 20 20 AA JSR $AA20 +0A6D- 8D AA 20 STA $20AA +0A70- 20 20 CE JSR $CE20 +0A73- C1 D4 CMP ($D4,X) +0A75- C8 INY +0A76- C1 CE CMP ($CE,X) +0A78- AE D2 C9 LDX $C9D2 +0A7B- C7 ??? +0A7C- C7 ??? +0A7D- D3 ??? +0A7E- C0 CF CPY #$CF +0A80- D5 D4 CMP $D4,X +0A82- CC CF CF CPY $CFCF +0A85- CB ??? 
+0A86- AE C3 CF LDX $CFC3 +0A89- CD 20 20 CMP $2020 +0A8C- 20 AA 8D JSR $8DAA +0A8F- AA TAX +0A90- 20 20 20 JSR $2020 +0A93- 20 20 20 JSR $2020 +0A96- 20 20 20 JSR $2020 +0A99- 20 20 20 JSR $2020 +0A9C- 20 20 20 JSR $2020 +0A9F- 20 20 20 JSR $2020 +0AA2- 20 20 20 JSR $2020 +0AA5- 20 20 20 JSR $2020 +0AA8- 20 20 20 JSR $2020 +0AAB- 20 20 20 JSR $2020 +0AAE- AA TAX +0AAF- 8D AA AA STA $AAAA +0AB2- AA TAX +0AB3- AA TAX +0AB4- AA TAX +0AB5- AA TAX +0AB6- AA TAX +0AB7- AA TAX +0AB8- AA TAX +0AB9- AA TAX +0ABA- AA TAX +0ABB- AA TAX +0ABC- AA TAX +0ABD- AA TAX +0ABE- AA TAX +0ABF- AA TAX +0AC0- AA TAX +0AC1- AA TAX +0AC2- AA TAX +0AC3- AA TAX +0AC4- AA TAX +0AC5- AA TAX +0AC6- AA TAX +0AC7- AA TAX +0AC8- AA TAX +0AC9- AA TAX +0ACA- AA TAX +0ACB- AA TAX +0ACC- AA TAX +0ACD- AA TAX +0ACE- AA TAX +0ACF- AA TAX +0AD0- 8D AA 8D STA $8DAA +0AD3- AA TAX +0AD4- AA TAX +0AD5- 20 C1 D3 JSR $D3C1 +0AD8- D3 ??? +0AD9- C5 CD CMP $CD +0ADB- C2 ??? +0ADC- CC C5 D2 CPY $D2C5 +0ADF- 20 C4 C9 JSR $C9C4 +0AE2- D2 C5 CMP ($C5) +0AE4- C3 ??? +0AE5- D4 ??? +0AE6- C9 D6 CMP #$D6 +0AE8- C5 D3 CMP $D3 +0AEA- 8D AA 8D STA $8DAA +0AED- A0 C3 LDY #$C3 +0AEF- D9 C3 A0 CMP $A0C3,Y +0AF2- C1 D6 CMP ($D6,X) +0AF4- C5 8D CMP $8D +0AF6- A0 C5 LDY #$C5 +0AF8- D8 CLD +0AF9- D0 A0 BNE $0A9B +0AFB- CF ??? +0AFC- CE CC D9 DEC $D9CC +0AFF- 8D A0 C4 STA $C4A0 +0B02- D3 ??? +0B03- CB ??? +0B04- A0 CD LDY #$CD +0B06- C1 D4 CMP ($D4,X) +0B08- C8 INY +0B09- AE C4 C5 LDX $C5C4 +0B0C- CD CF 8D CMP $8DCF +0B0F- AA TAX +0B10- 8D AA E0 STA $E0AA +0B13- E0 E0 CPX #$E0 +0B15- E0 E0 CPX #$E0 +0B17- E0 E0 CPX #$E0 +0B19- E0 E0 CPX #$E0 +0B1B- E0 E0 CPX #$E0 +0B1D- E0 E0 CPX #$E0 +0B1F- E0 E0 CPX #$E0 +0B21- E0 E0 CPX #$E0 +0B23- E0 E0 CPX #$E0 +0B25- E0 E0 CPX #$E0 +0B27- E0 E0 CPX #$E0 +0B29- E0 E0 CPX #$E0 +0B2B- E0 E0 CPX #$E0 +0B2D- E0 E0 CPX #$E0 +0B2F- E0 AA CPX #$AA +0B31- 8D AA 20 STA $20AA +0B34- 20 D4 CF JSR $CFD4 +0B37- D0 20 BNE $0B59 +0B39- C9 CE CMP #$CE +0B3B- C3 ??? 
+0B3C- CC D5 C4 CPY $C4D5 +0B3F- C5 D3 CMP $D3 +0B41- 20 A8 D0 JSR $D0A8 +0B44- D5 D4 CMP $D4,X +0B46- D3 ??? +0B47- AC 20 CD LDY $CD20 +0B4A- C1 C3 CMP ($C3,X) +0B4C- D2 CF CMP ($CF) +0B4E- D3 ??? +0B4F- A9 20 LDA #$20 +0B51- AA TAX +0B52- 8D AA AC STA $ACAA +0B55- AC AC AC LDY $ACAC +0B58- AC AC AC LDY $ACAC +0B5B- AC AC AC LDY $ACAC +0B5E- AC AC AC LDY $ACAC +0B61- AC AC AC LDY $ACAC +0B64- AC AC AC LDY $ACAC +0B67- AC AC AC LDY $ACAC +0B6A- AC AC AC LDY $ACAC +0B6D- AC AC AC LDY $ACAC +0B70- AC AC AA LDY $AAAC +0B73- 8D AA 8D STA $8DAA +0B76- A0 D0 LDY #$D0 +0B78- D5 D4 CMP $D4,X +0B7A- A0 C4 LDY #$C4 +0B7C- C5 C3 CMP $C3 +0B7E- D3 ??? +0B7F- 8D AA D5 STA $D5AA +0B82- D3 ??? +0B83- C5 20 CMP $20 +0B85- CD C1 D4 CMP $D4C1 +0B88- C8 INY +0B89- AE CD C1 LDX $C1CD +0B8C- C3 ??? +0B8D- 20 BB 20 JSR $20BB +0B90- C3 ??? +0B91- C1 CE CMP ($CE,X) +0B93- A7 ??? +0B94- D4 ??? +0B95- 20 D5 D3 JSR $D3D5 +0B98- C5 20 CMP $20 +0B9A- D2 C9 CMP ($C9) +0B9C- C7 ??? +0B9D- C8 INY +0B9E- D4 ??? +0B9F- 20 CE CF JSR $CFCE +0BA2- D7 ??? +0BA3- 8D AA 20 STA $20AA +0BA6- C2 ??? +0BA7- C5 C3 CMP $C3 +0BA9- C1 D5 CMP ($D5,X) +0BAB- D3 ??? +0BAC- C5 20 CMP $20 +0BAE- CF ??? +0BAF- C6 20 DEC $20 +0BB1- CD C5 CD CMP $CDC5 +0BB4- CF ??? +0BB5- D2 D9 CMP ($D9) +0BB7- 20 CC C9 JSR $C9CC +0BBA- CD C9 D4 CMP $D4C9 +0BBD- C1 D4 CMP ($D4,X) +0BBF- C9 CF CMP #$CF +0BC1- CE D3 8D DEC $8DD3 +0BC4- AA TAX +0BC5- 8D A0 D0 STA $D0A0 +0BC8- D5 D4 CMP $D4,X +0BCA- A0 CD LDY #$CD +0BCC- C1 D4 CMP ($D4,X) +0BCE- C8 INY +0BCF- AE C8 CF LDX $CFC8 +0BD2- CF ??? +0BD3- CB ??? +0BD4- D3 ??? +0BD5- 8D AA 8D STA $8DAA +0BD8- A0 CF LDY #$CF +0BDA- C2 ??? +0BDB- CA DEX +0BDC- A0 A4 LDY #$A4 +0BDE- C2 ??? 
+0BDF- C6 C5 DEC $C5 +0BE1- B0 8D BCS $0B70 +0BE3- A0 CF LDY #$CF +0BE5- D2 C7 CMP ($C7) +0BE7- 20 A4 B6 JSR $B6A4 +0BEA- B0 B0 BCS $0B9C +0BEC- B0 8D BCS $0B7B +0BEE- AA TAX +0BEF- 8D AA E0 STA $E0AA +0BF2- E0 E0 CPX #$E0 +0BF4- E0 E0 CPX #$E0 +0BF6- E0 E0 CPX #$E0 +0BF8- E0 E0 CPX #$E0 +0BFA- E0 E0 CPX #$E0 +0BFC- E0 E0 CPX #$E0 +0BFE- E0 E0 CPX #$E0 +0C00- E0 E0 CPX #$E0 +0C02- E0 E0 CPX #$E0 +0C04- E0 E0 CPX #$E0 +0C06- E0 E0 CPX #$E0 +0C08- E0 E0 CPX #$E0 +0C0A- E0 E0 CPX #$E0 +0C0C- E0 E0 CPX #$E0 +0C0E- E0 AA CPX #$AA +0C10- 8D AA 20 STA $20AA +0C13- 20 20 20 JSR $2020 +0C16- 20 20 D0 JSR $D020 +0C19- D2 CF CMP ($CF) +0C1B- C7 ??? +0C1C- D2 C1 CMP ($C1) +0C1E- CD 20 CD CMP $CD20 +0C21- C1 C9 CMP ($C9,X) +0C23- CE 20 C2 DEC $C220 +0C26- CF ??? +0C27- C4 D9 CPY $D9 +0C29- 20 20 20 JSR $2020 +0C2C- 20 20 20 JSR $2020 +0C2F- 20 AA 8D JSR $8DAA +0C32- AA TAX +0C33- AC AC AC LDY $ACAC +0C36- AC AC AC LDY $ACAC +0C39- AC AC AC LDY $ACAC +0C3C- AC AC AC LDY $ACAC +0C3F- AC AC AC LDY $ACAC +0C42- AC AC AC LDY $ACAC +0C45- AC AC AC LDY $ACAC +0C48- AC AC AC LDY $ACAC +0C4B- AC AC AC LDY $ACAC +0C4E- AC AC AC LDY $ACAC +0C51- AA TAX +0C52- 8D AA 8D STA $8DAA +0C55- AA TAX +0C56- 8D AA 8D STA $8DAA +0C59- AA TAX +0C5A- AA TAX +0C5B- 20 C2 C9 JSR $C9C2 +0C5E- C7 ??? +0C5F- 20 CC D5 JSR $D5CC +0C62- CD D0 D3 CMP $D3D0 +0C65- 20 AD AD JSR $ADAD +0C68- 20 C8 C9 JSR $C9C8 +0C6B- C7 ??? +0C6C- C8 INY +0C6D- 20 CC C5 JSR $C5CC +0C70- D6 C5 DEC $C5,X +0C72- CC 20 C3 CPY $C320 +0C75- CF ??? +0C76- C4 C5 CPY $C5 +0C78- 8D AA 8D STA $8DAA +0C7B- AA TAX +0C7C- 8D AA 8D STA $8DAA +0C7F- AA TAX +0C80- AA TAX +0C81- 20 C1 C4 JSR $C4C1 +0C84- C4 B1 CPY $B1 +0C86- B6 20 LDX $20,Y +0C88- C1 C4 CMP ($C4,X) +0C8A- C4 D3 CPY $D3 +0C8C- 20 D4 D7 JSR $D7D4 +0C8F- CF ??? +0C90- 20 B1 B6 JSR $B6B1 +0C93- AD C2 C9 LDA $C9C2 +0C96- D4 ??? 
+0C97- 20 CE D5 JSR $D5CE +0C9A- CD C2 C5 CMP $C5C2 +0C9D- D2 D3 CMP ($D3) +0C9F- 20 C1 CE JSR $CEC1 +0CA2- C4 8D CPY $8D +0CA4- AA TAX +0CA5- AA TAX +0CA6- 20 D2 C5 JSR $C5D2 +0CA9- D4 ??? +0CAA- D5 D2 CMP $D2,X +0CAC- CE D3 20 DEC $20D3 +0CAF- D4 ??? +0CB0- C8 INY +0CB1- C5 20 CMP $20 +0CB3- D2 C5 CMP ($C5) +0CB5- D3 ??? +0CB6- D5 CC CMP $CC,X +0CB8- D4 ??? +0CB9- 20 C9 CE JSR $CEC9 +0CBC- 20 D9 A8 JSR $A8D9 +0CBF- CC CF D7 CPY $D7CF +0CC2- A9 20 LDA #$20 +0CC4- C1 CE CMP ($CE,X) +0CC6- C4 20 CPY $20 +0CC8- D8 CLD +0CC9- 20 A8 C8 JSR $C8A8 +0CCC- C9 C7 CMP #$C7 +0CCE- C8 INY +0CCF- A9 AE LDA #$AE +0CD1- 8D AA 8D STA $8DAA +0CD4- AA TAX +0CD5- AA TAX +0CD6- 20 CD C1 JSR $C1CD +0CD9- C3 ??? +0CDA- D2 CF CMP ($CF) +0CDC- BA TSX +0CDD- 20 DF C1 JSR $C1DF +0CE0- C4 C4 CPY $C4 +0CE2- B1 B6 LDA ($B6),Y +0CE4- 20 A3 B1 JSR $B1A3 +0CE7- B0 B0 BCS $0C99 +0CE9- B0 BB BCS $0CA6 +0CEB- A3 ??? +0CEC- B2 B0 LDA ($B0) +0CEE- B0 B0 BCS $0CA0 +0CF0- 8D AA 8D STA $8DAA +0CF3- A0 CC LDY #$CC +0CF5- C4 C1 CPY $C1 +0CF7- A0 A3 LDY #$A3 +0CF9- BE B1 B0 LDX $B0B1,Y +0CFC- B0 B0 BCS $0CAE +0CFE- A0 BB LDY #$BB +0D00- 20 C1 C4 JSR $C4C1 +0D03- C4 B1 CPY $B1 +0D05- AC 20 C8 LDY $C820 +0D08- C9 C7 CMP #$C7 +0D0A- C8 INY +0D0B- 8D A0 D0 STA $D0A0 +0D0E- C8 INY +0D0F- C1 8D CMP ($8D,X) +0D11- A0 CC LDY #$CC +0D13- C4 C1 CPY $C1 +0D15- A0 A3 LDY #$A3 +0D17- BC B1 B0 LDY $B0B1,X +0D1A- B0 B0 BCS $0CCC +0D1C- A0 BB LDY #$BB +0D1E- 20 C1 C4 JSR $C4C1 +0D21- C4 B1 CPY $B1 +0D23- AC 20 CC LDY $CC20 +0D26- CF ??? +0D27- D7 ??? 
+0D28- 8D A0 D0 STA $D0A0 +0D2B- C8 INY +0D2C- C1 8D CMP ($8D,X) +0D2E- A0 CC LDY #$CC +0D30- C4 C1 CPY $C1 +0D32- A0 A3 LDY #$A3 +0D34- BE B2 B0 LDX $B0B2,Y +0D37- B0 B0 BCS $0CE9 +0D39- A0 BB LDY #$BB +0D3B- 20 C1 C4 JSR $C4C1 +0D3E- C4 B2 CPY $B2 +0D40- AC 20 C8 LDY $C820 +0D43- C9 C7 CMP #$C7 +0D45- C8 INY +0D46- 8D A0 D0 STA $D0A0 +0D49- C8 INY +0D4A- C1 8D CMP ($8D,X) +0D4C- A0 CC LDY #$CC +0D4E- C4 C1 CPY $C1 +0D50- A0 A3 LDY #$A3 +0D52- BC B2 B0 LDY $B0B2,X +0D55- B0 B0 BCS $0D07 +0D57- A0 BB LDY #$BB +0D59- 20 C1 C4 JSR $C4C1 +0D5C- C4 B2 CPY $B2 +0D5E- AC 20 CC LDY $CC20 +0D61- CF ??? +0D62- D7 ??? +0D63- 8D A0 D0 STA $D0A0 +0D66- C8 INY +0D67- C1 8D CMP ($8D,X) +0D69- A0 CA LDY #$CA +0D6B- D3 ??? +0D6C- D2 A0 CMP ($A0) +0D6E- C1 C4 CMP ($C4,X) +0D70- C4 B1 CPY $B1 +0D72- B6 8D LDX $8D,Y +0D74- AA TAX +0D75- 20 C2 D2 JSR $D2C2 +0D78- CB ??? +0D79- 20 BB 20 JSR $20BB +0D7C- D8 CLD +0D7D- BD B0 C2 LDA $C2B0,X +0D80- AC 20 D9 LDY $D920 +0D83- BD C2 B8 LDA $B8C2,X +0D86- AC 20 BD LDY $BD20 +0D89- BD 20 B3 LDA $B320,X +0D8C- B0 B0 BCS $0D3E +0D8E- B0 8D BCS $0D1D +0D90- AA TAX +0D91- 8D AA AA STA $AAAA +0D94- 20 D3 D5 JSR $D5D3 +0D97- C2 ??? +0D98- B1 B6 LDA ($B6),Y +0D9A- 20 D3 D5 JSR $D5D3 +0D9D- C2 ??? +0D9E- D4 ??? +0D9F- D2 C1 CMP ($C1) +0DA1- C3 ??? +0DA2- D4 ??? +0DA3- D3 ??? +0DA4- 20 CF CE JSR $CECF +0DA7- C5 20 CMP $20 +0DA9- B1 B6 LDA ($B6),Y +0DAB- AD C2 C9 LDA $C9C2 +0DAE- D4 ??? +0DAF- 20 CE D5 JSR $D5CE +0DB2- CD C2 C5 CMP $C5C2 +0DB5- D2 8D CMP ($8D) +0DB7- AA TAX +0DB8- AA TAX +0DB9- 20 C6 D2 JSR $D2C6 +0DBC- CF ??? +0DBD- CD 20 C1 CMP $C120 +0DC0- CE CF D4 DEC $D4CF +0DC3- C8 INY +0DC4- C5 D2 CMP $D2 +0DC6- AC 20 C1 LDY $C120 +0DC9- CE C4 20 DEC $20C4 +0DCC- D2 C5 CMP ($C5) +0DCE- D4 ??? +0DCF- D5 D2 CMP $D2,X +0DD1- CE D3 20 DEC $20D3 +0DD4- D2 C5 CMP ($C5) +0DD6- D3 ??? +0DD7- D5 CC CMP $CC,X +0DD9- D4 ??? 
+0DDA- 20 C9 CE JSR $CEC9 +0DDD- 8D AA AA STA $AAAA +0DE0- 20 D9 20 JSR $20D9 +0DE3- A8 TAY +0DE4- CC CF D7 CPY $D7CF +0DE7- A9 20 LDA #$20 +0DE9- C1 CE CMP ($CE,X) +0DEB- C4 20 CPY $20 +0DED- D8 CLD +0DEE- 20 A8 C8 JSR $C8A8 +0DF1- C9 C7 CMP #$C7 +0DF3- C8 INY +0DF4- A9 8D LDA #$8D +0DF6- AA TAX +0DF7- 8D AA AA STA $AAAA +0DFA- 20 CD C1 JSR $C1CD +0DFD- C3 ??? +0DFE- D2 CF CMP ($CF) +0E00- BA TSX +0E01- 20 DF D3 JSR $D3DF +0E04- D5 C2 CMP $C2,X +0E06- B1 B6 LDA ($B6),Y +0E08- 20 A3 B2 JSR $B2A3 +0E0B- B0 B0 BCS $0DBD +0E0D- B0 BB BCS $0DCA +0E0F- A3 ??? +0E10- B1 B0 LDA ($B0),Y +0E12- B0 B0 BCS $0DC4 +0E14- 8D AA 8D STA $8DAA +0E17- A0 CC LDY #$CC +0E19- C4 C1 CPY $C1 +0E1B- A0 A3 LDY #$A3 +0E1D- BE B2 B0 LDX $B0B2,Y +0E20- B0 B0 BCS $0DD2 +0E22- A0 BB LDY #$BB +0E24- 20 CD C9 JSR $C9CD +0E27- CE D5 C5 DEC $C5D5 +0E2A- CE C4 8D DEC $8DC4 +0E2D- A0 D0 LDY #$D0 +0E2F- C8 INY +0E30- C1 8D CMP ($8D,X) +0E32- A0 CC LDY #$CC +0E34- C4 C1 CPY $C1 +0E36- A0 A3 LDY #$A3 +0E38- BC B2 B0 LDY $B0B2,X +0E3B- B0 B0 BCS $0DED +0E3D- 8D A0 D0 STA $D0A0 +0E40- C8 INY +0E41- C1 8D CMP ($8D,X) +0E43- A0 CC LDY #$CC +0E45- C4 C1 CPY $C1 +0E47- A0 A3 LDY #$A3 +0E49- BE B1 B0 LDX $B0B1,Y +0E4C- B0 B0 BCS $0DFE +0E4E- A0 BB LDY #$BB +0E50- 20 D3 D5 JSR $D5D3 +0E53- C2 ??? +0E54- D4 ??? +0E55- D2 C1 CMP ($C1) +0E57- C8 INY +0E58- C5 CE CMP $CE +0E5A- C4 8D CPY $8D +0E5C- A0 D0 LDY #$D0 +0E5E- C8 INY +0E5F- C1 8D CMP ($8D,X) +0E61- A0 CC LDY #$CC +0E63- C4 C1 CPY $C1 +0E65- A0 A3 LDY #$A3 +0E67- BC B1 B0 LDY $B0B1,X +0E6A- B0 B0 BCS $0E1C +0E6C- 8D A0 D0 STA $D0A0 +0E6F- C8 INY +0E70- C1 8D CMP ($8D,X) +0E72- A0 CA LDY #$CA +0E74- D3 ??? +0E75- D2 A0 CMP ($A0) +0E77- D3 ??? +0E78- D5 C2 CMP $C2,X +0E7A- B1 B6 LDA ($B6),Y +0E7C- 8D AA 20 STA $20AA +0E7F- C2 ??? 
+0E80- D2 CB CMP ($CB) +0E82- 20 BB 20 JSR $20BB +0E85- D8 CLD +0E86- 20 BD 20 JSR $20BD +0E89- B0 B3 BCS $0E3E +0E8B- AC 20 D9 LDY $D920 +0E8E- 20 BD 20 JSR $20BD +0E91- C5 B8 CMP $B8 +0E93- AC 20 BD LDY $BD20 +0E96- BD 20 B1 LDA $B120,X +0E99- B0 B0 BCS $0E4B +0E9B- B0 8D BCS $0E2A +0E9D- AA TAX +0E9E- 8D AA AA STA $AAAA +0EA1- 20 CD D5 JSR $D5CD +0EA4- CC B1 B6 CPY $B6B1 +0EA7- 20 CD D5 JSR $D5CD +0EAA- CC D4 C9 CPY $C9D4 +0EAD- D0 CC BNE $0E7B +0EAF- C5 D3 CMP $D3 +0EB1- 20 D4 D7 JSR $D7D4 +0EB4- CF ??? +0EB5- 20 D6 C1 JSR $C1D6 +0EB8- CC D5 C5 CPY $C5D5 +0EBB- D3 ??? +0EBC- 20 C1 CE JSR $CEC1 +0EBF- C4 20 CPY $20 +0EC1- D2 C5 CMP ($C5) +0EC3- D4 ??? +0EC4- D5 D2 CMP $D2,X +0EC6- CE D3 20 DEC $20D3 +0EC9- D4 ??? +0ECA- C8 INY +0ECB- C5 8D CMP $8D +0ECD- AA TAX +0ECE- AA TAX +0ECF- 20 D0 D2 JSR $D2D0 +0ED2- CF ??? +0ED3- C4 D5 CPY $D5 +0ED5- C3 ??? +0ED6- D4 ??? +0ED7- 20 CF D6 JSR $D6CF +0EDA- C5 D2 CMP $D2 +0EDC- 20 D4 C8 JSR $C8D4 +0EDF- C5 20 CMP $20 +0EE1- D3 ??? +0EE2- D4 ??? +0EE3- C1 C3 CMP ($C3,X) +0EE5- CB ??? +0EE6- 20 A8 CC JSR $CCA8 +0EE9- CF ??? +0EEA- D7 ??? +0EEB- AC 20 C8 LDY $C820 +0EEE- C9 C7 CMP #$C7 +0EF0- C8 INY +0EF1- A9 8D LDA #$8D +0EF3- AA TAX +0EF4- 8D AA AA STA $AAAA +0EF7- 20 CD C1 JSR $C1CD +0EFA- C3 ??? +0EFB- D2 CF CMP ($CF) +0EFD- BA TSX +0EFE- 20 DF CD JSR $CDDF +0F01- D5 CC CMP $CC,X +0F03- B1 B6 LDA ($B6),Y +0F05- 20 A3 B1 JSR $B1A3 +0F08- B0 B0 BCS $0EBA +0F0A- B0 BB BCS $0EC7 +0F0C- A3 ??? +0F0D- B3 ??? 
+0F0E- 8D AA 8D STA $8DAA +0F11- A0 CC LDY #$CC +0F13- C4 C1 CPY $C1 +0F15- A0 A3 LDY #$A3 +0F17- BE B1 B0 LDX $B0B1,Y +0F1A- B0 B0 BCS $0ECC +0F1C- 8D A0 D0 STA $D0A0 +0F1F- C8 INY +0F20- C1 8D CMP ($8D,X) +0F22- A0 CC LDY #$CC +0F24- C4 C1 CPY $C1 +0F26- A0 A3 LDY #$A3 +0F28- BC B1 B0 LDY $B0B1,X +0F2B- B0 B0 BCS $0EDD +0F2D- A0 BB LDY #$BB +0F2F- 20 CD D5 JSR $D5CD +0F32- CC D4 C9 CPY $C9D4 +0F35- D0 CC BNE $0F03 +0F37- C9 C3 CMP #$C3 +0F39- C1 CE CMP ($CE,X) +0F3B- C4 8D CPY $8D +0F3D- A0 D0 LDY #$D0 +0F3F- C8 INY +0F40- C1 8D CMP ($8D,X) +0F42- A0 CC LDY #$CC +0F44- C4 C1 CPY $C1 +0F46- A0 A3 LDY #$A3 +0F48- BE B3 8D LDX $8DB3,Y +0F4B- A0 D0 LDY #$D0 +0F4D- C8 INY +0F4E- C1 8D CMP ($8D,X) +0F50- A0 CC LDY #$CC +0F52- C4 C1 CPY $C1 +0F54- A0 A3 LDY #$A3 +0F56- BC B3 A0 LDY $A0B3,X +0F59- BB ??? +0F5A- 20 CD D5 JSR $D5CD +0F5D- CC D4 C9 CPY $C9D4 +0F60- D0 CC BNE $0F2E +0F62- C9 C5 CMP #$C5 +0F64- D2 8D CMP ($8D) +0F66- A0 D0 LDY #$D0 +0F68- C8 INY +0F69- C1 8D CMP ($8D,X) +0F6B- A0 CA LDY #$CA +0F6D- D3 ??? +0F6E- D2 A0 CMP ($A0) +0F70- CD D5 CC CMP $CCD5 +0F73- B1 B6 LDA ($B6),Y +0F75- 8D AA 20 STA $20AA +0F78- C2 ??? +0F79- D2 CB CMP ($CB) +0F7B- 20 BB 20 JSR $20BB +0F7E- D8 CLD +0F7F- BD B0 C2 LDA $C2B0,X +0F82- AC 20 D9 LDY $D920 +0F85- BD B0 B8 LDA $B8B0,X +0F88- AC 20 BD LDY $BD20 +0F8B- BD 20 B3 LDA $B320,X +0F8E- B0 B0 BCS $0F40 +0F90- B0 8D BCS $0F1F +0F92- AA TAX +0F93- 8D AA AA STA $AAAA +0F96- 20 D5 C4 JSR $C4D5 +0F99- C9 D6 CMP #$D6 +0F9B- B1 B6 LDA ($B6),Y +0F9D- 20 C4 D6 JSR $D6C4 +0FA0- C9 C4 CMP #$C4 +0FA2- C5 D3 CMP $D3 +0FA4- 20 CF CE JSR $CECF +0FA7- C5 20 CMP $20 +0FA9- D5 CE CMP $CE,X +0FAB- D3 ??? +0FAC- C9 C7 CMP #$C7 +0FAE- CE C5 C4 DEC $C4C5 +0FB1- 20 CE D5 JSR $D5CE +0FB4- CD C2 C5 CMP $C5C2 +0FB7- D2 20 CMP ($20) +0FB9- C2 ??? +0FBA- D9 20 C1 CMP $C120,Y +0FBD- CE CF D4 DEC $D4CF +0FC0- C8 INY +0FC1- C5 D2 CMP $D2 +0FC3- 8D AA AA STA $AAAA +0FC6- 20 D5 CE JSR $CED5 +0FC9- D3 ??? 
+0FCA- C9 C7 CMP #$C7 +0FCC- CE C5 C4 DEC $C4C5 +0FCF- 20 CE D5 JSR $D5CE +0FD2- CD C2 C5 CMP $C5C2 +0FD5- D2 AC CMP ($AC) +0FD7- 20 D4 C8 JSR $C8D4 +0FDA- C5 CE CMP $CE +0FDC- 20 D2 C5 JSR $C5D2 +0FDF- D4 ??? +0FE0- D5 D2 CMP $D2,X +0FE2- CE D3 20 DEC $20D3 +0FE5- D4 ??? +0FE6- C8 INY +0FE7- C5 20 CMP $20 +0FE9- D2 C5 CMP ($C5) +0FEB- D3 ??? +0FEC- D5 CC CMP $CC,X +0FEE- D4 ??? +0FEF- 20 CF D6 JSR $D6CF +0FF2- C5 D2 CMP $D2 +0FF4- 8D AA AA STA $AAAA +0FF7- 20 D4 C8 JSR $C8D4 +0FFA- C5 20 CMP $20 +0FFC- D3 ??? +0FFD- D4 ??? +0FFE- C1 C3 CMP ($C3,X) +1000- CB ??? +1001- 20 A8 B1 JSR $B1A8 +1004- B6 AD LDX $AD,Y +1006- C2 ??? +1007- C9 D4 CMP #$D4 +1009- A9 AE LDA #$AE +100B- 8D AA 8D STA $8DAA +100E- AA TAX +100F- AA TAX +1010- 20 CD C1 JSR $C1CD +1013- C3 ??? +1014- D2 CF CMP ($CF) +1016- BA TSX +1017- 20 DF C4 JSR $C4DF +101A- C9 D6 CMP #$D6 +101C- B1 B6 LDA ($B6),Y +101E- 20 A3 B3 JSR $B3A3 +1021- B0 B0 BCS $0FD3 +1023- B0 B0 BCS $0FD5 +1025- BB ??? +1026- A3 ??? +1027- B3 ??? +1028- B0 B0 BCS $0FDA +102A- B0 BB BCS $0FE7 +102C- D5 8D CMP $8D,X +102E- AA TAX +102F- 8D A0 CC STA $CCA0 +1032- C4 C1 CPY $C1 +1034- A0 A3 LDY #$A3 +1036- BE B3 B0 LDX $B0B3,Y +1039- B0 B0 BCS $0FEB +103B- B0 8D BCS $0FCA +103D- A0 D0 LDY #$D0 +103F- C8 INY +1040- C1 8D CMP ($8D,X) +1042- A0 CC LDY #$CC +1044- C4 C1 CPY $C1 +1046- A0 A3 LDY #$A3 +1048- BC B3 B0 LDY $B0B3,X +104B- B0 B0 BCS $0FFD +104D- B0 8D BCS $0FDC +104F- A0 D0 LDY #$D0 +1051- C8 INY +1052- C1 8D CMP ($8D,X) +1054- A0 CC LDY #$CC +1056- C4 C1 CPY $C1 +1058- A0 A3 LDY #$A3 +105A- BE B3 B0 LDX $B0B3,Y +105D- B0 B0 BCS $100F +105F- 8D A0 D0 STA $D0A0 +1062- C8 INY +1063- C1 8D CMP ($8D,X) +1065- A0 CC LDY #$CC +1067- C4 C1 CPY $C1 +1069- A0 A3 LDY #$A3 +106B- BC B3 B0 LDY $B0B3,X +106E- B0 B0 BCS $1020 +1070- 8D A0 D0 STA $D0A0 +1073- C8 INY +1074- C1 8D CMP ($8D,X) +1076- A0 CA LDY #$CA +1078- D3 ??? 
+1079- D2 A0 CMP ($A0) +107B- D5 C4 CMP $C4,X +107D- C9 D6 CMP #$D6 +107F- B1 B6 LDA ($B6),Y +1081- 8D AA C2 STA $C2AA +1084- D2 CB CMP ($CB) +1086- 20 BB 20 JSR $20BB +1089- D8 CLD +108A- BD B0 B0 LDA $B0B0,X +108D- AC D9 BD LDY $BDD9 +1090- B0 C1 BCS $1053 +1092- AC 20 BD LDY $BD20 +1095- BD 20 B3 LDA $B320,X +1098- B0 B0 BCS $104A +109A- B0 B0 BCS $104C +109C- AF ??? +109D- B3 ??? +109E- B0 B0 BCS $1050 +10A0- B0 BD BCS $105F +10A2- B1 B0 LDA ($B0),Y +10A4- 8D AA 8D STA $8DAA +10A7- AA TAX +10A8- AA TAX +10A9- 20 D3 C4 JSR $C4D3 +10AC- C9 D6 CMP #$D6 +10AE- B1 B6 LDA ($B6),Y +10B0- 20 C4 C9 JSR $C9C4 +10B3- D6 C9 DEC $C9,X +10B5- C4 C5 CPY $C5 +10B7- D3 ??? +10B8- 20 D4 D7 JSR $D7D4 +10BB- CF ??? +10BC- 20 CE D5 JSR $D5CE +10BF- CD C2 C5 CMP $C5C2 +10C2- D2 D3 CMP ($D3) +10C4- 20 D4 C8 JSR $C8D4 +10C7- C1 D4 CMP ($D4,X) +10C9- 20 C1 D2 JSR $D2C1 +10CC- C5 8D CMP $8D +10CE- AA TAX +10CF- AA TAX +10D0- 20 D3 C9 JSR $C9D3 +10D3- C7 ??? +10D4- CE C5 C4 DEC $C4C5 +10D7- AE 8D AA LDX $AA8D +10DA- 8D AA AA STA $AAAA +10DD- 20 CD C1 JSR $C1CD +10E0- C3 ??? +10E1- D2 CF CMP ($CF) +10E3- BA TSX +10E4- 20 DF C4 JSR $C4DF +10E7- C9 D6 CMP #$D6 +10E9- B1 B6 LDA ($B6),Y +10EB- 20 A3 AD JSR $ADA3 +10EE- B1 B0 LDA ($B0),Y +10F0- B0 B0 BCS $10A2 +10F2- BB ??? +10F3- A3 ??? 
+10F4- B1 B0 LDA ($B0),Y +10F6- 8D AA 8D STA $8DAA +10F9- A0 CC LDY #$CC +10FB- C4 C1 CPY $C1 +10FD- A0 A3 LDY #$A3 +10FF- BE AD B1 LDX $B1AD,Y +1102- B0 B0 BCS $10B4 +1104- B0 8D BCS $1093 +1106- A0 D0 LDY #$D0 +1108- C8 INY +1109- C1 8D CMP ($8D,X) +110B- A0 CC LDY #$CC +110D- C4 C1 CPY $C1 +110F- A0 A3 LDY #$A3 +1111- BC AD B1 LDY $B1AD,X +1114- B0 B0 BCS $10C6 +1116- B0 8D BCS $10A5 +1118- A0 D0 LDY #$D0 +111A- C8 INY +111B- C1 8D CMP ($8D,X) +111D- A0 CC LDY #$CC +111F- C4 C1 CPY $C1 +1121- A0 A3 LDY #$A3 +1123- BE B1 B0 LDX $B0B1,Y +1126- 8D A0 D0 STA $D0A0 +1129- C8 INY +112A- C1 8D CMP ($8D,X) +112C- A0 CC LDY #$CC +112E- C4 C1 CPY $C1 +1130- A0 A3 LDY #$A3 +1132- BC B1 B0 LDY $B0B1,X +1135- 8D A0 D0 STA $D0A0 +1138- C8 INY +1139- C1 8D CMP ($8D,X) +113B- A0 CA LDY #$CA +113D- D3 ??? +113E- D2 A0 CMP ($A0) +1140- D3 ??? +1141- C4 C9 CPY $C9 +1143- D6 B1 DEC $B1,X +1145- B6 8D LDX $8D,Y +1147- AA TAX +1148- 20 C2 D2 JSR $D2C2 +114B- CB ??? +114C- 20 BB 20 JSR $20BB +114F- D8 CLD +1150- BD C6 C6 LDA $C6C6,X +1153- AC D9 BD LDY $BDD9 +1156- B9 C3 AC LDA $ACC3,Y +1159- 20 BD BD JSR $BDBD +115C- 20 AD B1 JSR $B1AD +115F- B0 B0 BCS $1111 +1161- 20 A8 B2 JSR $B2A8 +1164- A7 ??? +1165- D3 ??? +1166- 20 C3 CF JSR $CFC3 +1169- CD D0 CC CMP $CCD0 +116C- C5 CD CMP $CD +116E- C5 CE CMP $CE +1170- D4 ??? +1171- A9 8D LDA #$8D +1173- AA TAX +1174- 8D AA AA STA $AAAA +1177- 20 D3 D2 JSR $D2D3 +117A- C5 CD CMP $CD +117C- B1 B6 LDA ($B6),Y +117E- 20 C4 C9 JSR $C9C4 +1181- D6 C9 DEC $C9,X +1183- C4 C5 CPY $C5 +1185- D3 ??? +1186- 20 D4 D7 JSR $D7D4 +1189- CF ??? +118A- 20 CE D5 JSR $D5CE +118D- CD C2 C5 CMP $C5C2 +1190- D2 D3 CMP ($D3) +1192- 20 D4 C8 JSR $C8D4 +1195- C1 D4 CMP ($D4,X) +1197- 20 C1 D2 JSR $D2C1 +119A- C5 8D CMP $8D +119C- AA TAX +119D- AA TAX +119E- 20 D3 C9 JSR $C9D3 +11A1- C7 ??? +11A2- CE C5 C4 DEC $C4C5 +11A5- AC 20 D4 LDY $D420 +11A8- C8 INY +11A9- C5 CE CMP $CE +11AB- 20 D2 C5 JSR $C5D2 +11AE- D4 ??? 
+11AF- D5 D2 CMP $D2,X +11B1- CE D3 20 DEC $20D3 +11B4- D4 ??? +11B5- C8 INY +11B6- C5 20 CMP $20 +11B8- D2 C5 CMP ($C5) +11BA- CD C1 C9 CMP $C9C1 +11BD- CE C4 C5 DEC $C5C4 +11C0- D2 AE CMP ($AE) +11C2- 8D AA 8D STA $8DAA +11C5- AA TAX +11C6- AA TAX +11C7- 20 CD C1 JSR $C1CD +11CA- C3 ??? +11CB- D2 CF CMP ($CF) +11CD- BA TSX +11CE- 20 DF D2 JSR $D2DF +11D1- C5 CD CMP $CD +11D3- B1 B6 LDA ($B6),Y +11D5- 20 A3 AD JSR $ADA3 +11D8- B1 B0 LDA ($B0),Y +11DA- B0 B0 BCS $118C +11DC- BB ??? +11DD- A3 ??? +11DE- B1 B3 LDA ($B3),Y +11E0- 8D AA 8D STA $8DAA +11E3- A0 CC LDY #$CC +11E5- C4 C1 CPY $C1 +11E7- A0 A3 LDY #$A3 +11E9- BE AD B1 LDX $B1AD,Y +11EC- B0 B0 BCS $119E +11EE- B0 8D BCS $117D +11F0- A0 D0 LDY #$D0 +11F2- C8 INY +11F3- C1 8D CMP ($8D,X) +11F5- A0 CC LDY #$CC +11F7- C4 C1 CPY $C1 +11F9- A0 A3 LDY #$A3 +11FB- BC AD B1 LDY $B1AD,X +11FE- B0 B0 BCS $11B0 +1200- B0 8D BCS $118F +1202- A0 D0 LDY #$D0 +1204- C8 INY +1205- C1 8D CMP ($8D,X) +1207- A0 CC LDY #$CC +1209- C4 C1 CPY $C1 +120B- A0 A3 LDY #$A3 +120D- BE B1 B3 LDX $B3B1,Y +1210- 8D A0 D0 STA $D0A0 +1213- C8 INY +1214- C1 8D CMP ($8D,X) +1216- A0 CC LDY #$CC +1218- C4 C1 CPY $C1 +121A- A0 A3 LDY #$A3 +121C- BC B1 B3 LDY $B3B1,X +121F- 8D A0 D0 STA $D0A0 +1222- C8 INY +1223- C1 8D CMP ($8D,X) +1225- A0 CA LDY #$CA +1227- D3 ??? +1228- D2 A0 CMP ($A0) +122A- D3 ??? +122B- D2 C5 CMP ($C5) +122D- CD B1 B6 CMP $B6B1 +1230- 8D AA 20 STA $20AA +1233- C2 ??? +1234- D2 CB CMP ($CB) +1236- 20 BB 20 JSR $20BB +1239- D8 CLD +123A- BD C6 C6 LDA $C6C6,X +123D- AC D9 BD LDY $BDD9 +1240- C6 B4 DEC $B4 +1242- AC 20 BD LDY $BD20 +1245- BD 20 AD LDA $AD20,X +1248- B1 B2 LDA ($B2),Y +124A- 20 A8 B2 JSR $B2A8 +124D- A7 ??? +124E- D3 ??? +124F- 20 C3 CF JSR $CFC3 +1252- CD D0 CC CMP $CCD0 +1255- C5 CD CMP $CD +1257- C5 CE CMP $CE +1259- D4 ??? 
+125A- A9 8D LDA #$8D +125C- AA TAX +125D- 8D AA AA STA $AAAA +1260- 20 D5 D2 JSR $D2D5 +1263- C5 CD CMP $CD +1265- B1 B6 LDA ($B6),Y +1267- 20 C4 C9 JSR $C9C4 +126A- D6 C9 DEC $C9,X +126C- C4 C5 CPY $C5 +126E- D3 ??? +126F- 20 D4 D7 JSR $D7D4 +1272- CF ??? +1273- 20 CE D5 JSR $D5CE +1276- CD C2 C5 CMP $C5C2 +1279- D2 D3 CMP ($D3) +127B- 20 D4 C8 JSR $C8D4 +127E- C1 D4 CMP ($D4,X) +1280- 20 C1 D2 JSR $D2C1 +1283- C5 8D CMP $8D +1285- AA TAX +1286- AA TAX +1287- 20 D5 CE JSR $CED5 +128A- D3 ??? +128B- C9 C7 CMP #$C7 +128D- CE C5 C4 DEC $C4C5 +1290- AC 20 D4 LDY $D420 +1293- C8 INY +1294- C5 CE CMP $CE +1296- 20 D2 C5 JSR $C5D2 +1299- D4 ??? +129A- D5 D2 CMP $D2,X +129C- CE D3 20 DEC $20D3 +129F- D4 ??? +12A0- C8 INY +12A1- C5 20 CMP $20 +12A3- D2 C5 CMP ($C5) +12A5- CD C1 C9 CMP $C9C1 +12A8- CE C4 C5 DEC $C5C4 +12AB- D2 AE CMP ($AE) +12AD- 8D AA 8D STA $8DAA +12B0- AA TAX +12B1- AA TAX +12B2- 20 CD C1 JSR $C1CD +12B5- C3 ??? +12B6- D2 CF CMP ($CF) +12B8- BA TSX +12B9- 20 DF D2 JSR $D2DF +12BC- C5 CD CMP $CD +12BE- B1 B6 LDA ($B6),Y +12C0- 20 A3 B1 JSR $B1A3 +12C3- B0 B0 BCS $1275 +12C5- B0 BB BCS $1282 +12C7- A3 ??? +12C8- B1 B3 LDA ($B3),Y +12CA- BB ??? +12CB- D5 8D CMP $8D,X +12CD- AA TAX +12CE- 8D A0 CC STA $CCA0 +12D1- C4 C1 CPY $C1 +12D3- A0 A3 LDY #$A3 +12D5- BE B1 B0 LDX $B0B1,Y +12D8- B0 B0 BCS $128A +12DA- 8D A0 D0 STA $D0A0 +12DD- C8 INY +12DE- C1 8D CMP ($8D,X) +12E0- A0 CC LDY #$CC +12E2- C4 C1 CPY $C1 +12E4- A0 A3 LDY #$A3 +12E6- BC B1 B0 LDY $B0B1,X +12E9- B0 B0 BCS $129B +12EB- 8D A0 D0 STA $D0A0 +12EE- C8 INY +12EF- C1 8D CMP ($8D,X) +12F1- A0 CC LDY #$CC +12F3- C4 C1 CPY $C1 +12F5- A0 A3 LDY #$A3 +12F7- BE B1 B3 LDX $B3B1,Y +12FA- 8D A0 D0 STA $D0A0 +12FD- C8 INY +12FE- C1 8D CMP ($8D,X) +1300- A0 CC LDY #$CC +1302- C4 C1 CPY $C1 +1304- A0 A3 LDY #$A3 +1306- BC B1 B3 LDY $B3B1,X +1309- 8D A0 D0 STA $D0A0 +130C- C8 INY +130D- C1 8D CMP ($8D,X) +130F- A0 CA LDY #$CA +1311- D3 ??? 
+1312- D2 A0 CMP ($A0) +1314- D5 D2 CMP $D2,X +1316- C5 CD CMP $CD +1318- B1 B6 LDA ($B6),Y +131A- 8D AA 20 STA $20AA +131D- C2 ??? +131E- D2 CB CMP ($CB) +1320- 20 BB 20 JSR $20BB +1323- D8 CLD +1324- BD B0 B0 LDA $B0B0,X +1327- AC D9 BD LDY $BDD9 +132A- B0 C3 BCS $12EF +132C- AC 20 BD LDY $BD20 +132F- BD 20 B1 LDA $B120,X +1332- B2 8D LDA ($8D) +1334- AA TAX +1335- 8D AA AA STA $AAAA +1338- 20 C3 CD JSR $CDC3 +133B- D0 B1 BNE $12EE +133D- B6 20 LDX $20,Y +133F- C3 ??? +1340- CF ??? +1341- CD D0 C1 CMP $C1D0 +1344- D2 C5 CMP ($C5) +1346- D3 ??? +1347- 20 D4 D7 JSR $D7D4 +134A- CF ??? +134B- 20 B1 B6 JSR $B6B1 +134E- C2 ??? +134F- C9 D4 CMP #$D4 +1351- 20 D6 C1 JSR $C1D6 +1354- CC D5 C5 CPY $C5D5 +1357- D3 ??? +1358- AC 20 D4 LDY $D420 +135B- C8 INY +135C- C5 CE CMP $CE +135E- 8D AA AA STA $AAAA +1361- 20 C1 CC JSR $CCC1 +1364- D4 ??? +1365- C5 D2 CMP $D2 +1367- D3 ??? +1368- 20 D4 C8 JSR $C8D4 +136B- C5 20 CMP $20 +136D- D2 C5 CMP ($C5) +136F- C7 ??? +1370- C9 D3 CMP #$D3 +1372- D4 ??? +1373- C5 D2 CMP $D2 +1375- 20 C6 CC JSR $CCC6 +1378- C1 C7 CMP ($C7,X) +137A- D3 ??? +137B- 20 C1 D3 JSR $D3C1 +137E- 20 C6 CF JSR $CFC6 +1381- CC CC CF CPY $CFCC +1384- D7 ??? +1385- D3 ??? +1386- BA TSX +1387- 8D AA 8D STA $8DAA +138A- AA TAX +138B- AA TAX +138C- 20 CD C1 JSR $C1CD +138F- C3 ??? +1390- D2 CF CMP ($CF) +1392- BA TSX +1393- 20 DF C3 JSR $C3DF +1396- CD D0 B1 CMP $B1D0 +1399- B6 20 LDX $20,Y +139B- A3 ??? +139C- B1 B0 LDA ($B0),Y +139E- B2 B3 LDA ($B3) +13A0- BB ??? +13A1- A3 ??? +13A2- B1 B2 LDA ($B2),Y +13A4- B3 ??? +13A5- 8D AA 8D STA $8DAA +13A8- 8D AA 20 STA $20AA +13AB- C9 C6 CMP #$C6 +13AD- 20 D7 B1 JSR $B1D7 +13B0- 20 A6 20 JSR $20A6 +13B3- D7 ??? +13B4- B2 20 LDA ($20) +13B6- C1 D2 CMP ($D2,X) +13B8- C5 20 CMP $20 +13BA- B2 D3 LDA ($D3) +13BC- 20 C3 CF JSR $CFC3 +13BF- CD D0 CC CMP $CCD0 +13C2- C5 CD CMP $CD +13C4- C5 CE CMP $CE +13C6- D4 ??? 
+13C7- 8D AA 20 STA $20AA +13CA- 20 20 20 JSR $2020 +13CD- C9 C6 CMP #$C6 +13CF- 20 D7 B1 JSR $B1D7 +13D2- 20 BD 20 JSR $20BD +13D5- D7 ??? +13D6- B2 20 LDA ($20) +13D8- DA PHX +13D9- BD B1 AC LDA $ACB1,X +13DC- CE BD B0 DEC $B0BD +13DF- 8D AA 20 STA $20AA +13E2- 20 20 20 JSR $2020 +13E5- C9 C6 CMP #$C6 +13E7- 20 D7 B1 JSR $B1D7 +13EA- 20 BE 20 JSR $20BE +13ED- D7 ??? +13EE- B2 20 LDA ($20) +13F0- DA PHX +13F1- BD B0 AC LDA $ACB0,X +13F4- CE BD B0 DEC $B0BD +13F7- 8D AA 20 STA $20AA +13FA- 20 20 20 JSR $2020 +13FD- C9 C6 CMP #$C6 +13FF- 20 D7 B1 JSR $B1D7 +1402- 20 BC 20 JSR $20BC +1405- D7 ??? +1406- B2 20 LDA ($20) +1408- DA PHX +1409- BD B0 AC LDA $ACB0,X +140C- CE BD B1 DEC $B1BD +140F- 8D AA 20 STA $20AA +1412- C5 CC CMP $CC +1414- D3 ??? +1415- C5 8D CMP $8D +1417- AA TAX +1418- 20 20 20 JSR $2020 +141B- 20 C9 C6 JSR $C6C9 +141E- 20 D7 B1 JSR $B1D7 +1421- 20 BD 20 JSR $20BD +1424- D7 ??? +1425- B2 20 LDA ($20) +1427- DA PHX +1428- BD B1 AC LDA $ACB1,X +142B- C3 ??? +142C- BD B1 8D LDA $8DB1,X +142F- AA TAX +1430- 20 20 20 JSR $2020 +1433- 20 C9 C6 JSR $C6C9 +1436- 20 D7 B1 JSR $B1D7 +1439- 20 BE 20 JSR $20BE +143C- D7 ??? +143D- B2 20 LDA ($20) +143F- DA PHX +1440- BD B0 AC LDA $ACB0,X +1443- C3 ??? +1444- BD B1 8D LDA $8DB1,X +1447- AA TAX +1448- 20 20 20 JSR $2020 +144B- 20 C9 C6 JSR $C6C9 +144E- 20 D7 B1 JSR $B1D7 +1451- 20 BC 20 JSR $20BC +1454- D7 ??? +1455- B2 20 LDA ($20) +1457- DA PHX +1458- BD B0 AC LDA $ACB0,X +145B- C3 ??? +145C- BD B0 8D LDA $8DB0,X +145F- AA TAX +1460- 8D A0 CC STA $CCA0 +1463- C4 C1 CPY $C1 +1465- A0 A3 LDY #$A3 +1467- BE B1 B0 LDX $B0B1,Y +146A- B2 B3 LDA ($B3) +146C- 8D A0 D0 STA $D0A0 +146F- C8 INY +1470- C1 8D CMP ($8D,X) +1472- A0 CC LDY #$CC +1474- C4 C1 CPY $C1 +1476- A0 A3 LDY #$A3 +1478- BC B1 B0 LDY $B0B1,X +147B- B2 B3 LDA ($B3) +147D- 8D A0 D0 STA $D0A0 +1480- C8 INY +1481- C1 8D CMP ($8D,X) +1483- A0 CC LDY #$CC +1485- C4 C1 CPY $C1 +1487- A0 A3 LDY #$A3 +1489- BE B1 B2 LDX $B2B1,Y +148C- B3 ??? 
+148D- 8D A0 D0 STA $D0A0 +1490- C8 INY +1491- C1 8D CMP ($8D,X) +1493- A0 CC LDY #$CC +1495- C4 C1 CPY $C1 +1497- A0 A3 LDY #$A3 +1499- BC B1 B2 LDY $B2B1,X +149C- B3 ??? +149D- 8D A0 D0 STA $D0A0 +14A0- C8 INY +14A1- C1 8D CMP ($8D,X) +14A3- A0 CA LDY #$CA +14A5- D3 ??? +14A6- D2 A0 CMP ($A0) +14A8- C3 ??? +14A9- CD D0 B1 CMP $B1D0 +14AC- B6 8D LDX $8D,Y +14AE- AA TAX +14AF- 20 C2 D2 JSR $D2C2 +14B2- CB ??? +14B3- 20 BB 20 JSR $20BB +14B6- D0 BD BNE $1475 +14B8- B3 ??? +14B9- B1 AC LDA ($AC),Y +14BB- 20 BD BD JSR $BDBD +14BE- 20 B1 B1 JSR $B1B1 +14C1- B0 B0 BCS $1473 +14C3- B0 B1 BCS $1476 +14C5- B0 B0 BCS $1477 +14C7- 8D AA 20 STA $20AA +14CA- 20 20 20 JSR $2020 +14CD- 20 20 20 JSR $2020 +14D0- 20 20 20 JSR $2020 +14D3- 20 20 20 JSR $2020 +14D6- DE DE 20 DEC $20DE,X +14D9- CE 20 20 DEC $2020 +14DC- 20 20 20 JSR $2020 +14DF- DA PHX +14E0- C3 ??? +14E1- 8D AA 20 STA $20AA +14E4- CE 20 A8 DEC $A820 +14E7- D3 ??? +14E8- C9 C7 CMP #$C7 +14EA- CE A9 20 DEC $20A9 +14ED- BD B0 AC LDA $ACB0,X +14F0- 20 DA 20 JSR $20DA +14F3- A8 TAY +14F4- DA PHX +14F5- C5 D2 CMP $D2 +14F7- CF ??? +14F8- A9 20 LDA #$20 +14FA- BD 20 B0 LDA $B020,X +14FD- AC 20 C3 LDY $C320 +1500- 20 A8 C3 JSR $C3A8 +1503- C1 D2 CMP ($D2,X) +1505- D2 D9 CMP ($D9) +1507- A9 20 LDA #$20 +1509- BD 20 B0 LDA $B020,X +150C- 8D AA 8D STA $8DAA +150F- AA TAX +1510- AA TAX +1511- 20 D2 CE JSR $CED2 +1514- C4 B8 CPY $B8 +1516- 20 D3 C9 JSR $C9D3 +1519- CD D0 CC CMP $CCD0 +151C- D9 20 D2 CMP $D220,Y +151F- C5 D4 CMP $D4 +1521- D5 D2 CMP $D2,X +1523- CE D3 20 DEC $20D3 +1526- C1 20 CMP ($20,X) +1528- D0 D3 BNE $14FD +152A- C5 D5 CMP $D5 +152C- C4 CF CPY $CF +152E- AD D2 C1 LDA $C1D2 +1531- CE C4 CF DEC $CFC4 +1534- CD 8D AA CMP $AA8D +1537- AA TAX +1538- 20 CE D5 JSR $D5CE +153B- CD C2 C5 CMP $C5C2 +153E- D2 20 CMP ($20) +1540- C2 ??? +1541- C5 D4 CMP $D4 +1543- D7 ??? 
+1544- C5 C5 CMP $C5 +1546- CE 20 B0 DEC $B020 +1549- 20 C1 CE JSR $CEC1 +154C- C4 20 CPY $20 +154E- B2 B5 LDA ($B5) +1550- B5 20 LDA $20,X +1552- C9 CE CMP #$CE +1554- 20 C1 AE JSR $AEC1 +1557- 8D AA 8D STA $8DAA +155A- AA TAX +155B- AA TAX +155C- 20 CE CF JSR $CFCE +155F- 20 CD C1 JSR $C1CD +1562- C3 ??? +1563- D2 CF CMP ($CF) +1565- 20 AA AA JSR $AAAA +1568- 8D AA 8D STA $8DAA +156B- A0 CA LDY #$CA +156D- D3 ??? +156E- D2 A0 CMP ($A0) +1570- D2 CE CMP ($CE) +1572- C4 B8 CPY $B8 +1574- 8D AA 20 STA $20AA +1577- C2 ??? +1578- D2 CB CMP ($CB) +157A- 20 BB 20 JSR $20BB +157D- D2 D5 CMP ($D5) +157F- CE 20 D4 DEC $D420 +1582- C8 INY +1583- C9 D3 CMP #$D3 +1585- 20 C1 20 JSR $20C1 +1588- C6 C5 DEC $C5 +158A- D7 ??? +158B- 20 D4 C9 JSR $C9D4 +158E- CD C5 D3 CMP $D3C5 +1591- 20 D4 CF JSR $CFD4 +1594- 20 D3 C5 JSR $C5D3 +1597- C5 20 CMP $20 +1599- C1 20 CMP ($20,X) +159B- C3 ??? +159C- C8 INY +159D- C1 CE CMP ($CE,X) +159F- C7 ??? +15A0- C5 8D CMP $8D +15A2- AA TAX +15A3- 8D AA 8D STA $8DAA +15A6- AA TAX +15A7- AA TAX +15A8- 20 D2 C1 JSR $C1D2 +15AB- CE C4 C2 DEC $C2C4 +15AE- 20 D2 C5 JSR $C5D2 +15B1- D4 ??? +15B2- D5 D2 CMP $D2,X +15B4- CE D3 20 DEC $20D3 +15B7- C1 20 CMP ($20,X) +15B9- D6 C1 DEC $C1,X +15BB- CC D5 C5 CPY $C5D5 +15BE- 20 C2 C5 JSR $C5C2 +15C1- D4 ??? +15C2- D7 ??? +15C3- C5 C5 CMP $C5 +15C5- CE 20 C1 DEC $C120 +15C8- 20 CC CF JSR $CFCC +15CB- D7 ??? +15CC- 20 C1 CE JSR $CEC1 +15CF- C4 8D CPY $8D +15D1- AA TAX +15D2- AA TAX +15D3- 20 C8 C9 JSR $C9C8 +15D6- C7 ??? +15D7- C8 INY +15D8- 20 C2 CF JSR $CFC2 +15DB- D5 CE CMP $CE,X +15DD- C4 C1 CPY $C1 +15DF- D2 D9 CMP ($D9) +15E1- 20 D0 D5 JSR $D5D0 +15E4- D3 ??? +15E5- C8 INY +15E6- C5 C4 CMP $C4 +15E8- 20 D4 CF JSR $CFD4 +15EB- 20 D4 C8 JSR $C8D4 +15EE- C5 20 CMP $20 +15F0- D3 ??? +15F1- D4 ??? +15F2- C1 C3 CMP ($C3,X) +15F4- CB ??? +15F5- AE 20 D3 LDX $D320 +15F8- C9 CE CMP #$CE +15FA- C3 ??? +15FB- C5 20 CMP $20 +15FD- D4 ??? 
+15FE- C8 INY +15FF- C9 D3 CMP #$D3 +1601- 8D AA AA STA $AAAA +1604- 20 D2 C5 JSR $C5D2 +1607- D4 ??? +1608- D5 D2 CMP $D2,X +160A- CE D3 20 DEC $20D3 +160D- C1 20 CMP ($20,X) +160F- C2 ??? +1610- D9 D4 C5 CMP $C5D4,Y +1613- AC 20 D4 LDY $D420 +1616- C8 INY +1617- C5 20 CMP $20 +1619- D2 C1 CMP ($C1) +161B- CE C7 C5 DEC $C5C7 +161E- 20 CD D5 JSR $D5CD +1621- D3 ??? +1622- D4 ??? +1623- 20 C2 C5 JSR $C5C2 +1626- 20 B0 AE JSR $AEB0 +1629- AE B2 B5 LDX $B5B2 +162C- B5 AE LDA $AE,X +162E- 8D AA 8D STA $8DAA +1631- AA TAX +1632- AA TAX +1633- 20 CD C1 JSR $C1CD +1636- C3 ??? +1637- D2 CF CMP ($CF) +1639- BA TSX +163A- 20 DF D2 JSR $D2DF +163D- C1 CE CMP ($CE,X) +163F- C4 C2 CPY $C2 +1641- 20 A3 B5 JSR $B5A3 +1644- B0 BB BCS $1601 +1646- A3 ??? +1647- B1 B0 LDA ($B0),Y +1649- B0 8D BCS $15D8 +164B- AA TAX +164C- 8D A0 CC STA $CCA0 +164F- C4 C1 CPY $C1 +1651- A0 A3 LDY #$A3 +1653- B5 B0 LDA $B0,X +1655- 8D A0 D0 STA $D0A0 +1658- C8 INY +1659- C1 8D CMP ($8D,X) +165B- A0 CC LDY #$CC +165D- C4 C1 CPY $C1 +165F- A0 A3 LDY #$A3 +1661- B1 B0 LDA ($B0),Y +1663- B0 A0 BCS $1605 +1665- BB ??? +1666- 20 C8 C9 JSR $C9C8 +1669- C7 ??? +166A- C8 INY +166B- 20 C2 CF JSR $CFC2 +166E- D5 CE CMP $CE,X +1670- C4 8D CPY $8D +1672- A0 D0 LDY #$D0 +1674- C8 INY +1675- C1 8D CMP ($8D,X) +1677- A0 CA LDY #$CA +1679- D3 ??? +167A- D2 A0 CMP ($A0) +167C- D2 C1 CMP ($C1) +167E- CE C4 C2 DEC $C2C4 +1681- 8D AA 20 STA $20AA +1684- C2 ??? +1685- D2 CB CMP ($CB) +1687- 20 BB 20 JSR $20BB +168A- D2 D5 CMP ($D5) +168C- CE 20 C1 DEC $C120 +168F- 20 C6 C5 JSR $C5C6 +1692- D7 ??? +1693- 20 D4 C9 JSR $C9D4 +1696- CD C5 D3 CMP $D3C5 +1699- 20 D4 CF JSR $CFD4 +169C- 20 D3 C5 JSR $C5D3 +169F- C5 20 CMP $20 +16A1- D4 ??? +16A2- C8 INY +16A3- C5 20 CMP $20 +16A5- C3 ??? +16A6- C8 INY +16A7- C1 CE CMP ($CE,X) +16A9- C7 ??? 
+16AA- C5 20 CMP $20 +16AC- C9 CE CMP #$CE +16AE- 20 C1 8D JSR $8DC1 +16B1- AA TAX +16B2- 8D AA AA STA $AAAA +16B5- 20 CD D5 JSR $D5CD +16B8- CC B8 20 CPY $20B8 +16BB- CD D5 CC CMP $CCD5 +16BE- D4 ??? +16BF- C9 D0 CMP #$D0 +16C1- CC C9 C5 CPY $C5C9 +16C4- D3 ??? +16C5- 20 D4 D7 JSR $D7D4 +16C8- CF ??? +16C9- 20 B8 AD JSR $ADB8 +16CC- C2 ??? +16CD- C9 D4 CMP #$D4 +16CF- 20 CE D5 JSR $D5CE +16D2- CD C2 C5 CMP $C5C2 +16D5- D2 D3 CMP ($D3) +16D7- 20 C1 CE JSR $CEC1 +16DA- C4 8D CPY $8D +16DC- AA TAX +16DD- AA TAX +16DE- 20 D2 C5 JSR $C5D2 +16E1- D4 ??? +16E2- D5 D2 CMP $D2,X +16E4- CE D3 20 DEC $20D3 +16E7- C1 20 CMP ($20,X) +16E9- B1 B6 LDA ($B6),Y +16EB- C2 ??? +16EC- C9 D4 CMP #$D4 +16EE- 20 D2 C5 JSR $C5D2 +16F1- D3 ??? +16F2- D5 CC CMP $CC,X +16F4- D4 ??? +16F5- AC 20 C1 LDY $C120 +16F8- CC CC 20 CPY $20CC +16FB- D5 CE CMP $CE,X +16FD- D3 ??? +16FE- C9 C7 CMP #$C7 +1700- CE C5 C4 DEC $C4C5 +1703- AE 8D AA LDX $AA8D +1706- AA TAX +1707- 20 CD C1 JSR $C1CD +170A- C3 ??? +170B- D2 CF CMP ($CF) +170D- BA TSX +170E- 20 DF CD JSR $CDDF +1711- D5 CC CMP $CC,X +1713- B8 CLV +1714- 20 A3 B1 JSR $B1A3 +1717- B0 B0 BCS $16C9 +1719- BB ??? +171A- A3 ??? +171B- B2 B0 LDA ($B0) +171D- B0 8D BCS $16AC +171F- AA TAX +1720- 8D AA 8D STA $8DAA +1723- A0 CC LDY #$CC +1725- C4 C1 CPY $C1 +1727- A0 A3 LDY #$A3 +1729- B1 B0 LDA ($B0),Y +172B- B0 8D BCS $16BA +172D- A0 D0 LDY #$D0 +172F- C8 INY +1730- C1 8D CMP ($8D,X) +1732- A0 CC LDY #$CC +1734- C4 C1 CPY $C1 +1736- A0 A3 LDY #$A3 +1738- B2 B0 LDA ($B0) +173A- B0 8D BCS $16C9 +173C- A0 D0 LDY #$D0 +173E- C8 INY +173F- C1 8D CMP ($8D,X) +1741- A0 CA LDY #$CA +1743- D3 ??? +1744- D2 A0 CMP ($A0) +1746- CD D5 CC CMP $CCD5 +1749- B8 CLV +174A- 8D AA 20 STA $20AA +174D- C2 ??? 
+174E- D2 CB CMP ($CB) +1750- 20 BB 20 JSR $20BB +1753- D8 CLD +1754- BD B4 C5 LDA $C5B4,X +1757- AC D9 BD LDY $BDD9 +175A- B2 B0 LDA ($B0) +175C- AC 20 BD LDY $BD20 +175F- BD 20 B2 LDA $B220,X +1762- B0 B0 BCS $1714 +1764- B0 B0 BCS $1716 +1766- 8D AA 8D STA $8DAA +1769- AA TAX +176A- AA TAX +176B- 20 C4 C9 JSR $C9C4 +176E- D6 B8 DEC $B8,X +1770- 20 C4 C9 JSR $C9C4 +1773- D6 C9 DEC $C9,X +1775- C4 C5 CPY $C5 +1777- D3 ??? +1778- 20 CF CE JSR $CECF +177B- C5 20 CMP $20 +177D- B8 CLV +177E- C2 ??? +177F- C9 D4 CMP #$D4 +1781- 20 CE D5 JSR $D5CE +1784- CD C2 C5 CMP $C5C2 +1787- D2 20 CMP ($20) +1789- C2 ??? +178A- D9 8D AA CMP $AA8D,Y +178D- AA TAX +178E- 20 C1 CE JSR $CEC1 +1791- CF ??? +1792- D4 ??? +1793- C8 INY +1794- C5 D2 CMP $D2 +1796- 20 A8 D5 JSR $D5A8 +1799- CE D3 C9 DEC $C9D3 +179C- C7 ??? +179D- CE C5 C4 DEC $C4C5 +17A0- A9 AC LDA #$AC +17A2- 20 C1 CE JSR $CEC1 +17A5- C4 20 CPY $20 +17A7- D2 C5 CMP ($C5) +17A9- D4 ??? +17AA- D5 D2 CMP $D2,X +17AC- CE D3 8D DEC $8DD3 +17AF- AA TAX +17B0- AA TAX +17B1- 20 C1 CE JSR $CEC1 +17B4- 20 B8 C2 JSR $C2B8 +17B7- C9 D4 CMP #$D4 +17B9- 20 D1 D5 JSR $D5D1 +17BC- CF ??? +17BD- D4 ??? +17BE- C9 C5 CMP #$C5 +17C0- CE D4 AE DEC $AED4 +17C3- 8D AA AA STA $AAAA +17C6- 8D AA AA STA $AAAA +17C9- 20 D4 C8 JSR $C8D4 +17CC- C5 20 CMP $20 +17CE- D1 D5 CMP ($D5),Y +17D0- CF ??? +17D1- D4 ??? +17D2- C9 C5 CMP #$C5 +17D4- CE D4 20 DEC $20D4 +17D7- C9 D3 CMP #$D3 +17D9- 20 D3 D4 JSR $D4D3 +17DC- CF ??? +17DD- D2 C5 CMP ($C5) +17DF- C4 20 CPY $20 +17E1- C9 CE CMP #$CE +17E3- 20 C1 AC JSR $ACC1 +17E6- 20 D7 C8 JSR $C8D7 +17E9- C9 CC CMP #$CC +17EB- C5 8D CMP $8D +17ED- AA TAX +17EE- AA TAX +17EF- 20 D4 C8 JSR $C8D4 +17F2- C5 20 CMP $20 +17F4- D2 C5 CMP ($C5) +17F6- CD C1 C9 CMP $C9C1 +17F9- CE C4 C5 DEC $C5C4 +17FC- D2 20 CMP ($20) +17FE- C9 D3 CMP #$D3 +1800- 20 D2 C5 JSR $C5D2 +1803- D4 ??? 
+1804- D5 D2 CMP $D2,X +1806- CE C5 C4 DEC $C4C5 +1809- 20 C9 CE JSR $CEC9 +180C- 20 D8 AE JSR $AED8 +180F- 8D AA AA STA $AAAA +1812- 20 CD C1 JSR $C1CD +1815- C3 ??? +1816- D2 CF CMP ($CF) +1818- BA TSX +1819- 20 DF C4 JSR $C4DF +181C- C9 D6 CMP #$D6 +181E- B8 CLV +181F- 20 A3 B2 JSR $B2A3 +1822- B0 B0 BCS $17D4 +1824- BB ??? +1825- A3 ??? +1826- B1 B0 LDA ($B0),Y +1828- 8D AA 8D STA $8DAA +182B- AA TAX +182C- 8D A0 CC STA $CCA0 +182F- C4 C1 CPY $C1 +1831- A0 A3 LDY #$A3 +1833- B2 B0 LDA ($B0) +1835- B0 8D BCS $17C4 +1837- A0 D0 LDY #$D0 +1839- C8 INY +183A- C1 8D CMP ($8D,X) +183C- A0 CC LDY #$CC +183E- C4 C1 CPY $C1 +1840- A0 A3 LDY #$A3 +1842- B1 B0 LDA ($B0),Y +1844- 8D A0 D0 STA $D0A0 +1847- C8 INY +1848- C1 8D CMP ($8D,X) +184A- A0 CA LDY #$CA +184C- D3 ??? +184D- D2 20 CMP ($20) +184F- C4 C9 CPY $C9 +1851- D6 B8 DEC $B8,X +1853- 8D AA C2 STA $C2AA +1856- D2 CB CMP ($CB) +1858- 20 BB 20 JSR $20BB +185B- C1 BD CMP ($BD,X) +185D- B1 B4 LDA ($B4),Y +185F- AC 20 D8 LDY $D820 +1862- BD B0 AC LDA $ACB0,X +1865- 20 BD BD JSR $BDBD +1868- 20 B2 B0 JSR $B0B2 +186B- 20 D2 20 JSR $20D2 +186E- B0 8D BCS $17FD +1870- AA TAX +1871- 8D AA AA STA $AAAA +1874- 20 D2 CE JSR $CED2 +1877- C4 B1 CPY $B1 +1879- B6 20 LDX $20,Y +187B- D2 C5 CMP ($C5) +187D- D4 ??? +187E- D5 D2 CMP $D2,X +1880- CE D3 20 DEC $20D3 +1883- C1 20 CMP ($20,X) +1885- D0 D3 BNE $185A +1887- C5 D5 CMP $D5 +1889- C4 CF CPY $CF +188B- AD D2 C1 LDA $C1D2 +188E- CE C4 CF DEC $CFC4 +1891- CD 20 CE CMP $CE20 +1894- D5 CD CMP $CD,X +1896- C2 ??? +1897- C5 D2 CMP $D2 +1899- 20 A8 D7 JSR $D7A8 +189C- CF ??? +189D- D2 C4 CMP ($C4) +189F- A9 8D LDA #$8D +18A1- AA TAX +18A2- AA TAX +18A3- 20 C2 C5 JSR $C5C2 +18A6- D4 ??? +18A7- D7 ??? 
+18A8- C5 C5 CMP $C5 +18AA- CE 20 B0 DEC $B020 +18AD- 20 C1 CE JSR $CEC1 +18B0- C4 20 CPY $20 +18B2- B6 B5 LDX $B5,Y +18B4- B5 B3 LDA $B3,X +18B6- B5 20 LDA $20,X +18B8- A8 TAY +18B9- B0 B0 BCS $186B +18BB- B0 B0 BCS $186D +18BD- AD C6 C6 LDA $C6C6 +18C0- C6 C6 DEC $C6 +18C2- A9 8D LDA #$8D +18C4- AA TAX +18C5- 8D AA AA STA $AAAA +18C8- 20 CE CF JSR $CFCE +18CB- 20 CD C1 JSR $C1CD +18CE- C3 ??? +18CF- D2 CF CMP ($CF) +18D1- 20 AA AA JSR $AAAA +18D4- 8D AA 8D STA $8DAA +18D7- A0 CA LDY #$CA +18D9- D3 ??? +18DA- D2 A0 CMP ($A0) +18DC- D2 CE CMP ($CE) +18DE- C4 B1 CPY $B1 +18E0- B6 8D LDX $8D,Y +18E2- AA TAX +18E3- C2 ??? +18E4- D2 CB CMP ($CB) +18E6- 20 BB 20 JSR $20BB +18E9- C2 ??? +18EA- D2 D5 CMP ($D5) +18EC- CE 20 C1 DEC $C120 +18EF- 20 C6 C5 JSR $C5C6 +18F2- D7 ??? +18F3- 20 D4 C9 JSR $C9D4 +18F6- CD C5 D3 CMP $D3C5 +18F9- 20 D4 CF JSR $CFD4 +18FC- 20 D3 C5 JSR $C5D3 +18FF- C5 20 CMP $20 +1901- D8 CLD +1902- AC D9 8D LDY $8DD9 +1905- AA TAX +1906- 8D AA 8D STA $8DAA +1909- AA TAX +190A- 8D AA 8D STA $8DAA +190D- AA TAX +190E- AA TAX +190F- 20 CC C9 JSR $C9CC +1912- D4 ??? +1913- D4 ??? +1914- CC C5 20 CPY $20C5 +1917- CC D5 CD CPY $CDD5 +191A- D0 D3 BNE $18EF +191C- 20 AD AD JSR $ADAD +191F- 20 CD C9 JSR $C9CD +1922- C4 AF CPY $AF +1924- CC CF D7 CPY $D7CF +1927- 20 CC C5 JSR $C5CC +192A- D6 C5 DEC $C5,X +192C- CC 8D AA CPY $AA8D +192F- 8D AA 8D STA $8DAA +1932- A0 CA LDY #$CA +1934- CD D0 A0 CMP $A0D0 +1937- D2 C5 CMP ($C5) +1939- C5 CE CMP $CE +193B- D4 ??? 
+193C- D2 D9 CMP ($D9) +193E- 8D AA 8D STA $8DAA +1941- AA TAX +1942- E0 E0 CPX #$E0 +1944- E0 E0 CPX #$E0 +1946- E0 E0 CPX #$E0 +1948- E0 E0 CPX #$E0 +194A- E0 E0 CPX #$E0 +194C- E0 E0 CPX #$E0 +194E- E0 E0 CPX #$E0 +1950- E0 E0 CPX #$E0 +1952- E0 E0 CPX #$E0 +1954- E0 E0 CPX #$E0 +1956- E0 E0 CPX #$E0 +1958- E0 E0 CPX #$E0 +195A- E0 E0 CPX #$E0 +195C- E0 E0 CPX #$E0 +195E- E0 E0 CPX #$E0 +1960- AA TAX +1961- 8D AA 20 STA $20AA +1964- 20 20 20 JSR $2020 +1967- 20 20 20 JSR $2020 +196A- 20 C2 CF JSR $CFC2 +196D- D4 ??? +196E- D4 ??? +196F- CF ??? +1970- CD 20 C9 CMP $C920 +1973- CE C3 CC DEC $CCC3 +1976- D5 C4 CMP $C4,X +1978- C5 D3 CMP $D3 +197A- 20 20 20 JSR $2020 +197D- 20 20 20 JSR $2020 +1980- 20 AA 8D JSR $8DAA +1983- AA TAX +1984- AC AC AC LDY $ACAC +1987- AC AC AC LDY $ACAC +198A- AC AC AC LDY $ACAC +198D- AC AC AC LDY $ACAC +1990- AC AC AC LDY $ACAC +1993- AC AC AC LDY $ACAC +1996- AC AC AC LDY $ACAC +1999- AC AC AC LDY $ACAC +199C- AC AC AC LDY $ACAC +199F- AC AC AC LDY $ACAC +19A2- AA TAX +19A3- 8D AA 8D STA $8DAA +19A6- AA TAX +19A7- 8D AA AA STA $AAAA +19AA- 20 C2 CF JSR $CFC2 +19AD- D4 ??? +19AE- D4 ??? +19AF- CF ??? +19B0- CD 20 C9 CMP $C920 +19B3- CE C3 CC DEC $CCC3 +19B6- D5 C4 CMP $C4,X +19B8- C5 D3 CMP $D3 +19BA- 8D AA 8D STA $8DAA +19BD- A0 D0 LDY #$D0 +19BF- D5 D4 CMP $D4,X +19C1- A0 CD LDY #$CD +19C3- C1 D4 CMP ($D4,X) +19C5- C8 INY +19C6- B1 B6 LDA ($B6),Y +19C8- AE CC C9 LDX $C9CC +19CB- C2 ??? +19CC- 8D A0 D0 STA $D0A0 +19CF- D5 D4 CMP $D4,X +19D1- A0 CD LDY #$CD +19D3- C1 D4 CMP ($D4,X) +19D5- C8 INY +19D6- B8 CLV +19D7- AE CC C9 LDX $C9CC +19DA- C2 ??? +19DB- 8D AA 8D STA $8DAA diff --git a/disks/disk2_math8_math16/T.DECS b/disks/disk2_math8_math16/T.DECS new file mode 100644 index 0000000..24c19ae --- /dev/null +++ b/disks/disk2_math8_math16/T.DECS @@ -0,0 +1,20 @@ +*``````````````````````````````* +* DECLARATIONS.PUT * +*- -* +* USED IN CONJUNCTION WITH * +* OTHER PUT FILES. 
DEFINES * +* BASIC DECLARATIONS USED * +* ACROSS DIFFERENT ROUTINES * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +REENTRY EQU $03D0 +SMALLASM EQU $300 ; STADDR FOR SMALL PROGRAMS +* +ADDR1 EQU $06 +ADDR2 EQU $08 +ADDR3 EQU $EB +ADDR4 EQU $ED +ADDR5 EQU $FA +ADDR6 EQU $FC +ADDR7 EQU $FE +* diff --git a/disks/disk2_math8_math16/T.MATH.DEMO b/disks/disk2_math8_math16/T.MATH.DEMO new file mode 100644 index 0000000..7264537 --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH.DEMO @@ -0,0 +1,263 @@ +* +******************************** +* * +* -< MATH DEMO >- * +* * +* VERSION 00.00.01 * +* * +* 03/04/1980 * +* * +******************************** +* * +* NATHAN D. RIGGS * +* NATHAN.RIGGS@OUTLOOK.COM * +* * +******************************** +* +** ASSEMBLER DIRECTIVES +* + CYC AVE + EXP ONLY + DSK MATH.DEMO +* +*``````````````````````````````* +* TOP INCLUDES (PUTS, MACROS) * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* + PUT DECS +*USE MATH.MAC ; CAN'T USE RIGHT NOW +* BECAUSE OF MEMORY LIMITATIONS +* + PUT MATH.HOOKS +* + OBJ $BFE0 + ORG $6000 +* +*``````````````````````````````* +* PROGRAM MAIN BODY * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +* +* +** BIG LUMPS -- HIGH LEVEL CODE +* +* +* +** ADD16 ADDS TWO 16-BIT NUMBERS AND +** RETURNS THE RESULT IN Y(LOW) AND X (HIGH). 
+* +** MACRO: _ADD16 #1000;#2000 +* + LDA #>1000 ; ADD1, HIGH + PHA + LDA #<1000 ; ADD1, LOW + PHA + LDA #>2000 ; ADD2, HIGH + PHA + LDA #<2000 ; ADD2, LOW + PHA + JSR ADD16 +* BRK ; X=0B, Y=B8, == 3000 +* +** SUB16 SUBTRACTS ONE 16-BIT NUMBER +** FROM ANOTHER, AND RETURNS RESULT IN +** Y (LOW) AND X (HIGH) +* +** MACRO: _SUB16 #2000;#1000 +* + LDA #>2000 ; MINUEND + PHA + LDA #<2000 + PHA + LDA #>1000 ; SUBTRAHEND + PHA + LDA #<1000 + PHA + JSR SUB16 +* BRK ; X = 03, Y = E8, == 1000 +* +** MUL16 MULTIPLES TWO VALUES AND RETURNS THE +** PRODUCT OVER THE STACK (LOW, HIGH) +* +** MACRO: _MUL16 #1000;#3 +* + LDA #>1000 + PHA + LDA #<1000 ; MULTIPLICAND + PHA + LDA #>3 + PHA + LDA #<3 ; MULTIPLIER + PHA + JSR MUL16 +* BRK ; X=0B, Y=08, == 3000 +* +** UDIV16 DVIDES ONE UNSIGNED NUMBER BY ANOTHER +** UNSIGNED NUMBER, THEN RETURNS THE RESULT OVER +** THE STACK (16-BIT). +* +** MACRO: _DIV16 #30000;#3000;U +* + LDA #>30000 + PHA + LDA #<30000 + PHA + LDA #>3000 + PHA + LDA #<3000 + PHA + JSR UDIV16 +*BRK ; X=00,Y=0A, == 30000/3000=10 +* +** SDIV16 DIVIDES TWO NUMBERS THAT ARE +** SIGNED. +* +** MACRO: _DIV16 #-1000;#10 +* + LDA #>-1000 + PHA + LDA #<-1000 + PHA + LDA #>10 + PHA + LDA #<10 + PHA + JSR SDIV16 +* BRK ; X=FF,Y=9C, == -100 (2'S COMPLEMENT) +* +** SREM16 DIVIDES TWO NUMBERS THAT ARE +** SIGNED, THEN RETURNS THE REMAINDER. +* +** MACRO: _REM16 #-1000;#13 +* + LDA #>-1000 + PHA + LDA #<-1000 + PHA + LDA #>13 + PHA + LDA #<13 + PHA + JSR SREM16 +* BRK ; X=FF,Y=F4, == -12 (2'S COMPLEMENT) +* +** UREM16 DIVIDES TWO NUMBERS THAT ARE +** UNSIGNED, THEN RETURNS THE REMAINDER. 
+* +** MACRO: _REM16 #1000;#13;U +* + LDA #>1000 + PHA + LDA #<1000 + PHA + LDA #>13 + PHA + LDA #<13 + PHA + JSR UREM16 +* BRK ; X=00,Y=0C, == 12 +* +** CMP16 COMPARES TWO 16BIT VALUES, THEN +** ALTERS THE REGISTER FLAGS AS FOLLOWS: +* +** MACRO: _CMP16 #1023;#123 +* + +* IF W1 & W2 ARE 2S COMPLEMENT +* IF W1 = W2 Z=1,N=0 +* IF W1 > W2 Z=0,N=0 +* IF W1 < W2 Z=0,N=1 +* ELSE +* IF W1 = W2 Z=1,C=1 +* IF W1 > W2 Z=0,C=1 +* IF W1 < W2 Z=0,C=0 +* + LDA #>1023 + PHA + LDA #<1023 + PHA + LDA #>123 + PHA + LDA #<123 + PHA + JSR CMP16 +* BRK ; P=31, == 11000100 +* ^^ N ZC +* N (SIGN) =0, Z (ZERO) = 0, C (CARRY) = 0 +* +** RND8 SIMPLY RETURNS A PSEUDO-RANDOM +** NUMBER BETWEEN 0 AND 255 IN A. +* +** NO MACRO ** +* + JSR RND8 +* BRK ; RUN THIS A FEW TIMES TO SEE A CHANGE +* +* +** RANDB RETURNS A VALUE BETWEEN A LOW AND +** HIGH BOUNDARY PUSHED TO THE STACK. SINCE THIS +** RETURNS A BYTE, THE RANGE MUST BE 0..255. +* +** MACRO: _RANDB #50;#100 +* + LDA #50 + PHA + LDA #100 ; HIGH BOUND + PHA + JSR RANDB +* BRK ; RUN A FEW TIMES TO SEE THE CHANGE IN A +* +** MUL8 MULTIPLIES TWO 8-BIT NUMBERS AND +** RETURNS A 16BIT RESULT, ALL UNSIGNED. +** MACRO: _MUL8 #100;#200 +* +* + LDA #100 + PHA + LDA #200 + PHA + JSR MUL8 +* BRK ; X=4E,Y=20, == 20000 +* +** DIV8 DIVIDES ONE 8BIT NUMBER BY +** ANOTHER (UNSIGNED), AND RETURNS +** AN 8BIT QUOTIENT. +** +** THE QUOTIENT IS STORED IN A, WHILE +** THE REMAINDER IS RETURNED IN X. 
+** MACRO: _DIV8 #200;#10 +* +* + LDA #200 + PHA + LDA #10 + PHA + JSR DIV8 +*BRK ; A=14, X=0, == 20 R 0 +* +** RND16 RETURNS A PSEUDO-RANDOM NUMBER (WORD) +** BETWEEN 0 AND 65535 (0000-FFFF) +* +** NO MACRO ** +* + JSR RND16 +*BRK ; BRUN A FEW TIMES TO SEE X,Y +* +* +* +* +** LITTLE LUMPS -- MID/LOW LEVEL +* +* + JMP REENTRY +* +*``````````````````````````````* +* BOTTOM INCLUDES * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +* +** BOTTOM INCLUDES +* + PUT MATH16.LIB + PUT MATH8.LIB +* diff --git a/disks/disk2_math8_math16/T.MATH.HOOKS b/disks/disk2_math8_math16/T.MATH.HOOKS new file mode 100644 index 0000000..d312ef7 --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH.HOOKS @@ -0,0 +1,5 @@ +* +GETNUM EQU $FFA7 ; ASCII TO HEX IN 3E & 3F +RNDL EQU $4E ; RANDOM NUMBER LOW +RNDH EQU $4F ; RANDOM NUMBER HIGH +* diff --git a/disks/disk2_math8_math16/T.MATH.MAC b/disks/disk2_math8_math16/T.MATH.MAC new file mode 100644 index 0000000..02cc43a --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH.MAC @@ -0,0 +1,350 @@ +*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=* +* * +* MATH.MAC * +* * +* AUTHOR: NATHAN RIGGS * +* CONTACT: NATHAN.RIGGS@ * +* OUTLOOK.COM * +* * +* VERSION: 0.1.2 * +* DATE: 30-OCT-2018 * +* ASSEMBLER: MERLIN 8 PRO * +* * +* LICENSE: APACHE 2.0 LICENSE, * +* WHERE APPLICABLE. CODE * +* INSPIRED BY OR COPIED FROM * +* OTHERS MAY FALL UNDER A * +* DIFFERENT LICENSE. I WILL * +* DO MY BEST TO NOTIFY SUCH * +* CASES. * +* * +*------------------------------* +* * +* THIS IS A MACRO LIBRARY FOR * +* MATH.LIB, AND CAN BE USED * +* REGARDLESS OF WHETHER A * +* SPECIFIC FUNCTION IS * +* INCLUDED AS A PUT IN THE * +* MAIN SOURCE. 
* +* * +*------------------------------* +* * +* LIST OF MACROS * +* * +* _ADD16 : 16BIT ADD * +* _SUB16 : 16BIT SUBTRACT * +* _MUL16 : 16BIT MULTIPLY * +* _DIV16 : 16BIT DIVIDE * +* _REM16 : 16BIT REMAINDER * +* _RND8 : RANDOM # 0..255 * +* _RNDB : RANDOM # LOW..HIGH * +* * +* _CMP16 : 16BIT COMPARE * +* _RND16 : 16BIT RANDOM # * +* _RNDW : RANDOM WORD LOW..HI * +* _MUL8 : 8BIT MULTIPLY * +* _DIV8 : 8BIT DIVIDE, REMAIND * +* * +*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=* +* +* +*``````````````````````````````* +* _ADD16 :: 16-BIT ADDITION * +*- -* +* ADD TWO 16BIT VALUES, STORE * +* RESULT IN Y, X (LOW, HIGH) * +*- -* +* PARAMETERS: * +* * +* PARAM1 = ADDEND 1 * +* PARAM2 = ADDEND 2 * +*- -* +* SAMPLE USAGE: * +* * +* _ADD16 #3000;#4000 * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +_ADD16 MAC + LDA #>]1 + PHA + LDA #<]1 + PHA + LDA #>]2 + PHA + LDA #<]2 + PHA + JSR ADD16 + <<< +* +*``````````````````````````````* +* _SUB16 :: 16-BIT SUBTRACTION * +*- -* +* SUBTRACTS ONE 16BIT INTEGER * +* FROM ANOTHER, STORING THE * +* RESULT IN Y,X (LOW, HIGH) * +*- -* +* PARAMETERS: * +* * +* PARAM1 = MINUEND * +* PARAM2 = SUBTRAHEND * +*- -* +* SAMPLE USAGE: * +* * +* _SUB16 #2000;#1500 * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +_SUB16 MAC + LDA #>]1 + PHA + LDA #<]1 + PHA + LDA #>]2 + PHA + LDA #<]2 + PHA + JSR SUB16 + <<< +* +*``````````````````````````````* +* _MUL16 :: 16-BIT MULTIPLY * +*- -* +* MULTIPLIES TWO 16BIT NUMBERS * +* AND RETURNS THE PRODUCT IN * +* Y,X (LOW, HIGH). * +*- -* +* PARAMETERS: * +* * +* PARAM1 = MULTIPLICAND * +* PARAM2 = MULTIPLIER * +*- -* +* SAMPLE USAGE: * +* * +* _MUL16 #400;#500 * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +_MUL16 MAC + LDA #>]1 + PHA + LDA #<]1 + PHA + LDA #>]2 + PHA + LDA #<]2 + PHA + JSR MUL16 +* +** NOW XFER STACK TO X,Y +* +** REALLY, THIS SHOULD BE IN THE +** ACTUAL ROUTINE. UNNECESSARY CYCLES. 
+* +** ALSO, HAVE IT RETURN THE 24BIT IN A, +** EVEN THOUGH IT'S CURRENTLY UNRELIABLE +* + PLA + TAY + PLA + TAX ; HIGH + <<< +* +*``````````````````````````````* +* _DIV16 :: 16-BIT DIVISION * +*- -* +* DIVIDES ONE 16BIT NUMBER BY * +* ANOTHER AND RETURNS THE * +* RESULT IN Y,X (LOW,HIGH). * +* * +* IF THE THIRD PARAMETER IS * +* PASSED WITH A U, VALUES WILL * +* BE TREATED AS UNSIGNED; IF * +* NOT, VALUES WILL BE TREATED * +* AS SIGNED. * +*- -* +* PARAMETERS: * +* * +* PARAM1 = DIVIDEND * +* PARAM2 = DIVISOR * +*- -* +* SAMPLE USAGE: * +* * +* _DIV16 #3000;#300 * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +_DIV16 MAC + LDA #>]1 + PHA + LDA #<]1 + PHA + LDA #>]2 + PHA + LDA #<]2 + PHA + IF U,]3 ; UNSIGNED + JSR UDIV16 + ELSE + JSR SDIV16 ; SIGNED + FIN +* +** NOW TRANSFER STACK TO Y,X +* + PLA + TAY ; LOW + PLA + TAX ; HIGH + <<< +* +*``````````````````````````````* +* _REM16 :: 16-BIT DIV REMAINR * +*- -* +* DIVIDES ONE INTEGER BY * +* ANOTHER AND RETURNS THE * +* REMAINDER IN Y,X (LOW, HIGH) * +* * +* IF THE THIRD PARAMETER IS * +* PASSED WITH A U, VALUES WILL * +* BE TREATED AS UNSIGNED; IF * +* NOT, VALUES WILL BE TREATED * +* AS SIGNED. * +*- -* +* PARAMETERS: * +* * +* PARAM1 = DIVIDEND * +* PARAM2 = DIVISOR * +*- -* +* SAMPLE USAGE: * +* * +* _REM16 #1000;#333 * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +_REM16 MAC + LDA #>]1 + PHA + LDA #<]1 + PHA + LDA #>]2 + PHA + LDA #<]2 + PHA + IF U,]3 ; UNSIGNED + JSR UREM16 + ELSE + JSR SREM16 ; SIGNED + FIN +* +** TRANSFER STACK TO Y,X +* + PLA + TAY ; LOW + PLA + TAX ; HIGH + <<< +* +*``````````````````````````````* +* _RNDB :: 8BIT RANDOM LOW,HI * +*- -* +* RETURNS A RANDOM NUMBER IN * +* REGISTER A THAT IS BETWEEN * +* THE LOW AND HIGH BOUNDARIES * +* PASSED IN THE PARAMETERS. * +* * +* NOTE THAT THIS RETURNS A * +* BYTE, AND THUS ONLY DEALS * +* WITH VALUES BETWEEN 0..255. 
* +*- -* +* PARAMETERS: * +* * +* PARAM1 = LOW BOUNDARY * +* PARAM2 = HIGH BOUNDARY * +*- -* +* SAMPLE USAGE: * +* * +* _RNDB #50;#100 * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +_RNDB MAC + LDA ]1 ; LOW + PHA + LDA ]2 ; HIGH + PHA + JSR RANDB + <<< +* +*``````````````````````````````* +* _CMP16 : 16BIT COMPARISON * +*- -* +* COMPARES TWO 16BIT VALUES * +* AND ALTERS THE P-REGISTER * +* ACCORDINGLY (FLAGS). * +*- -* +* PARAMETERS: * +* * +* PARAM1 = WORD 1 TO COMPARE * +* PARAM2 = WORD 2 TO COMPARE * +*- -* +* SAMPLE USAGE: * +* * +* _CMP16 #1023;#3021 * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +_CMP16 MAC + LDA #>]1 + PHA + LDA #<]1 + PHA + LDA #>]2 + PHA + LDA #<]2 + PHA + JSR CMP16 + <<< +* +*``````````````````````````````* +* _MUL8 :: 8BIT MULTIPLICATION * +*- -* +* MULTIPLIES TWO 8BIT VALUES * +* AND RETURNS A 16BIT RESULT * +* IN Y,X (LOW, HIGH). * +*- -* +* PARAMETERS: * +* * +* PARAM1 = MULTIPLICAND * +* PARAM2 = MULTIPLIER * +*- -* +* SAMPLE USAGE: * +* * +* _MUL8 #10;#20 * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +_MUL8 MAC + LDA ]1 + PHA + LDA ]2 + PHA + JSR MUL8 + <<< +* +*``````````````````````````````* +* _DIV8 :: 8BIT MULTIPLICATION * +*- -* +* DIVIDES ONE 8BIT NUMBER BY * +* ANOTHER AND STORES THE * +* QUOTIENT IN A WITH THE * +* REMAINDER IN X. * +*- -* +* PARAMETERS: * +* * +* PARAM1 = DIVIDEND * +* PARAM2 = DIVISOR * +*- -* +* SAMPLE USAGE: * +* * +* _DIV8 #100;#10 * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +_DIV8 MAC + LDA ]1 + PHA + LDA ]2 + PHA + JSR DIV8 + <<< diff --git a/disks/disk2_math8_math16/T.MATH16.ADD16 b/disks/disk2_math8_math16/T.MATH16.ADD16 new file mode 100644 index 0000000..fb3db35 --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH16.ADD16 @@ -0,0 +1,108 @@ +* +*``````````````````````````````* +* ADD16 :: ADD 16-BIT VALUES * +*- -* +* ADDS TWO SIXTEEN BIT VALUES * +* AND RETURNS A 16 BIT VALUE * +* IN Y (LOW) AND X (HIGH). * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? 
* +* SIZE: * +*- -* +* USAGE: * +* * +* LDA #>1000 * +* PHA * +* LDA #<1000 * +* PHA * +* LDA #>1200 ; ADD2 * +* PHA * +* LDA #<1200 * +* PHA * +* JSR ADD16 * +*- -* +* ENTRY * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* ADDEND 2 LOW BYTE * +* ADDEND 2 HIGH BYTE * +* ADDEND 1 LOW BYTE * +* ADDEND 1 HIGH BYTE * +*- -* +* EXIT * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* * +* Y = LOW BYTE OF RESULT * +* X = HIGH BYTE OF RESULT * +* A = LOW BYTE OF RET ADDR * +*- -* +* ADAPTED FROM LEVANTHAL AND * +* WINTHROP'S /6502 ASSEMBLY * +* LANGUAGE ROUTINES/. * +* AS SUCH, THIS MAY NOT FALL * +* UNDER THE APACHE 2.0 LICENSE * +* AGREEMENT, SINCE THE BOOK * +* WAS WRITTEN BEFORE THE * +* LICENSE! * +* * +* THAT SAID, LEVENTHAL WROTE * +* THAT THE PURPOSE OF THE BOOK * +* WAS TO COPY THE SOURCE CODE, * +* AS REINVENTING THE WHEEL IS * +* TEDIOUS, AND HE PLACED NO * +* RESTRICTIONS ON ITS USE. * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +ADD16 +* +** GET RETURN ADDR +* + PLA + STA :RETADR + PLA + STA :RETADR+1 +* +** GET PARAMETERS +* + PLA + STA :ADD2 + PLA + STA :ADD2+1 + PLA + STA :ADD1 + PLA + STA :ADD1+1 +* + LDA :ADD1 + CLC + ADC :ADD2 + TAY ; LOW BYTE + LDA :ADD1+1 + ADC :ADD2+1 + TAX ; HIGH BYTE +* +** RESTORE RETURN ADDRESS +* + LDA :RETADR+1 + PHA + LDA :RETADR + PHA +* + RTS +* +** DATA +* +:RETADR DS 2 +:ADD1 DS 2 +:ADD2 DS 2 diff --git a/disks/disk2_math8_math16/T.MATH16.CMP16 b/disks/disk2_math8_math16/T.MATH16.CMP16 new file mode 100644 index 0000000..f5e6dab --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH16.CMP16 @@ -0,0 +1,144 @@ +* +*``````````````````````````````* +* CMP16 :: 16-BIT COMPARE * +*- -* +* COMPARE TWO 16BIT SIGNED OR * +* UNSIGNED WORDS AND RETURN * +* THE C,Z,N FLAGS AS SET OR * +* CLEARED. * +*- -* +* CLOBBERS: * +* * +* FLAGS: CZN----- REG: AXYM * +*- -* +* CYCLES: ??? 
* +* SIZE: * +*- -* +* USAGE: * +* * +* LDA #>123 * +* PHA * +* LDA #<123 * +* PHA ; WORD 1 * +* LDA #>1023 * +* PHA * +* LDA #<1023 * +* PHA ; WORD 2 * +* JSR CMP16 * +*- -* +* ENTRY * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* LOW BYTE OF WORD 2 (SUBTRA) * +* HIGH BYTE OF WORD 2 * +* LOW BYTE OF WORD 1 (MINU) * +* HIGH BYTE OF WORD 1 * +*- -* +* EXIT * +* * +* TOP OF STACK * +* * +* FLAGS RETURNED BASED ON WORD * +* 1 - WORD 2. * +* * +* IF W1 & W2 ARE 2S COMPLEMENT * +* IF W1 = W2 Z=1,N=0 * +* IF W1 > W2 Z=0,N=0 * +* IF W1 < W2 Z=0,N=1 * +* ELSE * +* IF W1 = W2 Z=1,C=1 * +* IF W1 > W2 Z=0,C=1 * +* IF W1 < W2 Z=0,C=0 * +* * +*- -* +* ADAPTED FROM LEVANTHAL AND * +* WINTHROP'S /6502 ASSEMBLY * +* LANGUAGE ROUTINES/. * +* AS SUCH, THIS MAY NOT FALL * +* UNDER THE APACHE 2.0 LICENSE * +* AGREEMENT, SINCE THE BOOK * +* WAS WRITTEN BEFORE THE * +* LICENSE! * +* * +* THAT SAID, LEVENTHAL WROTE * +* THAT THE PURPOSE OF THE BOOK * +* WAS TO COPY THE SOURCE CODE, * +* AS REINVENTING THE WHEEL IS * +* TEDIOUS, AND HE PLACED NO * +* RESTRICTIONS ON ITS USE. * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +CMP16 +* +** GET RETURN ADDRESS +* + PLA + STA :RETADR + PLA + STA :RETADR+1 +* +** GET PARAMETERS +* + PLA + STA :SUBT ; SUBTRAHEND + PLA + STA :SUBT+1 + PLA + STA :MINU ; MINUEND + PLA + STA :MINU+1 +* +** RESTORE RETURN ADDRESS +* + LDA :RETADR+1 + PHA + LDA :RETADR + PHA +* + LDA :MINU + CMP :SUBT ; COMPARE LOW BYTES + BEQ :EQUAL +* +** LOW BYTES ARE NOTE EQUAL +** COMPARE HIGH BYTES +* + LDA :MINU+1 + SBC :SUBT+1 ; COMPARE HIGH BYTES + ORA #1 ; MAKE Z=0, SINCE LOW + ; BYTES ARE NOT EQUAL + BVS :OVFLOW ; MUST HANDLE OVERFLOW + ; FOR SIGNED MATH + RTS ; EXIT +* +** LOW BYTES ARE UQAL -- COMPARE HIGH +* +:EQUAL + LDA :MINU+1 + SBC :SUBT+1 ; UPPER BYTES + BVS :OVFLOW + RTS ; RETURN W FLAGS SET +* +** +** OVERFLOW WITH SIGNED ARITHMETIC SO +** COMPLEMENT THE NEGATIVE FLAG. 
+** DO NO CHANGE THE CARRY FLAG AND +** MAKE THE ZERO FLAG EQUAL TO 0. +** COMPLEMENT NEG FLAG BY EORING +** #$80 AND ACCUMULATOR. +** +* +:OVFLOW + EOR #$80 ; COMPLEMENT N FLAG + ORA #1 ; IF OVERFLOW THEN THE + ; WORDS ARE !=. Z= 0 + ; CARRY UNCHANGED + RTS +* +** DATA +* +:MINU DS 2 +:SUBT DS 2 +:RETADR DS 2 diff --git a/disks/disk2_math8_math16/T.MATH16.DIV16 b/disks/disk2_math8_math16/T.MATH16.DIV16 new file mode 100644 index 0000000..55dd570 --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH16.DIV16 @@ -0,0 +1,335 @@ +* +*``````````````````````````````* +* 16-BIT DIVISION: * +* * +* SDIV16, UDIV16, SREM16, AND * +* UREM16. * +*- -* +* SDIV16: DIVIDED 2 SIGNED BIT * +* WORDS AND RETURN A 16-BIT * +* SIGNED QUOTIENT. * +* * +* UDIV16: DIVIDE 2 UNSIGNED * +* 16BIT WORDS AND RETURN A * +* 16BIT UNSIGNED QUOTIENT. * +* * +* SREM16: DIVIDE 2 SIGNED * +* 16BIT WORDS AND RETURN A * +* 16BIT SIGNED REMAINDER. * +* * +* UREM16: DIVIDE 2 UNSIGNED * +* 16BIT WORKDS AND RETURN A * +* 16BIT UNSIGNED REMAINDER. * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? * +* SIZE: * +*- -* +* USAGE: * +* * +** ALL ROUTINES USE THE SAME * +** FORMAT. * +* * +* LDA #>10000 * +* PHA * +* LDA #<10000 ; DIVIDND * +* PHA * +* LDA #>1000 ; DIVISOR * +* PHA * +* LDA #<1000 * +* PHA * +* JSR UDIV16 * +*- -* +* ENTRY * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* LOW BYTE OF DIVISOR * +* HIGH BYTE OF DIVISOR * +* LOW BYTE OF DIVIDEND * +* HIGH BYTE OF DIVIDEND * +*- -* +* EXIT * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* * +* IF NO ERRORS, CARRY = 0, * +* ELSE CARRY=1, QUOTIENT=0, * +* AND REMAINDER=0 * +* * +* Y = COUNTER; TRASH * +* X = COUNTER; TRASH * +* A = LOW BYTE OF RET ADDR * +*- -* +* ADAPTED FROM LEVANTHAL AND * +* WINTHROP'S /6502 ASSEMBLY * +* LANGUAGE ROUTINES/. 
* +* AS SUCH, THIS MAY NOT FALL * +* UNDER THE APACHE 2.0 LICENSE * +* AGREEMENT, SINCE THE BOOK * +* WAS WRITTEN BEFORE THE * +* LICENSE! * +* * +* THAT SAID, LEVENTHAL WROTE * +* THAT THE PURPOSE OF THE BOOK * +* WAS TO COPY THE SOURCE CODE, * +* AS REINVENTING THE WHEEL IS * +* TEDIOUS, AND HE PLACED NO * +* RESTRICTIONS ON ITS USE. * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +UDIV16 + LDA #0 + BEQ UDIVMD +UREM16 + LDA #2 +UDIVMD + STA _RSLTI +* +** GET RETURN ADDRESS +* + PLA + STA _RETADR + PLA + STA _RETADR+1 +* +** GET PARAMETERS +* + PLA + STA _DVSOR + PLA + STA _DVSOR+1 + PLA + STA _DVEND + PLA + STA _DVEND+1 +* + JSR UDIV + BCC DIVOK ; BR IF NO ERR +DIVERR JMP EREXIT +DIVOK JMP OKEXIT +* +** SIGNED DIVISION +* +SDIV16 + LDA #0 ; RESULT IS QUOTIENT + BEQ SDIVMD ; (INDEX=0) +* +** SIGNED REMAINDER +* +SREM16 + LDA #2 ; RES = REMAINDER (I=2) + BNE SDIVMD +* +SDIVMD + STA _RSLTI ;RESULT INDEX;0=Q,2=R +* +** GET RETURN ADDRESS +* + PLA + STA _RETADR + PLA + STA _RETADR+1 +* +** GET PARAMETERS +* + PLA + STA _DVSOR + PLA + STA _DVSOR+1 + PLA + STA _DVEND + PLA + STA _DVEND+1 +* +* +** DETERMINE SIGN OF QUOTIENT BY +** PERFORMING AN EXCLUSIVE OR OF +** THE HIGH BYTES. IF THE SIGNS +** ARE THE SAME THEN BIT 7 WILL +** BE 0 AND THE QUOTIENT IS +** POSITIVE. IF THE SIGNS ARE +** DIFFERENT THEN THE QUOTIENT +** IS NEGATIVE. 
+* + LDA _DVEND+1 + EOR _DVSOR+1 + STA _SQUOT +* +** SIGN OF REMAINDER IS THE SIGN +** OF THE DIVIDEND +* + LDA _DVEND+1 + STA _SREMD +* +** TAKE THE ABSOLUTE VALUE OF +** THE DIVISOR +* + LDA _DVSOR+1 + BPL CHKDE ; BR IF ALREADY POS + LDA #0 ; SUB DVSOR FROM ZERO + SEC + SBC _DVSOR + STA _DVSOR + LDA #0 + SBC _DVSOR+1 + STA _DVSOR+1 +* +** TAKE ABS VALUE OF THE DIVIDEND +* +CHKDE + LDA _DVEND+1 + BPL DODIV ; BR IF DVEND IS POS + LDA #0 ; SUB DVEND FROM ZERO + SEC + SBC _DVEND + STA _DVEND + LDA #0 + SBC _DVEND+1 + STA _DVEND+1 +* +** DIVIDE ABS VALUES +* +DODIV + JSR UDIV + BCS EREXIT ; EXIT IF DIV BY 0 +* +** NEGATE QUOTIENT IF IT IS NEGATIVE +* + LDA _SQUOT + BPL DOREM ; BR IF Q IS POS + LDA #0 + SEC + SBC _DVEND + STA _DVEND + LDA #0 + SBC _DVEND+1 + STA _DVEND+1 +* +DOREM +* +** NEGATE REMAINDER IF IT IS NEG +* + LDA _SREMD + BPL OKEXIT ; BR IF REM IS POS + LDA #0 + SEC + SBC _DVEND+2 + STA _DVEND+2 + LDA #0 + SBC _DVEND+3 + STA _DVEND+3 + JMP OKEXIT +* +** ERROR EXIT (CARRY=1, RSLTS ARE 0) +* +EREXIT + LDA #0 + STA _DVEND + STA _DVEND+1 ;QUOTIENT = 0 + STA _DVEND+2 + STA _DVEND+3 ; REMAINDER=0 + SEC ; CARRY=1 IF ERROR + BCS DVEXIT +* +** GOOD EXIT (CARRY = 0) +* +OKEXIT + CLC ; CARRY = 0, NO ERRORS +* +DVEXIT +* +** STORE RESULT +* + LDX _RSLTI ;GET INDEX TO RESULT + ; 0=QUOTIENT, 2=REMAINDER +* +** STORE RESULT IN STACK +* + LDA _DVEND,X + TAY + LDA _DVEND+1,X + TAX +* +** RESTORE RETURN ADDRESS +* + LDA _RETADR+1 + PHA + LDA _RETADR + PHA +* + RTS +* +* +******************************** +* UDIVE ROUTINE +******************************** +UDIV +* +** ZERO UPPER WORD DIVIDEND +** THIS WILL BE CALLED +** DIVIDEND(1) BELOW +* + LDA #0 + STA _DVEND+2 + STA _DVEND+3 +* +** FIRST CHECK FOR DIV BY 0 +* + LDA _DVSOR + ORA _DVSOR+1 + BNE OKUDIV ; BR IF DVSOR NOT 0 + SEC + RTS +* +** PERFORM THE DIVISION BY +** TRIAL SUBTRACTIONS +* +OKUDIV + LDX #16 ; LOOP THROUGH 16 BITS +DIVLP + ROL _DVEND ;SHFT CARRY INTO BIT 0 OF DVEND + ROL _DVEND+1 ;WHICH WILL BE THE QUOTIENT 
AND + ROL _DVEND+2 ;SHFT DVEND AT THE SAME TIME + ROL _DVEND+3 +CHKLT + SEC + LDA _DVEND+2 + SBC _DVSOR + TAY ; SAVE LOW BYTE IN Y + LDA _DVEND+3 + SBC _DVSOR+1 ;SUB HIGHBYTES W RES IN A + BCC DECCNT ; BR IF DVEND < DVSOR AND CARRY + STY _DVEND+2 ; ELSE + STA _DVEND+3 ;VEN(1)=DVEND(1)-DVSOR +* +DECCNT + DEX + BNE DIVLP +* + ROL _DVEND ;SHFT IN LAST CAR FOR QUOT + ROL _DVEND+1 + CLC ; NO ERRORS, CLEAR CARRY + RTS +* +** DATA +* +** MAKING THESE GLOBAL FOR NOW WHILE I TRY TO +** UNDERSTAND THIS ALGORITHM ENOUGH TO MAKE +** IT USE LOCAL VARIABLES +* +_DVSOR DS 2 ; DIVISOR +_DVEND DS 4 ; DIVIDEND[0] AND QUOTIENT + ; DIVIDEND[1] AND REMAINDER +_RETADR DS 2 +_SQUOT DS 1 ; SIGN OF QUOTIENT +_SREMD DS 1 ; SIGN OF REMAINDER +_RSLTI DS 1 ; RESULT INDEX diff --git a/disks/disk2_math8_math16/T.MATH16.LIB b/disks/disk2_math8_math16/T.MATH16.LIB new file mode 100644 index 0000000..5354b1c --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH16.LIB @@ -0,0 +1,995 @@ +*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=* +* * +* MATH.LIB * +* * +* AUTHOR: NATHAN RIGGS * +* CONTACT: NATHAN.RIGGS@ * +* OUTLOOK.COM * +* * +* VERSION: 0.1.2 * +* DATE: 30-OCT-2018 * +* ASSEMBLER: MERLIN 8 PRO * +* * +* LICENSE: APACHE 2.0, WHERE * +* APPLICABLE. CODE INSPIRED * +* BY THE WORKS OF OTHERS MAY * +* FALL UNDER A DIFFERENT * +* LICENSE (NOTED IN ROUTINE). * +* * +* STANDARD INTEGER MATH * +* LIBRARY FOR 8-BIT AND 16-BIT * +* CALCALUATIONS, AS WELL AS * +* COMMON ROUTINES LIKE PSEUDO- * +* RANDOM NUMBER GENERATORS. 
* +* * +*------------------------------* +* * +* LIST OF ROUTINES * +* * +* ADD16 : 16BIT ADD * +* SUB16 : 16BIT SUBTRACT * +* MUL16 : 16BIT MULTIPLY * +* SDIV16 : 16BIT DIVIDE (SIGN) * +* UDIV16 : 16BIT DIVIDE (UNSN) * +* SREM16 : 16BIT REMNDR (SIGN) * +* UREM16 : 16BIT REMNDR (UNSN) * +* CMP16 : 16BIT COMPARE * +* RND16 : RANDOM # 0..65535 * +* * +*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=* +* +*``````````````````````````````* +* ADD16 :: ADD 16-BIT VALUES * +*- -* +* ADDS TWO SIXTEEN BIT VALUES * +* AND RETURNS A 16 BIT VALUE * +* IN Y (LOW) AND X (HIGH). * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? * +* SIZE: * +*- -* +* USAGE: * +* * +* LDA #>1000 * +* PHA * +* LDA #<1000 * +* PHA * +* LDA #>1200 ; ADD2 * +* PHA * +* LDA #<1200 * +* PHA * +* JSR ADD16 * +*- -* +* ENTRY * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* ADDEND 2 LOW BYTE * +* ADDEND 2 HIGH BYTE * +* ADDEND 1 LOW BYTE * +* ADDEND 1 HIGH BYTE * +*- -* +* EXIT * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* * +* Y = LOW BYTE OF RESULT * +* X = HIGH BYTE OF RESULT * +* A = LOW BYTE OF RET ADDR * +*- -* +* ADAPTED FROM LEVANTHAL AND * +* WINTHROP'S /6502 ASSEMBLY * +* LANGUAGE ROUTINES/. * +* AS SUCH, THIS MAY NOT FALL * +* UNDER THE APACHE 2.0 LICENSE * +* AGREEMENT, SINCE THE BOOK * +* WAS WRITTEN BEFORE THE * +* LICENSE! * +* * +* THAT SAID, LEVENTHAL WROTE * +* THAT THE PURPOSE OF THE BOOK * +* WAS TO COPY THE SOURCE CODE, * +* AS REINVENTING THE WHEEL IS * +* TEDIOUS, AND HE PLACED NO * +* RESTRICTIONS ON ITS USE. 
* +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +ADD16 +* +** GET RETURN ADDR +* + PLA + STA :RETADR + PLA + STA :RETADR+1 +* +** GET PARAMETERS +* + PLA + STA :ADD2 + PLA + STA :ADD2+1 + PLA + STA :ADD1 + PLA + STA :ADD1+1 +* + LDA :ADD1 + CLC + ADC :ADD2 + TAY ; LOW BYTE + LDA :ADD1+1 + ADC :ADD2+1 + TAX ; HIGH BYTE +* +** RESTORE RETURN ADDRESS +* + LDA :RETADR+1 + PHA + LDA :RETADR + PHA +* + RTS +* +** DATA +* +:RETADR DS 2 +:ADD1 DS 2 +:ADD2 DS 2 +* +*``````````````````````````````* +* SUB16 :: SUBTRACT WORDS * +*- -* +* SUBTRACT ONE 16BIT NUMBER * +* FROM ANOTHER, RETURNING THE * +* RESULT IN X (HI) AND Y (LOW) * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? * +* SIZE: * +*- -* +* USAGE: * +* * +* LDA #>1000 ; MINU * +* PHA * +* LDA #<1000 * +* PHA * +* LDA #>500 ; SUBT * +* PHA * +* LDA #<500 * +* PHA * +* JSR SUB16 * +*- -* +* ENTRY * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* LOW BYTE OF SUBTRAHEND * +* HIGH BYTE OF SUBTRAHEND * +* LOW BYTE OF MINUEND * +* HIGH BYTE OF MINUEND * +*- -* +* EXIT * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* * +* Y = RESULT LOW BYTE * +* X = RESULT HIGH BYTE * +* A = LOW BYTE OF RET ADDR * +*- -* +* ADAPTED FROM LEVANTHAL AND * +* WINTHROP'S /6502 ASSEMBLY * +* LANGUAGE ROUTINES/. * +* AS SUCH, THIS MAY NOT FALL * +* UNDER THE APACHE 2.0 LICENSE * +* AGREEMENT, SINCE THE BOOK * +* WAS WRITTEN BEFORE THE * +* LICENSE! * +* * +* THAT SAID, LEVENTHAL WROTE * +* THAT THE PURPOSE OF THE BOOK * +* WAS TO COPY THE SOURCE CODE, * +* AS REINVENTING THE WHEEL IS * +* TEDIOUS, AND HE PLACED NO * +* RESTRICTIONS ON ITS USE. 
* +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +SUB16 +* +** GET RETURN ADDRESS +* + PLA + STA :RETADR + PLA + STA :RETADR+1 +* +** GET PARAMETERS +* + PLA + STA :SUBT + PLA + STA :SUBT+1 ; SUBTRAHEND + PLA + STA :MINU + PLA + STA :MINU+1 ; MINUEND +* +** MINUEND - SUBTRAHEND +* + LDA :MINU + SEC + SBC :SUBT + TAY ; LOW BYTE + LDA :MINU+1 + SBC :SUBT+1 + TAX ; HIGH BYTE +* +** RESTORE REUTNR ADDRESS +* + LDA :RETADR+1 + PHA + LDA :RETADR + PHA +* + RTS +* +** DATA +* +:RETADR DS 2 +:MINU DS 2 +:SUBT DS 2 +* +*``````````````````````````````* +* MUL16 :: MULTIPLY WORDS * +*- -* +* PERFORM MULTIPLICATION USING * +* THE SHIFT AND ADD ALGORITHM * +* * +* THIS ALGORITHM PRODUCES AN * +* UNSIGNED 32-BIT PRODUCT IN * +* HIPROD AND MLIER WITH HIPROD * +* BEING THE HIGH WORD. * +* * +* NOTE: WHILE THIS DOES PASS * +* THE FULL 32BIT PRODUCT BACK * +* VIA X AND Y, BUT THIS WOULD * +* RETURN FALSE RESULTS IF ONE * +* OF THE PARAMETERS HAD BEEN * +* SIGNED. THIS, USUALLY ONLY * +* THE LOW WORD IS USED. * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? * +* SIZE: * +*- -* +* USAGE: * +* * +* LDA #>1000 * +* PHA * +* LDA #<1000 ; MCAND * +* PHA * +* LDA #>3 * +* PHA * +* LDA #<3 ; MLIER * +* PHA * +* JSR MUL16 ; = 3000 * +*- -* +* ENTRY * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* LOW BYTE OF MULTIPLIER * +* HIGH BYTE OF MULTIPLIER * +* LOW BYTE OF MULTIPLICAND * +* HIGH BYTE OF MULTIPLICAND * +*- -* +* EXIT * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* LOW BYTE OF PRODUCT * +* HIGH BYTE OF PRODUCT (16BIT) * +* * +* Y = LOW BYTE OF HIGH WORD * +* X = HIGH BYTE OF HIGH WORD * +* A = LOW BYTE OF RET ADDR * +*- -* +* ADAPTED FROM LEVANTHAL AND * +* WINTHROP'S /6502 ASSEMBLY * +* LANGUAGE ROUTINES/. * +* AS SUCH, THIS MAY NOT FALL * +* UNDER THE APACHE 2.0 LICENSE * +* AGREEMENT, SINCE THE BOOK * +* WAS WRITTEN BEFORE THE * +* LICENSE! 
* +* * +* THAT SAID, LEVENTHAL WROTE * +* THAT THE PURPOSE OF THE BOOK * +* WAS TO COPY THE SOURCE CODE, * +* AS REINVENTING THE WHEEL IS * +* TEDIOUS, AND HE PLACED NO * +* RESTRICTIONS ON ITS USE. * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +MUL16 +* +** GET RETURN ADDRESS +* + PLA + STA :RETADR + PLA + STA :RETADR+1 +* +** GET PARAMETERS +* + PLA + STA :MLIER + PLA + STA :MLIER+1 + PLA + STA :MCAND + PLA + STA :MCAND+1 +* +** PERFORM MULTIPLICATION USING THE SHIFT +** AND ADD ALGORITHM, WHICH PRODUCES AN +** UNSIGNED 32-BIT PRODUCT IN :HIPROD AND +** :MLIER WITH :HIPROD BEING THE HIGH WORD. +* + LDA #0 + STA :HPROD ; ZERO HIGH WORD + STA :HPROD+1 + LDX #17 ; # OF BITS IN MPLIER + ; PLUS 1. EXTRA LOOP IS + ; TO MOVE LAST CARRY INTO + ; THE PRODUCT. + CLC ; CLEAR CARRY FOR 1ST TIME + ; THROUGH LOOP. +:MULLP +* +** IF NEXT BIT = 1 THEN +** HIPROD = HIPROD+MCAND +* + ROR :HPROD+1 + ROR :HPROD + ROR :MLIER+1 + ROR :MLIER + BCC :DECCNT ; BR IF NEXT BIT OF + ; MULTIPLIER IS 0 + CLC ; NEXT BIT=1 SO ADD MCAND + ; TO PRODUCT + LDA :MCAND + ADC :HPROD + STA :HPROD + LDA :MCAND+1 + ADC :HPROD+1 + STA :HPROD+1 ; CARRY = OVERFLOW + ; FROM ADD +:DECCNT + DEX + BNE :MULLP ; CONTINUE UNTIL DONE + + LDY :HPROD ; LOW BYTE OF HIGH WORD + LDX :HPROD+1 ; HIGH BYTE OF HIGH WORD +* +** PUSH LOW WORD OF PRODUCT ONTO THE STACK +* + LDA :MLIER+1 + TAX + LDA :MLIER + TAY +* +** RESTORE RETURN ADDRESS +* + LDA :RETADR+1 + PHA + LDA :RETADR + PHA +* + LDA :HPROD ;24BIT + RTS +* +** DATA +* +:MCAND DS 3 +:MLIER DS 3 +:HPROD DS 2 +:RETADR DS 2 +* +*``````````````````````````````* +* 16-BIT DIVISION: * +* * +* SDIV16, UDIV16, SREM16, AND * +* UREM16. * +*- -* +* SDIV16: DIVIDED 2 SIGNED BIT * +* WORDS AND RETURN A 16-BIT * +* SIGNED QUOTIENT. * +* * +* UDIV16: DIVIDE 2 UNSIGNED * +* 16BIT WORDS AND RETURN A * +* 16BIT UNSIGNED QUOTIENT. * +* * +* SREM16: DIVIDE 2 SIGNED * +* 16BIT WORDS AND RETURN A * +* 16BIT SIGNED REMAINDER. 
* +* * +* UREM16: DIVIDE 2 UNSIGNED * +* 16BIT WORKDS AND RETURN A * +* 16BIT UNSIGNED REMAINDER. * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? * +* SIZE: * +*- -* +* USAGE: * +* * +** ALL ROUTINES USE THE SAME * +** FORMAT. * +* * +* LDA #>10000 * +* PHA * +* LDA #<10000 ; DIVIDND * +* PHA * +* LDA #>1000 ; DIVISOR * +* PHA * +* LDA #<1000 * +* PHA * +* JSR UDIV16 * +*- -* +* ENTRY * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* LOW BYTE OF DIVISOR * +* HIGH BYTE OF DIVISOR * +* LOW BYTE OF DIVIDEND * +* HIGH BYTE OF DIVIDEND * +*- -* +* EXIT * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* * +* IF NO ERRORS, CARRY = 0, * +* ELSE CARRY=1, QUOTIENT=0, * +* AND REMAINDER=0 * +* * +* Y = COUNTER; TRASH * +* X = COUNTER; TRASH * +* A = LOW BYTE OF RET ADDR * +*- -* +* ADAPTED FROM LEVANTHAL AND * +* WINTHROP'S /6502 ASSEMBLY * +* LANGUAGE ROUTINES/. * +* AS SUCH, THIS MAY NOT FALL * +* UNDER THE APACHE 2.0 LICENSE * +* AGREEMENT, SINCE THE BOOK * +* WAS WRITTEN BEFORE THE * +* LICENSE! * +* * +* THAT SAID, LEVENTHAL WROTE * +* THAT THE PURPOSE OF THE BOOK * +* WAS TO COPY THE SOURCE CODE, * +* AS REINVENTING THE WHEEL IS * +* TEDIOUS, AND HE PLACED NO * +* RESTRICTIONS ON ITS USE. 
* +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +UDIV16 + LDA #0 + BEQ UDIVMD +UREM16 + LDA #2 +UDIVMD + STA _RSLTI +* +** GET RETURN ADDRESS +* + PLA + STA _RETADR + PLA + STA _RETADR+1 +* +** GET PARAMETERS +* + PLA + STA _DVSOR + PLA + STA _DVSOR+1 + PLA + STA _DVEND + PLA + STA _DVEND+1 +* + JSR UDIV + BCC DIVOK ; BR IF NO ERR +DIVERR JMP EREXIT +DIVOK JMP OKEXIT +* +** SIGNED DIVISION +* +SDIV16 + LDA #0 ; RESULT IS QUOTIENT + BEQ SDIVMD ; (INDEX=0) +* +** SIGNED REMAINDER +* +SREM16 + LDA #2 ; RES = REMAINDER (I=2) + BNE SDIVMD +* +SDIVMD + STA _RSLTI ;RESULT INDEX;0=Q,2=R +* +** GET RETURN ADDRESS +* + PLA + STA _RETADR + PLA + STA _RETADR+1 +* +** GET PARAMETERS +* + PLA + STA _DVSOR + PLA + STA _DVSOR+1 + PLA + STA _DVEND + PLA + STA _DVEND+1 +* +* +** DETERMINE SIGN OF QUOTIENT BY +** PERFORMING AN EXCLUSIVE OR OF +** THE HIGH BYTES. IF THE SIGNS +** ARE THE SAME THEN BIT 7 WILL +** BE 0 AND THE QUOTIENT IS +** POSITIVE. IF THE SIGNS ARE +** DIFFERENT THEN THE QUOTIENT +** IS NEGATIVE. 
+* + LDA _DVEND+1 + EOR _DVSOR+1 + STA _SQUOT +* +** SIGN OF REMAINDER IS THE SIGN +** OF THE DIVIDEND +* + LDA _DVEND+1 + STA _SREMD +* +** TAKE THE ABSOLUTE VALUE OF +** THE DIVISOR +* + LDA _DVSOR+1 + BPL CHKDE ; BR IF ALREADY POS + LDA #0 ; SUB DVSOR FROM ZERO + SEC + SBC _DVSOR + STA _DVSOR + LDA #0 + SBC _DVSOR+1 + STA _DVSOR+1 +* +** TAKE ABS VALUE OF THE DIVIDEND +* +CHKDE + LDA _DVEND+1 + BPL DODIV ; BR IF DVEND IS POS + LDA #0 ; SUB DVEND FROM ZERO + SEC + SBC _DVEND + STA _DVEND + LDA #0 + SBC _DVEND+1 + STA _DVEND+1 +* +** DIVIDE ABS VALUES +* +DODIV + JSR UDIV + BCS EREXIT ; EXIT IF DIV BY 0 +* +** NEGATE QUOTIENT IF IT IS NEGATIVE +* + LDA _SQUOT + BPL DOREM ; BR IF Q IS POS + LDA #0 + SEC + SBC _DVEND + STA _DVEND + LDA #0 + SBC _DVEND+1 + STA _DVEND+1 +* +DOREM +* +** NEGATE REMAINDER IF IT IS NEG +* + LDA _SREMD + BPL OKEXIT ; BR IF REM IS POS + LDA #0 + SEC + SBC _DVEND+2 + STA _DVEND+2 + LDA #0 + SBC _DVEND+3 + STA _DVEND+3 + JMP OKEXIT +* +** ERROR EXIT (CARRY=1, RSLTS ARE 0) +* +EREXIT + LDA #0 + STA _DVEND + STA _DVEND+1 ;QUOTIENT = 0 + STA _DVEND+2 + STA _DVEND+3 ; REMAINDER=0 + SEC ; CARRY=1 IF ERROR + BCS DVEXIT +* +** GOOD EXIT (CARRY = 0) +* +OKEXIT + CLC ; CARRY = 0, NO ERRORS +* +DVEXIT +* +** STORE RESULT +* + LDX _RSLTI ;GET INDEX TO RESULT + ; 0=QUOTIENT, 2=REMAINDER +* +** STORE RESULT IN STACK +* + LDA _DVEND,X + TAY + LDA _DVEND+1,X + TAX +* +** RESTORE RETURN ADDRESS +* + LDA _RETADR+1 + PHA + LDA _RETADR + PHA +* + RTS +* +* +******************************** +* UDIVE ROUTINE +******************************** +UDIV +* +** ZERO UPPER WORD DIVIDEND +** THIS WILL BE CALLED +** DIVIDEND(1) BELOW +* + LDA #0 + STA _DVEND+2 + STA _DVEND+3 +* +** FIRST CHECK FOR DIV BY 0 +* + LDA _DVSOR + ORA _DVSOR+1 + BNE OKUDIV ; BR IF DVSOR NOT 0 + SEC + RTS +* +** PERFORM THE DIVISION BY +** TRIAL SUBTRACTIONS +* +OKUDIV + LDX #16 ; LOOP THROUGH 16 BITS +DIVLP + ROL _DVEND ;SHFT CARRY INTO BIT 0 OF DVEND + ROL _DVEND+1 ;WHICH WILL BE THE QUOTIENT 
AND + ROL _DVEND+2 ;SHFT DVEND AT THE SAME TIME + ROL _DVEND+3 +CHKLT + SEC + LDA _DVEND+2 + SBC _DVSOR + TAY ; SAVE LOW BYTE IN Y + LDA _DVEND+3 + SBC _DVSOR+1 ;SUB HIGHBYTES W RES IN A + BCC DECCNT ; BR IF DVEND < DVSOR AND CARRY + STY _DVEND+2 ; ELSE + STA _DVEND+3 ;VEN(1)=DVEND(1)-DVSOR +* +DECCNT + DEX + BNE DIVLP +* + ROL _DVEND ;SHFT IN LAST CAR FOR QUOT + ROL _DVEND+1 + CLC ; NO ERRORS, CLEAR CARRY + RTS +* +** DATA +* +** MAKING THESE GLOBAL FOR NOW WHILE I TRY TO +** UNDERSTAND THIS ALGORITHM ENOUGH TO MAKE +** IT USE LOCAL VARIABLES +* +_DVSOR DS 2 ; DIVISOR +_DVEND DS 4 ; DIVIDEND[0] AND QUOTIENT + ; DIVIDEND[1] AND REMAINDER +_RETADR DS 2 +_SQUOT DS 1 ; SIGN OF QUOTIENT +_SREMD DS 1 ; SIGN OF REMAINDER +_RSLTI DS 1 ; RESULT INDEX +* +* +*``````````````````````````````* +* CMP16 :: 16-BIT COMPARE * +*- -* +* COMPARE TWO 16BIT SIGNED OR * +* UNSIGNED WORDS AND RETURN * +* THE C,Z,N FLAGS AS SET OR * +* CLEARED. * +*- -* +* CLOBBERS: * +* * +* FLAGS: CZN----- REG: AXYM * +*- -* +* CYCLES: ??? * +* SIZE: * +*- -* +* USAGE: * +* * +* LDA #>123 * +* PHA * +* LDA #<123 * +* PHA ; WORD 1 * +* LDA #>1023 * +* PHA * +* LDA #<1023 * +* PHA ; WORD 2 * +* JSR CMP16 * +*- -* +* ENTRY * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* LOW BYTE OF WORD 2 (SUBTRA) * +* HIGH BYTE OF WORD 2 * +* LOW BYTE OF WORD 1 (MINU) * +* HIGH BYTE OF WORD 1 * +*- -* +* EXIT * +* * +* TOP OF STACK * +* * +* FLAGS RETURNED BASED ON WORD * +* 1 - WORD 2. * +* * +* IF W1 & W2 ARE 2S COMPLEMENT * +* IF W1 = W2 Z=1,N=0 * +* IF W1 > W2 Z=0,N=0 * +* IF W1 < W2 Z=0,N=1 * +* ELSE * +* IF W1 = W2 Z=1,C=1 * +* IF W1 > W2 Z=0,C=1 * +* IF W1 < W2 Z=0,C=0 * +* * +*- -* +* ADAPTED FROM LEVANTHAL AND * +* WINTHROP'S /6502 ASSEMBLY * +* LANGUAGE ROUTINES/. * +* AS SUCH, THIS MAY NOT FALL * +* UNDER THE APACHE 2.0 LICENSE * +* AGREEMENT, SINCE THE BOOK * +* WAS WRITTEN BEFORE THE * +* LICENSE! 
* +* * +* THAT SAID, LEVENTHAL WROTE * +* THAT THE PURPOSE OF THE BOOK * +* WAS TO COPY THE SOURCE CODE, * +* AS REINVENTING THE WHEEL IS * +* TEDIOUS, AND HE PLACED NO * +* RESTRICTIONS ON ITS USE. * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +CMP16 +* +** GET RETURN ADDRESS +* + PLA + STA :RETADR + PLA + STA :RETADR+1 +* +** GET PARAMETERS +* + PLA + STA :SUBT ; SUBTRAHEND + PLA + STA :SUBT+1 + PLA + STA :MINU ; MINUEND + PLA + STA :MINU+1 +* +** RESTORE RETURN ADDRESS +* + LDA :RETADR+1 + PHA + LDA :RETADR + PHA +* + LDA :MINU + CMP :SUBT ; COMPARE LOW BYTES + BEQ :EQUAL +* +** LOW BYTES ARE NOTE EQUAL +** COMPARE HIGH BYTES +* + LDA :MINU+1 + SBC :SUBT+1 ; COMPARE HIGH BYTES + ORA #1 ; MAKE Z=0, SINCE LOW + ; BYTES ARE NOT EQUAL + BVS :OVFLOW ; MUST HANDLE OVERFLOW + ; FOR SIGNED MATH + RTS ; EXIT +* +** LOW BYTES ARE UQAL -- COMPARE HIGH +* +:EQUAL + LDA :MINU+1 + SBC :SUBT+1 ; UPPER BYTES + BVS :OVFLOW + RTS ; RETURN W FLAGS SET +* +** +** OVERFLOW WITH SIGNED ARITHMETIC SO +** COMPLEMENT THE NEGATIVE FLAG. +** DO NO CHANGE THE CARRY FLAG AND +** MAKE THE ZERO FLAG EQUAL TO 0. +** COMPLEMENT NEG FLAG BY EORING +** #$80 AND ACCUMULATOR. +** +* +:OVFLOW + EOR #$80 ; COMPLEMENT N FLAG + ORA #1 ; IF OVERFLOW THEN THE + ; WORDS ARE !=. Z= 0 + ; CARRY UNCHANGED + RTS +* +** DATA +* +:MINU DS 2 +:SUBT DS 2 +:RETADR DS 2 +* +*``````````````````````````````* +* RND16 : 16BIT RANDOM NUMBER * +*- -* +* GENERATE A 16BIT PSEUDO- * +* RANDOM NUMBER AND RETURN IT * +* IN Y,X (LOW, HIGH). * +* * +* ORIGINAL AUTHOR IS WHITE * +* FLAME, AS SHARED ON * +* CODEBASE64. I HAVE MADE SOME * +* MINOR ALTERATIONS, BUT NOT * +* NEARLY ENOUGH TO CALL IT MY * +* OWN. * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? 
* +* SIZE: * +*- -* +* USAGE: * +* * +*- -* +* ENTRY * +* * +* LOW BYTE OF RETURN ADDRESS * +* HIGH BYTE OF RETURN ADDRESS * +*- -* +* EXIT * +* * +* Y = HIGH BYTE OF PRODUCT * +* X = LOW BYTE OF PRODUCT * +* A = LOW BYTE OF PRODUCT * +*- -* +* NOTE: THERE ARE 2048 MAGIC * +* NUMBERS THAT COULD BE EOR'D * +* TO GENERATE A PSEUDO-RANDOM * +* PATTERN THAT DOESN'T REPEAT * +* UNTIL 65535 ITERATIONS. TOO * +* MANY TO LIST HERE, BUT SOME * +* ARE: $002D, $1979, $1B47, * +* $41BB, $3D91, $B5E9, $FFEB * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +RND16 +* + LDA RNDL + STA :SEED + LDA RNDH + STA :SEED+1 +* + LDA :SEED + BEQ :LOW0 +* +** DO A NORMAL SHIFT +* + ASL :SEED + LDA :SEED+1 + ROL + BCC :NOEOR +:DOEOR ; HIGH BYTE IN A + EOR #>$0369 + STA :SEED+1 + LDA :SEED + EOR #<$0369 + STA :SEED + JMP :EXIT +:LOW0 + LDA :SEED+1 + BEQ :DOEOR + ASL + BEQ :NOEOR + BCS :DOEOR +:NOEOR + STA :SEED+1 +:EXIT LDX :SEED+1 + LDY :SEED + RTS +* +** DATA +* +:SEED DS 2 +* +* +*``````````````````````````````* +* RANDW :: RANDOM WORD * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +* +** THIS WILL BE INCLUDED IN FUTURE UPDATES +* +* diff --git a/disks/disk2_math8_math16/T.MATH16.MUL16 b/disks/disk2_math8_math16/T.MATH16.MUL16 new file mode 100644 index 0000000..77e3c72 --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH16.MUL16 @@ -0,0 +1,164 @@ +* +*``````````````````````````````* +* MUL16 :: MULTIPLY WORDS * +*- -* +* PERFORM MULTIPLICATION USING * +* THE SHIFT AND ADD ALGORITHM * +* * +* THIS ALGORITHM PRODUCES AN * +* UNSIGNED 32-BIT PRODUCT IN * +* HIPROD AND MLIER WITH HIPROD * +* BEING THE HIGH WORD. * +* * +* NOTE: WHILE THIS DOES PASS * +* THE FULL 32BIT PRODUCT BACK * +* VIA X AND Y, BUT THIS WOULD * +* RETURN FALSE RESULTS IF ONE * +* OF THE PARAMETERS HAD BEEN * +* SIGNED. THIS, USUALLY ONLY * +* THE LOW WORD IS USED. * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? 
* +* SIZE: * +*- -* +* USAGE: * +* * +* LDA #>1000 * +* PHA * +* LDA #<1000 ; MCAND * +* PHA * +* LDA #>3 * +* PHA * +* LDA #<3 ; MLIER * +* PHA * +* JSR MUL16 ; = 3000 * +*- -* +* ENTRY * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* LOW BYTE OF MULTIPLIER * +* HIGH BYTE OF MULTIPLIER * +* LOW BYTE OF MULTIPLICAND * +* HIGH BYTE OF MULTIPLICAND * +*- -* +* EXIT * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* LOW BYTE OF PRODUCT * +* HIGH BYTE OF PRODUCT (16BIT) * +* * +* Y = LOW BYTE OF HIGH WORD * +* X = HIGH BYTE OF HIGH WORD * +* A = LOW BYTE OF RET ADDR * +*- -* +* ADAPTED FROM LEVANTHAL AND * +* WINTHROP'S /6502 ASSEMBLY * +* LANGUAGE ROUTINES/. * +* AS SUCH, THIS MAY NOT FALL * +* UNDER THE APACHE 2.0 LICENSE * +* AGREEMENT, SINCE THE BOOK * +* WAS WRITTEN BEFORE THE * +* LICENSE! * +* * +* THAT SAID, LEVENTHAL WROTE * +* THAT THE PURPOSE OF THE BOOK * +* WAS TO COPY THE SOURCE CODE, * +* AS REINVENTING THE WHEEL IS * +* TEDIOUS, AND HE PLACED NO * +* RESTRICTIONS ON ITS USE. * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +MUL16 +* +** GET RETURN ADDRESS +* + PLA + STA :RETADR + PLA + STA :RETADR+1 +* +** GET PARAMETERS +* + PLA + STA :MLIER + PLA + STA :MLIER+1 + PLA + STA :MCAND + PLA + STA :MCAND+1 +* +** PERFORM MULTIPLICATION USING THE SHIFT +** AND ADD ALGORITHM, WHICH PRODUCES AN +** UNSIGNED 32-BIT PRODUCT IN :HIPROD AND +** :MLIER WITH :HIPROD BEING THE HIGH WORD. +* + LDA #0 + STA :HPROD ; ZERO HIGH WORD + STA :HPROD+1 + LDX #17 ; # OF BITS IN MPLIER + ; PLUS 1. EXTRA LOOP IS + ; TO MOVE LAST CARRY INTO + ; THE PRODUCT. + CLC ; CLEAR CARRY FOR 1ST TIME + ; THROUGH LOOP. 
+:MULLP +* +** IF NEXT BIT = 1 THEN +** HIPROD = HIPROD+MCAND +* + ROR :HPROD+1 + ROR :HPROD + ROR :MLIER+1 + ROR :MLIER + BCC :DECCNT ; BR IF NEXT BIT OF + ; MULTIPLIER IS 0 + CLC ; NEXT BIT=1 SO ADD MCAND + ; TO PRODUCT + LDA :MCAND + ADC :HPROD + STA :HPROD + LDA :MCAND+1 + ADC :HPROD+1 + STA :HPROD+1 ; CARRY = OVERFLOW + ; FROM ADD +:DECCNT + DEX + BNE :MULLP ; CONTINUE UNTIL DONE + + LDY :HPROD ; LOW BYTE OF HIGH WORD + LDX :HPROD+1 ; HIGH BYTE OF HIGH WORD +* +** PUSH LOW WORD OF PRODUCT ONTO THE STACK +* + LDA :MLIER+1 + TAX + LDA :MLIER + TAY +* +** RESTORE RETURN ADDRESS +* + LDA :RETADR+1 + PHA + LDA :RETADR + PHA +* + LDA :HPROD ;24BIT + RTS +* +** DATA +* +:MCAND DS 3 +:MLIER DS 3 +:HPROD DS 2 +:RETADR DS 2 diff --git a/disks/disk2_math8_math16/T.MATH16.RND16 b/disks/disk2_math8_math16/T.MATH16.RND16 new file mode 100644 index 0000000..0b7964f --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH16.RND16 @@ -0,0 +1,84 @@ +* +*``````````````````````````````* +* RND16 : 16BIT RANDOM NUMBER * +*- -* +* GENERATE A 16BIT PSEUDO- * +* RANDOM NUMBER AND RETURN IT * +* IN Y,X (LOW, HIGH). * +* * +* ORIGINAL AUTHOR IS WHITE * +* FLAME, AS SHARED ON * +* CODEBASE64. I HAVE MADE SOME * +* MINOR ALTERATIONS, BUT NOT * +* NEARLY ENOUGH TO CALL IT MY * +* OWN. * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? * +* SIZE: * +*- -* +* USAGE: * +* * +*- -* +* ENTRY * +* * +* LOW BYTE OF RETURN ADDRESS * +* HIGH BYTE OF RETURN ADDRESS * +*- -* +* EXIT * +* * +* Y = HIGH BYTE OF PRODUCT * +* X = LOW BYTE OF PRODUCT * +* A = LOW BYTE OF PRODUCT * +*- -* +* NOTE: THERE ARE 2048 MAGIC * +* NUMBERS THAT COULD BE EOR'D * +* TO GENERATE A PSEUDO-RANDOM * +* PATTERN THAT DOESN'T REPEAT * +* UNTIL 65535 ITERATIONS. 
TOO * +* MANY TO LIST HERE, BUT SOME * +* ARE: $002D, $1979, $1B47, * +* $41BB, $3D91, $B5E9, $FFEB * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +RND16 +* + LDA RNDL + STA :SEED + LDA RNDH + STA :SEED+1 +* + LDA :SEED + BEQ :LOW0 +* +** DO A NORMAL SHIFT +* + ASL :SEED + LDA :SEED+1 + ROL + BCC :NOEOR +:DOEOR ; HIGH BYTE IN A + EOR #>$0369 + STA :SEED+1 + LDA :SEED + EOR #<$0369 + STA :SEED + JMP :EXIT +:LOW0 + LDA :SEED+1 + BEQ :DOEOR + ASL + BEQ :NOEOR + BCS :DOEOR +:NOEOR + STA :SEED+1 +:EXIT LDX :SEED+1 + LDY :SEED + RTS +* +** DATA +* +:SEED DS 2 diff --git a/disks/disk2_math8_math16/T.MATH16.SUB16 b/disks/disk2_math8_math16/T.MATH16.SUB16 new file mode 100644 index 0000000..541c191 --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH16.SUB16 @@ -0,0 +1,110 @@ +* +*``````````````````````````````* +* SUB16 :: SUBTRACT WORDS * +*- -* +* SUBTRACT ONE 16BIT NUMBER * +* FROM ANOTHER, RETURNING THE * +* RESULT IN X (HI) AND Y (LOW) * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? * +* SIZE: * +*- -* +* USAGE: * +* * +* LDA #>1000 ; MINU * +* PHA * +* LDA #<1000 * +* PHA * +* LDA #>500 ; SUBT * +* PHA * +* LDA #<500 * +* PHA * +* JSR SUB16 * +*- -* +* ENTRY * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* LOW BYTE OF SUBTRAHEND * +* HIGH BYTE OF SUBTRAHEND * +* LOW BYTE OF MINUEND * +* HIGH BYTE OF MINUEND * +*- -* +* EXIT * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* * +* Y = RESULT LOW BYTE * +* X = RESULT HIGH BYTE * +* A = LOW BYTE OF RET ADDR * +*- -* +* ADAPTED FROM LEVANTHAL AND * +* WINTHROP'S /6502 ASSEMBLY * +* LANGUAGE ROUTINES/. * +* AS SUCH, THIS MAY NOT FALL * +* UNDER THE APACHE 2.0 LICENSE * +* AGREEMENT, SINCE THE BOOK * +* WAS WRITTEN BEFORE THE * +* LICENSE! 
* +* * +* THAT SAID, LEVENTHAL WROTE * +* THAT THE PURPOSE OF THE BOOK * +* WAS TO COPY THE SOURCE CODE, * +* AS REINVENTING THE WHEEL IS * +* TEDIOUS, AND HE PLACED NO * +* RESTRICTIONS ON ITS USE. * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +SUB16 +* +** GET RETURN ADDRESS +* + PLA + STA :RETADR + PLA + STA :RETADR+1 +* +** GET PARAMETERS +* + PLA + STA :SUBT + PLA + STA :SUBT+1 ; SUBTRAHEND + PLA + STA :MINU + PLA + STA :MINU+1 ; MINUEND +* +** MINUEND - SUBTRAHEND +* + LDA :MINU + SEC + SBC :SUBT + TAY ; LOW BYTE + LDA :MINU+1 + SBC :SUBT+1 + TAX ; HIGH BYTE +* +** RESTORE REUTNR ADDRESS +* + LDA :RETADR+1 + PHA + LDA :RETADR + PHA +* + RTS +* +** DATA +* +:RETADR DS 2 +:MINU DS 2 +:SUBT DS 2 diff --git a/disks/disk2_math8_math16/T.MATH8.DIV8 b/disks/disk2_math8_math16/T.MATH8.DIV8 new file mode 100644 index 0000000..0236495 --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH8.DIV8 @@ -0,0 +1,93 @@ +* +*``````````````````````````````* +* DIV8 :: 8BIT DIVISION (8BR) * +*- -* +* DIVIDE ONE 8BIT NUMBER BY * +* ANOTHER TO GET AN 8BIT * +* RESULT. * +* * +* ORIGINAL AUTHOR IS WHITE * +* FLAME, AS SHARED ON * +* CODEBASE64. I HAVE MADE SOME * +* MINOR ALTERATIONS, BUT NOT * +* NEARLY ENOUGH TO CALL IT MY * +* OWN. * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? 
* +* SIZE: * +*- -* +* USAGE: * +* * +* LDA #200 * +* PHA * +* PDA #10 * +* PHA * +* JSR DIV8 * +*- -* +* ENTRY * +* * +* LOW BYTE OF RETURN ADDRESS * +* HIGH BYTE OF RETURN ADDRESS * +* SECOND NUM TO BE MULTIPLIED * +* FIRST NUM TO BE MULTIPLIED * +*- -* +* EXIT * +* * +* Y = HIGH BYTE OF PRODUCT * +* X = LOW BYTE OF PRODUCT * +* A = LOW BYTE OF PRODUCT * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +DIV8 +* +** GET RETURN ADDRESS +* + PLA + STA :RETADR + PLA + STA :RETADR+1 +* +** GET PARAMETERS +* + PLA + STA :DVEND + PLA + STA :DVSOR +* + LDA #$00 + LDX #8 + ASL :DVSOR +:L1 ROL + CMP :DVEND + BCC :L2 + SBC :DVEND +:L2 ROL :DVSOR + DEX + BNE :L1 + STA :REM +* +** RESTORE RETURN ADDRESS +* + LDA :RETADR+1 + PHA + LDA :RETADR + PHA +* +** LOAD QUOTIENT INTO A, REMAINDER INTO X +* + LDX :REM ; REMAINDER TO X +* + LDA :DVSOR + RTS +* +** DATA +* +:REM DS 1 +:DVEND DS 1 +:DVSOR DS 1 +:RETADR DS 2 +* diff --git a/disks/disk2_math8_math16/T.MATH8.LIB b/disks/disk2_math8_math16/T.MATH8.LIB new file mode 100644 index 0000000..7b7f9d7 --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH8.LIB @@ -0,0 +1,400 @@ +* +*``````````````````````````````* +* RND8 :: 8-BIT RANDOM NUMBER * +*- -* +* GENERATES A PSEUDO-RANDOM * +* NUMBER BETWEEN 0..255 (BYTE) * +* AND RETURNS THE VALUE IN A. * +* * +* THIS USES THE GALOIS LINEAR * +* FEEDBACK SHIFT REGISTER * +* ALGORITHM WITH POLYNOMIAL * +* $002D. THE SEQUENCE WILL * +* REPEAT AFTER 65535 CALLS. * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AX-- * +*- -* +* CYCLES: ??? * +* SIZE: * +*- -* +* USAGE: * +* * +* JSR RND8 * +*- -* +* ENTRY: NONE * +*- -* +* EXIT * +* * +* Y = NOT AFFECTED * +* X = CLOBBERED * +* A = RANDOM NUM 0..255 * +*- -* +* NOTE: BASED ON THE PRNG * +* LISTING FOUND IN THE NESDEV * +* WIKI. 
* +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +RND8 +* + LDX #8 + LDA RNDL+0 +:A + ASL ;SHIFT THE REG + ROL RNDL+1 + BCC :B + EOR #$2D ; APPLY XOR FEEDBACK + ; WHENEVER A 1 BIT IS + ; SHIFTED OUT +:B + DEX + BNE :A + STA RNDL+0 + CMP #0 ; RELOAD FLAGS + RTS +* +*``````````````````````````````* +* RANDB :: RANDOM BYTE * +*- -* +* RETURNS A RANDOM BYTE VALUE * +* BETWEEN THE HIGH AND LOW * +* BOUNDS PASSED TO ROUTINE. * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? * +* SIZE: * +*- -* +* USAGE: * +* * +* LDA #1 ; LOW BOUND * +* PHA * +* LDA #100 ; HIGH * +* PHA * +* JSR RANDB * +*- -* +* ENTRY * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +*- -* +* EXIT * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* * +* Y = COUNTER; TRASH * +* X = COUNTER; TRASH * +* A = LOW BYTE OF RET ADDR * +*- -* +* *** NOTE: I DON'T LIKE THAT * +* THIS HAS TO CALL UDIV16, AND * +* THUS CANNOT BE INCLUDED ON * +* ITS OWN. IN FUTURE, HANDLE * +* DIVISION WITHIN ROUTINE, * +* ESPECIALLY SINCE IT IS 8BIT. 
* +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +RANDB +* +** GET RETURN ADDRESS +* + PLA + STA :RETADR + PLA + STA :RETADR+1 +* +** GET PARAMETERS +* + PLA + STA :RHB ; HIGH BOUND + PLA + STA :RLB ; LOW BOUND +* +** FORMULA: F = L + (R-((R\(H-L))*(H-L) +* + LDA :RHB + CLC + ADC #2 ; NO IDEA WHY I NEED + ; THIS BUT I DO + STA :RHB + LDA :RHB ;LOAD IN HIGH RANGE + SEC + SBC :RLB ;SUBTRACT LOW RANGE + STA :RHL ;STORE HIGH - LOW HERE + JSR RND8 ; GET NUM BETWEEN 0..255 + STA :R ; RANDOM 0,,255 +* +** R\(H-L) +* + LDX #0 ; STORE 0 FOR HIGH BYTE + STX :R+1 ; 0 HIGH BYTE + STX :RHL+1 ; 0 HIGH BYTE + LDA :R+1 + PHA + LDA :R + PHA + LDA :RHL+1 ; LOAD (H-L) BACK INTO A + PHA + LDA :RHL + PHA + JSR UDIV16 ; DIV (H-L) BY __R + PLA + STA :QUOT + PLA + STA :QUOT+1 +* +** R-(R\(H-L) +* + LDA :R ; LOAD RND BACK INTO A + SEC + SBC :QUOT ; SUBTRACT (R/(H-L) FROM RND8 + STA :QUOT ; STORE TEMP +* +** (R-(R\(H-L))*(H-L) +* + LDA #0 ; ZERO HIGH BYTE + STA :QUOT+1 ; 0 HIGH BYTE + LDA :RHL+1 ; LOAD (H-L) + PHA + LDA :RHL + PHA + LDA :QUOT+1 + PHA + LDA :QUOT + PHA + JSR MUL16 ; MUL (H-L) * __R - (H-L) + PLA + STA :RESULT + PLA + STA :RESULT+1 +* +** L + EVERYTHING ELSE +* + CLC + ADC :RLB ;ADD LOW BYTE TO ALL ELSE +:FINISH + STA :RESULT ; STORE IN RESULT + LDX :RESULT+1 ; HIGH BYTE; FOR + ; DEBUGGING PURPOSES +* +** RESTORE RETURN ADDRESS +* + LDA :RETADR+1 + PHA + LDA :RETADR + PHA +* + LDA :RESULT ; RETURN # IN A + RTS +* +** DATA +* +:R DS 2 +:RETADR DS 2 +:RESULT DS 2 +:RHB DS 2 +:RLB DS 2 +:RHL DS 2 +:QUOT DS 2 +* +*``````````````````````````````* +* MUL8 :: 8BIT MULTIPLY (16BR) * +*- -* +* MULTIPLY TWO 8BIT NUMBERS TO * +* GET 16BIT RESULT. NUMBERS * +* MUST BE UNSIGNED. * +* * +* ORIGINAL AUTHOR IS WHITE * +* FLAME, AS SHARED ON * +* CODEBASE64. I HAVE MADE SOME * +* MINOR ALTERATIONS, BUT NOT * +* NEARLY ENOUGH TO CALL IT MY * +* OWN. * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? 
* +* SIZE: * +*- -* +* USAGE: * +* * +* LDA #100 * +* PHA * +* PDA #200 * +* PHA * +* JSR MUL8 * +*- -* +* ENTRY * +* * +* LOW BYTE OF RETURN ADDRESS * +* HIGH BYTE OF RETURN ADDRESS * +* SECOND NUM TO BE MULTIPLIED * +* FIRST NUM TO BE MULTIPLIED * +*- -* +* EXIT * +* * +* Y = HIGH BYTE OF PRODUCT * +* X = LOW BYTE OF PRODUCT * +* A = LOW BYTE OF PRODUCT * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +MUL8 +* +** GET RETURN ADDRESS +* + PLA + STA :RETADR + PLA + STA :RETADR+1 +* +** GET PARAMETERS +* + PLA + STA :MUL1 + PLA + STA :MUL2 +* +** INIT +* + LDA #$00 + TAX + STX :MUL1H + BEQ :GOLOOP +* +:DOADD + CLC + ADC :MUL1 + TAY +* + TXA + ADC :MUL1H + TAX + TYA +* +:LP + ASL :MUL1 + ROL :MUL1H +:GOLOOP + LSR :MUL2 + BCS :DOADD + BNE :LP +* +** RESTORE RETURN ADDRESS +* + LDA :RETADR+1 + PHA + LDA :RETADR + PHA +* + RTS +* +** DATA +* +:MUL1 DS 1 +:MUL2 DS 1 +:RETADR DS 2 +:MUL1H DS 1 +* +* +*``````````````````````````````* +* DIV8 :: 8BIT DIVISION (8BR) * +*- -* +* DIVIDE ONE 8BIT NUMBER BY * +* ANOTHER TO GET AN 8BIT * +* RESULT. * +* * +* ORIGINAL AUTHOR IS WHITE * +* FLAME, AS SHARED ON * +* CODEBASE64. I HAVE MADE SOME * +* MINOR ALTERATIONS, BUT NOT * +* NEARLY ENOUGH TO CALL IT MY * +* OWN. * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? 
* +* SIZE: * +*- -* +* USAGE: * +* * +* LDA #200 * +* PHA * +* PDA #10 * +* PHA * +* JSR DIV8 * +*- -* +* ENTRY * +* * +* LOW BYTE OF RETURN ADDRESS * +* HIGH BYTE OF RETURN ADDRESS * +* SECOND NUM TO BE MULTIPLIED * +* FIRST NUM TO BE MULTIPLIED * +*- -* +* EXIT * +* * +* Y = HIGH BYTE OF PRODUCT * +* X = LOW BYTE OF PRODUCT * +* A = LOW BYTE OF PRODUCT * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +DIV8 +* +** GET RETURN ADDRESS +* + PLA + STA :RETADR + PLA + STA :RETADR+1 +* +** GET PARAMETERS +* + PLA + STA :DVEND + PLA + STA :DVSOR +* + LDA #$00 + LDX #8 + ASL :DVSOR +:L1 ROL + CMP :DVEND + BCC :L2 + SBC :DVEND +:L2 ROL :DVSOR + DEX + BNE :L1 + STA :REM +* +** RESTORE RETURN ADDRESS +* + LDA :RETADR+1 + PHA + LDA :RETADR + PHA +* +** LOAD QUOTIENT INTO A, REMAINDER INTO X +* + LDX :REM ; REMAINDER TO X +* + LDA :DVSOR + RTS +* +** DATA +* +:REM DS 1 +:DVEND DS 1 +:DVSOR DS 1 +:RETADR DS 2 +* diff --git a/disks/disk2_math8_math16/T.MATH8.MUL8 b/disks/disk2_math8_math16/T.MATH8.MUL8 new file mode 100644 index 0000000..b4818b1 --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH8.MUL8 @@ -0,0 +1,100 @@ +* +*``````````````````````````````* +* MUL8 :: 8BIT MULTIPLY (16BR) * +*- -* +* MULTIPLY TWO 8BIT NUMBERS TO * +* GET 16BIT RESULT. NUMBERS * +* MUST BE UNSIGNED. * +* * +* ORIGINAL AUTHOR IS WHITE * +* FLAME, AS SHARED ON * +* CODEBASE64. I HAVE MADE SOME * +* MINOR ALTERATIONS, BUT NOT * +* NEARLY ENOUGH TO CALL IT MY * +* OWN. * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? 
* +* SIZE: * +*- -* +* USAGE: * +* * +* LDA #100 * +* PHA * +* PDA #200 * +* PHA * +* JSR MUL8 * +*- -* +* ENTRY * +* * +* LOW BYTE OF RETURN ADDRESS * +* HIGH BYTE OF RETURN ADDRESS * +* SECOND NUM TO BE MULTIPLIED * +* FIRST NUM TO BE MULTIPLIED * +*- -* +* EXIT * +* * +* Y = HIGH BYTE OF PRODUCT * +* X = LOW BYTE OF PRODUCT * +* A = LOW BYTE OF PRODUCT * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +MUL8 +* +** GET RETURN ADDRESS +* + PLA + STA :RETADR + PLA + STA :RETADR+1 +* +** GET PARAMETERS +* + PLA + STA :MUL1 + PLA + STA :MUL2 +* +** INIT +* + LDA #$00 + TAX + STX :MUL1H + BEQ :GOLOOP +* +:DOADD + CLC + ADC :MUL1 + TAY +* + TXA + ADC :MUL1H + TAX + TYA +* +:LP + ASL :MUL1 + ROL :MUL1H +:GOLOOP + LSR :MUL2 + BCS :DOADD + BNE :LP +* +** RESTORE RETURN ADDRESS +* + LDA :RETADR+1 + PHA + LDA :RETADR + PHA +* + RTS +* +** DATA +* +:MUL1 DS 1 +:MUL2 DS 1 +:RETADR DS 2 +:MUL1H DS 1 diff --git a/disks/disk2_math8_math16/T.MATH8.RANDB b/disks/disk2_math8_math16/T.MATH8.RANDB new file mode 100644 index 0000000..19cd8ea --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH8.RANDB @@ -0,0 +1,151 @@ +* +*``````````````````````````````* +* RANDB :: RANDOM BYTE * +*- -* +* RETURNS A RANDOM BYTE VALUE * +* BETWEEN THE HIGH AND LOW * +* BOUNDS PASSED TO ROUTINE. * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AXYM * +*- -* +* CYCLES: ??? * +* SIZE: * +*- -* +* USAGE: * +* * +* LDA #1 ; LOW BOUND * +* PHA * +* LDA #100 ; HIGH * +* PHA * +* JSR RANDB * +*- -* +* ENTRY * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +*- -* +* EXIT * +* * +* TOP OF STACK * +* * +* LOW BYTE OF RETURN ADDRESS * +* HI BYTE OF RETURN ADDRESS * +* * +* Y = COUNTER; TRASH * +* X = COUNTER; TRASH * +* A = LOW BYTE OF RET ADDR * +*- -* +* *** NOTE: I DON'T LIKE THAT * +* THIS HAS TO CALL UDIV16, AND * +* THUS CANNOT BE INCLUDED ON * +* ITS OWN. IN FUTURE, HANDLE * +* DIVISION WITHIN ROUTINE, * +* ESPECIALLY SINCE IT IS 8BIT. 
* +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +RANDB +* +** GET RETURN ADDRESS +* + PLA + STA :RETADR + PLA + STA :RETADR+1 +* +** GET PARAMETERS +* + PLA + STA :RHB ; HIGH BOUND + PLA + STA :RLB ; LOW BOUND +* +** FORMULA: F = L + (R-((R\(H-L))*(H-L) +* + LDA :RHB + CLC + ADC #2 ; NO IDEA WHY I NEED + ; THIS BUT I DO + STA :RHB + LDA :RHB ;LOAD IN HIGH RANGE + SEC + SBC :RLB ;SUBTRACT LOW RANGE + STA :RHL ;STORE HIGH - LOW HERE + JSR RND8 ; GET NUM BETWEEN 0..255 + STA :R ; RANDOM 0,,255 +* +** R\(H-L) +* + LDX #0 ; STORE 0 FOR HIGH BYTE + STX :R+1 ; 0 HIGH BYTE + STX :RHL+1 ; 0 HIGH BYTE + LDA :R+1 + PHA + LDA :R + PHA + LDA :RHL+1 ; LOAD (H-L) BACK INTO A + PHA + LDA :RHL + PHA + JSR UDIV16 ; DIV (H-L) BY __R + PLA + STA :QUOT + PLA + STA :QUOT+1 +* +** R-(R\(H-L) +* + LDA :R ; LOAD RND BACK INTO A + SEC + SBC :QUOT ; SUBTRACT (R/(H-L) FROM RND8 + STA :QUOT ; STORE TEMP +* +** (R-(R\(H-L))*(H-L) +* + LDA #0 ; ZERO HIGH BYTE + STA :QUOT+1 ; 0 HIGH BYTE + LDA :RHL+1 ; LOAD (H-L) + PHA + LDA :RHL + PHA + LDA :QUOT+1 + PHA + LDA :QUOT + PHA + JSR MUL16 ; MUL (H-L) * __R - (H-L) + PLA + STA :RESULT + PLA + STA :RESULT+1 +* +** L + EVERYTHING ELSE +* + CLC + ADC :RLB ;ADD LOW BYTE TO ALL ELSE +:FINISH + STA :RESULT ; STORE IN RESULT + LDX :RESULT+1 ; HIGH BYTE; FOR + ; DEBUGGING PURPOSES +* +** RESTORE RETURN ADDRESS +* + LDA :RETADR+1 + PHA + LDA :RETADR + PHA +* + LDA :RESULT ; RETURN # IN A + RTS +* +** DATA +* +:R DS 2 +:RETADR DS 2 +:RESULT DS 2 +:RHB DS 2 +:RLB DS 2 +:RHL DS 2 +:QUOT DS 2 diff --git a/disks/disk2_math8_math16/T.MATH8.RND8 b/disks/disk2_math8_math16/T.MATH8.RND8 new file mode 100644 index 0000000..6df4b17 --- /dev/null +++ b/disks/disk2_math8_math16/T.MATH8.RND8 @@ -0,0 +1,55 @@ +* +*``````````````````````````````* +* RND8 :: 8-BIT RANDOM NUMBER * +*- -* +* GENERATES A PSEUDO-RANDOM * +* NUMBER BETWEEN 0..255 (BYTE) * +* AND RETURNS THE VALUE IN A. 
* +* * +* THIS USES THE GALOIS LINEAR * +* FEEDBACK SHIFT REGISTER * +* ALGORITHM WITH POLYNOMIAL * +* $002D. THE SEQUENCE WILL * +* REPEAT AFTER 65535 CALLS. * +*- -* +* CLOBBERS: * +* * +* FLAGS: ????---- REG: AX-- * +*- -* +* CYCLES: ??? * +* SIZE: * +*- -* +* USAGE: * +* * +* JSR RND8 * +*- -* +* ENTRY: NONE * +*- -* +* EXIT * +* * +* Y = NOT AFFECTED * +* X = CLOBBERED * +* A = RANDOM NUM 0..255 * +*- -* +* NOTE: BASED ON THE PRNG * +* LISTING FOUND IN THE NESDEV * +* WIKI. * +*,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,* +* +RND8 +* + LDX #8 + LDA RNDL+0 +:A + ASL ;SHIFT THE REG + ROL RNDL+1 + BCC :B + EOR #$2D ; APPLY XOR FEEDBACK + ; WHENEVER A 1 BIT IS + ; SHIFTED OUT +:B + DEX + BNE :A + STA RNDL+0 + CMP #0 ; RELOAD FLAGS + RTS