Merge pull request #22 from specke/master

incorporated improvements by uniabis
Emmanuel Marty authored 2019-08-01 01:34:46 +02:00, committed by GitHub
2 changed files with 276 additions and 273 deletions


@@ -1,145 +1,147 @@
;
;  Speed-optimized LZSA1 decompressor by spke (v.1 03-25/04/2019, 109 bytes);
;  with improvements by uniabis (30/07/2019, -1 byte, +3% speed).
;
;  The data must be compressed using the command line compressor by Emmanuel Marty
;  The compression is done as follows:
;
;  lzsa.exe -f1 -r <sourcefile> <outfile>
;
;  where option -r asks for the generation of raw (frame-less) data.
;
;  The decompression is done in the standard way:
;
;  ld hl,FirstByteOfCompressedData
;  ld de,FirstByteOfMemoryForDecompressedData
;  call DecompressLZSA1
;
;  Backward compression is also supported; you can compress files backward using:
;
;  lzsa.exe -f1 -r -b <sourcefile> <outfile>
;
;  and decompress the resulting files using:
;
;  ld hl,LastByteOfCompressedData
;  ld de,LastByteOfMemoryForDecompressedData
;  call DecompressLZSA1
;
;  (do not forget to uncomment the BACKWARD_DECOMPRESS option in the decompressor).
;
;  Of course, LZSA compression algorithms are (c) 2019 Emmanuel Marty,
;  see https://github.com/emmanuel-marty/lzsa for more information
;
;  Drop me an email if you have any comments/ideas/suggestions: zxintrospec@gmail.com
;
;  This software is provided 'as-is', without any express or implied
;  warranty. In no event will the authors be held liable for any damages
;  arising from the use of this software.
;
;  Permission is granted to anyone to use this software for any purpose,
;  including commercial applications, and to alter it and redistribute it
;  freely, subject to the following restrictions:
;
;  1. The origin of this software must not be misrepresented; you must not
;     claim that you wrote the original software. If you use this software
;     in a product, an acknowledgment in the product documentation would be
;     appreciated but is not required.
;  2. Altered source versions must be plainly marked as such, and must not be
;     misrepresented as being the original software.
;  3. This notice may not be removed or altered from any source distribution.

;	DEFINE	BACKWARD_DECOMPRESS

	IFNDEF	BACKWARD_DECOMPRESS

	MACRO NEXT_HL
		inc hl
	ENDM

	MACRO ADD_OFFSET
		ex de,hl : add hl,de
	ENDM

	MACRO BLOCKCOPY
		ldir
	ENDM

	ELSE

	MACRO NEXT_HL
		dec hl
	ENDM

	MACRO ADD_OFFSET
		ex de,hl : ld a,e : sub l : ld l,a
		ld a,d : sbc h : ld h,a			; 4*4+3*4 = 28t / 7 bytes
	ENDM

	MACRO BLOCKCOPY
		lddr
	ENDM

	ENDIF

@DecompressLZSA1:
		ld b,0 : jr ReadToken

NoLiterals:	xor (hl) : NEXT_HL
		push de : ld e,(hl) : NEXT_HL : jp m,LongOffset

		; short matches have length 0+3..14+3
ShortOffset:	ld d,#FF : add 3 : cp 15+3 : jr nc,LongerMatch

		; placed here this saves a JP per iteration
CopyMatch:	ld c,a
.UseC		ex (sp),hl				; BC = len, DE = offset, HL = dest, SP ->[dest,src]
		ADD_OFFSET				; BC = len, DE = dest, HL = dest-offset, SP->[src]
		BLOCKCOPY : pop hl			; BC = 0, DE = dest, HL = src

ReadToken:	; first a byte token "O|LLL|MMMM" is read from the stream,
		; where LLL is the number of literals and MMMM is
		; a length of the match that follows after the literals
		ld a,(hl) : and #70 : jr z,NoLiterals

		cp #70 : jr z,MoreLiterals		; LLL=7 means 7+ literals...
		rrca : rrca : rrca : rrca		; LLL<7 means 0..6 literals...

		ld c,a : ld a,(hl) : NEXT_HL
		BLOCKCOPY

		; next we read the first byte of the offset
		push de : ld e,(hl) : NEXT_HL
		; the top bit of token is set if the offset contains two bytes
		and #8F : jp p,ShortOffset

LongOffset:	; read second byte of the offset
		ld d,(hl) : NEXT_HL
		add -128+3 : cp 15+3 : jp c,CopyMatch

		; MMMM=15 indicates a multi-byte match length
LongerMatch:	add (hl) : NEXT_HL : jr nc,CopyMatch

		; the codes are designed to overflow;
		; the overflow value 1 means read 1 extra byte
		; and overflow value 0 means read 2 extra bytes
.code1		ld b,a : ld c,(hl) : NEXT_HL : jr nz,CopyMatch.UseC
.code0		ld b,(hl) : NEXT_HL

		; the two-byte match length equal to zero
		; designates the end-of-data marker
		ld a,b : or c : jr nz,CopyMatch.UseC
		pop de : ret

MoreLiterals:	; there are three possible situations here
		xor (hl) : NEXT_HL : exa
		ld a,7 : add (hl) : NEXT_HL : jr c,ManyLiterals

CopyLiterals:	ld c,a
.UseC		BLOCKCOPY

		push de : ld e,(hl) : NEXT_HL
		exa : jp p,ShortOffset : jr LongOffset

ManyLiterals:
.code1		ld b,a : ld c,(hl) : NEXT_HL : jr nz,CopyLiterals.UseC
.code0		ld b,(hl) : NEXT_HL : jr CopyLiterals.UseC
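Calling the routine in the forward (default) configuration amounts to the three instructions quoted in its header. Below is a minimal sketch of a complete caller, assuming sjasmplus syntax (the same dialect as the decompressor); the org address, the Start/OutputBuffer/CompressedData labels and the file names are hypothetical and only illustrate the documented calling convention.

		org #8000
Start:		ld hl,CompressedData		; HL -> first byte of the compressed block
		ld de,OutputBuffer		; DE -> first byte of the destination buffer
		call DecompressLZSA1		; decompress the raw (-f1 -r) block
		ret				; on return DE should point just past the output, HL just past the input

OutputBuffer	equ #C000			; hypothetical destination address

CompressedData:	incbin "data.lzsa1"		; produced with: lzsa.exe -f1 -r data.bin data.lzsa1

		include "unlzsa1_fast.asm"	; hypothetical file name for the decompressor above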


@@ -1,128 +1,129 @@
;
;  Size-optimized LZSA1 decompressor by spke (v.1 23/04/2019, 68 bytes);
;  with improvements by uniabis (30/07/2019, -1 byte, +3% speed).
;
;  The data must be compressed using the command line compressor by Emmanuel Marty
;  The compression is done as follows:
;
;  lzsa.exe -f1 -r <sourcefile> <outfile>
;
;  where option -r asks for the generation of raw (frame-less) data.
;
;  The decompression is done in the standard way:
;
;  ld hl,FirstByteOfCompressedData
;  ld de,FirstByteOfMemoryForDecompressedData
;  call DecompressLZSA1
;
;  Backward compression is also supported; you can compress files backward using:
;
;  lzsa.exe -f1 -r -b <sourcefile> <outfile>
;
;  and decompress the resulting files using:
;
;  ld hl,LastByteOfCompressedData
;  ld de,LastByteOfMemoryForDecompressedData
;  call DecompressLZSA1
;
;  (do not forget to uncomment the BACKWARD_DECOMPRESS option in the decompressor).
;
;  Of course, LZSA compression algorithms are (c) 2019 Emmanuel Marty,
;  see https://github.com/emmanuel-marty/lzsa for more information
;
;  Drop me an email if you have any comments/ideas/suggestions: zxintrospec@gmail.com
;
;  This software is provided 'as-is', without any express or implied
;  warranty. In no event will the authors be held liable for any damages
;  arising from the use of this software.
;
;  Permission is granted to anyone to use this software for any purpose,
;  including commercial applications, and to alter it and redistribute it
;  freely, subject to the following restrictions:
;
;  1. The origin of this software must not be misrepresented; you must not
;     claim that you wrote the original software. If you use this software
;     in a product, an acknowledgment in the product documentation would be
;     appreciated but is not required.
;  2. Altered source versions must be plainly marked as such, and must not be
;     misrepresented as being the original software.
;  3. This notice may not be removed or altered from any source distribution.

;	DEFINE	BACKWARD_DECOMPRESS

	IFNDEF	BACKWARD_DECOMPRESS

	MACRO NEXT_HL
		inc hl
	ENDM

	MACRO ADD_OFFSET
		ex de,hl : add hl,de
	ENDM

	MACRO BLOCKCOPY
		ldir
	ENDM

	ELSE

	MACRO NEXT_HL
		dec hl
	ENDM

	MACRO ADD_OFFSET
		push hl : or a : sbc hl,de : pop de	; 11+4+15+10 = 40t / 5 bytes
	ENDM

	MACRO BLOCKCOPY
		lddr
	ENDM

	ENDIF

@DecompressLZSA1:
		ld b,0

		; first a byte token "O|LLL|MMMM" is read from the stream,
		; where LLL is the number of literals and MMMM is
		; a length of the match that follows after the literals
ReadToken:	ld a,(hl) : exa : ld a,(hl) : NEXT_HL
		and #70 : jr z,NoLiterals

		rrca : rrca : rrca : rrca		; LLL<7 means 0..6 literals...
		cp #07 : call z,ReadLongBA		; LLL=7 means 7+ literals...

		ld c,a : BLOCKCOPY

		; next we read the low byte of the -offset
NoLiterals:	push de : ld e,(hl) : NEXT_HL : ld d,#FF
		; the top bit of token is set if
		; the offset contains the high byte as well
		exa : or a : jp p,ShortOffset

LongOffset:	ld d,(hl) : NEXT_HL

		; last but not least, the match length is read
ShortOffset:	and #0F : add 3				; MMMM<15 means match lengths 0+3..14+3
		cp 15+3 : call z,ReadLongBA		; MMMM=15 means lengths 14+3+
		ld c,a

		ex (sp),hl				; BC = len, DE = -offset, HL = dest, SP -> [src]
		ADD_OFFSET				; BC = len, DE = dest, HL = dest+(-offset), SP -> [src]
		BLOCKCOPY				; BC = 0, DE = dest
		pop hl : jr ReadToken			; HL = src

		; a standard routine to read extended codes
		; into registers B (higher byte) and A (lower byte).
ReadLongBA:	add (hl) : NEXT_HL : ret nc

		; the codes are designed to overflow;
		; the overflow value 1 means read 1 extra byte
		; and overflow value 0 means read 2 extra bytes
.code1:		ld b,a : ld a,(hl) : NEXT_HL : ret nz
.code0:		ld c,a : ld b,(hl) : NEXT_HL

		; the two-byte match length equal to zero
		; designates the end-of-data marker
		or b : ld a,c : ret nz
		pop de : pop de : ret
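The backward variant mirrors this from the opposite ends of both buffers. A minimal sketch follows, again in sjasmplus syntax with hypothetical labels, addresses and file names; BACKWARD_DECOMPRESS has to be active, either by uncommenting the DEFINE in the decompressor source (as its header says) or by defining it before the include, as shown here.

		DEFINE BACKWARD_DECOMPRESS	; selects NEXT_HL = dec hl, BLOCKCOPY = lddr in the include below

		org #8000
Start:		ld hl,CompressedDataEnd		; HL -> LAST byte of the backward-compressed block
		ld de,OutputBufferEnd		; DE -> LAST byte of the destination area
		call DecompressLZSA1		; decompression proceeds downwards in memory
		ret

CompressedData:	incbin "data_b.lzsa1"		; produced with: lzsa.exe -f1 -r -b data.bin data_b.lzsa1
CompressedDataEnd equ $-1			; address of the last compressed byte

OutputBufferEnd	equ #FFFF			; hypothetical last byte of the output area

		include "unlzsa1_small.asm"	; hypothetical file name for the decompressor above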