mirror of https://github.com/emmanuel-marty/lzsa.git
synced 2025-02-18 00:30:44 +00:00
Fix small LZSA2 token reduction inefficiency

parent 29c6f3b2a3
commit 33327201f7
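The change is mechanical across four hunks: each one sets nDidReduce where a token reduction was being applied without being reported, so the caller keeps re-running the reduction pass until nothing shrinks any further. The caller-side pattern looks roughly like this (a minimal sketch; only lzsa_optimize_command_count_v2, pCompressor and pInWindow appear in this diff, the trailing arguments and the pass cap are assumptions):

/* Sketch of the fixed-point driver assumed to consume nDidReduce.
 * Everything beyond the function name is illustrative, not copied
 * from the lzsa source. */
int nDidReduce, nPasses = 0;
do {
   nDidReduce = lzsa_optimize_command_count_v2(pCompressor, pInWindow,
                                               nStartOffset, nEndOffset);
   nPasses++;
} while (nDidReduce && nPasses < 20);

A missed nDidReduce means the loop can exit one pass too early, leaving reductions on the table; that is the inefficiency the commit title refers to.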
@@ -417,9 +417,11 @@ static int lzsa_optimize_command_count_v2(lzsa_compressor *pCompressor, const un
           * matching large regions of identical bytes for instance, where there are too many offsets to be considered by the parser, and when not compressing to favor the
           * ratio (the forward arrivals parser already has this covered). */
          if (i >= nRepMatchOffset &&
             (i - nRepMatchOffset + pMatch->length) <= (nEndOffset - LAST_LITERALS) &&
-            !memcmp(pInWindow + i - nRepMatchOffset, pInWindow + i - pMatch->offset, pMatch->length))
+            !memcmp(pInWindow + i - nRepMatchOffset, pInWindow + i - pMatch->offset, pMatch->length)) {
             pMatch->offset = nRepMatchOffset;
+            nDidReduce = 1;
+         }
       }

       if (pBestMatch[nNextIndex].offset && pMatch->offset != pBestMatch[nNextIndex].offset && nRepMatchOffset != pBestMatch[nNextIndex].offset) {
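The guard in this hunk is the whole trick: if the bytes at the repeat offset are identical to the bytes at the match's own offset over the full match length, the offset can be swapped for nRepMatchOffset and the match re-encoded as a cheaper rep-match carrying the same data. A self-contained restatement of that check (the helper name is invented for illustration; the conditions are taken verbatim from the hunk, the surrounding loop is elided):

#include <string.h>

/* Hypothetical helper restating the hunk's guard: the match at position i
 * may switch to the repeat offset when its source bytes are identical and
 * the comparison stays inside the window's usable range. */
static int can_take_rep_offset(const unsigned char *pInWindow, int i,
                               int nRepMatchOffset, int nMatchOffset,
                               int nMatchLen, int nEndOffset, int nLastLiterals) {
   return i >= nRepMatchOffset &&
          (i - nRepMatchOffset + nMatchLen) <= (nEndOffset - nLastLiterals) &&
          !memcmp(pInWindow + i - nRepMatchOffset,
                  pInWindow + i - nMatchOffset, nMatchLen);
}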
@@ -431,6 +433,7 @@ static int lzsa_optimize_command_count_v2(lzsa_compressor *pCompressor, const un
          if (nMaxLen >= pMatch->length) {
             /* Replace */
             pMatch->offset = pBestMatch[nNextIndex].offset;
+            nDidReduce = 1;
          }
          else if (nMaxLen >= 2 && pMatch->offset != nRepMatchOffset) {
             int nPartialSizeBefore, nPartialSizeAfter;
@@ -452,6 +455,7 @@ static int lzsa_optimize_command_count_v2(lzsa_compressor *pCompressor, const un
                   pBestMatch[i + j].length = 0;
                }
                pMatch->length = nMaxLen;
+               nDidReduce = 1;
             }
          }
       }
@@ -502,6 +506,7 @@ static int lzsa_optimize_command_count_v2(lzsa_compressor *pCompressor, const un
             pMatch->length += pBestMatch[i + nMatchLen].length;
             pBestMatch[i + nMatchLen].offset = 0;
             pBestMatch[i + nMatchLen].length = -1;
+            nDidReduce = 1;
             continue;
          }

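This last hunk sits in the match-merging step visible in its context lines: the follower command's length is folded into the current match and the follower is marked consumed (offset 0, length -1), and with this commit the merge is also reported as a reduction. A toy restatement of that bookkeeping (struct and function names are invented; the checks that the two matches are actually mergeable are elided):

/* Toy illustration of the merge bookkeeping above; not the lzsa API. */
typedef struct { int length; int offset; } toy_match;

static int merge_follower(toy_match *pMatch, toy_match *pFollower) {
   pMatch->length += pFollower->length;   /* absorb the follower's bytes */
   pFollower->offset = 0;
   pFollower->length = -1;                /* sentinel: command consumed */
   return 1;                              /* now counted as a reduction */
}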