Add extra safety checks to LZSA2 token reducer

Emmanuel Marty 2019-09-22 20:41:09 +02:00 committed by GitHub
parent 1869d85c1f
commit 81e15d10f0

@@ -667,10 +667,14 @@ static int lzsa_optimize_command_count_v2(lzsa_compressor *pCompressor, const un
/* Check if we can change the current match's offset to be the same as the previous match's offset, and get an extra repmatch. This will occur when
* matching large regions of identical bytes for instance, where there are too many offsets to be considered by the parser, and when not compressing to favor the
* ratio (the forward arrivals parser already has this covered). */
-if (i >= nRepMatchOffset && !memcmp(pInWindow + i - nRepMatchOffset, pInWindow + i - pMatch->offset, pMatch->length))
+if (i >= nRepMatchOffset &&
+   (i - nRepMatchOffset + pMatch->length) <= (nEndOffset - LAST_LITERALS) &&
+   !memcmp(pInWindow + i - nRepMatchOffset, pInWindow + i - pMatch->offset, pMatch->length))
pMatch->offset = nRepMatchOffset;
}
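
For reference, a minimal standalone sketch of what the guarded substitution does (hypothetical names, not the lzsa API; LAST_LITERALS is a placeholder here). The new middle condition bounds the byte range compared at the repeat-offset position so it ends before nEndOffset - LAST_LITERALS, i.e. memcmp is only attempted when the compared region stays inside the usable part of the block:

#include <string.h>

#define LAST_LITERALS 0  /* placeholder value; the real constant comes from the compressor's headers */

/* Retarget a match to the previous (repeat) offset when the same bytes are
 * reachable there, but only after checking that the compared region stays
 * inside the usable part of the block. */
static void try_rep_offset(const unsigned char *pInWindow, int i, int nEndOffset,
                           int nRepMatchOffset, int nMatchLength, int *pMatchOffset) {
   if (i >= nRepMatchOffset &&
       (i - nRepMatchOffset + nMatchLength) <= (nEndOffset - LAST_LITERALS) &&
       !memcmp(pInWindow + i - nRepMatchOffset, pInWindow + i - *pMatchOffset, nMatchLength)) {
      /* Same data at the repeat offset: reuse it so the encoder can emit a
       * cheaper repmatch without changing the decoded output. */
      *pMatchOffset = nRepMatchOffset;
   }
}
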
if (pMatch->length < 9 /* Don't waste time considering large matches, they will always win over literals */) {
/* Calculate this command's current cost (excluding 'nNumLiterals' bytes) */
int nCurCommandSize = 8 /* token */ + lzsa_get_literals_varlen_size_v2(nNumLiterals) + lzsa_get_match_varlen_size_v2(pMatch->length - MIN_MATCH_SIZE_V2);
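
Only matches shorter than 9 bytes are re-costed because, per the comment above, longer matches always beat emitting the same bytes as literals. Below is a hedged sketch of the kind of comparison the reducer makes; the names are hypothetical and the varlen sizes are passed in precomputed, since the exact lzsa_get_*_varlen_size_v2() encodings are not reproduced here:

/* All sizes are in bits. This illustrates the trade-off only, not the
 * reducer's actual control flow. */
static int keep_short_match(int nCurCommandSize,       /* token + literals varlen + match varlen */
                            int nMatchLength,          /* bytes covered by the short match */
                            int nLiteralsVarlenGrowth) /* extra varlen bits if those bytes become literals */
{
   /* Cost of dropping the match and emitting its bytes as plain literals. */
   int nReducedCommandSize = (nMatchLength << 3) + nLiteralsVarlenGrowth;

   /* At 9+ bytes the literal side already costs 72+ bits, which is why the
    * reducer does not bother evaluating larger matches. */
   return nCurCommandSize <= nReducedCommandSize;
}
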
@@ -703,6 +707,7 @@ static int lzsa_optimize_command_count_v2(lzsa_compressor *pCompressor, const un
}
}
}
}
if ((i + pMatch->length) < nEndOffset && pMatch->length >= LCP_MAX &&
pMatch->offset && pMatch->offset <= 32 && pBestMatch[i + pMatch->length].offset == pMatch->offset && (pMatch->length % pMatch->offset) == 0 &&