GSLA test file with dictionary support is decompressing correctly

Author: JASON-6700K\jandersen
Date: 2020-07-20 11:37:54 -04:00
parent 8dd7fbd28f
commit 0d28c20f03
2 changed files with 23 additions and 14 deletions


@@ -417,7 +417,7 @@ void GSLAFile::SaveToFile(const char* pFilenamePath)
 // I don't want random data in the bank gaps, so initialize this
 // buffer with zero
-//memset(pWorkBuffer, 0, m_frameSize * 2);
+//memset(pWorkBuffer, 0xEA, m_frameSize * 2);
 int frameSize = LZBA_Compress(pWorkBuffer, m_pC1PixelMaps[ frameIndex ],
 m_frameSize, pWorkBuffer-bytes.size(),
@@ -438,7 +438,7 @@ void GSLAFile::SaveToFile(const char* pFilenamePath)
 }
 // Add the RING Frame
-//memset(pWorkBuffer, 0, m_frameSize * 2);
+//memset(pWorkBuffer, 0xAB, m_frameSize * 2);
 printf("Save Ring Frame\n");


@@ -416,6 +416,10 @@ DataString LongestMatch(const DataString& data, const DataString& dictionary, in
 }
 }
+// Not getting better than this
+if (result.size == data.size)
+return result;
 // This will keep us from finding matches that we can't use
 int dictionarySize = cursorPosition;
@@ -458,13 +462,19 @@ DataString LongestMatch(const DataString& data, const DataString& dictionary, in
 }
 }
+// Not getting better than this
+if (result.size == data.size)
+return result;
+#if 1
 // Look for matches beyond the cursor
 dictionarySize = dictionary.size;
 if ((dictionarySize-cursorPosition) > candidate.size)
 {
 // Check the dictionary for a match, brute force
-for (int dictionaryIndex = cursorPosition; dictionaryIndex <= (dictionarySize-candidate.size); ++dictionaryIndex)
+for (int dictionaryIndex = cursorPosition+3; dictionaryIndex <= (dictionarySize-candidate.size); ++dictionaryIndex)
 {
 int sizeAvailable = dictionarySize - dictionaryIndex;
@@ -493,7 +503,7 @@ DataString LongestMatch(const DataString& data, const DataString& dictionary, in
 }
 }
 }
+#endif
 }
 return result;
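
The two LongestMatch hunks above add an early-out: once the best match found so far already covers the entire input run, no later dictionary position can do better, so the brute-force scan stops. Below is a minimal stand-alone sketch of that pattern, assuming a simple byte-span type; the names are illustrative only, not the project's actual DataString/LongestMatch code.

    #include <cstdio>

    // Illustrative byte span, loosely in the spirit of DataString.
    struct Span { const unsigned char* pData; int size; };

    // Brute-force longest-match scan with the early-out added above:
    // a match as long as `data` itself cannot be beaten, so stop searching.
    static Span LongestMatchSketch(Span data, Span dictionary)
    {
        Span result{ nullptr, 0 };
        for (int start = 0; start < dictionary.size; ++start)
        {
            int maxLen = dictionary.size - start;
            if (maxLen > data.size) maxLen = data.size;

            int len = 0;
            while (len < maxLen && dictionary.pData[start + len] == data.pData[len])
                ++len;

            if (len > result.size)
            {
                result.pData = dictionary.pData + start;
                result.size = len;
            }

            // Not getting better than this
            if (result.size == data.size)
                return result;
        }
        return result;
    }

    int main()
    {
        unsigned char dict[] = { 'a','b','r','a','c','a','d','a','b','r','a' };
        unsigned char pattern[] = { 'c','a','d' };
        Span match = LongestMatchSketch(Span{ pattern, 3 }, Span{ dict, 11 });
        printf("match length %d at offset %d\n", match.size, (int)(match.pData - dict));
        return 0;
    }
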
@@ -855,7 +865,9 @@ int LZBA_Compress(unsigned char* pDest, unsigned char* pSource, int sourceSize,
 }
 else
 {
-if (gapCount > 3)
+// if there's a small amount of matching data, let's include
+// it in the clump (try and reduce opcode emissions)
+if (gapCount >= 3)
 break;
 gapCount++;
 }
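
The gapCount tweak above is about clumping: while scanning for a run of bytes that differ from the previous frame, a few unchanged bytes in the middle are folded into the run rather than ending it, so a tiny match does not cost an extra opcode. A rough sketch of that idea follows; the loop shape and the ChangedRunLength name are assumptions for illustration, not the actual LZBA_Compress scan.

    #include <cstdio>

    // Length of the "clump" of changed bytes starting at offset 0, tolerating
    // up to three consecutive unchanged bytes before the run is cut off.
    static int ChangedRunLength(const unsigned char* pNew, const unsigned char* pOld, int size)
    {
        int gapCount = 0;
        int length = 0;
        for (int i = 0; i < size; ++i)
        {
            if (pNew[i] != pOld[i])
            {
                gapCount = 0;      // a real change resets the tolerance
                length = i + 1;    // the clump extends through this byte
            }
            else
            {
                if (gapCount >= 3) // too many matching bytes in a row: stop
                    break;
                gapCount++;
            }
        }
        return length;
    }

    int main()
    {
        unsigned char oldFrame[] = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
        unsigned char newFrame[] = { 9, 2, 3, 4, 0, 6, 7, 8, 9, 10 };
        // changes at offsets 0 and 4, separated by three unchanged bytes:
        // both land in one clump of length 5 instead of two separate emits
        printf("clump length: %d\n", ChangedRunLength(newFrame, oldFrame, 10));
        return 0;
    }
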
@@ -871,6 +883,7 @@ int LZBA_Compress(unsigned char* pDest, unsigned char* pSource, int sourceSize,
 sourceData.pData = &pSource[ cursorPosition ];
 sourceData.size = tempCursorPosition - cursorPosition;
+#if 0 // This Works
 //-------------------------- Dump, so skip dump only
 space_left_in_bank = CheckEmitSourceSkip(2+sourceData.size, pDest, space_left_in_bank);
@@ -878,8 +891,8 @@ int LZBA_Compress(unsigned char* pDest, unsigned char* pSource, int sourceSize,
 pDest += EmitLiteral(pDest, sourceData);
 lastEmittedCursorPosition = cursorPosition;
+#endif
-#if 0
 while (sourceData.size > 0)
 {
 candidateData = LongestMatch(sourceData, dictionaryData, cursorPosition);
@@ -908,7 +921,6 @@ int LZBA_Compress(unsigned char* pDest, unsigned char* pSource, int sourceSize,
 pDest += (int)EmitReference(pDest, (int)(candidateData.pData - dictionaryData.pData), candidateData);
 bLastEmitIsLiteral = false;
-space_left_in_bank = (int)0x10000 - (int)((pDest - pDataStart)&0xFFFF);
 }
 else if (bLastEmitIsLiteral)
 {
@@ -919,18 +931,18 @@ int LZBA_Compress(unsigned char* pDest, unsigned char* pSource, int sourceSize,
 int space = CheckEmitSourceSkip(candidateData.size, pDest, space_left_in_bank);
-if (space != space_left_in_bank)
+if (space != (space_left_in_bank - candidateData.size))
 {
+space_left_in_bank = space-2;
 // Emit a new literal
 pLastLiteralDest = pDest;
+bLastEmitIsLiteral = true;
 pDest += EmitLiteral(pDest, candidateData);
-space_left_in_bank = (int)0x10000 - (int)((pDest - pDataStart)&0xFFFF);
 }
 else
 {
 // Concatenate this literal onto the previous literal
+space_left_in_bank = space;
 pDest += ConcatLiteral(pLastLiteralDest, candidateData);
 }
 }
@@ -942,11 +954,8 @@ int LZBA_Compress(unsigned char* pDest, unsigned char* pSource, int sourceSize,
 pLastLiteralDest = pDest;
 bLastEmitIsLiteral = true;
 pDest += EmitLiteral(pDest, candidateData);
-space_left_in_bank = (int)0x10000 - (int)((pDest - pDataStart)&0xFFFF);
 }
 }
-#endif
 }
 else
 {
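
The space_left_in_bank bookkeeping in the last few hunks is built around 64 KB output banks: the bytes remaining in the current bank fall out of the write cursor's offset from the start of the buffer, modulo 0x10000. A tiny self-contained sketch of that arithmetic follows; SpaceLeftInBank is an illustrative helper, not a function in this codebase.

    #include <cstdio>

    // Bytes remaining in the current 64 KB bank, given the write cursor and
    // the start of the output buffer; mirrors the expression used above:
    // (int)0x10000 - (int)((pDest - pDataStart) & 0xFFFF)
    static int SpaceLeftInBank(const unsigned char* pDest, const unsigned char* pDataStart)
    {
        return (int)0x10000 - (int)((pDest - pDataStart) & 0xFFFF);
    }

    int main()
    {
        static unsigned char buffer[0x20000];       // two banks of output
        unsigned char* pCursor = buffer + 0x1FFF0;  // 16 bytes before the bank boundary
        printf("%d bytes left in this bank\n", SpaceLeftInBank(pCursor, buffer));
        return 0;
    }
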