Mirror of https://github.com/fadden/ciderpress.git
Allow files larger than 16MB to be added to HFS volumes
Ideally this would add a "what's your limit" call to the disk image library, but this will do for now.
commit 8d5e07e683
parent 8ecb8e35eb
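The "what's your limit" call mentioned in the commit message doesn't exist yet. As a minimal sketch of what such a query on DiskImg might look like, assuming a simple per-format ceiling is enough (the method name and values below are hypothetical, not current diskimg API):

// Hypothetical sketch only -- DiskImg has no such call today.
// A per-filesystem ceiling would let callers reject oversized files up front.
long DiskImg::GetMaxFileSize(void) const
{
    switch (GetFSFormat()) {
    case kFormatMacHFS:
        return 0x7fffffff;      // HFS forks can grow to roughly 2GB
    default:
        return 0x00ffffff;      // ProDOS-style 24-bit cap, 16MB-1
    }
}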
@@ -1581,14 +1581,15 @@ CString DiskArchive::ProcessFileAddData(DiskFS* pDiskFS, int addOptsConvEOL)
         else
             convHA = GenericEntry::kConvertHAOff;
 
-        errMsg = LoadFile(pDataDetails->GetLocalPathName(), &dataBuf, &dataLen,
-            convEOL, convHA);
+        errMsg = LoadFile(pDataDetails->GetLocalPathName(), pDiskFS,
+            &dataBuf, &dataLen, convEOL, convHA);
         if (!errMsg.IsEmpty())
             goto bail;
     }
     if (pRsrcDetails != NULL) {
         /* no text conversion on resource forks */
-        errMsg = LoadFile(pRsrcDetails->GetLocalPathName(), &rsrcBuf, &rsrcLen,
+        errMsg = LoadFile(pRsrcDetails->GetLocalPathName(), pDiskFS,
+            &rsrcBuf, &rsrcLen,
             GenericEntry::kConvertEOLOff, GenericEntry::kConvertHAOff);
         if (!errMsg.IsEmpty())
             goto bail;
@@ -1624,8 +1625,11 @@ bail:
 
 // TODO: really ought to update the progress counter, especially when reading
 // really large files.
-CString DiskArchive::LoadFile(const WCHAR* pathName, uint8_t** pBuf, long* pLen,
-    GenericEntry::ConvertEOL conv, GenericEntry::ConvertHighASCII convHA) const
+// TODO: shouldn't be loading really large files into memory. ProDOS is fine
+// (16MB limit), HFS could be problematic in a 32-bit app.
+CString DiskArchive::LoadFile(const WCHAR* pathName, DiskFS* pDiskFS,
+    uint8_t** pBuf, long* pLen,
+    GenericEntry::ConvertEOL conv, GenericEntry::ConvertHighASCII convHA) const
 {
     const char kCharLF = '\n';
     const char kCharCR = '\r';
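The new TODO points at the real fix: stream the host file instead of pulling it all into memory, since a multi-hundred-megabyte HFS file can strain a 32-bit address space. Purely as a generic illustration of that approach (not CiderPress code; the sink callback stands in for whatever the add path would ultimately write to):

#include <cstddef>
#include <cstdint>
#include <fstream>
#include <functional>
#include <vector>

// Read a host file in fixed-size chunks and hand each chunk to a caller-
// supplied sink, so peak memory stays at the buffer size rather than the
// file size. Uses the MSVC wchar_t* ifstream constructor extension.
static bool CopyFileInChunks(const wchar_t* pathName,
    const std::function<bool(const uint8_t*, std::size_t)>& sink)
{
    std::ifstream in(pathName, std::ios::binary);
    if (!in)
        return false;

    std::vector<uint8_t> chunk(256 * 1024);     // 256KB working buffer
    while (in) {
        in.read(reinterpret_cast<char*>(chunk.data()),
            static_cast<std::streamsize>(chunk.size()));
        std::streamsize got = in.gcount();
        if (got > 0 && !sink(chunk.data(), static_cast<std::size_t>(got)))
            return false;                       // sink reported an error
    }
    return true;
}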
@@ -1666,8 +1670,15 @@ CString DiskArchive::LoadFile(const WCHAR* pathName, uint8_t** pBuf, long* pLen,
         *pBuf = NULL;
         *pLen = 0;
         goto bail;
-    } else if (fileLen > 0x00ffffff) {
-        errMsg = L"Cannot add files larger than 16MB to a disk image.";
+    }
+    // TODO: we want to limit the file length based on the filesystem type,
+    // so we don't initiate an operation that can't possibly succeed. We
+    // should be able to query the DiskImg for its max file size, but that
+    // API doesn't currently exist. For now, limit anything other than HFS
+    // to 16MB.
+    if (pDiskFS->GetDiskImg()->GetFSFormat() != DiskImg::kFormatMacHFS &&
+            fileLen > 0x00ffffff) {
+        errMsg = L"Cannot add files larger than 16MB to a non-HFS disk image.";
         goto bail;
     }
 
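For reference, the 0x00ffffff threshold in the guard above is the classic 24-bit EOF limit: 2^24 - 1 = 16,777,215 bytes, one byte short of 16MB, which is as large as a ProDOS file can get; HFS forks can grow far past that, hence the format check. A trivial, self-contained check of the arithmetic:

#include <cassert>

int main()
{
    const long kLimit = 0x00ffffff;             // threshold used in LoadFile()
    assert(kLimit == (1L << 24) - 1);           // 24-bit EOF field
    assert(kLimit == 16L * 1024 * 1024 - 1);    // one byte short of 16MB
    return 0;
}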
@@ -358,7 +358,7 @@ private:
      *
      * Returns an empty string on success, or an error message on failure.
      */
-    CString LoadFile(const WCHAR* pathName, uint8_t** pBuf, long* pLen,
+    CString LoadFile(const WCHAR* pathName, DiskFS* pDiskFS, uint8_t** pBuf, long* pLen,
         GenericEntry::ConvertEOL conv, GenericEntry::ConvertHighASCII convHA) const;
 
     /*