Add ability to turn flat, non-GS .SHK files into proper disk images

2012-07-31 21:06:01 +00:00
parent 77aa498bee
commit 437270ea95
6 changed files with 134 additions and 69 deletions

View File

@ -74,7 +74,7 @@ public class Disk {
return names;
}
}
public static final int BLOCK_SIZE = 512;
public static final int SECTOR_SIZE = 256;
public static final int PRODOS_BLOCKS_ON_140KB_DISK = 280;
@ -179,15 +179,10 @@ public class Disk {
int diskSize = 0;
byte[] diskImage = null;
if (isSDK()) {
if (isSDK() || isSHK()) {
// If we have an SDK, unpack it and send along the byte array
diskImage = com.webcodepro.shrinkit.Utilities.unpackSDKFile(filename);
diskSize = diskImage.length;
} else if (isSHK()) {
// If we have an SHK, unpack it and send along the byte array
diskImage = com.webcodepro.shrinkit.Utilities.unpackSHKFile(filename);
throw new IOException("SHK unpacking is not implemented yet."); // TODO - remove me
//TODO - diskSize = diskImage.length;
diskSize = diskImage.length;
} else {
File file = new File(filename);
diskSize = (int) file.length();
@ -791,6 +786,29 @@ public class Disk {
setImageOrder(pdo);
}
/**
* Find the standard-sized disk that will fit the requested number of bytes.
* @return int size of the disk if it will satisfy the request, -1 otherwise
*/
public static int sizeToFit(long bytes) {
if (bytes < APPLE_140KB_DISK) {
return APPLE_140KB_DISK;
} else if (bytes < APPLE_800KB_DISK) {
return APPLE_800KB_DISK;
} else if (bytes < APPLE_5MB_HARDDISK) {
return APPLE_5MB_HARDDISK;
} else if (bytes < APPLE_10MB_HARDDISK) {
return APPLE_10MB_HARDDISK;
} else if (bytes < APPLE_20MB_HARDDISK) {
return APPLE_20MB_HARDDISK;
} else if (bytes < APPLE_32MB_HARDDISK) {
return APPLE_32MB_HARDDISK;
}
return -1;
}
/**
* Change ImageOrder from source order to target order by copying sector by
* sector.
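The new sizeToFit() helper rounds a payload's byte count up to the smallest standard Apple image size. A minimal usage sketch follows; the concrete byte values in the comments assume AppleCommander's usual disk-size constants (for example 143,360 bytes for APPLE_140KB_DISK) and are illustrative rather than authoritative.

// Fragment, illustrative only: round a payload size up to a standard image size.
int small = Disk.sizeToFit(100000L); // -> APPLE_140KB_DISK (143360 bytes assumed)
int medium = Disk.sizeToFit(500000L); // -> APPLE_800KB_DISK
int none = Disk.sizeToFit(64L * 1024 * 1024); // above the 32MB ProDOS ceiling -> -1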

View File

@ -173,7 +173,7 @@ public class ProdosFileEntry extends ProdosCommonEntry implements FileEntry {
}
/**
* Set the filetype.
* Set the filetype based on a string value.
*/
public void setFiletype(String filetype) {
byte[] entry = readFileEntry();
@ -181,6 +181,15 @@ public class ProdosFileEntry extends ProdosCommonEntry implements FileEntry {
writeFileEntry(entry);
}
/**
* Set the filetype based on a long value, narrowed to a single byte.
*/
public void setFiletype(long fileType) {
byte[] entry = readFileEntry();
entry[0x10] = (byte)fileType;
writeFileEntry(entry);
}
/**
* Indicate if this is an AppleWorks file.
* Intended to force upper/lowercase into the filename.
@ -261,6 +270,15 @@ public class ProdosFileEntry extends ProdosCommonEntry implements FileEntry {
return AppleUtil.getWordValue(readFileEntry(), 0x1f);
}
/**
* Set the auxiliary type for this file.
*/
public void setAuxiliaryType(int auxiliaryType) {
byte[] entry = readFileEntry();
setAuxiliaryType(entry, auxiliaryType);
writeFileEntry(entry);
}
/**
* Set the auxiliary type for this file.
*/

View File

@ -539,7 +539,7 @@ public class ac {
* Unshrink the ShrinkIt data depending on what kind it is:
*
* SDK disk image - unpack it to a disk image
* ShrinkIt file bundle [future] - unpack files onto a disk image of reqeusted size
* ShrinkIt file bundle [future] - unpack files onto a disk image of requested size
*/
static void unshrink(String shrinkName, String imageName, int imageSize)
throws IOException {

View File

@ -37,6 +37,7 @@ public class HeaderBlock {
private byte[] attribBytes;
private String filename;
private String rawFilename;
private long headerSize = 0;
private List<ThreadRecord> threads = new ArrayList<ThreadRecord>();
/**
@ -88,7 +89,10 @@ public class HeaderBlock {
*/
public void readThreads(LittleEndianByteInputStream bs) throws IOException {
for (long l=0; l<totalThreads; l++) threads.add(new ThreadRecord(this, bs));
for (ThreadRecord r : threads) r.readThreadData(bs);
for (ThreadRecord r : threads) {
r.readThreadData(bs);
headerSize += r.getThreadEof();
}
}
/**
@ -242,4 +246,7 @@ public class HeaderBlock {
public void setThreadRecords(List<ThreadRecord> threads) {
this.threads = threads;
}
public long getHeaderSize() {
return headerSize;
}
}

View File

@ -15,6 +15,7 @@ import com.webcodepro.shrinkit.io.LittleEndianByteInputStream;
public class NuFileArchive {
private MasterHeaderBlock master;
private List<HeaderBlock> headers;
private long totalSize = 0;
/**
* Need to enumerate some basic sub-types of archives.
@ -34,13 +35,20 @@ public class NuFileArchive {
HeaderBlock header = new HeaderBlock(bs);
header.readThreads(bs);
headers.add(header);
totalSize += header.getHeaderSize();
}
}
/**
* @return long size in bytes of the archive
*/
public long getArchiveSize() {
return totalSize;
}
public MasterHeaderBlock getMasterHeaderBlock() {
return master;
}
public List<HeaderBlock> getHeaderBlocks() {
return headers;
}
}
}
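Taken together, HeaderBlock now tallies the EOF of every thread it reads into headerSize, and NuFileArchive sums those per-header totals into the value returned by getArchiveSize(). That total is what the SHK-unpacking code hands to Disk.sizeToFit() to choose the target image, roughly as in this fragment (the archive name is hypothetical):

// Fragment, illustrative only: size a new disk image from the archive contents.
NuFileArchive archive = new NuFileArchive(new FileInputStream("FILES.SHK"));
long bytesNeeded = archive.getArchiveSize(); // sum of each header's thread EOFs
int imageSize = Disk.sizeToFit(bytesNeeded); // smallest standard disk that fits, or -1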

View File

@ -23,8 +23,17 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Date;
import com.webcodepro.applecommander.storage.Disk;
import com.webcodepro.applecommander.storage.FileEntry;
import com.webcodepro.applecommander.storage.FormattedDisk;
import com.webcodepro.applecommander.storage.StorageBundle;
import com.webcodepro.applecommander.storage.os.prodos.ProdosFileEntry;
import com.webcodepro.applecommander.storage.os.prodos.ProdosFormatDisk;
import com.webcodepro.applecommander.storage.physical.ByteArrayImageLayout;
import com.webcodepro.applecommander.storage.physical.ImageOrder;
import com.webcodepro.applecommander.storage.physical.ProdosOrder;
import com.webcodepro.applecommander.util.TextBundle;
import com.webcodepro.shrinkit.io.LittleEndianByteInputStream;
@ -36,50 +45,9 @@ import com.webcodepro.shrinkit.io.LittleEndianByteInputStream;
public class Utilities
{
/**
* Interpret a SDK NuFile/NuFX/Shrinkit archive as a full disk image.
*
* @return byte[] buffer containing full disk of data; null if unable to read
* @throws IllegalArgumentException if the filename is not able to be read
* @throws IOException the file has some malformed-ness about it
*/
public static byte[] unpackSDKFile(String fileName) throws IOException {
TextBundle textBundle = StorageBundle.getInstance();
byte buffer[] = null;
ThreadRecord dataThread = null;
File file = new File(fileName);
if (file.isDirectory() || !file.canRead()) {
throw new IllegalArgumentException(textBundle.format("NotAFile", fileName, 1)); //$NON-NLS-1$
}
InputStream is = new FileInputStream(file);
NuFileArchive a = new NuFileArchive(is);
for (HeaderBlock b : a.getHeaderBlocks()) {
for (ThreadRecord r : b.getThreadRecords()) {
try
{
if (r.getThreadKind() == ThreadKind.DISK_IMAGE)
{
dataThread = r;
}
}
catch (Exception ex)
{
System.out.println(ex);
}
}
if (null != dataThread) {
dataThread.readThreadData(new LittleEndianByteInputStream(dataThread.getRawInputStream()));
InputStream fis = dataThread.getInputStream();
int dmgLen = (int)(dataThread.getThreadEof());
buffer = new byte[dmgLen];
fis.read(buffer,0,dmgLen);
fis.close();
}
}
return buffer;
}
/**
* Interpret a SHK NuFile/NuFX/Shrinkit archive as a full disk image.
* Interpret a NuFile/NuFX/Shrinkit archive as a full disk image.
* Note that a disk within an SHK (Disk Disintegrator Deluxe 5.0_D1.SHK) should
* be interpreted directly as that disk image.
*
* @return byte[] buffer containing full disk of data; null if unable to read
* @throws IllegalArgumentException if the filename is not able to be read
@ -87,21 +55,49 @@ public class Utilities
*/
public static byte[] unpackSHKFile(String fileName) throws IOException {
TextBundle textBundle = StorageBundle.getInstance();
byte buffer[] = null;
ThreadRecord dataThread = null;
byte dmgBuffer[] = null;
File file = new File(fileName);
if (file.isDirectory() || !file.canRead()) {
throw new IllegalArgumentException(textBundle.format("NotAFile", fileName, 1)); //$NON-NLS-1$
}
InputStream is = new FileInputStream(file);
NuFileArchive a = new NuFileArchive(is);
int newDiskSize = Disk.sizeToFit(a.getArchiveSize());
ByteArrayImageLayout layout = new ByteArrayImageLayout(newDiskSize);
ImageOrder imageOrder = new ProdosOrder(layout);
FormattedDisk[] disks = ProdosFormatDisk.create(fileName, "APPLECOMMANDER", imageOrder);
ProdosFormatDisk pdDisk = (ProdosFormatDisk)disks[0];
for (HeaderBlock b : a.getHeaderBlocks()) {
ProdosFileEntry newFile = null;
for (ThreadRecord r : b.getThreadRecords()) {
try
{
if (r.getThreadKind() == ThreadKind.DISK_IMAGE)
{
dataThread = r;
switch (r.getThreadKind()) {
case ASCII_TEXT:
break;
case ALLOCATED_SPACE:
break;
case APPLE_IIGS_ICON:
break;
case CREATE_DIRECTORY:
break;
case DATA_FORK:
// This is a normal-ish file
newFile = (ProdosFileEntry) pdDisk.createFile();
if (newFile != null) {
newFile.setFileData(readThread(r));
}
break;
case DISK_IMAGE:
dmgBuffer = readThread(r);
break;
case RESOURCE_FORK:
break;
case FILENAME:
break;
default:
// Hmmm, this should not occur - but let us not fret about it.
break;
}
}
catch (Exception ex)
@ -109,15 +105,33 @@ public class Utilities
System.out.println(ex);
}
}
if (null != dataThread) {
dataThread.readThreadData(new LittleEndianByteInputStream(dataThread.getRawInputStream()));
InputStream fis = dataThread.getInputStream();
int dmgLen = (int)(dataThread.getThreadEof());
buffer = new byte[dmgLen];
fis.read(buffer,0,dmgLen);
fis.close();
if (newFile != null) {
newFile.setFilename(b.getFilename());
newFile.setFiletype(b.getFileType());
newFile.setAuxiliaryType((int)b.getExtraType());
newFile.setCreationDate(b.getCreateWhen());
newFile.setLastModificationDate(b.getModWhen());
newFile = null;
}
}
if (dmgBuffer != null)
return dmgBuffer;
else
return imageOrder.readBytes(0,newDiskSize);
}
/**
* Read the data from a thread record into a byte array.
* @return byte[] buffer containing the thread's data
*/
public static byte[] readThread(ThreadRecord thread) throws IOException {
thread.readThreadData(new LittleEndianByteInputStream(thread.getRawInputStream()));
InputStream fis = thread.getInputStream();
byte[] buffer = new byte[(int)(thread.getThreadEof())];
fis.read(buffer,0,buffer.length);
fis.close();
return buffer;
}
}
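A short end-to-end sketch of the new capability, with hypothetical file names: unpackSHKFile() builds a ProDOS image sized by Disk.sizeToFit(), copies each DATA_FORK thread onto it as a file, and returns the image bytes (or, if the archive carries a DISK_IMAGE thread, the embedded image itself). The caller only needs to write the returned buffer out.

import java.io.FileOutputStream;
import java.io.IOException;

import com.webcodepro.shrinkit.Utilities;

public class ShkToImageExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical names: unpack FILES.SHK onto a ProDOS-ordered image and save it.
        byte[] image = Utilities.unpackSHKFile("FILES.SHK");
        FileOutputStream out = new FileOutputStream("FILES.PO");
        try {
            out.write(image);
        } finally {
            out.close();
        }
    }
}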