Mirror of https://github.com/elliotnunn/macresources.git
Keep fake header out-of-channel
commit ad96f756d2
parent bda55a52b2
@@ -15,7 +15,7 @@ parser = argparse.ArgumentParser(description='''
 parser.add_argument('resourceFile', help='file to be decompiled')
 parser.add_argument('-ascii', action='store_true', help='guarantee ASCII output')
 parser.add_argument('-useDF', action='store_true', help='ignored: data fork is always used')
-parser.add_argument('-fakehdr', action='store_true', help='output header as fake resource')
+parser.add_argument('-fakehdr', action='store_true', help='save 225b file header as fake resource')
 parser.add_argument('-compcmt', action='store_true', help='helpfully comment compressed resources')
 
 args = parser.parse_args()
@@ -24,7 +24,7 @@ with open(args.resourceFile, 'rb') as f:
     resources = macresources.parse_file(f.read(), fake_header_rsrc=args.fakehdr)
 
 try:
-    rez = macresources.make_rez_code(resources, ascii_clean=args.ascii, fake_header_rsrc=args.fakehdr, cmt_unsupported_attrib=args.compcmt)
+    rez = macresources.make_rez_code(resources, ascii_clean=args.ascii, cmt_unsupported_attrib=args.compcmt)
     sys.stdout.buffer.write(rez)
 except BrokenPipeError:
     pass # like we get when we pipe into head
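For context, a minimal sketch of how the decompile side reads after this change. The file name is hypothetical; the point is that only parse_file still takes fake_header_rsrc, and the fake header then travels as an ordinary Resource object:

import sys
import macresources

# Hypothetical input file; parse_file is the only call that still needs the flag.
with open('Example.rsrc', 'rb') as f:
    resources = list(macresources.parse_file(f.read(), fake_header_rsrc=True))

# Any non-zero bytes in the file header now arrive as a normal Resource,
# so make_rez_code needs no extra argument to emit them.
rez = macresources.make_rez_code(resources, ascii_clean=True)
sys.stdout.buffer.write(rez)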
@@ -33,14 +33,13 @@ parser.add_argument('rezFile', nargs='+', help='resource description files')
 parser.add_argument('-o', metavar='outputFile', default='Rez.out', help='default: Rez.out')
 parser.add_argument('-align', metavar='word | longword | n', action='store', type=parse_align, default=1)
 parser.add_argument('-useDF', action='store_true', help='ignored: data fork is always used')
-parser.add_argument('-fakehdr', action='store_true', help='accept fake-header resource')
 
 args = parser.parse_args()
 
 resources = []
 for in_path in args.rezFile:
     with open(in_path, 'rb') as f:
-        resources.extend(macresources.parse_rez_code(f.read(), fake_header_rsrc=args.fakehdr))
+        resources.extend(macresources.parse_rez_code(f.read()))
 
 with open(args.o, 'wb') as f:
-    f.write(macresources.make_file(resources, align=args.align, fake_header_rsrc=args.fakehdr))
+    f.write(macresources.make_file(resources, align=args.align))
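And the corresponding compile side, sketched under the same assumptions (paths hypothetical): parse_rez_code and make_file no longer take fake_header_rsrc at all, because the fake header announces itself by type.

import macresources

resources = []
with open('Example.r', 'rb') as f:          # hypothetical Rez source file
    resources.extend(macresources.parse_rez_code(f.read()))

with open('Rez.out', 'wb') as f:
    f.write(macresources.make_file(resources, align=4))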
@@ -3,7 +3,7 @@ import struct
 import enum
 
 
-FAKE_HEADER_RSRC_NAME = 'Header as fake resource (not for Rez)'
+FAKE_HEADER_RSRC_TYPE = b'header' # obviously invalid
 
 
 MAP = bytearray(range(256))
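In other words, the marker moves out of the resource name and into the type field. A six-byte type can never collide with a real four-character code, so a resource like the following (illustrative values only) is unambiguously the fake header:

from macresources import Resource

# 240 bytes covers offsets 16..256 of the resource file header; contents illustrative.
hdr = Resource(b'header', 0,
               name='Header as fake resource (not for Rez)',
               data=b'\x00' * 240)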
@@ -136,7 +136,7 @@ def parse_file(from_resfile, fake_header_rsrc=False):
         return
 
     if fake_header_rsrc and any(from_resfile[16:256]):
-        yield Resource(type=b'????', id=0, name=FAKE_HEADER_RSRC_NAME, data=from_resfile[16:256])
+        yield Resource(FAKE_HEADER_RSRC_TYPE, 0, name='Header as fake resource (not for Rez)', data=from_resfile[16:256])
 
     data_offset, map_offset, data_len, map_len = struct.unpack_from('>4L', from_resfile)
 
@@ -175,7 +175,7 @@ def parse_file(from_resfile, fake_header_rsrc=False):
         yield Resource(type=rtype, id=rid, name=name, attribs=rattribs, data=bytearray(rdata))
 
 
-def parse_rez_code(from_rezcode, fake_header_rsrc=False):
+def parse_rez_code(from_rezcode):
     """Get an iterator of Resource objects from code in a subset of the Rez language (bytes or str)."""
 
     try:
@@ -249,7 +249,7 @@ def parse_rez_code(from_rezcode, fake_header_rsrc=False):
         pass
 
 
-def make_file(from_iter, align=1, fake_header_rsrc=False):
+def make_file(from_iter, align=1):
     """Pack an iterator of Resource objects into a binary resource file."""
 
     class wrap:
@@ -261,7 +261,7 @@ def make_file(from_iter, align=1, fake_header_rsrc=False):
     data_offset = len(accum)
     bigdict = collections.OrderedDict() # maintain order of types, but manually order IDs
     for r in from_iter:
-        if fake_header_rsrc and r.name == FAKE_HEADER_RSRC_NAME:
+        if r.type == FAKE_HEADER_RSRC_TYPE:
             if len(r.data) > 256-16:
                 raise ValueError('Special resource length (%r) too long' % len(r.data))
             accum[16:16+len(r.data)] = r.data
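A rough sketch of what that buys, assuming make_file otherwise skips this resource after splicing (which the surrounding code suggests): the data lands in bytes 16..256 of the output file rather than in the resource map, and anything longer than 240 bytes is rejected.

import macresources

hdr = macresources.Resource(b'header', 0, data=b'example header bytes')
binary = macresources.make_file([hdr])
assert binary[16:16 + len(hdr.data)] == hdr.data   # spliced straight into the header area

try:
    macresources.make_file([macresources.Resource(b'header', 0, data=b'\x00' * 300)])
except ValueError as err:
    print('rejected:', err)                        # 300 > 256-16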
@@ -337,7 +337,7 @@ def make_file(from_iter, align=1, fake_header_rsrc=False):
     return bytes(accum)
 
 
-def make_rez_code(from_iter, ascii_clean=False, fake_header_rsrc=False, cmt_unsupported_attrib=False):
+def make_rez_code(from_iter, ascii_clean=False, cmt_unsupported_attrib=False):
     """Express an iterator of Resource objects as Rez code (bytes).
 
     This will match the output of the deprecated Rez utility, unless the
@@ -360,7 +360,7 @@ def make_rez_code(from_iter, ascii_clean=False, fake_header_rsrc=False, cmt_unsu
 
         fourcc = _rez_escape(resource.type, singlequote=True, ascii_clean=ascii_clean)
 
-        if fake_header_rsrc and resource.name == FAKE_HEADER_RSRC_NAME:
+        if resource.type == FAKE_HEADER_RSRC_TYPE:
             lines.append(b'#if 0')
         lines.append(b'data %s (%s) {' % (fourcc, args))
 
@@ -390,7 +390,7 @@ def make_rez_code(from_iter, ascii_clean=False, fake_header_rsrc=False, cmt_unsu
             lines.append(line)
 
         lines.append(b'};')
-        if fake_header_rsrc and resource.name == FAKE_HEADER_RSRC_NAME:
+        if resource.type == FAKE_HEADER_RSRC_TYPE:
             lines.append(b'#endif')
         lines.append(b'')
     if lines: lines.append(b'') # hack, because all posix lines end with a newline
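Taken together, the Rez output for the fake header keeps its old shape: the data block is still fenced off with #if 0 / #endif so the real Rez tool ignores it; only the check that triggers the fence has changed. Roughly (output abridged, exact formatting may differ):

import macresources

hdr = macresources.Resource(b'header', 0,
                            name='Header as fake resource (not for Rez)',
                            data=b'\x01\x02\x03\x04')
print(macresources.make_rez_code([hdr]).decode('ascii'))
# Expected shape of the output, approximately:
#   #if 0
#   data 'header' (0, "Header as fake resource (not for Rez)") {
#       ...
#   };
#   #endif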