Mirror of https://github.com/akuker/RASCSI.git

Commit b5e6438a45 (parent a523e1febe): Allow images to be extracted from StuffIt, tarball, gzip, and 7z archives
@@ -82,7 +82,7 @@ function sudoCheck() {

# install all dependency packages for RaSCSI Service
function installPackages() {
    sudo apt-get update && sudo apt-get install git libspdlog-dev libpcap-dev genisoimage python3 python3-venv python3-dev python3-pip nginx libpcap-dev protobuf-compiler bridge-utils libev-dev libevdev2 -y </dev/null
    sudo apt-get update && sudo apt-get install git libspdlog-dev libpcap-dev genisoimage python3 python3-venv python3-dev python3-pip nginx libpcap-dev protobuf-compiler bridge-utils libev-dev libevdev2 unar -y </dev/null
}

# cache the pip packages
@@ -12,6 +12,15 @@ CONFIG_FILE_SUFFIX = "json"
# File ending used for drive properties files
PROPERTIES_SUFFIX = "properties"

# Supported archive file suffixes
ARCHIVE_FILE_SUFFIXES = [
    "zip",
    "sit",
    "tar",
    "gz",
    "7z"
]

# The RESERVATIONS list is used to keep track of the reserved ID memos.
# Initialize with a list of 8 empty strings.
RESERVATIONS = ["" for x in range(0, 8)]
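The web layer later compares a file's extension against this list. A minimal sketch of that check, not part of the commit itself (the helper name and sample file names are illustrative only):

    from pathlib import PurePath

    ARCHIVE_FILE_SUFFIXES = ["zip", "sit", "tar", "gz", "7z"]

    def is_archive(file_name):
        # PurePath(...).suffix includes the leading dot, so [1:] strips it,
        # matching the comparison used in file_cmds.py below
        return PurePath(file_name).suffix.lower()[1:] in ARCHIVE_FILE_SUFFIXES

    print(is_archive("Disk Tools.img.sit"))  # True
    print(is_archive("System753.hda"))       # False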
@@ -5,9 +5,9 @@ Module for methods reading from and writing to the file system
import os
import logging
import asyncio
from functools import lru_cache
from pathlib import PurePath
from zipfile import ZipFile, is_zipfile
from re import escape, findall
from time import time
from subprocess import run, CalledProcessError
from json import dump, load
@@ -16,10 +16,11 @@ from shutil import copyfile
import requests

import rascsi_interface_pb2 as proto
from rascsi.common_settings import CFG_DIR, CONFIG_FILE_SUFFIX, PROPERTIES_SUFFIX, RESERVATIONS
from rascsi.common_settings import CFG_DIR, CONFIG_FILE_SUFFIX, PROPERTIES_SUFFIX, ARCHIVE_FILE_SUFFIXES, RESERVATIONS
from rascsi.ractl_cmds import RaCtlCmds
from rascsi.return_codes import ReturnCodes
from rascsi.socket_cmds import SocketCmds
from util import unarchiver


class FileCmds:
@@ -97,19 +98,31 @@ class FileCmds:
                prop = process["conf"]
            else:
                prop = False
            if file.name.lower().endswith(".zip"):
                zip_path = f"{server_info['image_dir']}/{file.name}"
                if is_zipfile(zip_path):
                    zipfile = ZipFile(zip_path)
                    # Get a list of (str) containing all zipfile members
                    zip_members = zipfile.namelist()
                    # Strip out directories from the list
                    zip_members = [x for x in zip_members if not x.endswith("/")]
                else:
                    logging.warning("%s is an invalid zip file", zip_path)
                    zip_members = False
            else:
                zip_members = False

            archive_contents = []
            if PurePath(file.name).suffix.lower()[1:] in ARCHIVE_FILE_SUFFIXES:
                try:
                    archive_info = self._get_archive_info(
                        f"{server_info['image_dir']}/{file.name}",
                        _cache_extra_key=file.size
                    )

                    properties_files = [x["path"]
                                        for x in archive_info["members"]
                                        if x["path"].endswith(PROPERTIES_SUFFIX)]

                    for member in archive_info["members"]:
                        if member["is_dir"] or member["is_resource_fork"]:
                            continue

                        if PurePath(member["path"]).suffix.lower()[1:] == PROPERTIES_SUFFIX:
                            member["is_properties_file"] = True
                        elif f"{member['path']}.{PROPERTIES_SUFFIX}" in properties_files:
                            member["related_properties_file"] = f"{member['path']}.{PROPERTIES_SUFFIX}"

                        archive_contents.append(member)
                except (unarchiver.LsarCommandError, unarchiver.LsarOutputError):
                    pass

            size_mb = "{:,.1f}".format(file.size / 1024 / 1024)
            dtype = proto.PbDeviceType.Name(file.type)
@@ -119,7 +132,7 @@ class FileCmds:
                "size_mb": size_mb,
                "detected_type": dtype,
                "prop": prop,
                "zip_members": zip_members,
                "archive_contents": archive_contents,
            })

        return {"status": result.status, "msg": result.msg, "files": files}
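Each entry appended to archive_contents above is the member dict produced by unarchiver.inspect_archive() (defined in the new util/unarchiver.py further down), possibly annotated with is_properties_file or related_properties_file. An illustrative, made-up example of the shape the web template consumes; this is not part of the diff:

    member = {
        "name": "System753.hda",
        "path": "Disks/System753.hda",
        "size": 1474560,
        "is_dir": False,
        "is_resource_fork": False,
        "raw": {},  # full lsar JSON entry for the member, omitted here
        # added by list_images when a matching <path>.properties file exists in the same archive
        "related_properties_file": "Disks/System753.hda.properties",
    }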
@@ -266,62 +279,73 @@ class FileCmds:
            "parameters": parameters,
        }

    def unzip_file(self, file_name, member=False, members=False):
    def extract_image(self, file_path, members=None, move_properties_files_to_config=True):
        """
        Takes (str) file_name, optional (str) member, optional (list) of (str) members
        file_name is the name of the zip file to unzip
        member is the full path to the particular file in the zip file to unzip
        members contains all of the full paths to each of the zip archive members
        Returns (dict) with (boolean) status and (list of str) msg
        Takes (str) file_path, (list) members, optional (bool) move_properties_files_to_config
        file_path is the path of the archive file to extract, relative to the images directory
        members is a list of file paths in the archive file to extract
        move_properties_files_to_config controls if .properties files are auto-moved to CFG_DIR
        Returns (dict) result
        """
        server_info = self.ractl.get_server_info()
        prop_flag = False

        if not member:
            unzip_proc = asyncio.run(self.run_async("unzip", [
                "-d",
                server_info['image_dir'],
                "-n",
                "-j",
                f"{server_info['image_dir']}/{file_name}",
            ]))
            if members:
                for path in members:
                    if path.endswith(PROPERTIES_SUFFIX):
                        name = PurePath(path).name
                        self.rename_file(f"{server_info['image_dir']}/{name}", f"{CFG_DIR}/{name}")
                        prop_flag = True
        else:
            member = escape(member)
            unzip_proc = asyncio.run(self.run_async("unzip", [
                "-d",
                server_info['image_dir'],
                "-n",
                "-j",
                f"{server_info['image_dir']}/{file_name}",
                member,
            ]))
            # Attempt to unzip a properties file in the same archive dir
            unzip_prop = asyncio.run(self.run_async("unzip", [
                "-d",
                CFG_DIR,
                "-n",
                "-j",
                f"{server_info['image_dir']}/{file_name}",
                f"{member}.{PROPERTIES_SUFFIX}",
            ]))
        if not members:
            return {
                "status": False,
                "return_code": ReturnCodes.EXTRACTIMAGE_NO_FILES_SPECIFIED,
            }

            if unzip_prop["returncode"] == 0:
                prop_flag = True
        if unzip_proc["returncode"] != 0:
            logging.warning("Unzipping failed: %s", unzip_proc["stderr"])
            return {"status": False, "msg": unzip_proc["stderr"]}
        try:
            extract_result = unarchiver.extract_archive(
                f"{server_info['image_dir']}/{file_path}",
                members=members,
                output_dir=server_info["image_dir"],
            )

            properties_files_moved = []
            if move_properties_files_to_config:
                for file in extract_result["extracted"]:
                    if file.get("name").endswith(".properties"):
                        if (self.rename_file(
                                file["absolute_path"],
                                f"{CFG_DIR}/{file['name']}"
                        )):
                            properties_files_moved.append({
                                "status": True,
                                "name": file["path"],
                                "path": f"{CFG_DIR}/{file['name']}",
                            })
                        else:
                            properties_files_moved.append({
                                "status": False,
                                "name": file["path"],
                                "path": f"{CFG_DIR}/{file['name']}",
                            })

            return {
                "status": True,
                "return_code": ReturnCodes.EXTRACTIMAGE_SUCCESS,
                "parameters": {
                    "count": len(extract_result["extracted"]),
                },
                "extracted": extract_result["extracted"],
                "skipped": extract_result["skipped"],
                "properties_files_moved": properties_files_moved,
            }
        except unarchiver.UnarNoFilesExtractedError:
            return {
                "status": False,
                "return_code": ReturnCodes.EXTRACTIMAGE_NO_FILES_EXTRACTED,
            }
        except (unarchiver.UnarCommandError, unarchiver.UnarUnexpectedOutputError) as error:
            return {
                "status": False,
                "return_code": ReturnCodes.EXTRACTIMAGE_COMMAND_ERROR,
                "parameters": {
                    "error": error,
                }
            }

        unzipped = findall(
            "(?:inflating|extracting):(.+)\n",
            unzip_proc["stdout"]
        )
        return {"status": True, "msg": unzipped, "prop_flag": prop_flag}
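A sketch of how the new method might be called from the web layer, with an illustrative archive name and member list (paths are placeholders; the real FileCmds instance is constructed elsewhere with RaCtlCmds and SocketCmds):

    # hypothetical usage; file_cmd is an already-constructed FileCmds instance
    result = file_cmd.extract_image(
        "macintosh-apps.sit",
        members=["Apps/Disk Copy 6.3.img", "Apps/Disk Copy 6.3.img.properties"],
    )
    if result["status"]:
        print(f"Extracted {result['parameters']['count']} file(s)")
        for prop in result["properties_files_moved"]:
            print("Moved properties file to:", prop["path"])
    else:
        print("Extraction failed:", result["return_code"])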

    def download_file_to_iso(self, url, *iso_args):
        """
@@ -652,3 +676,14 @@ class FileCmds:
            logging.info("stderr: %s", stderr)

        return {"returncode": proc.returncode, "stdout": stdout, "stderr": stderr}

    # noinspection PyMethodMayBeStatic
    @lru_cache(maxsize=32)
    def _get_archive_info(self, file_path, **kwargs):
        """
        Cached wrapper method to improve performance, e.g. on index screen
        """
        try:
            return unarchiver.inspect_archive(file_path)
        except (unarchiver.LsarCommandError, unarchiver.LsarOutputError):
            raise
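_get_archive_info leans on functools.lru_cache, and list_images passes the file size as an extra keyword (_cache_extra_key=file.size) purely so the cache key changes when the file changes on disk; the kwarg is otherwise ignored. A minimal, self-contained sketch of the same pattern, with illustrative names that are not from the codebase:

    from functools import lru_cache

    @lru_cache(maxsize=32)
    def cached_inspect(file_path, **kwargs):
        # kwargs (e.g. _cache_extra_key=<size or mtime>) participates in the cache key
        # even though the body never reads it, so a changed file busts the stale entry
        print("expensive inspection of", file_path)
        return {"format": "Zip", "members": []}

    cached_inspect("/images/disk.zip", _cache_extra_key=1024)   # computed
    cached_inspect("/images/disk.zip", _cache_extra_key=1024)   # served from cache
    cached_inspect("/images/disk.zip", _cache_extra_key=2048)   # recomputed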
@@ -20,3 +20,7 @@ class ReturnCodes:
    READDRIVEPROPS_SUCCESS = 70
    READDRIVEPROPS_COULD_NOT_READ = 71
    ATTACHIMAGE_COULD_NOT_ATTACH = 80
    EXTRACTIMAGE_SUCCESS = 90
    EXTRACTIMAGE_NO_FILES_SPECIFIED = 91
    EXTRACTIMAGE_NO_FILES_EXTRACTED = 92
    EXTRACTIMAGE_COMMAND_ERROR = 93
New file: python/common/src/util/__init__.py (0 lines)

New file: python/common/src/util/run.py (45 lines)

@@ -0,0 +1,45 @@
"""
Utility module for running system commands with basic logging
"""

import asyncio
import logging
import os


def run(program, args=None):
    """ Run a command and return its output """
    return asyncio.run(run_async(program, args))


async def run_async(program, args=None):
    """ Run a command in the background """
    proc = await asyncio.create_subprocess_exec(
        program,
        *args,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE
    )

    stdout, stderr = await proc.communicate()

    logging.info(
        "Executed command \"%s %s\" with status code %d",
        program,
        " ".join(args),
        proc.returncode
    )

    if stdout:
        stdout = stdout.decode()
        logging.debug(stdout)

    if stderr:
        stderr = stderr.decode()
        logging.warning(stderr)

    return {
        "returncode": proc.returncode,
        "stdout": stdout,
        "stderr": stderr,
    }
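run() wraps the async helper so callers get a plain dict back. A quick usage sketch, not part of the commit (command and path are examples only):

    from util.run import run

    process = run("lsar", ["-json", "--", "/home/pi/images/archive.sit"])
    if process["returncode"] == 0:
        print(process["stdout"])
    else:
        print("lsar failed:", process["stderr"])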
New file: python/common/src/util/unarchiver.py (201 lines)

@@ -0,0 +1,201 @@
"""
A minimal wrapper around 'The Unarchiver' command line tools (v1.10.1)
https://theunarchiver.com/command-line
Later versions (untested) available at: https://github.com/MacPaw/XADMaster
"""

import logging
import pathlib

from tempfile import TemporaryDirectory
from re import escape, match
from json import loads, JSONDecodeError
from util.run import run

FORK_OUTPUT_TYPE_VISIBLE = "visible"
FORK_OUTPUT_TYPE_HIDDEN = "hidden"

FORK_OUTPUT_TYPES = [FORK_OUTPUT_TYPE_VISIBLE, FORK_OUTPUT_TYPE_HIDDEN]


def extract_archive(file_path, **kwargs):
    """
    Extracts files from an archive
    Takes (str) file_path, and kwargs:
    - (list) members - list of (str) files to be extracted (all files are extracted if None)
    - (str) output_dir - directory to place the extracted files
    - (str) fork_output_type - output type for resource forks; "visible" for *.rsrc files, "hidden" for ._* files
    Returns (dict) of extracted and skipped members
    """
    members = kwargs.get("members")

    if kwargs.get("output_dir"):
        if not pathlib.Path(kwargs["output_dir"]).is_dir():
            raise ValueError("Argument output_dir must be a directory")
        output_dir = str(pathlib.Path(kwargs["output_dir"]).resolve())
    else:
        output_dir = str(pathlib.Path(file_path).parent.resolve())

    if kwargs.get("fork_output_type"):
        if kwargs["fork_output_type"] not in FORK_OUTPUT_TYPES:
            raise ValueError(f"Argument fork_output_type must be one of: {','.join(FORK_OUTPUT_TYPES)} ")
        fork_output_type = kwargs["fork_output_type"]
        fork_output_type_args = ["-forks", fork_output_type or FORK_OUTPUT_TYPE_VISIBLE]
    else:
        fork_output_type = None
        fork_output_type_args = []

    with TemporaryDirectory() as tmp_dir:
        unar_args = [
            "-output-directory",
            tmp_dir,
            "-force-skip",
            "-no-directory",
            *fork_output_type_args,
            '--',
            file_path,
        ]

        if members:
            for member in members:
                unar_args.append(escape(member))

        process = run("unar", unar_args)

        if process["returncode"] != 0:
            raise UnarCommandError(f"Non-zero return code: {process['returncode']}")

        unar_result_success = r'^Successfully extracted to "(?P<destination>.+)".$'
        unar_result_no_files = "No files extracted."
        unar_file_extracted = \
            r"^ (?P<path>.+). \(((?P<size>[0-9]+) B)?(?P<types>(dir)?(, )?(rsrc)?)\)\.\.\. (?P<status>[A-Z]+)\.$"

        lines = process["stdout"].rstrip("\n").split("\n")

        if lines[-1] == unar_result_no_files:
            raise UnarNoFilesExtractedError

        if match(unar_result_success, lines[-1]):
            extracted_members = []

            for line in lines[1:-1]:
                if line_matches := match(unar_file_extracted, line):
                    matches = line_matches.groupdict()
                    member = {
                        "name": str(pathlib.PurePath(matches["path"]).name),
                        "path": matches["path"],
                        "size": matches["size"] or 0,
                        "is_dir": False,
                        "is_resource_fork": False,
                        "absolute_path": str(pathlib.PurePath(tmp_dir).joinpath(matches["path"])),
                    }

                    member_types = matches.get("types", "").removeprefix(", ").split(", ")

                    if "dir" in member_types:
                        member["is_dir"] = True

                    if "rsrc" in member_types:
                        if not fork_output_type:
                            continue

                        member["is_resource_fork"] = True

                        # Update names/paths to match unar resource fork naming convention
                        if fork_output_type == FORK_OUTPUT_TYPE_HIDDEN:
                            member["name"] = f"._{member['name']}"
                        else:
                            member["name"] += ".rsrc"
                        member["path"] = str(pathlib.PurePath(member["path"]).parent.joinpath(member["name"]))
                        member["absolute_path"] = str(pathlib.PurePath(tmp_dir).joinpath(member["path"]))

                    logging.debug("Extracted: %s -> %s", member['path'], member['absolute_path'])
                    extracted_members.append(member)
                else:
                    raise UnarUnexpectedOutputError(f"Unexpected output: {line}")

            moved = []
            skipped = []
            for member in sorted(extracted_members, key=lambda m: m["path"]):
                source_path = pathlib.Path(member["absolute_path"])
                target_path = pathlib.Path(output_dir).joinpath(member["path"])
                member["absolute_path"] = str(target_path)

                if target_path.exists():
                    logging.info("Skipping temp file/dir as the target already exists: %s", target_path)
                    skipped.append(member)
                    continue

                if member["is_dir"]:
                    logging.debug("Creating empty dir: %s -> %s", source_path, target_path)
                    target_path.mkdir(parents=True, exist_ok=True)
                    moved.append(member)
                    continue

                # The parent dir may not be specified as a member, so ensure it exists
                target_path.parent.mkdir(parents=True, exist_ok=True)
                logging.debug("Moving temp file: %s -> %s", source_path, target_path)
                source_path.rename(target_path)
                moved.append(member)

            return {
                "extracted": moved,
                "skipped": skipped,
            }

        raise UnarUnexpectedOutputError(lines[-1])


def inspect_archive(file_path, **kwargs):
    """
    Calls `lsar` to inspect the contents of an archive
    Takes (str) file_path
    Returns (dict) of (str) format, (list) members
    """
    if not pathlib.Path(file_path).exists():
        raise FileNotFoundError(f"File {file_path} does not exist")

    process = run("lsar", ["-json", "--", file_path])

    if process["returncode"] != 0:
        raise LsarCommandError(f"Non-zero return code: {process['returncode']}")

    try:
        archive_info = loads(process["stdout"])
    except JSONDecodeError as error:
        raise LsarOutputError(f"Unable to read JSON output from lsar: {error.msg}") from error

    members = [{
        "name": pathlib.PurePath(member.get("XADFileName")).name,
        "path": member.get("XADFileName"),
        "size": member.get("XADFileSize"),
        "is_dir": member.get("XADIsDirectory"),
        "is_resource_fork": member.get("XADIsResourceFork"),
        "raw": member,
    } for member in archive_info.get("lsarContents", [])]

    return {
        "format": archive_info.get("lsarFormatName"),
        "members": members,
    }


class UnarCommandError(Exception):
    """ Command execution was unsuccessful """
    pass


class UnarNoFilesExtractedError(Exception):
    """ Command completed, but no files extracted """


class UnarUnexpectedOutputError(Exception):
    """ Command output not recognized """


class LsarCommandError(Exception):
    """ Command execution was unsuccessful """


class LsarOutputError(Exception):
    """ Command output could not be parsed"""
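Taken together, the two entry points pair up like this; a hedged usage sketch, not part of the diff (archive path, image directory, and the reported format string are illustrative):

    from util import unarchiver

    info = unarchiver.inspect_archive("/home/pi/images/games.sit")
    print(info["format"])  # e.g. "StuffIt"
    wanted = [m["path"] for m in info["members"] if not m["is_dir"]]

    result = unarchiver.extract_archive(
        "/home/pi/images/games.sit",
        members=wanted,
        output_dir="/home/pi/images",
    )
    print(len(result["extracted"]), "extracted;", len(result["skipped"]), "skipped")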
@@ -37,6 +37,14 @@ class ReturnCodeMapper:
            _("Could not read properties from file: %(file_path)s"),
        ReturnCodes.ATTACHIMAGE_COULD_NOT_ATTACH:
            _("Cannot insert an image for %(device_type)s into a %(current_device_type)s device"),
        ReturnCodes.EXTRACTIMAGE_SUCCESS:
            _("Extracted %(count)s file(s)"),
        ReturnCodes.EXTRACTIMAGE_NO_FILES_SPECIFIED:
            _("Unable to extract archive: No files were specified"),
        ReturnCodes.EXTRACTIMAGE_NO_FILES_EXTRACTED:
            _("No files were extracted (existing files are skipped)"),
        ReturnCodes.EXTRACTIMAGE_COMMAND_ERROR:
            _("Unable to extract archive: %(error)s"),
    }

    @staticmethod
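This table is consumed via ReturnCodeMapper.add_msg(), which, as used in web.py further below, takes a result dict carrying return_code and parameters and returns it with a formatted msg attached. An illustrative sketch of that call with a made-up result dict (the exact add_msg internals are not shown in this diff):

    extract_result = {
        "status": True,
        "return_code": ReturnCodes.EXTRACTIMAGE_SUCCESS,
        "parameters": {"count": 3},
    }
    flash(ReturnCodeMapper.add_msg(extract_result).get("msg"))  # "Extracted 3 file(s)"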
@@ -12,8 +12,6 @@ AFP_DIR = f"{HOME_DIR}/afpshare"

MAX_FILE_SIZE = getenv("MAX_FILE_SIZE", str(1024 * 1024 * 1024 * 4)) # 4gb

ARCHIVE_FILE_SUFFIX = "zip"

# The file name of the default config file that loads when rascsi-web starts
DEFAULT_CONFIG = f"default.{rascsi.common_settings.CONFIG_FILE_SUFFIX}"
# File containing canonical drive properties
@@ -35,12 +35,14 @@ table, tr, td {
    color: white;
    font-size:20px;
    background-color:red;
    white-space: pre-line;
}

.message {
    color: white;
    font-size:20px;
    background-color:green;
    white-space: pre-line;
}

td.inactive {
@@ -185,41 +185,41 @@
</ul>
</details>
</td>
{% elif file["zip_members"] %}
{% elif file["archive_contents"] %}
<td>
<details>
<summary>
{{ file["name"] }}
</summary>
<ul style="list-style: none;">
{% for member in file["zip_members"] %}
{% if not member.lower().endswith(PROPERTIES_SUFFIX) %}
<li>
{% if member + "." + PROPERTIES_SUFFIX in file["zip_members"] %}
<details><summary>{{ member }}
<form action="/files/unzip" method="post">
<input name="zip_file" type="hidden" value="{{ file['name'] }}">
<input name="zip_member" type="hidden" value="{{ member }}">
<input type="submit" value="{{ _("Unzip") }}" onclick="processNotify('{{ _("Unzipping a single file...") }}')">
</form>
</summary>
<ul style="list-style: none;">
{% for member in file["archive_contents"] %}
{% if not member["is_properties_file"] %}
<li>
{{ member + "." + PROPERTIES_SUFFIX }}
{% if member["related_properties_file"] %}
<details>
<summary>
<label>{{ member["path"] }}</label>
<form action="/files/extract_image" method="post">
<input name="archive_file" type="hidden" value="{{ file['name'] }}">
<input name="archive_members" type="hidden" value="{{ member["path"] }}|{{ member["related_properties_file"] }}">
<input type="submit" value="{{ _("Extract") }}" onclick="processNotify('{{ _("Extracting a single file...") }}')">
</form>
</summary>
<ul style="list-style: none;">
<li>{{ member["related_properties_file"] }}</li>
</ul>
</details>
{% else %}
<label>{{ member["path"] }}</label>
<form action="/files/extract_image" method="post">
<input name="archive_file" type="hidden" value="{{ file["name"] }}">
<input name="archive_members" type="hidden" value="{{ member["path"] }}">
<input type="submit" value="{{ _("Extract") }}" onclick="processNotify('{{ _("Extracting a single file...") }}')">
</form>
{% endif %}
</li>
</ul>
</details>
{% else %}
<label for="zip_member">{{ member }}</label>
<form action="/files/unzip" method="post">
<input name="zip_file" type="hidden" value="{{ file['name'] }}">
<input name="zip_member" type="hidden" value="{{ member }}">
<input type="submit" value="{{ _("Unzip") }}" onclick="processNotify('{{ _("Unzipping a single file...") }}')">
</form>
{% endif %}
</li>
{% endif %}
{% endfor %}
{% endfor %}
</ul>
</details>
</td>
@@ -238,11 +238,12 @@
{{ _("Attached!") }}
</center>
{% else %}
{% if file["name"].lower().endswith(ARCHIVE_FILE_SUFFIX) %}
<form action="/files/unzip" method="post">
<input name="zip_file" type="hidden" value="{{ file['name'] }}">
<input name="zip_members" type="hidden" value="{{ file['zip_members'] }}">
<input type="submit" value="{{ _("Unzip All") }}" onclick="processNotify('{{ _("Unzipping all files...") }}')">
{% if file["archive_contents"] %}
<form action="/files/extract_image" method="post">
<input name="archive_file" type="hidden" value="{{ file['name'] }}">
{% set pipe = joiner("|") %}
<input name="archive_members" type="hidden" value="{% for member in file["archive_contents"] %}{{ pipe() }}{{ member["path"] }}{% endfor %}">
<input type="submit" value="{{ _("Extract All") }}" onclick="processNotify('{{ _("Extracting all files...") }}')">
</form>
{% else %}
<form action="/scsi/attach" method="post">
@@ -8,9 +8,9 @@ import argparse
from pathlib import Path
from functools import wraps
from grp import getgrall
from ast import literal_eval

import bjoern
from rascsi.return_codes import ReturnCodes
from werkzeug.utils import secure_filename
from simplepam import authenticate
from flask_babel import Babel, Locale, refresh, _
@@ -37,6 +37,7 @@ from rascsi.common_settings import (
    CFG_DIR,
    CONFIG_FILE_SUFFIX,
    PROPERTIES_SUFFIX,
    ARCHIVE_FILE_SUFFIXES,
    RESERVATIONS,
)
@@ -55,7 +56,6 @@ from web_utils import (
from settings import (
    AFP_DIR,
    MAX_FILE_SIZE,
    ARCHIVE_FILE_SUFFIX,
    DEFAULT_CONFIG,
    DRIVE_PROPERTIES_FILE,
    AUTH_GROUP,
@@ -133,14 +133,13 @@ def index():
    scsi_ids, recommended_id = get_valid_scsi_ids(devices["device_list"], reserved_scsi_ids)
    formatted_devices = sort_and_format_devices(devices["device_list"])

    valid_file_suffix = "."+", .".join(
    valid_file_suffix = "." + ", .".join(
        server_info["sahd"] +
        server_info["schd"] +
        server_info["scrm"] +
        server_info["scmo"] +
        server_info["sccd"] +
        [ARCHIVE_FILE_SUFFIX]
    )
        ARCHIVE_FILE_SUFFIXES)

    if "username" in session:
        username = session["username"]
@@ -182,7 +181,6 @@ def index():
        mo_file_suffix=tuple(server_info["scmo"]),
        username=username,
        auth_active=auth_active(AUTH_GROUP)["status"],
        ARCHIVE_FILE_SUFFIX=ARCHIVE_FILE_SUFFIX,
        PROPERTIES_SUFFIX=PROPERTIES_SUFFIX,
        REMOVABLE_DEVICE_TYPES=ractl_cmd.get_removable_device_types(),
        DISK_DEVICE_TYPES=ractl_cmd.get_disk_device_types(),
@@ -945,33 +943,38 @@ def copy():
    return redirect(url_for("index"))


@APP.route("/files/unzip", methods=["POST"])
@APP.route("/files/extract_image", methods=["POST"])
@login_required
def unzip():
def extract_image():
    """
    Unzips all files in a specified zip archive, or a single file in the zip archive
    Extracts all or a subset of files in the specified archive
    """
    zip_file = request.form.get("zip_file")
    zip_member = request.form.get("zip_member") or False
    zip_members = request.form.get("zip_members") or False
    archive_file = request.form.get("archive_file")
    archive_members_raw = request.form.get("archive_members") or None
    archive_members = archive_members_raw.split("|") if archive_members_raw else None

    if zip_members:
        zip_members = literal_eval(zip_members)
    extract_result = file_cmd.extract_image(
        archive_file,
        archive_members
    )

    process = file_cmd.unzip_file(zip_file, zip_member, zip_members)
    if process["status"]:
        if not process["msg"]:
            flash(_("Aborted unzip: File(s) with the same name already exists."), "error")
            return redirect(url_for("index"))
        flash(_("Unzipped the following files:"))
        for msg in process["msg"]:
            flash(msg)
        if process["prop_flag"]:
            flash(_("Properties file(s) have been moved to %(directory)s", directory=CFG_DIR))
        return redirect(url_for("index"))
    if extract_result["return_code"] == ReturnCodes.EXTRACTIMAGE_SUCCESS:
        flash(ReturnCodeMapper.add_msg(extract_result).get("msg"))

        for properties_file in extract_result["properties_files_moved"]:
            if properties_file["status"]:
                flash(_("Properties file %(file)s moved to %(directory)s",
                        file=properties_file['name'],
                        directory=CFG_DIR
                        ))
            else:
                flash(_("Failed to move properties file %(file)s to %(directory)s",
                        file=properties_file['name'],
                        directory=CFG_DIR
                        ), "error")
    else:
        flash(ReturnCodeMapper.add_msg(extract_result).get("msg"), "error")

    flash(_("Failed to unzip %(zip_file)s", zip_file=zip_file), "error")
    flash(process["msg"], "error")
    return redirect(url_for("index"))
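The archive_members form field is a single value with member paths joined by "|" (the template uses Jinja's joiner("|")), which the route splits back into a list before calling file_cmd.extract_image(). A small illustration of that round trip, with example paths; note this relies on archive member paths never containing a literal "|":

    member_paths = ["Disks/System753.hda", "Disks/System753.hda.properties"]
    archive_members_raw = "|".join(member_paths)        # what the form submits
    archive_members = archive_members_raw.split("|")    # what the route reconstructs
    assert archive_members == member_paths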
@@ -25,6 +25,11 @@ if ! command -v unzip &> /dev/null ; then
    echo "Run 'sudo apt install unzip' to fix."
    ERROR=1
fi
if ! command -v unar &> /dev/null ; then
    echo "unar could not be found"
    echo "Run 'sudo apt install unar' to fix."
    ERROR=1
fi
if [ $ERROR = 1 ] ; then
    echo
    echo "Fix errors and re-run ./start.sh"