Auto-format Python sources with black, fix all issues reported by flake8 (#1010)

* Update config for black and flake8
* Auto-format Python sources with black
* Fix issues reported by flake8
* Exclude protobuf files from black
* Address formatting feedback
This commit is contained in:
nucleogenic 2022-11-30 05:19:17 +00:00 committed by GitHub
parent 5afc6b911f
commit 315ef9f248
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
44 changed files with 1073 additions and 725 deletions

5
python/.flake8 Normal file
View File

@ -0,0 +1,5 @@
[flake8]
max-line-length = 100
exclude =
venv
rascsi_interface_pb2.py

View File

@ -13,17 +13,11 @@ CONFIG_FILE_SUFFIX = "json"
PROPERTIES_SUFFIX = "properties"
# Supported archive file suffixes
ARCHIVE_FILE_SUFFIXES = [
"zip",
"sit",
"tar",
"gz",
"7z"
]
ARCHIVE_FILE_SUFFIXES = ["zip", "sit", "tar", "gz", "7z"]
# The RESERVATIONS list is used to keep track of the reserved ID memos.
# Initialize with a list of 8 empty strings.
RESERVATIONS = ["" for _ in range(0, 8)]
# Standard error message for shell commands
SHELL_ERROR = "Shell command: \"%s\" led to error: %s"
SHELL_ERROR = 'Shell command: "%s" led to error: %s'

View File

@ -8,8 +8,8 @@ class FailedSocketConnectionException(Exception):
class EmptySocketChunkException(Exception):
"""Raise when a socket payload contains an empty chunk which implies a possible problem. """
"""Raise when a socket payload contains an empty chunk which implies a possible problem."""
class InvalidProtobufResponse(Exception):
"""Raise when a rascsi socket payload contains unpexpected data. """
"""Raise when a rascsi socket payload contains unpexpected data."""

View File

@ -35,6 +35,7 @@ FILE_READ_ERROR = "Unhandled exception when reading file: %s"
FILE_WRITE_ERROR = "Unhandled exception when writing to file: %s"
URL_SAFE = "/:?&"
class FileCmds:
"""
class for methods reading from and writing to the file system
@ -57,15 +58,10 @@ class FileCmds:
files_list = []
for file_path, _dirs, files in walk(dir_path):
# Only list selected file types
# TODO: Refactor for readability?
files = [f for f in files if f.lower().endswith(file_types)]
files_list.extend(
[
(
file,
path.getsize(path.join(file_path, file))
)
for file in files
]
[(file, path.getsize(path.join(file_path, file))) for file in files],
)
return files_list
@ -108,7 +104,7 @@ class FileCmds:
if file.name in prop_files:
process = self.read_drive_properties(
Path(CFG_DIR) / f"{file.name}.{PROPERTIES_SUFFIX}"
)
)
prop = process["conf"]
else:
prop = False
@ -118,12 +114,14 @@ class FileCmds:
try:
archive_info = self._get_archive_info(
f"{server_info['image_dir']}/{file.name}",
_cache_extra_key=file.size
)
_cache_extra_key=file.size,
)
properties_files = [x["path"]
for x in archive_info["members"]
if x["path"].endswith(PROPERTIES_SUFFIX)]
properties_files = [
x["path"]
for x in archive_info["members"]
if x["path"].endswith(PROPERTIES_SUFFIX)
]
for member in archive_info["members"]:
if member["is_dir"] or member["is_resource_fork"]:
@ -132,7 +130,9 @@ class FileCmds:
if PurePath(member["path"]).suffix.lower()[1:] == PROPERTIES_SUFFIX:
member["is_properties_file"] = True
elif f"{member['path']}.{PROPERTIES_SUFFIX}" in properties_files:
member["related_properties_file"] = f"{member['path']}.{PROPERTIES_SUFFIX}"
member[
"related_properties_file"
] = f"{member['path']}.{PROPERTIES_SUFFIX}"
archive_contents.append(member)
except (unarchiver.LsarCommandError, unarchiver.LsarOutputError):
@ -140,14 +140,16 @@ class FileCmds:
size_mb = "{:,.1f}".format(file.size / 1024 / 1024)
dtype = proto.PbDeviceType.Name(file.type)
files.append({
"name": file.name,
"size": file.size,
"size_mb": size_mb,
"detected_type": dtype,
"prop": prop,
"archive_contents": archive_contents,
})
files.append(
{
"name": file.name,
"size": file.size,
"size_mb": size_mb,
"detected_type": dtype,
"prop": prop,
"archive_contents": archive_contents,
}
)
return {"status": result.status, "msg": result.msg, "files": files}
@ -233,22 +235,20 @@ class FileCmds:
Takes (Path) file_path for the file to delete
Returns (dict) with (bool) status and (str) msg
"""
parameters = {
"file_path": file_path
}
parameters = {"file_path": file_path}
if file_path.exists():
file_path.unlink()
return {
"status": True,
"return_code": ReturnCodes.DELETEFILE_SUCCESS,
"parameters": parameters,
}
return {
"status": False,
"return_code": ReturnCodes.DELETEFILE_FILE_NOT_FOUND,
"status": True,
"return_code": ReturnCodes.DELETEFILE_SUCCESS,
"parameters": parameters,
}
return {
"status": False,
"return_code": ReturnCodes.DELETEFILE_FILE_NOT_FOUND,
"parameters": parameters,
}
# noinspection PyMethodMayBeStatic
def rename_file(self, file_path, target_path):
@ -258,21 +258,19 @@ class FileCmds:
- (Path) target_path for the name to rename
Returns (dict) with (bool) status and (str) msg
"""
parameters = {
"target_path": target_path
}
parameters = {"target_path": target_path}
if target_path.parent.exists:
file_path.rename(target_path)
return {
"status": True,
"return_code": ReturnCodes.RENAMEFILE_SUCCESS,
"parameters": parameters,
}
}
return {
"status": False,
"return_code": ReturnCodes.RENAMEFILE_UNABLE_TO_MOVE,
"parameters": parameters,
}
}
# noinspection PyMethodMayBeStatic
def copy_file(self, file_path, target_path):
@ -282,21 +280,19 @@ class FileCmds:
- (Path) target_path for the name to copy to
Returns (dict) with (bool) status and (str) msg
"""
parameters = {
"target_path": target_path
}
parameters = {"target_path": target_path}
if target_path.parent.exists:
copyfile(str(file_path), str(target_path))
return {
"status": True,
"return_code": ReturnCodes.WRITEFILE_SUCCESS,
"parameters": parameters,
}
}
return {
"status": False,
"return_code": ReturnCodes.WRITEFILE_UNABLE_TO_WRITE,
"parameters": parameters,
}
}
def extract_image(self, file_path, members=None, move_properties_files_to_config=True):
"""
@ -312,60 +308,66 @@ class FileCmds:
return {
"status": False,
"return_code": ReturnCodes.EXTRACTIMAGE_NO_FILES_SPECIFIED,
}
}
try:
extract_result = unarchiver.extract_archive(
f"{server_info['image_dir']}/{file_path}",
members=members,
output_dir=server_info["image_dir"],
)
)
properties_files_moved = []
if move_properties_files_to_config:
for file in extract_result["extracted"]:
if file.get("name").endswith(f".{PROPERTIES_SUFFIX}"):
prop_path = Path(CFG_DIR) / file["name"]
if (self.rename_file(
Path(file["absolute_path"]),
prop_path,
)):
properties_files_moved.append({
"status": True,
"name": file["path"],
"path": str(prop_path),
})
if self.rename_file(
Path(file["absolute_path"]),
prop_path,
):
properties_files_moved.append(
{
"status": True,
"name": file["path"],
"path": str(prop_path),
}
)
else:
properties_files_moved.append({
"status": False,
"name": file["path"],
"path": str(prop_path),
})
properties_files_moved.append(
{
"status": False,
"name": file["path"],
"path": str(prop_path),
}
)
return {
"status": True,
"return_code": ReturnCodes.EXTRACTIMAGE_SUCCESS,
"parameters": {
"count": len(extract_result["extracted"]),
},
},
"extracted": extract_result["extracted"],
"skipped": extract_result["skipped"],
"properties_files_moved": properties_files_moved,
}
}
except unarchiver.UnarNoFilesExtractedError:
return {
"status": False,
"return_code": ReturnCodes.EXTRACTIMAGE_NO_FILES_EXTRACTED,
}
except (unarchiver.UnarCommandError, unarchiver.UnarUnexpectedOutputError) as error:
}
except (
unarchiver.UnarCommandError,
unarchiver.UnarUnexpectedOutputError,
) as error:
return {
"status": False,
"return_code": ReturnCodes.EXTRACTIMAGE_COMMAND_ERROR,
"parameters": {
"error": error,
}
}
},
}
# noinspection PyMethodMayBeStatic
def partition_disk(self, file_name, volume_name, disk_format):
@ -399,42 +401,42 @@ class FileCmds:
if disk_format == "HFS":
partitioning_tool = "hfdisk"
commands = [
"i",
"",
"C",
"",
"32",
"Driver_Partition",
"Apple_Driver",
"C",
"",
"",
volume_name,
"Apple_HFS",
"w",
"y",
"p",
]
"i",
"",
"C",
"",
"32",
"Driver_Partition",
"Apple_Driver",
"C",
"",
"",
volume_name,
"Apple_HFS",
"w",
"y",
"p",
]
# Create a DOS label, primary partition, W95 FAT type
elif disk_format == "FAT":
partitioning_tool = "fdisk"
commands = [
"o",
"n",
"p",
"",
"",
"",
"t",
"b",
"w",
]
"o",
"n",
"p",
"",
"",
"",
"t",
"b",
"w",
]
try:
process = Popen(
[partitioning_tool, str(full_file_path)],
stdin=PIPE,
stdout=PIPE,
)
[partitioning_tool, str(full_file_path)],
stdin=PIPE,
stdout=PIPE,
)
for command in commands:
process.stdin.write(bytes(command + "\n", "utf-8"))
process.stdin.flush()
@ -464,7 +466,6 @@ class FileCmds:
return {"status": True, "msg": ""}
# noinspection PyMethodMayBeStatic
def format_hfs(self, file_name, volume_name, driver_path):
"""
@ -514,7 +515,6 @@ class FileCmds:
return {"status": True, "msg": ""}
# noinspection PyMethodMayBeStatic
def format_fat(self, file_name, volume_name, fat_size):
"""
@ -538,21 +538,21 @@ class FileCmds:
else:
logging.info(process.stdout.decode("utf-8"))
self.delete_file(Path(file_name))
return {"status": False, "msg": error.stderr.decode("utf-8")}
return {"status": False, "msg": process.stderr.decode("utf-8")}
except (FileNotFoundError, CalledProcessError) as error:
logging.warning(SHELL_ERROR, " ".join(error.cmd), error.stderr.decode("utf-8"))
self.delete_file(Path(file_name))
return {"status": False, "msg": error.stderr.decode("utf-8")}
args = [
"mkfs.fat",
"-v",
"-F",
fat_size,
"-n",
volume_name,
"/dev/mapper/" + loopback_device,
]
args = [
"mkfs.fat",
"-v",
"-F",
fat_size,
"-n",
volume_name,
"/dev/mapper/" + loopback_device,
]
try:
process = run(
args,
@ -582,7 +582,6 @@ class FileCmds:
return {"status": True, "msg": ""}
def download_file_to_iso(self, url, *iso_args):
"""
Takes (str) url and one or more (str) *iso_args
@ -592,7 +591,7 @@ class FileCmds:
server_info = self.ractl.get_server_info()
file_name = PurePath(url).name
iso_filename = Path(server_info['image_dir']) / f"{file_name}.iso"
iso_filename = Path(server_info["image_dir"]) / f"{file_name}.iso"
with TemporaryDirectory() as tmp_dir:
req_proc = self.download_to_dir(quote(url, safe=URL_SAFE), tmp_dir, file_name)
@ -603,23 +602,30 @@ class FileCmds:
tmp_full_path = Path(tmp_dir) / file_name
if is_zipfile(tmp_full_path):
if "XtraStuf.mac" in str(ZipFile(str(tmp_full_path)).namelist()):
logging.info("MacZip file format detected. Will not unzip to retain resource fork.")
logging.info(
"MacZip file format detected. Will not unzip to retain resource fork."
)
else:
logging.info(
"%s is a zipfile! Will attempt to unzip and store the resulting files.",
tmp_full_path,
)
unzip_proc = asyncio.run(
self.run_async(
"unzip",
[
"-d",
str(tmp_dir),
"-n",
str(tmp_full_path),
],
)
unzip_proc = asyncio.run(self.run_async("unzip", [
"-d",
str(tmp_dir),
"-n",
str(tmp_full_path),
]))
)
if not unzip_proc["returncode"]:
logging.info(
"%s was successfully unzipped. Deleting the zipfile.",
tmp_full_path,
)
)
tmp_full_path.unlink(True)
try:
@ -638,9 +644,7 @@ class FileCmds:
logging.warning(SHELL_ERROR, " ".join(error.cmd), error.stderr.decode("utf-8"))
return {"status": False, "msg": error.stderr.decode("utf-8")}
parameters = {
"value": " ".join(iso_args)
}
parameters = {"value": " ".join(iso_args)}
return {
"status": True,
"return_code": ReturnCodes.DOWNLOADFILETOISO_SUCCESS,
@ -658,10 +662,10 @@ class FileCmds:
try:
with requests.get(
quote(url, safe=URL_SAFE),
stream=True,
headers={"User-Agent": "Mozilla/5.0"},
) as req:
quote(url, safe=URL_SAFE),
stream=True,
headers={"User-Agent": "Mozilla/5.0"},
) as req:
req.raise_for_status()
try:
with open(f"{save_dir}/{file_name}", "wb") as download:
@ -677,10 +681,7 @@ class FileCmds:
logging.info("Response content-type: %s", req.headers["content-type"])
logging.info("Response status code: %s", req.status_code)
parameters = {
"file_name": file_name,
"save_dir": save_dir
}
parameters = {"file_name": file_name, "save_dir": save_dir}
return {
"status": True,
"return_code": ReturnCodes.DOWNLOADTODIR_SUCCESS,
@ -715,28 +716,29 @@ class FileCmds:
reserved_ids_and_memos = []
reserved_ids = self.ractl.get_reserved_ids()["ids"]
for scsi_id in reserved_ids:
reserved_ids_and_memos.append({"id": scsi_id,
"memo": RESERVATIONS[int(scsi_id)]})
dump(
{"version": version,
"devices": devices,
"reserved_ids": reserved_ids_and_memos},
json_file,
indent=4
reserved_ids_and_memos.append(
{"id": scsi_id, "memo": RESERVATIONS[int(scsi_id)]}
)
parameters = {
"target_path": file_path
}
dump(
{
"version": version,
"devices": devices,
"reserved_ids": reserved_ids_and_memos,
},
json_file,
indent=4,
)
parameters = {"target_path": file_path}
return {
"status": True,
"return_code": ReturnCodes.WRITEFILE_SUCCESS,
"parameters": parameters,
}
}
except (IOError, ValueError, EOFError, TypeError) as error:
logging.error(str(error))
self.delete_file(Path(file_path))
return {"status": False, "msg": str(error)}
except:
except Exception:
logging.error(FILE_WRITE_ERROR, file_name)
self.delete_file(Path(file_path))
raise
@ -770,7 +772,7 @@ class FileCmds:
"revision": row["revision"],
"block_size": row["block_size"],
"params": dict(row["params"]),
}
}
if row["image"]:
kwargs["params"]["file"] = row["image"]
self.ractl.attach_device(row["id"], **kwargs)
@ -789,27 +791,27 @@ class FileCmds:
"revision": row["revision"],
"block_size": row["block_size"],
"params": dict(row["params"]),
}
}
if row["image"]:
kwargs["params"]["file"] = row["image"]
self.ractl.attach_device(row["id"], **kwargs)
logging.warning("%s is in an obsolete config file format", file_name)
else:
return {"status": False,
"return_code": ReturnCodes.READCONFIG_INVALID_CONFIG_FILE_FORMAT}
return {
"status": False,
"return_code": ReturnCodes.READCONFIG_INVALID_CONFIG_FILE_FORMAT,
}
parameters = {
"file_name": file_name
}
parameters = {"file_name": file_name}
return {
"status": True,
"return_code": ReturnCodes.READCONFIG_SUCCESS,
"parameters": parameters
}
"parameters": parameters,
}
except (IOError, ValueError, EOFError, TypeError) as error:
logging.error(str(error))
return {"status": False, "msg": str(error)}
except:
except Exception:
logging.error(FILE_READ_ERROR, str(file_path))
raise
@ -823,19 +825,17 @@ class FileCmds:
try:
with open(file_path, "w") as json_file:
dump(conf, json_file, indent=4)
parameters = {
"target_path": str(file_path)
}
parameters = {"target_path": str(file_path)}
return {
"status": True,
"return_code": ReturnCodes.WRITEFILE_SUCCESS,
"parameters": parameters,
}
}
except (IOError, ValueError, EOFError, TypeError) as error:
logging.error(str(error))
self.delete_file(file_path)
return {"status": False, "msg": str(error)}
except:
except Exception:
logging.error(FILE_WRITE_ERROR, str(file_path))
self.delete_file(file_path)
raise
@ -850,19 +850,17 @@ class FileCmds:
try:
with open(file_path) as json_file:
conf = load(json_file)
parameters = {
"file_path": str(file_path)
}
parameters = {"file_path": str(file_path)}
return {
"status": True,
"return_codes": ReturnCodes.READDRIVEPROPS_SUCCESS,
"parameters": parameters,
"conf": conf,
}
}
except (IOError, ValueError, EOFError, TypeError) as error:
logging.error(str(error))
return {"status": False, "msg": str(error)}
except:
except Exception:
logging.error(FILE_READ_ERROR, str(file_path))
raise
@ -877,11 +875,17 @@ class FileCmds:
program,
*args,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE)
stderr=asyncio.subprocess.PIPE,
)
stdout, stderr = await proc.communicate()
logging.info("Executed command \"%s %s\" with status code %d", program, " ".join(args), proc.returncode)
logging.info(
'Executed command "%s %s" with status code %d',
program,
" ".join(args),
proc.returncode,
)
if stdout:
stdout = stdout.decode()
logging.info("stdout: %s", stdout)

View File

@ -11,6 +11,7 @@ class RaCtlCmds:
"""
Class for commands sent to the RaSCSI backend service.
"""
def __init__(self, sock_cmd: SocketCmds, token=None, locale="en"):
self.sock_cmd = sock_cmd
self.token = token
@ -37,9 +38,13 @@ class RaCtlCmds:
data = self.sock_cmd.send_pb_command(command.SerializeToString())
result = proto.PbResult()
result.ParseFromString(data)
version = (str(result.server_info.version_info.major_version) + "." +
str(result.server_info.version_info.minor_version) + "." +
str(result.server_info.version_info.patch_version))
version = (
str(result.server_info.version_info.major_version)
+ "."
+ str(result.server_info.version_info.minor_version)
+ "."
+ str(result.server_info.version_info.patch_version)
)
log_levels = list(result.server_info.log_level_info.log_levels)
current_log_level = result.server_info.log_level_info.current_log_level
reserved_ids = list(result.server_info.reserved_ids_info.ids)
@ -74,7 +79,7 @@ class RaCtlCmds:
"scrm": scrm,
"scmo": scmo,
"sccd": sccd,
}
}
def get_reserved_ids(self):
"""
@ -137,11 +142,11 @@ class RaCtlCmds:
for key, value in device.properties.default_params.items():
params[key] = value
device_types[proto.PbDeviceType.Name(device.type)] = {
"removable": device.properties.removable,
"supports_file": device.properties.supports_file,
"params": params,
"block_sizes": list(device.properties.block_sizes),
}
"removable": device.properties.removable,
"supports_file": device.properties.supports_file,
"params": params,
"block_sizes": list(device.properties.block_sizes),
}
return {"status": result.status, "device_types": device_types}
def get_removable_device_types(self):
@ -176,8 +181,8 @@ class RaCtlCmds:
device_types = self.get_device_types()
image_device_types = self.get_disk_device_types()
peripheral_device_types = [
x for x in device_types["device_types"] if x not in image_device_types
]
x for x in device_types["device_types"] if x not in image_device_types
]
return peripheral_device_types
def get_image_files_info(self):
@ -205,7 +210,7 @@ class RaCtlCmds:
"images_dir": images_dir,
"image_files": image_files,
"scan_depth": scan_depth,
}
}
def attach_device(self, scsi_id, **kwargs):
"""
@ -245,13 +250,13 @@ class RaCtlCmds:
if current_type != device_type:
parameters = {
"device_type": device_type,
"current_device_type": current_type
"current_device_type": current_type,
}
return {
"status": False,
"return_code": ReturnCodes.ATTACHIMAGE_COULD_NOT_ATTACH,
"parameters": parameters,
}
}
command.operation = proto.PbOperation.INSERT
# Handling attaching a new device
@ -394,20 +399,22 @@ class RaCtlCmds:
dblock = result.devices_info.devices[i].block_size
dsize = int(result.devices_info.devices[i].block_count) * int(dblock)
device_list.append({
"id": did,
"unit": dunit,
"device_type": dtype,
"status": ", ".join(dstat_msg),
"image": dpath,
"file": dfile,
"params": dparam,
"vendor": dven,
"product": dprod,
"revision": drev,
"block_size": dblock,
"size": dsize,
})
device_list.append(
{
"id": did,
"unit": dunit,
"device_type": dtype,
"status": ", ".join(dstat_msg),
"image": dpath,
"file": dfile,
"params": dparam,
"vendor": dven,
"product": dprod,
"revision": drev,
"block_size": dblock,
"size": dsize,
}
)
i += 1
return {"status": result.status, "msg": result.msg, "device_list": device_list}

View File

@ -6,6 +6,7 @@ Module for return codes that are referenced in the return payloads of the rascsi
# pylint: disable=too-few-public-methods
class ReturnCodes:
"""Class for the return codes used within the rascsi module."""
DELETEFILE_SUCCESS = 0
DELETEFILE_FILE_NOT_FOUND = 1
RENAMEFILE_SUCCESS = 10

View File

@ -7,15 +7,18 @@ import socket
from time import sleep
from struct import pack, unpack
from rascsi.exceptions import (EmptySocketChunkException,
InvalidProtobufResponse,
FailedSocketConnectionException)
from rascsi.exceptions import (
EmptySocketChunkException,
InvalidProtobufResponse,
FailedSocketConnectionException,
)
class SocketCmds:
"""
Class for sending and receiving data over a socket connection with the RaSCSI backend
"""
def __init__(self, host="localhost", port=6868):
self.host = host
self.port = port
@ -38,8 +41,11 @@ class SocketCmds:
return response
except socket.error as error:
counter += 1
logging.warning("The RaSCSI service is not responding - attempt %s/%s",
str(counter), str(tries))
logging.warning(
"The RaSCSI service is not responding - attempt %s/%s",
str(counter),
str(tries),
)
error_msg = str(error)
sleep(0.2)
except EmptySocketChunkException as ex:
@ -75,18 +81,22 @@ class SocketCmds:
bytes_recvd = 0
while bytes_recvd < response_length:
chunk = sock.recv(min(response_length - bytes_recvd, 2048))
if chunk == b'':
error_message = ("Read an empty chunk from the socket. Socket connection has "
"dropped unexpectedly. RaSCSI may have crashed.")
if chunk == b"":
error_message = (
"Read an empty chunk from the socket. Socket connection has "
"dropped unexpectedly. RaSCSI may have crashed."
)
logging.error(error_message)
raise EmptySocketChunkException(error_message)
chunks.append(chunk)
bytes_recvd = bytes_recvd + len(chunk)
response_message = b''.join(chunks)
response_message = b"".join(chunks)
return response_message
error_message = ("The response from RaSCSI did not contain a protobuf header. "
"RaSCSI may have crashed.")
error_message = (
"The response from RaSCSI did not contain a protobuf header. "
"RaSCSI may have crashed."
)
logging.error(error_message)
raise InvalidProtobufResponse(error_message)

View File

@ -12,6 +12,7 @@ from platform import uname
from rascsi.common_settings import SHELL_ERROR
class SysCmds:
"""
Class for commands sent to the Pi's Linux system.
@ -30,7 +31,7 @@ class SysCmds:
["git", "rev-parse", "HEAD"],
capture_output=True,
check=True,
)
)
.stdout.decode("utf-8")
.strip()
)
@ -68,7 +69,7 @@ class SysCmds:
return {
"git": ra_git_version,
"env": f"{hardware}, {env.system} {env.release} {env.machine}",
}
}
@staticmethod
def running_proc(daemon):
@ -82,7 +83,7 @@ class SysCmds:
["ps", "aux"],
capture_output=True,
check=True,
)
)
.stdout.decode("utf-8")
.strip()
)
@ -104,7 +105,7 @@ class SysCmds:
["brctl", "show"],
capture_output=True,
check=True,
)
)
.stdout.decode("utf-8")
.strip()
)
@ -155,7 +156,7 @@ class SysCmds:
sock = socket(AF_INET, SOCK_DGRAM)
try:
# mock ip address; doesn't have to be reachable
sock.connect(('10.255.255.255', 1))
sock.connect(("10.255.255.255", 1))
ip_addr = sock.getsockname()[0]
except Exception:
ip_addr = False
@ -170,10 +171,10 @@ class SysCmds:
"""
try:
process = run(
["hostnamectl", "status", "--pretty"],
capture_output=True,
check=True,
)
["hostnamectl", "status", "--pretty"],
capture_output=True,
check=True,
)
pretty_hostname = process.stdout.decode("utf-8").rstrip()
if pretty_hostname:
return pretty_hostname
@ -188,11 +189,11 @@ class SysCmds:
Set the pretty hostname for the system
"""
try:
process = run(
["sudo", "hostnamectl", "set-hostname", "--pretty", name],
capture_output=False,
check=True,
)
run(
["sudo", "hostnamectl", "set-hostname", "--pretty", name],
capture_output=False,
check=True,
)
except CalledProcessError as error:
logging.error(str(error))
return False
@ -213,9 +214,9 @@ class SysCmds:
if scope:
scope_param = ["-u", scope]
process = run(
["journalctl"] + line_param + scope_param,
capture_output=True,
)
["journalctl"] + line_param + scope_param,
capture_output=True,
)
if process.returncode == 0:
return process.returncode, process.stdout.decode("utf-8")
@ -228,9 +229,9 @@ class SysCmds:
Returns either the disktype output, or the stderr output.
"""
process = run(
["disktype", file_path],
capture_output=True,
)
["disktype", file_path],
capture_output=True,
)
if process.returncode == 0:
return process.returncode, process.stdout.decode("utf-8")
@ -243,9 +244,9 @@ class SysCmds:
Returns either the man2html output, or the stderr output.
"""
process = run(
["man2html", file_path, "-M", "/"],
capture_output=True,
)
["man2html", file_path, "-M", "/"],
capture_output=True,
)
if process.returncode == 0:
return process.returncode, process.stdout.decode("utf-8")
@ -257,9 +258,9 @@ class SysCmds:
Sends a reboot command to the system
"""
process = run(
["sudo", "reboot"],
capture_output=True,
)
["sudo", "reboot"],
capture_output=True,
)
if process.returncode == 0:
return process.returncode, process.stdout.decode("utf-8")
@ -271,9 +272,9 @@ class SysCmds:
Sends a shutdown command to the system
"""
process = run(
["sudo", "shutdown", "-h", "now"],
capture_output=True,
)
["sudo", "shutdown", "-h", "now"],
capture_output=True,
)
if process.returncode == 0:
return process.returncode, process.stdout.decode("utf-8")

View File

@ -4,31 +4,27 @@ Utility module for running system commands with basic logging
import asyncio
import logging
import os
def run(program, args=None):
""" Run a command and return its output """
"""Run a command and return its output"""
return asyncio.run(run_async(program, args))
async def run_async(program, args=None):
""" Run a command in the background """
"""Run a command in the background"""
proc = await asyncio.create_subprocess_exec(
program,
*args,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE
)
program, *args, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await proc.communicate()
logging.info(
"Executed command \"%s %s\" with status code %d",
'Executed command "%s %s" with status code %d',
program,
" ".join(args),
proc.returncode
)
proc.returncode,
)
if stdout:
stdout = stdout.decode()
@ -42,4 +38,4 @@ async def run_async(program, args=None):
"returncode": proc.returncode,
"stdout": stdout,
"stderr": stderr,
}
}

View File

@ -25,7 +25,8 @@ def extract_archive(file_path, **kwargs):
Takes (str) file_path, and kwargs:
- (list) members - list of (str) files to be extracted (all files are extracted if None)
- (str) output_dir - directory to place the extracted files
- (str) fork_output_type - output type for resource forks; "visible" for *.rsrc files, "hidden" for ._* files
- (str) fork_output_type - output type for resource forks;
"visible" for *.rsrc files, "hidden" for ._* files
Returns (dict) of extracted and skipped members
"""
members = kwargs.get("members")
@ -39,7 +40,9 @@ def extract_archive(file_path, **kwargs):
if kwargs.get("fork_output_type"):
if kwargs["fork_output_type"] not in FORK_OUTPUT_TYPES:
raise ValueError(f"Argument fork_output_type must be one of: {','.join(FORK_OUTPUT_TYPES)} ")
raise ValueError(
f"Argument fork_output_type must be one of: {','.join(FORK_OUTPUT_TYPES)} "
)
fork_output_type = kwargs["fork_output_type"]
fork_output_type_args = ["-forks", fork_output_type or FORK_OUTPUT_TYPE_VISIBLE]
else:
@ -53,9 +56,9 @@ def extract_archive(file_path, **kwargs):
"-force-skip",
"-no-directory",
*fork_output_type_args,
'--',
"--",
file_path,
]
]
if members:
for member in members:
@ -68,8 +71,10 @@ def extract_archive(file_path, **kwargs):
unar_result_success = r'^Successfully extracted to "(?P<destination>.+)".$'
unar_result_no_files = "No files extracted."
unar_file_extracted = \
r"^ {2}(?P<path>.+). \(((?P<size>\d+) B)?(?P<types>(dir)?(, )?(rsrc)?)\)\.\.\. (?P<status>[A-Z]+)\.$"
unar_file_extracted = (
r"^ {2}(?P<path>.+). \(((?P<size>\d+) B)?(?P<types>(dir)?(, )?"
r"(rsrc)?)\)\.\.\. (?P<status>[A-Z]+)\.$"
)
lines = process["stdout"].rstrip("\n").split("\n")
@ -90,7 +95,7 @@ def extract_archive(file_path, **kwargs):
"is_dir": False,
"is_resource_fork": False,
"absolute_path": str(pathlib.PurePath(tmp_dir).joinpath(matches["path"])),
}
}
member_types = matches.get("types", "")
if member_types.startswith(", "):
@ -112,10 +117,14 @@ def extract_archive(file_path, **kwargs):
member["name"] = f"._{member['name']}"
else:
member["name"] += ".rsrc"
member["path"] = str(pathlib.PurePath(member["path"]).parent.joinpath(member["name"]))
member["absolute_path"] = str(pathlib.PurePath(tmp_dir).joinpath(member["path"]))
member["path"] = str(
pathlib.PurePath(member["path"]).parent.joinpath(member["name"])
)
member["absolute_path"] = str(
pathlib.PurePath(tmp_dir).joinpath(member["path"])
)
logging.debug("Extracted: %s -> %s", member['path'], member['absolute_path'])
logging.debug("Extracted: %s -> %s", member["path"], member["absolute_path"])
extracted_members.append(member)
else:
raise UnarUnexpectedOutputError(f"Unexpected output: {line}")
@ -128,7 +137,10 @@ def extract_archive(file_path, **kwargs):
member["absolute_path"] = str(target_path)
if target_path.exists():
logging.info("Skipping temp file/dir as the target already exists: %s", target_path)
logging.info(
"Skipping temp file/dir as the target already exists: %s",
target_path,
)
skipped.append(member)
continue
@ -147,7 +159,7 @@ def extract_archive(file_path, **kwargs):
return {
"extracted": moved,
"skipped": skipped,
}
}
raise UnarUnexpectedOutputError(lines[-1])
@ -171,37 +183,41 @@ def inspect_archive(file_path):
except JSONDecodeError as error:
raise LsarOutputError(f"Unable to read JSON output from lsar: {error.msg}") from error
members = [{
"name": pathlib.PurePath(member.get("XADFileName")).name,
"path": member.get("XADFileName"),
"size": member.get("XADFileSize"),
"is_dir": member.get("XADIsDirectory"),
"is_resource_fork": member.get("XADIsResourceFork"),
"raw": member,
} for member in archive_info.get("lsarContents", [])]
members = [
{
"name": pathlib.PurePath(member.get("XADFileName")).name,
"path": member.get("XADFileName"),
"size": member.get("XADFileSize"),
"is_dir": member.get("XADIsDirectory"),
"is_resource_fork": member.get("XADIsResourceFork"),
"raw": member,
}
for member in archive_info.get("lsarContents", [])
]
return {
"format": archive_info.get("lsarFormatName"),
"members": members,
}
}
class UnarCommandError(Exception):
""" Command execution was unsuccessful """
"""Command execution was unsuccessful"""
pass
class UnarNoFilesExtractedError(Exception):
""" Command completed, but no files extracted """
"""Command completed, but no files extracted"""
class UnarUnexpectedOutputError(Exception):
""" Command output not recognized """
"""Command output not recognized"""
class LsarCommandError(Exception):
""" Command execution was unsuccessful """
"""Command execution was unsuccessful"""
class LsarOutputError(Exception):
""" Command output could not be parsed"""
"""Command output could not be parsed"""

View File

@ -7,6 +7,7 @@ from ctrlboard_hw.ctrlboard_hw_constants import CtrlBoardHardwareConstants
# pylint: disable=too-few-public-methods
class CtrlboardConfig:
"""Class for central RaSCSI control board configuration parameters"""
ROTATION = 0
WIDTH = 128
HEIGHT = 64

View File

@ -19,8 +19,12 @@ from rascsi_menu_controller import RascsiMenuController
class CtrlBoardMenuUpdateEventHandler(Observer):
"""Class interfacing the menu controller the RaSCSI Control Board hardware."""
def __init__(self, menu_controller: RascsiMenuController, sock_cmd: SocketCmds,
ractl_cmd: RaCtlCmds):
def __init__(
self,
menu_controller: RascsiMenuController,
sock_cmd: SocketCmds,
ractl_cmd: RaCtlCmds,
):
self.message = None
self._menu_controller = menu_controller
self._menu_renderer_config = self._menu_controller.get_menu_renderer().get_config()