From 08b7cbb1f4142067d15e37cc4488d1d5a2911be0 Mon Sep 17 00:00:00 2001
From: Stefan Tatschner
Date: Tue, 24 Sep 2024 13:20:37 +0200
Subject: [PATCH] chore: Remove aiofiles dependency

We only write very small files. Unless the artifacts dir is located on,
e.g., a very slow network drive, it is very unlikely that the event
loop is blocked by these writes. Usually only a few bytes are written
to dump files or to separate result files. Let's use the stdlib instead
and get rid of a further dependency.
---
 poetry.lock                                   | 24 +------
 pyproject.toml                                |  2 -
 src/gallia/command/uds.py                     | 15 ++---
 src/gallia/commands/discover/doip.py          | 62 +++++++------
 src/gallia/commands/scan/uds/sa_dump_seeds.py |  8 +--
 src/gallia/dumpcap.py                         |  2 -
 src/gallia/utils.py                           |  6 +-
 7 files changed, 34 insertions(+), 85 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index 18fbaf556..323d3d6fb 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,16 +1,5 @@
 # This file is automatically @generated by Poetry 1.8.1 and should not be changed by hand.
 
-[[package]]
-name = "aiofiles"
-version = "24.1.0"
-description = "File support for asyncio."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"},
-    {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"},
-]
-
 [[package]]
 name = "aiosqlite"
 version = "0.20.0"
@@ -1793,17 +1782,6 @@ files = [
     {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"},
 ]
 
-[[package]]
-name = "types-aiofiles"
-version = "24.1.0.20240626"
-description = "Typing stubs for aiofiles"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "types-aiofiles-24.1.0.20240626.tar.gz", hash = "sha256:48604663e24bc2d5038eac05ccc33e75799b0779e93e13d6a8f711ddc306ac08"},
-    {file = "types_aiofiles-24.1.0.20240626-py3-none-any.whl", hash = "sha256:7939eca4a8b4f9c6491b6e8ef160caee9a21d32e18534a57d5ed90aee47c66b4"},
-]
-
 [[package]]
 name = "types-psutil"
 version = "6.0.0.20240901"
@@ -2135,4 +2113,4 @@ cffi = ["cffi (>=1.11)"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.11,<3.13"
-content-hash = "5bfb99555575baa993a00e4d837942eb4e29471c7c076f5ea999a0d95b76feea"
+content-hash = "8c5cc44da4929da2ce23548769805e0f9d31de944ec6e76f493b5cb18058f8bd"
diff --git a/pyproject.toml b/pyproject.toml
index fab016431..56309b56d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -34,7 +34,6 @@ packages = [
 
 [tool.poetry.dependencies]
 python = ">=3.11,<3.13"
-aiofiles = ">=24.1.0,<25.0"
 aiosqlite = ">=0.18"
 argcomplete = ">=2,<4"
 zstandard = ">=0.19"
@@ -56,7 +55,6 @@ pylsp-rope = "^0.1"
 pytest = ">=7.1,<9.0"
 pytest-asyncio = ">=0.20,<0.25"
 python-lsp-server = "^1.5"
-types-aiofiles = ">=23.1,<25.0"
 types-psutil = ">=5.9.5.10,<7.0.0.0"
 types-tabulate = "^0.9"
 myst-parser = ">=3.0.1,<4.1"
diff --git a/src/gallia/command/uds.py b/src/gallia/command/uds.py
index f3df85351..0c6d3ac33 100644
--- a/src/gallia/command/uds.py
+++ b/src/gallia/command/uds.py
@@ -5,8 +5,6 @@
 import json
 from argparse import ArgumentParser, BooleanOptionalAction, Namespace
 
-import aiofiles
-
 from gallia.command.base import FileNames, Scanner
 from gallia.config import Config
 from gallia.log import get_logger
@@ -157,9 +155,8 @@ async def setup(self, args: Namespace) -> None:
 
         if args.properties is True:
             path = self.artifacts_dir.joinpath(FileNames.PROPERTIES_PRE.value)
-            async with aiofiles.open(path, "w") as file:
-                await file.write(json.dumps(await self.ecu.properties(True), indent=4))
-                await file.write("\n")
+            properties = await self.ecu.properties(True)
+            path.write_text(json.dumps(properties, indent=4) + "\n")
 
         if self.db_handler is not None:
             self._apply_implicit_logging_setting()
@@ -175,13 +172,11 @@ async def setup(self, args: Namespace) -> None:
     async def teardown(self, args: Namespace) -> None:
         if args.properties is True and not self.ecu.transport.is_closed:
             path = self.artifacts_dir.joinpath(FileNames.PROPERTIES_POST.value)
-            async with aiofiles.open(path, "w") as file:
-                await file.write(json.dumps(await self.ecu.properties(True), indent=4))
-                await file.write("\n")
+            properties = await self.ecu.properties(True)
+            path.write_text(json.dumps(properties, indent=4) + "\n")
 
             path_pre = self.artifacts_dir.joinpath(FileNames.PROPERTIES_PRE.value)
-            async with aiofiles.open(path_pre) as file:
-                prop_pre = json.loads(await file.read())
+            prop_pre = json.loads(path_pre.read_text())
 
             if args.compare_properties and await self.ecu.properties(False) != prop_pre:
                 logger.warning("ecu properties differ, please investigate!")
diff --git a/src/gallia/commands/discover/doip.py b/src/gallia/commands/discover/doip.py
index a655676d3..a5d58976c 100644
--- a/src/gallia/commands/discover/doip.py
+++ b/src/gallia/commands/discover/doip.py
@@ -9,7 +9,6 @@
 from itertools import product
 from urllib.parse import parse_qs, urlparse
 
-import aiofiles
 import psutil
 
 from gallia.command import AsyncScript
@@ -316,10 +315,10 @@ async def enumerate_routing_activation_requests(  # noqa: PLR0913
                 f"doip://{tgt_hostname}:{tgt_port}?protocol_version={self.protocol_version}&activation_type={routing_activation_type:#x}&src_addr={source_address:#x}"
             )
             logger.notice(f"[🤯] Holy moly, it actually worked: {targets[-1]}")
-            async with aiofiles.open(
-                self.artifacts_dir.joinpath("1_valid_routing_activation_requests.txt"), "a"
+            with self.artifacts_dir.joinpath("1_valid_routing_activation_requests.txt").open(
+                "a"
             ) as f:
-                await f.write(f"{targets[-1]}\n")
+                f.write(f"{targets[-1]}\n")
 
         if len(targets) > 0:
             logger.notice("[⚔️] It's dangerous to test alone, take one of these:")
@@ -359,10 +358,8 @@ async def enumerate_target_addresses(  # noqa: PLR0913
                 # If we reach this, the request was not denied due to unknown TargetAddress
                 known_targets.append(current_target)
                 logger.notice(f"[🥇] HEUREKA: target address {target_addr:#x} is valid! ")
-                async with aiofiles.open(
-                    self.artifacts_dir.joinpath("3_valid_targets.txt"), "a"
-                ) as f:
-                    await f.write(f"{current_target}\n")
+                with self.artifacts_dir.joinpath("3_valid_targets.txt").open("a") as f:
+                    f.write(f"{current_target}\n")
                 logger.info(f"[⏳] Waiting for reply of target {target_addr:#x}")
 
                 # Hardcoded loop to detect potential broadcasts
@@ -379,20 +376,16 @@ async def enumerate_target_addresses(  # noqa: PLR0913
                     logger.notice(
                         f"[🤑] B-B-B-B-B-B-BROADCAST at TargetAddress {target_addr:#x}! Got reply from {pot_broadcast:#x}"
                     )
-                    async with aiofiles.open(
-                        self.artifacts_dir.joinpath("6_unsolicited_replies.txt"), "a"
-                    ) as f:
-                        await f.write(
+                    with self.artifacts_dir.joinpath("6_unsolicited_replies.txt").open("a") as f:
+                        f.write(
                             f"target_addr={target_addr:#x} yielded reply from {pot_broadcast:#x}; could also be late answer triggered by previous address!\n"
                         )
 
             resp = TesterPresentResponse.parse_static(data)
             logger.notice(f"[🥳] It cannot get nicer: {target_addr:#x} responded: {resp}")
             responsive_targets.append(current_target)
-            async with aiofiles.open(
-                self.artifacts_dir.joinpath("4_responsive_targets.txt"), "a"
-            ) as f:
-                await f.write(f"{current_target}\n")
+            with self.artifacts_dir.joinpath("4_responsive_targets.txt").open("a") as f:
+                f.write(f"{current_target}\n")
 
             if self.db_handler is not None:
                 await self.db_handler.insert_discovery_result(current_target)
@@ -403,36 +396,28 @@ async def enumerate_target_addresses(  # noqa: PLR0913
                 elif e.nack_code == DiagnosticMessageNegativeAckCodes.TargetUnreachable:
                     logger.info(f"[💤] {target_addr:#x} is (currently?) unreachable")
                     unreachable_targets.append(current_target)
-                    async with aiofiles.open(
-                        self.artifacts_dir.joinpath("5_unreachable_targets.txt"), "a"
-                    ) as f:
-                        await f.write(f"{current_target}\n")
+                    with self.artifacts_dir.joinpath("5_unreachable_targets.txt").open("a") as f:
+                        f.write(f"{current_target}\n")
                     continue
                 else:
                     logger.warning(
                         f"[🤷] {target_addr:#x} is behaving strangely: {e.nack_code.name}"
                     )
-                    async with aiofiles.open(
-                        self.artifacts_dir.joinpath("7_targets_with_errors.txt"), "a"
-                    ) as f:
-                        await f.write(f"{target_addr:#x}: {e.nack_code.name}\n")
+                    with self.artifacts_dir.joinpath("7_targets_with_errors.txt").open("a") as f:
+                        f.write(f"{target_addr:#x}: {e.nack_code.name}\n")
                     continue
             except TimeoutError:
                 # This triggers when DoIP ACK but no UDS reply
                 logger.info(f"[🙊] Presumably no active ECU on target address {target_addr:#x}")
-                async with aiofiles.open(
-                    self.artifacts_dir.joinpath("5_unresponsive_targets.txt"), "a"
-                ) as f:
-                    await f.write(f"{current_target}\n")
+                with self.artifacts_dir.joinpath("5_unresponsive_targets.txt").open("a") as f:
+                    f.write(f"{current_target}\n")
                 continue
             except ConnectionError as e:
                 # Whenever this triggers, but sometimes connections are closed not by us
                 logger.warn(f"[🫦] Sexy, but unexpected: {target_addr:#x} triggered {e!r}")
-                async with aiofiles.open(
-                    self.artifacts_dir.joinpath("7_targets_with_errors.txt"), "a"
-                ) as f:
-                    await f.write(f"{target_addr:#x}: {e}\n")
+                with self.artifacts_dir.joinpath("7_targets_with_errors.txt").open("a") as f:
+                    f.write(f"{target_addr:#x}: {e}\n")
 
             # Re-establish DoIP connection
             await conn.close()
             await asyncio.sleep(tcp_connect_delay)
@@ -546,13 +531,12 @@ async def run_udp_discovery(self) -> list[tuple[str, int]]:
 
         if len(found) > 0:
             logger.notice("[💎] Look what valid hosts I've found:")
-            for item in found:
-                url = f"doip://{item[0]}:{item[1]}"
-                logger.notice(url)
-                async with aiofiles.open(
-                    self.artifacts_dir.joinpath("0_valid_hosts.txt"), "a"
-                ) as f:
-                    await f.write(f"{url}\n")
+
+            with self.artifacts_dir.joinpath("0_valid_hosts.txt").open("a") as f:
+                for item in found:
+                    url = f"doip://{item[0]}:{item[1]}"
+                    logger.notice(url)
+                    f.write(f"{url}\n")
         else:
             logger.notice(
                 "[👸] Your princess is in another castle: no DoIP endpoints here it seems..."
diff --git a/src/gallia/commands/scan/uds/sa_dump_seeds.py b/src/gallia/commands/scan/uds/sa_dump_seeds.py
index 453cfbd1e..bafc83f50 100644
--- a/src/gallia/commands/scan/uds/sa_dump_seeds.py
+++ b/src/gallia/commands/scan/uds/sa_dump_seeds.py
@@ -9,8 +9,6 @@
 from argparse import ArgumentParser, Namespace
 from pathlib import Path
 
-import aiofiles
-
 from gallia.command import UDSScanner
 from gallia.config import Config
 from gallia.log import get_logger
@@ -135,7 +133,7 @@ async def main(self, args: Namespace) -> None:
 
         i = -1
         seeds_file = Path.joinpath(self.artifacts_dir, "seeds.bin")
-        file = await aiofiles.open(seeds_file, "wb", buffering=0)
+        file = seeds_file.open("wb", buffering=0)
         duration = args.duration * 60
         start_time = time.time()
         last_seed = b""
@@ -177,7 +175,7 @@ async def main(self, args: Namespace) -> None:
 
             logger.info(f"Received seed of length {len(seed)}")
 
-            await file.write(seed)
+            file.write(seed)
 
             if last_seed == seed:
                 logger.warning("Received the same seed as before")
@@ -222,6 +220,6 @@ async def main(self, args: Namespace) -> None:
                 logger.info(f"Sleeping for {args.sleep} seconds between seed requests…")
                 await asyncio.sleep(args.sleep)
 
-        await file.close()
+        file.close()
         self.log_size(seeds_file, time.time() - start_time)
         await self.ecu.leave_session(session, sleep=args.power_cycle_sleep)
diff --git a/src/gallia/dumpcap.py b/src/gallia/dumpcap.py
index a22f6d344..2454801ad 100644
--- a/src/gallia/dumpcap.py
+++ b/src/gallia/dumpcap.py
@@ -112,8 +112,6 @@ async def stop(self) -> None:
         await self.compressor
 
     async def _compressor(self) -> None:
-        # Gzip support in aiofiles is missing.
-        # https://github.com/Tinche/aiofiles/issues/46
         ready = False
         assert self.proc.stdout
         with await asyncio.to_thread(gzip.open, self.outfile, "wb") as f:
diff --git a/src/gallia/utils.py b/src/gallia/utils.py
index dc0310955..dd8050699 100644
--- a/src/gallia/utils.py
+++ b/src/gallia/utils.py
@@ -16,8 +16,6 @@
 from typing import TYPE_CHECKING, Any, TypeVar
 from urllib.parse import urlparse
 
-import aiofiles
-
 from gallia.log import Loglevel
 
 if TYPE_CHECKING:
@@ -192,9 +190,9 @@ async def write_target_list(
     :params db_handler: if given, urls are also written to the database as discovery results
     :return: None
     """
-    async with aiofiles.open(path, "w") as f:
+    with path.open("w") as f:
         for target in targets:
-            await f.write(f"{target}\n")
+            f.write(f"{target}\n")
 
             if db_handler is not None:
                 await db_handler.insert_discovery_result(str(target))
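
If the artifacts dir ever does sit on a filesystem slow enough for these writes to block noticeably, they can still be kept off the event loop with the stdlib alone, the same way dumpcap.py already offloads gzip through asyncio.to_thread. A minimal sketch, assuming a pathlib.Path destination; the helper name write_small_file, the file name, and the payload are illustrative and not gallia API:

    import asyncio
    import json
    from pathlib import Path


    async def write_small_file(path: Path, data: dict) -> None:
        # Path.write_text runs in a worker thread, so a slow disk or network
        # share cannot stall the event loop.
        await asyncio.to_thread(path.write_text, json.dumps(data, indent=4) + "\n")


    if __name__ == "__main__":
        asyncio.run(write_small_file(Path("properties_pre.json"), {"ecu": "demo"}))

For the few bytes written per artifact in this patch, the plain synchronous calls in the diff are simpler and fast enough, which is why the patch does not bother with the thread offload.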