From 48132f3a4be098100c58f9737ab055ce39deaec2 Mon Sep 17 00:00:00 2001 From: mrbean-bremen Date: Thu, 23 Oct 2025 20:28:01 +0200 Subject: [PATCH] Replace occurrences of "revision" by "edition" - conforms to DICOM vocabulary - includes function names and command line arguments - deprecated --revision argument - slightly refactored EditionReader --- .../{get_revision.py => get_edition.py} | 12 +- .github/workflows/testsuite.yml | 4 +- CHANGES.md | 13 +- README.md | 12 +- dicom_validator/command_line_utils.py | 53 +++++ dicom_validator/dump_dcm_info.py | 57 ++---- dicom_validator/spec_reader/edition_reader.py | 184 ++++++++++-------- dicom_validator/tests/spec_reader/conftest.py | 22 +-- .../tests/spec_reader/test_edition_reader.py | 101 +++++----- .../tests/spec_reader/test_part3_reader.py | 38 ++-- .../tests/spec_reader/test_spec_reader.py | 2 +- dicom_validator/tests/test_cmdline_tools.py | 6 +- dicom_validator/tests/validator/conftest.py | 4 +- dicom_validator/validate_iods.py | 43 +--- 14 files changed, 290 insertions(+), 261 deletions(-) rename .github/workflows/{get_revision.py => get_edition.py} (63%) create mode 100644 dicom_validator/command_line_utils.py diff --git a/.github/workflows/get_revision.py b/.github/workflows/get_edition.py similarity index 63% rename from .github/workflows/get_revision.py rename to .github/workflows/get_edition.py index 4acba0b..ad467af 100644 --- a/.github/workflows/get_revision.py +++ b/.github/workflows/get_edition.py @@ -3,24 +3,24 @@ from dicom_validator.spec_reader.edition_reader import EditionReader -def get_revision(revision, path): +def get_edition(edition, path): reader = EditionReader(path) # we want to recreate the json files for each test run, # so we don't need them cached - reader.get_revision(revision, create_json=False) + reader.get_edition_path(edition, create_json=False) if __name__ == "__main__": parser = argparse.ArgumentParser( - description="Downloads a revision of the DICOM standard" + description="Downloads an edition of the DICOM standard" ) parser.add_argument( - "revision", - help="Standard revision", + "edition", + help="Standard edition", ) parser.add_argument( "path", help="Path for the DICOM specs", ) args = parser.parse_args() - get_revision(args.revision, args.path) + get_edition(args.edition, args.path) diff --git a/.github/workflows/testsuite.yml b/.github/workflows/testsuite.yml index 7cfac43..7160e27 100644 --- a/.github/workflows/testsuite.yml +++ b/.github/workflows/testsuite.yml @@ -34,8 +34,8 @@ jobs: if: steps.cache-dicom.outputs.cache-hit != 'true' run: | pip install -e . - python .github/workflows/get_revision.py 2015b "`pwd`/dicom_validator/tests/fixtures/standard" - python .github/workflows/get_revision.py 2025d "`pwd`/dicom_validator/tests/fixtures/standard" + python .github/workflows/get_edition.py 2015b "`pwd`/dicom_validator/tests/fixtures/standard" + python .github/workflows/get_edition.py 2025d "`pwd`/dicom_validator/tests/fixtures/standard" - name: Install dependencies run: | diff --git a/CHANGES.md b/CHANGES.md index 4fdce1a..d59a213 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -12,18 +12,27 @@ In line with pydicom, support for Python 3.9 will be removed in that version. 
* added typing to production code * the result dictionary generated by `IODValidator.validate()` has changed to an instance of `ValidationResult`, and the log messages in the result have been replaced by error codes +* changed `--revision` to `--edition` to conform to DICOM vocabulary, deprecated + `--revision`; also renamed `revision` to `edition` elsewhere in function names * logging the errors has been decoupled from validation via an error handler argument ### Fixes * tags not allowed in multi-frame functional groups have been listed as errors twice (see [#196](../../issues/196)) + +### Infrastructure +* added Python 3.14 to CI (currently needs development version of `pydicom`) +* removed support for Python 3.9 + +## [Version 0.7.3](https://pypi.python.org/pypi/dicom-validator/0.7.3) (2025-10-13) +Fixes handling of nested sequences. + +### Fixes * fixes to correctly evaluate SR documents (see [#206](../../issues/206)): * sequences defined recursively in the standard are now supported * conditions for including macros inside a sequence are now evaluated on the correct level ### Infrastructure -* added Python 3.14 to CI (currently needs development version of `pydicom`) -* removed support for Python 3.9 * updated the tests for current DICOM version 2025d ## [Version 0.7.2](https://pypi.python.org/pypi/dicom-validator/0.7.2) (2025-08-16) diff --git a/README.md b/README.md index c3c0691..5d73ce6 100644 --- a/README.md +++ b/README.md @@ -41,12 +41,12 @@ pip install dicom-validator ## Usage ``` validate_iods [-h] [--standard-path STANDARD_PATH] - [--revision REVISION] [--force-read] [--recreate-json] + [--edition EDITION] [--force-read] [--recreate-json] [--suppress-vr-warnings] [--verbose] dicomfiles [dicomfiles ...] dump_dcm_info [-h] [--standard-path STANDARD_PATH] - [--revision REVISION] [--max-value-len MAX_VALUE_LEN] + [--edition EDITION] [--max-value-len MAX_VALUE_LEN] [--show-tags [SHOW_TAGS [SHOW_TAGS ...]]] [--show-image-data] [--recreate-json] dicomfiles [dicomfiles ...] @@ -64,8 +64,8 @@ These files are then used by the tools. Periodically (once a month), the tools check for a newer version of the DICOM standard and download it if found. It is also possible to use older versions of the standard via the command line -option `--revision` or `-r`, provided they are available for download -(at the time of writing, standards are available since revision 2014a). A +option `--edition` or `-e`, provided they are available for download +(at the time of writing, standards are available since edition 2014a). A list of currently available editions can be found in */dicom-validator/editions.json* after a tool has been called the first time. @@ -90,7 +90,7 @@ The output for a single file may look like this: ``` (py3_test) c:\dev\GitHub\dicom-validator>validate_iods "c:\dev\DICOM Data\WG02\Enhanced-XA\ENHXA" -Using DICOM revision 2023c +Using DICOM edition 2023c SOP class is "1.2.840.10008.5.1.4.1.1.12.1.1" (Enhanced XA Image IOD) Errors @@ -139,7 +139,7 @@ Only the given standard is used to evaluate the files. If the DICOM file has been written using an older standard, it may conform to that standard, but not to the newest one. Tags that are retired in the version of the standard used for parsing are not considered at all. -You can always check against an older standard by using the `--revision` option. +You can always check against an older standard by using the `--edition` option. 
#### Enumerated values and defined terms Most enumerated values are checked against, but some are ignored due to parsing issues. diff --git a/dicom_validator/command_line_utils.py b/dicom_validator/command_line_utils.py new file mode 100644 index 0000000..c3d4a4a --- /dev/null +++ b/dicom_validator/command_line_utils.py @@ -0,0 +1,54 @@ +import argparse +import os +import warnings +from pathlib import Path + +from dicom_validator.spec_reader.edition_reader import EditionReader +from dicom_validator.validator.dicom_info import DicomInfo + + +def add_edition_args(parser: argparse.ArgumentParser) -> None: + """Add edition-related arguments to the argument parser.""" + parser.add_argument( + "--standard-path", + "-src", + help="Base path with the DICOM specs in docbook and json format", + default=str(Path.home() / "dicom-validator"), + ) + parser.add_argument( + "--edition", + "-e", + help='Standard edition (e.g. "2014c"), year of ' + 'edition, "current" or "local" (latest ' + "locally installed)", + default="current", + ) + parser.add_argument( + "--revision", + "-r", + help="Standard edition - deprecated, use --edition instead", + ) + + +def dicom_info_from_args(args: argparse.Namespace) -> DicomInfo | None: + """Retrieve DICOM info using the edition-related parser arguments.""" + if not os.path.exists(args.standard_path): + print(f"Invalid standard path {args.standard_path} - aborting") + return None + if args.revision: + edition_str = args.revision + warnings.warn( + "--revision is deprecated, use --edition instead", DeprecationWarning + ) + else: + edition_str = args.edition + edition_reader = EditionReader(args.standard_path) + edition = edition_reader.get_edition(edition_str) + if edition is None: + print(f"Invalid DICOM edition {edition_str} - aborting") + return None + destination = edition_reader.get_edition_path(edition, args.recreate_json) + if destination is None: + print(f"Failed to get DICOM edition {edition_str} - aborting") + return None + return edition_reader.load_dicom_info(edition) diff --git a/dicom_validator/dump_dcm_info.py b/dicom_validator/dump_dcm_info.py index 8b461dc..34591e4 100644 --- a/dicom_validator/dump_dcm_info.py +++ b/dicom_validator/dump_dcm_info.py @@ -7,12 +7,11 @@ import re import sys from collections.abc import Iterable, Sequence -from pathlib import Path from pydicom import config, dcmread, Dataset, DataElement from pydicom.errors import InvalidDicomError -from dicom_validator.spec_reader.edition_reader import EditionReader +from dicom_validator.command_line_utils import dicom_info_from_args, add_edition_args from dicom_validator.validator.dicom_info import DicomInfo @@ -35,22 +34,22 @@ def __init__( for uid_dict in dicom_info.dictionary.values(): self.uid_info.update(uid_dict) - tags = tags or [] self.tags: list[str] = [] - for tag in tags: - match = self.tag_regex.match(tag) - if match: - self.tags.append(f"({match.group(1)},{match.group(2)})") - else: - matching = [ - tag_id - for tag_id in dicom_info.dictionary - if dicom_info.dictionary[tag_id]["name"].replace(" ", "") == tag - ] - if matching: - self.tags.append(matching[0]) + if tags is not None: + for tag in tags: + match = self.tag_regex.match(tag) + if match: + self.tags.append(f"({match.group(1)},{match.group(2)})") else: - print(f"{tag} is not a valid tag expression - ignoring") + matching = [ + tag_id + for tag_id in dicom_info.dictionary + if dicom_info.dictionary[tag_id]["name"].replace(" ", "") == tag + ] + if matching: + self.tags.append(matching[0]) + else: + print(f"{tag} is not a valid tag expression - 
ignoring") def print_dataset(self, dataset: Dataset) -> None: dataset.walk( @@ -144,25 +143,12 @@ def dump_directory(self, dir_path: str) -> None: def main() -> int: parser = argparse.ArgumentParser( - description="Dumps DICOM information dictionary from " "DICOM file using PS3.6" + description="Dumps DICOM information dictionary from DICOM file using PS3.6" ) parser.add_argument( "dicomfiles", help="Path(s) of DICOM files or directories to parse", nargs="+" ) - parser.add_argument( - "--standard-path", - "-src", - help="Path with the DICOM specs in docbook " "and json format", - default=str(Path.home() / "dicom-validator"), - ) - parser.add_argument( - "--revision", - "-r", - help='Standard revision (e.g. "2014c"), year of ' - 'revision, "current" or "local" (latest ' - "locally installed)", - default="current", - ) + add_edition_args(parser) parser.add_argument( "--max-value-len", "-ml", @@ -191,15 +177,10 @@ def main() -> int: default=False, ) args = parser.parse_args() - - edition_reader = EditionReader(args.standard_path) - destination = edition_reader.get_revision(args.revision, args.recreate_json) - if destination is None: - print(f"Failed to get DICOM edition {args.revision} - aborting") + dicom_info = dicom_info_from_args(args) + if dicom_info is None: return 1 - json_path = destination / "json" - dicom_info = EditionReader.load_dicom_info(json_path) dumper = DataElementDumper( dicom_info, args.max_value_len, args.show_image_data, args.show_tags ) diff --git a/dicom_validator/spec_reader/edition_reader.py b/dicom_validator/spec_reader/edition_reader.py index 99577cd..2484523 100644 --- a/dicom_validator/spec_reader/edition_reader.py +++ b/dicom_validator/spec_reader/edition_reader.py @@ -51,12 +51,24 @@ class EditionReader: dict_info_json = "dict_info.json" uid_info_json = "uid_info.json" - def __init__(self, path: str) -> None: + def __init__(self, path: str | Path) -> None: self.path = Path(path) self.logger = logging.getLogger() if not self.logger.hasHandlers(): self.logger.addHandler(logging.StreamHandler(sys.stdout)) + def editions_path(self) -> Path: + return self.path / "editions.json" + + def docbook_path(self, edition: str) -> Path: + return self.path / edition / "docbook" + + def json_path(self, edition: str) -> Path: + return self.path / edition / "json" + + def version_path(self, edition: str) -> Path: + return self.json_path(edition) / "version" + def update_edition(self) -> None: try: self.logger.info("Getting DICOM editions...") @@ -70,7 +82,7 @@ def retrieve(self, html_path: Path) -> None: urlretrieve(self.base_url, html_path) def get_editions(self, update: bool = True) -> list[str] | None: - editions_path = self.path / self.json_filename + editions_path = self.editions_path() if editions_path.exists(): if update: today = datetime.datetime.today() @@ -103,66 +115,64 @@ def read_from_html(self) -> list[str]: def write_to_json(self) -> None: editions = self.read_from_html() if editions: - json_path = self.path / self.json_filename - with open(json_path, "w", encoding="utf8") as json_file: + editions_path = self.editions_path() + with open(editions_path, "w", encoding="utf8") as json_file: json_file.write(json.dumps(editions)) - def get_edition(self, revision: str) -> str | None: - """Get the edition matching the revision or None. - The revision can be the edition name, the year of the edition, + def get_edition(self, edition_str: str) -> str | None: + """Get the edition matching the edition or None. 
+ `edition_str` can be the edition name, the year of the edition, 'current', or 'local'. """ - editions_opt = self.get_editions(revision != "local") + editions_opt = self.get_editions(edition_str != "local") if not editions_opt: return None editions = sorted(editions_opt) - if revision in editions: - return revision - if len(revision) == 4: + if edition_str in editions: + return edition_str + if len(edition_str) == 4: for edition in reversed(editions): - if edition.startswith(revision): + if edition.startswith(edition_str): return edition - if revision == "current" or revision == "local": + if edition_str == "current" or edition_str == "local": return editions[-1] return None - def is_current(self, revision: str | None) -> bool: - """Get the edition matching the revision or None. - The revision can be the edition name, the year of the edition, + def is_current(self, edition_str: str | None) -> bool: + """Return True if `edition_str` denotes the most recent edition. + `edition_str` can be the edition name, the year of the edition, or 'current'. """ - if revision is None: + if edition_str is None: return True - editions_opt = self.get_editions(revision != "local") + editions_opt = self.get_editions(edition_str != "local") if not editions_opt: return False editions = sorted(editions_opt) - if revision in editions: - return revision == editions[-1] - if len(revision) == 4: - return editions[-1].startswith(revision) - if revision == "current": + if edition_str in editions: + return edition_str == editions[-1] + if len(edition_str) == 4: + return editions[-1].startswith(edition_str) + if edition_str == "current": return True return False - def check_revision(self, revision_str: str) -> tuple[str | None, Path | None]: - revision = self.get_edition(revision_str) - if revision is not None: - return revision, self.path / revision + def get_edition_and_path(self, edition_str: str) -> tuple[str | None, Path | None]: + edition = self.get_edition(edition_str) + if edition is not None: + return edition, self.path / edition return None, None - def get_chapter( - self, revision: str, chapter: int, destination: Path, is_current: bool - ) -> bool: - file_path = destination / f"part{chapter:02}.xml" + def get_chapter(self, edition: str, chapter: int) -> bool: + file_path = self.docbook_path(edition) / f"part{chapter:02}.xml" if file_path.exists(): return True - revision_dir = "current" if is_current else revision + edition_part = "current" if self.is_current(edition) else edition url = "{0}{1}/source/docbook/part{2:02}/part{2:02}.xml".format( - self.base_url, revision_dir, chapter + self.base_url, edition_part, chapter ) try: - print(f"Downloading DICOM spec {revision} PS3.{chapter}...") + self.logger.info(f"Downloading DICOM spec {edition} PS3.{chapter}...") urlretrieve(url, file_path) return True except BaseException as exception: @@ -176,38 +186,40 @@ def get_chapter( ) return False - @staticmethod - def load_info(json_path: Path, info_json: str) -> dict: + def load_info(self, edition: str, info_json: str) -> dict: + json_path = self.json_path(edition) with open(json_path / info_json, encoding="utf8") as info_file: return json.load(info_file) - @classmethod - def load_dicom_info(cls, json_path: Path) -> DicomInfo: + def load_dicom_info(self, edition: str) -> DicomInfo: return DicomInfo( - cls.load_info(json_path, cls.dict_info_json), - cls.load_info(json_path, cls.iod_info_json), - cls.load_info(json_path, cls.module_info_json), + self.load_info(edition, self.dict_info_json), + self.load_info(edition, self.iod_info_json), +
self.load_info(edition, self.module_info_json), ) - @classmethod - def json_files_exist(cls, json_path: Path) -> bool: + def json_files_exist(self, edition: str) -> bool: + json_path = self.json_path(edition) for filename in ( - cls.dict_info_json, - cls.module_info_json, - cls.iod_info_json, - cls.uid_info_json, + self.dict_info_json, + self.module_info_json, + self.iod_info_json, + self.uid_info_json, ): if not (json_path / filename).exists(): return False return True - @classmethod - def dump_description(cls, description: dict) -> str: + @staticmethod + def dump_description(description: dict) -> str: return json.dumps(description, sort_keys=True, indent=2, cls=DefinitionEncoder) - @classmethod - def create_json_files(cls, docbook_path: Path, json_path: Path) -> None: - print("Creating JSON excerpts from docbook files - this may take a while...") + def create_json_files(self, edition: str) -> None: + json_path = self.json_path(edition) + docbook_path = self.docbook_path(edition) + self.logger.info( + "Creating JSON excerpts from docbook files - this may take a while..." + ) start_time = time.time() part6reader = Part6Reader(docbook_path) dict_info = part6reader.data_elements() @@ -220,62 +232,62 @@ def create_json_files(cls, docbook_path: Path, json_path: Path) -> None: if chapter in chapter_info: for uid in chapter_info[chapter]: definition[uid] = iod_info[chapter] - print(f"Parsing docbooks took {time.time() - start_time:.1f} seconds.") + self.logger.info( + f"Parsing docbooks took {time.time() - start_time:.1f} seconds." + ) start_time = time.time() - with open(json_path / cls.iod_info_json, "w", encoding="utf8") as info_file: - info_file.write(cls.dump_description(definition)) - with open(json_path / cls.module_info_json, "w", encoding="utf8") as info_file: - info_file.write(cls.dump_description(part3reader.module_descriptions())) - with open(json_path / cls.dict_info_json, "w", encoding="utf8") as info_file: - info_file.write(cls.dump_description(dict_info)) - with open(json_path / cls.uid_info_json, "w", encoding="utf8") as info_file: - info_file.write(cls.dump_description(part6reader.all_uids())) - cls.write_current_version(json_path) - print(f"Writing json files took {time.time() - start_time:.1f} seconds.") - print("Done!") - - def get_revision( - self, revision_str: str, recreate_json: bool = False, create_json: bool = True + with open(json_path / self.iod_info_json, "w", encoding="utf8") as info_file: + info_file.write(self.dump_description(definition)) + with open(json_path / self.module_info_json, "w", encoding="utf8") as info_file: + info_file.write(self.dump_description(part3reader.module_descriptions())) + with open(json_path / self.dict_info_json, "w", encoding="utf8") as info_file: + info_file.write(self.dump_description(dict_info)) + with open(json_path / self.uid_info_json, "w", encoding="utf8") as info_file: + info_file.write(self.dump_description(part6reader.all_uids())) + self.write_current_version(edition) + self.logger.info( + f"Writing json files took {time.time() - start_time:.1f} seconds." 
+ ) + self.logger.info("Done!") + + def get_edition_path( + self, edition_str: str, recreate_json: bool = False, create_json: bool = True ) -> Path | None: - revision, destination = self.check_revision(revision_str) - if destination is None or revision is None: - self.logger.error(f"DICOM revision {revision} not found.") + edition, destination = self.get_edition_and_path(edition_str) + if destination is None or edition is None: + self.logger.error(f"DICOM edition {edition_str} not found.") return None - docbook_path = destination / "docbook" + docbook_path = self.docbook_path(edition) docbook_path.mkdir(parents=True, exist_ok=True) - json_path = destination / "json" + json_path = self.json_path(edition) json_path.mkdir(parents=True, exist_ok=True) # download the docbook files for chapter in [3, 4, 6]: if not self.get_chapter( - revision=revision, + edition=edition, chapter=chapter, - destination=docbook_path, - is_current=self.is_current(revision), ): return None if create_json and ( - not self.json_files_exist(json_path) - or not self.is_current_version(json_path) + not self.json_files_exist(edition) + or not self.is_current_version(edition) or recreate_json ): - self.create_json_files(docbook_path, json_path) - print(f"Using DICOM revision {revision}") + self.create_json_files(edition) + print(f"Using DICOM edition {edition}") return destination - @staticmethod - def is_current_version(json_path: Path) -> bool: - version_path = json_path / "version" + def is_current_version(self, edition: str) -> bool: + version_path = self.version_path(edition) if not version_path.exists(): return False with open(version_path, encoding="utf8") as f: return f.read() >= __version__ - @staticmethod - def write_current_version(json_path: Path) -> None: - version_path = json_path / "version" - with open(version_path, "w", encoding="utf8") as f: + def write_current_version(self, edition: str) -> None: + self.json_path(edition).mkdir(parents=True, exist_ok=True) + with open(self.version_path(edition), "w", encoding="utf8") as f: f.write(__version__) diff --git a/dicom_validator/tests/spec_reader/conftest.py b/dicom_validator/tests/spec_reader/conftest.py index 759540a..c65fe1d 100644 --- a/dicom_validator/tests/spec_reader/conftest.py +++ b/dicom_validator/tests/spec_reader/conftest.py @@ -3,7 +3,7 @@ import pytest -CURRENT_REVISION = "2025d" +CURRENT_EDITION = "2025d" def pytest_configure(config): @@ -24,29 +24,29 @@ def standard_path(fixture_path): @pytest.fixture(scope="session") -def revision(request): +def edition(request): if hasattr(request, "param"): - revision = request.param + edition = request.param else: - revision = CURRENT_REVISION - yield revision + edition = CURRENT_EDITION + yield edition @pytest.fixture(scope="session") -def revision_path(standard_path, revision): - yield standard_path / revision +def edition_path(standard_path, edition): + yield standard_path / edition @pytest.fixture(scope="session") -def spec_fixture_path(revision_path): - yield revision_path / "docbook" +def spec_fixture_path(edition_path): + yield edition_path / "docbook" @pytest.fixture(scope="session") -def dict_info(revision_path): +def dict_info(edition_path): from dicom_validator.spec_reader.edition_reader import EditionReader - json_fixture_path = revision_path / "json" + json_fixture_path = edition_path / "json" with open( json_fixture_path / EditionReader.dict_info_json, encoding="utf8" ) as info_file: diff --git a/dicom_validator/tests/spec_reader/test_edition_reader.py
b/dicom_validator/tests/spec_reader/test_edition_reader.py index 8d7bcbd..1ca15e1 100644 --- a/dicom_validator/tests/spec_reader/test_edition_reader.py +++ b/dicom_validator/tests/spec_reader/test_edition_reader.py @@ -1,6 +1,9 @@ import os import time from pathlib import Path +from typing import Any + +from collections.abc import Generator from unittest.mock import patch from xml.etree import ElementTree @@ -8,6 +11,7 @@ from dicom_validator import __version__ from dicom_validator.spec_reader.edition_reader import EditionReader +from dicom_validator.tests.validator.conftest import CURRENT_EDITION pytestmark = pytest.mark.usefixtures("fs") @@ -28,7 +32,7 @@ def retrieve(self, html_path): @pytest.fixture -def base_path(fs): +def base_path(fs) -> Generator[Path, Any, None]: path = Path("user", "dicom-validator") path.mkdir(parents=True) yield path @@ -119,21 +123,21 @@ def test_update_if_no_local_version_exists( assert reader.get_editions(update=False) == ["2018a"] -def test_get_existing_revision(base_path): +def test_get_existing_edition(base_path): reader = MemoryEditionReader( base_path, '2014a' '2014e' ) assert reader.get_edition("2014a") == "2014a" -def test_non_existing_revision(base_path): +def test_non_existing_edition(base_path): reader = MemoryEditionReader( base_path, '2014a' '2014e' ) assert reader.get_edition("2015a") is None -def test_last_revision_in_year(base_path): +def test_last_edition_in_year(base_path): reader = MemoryEditionReader( base_path, '2014a' @@ -143,7 +147,7 @@ def test_last_revision_in_year(base_path): assert reader.get_edition("2014") == "2014c" -def test_current_revision(base_path): +def test_current_edition(base_path): reader = MemoryEditionReader( base_path, '2014a' @@ -153,23 +157,21 @@ def test_current_revision(base_path): assert reader.get_edition("current") == "2015e" -def test_check_revision_existing(fs): +def test_check_edition_existing(fs): base_path = Path("base") reader = MemoryEditionReader(base_path, "") - json_path = base_path / EditionReader.json_filename - fs.create_file(json_path, contents='["2014a", "2014c", "2015a"]') - revision, path = reader.check_revision("2014") - assert revision == "2014c" + fs.create_file(reader.editions_path(), contents='["2014a", "2014c", "2015a"]') + edition, path = reader.get_edition_and_path("2014") + assert edition == "2014c" assert path == base_path / "2014c" -def test_check_revision_nonexisting(fs): +def test_check_edition_nonexisting(fs): base_path = Path("/foo/bar") reader = MemoryEditionReader(base_path, "") - json_path = base_path / EditionReader.json_filename - fs.create_file(json_path, contents='["2014a", "2014c", "2015a"]') - revision, path = reader.check_revision("2016") - assert revision is None + fs.create_file(reader.editions_path(), contents='["2014a", "2014c", "2015a"]') + edition, path = reader.get_edition_and_path("2016") + assert edition is None assert path is None @@ -192,28 +194,29 @@ def test_is_current(base_path): assert reader.is_current(None) -def test_is_current_version(fs, edition_path): - assert not EditionReader.is_current_version(edition_path) - version_path = edition_path / "version" +def test_is_current_version(fs, base_path): + reader = MemoryEditionReader(base_path, "") + assert not reader.is_current_version(CURRENT_EDITION) + version_path = reader.version_path(CURRENT_EDITION) fs.create_file(version_path, contents="0.2.1") - assert not EditionReader.is_current_version(edition_path) + assert not reader.is_current_version(CURRENT_EDITION) version_path.unlink() 
fs.create_file(version_path, contents=__version__) - assert EditionReader.is_current_version(edition_path) + assert reader.is_current_version(CURRENT_EDITION) -def test_write_current_version(fs, edition_path): - assert not EditionReader.is_current_version(edition_path) - fs.create_dir(edition_path) - assert not EditionReader.is_current_version(edition_path) - EditionReader.write_current_version(edition_path) - assert EditionReader.is_current_version(edition_path) +def test_write_current_version(fs, base_path): + reader = MemoryEditionReader(base_path, "") + assert not reader.is_current_version(CURRENT_EDITION) + assert not reader.is_current_version(CURRENT_EDITION) + reader.write_current_version(CURRENT_EDITION) + assert reader.is_current_version(CURRENT_EDITION) def test_recreate_json_if_needed(fs, base_path, edition_path): create_json_files_called = 0 - def create_json_files(cls, docbook_path, json_path): + def create_json_files(self, edition): nonlocal create_json_files_called create_json_files_called += 1 for name in ( @@ -222,37 +225,33 @@ def create_json_files(cls, docbook_path, json_path): "module_info.json", "uid_info.json", ): - path = os.path.join(json_path, name) + path = os.path.join(self.json_path(edition), name) if not os.path.exists(path): fs.create_file(path) docbook_path = base_path / "2014a" / "docbook" for chapter_name in ("part03.xml", "part04.xml", "part06.xml"): fs.create_file(docbook_path / chapter_name) - orig_create_json_files = MemoryEditionReader.create_json_files - try: - MemoryEditionReader.create_json_files = create_json_files - reader = MemoryEditionReader(base_path, "") - fs.create_file(edition_path, contents='["2014a", "2014c", "2015a"]') - assert reader.get_revision("2014e") is None - assert reader.get_revision("2014a") is not None - assert create_json_files_called == 1 - reader.get_revision("2014a") - assert create_json_files_called == 2 - assert create_json_files_called == 2 - json_path = base_path / "2014a" / "json" - EditionReader.write_current_version(json_path) - reader.get_revision("2014a") - assert create_json_files_called == 2 - reader.get_revision("2014a", recreate_json=True) - assert create_json_files_called == 3 - finally: - MemoryEditionReader.create_json_files = orig_create_json_files + reader = MemoryEditionReader(base_path, "") + reader.create_json_files = lambda e: create_json_files(reader, e) + fs.create_file(edition_path, contents='["2014a", "2014c", "2015a"]') + current_edition = "2014a" + assert reader.get_edition_path("2014e") is None + assert reader.get_edition_path(current_edition) is not None + assert create_json_files_called == 1 + reader.get_edition_path(current_edition) + assert create_json_files_called == 2 + assert create_json_files_called == 2 + reader.write_current_version(current_edition) + reader.get_edition_path(current_edition) + assert create_json_files_called == 2 + reader.get_edition_path(current_edition, recreate_json=True) + assert create_json_files_called == 3 @patch("dicom_validator.spec_reader.edition_reader.urlretrieve") @patch("dicom_validator.spec_reader.spec_reader.ElementTree", ElementTree) -def test_get_non_existing_revision( +def test_get_non_existing_edition( retrieve_mock, fs, fixture_path, standard_path, edition_path ): def retrieve(url, path): @@ -267,7 +266,7 @@ def retrieve(url, path): retrieve_mock.side_effect = lambda url, path: retrieve(url, path) json_path = root_path / "2014c" / "json" assert not json_path.exists() - reader.get_revision("2014c") + reader.get_edition_path("2014c") assert 
json_path.exists() assert (json_path / "dict_info.json").exists() assert (json_path / "iod_info.json").exists() @@ -280,7 +279,7 @@ def test_failing_download(retrieve_mock, fs, base_path, edition_path, caplog): reader = MemoryEditionReader(base_path, "") fs.create_file(edition_path, contents='["2014a", "2014c", "2015a"]') retrieve_mock.side_effect = BaseException - assert reader.get_revision("2014c") is None + assert reader.get_edition_path("2014c") is None assert "Failed to download" in caplog.text @@ -295,7 +294,7 @@ def partial_download(url, file_path): reader = MemoryEditionReader(base_path, "") fs.create_file(edition_path, contents='["2014a", "2014c", "2015a"]') retrieve_mock.side_effect = lambda url, path: partial_download(url, path) - assert reader.get_revision("2014c") is None + assert reader.get_edition_path("2014c") is None assert not os.path.exists(part3_path) diff --git a/dicom_validator/tests/spec_reader/test_part3_reader.py b/dicom_validator/tests/spec_reader/test_part3_reader.py index ea7c56a..56f2b32 100644 --- a/dicom_validator/tests/spec_reader/test_part3_reader.py +++ b/dicom_validator/tests/spec_reader/test_part3_reader.py @@ -49,9 +49,9 @@ def test_read_incomplete_doc_file(self, fs): reader.iod_description("A.6") @pytest.mark.parametrize( - "revision,iod_name", + "edition,iod_name", [("2015b", "Computed Tomography Image IOD"), ("2025d", "CT Image IOD")], - indirect=["revision"], + indirect=["edition"], scope="session", ) def test_lookup_sop_class(self, reader, iod_name): @@ -63,9 +63,9 @@ def test_lookup_sop_class(self, reader, iod_name): assert description["title"] == iod_name @pytest.mark.parametrize( - "revision,module_nr", + "edition,module_nr", [("2015b", 27), ("2025d", 29)], - indirect=["revision"], + indirect=["edition"], scope="session", ) def test_get_iod_modules(self, reader, module_nr): @@ -78,7 +78,7 @@ def test_get_iod_modules(self, reader, module_nr): assert module["ref"] == "C.7.5.1" assert module["use"] == "M" - @pytest.mark.parametrize("revision", ["2015b", "2025d"], indirect=True) + @pytest.mark.parametrize("edition", ["2015b", "2025d"], indirect=True) def test_optional_iod_module(self, reader): description = reader.iod_description(chapter="A.38.1") assert "modules" in description @@ -89,9 +89,9 @@ def test_optional_iod_module(self, reader): assert module["use"] == "U" @pytest.mark.parametrize( - "revision,desc_nr", + "edition,desc_nr", [("2015b", 110), ("2025d", 174)], - indirect=["revision"], + indirect=["edition"], scope="session", ) def test_iod_descriptions(self, reader, desc_nr): @@ -102,9 +102,9 @@ def test_iod_descriptions(self, reader, desc_nr): assert "A.38.1" in descriptions @pytest.mark.parametrize( - "revision,macro_nr", + "edition,macro_nr", [("2015b", 24), ("2025d", 28)], - indirect=["revision"], + indirect=["edition"], scope="session", ) def test_group_macros(self, reader, macro_nr): @@ -154,9 +154,9 @@ def test_nested_group_macros(self, reader): assert len(macros) == 16 @pytest.mark.parametrize( - "revision,desc_nr", + "edition,desc_nr", [("2015b", 9), ("2025d", 14)], - indirect=["revision"], + indirect=["edition"], scope="session", ) def test_module_description(self, reader, desc_nr): @@ -169,9 +169,9 @@ def test_module_description(self, reader, desc_nr): assert description["(0012,0031)"]["type"] == "2" @pytest.mark.parametrize( - "revision,desc_nr, seq_desc_nr", + "edition,desc_nr, seq_desc_nr", [("2015b", 3, 3), ("2025d", 7, 4)], - indirect=["revision"], + indirect=["edition"], scope="session", ) def 
test_sequence_inside_module_description(self, reader, desc_nr, seq_desc_nr): @@ -200,9 +200,9 @@ def test_referenced_macro(self, reader): assert "(0028,0103)" in description @pytest.mark.parametrize( - "revision,desc_nr", + "edition,desc_nr", [("2015b", 452), ("2025d", 671)], - indirect=["revision"], + indirect=["edition"], scope="session", ) def test_module_descriptions(self, reader, desc_nr): @@ -210,9 +210,9 @@ def test_module_descriptions(self, reader, desc_nr): assert len(descriptions) == desc_nr @pytest.mark.parametrize( - "revision,include_nr", + "edition,include_nr", [("2015b", 9), ("2025d", 10)], - indirect=["revision"], + indirect=["edition"], scope="session", ) def test_conditional_include_in_sr_module(self, reader, include_nr): @@ -244,7 +244,7 @@ def test_parsed_enum_values(self, reader): assert enums == [{"val": ["FAILURE", "WARNING", "INFORMATIVE"]}] @pytest.mark.parametrize( - "revision", ["2015b", "2025d"], indirect=True, scope="session" + "edition", ["2015b", "2025d"], indirect=True, scope="session" ) def test_linked_enum_values(self, reader): description = reader.module_description("10.24") @@ -257,7 +257,7 @@ def test_linked_enum_values(self, reader): assert tag["enums"] == [{"val": ["CONTINUOUS", "TRIGGERED", "AUTOMATIC"]}] @pytest.mark.parametrize( - "revision", ["2015b", "2025d"], indirect=True, scope="session" + "edition", ["2015b", "2025d"], indirect=True, scope="session" ) def test_graphic_annotation_sequence(self, reader): description = reader.module_description("C.10.5") diff --git a/dicom_validator/tests/spec_reader/test_spec_reader.py b/dicom_validator/tests/spec_reader/test_spec_reader.py index 23ea5b1..4bc8a14 100644 --- a/dicom_validator/tests/spec_reader/test_spec_reader.py +++ b/dicom_validator/tests/spec_reader/test_spec_reader.py @@ -29,5 +29,5 @@ def test_existing_doc_files(fs): def test_cleaned_uid(): - orig_value = "1.2.840.10008.5." 
"\u200b1.\u200b4.\u200b1.\u200b1.\u200b88.\u200b72" + orig_value = "1.2.840.10008.5.\u200b1.\u200b4.\u200b1.\u200b1.\u200b88.\u200b72" assert SpecReader.cleaned_value(orig_value) == "1.2.840.10008.5.1.4.1.1.88.72" diff --git a/dicom_validator/tests/test_cmdline_tools.py b/dicom_validator/tests/test_cmdline_tools.py index 81dd708..fd5ccf9 100644 --- a/dicom_validator/tests/test_cmdline_tools.py +++ b/dicom_validator/tests/test_cmdline_tools.py @@ -22,8 +22,8 @@ def dicom_fixture_path(fixture_path): @pytest.mark.order(0) -@pytest.mark.parametrize("revision", ["2015b", "2025d"]) -def test_validate_sr(revision, caplog, standard_path, dicom_fixture_path): +@pytest.mark.parametrize("edition", ["2015b", "2025d"]) +def test_validate_sr(edition, caplog, standard_path, dicom_fixture_path): # test also for 2015b to test an issue causing an exception rtdose_path = dicom_fixture_path / "rtdose.dcm" # recreate json files to avoid getting the cached ones @@ -32,7 +32,7 @@ def test_validate_sr(revision, caplog, standard_path, dicom_fixture_path): "-src", str(standard_path), "-r", - revision, + edition, "--recreate-json", str(rtdose_path), ] diff --git a/dicom_validator/tests/validator/conftest.py b/dicom_validator/tests/validator/conftest.py index 999e8ca..5bb6147 100644 --- a/dicom_validator/tests/validator/conftest.py +++ b/dicom_validator/tests/validator/conftest.py @@ -7,7 +7,7 @@ from dicom_validator.spec_reader.edition_reader import EditionReader from dicom_validator.validator.dicom_info import DicomInfo -CURRENT_REVISION = "2025d" +CURRENT_EDITION = "2025d" def pytest_configure(config): @@ -30,7 +30,7 @@ def standard_path(): @pytest.fixture(scope="session") def json_fixture_path(standard_path): - yield standard_path / CURRENT_REVISION / "json" + yield standard_path / CURRENT_EDITION / "json" @pytest.fixture(scope="session") diff --git a/dicom_validator/validate_iods.py b/dicom_validator/validate_iods.py index 24f1c83..851e7a8 100644 --- a/dicom_validator/validate_iods.py +++ b/dicom_validator/validate_iods.py @@ -1,18 +1,16 @@ import argparse import logging -from pathlib import Path import sys +from collections.abc import Sequence -from dicom_validator.spec_reader.edition_reader import EditionReader +from dicom_validator.command_line_utils import dicom_info_from_args, add_edition_args from dicom_validator.validator.dicom_file_validator import DicomFileValidator -from collections.abc import Sequence - - -def validate(args: argparse.Namespace, base_path: str | Path) -> int: - json_path = Path(base_path, "json") - dicom_info = EditionReader.load_dicom_info(json_path) +def validate(args: argparse.Namespace) -> int: + dicom_info = dicom_info_from_args(args) + if dicom_info is None: + return 1 log_level = logging.DEBUG if args.verbose else logging.INFO validator = DicomFileValidator( dicom_info, log_level, args.force_read, args.suppress_vr_warnings @@ -29,23 +27,10 @@ def main(args: Sequence[str] | None = None) -> int: parser = argparse.ArgumentParser(description="Validates DICOM file IODs") parser.add_argument( "dicomfiles", - help="Path(s) of DICOM files or directories " "to validate", + help="Path(s) of DICOM files or directories to validate", nargs="+", ) - parser.add_argument( - "--standard-path", - "-src", - help="Base path with the DICOM specs in docbook " "and json format", - default=str(Path.home() / "dicom-validator"), - ) - parser.add_argument( - "--revision", - "-r", - help='Standard revision (e.g. 
"2014c"), year of ' - 'revision, "current" or "local" (latest ' - "locally installed)", - default="current", - ) + add_edition_args(parser) parser.add_argument( "--force-read", action="store_true", @@ -68,17 +53,7 @@ def main(args: Sequence[str] | None = None) -> int: parser.add_argument( "--verbose", "-v", action="store_true", help="Outputs diagnostic information" ) - parsed_args = parser.parse_args(args) - - edition_reader = EditionReader(parsed_args.standard_path) - destination = edition_reader.get_revision( - parsed_args.revision, parsed_args.recreate_json - ) - if destination is None: - print(f"Failed to get DICOM edition {parsed_args.revision} - aborting") - return 1 - - return validate(parsed_args, destination) + return validate(parser.parse_args(args)) if __name__ == "__main__":