Feat: Add new specific comparators #63
66 tests run, 61 passed, 0 skipped, 5 failed.
Annotations
Check failure on line 25 in .tox/py310/tmp/test_missing_deps0/test_missing_deps.py
github-actions / JUnit Test Report
test_missing_deps
assert '' == "Loading the ...[voxcell]'.\n"
- Loading the morphio module without the required dependencies installed (requirements are the following: morphio>=3.3.6 and morph_tool>=2.9). Will crash at runtime if the related functionalities are used. These dependencies can be installed with 'pip install dir-content-diff[morphio]'.
- Loading the voxcell module without the required dependencies installed (requirement is the following: voxcell>=3.1.1). Will crash at runtime if the related functionalities are used. These dependencies can be installed with 'pip install dir-content-diff[voxcell]'.
Raw output
tmp_path = PosixPath('/home/runner/work/dir-content-diff/dir-content-diff/.tox/py310/tmp/test_missing_deps0')
    @pytest.mark.comparators_missing_deps
    def test_missing_deps(tmp_path):
        """Test missing dependencies."""
        root_dir = importlib.resources.files("dir_content_diff")  # pylint: disable=no-member
        comparator_dir = root_dir / "comparators"
        imported_comparators = [
            f"import dir_content_diff.comparators.{i}\n"
            for i in dir(dir_content_diff.comparators)
            if "_" not in i and (comparator_dir / i).with_suffix(".py").exists()
        ]
        missing_deps_file = tmp_path / "test_missing_deps.py"
        with missing_deps_file.open(mode="w", encoding="utf8") as f:
            f.writelines(imported_comparators)
            f.flush()
        res = run(["python", str(missing_deps_file)], capture_output=True, check=True)
>       assert res.stderr.decode() == (
            "Loading the morphio module without the required dependencies installed "
            "(requirements are the following: morphio>=3.3.6 and morph_tool>=2.9). "
            "Will crash at runtime if the related functionalities are used. "
            "These dependencies can be installed with 'pip install dir-content-diff[morphio]'."
            "\n"
            "Loading the voxcell module without the required dependencies installed "
            "(requirement is the following: voxcell>=3.1.1). "
            "Will crash at runtime if the related functionalities are used. "
            "These dependencies can be installed with 'pip install dir-content-diff[voxcell]'.\n"
        )
E       assert '' == "Loading the ...[voxcell]'.\n"
E
E         - Loading the morphio module without the required dependencies installed (requirements are the following: morphio>=3.3.6 and morph_tool>=2.9). Will crash at runtime if the related functionalities are used. These dependencies can be installed with 'pip install dir-content-diff[morphio]'.
E         - Loading the voxcell module without the required dependencies installed (requirement is the following: voxcell>=3.1.1). Will crash at runtime if the related functionalities are used. These dependencies can be installed with 'pip install dir-content-diff[voxcell]'.

tests/test_missing_deps.py:25: AssertionError
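Note: the left-hand side of the diff is an empty string, which suggests the optional dependencies were actually importable in the py310 tox environment, so the import-time warning branch never ran. For context, a minimal sketch of the guard such a message implies — the try/except structure and placement are assumptions, only the message text comes from the assertion above:

    import sys

    try:
        # optional dependencies of the morphio comparator
        import morph_tool  # noqa: F401
        import morphio  # noqa: F401
    except ImportError:
        # emit the warning the test expects to capture on stderr
        print(
            "Loading the morphio module without the required dependencies installed "
            "(requirements are the following: morphio>=3.3.6 and morph_tool>=2.9). "
            "Will crash at runtime if the related functionalities are used. "
            "These dependencies can be installed with 'pip install dir-content-diff[morphio]'.",
            file=sys.stderr,
        )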
Check failure on line 119 in tests/test_pytest_plugin.py
github-actions / JUnit Test Report
test_pytest_plugin.test_export_formatted_data[False-None]
AssertionError: assert {'errors': 1,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
Omitting 4 identical items, use -vv to show
Differing items:
{'passed': 0} != {'passed': 3}
{'errors': 1} != {'errors': 0}
Use -v to get more diff
Raw output
ref_tree = PosixPath('/home/runner/work/dir-content-diff/dir-content-diff/.tox/py310/tmp/test_export_formatted_data_Fal0/ref')
res_tree_equal = PosixPath('/home/runner/work/dir-content-diff/dir-content-diff/.tox/py310/tmp/test_export_formatted_data_Fal0/res')
ref_csv = PosixPath('/home/runner/work/dir-content-diff/dir-content-diff/.tox/py310/tmp/test_export_formatted_data_Fal0/ref/file.csv')
res_csv_equal = PosixPath('/home/runner/work/dir-content-diff/dir-content-diff/.tox/py310/tmp/test_export_formatted_data_Fal0/res/file.csv')
tmp_conftest = '\n from pathlib import Path\n\n import pytest\n\n @pytest.fixture\n def ref_path():\n ...th("/home/runner/work/dir-content-diff/dir-content-diff/.tox/py310/tmp/test_export_formatted_data_Fal0/res")\n '
pytester = <Pytester PosixPath('/home/runner/work/dir-content-diff/dir-content-diff/.tox/py310/tmp/test_export_formatted_data0')>
do_export = False, export_suffix = None, registry_reseter = None
    @pytest.mark.parametrize(
        "do_export, export_suffix",
        [
            [False, None],
            [False, "_CMD_SUFFIX"],
            [True, None],
            [True, "_CMD_SUFFIX"],
        ],
    )
    def test_export_formatted_data(
        ref_tree,
        res_tree_equal,
        ref_csv,
        res_csv_equal,
        tmp_conftest,
        pytester,
        do_export,
        export_suffix,
        registry_reseter,
    ):
        """Test that the formatted files are properly exported."""
        args = []
        if export_suffix is None:
            suffix = "_FORMATTED"
        else:
            suffix = export_suffix
        expected_dir = f"""res_path.with_name(res_path.name + "{suffix}")"""
        if not do_export:
            tester = f"""assert not {expected_dir}.exists()"""
        else:
            args.append("--dcd-export-formatted-data")
            if export_suffix is not None:
                args.append("--dcd-export-suffix")
                args.append(export_suffix)
            tester = """assert sorted(expected_dir.iterdir()) == [
                (expected_dir / "file").with_suffix(ext)
                for ext in [".csv", ".ini", ".json", ".xml", ".yaml"]
            ]"""
        expected_dir_str = f"""expected_dir = {expected_dir}"""
        remover = """rmtree(expected_dir, ignore_errors=True)"""
        # create a temporary conftest.py file
        pytester.makeconftest(tmp_conftest)
        # create a temporary pytest test file
        pytester.makepyfile(
            f"""
            from shutil import rmtree
            import dir_content_diff
            import dir_content_diff.pandas
            from dir_content_diff import assert_equal_trees
            dir_content_diff.reset_comparators()
            dir_content_diff.pandas.register()
            def test_export_formatted_data_default(ref_path, res_path):
                {expected_dir_str}
                {remover}
                assert_equal_trees(ref_path, res_path)
                {tester}
            def test_export_formatted_data_no_suffix(ref_path, res_path):
                expected_dir = res_path.with_name(res_path.name + "_FORMATTED")
                {remover}
                assert_equal_trees(ref_path, res_path, export_formatted_files={do_export})
                {tester}
            def test_export_formatted_data_suffix(ref_path, res_path):
                expected_dir = res_path.with_name(res_path.name + "{suffix}")
                {remover}
                assert_equal_trees(
                    ref_path,
                    res_path,
                    export_formatted_files="{suffix if do_export else False}",
                )
                {tester}
            """
        )
        # run all tests with pytest
        result = pytester.runpytest(*args)
        # check that all 3 tests passed
>       result.assert_outcomes(passed=3)
E       AssertionError: assert {'errors': 1,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
E
E         Omitting 4 identical items, use -vv to show
E         Differing items:
E         {'passed': 0} != {'passed': 3}
E         {'errors': 1} != {'errors': 0}
E         Use -v to get more diff

/home/runner/work/dir-content-diff/dir-content-diff/tests/test_pytest_plugin.py:119: AssertionError
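The outcome diff ({'errors': 1}, {'passed': 0}) means the generated test module errored before any of its three tests ran, rather than failing an assertion. A debugging sketch, assuming only the documented pytest Pytester/RunResult API, that could replace the assert_outcomes call at tests/test_pytest_plugin.py:119 to surface the hidden error:

    # RunResult captures the full pytest output of the inner run, so the
    # collection/setup error can be printed before reducing it to a summary.
    result = pytester.runpytest("-ra", "-vv", *args)
    print(result.stdout.str())                # full report, including the error
    outcomes = result.parseoutcomes()         # e.g. {'errors': 1, 'passed': 0, ...}
    assert outcomes.get("errors", 0) == 0, "generated test module failed to start"
    result.assert_outcomes(passed=3)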
Check failure on line 119 in tests/test_pytest_plugin.py
github-actions / JUnit Test Report
test_pytest_plugin.test_export_formatted_data[False-_CMD_SUFFIX]
AssertionError: assert {'errors': 1,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
Omitting 4 identical items, use -vv to show
Differing items:
{'passed': 0} != {'passed': 3}
{'errors': 1} != {'errors': 0}
Use -v to get more diff
Raw output
do_export = False, export_suffix = '_CMD_SUFFIX', registry_reseter = None
(Fixture paths, test source, and traceback are identical to test_export_formatted_data[False-None] above.)
Check failure on line 119 in tests/test_pytest_plugin.py
github-actions / JUnit Test Report
test_pytest_plugin.test_export_formatted_data[True-None]
AssertionError: assert {'errors': 1,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
Omitting 4 identical items, use -vv to show
Differing items:
{'passed': 0} != {'passed': 3}
{'errors': 1} != {'errors': 0}
Use -v to get more diff
Raw output
do_export = True, export_suffix = None, registry_reseter = None
(Fixture paths, test source, and traceback are identical to test_export_formatted_data[False-None] above.)
Check failure on line 119 in tests/test_pytest_plugin.py
github-actions / JUnit Test Report
test_pytest_plugin.test_export_formatted_data[True-_CMD_SUFFIX]
AssertionError: assert {'errors': 1,...pped': 0, ...} == {'errors': 0,...pped': 0, ...}
Omitting 4 identical items, use -vv to show
Differing items:
{'passed': 0} != {'passed': 3}
{'errors': 1} != {'errors': 0}
Use -v to get more diff
Raw output
do_export = True, export_suffix = '_CMD_SUFFIX', registry_reseter = None
(Fixture paths, test source, and traceback are identical to test_export_formatted_data[False-None] above.)
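All three generated tests locate the export directory as a sibling of the result tree through pathlib's with_name. A standalone illustration of that pattern (the paths are hypothetical):

    from pathlib import Path

    # the tests build this same expression as a string and inject it via f-string
    res_path = Path("/tmp/work/res")
    expected_dir = res_path.with_name(res_path.name + "_FORMATTED")
    assert expected_dir == Path("/tmp/work/res_FORMATTED")  # sibling directory, same parent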