Skip to content

Make MypyResults line-based #191

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Mar 12, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
82 changes: 48 additions & 34 deletions src/pytest_mypy/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -235,8 +235,10 @@ def repr_failure(
return super().repr_failure(excinfo)


def _error_severity(error: str) -> str:
components = [component.strip() for component in error.split(":")]
def _error_severity(line: str) -> Optional[str]:
components = [component.strip() for component in line.split(":", 3)]
if len(components) < 2:
return None
# The second component is either the line or the severity:
# demo/note.py:2: note: By default the bodies of untyped functions are not checked
# demo/sub/conftest.py: error: Duplicate module named "conftest"
Expand All @@ -249,20 +251,22 @@ class MypyFileItem(MypyItem):
def runtest(self) -> None:
"""Raise an exception if mypy found errors for this item."""
results = MypyResults.from_session(self.session)
abspath = str(self.path.resolve())
errors = [
error.partition(":")[2].strip()
for error in results.abspath_errors.get(abspath, [])
]
if errors and not all(_error_severity(error) == "note" for error in errors):
lines = results.path_lines.get(self.path.resolve(), [])
if lines and not all(_error_severity(line) == "note" for line in lines):
if self.session.config.option.mypy_xfail:
self.add_marker(
pytest.mark.xfail(
raises=MypyError,
reason="mypy errors are expected by --mypy-xfail.",
)
)
raise MypyError(file_error_formatter(self, results, errors))
raise MypyError(
file_error_formatter(
self,
results,
errors=[line.partition(":")[2].strip() for line in lines],
)
)

def reportinfo(self) -> Tuple[str, None, str]:
"""Produce a heading for the test report."""
Expand Down Expand Up @@ -296,24 +300,32 @@ def runtest(self) -> None:
class MypyResults:
"""Parsed results from Mypy."""

_abspath_errors_type = typing.Dict[str, typing.List[str]]
_encoding = "utf-8"

opts: List[str]
args: List[str]
stdout: str
stderr: str
status: int
abspath_errors: _abspath_errors_type
unmatched_stdout: str
path_lines: Dict[Optional[Path], List[str]]

def dump(self, results_f: IO[bytes]) -> None:
"""Cache results in a format that can be parsed by load()."""
results_f.write(json.dumps(vars(self)).encode(self._encoding))
prepared = vars(self).copy()
prepared["path_lines"] = {
str(path or ""): lines for path, lines in prepared["path_lines"].items()
}
results_f.write(json.dumps(prepared).encode(self._encoding))

@classmethod
def load(cls, results_f: IO[bytes]) -> MypyResults:
"""Get results cached by dump()."""
return cls(**json.loads(results_f.read().decode(cls._encoding)))
prepared = json.loads(results_f.read().decode(cls._encoding))
prepared["path_lines"] = {
Path(path) if path else None: lines
for path, lines in prepared["path_lines"].items()
}
return cls(**prepared)

@classmethod
def from_mypy(
Expand All @@ -326,33 +338,31 @@ def from_mypy(

if opts is None:
opts = mypy_argv[:]
abspath_errors = {
str(path.resolve()): [] for path in paths
} # type: MypyResults._abspath_errors_type
args = [str(path) for path in paths]

cwd = Path.cwd()
stdout, stderr, status = mypy.api.run(
opts + [str(Path(key).relative_to(cwd)) for key in abspath_errors.keys()]
)
stdout, stderr, status = mypy.api.run(opts + args)

unmatched_lines = []
path_lines: Dict[Optional[Path], List[str]] = {
path.resolve(): [] for path in paths
}
path_lines[None] = []
for line in stdout.split("\n"):
if not line:
continue
path, _, error = line.partition(":")
abspath = str(Path(path).resolve())
path = Path(line.partition(":")[0]).resolve()
try:
abspath_errors[abspath].append(line)
lines = path_lines[path]
except KeyError:
unmatched_lines.append(line)
lines = path_lines[None]
lines.append(line)

return cls(
opts=opts,
args=args,
stdout=stdout,
stderr=stderr,
status=status,
abspath_errors=abspath_errors,
unmatched_stdout="\n".join(unmatched_lines),
path_lines=path_lines,
)

@classmethod
Expand All @@ -364,9 +374,10 @@ def from_session(cls, session: pytest.Session) -> MypyResults:
with open(mypy_results_path, mode="rb") as results_f:
results = cls.load(results_f)
except FileNotFoundError:
cwd = Path.cwd()
results = cls.from_mypy(
[
item.path
item.path.relative_to(cwd)
for item in session.items
if isinstance(item, MypyFileItem)
],
Expand Down Expand Up @@ -408,14 +419,17 @@ def pytest_terminal_summary(
else:
for note in (
unreported_note
for errors in results.abspath_errors.values()
if all(_error_severity(error) == "note" for error in errors)
for unreported_note in errors
for path, lines in results.path_lines.items()
if path is not None
if all(_error_severity(line) == "note" for line in lines)
for unreported_note in lines
):
terminalreporter.write_line(note)
if results.unmatched_stdout:
if results.path_lines.get(None):
color = {"red": True} if results.status else {"green": True}
terminalreporter.write_line(results.unmatched_stdout, **color)
terminalreporter.write_line(
"\n".join(results.path_lines[None]), **color
)
if results.stderr:
terminalreporter.write_line(results.stderr, yellow=True)

Expand Down
14 changes: 9 additions & 5 deletions tests/test_pytest_mypy.py
Original file line number Diff line number Diff line change
Expand Up @@ -532,7 +532,6 @@ def test_mypy_results_from_mypy_with_opts():
"""MypyResults.from_mypy respects passed options."""
mypy_results = pytest_mypy.MypyResults.from_mypy([], opts=["--version"])
assert mypy_results.status == 0
assert mypy_results.abspath_errors == {}
assert str(MYPY_VERSION) in mypy_results.stdout


Expand All @@ -552,11 +551,11 @@ def pytest_configure(config):
with open(mypy_config_stash.mypy_results_path, mode="wb") as results_f:
pytest_mypy.MypyResults(
opts=[],
args=[],
stdout="",
stderr="",
status=0,
abspath_errors={},
unmatched_stdout="",
path_lines={},
).dump(results_f)
""",
)
Expand Down Expand Up @@ -630,11 +629,11 @@ def pytest_configure(config):
with open(mypy_config_stash.mypy_results_path, mode="wb") as results_f:
pytest_mypy.MypyResults(
opts=[],
args=[],
stdout="{stdout}",
stderr="",
status=0,
abspath_errors={{}},
unmatched_stdout="",
path_lines={{}},
).dump(results_f)
""",
)
Expand All @@ -644,3 +643,8 @@ def pytest_configure(config):
result = testdir.runpytest_subprocess("--mypy-xfail", *xdist_args)
assert result.ret == pytest.ExitCode.OK
assert stdout in result.stdout.str()


def test_error_severity():
    """A line that does not carry an error/note report yields no severity."""
    severity = pytest_mypy._error_severity("arbitrary line with no error")
    assert severity is None
Loading