Coverage for tests / unit / tools / pytest_tool / conftest.py: 74%
86 statements
« prev ^ index » next coverage.py v7.13.0, created at 2026-04-03 18:53 +0000
1"""Shared fixtures for pytest tool tests."""
3from __future__ import annotations
5import json
6from collections.abc import Generator
7from contextlib import contextmanager
8from typing import TYPE_CHECKING, Any
9from unittest.mock import MagicMock, patch
11import pytest
13from lintro.parsers.pytest.pytest_issue import PytestIssue
14from lintro.tools.implementations.pytest.pytest_config import PytestConfiguration
15from lintro.tools.implementations.pytest.pytest_result_processor import (
16 PytestResultProcessor,
17)
19if TYPE_CHECKING:
20 from lintro.tools.definitions.pytest import PytestPlugin
@pytest.fixture
def mock_test_tool() -> MagicMock:
    """Build a fully mocked PytestPlugin for unit tests.

    Returns:
        MagicMock: Mock PytestPlugin instance.
    """
    mock_tool = MagicMock()
    # Configure attributes and the commonly invoked methods in one shot;
    # dotted names reach into nested child mocks (definition, config).
    mock_tool.configure_mock(
        can_fix=False,
        options={},
        _default_timeout=300,
        **{
            "definition.name": "pytest",
            "config.priority": 90,
            "_get_executable_command.return_value": ["pytest"],
            "_verify_tool_version.return_value": None,
        },
    )
    return mock_tool
@contextmanager
def patch_pytest_tool_for_check(
    tool: PytestPlugin,
    *,
    run_subprocess_return: tuple[bool, str] = (True, "All tests passed"),
    prepare_execution_return: tuple[int, int, Any] = (10, 0, None),
) -> Generator[None]:
    """Apply the standard set of patches used by pytest check tests.

    Args:
        tool: PytestPlugin instance to patch.
        run_subprocess_return: Return value for _run_subprocess.
        prepare_execution_return: Return value for prepare_test_execution.

    Yields:
        None: Context manager for patches.
    """
    patchers = [
        patch.object(tool, "_get_executable_command", return_value=["pytest"]),
        patch.object(tool, "_run_subprocess", return_value=run_subprocess_return),
        patch.object(tool, "_parse_output", return_value=[]),
        patch.object(
            tool.executor,
            "prepare_test_execution",
            return_value=prepare_execution_return,
        ),
    ]
    for patcher in patchers:
        patcher.start()
    try:
        yield
    finally:
        # Undo in reverse order, mirroring nested ``with`` semantics.
        for patcher in reversed(patchers):
            patcher.stop()
@pytest.fixture
def sample_pytest_plugin() -> Generator[PytestPlugin, None, None]:
    """Yield a PytestPlugin whose config loaders are stubbed out.

    Stubbing the loaders keeps initialization free of filesystem access.

    Yields:
        PytestPlugin: A PytestPlugin instance.
    """
    from lintro.tools.definitions.pytest import PytestPlugin

    patchers = [
        patch(
            "lintro.tools.definitions.pytest.load_lintro_ignore",
            return_value=[],
        ),
        patch(
            "lintro.tools.definitions.pytest.load_pytest_config",
            return_value={},
        ),
        patch(
            "lintro.tools.definitions.pytest.load_file_patterns_from_config",
            return_value=[],
        ),
    ]
    for patcher in patchers:
        patcher.start()
    try:
        yield PytestPlugin()
    finally:
        # Stop in reverse order, mirroring nested ``with`` semantics.
        for patcher in reversed(patchers):
            patcher.stop()
@pytest.fixture
def sample_pytest_config() -> PytestConfiguration:
    """Provide a default-initialized PytestConfiguration.

    Returns:
        A PytestConfiguration instance.
    """
    config = PytestConfiguration()
    return config
@pytest.fixture
def result_processor() -> PytestResultProcessor:
    """Provide a fresh PytestResultProcessor for each test.

    Returns:
        A PytestResultProcessor instance.
    """
    processor = PytestResultProcessor()
    return processor
@pytest.fixture
def mock_test_success_output() -> str:
    """Provide canned pytest stdout for an all-passing run.

    Returns:
        A string representing successful pytest output.
    """
    output = "collected 10 items\n10 passed in 0.12s"
    return output
@pytest.fixture
def mock_test_failure_output() -> str:
    """Provide canned pytest stdout for a run containing a failure.

    Returns:
        A string representing failed pytest output.
    """
    output = "collected 10 items\n1 failed, 9 passed in 0.15s"
    return output
@pytest.fixture
def mock_test_mixed_output() -> str:
    """Provide canned pytest stdout mixing failures, skips and an error.

    Returns:
        A string representing mixed pytest output.
    """
    output = "collected 20 items\n2 failed, 15 passed, 2 skipped, 1 error in 1.50s"
    return output
@pytest.fixture
def mock_test_json_success() -> str:
    """Serialize a pytest JSON report in which every test passed.

    Returns:
        A JSON string representing successful pytest results.
    """
    cases = [
        ("test_success", 10, 0.05),
        ("test_another", 20, 0.03),
    ]
    # Key order matches the real pytest-json-report layout.
    tests = [
        {
            "file": "tests/test_example.py",
            "lineno": lineno,
            "name": name,
            "nodeid": f"tests/test_example.py::{name}",
            "outcome": "passed",
            "duration": duration,
        }
        for name, lineno, duration in cases
    ]
    return json.dumps({"tests": tests})
@pytest.fixture
def mock_test_json_failure() -> str:
    """Serialize a pytest JSON report with one failing and one passing test.

    Returns:
        A JSON string representing failed pytest results.
    """
    failing = {
        "file": "tests/test_example.py",
        "lineno": 10,
        "name": "test_failure",
        "nodeid": "tests/test_example.py::test_failure",
        "outcome": "failed",
        "duration": 0.05,
        # "call" carries the failure traceback summary, as pytest-json-report does.
        "call": {"longrepr": "AssertionError: assert 1 == 2"},
    }
    passing = {
        "file": "tests/test_example.py",
        "lineno": 20,
        "name": "test_success",
        "nodeid": "tests/test_example.py::test_success",
        "outcome": "passed",
        "duration": 0.03,
    }
    return json.dumps({"tests": [failing, passing]})
@pytest.fixture
def mock_test_json_mixed() -> str:
    """Serialize a pytest JSON report mixing failed/error/skipped/passed.

    Returns:
        A JSON string representing mixed pytest results.
    """

    def make_case(
        name: str,
        lineno: int,
        outcome: str,
        duration: float,
        **extra: object,
    ) -> dict:
        # Shared shape for every entry; outcome-specific keys (call/longrepr)
        # are appended last so key order matches the real report layout.
        case = {
            "file": "tests/test_example.py",
            "lineno": lineno,
            "name": name,
            "nodeid": f"tests/test_example.py::{name}",
            "outcome": outcome,
            "duration": duration,
        }
        case.update(extra)
        return case

    tests = [
        make_case(
            "test_failure",
            10,
            "failed",
            0.05,
            call={"longrepr": "AssertionError: assert 1 == 2"},
        ),
        make_case(
            "test_error",
            20,
            "error",
            0.02,
            call={"longrepr": "RuntimeError: Something went wrong"},
        ),
        make_case(
            "test_skipped",
            30,
            "skipped",
            0.0,
            longrepr="Skipped: Not implemented yet",
        ),
        make_case("test_success", 40, "passed", 0.03),
    ]
    return json.dumps({"tests": tests})
@pytest.fixture
def mock_test_junit_xml_success() -> str:
    """Provide JUnit XML matching an all-passing pytest run.

    Returns:
        A JUnit XML string representing successful pytest results.
    """
    xml_report = """<?xml version="1.0" encoding="utf-8"?>
<testsuite name="pytest" tests="2" errors="0" failures="0" skipped="0" time="0.08">
  <testcase classname="tests.test_example" name="test_success" file="tests/test_example.py" line="10" time="0.05"/>
  <testcase classname="tests.test_example" name="test_another" file="tests/test_example.py" line="20" time="0.03"/>
</testsuite>
"""
    return xml_report
@pytest.fixture
def mock_test_junit_xml_failure() -> str:
    """Provide JUnit XML with one failing and one passing testcase.

    Returns:
        A JUnit XML string representing failed pytest results.
    """
    xml_report = """<?xml version="1.0" encoding="utf-8"?>
<testsuite name="pytest" tests="2" errors="0" failures="1" skipped="0" time="0.08">
  <testcase classname="tests.test_example" name="test_failure" file="tests/test_example.py" line="10" time="0.05">
    <failure message="AssertionError: assert 1 == 2">AssertionError: assert 1 == 2</failure>
  </testcase>
  <testcase classname="tests.test_example" name="test_success" file="tests/test_example.py" line="20" time="0.03"/>
</testsuite>
"""
    return xml_report
@pytest.fixture
def mock_test_junit_xml_mixed() -> str:
    """Provide JUnit XML mixing failure, error, skip, and pass outcomes.

    Returns:
        A JUnit XML string representing mixed pytest results.
    """
    xml_report = """<?xml version="1.0" encoding="utf-8"?>
<testsuite name="pytest" tests="4" errors="1" failures="1" skipped="1" time="0.10">
  <testcase classname="tests.test_example" name="test_failure" file="tests/test_example.py" line="10" time="0.05">
    <failure message="AssertionError: assert 1 == 2">AssertionError: assert 1 == 2</failure>
  </testcase>
  <testcase classname="tests.test_example" name="test_error" file="tests/test_example.py" line="20" time="0.02">
    <error message="RuntimeError: Something went wrong">RuntimeError: Something went wrong</error>
  </testcase>
  <testcase classname="tests.test_example" name="test_skipped" file="tests/test_example.py" line="30" time="0.0">
    <skipped message="Not implemented yet">Not implemented yet</skipped>
  </testcase>
  <testcase classname="tests.test_example" name="test_success" file="tests/test_example.py" line="40" time="0.03"/>
</testsuite>
"""
    return xml_report
@pytest.fixture
def sample_pytest_issues() -> list[PytestIssue]:
    """Build sample non-passing PytestIssue objects for testing.

    Returns:
        A list of sample PytestIssue objects.
    """
    # (test name, line, message, status, duration) for each sample issue.
    specs = [
        ("test_failure", 10, "AssertionError: assert 1 == 2", "FAILED", 0.05),
        ("test_error", 20, "RuntimeError: Something went wrong", "ERROR", 0.02),
        ("test_skipped", 30, "Not implemented yet", "SKIPPED", 0.0),
    ]
    return [
        PytestIssue(
            file="tests/test_example.py",
            line=line,
            test_name=name,
            message=message,
            test_status=status,
            duration=duration,
            node_id=f"tests/test_example.py::{name}",
        )
        for name, line, message, status, duration in specs
    ]
@pytest.fixture
def sample_passed_issues() -> list[PytestIssue]:
    """Build a single passing PytestIssue for testing.

    Returns:
        A list of PytestIssue objects with passed status.
    """
    passed = PytestIssue(
        file="tests/test_example.py",
        line=10,
        test_name="test_success",
        message="",
        test_status="PASSED",
        duration=0.05,
        node_id="tests/test_example.py::test_success",
    )
    return [passed]
@pytest.fixture
def mock_subprocess_success() -> MagicMock:
    """Mock a completed subprocess whose run succeeded.

    Returns:
        A MagicMock representing successful subprocess execution.
    """
    completed = MagicMock()
    completed.configure_mock(
        returncode=0,
        stdout="collected 10 items\n10 passed in 0.12s",
        stderr="",
    )
    return completed
@pytest.fixture
def mock_subprocess_failure() -> MagicMock:
    """Mock a completed subprocess whose run had a test failure.

    Returns:
        A MagicMock representing failed subprocess execution.
    """
    completed = MagicMock()
    completed.configure_mock(
        returncode=1,
        stdout="collected 10 items\n1 failed, 9 passed in 0.15s",
        stderr="",
    )
    return completed