Coverage for tests / unit / tools / pytest_tool / test_check_method.py: 100%
39 statements
« prev ^ index » next coverage.py v7.13.0, created at 2026-04-03 18:53 +0000
« prev ^ index » next coverage.py v7.13.0, created at 2026-04-03 18:53 +0000
1"""Tests for PytestPlugin check method."""
3from __future__ import annotations
5from typing import TYPE_CHECKING
6from unittest.mock import patch
8from assertpy import assert_that
10from lintro.enums.pytest_enums import PytestSpecialMode
11from lintro.parsers.pytest.pytest_issue import PytestIssue
13if TYPE_CHECKING:
14 from lintro.tools.definitions.pytest import PytestPlugin
17# =============================================================================
18# Tests for PytestPlugin check method with mocked subprocess
19# =============================================================================
def test_check_success_with_mocked_subprocess(
    sample_pytest_plugin: PytestPlugin,
) -> None:
    """Check succeeds with mocked subprocess returning success.

    Args:
        sample_pytest_plugin: The PytestPlugin instance to test.
    """
    # Short alias keeps the patch stack readable.
    plugin = sample_pytest_plugin
    with (
        patch.object(plugin, "_verify_tool_version", return_value=None),
        patch.object(
            plugin,
            "_run_subprocess",
            return_value=(True, "10 passed in 0.12s"),
        ),
        patch.object(plugin, "_parse_output", return_value=[]),
        patch.object(
            plugin.executor,
            "prepare_test_execution",
            return_value=10,
        ),
        # Return value set directly on the patch instead of via the mock handle.
        patch.object(
            plugin.executor,
            "execute_tests",
            return_value=(True, "10 passed in 0.12s", 0),
        ),
    ):
        result = plugin.check(["tests"], {})

    assert_that(result.success).is_true()
    assert_that(result.name).is_equal_to("pytest")
def test_check_failure_with_mocked_subprocess(
    sample_pytest_plugin: PytestPlugin,
    sample_pytest_issues: list[PytestIssue],
) -> None:
    """Check fails with mocked subprocess returning failure.

    Args:
        sample_pytest_plugin: The PytestPlugin instance to test.
        sample_pytest_issues: List of sample PytestIssue objects.
    """
    plugin = sample_pytest_plugin
    # Only the failing/erroring issues should be surfaced by the parser mock.
    broken = [
        issue
        for issue in sample_pytest_issues
        if issue.test_status in {"FAILED", "ERROR"}
    ]

    with (
        patch.object(plugin, "_verify_tool_version", return_value=None),
        patch.object(
            plugin,
            "_run_subprocess",
            return_value=(False, "2 failed, 8 passed in 0.15s"),
        ),
        patch.object(plugin, "_parse_output", return_value=broken),
        patch.object(
            plugin.executor,
            "prepare_test_execution",
            return_value=10,
        ),
        # Non-zero exit code signals failing tests to the plugin.
        patch.object(
            plugin.executor,
            "execute_tests",
            return_value=(False, "2 failed, 8 passed in 0.15s", 1),
        ),
    ):
        result = plugin.check(["tests"], {})

    assert_that(result.success).is_false()
    assert_that(result.issues_count).is_greater_than(0)
def test_check_handles_executor_not_initialized(
    sample_pytest_plugin: PytestPlugin,
) -> None:
    """Check handles case when executor is None.

    Args:
        sample_pytest_plugin: The PytestPlugin instance to test.
    """
    plugin = sample_pytest_plugin
    # Simulate a plugin whose executor was never set up.
    plugin.executor = None

    with patch.object(plugin, "_verify_tool_version", return_value=None):
        result = plugin.check(["tests"], {})

    assert_that(result.success).is_false()
    assert_that(result.output).contains("not initialized")
def test_check_handles_result_processor_not_initialized(
    sample_pytest_plugin: PytestPlugin,
) -> None:
    """Check handles case when result_processor is None.

    Args:
        sample_pytest_plugin: The PytestPlugin instance to test.
    """
    plugin = sample_pytest_plugin
    # Executor works fine; only the result processor is missing.
    plugin.result_processor = None

    with (
        patch.object(plugin, "_verify_tool_version", return_value=None),
        patch.object(
            plugin.executor,
            "prepare_test_execution",
            return_value=10,
        ),
        patch.object(
            plugin.executor,
            "execute_tests",
            return_value=(True, "10 passed", 0),
        ),
        patch.object(plugin, "_parse_output", return_value=[]),
    ):
        result = plugin.check(["tests"], {})

    assert_that(result.success).is_false()
    assert_that(result.output).contains("not initialized")
163# =============================================================================
164# Tests for pytest test collection mode
165# =============================================================================
def test_collect_only_mode_enabled(
    sample_pytest_plugin: PytestPlugin,
) -> None:
    """Collect only mode is enabled correctly.

    Args:
        sample_pytest_plugin: The PytestPlugin instance to test.
    """
    sample_pytest_plugin.set_options(collect_only=True)

    # Enabling collect_only should both flip the flag and mark the
    # configuration as running in a special (non-test-executing) mode.
    config = sample_pytest_plugin.pytest_config
    assert_that(config.collect_only).is_true()
    assert_that(config.is_special_mode()).is_true()
def test_collect_only_returns_special_mode(
    sample_pytest_plugin: PytestPlugin,
) -> None:
    """Collect only returns correct special mode name.

    Args:
        sample_pytest_plugin: The PytestPlugin instance to test.
    """
    sample_pytest_plugin.set_options(collect_only=True)

    # The reported special mode must match the COLLECT_ONLY enum value.
    reported = sample_pytest_plugin.pytest_config.get_special_mode()
    assert_that(reported).is_equal_to(PytestSpecialMode.COLLECT_ONLY.value)