Coverage for tests / unit / utils / test_display_helpers.py: 100%
80 statements
« prev ^ index » next coverage.py v7.13.0, created at 2026-04-03 18:53 +0000
1"""Unit tests for display_helpers module.
3Tests for ASCII art display, final status printing, and module constants.
4"""
6from __future__ import annotations
8from collections.abc import Callable
9from typing import TYPE_CHECKING
10from unittest.mock import MagicMock, patch
12import pytest
13from assertpy import assert_that
15from lintro.enums.action import Action
16from lintro.utils.display_helpers import (
17 BORDER_LENGTH,
18 INFO_BORDER_LENGTH,
19 print_ascii_art,
20 print_final_status,
21 print_final_status_format,
22)
24if TYPE_CHECKING:
25 from collections.abc import Generator
28# --- Fixtures ---
@pytest.fixture
def console_capture() -> Generator[tuple[list[str], Callable[..., None]], None, None]:
    """Yield a capture list together with a console-like recorder callable.

    Yields:
        tuple[list[str], Callable[..., None]]: Captured lines and the recorder
            function that appends every call's text to that list.
    """
    captured: list[str] = []

    def _record(text: str = "") -> None:
        captured.append(text)

    yield captured, _record
46# --- Tests for print_ascii_art ---
@pytest.mark.parametrize(
    ("issue_count", "expected_file", "art_content", "expected_in_output"),
    [
        pytest.param(
            0,
            "success.txt",
            [" \\o/ ", " SUCCESS "],
            "SUCCESS",
            id="zero_issues_loads_success_art",
        ),
        pytest.param(
            5,
            "fail.txt",
            [" FAIL ", " x_x "],
            "FAIL",
            id="nonzero_issues_loads_fail_art",
        ),
        pytest.param(
            1,
            "fail.txt",
            [" FAIL ", " x_x "],
            "FAIL",
            id="single_issue_loads_fail_art",
        ),
    ],
)
@patch("lintro.utils.display_helpers.read_ascii_art")
def test_print_ascii_art_selects_correct_file(
    mock_read: MagicMock,
    console_capture: tuple[list[str], Callable[..., None]],
    issue_count: int,
    expected_file: str,
    art_content: list[str],
    expected_in_output: str,
) -> None:
    """Check that the art file requested matches the issue count.

    Args:
        mock_read: Patched read_ascii_art.
        console_capture: Fixture yielding the capture list and console callable.
        issue_count: Simulated number of issues.
        expected_file: Filename print_ascii_art is expected to request.
        art_content: Stubbed art lines the mock returns.
        expected_in_output: Fragment that must appear in the console output.
    """
    captured, console = console_capture
    mock_read.return_value = art_content

    print_ascii_art(console, issue_count=issue_count)

    mock_read.assert_called_once_with(filename=expected_file)
    assert_that(captured).is_length(1)
    assert_that(captured[0]).contains(expected_in_output)
@patch("lintro.utils.display_helpers.read_ascii_art")
def test_print_ascii_art_no_output_when_empty(
    mock_read: MagicMock,
    console_capture: tuple[list[str], Callable[..., None]],
) -> None:
    """Check that nothing is printed when the art file yields no lines.

    Args:
        mock_read: Patched read_ascii_art.
        console_capture: Fixture yielding the capture list and console callable.
    """
    captured, console = console_capture
    mock_read.return_value = []

    print_ascii_art(console, issue_count=0)

    assert_that(captured).is_empty()
@patch("lintro.utils.display_helpers.read_ascii_art")
def test_print_ascii_art_handles_exception_gracefully(
    mock_read: MagicMock,
    console_capture: tuple[list[str], Callable[..., None]],
) -> None:
    """Check that a failing art read is swallowed and produces no output.

    Args:
        mock_read: Patched read_ascii_art.
        console_capture: Fixture yielding the capture list and console callable.
    """
    captured, console = console_capture
    mock_read.side_effect = FileNotFoundError("Art file not found")

    # The helper is expected to log the failure and return, not raise.
    print_ascii_art(console, issue_count=0)

    assert_that(captured).is_empty()
143# --- Tests for print_final_status ---
@pytest.mark.parametrize(
    ("action", "total_issues", "expected_message"),
    [
        pytest.param(
            Action.CHECK,
            0,
            "No issues found",
            id="check_no_issues_shows_success",
        ),
        pytest.param(
            Action.CHECK,
            5,
            "Found 5 issues",
            id="check_with_issues_shows_count",
        ),
        pytest.param(
            Action.CHECK,
            1,
            "Found 1 issues",
            id="check_single_issue_shows_count",
        ),
        pytest.param(
            Action.FIX,
            0,
            "No issues found",
            id="fix_no_issues_shows_success",
        ),
        pytest.param(
            Action.FIX,
            3,
            "Fixed 3 issues",
            id="fix_with_issues_shows_fixed_count",
        ),
        pytest.param(
            Action.FIX,
            1,
            "Fixed 1 issues",
            id="fix_single_issue_shows_fixed_count",
        ),
    ],
)
def test_print_final_status_message_content(
    console_capture: tuple[list[str], Callable[..., None]],
    action: Action,
    total_issues: int,
    expected_message: str,
) -> None:
    """Check the status message emitted for each action / issue-count pair.

    Args:
        console_capture: Fixture yielding the capture list and console callable.
        action: Action under test (CHECK or FIX).
        total_issues: Issue count passed to the helper.
        expected_message: Fragment expected somewhere in the output.
    """
    captured, console = console_capture

    print_final_status(console, action, total_issues=total_issues)

    assert_that("".join(captured)).contains(expected_message)
def test_print_final_status_outputs_blank_line_at_end(
    console_capture: tuple[list[str], Callable[..., None]],
) -> None:
    """Check that the status output terminates with an empty line.

    Args:
        console_capture: Fixture yielding the capture list and console callable.
    """
    captured, console = console_capture

    print_final_status(console, Action.CHECK, total_issues=0)

    assert_that(captured).is_not_empty()
    assert_that(captured[-1]).is_equal_to("")
def test_print_final_status_produces_output(
    console_capture: tuple[list[str], Callable[..., None]],
) -> None:
    """Check that at least one line is emitted for a clean check run.

    Args:
        console_capture: Fixture yielding the capture list and console callable.
    """
    captured, console = console_capture

    print_final_status(console, Action.CHECK, total_issues=0)

    assert_that(captured).is_not_empty()
240# --- Tests for print_final_status_format ---
@pytest.mark.parametrize(
    ("total_fixed", "total_remaining", "expected_messages"),
    [
        pytest.param(
            0,
            0,
            ["No issues found"],
            id="no_issues_no_fixes",
        ),
        pytest.param(
            5,
            0,
            ["5 fixed"],
            id="all_fixed_no_remaining",
        ),
        pytest.param(
            3,
            2,
            ["3 fixed", "2 remaining"],
            id="some_fixed_some_remaining",
        ),
        pytest.param(
            0,
            4,
            ["4 remaining"],
            id="none_fixed_some_remaining",
        ),
        pytest.param(
            10,
            5,
            ["10 fixed", "5 remaining"],
            id="many_fixed_some_remaining",
        ),
    ],
)
def test_print_final_status_format_message_content(
    console_capture: tuple[list[str], Callable[..., None]],
    total_fixed: int,
    total_remaining: int,
    expected_messages: list[str],
) -> None:
    """Check fixed/remaining counts appear in the formatted status output.

    Args:
        console_capture: Fixture yielding the capture list and console callable.
        total_fixed: Count of fixed issues passed to the helper.
        total_remaining: Count of remaining issues passed to the helper.
        expected_messages: Fragments that must all appear in the output.
    """
    captured, console = console_capture

    print_final_status_format(
        console,
        total_fixed=total_fixed,
        total_remaining=total_remaining,
    )

    merged = "".join(captured)
    for fragment in expected_messages:
        assert_that(merged).contains(fragment)
def test_print_final_status_format_outputs_blank_line_at_end(
    console_capture: tuple[list[str], Callable[..., None]],
) -> None:
    """Check that the formatted status output ends with an empty line.

    Args:
        console_capture: Fixture yielding the capture list and console callable.
    """
    captured, console = console_capture

    print_final_status_format(console, total_fixed=0, total_remaining=0)

    assert_that(captured).is_not_empty()
    assert_that(captured[-1]).is_equal_to("")
321# --- Tests for module constants ---
def test_border_length_is_positive_integer() -> None:
    """Check BORDER_LENGTH is the positive int used for main borders (50)."""
    # assertpy assertions return self, so the three checks chain cleanly.
    assert_that(BORDER_LENGTH).is_instance_of(int).is_greater_than(0).is_equal_to(50)
def test_info_border_length_is_positive_integer() -> None:
    """Check INFO_BORDER_LENGTH is the positive int used for info borders (40)."""
    # assertpy assertions return self, so the three checks chain cleanly.
    assert_that(INFO_BORDER_LENGTH).is_instance_of(int).is_greater_than(
        0,
    ).is_equal_to(40)
def test_border_length_can_create_border_string() -> None:
    """Check a border built from BORDER_LENGTH has the expected width/content."""
    line = "=" * BORDER_LENGTH
    assert_that(line).is_length(BORDER_LENGTH).is_equal_to("=" * 50)
def test_info_border_length_can_create_border_string() -> None:
    """Check a border built from INFO_BORDER_LENGTH has the expected width/content."""
    line = "-" * INFO_BORDER_LENGTH
    assert_that(line).is_length(INFO_BORDER_LENGTH).is_equal_to("-" * 40)
def test_border_lengths_relationship() -> None:
    """Check the main border is wider than the info border (visual hierarchy)."""
    assert_that(BORDER_LENGTH - INFO_BORDER_LENGTH).is_greater_than(0)