Coverage for tests / unit / tools / prettier / test_output_parsing.py: 100%
16 statements
« prev ^ index » next coverage.py v7.13.0, created at 2026-04-03 18:53 +0000
1"""Tests for Prettier output parsing."""
3from __future__ import annotations
5from typing import TYPE_CHECKING, Any
6from unittest.mock import patch
8from assertpy import assert_that
10if TYPE_CHECKING:
11 from lintro.tools.definitions.prettier import PrettierPlugin
def test_check_parses_prettier_output_correctly(
    prettier_plugin: PrettierPlugin,
    mock_execution_context_for_tool: Any,
) -> None:
    """Check correctly parses Prettier output with issues.

    Args:
        prettier_plugin: The prettier plugin instance to test.
        mock_execution_context_for_tool: Mock execution context factory.
    """
    # Raw output prettier emits when two files fail the format check.
    prettier_output = (
        "Checking formatting...\n[warn] file1.js\n[warn] file2.js\n"
        "[warn] Code style issues found in the above file(s)."
    )
    with (
        patch.object(prettier_plugin, "_prepare_execution") as prepare_mock,
        patch.object(prettier_plugin, "_run_subprocess") as subprocess_mock,
        patch.object(prettier_plugin, "_get_executable_command") as command_mock,
        patch.object(prettier_plugin, "_build_config_args") as config_mock,
    ):
        # Two files discovered, both reported as unformatted by prettier.
        context = mock_execution_context_for_tool(
            files=["file1.js", "file2.js"],
            rel_files=["file1.js", "file2.js"],
            cwd="/tmp",
        )
        prepare_mock.return_value = context
        command_mock.return_value = ["npx", "prettier"]
        config_mock.return_value = []
        # (success, output) tuple: False signals the check found issues.
        subprocess_mock.return_value = (False, prettier_output)

        result = prettier_plugin.check(["/tmp"], {})

        # The summary "[warn] Code style issues found..." line must not be
        # counted as a third issue — only the two per-file warnings are.
        assert_that(result.success).is_false()
        assert_that(result.issues_count).is_equal_to(2)
        assert_that(result.issues).is_not_none()
        assert_that(result.issues[0].file).is_equal_to("file1.js")  # type: ignore[index] # validated via is_not_none
        assert_that(result.issues[1].file).is_equal_to("file2.js")  # type: ignore[index] # validated via is_not_none