Coverage for lintro / plugins / file_processor.py: 100%
53 statements
« prev ^ index » next coverage.py v7.13.0, created at 2026-04-03 18:53 +0000
"""File processing utilities for tools that process files one at a time.

This module provides dataclasses and utilities for tools that need to process
files individually (rather than in batch mode). It extracts the common pattern
of iterating through files, collecting results, and building output.

Example:
    >>> from lintro.plugins.file_processor import (
    ...     AggregatedResult,
    ...     FileProcessingResult,
    ... )
    >>>
    >>> def process_file(path: str) -> FileProcessingResult:
    ...     # Process the file
    ...     return FileProcessingResult(success=True, output="", issues=[])
    >>>
    >>> result = AggregatedResult()
    >>> for file_path in files:
    ...     file_result = process_file(file_path)
    ...     result.add_file_result(file_path, file_result)
    >>> output = result.build_output()
"""
24from __future__ import annotations
26from collections.abc import Sequence
27from dataclasses import dataclass, field
28from typing import TYPE_CHECKING
30if TYPE_CHECKING:
31 from lintro.parsers.base_issue import BaseIssue
@dataclass
class FileProcessingResult:
    """Outcome of running a tool against a single file.

    Attributes:
        success: True when the tool exited cleanly (exit code 0) for the file.
        output: Raw tool output captured for this file.
        issues: Issues parsed out of the tool output for this file.
        skipped: True when the file was not processed (e.g. due to a timeout).
        error: Human-readable error message when processing failed, else None.
    """

    success: bool
    output: str
    issues: Sequence[BaseIssue]
    skipped: bool = False
    error: str | None = None
@dataclass
class AggregatedResult:
    """Running aggregate of per-file processing results.

    Collects the outcomes produced while iterating over a set of files and
    exposes helpers to merge each file's result and to render the combined
    output text once iteration is done.

    Attributes:
        all_success: True while every file so far has processed cleanly.
        all_issues: All issues gathered across every file.
        all_outputs: Non-empty per-file outputs (and error messages) collected.
        skipped_files: Paths of files that were skipped.
        execution_failures: Number of files that could not be processed.
        total_issues: Running total of issues across all files.
    """

    all_success: bool = True
    all_issues: list[BaseIssue] = field(default_factory=list)
    all_outputs: list[str] = field(default_factory=list)
    skipped_files: list[str] = field(default_factory=list)
    execution_failures: int = 0
    total_issues: int = 0

    def add_file_result(self, file_path: str, result: FileProcessingResult) -> None:
        """Merge one file's processing result into this aggregate.

        Args:
            file_path: Path of the file that was processed.
            result: Outcome of processing that file.
        """
        # Skipped or errored files count as execution failures and
        # contribute no issues; skipped takes precedence over error.
        if result.skipped or result.error:
            self.all_success = False
            self.execution_failures += 1
            if result.skipped:
                self.skipped_files.append(file_path)
            else:
                self.all_outputs.append(
                    f"Error processing {file_path}: {result.error}"
                )
            return

        self.total_issues += len(result.issues)
        if not result.success:
            self.all_success = False
        # Keep the raw output only when it is non-empty and the file either
        # failed or produced issues.
        if result.output and (result.issues or not result.success):
            self.all_outputs.append(result.output)
        if result.issues:
            self.all_issues.extend(result.issues)

    def build_output(self, *, timeout: int | None = None) -> str | None:
        """Render the combined output text for all processed files.

        Args:
            timeout: Per-file timeout to mention in the skip summary, if any.

        Returns:
            The assembled output string, or None when there is nothing to
            report.
        """
        segments: list[str] = []
        if self.all_outputs:
            segments.append("\n".join(self.all_outputs))

        if self.execution_failures > 0:
            if self.skipped_files:
                summary = (
                    f"Skipped/failed {self.execution_failures} file(s) due to "
                    f"execution failures (including timeouts)"
                )
                summary += f" (timeout: {timeout}s):" if timeout is not None else ":"
                summary += "".join(f"\n - {path}" for path in self.skipped_files)
            else:
                summary = (
                    f"Failed to process {self.execution_failures} file(s) "
                    "due to execution errors"
                )
            segments.append(summary)

        # Failure summary is separated from tool output by a blank line.
        combined = "\n\n".join(segments)
        return combined if combined.strip() else None
# Explicit public API for ``from lintro.plugins.file_processor import *``.
__all__ = [
    "AggregatedResult",
    "FileProcessingResult",
]