Coverage for lintro / tools / implementations / pytest / test_analytics.py: 81%
54 statements
Coverage report generated by coverage.py v7.13.0, created at 2026-04-03 18:53 +0000
"""Test analytics for pytest: slow and flaky test detection.

This module provides functions for detecting slow and flaky tests.
"""
6from __future__ import annotations
8from pathlib import Path
9from typing import Any
11from loguru import logger
13from lintro.parsers.pytest.pytest_issue import PytestIssue
14from lintro.tools.implementations.pytest.collection import (
15 compute_updated_flaky_test_history,
16 extract_all_test_results_from_junit,
17 is_ci_environment,
18 save_flaky_test_history,
19)
20from lintro.tools.implementations.pytest.output import detect_flaky_tests
# Constants for pytest configuration.
# These are fallback defaults; each can be overridden per-run via the
# corresponding key in the `options` dict passed to the functions below.
PYTEST_SLOW_TEST_THRESHOLD: float = 1.0  # Warn if any test takes > 1 second
PYTEST_TOTAL_TIME_WARNING: float = 60.0  # Warn if total execution time > 60 seconds
PYTEST_FLAKY_MIN_RUNS: int = 3  # Minimum runs before detecting flaky tests
PYTEST_FLAKY_FAILURE_RATE: float = 0.3  # Consider flaky if fails >= 30% but < 100%
def detect_and_log_slow_tests(
    issues: list[PytestIssue],
    options: dict[str, Any],
) -> list[tuple[str, float]]:
    """Detect slow tests and log warnings.

    Args:
        issues: List of parsed test issues.
        options: Options dictionary. Reads ``slow_test_threshold`` (seconds),
            defaulting to ``PYTEST_SLOW_TEST_THRESHOLD``.

    Returns:
        list[tuple[str, float]]: List of (test_name, duration) tuples for slow
        tests, sorted by duration descending.
    """
    slow_tests: list[tuple[str, float]] = []
    # Resolve the per-test duration threshold once up front (the original
    # looked it up from `options` twice with the same key and default).
    slow_threshold = options.get(
        "slow_test_threshold",
        PYTEST_SLOW_TEST_THRESHOLD,
    )
    # Check all issues (including passed tests) for slow tests.
    for issue in issues:
        # Truthiness check skips missing/zero durations; isinstance guards
        # against non-numeric duration values from the parser.
        if (
            issue.duration
            and isinstance(issue.duration, (int, float))
            and issue.duration > slow_threshold
        ):
            slow_tests.append((issue.test_name, issue.duration))

    # Log slow tests, slowest first.
    if slow_tests:
        slow_tests.sort(key=lambda x: x[1], reverse=True)
        slow_msg = f"Found {len(slow_tests)} slow test(s) (> {slow_threshold}s):"
        logger.info(slow_msg)
        for test_name, duration in slow_tests[:10]:  # Show top 10 slowest
            logger.info(f" - {test_name}: {duration:.2f}s")
        if len(slow_tests) > 10:
            logger.info(f" ... and {len(slow_tests) - 10} more")

    return slow_tests
def check_total_time_warning(
    summary_duration: float,
    options: dict[str, Any],
) -> None:
    """Check and warn if total execution time exceeds threshold.

    Args:
        summary_duration: Total test execution duration.
        options: Options dictionary. Reads ``total_time_warning`` (seconds),
            defaulting to ``PYTEST_TOTAL_TIME_WARNING``.
    """
    threshold = options.get("total_time_warning", PYTEST_TOTAL_TIME_WARNING)
    # Guard clause: nothing to report when the run stayed within budget.
    if summary_duration <= threshold:
        return
    logger.warning(
        f"Tests took {summary_duration:.1f}s to run "
        f"(threshold: {threshold}s). "
        "Consider optimizing slow tests."
    )
def detect_and_log_flaky_tests(
    issues: list[PytestIssue],
    options: dict[str, Any],
) -> list[tuple[str, float]]:
    """Detect flaky tests and log warnings.

    Args:
        issues: List of parsed test issues.
        options: Options dictionary. Reads ``detect_flaky`` (default True),
            ``junitxml``, ``flaky_min_runs``, and ``flaky_failure_rate``.

    Returns:
        list[tuple[str, float]]: List of (node_id, failure_rate) tuples for flaky tests.
    """
    if not options.get("detect_flaky", True):
        # Flaky detection disabled for this run.
        return []

    # Prefer complete per-test outcomes from a JUnit XML report when one is
    # available; fall back to "report.xml" in CI when no path was configured.
    junit_path = options.get("junitxml") or (
        "report.xml" if is_ci_environment() else None
    )
    junit_results: dict[str, str] | None = None
    if junit_path and Path(junit_path).exists():
        junit_results = extract_all_test_results_from_junit(junit_path)

    # Fold this run's outcomes into the persisted flaky-test history.
    history = compute_updated_flaky_test_history(issues, junit_results)
    save_flaky_test_history(history)

    # Flag tests whose historical failure rate crosses the configured bar.
    flaky = detect_flaky_tests(
        history,
        options.get("flaky_min_runs", PYTEST_FLAKY_MIN_RUNS),
        options.get("flaky_failure_rate", PYTEST_FLAKY_FAILURE_RATE),
    )

    if flaky:
        logger.warning(f"Found {len(flaky)} potentially flaky test(s):")
        for node_id, rate in flaky[:10]:  # Show top 10 flakiest
            logger.warning(
                f" - {node_id}: {rate:.0%} failure rate "
                f"({history[node_id]['failed'] + history[node_id]['error']}"
                f" failures in {sum(history[node_id].values())} runs)",
            )
        if len(flaky) > 10:
            logger.warning(f" ... and {len(flaky) - 10} more")

    return flaky