Coverage for tests / unit / ai / conftest.py: 90%
49 statements
« prev ^ index » next coverage.py v7.13.0, created at 2026-04-03 18:53 +0000
« prev ^ index » next coverage.py v7.13.0, created at 2026-04-03 18:53 +0000
1"""Shared fixtures for AI tests."""
3from __future__ import annotations
5import threading
6from dataclasses import dataclass
7from typing import Any
9import pytest
11from lintro.ai.config import AIConfig
12from lintro.ai.models import AIFixSuggestion
13from lintro.ai.providers.base import AIResponse, BaseAIProvider
14from lintro.parsers.base_issue import BaseIssue
class MockAIProvider(BaseAIProvider):
    """Deterministic, thread-safe stand-in for a real AI provider.

    Queued responses are handed out in FIFO order by ``complete``; once the
    queue is exhausted, a fixed default response is returned instead. Every
    call is recorded in ``calls`` so tests can inspect what was requested.
    """

    def __init__(
        self,
        responses: list[AIResponse] | None = None,
        *,
        available: bool = True,
    ) -> None:
        """Set up the mock provider.

        Args:
            responses: Responses handed back, in order, by ``complete()``.
            available: Value that ``is_available()`` will report.
        """
        super().__init__(
            provider_name="mock",
            has_sdk=True,
            sdk_package="mock",
            default_model="mock-model",
            default_api_key_env="MOCK_API_KEY",
        )
        self.responses: list[AIResponse] = responses or []
        self.calls: list[dict[str, Any]] = []
        self._available = available
        self._call_index = 0
        # Guards both the call log and the queue cursor.
        self._lock = threading.Lock()

    def _create_client(self, *, api_key: str) -> Any:
        """Return a placeholder client; the mock never contacts a backend."""
        return None

    def complete(
        self,
        prompt: str,
        *,
        system: str | None = None,
        max_tokens: int = 1024,
        timeout: float = 60.0,
    ) -> AIResponse:
        """Record the call, then return the next queued or default response."""
        with self._lock:
            self.calls.append(
                {
                    "prompt": prompt,
                    "system": system,
                    "max_tokens": max_tokens,
                    "timeout": timeout,
                },
            )
            try:
                queued = self.responses[self._call_index]
            except IndexError:
                # Queue exhausted: fall back to a canned default response.
                return AIResponse(
                    content="{}",
                    model="mock-model",
                    input_tokens=10,
                    output_tokens=5,
                    cost_estimate=0.001,
                    provider="mock",
                )
            self._call_index += 1
            return queued

    def is_available(self) -> bool:
        """Report the availability flag supplied at construction time."""
        return self._available
@dataclass
class MockIssue(BaseIssue):
    """BaseIssue extended with the extra fields some tools report."""

    # Tool-specific rule identifier (e.g. "B101").
    code: str = ""
    # Severity label as reported by the tool (e.g. "low", "warning").
    severity: str = ""
    # Whether the tool claims it can auto-fix this issue.
    fixable: bool = False
@pytest.fixture
def mock_provider() -> MockAIProvider:
    """Provide a fresh MockAIProvider with no queued responses."""
    provider = MockAIProvider()
    return provider
@pytest.fixture
def ai_config() -> AIConfig:
    """Provide an enabled AI config targeting the anthropic provider."""
    config = AIConfig(enabled=True, provider="anthropic")  # type: ignore[arg-type] # Pydantic coerces str
    return config
@pytest.fixture
def ai_config_disabled() -> AIConfig:
    """Provide an AI config with the feature switched off."""
    disabled = AIConfig(enabled=False)
    return disabled
@pytest.fixture
def sample_issues() -> list[MockIssue]:
    """Provide three representative issues spanning two files and two rules."""
    # (file, line, column, message, code, severity) tuples, one per issue.
    specs = [
        ("src/main.py", 10, 1, "Use of assert detected", "B101", "low"),
        ("src/utils.py", 25, 5, "Use of assert detected", "B101", "low"),
        ("src/main.py", 42, 1, "Line too long", "E501", "warning"),
    ]
    return [
        MockIssue(
            file=path,
            line=line_no,
            column=col,
            message=message,
            code=rule,
            severity=severity,
        )
        for path, line_no, col, message, rule, severity in specs
    ]
@pytest.fixture
def sample_fix_suggestions() -> list[AIFixSuggestion]:
    """Provide one high-confidence fix suggestion for a bandit B101 finding."""
    diff_text = (
        "--- a/src/main.py\n+++ b/src/main.py\n"
        "-assert x > 0\n"
        "+if not x > 0:\n"
        "+ raise ValueError"
    )
    suggestion = AIFixSuggestion(
        file="src/main.py",
        line=10,
        code="B101",
        tool_name="bandit",
        original_code="assert x > 0",
        suggested_code="if not x > 0:\n raise ValueError",
        diff=diff_text,
        explanation="Replace assert with if/raise",
        confidence="high",
        input_tokens=150,
        output_tokens=80,
        cost_estimate=0.002,
    )
    return [suggestion]