Skip to content

Commit 0b73671

Browse files
committed
Add docstrings for clarity in LLM backend tests, remove unused imports, and update CLAUDE.md with test-writing guidance.
1 parent 02733dc commit 0b73671

6 files changed

Lines changed: 27 additions & 5 deletions

File tree

CLAUDE.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,8 @@ pre-commit install # install git hooks
2020

2121
Always add `--agent-digest=term` when running pytest to see token-optimized test results.
2222

23+
Use the python-tester skill when writing Python tests.
24+
2325
```bash
2426
uv run pytest --agent-digest=term # run all tests with coverage
2527
uv run pytest --agent-digest=term tests/test_config.py # run a single test file

tests/test_config.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77

88
from foreman.config import ConfigError, ForemanConfig, load_config
99

10-
1110
VALID_YAML = textwrap.dedent("""\
1211
identity:
1312
github_token: "ghp_test_token"

tests/test_llm_backends.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,12 +22,16 @@ def load_fixture(name: str) -> dict:
2222

2323

2424
class TestAnthropicBackend:
25+
"""Tests for the Anthropic LLM backend."""
26+
2527
def test_instantiates_with_config(self):
28+
"""Test that the backend can be instantiated with a valid configuration."""
2629
cfg = LLMConfig(provider="anthropic", model="claude-sonnet-4-6", api_key="test-key")
2730
backend = AnthropicBackend(cfg)
2831
assert isinstance(backend, AnthropicBackend)
2932

3033
def test_complete_returns_string(self):
34+
"""Test that the complete method returns a string response."""
3135
fixture = load_fixture("anthropic_triage_response.json")
3236
cfg = LLMConfig(provider="anthropic", model="claude-sonnet-4-6", api_key="test-key")
3337
backend = AnthropicBackend(cfg)
@@ -43,6 +47,7 @@ def test_complete_returns_string(self):
4347
assert result == fixture["content"]
4448

4549
def test_complete_with_system_prompt(self):
50+
"""Test that the complete method correctly handles a system prompt."""
4651
cfg = LLMConfig(provider="anthropic", model="claude-sonnet-4-6", api_key="test-key")
4752
backend = AnthropicBackend(cfg)
4853

@@ -58,6 +63,7 @@ def test_complete_with_system_prompt(self):
5863
assert any(m.get("role") == "system" for m in messages)
5964

6065
def test_complete_passes_model_to_litellm(self):
66+
"""Test that the correct model name is passed to the underlying LiteLLM call."""
6167
cfg = LLMConfig(provider="anthropic", model="claude-sonnet-4-6", api_key="test-key")
6268
backend = AnthropicBackend(cfg)
6369

@@ -73,12 +79,16 @@ def test_complete_passes_model_to_litellm(self):
7379

7480

7581
class TestOllamaBackend:
82+
"""Tests for the Ollama LLM backend."""
83+
7684
def test_instantiates_with_config(self):
85+
"""Test that the backend can be instantiated with a valid configuration."""
7786
cfg = LLMConfig(provider="ollama", model="llama3")
7887
backend = OllamaBackend(cfg)
7988
assert isinstance(backend, OllamaBackend)
8089

8190
def test_complete_returns_string(self):
91+
"""Test that the complete method returns a string response."""
8292
fixture = load_fixture("ollama_triage_response.json")
8393
cfg = LLMConfig(provider="ollama", model="llama3")
8494
backend = OllamaBackend(cfg)
@@ -94,6 +104,7 @@ def test_complete_returns_string(self):
94104
assert result == fixture["content"]
95105

96106
def test_complete_with_system_prompt(self):
107+
"""Test that the complete method correctly handles a system prompt."""
97108
cfg = LLMConfig(provider="ollama", model="llama3")
98109
backend = OllamaBackend(cfg)
99110

@@ -108,6 +119,7 @@ def test_complete_with_system_prompt(self):
108119
assert any(m.get("role") == "system" for m in messages)
109120

110121
def test_complete_passes_ollama_model_format(self):
122+
"""Test that the model name is passed to LiteLLM in the expected Ollama format."""
111123
cfg = LLMConfig(provider="ollama", model="llama3")
112124
backend = OllamaBackend(cfg)
113125

tests/test_llm_base.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,46 +17,58 @@ def complete(self, prompt: str, system: str | None = None) -> str:
1717

1818

1919
class TestLLMBackendABC:
20+
"""Tests for the LLMBackend abstract base class."""
21+
2022
def test_cannot_instantiate_abc_directly(self):
23+
"""Ensure that LLMBackend cannot be instantiated directly."""
2124
with pytest.raises(TypeError):
2225
LLMBackend() # type: ignore[abstract]
2326

2427
def test_concrete_subclass_is_instantiable(self):
28+
"""Ensure that a concrete subclass of LLMBackend can be instantiated."""
2529
backend = ConcreteBackend()
2630
assert isinstance(backend, LLMBackend)
2731

2832
def test_complete_returns_string(self):
33+
"""Ensure that the complete method returns a string."""
2934
backend = ConcreteBackend()
3035
result = backend.complete("hello")
3136
assert isinstance(result, str)
3237

3338
def test_complete_accepts_optional_system(self):
39+
"""Ensure that the complete method accepts an optional system prompt."""
3440
backend = ConcreteBackend()
3541
result = backend.complete("hello", system="you are helpful")
3642
assert isinstance(result, str)
3743

3844

3945
class TestFromConfigFactory:
46+
"""Tests for the from_config factory function."""
47+
4048
def test_returns_anthropic_backend_for_anthropic_provider(self):
49+
"""Ensure from_config returns an AnthropicBackend for 'anthropic' provider."""
4150
from foreman.llm.anthropic import AnthropicBackend
4251

4352
cfg = LLMConfig(provider="anthropic", model="claude-sonnet-4-6", api_key="test-key")
4453
backend = from_config(cfg)
4554
assert isinstance(backend, AnthropicBackend)
4655

4756
def test_returns_ollama_backend_for_ollama_provider(self):
57+
"""Ensure from_config returns an OllamaBackend for 'ollama' provider."""
4858
from foreman.llm.ollama import OllamaBackend
4959

5060
cfg = LLMConfig(provider="ollama", model="llama3")
5161
backend = from_config(cfg)
5262
assert isinstance(backend, OllamaBackend)
5363

5464
def test_unknown_provider_raises_value_error(self):
65+
"""Ensure from_config raises ValueError for an unknown provider."""
5566
cfg = LLMConfig(provider="unknown-provider", model="some-model")
5667
with pytest.raises(ValueError, match="unknown-provider"):
5768
from_config(cfg)
5869

5970
def test_factory_returns_llm_backend_instance(self):
71+
"""Ensure from_config returns an instance of LLMBackend."""
6072
cfg = LLMConfig(provider="anthropic", model="claude-sonnet-4-6", api_key="test-key")
6173
backend = from_config(cfg)
6274
assert isinstance(backend, LLMBackend)

tests/test_middleware.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
from unittest.mock import AsyncMock, patch
44

55
import pytest
6-
import structlog
76

87
from foreman.middleware import LogCorrelationIdMiddleware
98

tests/test_otel.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,6 @@
1-
""" "Tests for the foreman.otel module."""
1+
"""Tests for the foreman.otel module."""
22

3-
import pytest
43
from fastapi import FastAPI
5-
from opentelemetry import trace
64

75
from foreman.otel import configure_otel
86
from foreman.settings import AppSettings

0 commit comments

Comments (0)