test(#134): qwen_bewerter Coverage 86% → 94%
- TestContentFingerprint: leerer/None content → 'len=0', sha1-Praefix
- TestStripMarkdownJsonFences: explizite ```json-Sprache-Erkennung
- TestLazyClientInstantiation:
- injected client umgeht Lazy-Import
- kein injected client triggert openai.AsyncOpenAI-Aufruf
(sys.modules-Stub fuer Lazy-Import-Branch)
Verbleibend uncovered: Line 46 (json-Fence ohne Newline, defensiv aber
unerreichbar weil split('\n', 1) vorher crashen wuerde) und 110-111
(assert/raise-Pfad, im Code als 'unreachable' markiert).
This commit is contained in:
parent
698562b1f5
commit
9af74b1a05
@ -135,3 +135,66 @@ class TestLlmRequestDefaults:
|
|||||||
assert req.max_retries == 3
|
assert req.max_retries == 3
|
||||||
assert req.max_tokens == 4000
|
assert req.max_tokens == 4000
|
||||||
assert req.base_temperature == 0.3
|
assert req.base_temperature == 0.3
|
||||||
|
|
||||||
|
|
||||||
|
# ─── Coverage-Backfill (#134) ────────────────────────────────────────────────
|
||||||
|
|
||||||
|
|
||||||
|
class TestContentFingerprint:
    """_content_fingerprint: 'len=N sha1=XXXXXXXX' summary of response content."""

    def test_empty_string_returns_len_zero(self):
        from app.adapters.qwen_bewerter import _content_fingerprint

        assert _content_fingerprint("") == "len=0"

    def test_none_returns_len_zero(self):
        from app.adapters.qwen_bewerter import _content_fingerprint

        # Defensive: None is tolerated because the log path is called with
        # content.choices[0].message.content, which can already be None.
        assert _content_fingerprint(None) == "len=0"

    def test_non_empty_includes_sha1_prefix(self):
        from app.adapters.qwen_bewerter import _content_fingerprint

        fingerprint = _content_fingerprint("hallo")

        assert fingerprint.startswith("len=5 sha1=")
        # Everything after the 'sha1=' marker is the 8-char digest prefix.
        digest = fingerprint.partition("sha1=")[2]
        assert len(digest) == 8
class TestStripMarkdownJsonFences:
    """A ```json language fence is handled in addition to the plain fence."""

    def test_json_fence_with_explicit_lang(self):
        from app.adapters.qwen_bewerter import _strip_markdown_fences

        fenced = '```json\n{"a": 1}\n```'
        assert _strip_markdown_fences(fenced) == '{"a": 1}'
class TestLazyClientInstantiation:
    """_get_client loads openai only on the first call."""

    def test_no_client_triggers_openai_import(self, monkeypatch):
        """Without an injected client, _get_client attempts the lazy import
        of openai.AsyncOpenAI. The import is patched here to verify that
        _get_client really tries to instantiate (branch coverage,
        lines 70-73)."""
        import sys
        from unittest.mock import MagicMock

        from app.adapters.qwen_bewerter import QwenBewerter

        client_instance = MagicMock(name="AsyncOpenAI-Instance")
        async_openai_ctor = MagicMock(return_value=client_instance)

        # Stub module stands in for the real openai package so the lazy
        # import inside _get_client resolves to our fake constructor.
        stub_openai = type(sys)("openai")
        stub_openai.AsyncOpenAI = async_openai_ctor
        monkeypatch.setitem(sys.modules, "openai", stub_openai)

        bewerter = QwenBewerter(api_key="test", base_url="http://test")

        assert bewerter._get_client() is client_instance
        async_openai_ctor.assert_called_once_with(
            api_key="test", base_url="http://test"
        )

    def test_injected_client_skips_lazy_import(self):
        """If the client is already present from the constructor,
        _get_client returns it directly — no openai import."""
        from app.adapters.qwen_bewerter import QwenBewerter

        injected = object()
        bewerter = QwenBewerter(client=injected)
        assert bewerter._get_client() is injected
|||||||
Loading…
Reference in New Issue
Block a user