diff --git a/tests/test_llm_bewerter.py b/tests/test_llm_bewerter.py
index 3e1ae31..2a2ff5d 100644
--- a/tests/test_llm_bewerter.py
+++ b/tests/test_llm_bewerter.py
@@ -135,3 +135,66 @@ class TestLlmRequestDefaults:
         assert req.max_retries == 3
         assert req.max_tokens == 4000
         assert req.base_temperature == 0.3
+
+
+# ─── Coverage-Backfill (#134) ────────────────────────────────────────────────
+
+
+class TestContentFingerprint:
+    def test_empty_string_returns_len_zero(self):
+        from app.adapters.qwen_bewerter import _content_fingerprint
+        assert _content_fingerprint("") == "len=0"
+
+    def test_none_returns_len_zero(self):
+        from app.adapters.qwen_bewerter import _content_fingerprint
+        # Defensive: None is tolerated, because the log path is invoked
+        # with content.choices[0].message.content, which can be None
+        assert _content_fingerprint(None) == "len=0"
+
+    def test_non_empty_includes_sha1_prefix(self):
+        from app.adapters.qwen_bewerter import _content_fingerprint
+        result = _content_fingerprint("hallo")
+        assert result.startswith("len=5 sha1=")
+        assert len(result.split("sha1=")[1]) == 8
+
+
+class TestStripMarkdownJsonFences:
+    """A ```json fence is handled in addition to the plain fence."""
+
+    def test_json_fence_with_explicit_lang(self):
+        from app.adapters.qwen_bewerter import _strip_markdown_fences
+        s = "```json\n{\"a\": 1}\n```"
+        assert _strip_markdown_fences(s) == '{"a": 1}'
+
+
+class TestLazyClientInstantiation:
+    """_get_client imports openai lazily on the first call."""
+
+    def test_no_client_triggers_openai_import(self, monkeypatch):
+        """If no client was injected, _get_client attempts the lazy
+        import of openai.AsyncOpenAI. We patch the import here to
+        make sure _get_client actually tries to instantiate the
+        client (branch coverage, lines 70-73)."""
+        import sys
+        from unittest.mock import MagicMock
+        from app.adapters.qwen_bewerter import QwenBewerter
+
+        fake_client = MagicMock(name="AsyncOpenAI-Instance")
+        fake_async_openai = MagicMock(return_value=fake_client)
+        fake_module = type(sys)("openai")
+        fake_module.AsyncOpenAI = fake_async_openai
+        monkeypatch.setitem(sys.modules, "openai", fake_module)
+
+        qb = QwenBewerter(api_key="test", base_url="http://test")
+        client = qb._get_client()
+        assert client is fake_client
+        fake_async_openai.assert_called_once_with(api_key="test",
+                                                  base_url="http://test")
+
+    def test_injected_client_skips_lazy_import(self):
+        """If the client was already supplied in the constructor,
+        _get_client returns it directly — no openai import."""
+        from app.adapters.qwen_bewerter import QwenBewerter
+        injected = object()
+        qb = QwenBewerter(client=injected)
+        assert qb._get_client() is injected