commit 17606ab237f91db715328750ce3334980d33f868 Author: Dotty Dotter Date: Mon Mar 30 16:37:58 2026 +0200 feat: Initial commit — Antragstracker Hagen Vollständige Pipeline zur Analyse kommunaler Vorlagen aus ALLRIS: - OParl-Import: 20.149 Vorlagen - PDF-Extraktion: 10.045 Volltexte (adaptives Throttling) - KI-Zusammenfassungen: 10.026 via Qwen Plus (parallelisiert) - Beratungsfolge-Scraper: Beschlusstexte + Wortprotokolle - Abstimmungs-Analyse mit Koalitionsmatrix - Georeferenzierung (Nominatim) Stack: FastAPI + SvelteKit + SQLite Deployment: Docker + Traefik auf VServer Daten (DB, Logs) nicht im Repo — siehe Restic-Backup. Repo-Setup: scripts/setup.sh für Neuaufbau aus OParl-API. diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2edcbb6 --- /dev/null +++ b/.gitignore @@ -0,0 +1,28 @@ +# Data (500MB+ DBs, Logs, State) +data/ +antraege.db + +# Python +.venv/ +venv/ +__pycache__/ +*.pyc +*.egg-info/ + +# Node +node_modules/ +frontend/.svelte-kit/ +frontend/build/ + +# OS +.DS_Store +*.swp +Thumbs.db + +# Logs & Runtime +*.log +nohup.out +extract.log +import_urls.log +system_metrics.log +antraege.db diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..c4a2a80 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,41 @@ +# Multi-stage build für Antragstracker Hagen + +# Stage 1: Frontend Build +FROM node:20-alpine AS frontend-build +WORKDIR /app/frontend +COPY frontend/package*.json ./ +RUN npm ci +COPY frontend/ ./ +RUN npm run build + +# Stage 2: Backend + Frontend +FROM python:3.12-slim + +WORKDIR /app + +# System dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + && rm -rf /var/lib/apt/lists/* + +# Python dependencies +COPY backend/requirements.txt ./ +RUN pip install --no-cache-dir -r requirements.txt + +# Backend code +COPY backend/src/ ./src/ + +# Frontend static files (from build stage) +COPY --from=frontend-build /app/frontend/build ./static + +# Data directory (will be mounted as volume) 
"""Pydantic response models for the API."""

from datetime import date, datetime  # NOTE(review): `datetime` is currently unused here
from pydantic import BaseModel


class ParteiOut(BaseModel):
    """A political party (Partei)."""

    id: int
    kuerzel: str  # short code, e.g. "SPD"
    name: str | None = None
    farbe: str | None = None  # display colour for the frontend, if configured


class GremiumOut(BaseModel):
    """A council body / committee (Gremium)."""

    id: int
    name: str
    kuerzel: str | None = None
    typ: str | None = None


class BeratungOut(BaseModel):
    """One deliberation (Beratung) of a Vorlage in a Gremium."""

    id: int
    gremium: GremiumOut | None = None
    sitzung_datum: date | None = None
    rolle: str | None = None
    ergebnis: str | None = None
    ergebnis_text: str | None = None


class VorlageKurz(BaseModel):
    """Short form of a Vorlage, used in list views and as embedded reference."""

    id: int
    aktenzeichen: str | None = None
    typ: str | None = None
    betreff: str | None = None
    datum_eingang: date | None = None
    ist_verwaltungsvorlage: bool = False


class ReferenzOut(BaseModel):
    """A reference between two Vorlagen (either direction)."""

    vorlage_id: int
    aktenzeichen: str | None = None
    betreff: str | None = None
    vorlage_typ: str | None = None
    datum_eingang: date | None = None
    ref_typ: str | None = None
    konfidenz: float | None = None  # confidence score of the extracted reference
    kontext: str | None = None


class KiZusammenfassung(BaseModel):
    """AI-generated summary of a Vorlage (parsed from stored JSON)."""

    zusammenfassung: str | None = None
    kernforderung: str | None = None
    begruendung: str | None = None
    # Mutable default is safe on a Pydantic model: defaults are copied per instance.
    betroffene_orte: list[str] = []
    thema: str | None = None
    # The model sometimes stores a single party, sometimes a list — accept both.
    partei: str | list[str] | None = None


class VorlageDetail(BaseModel):
    """Full detail view of a Vorlage including all related entities."""

    id: int
    aktenzeichen: str | None = None
    aktenzeichen_basis: str | None = None
    aktenzeichen_suffix: str | None = None
    typ: str | None = None
    betreff: str | None = None
    volltext_clean: str | None = None
    datum_eingang: date | None = None
    pdf_url: str | None = None
    web_url: str | None = None
    ki_zusammenfassung: KiZusammenfassung | None = None
    ist_verwaltungsvorlage: bool = False
    thema_kurz: str | None = None
    antragsteller: list[ParteiOut] = []
    beratungen: list[BeratungOut] = []
    referenzen_ausgehend: list[ReferenzOut] = []
    referenzen_eingehend: list[ReferenzOut] = []
    kette_id: int | None = None  # id of the Kette this Vorlage belongs to, if any


class KettenGliedOut(BaseModel):
    """One member (Glied) of a Kette, with its position and role."""

    vorlage: VorlageKurz
    position: int
    rolle: str | None = None


class KetteKurz(BaseModel):
    """Short form of a Kette for list views."""

    id: int
    ursprung: VorlageKurz | None = None
    typ: str | None = None
    thema: str | None = None
    status: str | None = None
    status_seit: date | None = None
    letzte_aktivitaet: date | None = None
    vertagungen_count: int = 0
    glieder_count: int = 0


class KetteDetail(BaseModel):
    """Full detail view of a Kette including all Glieder and the graph."""

    id: int
    ursprung: VorlageKurz | None = None
    typ: str | None = None
    thema: str | None = None
    status: str | None = None
    status_seit: date | None = None
    letzte_aktivitaet: date | None = None
    vertagungen_count: int = 0
    glieder: list[KettenGliedOut] = []
    antragsteller: list[ParteiOut] = []
    graph: dict | None = None  # Perlenschnur graph payload (nodes/edges), shape defined in core.graph


class PaginatedVorlagen(BaseModel):
    """Paginated result envelope for Vorlagen lists."""

    items: list[VorlageKurz]
    total: int
    page: int
    page_size: int


class PaginatedKetten(BaseModel):
    """Paginated result envelope for Ketten lists."""

    items: list[KetteKurz]
    total: int
    page: int
    page_size: int
"""API routes for Abstimmungen (votes) and voting-behaviour analyses."""

import json
from collections import defaultdict

from fastapi import APIRouter, Depends, Query

from tracker.db.session import get_connection

router = APIRouter(prefix="/abstimmungen", tags=["Abstimmungen"])


def _db():
    """FastAPI dependency: yield a DB connection and always close it afterwards."""
    conn = get_connection()
    try:
        yield conn
    finally:
        conn.close()


@router.get("/stats")
def get_abstimmungen_stats(conn=Depends(_db)):
    """Overview: total number of votes and distribution of results."""
    total = conn.execute("SELECT COUNT(*) FROM abstimmungen").fetchone()[0]

    by_ergebnis = conn.execute("""
        SELECT ergebnis, COUNT(*) as anzahl
        FROM abstimmungen
        WHERE ergebnis IS NOT NULL
        GROUP BY ergebnis
        ORDER BY anzahl DESC
    """).fetchall()

    return {
        "total": total,
        "nach_ergebnis": [{"ergebnis": r[0], "anzahl": r[1]} for r in by_ergebnis]
    }


@router.get("/fraktionen")
def get_fraktionen_uebersicht(conn=Depends(_db)):
    """Aggregated voting behaviour of all parliamentary groups (Fraktionen)."""
    rows = conn.execute("""
        SELECT fraktion,
               SUM(CASE WHEN stimme='ja' THEN 1 ELSE 0 END) as ja,
               SUM(CASE WHEN stimme='nein' THEN 1 ELSE 0 END) as nein,
               SUM(CASE WHEN stimme='enthaltung' THEN 1 ELSE 0 END) as enthaltung,
               COUNT(*) as gesamt
        FROM abstimmungen_fraktionen
        GROUP BY fraktion
        ORDER BY gesamt DESC
    """).fetchall()

    return [
        {
            "fraktion": r[0],
            "ja": r[1],
            "nein": r[2],
            "enthaltung": r[3],
            "gesamt": r[4],
            # Guard against division by zero for groups without recorded votes.
            "ja_quote": round(r[1] / r[4] * 100, 1) if r[4] > 0 else 0
        }
        for r in rows
    ]


@router.get("/koalitionsmatrix")
def get_koalitionsmatrix(conn=Depends(_db)):
    """Matrix: how often do pairs of Fraktionen vote the same way?

    Only clear ja/nein votes are considered; Enthaltungen are excluded.
    """
    abstimmungen = conn.execute("""
        SELECT abstimmung_id, fraktion, stimme
        FROM abstimmungen_fraktionen
        WHERE stimme IN ('ja', 'nein')
    """).fetchall()

    # Group votes by Abstimmung: abstimmung_id -> {fraktion: stimme}.
    by_abstimmung = defaultdict(dict)
    for aid, fraktion, stimme in abstimmungen:
        by_abstimmung[aid][fraktion] = stimme

    # Count pairwise agreement over all votes where both groups voted.
    fraktionen = list(set(r[1] for r in abstimmungen))
    matrix = {f1: {f2: {"gleich": 0, "gesamt": 0} for f2 in fraktionen} for f1 in fraktionen}

    for aid, stimmen in by_abstimmung.items():
        for f1 in stimmen:
            for f2 in stimmen:
                if f1 != f2:
                    matrix[f1][f2]["gesamt"] += 1
                    if stimmen[f1] == stimmen[f2]:
                        matrix[f1][f2]["gleich"] += 1

    # Flatten into a list shape that the frontend can render directly.
    result = []
    for f1 in sorted(fraktionen):
        row = {"fraktion": f1, "uebereinstimmung": {}}
        for f2 in sorted(fraktionen):
            if f1 != f2 and matrix[f1][f2]["gesamt"] > 0:
                quote = round(matrix[f1][f2]["gleich"] / matrix[f1][f2]["gesamt"] * 100, 1)
                row["uebereinstimmung"][f2] = {
                    "quote": quote,
                    "gleich": matrix[f1][f2]["gleich"],
                    "gesamt": matrix[f1][f2]["gesamt"]
                }
        result.append(row)

    return result


@router.get("/ablehnungen")
def get_ablehnungsverhalten(conn=Depends(_db)):
    """Who votes against whose Anträge?

    Joins each vote with the AI summary (which carries the submitting party)
    and counts, per submitter, how every other Fraktion voted.
    """
    rows = conn.execute("""
        SELECT
            kb.anmerkungen,
            af.fraktion,
            af.stimme,
            a.ergebnis
        FROM abstimmungen a
        JOIN abstimmungen_fraktionen af ON a.id = af.abstimmung_id
        LEFT JOIN ki_bewertungen kb ON a.vorlage_id = kb.vorlage_id AND kb.typ = 'zusammenfassung'
        WHERE af.stimme IN ('ja', 'nein', 'enthaltung')
    """).fetchall()

    # antragsteller -> abstimmende_fraktion -> stimme -> count
    matrix = defaultdict(lambda: defaultdict(lambda: {"ja": 0, "nein": 0, "enthaltung": 0}))

    for anmerkungen, fraktion, stimme, _ergebnis in rows:
        if not anmerkungen:
            continue
        # `anmerkungen` holds the AI summary as JSON text.
        # Fix: previously a bare `except:` swallowed every error here; now only
        # malformed/unexpected payloads are skipped, explicitly.
        try:
            data = json.loads(anmerkungen)
        except (json.JSONDecodeError, TypeError):
            continue
        if not isinstance(data, dict):
            continue
        antragsteller = data.get("partei")
        # The AI sometimes stores a list of parties; take the first one.
        if isinstance(antragsteller, list):
            antragsteller = antragsteller[0] if antragsteller else None
        # Do not count a group's vote on its own Antrag.
        if antragsteller and antragsteller != fraktion:
            matrix[antragsteller][fraktion][stimme] += 1

    result = []
    for antragsteller, abstimmungen in sorted(matrix.items()):
        result.append({
            "antragsteller": antragsteller,
            "abstimmungen": {
                f: counts for f, counts in sorted(abstimmungen.items())
            }
        })

    return result


@router.get("/vorlage/{vorlage_id}")
def get_abstimmungen_fuer_vorlage(vorlage_id: int, conn=Depends(_db)):
    """All recorded votes for a single Vorlage, with per-Fraktion breakdown."""
    abstimmungen = conn.execute("""
        SELECT a.id, a.sitzung_datum, a.ergebnis, a.volltext, g.name as gremium
        FROM abstimmungen a
        LEFT JOIN gremien g ON a.gremium_id = g.id
        WHERE a.vorlage_id = ?
        ORDER BY a.sitzung_datum
    """, (vorlage_id,)).fetchall()

    result = []
    for a in abstimmungen:
        fraktionen = conn.execute("""
            SELECT fraktion, stimme, anzahl, bemerkung
            FROM abstimmungen_fraktionen
            WHERE abstimmung_id = ?
        """, (a[0],)).fetchall()

        result.append({
            "id": a[0],
            "sitzung_datum": a[1],
            "ergebnis": a[2],
            "volltext": a[3],
            "gremium": a[4],
            "fraktionen": [
                {"fraktion": f[0], "stimme": f[1], "anzahl": f[2], "bemerkung": f[3]}
                for f in fraktionen
            ]
        })

    return result
@router.get("/{kette_id}", response_model=KetteDetail)
def get_kette(kette_id: int, conn=Depends(_db)):
    """Get a single Kette with all Glieder.

    Loads the chain row joined with its originating Vorlage, the ordered
    chain members (Glieder), the applicants (Antragsteller) of the origin,
    and the Perlenschnur graph payload. Raises 404 if the id is unknown.
    """
    # Chain header plus the origin Vorlage (LEFT JOIN: origin may be missing).
    row = conn.execute(
        """SELECT k.id, k.typ, k.thema, k.status, k.status_seit,
                  k.letzte_aktivitaet, k.vertagungen_count, k.ursprung_id,
                  v.aktenzeichen, v.typ as v_typ, v.betreff, v.datum_eingang,
                  v.ist_verwaltungsvorlage
           FROM ketten k
           LEFT JOIN vorlagen v ON k.ursprung_id = v.id
           WHERE k.id = ?""",
        (kette_id,),
    ).fetchone()

    if not row:
        raise HTTPException(status_code=404, detail="Kette nicht gefunden")

    # Chain members, ordered by their position within the chain.
    glieder_rows = conn.execute(
        """SELECT kg.position, kg.rolle,
                  v.id, v.aktenzeichen, v.typ, v.betreff, v.datum_eingang,
                  v.ist_verwaltungsvorlage
           FROM ketten_glieder kg
           JOIN vorlagen v ON kg.vorlage_id = v.id
           WHERE kg.kette_id = ?
           ORDER BY kg.position""",
        (kette_id,),
    ).fetchall()

    glieder = [
        KettenGliedOut(
            vorlage=VorlageKurz(
                id=g["id"],
                aktenzeichen=g["aktenzeichen"],
                typ=g["typ"],
                betreff=g["betreff"],
                datum_eingang=g["datum_eingang"],
                ist_verwaltungsvorlage=bool(g["ist_verwaltungsvorlage"]),
            ),
            position=g["position"],
            rolle=g["rolle"],
        )
        for g in glieder_rows
    ]

    # Applicants (Antragsteller) of the originating Vorlage, if there is one.
    antragsteller = []
    if row["ursprung_id"]:
        antragsteller_rows = conn.execute("""
            SELECT p.id, p.kuerzel, p.name, p.farbe
            FROM antragsteller a
            JOIN parteien p ON a.partei_id = p.id
            WHERE a.vorlage_id = ?
        """, (row["ursprung_id"],)).fetchall()
        antragsteller = [ParteiOut(**dict(a)) for a in antragsteller_rows]

    # Graph / Perlenschnur payload for the visualization.
    graph = get_kette_graph(conn, kette_id)

    return KetteDetail(
        id=row["id"],
        ursprung=VorlageKurz(
            id=row["ursprung_id"],
            aktenzeichen=row["aktenzeichen"],
            typ=row["v_typ"],
            betreff=row["betreff"],
            datum_eingang=row["datum_eingang"],
            ist_verwaltungsvorlage=bool(row["ist_verwaltungsvorlage"]),
        ) if row["ursprung_id"] else None,
        typ=row["typ"],
        thema=row["thema"],
        status=row["status"],
        status_seit=row["status_seit"],
        letzte_aktivitaet=row["letzte_aktivitaet"],
        vertagungen_count=row["vertagungen_count"],
        glieder=glieder,
        antragsteller=antragsteller,
        graph=graph,
    )
@router.get("/geojson")
def get_orte_geojson(conn=Depends(_db)):
    """Orte als GeoJSON für Leaflet.

    Returns a FeatureCollection of all geocoded Orte; each feature carries
    the Ort id, name, the number of Vorlagen and their Aktenzeichen.
    """
    ort_rows = conn.execute("""
        SELECT o.id, o.name, o.lat, o.lon, o.vorlage_count,
               GROUP_CONCAT(v.aktenzeichen, ', ') as vorlagen
        FROM orte o
        LEFT JOIN vorlagen_orte vo ON o.id = vo.ort_id
        LEFT JOIN vorlagen v ON vo.vorlage_id = v.id
        WHERE o.lat IS NOT NULL
        GROUP BY o.id
        ORDER BY o.vorlage_count DESC
    """).fetchall()

    return {
        "type": "FeatureCollection",
        "features": [
            {
                "type": "Feature",
                "geometry": {
                    "type": "Point",
                    # GeoJSON requires [longitude, latitude] order.
                    "coordinates": [ort[3], ort[2]],
                },
                "properties": {
                    "id": ort[0],
                    "name": ort[1],
                    "vorlage_count": ort[4],
                    "vorlagen": ort[5],
                },
            }
            for ort in ort_rows
        ],
    }


@router.get("/{ort_id}/vorlagen")
def get_vorlagen_fuer_ort(ort_id: int, conn=Depends(_db)):
    """Alle Vorlagen die einen bestimmten Ort betreffen (newest first)."""
    rows = conn.execute("""
        SELECT v.id, v.aktenzeichen, v.typ, v.betreff, v.datum_eingang, vo.kontext
        FROM vorlagen_orte vo
        JOIN vorlagen v ON vo.vorlage_id = v.id
        WHERE vo.ort_id = ?
        ORDER BY v.datum_eingang DESC
    """, (ort_id,)).fetchall()

    # Column order in the SELECT matches this key tuple one-to-one.
    keys = ("id", "aktenzeichen", "typ", "betreff", "datum_eingang", "kontext")
    return [dict(zip(keys, row)) for row in rows]
@router.get("/ketten-stats")
def get_ketten_stats(conn=Depends(_db)):
    """Aggregated Ketten status distribution with breakdowns.

    Returns status counts per chain type, per-status averages (age in days
    and number of deferrals), and an early-warning counter for decided
    chains that appear to have silted up ("versandet").
    """
    # Status counts broken down by chain type.
    rows = conn.execute("""
        SELECT k.typ, k.status, COUNT(*) as c
        FROM ketten k
        GROUP BY k.typ, k.status
        ORDER BY k.typ, c DESC
    """).fetchall()

    by_type: dict = {}
    for r in rows:
        typ = r["typ"] or "unbekannt"
        if typ not in by_type:
            by_type[typ] = []
        by_type[typ].append({"status": r["status"], "anzahl": r["c"]})

    # Per-status counts with average age (days since status_seit) and
    # average number of deferrals (Vertagungen).
    status_detail = conn.execute("""
        SELECT status,
               COUNT(*) as anzahl,
               AVG(julianday('now') - julianday(status_seit)) as avg_tage,
               AVG(vertagungen_count) as avg_vertagungen
        FROM ketten
        WHERE status IS NOT NULL
        GROUP BY status
        ORDER BY anzahl DESC
    """).fetchall()

    # "Versandung" early warning: decided ('beschlossen') more than six
    # months (180 days) ago with no follow-up report since.
    fruehwarnung = conn.execute("""
        SELECT COUNT(*) as c FROM ketten
        WHERE status = 'beschlossen'
          AND julianday('now') - julianday(status_seit) > 180
    """).fetchone()["c"]

    return {
        "nach_typ": by_type,
        "status_detail": [
            {
                "status": r["status"],
                "anzahl": r["anzahl"],
                # COALESCE via `or 0`: AVG over an empty/NULL set yields None.
                "avg_tage": round(r["avg_tage"] or 0, 1),
                "avg_vertagungen": round(r["avg_vertagungen"] or 0, 1),
            }
            for r in status_detail
        ],
        "versandungs_fruehwarnung": fruehwarnung,
    }
@router.get("/{vorlage_id}", response_model=VorlageDetail)
def get_vorlage(vorlage_id: int, conn=Depends(_db)):
    """Get a single Vorlage with all details.

    Assembles the base record plus applicants (Antragsteller), deliberations
    (Beratungen), references in both directions, chain membership and the
    AI summary. Raises 404 if the id is unknown.
    """
    row = conn.execute(
        """SELECT id, aktenzeichen, aktenzeichen_basis, aktenzeichen_suffix,
                  typ, betreff, volltext_clean, datum_eingang, pdf_url, web_url,
                  ist_verwaltungsvorlage, thema_kurz
           FROM vorlagen WHERE id = ?""",
        (vorlage_id,),
    ).fetchone()

    if not row:
        raise HTTPException(status_code=404, detail="Vorlage nicht gefunden")

    # Applicants (parties that submitted the Vorlage).
    antragsteller = conn.execute(
        """SELECT p.id, p.kuerzel, p.name, p.farbe
           FROM antragsteller a
           JOIN parteien p ON a.partei_id = p.id
           WHERE a.vorlage_id = ?""",
        (vorlage_id,),
    ).fetchall()

    # Deliberations, newest first, joined with the Gremium they took place in.
    beratungen_rows = conn.execute(
        """SELECT b.id, b.sitzung_datum, b.rolle, b.ergebnis, b.ergebnis_text,
                  g.id as g_id, g.name as g_name, g.kuerzel as g_kuerzel, g.typ as g_typ
           FROM beratungen b
           LEFT JOIN gremien g ON b.gremium_id = g.id
           WHERE b.vorlage_id = ?
           ORDER BY b.sitzung_datum DESC NULLS LAST""",
        (vorlage_id,),
    ).fetchall()

    beratungen = [
        BeratungOut(
            id=b["id"],
            gremium=GremiumOut(id=b["g_id"], name=b["g_name"], kuerzel=b["g_kuerzel"], typ=b["g_typ"])
            if b["g_id"]
            else None,
            sitzung_datum=b["sitzung_datum"],
            rolle=b["rolle"],
            ergebnis=b["ergebnis"],
            ergebnis_text=b["ergebnis_text"],
        )
        for b in beratungen_rows
    ]

    # References to and from other Vorlagen.
    refs = get_references_for_vorlage(conn, vorlage_id)

    # Chain membership (at most one Kette is reported here via LIMIT 1).
    kette_row = conn.execute(
        "SELECT kette_id FROM ketten_glieder WHERE vorlage_id = ? LIMIT 1",
        (vorlage_id,),
    ).fetchone()

    # AI summary, stored as JSON text in ki_bewertungen.anmerkungen.
    ki_row = conn.execute(
        "SELECT anmerkungen FROM ki_bewertungen WHERE vorlage_id = ? AND typ = 'zusammenfassung' LIMIT 1",
        (vorlage_id,),
    ).fetchone()
    ki_zusammenfassung = None
    if ki_row and ki_row["anmerkungen"]:
        # NOTE(review): a pydantic ValidationError raised by KiZusammenfassung(**ki_data)
        # would NOT be caught here — confirm the stored JSON is schema-stable.
        try:
            ki_data = json.loads(ki_row["anmerkungen"])
            ki_zusammenfassung = KiZusammenfassung(**ki_data)
        except (json.JSONDecodeError, TypeError):
            pass

    return VorlageDetail(
        id=row["id"],
        aktenzeichen=row["aktenzeichen"],
        aktenzeichen_basis=row["aktenzeichen_basis"],
        aktenzeichen_suffix=row["aktenzeichen_suffix"],
        typ=row["typ"],
        betreff=row["betreff"],
        volltext_clean=row["volltext_clean"],
        datum_eingang=row["datum_eingang"],
        pdf_url=row["pdf_url"],
        web_url=row["web_url"],
        ist_verwaltungsvorlage=bool(row["ist_verwaltungsvorlage"]),
        thema_kurz=row["thema_kurz"],
        antragsteller=[ParteiOut(**dict(a)) for a in antragsteller],
        beratungen=beratungen,
        referenzen_ausgehend=[ReferenzOut(**r) for r in refs["ausgehend"]],
        referenzen_eingehend=[ReferenzOut(**r) for r in refs["eingehend"]],
        kette_id=kette_row["kette_id"] if kette_row else None,
        ki_zusammenfassung=ki_zusammenfassung,
    )
"""Ketten-Builder: groups Vorlagen into chains based on Aktenzeichen-Suffix references."""

import sqlite3


def build_suffix_references(conn: sqlite3.Connection) -> int:
    """Create referenzen entries for Aktenzeichen-Suffix relations.

    E.g. 0362/2025-1 references 0362/2025 via suffix relation.
    Returns the number of new references created (base links plus
    sequential suffix links).
    """
    # Link every suffixed Vorlage to its suffix-less base (e.g. -1 -> base).
    base_cursor = conn.execute("""
        INSERT OR IGNORE INTO referenzen (quelle_id, ziel_id, typ, konfidenz)
        SELECT child.id, parent.id, 'suffix', 1.0
        FROM vorlagen child
        JOIN vorlagen parent ON child.aktenzeichen_basis = parent.aktenzeichen_basis
        WHERE child.aktenzeichen_suffix IS NOT NULL
          AND parent.aktenzeichen_suffix IS NULL
          AND child.id != parent.id
    """)
    # Also link sequential suffixes: -2 -> -1, -3 -> -2, etc.
    seq_cursor = conn.execute("""
        INSERT OR IGNORE INTO referenzen (quelle_id, ziel_id, typ, konfidenz)
        SELECT later.id, earlier.id, 'suffix', 1.0
        FROM vorlagen later
        JOIN vorlagen earlier
          ON later.aktenzeichen_basis = earlier.aktenzeichen_basis
          AND later.aktenzeichen_suffix IS NOT NULL
          AND earlier.aktenzeichen_suffix IS NOT NULL
          AND CAST(REPLACE(later.aktenzeichen_suffix, '-', '') AS INTEGER)
              = CAST(REPLACE(earlier.aktenzeichen_suffix, '-', '') AS INTEGER) + 1
        WHERE later.id != earlier.id
    """)
    conn.commit()
    # Bug fix: previously only the first INSERT's rowcount was returned, so the
    # sequential-suffix links were silently excluded from the reported count.
    return base_cursor.rowcount + seq_cursor.rowcount


def build_chains(conn: sqlite3.Connection) -> int:
    """Build ketten from Vorlagen that share the same aktenzeichen_basis.

    A chain's Ursprung is the Vorlage without suffix (the original).
    Chain members are ordered by suffix number.
    Returns the number of chains created/updated.
    """
    # Imported lazily so build_suffix_references() stays usable without the
    # status module (and so this module can be imported in isolation).
    from tracker.core.status import compute_status

    # All basis values with at least one Vorlage; the type filter below ensures
    # only chains rooted in an Antrag/Anfrage are materialized.
    rows = conn.execute("""
        SELECT aktenzeichen_basis, COUNT(*) as cnt
        FROM vorlagen
        WHERE aktenzeichen_basis IS NOT NULL
        GROUP BY aktenzeichen_basis
        HAVING cnt >= 1
    """).fetchall()

    count = 0
    for row in rows:
        basis = row["aktenzeichen_basis"]

        # All Vorlagen in this chain, ordered by numeric suffix; the
        # suffix-less base sorts first (CASE ... THEN 0).
        members = conn.execute("""
            SELECT id, aktenzeichen, aktenzeichen_suffix, typ, datum_eingang, betreff
            FROM vorlagen
            WHERE aktenzeichen_basis = ?
            ORDER BY
                CASE WHEN aktenzeichen_suffix IS NULL THEN 0
                     ELSE CAST(REPLACE(aktenzeichen_suffix, '-', '') AS INTEGER)
                END
        """, (basis,)).fetchall()

        if not members:
            continue

        ursprung = members[0]

        # Only create chains whose origin is an Antrag or Anfrage.
        chain_typ = ursprung["typ"]
        if chain_typ not in ("antrag", "anfrage"):
            continue

        status_info = compute_status(conn, ursprung["id"], chain_typ, members)

        # Latest activity = newest member date, falling back to the origin's.
        dates = [m["datum_eingang"] for m in members if m["datum_eingang"]]
        letzte_aktivitaet = max(dates) if dates else ursprung["datum_eingang"]

        # Upsert: update the existing chain for this origin, else insert one.
        existing = conn.execute(
            "SELECT id FROM ketten WHERE ursprung_id = ?", (ursprung["id"],)
        ).fetchone()

        if existing:
            kette_id = existing["id"]
            conn.execute("""
                UPDATE ketten
                SET typ = ?, thema = ?, status = ?, status_seit = ?,
                    letzte_aktivitaet = ?, vertagungen_count = ?
                WHERE id = ?
            """, (
                chain_typ,
                ursprung["betreff"],
                status_info["status"],
                status_info.get("status_seit"),
                letzte_aktivitaet,
                status_info.get("vertagungen_count", 0),
                kette_id,
            ))
            # Members are rebuilt from scratch below.
            conn.execute("DELETE FROM ketten_glieder WHERE kette_id = ?", (kette_id,))
        else:
            cursor = conn.execute("""
                INSERT INTO ketten (ursprung_id, typ, thema, status, status_seit,
                                    letzte_aktivitaet, vertagungen_count)
                VALUES (?, ?, ?, ?, ?, ?, ?)
            """, (
                ursprung["id"],
                chain_typ,
                ursprung["betreff"],
                status_info["status"],
                status_info.get("status_seit"),
                letzte_aktivitaet,
                status_info.get("vertagungen_count", 0),
            ))
            kette_id = cursor.lastrowid

        # Insert chain members with position and heuristic role.
        for pos, member in enumerate(members):
            rolle = _determine_rolle(member, pos)
            conn.execute("""
                INSERT OR REPLACE INTO ketten_glieder (kette_id, vorlage_id, position, rolle)
                VALUES (?, ?, ?, ?)
            """, (kette_id, member["id"], pos, rolle))

        count += 1

    conn.commit()
    return count


def _determine_rolle(member: sqlite3.Row, position: int) -> str:
    """Heuristic role of a chain member based on its position and typ."""
    if position == 0:
        return "ursprung"
    typ = member["typ"]
    if typ == "stellungnahme":
        return "stellungnahme"
    if typ == "bericht":
        return "bericht"
    if typ in ("antrag", "anfrage"):
        return "aenderung"
    return "ergaenzung"
+ """ + ausgehend = conn.execute(""" + SELECT r.id, r.ziel_id as vorlage_id, r.typ, r.konfidenz, r.kontext, + v.aktenzeichen, v.betreff, v.typ as vorlage_typ, v.datum_eingang + FROM referenzen r + JOIN vorlagen v ON r.ziel_id = v.id + WHERE r.quelle_id = ? + ORDER BY v.datum_eingang + """, (vorlage_id,)).fetchall() + + eingehend = conn.execute(""" + SELECT r.id, r.quelle_id as vorlage_id, r.typ, r.konfidenz, r.kontext, + v.aktenzeichen, v.betreff, v.typ as vorlage_typ, v.datum_eingang + FROM referenzen r + JOIN vorlagen v ON r.quelle_id = v.id + WHERE r.ziel_id = ? + ORDER BY v.datum_eingang + """, (vorlage_id,)).fetchall() + + return { + "ausgehend": [_ref_to_dict(r) for r in ausgehend], + "eingehend": [_ref_to_dict(r) for r in eingehend], + } + + +def get_kette_graph(conn: sqlite3.Connection, kette_id: int) -> dict: + """Build the Perlenschnur graph data for a Kette. + + Returns nodes (Vorlagen) and edges (Referenzen) for visualization. + """ + # Get all Vorlagen in this chain + glieder = conn.execute(""" + SELECT kg.position, kg.rolle, v.id, v.aktenzeichen, v.typ, v.betreff, + v.datum_eingang, v.ist_verwaltungsvorlage + FROM ketten_glieder kg + JOIN vorlagen v ON kg.vorlage_id = v.id + WHERE kg.kette_id = ? + ORDER BY kg.position + """, (kette_id,)).fetchall() + + vorlage_ids = [g["id"] for g in glieder] + if not vorlage_ids: + return {"nodes": [], "edges": []} + + placeholders = ",".join("?" 
* len(vorlage_ids)) + + # Get all references between chain members + edges_internal = conn.execute(f""" + SELECT r.id, r.quelle_id, r.ziel_id, r.typ, r.konfidenz + FROM referenzen r + WHERE r.quelle_id IN ({placeholders}) + AND r.ziel_id IN ({placeholders}) + """, vorlage_ids + vorlage_ids).fetchall() + + # Get references to/from external Vorlagen (for side-links) + edges_external = conn.execute(f""" + SELECT r.id, r.quelle_id, r.ziel_id, r.typ, r.konfidenz, + v.aktenzeichen, v.betreff, v.typ as vorlage_typ, v.datum_eingang + FROM referenzen r + JOIN vorlagen v ON CASE + WHEN r.quelle_id IN ({placeholders}) THEN r.ziel_id = v.id + ELSE r.quelle_id = v.id + END + WHERE (r.quelle_id IN ({placeholders}) AND r.ziel_id NOT IN ({placeholders})) + OR (r.ziel_id IN ({placeholders}) AND r.quelle_id NOT IN ({placeholders})) + """, vorlage_ids * 5).fetchall() + + # Get Beratungen for each chain Vorlage + beratungen = conn.execute(f""" + SELECT b.vorlage_id, b.sitzung_datum, b.rolle, b.ergebnis, + g.name as gremium_name + FROM beratungen b + LEFT JOIN gremien g ON b.gremium_id = g.id + WHERE b.vorlage_id IN ({placeholders}) + ORDER BY b.sitzung_datum + """, vorlage_ids).fetchall() + + beratungen_map: dict[int, list] = {} + for b in beratungen: + vid = b["vorlage_id"] + if vid not in beratungen_map: + beratungen_map[vid] = [] + beratungen_map[vid].append({ + "sitzung_datum": b["sitzung_datum"], + "rolle": b["rolle"], + "ergebnis": b["ergebnis"], + "gremium_name": b["gremium_name"], + }) + + # Get Antragsteller for Ursprung + antragsteller = conn.execute(f""" + SELECT a.vorlage_id, p.kuerzel, p.name, p.farbe + FROM antragsteller a + JOIN parteien p ON a.partei_id = p.id + WHERE a.vorlage_id IN ({placeholders}) + """, vorlage_ids).fetchall() + + antragsteller_map: dict[int, list] = {} + for a in antragsteller: + vid = a["vorlage_id"] + if vid not in antragsteller_map: + antragsteller_map[vid] = [] + antragsteller_map[vid].append({ + "kuerzel": a["kuerzel"], + "name": a["name"], + 
"farbe": a["farbe"], + }) + + nodes = [ + { + "id": g["id"], + "aktenzeichen": g["aktenzeichen"], + "typ": g["typ"], + "betreff": g["betreff"], + "datum_eingang": g["datum_eingang"], + "position": g["position"], + "rolle": g["rolle"], + "ist_verwaltungsvorlage": bool(g["ist_verwaltungsvorlage"]), + "beratungen": beratungen_map.get(g["id"], []), + "antragsteller": antragsteller_map.get(g["id"], []), + } + for g in glieder + ] + + edges = [ + { + "quelle_id": e["quelle_id"], + "ziel_id": e["ziel_id"], + "typ": e["typ"], + "konfidenz": e["konfidenz"], + } + for e in edges_internal + ] + + external_nodes = [] + for e in edges_external: + ext_id = e["ziel_id"] if e["quelle_id"] in vorlage_ids else e["quelle_id"] + if not any(n["id"] == ext_id for n in external_nodes): + external_nodes.append({ + "id": ext_id, + "aktenzeichen": e["aktenzeichen"], + "typ": e["vorlage_typ"], + "betreff": e["betreff"], + "datum_eingang": e["datum_eingang"], + "extern": True, + }) + edges.append({ + "quelle_id": e["quelle_id"], + "ziel_id": e["ziel_id"], + "typ": e["typ"], + "konfidenz": e["konfidenz"], + }) + + return { + "nodes": nodes + external_nodes, + "edges": edges, + } + + +def _ref_to_dict(r: sqlite3.Row) -> dict: + return { + "vorlage_id": r["vorlage_id"], + "aktenzeichen": r["aktenzeichen"], + "betreff": r["betreff"], + "vorlage_typ": r["vorlage_typ"], + "datum_eingang": r["datum_eingang"], + "ref_typ": r["typ"], + "konfidenz": r["konfidenz"], + "kontext": r["kontext"], + } diff --git a/backend/src/tracker/core/status.py b/backend/src/tracker/core/status.py new file mode 100644 index 0000000..7b261b2 --- /dev/null +++ b/backend/src/tracker/core/status.py @@ -0,0 +1,222 @@ +"""Status-Engine: computes chain status based on KONZEPT.md section 6.""" + +from __future__ import annotations + +import sqlite3 +from datetime import date, timedelta + +# Threshold: a Vorlage is considered "old" after this many days without activity +VERSANDET_TAGE = 365 + + +def compute_status( + conn: 
def compute_status(
    conn: sqlite3.Connection,
    ursprung_id: int,
    chain_typ: str,
    members: list[sqlite3.Row],
) -> dict:
    """Compute the aggregate status of one chain.

    Dispatches on the chain type to the matching status engine (see
    KONZEPT.md section 6); any type other than antrag/anfrage yields the
    neutral 'unbekannt' result.

    Returns a dict with keys: status, status_seit, vertagungen_count.
    """
    if chain_typ == "antrag":
        return _status_antrag(conn, ursprung_id, members)
    if chain_typ == "anfrage":
        return _status_anfrage(conn, ursprung_id, members)
    # Chains are only built for antrag/anfrage; anything else is unknown.
    return {"status": "unbekannt", "status_seit": None, "vertagungen_count": 0}
def _status_anfrage(
    conn: sqlite3.Connection,
    ursprung_id: int,
    members: list[sqlite3.Row],
) -> dict:
    """Status logic for Anfragen (KONZEPT.md 6.1).

    angefragt: Keine Stellungnahme, <1 Jahr
    beantwortet: Stellungnahme + KI-Match >=0.7 + Kenntnisnahme
    offen: Stellungnahme da, aber keine Kenntnisnahme
    abgewiegelt: Stellungnahme + KI-Match <0.5
    versandet: Keine Antwort, >1 Jahr
    zurückgezogen: Explizit zurückgezogen
    """
    def result(status: str, seit) -> dict:
        # Anfragen never track Vertagungen, so the count is always 0.
        return {"status": status, "status_seit": seit, "vertagungen_count": 0}

    # All Beratungen of the origin, newest first.
    beratungen = conn.execute("""
        SELECT rolle, ergebnis, sitzung_datum
        FROM beratungen
        WHERE vorlage_id = ?
        ORDER BY sitzung_datum DESC
    """, (ursprung_id,)).fetchall()

    # An explicit withdrawal overrides every other state.
    if _is_zurueckgezogen(beratungen):
        return result("zurückgezogen", _latest_date(beratungen))

    stellungnahmen = [m for m in members if m["typ"] == "stellungnahme"]
    # KI assessment of how well the Stellungnahme answers the Anfrage.
    ki_score = _get_ki_score(conn, ursprung_id, "antwort_match")

    if stellungnahmen:
        seit = _vorlage_date(stellungnahmen[0])
        if ki_score is not None and ki_score < 0.5:
            return result("abgewiegelt", seit)
        kenntnis_genommen = any(
            b["rolle"] and "kenntnisnahme" in b["rolle"].lower()
            for b in beratungen
        )
        # Counts as answered when acknowledged and the KI did not rate the
        # answer as insufficient (no score is treated as acceptable).
        if kenntnis_genommen and (ki_score is None or ki_score >= 0.7):
            return result("beantwortet", seit)
        return result("offen", seit)

    # No Stellungnahme yet: decide between freshly asked and silted up.
    eingang = _parse_date(members[0]["datum_eingang"])
    if eingang and (date.today() - eingang).days > VERSANDET_TAGE:
        return result("versandet", eingang)
    return result("angefragt", eingang)
def _status_antrag(
    conn: sqlite3.Connection,
    ursprung_id: int,
    members: list[sqlite3.Row],
) -> dict:
    """Status logic for Anträge (KONZEPT.md 6.2).

    eingereicht: Neu, noch keine Beratung
    in_beratung: Mindestens eine Beratung ohne Endbeschluss
    vertagt: Letzte Beratung = vertagt
    verwiesen: An anderen Ausschuss überwiesen
    beschlossen: Angenommen, <1 Jahr, kein Umsetzungsbericht
    umgesetzt: Umsetzungsbericht + KI-Match >=0.7
    teilweise_umgesetzt: Umsetzungsbericht + KI-Match 0.4-0.7
    abgelehnt: Beschluss = abgelehnt
    abgewiegelt: Beschlossen + Bericht + KI-Match <0.4
    versandet: Beschlossen, >1 Jahr, kein Bericht
    zurückgezogen: Explizit zurückgezogen
    """
    heute = date.today()
    # members[0] is the chain origin (the Antrag itself).
    ursprung_datum = _parse_date(members[0]["datum_eingang"])

    # Newest first; NULLS LAST keeps undated sessions at the end.
    beratungen = conn.execute("""
        SELECT rolle, ergebnis, sitzung_datum
        FROM beratungen
        WHERE vorlage_id = ?
        ORDER BY sitzung_datum DESC NULLS LAST
    """, (ursprung_id,)).fetchall()

    # Count Vertagungen (postponements) across all Beratungen.
    vertagungen = sum(1 for b in beratungen if b["ergebnis"] and "vertagt" in b["ergebnis"].lower())

    # Check zurückgezogen — withdrawal overrides every other state.
    if _is_zurueckgezogen(beratungen):
        return {"status": "zurückgezogen", "status_seit": _latest_date(beratungen), "vertagungen_count": vertagungen}

    # Follow-up Berichte in the chain signal implementation activity.
    berichte = [m for m in members if m["typ"] == "bericht"]
    has_bericht = len(berichte) > 0

    # Determine beschluss from beratungen
    beschluss = _get_beschluss(beratungen)

    if beschluss == "abgelehnt":
        return {"status": "abgelehnt", "status_seit": _latest_date(beratungen), "vertagungen_count": vertagungen}

    if beschluss == "angenommen":
        beschluss_datum = _latest_date(beratungen)

        if has_bericht:
            # KI assessment of whether the Bericht actually implements the Antrag.
            ki_score = _get_ki_score(conn, ursprung_id, "umsetzung_match")
            # berichte[-1]: the last report in chain order.
            bericht_datum = _vorlage_date(berichte[-1])

            if ki_score is not None:
                if ki_score >= 0.7:
                    return {"status": "umgesetzt", "status_seit": bericht_datum, "vertagungen_count": vertagungen}
                elif ki_score >= 0.4:
                    return {"status": "teilweise_umgesetzt", "status_seit": bericht_datum, "vertagungen_count": vertagungen}
                else:
                    return {"status": "abgewiegelt", "status_seit": bericht_datum, "vertagungen_count": vertagungen}
            # No KI assessment yet: a report alone counts as implemented.
            return {"status": "umgesetzt", "status_seit": bericht_datum, "vertagungen_count": vertagungen}

        # Angenommen but no Bericht
        if beschluss_datum and (heute - beschluss_datum).days > VERSANDET_TAGE:
            return {"status": "versandet", "status_seit": beschluss_datum, "vertagungen_count": vertagungen}

        return {"status": "beschlossen", "status_seit": beschluss_datum, "vertagungen_count": vertagungen}

    if beschluss == "verwiesen":
        return {"status": "verwiesen", "status_seit": _latest_date(beratungen), "vertagungen_count": vertagungen}

    # No final decision yet
    if beratungen:
        # beratungen[0] is the most recent session (DESC order above).
        last = beratungen[0]
        if last["ergebnis"] and "vertagt" in last["ergebnis"].lower():
            return {"status": "vertagt", "status_seit": _latest_date(beratungen), "vertagungen_count": vertagungen}
        return {"status": "in_beratung", "status_seit": _latest_date(beratungen), "vertagungen_count": vertagungen}

    # No beratungen at all
    return {"status": "eingereicht", "status_seit": ursprung_datum, "vertagungen_count": vertagungen}
def _parse_date(val: str | None) -> date | None:
    """Parse an ISO date string; empty or invalid input yields None."""
    if not val:
        return None
    try:
        parsed = date.fromisoformat(val)
    except (ValueError, TypeError):
        return None
    return parsed


def _vorlage_date(member: sqlite3.Row) -> date | None:
    """Return the date a Vorlage was received, or None."""
    return _parse_date(member["datum_eingang"])


def _latest_date(beratungen: list[sqlite3.Row]) -> date | None:
    """Return the most recent Sitzung date among the Beratungen, if any."""
    parsed = (
        _parse_date(b["sitzung_datum"])
        for b in beratungen
        if b["sitzung_datum"]
    )
    return max(parsed, default=None)
def _get_ki_score(conn: sqlite3.Connection, vorlage_id: int, typ: str) -> float | None:
    """Fetch the most recent KI score of the given type for a Vorlage."""
    row = conn.execute("""
        SELECT score FROM ki_bewertungen
        WHERE vorlage_id = ? AND typ = ?
        ORDER BY erstellt_at DESC
        LIMIT 1
    """, (vorlage_id, typ)).fetchone()
    if row is None:
        return None
    return row["score"]


def _is_zurueckgezogen(beratungen: list[sqlite3.Row]) -> bool:
    """True if any Beratung result marks the Vorlage as withdrawn."""
    for beratung in beratungen:
        ergebnis = beratung["ergebnis"]
        if ergebnis and "zurückgezogen" in ergebnis.lower():
            return True
    return False


# Result keywords that indicate acceptance of a Vorlage.
_ANGENOMMEN_KEYWORDS = ("angenommen", "empfohlen", "beschlossen", "zugestimmt")


def _get_beschluss(beratungen: list[sqlite3.Row]) -> str | None:
    """Determine the final decision from Beratungen.

    Scans the Beratungen in the given order and returns the first
    recognizable outcome ('abgelehnt', 'verwiesen' or 'angenommen'),
    or None when no decision is found.
    """
    for beratung in beratungen:
        ergebnis = (beratung["ergebnis"] or "").lower()
        rolle = (beratung["rolle"] or "").lower()

        if "abgelehnt" in ergebnis:
            return "abgelehnt"
        if "verwiesen" in ergebnis:
            return "verwiesen"
        if any(keyword in ergebnis for keyword in _ANGENOMMEN_KEYWORDS):
            return "angenommen"
        # A non-empty, non-postponed result in an Entscheidung-role session
        # is most likely a (positive) decision.
        if "entscheidung" in rolle and ergebnis and "vertagt" not in ergebnis:
            return "angenommen"

    return None
Deutschlands', '#E3000F'),
    ('FDP', 'Freie Demokratische Partei', '#FFED00'),
    ('LINKE', 'Die Linke', '#BE3075'),
    ('AfD', 'Alternative für Deutschland', '#009EE0'),
    ('HBL', 'Hagener Bürger-Liste', '#FF8C00'),
    ('BfHo', 'Bürger für Hohenlimburg', '#8B4513'),
    ('Volt', 'Volt Deutschland', '#502379'),
    ('parteilos', 'Parteilos', '#808080');

-- Gremien (Lookup)
CREATE TABLE IF NOT EXISTS gremien (
    id INTEGER PRIMARY KEY,
    oparl_id TEXT UNIQUE,
    name TEXT NOT NULL,
    kuerzel TEXT,
    typ TEXT -- 'rat', 'ausschuss', 'bv'
);

-- Thematische Klassifikation (Aktenplan)
-- Two-level hierarchy: ebene 1 = Hauptgruppe, ebene 2 = Gruppe
-- (parent_code points at the Hauptgruppe code).
CREATE TABLE IF NOT EXISTS themen_kategorien (
    id INTEGER PRIMARY KEY,
    code TEXT UNIQUE NOT NULL,
    name TEXT NOT NULL,
    ebene INTEGER,
    parent_code TEXT
);

-- Hauptgruppen (Ebene 1)
INSERT OR IGNORE INTO themen_kategorien (code, name, ebene, parent_code) VALUES
    ('000', 'Allgemeine Verwaltung', 1, NULL),
    ('100', 'Sicherheit und Ordnung', 1, NULL),
    ('200', 'Schulen', 1, NULL),
    ('300', 'Kultur', 1, NULL),
    ('400', 'Soziales', 1, NULL),
    ('500', 'Gesundheit und Sport', 1, NULL),
    ('600', 'Bauwesen', 1, NULL),
    ('700', 'Öffentliche Einrichtungen', 1, NULL),
    ('800', 'Wirtschaft und Verkehr', 1, NULL),
    ('900', 'Finanzen', 1, NULL);

-- Gruppen (Ebene 2)
INSERT OR IGNORE INTO themen_kategorien (code, name, ebene, parent_code) VALUES
    ('001', 'Verfassung, Verwaltungsorganisation', 2, '000'),
    ('002', 'Statistik, Wahlen', 2, '000'),
    ('003', 'Ratsangelegenheiten', 2, '000'),
    ('004', 'Personal', 2, '000'),
    ('005', 'IT, Digitalisierung', 2, '000'),
    ('006', 'Öffentlichkeitsarbeit', 2, '000'),
    ('007', 'Interkommunale Zusammenarbeit', 2, '000'),
    ('008', 'Bürgerservice, Bürgerbeteiligung', 2, '000'),
    ('110', 'Polizeiwesen, öffentliche Sicherheit', 2, '100'),
    ('111', 'Straßenverkehr, Verkehrssicherheit', 2, '100'),
    ('112', 'Gewerbeaufsicht', 2, '100'),
    ('113', 'Feuerschutz, Feuerwehr', 2, '100'),
    ('114', 'Katastrophenschutz, Rettungsdienst', 2, '100'),
    ('115', 'Ordnungsamt, Ordnungswidrigkeiten', 2, '100'),
    ('116', 'Ausländerwesen', 2, '100'),
    ('117', 'Standesamt', 2, '100'),
    ('210', 'Schulverwaltung', 2, '200'),
    ('211', 'Grundschulen', 2, '200'),
    ('212', 'Weiterführende Schulen', 2, '200'),
    ('213', 'Förderschulen', 2, '200'),
    ('214', 'Berufsschulen', 2, '200'),
    ('215', 'Schulentwicklungsplanung', 2, '200'),
    ('216', 'Schülerbeförderung', 2, '200'),
    ('217', 'OGS, Ganztagsbetreuung', 2, '200'),
    ('310', 'Kulturförderung, Kulturpolitik', 2, '300'),
    ('311', 'Museen', 2, '300'),
    ('312', 'Bibliotheken', 2, '300'),
    ('313', 'Theater, Konzerte', 2, '300'),
    ('314', 'Musikschulen', 2, '300'),
    ('315', 'VHS, Erwachsenenbildung', 2, '300'),
    ('316', 'Denkmalschutz', 2, '300'),
    ('317', 'Stadtgeschichte, Archive', 2, '300'),
    ('410', 'Sozialplanung', 2, '400'),
    ('411', 'Jugendhilfe', 2, '400'),
    ('412', 'Kindertagesstätten', 2, '400'),
    ('413', 'Jugendarbeit', 2, '400'),
    ('414', 'Seniorenarbeit', 2, '400'),
    ('415', 'Behindertenhilfe, Inklusion', 2, '400'),
    ('416', 'Integration, Migration', 2, '400'),
    ('417', 'Wohnungslosenhilfe', 2, '400'),
    ('418', 'Sozialleistungen', 2, '400'),
    ('510', 'Gesundheitsförderung', 2, '500'),
    ('511', 'Gesundheitsamt', 2, '500'),
    ('512', 'Krankenhäuser, Kliniken', 2, '500'),
    ('513', 'Suchtprävention', 2, '500'),
    ('520', 'Sport allgemein', 2, '500'),
    ('521', 'Sportförderung', 2, '500'),
    ('522', 'Sportstätten', 2, '500'),
    ('523', 'Vereinsförderung', 2, '500'),
    ('610', 'Stadtplanung, Bauleitplanung', 2, '600'),
    ('611', 'Bebauungspläne', 2, '600'),
    ('612', 'Flächennutzungsplan', 2, '600'),
    ('613', 'Bauordnung', 2, '600'),
    ('614', 'Hochbau, Gebäudemanagement', 2, '600'),
    ('615', 'Tiefbau, Straßenbau', 2, '600'),
    ('616', 'Wohnungsbau, Wohnungsförderung', 2, '600'),
    ('617', 'Grünflächen, Parks', 2, '600'),
    ('618', 'Spielplätze', 2, '600'),
    ('619', 'Klimaschutz, Umwelt', 2, '600'),
    ('710', 'Versorgung, Entsorgung', 2, '700'),
    ('711', 'Wasserversorgung', 2, '700'),
    ('712', 'Abwasser, Kanalisation', 2, '700'),
    ('713', 'Abfallwirtschaft', 2, '700'),
    ('714', 'Friedhöfe', 2, '700'),
    ('715', 'Bäder, Schwimmbäder', 2, '700'),
    ('716', 'Märkte', 2, '700'),
    ('717', 'Stadtwerke', 2, '700'),
    ('810', 'Wirtschaftsförderung', 2, '800'),
    ('811', 'Arbeitsmarkt', 2, '800'),
    ('812', 'Tourismus', 2, '800'),
    ('820', 'Verkehr allgemein', 2, '800'),
    ('821', 'ÖPNV, Nahverkehr', 2, '800'),
    ('822', 'Radverkehr', 2, '800'),
    ('823', 'Fußverkehr', 2, '800'),
    ('824', 'Straßen, Parkraum', 2, '800'),
    ('825', 'Mobilität, Verkehrswende', 2, '800'),
    ('910', 'Haushalt', 2, '900'),
    ('911', 'Steuern, Abgaben', 2, '900'),
    ('912', 'Liegenschaften', 2, '900'),
    ('913', 'Beteiligungen', 2, '900'),
    ('914', 'Fördermittel', 2, '900'),
    ('915', 'Gebühren', 2, '900');

-- Schlagwörter
CREATE TABLE IF NOT EXISTS schlagwoerter (
    id INTEGER PRIMARY KEY,
    name TEXT UNIQUE NOT NULL
);

-- Vorlagen (Kerntabelle)
-- aktenzeichen_basis/_suffix split the Aktenzeichen so follow-up Vorlagen
-- (suffix variants) can be linked to their base Vorlage.
CREATE TABLE IF NOT EXISTS vorlagen (
    id INTEGER PRIMARY KEY,
    oparl_id TEXT UNIQUE,
    volfdnr INTEGER,
    aktenzeichen TEXT,
    aktenzeichen_basis TEXT,
    aktenzeichen_suffix TEXT,
    typ TEXT,
    betreff TEXT,
    volltext TEXT,
    volltext_clean TEXT,
    datum_eingang DATE,
    pdf_url TEXT,
    web_url TEXT,
    ist_verwaltungsvorlage BOOLEAN DEFAULT FALSE,
    thema_kategorie_id INTEGER REFERENCES themen_kategorien(id),
    thema_kurz TEXT,
    scraped_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_vorlagen_aktenzeichen ON vorlagen(aktenzeichen);
CREATE INDEX IF NOT EXISTS idx_vorlagen_aktenzeichen_basis ON vorlagen(aktenzeichen_basis);
CREATE INDEX IF NOT EXISTS idx_vorlagen_typ ON vorlagen(typ);
CREATE INDEX IF NOT EXISTS idx_vorlagen_datum ON vorlagen(datum_eingang);

-- Verschlagwortung (n:m)
CREATE TABLE IF NOT EXISTS vorlage_schlagwoerter (
    vorlage_id INTEGER REFERENCES vorlagen(id),
    schlagwort_id INTEGER REFERENCES schlagwoerter(id),
    konfidenz REAL DEFAULT 1.0,
    PRIMARY KEY (vorlage_id, schlagwort_id)
);

-- Ortsbezüge
-- One row per extracted place reference; lat/lon filled by geocoding.
CREATE TABLE IF NOT EXISTS ortsbezuege (
    id INTEGER PRIMARY KEY,
    vorlage_id INTEGER REFERENCES vorlagen(id),
    ortsangabe TEXT NOT NULL,
    ortstyp TEXT,
    strasse1 TEXT,
    strasse2 TEXT,
    hausnummer TEXT,
    stadtteil TEXT,
    lat REAL,
    lon REAL,
    geocoding_quelle TEXT,
    geocoding_konfidenz REAL,
    konfidenz REAL DEFAULT 1.0,
    kontext TEXT,
    ist_antragsgegenstand BOOLEAN DEFAULT TRUE
);

-- Antragsteller (n:m)
CREATE TABLE IF NOT EXISTS antragsteller (
    vorlage_id INTEGER REFERENCES vorlagen(id),
    partei_id INTEGER REFERENCES parteien(id),
    rolle TEXT DEFAULT 'antragsteller',
    PRIMARY KEY (vorlage_id, partei_id)
);

-- Beratungsfolge
CREATE TABLE IF NOT EXISTS beratungen (
    id INTEGER PRIMARY KEY,
    vorlage_id INTEGER REFERENCES vorlagen(id),
    gremium_id INTEGER REFERENCES gremien(id),
    sitzung_datum DATE,
    sitzung_id INTEGER,
    tagesordnungspunkt TEXT,
    rolle TEXT,
    ergebnis TEXT,
    ergebnis_text TEXT,
    protokoll_auszug TEXT
);

CREATE INDEX IF NOT EXISTS idx_beratungen_vorlage ON beratungen(vorlage_id);
CREATE INDEX IF NOT EXISTS idx_beratungen_gremium ON beratungen(gremium_id);

-- Anlagen
CREATE TABLE IF NOT EXISTS anlagen (
    id INTEGER PRIMARY KEY,
    vorlage_id INTEGER REFERENCES vorlagen(id),
    dateiname TEXT,
    url TEXT,
    inhalt_text TEXT,
    braucht_extraktion BOOLEAN DEFAULT FALSE
);

----------------------------------------------------------------------
-- SCHICHT 2: Analyse
----------------------------------------------------------------------

-- Referenzen (Graph-Kanten)
-- Directed edge quelle -> ziel; UNIQUE keeps re-runs idempotent.
CREATE TABLE IF NOT EXISTS referenzen (
    id INTEGER PRIMARY KEY,
    quelle_id INTEGER REFERENCES vorlagen(id),
    ziel_id INTEGER REFERENCES vorlagen(id),
    typ TEXT NOT NULL,
    konfidenz REAL DEFAULT 1.0,
    kontext TEXT,
    UNIQUE(quelle_id, ziel_id, typ)
);

CREATE INDEX IF NOT EXISTS idx_referenzen_quelle ON referenzen(quelle_id);
CREATE INDEX IF
NOT EXISTS idx_referenzen_ziel ON referenzen(ziel_id); + +-- Ketten +CREATE TABLE IF NOT EXISTS ketten ( + id INTEGER PRIMARY KEY, + ursprung_id INTEGER REFERENCES vorlagen(id), + typ TEXT, + thema TEXT, + status TEXT, + status_seit DATE, + letzte_aktivitaet DATE, + vertagungen_count INTEGER DEFAULT 0 +); + +-- Ketten-Glieder +CREATE TABLE IF NOT EXISTS ketten_glieder ( + kette_id INTEGER REFERENCES ketten(id), + vorlage_id INTEGER REFERENCES vorlagen(id), + position INTEGER, + rolle TEXT, + PRIMARY KEY (kette_id, vorlage_id) +); + +-- KI-Bewertungen +CREATE TABLE IF NOT EXISTS ki_bewertungen ( + id INTEGER PRIMARY KEY, + vorlage_id INTEGER REFERENCES vorlagen(id), + bezug_id INTEGER, + typ TEXT NOT NULL, + score REAL, + konfidenz REAL, + begruendung TEXT, + anmerkungen TEXT, + unsicher BOOLEAN DEFAULT FALSE, + review_grund TEXT, + modell TEXT, + prompt_version TEXT, + erstellt_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +CREATE INDEX IF NOT EXISTS idx_ki_bewertungen_vorlage ON ki_bewertungen(vorlage_id); +CREATE INDEX IF NOT EXISTS idx_ki_bewertungen_typ ON ki_bewertungen(typ); diff --git a/backend/src/tracker/db/session.py b/backend/src/tracker/db/session.py new file mode 100644 index 0000000..a7564a7 --- /dev/null +++ b/backend/src/tracker/db/session.py @@ -0,0 +1,27 @@ +"""SQLite database connection management.""" + +import os +import sqlite3 +from contextlib import contextmanager +from pathlib import Path + +# Use environment variable or default to relative path +DB_PATH = Path(os.environ.get("DATABASE_PATH", Path(__file__).resolve().parents[4] / "data" / "tracker.db")) + + +def get_connection(db_path: Path | str | None = None) -> sqlite3.Connection: + path = str(db_path or DB_PATH) + conn = sqlite3.connect(path, detect_types=0) + conn.row_factory = sqlite3.Row + conn.execute("PRAGMA journal_mode = WAL") + conn.execute("PRAGMA foreign_keys = ON") + return conn + + +@contextmanager +def get_db(db_path: Path | str | None = None): + conn = 
"""FastAPI application for Antragstracker Hagen."""

import os
from pathlib import Path

# NOTE(review): `os` appears unused in this module — confirm before removing.
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles

from tracker.api.routes import abstimmungen, ketten, orte, stats, vorlagen

# Application metadata shown in the generated OpenAPI docs.
app = FastAPI(
    title="Antragstracker Hagen",
    description="API zur Nachverfolgung kommunaler Anträge und Anfragen",
    version="0.1.0",
)

# Read-only CORS policy: any origin may read, but only GET is allowed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["GET"],
    allow_headers=["*"],
)

# Feature routers, all mounted under the common /api prefix.
app.include_router(vorlagen.router, prefix="/api")
app.include_router(ketten.router, prefix="/api")
app.include_router(stats.router, prefix="/api")
app.include_router(abstimmungen.router, prefix="/api")
app.include_router(orte.router, prefix="/api")


@app.get("/api/health")
def health():
    """Liveness probe for Docker/Traefik health checks."""
    return {"status": "ok"}


# Serve static frontend files in production
# Try multiple paths (Docker vs local dev); the first directory that
# exists AND contains an index.html wins. Mounting "/" last keeps the
# /api routes (registered above) taking precedence.
for static_path in ["/app/static", Path(__file__).parent.parent.parent.parent / "static"]:
    static_dir = Path(static_path)
    if static_dir.exists() and (static_dir / "index.html").exists():
        app.mount("/", StaticFiles(directory=str(static_dir), html=True), name="static")
        break
@pytest.fixture
def client():
    """In-process FastAPI test client (no running server needed)."""
    return TestClient(app)


@pytest.fixture
def db():
    """Raw sqlite3 connection to the real database; closed after each test."""
    conn = get_connection()
    yield conn
    conn.close()


# --- Sanity checks on the DB ---

class TestDatabaseSanity:
    """Guard assertions about the imported production data set."""

    def test_vorlagen_count(self, db):
        count = db.execute("SELECT COUNT(*) as cnt FROM vorlagen").fetchone()["cnt"]
        assert count >= 6000, f"Expected >=6000 Vorlagen, got {count}"

    def test_vorlagen_types(self, db):
        types = db.execute(
            "SELECT DISTINCT typ FROM vorlagen WHERE typ IS NOT NULL"
        ).fetchall()
        type_names = {r["typ"] for r in types}
        # The chain builder depends on these two types being present.
        assert "antrag" in type_names
        assert "anfrage" in type_names

    def test_beratungen_exist(self, db):
        count = db.execute("SELECT COUNT(*) as cnt FROM beratungen").fetchone()["cnt"]
        assert count > 0, "No Beratungen in DB"

    def test_suffix_vorlagen_exist(self, db):
        # Suffix Vorlagen are the raw material for suffix references.
        count = db.execute(
            "SELECT COUNT(*) as cnt FROM vorlagen WHERE aktenzeichen_suffix IS NOT NULL"
        ).fetchone()["cnt"]
        assert count > 0, "No suffix Vorlagen in DB"


# --- API: Health ---

class TestHealth:
    def test_health(self, client):
        resp = client.get("/api/health")
        assert resp.status_code == 200
        assert resp.json()["status"] == "ok"


# --- API: Vorlagen ---

class TestVorlagenAPI:
    def test_list_vorlagen(self, client):
        resp = client.get("/api/vorlagen?page=1&page_size=10")
        assert resp.status_code == 200
        data = resp.json()
        assert data["total"] >= 6000
        assert len(data["items"]) == 10
        assert data["page"] == 1

    def test_list_vorlagen_filter_typ(self, client):
        resp = client.get("/api/vorlagen?typ=antrag&page_size=5")
        assert resp.status_code == 200
        data = resp.json()
        assert data["total"] > 0
        for item in data["items"]:
            assert item["typ"] == "antrag"

    def test_list_vorlagen_filter_suche(self, client):
        resp = client.get("/api/vorlagen?suche=Klimaschutz&page_size=5")
        assert resp.status_code == 200
        # May or may not find results, but should not error

    def test_get_vorlage_detail(self, client, db):
        # Get first vorlage with aktenzeichen
        row = db.execute(
            "SELECT id FROM vorlagen WHERE aktenzeichen IS NOT NULL LIMIT 1"
        ).fetchone()
        assert row is not None

        resp = client.get(f"/api/vorlagen/{row['id']}")
        assert resp.status_code == 200
        data = resp.json()
        assert data["id"] == row["id"]
        assert data["aktenzeichen"] is not None

    def test_get_vorlage_not_found(self, client):
        resp = client.get("/api/vorlagen/999999")
        assert resp.status_code == 404


# --- API: Ketten ---

class TestKettenAPI:
    def test_list_ketten_empty_initially(self, client):
        """Before building chains, the list may be empty."""
        resp = client.get("/api/ketten?page_size=5")
        assert resp.status_code == 200

    def test_list_ketten_filter(self, client):
        resp = client.get("/api/ketten?status=eingereicht&page_size=5")
        assert resp.status_code == 200

    def test_get_kette_not_found(self, client):
        resp = client.get("/api/ketten/999999")
        assert resp.status_code == 404
parent correctly.""" + build_suffix_references(db) + # Check a known suffix pair + row = db.execute(""" + SELECT r.quelle_id, r.ziel_id, + q.aktenzeichen as q_az, z.aktenzeichen as z_az + FROM referenzen r + JOIN vorlagen q ON r.quelle_id = q.id + JOIN vorlagen z ON r.ziel_id = z.id + WHERE r.typ = 'suffix' + LIMIT 5 + """).fetchall() + + for r in row: + # The quelle (child) should have a suffix, the ziel (parent) should not or have lower suffix + assert r["q_az"] is not None + + +class TestChainBuilder: + def test_build_chains(self, db): + """Should create ketten for Anträge and Anfragen.""" + build_suffix_references(db) + count = build_chains(db) + assert count > 0, "No chains built" + + # Verify chains exist in DB + total = db.execute("SELECT COUNT(*) as cnt FROM ketten").fetchone()["cnt"] + assert total > 0 + + def test_chain_has_glieder(self, db): + """Each chain should have at least one Glied (the Ursprung).""" + build_suffix_references(db) + build_chains(db) + + orphans = db.execute(""" + SELECT k.id FROM ketten k + LEFT JOIN ketten_glieder kg ON k.id = kg.kette_id + WHERE kg.kette_id IS NULL + """).fetchall() + + assert len(orphans) == 0, f"Found {len(orphans)} chains without Glieder" + + def test_chain_status_is_set(self, db): + """Every chain should have a status.""" + build_suffix_references(db) + build_chains(db) + + no_status = db.execute( + "SELECT COUNT(*) as cnt FROM ketten WHERE status IS NULL" + ).fetchone()["cnt"] + + assert no_status == 0, f"Found {no_status} chains without status" + + def test_chain_types(self, db): + """Chains should only be antrag or anfrage.""" + build_suffix_references(db) + build_chains(db) + + types = db.execute( + "SELECT DISTINCT typ FROM ketten" + ).fetchall() + type_names = {r["typ"] for r in types} + assert type_names <= {"antrag", "anfrage"}, f"Unexpected chain types: {type_names}" diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..8f7b9b7 --- /dev/null +++ b/docker-compose.yml 
@@ -0,0 +1,23 @@ +version: '3.8' + +services: + antragstracker: + build: . + container_name: antragstracker-hagen + restart: unless-stopped + volumes: + - ./data:/app/data + environment: + - DATABASE_PATH=/app/data/tracker.db + networks: + - collaboration_collaboration + labels: + - "traefik.enable=true" + - "traefik.http.routers.antragstracker.rule=Host(`antraege.toppyr.de`)" + - "traefik.http.routers.antragstracker.entrypoints=websecure" + - "traefik.http.routers.antragstracker.tls.certresolver=letsencrypt" + - "traefik.http.services.antragstracker.loadbalancer.server.port=8000" + +networks: + collaboration_collaboration: + external: true diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000..3b462cb --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,23 @@ +node_modules + +# Output +.output +.vercel +.netlify +.wrangler +/.svelte-kit +/build + +# OS +.DS_Store +Thumbs.db + +# Env +.env +.env.* +!.env.example +!.env.test + +# Vite +vite.config.js.timestamp-* +vite.config.ts.timestamp-* diff --git a/frontend/.npmrc b/frontend/.npmrc new file mode 100644 index 0000000..b6f27f1 --- /dev/null +++ b/frontend/.npmrc @@ -0,0 +1 @@ +engine-strict=true diff --git a/frontend/.vscode/extensions.json b/frontend/.vscode/extensions.json new file mode 100644 index 0000000..28d1e67 --- /dev/null +++ b/frontend/.vscode/extensions.json @@ -0,0 +1,3 @@ +{ + "recommendations": ["svelte.svelte-vscode"] +} diff --git a/frontend/README.md b/frontend/README.md new file mode 100644 index 0000000..b8d5d6f --- /dev/null +++ b/frontend/README.md @@ -0,0 +1,42 @@ +# sv + +Everything you need to build a Svelte project, powered by [`sv`](https://github.com/sveltejs/cli). + +## Creating a project + +If you're seeing this, you've probably already done this step. Congrats! 
+ +```sh +# create a new project +npx sv create my-app +``` + +To recreate this project with the same configuration: + +```sh +# recreate this project +npx sv@0.13.0 create --template minimal --types ts --install npm frontend +``` + +## Developing + +Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server: + +```sh +npm run dev + +# or start the server and open the app in a new browser tab +npm run dev -- --open +``` + +## Building + +To create a production version of your app: + +```sh +npm run build +``` + +You can preview the production build with `npm run preview`. + +> To deploy your app, you may need to install an [adapter](https://svelte.dev/docs/kit/adapters) for your target environment. diff --git a/frontend/package-lock.json b/frontend/package-lock.json new file mode 100644 index 0000000..e3cb369 --- /dev/null +++ b/frontend/package-lock.json @@ -0,0 +1,2267 @@ +{ + "name": "frontend", + "version": "0.0.1", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "frontend", + "version": "0.0.1", + "dependencies": { + "@types/leaflet": "^1.9.21", + "leaflet": "^1.9.4" + }, + "devDependencies": { + "@sveltejs/adapter-auto": "^7.0.0", + "@sveltejs/adapter-static": "^3.0.10", + "@sveltejs/kit": "^2.50.2", + "@sveltejs/vite-plugin-svelte": "^6.2.4", + "@tailwindcss/vite": "^4.2.2", + "svelte": "^5.54.0", + "svelte-check": "^4.4.2", + "tailwindcss": "^4.2.2", + "typescript": "^5.9.3", + "vite": "^7.3.1" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.4.tgz", + "integrity": "sha512-cQPwL2mp2nSmHHJlCyoXgHGhbEPMrEEU5xhkcy3Hs/O7nGZqEpZ2sUtLaL9MORLtDfRvVl2/3PAuEkYZH0Ty8Q==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + 
"version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.4.tgz", + "integrity": "sha512-X9bUgvxiC8CHAGKYufLIHGXPJWnr0OCdR0anD2e21vdvgCI8lIfqFbnoeOz7lBjdrAGUhqLZLcQo6MLhTO2DKQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.4.tgz", + "integrity": "sha512-gdLscB7v75wRfu7QSm/zg6Rx29VLdy9eTr2t44sfTW7CxwAtQghZ4ZnqHk3/ogz7xao0QAgrkradbBzcqFPasw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.4.tgz", + "integrity": "sha512-PzPFnBNVF292sfpfhiyiXCGSn9HZg5BcAz+ivBuSsl6Rk4ga1oEXAamhOXRFyMcjwr2DVtm40G65N3GLeH1Lvw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.4.tgz", + "integrity": "sha512-b7xaGIwdJlht8ZFCvMkpDN6uiSmnxxK56N2GDTMYPr2/gzvfdQN8rTfBsvVKmIVY/X7EM+/hJKEIbbHs9oA4tQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.4.tgz", + "integrity": "sha512-sR+OiKLwd15nmCdqpXMnuJ9W2kpy0KigzqScqHI3Hqwr7IXxBp3Yva+yJwoqh7rE8V77tdoheRYataNKL4QrPw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], 
+ "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.4.tgz", + "integrity": "sha512-jnfpKe+p79tCnm4GVav68A7tUFeKQwQyLgESwEAUzyxk/TJr4QdGog9sqWNcUbr/bZt/O/HXouspuQDd9JxFSw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.4.tgz", + "integrity": "sha512-2kb4ceA/CpfUrIcTUl1wrP/9ad9Atrp5J94Lq69w7UwOMolPIGrfLSvAKJp0RTvkPPyn6CIWrNy13kyLikZRZQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.4.tgz", + "integrity": "sha512-aBYgcIxX/wd5n2ys0yESGeYMGF+pv6g0DhZr3G1ZG4jMfruU9Tl1i2Z+Wnj9/KjGz1lTLCcorqE2viePZqj4Eg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.4.tgz", + "integrity": "sha512-7nQOttdzVGth1iz57kxg9uCz57dxQLHWxopL6mYuYthohPKEK0vU0C3O21CcBK6KDlkYVcnDXY099HcCDXd9dA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.4.tgz", + "integrity": "sha512-oPtixtAIzgvzYcKBQM/qZ3R+9TEUd1aNJQu0HhGyqtx6oS7qTpvjheIWBbes4+qu1bNlo2V4cbkISr8q6gRBFA==", + "cpu": [ + "ia32" + ], + "dev": 
true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.4.tgz", + "integrity": "sha512-8mL/vh8qeCoRcFH2nM8wm5uJP+ZcVYGGayMavi8GmRJjuI3g1v6Z7Ni0JJKAJW+m0EtUuARb6Lmp4hMjzCBWzA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.4.tgz", + "integrity": "sha512-1RdrWFFiiLIW7LQq9Q2NES+HiD4NyT8Itj9AUeCl0IVCA459WnPhREKgwrpaIfTOe+/2rdntisegiPWn/r/aAw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.4.tgz", + "integrity": "sha512-tLCwNG47l3sd9lpfyx9LAGEGItCUeRCWeAx6x2Jmbav65nAwoPXfewtAdtbtit/pJFLUWOhpv0FpS6GQAmPrHA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.4.tgz", + "integrity": "sha512-BnASypppbUWyqjd1KIpU4AUBiIhVr6YlHx/cnPgqEkNoVOhHg+YiSVxM1RLfiy4t9cAulbRGTNCKOcqHrEQLIw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.4.tgz", + "integrity": 
"sha512-+eUqgb/Z7vxVLezG8bVB9SfBie89gMueS+I0xYh2tJdw3vqA/0ImZJ2ROeWwVJN59ihBeZ7Tu92dF/5dy5FttA==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.4.tgz", + "integrity": "sha512-S5qOXrKV8BQEzJPVxAwnryi2+Iq5pB40gTEIT69BQONqR7JH1EPIcQ/Uiv9mCnn05jff9umq/5nqzxlqTOg9NA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.4.tgz", + "integrity": "sha512-xHT8X4sb0GS8qTqiwzHqpY00C95DPAq7nAwX35Ie/s+LO9830hrMd3oX0ZMKLvy7vsonee73x0lmcdOVXFzd6Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.4.tgz", + "integrity": "sha512-RugOvOdXfdyi5Tyv40kgQnI0byv66BFgAqjdgtAKqHoZTbTF2QqfQrFwa7cHEORJf6X2ht+l9ABLMP0dnKYsgg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.4.tgz", + "integrity": "sha512-2MyL3IAaTX+1/qP0O1SwskwcwCoOI4kV2IBX1xYnDDqthmq5ArrW94qSIKCAuRraMgPOmG0RDTA74mzYNQA9ow==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.4", + "resolved": 
"https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.4.tgz", + "integrity": "sha512-u8fg/jQ5aQDfsnIV6+KwLOf1CmJnfu1ShpwqdwC0uA7ZPwFws55Ngc12vBdeUdnuWoQYx/SOQLGDcdlfXhYmXQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.4.tgz", + "integrity": "sha512-JkTZrl6VbyO8lDQO3yv26nNr2RM2yZzNrNHEsj9bm6dOwwu9OYN28CjzZkH57bh4w0I2F7IodpQvUAEd1mbWXg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.4.tgz", + "integrity": "sha512-/gOzgaewZJfeJTlsWhvUEmUG4tWEY2Spp5M20INYRg2ZKl9QPO3QEEgPeRtLjEWSW8FilRNacPOg8R1uaYkA6g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.4.tgz", + "integrity": "sha512-Z9SExBg2y32smoDQdf1HRwHRt6vAHLXcxD2uGgO/v2jK7Y718Ix4ndsbNMU/+1Qiem9OiOdaqitioZwxivhXYg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.4.tgz", + "integrity": "sha512-DAyGLS0Jz5G5iixEbMHi5KdiApqHBWMGzTtMiJ72ZOLhbu/bzxgAe8Ue8CTS3n3HbIUHQz/L51yMdGMeoxXNJw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" 
+ } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.4.tgz", + "integrity": "sha512-+knoa0BDoeXgkNvvV1vvbZX4+hizelrkwmGJBdT17t8FNPwG2lKemmuMZlmaNQ3ws3DKKCxpb4zRZEIp3UxFCg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + 
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@polka/url": { + "version": "1.0.0-next.29", + "resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.29.tgz", + "integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.60.0.tgz", + "integrity": "sha512-WOhNW9K8bR3kf4zLxbfg6Pxu2ybOUbB2AjMDHSQx86LIF4rH4Ft7vmMwNt0loO0eonglSNy4cpD3MKXXKQu0/A==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.60.0.tgz", + "integrity": "sha512-u6JHLll5QKRvjciE78bQXDmqRqNs5M/3GVqZeMwvmjaNODJih/WIrJlFVEihvV0MiYFmd+ZyPr9wxOVbPAG2Iw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.60.0.tgz", + "integrity": "sha512-qEF7CsKKzSRc20Ciu2Zw1wRrBz4g56F7r/vRwY430UPp/nt1x21Q/fpJ9N5l47WWvJlkNCPJz3QRVw008fi7yA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.60.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.60.0.tgz", + "integrity": "sha512-WADYozJ4QCnXCH4wPB+3FuGmDPoFseVCUrANmA5LWwGmC6FL14BWC7pcq+FstOZv3baGX65tZ378uT6WG8ynTw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.60.0.tgz", + "integrity": "sha512-6b8wGHJlDrGeSE3aH5mGNHBjA0TTkxdoNHik5EkvPHCt351XnigA4pS7Wsj/Eo9Y8RBU6f35cjN9SYmCFBtzxw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.60.0.tgz", + "integrity": "sha512-h25Ga0t4jaylMB8M/JKAyrvvfxGRjnPQIR8lnCayyzEjEOx2EJIlIiMbhpWxDRKGKF8jbNH01NnN663dH638mA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.60.0.tgz", + "integrity": "sha512-RzeBwv0B3qtVBWtcuABtSuCzToo2IEAIQrcyB/b2zMvBWVbjo8bZDjACUpnaafaxhTw2W+imQbP2BD1usasK4g==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.60.0.tgz", + "integrity": "sha512-Sf7zusNI2CIU1HLzuu9Tc5YGAHEZs5Lu7N1ssJG4Tkw6e0MEsN7NdjUDDfGNHy2IU+ENyWT+L2obgWiguWibWQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.60.0.tgz", + "integrity": "sha512-DX2x7CMcrJzsE91q7/O02IJQ5/aLkVtYFryqCjduJhUfGKG6yJV8hxaw8pZa93lLEpPTP/ohdN4wFz7yp/ry9A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.60.0.tgz", + "integrity": "sha512-09EL+yFVbJZlhcQfShpswwRZ0Rg+z/CsSELFCnPt3iK+iqwGsI4zht3secj5vLEs957QvFFXnzAT0FFPIxSrkQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.60.0.tgz", + "integrity": "sha512-i9IcCMPr3EXm8EQg5jnja0Zyc1iFxJjZWlb4wr7U2Wx/GrddOuEafxRdMPRYVaXjgbhvqalp6np07hN1w9kAKw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.60.0.tgz", + "integrity": "sha512-DGzdJK9kyJ+B78MCkWeGnpXJ91tK/iKA6HwHxF4TAlPIY7GXEvMe8hBFRgdrR9Ly4qebR/7gfUs9y2IoaVEyog==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.60.0.tgz", + "integrity": "sha512-RwpnLsqC8qbS8z1H1AxBA1H6qknR4YpPR9w2XX0vo2Sz10miu57PkNcnHVaZkbqyw/kUWfKMI73jhmfi9BRMUQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + 
"license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.60.0.tgz", + "integrity": "sha512-Z8pPf54Ly3aqtdWC3G4rFigZgNvd+qJlOE52fmko3KST9SoGfAdSRCwyoyG05q1HrrAblLbk1/PSIV+80/pxLg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.60.0.tgz", + "integrity": "sha512-3a3qQustp3COCGvnP4SvrMHnPQ9d1vzCakQVRTliaz8cIp/wULGjiGpbcqrkv0WrHTEp8bQD/B3HBjzujVWLOA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.60.0.tgz", + "integrity": "sha512-pjZDsVH/1VsghMJ2/kAaxt6dL0psT6ZexQVrijczOf+PeP2BUqTHYejk3l6TlPRydggINOeNRhvpLa0AYpCWSQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.60.0.tgz", + "integrity": "sha512-3ObQs0BhvPgiUVZrN7gqCSvmFuMWvWvsjG5ayJ3Lraqv+2KhOsp+pUbigqbeWqueGIsnn+09HBw27rJ+gYK4VQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.60.0.tgz", + "integrity": 
"sha512-EtylprDtQPdS5rXvAayrNDYoJhIz1/vzN2fEubo3yLE7tfAw+948dO0g4M0vkTVFhKojnF+n6C8bDNe+gDRdTg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.60.0.tgz", + "integrity": "sha512-k09oiRCi/bHU9UVFqD17r3eJR9bn03TyKraCrlz5ULFJGdJGi7VOmm9jl44vOJvRJ6P7WuBi/s2A97LxxHGIdw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.60.0.tgz", + "integrity": "sha512-1o/0/pIhozoSaDJoDcec+IVLbnRtQmHwPV730+AOD29lHEEo4F5BEUB24H0OBdhbBBDwIOSuf7vgg0Ywxdfiiw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.60.0.tgz", + "integrity": "sha512-pESDkos/PDzYwtyzB5p/UoNU/8fJo68vcXM9ZW2V0kjYayj1KaaUfi1NmTUTUpMn4UhU4gTuK8gIaFO4UGuMbA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.60.0.tgz", + "integrity": "sha512-hj1wFStD7B1YBeYmvY+lWXZ7ey73YGPcViMShYikqKT1GtstIKQAtfUI6yrzPjAy/O7pO0VLXGmUVWXQMaYgTQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.60.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.60.0.tgz", + "integrity": "sha512-SyaIPFoxmUPlNDq5EHkTbiKzmSEmq/gOYFI/3HHJ8iS/v1mbugVa7dXUzcJGQfoytp9DJFLhHH4U3/eTy2Bq4w==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.60.0.tgz", + "integrity": "sha512-RdcryEfzZr+lAr5kRm2ucN9aVlCCa2QNq4hXelZxb8GG0NJSazq44Z3PCCc8wISRuCVnGs0lQJVX5Vp6fKA+IA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.60.0.tgz", + "integrity": "sha512-PrsWNQ8BuE00O3Xsx3ALh2Df8fAj9+cvvX9AIA6o4KpATR98c9mud4XtDWVvsEuyia5U4tVSTKygawyJkjm60w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@standard-schema/spec": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@sveltejs/acorn-typescript": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@sveltejs/acorn-typescript/-/acorn-typescript-1.0.9.tgz", + "integrity": "sha512-lVJX6qEgs/4DOcRTpo56tmKzVPtoWAaVbL4hfO7t7NVwl9AAXzQR6cihesW1BmNMPl+bK6dreu2sOKBP2Q9CIA==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^8.9.0" + } + }, + "node_modules/@sveltejs/adapter-auto": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/@sveltejs/adapter-auto/-/adapter-auto-7.0.1.tgz", + "integrity": 
"sha512-dvuPm1E7M9NI/+canIQ6KKQDU2AkEefEZ2Dp7cY6uKoPq9Z/PhOXABe526UdW2mN986gjVkuSLkOYIBnS/M2LQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@sveltejs/kit": "^2.0.0" + } + }, + "node_modules/@sveltejs/adapter-static": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@sveltejs/adapter-static/-/adapter-static-3.0.10.tgz", + "integrity": "sha512-7D9lYFWJmB7zxZyTE/qxjksvMqzMuYrrsyh1f4AlZqeZeACPRySjbC3aFiY55wb1tWUaKOQG9PVbm74JcN2Iew==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@sveltejs/kit": "^2.0.0" + } + }, + "node_modules/@sveltejs/kit": { + "version": "2.55.0", + "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.55.0.tgz", + "integrity": "sha512-MdFRjevVxmAknf2NbaUkDF16jSIzXMWd4Nfah0Qp8TtQVoSp3bV4jKt8mX7z7qTUTWvgSaxtR0EG5WJf53gcuA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "@sveltejs/acorn-typescript": "^1.0.5", + "@types/cookie": "^0.6.0", + "acorn": "^8.14.1", + "cookie": "^0.6.0", + "devalue": "^5.6.4", + "esm-env": "^1.2.2", + "kleur": "^4.1.5", + "magic-string": "^0.30.5", + "mrmime": "^2.0.0", + "set-cookie-parser": "^3.0.0", + "sirv": "^3.0.0" + }, + "bin": { + "svelte-kit": "svelte-kit.js" + }, + "engines": { + "node": ">=18.13" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0", + "@sveltejs/vite-plugin-svelte": "^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0 || ^7.0.0", + "svelte": "^4.0.0 || ^5.0.0-next.0", + "typescript": "^5.3.3", + "vite": "^5.0.3 || ^6.0.0 || ^7.0.0-beta.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "@opentelemetry/api": { + "optional": true + }, + "typescript": { + "optional": true + } + } + }, + "node_modules/@sveltejs/vite-plugin-svelte": { + "version": "6.2.4", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-6.2.4.tgz", + "integrity": 
"sha512-ou/d51QSdTyN26D7h6dSpusAKaZkAiGM55/AKYi+9AGZw7q85hElbjK3kEyzXHhLSnRISHOYzVge6x0jRZ7DXA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@sveltejs/vite-plugin-svelte-inspector": "^5.0.0", + "deepmerge": "^4.3.1", + "magic-string": "^0.30.21", + "obug": "^2.1.0", + "vitefu": "^1.1.1" + }, + "engines": { + "node": "^20.19 || ^22.12 || >=24" + }, + "peerDependencies": { + "svelte": "^5.0.0", + "vite": "^6.3.0 || ^7.0.0" + } + }, + "node_modules/@sveltejs/vite-plugin-svelte-inspector": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-5.0.2.tgz", + "integrity": "sha512-TZzRTcEtZffICSAoZGkPSl6Etsj2torOVrx6Uw0KpXxrec9Gg6jFWQ60Q3+LmNGfZSxHRCZL7vXVZIWmuV50Ig==", + "dev": true, + "license": "MIT", + "dependencies": { + "obug": "^2.1.0" + }, + "engines": { + "node": "^20.19 || ^22.12 || >=24" + }, + "peerDependencies": { + "@sveltejs/vite-plugin-svelte": "^6.0.0-next.0", + "svelte": "^5.0.0", + "vite": "^6.3.0 || ^7.0.0" + } + }, + "node_modules/@tailwindcss/node": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.2.2.tgz", + "integrity": "sha512-pXS+wJ2gZpVXqFaUEjojq7jzMpTGf8rU6ipJz5ovJV6PUGmlJ+jvIwGrzdHdQ80Sg+wmQxUFuoW1UAAwHNEdFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/remapping": "^2.3.5", + "enhanced-resolve": "^5.19.0", + "jiti": "^2.6.1", + "lightningcss": "1.32.0", + "magic-string": "^0.30.21", + "source-map-js": "^1.2.1", + "tailwindcss": "4.2.2" + } + }, + "node_modules/@tailwindcss/oxide": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.2.2.tgz", + "integrity": "sha512-qEUA07+E5kehxYp9BVMpq9E8vnJuBHfJEC0vPC5e7iL/hw7HR61aDKoVoKzrG+QKp56vhNZe4qwkRmMC0zDLvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 20" + }, + "optionalDependencies": { + "@tailwindcss/oxide-android-arm64": "4.2.2", + 
"@tailwindcss/oxide-darwin-arm64": "4.2.2", + "@tailwindcss/oxide-darwin-x64": "4.2.2", + "@tailwindcss/oxide-freebsd-x64": "4.2.2", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.2.2", + "@tailwindcss/oxide-linux-arm64-gnu": "4.2.2", + "@tailwindcss/oxide-linux-arm64-musl": "4.2.2", + "@tailwindcss/oxide-linux-x64-gnu": "4.2.2", + "@tailwindcss/oxide-linux-x64-musl": "4.2.2", + "@tailwindcss/oxide-wasm32-wasi": "4.2.2", + "@tailwindcss/oxide-win32-arm64-msvc": "4.2.2", + "@tailwindcss/oxide-win32-x64-msvc": "4.2.2" + } + }, + "node_modules/@tailwindcss/oxide-android-arm64": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.2.2.tgz", + "integrity": "sha512-dXGR1n+P3B6748jZO/SvHZq7qBOqqzQ+yFrXpoOWWALWndF9MoSKAT3Q0fYgAzYzGhxNYOoysRvYlpixRBBoDg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-darwin-arm64": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.2.2.tgz", + "integrity": "sha512-iq9Qjr6knfMpZHj55/37ouZeykwbDqF21gPFtfnhCCKGDcPI/21FKC9XdMO/XyBM7qKORx6UIhGgg6jLl7BZlg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-darwin-x64": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.2.2.tgz", + "integrity": "sha512-BlR+2c3nzc8f2G639LpL89YY4bdcIdUmiOOkv2GQv4/4M0vJlpXEa0JXNHhCHU7VWOKWT/CjqHdTP8aUuDJkuw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-freebsd-x64": { + "version": "4.2.2", + "resolved": 
"https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.2.2.tgz", + "integrity": "sha512-YUqUgrGMSu2CDO82hzlQ5qSb5xmx3RUrke/QgnoEx7KvmRJHQuZHZmZTLSuuHwFf0DJPybFMXMYf+WJdxHy/nQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.2.2.tgz", + "integrity": "sha512-FPdhvsW6g06T9BWT0qTwiVZYE2WIFo2dY5aCSpjG/S/u1tby+wXoslXS0kl3/KXnULlLr1E3NPRRw0g7t2kgaQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.2.2.tgz", + "integrity": "sha512-4og1V+ftEPXGttOO7eCmW7VICmzzJWgMx+QXAJRAhjrSjumCwWqMfkDrNu1LXEQzNAwz28NCUpucgQPrR4S2yw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-musl": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.2.2.tgz", + "integrity": "sha512-oCfG/mS+/+XRlwNjnsNLVwnMWYH7tn/kYPsNPh+JSOMlnt93mYNCKHYzylRhI51X+TbR+ufNhhKKzm6QkqX8ag==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-gnu": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.2.2.tgz", + "integrity": 
"sha512-rTAGAkDgqbXHNp/xW0iugLVmX62wOp2PoE39BTCGKjv3Iocf6AFbRP/wZT/kuCxC9QBh9Pu8XPkv/zCZB2mcMg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-musl": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.2.2.tgz", + "integrity": "sha512-XW3t3qwbIwiSyRCggeO2zxe3KWaEbM0/kW9e8+0XpBgyKU4ATYzcVSMKteZJ1iukJ3HgHBjbg9P5YPRCVUxlnQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.2.2.tgz", + "integrity": "sha512-eKSztKsmEsn1O5lJ4ZAfyn41NfG7vzCg496YiGtMDV86jz1q/irhms5O0VrY6ZwTUkFy/EKG3RfWgxSI3VbZ8Q==", + "bundleDependencies": [ + "@napi-rs/wasm-runtime", + "@emnapi/core", + "@emnapi/runtime", + "@tybys/wasm-util", + "@emnapi/wasi-threads", + "tslib" + ], + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.8.1", + "@emnapi/runtime": "^1.8.1", + "@emnapi/wasi-threads": "^1.1.0", + "@napi-rs/wasm-runtime": "^1.1.1", + "@tybys/wasm-util": "^0.10.1", + "tslib": "^2.8.1" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.2.2.tgz", + "integrity": "sha512-qPmaQM4iKu5mxpsrWZMOZRgZv1tOZpUm+zdhhQP0VhJfyGGO3aUKdbh3gDZc/dPLQwW4eSqWGrrcWNBZWUWaXQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/oxide-win32-x64-msvc": { + "version": 
"4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.2.2.tgz", + "integrity": "sha512-1T/37VvI7WyH66b+vqHj/cLwnCxt7Qt3WFu5Q8hk65aOvlwAhs7rAp1VkulBJw/N4tMirXjVnylTR72uI0HGcA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 20" + } + }, + "node_modules/@tailwindcss/vite": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.2.2.tgz", + "integrity": "sha512-mEiF5HO1QqCLXoNEfXVA1Tzo+cYsrqV7w9Juj2wdUFyW07JRenqMG225MvPwr3ZD9N1bFQj46X7r33iHxLUW0w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@tailwindcss/node": "4.2.2", + "@tailwindcss/oxide": "4.2.2", + "tailwindcss": "4.2.2" + }, + "peerDependencies": { + "vite": "^5.2.0 || ^6 || ^7 || ^8" + } + }, + "node_modules/@types/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/geojson": { + "version": "7946.0.16", + "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", + "integrity": "sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==", + "license": "MIT" + }, + "node_modules/@types/leaflet": { + "version": "1.9.21", + "resolved": "https://registry.npmjs.org/@types/leaflet/-/leaflet-1.9.21.tgz", + "integrity": "sha512-TbAd9DaPGSnzp6QvtYngntMZgcRk+igFELwR2N99XZn7RXUdKgsXMR+28bUO0rPsWp8MIu/f47luLIQuSLYv/w==", + "license": "MIT", + "dependencies": { 
+ "@types/geojson": "*" + } + }, + "node_modules/@types/trusted-types": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@typescript-eslint/types": { + "version": "8.57.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.57.2.tgz", + "integrity": "sha512-/iZM6FnM4tnx9csuTxspMW4BOSegshwX5oBDznJ7S4WggL7Vczz5d2W11ecc4vRrQMQHXRSxzrCsyG5EsPPTbA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/acorn": { + "version": "8.16.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz", + "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", + "dev": true, + "license": "MIT", + "peer": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/aria-query": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.1.tgz", + "integrity": "sha512-Z/ZeOgVl7bcSYZ/u/rh0fOpvEpq//LZmdbkXyc7syVzjPAhfOa9ebsdTSjEBDU4vs5nC98Kfduj1uFo0qyET3g==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/axobject-query": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz", + "integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/chokidar": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": 
"sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "readdirp": "^4.0.1" + }, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/devalue": { + "version": "5.6.4", + "resolved": "https://registry.npmjs.org/devalue/-/devalue-5.6.4.tgz", + "integrity": "sha512-Gp6rDldRsFh/7XuouDbxMH3Mx8GMCcgzIb1pDTvNyn8pZGQ22u+Wa+lGV9dQCltFQ7uVw0MhRyb8XDskNFOReA==", + "dev": true, + "license": "MIT" + }, + "node_modules/enhanced-resolve": { + "version": "5.20.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.20.1.tgz", + "integrity": 
"sha512-Qohcme7V1inbAfvjItgw0EaxVX5q2rdVEZHRBrEQdRZTssLDGsL8Lwrznl8oQ/6kuTJONLaDcGjkNP247XEhcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.3.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/esbuild": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.4.tgz", + "integrity": "sha512-Rq4vbHnYkK5fws5NF7MYTU68FPRE1ajX7heQ/8QXXWqNgqqJ/GkmmyxIzUnf2Sr/bakf8l54716CcMGHYhMrrQ==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.4", + "@esbuild/android-arm": "0.27.4", + "@esbuild/android-arm64": "0.27.4", + "@esbuild/android-x64": "0.27.4", + "@esbuild/darwin-arm64": "0.27.4", + "@esbuild/darwin-x64": "0.27.4", + "@esbuild/freebsd-arm64": "0.27.4", + "@esbuild/freebsd-x64": "0.27.4", + "@esbuild/linux-arm": "0.27.4", + "@esbuild/linux-arm64": "0.27.4", + "@esbuild/linux-ia32": "0.27.4", + "@esbuild/linux-loong64": "0.27.4", + "@esbuild/linux-mips64el": "0.27.4", + "@esbuild/linux-ppc64": "0.27.4", + "@esbuild/linux-riscv64": "0.27.4", + "@esbuild/linux-s390x": "0.27.4", + "@esbuild/linux-x64": "0.27.4", + "@esbuild/netbsd-arm64": "0.27.4", + "@esbuild/netbsd-x64": "0.27.4", + "@esbuild/openbsd-arm64": "0.27.4", + "@esbuild/openbsd-x64": "0.27.4", + "@esbuild/openharmony-arm64": "0.27.4", + "@esbuild/sunos-x64": "0.27.4", + "@esbuild/win32-arm64": "0.27.4", + "@esbuild/win32-ia32": "0.27.4", + "@esbuild/win32-x64": "0.27.4" + } + }, + "node_modules/esm-env": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/esm-env/-/esm-env-1.2.2.tgz", + "integrity": "sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/esrap": { + "version": "2.2.4", + "resolved": 
"https://registry.npmjs.org/esrap/-/esrap-2.2.4.tgz", + "integrity": "sha512-suICpxAmZ9A8bzJjEl/+rLJiDKC0X4gYWUxT6URAWBLvlXmtbZd5ySMu/N2ZGEtMCAmflUDPSehrP9BQcsGcSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.15", + "@typescript-eslint/types": "^8.2.0" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/is-reference": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.3.tgz", + "integrity": "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.6" + } + }, + "node_modules/jiti": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", + "integrity": 
"sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/kleur": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/leaflet": { + "version": "1.9.4", + "resolved": "https://registry.npmjs.org/leaflet/-/leaflet-1.9.4.tgz", + "integrity": "sha512-nxS1ynzJOmOlHp+iL3FyWqK89GtNL8U8rvlMOsQdTTssxZwCXh8N2NB3GDQOL+YR3XnWyZAxwQixURb+FA74PA==", + "license": "BSD-2-Clause" + }, + "node_modules/lightningcss": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.32.0.tgz", + "integrity": "sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==", + "dev": true, + "license": "MPL-2.0", + "dependencies": { + "detect-libc": "^2.0.3" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "lightningcss-android-arm64": "1.32.0", + "lightningcss-darwin-arm64": "1.32.0", + "lightningcss-darwin-x64": "1.32.0", + "lightningcss-freebsd-x64": "1.32.0", + "lightningcss-linux-arm-gnueabihf": "1.32.0", + "lightningcss-linux-arm64-gnu": "1.32.0", + "lightningcss-linux-arm64-musl": "1.32.0", + "lightningcss-linux-x64-gnu": "1.32.0", + "lightningcss-linux-x64-musl": "1.32.0", + "lightningcss-win32-arm64-msvc": "1.32.0", + "lightningcss-win32-x64-msvc": "1.32.0" + } + }, + "node_modules/lightningcss-android-arm64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.32.0.tgz", + "integrity": 
"sha512-YK7/ClTt4kAK0vo6w3X+Pnm0D2cf2vPHbhOXdoNti1Ga0al1P4TBZhwjATvjNwLEBCnKvjJc2jQgHXH0NEwlAg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-arm64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.32.0.tgz", + "integrity": "sha512-RzeG9Ju5bag2Bv1/lwlVJvBE3q6TtXskdZLLCyfg5pt+HLz9BqlICO7LZM7VHNTTn/5PRhHFBSjk5lc4cmscPQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-x64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.32.0.tgz", + "integrity": "sha512-U+QsBp2m/s2wqpUYT/6wnlagdZbtZdndSmut/NJqlCcMLTWp5muCrID+K5UJ6jqD2BFshejCYXniPDbNh73V8w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-freebsd-x64": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.32.0.tgz", + "integrity": "sha512-JCTigedEksZk3tHTTthnMdVfGf61Fky8Ji2E4YjUTEQX14xiy/lTzXnu1vwiZe3bYe0q+SpsSH/CTeDXK6WHig==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + 
"node_modules/lightningcss-linux-arm-gnueabihf": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.32.0.tgz", + "integrity": "sha512-x6rnnpRa2GL0zQOkt6rts3YDPzduLpWvwAF6EMhXFVZXD4tPrBkEFqzGowzCsIWsPjqSK+tyNEODUBXeeVHSkw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-gnu": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.32.0.tgz", + "integrity": "sha512-0nnMyoyOLRJXfbMOilaSRcLH3Jw5z9HDNGfT/gwCPgaDjnx0i8w7vBzFLFR1f6CMLKF8gVbebmkUN3fa/kQJpQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-musl": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.32.0.tgz", + "integrity": "sha512-UpQkoenr4UJEzgVIYpI80lDFvRmPVg6oqboNHfoH4CQIfNA+HOrZ7Mo7KZP02dC6LjghPQJeBsvXhJod/wnIBg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-gnu": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.32.0.tgz", + "integrity": "sha512-V7Qr52IhZmdKPVr+Vtw8o+WLsQJYCTd8loIfpDaMRWGUZfBOYEJeyJIkqGIDMZPwPx24pUMfwSxxI8phr/MbOA==", + "cpu": [ + "x64" + ], + "dev": true, + 
"license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-musl": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.32.0.tgz", + "integrity": "sha512-bYcLp+Vb0awsiXg/80uCRezCYHNg1/l3mt0gzHnWV9XP1W5sKa5/TCdGWaR/zBM2PeF/HbsQv/j2URNOiVuxWg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-arm64-msvc": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.32.0.tgz", + "integrity": "sha512-8SbC8BR40pS6baCM8sbtYDSwEVQd4JlFTOlaD3gWGHfThTcABnNDBda6eTZeqbofalIJhFx0qKzgHJmcPTnGdw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-x64-msvc": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.32.0.tgz", + "integrity": "sha512-Amq9B/SoZYdDi1kFrojnoqPLxYhQ4Wo5XiL8EVJrVsB8ARoC1PWW6VGtT0WKCemjy8aC+louJnjS7U18x3b06Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/locate-character": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-character/-/locate-character-3.0.0.tgz", + 
"integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==", + "dev": true, + "license": "MIT" + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/mri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", + "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/mrmime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz", + "integrity": "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/obug": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/obug/-/obug-2.1.1.tgz", + "integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/sxzz", + "https://opencollective.com/debug" + ], + "license": "MIT" + }, + "node_modules/picocolors": { + 
"version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.5.8", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz", + "integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/readdirp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14.18.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/rollup": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.60.0.tgz", + "integrity": "sha512-yqjxruMGBQJ2gG4HtjZtAfXArHomazDHoFwFFmZZl0r7Pdo7qCIXKqKHZc8yeoMgzJJ+pO6pEEHa+V7uzWlrAQ==", + 
"dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.60.0", + "@rollup/rollup-android-arm64": "4.60.0", + "@rollup/rollup-darwin-arm64": "4.60.0", + "@rollup/rollup-darwin-x64": "4.60.0", + "@rollup/rollup-freebsd-arm64": "4.60.0", + "@rollup/rollup-freebsd-x64": "4.60.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.60.0", + "@rollup/rollup-linux-arm-musleabihf": "4.60.0", + "@rollup/rollup-linux-arm64-gnu": "4.60.0", + "@rollup/rollup-linux-arm64-musl": "4.60.0", + "@rollup/rollup-linux-loong64-gnu": "4.60.0", + "@rollup/rollup-linux-loong64-musl": "4.60.0", + "@rollup/rollup-linux-ppc64-gnu": "4.60.0", + "@rollup/rollup-linux-ppc64-musl": "4.60.0", + "@rollup/rollup-linux-riscv64-gnu": "4.60.0", + "@rollup/rollup-linux-riscv64-musl": "4.60.0", + "@rollup/rollup-linux-s390x-gnu": "4.60.0", + "@rollup/rollup-linux-x64-gnu": "4.60.0", + "@rollup/rollup-linux-x64-musl": "4.60.0", + "@rollup/rollup-openbsd-x64": "4.60.0", + "@rollup/rollup-openharmony-arm64": "4.60.0", + "@rollup/rollup-win32-arm64-msvc": "4.60.0", + "@rollup/rollup-win32-ia32-msvc": "4.60.0", + "@rollup/rollup-win32-x64-gnu": "4.60.0", + "@rollup/rollup-win32-x64-msvc": "4.60.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/sade": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz", + "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==", + "dev": true, + "license": "MIT", + "dependencies": { + "mri": "^1.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/set-cookie-parser": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-3.1.0.tgz", + "integrity": 
"sha512-kjnC1DXBHcxaOaOXBHBeRtltsDG2nUiUni+jP92M9gYdW12rsmx92UsfpH7o5tDRs7I1ZZPSQJQGv3UaRfCiuw==", + "dev": true, + "license": "MIT" + }, + "node_modules/sirv": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.2.tgz", + "integrity": "sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@polka/url": "^1.0.0-next.24", + "mrmime": "^2.0.0", + "totalist": "^3.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/svelte": { + "version": "5.55.0", + "resolved": "https://registry.npmjs.org/svelte/-/svelte-5.55.0.tgz", + "integrity": "sha512-SThllKq6TRMBwPtat7ASnm/9CDXnIhBR0NPGw0ujn2DVYx9rVwsPZxDaDQcYGdUz/3BYVsCzdq7pZarRQoGvtw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@jridgewell/remapping": "^2.3.4", + "@jridgewell/sourcemap-codec": "^1.5.0", + "@sveltejs/acorn-typescript": "^1.0.5", + "@types/estree": "^1.0.5", + "@types/trusted-types": "^2.0.7", + "acorn": "^8.12.1", + "aria-query": "5.3.1", + "axobject-query": "^4.1.0", + "clsx": "^2.1.1", + "devalue": "^5.6.4", + "esm-env": "^1.2.1", + "esrap": "^2.2.2", + "is-reference": "^3.0.3", + "locate-character": "^3.0.0", + "magic-string": "^0.30.11", + "zimmerframe": "^1.1.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/svelte-check": { + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/svelte-check/-/svelte-check-4.4.5.tgz", + "integrity": "sha512-1bSwIRCvvmSHrlK52fOlZmVtUZgil43jNL/2H18pRpa+eQjzGt6e3zayxhp1S7GajPFKNM/2PMCG+DZFHlG9fw==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "chokidar": "^4.0.1", + "fdir": "^6.2.0", + "picocolors": "^1.0.0", + "sade": "^1.7.4" + }, + "bin": { + "svelte-check": "bin/svelte-check" + }, + "engines": { + "node": ">= 18.0.0" + }, + "peerDependencies": { + "svelte": "^4.0.0 || ^5.0.0-next.0", + "typescript": ">=5.0.0" + } + }, + "node_modules/tailwindcss": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.2.2.tgz", + "integrity": "sha512-KWBIxs1Xb6NoLdMVqhbhgwZf2PGBpPEiwOqgI4pFIYbNTfBXiKYyWoTsXgBQ9WFg/OlhnvHaY+AEpW7wSmFo2Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/tapable": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.2.tgz", + "integrity": "sha512-1MOpMXuhGzGL5TTCZFItxCc0AARf1EZFQkGqMm7ERKj8+Hgr5oLvJOVFcC+lRmR8hCe2S3jC4T5D7Vg/d7/fhA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/totalist": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz", + "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": 
"sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "peer": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/vite": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz", + "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "esbuild": "^0.27.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vitefu": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vitefu/-/vitefu-1.1.2.tgz", + "integrity": "sha512-zpKATdUbzbsycPFBN71nS2uzBUQiVnFoOrr2rvqv34S1lcAgMKKkjWleLGeiJlZ8lwCXvtWaRn7R3ZC16SYRuw==", + "dev": true, + "license": "MIT", + "workspaces": [ + "tests/deps/*", 
+ "tests/projects/*", + "tests/projects/workspace/packages/*" + ], + "peerDependencies": { + "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-beta.0" + }, + "peerDependenciesMeta": { + "vite": { + "optional": true + } + } + }, + "node_modules/zimmerframe": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/zimmerframe/-/zimmerframe-1.1.4.tgz", + "integrity": "sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ==", + "dev": true, + "license": "MIT" + } + } +} diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..af2a33f --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,30 @@ +{ + "name": "frontend", + "private": true, + "version": "0.0.1", + "type": "module", + "scripts": { + "dev": "vite dev", + "build": "vite build", + "preview": "vite preview", + "prepare": "svelte-kit sync || echo ''", + "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", + "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch" + }, + "devDependencies": { + "@sveltejs/adapter-auto": "^7.0.0", + "@sveltejs/adapter-static": "^3.0.10", + "@sveltejs/kit": "^2.50.2", + "@sveltejs/vite-plugin-svelte": "^6.2.4", + "@tailwindcss/vite": "^4.2.2", + "svelte": "^5.54.0", + "svelte-check": "^4.4.2", + "tailwindcss": "^4.2.2", + "typescript": "^5.9.3", + "vite": "^7.3.1" + }, + "dependencies": { + "@types/leaflet": "^1.9.21", + "leaflet": "^1.9.4" + } +} diff --git a/frontend/src/app.css b/frontend/src/app.css new file mode 100644 index 0000000..f1d8c73 --- /dev/null +++ b/frontend/src/app.css @@ -0,0 +1 @@ +@import "tailwindcss"; diff --git a/frontend/src/app.d.ts b/frontend/src/app.d.ts new file mode 100644 index 0000000..da08e6d --- /dev/null +++ b/frontend/src/app.d.ts @@ -0,0 +1,13 @@ +// See https://svelte.dev/docs/kit/types#app.d.ts +// for information about these interfaces +declare global { + namespace App { + // interface Error {} + // 
interface Locals {} + // interface PageData {} + // interface PageState {} + // interface Platform {} + } +} + +export {}; diff --git a/frontend/src/app.html b/frontend/src/app.html new file mode 100644 index 0000000..fb58138 --- /dev/null +++ b/frontend/src/app.html @@ -0,0 +1,11 @@ + + + + + + %sveltekit.head% + + +
%sveltekit.body%
+ + diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts new file mode 100644 index 0000000..006040b --- /dev/null +++ b/frontend/src/lib/api.ts @@ -0,0 +1,169 @@ +// API-Base: In Produktion relativ, in Dev mit Port +const BASE = typeof window !== 'undefined' + ? (window.location.port === '5173' + ? `http://${window.location.hostname}:8099/api` // Dev + : '/api') // Produktion + : '/api'; + +async function get(path: string): Promise { + const res = await fetch(`${BASE}${path}`); + if (!res.ok) throw new Error(`API error: ${res.status}`); + return res.json(); +} + +export interface VorlageKurz { + id: number; + aktenzeichen: string | null; + typ: string | null; + betreff: string | null; + datum_eingang: string | null; + ist_verwaltungsvorlage: boolean; +} + +export interface ParteiOut { + id: number; + kuerzel: string; + name: string | null; + farbe: string | null; +} + +export interface GremiumOut { + id: number; + name: string; + kuerzel: string | null; + typ: string | null; +} + +export interface BeratungOut { + id: number; + gremium: GremiumOut | null; + sitzung_datum: string | null; + rolle: string | null; + ergebnis: string | null; + ergebnis_text: string | null; +} + +export interface ReferenzOut { + vorlage_id: number; + aktenzeichen: string | null; + betreff: string | null; + vorlage_typ: string | null; + datum_eingang: string | null; + ref_typ: string | null; + konfidenz: number | null; + kontext: string | null; +} + +export interface VorlageDetail extends VorlageKurz { + aktenzeichen_basis: string | null; + aktenzeichen_suffix: string | null; + volltext_clean: string | null; + pdf_url: string | null; + web_url: string | null; + thema_kurz: string | null; + antragsteller: ParteiOut[]; + beratungen: BeratungOut[]; + referenzen_ausgehend: ReferenzOut[]; + referenzen_eingehend: ReferenzOut[]; + kette_id: number | null; +} + +export interface KetteKurz { + id: number; + ursprung: VorlageKurz | null; + typ: string | null; + thema: string | null; + 
status: string | null; + status_seit: string | null; + letzte_aktivitaet: string | null; + vertagungen_count: number; + glieder_count: number; +} + +export interface KettenGliedOut { + vorlage: VorlageKurz; + position: number; + rolle: string | null; +} + +export interface KetteDetail { + id: number; + ursprung: VorlageKurz | null; + typ: string | null; + thema: string | null; + status: string | null; + status_seit: string | null; + letzte_aktivitaet: string | null; + vertagungen_count: number; + glieder: KettenGliedOut[]; + antragsteller: ParteiOut[]; + graph: { + nodes: GraphNode[]; + edges: GraphEdge[]; + } | null; +} + +export interface GraphNode { + id: number; + aktenzeichen: string | null; + typ: string | null; + betreff: string | null; + datum_eingang: string | null; + position?: number; + rolle?: string; + ist_verwaltungsvorlage?: boolean; + extern?: boolean; + beratungen?: { sitzung_datum: string; rolle: string; ergebnis: string; gremium_name: string }[]; + antragsteller?: { kuerzel: string; name: string; farbe: string }[]; +} + +export interface GraphEdge { + quelle_id: number; + ziel_id: number; + typ: string; + konfidenz: number; +} + +export interface Paginated { + items: T[]; + total: number; + page: number; + page_size: number; +} + +export interface Stats { + vorlagen_total: number; + beratungen_total: number; + ketten_total: number; + vorlagen_nach_typ: { typ: string; anzahl: number }[]; + ketten_nach_status: { status: string; anzahl: number }[]; + ketten_nach_typ: { typ: string; anzahl: number }[]; + letzte_vorlagen: VorlageKurz[]; + parteien: { kuerzel: string; name: string; farbe: string | null; anzahl: number }[]; + gremien: { name: string; kuerzel: string | null; typ: string | null; anzahl: number }[]; + timeline: { monat: string; anzahl: number }[]; +} + +export interface KettenStats { + nach_typ: Record; + status_detail: { status: string; anzahl: number; avg_tage: number; avg_vertagungen: number }[]; + versandungs_fruehwarnung: number; +} + 
+// API functions +export const fetchStats = () => get('/stats'); +export const fetchKettenStats = () => get('/stats/ketten-stats'); + +export const fetchVorlagen = (params: Record) => { + const qs = new URLSearchParams(params).toString(); + return get>(`/vorlagen?${qs}`); +}; + +export const fetchVorlage = (id: number) => get(`/vorlagen/${id}`); + +export const fetchKetten = (params: Record) => { + const qs = new URLSearchParams(params).toString(); + return get>(`/ketten?${qs}`); +}; + +export const fetchKette = (id: number) => get(`/ketten/${id}`); diff --git a/frontend/src/lib/assets/favicon.svg b/frontend/src/lib/assets/favicon.svg new file mode 100644 index 0000000..cc5dc66 --- /dev/null +++ b/frontend/src/lib/assets/favicon.svg @@ -0,0 +1 @@ +svelte-logo \ No newline at end of file diff --git a/frontend/src/lib/components/KpiCard.svelte b/frontend/src/lib/components/KpiCard.svelte new file mode 100644 index 0000000..43a2e2e --- /dev/null +++ b/frontend/src/lib/components/KpiCard.svelte @@ -0,0 +1,11 @@ + + +
+
{title}
+
{value.toLocaleString('de-DE')}
+ {#if subtitle} +
{subtitle}
+ {/if} +
diff --git a/frontend/src/lib/components/Perlenschnur.svelte b/frontend/src/lib/components/Perlenschnur.svelte new file mode 100644 index 0000000..88fad1f --- /dev/null +++ b/frontend/src/lib/components/Perlenschnur.svelte @@ -0,0 +1,55 @@ + + +
+ +
+ +
+ {#each glieder as glied, i} + {@const isFirst = i === 0} + {@const isLast = i === glieder.length - 1} + + {/each} +
+
diff --git a/frontend/src/lib/components/StatusBadge.svelte b/frontend/src/lib/components/StatusBadge.svelte new file mode 100644 index 0000000..23d91ac --- /dev/null +++ b/frontend/src/lib/components/StatusBadge.svelte @@ -0,0 +1,16 @@ + + +{#if linked && status} + + {info.emoji} {info.label} + +{:else} + + {info.emoji} {info.label} + +{/if} diff --git a/frontend/src/lib/index.ts b/frontend/src/lib/index.ts new file mode 100644 index 0000000..856f2b6 --- /dev/null +++ b/frontend/src/lib/index.ts @@ -0,0 +1 @@ +// place files you want to import through the `$lib` alias in this folder. diff --git a/frontend/src/lib/status.ts b/frontend/src/lib/status.ts new file mode 100644 index 0000000..2146304 --- /dev/null +++ b/frontend/src/lib/status.ts @@ -0,0 +1,43 @@ +export const STATUS_CONFIG: Record = { + angefragt: { emoji: '\ud83d\udcdd', label: 'Angefragt', color: 'bg-blue-100 text-blue-800' }, + beantwortet: { emoji: '\u2705', label: 'Beantwortet', color: 'bg-green-100 text-green-800' }, + offen: { emoji: '\u23f3', label: 'Offen', color: 'bg-yellow-100 text-yellow-800' }, + abgewiegelt: { emoji: '\u26a0\ufe0f', label: 'Abgewiegelt', color: 'bg-orange-100 text-orange-800' }, + versandet: { emoji: '\ud83d\udc80', label: 'Versandet', color: 'bg-red-100 text-red-800' }, + zur\u00fcckgezogen: { emoji: '\ud83d\udd19', label: 'Zur\u00fcckgezogen', color: 'bg-gray-100 text-gray-800' }, + eingereicht: { emoji: '\ud83d\udcdd', label: 'Eingereicht', color: 'bg-blue-100 text-blue-800' }, + in_beratung: { emoji: '\ud83d\udd04', label: 'In Beratung', color: 'bg-indigo-100 text-indigo-800' }, + vertagt: { emoji: '\u23f8\ufe0f', label: 'Vertagt', color: 'bg-amber-100 text-amber-800' }, + verwiesen: { emoji: '\u21aa\ufe0f', label: 'Verwiesen', color: 'bg-purple-100 text-purple-800' }, + beschlossen: { emoji: '\ud83d\udccb', label: 'Beschlossen', color: 'bg-teal-100 text-teal-800' }, + umgesetzt: { emoji: '\u2705', label: 'Umgesetzt', color: 'bg-green-100 text-green-800' }, + 
teilweise_umgesetzt: { emoji: '\ud83d\udd36', label: 'Teilw. umgesetzt', color: 'bg-lime-100 text-lime-800' }, + abgelehnt: { emoji: '\u274c', label: 'Abgelehnt', color: 'bg-red-100 text-red-800' }, + still_uebernommen: { emoji: '\ud83d\udd04\u2728', label: 'Still \u00fcbernommen', color: 'bg-pink-100 text-pink-800' }, +}; + +export function statusInfo(status: string | null) { + if (!status) return { emoji: '\u2753', label: 'Unbekannt', color: 'bg-gray-100 text-gray-600' }; + return STATUS_CONFIG[status] ?? { emoji: '\u2753', label: status, color: 'bg-gray-100 text-gray-600' }; +} + +export const TYP_LABELS: Record = { + antrag: 'Antrag', + anfrage: 'Anfrage', + stellungnahme: 'Stellungnahme', + bericht: 'Bericht', +}; + +export function typLabel(typ: string | null): string { + if (!typ) return 'Unbekannt'; + return TYP_LABELS[typ] ?? typ; +} + +export function formatDate(d: string | null): string { + if (!d) return '\u2013'; + try { + return new Date(d).toLocaleDateString('de-DE', { day: '2-digit', month: '2-digit', year: 'numeric' }); + } catch { + return d; + } +} diff --git a/frontend/src/routes/+layout.svelte b/frontend/src/routes/+layout.svelte new file mode 100644 index 0000000..e56a7bb --- /dev/null +++ b/frontend/src/routes/+layout.svelte @@ -0,0 +1,31 @@ + + +
+ + + + +
+ {@render children()} +
+
diff --git a/frontend/src/routes/+layout.ts b/frontend/src/routes/+layout.ts new file mode 100644 index 0000000..65a7356 --- /dev/null +++ b/frontend/src/routes/+layout.ts @@ -0,0 +1,3 @@ +// Enable client-side rendering for static adapter +export const prerender = false; +export const ssr = false; diff --git a/frontend/src/routes/+page.svelte b/frontend/src/routes/+page.svelte new file mode 100644 index 0000000..7c39185 --- /dev/null +++ b/frontend/src/routes/+page.svelte @@ -0,0 +1,145 @@ + + + + Antragstracker Hagen + + +
+ +
+
+

🏛️ Antragstracker Hagen

+

Kommunale Anträge & Anfragen nachverfolgen

+
+
+ +
+ +
+
+
{stats.vorlagen.toLocaleString()}
+
Vorlagen
+
+
+
{stats.ketten.toLocaleString()}
+
Ketten
+
+
+
41
+
Gremien
+
+
+
2004–2026
+
Zeitraum
+
+
+ + +
+
+

📋 Aktuelle Anträge

+
+ + {#if error} +
{error}
+ {:else if loading} +
Lade Daten... (API: {API_BASE})
+ {:else if antraege.length === 0} +
Keine Anträge gefunden
+ {:else} +
    + {#each antraege as antrag} +
  • +
    +
    +
    + + {antrag.aktenzeichen} + + {antrag.datum_eingang} +
    +

    {antrag.betreff}

    +
    + + ⏳ offen + +
    +
  • + {/each} +
+ {/if} +
+
+
diff --git a/frontend/src/routes/abstimmungen/+page.svelte b/frontend/src/routes/abstimmungen/+page.svelte new file mode 100644 index 0000000..eb9c0b3 --- /dev/null +++ b/frontend/src/routes/abstimmungen/+page.svelte @@ -0,0 +1,185 @@ + + + + Abstimmungen - Antragstracker Hagen + + +
+

Abstimmungsverhalten

+

Analyse des Stimmverhaltens der Ratsfraktionen

+
+ +{#if error} +
{error}
+{/if} + +{#if loading} +
+
+
+{:else} + +
+

📊 Stimmverhalten nach Fraktion

+ +
+ + + + + + + + + + + + + {#each fraktionen as f} + + + + + + + + + {/each} + +
FraktionJaNeinEnthaltungGesamtZustimmungsquote
{f.fraktion}{f.ja}{f.nein}{f.enthaltung}{f.gesamt} +
+
+
+
+ {f.ja_quote}% +
+
+
+
+ + +
+

🤝 Koalitionsmatrix

+

Wie oft stimmen Fraktionen gleich ab? (nur Ja/Nein-Stimmen)

+ + {#if koalitionsmatrix.length > 0} + {@const allFraktionen = koalitionsmatrix.map(r => r.fraktion).sort()} +
+ + + + + {#each allFraktionen as f} + + {/each} + + + + {#each koalitionsmatrix as row} + + + {#each allFraktionen as f2} + {#if row.fraktion === f2} + + {:else if row.uebereinstimmung[f2]} + {@const data = row.uebereinstimmung[f2]} + + {:else} + + {/if} + {/each} + + {/each} + +
+ {f} +
{row.fraktion} +
+ — +
+
+
+ {Math.round(data.quote)} +
+
+
+ - +
+
+
+ +
+ Legende: + 90-100% + 70-90% + 50-70% + 30-50% + <30% +
+ {:else} +

Noch keine Koalitionsdaten verfügbar.

+ {/if} +
+{/if} diff --git a/frontend/src/routes/karte/+page.svelte b/frontend/src/routes/karte/+page.svelte new file mode 100644 index 0000000..19141bd --- /dev/null +++ b/frontend/src/routes/karte/+page.svelte @@ -0,0 +1,178 @@ + + + + Karte - Antragstracker Hagen + + + +
+

📍 Anträge auf der Karte

+

Orte aus Anträgen und Anfragen in Hagen

+
+ +
+ +
+
+ {#if loading} +
+
+
+ {:else} +
+ {/if} +
+ +
+ {orte.length} Orte geocodiert • Marker-Größe = Anzahl Vorlagen +
+
+ + +
+ + {#if selectedOrt} +
+

{selectedOrt.name}

+

+ {selectedOrt.vorlage_count} Vorlage(n) betreffen diesen Ort +

+ + {#if selectedVorlagen.length > 0} + + {/if} +
+ {:else} +
+

Klicke auf einen Marker um die zugehörigen Vorlagen zu sehen.

+
+ {/if} + + +
+

🗺️ Alle Orte

+
    + {#each orte as ort} +
  • + +
  • + {/each} +
+
+
+
diff --git a/frontend/src/routes/ketten/+page.svelte b/frontend/src/routes/ketten/+page.svelte new file mode 100644 index 0000000..c87b40a --- /dev/null +++ b/frontend/src/routes/ketten/+page.svelte @@ -0,0 +1,174 @@ + + + + Ketten - Antragstracker Hagen + + +
+

Ketten

+

Zusammengehörige Vorlagen als Ketten nachverfolgen

+
+ + +
+
+
+ + { if (e.key === 'Enter') applyFilters(); }} /> +
+
+ + +
+
+ + +
+ +
+
+ +{#if error} +
{error}
+{:else if loading && !data} +
+
+
+{:else if data} +
{data.total} Ketten gefunden
+ +
+ + + + + + + + + + + + + {#each data.items as kette} + goto(`/ketten/${kette.id}`)}> + + + + + + + + {/each} + +
AktenzeichenThemaTypStatusGliederLetzte Akt.
+ + {kette.ursprung?.aktenzeichen || `#${kette.id}`} + + {kette.thema || '-'}{kette.typ || '-'}{kette.glieder_count}{formatDate(kette.letzte_aktivitaet)}
+
+ + + {#if data.total > data.page_size} + {@const totalPages = Math.ceil(data.total / data.page_size)} +
+ + Seite {currentPage} von {totalPages} + +
+ {/if} +{/if} diff --git a/frontend/src/routes/ketten/[id]/+page.svelte b/frontend/src/routes/ketten/[id]/+page.svelte new file mode 100644 index 0000000..32c049b --- /dev/null +++ b/frontend/src/routes/ketten/[id]/+page.svelte @@ -0,0 +1,114 @@ + + + + {kette?.ursprung?.aktenzeichen || 'Kette'} - Antragstracker Hagen + + +{#if error} +
{error}
+{:else if !kette} +
+
+
+{:else} + + ← Zurück zur Liste + + +
+
+
+
+ {#if kette.ursprung?.aktenzeichen} +

{kette.ursprung.aktenzeichen}

+ {/if} + + {#if kette.typ} + {typLabel(kette.typ)} + {/if} +
+ {#if kette.thema} +

{kette.thema}

+ {/if} +
+
+ {#if kette.status_seit} +
Status seit: {formatDate(kette.status_seit)}
+ {/if} + {#if kette.letzte_aktivitaet} +
Letzte Aktivität: {formatDate(kette.letzte_aktivitaet)}
+ {/if} + {#if kette.vertagungen_count > 0} +
Vertagungen: {kette.vertagungen_count}
+ {/if} +
+
+ + + {#if kette.antragsteller.length > 0} +
+ Antragsteller: + {#each kette.antragsteller as p} + + {p.kuerzel} + + {/each} +
+ {/if} +
+ + +
+

Perlenschnur

+ {#if kette.glieder.length > 0} + + {:else} +

Keine Glieder in dieser Kette.

+ {/if} +
+ + + {#if kette.graph && kette.graph.nodes.filter(n => n.extern).length > 0} + + {/if} +{/if} diff --git a/frontend/src/routes/vorlagen/+page.svelte b/frontend/src/routes/vorlagen/+page.svelte new file mode 100644 index 0000000..90a8aeb --- /dev/null +++ b/frontend/src/routes/vorlagen/+page.svelte @@ -0,0 +1,147 @@ + + + + Vorlagen - Antragstracker Hagen + + +
+

Vorlagen

+

Alle importierten Vorlagen aus dem ALLRIS-System

+
+ + +
+
+
+ + { if (e.key === 'Enter') applyFilters(); }} /> +
+
+ + +
+ +
+
+ +{#if error} +
{error}
+{:else if loading && !data} +
+
+
+{:else if data} +
{data.total} Vorlagen gefunden
+ +
+ + + + + + + + + + + {#each data.items as v} + goto(`/vorlagen/${v.id}`)}> + + + + + + {/each} + +
AktenzeichenBetreffTypDatum
+ + {v.aktenzeichen || `#${v.id}`} + + {v.betreff || '-'}{v.typ || '-'}{formatDate(v.datum_eingang)}
+
+ + + {#if data.total > data.page_size} + {@const totalPages = Math.ceil(data.total / data.page_size)} +
+ + Seite {currentPage} von {totalPages} + +
+ {/if} +{/if} diff --git a/frontend/src/routes/vorlagen/[id]/+page.svelte b/frontend/src/routes/vorlagen/[id]/+page.svelte new file mode 100644 index 0000000..00f9f37 --- /dev/null +++ b/frontend/src/routes/vorlagen/[id]/+page.svelte @@ -0,0 +1,259 @@ + + + + {vorlage?.aktenzeichen || 'Vorlage'} - Antragstracker Hagen + + +{#if error} +
{error}
+{:else if !vorlage} +
+
+
+{:else} + + ← Zurück zur Liste + + +
+
+
+
+ {#if vorlage.aktenzeichen} +

{vorlage.aktenzeichen}

+ {/if} + {#if vorlage.typ} + {typLabel(vorlage.typ)} + {/if} + {#if vorlage.ist_verwaltungsvorlage} + Verwaltungsvorlage + {/if} +
+ {#if vorlage.betreff} +

{vorlage.betreff}

+ {/if} + {#if vorlage.thema_kurz} +

Thema: {vorlage.thema_kurz}

+ {/if} +
+
+ {#if vorlage.datum_eingang} +
Eingegangen: {formatDate(vorlage.datum_eingang)}
+ {/if} + {#if vorlage.kette_id} + Zur Kette → + {/if} +
+
+ + + {#if vorlage.antragsteller.length > 0} +
+ Antragsteller: + {#each vorlage.antragsteller as p} + + {p.kuerzel} + + {/each} +
+ {/if} + + +
+ {#if vorlage.web_url} + ALLRIS ↗ + {/if} + {#if vorlage.pdf_url} + PDF ↗ + {/if} +
+
+ +
+ +
+ + {#if vorlage.ki_zusammenfassung} +
+

+ 🤖 KI-Zusammenfassung +

+

{vorlage.ki_zusammenfassung.zusammenfassung}

+ + {#if vorlage.ki_zusammenfassung.kernforderung} +
+ Kernforderung: +

{vorlage.ki_zusammenfassung.kernforderung}

+
+ {/if} + + {#if vorlage.ki_zusammenfassung.begruendung} +
+ Begründung: +

{vorlage.ki_zusammenfassung.begruendung}

+
+ {/if} + +
+ {#if vorlage.ki_zusammenfassung.thema} + + 📂 {vorlage.ki_zusammenfassung.thema} + + {/if} + {#if vorlage.ki_zusammenfassung.partei} + + 🏛️ {vorlage.ki_zusammenfassung.partei} + + {/if} + {#each vorlage.ki_zusammenfassung.betroffene_orte || [] as ort} + + 📍 {ort} + + {/each} +
+
+ {/if} + + + {#if vorlage.volltext_clean} +
+
+

Volltext

+ +
+ {#if showVolltext} +
{vorlage.volltext_clean}
+ {:else} +

{vorlage.volltext_clean}

+ {/if} +
+ {/if} + + + {#if vorlage.beratungen.length > 0} +
+

Beratungsfolge

+
+ {#each vorlage.beratungen as b} +
+
+ {#if b.gremium} + {b.gremium.name} + {/if} + {#if b.rolle} + ({b.rolle}) + {/if} + {#if b.ergebnis} +
+ + {b.ergebnis} + +
+ {/if} + {#if b.ergebnis_text} +

{b.ergebnis_text}

+ {/if} +
+ {formatDate(b.sitzung_datum)} +
+ {/each} +
+
+ {/if} +
+ + +
+ + {#if vorlage.referenzen_ausgehend.length > 0} +
+

Verweist auf

+ +
+ {/if} + + + {#if vorlage.referenzen_eingehend.length > 0} +
+

Referenziert von

+ +
+ {/if} + + +
+

Details

+
+ {#if vorlage.aktenzeichen_basis} +
+
Basis
+
{vorlage.aktenzeichen_basis}
+
+ {/if} + {#if vorlage.aktenzeichen_suffix} +
+
Suffix
+
{vorlage.aktenzeichen_suffix}
+
+ {/if} +
+
ID
+
{vorlage.id}
+
+
+
+
+
+{/if} diff --git a/frontend/static/robots.txt b/frontend/static/robots.txt new file mode 100644 index 0000000..b6dd667 --- /dev/null +++ b/frontend/static/robots.txt @@ -0,0 +1,3 @@ +# allow crawling everything by default +User-agent: * +Disallow: diff --git a/frontend/svelte.config.js b/frontend/svelte.config.js new file mode 100644 index 0000000..c4ca255 --- /dev/null +++ b/frontend/svelte.config.js @@ -0,0 +1,30 @@ +import adapter from '@sveltejs/adapter-static'; +import { relative, sep } from 'node:path'; + +/** @type {import('@sveltejs/kit').Config} */ +const config = { + compilerOptions: { + // defaults to rune mode for the project, execept for `node_modules`. Can be removed in svelte 6. + runes: ({ filename }) => { + const relativePath = relative(import.meta.dirname, filename); + const pathSegments = relativePath.toLowerCase().split(sep); + const isExternalLibrary = pathSegments.includes('node_modules'); + + return isExternalLibrary ? undefined : true; + } + }, + kit: { + // adapter-auto only supports some environments, see https://svelte.dev/docs/kit/adapter-auto for a list. + // If your environment is not supported, or you settled on a specific environment, switch out the adapter. + // See https://svelte.dev/docs/kit/adapters for more information about adapters. 
+ adapter: adapter({ + pages: 'build', + assets: 'build', + fallback: 'index.html', + precompress: false, + strict: true + }) + } +}; + +export default config; diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json new file mode 100644 index 0000000..2c2ed3c --- /dev/null +++ b/frontend/tsconfig.json @@ -0,0 +1,20 @@ +{ + "extends": "./.svelte-kit/tsconfig.json", + "compilerOptions": { + "rewriteRelativeImportExtensions": true, + "allowJs": true, + "checkJs": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "moduleResolution": "bundler" + } + // Path aliases are handled by https://svelte.dev/docs/kit/configuration#alias + // except $lib which is handled by https://svelte.dev/docs/kit/configuration#files + // + // To make changes to top-level options such as include and exclude, we recommend extending + // the generated config; see https://svelte.dev/docs/kit/configuration#typescript +} diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts new file mode 100644 index 0000000..0c4fc1c --- /dev/null +++ b/frontend/vite.config.ts @@ -0,0 +1,10 @@ +import { sveltekit } from '@sveltejs/kit/vite'; +import tailwindcss from '@tailwindcss/vite'; +import { defineConfig } from 'vite'; + +export default defineConfig({ + plugins: [tailwindcss(), sveltekit()], + server: { + allowedHosts: ['mac-mini-von-dotty.local', 'Mac.wideopen.space', 'localhost'] + } +}); diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..cabdb9a --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,27 @@ +[project] +name = "antragstracker-hagen" +version = "0.1.0" +description = "Automatisierte Nachverfolgung kommunaler Anträge und Anfragen in Hagen" +requires-python = ">=3.11" +dependencies = [ + "fastapi>=0.110", + "uvicorn>=0.29", + "httpx>=0.27", + "aiosqlite>=0.20", +] + +[project.optional-dependencies] +dev = [ + "pytest>=8.0", + "ruff>=0.4", +] + 
+
+[tool.pytest.ini_options]
+pythonpath = ["backend/src"]
+filterwarnings = [
+    "ignore::DeprecationWarning:tracker.core.chains",
+]
+
+[build-system]
+requires = ["setuptools>=68"]
+build-backend = "setuptools.build_meta"
diff --git a/scripts/extract_adaptive.py b/scripts/extract_adaptive.py
new file mode 100644
index 0000000..107620d
--- /dev/null
+++ b/scripts/extract_adaptive.py
@@ -0,0 +1,527 @@
+#!/usr/bin/env python3
+"""
+Adaptive PDF-Extraktion mit Throttle-Detection.
+
+Startet konservativ und erhöht Geschwindigkeit bis zum Limit.
+Robustes Logging für Wiederaufnahme nach Abbruch.
+"""
+
+import argparse
+import json
+import os
+import sqlite3
+import tempfile
+import time
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from dataclasses import dataclass, field
+from datetime import datetime
+from pathlib import Path
+from threading import Lock
+
+import httpx
+import pymupdf
+
+# Netdata Metrics HTTP Endpoint (VServer)
+METRICS_URL = os.environ.get("METRICS_URL", "http://152.53.119.77:8127")
+
+PROJECT_ROOT = Path(__file__).resolve().parent.parent
+DB_PATH = PROJECT_ROOT / "data" / "tracker_remote.db"
+STATE_FILE = PROJECT_ROOT / "data" / "extract_state.json"
+LOG_FILE = PROJECT_ROOT / "data" / "extract.log"
+METRICS_FILE = PROJECT_ROOT / "data" / "extract_metrics.jsonl"
+
+
+@dataclass
+class AdaptiveConfig:
+    """Adaptive Throttling-Konfiguration."""
+    delay: float = 0.2  # Start nahe Optimum
+    workers: int = 4  # Start bei ~optimal-35% (basierend auf Daten: optimal ~6)
+    min_delay: float = 0.1  # Minimaler Delay
+    max_workers: int = 15  # Hartes Maximum
+    success_streak: int = 0  # Erfolge in Folge
+    streak_threshold: int = 30  # Erfolge bis Speedup
+    cooldown_until: float = 0  # Timestamp bis Cooldown endet
+    best_delay_per_worker: dict = field(default_factory=dict)  # worker_count -> min stable delay
+    delay_fully_explored: bool = False  # True wenn delay bei aktuellem worker-level am min
+    throughput_per_worker: dict = 
field(default_factory=dict) # worker_count -> best throughput + saturation_detected: bool = False # True wenn mehr Workers keinen Gewinn bringen + saturation_threshold: float = 0.1 # 10% Verbesserung nötig für neuen Worker + + +@dataclass +class State: + """Persistenter Zustand für Wiederaufnahme.""" + processed: set = field(default_factory=set) + failed: dict = field(default_factory=dict) # vorlage_id -> retry_count + failed_permanent: set = field(default_factory=set) + started_at: str = "" + last_update: str = "" + stats: dict = field(default_factory=lambda: { + "success": 0, "failed": 0, "retried": 0, "total": 0 + }) + + +class AdaptiveExtractor: + def __init__(self, state_file: Path = STATE_FILE, notify: bool = True): + self.state_file = state_file + self.config = AdaptiveConfig() + self.db_lock = Lock() + self.log_lock = Lock() + self.notify = notify + self.last_notify = 0 + self.notify_interval = 300 # 5 Minuten + self.batch_start_time = None + self.batch_metrics = [] + self.state = self._load_state() # Must be after log_lock init + + def _load_state(self) -> State: + """Lädt Zustand aus Datei oder erstellt neuen.""" + if self.state_file.exists(): + try: + data = json.loads(self.state_file.read_text()) + state = State( + processed=set(data.get("processed", [])), + failed=data.get("failed", {}), + failed_permanent=set(data.get("failed_permanent", [])), + started_at=data.get("started_at", ""), + last_update=data.get("last_update", ""), + stats=data.get("stats", State().stats) + ) + self._log(f"State geladen: {len(state.processed)} verarbeitet, {len(state.failed)} pending retries") + return state + except Exception as e: + self._log(f"State-Laden fehlgeschlagen: {e}") + + return State(started_at=datetime.now().isoformat()) + + def _save_state(self): + """Speichert Zustand.""" + self.state.last_update = datetime.now().isoformat() + data = { + "processed": list(self.state.processed), + "failed": self.state.failed, + "failed_permanent": 
list(self.state.failed_permanent), + "started_at": self.state.started_at, + "last_update": self.state.last_update, + "stats": self.state.stats + } + self.state_file.write_text(json.dumps(data, indent=2)) + + def _log(self, msg: str): + """Thread-safe Logging.""" + timestamp = datetime.now().strftime("%H:%M:%S") + line = f"[{timestamp}] {msg}" + print(line) + with self.log_lock: + with open(LOG_FILE, "a") as f: + f.write(line + "\n") + + def _record_metric(self, batch_num: int, batch_time: float, success: int, failed: int, bytes_downloaded: int = 0): + """Speichert Metriken für Visualisierung.""" + mb_downloaded = bytes_downloaded / (1024 * 1024) + mb_per_sec = mb_downloaded / max(batch_time, 0.1) + + metric = { + "timestamp": datetime.now().isoformat(), + "batch": batch_num, + "batch_time_sec": round(batch_time, 2), + "success": success, + "failed": failed, + "delay": round(self.config.delay, 3), + "workers": self.config.workers, + "throughput": round(success / max(batch_time, 0.1), 2), # docs/sec + "total_success": self.state.stats["success"], + "total_failed": len(self.state.failed_permanent), + "pending_retries": len(self.state.failed), + "mb_downloaded": round(mb_downloaded, 2), + "mb_per_sec": round(mb_per_sec, 2), + } + self.batch_metrics.append(metric) + + with open(METRICS_FILE, "a") as f: + f.write(json.dumps(metric) + "\n") + + return metric + + def _push_metrics(self, metric: dict): + """Pusht Metriken per HTTP an VServer → Netdata Statsd.""" + try: + payload = { + "throughput": metric["throughput"], + "delay": metric["delay"], + "workers": metric["workers"], + "success_total": metric["total_success"], + "failed_total": metric["total_failed"], + "batch_time": metric["batch_time_sec"], + "pending_retries": metric["pending_retries"], + "items_per_sec": metric["throughput"], # Alias + "mb_per_sec": metric.get("mb_per_sec", 0), + "mb_downloaded": metric.get("mb_downloaded", 0), + } + httpx.post(METRICS_URL, json=payload, timeout=5) + except Exception as e: + 
pass # Silent fail, don't block extraction + + def _send_telegram(self, message: str): + """Loggt Update.""" + self._log(f"[NOTIFY] {message[:100]}...") + + def _maybe_notify(self, force: bool = False): + """Sendet periodische Updates nach Telegram.""" + if not self.notify: + return + + now = time.time() + if not force and (now - self.last_notify) < self.notify_interval: + return + + self.last_notify = now + + # Letzte Metriken + if not self.batch_metrics: + return + + recent = self.batch_metrics[-1] + elapsed = (datetime.now() - datetime.fromisoformat(self.state.started_at)).total_seconds() / 60 + + # Throughput-Trend (letzte 5 Batches) + recent_throughputs = [m["throughput"] for m in self.batch_metrics[-5:]] + avg_throughput = sum(recent_throughputs) / len(recent_throughputs) + + # ETA + remaining = self.state.stats["total"] - self.state.stats["success"] - len(self.state.failed_permanent) + eta_min = remaining / max(avg_throughput * 60, 0.1) + + msg = f"""📊 *PDF-Extraktion Update* + +✓ Erfolg: {self.state.stats['success']:,} +✗ Fehler: {len(self.state.failed_permanent)} +↻ Retries: {len(self.state.failed)} + +⚡ Config: {self.config.workers} workers, {self.config.delay:.2f}s delay +📈 Throughput: {avg_throughput:.1f} docs/sec +⏱️ Laufzeit: {elapsed:.0f} min +🎯 ETA: ~{eta_min:.0f} min + +Batch {recent['batch']}: {recent['success']}✓ {recent['failed']}✗ in {recent['batch_time_sec']}s""" + + self._send_telegram(msg) + + def _get_db(self): + conn = sqlite3.connect(str(DB_PATH), check_same_thread=False) + conn.row_factory = sqlite3.Row + return conn + + def _download_and_extract(self, vorlage_id: int, url: str) -> tuple[int, str | None, str | None, int]: + """Lädt PDF und extrahiert Text. 
    def _handle_success(self, vorlage_id: int, text: str):
        """Record a successful extraction and feed the adaptive speedup.

        Stores the extracted text, marks the attachment row as downloaded,
        and counts a success streak that eventually triggers `_speedup()`.
        """
        # Serialize writes: SQLite connections are used from worker threads.
        with self.db_lock:
            conn = self._get_db()
            # NOTE(review): the same `text` is written to both columns here —
            # the regex cleanup already happened in _download_and_extract.
            conn.execute("""
                UPDATE vorlagen SET volltext = ?, volltext_clean = ?
                WHERE id = ?
            """, (text, text, vorlage_id))
            conn.execute("UPDATE anlagen SET downloaded = 1 WHERE vorlage_id = ?", (vorlage_id,))
            conn.commit()
            conn.close()

        self.state.processed.add(vorlage_id)
        self.state.stats["success"] += 1
        self.config.success_streak += 1

        # Adaptive speedup: after `streak_threshold` consecutive successes,
        # try running faster, then start counting a fresh streak.
        if self.config.success_streak >= self.config.streak_threshold:
            self._speedup()
            self.config.success_streak = 0

    def _handle_failure(self, vorlage_id: int, error: str):
        """Record a failed extraction with retry bookkeeping.

        Throttling errors trigger a severe slowdown and keep the item
        retryable; other errors are retried up to 3 times before the id
        is moved to the permanent-failure set.
        """
        # Retry counters are keyed by the *string* id (JSON state file keys).
        retry_count = self.state.failed.get(str(vorlage_id), 0) + 1

        if "THROTTLED" in error:
            # Server pushed back — cool down, keep the item for a later retry.
            # NOTE: this path intentionally returns before stats["failed"] is
            # incremented, so throttles don't count as real failures.
            self._slowdown(severe=True)
            self.state.failed[str(vorlage_id)] = retry_count
            return

        if retry_count >= 3:
            # Give up on this document; drop its retry counter.
            self.state.failed_permanent.add(vorlage_id)
            if str(vorlage_id) in self.state.failed:
                del self.state.failed[str(vorlage_id)]
            self._log(f"  ✗ #{vorlage_id} permanent failed: {error}")
        else:
            self.state.failed[str(vorlage_id)] = retry_count
            self._log(f"  ↻ #{vorlage_id} retry {retry_count}/3: {error}")

        self.state.stats["failed"] += 1
        # Any real failure resets the success streak that drives speedups.
        self.config.success_streak = 0

    def _speedup(self):
        """Increase throughput with per-worker-level exploration and
        saturation detection.

        Strategy: first shrink the inter-request delay down to `min_delay`;
        only then add workers. Before adding a worker, compare the smoothed
        throughput of the current level against the previous one — if the
        last worker increase gained less than `saturation_threshold`, stop
        scaling (saturation).
        """
        old_delay = self.config.delay
        old_workers = self.config.workers

        # Remember the best (lowest) stable delay seen at this worker count.
        w = self.config.workers
        if w not in self.config.best_delay_per_worker:
            self.config.best_delay_per_worker[w] = self.config.delay
        else:
            self.config.best_delay_per_worker[w] = min(
                self.config.best_delay_per_worker[w],
                self.config.delay
            )

        # Track throughput per worker level (exponential moving average over
        # the last up-to-3 batch metrics).
        if self.batch_metrics:
            recent_throughput = sum(m["throughput"] for m in self.batch_metrics[-3:]) / min(3, len(self.batch_metrics))
            if w not in self.config.throughput_per_worker:
                self.config.throughput_per_worker[w] = recent_throughput
            else:
                # Moving average: 70% history, 30% most recent.
                self.config.throughput_per_worker[w] = (
                    self.config.throughput_per_worker[w] * 0.7 + recent_throughput * 0.3
                )

        if self.config.delay > self.config.min_delay:
            # Delay not yet at the floor → keep reducing it (×0.8 per step).
            self.config.delay = max(self.config.min_delay, self.config.delay * 0.8)
            self.config.delay_fully_explored = False
        elif self.config.saturation_detected:
            # Saturation already detected → never scale further.
            self._log(f"📊 Sättigung bei {self.config.workers} Workers — mehr bringt nichts")
        elif self.config.workers < self.config.max_workers:
            # Check whether the last worker increase actually helped.
            prev_throughput = self.config.throughput_per_worker.get(w - 1, 0)
            curr_throughput = self.config.throughput_per_worker.get(w, 0)

            if prev_throughput > 0 and curr_throughput > 0:
                improvement = (curr_throughput - prev_throughput) / prev_throughput
                if improvement < self.config.saturation_threshold:
                    # Less than the threshold (e.g. 10%) gained → saturated.
                    self.config.saturation_detected = True
                    self._log(f"📊 Sättigung erkannt: {w-1}→{w} Workers nur +{improvement*100:.1f}% Throughput")
                    return

            # Add a worker and re-explore the delay from a safe starting point.
            self.config.workers += 1
            prev_best = self.config.best_delay_per_worker.get(w, 0.5)
            self.config.delay = max(prev_best, 0.3)
            self.config.delay_fully_explored = False
            self._log(f"🔄 Neuer Worker-Level: reset delay auf {self.config.delay:.2f}s für Exploration")
        else:
            # At max workers with min delay — nothing left to explore.
            self.config.delay_fully_explored = True

        if old_delay != self.config.delay or old_workers != self.config.workers:
            self._log(f"⚡ Speedup: delay={self.config.delay:.2f}s, workers={self.config.workers}")

    def _slowdown(self, severe: bool = False):
        """Back off when the server shows problems.

        severe=True (throttled): 30 s cooldown, double the delay (capped at
        2.0 s), drop one worker, and reset the exploration flag.
        severe=False: gently raise the delay by 20% (capped at 2.0 s).
        """
        if severe:
            self.config.cooldown_until = time.time() + 30  # 30 s pause before the next batch
            self.config.delay = min(2.0, self.config.delay * 2)
            self.config.workers = max(1, self.config.workers - 1)
            self.config.delay_fully_explored = False  # restart exploration later
            self._log(f"🛑 Throttled! Cooldown 30s, delay={self.config.delay:.2f}s, workers={self.config.workers}")
        else:
            self.config.delay = min(2.0, self.config.delay * 1.2)
            self._log(f"⚠️ Slowdown: delay={self.config.delay:.2f}s")

    def _wait_cooldown(self):
        """Sleep until the throttle cooldown deadline has passed (no-op otherwise)."""
        if self.config.cooldown_until > time.time():
            wait = self.config.cooldown_until - time.time()
            self._log(f"⏳ Cooldown: {wait:.0f}s warten...")
            time.sleep(wait)

    def get_pending(self, limit: int) -> list[dict]:
        """Return up to `limit` attachments still needing text extraction.

        Fresh items come first (newest Vorlagen first); items awaiting a
        retry are appended at the end. Ids already processed or permanently
        failed are filtered out in Python.
        """
        conn = self._get_db()

        # Everything with a URL but no extracted text, not yet handled.
        processed_ids = self.state.processed | self.state.failed_permanent

        query = """
            SELECT a.vorlage_id, a.url
            FROM anlagen a
            JOIN vorlagen v ON a.vorlage_id = v.id
            WHERE a.url IS NOT NULL
              AND a.downloaded = 0
              AND (v.volltext_clean IS NULL OR v.volltext_clean = '')
            ORDER BY v.datum_eingang DESC
        """

        all_pending = conn.execute(query).fetchall()
        conn.close()

        # Filter out already-handled ids and honor the limit.
        result = []
        for row in all_pending:
            if row['vorlage_id'] not in processed_ids:
                result.append(dict(row))
                if len(result) >= limit:
                    break

        # Append retry candidates (at the end, lower priority).
        # NOTE(review): opens one short-lived connection per retry row and
        # bumps stats["retried"] as a side effect of this getter.
        for vid_str, count in list(self.state.failed.items()):
            if len(result) >= limit:
                break
            vid = int(vid_str)
            # Re-fetch the URL for this id.
            conn = self._get_db()
            row = conn.execute("SELECT vorlage_id, url FROM anlagen WHERE vorlage_id = ?", (vid,)).fetchone()
            conn.close()
            if row:
                result.append(dict(row))
                self.state.stats["retried"] += 1

        return result
= 0 + batch_num = 0 + + for i in range(0, len(pending), batch_size): + self._wait_cooldown() + + batch = pending[i:i+batch_size] + batch_num += 1 + batch_start = time.time() + batch_success = 0 + batch_failed = 0 + batch_bytes = 0 + + self._log(f"\n--- Batch {batch_num}: {len(batch)} Vorlagen ---") + self._log(f"Config: delay={self.config.delay:.2f}s, workers={self.config.workers}") + + with ThreadPoolExecutor(max_workers=self.config.workers) as executor: + futures = {} + for item in batch: + time.sleep(self.config.delay) + future = executor.submit( + self._download_and_extract, + item['vorlage_id'], + item['url'] + ) + futures[future] = item + + for future in as_completed(futures): + vorlage_id, text, error, bytes_dl = future.result() + batch_bytes += bytes_dl + + if text: + self._handle_success(vorlage_id, text) + self._log(f" ✓ #{vorlage_id}: {len(text)} Zeichen") + batch_success += 1 + else: + self._handle_failure(vorlage_id, error) + batch_failed += 1 + + processed_count += 1 + + # Metriken aufzeichnen + batch_time = time.time() - batch_start + metric = self._record_metric(batch_num, batch_time, batch_success, batch_failed, batch_bytes) + + # Push to Netdata Statsd + self._push_metrics(metric) + + # State speichern nach jedem Batch + self._save_state() + + # Fortschritt + stats = self.state.stats + self._log(f"Progress: {processed_count}/{len(pending)} | ✓{stats['success']} ✗{len(self.state.failed_permanent)} | {batch_success/max(batch_time,0.1):.1f} docs/sec") + + # Telegram-Update (alle 5 min) + self._maybe_notify() + + self._log(f"\n=== Fertig ===") + self._log(f"Erfolgreich: {self.state.stats['success']}") + self._log(f"Fehlgeschlagen: {len(self.state.failed_permanent)}") + self._log(f"State gespeichert: {self.state_file}") + + # Finale Notification + self._maybe_notify(force=True) + + if self.notify: + self._send_telegram(f"✅ *PDF-Extraktion abgeschlossen*\n\n✓ {self.state.stats['success']:,} erfolgreich\n✗ {len(self.state.failed_permanent)} 
def main():
    """CLI entry point for the adaptive PDF extraction."""
    parser = argparse.ArgumentParser(description="Adaptive PDF-Extraktion")
    parser.add_argument("--limit", type=int, default=1000, help="Max. Anzahl")
    parser.add_argument("--reset", action="store_true", help="State zurücksetzen")
    parser.add_argument("--no-notify", action="store_true", help="Keine Telegram-Updates")
    parser.add_argument("--notify-interval", type=int, default=300, help="Sekunden zwischen Updates")
    args = parser.parse_args()

    # A reset wipes both the resume state and the recorded batch metrics.
    if args.reset:
        if STATE_FILE.exists():
            STATE_FILE.unlink()
            print("State zurückgesetzt")
        if METRICS_FILE.exists():
            METRICS_FILE.unlink()

    worker = AdaptiveExtractor(notify=not args.no_notify)
    worker.notify_interval = args.notify_interval
    worker.run(limit=args.limit)


if __name__ == "__main__":
    main()
+ +DOKUMENT: +{volltext} + +--- + +Gib eine Liste aller Orte zurück, die im Text erwähnt werden. Für jeden Ort: +- rohtext: Die genaue Formulierung im Text +- kontext: Der Satz oder Absatz, in dem der Ort erwähnt wird +- typ: strasse|platz|stadtteil|gebaeude|sonstiges +- geocodierbar: true/false (kann man das auf einer Karte finden?) +- geocode_query: Falls geocodierbar, der beste Suchbegriff für Nominatim (z.B. bei "Polizeiwache an der Boeler Straße" → "Boeler Straße") + +JSON-Format: +{{ + "orte": [ + {{ + "rohtext": "Altenhagener Brücke", + "kontext": "Der Abschnitt ab der Altenhagener Brücke bis zum Aldi", + "typ": "strasse", + "geocodierbar": true, + "geocode_query": "Altenhagener Brücke, Hagen" + }}, + {{ + "rohtext": "Spielplatz", + "kontext": "Darüber hinaus befindet sich ein Spielplatz", + "typ": "gebaeude", + "geocodierbar": false, + "geocode_query": null + }} + ] +}} + +WICHTIG: +- Extrahiere ALLE Orte, auch generische +- Bei "X an der Y-Straße" ist Y-Straße der geocode_query +- Stadtteile wie "Altenhagen", "Haspe" sind geocodierbar +- Generische Begriffe wie "Schule", "Spielplatz" ohne Straßenangabe sind NICHT geocodierbar + +NUR JSON, keine Erklärungen.""" + + +GEOCODE_REFINEMENT_PROMPT = """Du bist ein Geocoding-Experte für die Stadt Hagen (NRW). + +Ich habe folgende Ortsangaben aus einem kommunalpolitischen Dokument extrahiert: +{orte_json} + +Der Volltext-Kontext war: +{kontext} + +Nominatim hat für "{query}" folgende Ergebnisse in Hagen gefunden: +{nominatim_results} + +Welches Ergebnis passt am besten zum Kontext? Antworte mit der Nummer (1, 2, 3...) oder "keins" wenn keins passt. 
+Nur die Nummer oder "keins", keine Erklärung.""" + + +def get_db(): + conn = sqlite3.connect(str(DB_PATH)) + conn.row_factory = sqlite3.Row + return conn + + +def call_qwen(prompt: str, model: str = "qwen-turbo-latest") -> dict | str | None: + """Ruft Qwen API auf.""" + if not DASHSCOPE_KEY: + return None + + try: + resp = httpx.post( + DASHSCOPE_URL, + headers={"Authorization": f"Bearer {DASHSCOPE_KEY}", "Content-Type": "application/json"}, + json={"model": model, "messages": [{"role": "user", "content": prompt}], "temperature": 0.1}, + timeout=60 + ) + resp.raise_for_status() + content = resp.json()["choices"][0]["message"]["content"] + + # JSON extrahieren wenn vorhanden + if "```json" in content: + content = content.split("```json")[1].split("```")[0] + elif "```" in content: + parts = content.split("```") + if len(parts) >= 2: + content = parts[1] + + try: + return json.loads(content.strip()) + except json.JSONDecodeError: + return content.strip() + + except Exception as e: + print(f" API-Fehler: {e}") + return None + + +def geocode_nominatim(client: httpx.Client, query: str) -> list[dict]: + """Sucht mit Nominatim in Hagen.""" + try: + resp = client.get( + NOMINATIM_URL, + params={"q": f"{query}, Hagen, Germany", "format": "json", "limit": 3, + "viewbox": HAGEN_BBOX, "bounded": 1}, + headers={"User-Agent": USER_AGENT}, + timeout=10 + ) + resp.raise_for_status() + return resp.json() + except Exception as e: + print(f" Nominatim-Fehler: {e}") + return [] + + +def process_vorlage(conn: sqlite3.Connection, client: httpx.Client, vorlage: dict) -> int: + """Extrahiert und geocodiert Orte aus einer Vorlage.""" + vid = vorlage['id'] + akz = vorlage['aktenzeichen'] or f"#{vid}" + volltext = vorlage['volltext_clean'] + + if not volltext or len(volltext) < 100: + return 0 + + # Volltext kürzen + volltext_short = volltext[:6000] if len(volltext) > 6000 else volltext + + # Schritt 1: KI-Extraktion + prompt = EXTRACTION_PROMPT.format(volltext=volltext_short) + result = 
call_qwen(prompt) + + if not result or not isinstance(result, dict) or 'orte' not in result: + print(f" {akz}: Keine Orte extrahiert") + return 0 + + orte = result['orte'] + print(f" {akz}: {len(orte)} Orte gefunden") + + # Schritt 2: Geocoding für geocodierbare Orte + success = 0 + for ort in orte: + rohtext = ort.get('rohtext', '') + kontext = ort.get('kontext', '') + typ = ort.get('typ', 'sonstiges') + geocodierbar = ort.get('geocodierbar', False) + geocode_query = ort.get('geocode_query') + + if not rohtext: + continue + + # Prüfen ob schon existiert + existing = conn.execute( + "SELECT id FROM orte WHERE name = ? OR rohtext = ?", + (rohtext, rohtext) + ).fetchone() + + if existing: + # Nur Verknüpfung erstellen + conn.execute(""" + INSERT OR IGNORE INTO vorlagen_orte (vorlage_id, ort_id, kontext) + VALUES (?, ?, ?) + """, (vid, existing['id'], kontext[:500])) + conn.execute("UPDATE orte SET vorlage_count = vorlage_count + 1 WHERE id = ?", (existing['id'],)) + conn.commit() + continue + + # Neuen Ort anlegen + lat, lon = None, None + status = 'skipped' + + if geocodierbar and geocode_query: + time.sleep(1.1) # Nominatim Rate Limit + results = geocode_nominatim(client, geocode_query) + + if results: + # Ersten Treffer nehmen (könnte mit KI verfeinert werden) + lat = float(results[0]['lat']) + lon = float(results[0]['lon']) + status = 'success' + print(f" ✓ {rohtext} → ({lat:.4f}, {lon:.4f})") + else: + status = 'failed' + print(f" ✗ {rohtext} (nicht gefunden)") + else: + print(f" ⊘ {rohtext} (nicht geocodierbar)") + + cursor = conn.execute(""" + INSERT INTO orte (name, typ, lat, lon, rohtext, kontext_satz, geocode_status, vorlage_count) + VALUES (?, ?, ?, ?, ?, ?, ?, 1) + """, (geocode_query or rohtext, typ, lat, lon, rohtext, kontext[:500], status)) + + ort_id = cursor.lastrowid + conn.execute(""" + INSERT OR IGNORE INTO vorlagen_orte (vorlage_id, ort_id, kontext) + VALUES (?, ?, ?) 
+ """, (vid, ort_id, kontext[:500])) + conn.commit() + + if lat: + success += 1 + + return success + + +def main(): + parser = argparse.ArgumentParser(description="KI-gestützte Ortsextraktion") + parser.add_argument("--limit", type=int, default=10, help="Max. Anzahl Vorlagen") + parser.add_argument("--vorlage", type=int, help="Einzelne Vorlage-ID") + args = parser.parse_args() + + print(f"=== KI-Ortsextraktion ===\n") + + conn = get_db() + client = httpx.Client() + + if args.vorlage: + query = "SELECT id, aktenzeichen, volltext_clean FROM vorlagen WHERE id = ?" + params = [args.vorlage] + else: + # Vorlagen mit Volltext die noch nicht verarbeitet wurden + query = """ + SELECT v.id, v.aktenzeichen, v.volltext_clean + FROM vorlagen v + WHERE v.volltext_clean IS NOT NULL + AND v.id NOT IN (SELECT DISTINCT vorlage_id FROM vorlagen_orte) + ORDER BY v.datum_eingang DESC + LIMIT ? + """ + params = [args.limit] + + vorlagen = conn.execute(query, params).fetchall() + print(f"Verarbeite {len(vorlagen)} Vorlagen\n") + + total_success = 0 + for v in vorlagen: + total_success += process_vorlage(conn, client, dict(v)) + + client.close() + + # Stats + total_orte = conn.execute("SELECT COUNT(*) FROM orte").fetchone()[0] + geocoded = conn.execute("SELECT COUNT(*) FROM orte WHERE lat IS NOT NULL").fetchone()[0] + conn.close() + + print(f"\n=== Fertig ===") + print(f"Orte gesamt: {total_orte}") + print(f"Geocodiert: {geocoded}") + print(f"Diese Runde: {total_success} neue geocodiert") + + +if __name__ == "__main__": + main() diff --git a/scripts/extract_pdfs.py b/scripts/extract_pdfs.py new file mode 100644 index 0000000..d1d7744 --- /dev/null +++ b/scripts/extract_pdfs.py @@ -0,0 +1,155 @@ +#!/usr/bin/env python3 +""" +Lädt PDFs von URLs und extrahiert Text mit PyMuPDF. +Parallelisiert für Geschwindigkeit. 
+""" + +import argparse +import sqlite3 +import tempfile +import time +from concurrent.futures import ThreadPoolExecutor, as_completed +from pathlib import Path + +import httpx +import pymupdf # PyMuPDF + +PROJECT_ROOT = Path(__file__).resolve().parent.parent +DB_PATH = PROJECT_ROOT / "data" / "tracker_remote.db" + +# Rate limiting +REQUESTS_PER_SECOND = 5 +MIN_DELAY = 1.0 / REQUESTS_PER_SECOND + + +def get_db(): + conn = sqlite3.connect(str(DB_PATH), check_same_thread=False) + conn.row_factory = sqlite3.Row + return conn + + +def download_and_extract(vorlage_id: int, url: str) -> tuple[int, str | None, str | None]: + """ + Lädt PDF und extrahiert Text. + Returns: (vorlage_id, text, error) + """ + try: + # Download + resp = httpx.get(url, timeout=60, follow_redirects=True) + resp.raise_for_status() + + if len(resp.content) < 100: + return (vorlage_id, None, "PDF zu klein") + + # Text extrahieren + with tempfile.NamedTemporaryFile(suffix=".pdf", delete=True) as tmp: + tmp.write(resp.content) + tmp.flush() + + doc = pymupdf.open(tmp.name) + text_parts = [] + for page in doc: + text_parts.append(page.get_text()) + doc.close() + + text = "\n".join(text_parts).strip() + + if len(text) < 50: + return (vorlage_id, None, "Kein Text extrahiert") + + return (vorlage_id, text, None) + + except httpx.HTTPStatusError as e: + return (vorlage_id, None, f"HTTP {e.response.status_code}") + except Exception as e: + return (vorlage_id, None, str(e)[:100]) + + +def clean_text(text: str) -> str: + """Bereinigt extrahierten Text.""" + import re + # Mehrfache Leerzeilen reduzieren + text = re.sub(r'\n{3,}', '\n\n', text) + # Mehrfache Leerzeichen + text = re.sub(r' {2,}', ' ', text) + return text.strip() + + +def process_batch(vorlagen: list[dict], workers: int = 5) -> dict: + """Verarbeitet einen Batch parallel.""" + results = {"success": 0, "failed": 0, "errors": []} + conn = get_db() + + with ThreadPoolExecutor(max_workers=workers) as executor: + futures = {} + for v in vorlagen: + 
time.sleep(MIN_DELAY) # Rate limiting + future = executor.submit(download_and_extract, v['vorlage_id'], v['url']) + futures[future] = v + + for future in as_completed(futures): + v = futures[future] + vorlage_id, text, error = future.result() + + if text: + clean = clean_text(text) + conn.execute(""" + UPDATE vorlagen SET volltext = ?, volltext_clean = ? + WHERE id = ? + """, (text, clean, vorlage_id)) + conn.execute(""" + UPDATE anlagen SET downloaded = 1 WHERE vorlage_id = ? + """, (vorlage_id,)) + conn.commit() + results["success"] += 1 + print(f" ✓ #{vorlage_id}: {len(clean)} Zeichen") + else: + results["failed"] += 1 + results["errors"].append((vorlage_id, error)) + print(f" ✗ #{vorlage_id}: {error}") + + conn.close() + return results + + +def main(): + parser = argparse.ArgumentParser(description="PDF-Extraktion") + parser.add_argument("--limit", type=int, default=100, help="Max. Anzahl") + parser.add_argument("--workers", type=int, default=5, help="Parallele Downloads") + parser.add_argument("--offset", type=int, default=0, help="Start-Offset") + args = parser.parse_args() + + print(f"=== PDF-Extraktion ===") + print(f"Limit: {args.limit}, Workers: {args.workers}\n") + + conn = get_db() + + # Vorlagen mit PDF-URL aber ohne Volltext + vorlagen = conn.execute(""" + SELECT a.vorlage_id, a.url + FROM anlagen a + JOIN vorlagen v ON a.vorlage_id = v.id + WHERE a.url IS NOT NULL + AND a.downloaded = 0 + AND (v.volltext_clean IS NULL OR v.volltext_clean = '') + ORDER BY v.datum_eingang DESC + LIMIT ? OFFSET ? 
+ """, (args.limit, args.offset)).fetchall() + + conn.close() + + print(f"Zu verarbeiten: {len(vorlagen)}\n") + + if not vorlagen: + print("Nichts zu tun!") + return + + results = process_batch([dict(v) for v in vorlagen], args.workers) + + print(f"\n=== Fertig ===") + print(f"Erfolgreich: {results['success']}") + print(f"Fehlgeschlagen: {results['failed']}") + + +if __name__ == "__main__": + main() diff --git a/scripts/extract_volltext.py b/scripts/extract_volltext.py new file mode 100644 index 0000000..9170af0 --- /dev/null +++ b/scripts/extract_volltext.py @@ -0,0 +1,132 @@ +#!/usr/bin/env python3 +""" +PDF-Volltext-Extraktion für Antragstracker Hagen. +Lädt PDFs von ALLRIS und extrahiert den Text. +""" + +import argparse +import sqlite3 +import time +from pathlib import Path + +import fitz # PyMuPDF +import httpx + +PROJECT_ROOT = Path(__file__).resolve().parent.parent +DB_PATH = PROJECT_ROOT / "data" / "tracker_remote.db" + + +def get_db(): + conn = sqlite3.connect(str(DB_PATH)) + conn.row_factory = sqlite3.Row + return conn + + +def extract_text_from_pdf(pdf_bytes: bytes) -> str: + """Extrahiert Text aus PDF-Bytes.""" + try: + doc = fitz.open(stream=pdf_bytes, filetype="pdf") + text_parts = [] + for page in doc: + text_parts.append(page.get_text()) + doc.close() + return "\n".join(text_parts).strip() + except Exception as e: + print(f" PDF-Fehler: {e}") + return "" + + +def clean_text(text: str) -> str: + """Bereinigt extrahierten Text.""" + # Mehrfache Leerzeilen reduzieren + import re + text = re.sub(r'\n{3,}', '\n\n', text) + # Führende/trailing Whitespace pro Zeile + lines = [line.strip() for line in text.split('\n')] + return '\n'.join(lines).strip() + + +def process_vorlage(conn: sqlite3.Connection, client: httpx.Client, vorlage: dict) -> bool: + """Lädt PDF und extrahiert Volltext für eine Vorlage.""" + vid = vorlage['id'] + akz = vorlage['aktenzeichen'] or f"#{vid}" + pdf_url = vorlage['pdf_url'] + + if not pdf_url: + print(f" {akz}: Keine PDF-URL") + 
return False + + try: + resp = client.get(pdf_url, timeout=30, follow_redirects=True) + resp.raise_for_status() + + if 'application/pdf' not in resp.headers.get('content-type', ''): + print(f" {akz}: Kein PDF ({resp.headers.get('content-type')})") + return False + + text = extract_text_from_pdf(resp.content) + if not text: + print(f" {akz}: Kein Text extrahiert") + return False + + text_clean = clean_text(text) + + conn.execute( + "UPDATE vorlagen SET volltext = ?, volltext_clean = ? WHERE id = ?", + (text, text_clean, vid) + ) + conn.commit() + + print(f" {akz}: {len(text_clean)} Zeichen") + return True + + except httpx.TimeoutException: + print(f" {akz}: Timeout") + return False + except Exception as e: + print(f" {akz}: Fehler {e}") + return False + + +def main(): + parser = argparse.ArgumentParser(description="PDF-Volltext-Extraktion") + parser.add_argument("--limit", type=int, default=10, help="Max. Anzahl (default: 10)") + parser.add_argument("--typ", type=str, default="antrag", help="Vorlagen-Typ (default: antrag)") + parser.add_argument("--all", action="store_true", help="Alle ohne Volltext") + args = parser.parse_args() + + print(f"=== PDF-Volltext-Extraktion ===\n") + + conn = get_db() + client = httpx.Client() + + # Vorlagen ohne Volltext finden + query = """ + SELECT id, aktenzeichen, pdf_url + FROM vorlagen + WHERE volltext IS NULL + AND pdf_url IS NOT NULL + """ + if args.typ: + query += f" AND typ = '{args.typ}'" + query += " ORDER BY datum_eingang DESC" + if not args.all: + query += f" LIMIT {args.limit}" + + vorlagen = conn.execute(query).fetchall() + print(f"Verarbeite {len(vorlagen)} Vorlagen (Typ: {args.typ or 'alle'})\n") + + success = 0 + for v in vorlagen: + if process_vorlage(conn, client, dict(v)): + success += 1 + time.sleep(0.5) # Rate limiting + + client.close() + conn.close() + + print(f"\n=== Fertig: {success}/{len(vorlagen)} erfolgreich ===") + + +if __name__ == "__main__": + main() diff --git a/scripts/geocode_orte.py 
b/scripts/geocode_orte.py new file mode 100644 index 0000000..c026dd6 --- /dev/null +++ b/scripts/geocode_orte.py @@ -0,0 +1,222 @@ +#!/usr/bin/env python3 +""" +Geocodierung von Orten aus KI-Zusammenfassungen. +Nutzt Nominatim (OpenStreetMap) für Hagen-spezifische Orte. +""" + +import argparse +import json +import sqlite3 +import time +from pathlib import Path + +import httpx + +PROJECT_ROOT = Path(__file__).resolve().parent.parent +DB_PATH = PROJECT_ROOT / "data" / "tracker_remote.db" + +# Nominatim API (OpenStreetMap) +NOMINATIM_URL = "https://nominatim.openstreetmap.org/search" +USER_AGENT = "Antragstracker-Hagen/1.0 (tobias.roedel@econgood.org)" + +# Hagen Bounding Box (ungefähr) +HAGEN_BBOX = "7.35,51.30,7.65,51.45" # minLon,minLat,maxLon,maxLat + +# Generische Begriffe die nicht geocodiert werden sollten +BLACKLIST = { + "polizeiwache", "polizei", "feuerwehr", "krankenhaus", "rathaus", + "aldi", "aldi-markt", "lidl", "rewe", "edeka", "penny", "netto", + "schule", "grundschule", "gymnasium", "kindergarten", "kita", + "spielplatz", "kirche", "friedhof", "sportplatz", "schwimmbad", + "bushaltestelle", "bahnhof", "parkplatz", "parkhaus", +} + + +def get_db(): + conn = sqlite3.connect(str(DB_PATH)) + conn.row_factory = sqlite3.Row + return conn + + +def geocode_ort(client: httpx.Client, name: str) -> tuple[float, float] | None: + """Geocodiert einen Ort in Hagen.""" + # Verschiedene Suchvarianten + queries = [ + f"{name}, Hagen, Germany", + f"{name}, Hagen", + f"{name} Hagen", + ] + + for q in queries: + try: + resp = client.get( + NOMINATIM_URL, + params={ + "q": q, + "format": "json", + "limit": 1, + "viewbox": HAGEN_BBOX, + "bounded": 1, + }, + headers={"User-Agent": USER_AGENT}, + timeout=10 + ) + resp.raise_for_status() + + results = resp.json() + if results: + lat = float(results[0]["lat"]) + lon = float(results[0]["lon"]) + return (lat, lon) + + except Exception as e: + print(f" Geocoding-Fehler für '{q}': {e}") + + time.sleep(1.1) # Nominatim Rate Limit: 
def extract_orte_from_ki(conn: sqlite3.Connection) -> list[tuple[int, str, str]]:
    """Collect place mentions from the KI summaries.

    Reads `ki_bewertungen.anmerkungen` (a JSON blob) for rows of typ
    'zusammenfassung' and returns (vorlage_id, ort_name, kontext) tuples,
    where kontext is the summary's 'kernforderung' (or "" when absent).
    Place names of 2 characters or fewer are skipped.

    Fix: the previous bare `except: continue` swallowed *every* exception
    (including KeyboardInterrupt/SystemExit); only malformed JSON and
    non-dict payloads are skipped now.
    """
    rows = conn.execute("""
        SELECT vorlage_id, anmerkungen
        FROM ki_bewertungen
        WHERE typ = 'zusammenfassung'
          AND anmerkungen IS NOT NULL
    """).fetchall()

    orte: list[tuple[int, str, str]] = []
    for vorlage_id, anmerkungen in rows:
        try:
            data = json.loads(anmerkungen)
        except (json.JSONDecodeError, TypeError):
            # Malformed or non-string payload — skip this row.
            continue
        if not isinstance(data, dict):
            # Valid JSON but not an object (e.g. a bare list) — skip.
            continue
        kontext = data.get("kernforderung", "")
        for ort in data.get("betroffene_orte", []):
            # Same filter rule as before: non-empty and longer than 2 chars.
            if isinstance(ort, str) and len(ort) > 2:
                orte.append((vorlage_id, ort, kontext))

    return orte
def main():
    """CLI entry point: geocode place mentions from the KI summaries."""
    parser = argparse.ArgumentParser(description="Geocodierung von Orten")
    parser.add_argument("--limit", type=int, default=50, help="Max. Anzahl neuer Orte")
    args = parser.parse_args()

    print(f"=== Geocodierung von Orten ===\n")

    conn = get_db()
    client = httpx.Client()

    # Pull all place mentions out of the KI summaries.
    mentions = extract_orte_from_ki(conn)
    print(f"Gefunden: {len(mentions)} Ort-Erwähnungen\n")

    # Deduplicate on (vorlage_id, name); the first kontext wins.
    unique: dict[tuple[int, str], str] = {}
    for vid, name, ctx in mentions:
        unique.setdefault((vid, name), ctx)

    print(f"Unique: {len(unique)} Verknüpfungen\n")

    # Process at most --limit entries (attempts, not only successes).
    success = 0
    processed = 0
    for (vid, name), ctx in unique.items():
        if processed >= args.limit:
            break
        if process_ort(conn, client, vid, name, ctx):
            success += 1
        processed += 1

    client.close()
    conn.close()

    # Summary stats from a fresh connection.
    conn = get_db()
    total_orte = conn.execute("SELECT COUNT(*) FROM orte").fetchone()[0]
    geocoded = conn.execute("SELECT COUNT(*) FROM orte WHERE lat IS NOT NULL").fetchone()[0]
    conn.close()

    print(f"\n=== Fertig ===")
    print(f"Orte gesamt: {total_orte}")
    print(f"Geocodiert: {geocoded}")
    print(f"Diese Runde: {success}/{processed}")


if __name__ == "__main__":
    main()
+ +Nutzung: + python scripts/import_oparl.py # Voll-Import (alle Seiten) + python scripts/import_oparl.py --resume 337 # Ab Seite 337 weitermachen + python scripts/import_oparl.py --incremental # Nur neue Papers (stoppt bei bekannten) + python scripts/import_oparl.py --limit 100 # Nur 100 Papers (Test) + python scripts/import_oparl.py --resolve-gremien # Nur Gremien-Namen auflösen +""" + +import argparse +import functools +import re +import sqlite3 +import sys +import time +from pathlib import Path + +import httpx + +# Unbuffered print für Live-Fortschritt +print = functools.partial(print, flush=True) + +OPARL_BASE = "https://allris.hagen.de/public/oparl" +PAPERS_URL = f"{OPARL_BASE}/papers" +ORGS_URL = f"{OPARL_BASE}/organizations" +PROJECT_ROOT = Path(__file__).resolve().parent.parent +SCHEMA_PATH = PROJECT_ROOT / "backend" / "src" / "tracker" / "db" / "schema.sql" +DB_PATH = PROJECT_ROOT / "data" / "tracker_remote.db" + +# OParl paperType → interner Typ +PAPER_TYPE_MAP = { + "Anfrage": "anfrage", + "Antrag": "antrag", + "Beschlussvorlage": "beschlussvorlage", + "Beschlussvorlage BBM": "beschlussvorlage", + "Beschlussvorlage WBH": "beschlussvorlage", + "Mitteilungsvorlage": "mitteilungsvorlage", + "Mitteilung": "mitteilungsvorlage", + "Mitteilung WBH": "mitteilungsvorlage", + "Stellungnahme": "stellungnahme", + "Berichtsvorlage": "bericht", + "Vorschlag zur Tagesordnung": "antrag", + "Dringlichkeitsantrag": "antrag", + "Dringlichkeitsanfrage": "anfrage", + "Änderungsantrag": "aenderungsantrag", + "Ergänzungsantrag": "ergaenzungsantrag", + "Bericht": "bericht", + "Resolution": "resolution", +} + +# Regex für Aktenzeichen-Parsing +AKZ_RE = re.compile(r"^(\d+/\d+)(?:-(\d+))?$") + +# Gremien-Typ aus OParl classification ableiten +GREMIUM_TYP_MAP = { + "Rat": "rat", + "Bezirksvertretung": "bv", + "Ausschuss": "ausschuss", + "Beirat": "beirat", +} + + +def init_db() -> sqlite3.Connection: + """Erstellt die DB und führt schema.sql aus.""" + 
DB_PATH.parent.mkdir(parents=True, exist_ok=True) + conn = sqlite3.connect(str(DB_PATH)) + conn.execute("PRAGMA journal_mode = WAL") + conn.execute("PRAGMA foreign_keys = ON") + conn.row_factory = sqlite3.Row + schema = SCHEMA_PATH.read_text(encoding="utf-8") + conn.executescript(schema) + + # Schema-Migration: oparl_id in beratungen (für Dedup) + cols = {r[1] for r in conn.execute("PRAGMA table_info(beratungen)").fetchall()} + if "oparl_id" not in cols: + conn.execute("ALTER TABLE beratungen ADD COLUMN oparl_id TEXT") + conn.execute("CREATE UNIQUE INDEX IF NOT EXISTS idx_beratungen_oparl ON beratungen(oparl_id)") + print(" Migration: oparl_id zu beratungen hinzugefügt") + + conn.commit() + return conn + + +def fetch_page(client: httpx.Client, url: str, params: dict, + max_retries: int = 3) -> dict | None: + """Holt eine API-Seite mit Retry-Logik.""" + for attempt in range(max_retries): + try: + resp = client.get(url, params=params, timeout=30) + resp.raise_for_status() + return resp.json() + except httpx.TimeoutException: + print(f" Timeout, Versuch {attempt + 1}/{max_retries}") + if attempt < max_retries - 1: + time.sleep(5 * (attempt + 1)) + except Exception as e: + print(f" Fehler: {e}") + if attempt < max_retries - 1: + time.sleep(3) + return None + + +def parse_aktenzeichen(reference: str | None) -> tuple[str | None, str | None, str | None]: + """Zerlegt ein Aktenzeichen in (aktenzeichen, basis, suffix).""" + if not reference: + return None, None, None + m = AKZ_RE.match(reference.strip()) + if m: + basis = m.group(1) + suffix = m.group(2) + return reference.strip(), basis, f"-{suffix}" if suffix else None + return reference.strip(), reference.strip(), None + + +def map_paper_type(oparl_type: str | None) -> str: + if not oparl_type: + return "sonstig" + return PAPER_TYPE_MAP.get(oparl_type, "sonstig") + + +def is_verwaltungsvorlage(paper_type: str | None) -> bool: + if not paper_type: + return False + return paper_type.startswith("Beschlussvorlage") or 
paper_type.startswith("Mitteilung") + + +def upsert_paper(conn: sqlite3.Connection, paper: dict) -> tuple[int | None, bool]: + """Fügt ein Paper ein oder aktualisiert es. + + Returns (vorlage_id, is_new) — is_new=False means it already existed. + """ + oparl_id = paper.get("id") + if not oparl_id: + return None, False + + # Check if already exists + existing = conn.execute( + "SELECT id FROM vorlagen WHERE oparl_id = ?", (oparl_id,) + ).fetchone() + + reference = paper.get("reference") + aktenzeichen, basis, suffix = parse_aktenzeichen(reference) + oparl_type = paper.get("paperType") + typ = map_paper_type(oparl_type) + betreff = paper.get("name", "") + datum = paper.get("date") + web_url = paper.get("web") + + main_file = paper.get("mainFile") + pdf_url = None + if isinstance(main_file, dict): + pdf_url = main_file.get("accessUrl") or main_file.get("downloadUrl") + + try: + if existing: + vorlage_id = existing["id"] + conn.execute( + """UPDATE vorlagen SET + aktenzeichen = ?, aktenzeichen_basis = ?, aktenzeichen_suffix = ?, + typ = ?, betreff = ?, datum_eingang = ?, + pdf_url = ?, web_url = ?, ist_verwaltungsvorlage = ?, + scraped_at = CURRENT_TIMESTAMP + WHERE id = ?""", + (aktenzeichen, basis, suffix, typ, betreff, datum, + pdf_url, web_url, is_verwaltungsvorlage(oparl_type), vorlage_id), + ) + return vorlage_id, False + else: + cur = conn.execute( + """INSERT INTO vorlagen + (oparl_id, aktenzeichen, aktenzeichen_basis, aktenzeichen_suffix, + typ, betreff, datum_eingang, pdf_url, web_url, ist_verwaltungsvorlage) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", + (oparl_id, aktenzeichen, basis, suffix, + typ, betreff, datum, pdf_url, web_url, + is_verwaltungsvorlage(oparl_type)), + ) + return cur.lastrowid, True + except sqlite3.Error as e: + print(f" DB-Fehler bei {reference}: {e}") + return None, False + + +def upsert_consultations(conn: sqlite3.Connection, vorlage_id: int, paper: dict): + """Speichert die Beratungsfolge mit Dedup über oparl_id.""" + consultations = 
paper.get("consultation") or [] + for cons in consultations: + if not isinstance(cons, dict): + continue + + cons_oparl_id = cons.get("id") + + # Gremium auflösen + orgs = cons.get("organization") or [] + gremium_id = None + for org_url in orgs: + if not isinstance(org_url, str): + continue + # Nur typ=gr sind echte Gremien + if "typ=gr" not in org_url: + continue + conn.execute( + "INSERT OR IGNORE INTO gremien (oparl_id, name) VALUES (?, ?)", + (org_url, _org_placeholder_name(org_url)), + ) + row = conn.execute( + "SELECT id FROM gremien WHERE oparl_id = ?", (org_url,) + ).fetchone() + if row: + gremium_id = row["id"] + + rolle = cons.get("role") + authoritative = cons.get("authoritative", False) + + if cons_oparl_id: + conn.execute( + """INSERT INTO beratungen (oparl_id, vorlage_id, gremium_id, rolle) + VALUES (?, ?, ?, ?) + ON CONFLICT(oparl_id) DO UPDATE SET + vorlage_id = excluded.vorlage_id, + gremium_id = excluded.gremium_id, + rolle = excluded.rolle""", + (cons_oparl_id, vorlage_id, gremium_id, rolle), + ) + else: + # Fallback ohne oparl_id: prüfe auf Duplikat + exists = conn.execute( + """SELECT 1 FROM beratungen + WHERE vorlage_id = ? AND gremium_id IS ? AND rolle IS ? + LIMIT 1""", + (vorlage_id, gremium_id, rolle), + ).fetchone() + if not exists: + conn.execute( + "INSERT INTO beratungen (vorlage_id, gremium_id, rolle) VALUES (?, ?, ?)", + (vorlage_id, gremium_id, rolle), + ) + + +def insert_files(conn: sqlite3.Connection, vorlage_id: int, paper: dict): + """Speichert Anlagen eines Papers (nur neue).""" + aux_files = paper.get("auxiliaryFile") or [] + for f in aux_files: + if not isinstance(f, dict): + continue + url = f.get("accessUrl") or f.get("downloadUrl") + name = f.get("name") or f.get("fileName", "") + if url: + exists = conn.execute( + "SELECT 1 FROM anlagen WHERE vorlage_id = ? 
AND url = ?", + (vorlage_id, url), + ).fetchone() + if not exists: + conn.execute( + "INSERT INTO anlagen (vorlage_id, dateiname, url) VALUES (?, ?, ?)", + (vorlage_id, name, url), + ) + + +def _org_placeholder_name(org_url: str) -> str: + """Extrahiere Platzhalter-Name aus URL.""" + org_id = org_url.split("id=")[-1] if "id=" in org_url else org_url + return f"Gremium {org_id}" + + +def build_suffix_references(conn: sqlite3.Connection): + """Erstellt automatische Suffix-Referenzen.""" + print("\nErstelle Suffix-Referenzen...") + # Parent → Child + conn.execute( + """INSERT OR IGNORE INTO referenzen (quelle_id, ziel_id, typ, konfidenz) + SELECT parent.id, child.id, 'suffix', 1.0 + FROM vorlagen child + JOIN vorlagen parent ON child.aktenzeichen_basis = parent.aktenzeichen_basis + WHERE child.aktenzeichen_suffix IS NOT NULL + AND parent.aktenzeichen_suffix IS NULL + AND child.id != parent.id""" + ) + # Sequential: -1 → -2, -2 → -3, etc. + conn.execute( + """INSERT OR IGNORE INTO referenzen (quelle_id, ziel_id, typ, konfidenz) + SELECT earlier.id, later.id, 'suffix', 1.0 + FROM vorlagen later + JOIN vorlagen earlier + ON later.aktenzeichen_basis = earlier.aktenzeichen_basis + AND later.aktenzeichen_suffix IS NOT NULL + AND earlier.aktenzeichen_suffix IS NOT NULL + AND CAST(REPLACE(later.aktenzeichen_suffix, '-', '') AS INTEGER) + = CAST(REPLACE(earlier.aktenzeichen_suffix, '-', '') AS INTEGER) + 1 + WHERE later.id != earlier.id""" + ) + count = conn.execute("SELECT changes()").fetchone()[0] + conn.commit() + print(f" Suffix-Referenzen aktualisiert (letzte Runde: {count} neue)") + + +def resolve_gremien(conn: sqlite3.Connection, client: httpx.Client): + """Löst Gremien-Namen über die OParl Organizations-API auf. + + Aktualisiert name, kuerzel und typ für alle Gremien mit Platzhalter-Namen. 
+ """ + print("\nLöse Gremien-Namen auf...") + + # Alle Organisationen von der API holen + org_lookup: dict[str, dict] = {} + page = 1 + while True: + data = fetch_page(client, ORGS_URL, {"body": 1, "page": page}) + if not data or "data" not in data: + break + for org in data["data"]: + oparl_id = org.get("id") + if oparl_id: + classification = org.get("classification", "") + typ = GREMIUM_TYP_MAP.get(classification, "sonstig") + org_lookup[oparl_id] = { + "name": org.get("name", ""), + "kuerzel": org.get("shortName", ""), + "typ": typ, + "classification": classification, + } + total_pages = data.get("pagination", {}).get("totalPages", page) + if page >= total_pages: + break + page += 1 + time.sleep(0.2) + + print(f" {len(org_lookup)} Organisationen von API geladen") + + # Gremien in DB aktualisieren + updated = 0 + for row in conn.execute("SELECT id, oparl_id, name FROM gremien").fetchall(): + oparl_id = row["oparl_id"] + if oparl_id in org_lookup: + org = org_lookup[oparl_id] + if org["name"] and org["name"] != row["name"]: + conn.execute( + "UPDATE gremien SET name = ?, kuerzel = ?, typ = ? WHERE id = ?", + (org["name"], org["kuerzel"] or None, org["typ"], row["id"]), + ) + updated += 1 + + conn.commit() + print(f" {updated} Gremien aktualisiert") + + +def import_papers(conn: sqlite3.Connection, client: httpx.Client, + start_page: int = 1, limit: int = 0, incremental: bool = False): + """Importiert Papers von der OParl-API. + + Args: + start_page: Erste Seite (für --resume) + limit: Max. 
Anzahl Papers (0 = alle) + incremental: Stoppt wenn nur bereits bekannte Papers gefunden werden + """ + # Pagination ermitteln + print("Ermittle Seitenanzahl...") + first = fetch_page(client, PAPERS_URL, {"body": 1, "page": 1}) + if not first or "pagination" not in first: + print("FEHLER: Konnte API nicht erreichen") + return + + total_pages = first["pagination"]["totalPages"] + total_elements = first["pagination"]["totalElements"] + print(f" {total_elements} Papers auf {total_pages} Seiten") + + existing_count = conn.execute("SELECT COUNT(*) FROM vorlagen").fetchone()[0] + print(f" {existing_count} bereits in DB\n") + + total_new = 0 + total_updated = 0 + consecutive_known_pages = 0 # Für --incremental Abbruch + + for page_num in range(start_page, total_pages + 1): + if page_num == 1 and start_page == 1: + data = first + else: + data = fetch_page(client, PAPERS_URL, {"body": 1, "page": page_num}) + + if not data or "data" not in data: + print(f" Seite {page_num} übersprungen (kein Data)") + continue + + papers = data["data"] + page_new = 0 + for paper in papers: + vorlage_id, is_new = upsert_paper(conn, paper) + if vorlage_id: + upsert_consultations(conn, vorlage_id, paper) + insert_files(conn, vorlage_id, paper) + if is_new: + page_new += 1 + total_new += 1 + else: + total_updated += 1 + + conn.commit() + + progress = (page_num / total_pages) * 100 + marker = f" (+{page_new} neu)" if page_new > 0 else " (bekannt)" + print(f" Seite {page_num:4d}/{total_pages} ({progress:5.1f}%)" + f" — {len(papers)} Papers{marker}" + f" — neu: {total_new}, aktualisiert: {total_updated}") + + # Incremental: Abbruch wenn 3 Seiten hintereinander nur bekannte Papers + if incremental: + if page_new == 0: + consecutive_known_pages += 1 + if consecutive_known_pages >= 3: + print(f"\n Inkrementell: 3 Seiten ohne neue Papers, stoppe.") + break + else: + consecutive_known_pages = 0 + + if limit and (total_new + total_updated) >= limit: + print(f"\n Limit von {limit} erreicht, stoppe.") + 
break + + # Schonende Pause + time.sleep(0.3) + if page_num % 100 == 0: + print(f" Checkpoint Seite {page_num} — Pause 5s...") + time.sleep(5) + + return total_new, total_updated + + +def print_stats(conn: sqlite3.Connection): + """Gibt aktuelle DB-Statistiken aus.""" + print(f"\n=== Datenbank-Statistiken ===") + print(f" Vorlagen: {conn.execute('SELECT COUNT(*) FROM vorlagen').fetchone()[0]}") + print(f" Beratungen: {conn.execute('SELECT COUNT(*) FROM beratungen').fetchone()[0]}") + print(f" Gremien: {conn.execute('SELECT COUNT(*) FROM gremien').fetchone()[0]}") + print(f" Referenzen: {conn.execute('SELECT COUNT(*) FROM referenzen').fetchone()[0]}") + print(f" Anlagen: {conn.execute('SELECT COUNT(*) FROM anlagen').fetchone()[0]}") + print(f"\n Vorlagen nach Typ:") + for r in conn.execute("SELECT typ, COUNT(*) c FROM vorlagen GROUP BY typ ORDER BY c DESC"): + print(f" {r['typ']:25s} {r['c']:>6d}") + print(f"\n Zeitraum: {conn.execute('SELECT MIN(datum_eingang) FROM vorlagen').fetchone()[0]}" + f" bis {conn.execute('SELECT MAX(datum_eingang) FROM vorlagen').fetchone()[0]}") + + +def main(): + parser = argparse.ArgumentParser(description="OParl-Import für Antragstracker Hagen") + parser.add_argument("--resume", type=int, default=0, + help="Ab dieser Seitennummer weitermachen") + parser.add_argument("--incremental", action="store_true", + help="Nur neue Papers (stoppt bei bekannten)") + parser.add_argument("--limit", type=int, default=0, + help="Max. 
Anzahl Papers (0 = alle)") + parser.add_argument("--resolve-gremien", action="store_true", + help="Nur Gremien-Namen auflösen, kein Paper-Import") + parser.add_argument("--no-references", action="store_true", + help="Suffix-Referenzen nicht neu bauen") + args = parser.parse_args() + + print("=== Antragstracker Hagen — OParl-Import ===\n") + + conn = init_db() + print(f" DB: {DB_PATH}\n") + + client = httpx.Client( + headers={"Accept": "application/json"}, + follow_redirects=True, + ) + + try: + if args.resolve_gremien: + resolve_gremien(conn, client) + print_stats(conn) + return + + # Paper-Import + start_page = args.resume if args.resume > 0 else 1 + if args.resume: + print(f"Setze Import ab Seite {start_page} fort...\n") + elif args.incremental: + print("Inkrementeller Import (nur neue Papers)...\n") + else: + print("Voll-Import...\n") + + import_papers(conn, client, start_page, args.limit, args.incremental) + + # Gremien-Namen auflösen + resolve_gremien(conn, client) + + # Suffix-Referenzen + if not args.no_references: + build_suffix_references(conn) + + print_stats(conn) + + finally: + client.close() + conn.close() + + +if __name__ == "__main__": + main() diff --git a/scripts/import_pdf_urls.py b/scripts/import_pdf_urls.py new file mode 100644 index 0000000..0036fd0 --- /dev/null +++ b/scripts/import_pdf_urls.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python3 +""" +Importiert PDF-URLs aus OParl in die anlagen-Tabelle. 
+""" + +import sqlite3 +from pathlib import Path +import httpx + +PROJECT_ROOT = Path(__file__).resolve().parent.parent +DB_PATH = PROJECT_ROOT / "data" / "tracker_remote.db" +OPARL_BASE = "https://allris.hagen.de/public/oparl/papers?body=1" + + +def get_db(): + conn = sqlite3.connect(str(DB_PATH)) + conn.row_factory = sqlite3.Row + return conn + + +def ensure_anlagen_table(conn): + """Erstellt anlagen-Tabelle falls nicht vorhanden.""" + conn.execute(""" + CREATE TABLE IF NOT EXISTS anlagen ( + id INTEGER PRIMARY KEY, + vorlage_id INTEGER NOT NULL, + name TEXT, + url TEXT, + mime_type TEXT, + size INTEGER, + downloaded INTEGER DEFAULT 0, + FOREIGN KEY (vorlage_id) REFERENCES vorlagen(id) + ) + """) + conn.execute("CREATE INDEX IF NOT EXISTS idx_anlagen_vorlage ON anlagen(vorlage_id)") + conn.commit() + + +def import_pdf_urls(): + conn = get_db() + ensure_anlagen_table(conn) + + # Mapping oparl_id -> vorlage.id + vorlage_map = {} + for row in conn.execute("SELECT id, oparl_id FROM vorlagen WHERE oparl_id IS NOT NULL"): + vorlage_map[row['oparl_id']] = row['id'] + + print(f"Vorlagen mit OParl-ID: {len(vorlage_map)}") + + page = 1 + imported = 0 + skipped = 0 + + while True: + print(f"Seite {page}...", end=" ", flush=True) + + try: + resp = httpx.get(f"{OPARL_BASE}&page={page}", timeout=30) + resp.raise_for_status() + data = resp.json() + except Exception as e: + print(f"Fehler: {e}") + break + + if not data.get('data'): + print("keine Daten") + break + + page_imported = 0 + for paper in data['data']: + oparl_id = paper.get('id') + main_file = paper.get('mainFile') + + if not main_file or not oparl_id: + continue + + vorlage_id = vorlage_map.get(oparl_id) + if not vorlage_id: + skipped += 1 + continue + + # Prüfen ob schon existiert + existing = conn.execute( + "SELECT id FROM anlagen WHERE vorlage_id = ?", (vorlage_id,) + ).fetchone() + + if existing: + continue + + url = main_file.get('accessUrl') or main_file.get('downloadUrl') + if not url: + continue + + 
conn.execute(""" + INSERT INTO anlagen (vorlage_id, name, url, mime_type, size) + VALUES (?, ?, ?, ?, ?) + """, ( + vorlage_id, + main_file.get('name') or main_file.get('fileName'), + url, + main_file.get('mimeType'), + main_file.get('size') + )) + imported += 1 + page_imported += 1 + + conn.commit() + print(f"{page_imported} importiert") + + if not data.get('links', {}).get('next'): + break + page += 1 + + conn.close() + print(f"\n=== Fertig ===") + print(f"Importiert: {imported}") + print(f"Übersprungen (keine Vorlage): {skipped}") + + +if __name__ == "__main__": + import_pdf_urls() diff --git a/scripts/ki_bulk_remote.py b/scripts/ki_bulk_remote.py new file mode 100755 index 0000000..8639e2d --- /dev/null +++ b/scripts/ki_bulk_remote.py @@ -0,0 +1,262 @@ +#!/usr/bin/env python3 +""" +KI-Zusammenfassung für Anträge via Qwen (DashScope). +""" + +import argparse +import json +import os +import sqlite3 +import time +from pathlib import Path + +import httpx + +# Nominatim für Geocoding +NOMINATIM_URL = "https://nominatim.openstreetmap.org/search" +USER_AGENT = "Antragstracker-Hagen/1.0" +HAGEN_BBOX = "7.35,51.30,7.65,51.45" + +PROJECT_ROOT = Path(__file__).resolve().parent.parent +DB_PATH = PROJECT_ROOT / "data" / "tracker_remote.db" + +# DashScope API +DASHSCOPE_URL = "https://dashscope-intl.aliyuncs.com/compatible-mode/v1/chat/completions" +DASHSCOPE_KEY = os.environ.get("QWEN_API_KEY") or os.popen("security find-generic-password -s qwen-api -w 2>/dev/null").read().strip() + +PROMPT_TEMPLATE = """Analysiere diesen kommunalpolitischen Antrag aus Hagen. + +DOKUMENT: +{volltext} + +--- + +Erstelle eine strukturierte Zusammenfassung im JSON-Format: + +{{ + "zusammenfassung": "2-3 Sätze, was gefordert wird", + "kernforderung": "Die zentrale Forderung in einem Satz", + "begruendung": "Warum wird das gefordert? (kurz)", + "thema": "Hauptthema (z.B. 
Verkehr, Soziales, Umwelt)", + "partei": "Antragstellende Fraktion falls erkennbar", + "orte": [ + {{ + "rohtext": "Die genaue Formulierung im Text (z.B. 'Polizeiwache an der Boeler Straße')", + "kontext": "Der Satz in dem der Ort erwähnt wird", + "typ": "strasse|platz|stadtteil|gebaeude|sonstiges", + "geocodierbar": true/false, + "geocode_query": "Suchbegriff für Karte (z.B. 'Boeler Straße' statt 'Polizeiwache an der Boeler Straße')" + }} + ] +}} + +WICHTIG für orte: +- Extrahiere ALLE geografischen Erwähnungen +- Bei "X an der Y-Straße" ist geocode_query = "Y-Straße" +- Straßennamen, Plätze, Stadtteile sind geocodierbar=true +- "Spielplatz", "Schule", "Bushaltestelle" ohne Straße sind geocodierbar=false +- geocode_query nur bei geocodierbar=true setzen + +NUR JSON ausgeben, keine Erklärungen.""" + + +def get_db(): + conn = sqlite3.connect(str(DB_PATH)) + conn.row_factory = sqlite3.Row + return conn + + +def call_qwen(prompt: str) -> dict | None: + """Ruft Qwen API auf und gibt JSON zurück.""" + if not DASHSCOPE_KEY: + print(" FEHLER: Kein QWEN_API_KEY gefunden") + return None + + try: + resp = httpx.post( + DASHSCOPE_URL, + headers={ + "Authorization": f"Bearer {DASHSCOPE_KEY}", + "Content-Type": "application/json" + }, + json={ + "model": "qwen-plus-latest", + "messages": [{"role": "user", "content": prompt}], + "temperature": 0.3 + }, + timeout=180 + ) + resp.raise_for_status() + + content = resp.json()["choices"][0]["message"]["content"] + + # JSON extrahieren (falls in Markdown-Block) + if "```json" in content: + content = content.split("```json")[1].split("```")[0] + elif "```" in content: + content = content.split("```")[1].split("```")[0] + + return json.loads(content.strip()) + + except json.JSONDecodeError as e: + print(f" JSON-Parse-Fehler: {e}") + print(f" Content: {content[:200]}...") + return None + except Exception as e: + print(f" API-Fehler: {e}") + return None + + +def process_vorlage(conn: sqlite3.Connection, vorlage: dict) -> bool: + 
"""Erstellt KI-Zusammenfassung für eine Vorlage.""" + vid = vorlage['id'] + akz = vorlage['aktenzeichen'] or f"#{vid}" + volltext = vorlage['volltext_clean'] + + if not volltext or len(volltext) < 100: + print(f" {akz}: Volltext zu kurz") + return False + + # Volltext kürzen falls zu lang + if len(volltext) > 8000: + volltext = volltext[:8000] + "\n[...gekürzt...]" + + prompt = PROMPT_TEMPLATE.format(volltext=volltext) + result = call_qwen(prompt) + + if not result: + return False + + # In ki_bewertungen speichern + conn.execute(""" + INSERT INTO ki_bewertungen (vorlage_id, typ, begruendung, anmerkungen, modell, prompt_version) + VALUES (?, 'zusammenfassung', ?, ?, 'qwen-plus-latest', 'v1') + """, (vid, result.get('zusammenfassung'), json.dumps(result, ensure_ascii=False))) + + # thema_kurz in vorlagen aktualisieren + if result.get('kernforderung'): + conn.execute("UPDATE vorlagen SET thema_kurz = ? WHERE id = ?", + (result['kernforderung'][:200], vid)) + + conn.commit() + + # Orte geocodieren und speichern + orte = result.get('orte', []) + geocoded_count = 0 + for ort in orte: + geocoded_count += process_ort(conn, vid, ort) + + print(f" {akz}: ✓ {result.get('thema', '?')} ({len(orte)} Orte, {geocoded_count} geocodiert)") + return True + + +def geocode_nominatim(query: str) -> tuple[float, float] | None: + """Geocodiert einen Ort in Hagen via Nominatim.""" + try: + resp = httpx.get( + NOMINATIM_URL, + params={"q": f"{query}, Hagen, Germany", "format": "json", "limit": 1, + "viewbox": HAGEN_BBOX, "bounded": 1}, + headers={"User-Agent": USER_AGENT}, + timeout=10 + ) + resp.raise_for_status() + results = resp.json() + if results: + return (float(results[0]['lat']), float(results[0]['lon'])) + except Exception as e: + pass + return None + + +def process_ort(conn: sqlite3.Connection, vorlage_id: int, ort: dict) -> int: + """Speichert einen Ort und geocodiert ihn wenn möglich. 
Returns 1 wenn geocodiert.""" + rohtext = ort.get('rohtext', '') + kontext = ort.get('kontext', '') + typ = ort.get('typ', 'sonstiges') + geocodierbar = ort.get('geocodierbar', False) + geocode_query = ort.get('geocode_query') + + if not rohtext: + return 0 + + # Prüfen ob schon existiert + existing = conn.execute( + "SELECT id, lat FROM orte WHERE name = ? OR rohtext = ?", + (geocode_query or rohtext, rohtext) + ).fetchone() + + if existing: + # Nur Verknüpfung erstellen + conn.execute(""" + INSERT OR IGNORE INTO vorlagen_orte (vorlage_id, ort_id, kontext) + VALUES (?, ?, ?) + """, (vorlage_id, existing['id'], kontext[:500] if kontext else None)) + conn.execute("UPDATE orte SET vorlage_count = vorlage_count + 1 WHERE id = ?", (existing['id'],)) + conn.commit() + return 1 if existing['lat'] else 0 + + # Neuen Ort anlegen + lat, lon = None, None + status = 'skipped' + + if geocodierbar and geocode_query: + time.sleep(1.1) # Nominatim Rate Limit + coords = geocode_nominatim(geocode_query) + if coords: + lat, lon = coords + status = 'success' + else: + status = 'failed' + + cursor = conn.execute(""" + INSERT INTO orte (name, typ, lat, lon, rohtext, kontext_satz, geocode_status, vorlage_count) + VALUES (?, ?, ?, ?, ?, ?, ?, 1) + """, (geocode_query or rohtext, typ, lat, lon, rohtext, kontext[:500] if kontext else None, status)) + + ort_id = cursor.lastrowid + conn.execute(""" + INSERT OR IGNORE INTO vorlagen_orte (vorlage_id, ort_id, kontext) + VALUES (?, ?, ?) + """, (vorlage_id, ort_id, kontext[:500] if kontext else None)) + conn.commit() + + return 1 if lat else 0 + + +def main(): + parser = argparse.ArgumentParser(description="KI-Zusammenfassung für Anträge") + parser.add_argument("--limit", type=int, default=5, help="Max. 
Anzahl (default: 5)") + parser.add_argument("--typ", type=str, default="", help="Vorlagen-Typ") + args = parser.parse_args() + + print(f"=== KI-Zusammenfassung ===\n") + + conn = get_db() + + # Vorlagen mit Volltext aber ohne KI-Zusammenfassung + query = """ + SELECT v.id, v.aktenzeichen, v.volltext_clean + FROM vorlagen v + LEFT JOIN ki_bewertungen kb ON v.id = kb.vorlage_id AND kb.typ = 'zusammenfassung' + WHERE v.volltext_clean IS NOT NULL + AND kb.id IS NULL + """ + if args.typ: + query += f" AND v.typ = '{args.typ}'" + query += f" ORDER BY v.datum_eingang DESC LIMIT {args.limit}" + + vorlagen = conn.execute(query).fetchall() + print(f"Verarbeite {len(vorlagen)} Vorlagen\n") + + success = 0 + for v in vorlagen: + if process_vorlage(conn, dict(v)): + success += 1 + + conn.close() + print(f"\n=== Fertig: {success}/{len(vorlagen)} erfolgreich ===") + + +if __name__ == "__main__": + main() diff --git a/scripts/ki_parallel.py b/scripts/ki_parallel.py new file mode 100644 index 0000000..9de3850 --- /dev/null +++ b/scripts/ki_parallel.py @@ -0,0 +1,293 @@ +#!/usr/bin/env python3 +""" +Parallelisierte KI-Zusammenfassungen via Qwen/DashScope. +Nutzt ThreadPoolExecutor für parallele API-Calls. 
+""" + +import argparse +import json +import os +import re +import sqlite3 +import sys +import time +from concurrent.futures import ThreadPoolExecutor, as_completed +from pathlib import Path +from threading import Lock + +import httpx + +PROJECT_ROOT = Path(__file__).resolve().parent.parent +DB_PATH = PROJECT_ROOT / "data" / "tracker_remote.db" +LOG_FILE = PROJECT_ROOT / "data" / "ki_parallel.log" +STATE_FILE = PROJECT_ROOT / "data" / "ki_parallel_state.json" + +# DashScope API +DASHSCOPE_URL = "https://dashscope-intl.aliyuncs.com/compatible-mode/v1/chat/completions" +DASHSCOPE_KEY = os.environ.get("QWEN_API_KEY") or os.popen("security find-generic-password -s qwen-api -w 2>/dev/null").read().strip() + +# Nominatim für Geocoding +NOMINATIM_URL = "https://nominatim.openstreetmap.org/search" +USER_AGENT = "Antragstracker-Hagen/1.0" +HAGEN_BBOX = "7.35,51.30,7.65,51.45" + +PROMPT_TEMPLATE = """Analysiere diesen kommunalpolitischen Antrag aus Hagen. + +DOKUMENT: +{volltext} + +--- + +Erstelle eine strukturierte Zusammenfassung im JSON-Format: + +{{ + "zusammenfassung": "2-3 Sätze, was gefordert wird", + "kernforderung": "Die zentrale Forderung in einem Satz", + "begruendung": "Warum wird das gefordert? (kurz)", + "thema": "Hauptthema (z.B. 
Verkehr, Soziales, Umwelt)", + "partei": "Antragstellende Fraktion falls erkennbar", + "orte": [ + {{ + "rohtext": "Die genaue Formulierung im Text", + "kontext": "Der Satz in dem der Ort erwähnt wird", + "typ": "strasse|platz|stadtteil|gebaeude|sonstiges", + "geocodierbar": true/false, + "geocode_query": "Suchbegriff für Karte" + }} + ] +}} + +NUR JSON ausgeben, keine Erklärungen.""" + +db_lock = Lock() +log_lock = Lock() +stats = {"success": 0, "failed": 0, "throttled": 0} + + +def log(msg: str): + timestamp = time.strftime("%H:%M:%S") + line = f"[{timestamp}] {msg}" + print(line) + with log_lock: + with open(LOG_FILE, "a") as f: + f.write(line + "\n") + + +def get_db(): + conn = sqlite3.connect(str(DB_PATH), check_same_thread=False) + conn.row_factory = sqlite3.Row + return conn + + +def call_qwen(prompt: str, max_retries: int = 5) -> dict | None: + """Ruft Qwen API auf mit schnellen, häufigen Retries.""" + for attempt in range(max_retries): + try: + resp = httpx.post( + DASHSCOPE_URL, + headers={ + "Authorization": f"Bearer {DASHSCOPE_KEY}", + "Content-Type": "application/json" + }, + json={ + "model": "qwen-plus-latest", + "messages": [{"role": "user", "content": prompt}], + "temperature": 0.1, + "max_tokens": 2000, + "response_format": {"type": "json_object"}, + }, + timeout=30 + ) + + if resp.status_code == 429: + wait = min(5, attempt + 1) + stats["throttled"] += 1 + time.sleep(wait) + continue + + resp.raise_for_status() + + content = resp.json()["choices"][0]["message"]["content"] + content = content.strip() + if content.startswith("```"): + content = re.sub(r'^```\w*\n?', '', content) + content = re.sub(r'\n?```$', '', content) + + return json.loads(content) + + except json.JSONDecodeError: + time.sleep(0.5) + except httpx.HTTPStatusError as e: + time.sleep(1) + except httpx.TimeoutException: + time.sleep(0.5) + except Exception as e: + time.sleep(0.5) + + log(f" ✗ 5 Retries fehlgeschlagen") + return None + + +def geocode_nominatim(query: str) -> tuple | 
None: + """Geocodiert einen Ort in Hagen.""" + try: + resp = httpx.get(NOMINATIM_URL, params={ + "q": f"{query}, Hagen", + "format": "json", + "limit": 1, + "viewbox": HAGEN_BBOX, + "bounded": 1, + }, headers={"User-Agent": USER_AGENT}, timeout=10) + + results = resp.json() + if results: + return (float(results[0]["lat"]), float(results[0]["lon"])) + except: + pass + return None + + +def process_vorlage(vorlage_id: int, aktenzeichen: str, volltext: str) -> bool: + """Verarbeitet eine einzelne Vorlage.""" + text = volltext[:30000] # Qwen Plus hat 128k Context, aber >30k bringt selten Mehrwert + prompt = PROMPT_TEMPLATE.format(volltext=text) + + result = call_qwen(prompt) + if not result: + stats["failed"] += 1 + return False + + # In DB speichern + with db_lock: + conn = get_db() + try: + conn.execute(""" + INSERT INTO ki_bewertungen + (vorlage_id, typ, begruendung, anmerkungen, modell, prompt_version) + VALUES (?, 'zusammenfassung', ?, ?, 'qwen-plus-latest', 'v2-parallel') + """, ( + vorlage_id, + result.get("zusammenfassung", ""), + json.dumps(result, ensure_ascii=False), + )) + + # Thema + Status in Vorlagen-Tabelle + conn.execute(""" + UPDATE vorlagen SET thema_kurz = ?, ki_status = 'done' WHERE id = ? 
+ """, (result.get("thema"), vorlage_id)) + + # Orte verarbeiten + geocoded = 0 + for ort in result.get("orte", []): + rohtext = ort.get("rohtext", "") + geocodierbar = ort.get("geocodierbar", False) + geocode_query = ort.get("geocode_query") + typ = ort.get("typ", "sonstiges") + kontext = ort.get("kontext", "") + + lat, lon = None, None + status = 'pending' if geocodierbar else 'skipped' + + # Geocoding wird separat gemacht (Nominatim 1req/s Limit) + + cursor = conn.execute(""" + INSERT INTO orte (name, typ, lat, lon, rohtext, kontext_satz, geocode_status, vorlage_count) + VALUES (?, ?, ?, ?, ?, ?, ?, 1) + """, (geocode_query or rohtext, typ, lat, lon, rohtext, kontext[:500], status)) + + conn.execute(""" + INSERT OR IGNORE INTO vorlagen_orte (vorlage_id, ort_id, kontext) + VALUES (?, ?, ?) + """, (vorlage_id, cursor.lastrowid, kontext[:500])) + + conn.commit() + conn.close() + + stats["success"] += 1 + orte_count = len(result.get("orte", [])) + log(f" ✓ {aktenzeichen}: {result.get('thema', '?')} ({orte_count} Orte, {geocoded} geocodiert)") + return True + + except Exception as e: + conn.close() + log(f" DB-Fehler {aktenzeichen}: {e}") + stats["failed"] += 1 + return False + + +def main(): + parser = argparse.ArgumentParser(description="Parallele KI-Zusammenfassungen") + parser.add_argument("--workers", type=int, default=5, help="Parallele API-Calls") + parser.add_argument("--batch-size", type=int, default=100, help="Batch-Größe") + args = parser.parse_args() + + log(f"=== Parallele KI-Zusammenfassung ===") + log(f"Workers: {args.workers}, Batch: {args.batch_size}") + + if not DASHSCOPE_KEY: + log("FEHLER: Kein API-Key!") + return 1 + + conn = get_db() + + # Zähle offene (nutze ki_status statt JOIN) + remaining = conn.execute(""" + SELECT COUNT(*) FROM vorlagen + WHERE volltext_clean IS NOT NULL AND volltext_clean != '' + AND ki_status IS NULL + """).fetchone()[0] + + log(f"Noch zu verarbeiten: {remaining}") + + if remaining == 0: + log("Alle fertig!") + 
conn.close() + return 0 + + # Batch holen + vorlagen = conn.execute(""" + SELECT id, aktenzeichen, volltext_clean + FROM vorlagen + WHERE volltext_clean IS NOT NULL AND volltext_clean != '' + AND ki_status IS NULL + ORDER BY datum_eingang DESC + LIMIT ? + """, (args.batch_size,)).fetchall() + + conn.close() + + log(f"Batch: {len(vorlagen)} Vorlagen\n") + + start_time = time.time() + + with ThreadPoolExecutor(max_workers=args.workers) as executor: + futures = {} + for v in vorlagen: + future = executor.submit( + process_vorlage, + v['id'], v['aktenzeichen'], v['volltext_clean'] + ) + futures[future] = v['aktenzeichen'] + + for future in as_completed(futures): + ak = futures[future] + try: + future.result() + except Exception as e: + log(f" ✗ {ak}: {e}") + + elapsed = time.time() - start_time + docs_per_sec = stats["success"] / max(elapsed, 1) + remaining_after = remaining - stats["success"] + + log(f"\n=== Batch fertig ===") + log(f"✓ {stats['success']} | ✗ {stats['failed']} | ⏳ {stats['throttled']} throttled") + log(f"Dauer: {elapsed:.0f}s | {docs_per_sec:.2f} docs/sec") + log(f"Verbleibend: {remaining_after}") + log(f"ETA: {remaining_after / max(docs_per_sec, 0.01) / 3600:.1f}h") + + return 0 if remaining_after == 0 else 1 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/scripts/ki_zusammenfassung.py b/scripts/ki_zusammenfassung.py new file mode 100644 index 0000000..b2308c4 --- /dev/null +++ b/scripts/ki_zusammenfassung.py @@ -0,0 +1,285 @@ +#!/usr/bin/env python3 +""" +KI-Zusammenfassung für Anträge via Qwen (DashScope). 
+""" + +import argparse +import json +import os +import sqlite3 +import time +from pathlib import Path + +import httpx + +# Nominatim für Geocoding +NOMINATIM_URL = "https://nominatim.openstreetmap.org/search" +USER_AGENT = "Antragstracker-Hagen/1.0" +HAGEN_BBOX = "7.35,51.30,7.65,51.45" + +PROJECT_ROOT = Path(__file__).resolve().parent.parent +DB_PATH = PROJECT_ROOT / "data" / "tracker_remote.db" + +# DashScope API +DASHSCOPE_URL = "https://dashscope-intl.aliyuncs.com/compatible-mode/v1/chat/completions" +DASHSCOPE_KEY = os.environ.get("QWEN_API_KEY") or os.popen("security find-generic-password -s qwen-api -w 2>/dev/null").read().strip() + +PROMPT_TEMPLATE = """Analysiere diesen kommunalpolitischen Antrag aus Hagen. + +DOKUMENT: +{volltext} + +--- + +Erstelle eine strukturierte Zusammenfassung im JSON-Format: + +{{ + "zusammenfassung": "2-3 Sätze, was gefordert wird", + "kernforderung": "Die zentrale Forderung in einem Satz", + "begruendung": "Warum wird das gefordert? (kurz)", + "thema": "Hauptthema (z.B. Verkehr, Soziales, Umwelt)", + "partei": "Antragstellende Fraktion falls erkennbar", + "orte": [ + {{ + "rohtext": "Die genaue Formulierung im Text (z.B. 'Polizeiwache an der Boeler Straße')", + "kontext": "Der Satz in dem der Ort erwähnt wird", + "typ": "strasse|platz|stadtteil|gebaeude|sonstiges", + "geocodierbar": true/false, + "geocode_query": "Suchbegriff für Karte (z.B. 
def call_qwen(prompt: str) -> dict | None:
    """Call the Qwen chat API (DashScope) and return the parsed JSON answer.

    Returns the model output parsed as a dict, or None on any failure
    (missing API key, HTTP error, or an unparseable response).
    """
    if not DASHSCOPE_KEY:
        print(" FEHLER: Kein QWEN_API_KEY gefunden")
        return None

    # Keep `content` bound so the JSONDecodeError handler below can always
    # slice it. Fixed: when resp.json() itself raised JSONDecodeError,
    # `content` was unbound and the handler crashed with a NameError.
    content = ""
    try:
        resp = httpx.post(
            DASHSCOPE_URL,
            headers={
                "Authorization": f"Bearer {DASHSCOPE_KEY}",
                "Content-Type": "application/json"
            },
            json={
                "model": "qwen-plus-latest",
                "messages": [{"role": "user", "content": prompt}],
                "temperature": 0.3
            },
            timeout=180
        )
        resp.raise_for_status()

        content = resp.json()["choices"][0]["message"]["content"]

        # Strip a Markdown code fence if the model wrapped its JSON in one.
        if "```json" in content:
            content = content.split("```json")[1].split("```")[0]
        elif "```" in content:
            content = content.split("```")[1].split("```")[0]

        return json.loads(content.strip())

    except json.JSONDecodeError as e:
        print(f" JSON-Parse-Fehler: {e}")
        print(f" Content: {content[:200]}...")
        return None
    except Exception as e:
        # Network errors, HTTP status errors, unexpected payload shape.
        print(f" API-Fehler: {e}")
        return None
def geocode_nominatim(query: str) -> tuple[float, float] | None:
    """Geocode a place inside Hagen via Nominatim.

    The search is restricted to the Hagen bounding box (`viewbox` +
    `bounded=1`). Returns (lat, lon), or None when nothing was found or
    the request failed.
    """
    try:
        resp = httpx.get(
            NOMINATIM_URL,
            params={"q": f"{query}, Hagen, Germany", "format": "json", "limit": 1,
                    "viewbox": HAGEN_BBOX, "bounded": 1},
            headers={"User-Agent": USER_AGENT},
            timeout=10
        )
        resp.raise_for_status()
        results = resp.json()
        if results:
            return (float(results[0]['lat']), float(results[0]['lon']))
    except Exception as e:
        # Fixed: the exception was bound to an unused name and silently
        # dropped. Geocoding stays best-effort (non-fatal), but failures
        # are at least visible in the output now.
        print(f"  Geocoding-Fehler für '{query}': {e}")
    return None
def process_ort(conn: sqlite3.Connection, vorlage_id: int, ort: dict) -> int:
    """Persist one extracted place and link it to a Vorlage.

    Reuses an existing `orte` row when the place was already seen (matched
    by canonical name or raw mention); otherwise inserts a new row, geocoding
    it via Nominatim when the model marked it geocodable.

    Returns:
        1 if the stored place has coordinates, else 0.
    """
    rohtext = ort.get('rohtext', '')
    kontext = ort.get('kontext', '')
    typ = ort.get('typ', 'sonstiges')
    geocodierbar = ort.get('geocodierbar', False)
    geocode_query = ort.get('geocode_query')

    if not rohtext:
        return 0

    # Deduplicate: match either the canonical name or the raw mention.
    existing = conn.execute(
        "SELECT id, lat FROM orte WHERE name = ? OR rohtext = ?",
        (geocode_query or rohtext, rohtext)
    ).fetchone()

    if existing:
        cur = conn.execute("""
            INSERT OR IGNORE INTO vorlagen_orte (vorlage_id, ort_id, kontext)
            VALUES (?, ?, ?)
        """, (vorlage_id, existing['id'], kontext[:500] if kontext else None))
        # Fixed: only bump the usage counter when a NEW link was actually
        # created; previously re-processing the same Vorlage inflated
        # vorlage_count on every run even though the INSERT was ignored.
        if cur.rowcount:
            conn.execute("UPDATE orte SET vorlage_count = vorlage_count + 1 WHERE id = ?", (existing['id'],))
        conn.commit()
        return 1 if existing['lat'] else 0

    # New place: try to geocode it first.
    lat, lon = None, None
    status = 'skipped'

    if geocodierbar and geocode_query:
        time.sleep(1.1)  # Nominatim usage policy: max ~1 request/second
        coords = geocode_nominatim(geocode_query)
        if coords:
            lat, lon = coords
            status = 'success'
        else:
            status = 'failed'

    cursor = conn.execute("""
        INSERT INTO orte (name, typ, lat, lon, rohtext, kontext_satz, geocode_status, vorlage_count)
        VALUES (?, ?, ?, ?, ?, ?, ?, 1)
    """, (geocode_query or rohtext, typ, lat, lon, rohtext, kontext[:500] if kontext else None, status))

    ort_id = cursor.lastrowid
    conn.execute("""
        INSERT OR IGNORE INTO vorlagen_orte (vorlage_id, ort_id, kontext)
        VALUES (?, ?, ?)
    """, (vorlage_id, ort_id, kontext[:500] if kontext else None))
    conn.commit()

    return 1 if lat else 0
def main():
    """Batch entry point: create KI summaries for Vorlagen lacking one.

    Exit code 0 means everything is processed, 1 means more batches remain
    (consumed by the shell batch runner).
    """
    parser = argparse.ArgumentParser(description="KI-Zusammenfassung für Anträge")
    parser.add_argument("--limit", type=int, default=100, help="Max. Anzahl pro Batch (default: 100)")
    parser.add_argument("--typ", type=str, default=None, help="Vorlagen-Typ Filter")
    args = parser.parse_args()

    print(f"=== KI-Zusammenfassung ===\n")

    conn = get_db()

    # Count how many Vorlagen still lack a summary.
    count_query = """
        SELECT COUNT(*)
        FROM vorlagen v
        LEFT JOIN ki_bewertungen kb ON v.id = kb.vorlage_id AND kb.typ = 'zusammenfassung'
        WHERE v.volltext_clean IS NOT NULL AND v.volltext_clean != ''
          AND kb.id IS NULL
    """
    remaining = conn.execute(count_query).fetchone()[0]
    print(f"Noch zu verarbeiten: {remaining}")

    if remaining == 0:
        print("Alle fertig!")
        conn.close()
        return 0  # Exit-Code 0 = fertig

    # Vorlagen with full text but no KI summary yet.
    # Fixed: --typ and --limit were spliced into the SQL via f-strings
    # (SQL injection through --typ); both are bound as parameters now.
    query = """
        SELECT v.id, v.aktenzeichen, v.volltext_clean
        FROM vorlagen v
        LEFT JOIN ki_bewertungen kb ON v.id = kb.vorlage_id AND kb.typ = 'zusammenfassung'
        WHERE v.volltext_clean IS NOT NULL AND v.volltext_clean != ''
          AND kb.id IS NULL
    """
    params = []
    if args.typ:
        query += " AND v.typ = ?"
        params.append(args.typ)
    query += " ORDER BY v.datum_eingang DESC LIMIT ?"
    params.append(args.limit)

    vorlagen = conn.execute(query, params).fetchall()
    print(f"Verarbeite {len(vorlagen)} Vorlagen (Batch)\n")

    success = 0
    for v in vorlagen:
        if process_vorlage(conn, dict(v)):
            success += 1

    conn.close()

    remaining_after = remaining - success
    print(f"\n=== Batch fertig: {success}/{len(vorlagen)} erfolgreich ===")
    print(f"Verbleibend: {remaining_after}")

    # Exit-Code: 0 = alles fertig, 1 = noch mehr zu tun
    return 0 if remaining_after == 0 else 1
def log(msg: str) -> None:
    """Print *msg* prefixed with a HH:MM:SS timestamp and append it to LOG_FILE."""
    stamped = "[{}] {}".format(time.strftime("%H:%M:%S"), msg)
    print(stamped)
    with open(LOG_FILE, "a") as handle:
        handle.write(stamped + "\n")
def ocr_pdf(url: str) -> tuple[str | None, str | None]:
    """Download a PDF and OCR every page via Apple Vision (ocrmac).

    Returns:
        (text, error) — exactly one of the two is None.
    """
    tmp_pdf_path = None
    try:
        # Download
        resp = httpx.get(url, timeout=60, follow_redirects=True)
        resp.raise_for_status()

        if len(resp.content) < 100:
            return None, "PDF zu klein"

        with tempfile.NamedTemporaryFile(suffix=".pdf", delete=False) as tmp_pdf:
            tmp_pdf.write(resp.content)
            tmp_pdf.flush()
            tmp_pdf_path = tmp_pdf.name

        doc = pymupdf.open(tmp_pdf_path)
        all_text = []
        try:
            for page_num, page in enumerate(doc):
                # Render at 200 dpi — scans OCR noticeably better than at
                # the default resolution.
                pix = page.get_pixmap(dpi=200)

                with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmp_img:
                    pix.save(tmp_img.name)

                try:
                    # Apple Vision OCR; each result tuple starts with the text.
                    results = ocrmac.OCR(tmp_img.name).recognize()
                    page_text = " ".join([r[0] for r in results])
                    all_text.append(page_text)
                except Exception as e:
                    log(f"  OCR-Fehler Seite {page_num}: {e}")
                finally:
                    Path(tmp_img.name).unlink(missing_ok=True)
        finally:
            doc.close()

        text = "\n\n".join(all_text).strip()

        if len(text) < 50:
            return None, "Kein Text erkannt"

        return text, None

    except Exception as e:
        return None, str(e)[:100]
    finally:
        # Fixed: the downloaded temp PDF leaked whenever any step above
        # raised (e.g. a corrupt PDF failing in pymupdf.open); clean it up
        # on every exit path.
        if tmp_pdf_path:
            Path(tmp_pdf_path).unlink(missing_ok=True)
def get_memory_percent():
    """Memory utilisation in percent, parsed from macOS `vm_stat` output.

    Computes used/total from page counts (active + wired vs. all pages).
    Returns 0 when the output could not be parsed.
    """
    result = subprocess.run(["vm_stat"], capture_output=True, text=True)
    stats = {}
    for line in result.stdout.split("\n"):
        if ":" in line:
            # partition() tolerates extra colons, unlike the previous
            # two-value unpack of split(":").
            key, _, val = line.partition(":")
            val = val.strip().rstrip(".")
            try:
                stats[key.strip()] = int(val)
            except ValueError:
                # Fixed: a bare `except` also swallowed KeyboardInterrupt/
                # SystemExit; only non-numeric lines (the vm_stat header)
                # should be skipped.
                pass

    # NOTE: the percentage is a ratio of page counts, so the absolute page
    # size cancels out — the old hard-coded `page_size = 16384` was unused
    # and has been removed.
    pages_free = stats.get("Pages free", 0)
    pages_active = stats.get("Pages active", 0)
    pages_inactive = stats.get("Pages inactive", 0)
    pages_wired = stats.get("Pages wired down", 0)

    total_pages = pages_free + pages_active + pages_inactive + pages_wired
    used_pages = pages_active + pages_wired

    if total_pages > 0:
        return (used_pages / total_pages) * 100
    return 0
def get_load_avg():
    """Return the 1-minute load average.

    Uses os.getloadavg(), which reads the same value the old
    `sysctl -n vm.loadavg` subprocess reported, without spawning a process
    or hand-parsing the "{ 1.23 2.34 3.45 }" string. Returns 0 when the
    platform cannot provide load averages.
    """
    import os  # local import: this module's header does not import os

    try:
        return os.getloadavg()[0]
    except OSError:
        # Load average unobtainable — mirror the old parse-failure fallback.
        return 0
#!/bin/bash
# Läuft Beratungsfolge-Scraper in Tranchen bis alles fertig ist
#
# Termination is decided by an inline SQL count, not the scraper's exit code.

# Fixed: guard the cd — running from elsewhere would otherwise activate the
# wrong venv and operate on the wrong relative data/ paths.
cd "$(dirname "$0")/.." || exit 1
source .venv/bin/activate

LOG_FILE="data/beratungen_batches.log"
BATCH_SIZE=200
PAUSE_SECONDS=10

echo "=== Beratungsfolge-Batch-Runner gestartet $(date) ===" | tee -a "$LOG_FILE"

while true; do
    echo "" | tee -a "$LOG_FILE"
    echo "--- Starte Batch $(date +%H:%M:%S) ---" | tee -a "$LOG_FILE"

    python scripts/scrape_beratungsfolge.py --limit $BATCH_SIZE 2>&1 | tee -a "$LOG_FILE"

    # Prüfe ob noch Vorlagen ohne Beschlusstext übrig sind
    REMAINING=$(python -c "
import sqlite3
conn = sqlite3.connect('data/tracker_remote.db')
r = conn.execute('''
    SELECT COUNT(DISTINCT v.id)
    FROM vorlagen v
    LEFT JOIN beratungen b ON v.id = b.vorlage_id AND b.beschlusstext IS NOT NULL
    WHERE v.web_url IS NOT NULL AND b.id IS NULL
''').fetchone()[0]
print(r)
")

    # Fixed: if the inline query fails, REMAINING is empty and the numeric
    # test below would error out; fall back to a non-zero sentinel so the
    # runner keeps going instead of misbehaving.
    REMAINING=${REMAINING:--1}

    echo "Verbleibend: $REMAINING" | tee -a "$LOG_FILE"

    if [ "$REMAINING" -eq 0 ]; then
        echo "" | tee -a "$LOG_FILE"
        echo "=== ALLE FERTIG $(date) ===" | tee -a "$LOG_FILE"
        break
    fi

    echo "Pause ${PAUSE_SECONDS}s..." | tee -a "$LOG_FILE"
    sleep $PAUSE_SECONDS
done

echo "Runner beendet." | tee -a "$LOG_FILE"
#!/bin/bash
# Parallele KI-Zusammenfassungen in Batches bis alles fertig
#
# Re-invokes ki_parallel.py until it exits 0 (= nothing left to process).

# Fixed: guard the cd — without it a failed cd would activate the wrong venv
# and write logs/DB files relative to the caller's directory.
cd "$(dirname "$0")/.." || exit 1
source .venv/bin/activate

LOG_FILE="data/ki_parallel_batches.log"
WORKERS=15
BATCH_SIZE=100
PAUSE_SECONDS=5

echo "=== KI-Parallel-Runner gestartet $(date) ===" | tee -a "$LOG_FILE"
echo "Workers: $WORKERS, Batch: $BATCH_SIZE" | tee -a "$LOG_FILE"

while true; do
    echo "" | tee -a "$LOG_FILE"
    echo "--- Starte Batch $(date +%H:%M:%S) ---" | tee -a "$LOG_FILE"

    python scripts/ki_parallel.py --workers $WORKERS --batch-size $BATCH_SIZE 2>&1 | tee -a "$LOG_FILE"
    # PIPESTATUS[0] is the python exit code, not tee's.
    EXIT_CODE=${PIPESTATUS[0]}

    if [ $EXIT_CODE -eq 0 ]; then
        echo "" | tee -a "$LOG_FILE"
        echo "=== ALLE FERTIG $(date) ===" | tee -a "$LOG_FILE"
        break
    fi

    echo "Pause ${PAUSE_SECONDS}s..." | tee -a "$LOG_FILE"
    sleep $PAUSE_SECONDS
done

echo "Runner beendet." | tee -a "$LOG_FILE"
def fetch_to020_page(client: httpx.Client, tolfdnr: int) -> str | None:
    """Fetch the to020 page (agenda-item detail) for a TOLFDNR.

    Returns the raw HTML, or None when the request or status check fails.
    """
    url = f"https://allris.hagen.de/public/to020?TOLFDNR={tolfdnr}"
    try:
        response = client.get(url, timeout=30, follow_redirects=True)
        response.raise_for_status()
    except Exception as e:
        print(f" Fehler beim Laden von {url}: {e}")
        return None
    return response.text
def extract_abstimmung_text(html: str) -> tuple[str | None, str | None]:
    """Extract the voting-result block from a to020 page.

    Returns:
        (ergebnis_kurz, volltext) — the short decision label (e.g.
        "ungeändert beschlossen") and the full result text; either may
        be None when not found.
    """
    soup = BeautifulSoup(html, 'html.parser')

    # Short form, e.g. "ungeändert beschlossen".
    kurz = None
    beschluss = soup.find('span', id='toBeschlussart')
    if beschluss is not None:
        kurz = beschluss.get_text(strip=True)

    def _first_docpart(matches) -> str | None:
        """Return the first docPart div whose text satisfies *matches*."""
        for part in soup.find_all('div', class_='docPart'):
            body = part.get_text()
            if matches(body):
                return body.strip()
        return None

    # Primary: a block explicitly carrying the result.
    volltext = _first_docpart(
        lambda t: 'Abstimmungsergebnis' in t
        or 'einstimmig' in t.lower()
        or 'Enthaltung' in t
    )

    # Fallback: look for voting vocabulary inside the verbatim minutes.
    if not volltext:
        keywords = ['abstimmung', 'ja-stimm', 'nein-stimm',
                    'enthält sich', 'mehrheitlich', 'einstimmig']
        volltext = _first_docpart(
            lambda t: any(kw in t.lower() for kw in keywords)
        )

    return kurz, volltext
def get_tolfdnrs_from_vorlage(client: httpx.Client, volfdnr: int) -> list[tuple[int, str, str]]:
    """Collect the TOLFDNRs referenced on a Vorlage's vo020 page.

    Returns:
        List of (tolfdnr, datum, gremium_name) tuples in document order;
        datum/gremium are '' when they could not be extracted from the row.
    """
    url = f"https://allris.hagen.de/public/vo020?VOLFDNR={volfdnr}"
    try:
        resp = client.get(url, timeout=30, follow_redirects=True)
        resp.raise_for_status()
        html = resp.text

        results = []
        soup = BeautifulSoup(html, 'html.parser')

        # Every to020 link is one agenda item referencing this Vorlage.
        for link in soup.find_all('a', href=re.compile(r'TOLFDNR=\d+')):
            match = re.search(r'TOLFDNR=(\d+)', link.get('href', ''))
            if match:
                tolfdnr = int(match.group(1))
                # Best effort: pull date and committee name from the cells
                # of the surrounding table row.
                row = link.find_parent('tr')
                datum = ''
                gremium = ''
                if row:
                    cells = row.find_all('td')
                    for cell in cells:
                        text = cell.get_text(strip=True)
                        if re.match(r'\d{2}\.\d{2}\.\d{4}', text):
                            datum = text
                        elif 'Sitzung' in text or 'Rat' in text or 'Ausschuss' in text:
                            gremium = text
                results.append((tolfdnr, datum, gremium))

        # Fixed: list(set(...)) deduplicated but randomized the order between
        # runs; dict.fromkeys removes duplicates while preserving document
        # order, making processing deterministic.
        return list(dict.fromkeys(results))
    except Exception as e:
        print(f"  Fehler beim Laden von vo020: {e}")
        return []
def process_vorlage(conn: sqlite3.Connection, client: httpx.Client, vorlage: dict) -> int:
    """Process every agenda item (TOLFDNR) linked to one Vorlage.

    Determines the Vorlage's VOLFDNR (from the row, or by parsing it out of
    web_url), fetches all TOLFDNRs from the vo020 page, and scrapes each one
    that has no stored vote yet.

    Returns:
        Number of successfully extracted Abstimmungen.
    """
    vid = vorlage['id']
    volfdnr = vorlage.get('volfdnr')

    if not volfdnr:
        # Extract VOLFDNR from web_url — the caller's SELECT does not
        # include a volfdnr column, so this fallback is the common path.
        web_url = vorlage.get('web_url', '')
        match = re.search(r'VOLFDNR=(\d+)', web_url)
        if match:
            volfdnr = int(match.group(1))
        else:
            return 0

    # Fetch all TOLFDNRs listed on the Vorlage's vo020 page.
    tolfdnrs = get_tolfdnrs_from_vorlage(client, volfdnr)
    if not tolfdnrs:
        return 0

    success = 0
    for tolfdnr, datum, gremium in tolfdnrs:
        # Skip already-processed items: the TOLFDNR is stored inside the
        # ergebnis_detail JSON blob, hence the LIKE substring match.
        existing = conn.execute(
            "SELECT id FROM abstimmungen WHERE vorlage_id = ? AND ergebnis_detail LIKE ?",
            (vid, f'%{tolfdnr}%')
        ).fetchone()
        if existing:
            continue

        if process_tolfdnr(conn, client, vid, tolfdnr, datum, gremium):
            success += 1
        # Polite crawl delay between agenda-item requests.
        time.sleep(0.3)

    return success
def process_tolfdnr(conn: sqlite3.Connection, client: httpx.Client,
                    vorlage_id: int, tolfdnr: int, datum: str, gremium: str) -> bool:
    """Scrape one agenda item (to020) and store its voting result.

    Fetches the page, extracts the result block, lets the LLM break it down
    per Fraktion, and writes `abstimmungen` plus `abstimmungen_fraktionen`
    rows. Returns True when an Abstimmung row was written.
    """

    # Load the agenda-item page.
    html = fetch_to020_page(client, tolfdnr)
    if not html:
        return False

    # Extract the voting-result block.
    ergebnis_kurz, volltext = extract_abstimmung_text(html)

    if not ergebnis_kurz and not volltext:
        return False

    # LLM analysis when a usable full text exists.
    fraktionen_data = []
    if volltext and len(volltext) > 20:
        prompt = EXTRACTION_PROMPT.format(text=volltext[:3000])
        result = call_qwen_turbo(prompt)
        if result and 'fraktionen' in result:
            fraktionen_data = result['fraktionen']
            if result.get('ergebnis'):
                ergebnis_kurz = result['ergebnis']

    # Resolve the Gremium via fuzzy name match (first 20 chars) — the
    # scraped label rarely equals the stored name exactly.
    gremium_id = None
    if gremium:
        row = conn.execute("SELECT id FROM gremien WHERE name LIKE ?", (f"%{gremium[:20]}%",)).fetchone()
        if row:
            gremium_id = row['id']

    # Parse German date DD.MM.YYYY -> ISO YYYY-MM-DD.
    # Fixed: the old try/except with a bare `except: pass` wrapped code that
    # cannot raise (split/len/f-string); the dead handler is gone.
    sitzung_datum = None
    if datum:
        parts = datum.split('.')
        if len(parts) == 3:
            sitzung_datum = f"{parts[2]}-{parts[1]}-{parts[0]}"

    # Persist; the TOLFDNR rides along inside ergebnis_detail so the caller
    # can detect already-processed items via LIKE.
    detail_json = json.dumps({
        "tolfdnr": tolfdnr,
        "fraktionen": fraktionen_data
    }, ensure_ascii=False) if fraktionen_data else json.dumps({"tolfdnr": tolfdnr})

    cursor = conn.execute("""
        INSERT INTO abstimmungen (beratung_id, vorlage_id, gremium_id, sitzung_datum, ergebnis, ergebnis_detail, volltext)
        VALUES (?, ?, ?, ?, ?, ?, ?)
    """, (None, vorlage_id, gremium_id, sitzung_datum, ergebnis_kurz, detail_json, volltext))

    abstimmung_id = cursor.lastrowid

    # Per-Fraktion voting behaviour.
    for f in fraktionen_data:
        conn.execute("""
            INSERT INTO abstimmungen_fraktionen (abstimmung_id, fraktion, stimme, anzahl, bemerkung)
            VALUES (?, ?, ?, ?, ?)
        """, (abstimmung_id, f.get('name'), f.get('stimme'), f.get('anzahl'), f.get('bemerkung')))

    conn.commit()

    print(f"  ✓ TOLFDNR {tolfdnr}: {ergebnis_kurz or 'OK'} ({len(fraktionen_data)} Fraktionen)")
    return True
def main():
    """CLI entry point: scrape voting results for Vorlagen lacking them."""
    parser = argparse.ArgumentParser(description="ALLRIS Abstimmungs-Scraper")
    parser.add_argument("--limit", type=int, default=10, help="Max. Anzahl Vorlagen")
    parser.add_argument("--typ", type=str, default="antrag", help="Vorlagen-Typ (antrag/anfrage)")
    parser.add_argument("--vorlage", type=int, help="Nur bestimmte Vorlage-ID")
    args = parser.parse_args()

    print(f"=== ALLRIS Abstimmungs-Scraper ===\n")

    conn = get_db()
    client = httpx.Client()

    # Vorlagen with a web_url and no stored Abstimmung yet.
    query = """
        SELECT v.id, v.aktenzeichen, v.web_url
        FROM vorlagen v
        LEFT JOIN abstimmungen a ON v.id = a.vorlage_id
        WHERE v.web_url IS NOT NULL
          AND a.id IS NULL
    """
    params = []

    if args.typ:
        query += " AND v.typ = ?"
        params.append(args.typ)

    if args.vorlage:
        query += " AND v.id = ?"
        params.append(args.vorlage)

    # Fixed: LIMIT was spliced in via an f-string while every other filter
    # was parameterized; bind it like the rest.
    query += " ORDER BY v.datum_eingang DESC LIMIT ?"
    params.append(args.limit)

    vorlagen = conn.execute(query, params).fetchall()
    print(f"Verarbeite {len(vorlagen)} Vorlagen\n")

    total_success = 0
    for v in vorlagen:
        vd = dict(v)
        print(f"Vorlage {vd.get('aktenzeichen', vd['id'])}...")
        success = process_vorlage(conn, client, vd)
        total_success += success
        if success == 0:
            print(f"  (keine Abstimmungsdaten)")
        time.sleep(0.5)

    client.close()
    conn.close()

    print(f"\n=== Fertig: {total_success} Abstimmungen extrahiert ===")
def scrape_vorlage_page(url: str) -> list[dict]:
    """Scrape the Beratungsfolge (consultation sequence) from a Vorlage page.

    Walks all anchors on the page; each to020 link is one agenda item
    (decision), and the nearest preceding to010 link identifies the session
    it belongs to. Returns a list of dicts with keys tolfdnr, beschlussart,
    sitzung_name, sitzung_url, to_url; [] on any error.
    """
    try:
        resp = httpx.get(url, timeout=30, follow_redirects=True)
        resp.raise_for_status()
        soup = BeautifulSoup(resp.text, 'html.parser')

        beratungen = []

        # The Beratungsfolge table links to to020 (agenda item) and
        # to010 (session) pages.
        for link in soup.find_all('a', href=True):
            href = link['href']

            # to020 links carry the decision for one agenda item.
            if 'to020' in href and 'TOLFDNR=' in href:
                tolfdnr_match = re.search(r'TOLFDNR=(\d+)', href)
                if tolfdnr_match:
                    tolfdnr = tolfdnr_match.group(1)
                    beschlussart = link.get_text(strip=True)

                    # Session info comes from the preceding session link.
                    sitzung_name = None
                    sitzung_url = None

                    # find_previous relies on document order: the session
                    # (to010) link precedes its agenda items in the table.
                    prev = link.find_previous('a', href=re.compile(r'to010.*SILFDNR='))
                    if prev:
                        sitzung_name = prev.get_text(strip=True)
                        sitzung_url = prev['href']
                        if not sitzung_url.startswith('http'):
                            sitzung_url = 'https://allris.hagen.de' + sitzung_url

                    # Normalise relative hrefs to absolute URLs.
                    to_url = href if href.startswith('http') else 'https://allris.hagen.de' + href

                    beratungen.append({
                        'tolfdnr': tolfdnr,
                        'beschlussart': beschlussart,
                        'sitzung_name': sitzung_name,
                        'sitzung_url': sitzung_url,
                        'to_url': to_url,
                    })

        return beratungen

    except Exception as e:
        log(f"  Fehler beim Scrapen: {e}")
        return []
def scrape_to_page(url: str) -> dict:
    """Scrape decision text and verbatim minutes from a TO (agenda item) page.

    Returns:
        Dict with keys 'beschlusstext', 'wortprotokoll', 'sitzung_datum'
        (each possibly None), or {} when the request fails.
    """
    try:
        resp = httpx.get(url, timeout=30, follow_redirects=True)
        resp.raise_for_status()
        soup = BeautifulSoup(resp.text, 'html.parser')

        result = {
            'beschlusstext': None,
            'wortprotokoll': None,
            'sitzung_datum': None,
        }

        # Session date comes from the page title,
        # e.g. "30.01.2025 - 6.4 Mündlicher...".
        title = soup.find('h1', class_='title')
        if title:
            date_match = re.search(r'(\d{2}\.\d{2}\.\d{4})', title.get_text())
            if date_match:
                result['sitzung_datum'] = date_match.group(1)

        # Decision text and minutes live in Arial-styled span tags —
        # a layout detail of the ALLRIS markup, not a semantic marker.
        text_spans = soup.find_all('span', style=re.compile(r'font-family.*Arial'))

        texts = [s.get_text(strip=True) for s in text_spans if s.get_text(strip=True)]

        if texts:
            # Heuristic: the LAST span usually holds the decision —
            # TODO(review): confirm against more page layouts.
            result['beschlusstext'] = texts[-1] if len(texts) > 0 else None
            # Everything before it is treated as the verbatim minutes.
            if len(texts) > 1:
                result['wortprotokoll'] = '\n\n'.join(texts[:-1])

        return result

    except Exception as e:
        log(f"  TO-Fehler: {e}")
        return {}
def process_vorlage(conn, vorlage: dict) -> int:
    """Scrape and persist the Beratungsfolge for one Vorlage.

    For each agenda item found on the Vorlage page, scrapes the TO detail
    page and upserts a `beratungen` row. Returns the number of rows written.
    """
    vorlage_id = vorlage['id']
    web_url = vorlage['web_url']

    if not web_url:
        return 0

    # Scrape the consultation sequence from the Vorlage page.
    beratungen = scrape_vorlage_page(web_url)

    if not beratungen:
        return 0

    saved = 0
    for b in beratungen:
        time.sleep(DELAY_SECONDS)

        # Scrape the TO detail page for decision text / minutes.
        to_details = scrape_to_page(b['to_url'])

        try:
            # Update an existing row first, insert when none matched.
            cur = conn.execute("""
                UPDATE beratungen
                SET to_url = ?, tolfdnr = ?, beschlussart = ?,
                    beschlusstext = ?, wortprotokoll = ?, scraped_at = CURRENT_TIMESTAMP
                WHERE vorlage_id = ? AND (tolfdnr = ? OR tolfdnr IS NULL)
            """, (
                b['to_url'],
                b['tolfdnr'],
                b['beschlussart'],
                to_details.get('beschlusstext'),
                to_details.get('wortprotokoll'),
                vorlage_id,
                b['tolfdnr'],
            ))

            # Fixed: this previously tested conn.total_changes, which is
            # CUMULATIVE over the connection's lifetime — after the first
            # successful write anywhere, the INSERT fallback never ran
            # again. cursor.rowcount reports this statement only.
            if cur.rowcount == 0:
                conn.execute("""
                    INSERT INTO beratungen
                    (vorlage_id, to_url, tolfdnr, beschlussart, beschlusstext, wortprotokoll, scraped_at)
                    VALUES (?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
                """, (
                    vorlage_id,
                    b['to_url'],
                    b['tolfdnr'],
                    b['beschlussart'],
                    to_details.get('beschlusstext'),
                    to_details.get('wortprotokoll'),
                ))

            conn.commit()
            saved += 1
        except Exception as e:
            log(f"  DB-Fehler: {e}")

    return saved
def main():
    """CLI entry point: scrape the Beratungsfolge for unscraped Vorlagen.

    Exit code 0 when nothing remains, 1 when more Vorlagen still need
    scraping (usable by the shell batch runner).
    """
    parser = argparse.ArgumentParser(description="Beratungsfolge scrapen")
    parser.add_argument("--limit", type=int, default=50, help="Max. Vorlagen")
    args = parser.parse_args()

    log(f"=== Beratungsfolge-Scraper ===")
    log(f"Limit: {args.limit}")

    conn = get_db()
    init_tables(conn)

    # Vorlagen with a web_url that were never scraped (beratung_status flag).
    vorlagen = conn.execute("""
        SELECT id, aktenzeichen, web_url
        FROM vorlagen
        WHERE web_url IS NOT NULL
          AND beratung_status IS NULL
        ORDER BY datum_eingang DESC
        LIMIT ?
    """, (args.limit,)).fetchall()

    log(f"Zu verarbeiten: {len(vorlagen)}")

    total_beratungen = 0
    for i, v in enumerate(vorlagen):
        log(f"[{i+1}/{len(vorlagen)}] {v['aktenzeichen']}...")
        time.sleep(DELAY_SECONDS)

        count = process_vorlage(conn, dict(v))
        total_beratungen += count

        # Mark as done even when no Beratungsfolge was found, so the next
        # batch does not retry the same Vorlage forever.
        conn.execute("UPDATE vorlagen SET beratung_status = 'done' WHERE id = ?", (v['id'],))
        conn.commit()

        if count > 0:
            log(f"  ✓ {count} Beratungen")
        else:
            log(f"  - Keine Beratungsfolge")

    # Fixed: the remaining-count query was dead code (guarded by
    # `if False`) and would have run AFTER conn.close(); compute it while
    # the connection is open and return a meaningful exit code.
    remaining = conn.execute("""
        SELECT COUNT(*) FROM vorlagen
        WHERE web_url IS NOT NULL
          AND beratung_status IS NULL
    """).fetchone()[0]

    conn.close()

    log(f"\n=== Fertig ===")
    log(f"Beratungen gespeichert: {total_beratungen}")

    # Exit code for the batch runner: 0 = done, 1 = more to do.
    return 0 if remaining == 0 else 1