Spaces:
Runtime error
Runtime error
File size: 4,727 Bytes
"""Finance assistant: LLM-backed expense parsing and ledger actions."""
import json
import re
import logging
from datetime import datetime
from typing import Generator
from huggingface_hub import InferenceClient
from ledger import Ledger
# Module-level logger named after this module, per stdlib logging convention.
logger = logging.getLogger(__name__)
# Hugging Face model id used by both the streaming and batch inference calls.
MODEL = "openai/gpt-oss-20b"
# System prompt: instructs the model to embed machine-readable ```json action
# blocks (later extracted by `_parse_action`) alongside its conversational
# reply. NOTE(review): the original text contained a mojibake "β" where an
# em-dash belongs; restored here.
SYSTEM = """\
You are a personal finance assistant. Help the user log expenses, query spending summaries, and manage their ledger.
When the user describes an expense, extract it and include a JSON action block in your response:
```json
{"action": "add", "date": "YYYY-MM-DD", "description": "...", "category": "Food|Transport|Utilities|Entertainment|Health|Shopping|Rent|Other", "amount": 0.00}
```
When the user wants to undo or delete the last entry:
```json
{"action": "delete_last"}
```
Use today's date if none is given. Keep replies brief and friendly.
If the user asks about their spending, use the ledger context below to answer accurately.
If no ledger action is needed, just respond conversationally — no JSON block."""
# ── context & parsing ─────────────────────────────────────────────────────────
def _ledger_context(ledger: Ledger) -> str:
if ledger.df.empty:
return "Ledger is empty."
total = ledger.total()
by_cat = ledger.by_category()
cat_str = " | ".join(
f"{k} ${v:.2f}" for k, v in sorted(by_cat.items(), key=lambda x: -x[1])
)
recent = ledger.recent(5).to_string(index=False)
return f"Total: ${total:.2f} | {cat_str}\nRecent entries:\n{recent}"
def _parse_action(text: str) -> dict | None:
m = re.search(r"```json\s*(\{.*?\})\s*```", text, re.DOTALL)
if m:
try:
return json.loads(m.group(1))
except json.JSONDecodeError:
pass
return None
def _clean(text: str) -> str:
"""Strip JSON action blocks from visible reply."""
return re.sub(r"```json.*?```", "", text, flags=re.DOTALL).strip()
def _build_messages(message: str, history: list[dict], ledger: Ledger) -> list[dict]:
    """Assemble the chat payload: system prompt plus a live ledger snapshot,
    followed by prior turns and the new user message."""
    system_prompt = f"{SYSTEM}\n\nCurrent ledger:\n{_ledger_context(ledger)}"
    payload = [{"role": "system", "content": system_prompt}]
    payload.extend(history)
    payload.append({"role": "user", "content": message})
    return payload
# ── actions ───────────────────────────────────────────────────────────────────
def execute(action: dict, ledger: Ledger, fallback_desc: str = "") -> str:
    """Run a parsed action against the ledger. Returns a confirmation string.

    Args:
        action: Action dict produced by `_parse_action` (LLM output).
        ledger: Ledger instance to mutate.
        fallback_desc: Description used when the model omitted one.

    Returns:
        A user-facing confirmation message, or "" for unrecognized actions.
    """
    kind = action.get("action")
    if kind == "add":
        # `amount` comes from model output and may be malformed — fail soft
        # instead of raising into the caller.
        try:
            amount = float(action.get("amount", 0))
        except (TypeError, ValueError):
            return "❌ Failed to save entry."
        ok = ledger.add(
            date=action.get("date", datetime.now().strftime("%Y-%m-%d")),
            description=action.get("description", fallback_desc),
            category=action.get("category", "Other"),
            amount=amount,
        )
        if ok:
            # NOTE(review): original confirmation string was mojibake-garbled
            # and split mid-literal (a syntax error as displayed); restored to
            # a single well-formed f-string.
            return f"✅ Logged **{action.get('category')}** — ${amount:.2f}"
        return "❌ Failed to save entry."
    if kind == "delete_last":
        return "🗑️ Last entry removed." if ledger.delete_last() else "Nothing to delete."
    return ""
# ── inference ─────────────────────────────────────────────────────────────────
def stream_response(
    message: str, history: list[dict], ledger: Ledger, token: str
) -> Generator[tuple[str, dict | None], None, None]:
    """
    Yields (partial_reply, action) tuples.
    action is None on all intermediate yields; populated only on the final yield.
    """
    client = InferenceClient(token=token, model=MODEL)
    payload = _build_messages(message, history, ledger)
    reply = ""
    stream = client.chat_completion(payload, max_tokens=512, stream=True, temperature=0.2)
    for chunk in stream:
        choices = chunk.choices
        if choices and choices[0].delta.content:
            reply += choices[0].delta.content
            # Intermediate yield: text only, action not yet parsed.
            yield _clean(reply), None
    # Final yield carries the parsed ledger action, if the model emitted one.
    yield _clean(reply), _parse_action(reply)
def batch_response(
    message: str, history: list[dict], ledger: Ledger, token: str
) -> tuple[str, dict | None]:
    """Synchronous single-call variant used by the Telegram bot."""
    client = InferenceClient(token=token, model=MODEL)
    payload = _build_messages(message, history, ledger)
    completion = client.chat_completion(payload, max_tokens=512, temperature=0.2)
    reply = completion.choices[0].message.content
    return _clean(reply), _parse_action(reply)
|