File size: 2,344 Bytes
fea1bd1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
# scripts/ping_ollama.py
# -*- coding: utf-8 -*-

# --- sys.path bootstrap: make the repo root importable (so `main_agent` resolves) ---
import sys, pathlib
ROOT = pathlib.Path(__file__).resolve().parents[1]
_root = str(ROOT)  # stringify once; sys.path holds strings
if _root not in sys.path:
    sys.path.insert(0, _root)
# -------------------------------------------------------------------------

import os, asyncio, json
import httpx
from main_agent import agent

async def get_installed_models(base_url: str) -> list[str]:
    """Query the Ollama server and return the installed model names (name:tag).

    Best-effort: any failure (connection error, HTTP error, bad JSON) yields
    an empty list, so callers treat "no server" the same as "no models".
    """
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get(f"{base_url}/api/tags")
            resp.raise_for_status()
            payload = resp.json() or {}
            names: list[str] = []
            for entry in payload.get("models", []):
                name = entry.get("name")
                if name:
                    names.append(name)
            return names
    except Exception:
        return []

async def main():
    """Send a minimal "ping" generation request to Ollama and print the outcome.

    Model resolution order:
      1. ``OLLAMA_TEST_MODEL`` env var, when set;
      2. otherwise the first model the server reports as installed.
    When no model is available, prints pull/setup instructions and returns.
    """
    base = os.environ.get("OLLAMA_BASE_URL", "http://127.0.0.1:11434")
    model = os.environ.get("OLLAMA_TEST_MODEL")

    if not model:
        models = await get_installed_models(base)
        if not models:
            print(
                "Nenhum modelo instalado no Ollama.\n"
                "Rode:\n"
                "  ollama list\n"
                "  ollama pull llama3.1:8b   (ou)\n"
                "  ollama pull qwen2.5:7b\n"
                "Depois defina OLLAMA_TEST_MODEL=<nome exato que aparece em 'ollama list'> e rode novamente."
            )
            return
        model = models[0]
        print(f"[auto] Usando modelo instalado: {model}")

    payload = {"model": model, "prompt": "ping", "stream": False}

    try:
        out = await agent._post_ollama(base, payload, timeout=20)
        print("OK:", (out or "")[:240], "...")
    except httpx.HTTPStatusError as e:
        # Show the error body returned by Ollama ('model not found', etc.).
        body = e.response.text if e.response is not None else ""
        code = e.response.status_code if e.response is not None else "N/A"
        print(f"HTTPStatusError {code}: {e}\nBody: {body[:500]}")
    except httpx.RequestError as e:
        # Fix: connection/timeout failures (the most common case for a ping
        # script) previously escaped as a raw traceback; report them as a
        # diagnostic message instead.
        print(f"Falha de conexão com o Ollama em {base}: {e}")
    finally:
        # Close the agent's HTTP client explicitly (outside the Chainlit lifecycle).
        if getattr(agent, "http", None):
            try:
                await agent.http.aclose()
            except Exception:
                pass

if __name__ == "__main__":
    # Entry point: run the async ping once when executed as a script.
    asyncio.run(main())