# gopuOS — d_app.py (renamed from app.py; commit a9cbf35, author Mauricio-100)
import gradio as gr
import requests, secrets, sqlite3, os
from fastapi import FastAPI
from pydantic import BaseModel
# --- Config ---
API_URL = "https://Mauricio-100-agent-ai.hf.space/infer"
DB_PATH = "tokens.db"

# --- SQLite init ---
def init_db():
    """Ensure the api_tokens table exists in the local SQLite database.

    Safe to call repeatedly (CREATE TABLE IF NOT EXISTS); always closes
    the connection, even if the DDL fails.
    """
    ddl = """
    CREATE TABLE IF NOT EXISTS api_tokens (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    token TEXT UNIQUE,
    description TEXT
    )
    """
    conn = sqlite3.connect(DB_PATH)
    try:
        conn.execute(ddl)
        conn.commit()
    finally:
        conn.close()

init_db()
# --- FastAPI (optional: also exposes a backend API alongside the Gradio UI) ---
app = FastAPI()

class Input(BaseModel):
    """Request payload shared by the inference and token-validation endpoints."""

    input: str  # user prompt text (required)
    system_prompt: str | None = None  # optional system instruction for the model
    temperature: float = 0.7  # sampling temperature
    max_new_tokens: int = 200  # generation length cap
    top_p: float = 0.9  # nucleus-sampling threshold
    api_token: str | None = None  # token checked against the local SQLite store
@app.post("/validate_token")
def validate_token(data: Input):
    """Report whether ``data.api_token`` exists in the api_tokens table.

    Returns:
        dict: ``{"valid": bool}`` — True iff the token is stored.
    """
    conn = sqlite3.connect(DB_PATH)
    try:
        # Parameterized query — never interpolate the token into the SQL string.
        row = conn.execute(
            "SELECT token FROM api_tokens WHERE token=?", (data.api_token,)
        ).fetchone()
    finally:
        # Fix: the original leaked the connection if the query raised.
        conn.close()
    return {"valid": row is not None}
# --- Gradio helper functions ---
def generate_token(description):
    """Create, persist and return a new OpenAI-style API token.

    Args:
        description: free-text label stored next to the token.

    Returns:
        str: the new token, ``"sk-"`` + 32 hex chars (secrets-grade randomness).
    """
    token = "sk-" + secrets.token_hex(16)  # style OpenAI
    conn = sqlite3.connect(DB_PATH)
    try:
        conn.execute(
            "INSERT INTO api_tokens (token, description) VALUES (?, ?)",
            (token, description),
        )
        conn.commit()
    finally:
        # Fix: close even if the INSERT raises (e.g. UNIQUE collision),
        # matching the error handling of the other DB helpers.
        conn.close()
    return token
def list_tokens():
    """Return every stored token as newline-joined ``id | token | description`` rows.

    Returns:
        str: one line per row; empty string when no tokens exist.
    """
    conn = sqlite3.connect(DB_PATH)
    try:
        rows = conn.execute(
            "SELECT id, token, description FROM api_tokens"
        ).fetchall()
    finally:
        # Fix: close the connection on the error path too (consistent with
        # generate_token / validate_token).
        conn.close()
    return "\n".join(f"{r[0]} | {r[1]} | {r[2]}" for r in rows)
def call_gopu(system_prompt, user_prompt, temperature, max_new_tokens, top_p, api_token):
    """Validate the token locally, then forward the prompt to the remote model.

    Args:
        system_prompt: optional system instruction sent to the model.
        user_prompt: the user's text.
        temperature / max_new_tokens / top_p: sampling parameters
            (max_new_tokens is coerced to int because Gradio sliders emit floats).
        api_token: must exist in the local api_tokens table.

    Returns:
        str: the generated text, or an error string prefixed with
        "[Erreur ...]" / "[Exception]" (the UI displays it verbatim).
    """
    # Check the token against the local SQLite store.
    conn = sqlite3.connect(DB_PATH)
    try:
        row = conn.execute(
            "SELECT token FROM api_tokens WHERE token=?", (api_token,)
        ).fetchone()
    finally:
        # Fix: close the connection even if the lookup raises.
        conn.close()
    if not row:
        return "[Erreur] Token invalide"

    payload = {
        "input": user_prompt,
        "system_prompt": system_prompt,
        "temperature": temperature,
        "max_new_tokens": int(max_new_tokens),
        "top_p": top_p,
    }
    try:
        # Fix: without a timeout, an unreachable Space hangs the UI forever;
        # a Timeout is caught below and surfaced as "[Exception] ...".
        response = requests.post(API_URL, json=payload, timeout=120)
        if response.status_code == 200:
            return response.json().get("generated_text", "")
        return f"[Erreur {response.status_code}] {response.text}"
    except Exception as e:
        return f"[Exception] {str(e)}"
# --- Gradio interface ---
with gr.Blocks(title="⚡ GopuOS API Manager") as demo:
    gr.Markdown("## ⚡ GopuOS Client + Gestion des Tokens")

    # Tab 1: mint a new API token from a free-text description (generate_token).
    with gr.Tab("Générer un Token"):
        desc = gr.Textbox(label="Description du token")
        out_token = gr.Textbox(label="Nouveau Token")
        btn_gen = gr.Button("Générer")
        btn_gen.click(fn=generate_token, inputs=desc, outputs=out_token)

    # Tab 2: dump all stored tokens as "id | token | description" lines.
    # NOTE(review): this exposes every raw token to anyone who can reach the UI —
    # confirm the Space is private before deploying.
    with gr.Tab("Lister les Tokens"):
        out_list = gr.Textbox(label="Tokens existants", lines=10)
        btn_list = gr.Button("Afficher")
        btn_list.click(fn=list_tokens, outputs=out_list)

    # Tab 3: send a prompt to the remote model; call_gopu verifies the token
    # against the local DB before issuing the HTTP request.
    with gr.Tab("Appeler le modèle"):
        sys_prompt = gr.Textbox(label="Prompt système")
        user_prompt = gr.Textbox(label="Texte utilisateur")
        temp = gr.Slider(0.1, 1.5, value=0.7, step=0.1, label="Température")
        max_tok = gr.Slider(50, 500, value=200, step=10, label="Max new tokens")
        top_p = gr.Slider(0.1, 1.0, value=0.9, step=0.05, label="Top-p")
        api_token = gr.Textbox(label="API Token")
        output = gr.Textbox(label="Réponse du modèle")
        btn_call = gr.Button("Envoyer")
        btn_call.click(
            fn=call_gopu,
            inputs=[sys_prompt, user_prompt, temp, max_tok, top_p, api_token],
            outputs=output
        )

# Mount the Gradio UI onto the FastAPI app under /gradio.
app = gr.mount_gradio_app(app, demo, path="/gradio")