|
|
|
|
|
import gradio as gr |
|
|
import openai |
|
|
import os |
|
|
import json |
|
|
from threading import Lock |
|
|
import pkg_resources |
|
|
|
|
|
|
|
|
|
|
|
# Persisted chat history shown in the UI: a list of
# [user_message, assistant_message] pairs (gr.Chatbot tuple format).
CHATBOT_MEMORY_FILE = "memory_chatbot.json"

# Raw API log: a list of {"request": messages, "response": text} records.
API_MEMORY_FILE = "memory_api.json"

# Model identifier served through Together.ai's OpenAI-compatible endpoint.
MODEL_NAME = "openai/gpt-oss-120b"

# Base URL for Together.ai's OpenAI-compatible API.
TOGETHER_API_BASE = "https://api.together.xyz/v1"

# Serializes all reads/writes of the two JSON files across Gradio callbacks.
file_lock = Lock()
|
|
|
|
|
def initialize_memory_files():
    """Create the two JSON memory files (as empty lists) if they are missing.

    Thread-safe: holds ``file_lock`` for the whole existence check + create.
    """
    with file_lock:
        for memory_path in (CHATBOT_MEMORY_FILE, API_MEMORY_FILE):
            if os.path.exists(memory_path):
                continue
            with open(memory_path, 'w') as handle:
                json.dump([], handle)
|
|
|
|
|
def load_json(filepath):
    """Return the parsed JSON contents of *filepath*.

    Falls back to an empty list when the file is missing or holds
    invalid JSON, so callers always get a list-like history.
    """
    with file_lock:
        try:
            with open(filepath, 'r', encoding='utf-8') as handle:
                raw_text = handle.read()
            return json.loads(raw_text)
        except FileNotFoundError:
            return []
        except json.JSONDecodeError:
            return []
|
|
|
|
|
def save_json(filepath, data):
    """Serialize *data* to *filepath* as pretty-printed UTF-8 JSON.

    Thread-safe: the serialization and write happen under ``file_lock``.
    """
    with file_lock:
        serialized = json.dumps(data, indent=4, ensure_ascii=False)
        with open(filepath, 'w', encoding='utf-8') as handle:
            handle.write(serialized)
|
|
|
|
|
|
|
|
|
|
|
def get_library_versions():
    """Return a Markdown snippet with the installed Gradio and OpenAI versions.

    Returns a Portuguese fallback message when either distribution is not
    installed. Uses the stdlib ``importlib.metadata`` instead of the
    deprecated ``pkg_resources`` API (setuptools has deprecated it).
    """
    # Local import keeps this change self-contained; importlib.metadata is
    # the standard-library replacement for pkg_resources (Python >= 3.8).
    from importlib.metadata import PackageNotFoundError, version

    try:
        gradio_version = version("gradio")
        openai_version = version("openai")
        return f"""
- **Gradio:** `{gradio_version}`
- **OpenAI:** `{openai_version}`
"""
    except PackageNotFoundError:
        return "Não foi possível obter as versões das bibliotecas."
|
|
|
|
|
def load_help_content():
    """Return the full text of ``help.md``, or a not-found notice."""
    try:
        handle = open('help.md', 'r', encoding='utf-8')
    except FileNotFoundError:
        return "Arquivo help.md não encontrado."
    with handle:
        return handle.read()
|
|
|
|
|
|
|
|
|
|
|
def get_model_response(user_message, chat_history):
    """Send the conversation to the Together.ai endpoint and persist memory.

    Builds an OpenAI-style message list from *chat_history* plus the new
    *user_message*, requests a completion, appends the (user, assistant)
    pair to *chat_history*, and saves both the chatbot history and the raw
    request/response log to disk. Returns the updated history; API errors
    are surfaced as assistant messages rather than raised.
    """
    # Client construction fails (and is reported in-chat) when the key
    # secret is missing or malformed.
    try:
        client = openai.OpenAI(
            api_key=os.getenv("TOGETHER_API_KEY"),
            base_url=TOGETHER_API_BASE,
        )
    except Exception as e:
        error_reply = f"ERRO: A chave da API não foi encontrada. Verifique o segredo 'TOGETHER_API_KEY' no seu Space. Detalhes: {e}"
        chat_history.append((user_message, error_reply))
        return chat_history

    # Replay the prior turns as alternating user/assistant messages.
    messages = [{"role": "system", "content": "Você é um assistente prestativo e detalhista."}]
    for prior_user, prior_assistant in chat_history:
        messages.append({"role": "user", "content": prior_user})
        if prior_assistant:
            messages.append({"role": "assistant", "content": prior_assistant})
    messages.append({"role": "user", "content": user_message})

    # Any API failure becomes the assistant's reply instead of an exception.
    try:
        completion = client.chat.completions.create(
            messages=messages,
            model=MODEL_NAME,
        )
        assistant_response = completion.choices[0].message.content
    except Exception as e:
        assistant_response = f"Ocorreu um erro ao contatar a API: {e}"

    chat_history.append((user_message, assistant_response))

    # Persist the UI-facing history, then append to the raw API log.
    save_json(CHATBOT_MEMORY_FILE, chat_history)

    api_memory = load_json(API_MEMORY_FILE)
    api_memory.append({ "request": messages, "response": assistant_response })
    save_json(API_MEMORY_FILE, api_memory)

    return chat_history
|
|
|
|
|
|
|
|
def add_text(history, text):
    """Append *text* as a pending (text, None) turn and clear the textbox.

    Returns the new history list (the input list is not mutated) and an
    empty string used to reset the Gradio textbox.
    """
    return history + [(text, None)], ""
|
|
|
|
|
|
|
|
|
|
|
# Ensure the memory files exist BEFORE the UI reads CHATBOT_MEMORY_FILE below
# (the original called this only after building the interface).
initialize_memory_files()

with gr.Blocks(title=f"Assistente com {MODEL_NAME}") as demo:
    gr.Markdown(f"# 🤖 Assistente de Chat com {MODEL_NAME}")
    gr.Markdown("Faça uma pergunta e receba uma resposta do modelo hospedado na Together.ai.")

    with gr.Tabs():
        with gr.TabItem("Chatbot"):
            # Seed the chat window with the persisted history.
            initial_history = load_json(CHATBOT_MEMORY_FILE)
            # `height=` replaces the deprecated/removed `.style(height=...)`
            # API (supported since Gradio 3.40, required in Gradio 4+).
            chatbot_ui = gr.Chatbot(value=initial_history, label="Conversa", height=600)

            with gr.Row():
                msg_input = gr.Textbox(
                    scale=4,
                    show_label=False,
                    placeholder="Digite sua mensagem e pressione Enter",
                )
                btn_submit = gr.Button("Enviar", variant="primary")

        with gr.TabItem("JSON API Log"):
            gr.Markdown("Exibe o log completo de requisições e respostas para a API.")
            btn_update_api_log = gr.Button("Atualizar")
            json_api_view = gr.JSON(label="Memória da API")

        with gr.TabItem("JSON Chatbot"):
            gr.Markdown("Exibe o histórico de mensagens formatado para a interface do chatbot.")
            btn_update_chatbot = gr.Button("Atualizar")
            json_chatbot_view = gr.JSON(label="Memória do Chatbot")

        with gr.TabItem("Help"):
            help_display = gr.Markdown(load_help_content())
            gr.Markdown("### Versões das Bibliotecas")
            gr.Markdown(get_library_versions())

    # Enter in the textbox and the button do the same two-step flow:
    # echo the user's message immediately, then fetch the model reply.
    msg_input.submit(add_text, [chatbot_ui, msg_input], [chatbot_ui, msg_input]).then(
        get_model_response, chatbot_ui, chatbot_ui
    )
    btn_submit.click(add_text, [chatbot_ui, msg_input], [chatbot_ui, msg_input]).then(
        get_model_response, chatbot_ui, chatbot_ui
    )

    # Manual refresh buttons for the two JSON views.
    btn_update_api_log.click(fn=lambda: load_json(API_MEMORY_FILE), inputs=None, outputs=[json_api_view])
    btn_update_chatbot.click(fn=lambda: load_json(CHATBOT_MEMORY_FILE), inputs=None, outputs=[json_chatbot_view])

    # Populate both JSON views once when the page loads.
    demo.load(fn=lambda: load_json(API_MEMORY_FILE), inputs=None, outputs=[json_api_view])
    demo.load(fn=lambda: load_json(CHATBOT_MEMORY_FILE), inputs=None, outputs=[json_chatbot_view])

demo.launch()