# AgenteHelpN8n / app.py
# (Hugging Face Space by Jeice — commit 0520cb9)
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, ServiceContext
from llama_index.llms.openai import OpenAI
from datasets import load_dataset
import gradio as gr
import os
# --- OpenAI credentials ------------------------------------------------------
# SECURITY FIX: the original hard-coded an OpenAI API key in source, which
# leaks the secret to anyone who can read the repo. Read it from the
# environment instead (set OPENAI_API_KEY as a secret in the Space settings)
# and fail fast with a clear message if it is missing.
if not os.environ.get("OPENAI_API_KEY"):
    raise RuntimeError(
        "OPENAI_API_KEY is not set. Configure it as an environment "
        "secret (e.g. in the Hugging Face Space settings)."
    )

# --- Load the n8n documentation dataset from the Hugging Face Hub ------------
dataset = load_dataset("Jeice/n8n-docs", split="train")

# --- Persist documents locally so SimpleDirectoryReader can index them -------
os.makedirs("docs", exist_ok=True)
for item in dataset:
    # Only records carrying a file path ending in .md/.txt are documents;
    # other records (if any) are skipped.
    if "file" in item and item["file"]["path"].endswith((".md", ".txt")):
        # Keep just the basename so everything lands flat inside docs/.
        file_name = item["file"]["path"].split("/")[-1]
        with open(f"docs/{file_name}", "w", encoding="utf-8") as f:
            f.write(item["text"])
# --- Build the vector index over the downloaded docs -------------------------
# NOTE(review): ServiceContext is deprecated in llama_index >= 0.10 in favor
# of Settings — kept here to match the pinned imports of this file.
docs_for_index = SimpleDirectoryReader('docs').load_data()

# Answer queries with GPT-3.5 Turbo.
llm = OpenAI(model="gpt-3.5-turbo")
service_context = ServiceContext.from_defaults(llm=llm)

index = VectorStoreIndex.from_documents(docs_for_index, service_context=service_context)
query_engine = index.as_query_engine()
# --- Chatbot callback --------------------------------------------------------
def chatbot(input_text):
    """Answer a user question by querying the document index.

    Returns the query engine's response rendered as a plain string,
    which is what the Gradio "text" output expects.
    """
    answer = query_engine.query(input_text)
    return str(answer)
# --- Gradio UI ---------------------------------------------------------------
# Wire the chatbot callback into a simple question-in / text-out interface
# and start the app.
example_questions = [
    ["Como criar um workflow no n8n?"],
    ["Para que serve o node HTTP Request?"],
    ["Quais são os nodes de integração com Google Sheets?"],
]

question_box = gr.Textbox(
    lines=2,
    placeholder="Digite sua pergunta sobre o n8n aqui...",
)

interface = gr.Interface(
    fn=chatbot,
    inputs=question_box,
    outputs="text",
    title="🤖 Bot de Dúvidas sobre o n8n",
    description="Consulte a documentação oficial do n8n. Pergunte sobre workflows, nodes e integrações!",
    theme="default",
    examples=example_questions,
    allow_flagging="never",
)

interface.launch()