Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -7,11 +7,10 @@ import os
|
|
| 7 |
#Configure sua API da OpenAI (ou pode trocar por HuggingFace LLM depois)
|
| 8 |
os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY", "")  # SECURITY: a real key was hardcoded and published here — revoke it immediately and supply the key via an environment variable / Spaces secret instead
|
| 9 |
|
| 10 |
-
#Carregar
|
| 11 |
-
print("🔗 Carregando dataset do Hugging Face...")
|
| 12 |
dataset = load_dataset("Jeice/n8n-docs", split="train")
|
| 13 |
|
| 14 |
-
#Salvar os arquivos localmente
|
| 15 |
os.makedirs("docs", exist_ok=True)
|
| 16 |
|
| 17 |
for item in dataset:
|
|
@@ -20,28 +19,28 @@ for item in dataset:
|
|
| 20 |
with open(f"docs/{file_name}", "w", encoding="utf-8") as f:
|
| 21 |
f.write(item["text"])
|
| 22 |
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
#Criar o index com o LlamaIndex
|
| 26 |
documents = SimpleDirectoryReader('docs').load_data()
|
|
|
|
| 27 |
service_context = ServiceContext.from_defaults(
|
| 28 |
-
llm=OpenAI(model="gpt-3.5-turbo")
|
| 29 |
)
|
|
|
|
| 30 |
index = VectorStoreIndex.from_documents(documents, service_context=service_context)
|
| 31 |
query_engine = index.as_query_engine()
|
| 32 |
|
| 33 |
-
#Função
|
| 34 |
def chatbot(input_text):
    """Answer a question about n8n by querying the documentation index.

    Parameters
    ----------
    input_text : str
        The user's question, as typed into the Gradio textbox.

    Returns
    -------
    str
        The query engine's response rendered as plain text.
    """
    # Delegate retrieval + generation to the LlamaIndex query engine,
    # then coerce its Response object to the plain string Gradio expects.
    return str(query_engine.query(input_text))
|
| 37 |
|
| 38 |
-
#Interface
|
| 39 |
interface = gr.Interface(
|
| 40 |
fn=chatbot,
|
| 41 |
inputs=gr.Textbox(lines=2, placeholder="Digite sua pergunta sobre o n8n aqui..."),
|
| 42 |
outputs="text",
|
| 43 |
title="🤖 Bot de Dúvidas sobre o n8n",
|
| 44 |
-
description="
|
| 45 |
theme="default",
|
| 46 |
examples=[
|
| 47 |
["Como criar um workflow no n8n?"],
|
|
@@ -51,5 +50,4 @@ interface = gr.Interface(
|
|
| 51 |
allow_flagging="never"
|
| 52 |
)
|
| 53 |
|
| 54 |
-
|
| 55 |
-
interface.launch()
|
|
|
|
| 7 |
#Configure sua API da OpenAI (ou pode trocar por HuggingFace LLM depois)
|
| 8 |
os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY", "")  # SECURITY: a real key was hardcoded and published here — revoke it immediately and supply the key via an environment variable / Spaces secret instead
|
| 9 |
|
| 10 |
+
#Carregar dataset do Hugging Face
|
|
|
|
| 11 |
dataset = load_dataset("Jeice/n8n-docs", split="train")
|
| 12 |
|
| 13 |
+
#Salvar os arquivos localmente
|
| 14 |
os.makedirs("docs", exist_ok=True)
|
| 15 |
|
| 16 |
for item in dataset:
|
|
|
|
| 19 |
with open(f"docs/{file_name}", "w", encoding="utf-8") as f:
|
| 20 |
f.write(item["text"])
|
| 21 |
|
| 22 |
+
#Criar o index
|
|
|
|
|
|
|
| 23 |
documents = SimpleDirectoryReader('docs').load_data()
|
| 24 |
+
|
| 25 |
service_context = ServiceContext.from_defaults(
|
| 26 |
+
llm=OpenAI(model="gpt-3.5-turbo")
|
| 27 |
)
|
| 28 |
+
|
| 29 |
index = VectorStoreIndex.from_documents(documents, service_context=service_context)
|
| 30 |
query_engine = index.as_query_engine()
|
| 31 |
|
| 32 |
+
#Função do chatbot
|
| 33 |
def chatbot(input_text):
    """Answer a question about n8n by querying the documentation index.

    Parameters
    ----------
    input_text : str
        The user's question, as typed into the Gradio textbox.

    Returns
    -------
    str
        The query engine's response rendered as plain text.
    """
    # Delegate retrieval + generation to the LlamaIndex query engine,
    # then coerce its Response object to the plain string Gradio expects.
    return str(query_engine.query(input_text))
|
| 36 |
|
| 37 |
+
#Interface Gradio
|
| 38 |
interface = gr.Interface(
|
| 39 |
fn=chatbot,
|
| 40 |
inputs=gr.Textbox(lines=2, placeholder="Digite sua pergunta sobre o n8n aqui..."),
|
| 41 |
outputs="text",
|
| 42 |
title="🤖 Bot de Dúvidas sobre o n8n",
|
| 43 |
+
description="Consulte a documentação oficial do n8n. Pergunte sobre workflows, nodes e integrações!",
|
| 44 |
theme="default",
|
| 45 |
examples=[
|
| 46 |
["Como criar um workflow no n8n?"],
|
|
|
|
| 50 |
allow_flagging="never"
|
| 51 |
)
|
| 52 |
|
| 53 |
+
interface.launch()
|
|
|