profdanielvieira95 commited on
Commit
3488a2a
Β·
verified Β·
1 Parent(s): 3c9a801
Files changed (1) hide show
  1. app.py +131 -0
app.py ADDED
@@ -0,0 +1,131 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import shutil
3
+ import gradio as gr
4
+ from typing import List
5
+ from llama_index.core import SimpleDirectoryReader, StorageContext, VectorStoreIndex
6
+ from llama_index.core.node_parser import SentenceSplitter
7
+ from llama_index.embeddings.huggingface import HuggingFaceEmbedding
8
+ from llama_index.vector_stores.chroma import ChromaVectorStore
9
+ from llama_index.llms.groq import Groq
10
+ from llama_index.core.memory import ChatSummaryMemoryBuffer
11
+ import chromadb
12
+ from tempfile import TemporaryDirectory
13
+ from PyPDF2 import PdfReader
14
+
15
# Embedding wrapper exposing the callable interface ChromaDB expects:
# ``__call__(input: list[str]) -> list[list[float]]``.
class ChromaEmbeddingWrapper:
    def __init__(self, model_name: str):
        """Load the HuggingFace embedding model identified by *model_name*."""
        self.model = HuggingFaceEmbedding(model_name=model_name)

    def __call__(self, input: List[str]) -> List[List[float]]:
        """Embed a batch of texts for ChromaDB.

        BUG FIX: llama_index embedding models expose
        ``get_text_embedding_batch``; ``embed_documents`` is LangChain's API
        and does not exist on ``HuggingFaceEmbedding``, so the original call
        raised ``AttributeError`` the first time Chroma invoked it.
        """
        return self.model.get_text_embedding_batch(input)
22
+
23
# --- Embedding models --------------------------------------------------------
embed_model = HuggingFaceEmbedding(model_name='intfloat/multilingual-e5-large')
embed_model_chroma = ChromaEmbeddingWrapper(model_name='intfloat/multilingual-e5-large')

# --- ChromaDB (local persistent store) ---------------------------------------
chroma_client = chromadb.PersistentClient(path='./chroma_db')
collection_name = 'documentos_serenatto'
chroma_collection = chroma_client.get_or_create_collection(
    name=collection_name,
    embedding_function=embed_model_chroma
)

vector_store = ChromaVectorStore(chroma_collection=chroma_collection)
storage_context = StorageContext.from_defaults(vector_store=vector_store)

# --- Groq LLM ----------------------------------------------------------------
# SECURITY FIX: the API key was hard-coded in the source (a leaked secret
# committed to a public repo). Read it from the GROQ_API_KEY environment
# variable instead; the previously committed key must be revoked.
Groq_api = os.environ.get("GROQ_API_KEY")
llms = Groq(model='llama3-70b-8192', api_key=Groq_api)

# Global state shared between Gradio callbacks (single-user app assumption).
document_index = None   # VectorStoreIndex built from the uploaded PDF
chat_engine = None      # chat engine bound to document_index
45
+
46
# --- PDF processing ----------------------------------------------------------

def process_pdf(file):
    """Extract text from an uploaded PDF, index it, and build the chat engine.

    Parameters:
        file: Gradio upload object whose ``.name`` is the path of the
              uploaded PDF on disk.

    Returns:
        str: status message for the UI (success text or error description).

    Side effects:
        Rebinds the module globals ``document_index`` and ``chat_engine``.
    """
    global document_index, chat_engine

    try:
        with TemporaryDirectory() as tmpdir:
            pdf_path = os.path.join(tmpdir, "upload.pdf")
            shutil.copy(file.name, pdf_path)

            # Extract plain text page by page; extract_text() may return
            # None for pages without a text layer, hence the "or ''".
            text = ""
            reader = PdfReader(pdf_path)
            for page in reader.pages:
                text += page.extract_text() or ""

            txt_path = os.path.join(tmpdir, "temp.txt")
            with open(txt_path, "w", encoding="utf-8") as f:
                f.write(text)

            # BUG FIX: reading the whole directory (input_dir=tmpdir) also
            # re-ingested upload.pdf itself, so every document's content was
            # indexed twice (once from the PDF parser, once from temp.txt).
            # Load only the extracted text file.
            documentos = SimpleDirectoryReader(input_files=[txt_path])
            docs = documentos.load_data()

            node_parser = SentenceSplitter(chunk_size=1200)
            nodes = node_parser.get_nodes_from_documents(docs, show_progress=True)

            document_index = VectorStoreIndex(nodes, storage_context=storage_context, embed_model=embed_model)

            # Small summary buffer keeps the prompt short between turns.
            memory = ChatSummaryMemoryBuffer(llm=llms, token_limit=256)

            chat_engine = document_index.as_chat_engine(
                chat_mode='context',
                llm=llms,
                memory=memory,
                system_prompt='''Voce Γ© especialista em cafes da loja Serenatto, uma loja online que vende graos de cafe
torrados, sua funΓ§ao Γ© tirar duvidas de forma simpatica e natural sobre os graos disponiveis.'''
            )

        return "PDF carregado com sucesso! Agora vocΓͺ pode conversar com o bot."

    except Exception as e:
        # Best-effort error reporting back to the UI status box.
        return f"Erro ao processar PDF: {e}"
86
+
87
# Chat callback using the "messages"-style history (list of role/content dicts).
def converse_com_bot(message, chat_history):
    """Send *message* to the chat engine and update the Gradio chat history.

    Returns a pair ``(textbox_value, updated_history)``; the textbox is
    always cleared ("") so the next message can be typed.
    """
    global chat_engine

    if chat_history is None:
        chat_history = []

    # BUG FIX: the "send a PDF first" warning used to be returned as the
    # textbox value, overwriting whatever the user had typed and never
    # appearing in the conversation. Show it in the chat instead.
    if chat_engine is None:
        chat_history.append({"role": "user", "content": message})
        chat_history.append({"role": "assistant", "content": "Por favor, envie um PDF primeiro."})
        return "", chat_history

    response = chat_engine.chat(message)

    chat_history.append({"role": "user", "content": message})
    chat_history.append({"role": "assistant", "content": response.response})

    return "", chat_history
103
+
104
# Wipe the conversation: clear the engine's memory and empty the UI history.
def resetar_chat():
    """Reset the chat engine (when one exists) and return a fresh history."""
    global chat_engine
    if chat_engine is not None:
        chat_engine.reset()
    return []
110
+
111
# --- Gradio interface with PDF upload ----------------------------------------
with gr.Blocks() as app:
    gr.Markdown("# Chatbot da Serenatto - Especialista em CafΓ©s")

    # Upload row: file picker plus an explicit "load" button.
    with gr.Row():
        pdf_input = gr.File(label="πŸ“„ Envie seu PDF")
        btn_carregar = gr.Button("Carregar PDF")

    status_box = gr.Textbox(label="Status", interactive=False)

    # Conversation area (messages-style history) and input controls.
    historico = gr.Chatbot(label="Conversa", type="messages")
    caixa_msg = gr.Textbox(label='Digite a sua mensagem')
    btn_limpar = gr.Button('Limpar')

    # Wiring: loading a PDF reports its status, then wipes any previous chat.
    btn_carregar.click(process_pdf, inputs=pdf_input, outputs=status_box).then(
        resetar_chat, None, historico
    )
    caixa_msg.submit(converse_com_bot, [caixa_msg, historico], [caixa_msg, historico])
    btn_limpar.click(resetar_chat, None, historico, queue=False)

app.launch(debug=True)