# ELI-chatbot / utils.py
# NOTE(review): the lines below were residue from the Hugging Face file-view page
# (author: StefanoDUrso, commit "handling contexts", hash f88de80) pasted as bare
# text; commented out so the module parses.
from config import initialize
from utilities.vectorstore.SummaryManager import SummaryManager
def manage_collection(file_name, collection_name):
    """Ensure a Qdrant collection exists, then insert a document into it.

    Prints status to stdout; returns nothing.  Uses the module-level
    ``qdrant_manager`` created by ``initialize()``.
    """
    # Create the collection on first use; give up if creation fails.
    if not qdrant_manager.get_collection(collection_name):
        created = qdrant_manager.create_collection(collection_name)
        if not created:
            print("❌ Error: Failed to create collection in Qdrant. Exiting application.", flush=True)
            return

    success, total_tokens, text = qdrant_manager.insert_document(file_name)
    if not success:
        print("❌ Errore durante l'inserimento del documento.")
        return

    print(f"βœ… Documento inserito correttamente. Token totali: {total_tokens}")
    if text:
        # Full text is only returned when it fits within the token limit.
        print(f"βœ… Testo completo disponibile (entro il limite token): {text[:100]}...")
    #qdrant_manager.delete_collection(collection_name)
def get_initial_summary(collection_name):
    """Generate the initial summary of a Qdrant collection.

    Returns the summary produced by ``SummaryManager.do_initial_summary()``,
    or None when the collection does not exist.
    """
    # Bail out early when the collection is missing.
    if not qdrant_manager.get_collection(collection_name):
        print(f"❌ Collection '{collection_name}' non trovata.")
        return None

    manager = SummaryManager(language="en", qdrant_manager=qdrant_manager)
    return manager.do_initial_summary()
def get_summary(collection_name, type="map_reduce"):
    """Summarize a Qdrant collection with the chosen strategy.

    type: "map_reduce" (default) or "stuff"; any other value yields None.
    Returns None when the collection does not exist.
    (Parameter name ``type`` shadows the builtin but is kept for
    backward compatibility with keyword callers.)
    """
    # Bail out early when the collection is missing.
    if not qdrant_manager.get_collection(collection_name):
        print(f"❌ Collection '{collection_name}' non trovata.")
        return None

    manager = SummaryManager(language="en", qdrant_manager=qdrant_manager)
    if type == "stuff":
        print("Using stuff method")
        return manager.do_summary_stuff()
    if type == "map_reduce":
        return manager.do_summary_map_reduce()
    return None
def chat_with_bot(llm_manager, contextualize=True):
    """Run an interactive console chat loop until the user types exit/quit.

    contextualize: when True, seed the conversation via
    ``llm_manager.initialize_conversation()`` and echo the opening message.
    """
    print("πŸ€– Chatbot! Write 'exit' or 'quit' to close the conversation.\n")

    # Best-effort seeding — a failure here must not prevent the chat loop.
    if contextualize:
        try:
            llm_manager.initialize_conversation()
            print(f"πŸ€– ELI: {llm_manager.messages[-1].content}\n")
        except Exception as e:
            print(f"⚠️ Could not load initial summary: {e}\n")

    while True:
        try:
            user_input = input("πŸ‘€ You: ")
            if user_input.lower() in ("exit", "quit"):
                print("πŸ‘‹ End of conversation.")
                break
            reply = llm_manager.send_message(user_input, contextualize=contextualize)
            print(f"πŸ€– ELI: {reply}\n")
        except KeyboardInterrupt:
            # Ctrl-C anywhere in the turn ends the session gracefully.
            print("\nπŸ‘‹ Conversation stopped.")
            break
        except Exception as e:
            # Any other per-turn failure is reported and the loop continues.
            print(f"⚠️ Error: {e}\n")
def get_chunk(collection_name, chunk_id):
    """Return the chunk at index ``chunk_id`` from a Qdrant collection.

    Returns None when the collection does not exist.
    """
    # Bail out early when the collection is missing.
    if not qdrant_manager.get_collection(collection_name):
        print(f"❌ Collection '{collection_name}' non trovata.")
        return None

    return qdrant_manager.get_chunk_by_index(chunk_id)
# Initialize the shared managers once at import time — the helper functions
# above read these module-level globals.
# BUGFIX: initialize() was previously called twice (two identical assignments),
# redundantly building a second set of managers; the duplicate call is removed.
llm_manager, qdrant_manager = initialize()

# file_name = "data/txt/Key statisitcs startups.txt"
collection_name = "key_statistics"

# Example usage, kept for reference:
# manage_collection(file_name, collection_name)
# chat_with_bot(llm_manager)
# summary = get_initial_summary(collection_name)
# summary = get_summary(collection_name, "map_reduce")
# summary = get_summary(collection_name, type="stuff")
# if summary:
#     print(f"βœ… Summary:\n{summary}")
# else:
#     print("⚠️ Nessun riassunto generato.")

# Fetch and display a single chunk as a quick smoke test.
text = get_chunk(collection_name, 1)
print(text)