# NOTE(review): "Spaces: / Runtime error" was a pasted Hugging Face Spaces
# deployment-log artifact, not source code — converted to a comment so the
# module can be parsed.
# NOTE(review): the commented-out block below is an earlier draft of this
# module, superseded by the active implementation further down. Consider
# deleting it — version control preserves the history.
#
# # This file will contain the core logic for the TurBot agent,
# # including the LangGraph setup, state management, and tool definition.
| # import os | |
| # from typing import List | |
| # from dotenv import load_dotenv | |
| # from huggingface_hub.inference._client import InferenceClient | |
| # from langchain_community.vectorstores import FAISS | |
| # from langchain_huggingface import HuggingFaceEmbeddings | |
| # from langchain_core.messages import HumanMessage, AIMessage | |
| # | |
| # # Load environment variables | |
| # load_dotenv() | |
| # | |
| # # Initialize the LLM (using a free model from Hugging Face) | |
| # client = InferenceClient("meta-llama/Meta-Llama-3-70B-Instruct") | |
| # | |
| # # Load the vector store | |
| # DB_FAISS_PATH = "../vectorstore/db_faiss" | |
| # | |
| # def search_travel_packages(query: str) -> str: | |
| # """ | |
| # Search for travel packages based on the user's query. | |
| # This function searches through the travel package documents to find relevant information. | |
| # """ | |
| # print(f"BURAAAZ STA SAM JA USAO U SEARCH TRAVEL PACKAGES OVO STVARNO NISAM OCEKIVAO") | |
| # try: | |
| # # Load the vector store | |
| # embeddings = HuggingFaceEmbeddings( | |
| # model_name='sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2', | |
| # model_kwargs={'device': 'cpu'} | |
| # ) | |
| # | |
| # if os.path.exists(DB_FAISS_PATH): | |
| # vectorstore = FAISS.load_local(DB_FAISS_PATH, embeddings) | |
| # | |
| # | |
| # print(f"JOOOO USPEJO SAM") | |
| # | |
| # # Search for relevant documents | |
| # docs = vectorstore.similarity_search(query, k=3) | |
| # | |
| # # Combine the relevant information | |
| # context = "\n\n".join([doc.page_content for doc in docs]) | |
| # return f"Found relevant travel information:\n\n{context}" | |
| # else: | |
| # return "Travel package database not found. Please ensure the vector store has been created." | |
| # except Exception as e: | |
| # return f"Error searching travel packages: {str(e)}" | |
| # | |
| # def should_search_travel_info(message: str) -> bool: | |
| # print(f"BURAZENGIJA SRA CU JA U SHOULD SEARCH TRAVEL INFU") | |
| # """Decide whether to search for travel information based on the message content.""" | |
| # message_lower = message.lower() | |
| # | |
| # # Keywords that indicate we should search for travel information | |
| # travel_keywords = [ | |
| # "destinacija", "let", "hotel", "putovanje", "mediteran", "grčka", "italija", | |
| # "turska", "budžet", "porodica", "all inclusive", "preporučite", "ponuda", | |
| # "destination", "flight", "travel", "mediterranean", "greece", "italy", | |
| # "turkey", "budget", "family", "recommend", "offer" | |
| # ] | |
| # | |
| # return any(keyword in message_lower for keyword in travel_keywords) | |
| # | |
| # def chat_with_turbot(message: str, history: List[List[str]]) -> str: | |
| # print(f"BURAZ KOJI JE OVO KITA MOJA") | |
| # """ | |
| # Main function to chat with TurBot. | |
| # """ | |
| # # Prepare the system message first | |
| # system_message = """Ti si TurBot, digitalni asistent za turističku agenciju. | |
| # Odgovori na srpskom jeziku i budi koristan i prijateljski nastrojen. | |
| # Ako ne znaš odgovor na pitanje, preporuči korisniku da pita o putovanjima, letovima, hotelima ili destinacijama.""" | |
| # | |
| # # Convert messages to the format expected by InferenceClient | |
| # messages_for_client = [] | |
| # | |
| # # Add system message first (only once) | |
| # messages_for_client.append({"role": "system", "content": system_message}) | |
| # | |
| # # Add conversation history | |
| # for human, ai in history: | |
| # if human: | |
| # messages_for_client.append({"role": "user", "content": human}) | |
| # if ai: | |
| # messages_for_client.append({"role": "assistant", "content": ai}) | |
| # | |
| # # Add the current message | |
| # messages_for_client.append({"role": "user", "content": message}) | |
| # | |
| # # Check if we should search for travel information | |
| # context = "" | |
| # if should_search_travel_info(message): | |
| # context = search_travel_packages(message) | |
| # if context: | |
| # # Add context as additional information | |
| # messages_for_client.append({"role": "user", "content": f"Kontekst o putovanjima: {context}"}) | |
| # | |
| # try: | |
| # # Debug: Print messages being sent to model | |
| # print(f"\n🔍 DEBUG: Sending {len(messages_for_client)} messages to model:") | |
| # for i, msg in enumerate(messages_for_client): | |
| # print(f" {i+1}. {msg['role']}: {msg['content'][:100]}...") | |
| # | |
| # # Generate response | |
| # response = "" | |
| # for message_chunk in client.chat_completion( | |
| # messages_for_client, | |
| # stream=True, | |
| # max_tokens=512, | |
| # temperature=0.7, | |
| # top_p=0.95, | |
| # ): | |
| # if hasattr(message_chunk, 'choices') and message_chunk.choices: | |
| # token = message_chunk.choices[0].delta.content | |
| # if token: | |
| # response += token | |
| # | |
| # return response if response else "Izvinjavam se, nisam dobio odgovor od modela. Pokušajte ponovo." | |
| # except Exception as e: | |
| # return f"Izvinjavam se, došlo je do greške: {str(e)}" | |
| import os | |
| from typing import List | |
| from dotenv import load_dotenv | |
| from huggingface_hub.inference._client import InferenceClient | |
| from langchain_community.vectorstores import FAISS | |
| from langchain_huggingface import HuggingFaceEmbeddings | |
| from langchain_core.messages import HumanMessage, AIMessage | |
# Load environment variables (e.g. the Hugging Face API token).
load_dotenv()

# Initialize the LLM client (free hosted model on Hugging Face).
client = InferenceClient("meta-llama/Meta-Llama-3-70B-Instruct")

# Location of the pre-built FAISS vector store on disk.
DB_FAISS_PATH = "../vectorstore/db_faiss"

# Load the embeddings model and the FAISS index ONCE at import time instead
# of on every search call (an earlier version reloaded both per request,
# which was very slow).
print("Initializing HuggingFaceEmbeddings...")
try:
    embeddings_model = HuggingFaceEmbeddings(
        model_name='sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2',
        model_kwargs={'device': 'cpu'},  # try 'cuda' if a GPU + CUDA PyTorch is available
    )
    print("HuggingFaceEmbeddings initialized.")
except Exception as e:
    print(f"Error initializing embeddings: {str(e)}")
    embeddings_model = None  # downstream code checks for None

# Load the FAISS vector store globally; None means RAG is unavailable.
vectorstore = None
if embeddings_model and os.path.exists(DB_FAISS_PATH):
    print(f"Loading FAISS vector store from {DB_FAISS_PATH}...")
    try:
        # allow_dangerous_deserialization is required by newer LangChain to
        # load a pickled FAISS index; only safe because we built it ourselves.
        vectorstore = FAISS.load_local(DB_FAISS_PATH, embeddings_model, allow_dangerous_deserialization=True)
        print("FAISS vector store loaded successfully.")
    except Exception as e:
        print(f"Error loading FAISS vector store: {str(e)}. Make sure 'faiss-cpu' is installed and vectorstore is valid.")
        vectorstore = None
else:
    print("FAISS vector store path does not exist or embeddings model failed to initialize. RAG will not function.")
def search_travel_packages(query: str) -> str:
    """
    Search the FAISS vector store for travel packages relevant to *query*.

    Uses the module-level ``vectorstore`` loaded once at import time (no
    per-call reloading of embeddings or the index).

    Parameters
    ----------
    query : str
        The user's free-text travel question.

    Returns
    -------
    str
        Either the concatenated contents of the top-3 matching documents,
        or a human-readable message when the store is unavailable or the
        search fails (this function never raises).
    """
    if vectorstore is None:
        return "Travel package database is not available. Please ensure the vector store has been created and loaded correctly."
    try:
        # Retrieve the 3 most similar documents for the query.
        docs = vectorstore.similarity_search(query, k=3)
        # Merge the matched chunks into a single context string.
        context = "\n\n".join(doc.page_content for doc in docs)
        return f"Found relevant travel information:\n\n{context}"
    except Exception as e:
        return f"Error searching travel packages: {str(e)}"
def should_search_travel_info(message: str) -> bool:
    """
    Decide whether a user message warrants querying the travel database.

    Performs a simple case-insensitive substring check against a fixed
    Serbian + English travel vocabulary. NOTE: because this is substring
    matching, very short keywords (e.g. "let") can also match inside
    unrelated words.

    Parameters
    ----------
    message : str
        The user's message.

    Returns
    -------
    bool
        True if any travel keyword occurs in the lowercased message.
    """
    message_lower = message.lower()
    # Keywords that indicate the user is asking about travel topics.
    travel_keywords = [
        "destinacija", "let", "hotel", "putovanje", "mediteran", "grčka", "italija",
        "turska", "budžet", "porodica", "all inclusive", "preporučite", "ponuda",
        "destination", "flight", "travel", "mediterranean", "greece", "italy",
        "turkey", "budget", "family", "recommend", "offer",
    ]
    return any(keyword in message_lower for keyword in travel_keywords)
def chat_with_turbot(message: str, history: List[List[str]]) -> str:
    """
    Generate TurBot's reply for *message*, given the conversation *history*.

    Builds an OpenAI-style message list (system prompt + replayed history +
    current message), optionally augments it with RAG context retrieved from
    the travel database, then streams a completion from the module-level
    ``client``.

    Parameters
    ----------
    message : str
        The user's current message.
    history : List[List[str]]
        Prior turns as ``[human, ai]`` pairs (Gradio-style chat history).

    Returns
    -------
    str
        The assistant's reply (in Serbian), or an apology message when the
        model returns nothing or the call fails (this function never raises).
    """
    # System prompt: fixes the TurBot persona and the Serbian reply language.
    system_message = """Ti si TurBot, digitalni asistent za turističku agenciju.
Odgovori na srpskom jeziku i budi koristan i prijateljski nastrojen.
Ako ne znaš odgovor na pitanje, preporuči korisniku da pita o putovanjima, letovima, hotelima ili destinacijama."""

    # Build the message list in the format expected by InferenceClient,
    # starting with the (single) system message.
    messages_for_client = [{"role": "system", "content": system_message}]

    # Replay the conversation history, skipping empty turns.
    for human, ai in history:
        if human:
            messages_for_client.append({"role": "user", "content": human})
        if ai:
            messages_for_client.append({"role": "assistant", "content": ai})

    # Add the current user message.
    messages_for_client.append({"role": "user", "content": message})

    # Optionally augment the prompt with RAG context from the travel DB.
    context = ""
    if should_search_travel_info(message):
        context = search_travel_packages(message)
        if context:
            # Supply retrieved context as an extra user message so the model
            # can ground its answer in the database contents.
            # NOTE(review): search_travel_packages also returns error text,
            # which is forwarded to the model as "context" — consider
            # filtering those messages out.
            messages_for_client.append({"role": "user", "content": f"Relevantne informacije iz baze podataka: {context}"})

    try:
        # Debug: show what is being sent to the model.
        print(f"\n🔍 DEBUG: Sending {len(messages_for_client)} messages to model:")
        for i, msg in enumerate(messages_for_client):
            print(f"  {i+1}. {msg['role']}: {msg['content'][:150]}...")

        # Stream the completion and accumulate tokens into one string.
        response = ""
        for message_chunk in client.chat_completion(
            messages_for_client,
            stream=True,
            max_tokens=512,
            temperature=0.7,
            top_p=0.95,
        ):
            # Guard against malformed/empty chunks from the stream.
            if hasattr(message_chunk, 'choices') and message_chunk.choices:
                token = message_chunk.choices[0].delta.content
                if token:
                    response += token

        return response if response else "Izvinjavam se, nisam dobio odgovor od modela. Pokušajte ponovo."
    except Exception as e:
        return f"Izvinjavam se, došlo je do greške u pozivu LLM-a: {str(e)}"