diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..be2f91dfd278564b57fd55893529ee1bba731bcf
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,29 @@
+# Use the slim variant of Python 3.11 to reduce the size
+FROM python:3.11-slim
+
+# Add a non-root user for better security
+RUN useradd -m -u 1000 user
+
+# Switch to non-root user
+USER user
+
+# Ensure pip, setuptools, and wheel are up to date (--no-cache-dir keeps the layer small)
+RUN python -m pip install --no-cache-dir --upgrade pip setuptools wheel
+
+# Set PATH to include user installs
+ENV PATH="/home/user/.local/bin:$PATH"
+
+# Set the working directory inside the container
+WORKDIR /app
+
+# Copy requirements.txt file and install dependencies
+COPY --chown=user ./requirements.txt /app/requirements.txt
+
+# Install only necessary dependencies from the requirements.txt
+RUN pip install --no-cache-dir -r /app/requirements.txt
+
+# Copy the rest of the application code to the working directory
+COPY --chown=user . /app
+
+# Start the FastAPI application with uvicorn
+CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
diff --git a/app.py b/app.py
new file mode 100644
index 0000000000000000000000000000000000000000..adb77b8bce3f96440b36c892e25ca102e0341046
--- /dev/null
+++ b/app.py
@@ -0,0 +1,26 @@
+from dotenv import load_dotenv
+import base64
+import json
+import os
+
+load_dotenv(override=True)
+encoded_env = os.getenv("ENCODED_ENV")
+if encoded_env:
+    # Decode the base64 string
+    decoded_env = base64.b64decode(encoded_env).decode()
+
+    # Load it as a dictionary
+    env_data = json.loads(decoded_env)
+
+    # Set environment variables
+    for key, value in env_data.items():
+        os.environ[key] = value
+from src.apis.create_app import create_app, api_router
+import uvicorn
+
+
+app = create_app()
+
+app.include_router(api_router)
+if __name__ == "__main__":
+    uvicorn.run("app:app", host="0.0.0.0", port=3002)
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..5cf868532894bd9f31647d4350f3b9eb4c70b40e
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,20 @@
+langgraph
+langchain
+python-dotenv
+motor
+langchain-community
+langchain-mongodb
+fastapi
+uvicorn
+pytz
+PyJWT==2.8.0
+python_jose==3.3.0
+pydantic[email]
+langchain-google-genai
+python-dateutil
+pandas
+openpyxl
+langchain-redis
+redis
+bs4
+duckduckgo-search
diff --git a/src/.DS_Store b/src/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..57e670896ce53a7ca41090200d8eae9def922ad0
Binary files /dev/null and b/src/.DS_Store differ
diff --git a/src/__pycache__/__init__.cpython-311.pyc b/src/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1f411ae2a7fb755dacd11e2b07871897118ad82d
Binary files /dev/null and b/src/__pycache__/__init__.cpython-311.pyc differ
diff --git a/src/__pycache__/app.cpython-311.pyc b/src/__pycache__/app.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a029f67b6b58d5bdb814e8dd5bd31e097c8a3779
Binary files /dev/null and b/src/__pycache__/app.cpython-311.pyc differ
diff --git a/src/__pycache__/state.cpython-311.pyc b/src/__pycache__/state.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9e8755ec932c515b48a691bc66b29b50ed5cd650
Binary files /dev/null and b/src/__pycache__/state.cpython-311.pyc differ
diff --git a/src/apis/.DS_Store b/src/apis/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..7fc336d00caf80f1e4ec4e8f2988d027c84e4563
Binary files /dev/null and b/src/apis/.DS_Store differ
diff --git a/src/apis/__pycache__/__init__.cpython-311.pyc b/src/apis/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5bd667d13f88b0fbb418859c1e6f8a3fc0542afe
Binary files /dev/null and b/src/apis/__pycache__/__init__.cpython-311.pyc differ
diff --git a/src/apis/__pycache__/create_app.cpython-311.pyc b/src/apis/__pycache__/create_app.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..24ccee12b1ad1e0ee3b903a5f62add48bfcc44a9
Binary files /dev/null and b/src/apis/__pycache__/create_app.cpython-311.pyc differ
diff --git a/src/apis/controllers/.DS_Store b/src/apis/controllers/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..036fd422d6f005aea9400fe2566e5b266263c94b
Binary files /dev/null and b/src/apis/controllers/.DS_Store differ
diff --git a/src/apis/controllers/__pycache__/__init__.cpython-311.pyc b/src/apis/controllers/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e06639b9993a3864ea4b08f5512333845a6c2fb0
Binary files /dev/null and b/src/apis/controllers/__pycache__/__init__.cpython-311.pyc differ
diff --git a/src/apis/controllers/__pycache__/auth_controller.cpython-311.pyc b/src/apis/controllers/__pycache__/auth_controller.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3c7cfe22c815416a62ac04f255fc7e61d6b9cceb
Binary files /dev/null and b/src/apis/controllers/__pycache__/auth_controller.cpython-311.pyc differ
diff --git a/src/apis/controllers/__pycache__/chat_controller.cpython-311.pyc b/src/apis/controllers/__pycache__/chat_controller.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5688b572d884db5283526f6b75a9b54cb8aed04d
Binary files /dev/null and b/src/apis/controllers/__pycache__/chat_controller.cpython-311.pyc differ
diff --git a/src/apis/controllers/__pycache__/destination_controller.cpython-311.pyc b/src/apis/controllers/__pycache__/destination_controller.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..992e91ba917385fe8023749463fcc5f91a16b805
Binary files /dev/null and b/src/apis/controllers/__pycache__/destination_controller.cpython-311.pyc differ
diff --git a/src/apis/controllers/__pycache__/hotel_controller.cpython-311.pyc b/src/apis/controllers/__pycache__/hotel_controller.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ddc42a80c8be86e5ea3321f93f6fc3752f048061
Binary files /dev/null and b/src/apis/controllers/__pycache__/hotel_controller.cpython-311.pyc differ
diff --git a/src/apis/controllers/__pycache__/location_controller.cpython-311.pyc b/src/apis/controllers/__pycache__/location_controller.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..43f2d1975ce33d151b93c831d31a40caec37bd4b
Binary files /dev/null and b/src/apis/controllers/__pycache__/location_controller.cpython-311.pyc differ
diff --git a/src/apis/controllers/__pycache__/planner_controller.cpython-311.pyc b/src/apis/controllers/__pycache__/planner_controller.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..688040472758fe2425cc722bc44d93d9f66d3fee
Binary files /dev/null and b/src/apis/controllers/__pycache__/planner_controller.cpython-311.pyc differ
diff --git a/src/apis/controllers/__pycache__/post_controller.cpython-311.pyc b/src/apis/controllers/__pycache__/post_controller.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dedf21a2b8132c0f14fe773246f68620a9fa7d20
Binary files /dev/null and b/src/apis/controllers/__pycache__/post_controller.cpython-311.pyc differ
diff --git a/src/apis/controllers/__pycache__/scheduling_controller.cpython-311.pyc b/src/apis/controllers/__pycache__/scheduling_controller.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..abff71eed991a6519cf46cc30f5d95e9b60c25b0
Binary files /dev/null and b/src/apis/controllers/__pycache__/scheduling_controller.cpython-311.pyc differ
diff --git a/src/apis/controllers/auth_controller.py b/src/apis/controllers/auth_controller.py
new file mode 100644
index 0000000000000000000000000000000000000000..53a5bfc68d895039983a8c59184d060753efec5f
--- /dev/null
+++ b/src/apis/controllers/auth_controller.py
@@ -0,0 +1,38 @@
+from fastapi import HTTPException, status
+from src.apis.models.user_models import User
+from src.utils.mongo import UserCRUD
+from src.apis.providers.jwt_provider import JWTProvider
+from src.utils.logger import logger
+import jwt
+
+jwt_provider = JWTProvider()
+
+
+async def login_control(token):
+    if not token:
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail="Authorization Token is required",
+        )
+    decoded_token = jwt.decode(token, options={"verify_signature": False})  # NOTE(review): signature is NOT verified here -- confirm an upstream gateway/IdP validates this token before trusting its claims
+    decoded_data = {
+        "name": decoded_token["name"],
+        "email": decoded_token["email"],
+        "picture": decoded_token["picture"],
+    }
+    user = User(**decoded_data)
+    logger.info(f"User {user} is logging in.")
+    existing_user = await UserCRUD.read_one({"email": user.email})
+    if not existing_user:
+        user_id = await UserCRUD.create(user.model_dump())
+        logger.info(f"User {user.email} created.")
+    else:
+        user_id = existing_user["_id"]
+
+    logger.info(f"User {user.email} logged in.")
+    session_token = jwt_provider.encrypt({"id": str(user_id)})  # renamed from `token` to avoid shadowing the incoming parameter
+    user_data = user.__dict__
+    user_data.pop("created_at", None)
+    user_data.pop("updated_at", None)
+    user_data.pop("expire_at", None)
+    return session_token, user_data
diff --git a/src/apis/controllers/chat_controller.py b/src/apis/controllers/chat_controller.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d230ff701eae4cd7bec9d748ecc98f28035379f
--- /dev/null
+++ b/src/apis/controllers/chat_controller.py
@@ -0,0 +1,240 @@
+from langchain_core.messages import HumanMessage, AIMessage
+from src.langgraph.multi_agent.chat.chat_flow import app as workflow
+from src.utils.mongo import chat_messages_history
+from src.utils.logger import logger
+from src.utils.mongo import chat_history_management_crud
+from src.apis.interfaces.api_interface import Chat
+from src.utils.helper import handle_validator_raise
+from src.utils.redis import get_key_redis, set_key_redis
+import json
+from langchain_core.messages.ai import AIMessageChunk
+from fastapi import BackgroundTasks
+from fastapi.responses import JSONResponse
+
+
+@handle_validator_raise
+def post_process_history(history):
+ processed_history = []
+ for entry in history:
+ if entry["type"] == "human":
+ processed_history.append(HumanMessage(content=entry["content"]))
+ elif entry["type"] == "ai":
+ processed_history.append(AIMessage(content=entry["content"]))
+ return processed_history
+
+
+async def save_history(user_id, human_message, ai_message, intent):
+    messages_add_to_history = [HumanMessage(human_message), AIMessage(ai_message)]
+    messages_add_to_history_dict = [
+        {"type": "human", "content": human_message},
+        {"type": "ai", "content": ai_message},
+    ]
+    messages_add_to_history_cache = {
+        "message": messages_add_to_history_dict,
+        "intent": intent,
+    }
+    history = chat_messages_history(user_id)
+    await history.aadd_messages(messages_add_to_history)
+    check_exist_history = await chat_history_management_crud.read_one(
+        {"session_id": user_id}
+    )
+    if check_exist_history is None:
+        await chat_history_management_crud.create(
+            {"user_id": user_id, "session_id": user_id, "intent": intent}
+        )
+        logger.info("History created")
+    else:
+        await chat_history_management_crud.update(
+            {"session_id": user_id}, {"intent": intent}
+        )
+        logger.info("History updated")
+    history_cache = await get_key_redis(f"chat_history_{user_id}")
+    if history_cache is not None:
+        import ast; history_cache = ast.literal_eval(history_cache)  # SECURITY FIX: literal_eval parses the repr'd dict without executing code (was eval)
+        history_cache["message"] = (
+            history_cache["message"] + messages_add_to_history_dict
+        )
+        history_cache["intent"] = intent
+        await set_key_redis(
+            f"chat_history_{user_id}",
+            str(history_cache),
+        )
+        return {"message": "History updated"}
+    await set_key_redis(f"chat_history_{user_id}", str(messages_add_to_history_cache))
+    return {"message": "History created"}
+
+
+async def chat_streaming_function(user, data: Chat, background_tasks: BackgroundTasks):  # NDJSON streaming chat: yields tool_call/message frames, then one "final" frame
+    human_message = data.message
+    history = data.history
+    lat = data.lat
+    long = data.long
+    language = data.language
+    logger.info(f"Language: {language}")
+    process_history = post_process_history(history) if history is not None else None
+    config = {
+        "configurable": {
+            "user_id": user["id"],
+            "user_email": user["email"],
+            "contact_number": user["contact_number"],
+            "session_id": user["id"],
+            "lat": lat,
+            "long": long,
+        }
+    }
+    # Build the initial graph state; entry_message/tool_name are filled in by the graph.
+    initial_input = {
+        "messages": [("user", human_message)],
+        "messages_history": process_history,
+        "entry_message": None,
+        "manual_save": False,
+        "intent": data.intent,
+        "language": language,
+        "tool_name": None,
+    }
+    last_output_state = None
+    temp = ""
+    async for event in workflow.astream(
+        input=initial_input,
+        config=config,
+        stream_mode=["messages", "values"],
+    ):
+        event_type, event_message = event
+        if event_type == "messages":
+            message, metadata = event_message
+            if (
+                isinstance(message, AIMessageChunk)
+                and message.tool_calls
+                and message.tool_call_chunks[0]["name"] != "ClassifyUserIntent"
+            ):
+                tool_name = message.tool_call_chunks[0]["name"]
+                message_yield = json.dumps(
+                    {"type": "tool_call", "content": tool_name}, ensure_ascii=False
+                )
+                print(message_yield)  # NOTE(review): debug print left in the request path
+                yield message_yield + "\n"
+            if metadata["langgraph_node"] in [
+                "primary_assistant",
+                "scheduling_agent",
+                "book_hotel_agent",
+            ]:
+
+                if message.content:
+                    temp += message.content  # accumulate so each frame carries the full text so far
+                    message_yield = json.dumps(
+                        {"type": "message", "content": temp}, ensure_ascii=False
+                    )
+                    print(message_yield)  # NOTE(review): debug print left in the request path
+                    yield message_yield + "\n"
+        if event_type == "values":
+            last_output_state = event_message
+
+    final_ai_output = last_output_state["messages"][-1].content
+    final_intent = last_output_state["intent"]
+    tool_name_important = last_output_state["tool_name"]
+
+    final_response = json.dumps(
+        {
+            "type": "final",
+            "content": final_ai_output,
+            "intent": final_intent,
+            "tool_name": tool_name_important,
+        },
+        ensure_ascii=False,
+    )
+    yield final_response  # final frame has no trailing "\n", unlike incremental frames -- confirm clients expect this
+
+    background_tasks.add_task(
+        save_history, user["id"], human_message, final_ai_output, final_intent
+    )
+
+
+async def chat_function(user, data: Chat, background_tasks: BackgroundTasks):
+    message = data.message
+    history = data.history
+    lat = data.lat
+    long = data.long
+    language = data.language
+    logger.info(f"Language: {language}")
+    process_history = post_process_history(history) if history is not None else None
+    config = {
+        "configurable": {
+            "user_id": user["id"],
+            "user_email": user["email"],
+            "contact_number": user["contact_number"],
+            "session_id": user["id"],
+            "lat": lat,
+            "long": long,
+        }
+    }
+    # Non-streaming variant of chat_streaming_function: single graph invoke, JSON reply.
+    initial_input = {
+        "messages": [("user", message)],
+        "messages_history": process_history,
+        "entry_message": None,
+        "manual_save": False,
+        "intent": data.intent,
+        "language": language,
+        "tool_name": None,
+    }
+    output = await workflow.ainvoke(initial_input, config)
+
+    final_ai_output = output["messages"][-1].content
+    final_intent = output["intent"]
+    tool_name = output["tool_name"]
+
+    if final_ai_output is None:
+        return JSONResponse(
+            content={"message": "Error in chat_function"}, status_code=500
+        )
+    background_tasks.add_task(
+        save_history, user["id"], data.message, final_ai_output, final_intent
+    )
+
+    response_output = {
+        "message": final_ai_output,
+        "intent": final_intent,
+        "tool_name": tool_name,
+    }
+    return JSONResponse(content=response_output, status_code=200)
+
+
+async def get_intent_function(session_id):  # Return the last stored intent for this session, or None when no record exists
+    record = await chat_history_management_crud.read_one({"session_id": session_id})
+    if record is None:
+        return None
+
+    return record["intent"]
+
+
+async def get_history_function(session_id):
+    history = chat_messages_history(session_id, 50)
+    try:
+        history_messages = await get_key_redis(f"chat_history_{session_id}")
+        if not history_messages:
+            logger.info("History not found in redis")
+            history_messages = await history.aget_messages()
+            history_messages = [
+                i.model_dump(include=["type", "content"]) for i in history_messages
+            ]
+            intent = await get_intent_function(session_id)
+            return {"message": history_messages, "intent": intent}
+        import ast; history_messages = ast.literal_eval(history_messages)  # SECURITY FIX: literal_eval parses the repr'd dict without executing code (was eval)
+        return history_messages
+    except Exception as e:
+        logger.error(f"Error in get_history_function: {e}")
+        return {"message": [], "intent": None}
+
+
+async def list_chat_history_function(user_id: str):  # List session_ids that have stored chat history for this user
+    result = await chat_history_management_crud.read({"user_id": user_id})
+    if result is None:
+        return []
+    result = [i["session_id"] for i in result]
+    return result
+
+
+async def delete_chat_history_function(session_id: str):  # NOTE(review): the redis cache key "chat_history_<id>" is not invalidated here -- confirm intended
+    history = chat_messages_history(session_id, 50)
+    await history.aclear()
+    return {"message": "Chat history has been deleted"}
diff --git a/src/apis/controllers/destination_controller.py b/src/apis/controllers/destination_controller.py
new file mode 100644
index 0000000000000000000000000000000000000000..cd3e00a119125345cb57b34fc2618165c99f862b
--- /dev/null
+++ b/src/apis/controllers/destination_controller.py
@@ -0,0 +1,37 @@
+from typing import List, Dict, Any
+import aiohttp
+from fastapi import HTTPException
+from src.utils.logger import logger
+async def destination_suggestion_controller(question: str, top_k: int = 5) -> List[Dict[str, Any]]:  # Two-step call to the external tagging/suggestion service
+    async with aiohttp.ClientSession() as session:
+        # Get question tags
+        try:
+            async with session.get(
+                f"https://darkbreakerk-triventure-ai.hf.space/model/get_question_tags/{question}"  # NOTE(review): question is interpolated into the URL path unencoded -- confirm encoding is handled
+            ) as response_tag:
+                if response_tag.status == 200:
+                    tag_data = await response_tag.json()
+                    tags = " ".join(tag_data["question_tags"])
+                    logger.info(f"Tags: {tags} for question: {question}")
+                else:
+                    raise HTTPException(
+                        status_code=response_tag.status,
+                        detail=f"Tag request failed with status {response_tag.status}"
+                    )
+
+            # Get destinations list
+            async with session.get(
+                f"https://darkbreakerk-triventure-ai.hf.space/model/get_destinations_list/{tags}/{top_k}"
+            ) as response:
+                if response.status == 200:
+                    data = await response.json()
+                    logger.info(f"Destination suggestion for question: {data}")
+                    return data["destinations_list"]
+                else:
+                    raise HTTPException(
+                        status_code=response.status,
+                        detail=f"Destinations request failed with status {response.status}"
+                    )
+
+        except aiohttp.ClientError as e:
+            raise HTTPException(status_code=500, detail=f"Request failed: {str(e)}")
\ No newline at end of file
diff --git a/src/apis/controllers/hotel_controller.py b/src/apis/controllers/hotel_controller.py
new file mode 100644
index 0000000000000000000000000000000000000000..89e2802fde069bbee7641f4890c78c8b49ebc437
--- /dev/null
+++ b/src/apis/controllers/hotel_controller.py
@@ -0,0 +1,109 @@
+from typing import Optional, Dict, Union
+from datetime import datetime
+from src.utils.mongo import BookHotelCRUD
+from src.utils.logger import logger
+import smtplib
+import os
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+
+
+def send_booking_confirmation_email(
+    user_email: str,
+    user_contact_number: str,
+    hotel_email: str,
+    start_time: datetime,
+    end_time: datetime,
+):
+    host_email = os.environ["SMTP_HOST_EMAIL"]  # SECURITY FIX: was a hardcoded address; the committed one must be rotated
+    msg = MIMEMultipart()
+    msg["From"] = host_email
+    msg["To"] = hotel_email
+    msg["Subject"] = f"TriVenture AI Application Booking from {user_email}"
+
+    email_content = f"""
+
+
+    Booking Confirmation
+    Dear Hotel Manager,
+
+    I would like to book your hotel from {start_time.strftime('%Y-%m-%d %H:%M:%S')} to {end_time.strftime('%Y-%m-%d %H:%M:%S')}.
+
+    My personal information is as follows:
+
+    - Email: {user_email}
+    - Contact Number: {user_contact_number}
+
+    With start time: {start_time.strftime('%Y-%m-%d %H:%M:%S')} and end time: {end_time.strftime('%Y-%m-%d %H:%M:%S')}.
+
+    Best regards,
+    TriVenture AI Application
+
+
+    """
+    msg.attach(MIMEText(email_content, "html"))
+    try:
+        server = smtplib.SMTP("smtp.gmail.com", 587)
+        server.starttls()
+        server.login(host_email, os.environ["SMTP_APP_PASSWORD"])  # SECURITY FIX: was a hardcoded Gmail app password; REVOKE the leaked one
+        server.sendmail(host_email, hotel_email, msg.as_string())
+        server.quit()
+        logger.info("Booking confirmation email sent successfully.")
+    except Exception as e:
+        logger.error(f"Failed to send email: {str(e)}")
+
+
+async def book_hotel_controller(
+    hotel_email: str,
+    hotel_name: str,
+    address: str,
+    phone_number: Optional[str],
+    website: Optional[str],
+    start_time_str: Optional[datetime],
+    end_time_str: Optional[datetime],
+    user_id,
+) -> Dict[str, str]:  # fixed: Union[Dict[str, str], Dict[str, str]] collapsed to its single member type
+    try:
+        check_existing = await BookHotelCRUD.read_one(
+            {
+                "user_id": user_id,
+                "$or": [  # overlap test: any of this user's bookings intersecting [start, end]
+                    {
+                        "start_time": {"$lte": start_time_str},
+                        "end_time": {"$gt": start_time_str},
+                    },
+                    {
+                        "start_time": {"$lt": end_time_str},
+                        "end_time": {"$gte": end_time_str},
+                    },
+                    {
+                        "start_time": {"$gte": start_time_str},
+                        "end_time": {"$lte": end_time_str},
+                    },
+                ],
+            }
+        )
+
+        if check_existing:
+            logger.info(f"Existing booking: {check_existing}")
+            return {
+                "status": "error",
+                "message": "In the same time, you have already booked a hotel named: "
+                + check_existing["hotel_name"],
+            }
+
+        result = await BookHotelCRUD.create(
+            {
+                "user_id": user_id,
+                "hotel_name": hotel_name,
+                "address": address,
+                "phone_number": phone_number,
+                "hotel_email": hotel_email,
+                "website": website,
+                "start_time": start_time_str,
+                "end_time": end_time_str,
+            }
+        )
+        logger.info(f"Hotel booking result: {result}")
+        return {"status": "success", "message": "Hotel booked successfully"}
+    except Exception as e:
+        return {"status": "error", "message": str(e)}
diff --git a/src/apis/controllers/location_controller.py b/src/apis/controllers/location_controller.py
new file mode 100644
index 0000000000000000000000000000000000000000..7d4089eb157e9634a8123e12bcf9ee94c3a975bd
--- /dev/null
+++ b/src/apis/controllers/location_controller.py
@@ -0,0 +1,77 @@
+from src.utils.logger import logger
+from src.utils.helper import format_geoapify_response, format_weather_data
+from fastapi.responses import JSONResponse
+import requests
+import os
+
+
+def get_location_details(lat, long):  # Reverse-geocode coordinates via OpenCage; returns formatted address or a 500 error
+    api_key = os.getenv("OPENCAGE_API_KEY")
+    url = f"https://api.opencagedata.com/geocode/v1/json?q={lat},{long}&pretty=1&key={api_key}"
+    response = requests.get(url)
+    if response.status_code == 200:
+        logger.info("Location details fetched successfully")
+        return JSONResponse(
+            content={"location": response.json()["results"][0]["formatted"]},
+            status_code=200,
+        )
+    else:
+        return JSONResponse(
+            content={"error": "Error fetching location details"}, status_code=500
+        )
+
+
+def get_nearby_places(lat, long, radius, kinds):
+    api_key = os.getenv("OPENTRIPMAP_API_KEY", None)
+    if api_key is None:
+        logger.error("OpenTripMap API key not found")
+        return JSONResponse(content={"error": "API key not found"}, status_code=500)  # BUG FIX: JSONResponse takes status_code, not status (was a TypeError)
+    url = "https://api.opentripmap.com/0.1/en/places/radius"
+    params = {
+        "radius": radius,
+        "lon": long,
+        "lat": lat,
+        "kinds": kinds,
+        "apikey": api_key,
+    }
+    response = requests.get(url, params=params, headers={"accept": "application/json"})
+    if response.status_code == 200:
+        logger.info("Places fetched successfully")
+        return JSONResponse(
+            content={"places": response.json().get("features", [])}, status_code=200
+        )
+    else:
+        return JSONResponse(content={"error": "Error fetching places"}, status_code=500)
+
+
+def get_places(lat, long, radius, categories, limit=20):
+    api_key = os.getenv("GEOAPIFY_API_KEY", None)
+    if api_key is None:
+        logger.error("Geoapify API key not found")
+        return JSONResponse(content={"error": "API key not found"}, status_code=500)  # BUG FIX: was status= (invalid kwarg, TypeError at runtime)
+    url = f"https://api.geoapify.com/v2/places?categories={categories}&filter=circle:{long},{lat},{radius}&limit={limit}&apiKey={api_key}"
+    response = requests.get(url)
+    if response.status_code == 200:
+        response = response.json().get("features", [])
+        # logger.info(f"RESPONSE:{response}")
+        if response:
+            response = format_geoapify_response(response, long, lat)
+        return JSONResponse(content=response, status_code=200)
+    else:
+        return JSONResponse(content={"error": "Error fetching places"}, status_code=500)
+
+
+def get_weather(lat, long):
+    api_key = os.getenv("OPENWEATHER_API_KEY", None)
+    if api_key is None:
+        logger.error("OpenWeather API key not found")
+        return JSONResponse(content={"error": "API key not found"}, status_code=500)  # BUG FIX: was status= (invalid kwarg, TypeError at runtime)
+    url = f"https://api.openweathermap.org/data/3.0/onecall?lat={lat}&lon={long}&exclude=hourly,daily&appid={api_key}"
+    response = requests.get(url)
+    if response.status_code == 200:
+        response = format_weather_data(response.json())
+        return JSONResponse(content=response, status_code=200)
+    else:
+        return JSONResponse(
+            content={"error": "Error fetching weather"}, status_code=500
+        )
diff --git a/src/apis/controllers/planner_controller.py b/src/apis/controllers/planner_controller.py
new file mode 100644
index 0000000000000000000000000000000000000000..060300e6ceb45cb490a45d9c28ce6a72e0f4dfc5
--- /dev/null
+++ b/src/apis/controllers/planner_controller.py
@@ -0,0 +1,55 @@
+import json
+from datetime import datetime
+from fastapi import BackgroundTasks
+from pydantic import BaseModel, Field
+from src.langgraph.multi_agent.planner.planner_flow import planner_app
+from src.utils.helper import parse_itinerary
+
+
+async def message_generator(input_graph, config, background: BackgroundTasks):
+
+    last_output_state = None
+    temp = ""
+    async for event in planner_app.astream(
+        input=input_graph,
+        config=config,
+        stream_mode=["messages", "values"],
+    ):
+        event_type, event_message = event
+        if event_type == "messages":
+            message, _ = event_message
+            if message.content:
+                temp += message.content
+                message_yield = json.dumps(
+                    {"type": "message", "content": temp},
+                    ensure_ascii=False,
+                )
+                yield message_yield + "\n"
+        if event_type == "values":
+            last_output_state = event_message
+    parsed_output = parse_itinerary(last_output_state["final_answer"])
+    final_response = json.dumps(
+        {
+            "type": "final",
+            "content": parsed_output,
+        },
+        ensure_ascii=False,
+    )
+    yield final_response + "\n"
+
+
+class Activity(BaseModel):
+    """Activity model"""
+
+    description: str = Field(
+        ..., description="Short description of the activity can have location"
+    )
+    start_time: datetime = Field(..., description="Start time of the activity")
+    end_time: datetime = Field(..., description="End time of the activity")
+
+
+class Output(BaseModel):
+    """Output model"""
+
+    activities: list[Activity] = Field(..., description="List of activities")
+    note: str = Field(..., description="Note for the user")
diff --git a/src/apis/controllers/post_controller.py b/src/apis/controllers/post_controller.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0b27447297f05a7a8e3790226a3821fc658252a
--- /dev/null
+++ b/src/apis/controllers/post_controller.py
@@ -0,0 +1,118 @@
+from typing import Optional, Dict, Union
+from datetime import datetime
+from src.utils.mongo import PostCRUD, CommentCRUD, LikeCRUD
+from src.utils.logger import logger
+from bson import ObjectId
+
+
+def serialize_datetime(obj):
+    if isinstance(obj, datetime):
+        return obj.isoformat()  # datetime -> ISO-8601 string for JSON responses
+    if isinstance(obj, ObjectId):
+        return str(obj)  # ObjectId -> hex string
+    return obj
+
+
+async def create_a_post_controller(
+    content: str, user_id: str, destination_id: str
+) -> Dict:
+    try:
+        post = {
+            "content": content,
+            "user_id": user_id,
+            "destination_id": destination_id,
+            "comment_ids": [],  # new posts start with no comments or likes
+            "like": [],
+        }
+        await PostCRUD.create(post)
+        return {"status": "success", "message": "Post created successfully"}
+    except Exception as e:
+        return {"status": "error", "message": str(e)}
+
+
+async def get_a_post_controller(post_id: str) -> Dict:
+    try:
+        post = await PostCRUD.find_by_id(post_id)
+        if post is None:
+            return {"status": "error", "message": "Post not found"}
+
+        # Convert datetime objects in the post
+        serialized_post = {
+            "id": serialize_datetime(post.get("_id")),  # ObjectId -> str for JSON
+            "content": post.get("content"),
+            "user_id": post.get("user_id"),
+            "destination_id": post.get("destination_id"),
+            "comment_ids": post.get("comment_ids", []),
+            "like": post.get("like", []),
+            "created_at": serialize_datetime(post.get("created_at")),
+            "updated_at": serialize_datetime(post.get("updated_at")),
+        }
+
+        return {"status": "success", "message": serialized_post}
+    except Exception as e:
+        logger.error(f"Error getting post: {str(e)}")
+        return {"status": "error", "message": str(e)}
+
+
+async def list_all_posts_controller():  # NOTE(review): unpaginated full-collection scan -- acceptable only for small collections
+    try:
+        posts = await PostCRUD.find_all()
+        serialized_posts = []
+        for post in posts:
+            serialized_post = {
+                "id": serialize_datetime(post.get("_id")),
+                "content": post.get("content"),
+                "user_id": post.get("user_id"),
+                "destination_id": post.get("destination_id"),
+                "comment_ids": post.get("comment_ids", []),
+                "like": post.get("like", []),
+                "created_at": serialize_datetime(post.get("created_at")),
+                "updated_at": serialize_datetime(post.get("updated_at")),
+            }
+            serialized_posts.append(serialized_post)
+        return {"status": "success", "message": serialized_posts}
+    except Exception as e:
+        logger.error(f"Error listing posts: {str(e)}")
+        return {"status": "error", "message": str(e)}
+
+
+async def update_a_post_controller(
+    user_id: str, post_id: str, content: str, destination_id: str
+) -> Dict:
+    try:
+        exist_data = await PostCRUD.find_by_id(post_id)
+        if exist_data is None:  # BUG FIX: None check must run before indexing exist_data["user_id"]
+            return {"status": "error", "message": "Post not found"}
+        if exist_data["user_id"] != user_id:
+            return {
+                "status": "error",
+                "message": "You are not allowed to update this post",
+            }
+        await PostCRUD.update(
+            {"_id": ObjectId(post_id)},
+            {
+                "content": content,
+                "destination_id": destination_id,
+            },
+        )
+        return {"status": "success", "message": "Post updated successfully"}
+    except Exception as e:
+        logger.error(f"Error updating post: {str(e)}")
+        return {"status": "error", "message": str(e)}
+
+
+async def delete_a_post_controller(user_id: str, post_id: str) -> Dict:
+    try:
+        exist_data = await PostCRUD.find_by_id(post_id)
+        if exist_data is None:  # BUG FIX: None check must run before indexing exist_data["user_id"]
+            return {"status": "error", "message": "Post not found"}
+        if exist_data["user_id"] != user_id:
+            return {
+                "status": "error",
+                "message": "You are not allowed to delete this post",
+            }
+        await PostCRUD.delete({"_id": ObjectId(post_id)})
+        return {"status": "success", "message": "Post deleted successfully"}
+    except Exception as e:
+        logger.error(f"Error deleting post: {str(e)}")
+        return {"status": "error", "message": str(e)}
diff --git a/src/apis/controllers/scheduling_controller.py b/src/apis/controllers/scheduling_controller.py
new file mode 100644
index 0000000000000000000000000000000000000000..e2a1b5ba0e38fe65d12a7c528f507c4aa87700d4
--- /dev/null
+++ b/src/apis/controllers/scheduling_controller.py
@@ -0,0 +1,238 @@
+from typing import Optional, Dict, Union
+from datetime import datetime
+from src.utils.mongo import ScheduleCRUD
+from src.utils.logger import logger
+from datetime import datetime
+from bson import ObjectId
+from typing import Optional, Dict
+from datetime import datetime
+from bson import ObjectId
+
+
async def create_a_activity_controller(
    activity_id: Optional[str],
    activity_category: str,
    description: str,
    start_time: datetime,
    end_time: datetime,
    user_id: str,
) -> Dict[str, str]:
    """Create a schedule activity for user_id, rejecting overlapping time ranges."""
    try:
        # An activity conflicts when it covers start_time, covers end_time,
        # or lies entirely inside the requested window.
        overlap_filter = {
            "user_id": user_id,
            "$or": [
                {"start_time": {"$lte": start_time}, "end_time": {"$gt": start_time}},
                {"start_time": {"$lt": end_time}, "end_time": {"$gte": end_time}},
                {"start_time": {"$gte": start_time}, "end_time": {"$lte": end_time}},
            ],
        }
        conflict = await ScheduleCRUD.read_one(overlap_filter)
        if conflict:
            # Echo the conflicting activity's own details back to the caller.
            conflict_description = conflict.get("description", "N/A")
            conflict_category = conflict.get("activity_category", "N/A")
            conflict_start = conflict.get("start_time", "N/A")
            conflict_end = conflict.get("end_time", "N/A")
            return {
                "status": "error",
                "message": f"""Overlapping activities found:\nDescription: {conflict_description}, \nCategory: {conflict_category}, \nStart time: {conflict_start}, \nEnd time: {conflict_end}. Please update or delete the existing activity to create a new one.""",
            }

        new_activity = {
            "user_id": user_id,
            "activity_category": activity_category,
            "description": description,
            "start_time": start_time,
            "end_time": end_time,
        }
        if activity_id:
            logger.info(f"Create activity with ID: {activity_id}")
            new_activity["id"] = activity_id
        await ScheduleCRUD.create(new_activity)
        return {"status": "success", "message": "Activity created successfully"}

    except Exception as e:
        logger.error(f"Error creating activity: {e}")
        return {"status": "error", "message": f"Error creating activity: {str(e)}"}
+
+
async def search_activities_controller(
    start_time: datetime,
    end_time: datetime,
    user_id: str,
) -> Dict[str, Union[str, list[dict]]]:
    """Return the user's activities, optionally filtered to a time window.

    When either bound is falsy, every activity of user_id is returned.
    """
    try:
        query: dict = {"user_id": user_id}
        if start_time and end_time:
            # Match activities that cover either bound or sit inside the window.
            query["$or"] = [
                {"start_time": {"$lte": start_time}, "end_time": {"$gt": start_time}},
                {"start_time": {"$lt": end_time}, "end_time": {"$gte": end_time}},
                {"start_time": {"$gte": start_time}, "end_time": {"$lte": end_time}},
            ]
        activities = await ScheduleCRUD.read(query)
        return {"status": "success", "message": activities}
    except Exception as e:
        logger.error(f"Error reading activities: {e}")
        return {"status": "error", "message": f"Error reading activities {e}"}
+
+
async def update_a_activity_controller(
    activity_id: Optional[str],
    activity_category: str,
    description: str,
    start_time: datetime,
    end_time: datetime,
    user_id: str,
) -> Dict[str, str]:
    """Update an activity located either by id or by overlap with [start_time, end_time].

    Returns a {"status", "message"} dict; errors are logged and reported in
    the payload rather than raised.
    """
    try:
        if activity_id:
            # Security fix: scope the id lookup to user_id so one user cannot
            # update another user's activity by guessing its id (IDOR).
            existing_activity = await ScheduleCRUD.read_one(
                {"_id": ObjectId(activity_id), "user_id": user_id}
            )
            if not existing_activity:
                return {
                    "status": "error",
                    "message": f"Activity with id {activity_id} not found",
                }
        else:
            # No id given: find the user's activity overlapping the window.
            existing_activity = await ScheduleCRUD.read_one(
                {
                    "user_id": user_id,
                    "$or": [
                        {
                            "start_time": {"$lte": start_time},
                            "end_time": {"$gt": start_time},
                        },
                        {
                            "start_time": {"$lt": end_time},
                            "end_time": {"$gte": end_time},
                        },
                        {
                            "start_time": {"$gte": start_time},
                            "end_time": {"$lte": end_time},
                        },
                    ],
                }
            )
            if not existing_activity:
                # Bug fix: the original message interpolated activity_id,
                # which is always None on this branch.
                return {
                    "status": "error",
                    "message": "No activity found in the given time range",
                }
            activity_id = existing_activity["_id"]

        await ScheduleCRUD.update(
            {"_id": ObjectId(activity_id)},
            {
                "activity_category": activity_category,
                "description": description,
                "start_time": start_time,
                "end_time": end_time,
            },
        )
        return {"status": "success", "message": "Activity updated successfully"}
    except Exception as e:
        logger.error(f"Error updating activity: {e}")
        return {"status": "error", "message": f"Error updating activity {e}"}
+
+
async def delete_activities_controller(
    activity_id: Optional[str],
    start_time: datetime,
    end_time: datetime,
    user_id: str,
) -> Dict[str, str]:
    """Delete one activity by id, or every activity overlapping [start_time, end_time].

    Returns a {"status", "message"} dict; errors are logged and reported in
    the payload rather than raised.
    """
    try:
        if activity_id:
            # Security fix: scope the lookup and delete to user_id so users
            # cannot delete other users' activities by guessing ids (IDOR).
            id_filter = {"_id": ObjectId(activity_id), "user_id": user_id}
            existing_activity = await ScheduleCRUD.read_one(id_filter)
            if not existing_activity:
                return {
                    "status": "error",
                    "message": "Don't have activity at the given time",
                }
            # Delete single activity by ID
            await ScheduleCRUD.delete(id_filter)
            return {"status": "success", "message": "Successfully deleted activity"}

        # Build the overlap filter once and reuse it for both the read and
        # the delete (the original duplicated the same 20-line query).
        overlap_filter = {
            "user_id": user_id,
            "$or": [
                {"start_time": {"$lte": start_time}, "end_time": {"$gt": start_time}},
                {"start_time": {"$lt": end_time}, "end_time": {"$gte": end_time}},
                {"start_time": {"$gte": start_time}, "end_time": {"$lte": end_time}},
            ],
        }
        existing_activities = await ScheduleCRUD.read(overlap_filter)
        if not existing_activities:
            return {
                "status": "error",
                "message": "Don't have any activities at the given time range",
            }

        logger.info(f"Found {len(existing_activities)} activities to delete")
        # Delete all activities in the time range
        await ScheduleCRUD.delete(overlap_filter)
        return {
            "status": "success",
            "message": f"Successfully deleted {len(existing_activities)} activities",
        }

    except Exception as e:
        logger.error(f"Error deleting activities: {e}")
        return {"status": "error", "message": f"Error deleting activities: {e}"}
diff --git a/src/apis/create_app.py b/src/apis/create_app.py
new file mode 100644
index 0000000000000000000000000000000000000000..64d9662e037d8cd4696a0152dc36629f2db7d542
--- /dev/null
+++ b/src/apis/create_app.py
@@ -0,0 +1,64 @@
+from fastapi import FastAPI, APIRouter
+from contextlib import asynccontextmanager
+import base64
+import json
+import os
+from fastapi.middleware.cors import CORSMiddleware
+from src.apis.routes.chat_route import router as router_chat
+from src.apis.routes.auth_route import router as router_auth
+from src.apis.routes.location_route import router as router_location
+from src.apis.routes.hotel_route import router as router_hotel
+from src.apis.routes.travel_dest_route import router as router_travel_dest
+from src.apis.routes.scheduling_router import router as router_scheduling
+from src.apis.routes.planner_route import router as router_planner
+from src.apis.routes.post_router import router as router_post
+from src.utils.logger import logger
+
# Aggregate every feature router under one APIRouter; app.py mounts it on
# the FastAPI application via app.include_router(api_router).
api_router = APIRouter()
api_router.include_router(router_chat)
api_router.include_router(router_auth)
api_router.include_router(router_location)
api_router.include_router(router_hotel)
api_router.include_router(router_travel_dest)
api_router.include_router(router_scheduling)
api_router.include_router(router_planner)
api_router.include_router(router_post)
+
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: hydrate os.environ from ENCODED_ENV on startup.

    ENCODED_ENV is expected to hold a base64-encoded JSON object mapping
    variable names to values — presumably used on hosts that only allow a
    single secret (TODO confirm against the deployment setup).
    """
    logger.info("Starting the app")
    encoded_env = os.getenv("ENCODED_ENV")

    if encoded_env:
        # Decode the base64 payload and load it as a dict of env vars.
        env_data = json.loads(base64.b64decode(encoded_env).decode())
        for key, value in env_data.items():
            os.environ[key] = value
        # Security fix: the original print()ed MONGODB_URL — a connection
        # string with credentials — to stdout. Log only a non-sensitive
        # confirmation instead.
        logger.info(f"Loaded {len(env_data)} environment variables from ENCODED_ENV")
    yield
+
+
def create_app():
    """Build and configure the FastAPI application (docs served at "/").

    Security fix: the original hard-coded a signed JWT as the Swagger title,
    leaking a live token in the public API docs; replaced with a plain name.
    """
    app = FastAPI(
        docs_url="/",
        title="Travel Assistant API",
    )

    # NOTE(review): allow_origins=["*"] combined with allow_credentials=True
    # is rejected by browsers per the CORS spec — consider an explicit
    # origin whitelist if credentialed requests are needed.
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    return app
diff --git a/src/apis/interfaces/__pycache__/api_interface.cpython-311.pyc b/src/apis/interfaces/__pycache__/api_interface.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dd5a556a3ceccc595e8877def4519178dcbacfed
Binary files /dev/null and b/src/apis/interfaces/__pycache__/api_interface.cpython-311.pyc differ
diff --git a/src/apis/interfaces/__pycache__/auth_interface.cpython-311.pyc b/src/apis/interfaces/__pycache__/auth_interface.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..39b71f70b418ce10d87ae23f4deb0598fe63e8d7
Binary files /dev/null and b/src/apis/interfaces/__pycache__/auth_interface.cpython-311.pyc differ
diff --git a/src/apis/interfaces/api_interface.py b/src/apis/interfaces/api_interface.py
new file mode 100644
index 0000000000000000000000000000000000000000..ecfd12e2b45aef26b48d96ef810cfdf4a303dbfd
--- /dev/null
+++ b/src/apis/interfaces/api_interface.py
@@ -0,0 +1,106 @@
+from pydantic import BaseModel, Field
+from typing import Optional
+from src.apis.models.BaseModel import BaseDocument
+from typing import List
+
class Chat(BaseModel):
    """Request body for the /llm chat endpoints."""

    message: str = Field(..., title="Message from user")
    session_id: Optional[str] = Field("6701fe32d76fde9d8df1de8e", title="Session Id")
    history: Optional[list] = Field(None, title="Chat history")
    # Default coordinates presumably point at Quy Nhon, Vietnam — TODO confirm.
    lat: Optional[float] = Field(13.717162954654036, title="Latitude")
    long: Optional[float] = Field(109.21054173319894, title="Longitude")
    intent: Optional[str] = Field(None, title="Intent")
    language: Optional[str] = Field("en", title="Language")

    class Config:
        json_schema_extra = {
            "example": {
                "message": "Đề xuất cho 1 địa điểm",
                "session_id": "6d16c975e8b74d979d6d680e6ff536eb",
                "history": [
                    {"content": "tìm khách sạn xịn ở QUy Nhơn", "type": "human"},
                    {
                        "content": "search_hotels_luxury on frontend for user to select",
                        "type": "ai",
                    },
                ],
                "lat": 13.717162954654036,
                "long": 109.21054173319894,
                "intent": None,
                "language": "Vietnamese",
            }
        }
+
+
class ChatHistory(BaseModel):
    """Request body selecting a chat session by its id."""

    session_id: Optional[str] = Field(None, title="Session Id")

    class Config:
        json_schema_extra = {"example": {"session_id": "6701fe32d76fde9d8df1de8e"}}
+
+
class ChatHistoryManagement(BaseDocument):
    """Persistent record linking a chat session to its owner and detected intent."""

    session_id: str = Field("6701fe32d76fde9d8df1de8e", title="Session Id")
    user_id: str = Field("6701fe32d76fde9d8df1de8e", title="User Id")
    intent: Optional[str] = Field(None, title="Intent")

    class Config:
        json_schema_extra = {
            "example": {
                "session_id": "6701fe32d76fde9d8df1de8e",
                "user_id": "6701fe32d76fde9d8df1de8e",
                "intent": "greeting",
            }
        }
+
+
class Location(BaseModel):
    """Request body for nearby-place lookups around a coordinate."""

    lat: float = Field(13.717162954654036, title="Latitude")
    long: float = Field(109.21054173319894, title="Longitude")
    radius: Optional[int] = Field(5000, title="Radius in meters")
    location_text: Optional[str] = Field("Hanoi", title="Location text")
    # Category value presumably follows the place-API's taxonomy — TODO confirm.
    categories: Optional[str] = Field("interesting_places", title="Type of places")

    class Config:
        json_schema_extra = {
            "example": {
                "lat": 13.717162954654036,
                "long": 109.21054173319894,
                "radius": 5000,
                "location_text": "Hanoi",
                "categories": "interesting_places",
            }
        }
+
+
class Destination(BaseModel):
    """A destination item that can be pinned into a travel plan."""

    id: int = Field(..., title="Destination Id", gt=0)
    name: str = Field(..., title="Destination Name", min_length=1)
    location: str = Field(..., title="Location", min_length=1)
    description: str = Field(..., title="Description", min_length=1)
+
+
class Planning(BaseModel):
    """Request body for the trip-planner endpoint."""

    duration: str = Field("7", title="Duration")
    start_date: str = Field("June 1-7", title="Start date")
    location: str = Field("Quy Nhon, Vietnam", title="Location")
    interests: str = Field("natural, cultural", title="Interests")
    nation: str = Field("Vietnamese", title="Nation")
    include_destination: Optional[List[Destination]] = Field(
        [], title="Include destinations"
    )
    # NOTE(review): "interation" is a typo for "iteration", but the field
    # name is part of the public API, so it is kept as-is.
    limit_interation: Optional[int] = Field(3, title="Limit interation")

    class Config:
        # Fix: the example must validate against the model —
        # include_destination is a LIST of Destination objects (the original
        # showed a bare dict missing the required id/name/location fields).
        json_schema_extra = {
            "example": {
                "duration": "7",
                "start_date": "June 1-7",
                "location": "Quy Nhon, Vietnam",
                "interests": "natural, cultural",
                "nation": "Vietnamese",
                "include_destination": [
                    {
                        "id": 1,
                        "name": "Ky Co Beach",
                        "location": "Quy Nhon, Vietnam",
                        "description": "Ky Co Beach is a beautiful beach in Quy Nhon, Vietnam",
                    }
                ],
                "limit_interation": 3,
            }
        }
diff --git a/src/apis/interfaces/auth_interface.py b/src/apis/interfaces/auth_interface.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6fe50512152de51a0382ed00d63a615c5d337d8
--- /dev/null
+++ b/src/apis/interfaces/auth_interface.py
@@ -0,0 +1,18 @@
+from pydantic import BaseModel, Field
+
+
class Credential(BaseModel):
    """Login request body: the credential string to exchange for an app JWT."""

    # NOTE(review): `example=` as a Field kwarg is deprecated in pydantic v2
    # in favour of json_schema_extra / examples.
    credential: str = Field(..., example="F9P/3?@q2!vq")
+
+
class _LoginResponseInterface(BaseModel):
    """Inner payload of a successful login: the issued JWT."""

    token: str = Field(..., title="JWT Token")
+
+
class LoginResponseInterface(BaseModel):
    """Envelope for the login response: message plus token payload."""

    msg: str = Field(..., title="Message")
    data: _LoginResponseInterface = Field(..., title="User Data")
+
+
class AuthInterface(BaseModel):
    """Request body carrying a Google access token."""

    gtoken: str = Field(..., title="Google Access-Token")
\ No newline at end of file
diff --git a/src/apis/middlewares/__pycache__/auth_middleware.cpython-311.pyc b/src/apis/middlewares/__pycache__/auth_middleware.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..da758b815d97520db10fb795dfa8ea061b8fbf12
Binary files /dev/null and b/src/apis/middlewares/__pycache__/auth_middleware.cpython-311.pyc differ
diff --git a/src/apis/middlewares/auth_middleware.py b/src/apis/middlewares/auth_middleware.py
new file mode 100644
index 0000000000000000000000000000000000000000..3ac8b327467b6fd4f78f0d9384de748d6ed0f3a7
--- /dev/null
+++ b/src/apis/middlewares/auth_middleware.py
@@ -0,0 +1,40 @@
+from typing import Annotated
+from fastapi import Depends
+from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
+from fastapi.responses import JSONResponse
+from src.apis.providers.jwt_provider import jwt_provider as jwt
+from src.apis.models.user_models import get_user
+from src.utils.mongo import UserCRUD
+from bson import ObjectId
+from jose import JWTError
+from src.utils.logger import logger
+
+security = HTTPBearer()
+
+
async def get_current_user(
    credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)]
):
    """FastAPI dependency: resolve the Bearer token to a serialized user dict.

    Returns a 401 JSONResponse (rather than raising) when the token is
    missing, invalid, or does not map to an existing user.
    """
    try:
        token = credentials.credentials
        if not token:
            return JSONResponse(
                content={"msg": "Authentication failed"}, status_code=401
            )
        payload = jwt.decrypt(token)
        user_id: str = payload["id"]
        if not user_id:
            return JSONResponse(
                content={"msg": "Authentication failed"}, status_code=401
            )
        user = await UserCRUD.read_one({"_id": ObjectId(user_id)})
        # Bug fix: check for a missing user BEFORE dereferencing it — the
        # original called user.get(...) first, which raised AttributeError
        # (a 500) instead of a clean 401 when the id matched no document.
        if not user:
            return JSONResponse(
                content={"msg": "Authentication failed"}, status_code=401
            )
        logger.info(f"Request of user: {user.get('email', None)}")
        return get_user(user)
    except JWTError:
        return JSONResponse(content={"msg": "Authentication failed"}, status_code=401)
diff --git a/src/apis/models/BaseModel.py b/src/apis/models/BaseModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..7319bc494b00720a21e3fcd64d5bebba7956500d
--- /dev/null
+++ b/src/apis/models/BaseModel.py
@@ -0,0 +1,17 @@
+from pydantic import BaseModel, Field
+from typing import Optional
+
+from datetime import datetime, timezone
+
+
class BaseDocument(BaseModel):
    """Common timestamp fields shared by every persisted document model."""

    # Timestamps are timezone-aware UTC, generated when the model is built.
    created_at: Optional[datetime] = Field(
        default_factory=lambda: datetime.now(timezone.utc)
    )
    updated_at: Optional[datetime] = Field(
        default_factory=lambda: datetime.now(timezone.utc)
    )
    # Optional expiry marker; presumably paired with a Mongo TTL index — TODO confirm.
    expire_at: Optional[datetime] = None

    class Config:
        arbitrary_types_allowed = True
diff --git a/src/apis/models/__pycache__/BaseModel.cpython-311.pyc b/src/apis/models/__pycache__/BaseModel.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0006b141383df4a3e2b684da4a13984fef7cac41
Binary files /dev/null and b/src/apis/models/__pycache__/BaseModel.cpython-311.pyc differ
diff --git a/src/apis/models/__pycache__/destination_models.cpython-311.pyc b/src/apis/models/__pycache__/destination_models.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3aef38111f2f2869141db26d3878caf1b1c87391
Binary files /dev/null and b/src/apis/models/__pycache__/destination_models.cpython-311.pyc differ
diff --git a/src/apis/models/__pycache__/hotel_models.cpython-311.pyc b/src/apis/models/__pycache__/hotel_models.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..68db065dbb4398aebe5b89eedc336f7a6c15a10e
Binary files /dev/null and b/src/apis/models/__pycache__/hotel_models.cpython-311.pyc differ
diff --git a/src/apis/models/__pycache__/post_models.cpython-311.pyc b/src/apis/models/__pycache__/post_models.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0f3a75f50cf55a08b9d65bf4543ddfbe4d322fd4
Binary files /dev/null and b/src/apis/models/__pycache__/post_models.cpython-311.pyc differ
diff --git a/src/apis/models/__pycache__/schedule_models.cpython-311.pyc b/src/apis/models/__pycache__/schedule_models.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..886ac11f84dfcfc9e301908f1ceb52144f235ac2
Binary files /dev/null and b/src/apis/models/__pycache__/schedule_models.cpython-311.pyc differ
diff --git a/src/apis/models/__pycache__/user_models.cpython-311.pyc b/src/apis/models/__pycache__/user_models.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b0d82faa6c0a838c3a5fe2a9dbcfac838ddaa48b
Binary files /dev/null and b/src/apis/models/__pycache__/user_models.cpython-311.pyc differ
diff --git a/src/apis/models/destination_models.py b/src/apis/models/destination_models.py
new file mode 100644
index 0000000000000000000000000000000000000000..c691f45bbe50b946428c4a2cb7202487b9a9a767
--- /dev/null
+++ b/src/apis/models/destination_models.py
@@ -0,0 +1,28 @@
+from pydantic import Field
+from typing import Literal
+from .BaseModel import BaseDocument
+from bson import ObjectId
+
+
class Destination(BaseDocument):
    """A managed destination (hotel, restaurant, or attraction) document."""

    manager_id: str = Field("", description="Manager's id")
    address: str = Field("", description="Destination's address")
    name: str = Field("", description="Destination's name")
    picture: list[str] = Field([], description="Destination's picture")
    # NOTE(review): the default "" is not a member of the Literal set, so a
    # Destination built without an explicit type will not survive validation
    # of the default — confirm intended default (e.g. "hotel").
    type: Literal["hotel", "restaurant", "attraction"] = Field(
        "", description="Destination's type"
    )
    status: int = Field(0, description="Destination's status")

    model_config = {
        "json_schema_extra": {
            "example": {
                "manager_id": "1234567890",
                "address": "1234567890",
                "name": "ABAO Hotel",
                "picture": ["https://example.com/picture.jpg"],
                "type": "hotel",
                "status": 0,
            }
        }
    }
diff --git a/src/apis/models/hotel_models.py b/src/apis/models/hotel_models.py
new file mode 100644
index 0000000000000000000000000000000000000000..62b713a482a60f6deb72b1b02170fda42c637a35
--- /dev/null
+++ b/src/apis/models/hotel_models.py
@@ -0,0 +1,29 @@
+from pydantic import Field, EmailStr
+from typing import Optional
+from datetime import datetime
+from .BaseModel import BaseDocument
+
+
class BookHotel(BaseDocument):
    """Request body / document for a hotel booking."""

    hotel_name: str = Field("", description="Hotel's name")
    address: str = Field("", description="Hotel's address")
    phone_number: str = Field("", description="Hotel's phone number")
    # NOTE(review): "" is not a valid EmailStr; consider making this
    # Optional[EmailStr] = None if the field can legitimately be absent.
    hotel_email: EmailStr = Field("", description="Hotel's email")
    # Bug fix: the defaults were "" (a str), which is not a valid value for
    # Optional[datetime]; None is the correct "unset" default.
    start_time: Optional[datetime] = Field(None, description="Start time of the booking")
    end_time: Optional[datetime] = Field(None, description="End time of the booking")
    rating: str = Field("", description="Hotel's rating")
    website: str = Field("", description="Hotel's website")

    class Config:
        json_schema_extra = {
            "example": {
                "hotel_name": "Blue Lagoon Resort",
                "address": "123 Beachside Blvd, Paradise City, Island Nation 54321",
                "phone_number": "+1234567890",
                "hotel_email": "baohtqe170017@fpt.edu.vn",
                "start_time": "2025-01-05T14:00:00.000+00:00",
                "end_time": "2025-01-10T11:00:00.000+00:00",
                "rating": "4.5",
                "website": "https://www.bluelagoonresort.com",
            }
        }
diff --git a/src/apis/models/post_models.py b/src/apis/models/post_models.py
new file mode 100644
index 0000000000000000000000000000000000000000..9acdcd8cc5d56d171e63c4a169844bc1866afac4
--- /dev/null
+++ b/src/apis/models/post_models.py
@@ -0,0 +1,53 @@
+from pydantic import Field
+from .BaseModel import BaseDocument
+
class Comment(BaseDocument):
    """A user's comment on a post."""

    content: str = Field("", description="Post's content")
    user_id: str = Field("", description="User's id")
    post_id: str = Field("", description="Post's id")

    model_config = {
        # Fix: the original example was copy-pasted from Post and showed keys
        # (destination_id, comment_ids) that are not fields of this model
        # while omitting post_id.
        "json_schema_extra": {
            "example": {
                "content": "Great place to visit!",
                "user_id": "1234567890",
                "post_id": "1234567890",
            }
        }
    }
+
+
class Like(BaseDocument):
    """A like placed by a user on a post or a comment."""

    user_id: str = Field("", description="User's id")
    post_id: str = Field("", description="Post's id")
    comment_id: str = Field("", description="Comment's id")
    # Bug fix: the original declared gt=0 with a default of 0, so the default
    # value could never pass validation. ge=0 keeps the default legal while
    # still capping the value below 3 (accepted range is widened, never
    # narrowed, so existing callers are unaffected).
    type: int = Field(0, description="Type of like", ge=0, lt=3)

    model_config = {
        "json_schema_extra": {
            "example": {
                "user_id": "1234567890",
                "post_id": "1234567890",
                "comment_id": "1234567890",
            }
        }
    }
+
+
class Post(BaseDocument):
    """A user's post, optionally attached to a destination."""

    content: str = Field("", description="Post's content")
    user_id: str = Field("", description="User's id")
    destination_id: str = Field("", description="Destination's id")
    comment_ids: list[str] = Field([], description="Comment's id")
    like: list[str] = Field([], description="User's id who like this post")
    model_config = {
        "json_schema_extra": {
            "example": {
                "content": "John Doe",
                "user_id": "1234567890",
                "destination_id": "1234567890",
                "comment_ids": ["1234567890"],
            }
        }
    }
diff --git a/src/apis/models/schedule_models.py b/src/apis/models/schedule_models.py
new file mode 100644
index 0000000000000000000000000000000000000000..6fbfb802da5a2ba16e184cc2078f4bb84c3f2c3e
--- /dev/null
+++ b/src/apis/models/schedule_models.py
@@ -0,0 +1,25 @@
+from pydantic import Field
+from typing import Optional
+from datetime import datetime
+from .BaseModel import BaseDocument
+
+
class Schedule(BaseDocument):
    """A single calendar activity belonging to a user."""

    id: Optional[str] = Field("", description="Activity's id")
    user_id: str = Field("", description="User's id")
    activity_category: str = Field("", description="Activity's category")
    description: str = Field("", description="Activity's description")
    # Bug fix: the defaults were "" (a str), which is not a valid value for
    # Optional[datetime]; None is the correct "unset" default.
    start_time: Optional[datetime] = Field(None, description="Activity's start time")
    end_time: Optional[datetime] = Field(None, description="Activity's end time")

    class Config:
        json_schema_extra = {
            "example": {
                "id": "61f7b1b7b3b3b3b3b3b3b3",
                "user_id": "61f7b1b7b3b3b3b3b3b3b3",
                "activity_category": "Study",
                "description": "Study for the final exam",
                "start_time": "2025-01-05T14:00:00.000+00:00",
                "end_time": "2025-01-05T16:00:00.000+00:00",
            }
        }
diff --git a/src/apis/models/user_models.py b/src/apis/models/user_models.py
new file mode 100644
index 0000000000000000000000000000000000000000..455c0c7b1b1cf6af7d28d7694080ca2aed9b8734
--- /dev/null
+++ b/src/apis/models/user_models.py
@@ -0,0 +1,35 @@
+from pydantic import Field, EmailStr
+from .BaseModel import BaseDocument
+from bson import ObjectId
+
+
def get_user(user) -> dict:
    """Serialize a MongoDB user document into a JSON-safe dict.

    Robustness fix: optional profile fields use .get with a "" default so a
    legacy document missing e.g. contact_number no longer raises KeyError
    (hotel_route already treats contact_number as optional).
    """
    return {
        "id": str(user["_id"]),
        "name": user.get("name", ""),
        "email": user.get("email", ""),
        "picture": user.get("picture", ""),
        "contact_number": user.get("contact_number", ""),
    }
+
+
def list_serial(users) -> list:
    """Serialize an iterable of user documents via get_user."""
    serialized = []
    for user_doc in users:
        serialized.append(get_user(user_doc))
    return serialized
+
+
class User(BaseDocument):
    """Application user document as stored in MongoDB."""

    # Aliased to "_id" so the model round-trips Mongo's primary-key field.
    id: str = Field(default_factory=lambda: str(ObjectId()), alias="_id")
    name: str = Field("", description="User's name")
    email: EmailStr = Field("", description="User's email")
    picture: str = Field("", title="User Picture")
    contact_number: str = Field("", description="User's contact number")

    class Config:
        json_schema_extra = {
            "example": {
                "name": "John Doe",
                "email": "johnUS192@gmail.com",
                "picture": "https://example.com/picture.jpg",
                "contact_number": "1234567890",
            }
        }
diff --git a/src/apis/providers/__pycache__/__init__.cpython-311.pyc b/src/apis/providers/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b8b1ed88f2c96a0c8c4ed0030c7676a6f7bfdf9a
Binary files /dev/null and b/src/apis/providers/__pycache__/__init__.cpython-311.pyc differ
diff --git a/src/apis/providers/__pycache__/jwt_provider.cpython-311.pyc b/src/apis/providers/__pycache__/jwt_provider.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d2a2f2f39816284881762bcac97c3f35286b190d
Binary files /dev/null and b/src/apis/providers/__pycache__/jwt_provider.cpython-311.pyc differ
diff --git a/src/apis/providers/jwt_provider.py b/src/apis/providers/jwt_provider.py
new file mode 100644
index 0000000000000000000000000000000000000000..bec0fb58341cccc3128c17f3afeb4e1fde2f1f52
--- /dev/null
+++ b/src/apis/providers/jwt_provider.py
@@ -0,0 +1,37 @@
+from typing import AnyStr, Dict, Union
+import os
+from fastapi import HTTPException, status
+from jose import jwt, JWTError
+
+
class JWTProvider:
    """
    Perform JWT encryption and decryption with an HS256 shared secret.
    """

    def __init__(self, secret: AnyStr = None, algorithm: AnyStr = "HS256"):
        # Bug fix: the original evaluated os.environ.get("JWT_SECRET") as a
        # default ARGUMENT, freezing the secret at import time — before
        # lifespan() hydrates os.environ from ENCODED_ENV. The secret is now
        # resolved lazily on each use (see _secret).
        self.secret = secret
        self.algorithm = algorithm

    @property
    def _secret(self) -> AnyStr:
        """Explicit secret if one was supplied, else the current env value."""
        return self.secret if self.secret is not None else os.environ.get("JWT_SECRET")

    def encrypt(self, data: Dict) -> AnyStr:
        """
        Encrypt the data with JWT
        """
        return jwt.encode(data, self._secret, algorithm=self.algorithm)

    def decrypt(self, token: AnyStr) -> Union[Dict, None]:
        """
        Decrypt the token with JWT; raises HTTP 401 on any validation error.
        """
        try:
            return jwt.decode(token, self._secret, algorithms=[self.algorithm])
        except JWTError as e:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail=f"Could not validate credentials. {str(e)}",
            )


jwt_provider = JWTProvider()
diff --git a/src/apis/routes/__pycache__/auth_route.cpython-311.pyc b/src/apis/routes/__pycache__/auth_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7085bd9fe9ad18f7420b331f0f7b7ba56f997b42
Binary files /dev/null and b/src/apis/routes/__pycache__/auth_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/chat_route.cpython-311.pyc b/src/apis/routes/__pycache__/chat_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..34f91126129e56ad8f4ed7d9ffa834f4cb844a07
Binary files /dev/null and b/src/apis/routes/__pycache__/chat_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/hotel_route.cpython-311.pyc b/src/apis/routes/__pycache__/hotel_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..11e130f5a5c66e5a35d6a905a80dc419aefd2c07
Binary files /dev/null and b/src/apis/routes/__pycache__/hotel_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/location_route.cpython-311.pyc b/src/apis/routes/__pycache__/location_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c7897238073c9f303032068e8c4bf361476bed41
Binary files /dev/null and b/src/apis/routes/__pycache__/location_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/planner_route.cpython-311.pyc b/src/apis/routes/__pycache__/planner_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..62e48280239287e7527ae82193a11bc6c78e3a7b
Binary files /dev/null and b/src/apis/routes/__pycache__/planner_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/post_router.cpython-311.pyc b/src/apis/routes/__pycache__/post_router.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a2426fbe9fdee9cc833e9f2a446069b0dc24fdb9
Binary files /dev/null and b/src/apis/routes/__pycache__/post_router.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/scheduling_router.cpython-311.pyc b/src/apis/routes/__pycache__/scheduling_router.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6a95ca8730477409d9e724e555cfe8ea4ffca39e
Binary files /dev/null and b/src/apis/routes/__pycache__/scheduling_router.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/travel_dest_route.cpython-311.pyc b/src/apis/routes/__pycache__/travel_dest_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7ed590d3618fe874027c3f3ed368c23be1df3b54
Binary files /dev/null and b/src/apis/routes/__pycache__/travel_dest_route.cpython-311.pyc differ
diff --git a/src/apis/routes/auth_route.py b/src/apis/routes/auth_route.py
new file mode 100644
index 0000000000000000000000000000000000000000..aecfc25a31e2094a7f2a3cfe11bae312829fc61a
--- /dev/null
+++ b/src/apis/routes/auth_route.py
@@ -0,0 +1,32 @@
+from fastapi import APIRouter, status, Depends
+from fastapi.responses import JSONResponse
+from typing import Annotated
+from src.apis.models.user_models import User
+from src.apis.controllers.auth_controller import login_control
+from src.apis.interfaces.auth_interface import _LoginResponseInterface
+from src.apis.interfaces.auth_interface import Credential
+from src.apis.middlewares.auth_middleware import get_current_user
+
router = APIRouter(prefix="/auth", tags=["Authentications"])

# Shared dependency: resolves the Bearer token to a serialized user dict.
user_dependency = Annotated[User, Depends(get_current_user)]
+
+
@router.post(
    "/login", status_code=status.HTTP_200_OK, response_model=_LoginResponseInterface
)
async def login(credential: Credential):
    """Exchange a credential for an app JWT plus the user's profile data."""
    try:
        token, user_data = await login_control(credential.credential)
        return JSONResponse(
            content={"token": token, "user_data": user_data}, status_code=200
        )
    except Exception as e:
        # Security fix: the original echoed raw str(e) (stack internals,
        # possibly connection strings) to clients. Log it server-side and
        # return a generic message instead.
        import logging

        logging.getLogger(__name__).error(f"Login failed: {e}")
        return JSONResponse(content={"message": "Login failed"}, status_code=500)
+
+
+@router.get("/get_info", status_code=status.HTTP_200_OK)
+async def get_user_info(user: user_dependency):
+ if user is None:
+ return JSONResponse(content={"message": "User not found"}, status_code=401)
+ return JSONResponse(content={"user": user}, status_code=200)
diff --git a/src/apis/routes/chat_route.py b/src/apis/routes/chat_route.py
new file mode 100644
index 0000000000000000000000000000000000000000..872973ecd97b5941f1e0c864ec23d1e9a64a8fc4
--- /dev/null
+++ b/src/apis/routes/chat_route.py
@@ -0,0 +1,84 @@
+from fastapi import APIRouter, status, Depends, BackgroundTasks
+from fastapi.responses import JSONResponse
+from typing import Annotated
+from dotenv import load_dotenv
+from fastapi.responses import StreamingResponse
+from src.apis.interfaces.api_interface import Chat
+from src.apis.controllers.chat_controller import (
+ chat_function,
+ chat_streaming_function,
+ get_history_function,
+ list_chat_history_function,
+ delete_chat_history_function,
+)
+from src.utils.logger import logger
+from src.apis.models.user_models import User
+from src.apis.middlewares.auth_middleware import get_current_user
+from src.utils.redis import set_key_redis, delete_key_redis
+
# Load .env values at import time; override=True lets the file win over
# already-set shell variables.
load_dotenv(override=True)

router = APIRouter(prefix="/llm", tags=["LLM"])

# Shared dependency: resolves the Bearer token to a serialized user dict.
user_dependency = Annotated[User, Depends(get_current_user)]
+
+
+@router.post("/chat", status_code=status.HTTP_200_OK)
+async def chat_router(
+ data: Chat, user: user_dependency, background_tasks: BackgroundTasks
+):
+ if user is None:
+ return JSONResponse(content={"message": "User not found"}, status_code=404)
+ return await chat_function(user, data, background_tasks)
+
+
+@router.post("/chat_streaming", status_code=status.HTTP_200_OK)
+async def chat_streaming_router(
+ data: Chat, user: user_dependency, background_tasks: BackgroundTasks
+):
+ return StreamingResponse(
+ chat_streaming_function(user, data, background_tasks),
+ media_type="text/plain",
+ )
+
+
+@router.post("/get_chat_history", status_code=status.HTTP_200_OK)
+async def get_chat_history(user: user_dependency, background_tasks: BackgroundTasks):
+ if user is None:
+ return JSONResponse(content={"message": "User not found"}, status_code=404)
+ user_id = user["id"]
+ result = await get_history_function(user_id)
+ if result is None:
+ return JSONResponse(content={"message": [], "intent": None}, status_code=500)
+ background_tasks.add_task(set_key_redis, f"chat_history_{user_id}", str(result))
+ return JSONResponse(content=result, status_code=200)
+
+
+@router.delete("/delete_chat_history", status_code=status.HTTP_200_OK)
+async def delete_chat_history(user: user_dependency, background_tasks: BackgroundTasks):
+ if user is None:
+ return JSONResponse(content={"message": "User not found"}, status_code=404)
+ try:
+ await delete_chat_history_function(user["id"])
+ background_tasks.add_task(delete_key_redis, f"chat_history_{user['id']}")
+
+ except Exception as e:
+ logger.error(f"Error in delete_chat_history: {e}")
+
+ return JSONResponse(
+ content={"message": "Chat history has been deleted"}, status_code=200
+ )
+
+
+@router.get("/list_chat_history", status_code=status.HTTP_200_OK)
+async def list_chat_history(user: user_dependency):
+ if user is None:
+ return JSONResponse(content={"message": "User not found"}, status_code=404)
+ try:
+ result = await list_chat_history_function(user["id"])
+ await set_key_redis(f"list_chat_history_{user['id']}", str(result))
+ except Exception as e:
+ logger.error(f"Error in list_chat_history: {e}")
+ result = []
+
+ return JSONResponse(content=result, status_code=200)
diff --git a/src/apis/routes/hotel_route.py b/src/apis/routes/hotel_route.py
new file mode 100644
index 0000000000000000000000000000000000000000..2ead7e34aca5be0491f8d1ea38425c45f2f8da65
--- /dev/null
+++ b/src/apis/routes/hotel_route.py
@@ -0,0 +1,87 @@
from fastapi import APIRouter, status, Depends, BackgroundTasks
from typing import Annotated
from fastapi.responses import JSONResponse
from src.apis.controllers.hotel_controller import (
    book_hotel_controller,
    send_booking_confirmation_email,
)
# Fix: this import previously sat mid-file between definitions; imports
# belong at the top of the module (PEP 8).
from src.apis.models.hotel_models import BookHotel
from src.apis.models.user_models import User
from src.apis.middlewares.auth_middleware import get_current_user
from src.utils.logger import logger
from pydantic import BaseModel, Field
import pandas as pd

# Hotel endpoints: booking plus a simple spreadsheet-backed search.
router = APIRouter(prefix="/hotel", tags=["Hotel"])

# Resolves to the authenticated user's dict, or None when auth fails.
user_dependency = Annotated[User, Depends(get_current_user)]
+
+
@router.post("/book_hotel", status_code=status.HTTP_201_CREATED)
async def book_hotel(
    body: BookHotel,
    user: user_dependency,
    background_tasks: BackgroundTasks,
):
    """Book a hotel for the authenticated user and e-mail a confirmation.

    Returns 401 without a user, 400 on controller error, otherwise the
    controller's message. NOTE(review): the decorator declares 201 but the
    success path answers 200 — confirm which one clients expect.
    """
    logger.info(f"User: {user}")
    if user is None:
        return JSONResponse(
            content="User not found", status_code=status.HTTP_401_UNAUTHORIZED
        )
    # NOTE(review): hardcoded address — the booking itself uses
    # body.hotel_email below, but the confirmation e-mail goes to this fixed
    # address. Looks like a test leftover; confirm before shipping.
    hotel_email = "baohtqe170017@fpt.edu.vn"
    user_id = user["id"]
    user_email = user["email"]
    # Fallback string is sent verbatim in the confirmation e-mail.
    user_contact_number = user.get("contact_number", "Does not have contact number")
    response = await book_hotel_controller(
        body.hotel_email,
        body.hotel_name,
        body.address,
        body.phone_number,
        body.website,
        body.start_time,
        body.end_time,
        user_id,
    )
    if response["status"] == "error":
        return JSONResponse(
            content=response["message"], status_code=status.HTTP_400_BAD_REQUEST
        )
    # Send the confirmation after the response, off the request path.
    background_tasks.add_task(
        send_booking_confirmation_email,
        user_email,
        user_contact_number,
        hotel_email,
        body.start_time,
        body.end_time,
    )
    return JSONResponse(content=response["message"], status_code=status.HTTP_200_OK)
+
+
# Hotel catalogue, loaded once at import time; a missing or corrupt file
# makes the whole module fail to import. NOTE(review): consider lazy loading.
df = pd.read_excel("./src/data/hotel.xlsx")


class HotelRequest(BaseModel):
    """Request body for /hotel/search_hotels."""

    # Category filter; validated in the handler against popular/luxury/basic.
    hotel_type: str = Field(..., title="Type of hotel")
    # Maximum number of rows returned.
    top_k: int = Field(5, title="Number of hotels to return")

    class Config:
        json_schema_extra = {
            "example": {
                "hotel_type": "popular",
                "top_k": 5,
            }
        }
+
+
@router.post("/search_hotels")
def search_hotels(request: HotelRequest):
    """Return up to `top_k` hotels of the requested type from the spreadsheet.

    Fix: an invalid `hotel_type` previously returned a plain error dict with
    an implicit 200 status; it now answers an explicit 400.
    """
    if request.hotel_type not in ("popular", "luxury", "basic"):
        return JSONResponse(
            content={
                "error": "Invalid hotel type. Choose from 'popular', 'luxury', 'basic'."
            },
            status_code=status.HTTP_400_BAD_REQUEST,
        )
    # Filter the module-level catalogue and take the first top_k rows.
    filtered_df = df[df["type"] == request.hotel_type]
    top_hotels = filtered_df.head(request.top_k).to_dict(orient="records")
    return JSONResponse(content=top_hotels, status_code=status.HTTP_200_OK)
diff --git a/src/apis/routes/location_route.py b/src/apis/routes/location_route.py
new file mode 100644
index 0000000000000000000000000000000000000000..5919cede5c42c0bf1d9a56553a36878bcb37ec09
--- /dev/null
+++ b/src/apis/routes/location_route.py
@@ -0,0 +1,35 @@
from src.apis.interfaces.api_interface import Location
from src.apis.models.user_models import User
from src.apis.middlewares.auth_middleware import get_current_user
from src.apis.controllers.location_controller import (
    get_location_details,
    get_nearby_places,
    get_places,
)
from fastapi import APIRouter, status, Depends
from typing import Annotated
from fastapi.responses import JSONResponse

# Geo endpoints: reverse lookup, nearby search, and place listing.
router = APIRouter(prefix="/location", tags=["Location"])

# Resolves to the authenticated user's dict, or None when auth fails.
user_dependency = Annotated[User, Depends(get_current_user)]
+
+
@router.post("/details", status_code=status.HTTP_200_OK)
def get_location(body: Location, user: user_dependency):
    """Resolve human-readable details for a coordinate pair."""
    if user is None:
        return JSONResponse(content={"message": "User not found"}, status_code=404)
    details = get_location_details(body.lat, body.long)
    return details
+
+
@router.post("/nearby", status_code=status.HTTP_200_OK)
def get_nearby(body: Location, user: user_dependency):
    """List places near a coordinate, filtered by radius and categories."""
    if user is None:
        return JSONResponse(content={"message": "User not found"}, status_code=404)
    nearby = get_nearby_places(body.lat, body.long, body.radius, body.categories)
    return nearby
+
+
@router.post("/places", status_code=status.HTTP_200_OK)
def get_near_places(body: Location):
    """List places around a coordinate without requiring authentication.

    NOTE(review): unlike /details and /nearby this endpoint has no user
    dependency, so it is reachable anonymously — confirm that is intentional.
    """
    return get_places(body.lat, body.long, body.radius, body.categories)
diff --git a/src/apis/routes/planner_route.py b/src/apis/routes/planner_route.py
new file mode 100644
index 0000000000000000000000000000000000000000..a097e3bb56a0bcb3afcee92472671b57a4a6325c
--- /dev/null
+++ b/src/apis/routes/planner_route.py
@@ -0,0 +1,50 @@
from src.apis.models.user_models import User
from src.apis.middlewares.auth_middleware import get_current_user
from fastapi import APIRouter, status, Depends
from typing import Annotated
from fastapi.responses import StreamingResponse
from src.apis.interfaces.api_interface import Planning
from src.apis.controllers.planner_controller import message_generator
import json
from fastapi import BackgroundTasks

# Itinerary-planner endpoint; streams generated plans back to the client.
router = APIRouter(prefix="/planner", tags=["Planner"])

user_dependency = Annotated[User, Depends(get_current_user)]

# NOTE(review): this config hard-codes one specific user's id, e-mail, phone
# number, session id and coordinates, and is applied to EVERY /invoke request.
# Looks like debug leftovers (and leaks personal data) — replace with
# per-request values derived from the authenticated user.
config = {
    "configurable": {
        "user_id": "673b0d33549989f756fa3970",
        "user_email": "baohtqe170017@fpt.edu.vn",
        "contact_number": "1234567890",
        "session_id": "6d16c975e8b74d979d6d680e6ff536eb",
        "lat": 13.717163281669754,
        "long": 109.21053970482858,
    }
}


# NOTE(review): the five constants below (and the `json`/`status` imports
# above) are unused in this module — likely manual-testing leftovers;
# candidates for removal once no other module imports them.
duration = "7"
start_date = "June 1-7"
location = "Quy Nhon, Vietnam"
interests = "natural, cultural"
nation = "nation"
+
@router.post("/invoke")
async def invoke_planner(body: Planning, background: BackgroundTasks):
    """Stream a generated travel itinerary for the requested trip."""
    # Initial graph state built from the request body; the misspelled
    # "interation" keys are the names the planner graph expects.
    graph_state = {
        "duration": body.duration,
        "start_date": body.start_date,
        "location": body.location,
        "interests": body.interests,
        "nation": body.nation,
        "include_destination": body.include_destination,
        "limit_interation": body.limit_interation,
        "current_interation": 0,
        "error": None,
    }
    stream = message_generator(graph_state, config, background)
    return StreamingResponse(stream, media_type="text/plain")
diff --git a/src/apis/routes/post_router.py b/src/apis/routes/post_router.py
new file mode 100644
index 0000000000000000000000000000000000000000..97d60fd64bf40d987bcb877e0dcb99bc8c271816
--- /dev/null
+++ b/src/apis/routes/post_router.py
@@ -0,0 +1,101 @@
from fastapi import APIRouter, status, Depends
from typing import Annotated, Optional
from fastapi.responses import JSONResponse
from pydantic import Field
from src.apis.models.user_models import User
from src.apis.models.BaseModel import BaseDocument
from src.apis.models.post_models import Post as BodyPost
from src.apis.middlewares.auth_middleware import get_current_user
from src.apis.controllers.post_controller import (
    create_a_post_controller,
    list_all_posts_controller,
    get_a_post_controller,
    update_a_post_controller,
    delete_a_post_controller,
)
from src.utils.logger import logger

# CRUD endpoints for user posts.
router = APIRouter(prefix="/post", tags=["Post"])

# Resolves to the authenticated user's dict, or None when auth fails.
user_dependency = Annotated[User, Depends(get_current_user)]
+
+
@router.post("/create", status_code=status.HTTP_201_CREATED)
async def create_post(body: BodyPost, user: user_dependency):
    """Create a post owned by the authenticated user."""
    if user is None:
        return JSONResponse(content={"message": "User not found"}, status_code=404)
    result = await create_a_post_controller(
        body.content,
        user["id"],
        body.destination_id,
    )
    logger.info(f"RESULT: {result}")
    # Controller reports outcome via a "status" field rather than raising.
    code = 404 if result["status"] == "error" else 201
    return JSONResponse(content=result, status_code=code)
+
+
@router.post("/get/{post_id}", status_code=status.HTTP_200_OK)
async def get_post(post_id: str, user: user_dependency):
    """Fetch a single post by id.

    NOTE(review): registered as POST although it only reads data; confirm
    clients before switching the verb to GET.
    """
    if user is None:
        return JSONResponse(content={"message": "User not found"}, status_code=404)
    result = await get_a_post_controller(post_id)
    logger.info(f"RESULT: {result}")
    code = 404 if result["status"] == "error" else 200
    return JSONResponse(content=result, status_code=code)
+
+
@router.get("/list", status_code=status.HTTP_200_OK)
async def list_all_posts():
    """List every post; public endpoint (no auth dependency)."""
    result = await list_all_posts_controller()
    logger.info(f"RESULT: {result}")
    code = 404 if result["status"] == "error" else 200
    return JSONResponse(content=result, status_code=code)
+
+
class BodyUpdatePost(BaseDocument):
    """Request body for /post/update/{post_id}."""

    # Fields default to "" (not None); empty strings are forwarded to the
    # controller unchanged.
    content: Optional[str] = Field("", description="Post's content")
    destination_id: Optional[str] = Field("", description="Destination's id")

    class Config:
        json_schema_extra = {
            "example": {
                "content": "John Doe",
                "destination_id": "1234567890",
            }
        }
+
+
@router.post("/update/{post_id}", status_code=status.HTTP_200_OK)
async def update_post(post_id: str, body: BodyUpdatePost, user: user_dependency):
    """Update a post's content/destination; ownership checked by controller."""
    if user is None:
        return JSONResponse(content={"message": "User not found"}, status_code=404)
    result = await update_a_post_controller(
        user["id"],
        post_id,
        body.content,
        body.destination_id,
    )
    logger.info(f"RESULT: {result}")
    code = 404 if result["status"] == "error" else 200
    return JSONResponse(content=result, status_code=code)
+
+
@router.post("/delete/{post_id}", status_code=status.HTTP_200_OK)
async def delete_post(post_id: str, user: user_dependency):
    """Delete a post owned by the authenticated user."""
    if user is None:
        return JSONResponse(content={"message": "User not found"}, status_code=404)
    result = await delete_a_post_controller(user["id"], post_id)
    logger.info(f"RESULT: {result}")
    code = 404 if result["status"] == "error" else 200
    return JSONResponse(content=result, status_code=code)
diff --git a/src/apis/routes/scheduling_router.py b/src/apis/routes/scheduling_router.py
new file mode 100644
index 0000000000000000000000000000000000000000..fee561a29cd9b9cb768508811e030067e4cdcd7e
--- /dev/null
+++ b/src/apis/routes/scheduling_router.py
@@ -0,0 +1,118 @@
from fastapi import APIRouter, status, Depends
from typing import Annotated, Optional
from fastapi.responses import JSONResponse
from pydantic import Field
from src.utils.helper import convert_string_date_to_iso, datetime_to_iso_string
from src.apis.models.user_models import User
from src.apis.models.BaseModel import BaseDocument
from src.apis.middlewares.auth_middleware import get_current_user
from src.apis.controllers.scheduling_controller import (
    create_a_activity_controller,
    search_activities_controller,
    update_a_activity_controller,
    delete_activities_controller,
)
from src.utils.logger import logger

# CRUD endpoints for calendar/scheduling activities.
router = APIRouter(prefix="/scheduling", tags=["Scheduling"])

# Resolves to the authenticated user's dict, or None when auth fails.
user_dependency = Annotated[User, Depends(get_current_user)]
+
+
class BodyActivity(BaseDocument):
    """Request body shared by the create and update activity endpoints."""

    # All fields default to "" so partial payloads validate; the handlers
    # decide which fields are actually required.
    activity_id: Optional[str] = Field("", description="Activity's id")
    activity_category: Optional[str] = Field("", description="Activity's category")
    description: Optional[str] = Field("", description="Activity's description")
    # Times arrive as strings and are converted via convert_string_date_to_iso.
    start_time: Optional[str] = Field("", description="Activity's start time")
    end_time: Optional[str] = Field("", description="Activity's end time")

    class Config:
        json_schema_extra = {
            "example": {
                "activity_id": "61f7b1b7b3b3b3b3b3b3b3b3",
                "activity_category": "Study",
                "description": "Study for the final exam",
                "start_time": "2025-01-05T14:00:00",
                "end_time": "2025-01-05T16:00:00",
            }
        }
+
+
@router.post("/create", status_code=status.HTTP_201_CREATED)
async def create_activity(body: BodyActivity, user: user_dependency):
    """Create a calendar activity for the authenticated user."""
    if user is None:
        return JSONResponse(content={"message": "User not found"}, status_code=404)
    # Normalize the free-form time strings to ISO before persisting.
    result = await create_a_activity_controller(
        body.activity_id,
        body.activity_category,
        body.description,
        convert_string_date_to_iso(body.start_time),
        convert_string_date_to_iso(body.end_time),
        user["id"],
    )
    logger.info(f"RESULT: {result}")
    code = 404 if result["status"] == "error" else 201
    return JSONResponse(content=result, status_code=code)
+
+
@router.get("/search", status_code=status.HTTP_200_OK)
async def search_activities(user: user_dependency):
    """Return all of the caller's activities, normalized for JSON output."""
    if user is None:
        return JSONResponse(content={"message": "User not found"}, status_code=404)
    user_id = user["id"]
    # (None, None) means no date filters: fetch every activity for this user.
    result = await search_activities_controller(None, None, user_id)
    logger.info(f"result {result}")
    if result["status"] == "error":
        return JSONResponse(content={"message": result["message"]}, status_code=404)
    else:
        # Mutate each record in place: datetimes -> ISO strings, Mongo "_id"
        # renamed to "id", and the owner's id stripped from the payload.
        # NOTE(review): assumes all four datetime keys are always present —
        # a missing key would raise KeyError here; confirm controller schema.
        for activity in result["message"]:
            activity["start_time"] = datetime_to_iso_string(activity["start_time"])
            activity["end_time"] = datetime_to_iso_string(activity["end_time"])
            activity["created_at"] = datetime_to_iso_string(activity["created_at"])
            activity["updated_at"] = datetime_to_iso_string(activity["updated_at"])
            activity["id"] = activity.pop("_id")
            del activity["user_id"]
        return JSONResponse(content=result, status_code=200)
+
+
@router.put("/update", status_code=status.HTTP_200_OK)
async def update_activity(body: BodyActivity, user: user_dependency):
    """Update an activity; both start_time and end_time are mandatory.

    Fix: the missing-time validation error previously answered 404 (not
    found); it now answers 400, the correct code for an invalid request.
    Controller-level errors keep the pre-existing 404 contract.
    """
    if user is None:
        return JSONResponse(content={"message": "User not found"}, status_code=404)
    # Guard clause: reject the request early when either time is missing.
    if not (body.start_time and body.end_time):
        return JSONResponse(
            content={"message": "Start time and end time are required"},
            status_code=status.HTTP_400_BAD_REQUEST,
        )
    result = await update_a_activity_controller(
        body.activity_id,
        body.activity_category,
        body.description,
        convert_string_date_to_iso(body.start_time),
        convert_string_date_to_iso(body.end_time),
        user["id"],
    )
    if result["status"] == "error":
        return JSONResponse(content=result, status_code=404)
    return JSONResponse(content=result, status_code=200)
+
+
@router.delete("/delete", status_code=status.HTTP_200_OK)
async def delete_activity(activity_id: str, user: user_dependency):
    """Delete one activity belonging to the authenticated user."""
    if user is None:
        return JSONResponse(content={"message": "User not found"}, status_code=404)
    # (None, None) = no date filters; delete strictly by id + owner.
    result = await delete_activities_controller(activity_id, None, None, user["id"])
    code = 404 if result["status"] == "error" else 200
    return JSONResponse(content=result, status_code=code)
diff --git a/src/apis/routes/travel_dest_route.py b/src/apis/routes/travel_dest_route.py
new file mode 100644
index 0000000000000000000000000000000000000000..b8e2f818aab818915fb0a36fcb25a84e2f82ca06
--- /dev/null
+++ b/src/apis/routes/travel_dest_route.py
@@ -0,0 +1,54 @@
from fastapi import APIRouter, Depends, HTTPException, Query
from typing import Annotated
# Fix: JSONResponse was imported twice in this module; the duplicate line
# has been removed.
from fastapi.responses import JSONResponse
import pandas as pd
import math
from src.apis.models.user_models import User
from src.apis.middlewares.auth_middleware import get_current_user
from src.utils.logger import logger
from src.apis.controllers.destination_controller import (
    destination_suggestion_controller,
)

# Destination endpoints: spreadsheet-backed listing plus LLM suggestions.
router = APIRouter(prefix="/dest", tags=["Destination"])

user_dependency = Annotated[User, Depends(get_current_user)]
# Spreadsheet backing the paginated tourist listing; re-read on each request.
EXCEL_FILE_PATH = "./src/data/destination_1_new_tag.xlsx"
+
+
@router.get("/get_tourist")
def get_tourist(page: int = Query(default=1, ge=1)):
    """Return one 10-item page of tourist destinations from the spreadsheet.

    Fix: the column-validation HTTPException(400) raised inside the try block
    was previously swallowed by `except Exception` and re-raised as a 500
    with a misleading message; deliberate HTTP errors now propagate as-is.
    """
    try:
        PAGE_SIZE = 10
        # Re-read on every request; NOTE(review): consider caching if large.
        df = pd.read_excel(EXCEL_FILE_PATH)
        required_columns = ["name", "description", "image"]
        if not all(col in df.columns for col in required_columns):
            raise HTTPException(
                status_code=400, detail="Missing required columns in Excel file"
            )
        total_items = len(df)
        total_pages = math.ceil(total_items / PAGE_SIZE)
        start_idx = (page - 1) * PAGE_SIZE
        end_idx = start_idx + PAGE_SIZE
        paginated_df = df[required_columns].iloc[start_idx:end_idx]
        tourist_data = paginated_df.to_dict(orient="records")
        return JSONResponse(
            content={
                "data": tourist_data,
                "page": page,
                "total_pages": total_pages,
                "total_items": total_items,
                "page_size": PAGE_SIZE,
            }
        )
    except HTTPException:
        # Let intentional HTTP errors (e.g. the 400 above) pass through.
        raise
    except Exception as e:
        logger.error(f"Error reading the Excel file: {str(e)}")
        raise HTTPException(
            status_code=500, detail=f"Error reading the Excel file: {str(e)}"
        )
+
+
@router.get("/suggest")
async def destination_suggestion(question: str, top_k: int = Query(default=5, ge=1)):
    """Suggest up to `top_k` destinations for a free-text question."""
    suggestions = await destination_suggestion_controller(question, top_k)
    return JSONResponse(suggestions)
diff --git a/src/data/destination_1.xlsx b/src/data/destination_1.xlsx
new file mode 100644
index 0000000000000000000000000000000000000000..e3e2d2f4e89acdbe0bfb74b626bc8129e65b7900
Binary files /dev/null and b/src/data/destination_1.xlsx differ
diff --git a/src/data/destination_1_new_tag.xlsx b/src/data/destination_1_new_tag.xlsx
new file mode 100644
index 0000000000000000000000000000000000000000..02d784b8a06b5eb25bfb87874b9ecb19b3b815b4
Binary files /dev/null and b/src/data/destination_1_new_tag.xlsx differ
diff --git a/src/data/hotel.xlsx b/src/data/hotel.xlsx
new file mode 100644
index 0000000000000000000000000000000000000000..c9a23bfa2bbb262106579bdc2b86abe86760acf3
Binary files /dev/null and b/src/data/hotel.xlsx differ
diff --git a/src/langgraph/.DS_Store b/src/langgraph/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..c1dc8f1643ee2eb7ead79871a1a31d90182f4558
Binary files /dev/null and b/src/langgraph/.DS_Store differ
diff --git a/src/langgraph/__pycache__/__init__.cpython-311.pyc b/src/langgraph/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b654321a0f1c9d4cf2270fb85dd6efdcc1b21060
Binary files /dev/null and b/src/langgraph/__pycache__/__init__.cpython-311.pyc differ
diff --git a/src/langgraph/__pycache__/state.cpython-311.pyc b/src/langgraph/__pycache__/state.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..414ed4a1b0226a3b80cbd7fe071ed50d270b54fd
Binary files /dev/null and b/src/langgraph/__pycache__/state.cpython-311.pyc differ
diff --git a/src/langgraph/config/__pycache__/agent.cpython-311.pyc b/src/langgraph/config/__pycache__/agent.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bcd7fecf69d5630477de8832bb429d3fe6d42e63
Binary files /dev/null and b/src/langgraph/config/__pycache__/agent.cpython-311.pyc differ
diff --git a/src/langgraph/config/__pycache__/constant.cpython-311.pyc b/src/langgraph/config/__pycache__/constant.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5c77fe32ef7ffbaabf7b72097a04ee66ba86656a
Binary files /dev/null and b/src/langgraph/config/__pycache__/constant.cpython-311.pyc differ
diff --git a/src/langgraph/config/__pycache__/prompt.cpython-311.pyc b/src/langgraph/config/__pycache__/prompt.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..953b4eb1da576842487ba1272be4a91d1acdbafa
Binary files /dev/null and b/src/langgraph/config/__pycache__/prompt.cpython-311.pyc differ
diff --git a/src/langgraph/config/agent.py b/src/langgraph/config/agent.py
new file mode 100644
index 0000000000000000000000000000000000000000..bb2861fc0cc8936630c4bfb426b4a7d332d6b733
--- /dev/null
+++ b/src/langgraph/config/agent.py
@@ -0,0 +1,88 @@
+from langchain_core.runnables import Runnable
+from langchain_core.messages import AIMessage, HumanMessage
+from src.langgraph.state import State
+from src.utils.logger import logger
+
+
class Agent:
    """LangGraph node wrapping a runnable (prompt | llm) with retry logic.

    The call filters out CompleteOrRoute routing messages before invoking the
    runnable, short-circuits the sensitive-tool confirmation flow, and retries
    when the model returns neither content nor tool calls.
    """

    def __init__(self, runnable: Runnable):
        # The chain to invoke; expected to accept the dict built in __call__.
        self.runnable = runnable

    async def __call__(self, state: State):
        # Loop until the runnable produces a non-empty result (see retry at
        # the bottom); normally exits on the first iteration.
        while True:
            messages = state["messages"]
            # message_logger = messages[-1].pretty_print()
            # logger.info(f"Message: {message_logger}")
            chat_history = state["messages_history"]
            if messages:
                # Drop messages whose tool_calls include the CompleteOrRoute
                # router signal — they are control-flow, not conversation.
                messages = [
                    message
                    for message in messages
                    if not (
                        hasattr(message, "tool_calls")
                        and any(
                            tool_call["name"] == "CompleteOrRoute"
                            for tool_call in message.tool_calls
                        )
                    )
                ]
            intent = state["intent"]
            entry_message = state.get("entry_message")
            # Sensitive-tool confirmation path: the user answered "y" to a
            # "Do you want to run the following tool(s)" prompt, so re-invoke
            # with just the confirmation and the pending tool request.
            if (
                state["messages"][0].content == "y"
                and "Do you want to run the following tool(s)"
                in state["messages_history"][-1].content
                and isinstance(state["messages"][-1], HumanMessage)
            ):
                logger.info("AGENT CALL SENTITIVE TOOLS")
                data = {
                    "messages": [state["messages"][0]],
                    "history": [state["messages_history"][-1]],
                    "entry_message": entry_message,
                    "intent": intent,
                    "language": state["language"],
                }
                result = await self.runnable.ainvoke(data)
                # message_logger = result.pretty_print()
                # logger.info(f"Message: {message_logger}")
                try:
                    # Expect exactly one tool call from the confirmation turn.
                    tool_name = result.tool_calls[0]["name"]
                    logger.info(f"Tool name: {tool_name}")
                    return {"messages": result, "tool_name": tool_name}
                except Exception as e:
                    logger.error(f"Error scheduling sensitive tools: {e}")
                    return {"messages": "Can't call tool"}

            if state["intent"] is None:
                # Second CompleteOrRoute filter, AIMessage-specific.
                # NOTE(review): unlike the filter above this one does not
                # hasattr-guard tool_calls — confirm all AIMessages have it.
                messages = [
                    msg
                    for msg in messages
                    if not (
                        isinstance(msg, AIMessage)
                        and any(
                            tool_call["name"] == "CompleteOrRoute"
                            for tool_call in msg.tool_calls
                        )
                    )
                ]
            data = {
                "messages": messages,
                "history": chat_history,
                "entry_message": entry_message,
                "intent": intent,
                "language": state["language"],
            }
            result: AIMessage = await self.runnable.ainvoke(data)
            # message_logger = result.pretty_print()
            # logger.info(f"Message: {message_logger}")
            # Retry when the model produced neither tool calls nor usable
            # content (empty string, or list whose first part has no text).
            if not result.tool_calls and (
                not result.content
                or isinstance(result.content, list)
                and not result.content[0].get("text")
            ):
                logger.info("No content found, retrying")
                messages = state["messages"] + [("user", "Respond with a real output.")]
                state = {**state, "messages": messages}
            else:
                break
        return {"messages": result}
diff --git a/src/langgraph/config/constant.py b/src/langgraph/config/constant.py
new file mode 100644
index 0000000000000000000000000000000000000000..ffc57097efa5a2b66889ec2aedb144e2ea25e41d
--- /dev/null
+++ b/src/langgraph/config/constant.py
@@ -0,0 +1,23 @@
import os


class StateCfg:
    """Keys used in the LangGraph conversation-state dict."""

    MESSAGES = "messages"
    INTENT = "intent"
    HISTORY = "messages_history"
    USER_FEEDBACK = "user_feedback"


class MongoCfg:
    """MongoDB connection settings and collection names."""

    # NOTE(review): os.getenv returns None when the variable is unset,
    # despite the `str` annotation — downstream code should handle that.
    MONGODB_URL: str = os.getenv("MONGODB_URL")
    MONGO_INDEX: str = os.getenv("MONGO_INDEX")
    USER: str = "user"
    ACTIVITY: str = "activity"
    DATE: str = "date"
    BOOK_HOTEL: str = "book_hotel"
    CHAT_HISTORY: str = "chat_history"
    POST: str = "post"
    COMMENT: str = "comment"
    LIKE: str = "like"
    DESTINATION: str = "destination"
    # Cap on how many chat-history entries are retained per session.
    MAX_HISTORY_SIZE: int = 15


class RedisCfg:
    """Redis connection settings."""

    REDIS_URL: str = os.getenv("REDIS_URL")
diff --git a/src/langgraph/langchain/.DS_Store b/src/langgraph/langchain/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..12c39ed6d98dd540d5b097c9cd50d5fd0b5f0966
Binary files /dev/null and b/src/langgraph/langchain/.DS_Store differ
diff --git a/src/langgraph/langchain/__pycache__/__init__.cpython-311.pyc b/src/langgraph/langchain/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..84b7a3ffa5802bfcc6082a0f1472ab5160fc6092
Binary files /dev/null and b/src/langgraph/langchain/__pycache__/__init__.cpython-311.pyc differ
diff --git a/src/langgraph/langchain/__pycache__/llm.cpython-311.pyc b/src/langgraph/langchain/__pycache__/llm.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7026ee1c33f5f6519fb8f5eda2065336277b1d03
Binary files /dev/null and b/src/langgraph/langchain/__pycache__/llm.cpython-311.pyc differ
diff --git a/src/langgraph/langchain/__pycache__/message_langchain.cpython-311.pyc b/src/langgraph/langchain/__pycache__/message_langchain.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c231a0fa31b1181ce4c99b866cf38a9fa7ce01dd
Binary files /dev/null and b/src/langgraph/langchain/__pycache__/message_langchain.cpython-311.pyc differ
diff --git a/src/langgraph/langchain/__pycache__/prompt.cpython-311.pyc b/src/langgraph/langchain/__pycache__/prompt.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d5c353232d302ad506b48f790a8702d863fd820f
Binary files /dev/null and b/src/langgraph/langchain/__pycache__/prompt.cpython-311.pyc differ
diff --git a/src/langgraph/langchain/llm.py b/src/langgraph/langchain/llm.py
new file mode 100644
index 0000000000000000000000000000000000000000..77b68c5618e0bc66f9f728936be6c6fc4bd57d1c
--- /dev/null
+++ b/src/langgraph/langchain/llm.py
@@ -0,0 +1,17 @@
from langchain_google_genai import ChatGoogleGenerativeAI

# Fix: removed the commented-out duplicate model definition that preceded
# these instances (dead code).

# Primary chat model: low temperature for deterministic tool calling.
llm = ChatGoogleGenerativeAI(
    model="gemini-2.0-flash-exp",
    temperature=0.1,
    max_retries=2,
)

# Cheaper/faster model for tasks that do not need the primary model.
llm_flash = ChatGoogleGenerativeAI(
    model="gemini-1.5-flash",
    temperature=0.1,
    max_retries=2,
)
diff --git a/src/langgraph/langchain/prompt.py b/src/langgraph/langchain/prompt.py
new file mode 100644
index 0000000000000000000000000000000000000000..04d6b142ff16c4c55b87153b319be0d363853d1a
--- /dev/null
+++ b/src/langgraph/langchain/prompt.py
@@ -0,0 +1,246 @@
from langchain.prompts import ChatPromptTemplate
from datetime import datetime
from pydantic import BaseModel, Field
from langchain_core.messages import HumanMessage
from typing import Callable
import pytz
from src.langgraph.state import State
from src.utils.logger import logger
from langchain_core.prompts import PromptTemplate

# NOTE(review): computed once at import time, so every prompt partialed with
# current_time=vietnam_time carries the process start-up timestamp, not the
# request time — compute per-request if "current time" matters.
vietnam_timezone = pytz.timezone("Asia/Ho_Chi_Minh")
vietnam_time = datetime.now(vietnam_timezone).strftime("%Y-%m-%d %H:%M:%S")
+
+
def create_entry_node(assistant_name: str, new_dialog_state: str) -> Callable:
    """Build a graph node that injects a hand-off message on delegation.

    Args:
        assistant_name: Name of the specialized assistant taking over.
        new_dialog_state: Target dialog state (currently unused by the node).

    Returns:
        A node callable producing the entry HumanMessage under "entry_message".

    NOTE(review): the prompt text contains the typo "other other action"; left
    byte-identical here since it is runtime model input — fix deliberately.
    """
    # Fix: redundant double parentheses around the log argument removed.
    logger.info(f"Create entry node: {assistant_name}, {new_dialog_state}")

    def create_entry(state: State):
        return {
            "entry_message": [
                HumanMessage(
                    content=f"""The assistant is now the {assistant_name}. Reflect on the above conversation between the host assistant and the user.
                    The user's intent is unsatisfied. Use the provided tools to assist the user. Remember, you are {assistant_name} and the booking, update, other other action is not complete until after you have successfully invoked the appropriate tool
                    If the user changes their mind or needs help for other tasks, call the CompleteOrRoute function to leave this assistant.
                    Do not mention who you are - just act as the proxy for the assistant.""",
                ),
            ],
        }

    return create_entry
+
+
# Host assistant: routing/search entry point for the multi-agent graph.
# NOTE(review): the template contains a stray "<>," and never references
# {current_time}, yet .partial(current_time=...) is applied — a placeholder
# (likely "<{current_time}>") appears to have been lost. Same for the two
# specialist prompts below. Runtime strings left byte-identical.
primary_assistant_prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            """You are a helpful customer support assistant for TriVenture AI Corporation about traveling.
    Key guidelines:
    - Call tool destination_recommendation to recommend destinations when user want to go somewhere or ask for recommendation.
    - Using search engine to find an something relate to user's request.
    - Be persistent with searches and expand bounds if needed.
    - Answer in {language} language
    <>,
    """,
        ),
        ("placeholder", "{history}"),
        ("placeholder", "{messages}"),
    ]
).partial(current_time=vietnam_time)

# Classifier prompt: maps the conversation to book_hotel / scheduling / other.
classify_user_intent_prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            """
Your task is to classify the user's intent based on the message history.
Available intents are:
- book_hotel : if current user intent related to hotel booking, searching, or another information about hotel
- scheduling : if current user intent related to scheduling activities, planning, create calendar, timetable agent
- other: if current user intent is not related to hotel booking or scheduling. It can be a general question, greeting, or other information(destination recommendation).

Output only user intent.
""",
        ),
        ("placeholder", "{history}"),
        ("placeholder", "{messages}"),
    ]
)

# Scheduling specialist prompt; expects {history}/{entry_message}/{messages}.
# NOTE(review): contains the typo "Acitivity" inside the runtime text.
scheduling_prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            """You are a part of agent in a multi agent system.
    You are a scheduling assistant for handling scheduling CRUD operations.
    Instructions:
    1. Using history to make content for conversation
    2. Call tool to create, update, delete, search activity when collect enough information.
    3. Not ask user for confirmation, automatically confirm call tool.
    4. start_time (str): Ask the user for the start time of the activity. If not sure then return current time.
    5. end_time (str): Ask the user if they not mentioned the end time of the activity. If not sure then return the end time as 1 hour from the start time.
    6. start_time and end_time can be all day, morning, afternoon, evening, night, or specific time.
    7. Don't ask user more detail. You must naturally respond to the user's messages.

    Do not waste the user's time. Do not make up invalid tools or functions.
    Call CompleteOrRoute when user's intent is not related to scheduling. Another agent will take over.

    Some examples for which you must call CompleteOrRoute:
    - what's the weather like this time of year?
    - i need to figure out transportation while i'm there
    - Acitivity created successfully
    - I want recommend some travel destinations, hotels.
    Note:
    - Past conversation it can be response from another assistant. If your tool can't handle, you must call "CompleteOrRoute" to return to the host assistant.
    - Don't ask user confirmation the tool which you didn't bind.
    - Not required user typing in the format of tool call. You must naturally respond to the user's messages.
    - You must call "CompleteOrRoute" than say you can't handle the user's request.
    - Answer in {language} language
    <>
    """,
        ),
        ("placeholder", "{history}"),
        ("placeholder", "{entry_message}"),
        ("placeholder", "{messages}"),
    ]
).partial(current_time=vietnam_time)

# Hotel-booking specialist prompt; same placeholder layout as scheduling.
book_hotel_prompt = ChatPromptTemplate.from_messages(
    [
        (
            "system",
            """You are a part of agent in a multi agent system.
    Your goal is to assist the user asking for hotel recommendations and booking a hotel.
    Instructions:
    Your task:
    1. Using history to make content for conversation.
    2. Search for available hotels when the user asks for hotel recommendations.
    3. Execute then CRUD operations for booking a hotel base on user conversation.
    4. You must call "CompleteOrRoute" when last message is not related to hotel booking.

    Some examples for which you should CompleteOrRoute:
    - what's the weather like this time of year?
    - nevermind i think I'll book separately
    - I want create schedule, timetable, calendar
    - else not related to hotel booking

    Note:
    - Past conversation it can be response from another assistant. If your tool can't handle, you must call "CompleteOrRoute" to return to the host assistant.
    - Don't ask user confirmation the tool which you didn't bind.
    - Not required user typing in the format of tool call. You must naturally respond to the user's messages.
    - Answer in {language} language

    <>
    """,
        ),
        ("placeholder", "{history}"),
        ("placeholder", "{entry_message}"),
        ("placeholder", "{messages}"),
    ]
).partial(current_time=vietnam_time)
+
+
# These pydantic models are bound to the LLM as "tools"; their DOCSTRINGS are
# the tool descriptions the model sees at runtime, so they are deliberately
# left byte-identical — only `#` comments are added here.
class CompleteOrRoute(BaseModel):
    """Call this function to complete the current assistant's task and route the conversation back to the primary assistant."""

    # Free-text justification the model supplies when routing back.
    reason: str


class HotelBookingAgent(BaseModel):
    """Transfer work to a specialized assistant to handle hotel bookings and search for available hotels."""


class ScheduleActivityAgent(BaseModel):
    """Transfer work to a schedule assistant to handle schedule activity calendar, schedule, timetable or planning."""


class ClassifyUserIntent(BaseModel):
    """Most relevant intent"""

    # Constrained by description only; not validated as an enum in code.
    intent: str = Field(
        ...,
        description="User intent is classified into one of the following categories: book_hotel, scheduling, other",
    )
+
+
# ReAct-style itinerary planner prompt. Expects {location}, {duration},
# {start_date}, {interests}, {include_destination}, {nation}, {tools},
# {tool_names} and {agent_scratchpad} at format time.
# NOTE(review): the runtime text contains typos ("Your are", "localted");
# left byte-identical — fix deliberately, not as a comment pass.
planner_prompt = PromptTemplate.from_template(
    """
Your are Amazing Travel Concierge!. Specialist in travel planning and logistics with decades of experience in the industry localted in {location}.
Your goal is create the most amazing Daily Itinerary: Plan a day-by-day schedule of what to do, including sightseeing, activities.
Create a detailed {duration}-day travel itinerary with the following specifications:

Start date: {start_date}
Location: {location}
Customer's interests: {interests}

Required components:
1. Travel destination recommendations.
2. Restaurant, street food suggestions for each meal (with cuisine type).

Required destinations must be included in the itinerary.
{include_destination}

Note:
- Call tool destination_recommendation multiple times, with different queries to get a variety of recommendations.(query should be based on user interests).
- Call tool search_and_summarize_website(like search engine) to find information on websites.
- Using {nation} language for Final Answer.
- Using 24-hour time format for start_time and end_time (13:00 - 14:00) in format of (hour:minute).
- Use available tools to create a comprehensive travel plan.(If needed, you can call tool multiple times to get a variety of recommendations).
- If have already enough information to build the itinerary, you need directly to Final Answer.

{tools}

Use the following format:
Question: the input question you must answer
Thought: you should always think about what to do
Action: the action to take, should be one of [{tool_names}]
Action Input: the input to the action
Observation: the result of the action
... (this Thought/Action/Action Input/Observation can repeat N times)
Thought: I now know the final answer
Final Answer: the final answer to the original input question

Note: Always keep correct formatting and structure
**Begin!**


{agent_scratchpad}
"""
)
+
+
+parser_output_planner_prompt = PromptTemplate.from_template(
+ """Your task is build a travel itinerary for the user based on log of ReAct agent scratchpad. Specialist in travel planning and logistics with decades of experience in the industry localted in {location}.
+Your goal is create the most amazing Daily Itinerary: Plan a day-by-day schedule of what to do, including sightseeing, activities.
+Create a detailed {duration}-day travel itinerary with the following specifications:
+
+Start date: {start_date}
+Location: {location}
+Customer's interests: {interests}
+
+Required components for each day:
+1. Travel destination recommendations.
+2. Restaurant, street food suggestions for each meal (with cuisine type).
+
+ReAct agent scratchpad: {agent_scratchpad}
+
+
+
+Expect output format:
+
+Format the of itinerary as a daily schedule with:
+
+ Date (date with format month/day/year)
+ (start_time - end_time): Activity 1 description
+ (start_time - end_time): Activity 2 description
+ ... (repeat for all activities scheduled for the day)
+ Date (date with format month/day/year)
+ ... (repeat for all days in the itinerary)
+
+Note:
+- Using {nation} language for description.
+- Using 24-hour time format for start_time and end_time (13:00 - 14:00) in format of (hour:minute).
+- Include all destinations, activities,... in only one messsage format structure described above.
+- Not include beside information like transportation, local tips,...
+""",
+)
diff --git a/src/langgraph/models/__pycache__/BaseModel.cpython-311.pyc b/src/langgraph/models/__pycache__/BaseModel.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4a1b8af475c596f8f86b62f17f4c108caffbc489
Binary files /dev/null and b/src/langgraph/models/__pycache__/BaseModel.cpython-311.pyc differ
diff --git a/src/langgraph/models/__pycache__/__init__.cpython-311.pyc b/src/langgraph/models/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6b3b7ca6213877f93b2857778cd72daf4424e43e
Binary files /dev/null and b/src/langgraph/models/__pycache__/__init__.cpython-311.pyc differ
diff --git a/src/langgraph/models/__pycache__/activity.cpython-311.pyc b/src/langgraph/models/__pycache__/activity.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4ea846e6176076e8215b77e780c9a8d7a42aa820
Binary files /dev/null and b/src/langgraph/models/__pycache__/activity.cpython-311.pyc differ
diff --git a/src/langgraph/models/__pycache__/body.cpython-311.pyc b/src/langgraph/models/__pycache__/body.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dab05d7edb4c225d65950ebf09fe5d38f36d690e
Binary files /dev/null and b/src/langgraph/models/__pycache__/body.cpython-311.pyc differ
diff --git a/src/langgraph/models/__pycache__/date.cpython-311.pyc b/src/langgraph/models/__pycache__/date.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a5c9838f6850d767da3856e35329a12e22b573b8
Binary files /dev/null and b/src/langgraph/models/__pycache__/date.cpython-311.pyc differ
diff --git a/src/langgraph/models/__pycache__/model_validator.cpython-311.pyc b/src/langgraph/models/__pycache__/model_validator.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..109f2711f15bcbb6ca54c9e0f746ebdf4dbdd50f
Binary files /dev/null and b/src/langgraph/models/__pycache__/model_validator.cpython-311.pyc differ
diff --git a/src/langgraph/models/__pycache__/user.cpython-311.pyc b/src/langgraph/models/__pycache__/user.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0678a98920bb0688a0cba944195955db2426ccfb
Binary files /dev/null and b/src/langgraph/models/__pycache__/user.cpython-311.pyc differ
diff --git a/src/langgraph/models/model_validator.py b/src/langgraph/models/model_validator.py
new file mode 100644
index 0000000000000000000000000000000000000000..48f5b22cc674812059b28514b64bb13c9d11537e
--- /dev/null
+++ b/src/langgraph/models/model_validator.py
@@ -0,0 +1,48 @@
+# from pydantic import BaseModel, Field, EmailStr
+# from typing import Optional
+# from datetime import datetime, timezone
+
+
+# class BaseDocument(BaseModel):
+# created_at: Optional[datetime] = Field(
+# default_factory=lambda: datetime.now(timezone.utc)
+# )
+# updated_at: Optional[datetime] = Field(
+# default_factory=lambda: datetime.now(timezone.utc)
+# )
+
+# class Config:
+# arbitrary_types_allowed = True
+
+# class Activity(BaseDocument):
+# id: Optional[str] = Field("", description="Activity's id")
+# user_id: str = Field("", description="User's id")
+# activity_category: str = Field("", description="Activity's category")
+# description: str = Field("", description="Activity's description")
+# start_time: Optional[datetime] = Field("", description="Activity's start time")
+# end_time: Optional[datetime] = Field("", description="Activity's end time")
+
+
+# class BookHotel(BaseDocument):
+# hotel_name: str = Field("", description="Hotel's name")
+# address: str = Field("", description="Hotel's address")
+# phone_number: str = Field("", description="Hotel's phone number")
+# hotel_email: EmailStr = Field("", description="Hotel's email")
+# start_time: Optional[datetime] = Field("", description="Start time of the booking")
+# end_time: Optional[datetime] = Field("", description="End time of the booking")
+# rating: str = Field("", description="Hotel's rating")
+# website: str = Field("", description="Hotel's website")
+
+# class Config:
+# json_schema_extra = {
+# "example": {
+# "hotel_name": "Blue Lagoon Resort",
+# "address": "123 Beachside Blvd, Paradise City, Island Nation 54321",
+# "phone_number": "+1234567890",
+# "hotel_email": "baohtqe170017@fpt.edu.vn",
+# "start_time": "2025-01-05T14:00:00.000+00:00",
+# "end_time": "2025-01-10T11:00:00.000+00:00",
+# "rating": "4.5",
+# "website": "https://www.bluelagoonresort.com",
+# }
+# }
diff --git a/src/langgraph/multi_agent/.DS_Store b/src/langgraph/multi_agent/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..3f72ffe941da2bf9817ccad5ed4a1fcfa7c60aba
Binary files /dev/null and b/src/langgraph/multi_agent/.DS_Store differ
diff --git a/src/langgraph/multi_agent/__pycache__/asking.cpython-311.pyc b/src/langgraph/multi_agent/__pycache__/asking.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ccf7572d3ed6bc65828fe612b8910c83aac51eeb
Binary files /dev/null and b/src/langgraph/multi_agent/__pycache__/asking.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/__pycache__/chat_flow.cpython-311.pyc b/src/langgraph/multi_agent/__pycache__/chat_flow.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dd8b2267c74b3221211bb762e9d22ffdd2f90851
Binary files /dev/null and b/src/langgraph/multi_agent/__pycache__/chat_flow.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/__pycache__/hotel_flow.cpython-311.pyc b/src/langgraph/multi_agent/__pycache__/hotel_flow.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a8598aa33d2e85973fd3cf0be776078c0f0daf2d
Binary files /dev/null and b/src/langgraph/multi_agent/__pycache__/hotel_flow.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/__pycache__/main.cpython-311.pyc b/src/langgraph/multi_agent/__pycache__/main.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..811448c6e7de469107af1cbaee293925170f7df5
Binary files /dev/null and b/src/langgraph/multi_agent/__pycache__/main.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/__pycache__/main_2.cpython-311.pyc b/src/langgraph/multi_agent/__pycache__/main_2.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..18b6f84eaf58850f7f56eda095b303a930bd9b63
Binary files /dev/null and b/src/langgraph/multi_agent/__pycache__/main_2.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/__pycache__/main_flow.cpython-311.pyc b/src/langgraph/multi_agent/__pycache__/main_flow.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..68612d22aeb7b53144bbe539591bfee5c556fa9c
Binary files /dev/null and b/src/langgraph/multi_agent/__pycache__/main_flow.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/__pycache__/router.cpython-311.pyc b/src/langgraph/multi_agent/__pycache__/router.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bebaec8b731d6a88688c7f9575b0ed5cf1a01f87
Binary files /dev/null and b/src/langgraph/multi_agent/__pycache__/router.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/__pycache__/scheduling.cpython-311.pyc b/src/langgraph/multi_agent/__pycache__/scheduling.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3c5f27c56f648d59095a1560b54f2cad3e0faf95
Binary files /dev/null and b/src/langgraph/multi_agent/__pycache__/scheduling.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/__pycache__/scheduling_flow.cpython-311.pyc b/src/langgraph/multi_agent/__pycache__/scheduling_flow.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..650d91831775483bb5a9a5c07a7bfb6fa6bbacfa
Binary files /dev/null and b/src/langgraph/multi_agent/__pycache__/scheduling_flow.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/chat/__pycache__/chat_flow.cpython-311.pyc b/src/langgraph/multi_agent/chat/__pycache__/chat_flow.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8dd46bd8ee110bd7c50a92675c28fe17b9b0055f
Binary files /dev/null and b/src/langgraph/multi_agent/chat/__pycache__/chat_flow.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/chat/__pycache__/hotel_flow.cpython-311.pyc b/src/langgraph/multi_agent/chat/__pycache__/hotel_flow.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..88fa51a5e1362141730e95d8f5d43137a53970f8
Binary files /dev/null and b/src/langgraph/multi_agent/chat/__pycache__/hotel_flow.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/chat/__pycache__/scheduling_flow.cpython-311.pyc b/src/langgraph/multi_agent/chat/__pycache__/scheduling_flow.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7d99f7758ffb7be9769b57ff7889624a134b0afb
Binary files /dev/null and b/src/langgraph/multi_agent/chat/__pycache__/scheduling_flow.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/chat/chat_flow.py b/src/langgraph/multi_agent/chat/chat_flow.py
new file mode 100644
index 0000000000000000000000000000000000000000..990283f38a9f2eba19338157e50b4ec8e28a5659
--- /dev/null
+++ b/src/langgraph/multi_agent/chat/chat_flow.py
@@ -0,0 +1,134 @@
+from langgraph.graph import END, StateGraph, START
+from langchain_community.tools.tavily_search import TavilySearchResults
+from langgraph.prebuilt import tools_condition
+from src.langgraph.langchain.llm import llm
+from src.langgraph.config.agent import Agent
+from src.langgraph.state import State
+from src.langgraph.langchain.prompt import (
+ primary_assistant_prompt,
+ classify_user_intent_prompt,
+ ClassifyUserIntent,
+ HotelBookingAgent,
+ ScheduleActivityAgent,
+)
+from src.langgraph.utils_function.function_graph import (
+ create_tool_node_with_fallback,
+ get_history,
+ save_history,
+)
+from src.langgraph.tools.destination_tools import destination_recommendation
+from src.utils.logger import logger
+
+# Primary Assistant
+# Real tools the primary assistant can execute; the two BaseModel classes
+# added in bind_tools below are routing pseudo-tools, not executable tools.
+primary_assistant_tools = [
+    TavilySearchResults(max_results=2),
+    destination_recommendation,
+]
+
+assistant_runnable = primary_assistant_prompt | llm.bind_tools(
+    primary_assistant_tools + [ScheduleActivityAgent, HotelBookingAgent]
+)
+
+
+builder = StateGraph(State)
+
+
+def leave_skill_fn(state: State):
+    """Clear the stored intent so the next pass through "classify_intent"
+    re-classifies the conversation from scratch."""
+    return {"intent": None}
+
+
+builder.add_node("leave_skill", leave_skill_fn)
+builder.add_edge("leave_skill", "classify_intent")
+# NOTE(review): these mid-module imports appear to exist for their side
+# effects — both sub-flow modules import this `builder` and register their
+# nodes/edges on it; the aliased names are unused here. Confirm intent.
+from .scheduling_flow import builder as scheduling_builder
+from .hotel_flow import builder as hotel_builder
+
+
+# Primary assistant
+builder.add_node("fetch_history", get_history)
+builder.add_edge(START, "fetch_history")
+
+
+async def classify_user_intent_fn(state: State):
+    """Classify the user's latest messages into book_hotel / scheduling / other.
+
+    A previously stored intent short-circuits classification; an "other"
+    result is normalized to None so routing falls back to the primary
+    assistant.
+    """
+    if state["intent"] is not None:
+        return {"intent": state["intent"]}
+    user_query = state["messages"]
+    history = state["messages_history"]
+    chain_classify = classify_user_intent_prompt | llm.with_structured_output(
+        ClassifyUserIntent
+    )
+    response: ClassifyUserIntent = await chain_classify.ainvoke(
+        {"messages": user_query, "history": history}
+    )
+    logger.info(f"Classify user intent: {response.intent}")
+    return {"intent": None if response.intent == "other" else response.intent}
+
+
+builder.add_node("classify_intent", classify_user_intent_fn)
+builder.add_edge("fetch_history", "classify_intent")
+
+
+def routing_assistant(state: State):
+ logger.info("Routing assistant")
+ if state["intent"] is None:
+ logger.info("No intent")
+ return "primary_assistant"
+ elif state["intent"] == "book_hotel":
+ logger.info("Book hotel")
+ return "enter_book_hotel"
+ elif state["intent"] == "scheduling":
+ logger.info("Scheduling")
+ return "enter_schedule_activity"
+
+
+builder.add_conditional_edges(
+    "classify_intent",
+    routing_assistant,
+    {
+        "primary_assistant": "primary_assistant",
+        "enter_book_hotel": "enter_book_hotel",
+        "enter_schedule_activity": "enter_schedule_activity",
+    },
+)
+
+
+builder.add_node("primary_assistant", Agent(assistant_runnable))
+builder.add_node(
+    "primary_assistant_tools", create_tool_node_with_fallback(primary_assistant_tools)
+)
+
+
+def route_primary_assistant(
+    state: State,
+):
+    """Conditional edge after the primary assistant.
+
+    No tool calls -> END (save history). A HotelBookingAgent /
+    ScheduleActivityAgent pseudo-tool call -> the matching sub-flow entry.
+    Anything else -> the real tool node.
+    """
+    logger.info("Route primary assistant")
+    route = tools_condition(state)
+    if route == END:
+        return END
+    tool_calls = state["messages"][-1].tool_calls
+    if tool_calls:
+        # Only the first tool call is inspected for routing; a mixed batch
+        # falls through to the plain tool node.
+        if tool_calls[0]["name"] == HotelBookingAgent.__name__:
+            logger.info("To hotel booking assistant")
+            return "enter_book_hotel"
+        if tool_calls[0]["name"] == ScheduleActivityAgent.__name__:
+            logger.info("To schedule activity")
+            return "enter_schedule_activity"
+        logger.info("Not hotel booking assistant")
+        return "primary_assistant_tools"
+    raise ValueError("Invalid route")
+
+
+builder.add_conditional_edges(
+    "primary_assistant",
+    route_primary_assistant,
+    {
+        END: "save_history",
+        "enter_book_hotel": "enter_book_hotel",
+        "enter_schedule_activity": "enter_schedule_activity",
+        "primary_assistant_tools": "primary_assistant_tools",
+    },
+)
+builder.add_edge("primary_assistant_tools", "primary_assistant")
+builder.add_node("save_history", save_history)
+builder.add_edge("save_history", END)
+
+app = builder.compile()
diff --git a/src/langgraph/multi_agent/chat/hotel_flow.py b/src/langgraph/multi_agent/chat/hotel_flow.py
new file mode 100644
index 0000000000000000000000000000000000000000..85957b6ccbce601a1b573e70e3ee223c71890acc
--- /dev/null
+++ b/src/langgraph/multi_agent/chat/hotel_flow.py
@@ -0,0 +1,129 @@
+from langgraph.prebuilt import tools_condition
+from langgraph.graph import END
+from langchain_core.messages import ToolMessage, AIMessage
+from src.langgraph.tools.hotel_tools import (
+ book_hotel,
+ update_hotel,
+ cancel_hotel,
+ search_hotels,
+)
+from src.langgraph.langchain.llm import llm
+from src.langgraph.config.agent import Agent
+from src.langgraph.utils_function.function_graph import (
+ create_tool_node_with_fallback,
+ human_review_node,
+)
+from src.langgraph.langchain.prompt import (
+ book_hotel_prompt,
+ CompleteOrRoute,
+ create_entry_node,
+)
+from .chat_flow import builder
+from src.langgraph.state import State
+from src.utils.logger import logger
+
+# Safe tools run without confirmation; sensitive tools mutate bookings and
+# go through the human-review node first.
+book_hotel_safe_tools = [search_hotels]
+book_hotel_sensitive_tools = [book_hotel, update_hotel, cancel_hotel]
+# NOTE(review): only the safe tools are bound to the LLM here, whereas
+# scheduling_flow binds safe + sensitive tools. Confirm the sensitive hotel
+# tools are intentionally unreachable by direct LLM tool calls.
+book_hotel_tools = book_hotel_safe_tools
+book_hotel_runnable = book_hotel_prompt | llm.bind_tools(
+    book_hotel_tools + [CompleteOrRoute]
+)
+builder.add_node(
+    "enter_book_hotel", create_entry_node("Hotel Booking Assistant", "book_hotel")
+)
+builder.add_node("book_hotel_agent", Agent(book_hotel_runnable))
+builder.add_edge("enter_book_hotel", "book_hotel_agent")
+builder.add_node(
+    "book_hotel_safe_tools",
+    create_tool_node_with_fallback(book_hotel_safe_tools),
+)
+builder.add_node(
+    "book_hotel_sensitive_tools",
+    create_tool_node_with_fallback(book_hotel_sensitive_tools),
+)
+builder.add_node("user_review_book_hotel", human_review_node)
+
+
+def routing_user_review_book_hotel(state: State):
+ if state["accept"] == True:
+ return "book_hotel_sensitive_tools"
+ return "save_history"
+
+
+builder.add_conditional_edges(
+    "user_review_book_hotel",
+    routing_user_review_book_hotel,
+    {
+        "book_hotel_sensitive_tools": "book_hotel_sensitive_tools",
+        "save_history": "save_history",
+    },
+)
+builder.add_edge("book_hotel_sensitive_tools", "book_hotel_agent")
+
+
+def check_search_hotels(state: State):
+    """Detour search_hotels tool output through the formatting node; every
+    other safe-tool result goes back to the hotel agent."""
+    name = state["messages"][-1].name
+    if name == "search_hotels":
+        return "format_search_hotels"
+    return "book_hotel_agent"
+
+
+builder.add_conditional_edges(
+    "book_hotel_safe_tools",
+    check_search_hotels,
+    {
+        "book_hotel_agent": "book_hotel_agent",
+        "format_search_hotels": "format_search_hotels",
+    },
+)
+
+
+def format_search_hotels_fn(state: State):
+    """Wrap the first search_hotels ToolMessage in an AIMessage for the user.
+
+    NOTE(review): returns None implicitly when no matching ToolMessage exists
+    (the graph then applies no state update) — confirm that is intended.
+    """
+    for message in state["messages"]:
+        if isinstance(message, ToolMessage) and message.name == "search_hotels":
+            # new_content = f"Here are the search results for hotels near location which you can book:\n {message.content}"
+            # return {"messages": AIMessage(content=new_content)}
+            # Suffix appears to be a cue for the frontend hotel picker — TODO confirm.
+            return {"messages": AIMessage(content=message.content + " on frontend for user to select")}
+
+
+def route_book_hotel(
+    state: State,
+):
+    """Conditional edge after the hotel-booking agent.
+
+    Order of checks: pending "y" confirmation -> sensitive tools (or END);
+    no tool calls -> END; CompleteOrRoute -> leave_skill; only safe tools ->
+    safe tool node; otherwise -> human review.
+    """
+    logger.info("Route book hotel")
+    # NOTE(review): unlike route_scheduling, messages_history is not checked
+    # for None before indexing — confirm it is always populated here.
+    if (
+        state["messages"][0].content == "y"
+        and "Do you want to run the following tool(s)?"
+        in state["messages_history"][-1].content
+    ):
+        if state["messages"][-1].tool_calls:
+            return "book_hotel_sensitive_tools"
+        return END
+    route = tools_condition(state)
+    if route == END:
+        return END
+    tool_calls = state["messages"][-1].tool_calls
+    did_cancel = any(tc["name"] == CompleteOrRoute.__name__ for tc in tool_calls)
+    logger.info(f"Did cancel: {did_cancel}")
+    if did_cancel:
+        return "leave_skill"
+    tool_names = [t.name for t in book_hotel_safe_tools]
+    if all(tc["name"] in tool_names for tc in tool_calls):
+        logger.info("Book hotel safe tools")
+        return "book_hotel_safe_tools"
+    logger.info("User review")
+    return "user_review_book_hotel"
+
+
+builder.add_node("format_search_hotels", format_search_hotels_fn)
+builder.add_edge("format_search_hotels", "save_history")
+builder.add_conditional_edges(
+    "book_hotel_agent",
+    route_book_hotel,
+    {
+        "book_hotel_sensitive_tools": "book_hotel_sensitive_tools",
+        END: "save_history",
+        "leave_skill": "leave_skill",
+        "book_hotel_safe_tools": "book_hotel_safe_tools",
+        "user_review_book_hotel": "user_review_book_hotel",
+    },
+)
diff --git a/src/langgraph/multi_agent/chat/scheduling_flow.py b/src/langgraph/multi_agent/chat/scheduling_flow.py
new file mode 100644
index 0000000000000000000000000000000000000000..0ffdce156ac2917f81459dbe03e3d8d39f3803af
--- /dev/null
+++ b/src/langgraph/multi_agent/chat/scheduling_flow.py
@@ -0,0 +1,99 @@
+from langgraph.prebuilt import tools_condition
+from langgraph.graph import END
+from src.langgraph.tools.scheduling_tools import (
+ create_a_activity,
+ search_activities,
+ update_a_activiy,
+ delete_a_activity,
+)
+from src.langgraph.langchain.llm import llm
+from src.langgraph.config.agent import Agent
+from src.langgraph.utils_function.function_graph import (
+ create_tool_node_with_fallback,
+ human_review_node,
+)
+from src.langgraph.langchain.prompt import (
+ scheduling_prompt,
+ CompleteOrRoute,
+ create_entry_node,
+)
+from .chat_flow import builder
+from src.langgraph.state import State
+from src.utils.logger import logger
+
+# Safe tools run without confirmation; sensitive tools mutate activities and
+# pass through the human-review node first.
+scheduling_safe_tools = [search_activities]
+scheduling_sensitive_tools = [
+    create_a_activity,
+    update_a_activiy,  # name typo originates in scheduling_tools; kept as imported
+    delete_a_activity,
+]
+scheduling_tools = scheduling_sensitive_tools + scheduling_safe_tools
+scheduling_runnable = scheduling_prompt | llm.bind_tools(
+    scheduling_tools + [CompleteOrRoute]
+)
+
+builder.add_node(
+    "enter_schedule_activity",
+    create_entry_node("Schedule Activity Assistant", "scheduling"),
+)
+builder.add_node("scheduling_agent", Agent(scheduling_runnable))
+builder.add_edge("enter_schedule_activity", "scheduling_agent")
+builder.add_node(
+    "scheduling_safe_tools", create_tool_node_with_fallback(scheduling_safe_tools)
+)
+builder.add_node(
+    "scheduling_sensitive_tools",
+    create_tool_node_with_fallback(scheduling_sensitive_tools),
+)
+
+builder.add_node("user_review_scheduling", human_review_node)
+
+
+def route_scheduling(
+ state: State,
+):
+ logger.info("Route scheduling")
+ if (
+ state["messages_history"] is not None
+ and state["messages"][0].content == "y"
+ and "Do you want to run the following tool(s)?"
+ in state["messages_history"][-1].content
+ ):
+ if state["messages"][-1].tool_calls:
+ logger.info("Sensitive tools")
+ return "scheduling_sensitive_tools"
+ logger.info("Safe tools")
+ return END
+
+ route = tools_condition(state)
+ if route == END:
+ return END
+ tool_calls = state["messages"][-1].tool_calls
+ did_cancel = any(tc["name"] == CompleteOrRoute.__name__ for tc in tool_calls)
+ logger.info(f"Did cancel: {did_cancel}")
+ if did_cancel:
+ return "leave_skill"
+ tool_names = [t.name for t in scheduling_safe_tools]
+ if all(tc["name"] in tool_names for tc in tool_calls):
+ logger.info("Book hotel safe tools")
+ return "scheduling_safe_tools"
+ logger.info("User review")
+ return "user_review_scheduling"
+
+
+# Wire the scheduling sub-flow: tool nodes loop back to the agent; review
+# rejection and END both persist history on the shared "save_history" node.
+builder.add_conditional_edges(
+    "scheduling_agent",
+    route_scheduling,
+    {
+        "scheduling_sensitive_tools": "scheduling_sensitive_tools",
+        END: "save_history",
+        "leave_skill": "leave_skill",
+        "scheduling_safe_tools": "scheduling_safe_tools",
+        "user_review_scheduling": "user_review_scheduling",
+    },
+)
+
+
+builder.add_edge("user_review_scheduling", "save_history")
+builder.add_edge("scheduling_sensitive_tools", "scheduling_agent")
+builder.add_edge("scheduling_safe_tools", "scheduling_agent")
diff --git a/src/langgraph/multi_agent/planner/__pycache__/planner_flow.cpython-311.pyc b/src/langgraph/multi_agent/planner/__pycache__/planner_flow.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fd8cef3a3f613a7ce9e8f17d1a9f8cecafcc372d
Binary files /dev/null and b/src/langgraph/multi_agent/planner/__pycache__/planner_flow.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/planner/__pycache__/react_agent.cpython-311.pyc b/src/langgraph/multi_agent/planner/__pycache__/react_agent.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e9af8f481b2d7a46c24efc331b648aa70ba27073
Binary files /dev/null and b/src/langgraph/multi_agent/planner/__pycache__/react_agent.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/planner/__pycache__/utils.cpython-311.pyc b/src/langgraph/multi_agent/planner/__pycache__/utils.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1badecefa8f7e06706ee99dd839ae6d5f89ecd1f
Binary files /dev/null and b/src/langgraph/multi_agent/planner/__pycache__/utils.cpython-311.pyc differ
diff --git a/src/langgraph/multi_agent/planner/planner_flow.py b/src/langgraph/multi_agent/planner/planner_flow.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9643b58b030505e4cee93fc7a1d4cf1177e7588
--- /dev/null
+++ b/src/langgraph/multi_agent/planner/planner_flow.py
@@ -0,0 +1,192 @@
+from dotenv import load_dotenv
+
+load_dotenv()
+from src.langgraph.langchain.llm import llm
+from src.langgraph.tools.destination_tools import destination_recommendation
+from src.langgraph.tools.search_tools import search_and_summarize_website
+from .react_agent import create_react_agent
+from src.langgraph.langchain.prompt import planner_prompt, parser_output_planner_prompt
+from typing import TypedDict, Union, Any, Optional
+from langchain_core.agents import AgentAction, AgentFinish
+from langchain_core.exceptions import OutputParserException
+from langgraph.graph import StateGraph, START, END
+from src.utils.logger import logger
+from src.utils.helper import format_include_destinations
+import operator
+from typing import Annotated
+from .utils import format_log_to_str
+from langchain.agents.output_parsers import ReActSingleInputOutputParser
+
+
+class State(TypedDict):
+    """Planner graph state.
+
+    NOTE(review): several keys are misspelled ("tools_ouput",
+    "limit_interation", "current_interation") but are read and written by
+    name throughout this module, so they must stay as-is for compatibility.
+    """
+    # AgentAction per LLM step; operator.add concatenates across updates.
+    llm_response: Annotated[list[Union[AgentAction, Any]], operator.add]
+    # Tool outputs, zipped index-for-index with llm_response in agent_fn.
+    tools_ouput: Annotated[list[str], operator.add]
+    # error: Union[OutputParserException, Any]
+    error: Optional[Any]
+    duration: str
+    start_date: str
+    location: str
+    interests: str
+    nation: str
+    include_destination: list
+    limit_interation: int
+    current_interation: int
+    final_answer: str
+
+
+# Parses the raw LLM text into an AgentAction or AgentFinish.
+parser = ReActSingleInputOutputParser()
+
+tools = [destination_recommendation, search_and_summarize_website]
+tools_mapping = {tool.name: tool for tool in tools}
+
+
+async def agent_fn(state: State):
+    """One ReAct step: rebuild the scratchpad from past actions/observations,
+    invoke the LLM, and parse its reply into an AgentAction/AgentFinish.
+
+    On OutputParserException the raw error is stored in state["error"]; the
+    next pass appends a recovery instruction to the scratchpad.
+    """
+    llm_response = state["llm_response"]
+    tools_output = state["tools_ouput"]
+    error = state["error"]
+    duration = state["duration"]
+    start_date = state["start_date"]
+    location = state["location"]
+    interests = state["interests"]
+    nation = state["nation"]
+    include_destination = format_include_destinations(state["include_destination"])
+    prompt = planner_prompt.partial(
+        duration=duration,
+        start_date=start_date,
+        location=location,
+        interests=interests,
+        nation=nation,
+        include_destination=include_destination,
+    )
+    # Replay past (action, observation) pairs into ReAct scratchpad text.
+    if len(llm_response) != 0:
+        agent_scratchpad = format_log_to_str(
+            zip(llm_response, tools_output), llm_prefix=""
+        )
+    else:
+        agent_scratchpad = ""
+    if error:
+        if isinstance(error, OutputParserException):
+            error = error.observation
+        agent_scratchpad += (
+            "\nPrevious response have error: "
+            + str(error)
+            + "so agent will try to recover. Please return in right format defined in prompt"
+        )
+    agent = create_react_agent(llm, tools, prompt)
+    try:
+        response = await agent.ainvoke(agent_scratchpad)
+        logger.info(f"-> Agent response {response.content}")
+        response_paser: Union[AgentAction, AgentFinish] = parser.parse(response.content)
+        return {
+            "llm_response": [response_paser],
+            "error": None,
+        }
+    except OutputParserException as e:
+        response = e.observation
+        logger.error(f"Error in agent invoke {e}")
+        return {
+            "error": e,
+        }
+ }
+
+
+def after_call_agent(state: State):
+ error = state["error"]
+ llm_response = state["llm_response"][-1]
+ if isinstance(error, OutputParserException):
+ logger.info("-> paser output 1")
+ return "parse_output"
+ else:
+ logger.info("-> Tool")
+ return "execute_tools"
+
+
+async def excute_tools_fn(state: State, config):
+ llm_response: AgentAction = state["llm_response"][-1]
+ tool_call_name = llm_response.tool
+ tool_args = llm_response.tool_input
+ logger.info(f"-> Tool name: {tool_call_name}")
+ logger.info(f"-> Tool args: {tool_args}")
+ if tool_call_name == "destination_recommendation":
+ tool_response = await destination_recommendation.ainvoke(
+ {"query": tool_args, "config": ""}
+ )
+ elif tool_call_name == "search_and_summarize_website":
+ tool_response = await search_and_summarize_website.ainvoke({"query": tool_args})
+ logger.info("-> Agent")
+ return {
+ "tools_ouput": [tool_response],
+ "error": None,
+ "current_interation": state["current_interation"] + 1,
+ }
+
+
+async def parser_output_fn(state: State):
+    """Final node: feed the accumulated ReAct scratchpad to the formatting
+    prompt and store the LLM's formatted itinerary as final_answer."""
+    llm_response = state["llm_response"]
+    tools_output = state["tools_ouput"]
+    error = state["error"]
+    duration = state["duration"]
+    start_date = state["start_date"]
+    location = state["location"]
+    interests = state["interests"]
+    nation = state["nation"]
+    # Same scratchpad reconstruction as agent_fn, including the error note.
+    if len(llm_response) != 0:
+        agent_scratchpad = format_log_to_str(
+            zip(llm_response, tools_output), llm_prefix=""
+        )
+    else:
+        agent_scratchpad = ""
+    if error:
+        if isinstance(error, OutputParserException):
+            error = error.observation
+        agent_scratchpad += (
+            "\nPrevious response have error: "
+            + str(error)
+            + "so agent will try to recover. Please return in right format defined in prompt"
+        )
+    prompt = parser_output_planner_prompt.partial(
+        duration=duration,
+        start_date=start_date,
+        location=location,
+        interests=interests,
+        nation=nation,
+    )
+    chain_output = prompt | llm
+    output = await chain_output.ainvoke({"agent_scratchpad": agent_scratchpad})
+    return {
+        "final_answer": output.content,
+    }
+
+
+# Planner graph: agent -> (tool | parse) loop, bounded by the iteration limit.
+workflow = StateGraph(State)
+workflow.add_node("agent", agent_fn)
+workflow.add_node("execute_tools", excute_tools_fn)
+workflow.add_node("parse_output", parser_output_fn)
+
+workflow.add_edge(START, "agent")
+workflow.add_conditional_edges(
+    "agent",
+    after_call_agent,
+    {
+        "parse_output": "parse_output",
+        "execute_tools": "execute_tools",
+    },
+)
+
+
+def after_execute_tools(state: State):
+ if state["current_interation"] >= state["limit_interation"]:
+ return "parse_output"
+ return "agent"
+
+
+workflow.add_conditional_edges(
+    "execute_tools",
+    after_execute_tools,
+    {
+        "parse_output": "parse_output",
+        "agent": "agent",
+    },
+)
+workflow.add_edge("parse_output", END)
+
+# Compiled planner graph, imported by the API layer.
+planner_app = workflow.compile()
diff --git a/src/langgraph/multi_agent/planner/react_agent.py b/src/langgraph/multi_agent/planner/react_agent.py
new file mode 100644
index 0000000000000000000000000000000000000000..f00664da34ce9e46287e382cdc94a8b39eefd4a0
--- /dev/null
+++ b/src/langgraph/multi_agent/planner/react_agent.py
@@ -0,0 +1,141 @@
+from __future__ import annotations
+
+from typing import List, Optional, Sequence, Union
+
+from langchain_core.language_models import BaseLanguageModel
+from langchain_core.prompts import BasePromptTemplate
+from langchain_core.runnables import Runnable, RunnablePassthrough
+from langchain_core.tools import BaseTool
+from langchain_core.tools.render import ToolsRenderer, render_text_description
+from langchain.agents import AgentOutputParser
+from langchain.agents.output_parsers import ReActSingleInputOutputParser
+
+
def create_react_agent(
    llm: BaseLanguageModel,
    tools: Sequence[BaseTool],
    prompt: BasePromptTemplate,
    output_parser: Optional[AgentOutputParser] = None,
    tools_renderer: ToolsRenderer = render_text_description,
    *,
    stop_sequence: Union[bool, List[str]] = True,
) -> Runnable:
    """Create an agent that uses ReAct prompting.

    Based on paper "ReAct: Synergizing Reasoning and Acting in Language Models"
    (https://arxiv.org/abs/2210.03629)

    This is a project-local fork of LangChain's ``create_react_agent``: unlike
    the upstream helper it does NOT apply an output parser (the raw LLM message
    is parsed later by a dedicated graph node) and it feeds the prompt only an
    ``agent_scratchpad`` string.

    Args:
        llm: LLM to use as the agent.
        tools: Tools this agent has access to.
        prompt: The prompt to use. Must declare (or have partialed) the input
            variables `tools`, `tool_names` and `agent_scratchpad`.
        output_parser: Accepted for signature compatibility with LangChain's
            helper, but currently unused (parsing happens in a separate node).
        tools_renderer: This controls how the tools are converted into a string
            and then passed into the LLM. Default is `render_text_description`.
        stop_sequence: bool or list of str.
            If True, adds default stop tokens to avoid hallucinated
            observations/answers.
            If False, does not add a stop token.
            If a list of str, uses the provided list as the stop tokens.

            Default is True. You may want to set this to False if the LLM you
            are using does not support stop sequences.

    Returns:
        A Runnable sequence taking ``{"agent_scratchpad": str}`` as input and
        returning the raw LLM message (not an AgentAction/AgentFinish, since
        no output parser is applied here).
    """  # noqa: E501
    # Fail fast if the prompt cannot receive the variables this function fills.
    missing_vars = {"tools", "tool_names", "agent_scratchpad"}.difference(
        prompt.input_variables + list(prompt.partial_variables)
    )
    if missing_vars:
        raise ValueError(f"Prompt missing required variables: {missing_vars}")

    # Bake the tool descriptions and names into the prompt once, up front.
    prompt = prompt.partial(
        tools=tools_renderer(list(tools)),
        tool_names=", ".join([t.name for t in tools]),
    )
    if stop_sequence:
        # NOTE(review): besides the canonical "\nObservation" stop token, the
        # defaults also stop on "\nFinal" and "Answer:" so the model never emits
        # the final answer inside this loop -- the parse node produces it.
        # Confirm this is intentional before reusing elsewhere.
        stop = (
            ["\nObservation", "\nFinal", "Answer:"]
            if stop_sequence is True
            else stop_sequence
        )
        llm_with_stop = llm.bind(stop=stop)
    else:
        llm_with_stop = llm
    # `output_parser` is intentionally NOT applied (dead assignment removed):
    # the graph's parse_output node interprets the raw completion instead.
    agent = (
        {
            "agent_scratchpad": RunnablePassthrough(),
        }
        | prompt
        | llm_with_stop
    )
    return agent
diff --git a/src/langgraph/multi_agent/planner/utils.py b/src/langgraph/multi_agent/planner/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..d2fefd30f4c8b82538e2fd3b38cb2f9fce002e11
--- /dev/null
+++ b/src/langgraph/multi_agent/planner/utils.py
@@ -0,0 +1,27 @@
+from typing import List, Tuple
+
+from langchain_core.agents import AgentAction
+
+
def format_log_to_str(
    intermediate_steps: List[Tuple[AgentAction, str]],
    observation_prefix: str = "Observation: ",
    llm_prefix: str = "Thought: ",
) -> str:
    """Serialize agent steps into the scratchpad string the LLM continues from.

    Args:
        intermediate_steps: Pairs of (AgentAction, observation string), in order.
        observation_prefix: Label placed before each observation.
            Defaults to "Observation: ".
        llm_prefix: Label placed after each observation to cue the next LLM
            turn. Defaults to "Thought: ".

    Returns:
        str: The concatenated scratchpad text.
    """
    # Each step contributes: its raw log, then the labelled observation,
    # then the cue for the next thought.
    return "".join(
        f"{action.log}\n{observation_prefix}{observation}\n{llm_prefix}"
        for action, observation in intermediate_steps
    )
diff --git a/src/langgraph/state.py b/src/langgraph/state.py
new file mode 100644
index 0000000000000000000000000000000000000000..4ee09dac4b6fdd171d78715d1ae0ef0a27ada40d
--- /dev/null
+++ b/src/langgraph/state.py
@@ -0,0 +1,14 @@
+from typing_extensions import TypedDict, Annotated, Sequence
+from langgraph.graph.message import AnyMessage, add_messages
+from langchain_core.messages import BaseMessage
+
+
class State(TypedDict):
    """Shared LangGraph state threaded through every node of the chat graph."""

    # Running message list; add_messages merges updates instead of replacing.
    messages: Annotated[Sequence[AnyMessage], add_messages]
    # Classified intent of the current user turn -- producer not visible here;
    # TODO confirm which node sets it.
    intent: str
    # Prior conversation loaded (and trimmed) from persistent history.
    messages_history: list[AnyMessage]
    # When falsy, the save-history node skips persisting this turn.
    manual_save: bool
    # Human-in-the-loop approval flag for pending tool calls (reset to False
    # by the review node until the user answers).
    accept: bool
    # The message that entered the graph for this turn.
    entry_message: BaseMessage
    # Name of the tool involved in the current turn -- presumably set during
    # routing/review; verify against the producing node.
    tool_name: str
    # Conversation/response language.
    language: str
diff --git a/src/langgraph/tools/.DS_Store b/src/langgraph/tools/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..05d35577352022c8d321042b3ba95010dc66d026
Binary files /dev/null and b/src/langgraph/tools/.DS_Store differ
diff --git a/src/langgraph/tools/__pycache__/__init__.cpython-311.pyc b/src/langgraph/tools/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..42f1607c268b8752eae36815be14345ec36c591c
Binary files /dev/null and b/src/langgraph/tools/__pycache__/__init__.cpython-311.pyc differ
diff --git a/src/langgraph/tools/__pycache__/asking_tools.cpython-311.pyc b/src/langgraph/tools/__pycache__/asking_tools.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4b15422b913b271cfed14d9f56d7aa72df800bd7
Binary files /dev/null and b/src/langgraph/tools/__pycache__/asking_tools.cpython-311.pyc differ
diff --git a/src/langgraph/tools/__pycache__/destination_tools.cpython-311.pyc b/src/langgraph/tools/__pycache__/destination_tools.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b984ce91a8f6dec76296340424f72ede65cc29a7
Binary files /dev/null and b/src/langgraph/tools/__pycache__/destination_tools.cpython-311.pyc differ
diff --git a/src/langgraph/tools/__pycache__/hotel_tools.cpython-311.pyc b/src/langgraph/tools/__pycache__/hotel_tools.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..176afcafd763a100cc467e308f162df64f1d592a
Binary files /dev/null and b/src/langgraph/tools/__pycache__/hotel_tools.cpython-311.pyc differ
diff --git a/src/langgraph/tools/__pycache__/hotels.cpython-311.pyc b/src/langgraph/tools/__pycache__/hotels.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f23d878a7215343d0fbe1b0eef90a923b5e87e03
Binary files /dev/null and b/src/langgraph/tools/__pycache__/hotels.cpython-311.pyc differ
diff --git a/src/langgraph/tools/__pycache__/scheduling_tools.cpython-311.pyc b/src/langgraph/tools/__pycache__/scheduling_tools.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8bb353ea8579a9b7d64c51d76b407cb6f571381d
Binary files /dev/null and b/src/langgraph/tools/__pycache__/scheduling_tools.cpython-311.pyc differ
diff --git a/src/langgraph/tools/__pycache__/search_tools.cpython-311.pyc b/src/langgraph/tools/__pycache__/search_tools.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..88fe7d0432027983d6dd5a4612501641e10f845b
Binary files /dev/null and b/src/langgraph/tools/__pycache__/search_tools.cpython-311.pyc differ
diff --git a/src/langgraph/tools/__pycache__/user.cpython-311.pyc b/src/langgraph/tools/__pycache__/user.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bbb08d6934215382b561cd158d5906ecf6bff2f3
Binary files /dev/null and b/src/langgraph/tools/__pycache__/user.cpython-311.pyc differ
diff --git a/src/langgraph/tools/destination_tools.py b/src/langgraph/tools/destination_tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d0fc7506ae0044b7e18c2be27217d336568340f
--- /dev/null
+++ b/src/langgraph/tools/destination_tools.py
@@ -0,0 +1,18 @@
+from langchain_core.tools import tool
+from src.apis.controllers.destination_controller import (
+ destination_suggestion_controller,
+)
+from langchain_core.runnables.config import RunnableConfig
+from src.utils.logger import logger
+
+
@tool
async def destination_recommendation(query: str, config: RunnableConfig):
    # NOTE: the docstring below is the tool schema exposed to the LLM; do not
    # edit it casually -- wording changes alter tool-selection behavior.
    """Call tool when user want to recommend a travel destination(tourist attractions, restaurants). Not require user typing anything.
    Args:
        query (str): query related to wanting to go somewhere. Auto extracted from user's message. Using Vietnamese language for better results.
    Output: A list of recommended destinations.
    """
    logger.info(f"Destination recommendation query: {query}")
    # Delegate to the shared controller; 3 is the number of suggestions
    # requested -- presumably a result limit; confirm against the controller.
    output = await destination_suggestion_controller(query, 3)
    return output
diff --git a/src/langgraph/tools/hotel_tools.py b/src/langgraph/tools/hotel_tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..f9b00c72edd4be07f6a413c8b5f1e44f16e30716
--- /dev/null
+++ b/src/langgraph/tools/hotel_tools.py
@@ -0,0 +1,127 @@
+from langchain_core.tools import tool
+from langchain_core.runnables.config import RunnableConfig
+from datetime import datetime
+from src.apis.controllers.location_controller import get_places
+from src.utils.helper import process_controller_output, format_accommodation_markdown
+from typing import Optional
+from src.apis.controllers.hotel_controller import (
+ book_hotel_controller,
+ send_booking_confirmation_email,
+)
+
+
@tool
def search_hotels(option: str, config: RunnableConfig):
    # NOTE: the docstring below is the tool schema exposed to the LLM.
    """
    Call this tool directly to search hotels for the user. No need to require asking for city location.
    Args:
        option (Literal["popular", "basic", "luxury"]): The type of hotel to search for. Options are "popular", "basic", "luxury"

    """

    # Coordinates are injected by the caller via the runnable config, not
    # requested from the user.
    configuration = config.get("configurable", {})
    lat = configuration.get("lat", None)
    long = configuration.get("long", None)
    # radius = configuration.get("radius", 5000)
    if lat is None or long is None:
        return "Please provide latitude and longitude"
    # The real place lookup is disabled; the commented code below appears to be
    # the intended implementation.
    # response = process_controller_output(
    #     get_places(lat, long, radius, "accommodation", 2)
    # )
    # return format_accommodation_markdown(response)
    # NOTE(review): currently returns a placeholder token instead of real
    # results -- looks like a stub for testing; confirm before release.
    return f"search_hotels_{option}"
+
+
@tool
async def book_hotel(
    hotel_email: str,
    hotel_name: str,
    address: str,
    phone_number: Optional[str],
    website: Optional[str],
    start_time: Optional[datetime],
    end_time: Optional[datetime],
    config: RunnableConfig,
):
    """
    Call this tool to book a hotel with the email of the hotel. No need require user's email.
    Args:
        hotel_email (str): Hotel booking email
        hotel_name (str): Hotel name
        address (str): Hotel address
        phone_number (Optional[str]): Hotel phone number
        website (Optional[str]): Hotel website
        start_time (datetime): Start time of the booking in the format "YYYY-MM-DDTHH:MM:SS.sss+00:00"
        end_time (datetime): End time of the booking in the format "YYYY-MM-DDTHH:MM:SS.sss+00:00"

    The start_time and end_time are Python `datetime` objects and will be stored as BSON Date objects in MongoDB.

    """
    # Robustness fix: both datetimes are Optional, but the original called
    # .strftime() unconditionally and crashed with AttributeError on None.
    # Return a message so the agent can ask for the missing dates instead.
    if start_time is None or end_time is None:
        return "Please provide both the start time and the end time of the booking"
    start_time_str = start_time.strftime("%Y-%m-%dT%H:%M:%S.%f%z")
    end_time_str = end_time.strftime("%Y-%m-%dT%H:%M:%S.%f%z")

    # User identity/contact come from the runnable config, not from the model.
    configuration = config.get("configurable", {})
    user_id = configuration.get("user_id", None)
    user_email = configuration.get("user_email", None)
    user_contact_number = configuration.get(
        "contact_number", "Does not have contact number"
    )
    # WARNING(review): test override -- the caller-supplied hotel_email is
    # discarded and every booking email is redirected to this hard-coded
    # address. Remove before production use.
    hotel_email = "baohtqe170017@fpt.edu.vn"
    response = await book_hotel_controller(
        hotel_email,
        hotel_name,
        address,
        phone_number,
        website,
        start_time_str,
        end_time_str,
        user_id,
    )
    if response["status"] == "error":
        return response["message"]
    # Confirmation is fire-and-forget (not awaited) -- presumably a sync
    # helper; confirm send_booking_confirmation_email is not a coroutine.
    send_booking_confirmation_email(
        user_email, user_contact_number, hotel_email, start_time, end_time
    )
    return "Hotel booked successfully"
+
+
async def cancel_hotel(
    receiver_email: str,
    room_number: Optional[str],
    reason: Optional[str],
    config: RunnableConfig,
) -> str:
    # NOTE(review): unlike its siblings (book_hotel, update_hotel) this
    # function is NOT decorated with @tool, yet its docstring says "Call this
    # tool". Confirm whether the decorator was forgotten or the tool is
    # deliberately disabled.
    """
    Call this tool to cancel a hotel booking with the given email.

    Args:
        receiver_email (str): Hotel booking email
        room_number (Optional[str]): Room number
        reason (Optional[str]): Reason for cancellation

    Returns:
        str: Confirmation message of the cancellation request.
    """
    # Guard: the booking email is mandatory for a cancellation request.
    if receiver_email is None:
        return "The hotel booking email is required"

    # No email is actually sent here; only a confirmation string is returned.
    return f"Email sent to {receiver_email} to cancel booking for room {room_number} with reason: {reason}"
+
+
@tool
async def update_hotel(
    receiver_email: str,
    room_number: Optional[str],
    content: Optional[str],
    config: RunnableConfig,
):
    # NOTE: the docstring below is the tool schema exposed to the LLM.
    """
    Call this tool to update a hotel booking with the given email
    Args:
        receiver_email (str): Hotel booking email
        room_number (Optional[str]): Room number
        content (Optional[str]): Updated content
    """
    # Guard: the booking email is mandatory for an update request.
    if receiver_email is None:
        return "The hotel booking email is required"
    # No email is actually sent here; only a confirmation string is returned.
    return f"Email sent to {receiver_email} to update booking for room {room_number} with content: {content}"
diff --git a/src/langgraph/tools/scheduling_tools.py b/src/langgraph/tools/scheduling_tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..9b921f99cb4c986a0062b29170ead67e78b64573
--- /dev/null
+++ b/src/langgraph/tools/scheduling_tools.py
@@ -0,0 +1,116 @@
+from langchain_core.tools import tool
+from langchain_core.runnables.config import RunnableConfig
+from datetime import datetime
+from src.utils.logger import logger
+from src.apis.controllers.scheduling_controller import (
+ create_a_activity_controller,
+ update_a_activity_controller,
+ delete_activities_controller,
+ search_activities_controller,
+)
+
+
@tool
async def create_a_activity(
    description: str,
    activity_category: str,
    start_time: datetime,
    end_time: datetime,
    config: RunnableConfig,
) -> str:
    # NOTE: the docstring below is the tool schema exposed to the LLM.
    """Create an activity by extracting details from conversation context if available

    Args:
        description (str): Concise description of the activity. Not too detailing.
        activity_category (str): value in list ["work", "study", "relax", "exercise", "other"] classifying the activity based on user's description. Not required user typing the category, it can be extracted from the description. If not sure then return 'other'.
        start_time (datetime): Activity's start time in the format "YYYY-MM-DD HH:MM:SS".
        end_time (datetime): Activity's end time in the format "YYYY-MM-DD HH:MM:SS".
    """
    try:
        # Convert start_time and end_time to the required format
        # user_id comes from the runnable config, not from the model.
        configuration = config.get("configurable", {})
        user_id = configuration.get("user_id", None)
        # First positional argument is None here -- presumably an activity id
        # that does not exist yet on creation; confirm against the controller.
        response = await create_a_activity_controller(
            None, activity_category, description, start_time, end_time, user_id
        )
        return response["message"]
    except Exception as e:
        # Return the error as text so the agent can recover instead of crashing.
        logger.error(f"Error creating activity: {e}")
        return f"Error creating activity {e}"
+
+
@tool
async def search_activities(
    start_time: datetime,
    end_time: datetime,
    config: RunnableConfig,
) -> str:
    # NOTE: the docstring below is the tool schema exposed to the LLM; the
    # dangling "Using" line looks like a truncated sentence but is kept
    # byte-for-byte to avoid changing the schema.
    """Search for activities by extracting details from conversation context if available or asking the user for the details.
    Using
    Args:
        start_time (datetime): Activity's start time in the format "YYYY-MM-DD HH:MM:SS". Ask the user for the start time of the activity.
        end_time (datetime): Activity's end time in the format "YYYY-MM-DD HH:MM:SS". Ask the user if they not mentioned the end time of the activity.
    """
    try:
        # user_id comes from the runnable config, not from the model.
        configuration = config.get("configurable", {})
        user_id = configuration.get("user_id", None)
        response = await search_activities_controller(start_time, end_time, user_id)
        return response["message"]
    except Exception as e:
        # Return the error as text so the agent can recover instead of crashing.
        logger.error(f"Error searching activities: {e}")
        return f"Error searching activities {e}"
+
+
# NOTE(review): "activiy" is a pre-existing misspelling; since @tool exposes
# the function name as the tool name, renaming it would change the external
# interface -- kept as-is.
@tool
async def update_a_activiy(
    description: str,
    activity_category: str,
    start_time: datetime,
    end_time: datetime,
    config: RunnableConfig,
) -> str:
    # NOTE: the docstring below is the tool schema exposed to the LLM.
    """Update an activity by extracting details from conversation context if available or asking the user for the details.

    description (str): Concise description of the activity. Not too detailing.
    activity_category (str): value in list ["work", "study", "relax", "exercise", "other"] classifying the activity based on user's description. Not required user typing the category, it can be extracted from the description. If not sure then return 'other'.
    start_time (datetime): Activity's start time in the format "YYYY-MM-DD HH:MM:SS".
    end_time (datetime): Activity's end time in the format "YYYY-MM-DD HH:MM:SS".
    """
    try:
        # user_id comes from the runnable config, not from the model.
        configuration = config.get("configurable", {})
        user_id = configuration.get("user_id", None)
        # First positional argument is None here -- presumably an activity id;
        # confirm how the controller locates the activity to update.
        response = await update_a_activity_controller(
            None,
            activity_category,
            description,
            start_time,
            end_time,
            user_id,
        )
        return response["message"]
    except Exception as e:
        # Return the error as text so the agent can recover instead of crashing.
        logger.error(f"Error updating activity: {e}")
        return f"Error updating activity {e}"
+
+
@tool
async def delete_a_activity(
    start_time: datetime,
    end_time: datetime,
    config: RunnableConfig,
) -> str:
    # NOTE: the docstring below is the tool schema exposed to the LLM.
    """Delete an activity by extracting details from conversation context if available or asking the user for the details.
    Args:
        start_time (datetime): Activity's start time in the format "YYYY-MM-DD HH:MM:SS".
        end_time (datetime): Activity's end time in the format "YYYY-MM-DD HH:MM:SS".
    """
    try:
        # user_id comes from the runnable config, not from the model.
        configuration = config.get("configurable", {})
        user_id = configuration.get("user_id", None)
        # First positional argument is None here -- presumably an activity id;
        # deletion appears keyed on the time window instead. Confirm.
        response = await delete_activities_controller(
            None, start_time, end_time, user_id
        )
        return response["message"]
    except Exception as e:
        # Return the error as text so the agent can recover instead of crashing.
        logger.error(f"Error deleting activity: {e}")
        return f"Error deleting activity {e}"
diff --git a/src/langgraph/tools/search_tools.py b/src/langgraph/tools/search_tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..068211d8f44d9619aeca03bb88eeba8dce420ec6
--- /dev/null
+++ b/src/langgraph/tools/search_tools.py
@@ -0,0 +1,39 @@
+from bs4 import BeautifulSoup
+from langchain_community.document_loaders.recursive_url_loader import RecursiveUrlLoader
+from langchain_core.prompts import PromptTemplate
+from langchain_community.tools.tavily_search import TavilySearchResults
+from langchain_core.tools import tool
+from src.langgraph.langchain.llm import llm_flash
+from src.utils.logger import logger
+from langchain_community.tools import DuckDuckGoSearchResults
+
# Tavily web search, capped at a single result. Not used by the summarize tool
# below in this file; presumably consumed elsewhere -- confirm before removing.
search_tavily = TavilySearchResults(max_results=1)
# DuckDuckGo search returning a list of result dicts (each exposing at least a
# "snippet" key, as read by search_and_summarize_website).
search_duck = DuckDuckGoSearchResults(output_format="list", max_results=5)
+
+
@tool
async def search_and_summarize_website(query: str):
    # NOTE: the docstring below is the tool schema exposed to the LLM; kept
    # byte-for-byte (including the stray quotation mark) so the tool
    # description does not change.
    """A search engine optimized for comprehensive, accurate, and trusted results.
    Useful for when you need to answer questions about current events.
    Input should be a search query."
    Args:
        query (str): The search query for search engine. Using Vietnamese language for better results.
    """

    results = search_duck.invoke(query)
    # Number the snippets so the summarizer can reference them; enumerate
    # already yields ints, so the original int() cast was redundant.
    content = "\n".join(
        f"Snippet {index + 1}: {r.get('snippet')}" for index, r in enumerate(results)
    )

    # Bug fix: the template previously used over-escaped "\\n\\n", injecting a
    # literal backslash-n sequence into the LLM prompt instead of blank lines.
    prompt = PromptTemplate.from_template(
        "Write a concise summary of the following:\n\n{context}"
    )

    chain = prompt | llm_flash
    results = await chain.ainvoke({"context": content})

    return results.content
diff --git a/src/langgraph/utils_function/__pycache__/__init__.cpython-311.pyc b/src/langgraph/utils_function/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..31f7a17c83333dff7587608876a926402b6dac34
Binary files /dev/null and b/src/langgraph/utils_function/__pycache__/__init__.cpython-311.pyc differ
diff --git a/src/langgraph/utils_function/__pycache__/function_graph.cpython-311.pyc b/src/langgraph/utils_function/__pycache__/function_graph.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a0449e3af6f9f87297ccf2adc51d73380689f481
Binary files /dev/null and b/src/langgraph/utils_function/__pycache__/function_graph.cpython-311.pyc differ
diff --git a/src/langgraph/utils_function/__pycache__/helpers.cpython-311.pyc b/src/langgraph/utils_function/__pycache__/helpers.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e152c55ab2449b3bae79fe361ce1d9dfeef25d6c
Binary files /dev/null and b/src/langgraph/utils_function/__pycache__/helpers.cpython-311.pyc differ
diff --git a/src/langgraph/utils_function/__pycache__/logger.cpython-311.pyc b/src/langgraph/utils_function/__pycache__/logger.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..41c91701f95407306995be8c25014bc5a0c781d7
Binary files /dev/null and b/src/langgraph/utils_function/__pycache__/logger.cpython-311.pyc differ
diff --git a/src/langgraph/utils_function/function_graph.py b/src/langgraph/utils_function/function_graph.py
new file mode 100644
index 0000000000000000000000000000000000000000..e8f1b4a6b44532a37e4972307b4fa1707f83a5ea
--- /dev/null
+++ b/src/langgraph/utils_function/function_graph.py
@@ -0,0 +1,142 @@
+from langchain_core.messages import (
+ BaseMessage,
+ ToolMessage,
+ AIMessage,
+ HumanMessage,
+ trim_messages,
+)
+from typing import Union
+from langgraph.prebuilt import ToolNode
+from langchain_core.runnables import RunnableLambda
+from src.langgraph.state import State
+from src.utils.mongo import chat_messages_history
+from src.utils.logger import logger
+
+
def fake_token_counter(messages: Union[list[BaseMessage], BaseMessage]) -> int:
    """Approximate a token count as the whitespace-separated word count.

    Accepts a single message or a list of messages; used as the cheap
    `token_counter` for history trimming.
    """
    if not isinstance(messages, list):
        return len(messages.content.split())
    total = 0
    for message in messages:
        total += len(message.content.split())
    return total
+
+
def create_tool_node_with_fallback(tools: list) -> dict:
    """Wrap a ToolNode so tool exceptions route to `handle_tool_error`.

    The raised exception is delivered to the fallback under the "error" key,
    letting the graph answer with ToolMessages instead of crashing.

    NOTE(review): the `-> dict` return annotation looks wrong -- this returns
    the runnable produced by `with_fallbacks(...)`, not a dict.
    """
    return ToolNode(tools).with_fallbacks(
        [RunnableLambda(handle_tool_error)], exception_key="error"
    )
+
+
def handle_tool_error(state: State) -> dict:
    """Fallback for failed tool execution: surface the error to the model.

    Emits one ToolMessage per pending tool call on the last message so the
    LLM can see the failure and try to correct itself.
    """
    error = state.get("error")
    last_message = state["messages"][-1]
    replies = []
    for tool_call in last_message.tool_calls:
        replies.append(
            ToolMessage(
                content=f"Error: {repr(error)}\n please fix your mistakes.",
                tool_call_id=tool_call["id"],
            )
        )
    return {"messages": replies}
+
+
async def get_history(state: State, config):
    """Load prior chat history (from state or storage) and trim it to budget.

    Returns a state update for `messages_history`; any failure degrades to an
    empty history rather than failing the graph run.
    """
    logger.info("Get history node")
    # Prefer history already carried in the state; otherwise fetch by session.
    history = state["messages_history"] if state.get("messages_history") else None
    try:
        if history is None:
            session_id = config.get("configurable", {}).get("session_id")
            history = await chat_messages_history(session_id).aget_messages()
        # logger.info(f"Chat history: {history}")
        if not history:
            return {"messages_history": []}
        # Keep only the most recent turns within a ~4000 "token" budget
        # (word-count approximation via fake_token_counter), starting on a
        # human message and ending on an AI message, whole messages only.
        chat_message_history = trim_messages(
            history,
            strategy="last",
            token_counter=fake_token_counter,
            max_tokens=4000,
            start_on="human",
            end_on="ai",
            include_system=False,
            allow_partial=False,
        )
        # logger.info(f"Chat history: {chat_message_history}")
    except Exception as e:
        # Best-effort: a history failure must not break the conversation.
        logger.error(f"Error getting chat history: {e}")
        chat_message_history = []

    return {"messages_history": chat_message_history}
+
+
async def save_history(state: State, config):
    """Persist this turn's first human message and final AI reply.

    Skipped entirely when `manual_save` is falsy. Always returns an empty
    `messages` update.
    """
    if not state["manual_save"]:
        return {"messages": []}
    message = state["messages"]
    # Assumes messages[0] is the user's input for the turn and messages[-1]
    # the final output -- TODO confirm this holds on every graph path.
    user_input = message[0].content
    final_output = message[-1]
    session_id = config.get("configurable", {}).get("session_id")
    messages_add_to_history = [HumanMessage(user_input)]
    # Only store the reply when the run ended with an AI message -- presumably
    # to skip turns that ended mid-tool-call; confirm.
    if isinstance(final_output, AIMessage):
        messages_add_to_history.append(AIMessage(final_output.content))
    history = chat_messages_history(session_id)
    await history.aadd_messages(messages_add_to_history)
    return {"messages": []}
+
+
def human_review_node(state: State):
    """Ask the user to approve the pending tool calls.

    Renders every tool call on the last message as markdown and returns it as
    an AIMessage, with `accept` reset to False until the user answers.
    """
    logger.info("Human review node")
    pending_calls = state["messages"][-1].tool_calls
    user_message: HumanMessage = state["messages"][0]
    logger.info(f"User message: {user_message}")

    def render(call) -> str:
        # One markdown section per tool call: readable name plus arguments.
        arg_lines = "\n".join(
            f"#### {key.replace('_', ' ')}: {value}"
            for key, value in call["args"].items()
        )
        readable_name = call["name"].replace("_", " ")
        return f"""
**Tool calling**: {readable_name}

**Arguments**:\n
{arg_lines}
"""

    format_message = (
        "#### Do you want to run the following tool(s)?\n\n"
        f"{chr(10).join(render(call) for call in pending_calls)}\n\n"
        "Enter **'y'** to run or **'n'** to cancel:"
    )

    return {"messages": [AIMessage(format_message)], "accept": False}
+
+
def format_accommodation_markdown(data):
    """Render accommodation entries as a markdown document.

    Each entry produces a `###` heading with address and distance, plus
    optional contact, website and accommodation-info sections, separated by
    horizontal rules.
    """
    parts = []
    for item in data:
        parts.append(f"### {item['Accommodation Name']}\n")
        parts.append(f"- **Address:** {item['Address']}\n")
        parts.append(f"- **Distance from center:** {item['distance_km']}\n")

        contact = item.get("contact")
        if contact:
            parts.append("- **Contact:**\n")
            if "phone" in contact:
                parts.append(f" - Phone: {contact['phone']}\n")
            if "email" in contact:
                parts.append(f" - Email: {contact['email']}\n")

        if "website" in item:
            parts.append(f"- **Website:** [{item['website']}]({item['website']})\n")

        lodging = item.get("accommodation")
        if lodging:
            parts.append("- **Accommodation Info:**\n")
            if "stars" in lodging:
                parts.append(f" - Stars: {lodging['stars']}\n")
            if "rooms" in lodging:
                parts.append(f" - Rooms: {lodging['rooms']}\n")

        parts.append("\n---\n\n")

    return "".join(parts)
diff --git a/src/utils/__pycache__/helper.cpython-311.pyc b/src/utils/__pycache__/helper.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a699295731332e6fba625b34f150d25ac0936c79
Binary files /dev/null and b/src/utils/__pycache__/helper.cpython-311.pyc differ
diff --git a/src/utils/__pycache__/logger.cpython-311.pyc b/src/utils/__pycache__/logger.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9033dca4b4fe9d13725340146a873ae497736232
Binary files /dev/null and b/src/utils/__pycache__/logger.cpython-311.pyc differ
diff --git a/src/utils/__pycache__/mongo.cpython-311.pyc b/src/utils/__pycache__/mongo.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d988bf47692085903227d44c12b1818301ee365b
Binary files /dev/null and b/src/utils/__pycache__/mongo.cpython-311.pyc differ
diff --git a/src/utils/__pycache__/redis.cpython-311.pyc b/src/utils/__pycache__/redis.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..74b88a31af11f71266976717370bccc72c574d35
Binary files /dev/null and b/src/utils/__pycache__/redis.cpython-311.pyc differ
diff --git a/src/utils/helper.py b/src/utils/helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a13103ffc58b6fea8613a9b1e3f492e773e134e
--- /dev/null
+++ b/src/utils/helper.py
@@ -0,0 +1,248 @@
+from fastapi.responses import JSONResponse
+import json
+import math
+import traceback
+from datetime import datetime, timezone
+import re
+from src.utils.logger import logger
+from pydantic import BaseModel, Field
+from typing import List, Optional, Union
+
+
def handle_validator_raise(func):
    """Decorator that logs any exception raised by ``func`` instead of propagating it.

    On success the wrapped function's return value is passed through
    unchanged. On failure, the exception type, message and source location
    are logged via the module logger and ``None`` is returned — callers must
    treat a ``None`` result as failure.
    """
    from functools import wraps  # local import keeps the module's import surface unchanged

    @wraps(func)  # BUG FIX: preserve the wrapped function's name/docstring
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            frames = traceback.extract_tb(e.__traceback__)
            # Prefer the frame inside the decorated function (index 1);
            # fall back to the only frame when the raise happened immediately.
            tb = frames[1] if len(frames) > 1 else frames[0]
            error_info = {
                "error": type(e).__name__,
                "message": str(e),
                "step": tb.name,
                "line": tb.line,
                "filename": tb.filename,
                "lineno": tb.lineno,
            }
            logger.error(f"Exception: {error_info}")
            # Falls through to an implicit ``return None`` — preserved
            # because the validators below rely on it.

    return wrapper
+
+
def process_controller_output(ouput: JSONResponse):
    """Decode the JSON body of a successful controller response.

    Responses with status 200 or 201 are parsed from their UTF-8 body into
    Python objects; any other status yields the literal string "Error".
    """
    if ouput.status_code not in (200, 201):
        return "Error"
    return json.loads(ouput.body.decode("utf-8"))
+
+
def format_weather_data(weather_data):
    """Render an OpenWeather "one call"-style payload as human-readable text.

    Args:
        weather_data: dict with ``lat``, ``lon`` and a ``current`` section
            (temperatures in Kelvin, as returned by the API).

    Returns:
        A multi-line summary string, or an error-message string when a
        required key is missing.
    """
    try:
        current_weather = weather_data["current"]
        location = f"Latitude: {weather_data['lat']}, Longitude: {weather_data['lon']}"
        formatted_weather = f"In {location}, the current weather is as follows:\n"
        formatted_weather += (
            f"Detailed status: {current_weather['weather'][0]['description']}\n"
        )
        formatted_weather += f"Wind speed: {current_weather['wind_speed']} m/s, direction: {current_weather['wind_deg']}°\n"
        formatted_weather += f"Humidity: {current_weather['humidity']}%\n"
        formatted_weather += "Temperature:\n"
        # API temperatures are Kelvin; convert to Celsius for display.
        formatted_weather += f"  - Current: {current_weather['temp'] - 273.15:.2f}°C\n"
        formatted_weather += (
            f"  - Feels like: {current_weather['feels_like'] - 273.15:.2f}°C\n"
        )
        # BUG FIX: the no-rain branch used to emit the unfilled placeholder
        # text "Rain: {}"; report an explicit 0 mm instead.
        rain_mm = current_weather.get("rain", {}).get("1h", 0)
        formatted_weather += f"Rain: {rain_mm} mm\n"
        formatted_weather += f"Cloud cover: {current_weather['clouds']}%\n"
        return formatted_weather
    except Exception as e:
        return f"Error formatting weather data: {e}"
+
+
def haversine(lon1, lat1, lon2, lat2):
    """Great-circle distance in kilometres between two lon/lat points (degrees)."""
    EARTH_RADIUS_KM = 6371.0
    rlon1, rlat1, rlon2, rlat2 = (math.radians(v) for v in (lon1, lat1, lon2, lat2))
    half_dlat = (rlat2 - rlat1) / 2
    half_dlon = (rlon2 - rlon1) / 2
    # Haversine formula: chord is sin²(Δφ/2) + cosφ1·cosφ2·sin²(Δλ/2).
    chord = (
        math.sin(half_dlat) ** 2
        + math.cos(rlat1) * math.cos(rlat2) * math.sin(half_dlon) ** 2
    )
    return EARTH_RADIUS_KM * 2 * math.atan2(math.sqrt(chord), math.sqrt(1 - chord))
+
+
def format_geoapify_response(
    response, current_long, current_lat, include_latnlong=False
):
    """Flatten Geoapify place features into simple result dicts.

    Args:
        response: iterable of GeoJSON-like features (``geometry`` plus
            ``properties``).
        current_long / current_lat: reference point used for the distance.
        include_latnlong: when True, each item also carries its coordinates.

    Returns:
        List of dicts holding name, address, distance, and any of the
        optional contact / website / accommodation details present.
    """
    results = []
    for feature in response:
        lon, lat = feature["geometry"]["coordinates"][:2]
        props = feature["properties"]

        item = {}
        if include_latnlong:
            item["lat"] = lat
            item["lon"] = lon
        item["Accommodation Name"] = props["address_line1"]
        item["Address"] = props["formatted"]
        km = round(haversine(current_long, current_lat, lon, lat), 2)
        item["distance_km"] = str(km) + " km"

        # Copy through the optional sections only when present.
        for optional_key in ("contact", "website", "accommodation"):
            if optional_key in props:
                item[optional_key] = props[optional_key]

        results.append(item)

    return results
+
+
def format_accommodation_markdown(data):
    """Render accommodation entries (from format_geoapify_response) as Markdown."""
    sections = []
    for entry in data:
        lines = [
            f"### {entry['Accommodation Name']}\n",
            f"- **Address:** {entry['Address']}\n",
            f"- **Distance from center:** {entry['distance_km']}\n",
        ]

        contact = entry.get("contact")
        if contact:
            lines.append("- **Contact:**\n")
            if "phone" in contact:
                lines.append(f"  - Phone: {contact['phone']}\n")
            if "email" in contact:
                lines.append(f"  - Email: {contact['email']}\n")

        if "website" in entry:
            lines.append(f"- **Website:** [{entry['website']}]({entry['website']})\n")

        accommodation = entry.get("accommodation")
        if accommodation:
            lines.append("- **Accommodation Info:**\n")
            if "stars" in accommodation:
                lines.append(f"  - Stars: {accommodation['stars']}\n")
            if "rooms" in accommodation:
                lines.append(f"  - Rooms: {accommodation['rooms']}\n")

        lines.append("\n---\n\n")
        sections.append("".join(lines))

    return "".join(sections)
+
+
@handle_validator_raise
def convert_string_date_to_iso(input_str: str):
    """Parse an ISO-8601 string with UTC offset into a UTC-tagged datetime.

    NOTE(review): the parsed offset is *discarded* — the wall-clock fields
    are re-tagged as UTC rather than converted to UTC (e.g.
    "2024-01-01T10:00:00+07:00" becomes 10:00 UTC, not 03:00 UTC). This
    looks intentional ("raw" datetime) — confirm with the callers.

    Because of the handle_validator_raise decorator, the ValueErrors raised
    here are logged and surface to callers as a ``None`` return value.
    """
    if not input_str:
        raise ValueError("Input date string cannot be empty")
    try:
        # Expects e.g. "2024-01-01T10:00:00+07:00" (%z parses the offset).
        converted_datetime = datetime.strptime(input_str.strip(), "%Y-%m-%dT%H:%M:%S%z")
        # Rebuild with the same wall-clock components, tagged as UTC.
        raw_datetime = datetime(
            year=converted_datetime.year,
            month=converted_datetime.month,
            day=converted_datetime.day,
            hour=converted_datetime.hour,
            minute=converted_datetime.minute,
            second=converted_datetime.second,
            tzinfo=timezone.utc,
        )
        return raw_datetime
    except ValueError as e:
        raise ValueError(
            f"Invalid date format. Expected format: YYYY-MM-DDThh:mm:ss+hh:mm, got: {input_str}"
        )
    except Exception as e:
        raise ValueError(f"Error converting date string: {str(e)}")
+
+
@handle_validator_raise
def datetime_to_iso_string(dt: datetime) -> str:
    """Format *dt* as an ISO-like string: YYYY-MM-DDTHH:MM:SS (no offset).

    Args:
        dt (datetime): The datetime object to convert.

    Returns:
        str: The formatted datetime string (``None`` on failure, via the
        handle_validator_raise decorator).
    """
    return dt.strftime("%Y-%m-%dT%H:%M:%S")
+
+
def parse_itinerary(text):
    """Parse itinerary text into structured days plus trailing notes.

    The text contains DD/MM/YYYY date headings, each followed by activity
    lines of the form "(09:00 - 10:00): description", optionally ending
    with an "Additional information:" section.

    Returns:
        {"itinerary": [{"date": str, "activities": [{"start_time",
        "end_time", "description"}]}], "additional_info": str}
    """
    date_pattern = r"(\d{2}/\d{2}/\d{4})"
    activity_pattern = re.compile(r"\((\d{1,2}:\d{2}) - (\d{1,2}:\d{2})\):\s*(.+)")

    # Everything after "Additional information:" is kept verbatim.
    info_match = re.search(r"Additional information:(.*)", text, re.DOTALL)
    additional_info = info_match.group(1).strip() if info_match else ""

    # re.split with one capture group yields [prefix, date, body, date,
    # body, ...] — always odd length, so the pairing below is safe.
    chunks = re.split(date_pattern, text)
    itinerary = []
    for date_str, body in zip(chunks[1::2], chunks[2::2]):
        activities = [
            {"start_time": start, "end_time": end, "description": desc}
            for start, end, desc in activity_pattern.findall(body.strip())
        ]
        itinerary.append({"date": date_str.strip(), "activities": activities})

    return {"itinerary": itinerary, "additional_info": additional_info}
+
+
class Destination(BaseModel):
    """A travel destination as exchanged with the itinerary/prompt helpers."""

    # Positive numeric identifier of the destination.
    id: int = Field(..., title="Destination Id", gt=0)
    # Human-readable destination name (non-empty).
    name: str = Field(..., title="Destination Name", min_length=1)
    # Free-form location string, e.g. a city or region (non-empty).
    location: str = Field(..., title="Location", min_length=1)
    # Short description used when rendering prompt text (non-empty).
    description: str = Field(..., title="Description", min_length=1)
+
+
def format_include_destinations(include_destinations: Optional[List[Destination]]):
    """Render must-visit destinations as a numbered prompt fragment.

    Args:
        include_destinations: destinations to include, or None/empty.

    Returns:
        A multi-line string with one numbered block per destination, or
        the literal "No destinations required" when the list is absent or
        empty.
    """
    if not include_destinations:
        return "No destinations required"
    formatted_string = ""
    # enumerate(start=1) replaces the redundant ``int(index) + 1`` cast.
    for number, destination in enumerate(include_destinations, start=1):
        formatted_string += f"#Destination {number}: {destination.name}\n"
        formatted_string += f" Description: {destination.description}\n\n"
    return formatted_string
diff --git a/src/utils/logger.py b/src/utils/logger.py
new file mode 100644
index 0000000000000000000000000000000000000000..030dbdb522ad48eb2ae322b749e583a57c9b14d3
--- /dev/null
+++ b/src/utils/logger.py
@@ -0,0 +1,65 @@
+import logging
+import os
+from datetime import datetime
+from pathlib import Path
+
+import pytz
+
+
class CoreCFG:
    """Static project-wide identity constants used by the logger."""

    PROJECT_NAME = "SCHEDULE AI"
    # Redundant ``str(...)`` wrapper removed: the literal is already a str.
    BOT_NAME = "SCHEDULE AI"
+
+
def get_date_time():
    """Return the current timezone-aware datetime in Asia/Ho_Chi_Minh."""
    local_tz = pytz.timezone("Asia/Ho_Chi_Minh")
    return datetime.now(local_tz)
+
+
# Date stamp captured once at import time ("today" in Asia/Ho_Chi_Minh).
DATE_TIME = get_date_time().date()
# Repository root: src/utils/logger.py -> src/utils -> src -> dirname(src).
BASE_DIR = os.path.dirname(Path(__file__).parent.parent)
# Target directory for log files.
# NOTE(review): nothing in this module creates or writes to it — confirm usage.
LOG_DIR = os.path.join(BASE_DIR, "logs")
+
+
class CustomFormatter(logging.Formatter):
    """Colourised console formatter: one ANSI colour per log level.

    All levels share ``base_format`` and differ only in the colour applied
    to the level name.
    """

    green = "\x1b[0;32m"
    grey = "\x1b[38;5;248m"
    yellow = "\x1b[38;5;229m"
    red = "\x1b[31;20m"
    bold_red = "\x1b[31;1m"
    blue = "\x1b[38;5;31m"
    white = "\x1b[38;5;255m"
    reset = "\x1b[38;5;15m"

    base_format = f"{grey}%(asctime)s | %(name)s | %(threadName)s | {{level_color}}%(levelname)-8s{grey} | {blue}%(module)s:%(lineno)d{grey} - {white}%(message)s"

    FORMATS = {
        logging.INFO: base_format.format(level_color=green),
        logging.WARNING: base_format.format(level_color=yellow),
        logging.ERROR: base_format.format(level_color=red),
        logging.CRITICAL: base_format.format(level_color=bold_red),
    }

    # PERF: build one Formatter per level up front instead of allocating a
    # fresh Formatter object for every record formatted.
    _FORMATTERS = {level: logging.Formatter(fmt) for level, fmt in FORMATS.items()}

    def format(self, record):
        formatter = self._FORMATTERS.get(record.levelno)
        if formatter is None:
            # Unknown levels (e.g. DEBUG) keep the original behaviour of
            # Formatter(FORMATS.get(...)) i.e. logging's default layout.
            formatter = logging.Formatter(self.FORMATS.get(record.levelno))
        return formatter.format(record)
+
+
def custom_logger(app_name="APP"):
    """Create (or fetch) a named logger with the coloured console handler.

    Args:
        app_name: logger name passed to ``logging.getLogger``.

    Returns:
        A logger at INFO level with a single colourised StreamHandler.
    """
    logger_r = logging.getLogger(name=app_name)

    # Timestamps are rendered in Ho Chi Minh time regardless of host TZ.
    # NOTE: this mutates the *global* logging.Formatter.converter.
    tz = pytz.timezone("Asia/Ho_Chi_Minh")
    logging.Formatter.converter = lambda *args: datetime.now(tz).timetuple()

    logger_r.setLevel(logging.INFO)

    # BUG FIX: only attach a handler once; the original added a new
    # StreamHandler on every call, duplicating every log line when
    # custom_logger() was invoked repeatedly for the same name.
    if not logger_r.handlers:
        ch = logging.StreamHandler()
        ch.setLevel(logging.INFO)
        ch.setFormatter(CustomFormatter())
        logger_r.addHandler(ch)

    return logger_r
+
+
+logger = custom_logger(app_name=CoreCFG.PROJECT_NAME)
diff --git a/src/utils/mongo.py b/src/utils/mongo.py
new file mode 100644
index 0000000000000000000000000000000000000000..84c1fc83e6538de9d4a2081c2dd2b304f35010df
--- /dev/null
+++ b/src/utils/mongo.py
@@ -0,0 +1,151 @@
+from src.langgraph.config.constant import MongoCfg, RedisCfg
+from src.utils.logger import logger
+from langchain_mongodb.chat_message_histories import MongoDBChatMessageHistory
+from langchain_redis import RedisChatMessageHistory
+from motor.motor_asyncio import AsyncIOMotorClient
+from pydantic import BaseModel
+from typing import Type, Dict, List, Optional
+from bson import ObjectId
+from motor.motor_asyncio import AsyncIOMotorCollection
+from datetime import datetime, timezone, timedelta
+from src.apis.models.post_models import Comment, Like, Post
+from src.apis.models.destination_models import Destination
+from src.apis.models.schedule_models import Schedule
+from src.apis.models.hotel_models import BookHotel
+from src.apis.interfaces.api_interface import ChatHistoryManagement
+from src.apis.models.user_models import User
+
+
# Shared async Mongo client/database handles, created once at import time.
client: AsyncIOMotorClient = AsyncIOMotorClient(MongoCfg.MONGODB_URL)
database = client[MongoCfg.MONGO_INDEX]
+
+
class MongoCRUD:
    """Generic async CRUD helper bound to one Mongo collection and model.

    Every write is validated through ``model`` (a pydantic class), and reads
    are round-tripped through the model as well so returned documents always
    match the schema. When ``ttl_seconds`` is set, an ``expire_at`` field is
    stamped on writes and a TTL index lets MongoDB purge expired documents.
    """

    def __init__(
        self,
        collection: AsyncIOMotorCollection,
        model: Type[BaseModel],
        ttl_seconds: Optional[int] = None,
    ):
        # Motor collection this instance operates on.
        self.collection = collection
        # Pydantic model used to validate/serialize documents.
        self.model = model
        # Optional document lifetime in seconds; None disables expiry.
        self.ttl_seconds = ttl_seconds
        # Set once the TTL index has been created (lazy, first write).
        self._index_created = False

    async def _ensure_ttl_index(self):
        """Ensure TTL index exists"""
        if self.ttl_seconds is not None and not self._index_created:
            # expireAfterSeconds=0: documents expire exactly at ``expire_at``.
            await self.collection.create_index("expire_at", expireAfterSeconds=0)
            self._index_created = True

    def _order_fields(self, doc: Dict) -> Dict:
        """Order fields in the document to ensure created_at and updated_at are at the end.

        Also maps a model-level ``id`` onto Mongo's ``_id`` (as an ObjectId).
        """
        ordered_doc = {
            k: doc[k] for k in doc if k not in ["created_at", "updated_at", "expire_at"]
        }
        if "id" in doc:
            ordered_doc["_id"] = ObjectId(doc["id"])
        if "created_at" in doc:
            ordered_doc["created_at"] = doc["created_at"]
        if "updated_at" in doc:
            ordered_doc["updated_at"] = doc["updated_at"]
        if "expire_at" in doc:
            ordered_doc["expire_at"] = doc["expire_at"]
        return ordered_doc

    async def create(self, data: Dict) -> str:
        """Create a new document in the collection asynchronously, optionally using a user-specified ID.

        Returns the new document's ObjectId as a string.
        NOTE: mutates the caller's ``data`` dict in place (timestamps).
        """
        await self._ensure_ttl_index()
        now = datetime.now(timezone.utc)
        data["created_at"] = now
        data["updated_at"] = now
        if self.ttl_seconds is not None:
            data["expire_at"] = now + timedelta(seconds=self.ttl_seconds)
        # exclude_unset keeps fields the caller never provided out of Mongo.
        document = self.model(**data).model_dump(exclude_unset=True)
        ordered_document = self._order_fields(document)
        result = await self.collection.insert_one(ordered_document)
        return str(result.inserted_id)

    async def read(self, query: Dict) -> List[Dict]:
        """Read documents from the collection based on a query asynchronously."""
        cursor = self.collection.find(query)
        docs = []
        async for doc in cursor:
            # Re-validate through the model and stringify the ObjectId.
            docs.append(
                {
                    "_id": str(doc["_id"]),
                    **self._order_fields(self.model(**doc).model_dump(exclude={"id"})),
                }
            )
        return docs

    async def read_one(self, query: Dict) -> Optional[Dict]:
        """Read a single document from the collection based on a query asynchronously."""
        doc = await self.collection.find_one(query)
        if doc:
            doc["_id"] = str(doc["_id"])
            return {
                "_id": doc["_id"],
                **self._order_fields(self.model(**doc).model_dump(exclude={"id"})),
            }
        return None

    async def update(self, query: Dict, data: Dict) -> int:
        """Update documents in the collection based on a query asynchronously.

        Returns the number of modified documents.
        NOTE(review): ``data`` is re-validated as a full model instance, so a
        partial update must still satisfy the model's required fields —
        confirm callers always pass complete payloads.
        """
        await self._ensure_ttl_index()
        data["updated_at"] = datetime.now(timezone.utc)
        if self.ttl_seconds is not None:
            data["expire_at"] = data["updated_at"] + timedelta(seconds=self.ttl_seconds)
        update_data = self.model(**data).model_dump(exclude_unset=True)
        ordered_update = self._order_fields(update_data)
        result = await self.collection.update_many(query, {"$set": ordered_update})
        return result.modified_count

    async def delete(self, query: Dict) -> int:
        """Delete documents from the collection based on a query asynchronously.

        Returns the number of deleted documents.
        """
        result = await self.collection.delete_many(query)
        return result.deleted_count

    async def find_by_id(self, id: str) -> Optional[Dict]:
        """Find a document by its ID asynchronously."""
        return await self.read_one({"_id": ObjectId(id)})

    async def find_all(self) -> List[Dict]:
        """Find all documents in the collection asynchronously."""
        return await self.read({})
+
+
def chat_messages_history(
    session_id: str, number_of_messages: int = MongoCfg.MAX_HISTORY_SIZE, db="mongo"
):
    """Return a LangChain chat-history backend for ``session_id``.

    Args:
        session_id: conversation key; an empty value falls back to a fixed
            default id.
        number_of_messages: history window size (Mongo backend only).
        db: "redis" selects RedisChatMessageHistory; anything else Mongo.

    NOTE(review): the hard-coded fallback session id means every caller with
    a missing id shares one history — confirm this is acceptable.
    """
    if not session_id:
        session_id = "12345678910"
        logger.warning("Session ID not provided, using default session ID")
    if db == "redis":
        return RedisChatMessageHistory(
            session_id=session_id,
            redis_url=RedisCfg.REDIS_URL,
            # ttl=605000,
            ttl=40000,  # seconds (~11 hours) before the Redis history expires
        )
    return MongoDBChatMessageHistory(
        session_id=session_id,
        connection_string=MongoCfg.MONGODB_URL,
        database_name=MongoCfg.MONGO_INDEX,
        collection_name=MongoCfg.CHAT_HISTORY,
        history_size=number_of_messages,
    )
+
+
# Per-collection CRUD singletons consumed by the API layer.
BookHotelCRUD = MongoCRUD(database[MongoCfg.BOOK_HOTEL], BookHotel)
ScheduleCRUD = MongoCRUD(database[MongoCfg.ACTIVITY], Schedule)
UserCRUD = MongoCRUD(database[MongoCfg.USER], User)
PostCRUD = MongoCRUD(database[MongoCfg.POST], Post)
LikeCRUD = MongoCRUD(database[MongoCfg.LIKE], Like)
CommentCRUD = MongoCRUD(database[MongoCfg.COMMENT], Comment)
DestinationCRUD = MongoCRUD(database[MongoCfg.DESTINATION], Destination)


# Chat-history management entries expire after one hour (TTL index).
chat_history_management_crud = MongoCRUD(
    database["chat_history_management"], ChatHistoryManagement, 3600
)
diff --git a/src/utils/redis.py b/src/utils/redis.py
new file mode 100644
index 0000000000000000000000000000000000000000..757b86e25664619abb010e32a8276188688f185a
--- /dev/null
+++ b/src/utils/redis.py
@@ -0,0 +1,18 @@
+from src.langgraph.config.constant import RedisCfg
+import redis.asyncio as redis
+
# Shared async Redis client; decode_responses=True yields str values
# instead of bytes.
redis_client = redis.from_url(
    RedisCfg.REDIS_URL, encoding="utf-8", decode_responses=True
)
+
+
async def set_key_redis(key, value, time=300):
    """Store ``value`` under ``key`` with an expiry.

    Args:
        key: Redis key name.
        value: value to store.
        time: expiry in seconds (default 300).
    """
    # Pass the expiry as the explicit ``ex`` keyword: relying on the third
    # positional parameter of Redis.set is fragile and unreadable.
    await redis_client.set(key, value, ex=time)
+
+
async def get_key_redis(key):
    """Fetch the string value stored at ``key`` (None if absent or expired)."""
    return await redis_client.get(key)
+
+
async def delete_key_redis(key):
    """Delete ``key`` from Redis (no-op if the key does not exist)."""
    await redis_client.delete(key)