Spaces:
Sleeping
Sleeping
| from __future__ import annotations | |
| import os | |
| from typing import List | |
| from langchain_core.messages import HumanMessage, SystemMessage | |
| from langchain_groq import ChatGroq | |
| from .models import ItineraryPlan, UserRequest | |
def get_groq_chat() -> ChatGroq | None:
    """Build a Groq-backed chat model when credentials are available.

    Reads the ``GROQ_API_KEY`` environment variable and returns a configured
    ``ChatGroq`` instance, or ``None`` when no key is set so callers can take
    a non-LLM fallback path.
    """
    key = os.getenv("GROQ_API_KEY")
    if key is None or key == "":
        return None
    # Low temperature keeps guide generation close to deterministic.
    # NOTE(review): confirm "llama-3.1-70b-versatile" is still served by
    # Groq — older llama-3.1 ids have been retired in favour of newer ones.
    return ChatGroq(
        model="llama-3.1-70b-versatile",
        temperature=0.2,
        groq_api_key=key,
    )
def generate_narrative_logs(plan: ItineraryPlan, request: UserRequest) -> List[str]:
    """Render the itinerary as a list of human-readable guide lines.

    When a Groq chat model is configured (``GROQ_API_KEY`` set), the guide is
    written by the LLM; otherwise a deterministic sentence template is applied
    to each stop, so the function always produces output.
    """
    llm = get_groq_chat()

    if llm is None:
        # Deterministic fallback: one templated sentence per itinerary stop.
        return [
            (
                f"Between {s.start_time} and {s.end_time}, enjoy {s.attraction_name}. "
                f"Expect around {s.estimated_wait_minutes} minutes of queuing and "
                f"approximately {s.walking_distance_m} meters of walking from the previous stop."
            )
            for s in plan.stops
        ]

    # System message fixes the persona and the required numbered-line format;
    # human message carries the visitor constraints plus the structured stops.
    prompt = [
        SystemMessage(
            content=(
                "You are Qiddiya Smart Guide, an expert theme-park planner. "
                "Given a structured itinerary, write a clear, concise, step-by-step guide. "
                "Each step should be on its own line starting with a number and a period "
                "(for example: '1. 10:00–10:30 — ...'). Focus on clarity; do not add new rides."
            )
        ),
        HumanMessage(
            content=(
                f"Visitor date: {request.visit_date}\n"
                f"Start/end: {request.start_time}–{request.end_time}\n"
                f"Must-do attractions: {', '.join(request.must_do_attractions) or 'none'}\n"
                f"Intensity: {request.intensity_preference}, walking tolerance: {request.walking_tolerance}\n\n"
                f"Itinerary stops (JSON): {plan.model_dump()['stops']}\n\n"
                "Now produce the numbered guide as described."
            )
        ),
    ]

    response = llm.invoke(prompt)
    raw = response.content if isinstance(response.content, str) else str(response.content)
    # Keep only non-empty lines, stripped, so each entry is one guide step.
    return [stripped for stripped in (ln.strip() for ln in raw.splitlines()) if stripped]