# NOTE(review): the original paste began with a Hugging Face Spaces
# "Runtime error" banner ("Spaces: / Runtime error / Runtime error");
# preserved here as a comment so the module parses.
# --- Imports & external services ------------------------------------------
import os

import yaml
from dotenv import load_dotenv
from exa_py import Exa
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from langchain.chat_models import ChatOpenAI
from langchain.prompts import PromptTemplate
from langchain.schema import AIMessage, HumanMessage, SystemMessage
from langchain.tools import BaseTool
from pydantic import BaseModel

# Pull variables from a local .env file into the process environment
# before any API keys are read.
load_dotenv()

# Shared Exa client for web search.
# NOTE(review): os.getenv returns None when EXA_API_KEY is unset — confirm
# the deployment environment actually provides it.
exa = Exa(api_key=os.getenv("EXA_API_KEY"))
# --- FastAPI application & CORS -------------------------------------------
app = FastAPI()

# Accept requests from any origin (typical for a public demo frontend).
# NOTE(review): browsers reject credentialed requests when
# allow_credentials=True is combined with allow_origins=["*"] — confirm
# this combination is intended.
origins = ["*"]
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
| # Create the Exa Web Search Tool | |
# Exa-backed web search tool usable by the chat handler.
class SearchAndContents(BaseTool):
    """Search the web via the Exa API and return result contents."""

    name: str = "Search and Contents Tool"
    description: str = (
        "Searches the web based on a search query for the latest results. "
        "Uses the Exa API to return the contents of the search results."
    )

    def _run(self, search_query: str, num_results: int = 3) -> str:
        """Execute the search and return the stringified Exa response.

        Args:
            search_query: Free-text query to send to Exa.
            num_results: Maximum number of results to fetch. Defaults to 3,
                matching the previously hard-coded value, so existing
                callers are unaffected.

        Returns:
            The Exa response (results plus page contents) as a string.
        """
        response = exa.search_and_contents(
            search_query, use_autoprompt=True, num_results=num_results
        )
        return str(response)
# Load a named prompt from a YAML file.
def load_prompt_from_yaml(file_path: str, prompt_name: str) -> str:
    """Return the prompt string stored under *prompt_name* in a YAML file.

    Args:
        file_path: Path to a YAML file containing a mapping of prompt
            names to prompt strings.
        prompt_name: Key of the prompt to return.

    Returns:
        The prompt text for ``prompt_name``.

    Raises:
        FileNotFoundError: If ``file_path`` does not exist.
        KeyError: If the file has no entry for ``prompt_name`` (also
            raised, with a clear message, when the YAML document is
            empty or not a mapping).
    """
    # Explicit encoding so the prompt loads identically on every platform.
    with open(file_path, "r", encoding="utf-8") as file:
        prompts = yaml.safe_load(file)
    try:
        return prompts[prompt_name]
    except (KeyError, TypeError) as err:
        # TypeError covers an empty or non-mapping YAML document
        # (safe_load then returns None or a scalar).
        raise KeyError(
            f"Prompt {prompt_name!r} not found in {file_path!r}"
        ) from err


# System prompt for the assistant, loaded once at import time.
prompt_text = load_prompt_from_yaml("prompts.yaml", "titi_prompt")
# --- Model, tool, and prompt objects --------------------------------------
# Chat model used to generate every reply.
llm = ChatOpenAI(model='gpt-4o-mini', temperature=0.7)

# Single web-search tool instance shared by all requests.
exa_tool = SearchAndContents()

# PromptTemplate wrapper around the raw system prompt text.
# NOTE(review): this object is not referenced by the handler below, which
# uses prompt_text directly — confirm whether it is still needed.
prompt = PromptTemplate(input_variables=[], template=prompt_text)
# --- API data models -------------------------------------------------------
class MessageInput(BaseModel):
    """Request body: a single user chat message."""

    # Raw message text from the client.
    message: str
class MessageResponse(BaseModel):
    """Response body: the assistant's reply text."""

    # Reply text produced by the model.
    reply: str
# Running conversation, seeded with the system prompt.
# NOTE(review): this is a single module-level list shared by every client
# and it grows without bound across requests — confirm that per-session
# state is not required.
conversation_history = [SystemMessage(content=prompt_text)]
# Register the handler on the app: as written, the function was defined but
# never attached to a route, so the API exposed no endpoint at all.
@app.post("/chat", response_model=MessageResponse)
async def chat_with_titi(message_input: MessageInput) -> MessageResponse:
    """Chat endpoint: enrich the user's message with Exa web results,
    query the LLM with the full conversation, and return its reply.

    Args:
        message_input: Request body carrying the user's message.

    Returns:
        A MessageResponse whose ``reply`` is the model's answer.

    Raises:
        HTTPException: 500 with the underlying error text on any failure.
    """
    try:
        # User input
        user_input = message_input.message

        # Fetch fresh web context for the user's query via the Exa tool.
        web_data = exa_tool._run(user_input)

        # Hand both the web context and the user's message to the model
        # by appending them to the shared conversation history.
        conversation_history.append(
            HumanMessage(content=f"Here's what I found on the web: {web_data}")
        )
        conversation_history.append(HumanMessage(content=user_input))

        # Generate the response using the language model.
        response = llm.invoke(conversation_history)

        # Remember the assistant's reply so later turns keep context.
        conversation_history.append(AIMessage(content=response.content))

        # Return the model's response.
        return MessageResponse(reply=response.content)
    except Exception as e:
        # Service boundary: surface any failure to the client as HTTP 500.
        raise HTTPException(status_code=500, detail=str(e))