File size: 2,777 Bytes
d5a3ec4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
import os
import yaml
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from langchain.prompts import PromptTemplate
from langchain.chat_models import ChatOpenAI
from langchain.schema import HumanMessage, AIMessage, SystemMessage
from langchain.tools import BaseTool
from exa_py import Exa
from dotenv import load_dotenv

# Load environment variables (e.g. EXA_API_KEY and the OpenAI key read by
# langchain) from a local .env file before any client is constructed.
load_dotenv()

# Exa client for web search; requires EXA_API_KEY in the environment.
# NOTE(review): if EXA_API_KEY is unset this passes api_key=None — confirm
# the desired failure mode (fail fast here vs. error on first search).
exa = Exa(api_key=os.getenv("EXA_API_KEY"))

# FastAPI app
app = FastAPI()

# Configure CORS
# NOTE(review): wildcard origins together with allow_credentials=True is
# permissive — confirm this is intended for production.
origins = ["*"]
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Create the Exa Web Search Tool
# Web search tool backed by the Exa API.
class SearchAndContents(BaseTool):
    """LangChain tool that runs an Exa web search and returns the results
    (including page contents) as a single string.
    """

    name: str = "Search and Contents Tool"
    description: str = (
        "Searches the web based on a search query for the latest results. "
        "Uses the Exa API to return the contents of the search results."
    )

    def _run(self, search_query: str) -> str:
        # Autoprompt lets Exa rewrite the query for better recall; three
        # results keep the context handed to the LLM small.
        results = exa.search_and_contents(
            search_query,
            use_autoprompt=True,
            num_results=3,
        )
        return str(results)

# Load prompt from YAML file
def load_prompt_from_yaml(file_path: str, prompt_name: str) -> str:
    """Load a named prompt from a YAML file.

    Args:
        file_path: Path to a YAML file containing a mapping of prompt
            names to prompt text.
        prompt_name: Key of the prompt to retrieve.

    Returns:
        The prompt text stored under ``prompt_name``.

    Raises:
        FileNotFoundError: If ``file_path`` does not exist.
        KeyError: If ``prompt_name`` is missing from the file.
    """
    # Explicit encoding avoids platform-dependent defaults when reading
    # the (UTF-8) YAML file.
    with open(file_path, "r", encoding="utf-8") as file:
        prompts = yaml.safe_load(file)
    return prompts[prompt_name]

# System prompt text loaded at import time; the server fails to start if
# prompts.yaml or the "titi_prompt" key is missing.
prompt_text = load_prompt_from_yaml("prompts.yaml", "titi_prompt")

# Instantiate ChatOpenAI model and tools
llm = ChatOpenAI(model='gpt-4o-mini', temperature=0.7)
exa_tool = SearchAndContents()
# NOTE(review): this PromptTemplate is built but not referenced by the
# /chat handler below — confirm whether it is used elsewhere or dead.
prompt = PromptTemplate(input_variables=[], template=prompt_text)

# Data models for API
class MessageInput(BaseModel):
    """Request body for POST /chat."""
    # The user's chat message.
    message: str

class MessageResponse(BaseModel):
    """Response body for POST /chat."""
    # The assistant's reply text.
    reply: str

# Initialize conversation history, seeded with the system prompt.
# NOTE(review): this is module-level mutable state shared by ALL clients and
# it grows without bound across requests — confirm single-user usage or move
# to per-session storage with a length cap.
conversation_history = [SystemMessage(content=prompt_text)]

@app.post("/chat", response_model=MessageResponse)
async def chat_with_titi(message_input: MessageInput):
    """Handle one chat turn: search the web for the user's message, ask the
    LLM with the full running history, and return its reply.

    NOTE(review): this appends to the module-level ``conversation_history``
    shared across all requests, and calls the tool's private ``_run`` method
    directly — confirm both are intentional.
    """
    try:
        query = message_input.message

        # Pull fresh web context for the query via the Exa tool.
        search_context = exa_tool._run(query)

        # Feed the web findings first, then the actual user message.
        conversation_history.append(
            HumanMessage(content=f"Here's what I found on the web: {search_context}")
        )
        conversation_history.append(HumanMessage(content=query))

        # Ask the model for a reply conditioned on the whole history.
        llm_reply = llm.invoke(conversation_history)

        # Remember the assistant's answer for subsequent turns.
        conversation_history.append(AIMessage(content=llm_reply.content))

        return MessageResponse(reply=llm_reply.content)

    except Exception as exc:
        # Surface any failure (Exa, OpenAI, parsing) to the client as a 500.
        raise HTTPException(status_code=500, detail=str(exc))