File size: 1,897 Bytes
5d93cab
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
import os
from langchain_core.messages import SystemMessage, HumanMessage, AIMessage
from pathlib import Path
from functools import lru_cache
from langgraph.prebuilt import create_react_agent
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
load_dotenv()

@lru_cache(maxsize=1)
def get_chat_model():
    """Return a cached ChatOpenAI client configured from environment variables.

    Reads ``OPENAI_MODEL`` and ``OPENAI_API_KEY`` (loaded from ``.env`` via
    ``load_dotenv()`` at import time).

    Returns:
        ChatOpenAI: deterministic (temperature 0) chat model, built once and
        memoized by ``lru_cache``.

    Raises:
        ValueError: if ``OPENAI_MODEL`` is not set — fail fast instead of
            passing ``None`` through to the client and erroring later.
    """
    model = os.getenv("OPENAI_MODEL")
    if not model:
        raise ValueError("OPENAI_MODEL environment variable is not set.")
    return ChatOpenAI(
        model=model,
        api_key=os.getenv("OPENAI_API_KEY"),
        temperature=0,
        max_tokens=10000,  # Adjust max tokens as needed
    )

@lru_cache(maxsize=1)
def get_local_chat_model():
    """Return a cached Ollama-backed ChatOpenAI model (OpenAI-compatible endpoint).

    Requires Ollama running locally: https://ollama.com
    Example: ``ollama run llama3.2:3b``

    The model name and endpoint may be overridden with the ``OLLAMA_MODEL``
    and ``OLLAMA_BASE_URL`` environment variables; the defaults preserve the
    previous hard-coded values, so existing behavior is unchanged.

    Returns:
        ChatOpenAI: deterministic (temperature 0) local chat model, built once
        and memoized by ``lru_cache``.
    """
    return ChatOpenAI(
        model=os.getenv("OLLAMA_MODEL", "llama3.2:3b"),
        base_url=os.getenv("OLLAMA_BASE_URL", "http://localhost:11434/v1"),
        api_key="ollama",  # Placeholder; Ollama ignores this but LangChain expects a key.
        temperature=0,
        max_tokens=2048,
    )

# def generate_response(user_input: str) -> str:
#     system_message = SystemMessage(content="You are a helpful assistant.")
#     human_message = HumanMessage(content=f"Please answer to the user query: {user_input}")
    
#     chat_model = get_chat_model()
#     response = chat_model.invoke([system_message, human_message])
#     print(response)
#     return response.content

def get_weather(city: str) -> str:
    """Return a canned weather report for *city* (mock tool for the agent)."""
    report = f"It's always sunny in {city}!"
    return report

def get_agent_response(user_input: str) -> str:
    """Run a ReAct agent on *user_input* and return its final answer.

    Builds a ReAct agent over the cached chat model with the ``get_weather``
    tool, invokes it with the user's message, and returns the content of the
    last message in the resulting history.

    Args:
        user_input: The user's query, passed to the agent as a human message.

    Returns:
        str: content of the agent's final message.
    """
    agent = create_react_agent(
        model=get_chat_model(),
        tools=[get_weather],
    )
    # The invoke result carries the full message history; the last entry is
    # the agent's final answer. (Removed debug print of the raw state.)
    result = agent.invoke({"messages": [HumanMessage(user_input)]})
    return result["messages"][-1].content