File size: 1,142 Bytes
02d44c3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
import logging
import os

from langchain_core.output_parsers import JsonOutputParser
from langchain_core.prompts import ChatPromptTemplate

from llm_factory import get_llm
from models import Patient

# Initialize LLM
# Low temperature keeps field extraction deterministic rather than creative.
llm = get_llm(model_type="text", temperature=0.1)

# Parser
# Parses the model's JSON output; pydantic_object ties the parser to the
# Patient schema so its format instructions match the model we validate against.
parser = JsonOutputParser(pydantic_object=Patient)

# Prompt
# System instructions for the extraction task. The string is runtime data
# consumed by the LLM — edits here change extraction behavior.
system_prompt = """You are a medical receptionist agent. Your goal is to extract patient information from a natural language introduction.
Extract the following fields: name, age, gender, and any mentioned medical history.
If a field is missing, leave it as null or infer it if obvious.
Return the result as a JSON object matching the Patient schema.
"""

# {input} is filled with the raw user text at invocation time.
prompt = ChatPromptTemplate.from_messages([
    ("system", system_prompt),
    ("user", "{input}")
])

# LCEL pipeline: prompt -> LLM -> JSON parser. Consumed by run_intake_agent.
chain = prompt | llm | parser

async def run_intake_agent(user_input: str) -> Patient:
    """Extract structured patient data from a free-text introduction.

    Runs the module-level intake chain (prompt -> LLM -> JSON parser) on
    *user_input* and re-validates the parsed dict through the Patient model
    so callers receive a typed object rather than a raw dict.

    Args:
        user_input: The patient's natural-language self-introduction.

    Returns:
        A populated ``Patient``. On any failure (LLM call, JSON parsing, or
        model validation) a sentinel ``Patient(name="Unknown", age=0,
        gender="Unknown")`` is returned so the intake flow can continue.
    """
    try:
        result = await chain.ainvoke({"input": user_input})
        # JsonOutputParser yields a plain dict; construct the Pydantic model
        # so downstream code gets validated, typed fields.
        return Patient(**result)
    except Exception:
        # Log with full traceback instead of print() to stdout, which lost
        # the stack and bypassed any configured log handlers. The sentinel
        # return preserves the original best-effort contract for callers.
        logging.getLogger(__name__).exception(
            "Intake agent failed to extract patient from input: %r", user_input
        )
        return Patient(name="Unknown", age=0, gender="Unknown")