iammartian0 committed on
Commit
ff476e8
·
verified ·
1 Parent(s): 486b918

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +17 -3
agent.py CHANGED
@@ -54,14 +54,28 @@ class BasicAgent:
54
  context = "No context found. Answer based on your knowledge."
55
 
56
  # Use truncated_question for LLM too
57
- messages = [
58
- {"role": "system", "content": "You are a helpful assistant. Do not call tools. Use context faithfully."},
59
- {"role": "user", "content": f"Question: {truncated_question}\nContext:\n{context}\nAnswer concisely."}
 
 
 
 
 
 
 
 
 
 
 
 
60
  ]
61
 
62
  comp = self.llm_client.chat.completions.create(
63
  model=self.model,
64
  messages=messages,
 
 
65
  )
66
 
67
  return comp.choices[0].message.content.strip()
 
54
  context = "No context found. Answer based on your knowledge."
55
 
56
  # Use truncated_question for LLM too
57
+ messages = [
58
+ {
59
+ "role": "system",
60
+ "content": (
61
+ "You are a precise data extraction engine. "
62
+ "Your task is to provide ONLY the exact answer to the user's question based on the context. "
63
+ "Do not provide explanations, introductory text, or conversational filler. "
64
+ "Do not say 'The answer is' or 'Based on the context'. "
65
+ "If the answer is a name, number, or date, return JUST that specific value."
66
+ )
67
+ },
68
+ {
69
+ "role": "user",
70
+ "content": f"Context:\n{context}\n\nQuestion: {truncated_question}\n\nExact Answer:"
71
+ }
72
  ]
73
 
74
  comp = self.llm_client.chat.completions.create(
75
  model=self.model,
76
  messages=messages,
77
+ temperature=0.0, # 0.0 makes the model very strict and factual
78
+ max_tokens=100
79
  )
80
 
81
  return comp.choices[0].message.content.strip()