joelg committed on
Commit
0157c73
·
1 Parent(s): 19a6387

FIX method

Browse files
Files changed (1) hide show
  1. rag_system.py +16 -8
rag_system.py CHANGED
@@ -192,18 +192,26 @@ Question: {query}
192
 
193
  Answer:"""
194
 
195
- # Generate response
196
  try:
197
- response = self.llm_client.text_generation(
198
- prompt,
199
- max_new_tokens=max_tokens,
 
 
 
 
 
 
 
200
  temperature=temperature,
201
- do_sample=temperature > 0,
202
  )
203
 
204
- # Clean up response
205
- if isinstance(response, str):
206
- answer = response.strip()
 
 
207
  else:
208
  answer = str(response).strip()
209
 
 
192
 
193
  Answer:"""
194
 
195
+ # Generate response using chat completion
196
  try:
197
+ messages = [
198
+ {
199
+ "role": "user",
200
+ "content": prompt
201
+ }
202
+ ]
203
+
204
+ response = self.llm_client.chat_completion(
205
+ messages=messages,
206
+ max_tokens=max_tokens,
207
  temperature=temperature,
 
208
  )
209
 
210
+ # Extract answer from response
211
+ if hasattr(response, 'choices') and len(response.choices) > 0:
212
+ answer = response.choices[0].message.content.strip()
213
+ elif isinstance(response, dict) and 'choices' in response:
214
+ answer = response['choices'][0]['message']['content'].strip()
215
  else:
216
  answer = str(response).strip()
217