VcRlAgent committed on
Commit
3aacf33
·
1 Parent(s): 07e4e32

Starter LLM Inference Call

Browse files
Files changed (1) hide show
  1. app.py +2 -1
app.py CHANGED
@@ -18,7 +18,8 @@ def ask_llm(prompt):
18
  ],
19
  max_tokens=200
20
  )
21
- return completion.choices[0].message["content"]
 
22
  except Exception as e:
23
  return f"Error: {str(e)}"
24
 
 
18
  ],
19
  max_tokens=200
20
  )
21
+ #return completion.choices[0].message["content"]
22
+ return completion.choices[0].message.content
23
  except Exception as e:
24
  return f"Error: {str(e)}"
25