abenkbp committed on
Commit
419a2ec
·
1 Parent(s): 69d43f6
Files changed (1) hide show
  1. chat.py +5 -1
chat.py CHANGED
@@ -16,11 +16,14 @@ client = InferenceClient("meta-llama/Meta-Llama-3-70B-Instruct")
16
  @spaces.GPU()
17
  def chat_completion():
18
  data = request.json
19
- user_input = data.get('user_input', '')
20
  max_tokens = data.get('max_tokens', 512)
21
  temperature = data.get('temperature', 0.7)
22
  top_p = data.get('top_p', 0.95)
23
 
 
 
 
24
  try:
25
  response = ""
26
  for message in client.chat_completion(
@@ -37,6 +40,7 @@ def chat_completion():
37
  except Exception as e:
38
  return jsonify({"status": "error", "message": str(e)})
39
 
 
40
  def main():
41
  app.run(host='0.0.0.0', port=7050)
42
 
 
16
  @spaces.GPU()
17
  def chat_completion():
18
  data = request.json
19
+ user_input = data.get('user_input', [])
20
  max_tokens = data.get('max_tokens', 512)
21
  temperature = data.get('temperature', 0.7)
22
  top_p = data.get('top_p', 0.95)
23
 
24
+ print(f"Received user_input: {user_input}")
25
+ print(f"max_tokens: {max_tokens}, temperature: {temperature}, top_p: {top_p}")
26
+
27
  try:
28
  response = ""
29
  for message in client.chat_completion(
 
40
  except Exception as e:
41
  return jsonify({"status": "error", "message": str(e)})
42
 
43
+
44
def main():
    """Entry point: launch the Flask app, listening on all interfaces at port 7050."""
    server_config = {'host': '0.0.0.0', 'port': 7050}
    app.run(**server_config)
46