lanna_lalala committed on
Commit
9aaaaba
·
1 Parent(s): 62cd4c7

update hf_client

Browse files
Files changed (1) hide show
  1. phase/Student_view/chatbot.py +48 -6
phase/Student_view/chatbot.py CHANGED
@@ -32,16 +32,42 @@ def _trim_turn(text: str) -> str:
32
  return text.split(cp, 1)[0].strip()
33
  return text.strip()
34
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
35
  def _reply_with_hf():
36
  if "client" not in globals():
37
  raise RuntimeError("HF client not initialized")
38
 
 
39
  convo = _format_history_for_flan(st.session_state.get("messages", []))
40
- prompt = f"{TUTOR_PROMPT}\n\n{convo}\n\nTutor:"
41
 
42
  try:
43
- response = client.text_generation(
44
- prompt,
 
45
  max_new_tokens=220,
46
  temperature=0.2,
47
  top_p=0.9,
@@ -49,9 +75,25 @@ def _reply_with_hf():
49
  return_full_text=False,
50
  stream=False,
51
  )
52
- if not isinstance(response, str):
53
- response = response.get("generated_text") or str(response)
54
- return _trim_turn(response)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
55
  except Exception as e:
56
  err_text = ''.join(traceback.format_exception_only(type(e), e)).strip()
57
  raise RuntimeError(f"Hugging Face API Error: {err_text}")
 
32
  return text.split(cp, 1)[0].strip()
33
  return text.strip()
34
 
35
def _history_as_chat_messages(messages: list[dict]) -> list[dict]:
    """Translate the stored session history into HF chat-completion messages.

    The tutor system prompt is always the first entry; history items with
    empty/blank text are dropped.  A sender of "assistant" maps to the
    "assistant" role, everything else to "user".
    """
    chat: list[dict] = [{"role": "system", "content": TUTOR_PROMPT}]
    for entry in messages:
        content = (entry.get("text") or "").strip()
        if not content:
            continue
        speaker = "assistant" if entry.get("sender") == "assistant" else "user"
        chat.append({"role": speaker, "content": content})
    return chat
45
+
46
+ def _extract_chat_text(chat_resp) -> str:
47
+ # Works for both dict-like and object-like responses
48
+ try:
49
+ # huggingface_hub >= 0.23 style
50
+ return chat_resp.choices[0].message["content"] if isinstance(chat_resp.choices[0].message, dict) \
51
+ else chat_resp.choices[0].message.content
52
+ except Exception:
53
+ # fallback for dict payloads
54
+ try:
55
+ return chat_resp["choices"][0]["message"]["content"]
56
+ except Exception:
57
+ return str(chat_resp)
58
+
59
  def _reply_with_hf():
60
  if "client" not in globals():
61
  raise RuntimeError("HF client not initialized")
62
 
63
+ # Text-generation prompt (for providers that support it)
64
  convo = _format_history_for_flan(st.session_state.get("messages", []))
65
+ tg_prompt = f"{TUTOR_PROMPT}\n\n{convo}\n\nTutor:"
66
 
67
  try:
68
+ # 1) Try text-generation first (many backends support this)
69
+ resp = client.text_generation(
70
+ tg_prompt,
71
  max_new_tokens=220,
72
  temperature=0.2,
73
  top_p=0.9,
 
75
  return_full_text=False,
76
  stream=False,
77
  )
78
+ text = resp.get("generated_text") if isinstance(resp, dict) else resp
79
+ return _trim_turn(str(text or "").strip())
80
+
81
+ except ValueError as ve:
82
+ # 2) If the provider says "Supported task: conversational", use HF chat
83
+ if "Supported task: conversational" in str(ve):
84
+ msgs = _history_as_chat_messages(st.session_state.get("messages", []))
85
+ chat = client.chat.completions.create(
86
+ model=GEN_MODEL, # still HF, not OpenAI
87
+ messages=msgs,
88
+ max_tokens=220,
89
+ temperature=0.2,
90
+ top_p=0.9,
91
+ )
92
+ return _trim_turn(_extract_chat_text(chat))
93
+
94
+ # Some other ValueError — rethrow with the original message
95
+ raise
96
+
97
  except Exception as e:
98
  err_text = ''.join(traceback.format_exception_only(type(e), e)).strip()
99
  raise RuntimeError(f"Hugging Face API Error: {err_text}")