KWK25 committed on
Commit
08681e0
·
verified ·
1 Parent(s): ec6c4fb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -6
app.py CHANGED
@@ -1,10 +1,11 @@
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
4
- client = InferenceClient("microsoft/phi-4")  # pre-commit model choice, removed by this commit
 
5
 
6
  def respond(message, history):
  # NOTE(review): this is the PRE-commit version of respond() as rendered in the
  # diff; "-" lines are removals. One line of the system prompt (old line 11) is
  # elided by the hunk header below, so the full prompt text is not visible here.
7
- # System prompt to guide the model
8
  system_prompt = (
9
  "You are a friendly chatbot who ONLY talks about K-pop. "
10
  "You are an expert in K-pop groups, idols, music, albums, choreography, fandom culture, "
@@ -12,17 +13,22 @@ def respond(message, history):
12
  "the conversation back to K-pop.\n\n"
13
  )
14
 
15
- # Build conversation history into a string
16
  conversation = ""
17
  for user_msg, bot_msg in history:
18
  conversation += f"User: {user_msg}\nBot: {bot_msg}\n"
19
  conversation += f"User: {message}\nBot:"
20
 
21
- # Combine system prompt and conversation
22
  prompt = system_prompt + conversation
23
 
24
- # Call the model using text_generation
25
- response = client.text_generation(prompt, max_new_tokens=150)
 
 
 
 
 
26
 
27
  return response.strip()
28
 
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
4
+ # Use a Spaces-friendly chat model, called through the HF Inference API.
+ # NOTE(review): serverless availability of this model can change over time — verify on deploy.
5
+ client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
6
 
7
  def respond(message, history):
  # Chat callback: concatenates the K-pop system instructions with the running
  # conversation into one plain-text prompt, then requests a completion.
  # Params: message — the latest user turn; history — iterable of
  # (user_msg, bot_msg) pairs from earlier turns (unpacked by the loop below;
  # presumably Gradio's tuple-style chat history — confirm against the UI wiring).
  # Returns: the model's reply with surrounding whitespace stripped.
8
+ # Build K-pop specific system prompt
9
  system_prompt = (
10
  "You are a friendly chatbot who ONLY talks about K-pop. "
11
  "You are an expert in K-pop groups, idols, music, albums, choreography, fandom culture, "
  # NOTE(review): one line of the prompt string (new line 12) is elided from this
  # diff view between the hunks — the full literal is not recoverable from here.
 
13
  "the conversation back to K-pop.\n\n"
14
  )
15
 
16
+ # Turn chat history into a plain text conversation
17
  conversation = ""
18
  for user_msg, bot_msg in history:
19
  conversation += f"User: {user_msg}\nBot: {bot_msg}\n"
20
  conversation += f"User: {message}\nBot:"
21
 
22
+ # Combine instructions and conversation
23
  prompt = system_prompt + conversation
24
 
25
+ # Generate response
26
+ response = client.text_generation(
27
+ prompt,
28
+ max_new_tokens=150,
29
+ do_sample=True,
30
+ temperature=0.7
31
+ )
32
 
33
  # NOTE(review): .strip() implies text_generation returns a plain string here
  # (no streaming/details flags are passed) — confirm against the client API.
  return response.strip()
34