smileycutie0 committed on
Commit
8ea8b0f
·
verified ·
1 Parent(s): e237d54

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -8
app.py CHANGED
@@ -1,20 +1,22 @@
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
 
4
  client = InferenceClient("HuggingFaceTB/SmolLM2-1.7B-Instruct")
5
 
6
  def chat(message, history):
7
- history.append({"role": "user", "content": message})
8
- stream = client.chat.completions.create(
9
- messages=history,
 
 
 
10
  max_tokens=1024,
11
  stream=True
12
- )
13
- result = ""
14
- for chunk in stream:
15
  result += chunk.choices[0].delta.content
16
  yield result
17
- yield result
18
 
19
  with gr.Blocks(
20
  title="🤏 SmolLM Instruct",
@@ -30,7 +32,7 @@ with gr.Blocks(
30
  gr.ChatInterface(
31
  fn=chat,
32
  type="messages",
33
- examples=["Hi!"]
34
  )
35
 
36
  with gr.Accordion("ℹ️ About", open=False):
 
1
+ import os
2
  import gradio as gr
3
  from huggingface_hub import InferenceClient
4
 
5
+ SYSTEM_PROMPT = os.environ["SYSTEM_PROMPT"]
6
  client = InferenceClient("HuggingFaceTB/SmolLM2-1.7B-Instruct")
7
 
8
def chat(message, history):
    """Stream an assistant reply for *message*, given the prior *history*.

    Builds the message list by prepending the module-level SYSTEM_PROMPT as a
    system message and appending the user's new message, then streams the
    completion, yielding the progressively accumulated assistant text so the
    Gradio ChatInterface can render it token by token.

    Args:
        message: The user's latest input string.
        history: Prior turns as a list of {"role", "content"} dicts
            (ChatInterface ``type="messages"`` format).

    Yields:
        The assistant reply accumulated so far, once per streamed chunk.
    """
    messages = [{"role": "system", "content": SYSTEM_PROMPT}] + history
    messages.append({"role": "user", "content": message})

    result = ""
    for chunk in client.chat.completions.create(
        messages=messages,
        max_tokens=1024,
        stream=True
    ):
        # Streamed chunks (commonly the final one) may carry
        # delta.content == None; concatenating None to a str raises
        # TypeError, so only append when there is actual text.
        piece = chunk.choices[0].delta.content
        if piece:
            result += piece
        yield result
 
20
 
21
  with gr.Blocks(
22
  title="🤏 SmolLM Instruct",
 
32
  gr.ChatInterface(
33
  fn=chat,
34
  type="messages",
35
+ examples=["Hi!", "Who are you?"]
36
  )
37
 
38
  with gr.Accordion("ℹ️ About", open=False):