Mayank-22 commited on
Commit
57d2582
·
verified ·
1 Parent(s): 62eab22

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -37
app.py CHANGED
@@ -1,13 +1,9 @@
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
-
4
  """
5
- For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
6
  """
7
-
8
- # Replace the default model with your custom model
9
  client = InferenceClient("Mayank-22/Mayank-AI")
10
-
11
  def respond(
12
  message,
13
  history: list[tuple[str, str]],
@@ -16,39 +12,27 @@ def respond(
16
  temperature,
17
  top_p,
18
  ):
19
- try:
20
- # Format conversation for text generation
21
- formatted_prompt = f"{system_message}\n\n"
22
-
23
- for val in history:
24
- if val[0]:
25
- formatted_prompt += f"User: {val[0]}\n"
26
- if val[1]:
27
- formatted_prompt += f"Assistant: {val[1]}\n"
28
-
29
- formatted_prompt += f"User: {message}\nAssistant:"
30
-
31
- response = ""
32
-
33
- for token in client.text_generation(
34
- formatted_prompt,
35
- max_new_tokens=max_tokens,
36
- temperature=temperature,
37
- top_p=top_p,
38
- stream=True,
39
- stop_sequences=["User:", "\nUser:"],
40
- ):
41
- if token:
42
- response += token
43
- yield response
44
-
45
- except Exception as e:
46
- yield f"Error: {str(e)}. Please try again or contact support."
47
-
48
  """
49
  For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
50
  """
51
-
52
  demo = gr.ChatInterface(
53
  respond,
54
  additional_inputs=[
@@ -64,6 +48,5 @@ demo = gr.ChatInterface(
64
  ),
65
  ],
66
  )
67
-
68
- if __name__ == "__main__":
69
  demo.launch()
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
 
3
  """
4
+ For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
5
  """
 
 
6
  client = InferenceClient("Mayank-22/Mayank-AI")
 
7
  def respond(
8
  message,
9
  history: list[tuple[str, str]],
 
12
  temperature,
13
  top_p,
14
  ):
15
+ messages = [{"role": "system", "content": system_message}]
16
+ for val in history:
17
+ if val[0]:
18
+ messages.append({"role": "user", "content": val[0]})
19
+ if val[1]:
20
+ messages.append({"role": "assistant", "content": val[1]})
21
+ messages.append({"role": "user", "content": message})
22
+ response = ""
23
+ for message in client.chat_completion(
24
+ messages,
25
+ max_tokens=max_tokens,
26
+ stream=True,
27
+ temperature=temperature,
28
+ top_p=top_p,
29
+ ):
30
+ token = message.choices[0].delta.content
31
+ response += token
32
+ yield response
 
 
 
 
 
 
 
 
 
 
 
33
  """
34
  For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
35
  """
 
36
  demo = gr.ChatInterface(
37
  respond,
38
  additional_inputs=[
 
48
  ),
49
  ],
50
  )
51
+ if __name__ == "__main__":
 
52
  demo.launch()