Waheeb2001 committed on
Commit
9141a26
·
verified ·
1 Parent(s): 810050f

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +28 -17
main.py CHANGED
@@ -1,21 +1,24 @@
1
  from ctransformers import AutoModelForCausalLM
2
- from gradio import Chatbot, Textbox, Button
3
  import gradio as gr
4
 
5
- # Load Qwen 2.5 GGUF model
6
  llm = AutoModelForCausalLM.from_pretrained(
7
- "Qwen2.5-7B-Q4_K_M.gguf",
8
- model_type="qwen",
9
  max_new_tokens=1096,
10
  threads=3
11
  )
12
 
13
- # Define prompt format
14
  def format_prompt(message, chat_history):
15
- prompt = ""
 
 
 
16
  for user_msg, bot_msg in chat_history:
17
- prompt += f"<|user|>\n{user_msg}\n<|assistant|>\n{bot_msg}\n"
18
- prompt += f"<|user|>\n{message}\n<|assistant|>\n"
19
  return prompt
20
 
21
  # Define chatbot function
@@ -23,18 +26,26 @@ def respond(message, chat_history):
23
  formatted_prompt = format_prompt(message, chat_history)
24
  response = llm(formatted_prompt)
25
  chat_history.append((message, response))
26
- return chat_history, chat_history # Return both chatbot and state
27
 
28
- # Gradio UI
 
 
 
 
 
 
29
  with gr.Blocks() as demo:
30
- gr.Markdown("## Qwen 2.5 Chat Interface")
31
- chatbot = Chatbot()
32
- msg = Textbox(label="Your Message")
33
- clear = Button("Clear Chat")
 
 
34
 
35
- msg.submit(fn=respond, inputs=[msg, chatbot], outputs=[chatbot, chatbot])
36
- clear.click(fn=lambda: ([], []), inputs=None, outputs=[chatbot, chatbot])
37
 
38
- # Launch with share=True
39
  if __name__ == "__main__":
40
  demo.launch(share=True)
 
1
  from ctransformers import AutoModelForCausalLM
2
+ from gradio import Chatbot, Interface
3
  import gradio as gr
4
 
5
+ # Load the GGUF model
6
  llm = AutoModelForCausalLM.from_pretrained(
7
+ "zephyr-7b-beta.Q4_K_S.gguf",
8
+ model_type="mistral",
9
  max_new_tokens=1096,
10
  threads=3
11
  )
12
 
13
# Format prompt with system message and chat history
def format_prompt(message, chat_history):
    """Build a Zephyr-style chat prompt from the history and a new message.

    Args:
        message: The new user message to answer.
        chat_history: List of ``(user_msg, bot_msg)`` tuples from prior turns.

    Returns:
        A single prompt string: system preamble, each prior turn wrapped in
        ``<|user|>`` / ``<|assistant|>`` tags (each segment terminated by
        ``</s>``), then the new message followed by a trailing
        ``<|assistant|>`` tag so the model continues as the assistant.
    """
    system_prompt = (
        "Below is an instruction that describes a task. "
        "Write a response that appropriately completes the request."
    )
    E_INST = "</s>"  # end-of-sequence marker appended after every segment
    user, assistant = "<|user|>", "<|assistant|>"
    prompt = f"{system_prompt}{E_INST}\n"
    for user_msg, bot_msg in chat_history:
        prompt += f"{user}\n{user_msg}{E_INST}\n{assistant}\n{bot_msg}{E_INST}\n"
    prompt += f"{user}\n{message}{E_INST}\n{assistant}\n"
    return prompt
23
 
24
# Define chatbot function
def respond(message, chat_history):
    """Generate a model reply and record the turn in the history.

    Args:
        message: The user's new message.
        chat_history: Mutable list of ``(user, bot)`` tuples; appended to
            in place.

    Returns:
        ``(chat_history, chat_history)`` — the same list twice, matching the
        two Gradio outputs (the Chatbot display and the State component).
    """
    formatted_prompt = format_prompt(message, chat_history)
    response = llm(formatted_prompt)  # blocking local inference call
    chat_history.append((message, response))
    return chat_history, chat_history
30
 
31
# Build the Gradio Blocks chat UI.
# Fix: the original assigned a module-level Chatbot(bubble_full_width=False,
# height=500) that was immediately shadowed by gr.Chatbot() inside the Blocks
# context, so those display options were silently dropped. Apply them to the
# chatbot that is actually rendered instead.
with gr.Blocks() as demo:
    gr.Markdown("## Zephyr LLM Chat Interface")
    chatbot = gr.Chatbot(bubble_full_width=False, height=500)
    msg = gr.Textbox(label="Your Message")
    clear = gr.Button("Clear Chat")

    # Conversation history lives in a State component, separate from the
    # Chatbot display, so respond() can mutate and return it.
    state = gr.State([])

    msg.submit(respond, [msg, state], [chatbot, state])
    clear.click(lambda: ([], []), None, [chatbot, state])
48
 
49
# Launch the Gradio app when run as a script.
# share=True creates a public tunnel URL in addition to the local server.
if __name__ == "__main__":
    demo.launch(share=True)