Bahareh Kavousi nejad committed on
Commit
d284da3
·
1 Parent(s): 11a921e

Updated the model

Browse files
Files changed (1) hide show
  1. app.py +51 -9
app.py CHANGED
@@ -1,16 +1,58 @@
1
  import gradio as gr
2
-
3
- # Load model and tokenizer
4
  from transformers import AutoTokenizer, AutoModelForCausalLM
5
 
6
- tokenizer = AutoTokenizer.from_pretrained("SebastianSchramm/UniNER-7B-type-GPTQ-4bit-128g-actorder_True")
7
- model = AutoModelForCausalLM.from_pretrained("SebastianSchramm/UniNER-7B-type-GPTQ-4bit-128g-actorder_True")
 
 
 
 
 
 
 
8
 
9
- def chat(prompt):
 
 
 
 
 
 
 
 
10
  inputs = tokenizer(prompt, return_tensors="pt")
11
  outputs = model.generate(inputs.input_ids, max_length=200, num_return_sequences=1)
12
- return tokenizer.decode(outputs[0], skip_special_tokens=True)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
 
14
- # Create Gradio interface
15
- iface = gr.Interface(fn=chat, inputs="text", outputs="text", title="Open-Insurance LLM Chat")
16
- iface.launch()
 
1
  import gradio as gr
 
 
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
3
 
4
# Quantized UniNER checkpoint served by this app.
# NOTE(review): a GPTQ checkpoint normally needs auto-gptq/optimum installed
# at load time — confirm the Space's requirements include them.
MODEL_ID = "SebastianSchramm/UniNER-7B-type-GPTQ-4bit-128g-actorder_True"

# Load the tokenizer and model once at import time so every request reuses
# them. legacy=False opts into the current (non-legacy) tokenizer behaviour;
# device_map="auto" lets accelerate place the weights on available hardware.
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, legacy=False)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID, device_map="auto")
13
 
14
# Define a function to handle user input and generate a response
def chatbot_response(user_input, chat_history=None):
    """Generate a bot reply for ``user_input`` and append it to the chat.

    Parameters
    ----------
    user_input : str
        The message typed by the user.
    chat_history : list[tuple[str, str]] | None
        Conversation so far as (user_message, bot_message) pairs — the
        format ``gr.Chatbot`` renders correctly. ``None`` means an empty
        history.

    Returns
    -------
    tuple[list, list]
        The updated history twice: once for the Chatbot display and once
        for the ``gr.State`` component.
    """
    # Avoid the mutable-default-argument pitfall: a shared [] default would
    # leak conversation state across independent calls/sessions.
    if chat_history is None:
        chat_history = []
    # Work on a copy so the incoming gr.State list is never mutated in place.
    chat_history = list(chat_history)

    # Rebuild the prompt from the whole conversation so the model sees context.
    turns = [f"User: {user_msg} Bot: {bot_msg}" for user_msg, bot_msg in chat_history]
    turns.append(f"User: {user_input}")
    prompt = " ".join(turns)

    # Generate a response. NOTE(review): max_length caps prompt + reply
    # combined, so long conversations leave little room for the answer —
    # consider max_new_tokens instead.
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(inputs.input_ids, max_length=200, num_return_sequences=1)

    # generate() returns the prompt followed by the continuation; decode only
    # the newly generated tokens so the prompt is not echoed into the chat.
    new_tokens = outputs[0][inputs.input_ids.shape[1]:]
    bot_response = tokenizer.decode(new_tokens, skip_special_tokens=True)

    # Store the turn as a (user, bot) pair — the shape gr.Chatbot expects.
    chat_history.append((user_input, bot_response))

    # Return updated history for both the display and the state.
    return chat_history, chat_history
32
+
33
# Gradio interface
with gr.Blocks() as interface:
    gr.Markdown("### Chat with Your LLM")

    chat_history = gr.State([])  # To store the conversation history
    chat_display = gr.Chatbot()  # Chat display for the conversation
    user_input = gr.Textbox(
        show_label=False,
        placeholder="Type your message and press Enter"
    )
    send_button = gr.Button("Send")

    # Wire both triggers (button click and textbox Enter) to the same
    # handler instead of duplicating the wiring, then clear the textbox
    # so the user does not have to delete the previous message by hand.
    for trigger in (send_button.click, user_input.submit):
        trigger(
            chatbot_response,
            inputs=[user_input, chat_history],
            outputs=[chat_display, chat_history],
        ).then(lambda: "", None, user_input)

# Launch the Gradio app
interface.launch()