Ctaake committed on
Commit
6715852
·
verified ·
1 Parent(s): 8e52e76

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -22
app.py CHANGED
@@ -1,36 +1,15 @@
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
- from transformers import AutoTokenizer
4
  import random
5
  from mySystemPrompt import SYSTEM_PROMPT
6
 
7
  # Model which is used
8
  checkpoint = "mistralai/Mistral-7B-Instruct-v0.2"
9
- # Tokenizer to convert into the right format
10
- tokenizer = AutoTokenizer.from_pretrained(checkpoint)
11
  # Inference client with the model (And HF-token if needed)
12
  client = InferenceClient(checkpoint)
13
 
14
 
15
def format_promptOld(message, history, systemPrompt):
    """Build a Mistral/Llama-style instruction prompt from a chat history.

    Args:
        message: The current user message (str).
        history: Iterable of (user_message, bot_message) pairs.
        systemPrompt: System prompt text placed in the <<SYS>> block.

    Returns:
        str: ``<s>[INST] <<SYS>>...<</SYS>>`` system block followed by the
        chat-template-formatted conversation.
    """
    # Convert the conversation into role/content dicts for the tokenizer.
    messages = []
    # Message history first: alternating user / assistant turns.
    for user_message, bot_message in history:
        messages.append({"role": "user", "content": user_message})
        messages.append({"role": "assistant", "content": bot_message})
    # Followed by the current message.
    messages.append({"role": "user", "content": message})
    # The tokenizer renders the model-specific chat format.
    conversationalPart = tokenizer.apply_chat_template(messages, tokenize=False)
    # Strip the leading BOS token so the system block below supplies it.
    conversationalPart = conversationalPart.removeprefix('<s>')
    systemPromptMessage = f"<s>[INST] <<SYS>>\n{systemPrompt}\n<</SYS>>\n\n "
    # BUG FIX: the original returned systemPromptMessage + str(messages),
    # concatenating the Python repr of the dict list and discarding the
    # tokenizer-formatted conversation computed above. Use the formatted text.
    return systemPromptMessage + conversationalPart
34
  def format_prompt(message,chatbot,system_prompt):
35
  if system_prompt != "":
36
  input_prompt = f"<s>[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n\n "
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
 
3
  import random
4
  from mySystemPrompt import SYSTEM_PROMPT
5
 
6
  # Model which is used
7
  checkpoint = "mistralai/Mistral-7B-Instruct-v0.2"
 
 
8
  # Inference client with the model (And HF-token if needed)
9
  client = InferenceClient(checkpoint)
10
 
11
 
12
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
  def format_prompt(message,chatbot,system_prompt):
14
  if system_prompt != "":
15
  input_prompt = f"<s>[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n\n "