kdevoe committed on
Commit
4b83c55
·
1 Parent(s): 75f5166

Adding prompt message framework

Browse files
Files changed (1) hide show
  1. app.py +8 -0
app.py CHANGED
@@ -8,6 +8,14 @@ pipe = pipeline("text-generation", model="TinyLlama/TinyLlama-1.1B-Chat-v1.0")
8
  # Define the inference function
9
  def generate_text(prompt):
10
  start_time = time.time()
 
 
 
 
 
 
 
 
11
  results = pipe(prompt, max_length=100, num_return_sequences=1)
12
  end_time = time.time()
13
  response_time = end_time - start_time
 
8
  # Define the inference function
9
  def generate_text(prompt):
10
  start_time = time.time()
11
+ messages = [
12
+ {
13
+ "role": "system",
14
+ "content": "You are a friendly and helpful chatbot",
15
+ },
16
+ {"role": "user", "content": prompt},
17
+ ]
18
+ prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
19
  results = pipe(prompt, max_length=100, num_return_sequences=1)
20
  end_time = time.time()
21
  response_time = end_time - start_time