Keyan2006 committed on
Commit
2a8bae1
·
verified ·
1 Parent(s): 98bc231

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -4
app.py CHANGED
@@ -2,9 +2,11 @@ import gradio as gr
2
  import torch
3
  from transformers import AutoModelForCausalLM, AutoTokenizer
4
 
5
- # Load model once
6
  model_name = "fla-hub/rwkv7-2.9B-world"
 
 
7
  tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
 
8
  model = AutoModelForCausalLM.from_pretrained(
9
  model_name,
10
  trust_remote_code=True,
@@ -12,12 +14,13 @@ model = AutoModelForCausalLM.from_pretrained(
12
  low_cpu_mem_usage=True,
13
  device_map="cpu"
14
  )
 
15
 
16
  def respond(message, history, system_message, max_tokens, temperature, top_p):
17
  messages = [{"role": "system", "content": system_message}]
18
- for h in history:
19
- messages.append({"role": "user", "content": h[0]})
20
- messages.append({"role": "assistant", "content": h[1]})
21
  messages.append({"role": "user", "content": message})
22
 
23
  text = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
 
2
  import torch
3
  from transformers import AutoModelForCausalLM, AutoTokenizer
4
 
 
5
  model_name = "fla-hub/rwkv7-2.9B-world"
6
+
7
+ print("Loading tokenizer...")
8
  tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
9
+ print("Loading model...")
10
  model = AutoModelForCausalLM.from_pretrained(
11
  model_name,
12
  trust_remote_code=True,
 
14
  low_cpu_mem_usage=True,
15
  device_map="cpu"
16
  )
17
+ print("Model loaded!")
18
 
19
  def respond(message, history, system_message, max_tokens, temperature, top_p):
20
  messages = [{"role": "system", "content": system_message}]
21
+ for human, assistant in history:
22
+ messages.append({"role": "user", "content": human})
23
+ messages.append({"role": "assistant", "content": assistant})
24
  messages.append({"role": "user", "content": message})
25
 
26
  text = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)