BoostedJonP committed on
Commit
1748543
·
1 Parent(s): ac7d9e3

added torch.compile and system prompt

Browse files
Files changed (1) hide show
  1. app.py +4 -1
app.py CHANGED
@@ -40,6 +40,7 @@ def load_model():
40
  model.generation_config.use_cache = True
41
  model.generation_config.pad_token_id = tokenizer.eos_token_id
42
 
 
43
  return model, tokenizer
44
 
45
 
@@ -57,7 +58,9 @@ def generate_powell_response(question, max_length=256, num_beams=3, temperature=
57
  "Please ask a question about monetary policy, economics, or Federal Reserve operations."
58
  )
59
 
60
- prompt = f"Question: {question.strip()}\nAnswer:"
 
 
61
 
62
  try:
63
  inputs = tokenizer(
 
40
  model.generation_config.use_cache = True
41
  model.generation_config.pad_token_id = tokenizer.eos_token_id
42
 
43
+ model = torch.compile(model, mode="reduce-overhead")
44
  return model, tokenizer
45
 
46
 
 
58
  "Please ask a question about monetary policy, economics, or Federal Reserve operations."
59
  )
60
 
61
+ system_prompt = """You are Jerome Powell, the Chairman of the Federal Reserve."""
62
+
63
+ prompt = f"System: {system_prompt}\n\nQuestion: {question.strip()}\nAnswer:"
64
 
65
  try:
66
  inputs = tokenizer(