raphael-gl HF Staff committed on
Commit
a96f07b
·
verified ·
1 Parent(s): 9d08586

test footprint

Browse files
Files changed (1) hide show
  1. app.py +1 -4
app.py CHANGED
@@ -72,9 +72,6 @@ def _history_to_messages(history: List[Tuple[str, str]]) -> List[Dict[str, str]]
72
  return msgs
73
 
74
 
75
- _ensure_loaded()
76
-
77
-
78
  @spaces.GPU(duration=120)
79
  def generate_stream(message: str, history: List[Tuple[str, str]]):
80
  """
@@ -83,7 +80,7 @@ def generate_stream(message: str, history: List[Tuple[str, str]]):
83
  """
84
 
85
  # FIXME: check the memory footprint doing so. We should rather do this before the spaces wrapper...
86
- # _ensure_loaded()
87
 
88
  messages = _history_to_messages(history) + [{"role": "user", "content": message}]
89
  inputs = _tokenizer.apply_chat_template(
 
72
  return msgs
73
 
74
 
 
 
 
75
  @spaces.GPU(duration=120)
76
  def generate_stream(message: str, history: List[Tuple[str, str]]):
77
  """
 
80
  """
81
 
82
  # FIXME: check the memory footprint doing so. We should rather do this before the spaces wrapper...
83
+ _ensure_loaded()
84
 
85
  messages = _history_to_messages(history) + [{"role": "user", "content": message}]
86
  inputs = _tokenizer.apply_chat_template(