Spaces:
Sleeping
Sleeping
Raphael Glon
committed on
wip
Browse files
Signed-off-by: Raphael Glon <oOraph@users.noreply.github.com>
app.py
CHANGED
|
@@ -57,8 +57,7 @@ def _ensure_loaded():
|
|
| 57 |
_device = next(_model.parameters()).device
|
| 58 |
|
| 59 |
|
| 60 |
-
|
| 61 |
-
|
| 62 |
LOG.info("DEVICE %s", _device)
|
| 63 |
|
| 64 |
|
|
@@ -80,7 +79,7 @@ def generate_stream(message: str, history: List[Tuple[str, str]]):
|
|
| 80 |
"""
|
| 81 |
|
| 82 |
# FIXME: check the memory footprint doing so. We should rather do this before the spaces wrapper...
|
| 83 |
-
_ensure_loaded()
|
| 84 |
|
| 85 |
messages = _history_to_messages(history) + [{"role": "user", "content": message}]
|
| 86 |
inputs = _tokenizer.apply_chat_template(
|
|
|
|
| 57 |
_device = next(_model.parameters()).device
|
| 58 |
|
| 59 |
|
| 60 |
+
_ensure_loaded()
|
|
|
|
| 61 |
LOG.info("DEVICE %s", _device)
|
| 62 |
|
| 63 |
|
|
|
|
| 79 |
"""
|
| 80 |
|
| 81 |
# FIXME: check the memory footprint doing so. We should rather do this before the spaces wrapper...
|
| 82 |
+
# _ensure_loaded()
|
| 83 |
|
| 84 |
messages = _history_to_messages(history) + [{"role": "user", "content": message}]
|
| 85 |
inputs = _tokenizer.apply_chat_template(
|