Spaces:
Runtime error
Runtime error
ffreemt
committed on
Commit
·
584239a
1
Parent(s):
b00e246
Update logger.trace disabled
Browse files
app.py
CHANGED
|
@@ -44,6 +44,11 @@ except AssertionError:
|
|
| 44 |
_ = 0
|
| 45 |
max_memory = {i: _ for i in range(n_gpus)}
|
| 46 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 47 |
|
| 48 |
def gen_model(model_name: str):
|
| 49 |
model = AutoModelForCausalLM.from_pretrained(
|
|
@@ -69,10 +74,6 @@ def user_clear(message, chat_history):
|
|
| 69 |
"""Gen a response, clear message in user textbox."""
|
| 70 |
logger.debug(f"{message=}")
|
| 71 |
|
| 72 |
-
logger.remove() # to turn on trace
|
| 73 |
-
logger.add(sys.stderr, level="TRACE")
|
| 74 |
-
logger.trace(f"{chat_history=}")
|
| 75 |
-
|
| 76 |
try:
|
| 77 |
chat_history.append([message, ""])
|
| 78 |
except Exception:
|
|
@@ -151,7 +152,7 @@ def bot_stream(chat_history, **kwargs):
|
|
| 151 |
for elm in model.chat_stream(tokenizer, message, chat_history):
|
| 152 |
chat_history[-1] = [message, elm]
|
| 153 |
yield chat_history
|
| 154 |
-
|
| 155 |
|
| 156 |
|
| 157 |
SYSTEM_PROMPT = "You are a helpful assistant."
|
|
@@ -269,7 +270,7 @@ with gr.Blocks(
|
|
| 269 |
)
|
| 270 |
temperature = gr.Slider(
|
| 271 |
label="Temperature",
|
| 272 |
-
minimum=0.
|
| 273 |
maximum=40.0,
|
| 274 |
step=0.1,
|
| 275 |
value=stats_default.config.temperature,
|
|
|
|
| 44 |
_ = 0
|
| 45 |
max_memory = {i: _ for i in range(n_gpus)}
|
| 46 |
|
| 47 |
+
del sys
|
| 48 |
+
# logger.remove() # to turn on trace
|
| 49 |
+
# logger.add(sys.stderr, level="TRACE")
|
| 50 |
+
# logger.trace(f"{chat_history=}")
|
| 51 |
+
|
| 52 |
|
| 53 |
def gen_model(model_name: str):
|
| 54 |
model = AutoModelForCausalLM.from_pretrained(
|
|
|
|
| 74 |
"""Gen a response, clear message in user textbox."""
|
| 75 |
logger.debug(f"{message=}")
|
| 76 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 77 |
try:
|
| 78 |
chat_history.append([message, ""])
|
| 79 |
except Exception:
|
|
|
|
| 152 |
for elm in model.chat_stream(tokenizer, message, chat_history):
|
| 153 |
chat_history[-1] = [message, elm]
|
| 154 |
yield chat_history
|
| 155 |
+
logger.debug(f"response: {elm}")
|
| 156 |
|
| 157 |
|
| 158 |
SYSTEM_PROMPT = "You are a helpful assistant."
|
|
|
|
| 270 |
)
|
| 271 |
temperature = gr.Slider(
|
| 272 |
label="Temperature",
|
| 273 |
+
minimum=0.51,
|
| 274 |
maximum=40.0,
|
| 275 |
step=0.1,
|
| 276 |
value=stats_default.config.temperature,
|