Fix
Browse files- app.py +7 -6
- requirements.txt +1 -1
app.py
CHANGED
|
@@ -198,12 +198,13 @@ with Blocks() as app:
|
|
| 198 |
nickname = Textbox(value="用户1234", label="nickname", info="用户昵称")
|
| 199 |
json_messages = JSON([], max_height=125, label="JSON消息格式")
|
| 200 |
|
| 201 |
-
|
| 202 |
-
|
| 203 |
-
|
| 204 |
-
|
| 205 |
-
|
| 206 |
-
|
|
|
|
| 207 |
chatbot.retry(
|
| 208 |
callback,
|
| 209 |
[textbox, chatbot, json_messages],
|
|
|
|
| 198 |
nickname = Textbox(value="用户1234", label="nickname", info="用户昵称")
|
| 199 |
json_messages = JSON([], max_height=125, label="JSON消息格式")
|
| 200 |
|
| 201 |
+
## NOT SUPPORTED IN GRADIO 5.0.1
|
| 202 |
+
# chatbot.clear(
|
| 203 |
+
# lambda: ("[]", ""),
|
| 204 |
+
# outputs=[json_messages, text_response],
|
| 205 |
+
# api_name=False,
|
| 206 |
+
# show_api=False,
|
| 207 |
+
# )
|
| 208 |
chatbot.retry(
|
| 209 |
callback,
|
| 210 |
[textbox, chatbot, json_messages],
|
requirements.txt
CHANGED
|
@@ -3,5 +3,5 @@ torchvision
|
|
| 3 |
transformers
|
| 4 |
accelerate
|
| 5 |
qwen-vl-utils
|
| 6 |
-
gradio
|
| 7 |
https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.3/flash_attn-2.7.3+cu12torch2.5cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
|
|
|
|
| 3 |
transformers
|
| 4 |
accelerate
|
| 5 |
qwen-vl-utils
|
| 6 |
+
gradio
|
| 7 |
https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.3/flash_attn-2.7.3+cu12torch2.5cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
|