Update app.py
Browse files
app.py
CHANGED
|
@@ -4,7 +4,7 @@ os.environ['CUDA_VISIBLE_DEVICES'] = '-1' # Force TensorFlow to use CPU only
|
|
| 4 |
os.environ['TF_ENABLE_ONEDNN_OPTS'] = '0'
|
| 5 |
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' # Suppress all TensorFlow messages
|
| 6 |
os.environ['GRADIO_HOT_RELOAD'] = 'false' # Disable Gradio hot reload to avoid the error # You have no Nvidia GPU and Cuda
|
| 7 |
-
os.environ['WRAPT_DISABLE_EXTENSIONS'] = 'true'
|
| 8 |
|
| 9 |
import re
|
| 10 |
import nltk
|
|
@@ -288,8 +288,20 @@ if __name__ == "__main__":
|
|
| 288 |
interface = create_gradio_interface()
|
| 289 |
|
| 290 |
# Launch with configuration - theme moved here for Gradio 6.0+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 291 |
interface.launch(
|
| 292 |
-
server_name="0.0.0.0",
|
| 293 |
server_port=7860,
|
| 294 |
share=False,
|
| 295 |
show_error=True,
|
|
|
|
| 4 |
os.environ['TF_ENABLE_ONEDNN_OPTS'] = '0'
|
| 5 |
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' # Suppress all TensorFlow messages
|
| 6 |
os.environ['GRADIO_HOT_RELOAD'] = 'false' # Disable Gradio hot reload to avoid the error # You have no Nvidia GPU and Cuda
|
| 7 |
+
# os.environ['WRAPT_DISABLE_EXTENSIONS'] = 'true'
|
| 8 |
|
| 9 |
import re
|
| 10 |
import nltk
|
|
|
|
| 288 |
interface = create_gradio_interface()
|
| 289 |
|
| 290 |
# Launch with configuration - theme moved here for Gradio 6.0+
|
| 291 |
+
# try:
|
| 292 |
+
# interface.launch(
|
| 293 |
+
# server_name="0.0.0.0",
|
| 294 |
+
# server_port=7860,
|
| 295 |
+
# share=False,
|
| 296 |
+
# show_error=True,
|
| 297 |
+
# ssr_mode=False,
|
| 298 |
+
# debug=True,
|
| 299 |
+
# theme=gr.themes.Soft()
|
| 300 |
+
# )
|
| 301 |
+
# except Exception as e:
|
| 302 |
+
# print("Server shutdown cleanup warning ignored:", e)
|
| 303 |
interface.launch(
|
| 304 |
+
server_name="0.0.0.0",
|
| 305 |
server_port=7860,
|
| 306 |
share=False,
|
| 307 |
show_error=True,
|