NoQuest committed on
Commit
52f3cb4
·
verified ·
1 Parent(s): b61bde5

Update server.py

Browse files
Files changed (1) hide show
  1. server.py +14 -11
server.py CHANGED
@@ -10,6 +10,7 @@ warnings.filterwarnings('ignore', category=UserWarning, message='TypedStorage is
10
 
11
  with RequestBlocker():
12
  import gradio as gr
 
13
 
14
  import matplotlib
15
  matplotlib.use('Agg') # This fixes LaTeX rendering on some systems
@@ -127,8 +128,8 @@ def count_tokens(text):
127
  except:
128
  return 'Couldn\'t count the number of tokens. Is a tokenizer loaded?'
129
 
130
-
131
- def download_model_wrapper(repo_id, progress=gr.Progress()):
132
  try:
133
  downloader_module = importlib.import_module("download-model")
134
  downloader = downloader_module.ModelDownloader()
@@ -439,7 +440,9 @@ def create_settings_menus(default_preset):
439
  def create_file_saving_menus():
440
 
441
  # Text file saver
442
- with gr.Box(visible=False, elem_classes='file-saver') as shared.gradio['file_saver']:
 
 
443
  shared.gradio['save_filename'] = gr.Textbox(lines=1, label='File name')
444
  shared.gradio['save_root'] = gr.Textbox(lines=1, label='File folder', info='For reference. Unchangeable.', interactive=False)
445
  shared.gradio['save_contents'] = gr.Textbox(lines=10, label='File contents')
@@ -1066,14 +1069,14 @@ def create_interface():
1066
  # Extensions block
1067
  extensions_module.create_extensions_block()
1068
 
1069
- # Launch the interface
1070
- shared.gradio['interface'].queue()
1071
- with OpenMonkeyPatch():
1072
- if shared.args.listen:
1073
- shared.gradio['interface'].launch(prevent_thread_lock=True, share=shared.args.share, server_name=shared.args.listen_host or '0.0.0.0', server_port=shared.args.listen_port, inbrowser=shared.args.auto_launch, auth=auth)
1074
- else:
1075
- shared.gradio['interface'].launch(prevent_thread_lock=True, share=shared.args.share, server_port=shared.args.listen_port, inbrowser=shared.args.auto_launch, auth=auth)
1076
-
1077
 
1078
  if __name__ == "__main__":
1079
  # Loading custom settings
 
10
 
11
  with RequestBlocker():
12
  import gradio as gr
13
+ from gradio import components, Progress
14
 
15
  import matplotlib
16
  matplotlib.use('Agg') # This fixes LaTeX rendering on some systems
 
128
  except:
129
  return 'Couldn\'t count the number of tokens. Is a tokenizer loaded?'
130
 
131
+ #def download_model_wrapper(repo_id, progress=gr.Progress()):
132
+ def download_model_wrapper(repo_id, progress=Progress()):
133
  try:
134
  downloader_module = importlib.import_module("download-model")
135
  downloader = downloader_module.ModelDownloader()
 
440
  def create_file_saving_menus():
441
 
442
  # Text file saver
443
+ #with gr.Box(visible=False, elem_classes='file-saver') as shared.gradio['file_saver']:
444
+ #with gr.Column(visible=False, elem_classes='file-saver') as shared.gradio['file_saver']:
445
+ with gr.Group(visible=False, elem_classes='file-saver') as shared.gradio['file_saver']:
446
  shared.gradio['save_filename'] = gr.Textbox(lines=1, label='File name')
447
  shared.gradio['save_root'] = gr.Textbox(lines=1, label='File folder', info='For reference. Unchangeable.', interactive=False)
448
  shared.gradio['save_contents'] = gr.Textbox(lines=10, label='File contents')
 
1069
  # Extensions block
1070
  extensions_module.create_extensions_block()
1071
 
1072
+ shared.gradio['interface'].launch(
1073
+ prevent_thread_lock=True,
1074
+ share=False, # Set this to True
1075
+ server_name="0.0.0.0", # Add this line
1076
+ server_port=shared.args.listen_port,
1077
+ inbrowser=shared.args.auto_launch,
1078
+ auth=auth
1079
+ )
1080
 
1081
  if __name__ == "__main__":
1082
  # Loading custom settings