Spaces:
Running
on
Zero
Running
on
Zero
v1
Browse files
app.py
CHANGED
|
@@ -100,7 +100,14 @@ def bot_streaming(message, history, temperature, new_max_token, top_p):
|
|
| 100 |
response = generated_text.split('assistant\n')[-1].split('[U')[0].strip()
|
| 101 |
|
| 102 |
except:
|
| 103 |
-
response = "There
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 104 |
|
| 105 |
buffer = ""
|
| 106 |
for character in response:
|
|
@@ -113,7 +120,7 @@ demo = gr.ChatInterface(fn=bot_streaming,
|
|
| 113 |
additional_inputs_accordion="Generation Hyperparameters",
|
| 114 |
theme=gr.themes.Soft(),
|
| 115 |
title="☄️Meteor",
|
| 116 |
-
description="Meteor is efficient 7B size Large Language and Vision Model built on the help of traversal of rationale"
|
| 117 |
-
"Its inference speed highly depends on assinging non-scheduled GPU (Therefore, once all GPUs are busy, then inference may be taken in infinity)",
|
| 118 |
stop_btn="Stop Generation", multimodal=True)
|
| 119 |
demo.launch()
|
|
|
|
| 100 |
response = generated_text.split('assistant\n')[-1].split('[U')[0].strip()
|
| 101 |
|
| 102 |
except:
|
| 103 |
+
response = "There may be unsupported format: ex) pdf, video, sound. Only supported is single image in this version."
|
| 104 |
+
|
| 105 |
+
# private log print
|
| 106 |
+
text = message['text']
|
| 107 |
+
files = message['files']
|
| 108 |
+
print(f'Text: {text}')
|
| 109 |
+
print(f'MM Files: {files}')
|
| 110 |
+
|
| 111 |
|
| 112 |
buffer = ""
|
| 113 |
for character in response:
|
|
|
|
| 120 |
additional_inputs_accordion="Generation Hyperparameters",
|
| 121 |
theme=gr.themes.Soft(),
|
| 122 |
title="☄️Meteor",
|
| 123 |
+
description="Meteor is efficient 7B size Large Language and Vision Model built on the help of traversal of rationale.\n"
|
| 124 |
+
"Its inference speed highly depends on assigning non-scheduled GPU. (Therefore, once all GPUs are busy, then inference may be taken in infinity)",
|
| 125 |
stop_btn="Stop Generation", multimodal=True)
|
| 126 |
demo.launch()
|