Spaces:
Runtime error
Runtime error
Commit ·
f40f5df
1
Parent(s): 45ecf8c
Update app.py
Browse files
alf take the wheel
app.py
CHANGED
|
@@ -1,5 +1,7 @@
|
|
| 1 |
# Copyright 2023 MosaicML spaces authors
|
| 2 |
# SPDX-License-Identifier: Apache-2.0
|
|
|
|
|
|
|
| 3 |
import datetime
|
| 4 |
import os
|
| 5 |
from threading import Event, Thread
|
|
@@ -177,13 +179,11 @@ with gr.Blocks(
|
|
| 177 |
) as demo:
|
| 178 |
conversation_id = gr.State(get_uuid)
|
| 179 |
gr.Markdown(
|
| 180 |
-
"""<h1><center>
|
| 181 |
-
|
| 182 |
-
This demo is of [MPT-7B-Chat](https://huggingface.co/mosaicml/mpt-7b-chat). It is based on [MPT-7B](https://huggingface.co/mosaicml/mpt-7b) fine-tuned with approximately [171,000 conversation samples from this dataset](https://huggingface.co/datasets/sam-mosaic/vicuna_alpaca_hc3_chatml) and another [217,000 from this dataset](https://huggingface.co/datasets/sam-mosaic/hhrlhf_evol_chatml).
|
| 183 |
|
| 184 |
-
|
| 185 |
|
| 186 |
-
|
| 187 |
"""
|
| 188 |
)
|
| 189 |
chatbot = gr.Chatbot().style(height=500)
|
|
@@ -200,7 +200,7 @@ with gr.Blocks(
|
|
| 200 |
stop = gr.Button("Stop")
|
| 201 |
clear = gr.Button("Clear")
|
| 202 |
with gr.Row():
|
| 203 |
-
with gr.Accordion("Advanced
|
| 204 |
with gr.Row():
|
| 205 |
with gr.Column():
|
| 206 |
with gr.Row():
|
|
@@ -251,10 +251,7 @@ with gr.Blocks(
|
|
| 251 |
)
|
| 252 |
with gr.Row():
|
| 253 |
gr.Markdown(
|
| 254 |
-
"Disclaimer:
|
| 255 |
-
"factually accurate information. MPT-7B was trained on various public datasets; while great efforts "
|
| 256 |
-
"have been taken to clean the pretraining data, it is possible that this model could generate lewd, "
|
| 257 |
-
"biased, or otherwise offensive outputs.",
|
| 258 |
elem_classes=["disclaimer"],
|
| 259 |
)
|
| 260 |
with gr.Row():
|
|
|
|
| 1 |
# Copyright 2023 MosaicML spaces authors
|
| 2 |
# SPDX-License-Identifier: Apache-2.0
|
| 3 |
+
# anything broken is Luke's fault
|
| 4 |
+
|
| 5 |
import datetime
|
| 6 |
import os
|
| 7 |
from threading import Event, Thread
|
|
|
|
| 179 |
) as demo:
|
| 180 |
conversation_id = gr.State(get_uuid)
|
| 181 |
gr.Markdown(
|
| 182 |
+
"""<h1><center>cxntextMPT</center></h1>
|
|
|
|
|
|
|
| 183 |
|
| 184 |
+
This model engages three Matrix LLMs, others pending integration
|
| 185 |
|
| 186 |
+
Running on a potato, be patient.
|
| 187 |
"""
|
| 188 |
)
|
| 189 |
chatbot = gr.Chatbot().style(height=500)
|
|
|
|
| 200 |
stop = gr.Button("Stop")
|
| 201 |
clear = gr.Button("Clear")
|
| 202 |
with gr.Row():
|
| 203 |
+
with gr.Accordion("Advanced", open=False):
|
| 204 |
with gr.Row():
|
| 205 |
with gr.Column():
|
| 206 |
with gr.Row():
|
|
|
|
| 251 |
)
|
| 252 |
with gr.Row():
|
| 253 |
gr.Markdown(
|
| 254 |
+
"Disclaimer: All included models can produce factually incorrect output, and if they don't they will be forced to by Elon.",
|
|
|
|
|
|
|
|
|
|
| 255 |
elem_classes=["disclaimer"],
|
| 256 |
)
|
| 257 |
with gr.Row():
|