lukesteuber committed on
Commit
f40f5df
·
1 Parent(s): 45ecf8c

Update app.py

Browse files

alf take the wheel

Files changed (1) hide show
  1. app.py +7 -10
app.py CHANGED
@@ -1,5 +1,7 @@
1
  # Copyright 2023 MosaicML spaces authors
2
  # SPDX-License-Identifier: Apache-2.0
 
 
3
  import datetime
4
  import os
5
  from threading import Event, Thread
@@ -177,13 +179,11 @@ with gr.Blocks(
177
  ) as demo:
178
  conversation_id = gr.State(get_uuid)
179
  gr.Markdown(
180
- """<h1><center>MosaicML MPT-7B-Chat</center></h1>
181
-
182
- This demo is of [MPT-7B-Chat](https://huggingface.co/mosaicml/mpt-7b-chat). It is based on [MPT-7B](https://huggingface.co/mosaicml/mpt-7b) fine-tuned with approximately [171,000 conversation samples from this dataset](https://huggingface.co/datasets/sam-mosaic/vicuna_alpaca_hc3_chatml) and another [217,000 from this dataset](https://huggingface.co/datasets/sam-mosaic/hhrlhf_evol_chatml).
183
 
184
- If you're interested in [training](https://www.mosaicml.com/training) and [deploying](https://www.mosaicml.com/inference) your own MPT or LLMs, [sign up](https://forms.mosaicml.com/demo?utm_source=huggingface&utm_medium=referral&utm_campaign=mpt-7b) for MosaicML platform.
185
 
186
- This is running on a smaller, shared GPU, so it may take a few seconds to respond. If you want to run it on your own GPU, you can [download the model from HuggingFace](https://huggingface.co/mosaicml/mpt-7b-chat) and run it locally. Or [Duplicate the Space](https://huggingface.co/spaces/mosaicml/mpt-7b-chat?duplicate=true) to skip the queue and run in a private space.
187
  """
188
  )
189
  chatbot = gr.Chatbot().style(height=500)
@@ -200,7 +200,7 @@ with gr.Blocks(
200
  stop = gr.Button("Stop")
201
  clear = gr.Button("Clear")
202
  with gr.Row():
203
- with gr.Accordion("Advanced Options:", open=False):
204
  with gr.Row():
205
  with gr.Column():
206
  with gr.Row():
@@ -251,10 +251,7 @@ with gr.Blocks(
251
  )
252
  with gr.Row():
253
  gr.Markdown(
254
- "Disclaimer: MPT-7B can produce factually incorrect output, and should not be relied on to produce "
255
- "factually accurate information. MPT-7B was trained on various public datasets; while great efforts "
256
- "have been taken to clean the pretraining data, it is possible that this model could generate lewd, "
257
- "biased, or otherwise offensive outputs.",
258
  elem_classes=["disclaimer"],
259
  )
260
  with gr.Row():
 
1
  # Copyright 2023 MosaicML spaces authors
2
  # SPDX-License-Identifier: Apache-2.0
3
+ # anything broken is Luke's fault
4
+
5
  import datetime
6
  import os
7
  from threading import Event, Thread
 
179
  ) as demo:
180
  conversation_id = gr.State(get_uuid)
181
  gr.Markdown(
182
+ """<h1><center>cxntextMPT</center></h1>
 
 
183
 
184
+ This model engages three Matrix LLMs, others pending integration
185
 
186
+ Running on a potato, be patient.
187
  """
188
  )
189
  chatbot = gr.Chatbot().style(height=500)
 
200
  stop = gr.Button("Stop")
201
  clear = gr.Button("Clear")
202
  with gr.Row():
203
+ with gr.Accordion("Advanced", open=False):
204
  with gr.Row():
205
  with gr.Column():
206
  with gr.Row():
 
251
  )
252
  with gr.Row():
253
  gr.Markdown(
254
+ "Disclaimer: All included models can produce factually incorrect output, and if they don't they will be forced to by Elon.",
 
 
 
255
  elem_classes=["disclaimer"],
256
  )
257
  with gr.Row():