EternalRecursion committed on
Commit
1bdd951
·
1 Parent(s): 0886db3
Files changed (1) hide show
  1. app.py +5 -12
app.py CHANGED
@@ -8,7 +8,7 @@ from transformers import AutoTokenizer
8
  from huggingface_hub import upload_file
9
 
10
 
11
- model_id = "Elijahbodden/eliGPTv1.3"
12
 
13
  # MODEL
14
  model = Llama.from_pretrained(
@@ -106,17 +106,10 @@ ci = gr.ChatInterface(
106
  )
107
 
108
 
109
- with gr.Blocks(css=".bubble-gap {gap: 6px !important}", theme="shivi/calm_seafoam") as demo:
110
- gr.Markdown("# EliGPT v1.3")
111
- gr.Markdown("Llama 3 8b finetuned on 2.5k of my discord messages. [Train your own clone!](https://gist.github.com/Elijah-Bodden/1964bd02fcd19efef65f6e0cd92881c4)\nTHE MODEL IS VERY SLOW WHEN MULTIPLE PEOPLE ARE USING IT. YOU CAN DUPLICATE THE SPACE TO GET YOUR OWN DEDICATED INSTANCE.")
112
- with gr.Accordion("Q&A:", open=False):
113
- gr.Markdown("""Q: Why is the model so fucking slow
114
- A: The model might be slow if it hasn't run recently or a lot of people are using it (it's running on llama.cpp on a single a very slow cpu). You can duplicate the space to get your own (free) instance with no wait times.
115
- Q: Why is the model so dumb
116
- A: Llama 3 8b is impressive, but it's still tiny. This model is basically what you'd get if you shoved my brain into a toddler's head - it's just too small to be smart
117
- Q: Either it just made something up or I don't know you at all
118
- A: Probably the former. It's prone to hallucinating facts and opinions I don't hold. Take everything it says with a big grain of salt
119
- """)
120
  ci.render()
121
 
122
  if __name__ == "__main__":
 
8
  from huggingface_hub import upload_file
9
 
10
 
11
+ model_id = "EternalRecursion/llm_clone_llama"
12
 
13
  # MODEL
14
  model = Llama.from_pretrained(
 
106
  )
107
 
108
 
109
+ with gr.Blocks(css=".bubble-gap {gap: 6px !important}", theme="SebastianBravo/simci_css") as demo:
110
+ gr.Markdown("# SamuelGPT v1.0May")
111
+ gr.Markdown("Llama 3 8b finetuned on my conversation data, with a generous amount of code stolen from Elijah Bodden. [Train your own clone!](https://gist.github.com/Elijah-Bodden/1964bd02fcd19efef65f6e0cd92881c4)")
112
+ gr.Markdown("IMPORTANT: THIS MODEL DOES NOT REPRESENT ME AND I AM NOT RESPONSIBLE FOR ANYTHING IT SAYS. ALSO MIGHT BE KINDA SLOW, SORRY.")
 
 
 
 
 
 
 
113
  ci.render()
114
 
115
  if __name__ == "__main__":