XMichaelX committed on
Commit
3d958b3
·
verified ·
1 Parent(s): 0ec5fee

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -16
app.py CHANGED
@@ -1,5 +1,4 @@
1
  import gradio as gr
2
- from gradio import mix
3
 
4
  title = "GPT2"
5
  description = "Gradio Demo for OpenAI GPT2. To use it, simply add your text, or click one of the examples to load them. Read more at the links below."
@@ -7,16 +6,14 @@ description = "Gradio Demo for OpenAI GPT2. To use it, simply add your text, or
7
  article = "<p style='text-align: center'><a href='https://d4mucfpksywv.cloudfront.net/better-language-models/language_models_are_unsupervised_multitask_learners.pdf' target='_blank'>Language Models are Unsupervised Multitask Learners</a></p>"
8
 
9
  examples = [
10
- ['Paris is the capital of',"gpt2-medium"]
11
  ]
12
 
13
- io1 = gr.Interface.load("huggingface/distilgpt2")
14
-
15
- io2 = gr.Interface.load("huggingface/gpt2-large")
16
-
17
- io3 = gr.Interface.load("huggingface/gpt2-medium")
18
-
19
- io4 = gr.Interface.load("huggingface/gpt2-xl")
20
 
21
  def inference(text, model):
22
  if model == "gpt2-large":
@@ -28,15 +25,23 @@ def inference(text, model):
28
  else:
29
  outtext = io1(text)
30
  return outtext
31
-
32
-
33
 
34
- gr.Interface(
 
35
  inference,
36
- [gr.inputs.Textbox(label="Input"),gr.inputs.Dropdown(choices=["distilgpt2","gpt2-medium","gpt2-large","gpt2-xl"], type="value", default="gpt2-medium", label="model")
37
- ],
38
- gr.outputs.Textbox(label="Output"),
 
 
 
 
 
 
39
  examples=examples,
40
  article=article,
41
  title=title,
42
- description=description).launch(enable_queue=True)
 
 
 
 
1
  import gradio as gr
 
2
 
3
  title = "GPT2"
4
  description = "Gradio Demo for OpenAI GPT2. To use it, simply add your text, or click one of the examples to load them. Read more at the links below."
 
6
  article = "<p style='text-align: center'><a href='https://d4mucfpksywv.cloudfront.net/better-language-models/language_models_are_unsupervised_multitask_learners.pdf' target='_blank'>Language Models are Unsupervised Multitask Learners</a></p>"
7
 
8
# Sample prompt + model pairs surfaced as clickable examples in the demo UI.
examples = [
    ["Paris is the capital of", "gpt2-medium"],
]
11
 
12
# Eagerly load every hosted GPT-2 variant once at startup; inference() later
# dispatches to one of these callables based on the selected model name.
io1, io2, io3, io4 = (
    gr.load(f"huggingface/{repo}")
    for repo in ("distilgpt2", "gpt2-large", "gpt2-medium", "gpt2-xl")
)
 
 
17
 
18
  def inference(text, model):
19
  if model == "gpt2-large":
 
25
  else:
26
  outtext = io1(text)
27
  return outtext
 
 
28
 
29
# Wire the demo together: a free-text prompt box and a model picker feed
# inference(); its generated text lands in the output textbox.
iface = gr.Interface(
    inference,
    inputs=[
        gr.Textbox(label="Input"),
        gr.Dropdown(
            choices=["distilgpt2", "gpt2-medium", "gpt2-large", "gpt2-xl"],
            value="gpt2-medium",
            label="Model",
        ),
    ],
    outputs=gr.Textbox(label="Output"),
    examples=examples,
    article=article,
    title=title,
    description=description,
)

# NOTE(review): `enable_queue` was removed in Gradio 4.x (queuing is enabled
# via `iface.queue()` there) — confirm the pinned gradio version supports it.
iface.launch(enable_queue=True)