DHEIVER committed on
Commit
4943051
·
1 Parent(s): 15624af

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -21
app.py CHANGED
@@ -1,25 +1,23 @@
1
  from transformers import pipeline, set_seed
2
  import gradio as grad, random, re
3
 
4
-
5
  gpt2_pipe = pipeline('text-generation', model='Gustavosta/MagicPrompt-Stable-Diffusion', tokenizer='gpt2')
6
  with open("ideas.txt", "r") as f:
7
  line = f.readlines()
8
 
9
-
10
- def generate(starting_text):
11
  seed = random.randint(100, 1000000)
12
  set_seed(seed)
13
 
14
- if starting_text == "":
15
- starting_text: str = line[random.randrange(0, len(line))].replace("\n", "").lower().capitalize()
16
- starting_text: str = re.sub(r"[,:\-–.!;?_]", '', starting_text)
17
 
18
- response = gpt2_pipe(starting_text, max_length=(len(starting_text) + random.randint(60, 90)), num_return_sequences=4)
19
  response_list = []
20
  for x in response:
21
  resp = x['generated_text'].strip()
22
- if resp != starting_text and len(resp) > (len(starting_text) + 4) and resp.endswith((":", "-", "—")) is False:
23
  response_list.append(resp+'\n')
24
 
25
  response_end = "\n".join(response_list)
@@ -29,26 +27,23 @@ def generate(starting_text):
29
  if response_end != "":
30
  return response_end
31
 
 
 
32
 
33
- txt = grad.Textbox(lines=1, label="Initial Text", placeholder="English Text here")
34
- out = grad.Textbox(lines=4, label="Generated Prompts")
35
-
36
- examples = []
37
  for x in range(8):
38
- examples.append(line[random.randrange(0, len(line))].replace("\n", "").lower().capitalize())
39
 
40
- title = "Stable Diffusion Prompt Generator"
41
- description = 'This is a demo of the model series: "MagicPrompt", in this case, aimed at: "Stable Diffusion". To use it, simply submit your text or click on one of the examples. To learn more about the model, [click here](https://huggingface.co/Gustavosta/MagicPrompt-Stable-Diffusion).<br>'
42
 
43
- grad.Interface(fn=generate,
44
  inputs=txt,
45
  outputs=out,
46
- examples=examples,
47
- title=title,
48
- description=description,
49
  article='',
50
  allow_flagging='never',
51
  cache_examples=False,
52
  theme="default").launch(enable_queue=True, debug=True)
53
-
54
-
 
1
# --- Model and data setup ----------------------------------------------
# A GPT-2-based prompt-expansion pipeline plus a pool of starter ideas
# read from disk. Both globals are consumed by gerar_texto() below.
from transformers import pipeline, set_seed

import gradio as grad
import random
import re

# MagicPrompt model fine-tuned for Stable Diffusion prompt expansion,
# decoded with the stock GPT-2 tokenizer.
gpt2_pipe = pipeline('text-generation', model='Gustavosta/MagicPrompt-Stable-Diffusion', tokenizer='gpt2')

# One candidate starting phrase per line; kept as a module-level list so
# both the UI examples and the empty-input fallback can sample from it.
# NOTE: kept the original (singular) name `line` — other code reads it.
with open("ideas.txt", "r") as f:
    line = f.readlines()
7
 
8
def gerar_texto(texto_inicial):
    """Expand *texto_inicial* into up to four Stable Diffusion prompts.

    Runs the module-level MagicPrompt pipeline (``gpt2_pipe``). When the
    input is empty, a random starter phrase from ``line`` (ideas.txt) is
    used instead, lightly normalized and stripped of punctuation.

    Returns the surviving generated prompts joined by blank lines.
    Bug fix vs. the original: always returns a string — the original
    implicitly returned ``None`` when every candidate was filtered out.
    """
    # Fresh seed per call so repeated submissions give different prompts.
    seed = random.randint(100, 1000000)
    set_seed(seed)

    if texto_inicial == "":
        # Fall back to a random idea; strip punctuation that tends to
        # derail the generator.
        texto_inicial = random.choice(line).replace("\n", "").lower().capitalize()
        texto_inicial = re.sub(r"[,:\-–.!;?_]", '', texto_inicial)

    # max_length is measured in tokens but budgeted from the character
    # length of the seed text — inherited quirk; it just gives generous
    # headroom for the continuation.
    response = gpt2_pipe(texto_inicial,
                         max_length=(len(texto_inicial) + random.randint(60, 90)),
                         num_return_sequences=4)

    response_list = []
    for x in response:
        resp = x['generated_text'].strip()
        # Keep a candidate only if it is not a bare echo of the input,
        # adds a meaningful amount of text, and does not end on a
        # dangling separator character.
        if resp != texto_inicial and len(resp) > (len(texto_inicial) + 4) and not resp.endswith((":", "-", "—")):
            response_list.append(resp + '\n')

    # Always return a string (possibly empty) so the Gradio output box
    # never receives None.
    return "\n".join(response_list)
29
 
30
# --- Gradio UI wiring ---------------------------------------------------
# Single-line input box, multi-line output box. (Labels/placeholders are
# user-facing Portuguese strings and are kept verbatim.)
txt = grad.Textbox(lines=1, label="Texto Inicial", placeholder="Digite o texto em inglês aqui")
out = grad.Textbox(lines=4, label="Prompts Gerados")

# Eight random starter phrases sampled from ideas.txt, normalized the
# same way gerar_texto() normalizes its empty-input fallback.
# (Comprehension + random.choice replace the manual index/append loop.)
exemplos = [random.choice(line).replace("\n", "").lower().capitalize() for _ in range(8)]

titulo = "Gerador de Prompt de Difusão Estável"
descricao = 'Este é um demo da série de modelos: "MagicPrompt", neste caso, focado em: "Difusão Estável". Para utilizá-lo, simplesmente insira seu texto ou clique em um dos exemplos. Para saber mais sobre o modelo, [clique aqui](https://huggingface.co/Gustavosta/MagicPrompt-Stable-Diffusion).<br>'

# Build and launch the demo; queueing enabled so long generations do not
# time out the HTTP request.
grad.Interface(fn=gerar_texto,
               inputs=txt,
               outputs=out,
               examples=exemplos,
               title=titulo,
               description=descricao,
               article='',
               allow_flagging='never',
               cache_examples=False,
               theme="default").launch(enable_queue=True, debug=True)