Omnibus committed on
Commit
5bba87a
·
verified ·
1 Parent(s): 700ff7d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -20
app.py CHANGED
@@ -23,26 +23,16 @@ def format_prompt(message, history):
23
  agents =[
24
  "MEME_GENERATOR",
25
  ]
26
- def generate(
27
- prompt, history, agent_name=agents[0], sys_prompt="", temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
28
- ):
29
  seed = random.randint(1,1111111111111111)
30
 
31
- agent=MEME_GENERATOR
32
- if agent_name == "MEME_GENERATOR":
33
- agent = MEME_GENERATOR
34
 
35
- system_prompt=agent
36
- temperature = float(temperature)
37
- if temperature < 1e-2:
38
- temperature = 1e-2
39
- top_p = float(top_p)
40
-
41
  generate_kwargs = dict(
42
- temperature=temperature,
43
- max_new_tokens=max_new_tokens,
44
- top_p=top_p,
45
- repetition_penalty=repetition_penalty,
46
  do_sample=True,
47
  seed=seed,
48
  )
@@ -53,7 +43,7 @@ def generate(
53
 
54
  for response in stream:
55
  output += response.token.text
56
- yield output
57
  with open('tmp.txt','w') as t:
58
  t.writelines(output)
59
  t.close()
@@ -70,7 +60,7 @@ def generate(
70
  except Exception as e:
71
  print(e)
72
  pass
73
- return output
74
 
75
  def run(inp,history,model_drop):
76
  #if len(inp)>max_prompt:
@@ -140,14 +130,16 @@ with gr.Blocks() as app:
140
  msg = gr.Textbox()
141
  model_drop=gr.Dropdown(label="Diffusion Models", type="index", choices=[m for m in models], value=models[0])
142
  with gr.Group():
143
- submit_b = gr.Button()
 
144
  with gr.Row():
145
  stop_b = gr.Button("Stop")
146
  clear = gr.ClearButton([msg, chatbot])
147
  with gr.Column(scale=2):
148
  im_out=gr.Image(label="Image")
149
 
150
- sub_b = submit_b.click(run, [msg,chatbot,model_drop],[chatbot,im_out])
 
151
  sub_e = msg.submit(run, [msg, chatbot,model_drop], [chatbot,im_out])
152
  stop_b.click(None,None,None, cancels=[sub_b,sub_e])
153
  app.launch()
 
23
  agents =[
24
  "MEME_GENERATOR",
25
  ]
26
+ def generate(prompt, history):
 
 
27
  seed = random.randint(1,1111111111111111)
28
 
29
+ system_prompt=MEME_GENERATOR
 
 
30
 
 
 
 
 
 
 
31
  generate_kwargs = dict(
32
+ temperature=0.9,
33
+ max_new_tokens=256,
34
+ top_p=0.95,
35
+ repetition_penalty=1.0,
36
  do_sample=True,
37
  seed=seed,
38
  )
 
43
 
44
  for response in stream:
45
  output += response.token.text
46
+ yield [(prompt,output)]
47
  with open('tmp.txt','w') as t:
48
  t.writelines(output)
49
  t.close()
 
60
  except Exception as e:
61
  print(e)
62
  pass
63
+ return [(prompt,output)]
64
 
65
  def run(inp,history,model_drop):
66
  #if len(inp)>max_prompt:
 
130
  msg = gr.Textbox()
131
  model_drop=gr.Dropdown(label="Diffusion Models", type="index", choices=[m for m in models], value=models[0])
132
  with gr.Group():
133
+ submit_b = gr.Button("Meme")
134
+ submit_im = gr.Button("Image")
135
  with gr.Row():
136
  stop_b = gr.Button("Stop")
137
  clear = gr.ClearButton([msg, chatbot])
138
  with gr.Column(scale=2):
139
  im_out=gr.Image(label="Image")
140
 
141
+ sub_b = submit_b.click(generate, [msg,chatbot],[chatbot])
142
+ sub_im = submit_b.click(run, [msg,chatbot,model_drop],[chatbot,im_out])
143
  sub_e = msg.submit(run, [msg, chatbot,model_drop], [chatbot,im_out])
144
  stop_b.click(None,None,None, cancels=[sub_b,sub_e])
145
  app.launch()