johann22 committed on
Commit
430b704
·
verified ·
1 Parent(s): 38f5734

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +22 -21
app.py CHANGED
@@ -184,7 +184,7 @@ def compress_data(c, instruct, history, seed):
184
  return out
185
 
186
 
187
- def compress_data_og(c, instruct, history, seed):
188
  #seed=random.randint(1,1000000000)
189
 
190
  print (c)
@@ -210,7 +210,7 @@ def compress_data_og(c, instruct, history, seed):
210
 
211
  resp = run_gpt(
212
  COMPRESS_DATA_PROMPT,
213
- stop_tokens=["observation:", "task:", "action:", "thought:"],
214
  max_tokens=16000,
215
  seed=seed,
216
  direction=instruct,
@@ -220,9 +220,9 @@ def compress_data_og(c, instruct, history, seed):
220
 
221
  new_history = resp
222
  print (resp)
223
- out+=resp
224
  e=e+chunk
225
- s=s+chunk
226
 
227
  print ("final" + resp)
228
  #history = "observation: {}\n".format(resp)
@@ -230,7 +230,7 @@ def compress_data_og(c, instruct, history, seed):
230
 
231
 
232
 
233
- def summarize(inp,history,mem_check,seed=None,data=None):
234
  if seed==None or seed=="":
235
  seed=random.randint(1,1000000000)
236
  seed=int(seed)
@@ -246,20 +246,20 @@ def summarize(inp,history,mem_check,seed=None,data=None):
246
 
247
  if data != "Error" and data != "" and data != None:
248
  print(inp)
249
- out = str(data)
250
- rl = len(out)
251
- print(f'rl:: {rl}')
252
- c=1
253
- for i in str(out):
254
- #print(f'i:: {i}')
255
- if i == " " or i=="," or i=="\n" or i.isalpha()==True or i.isnumeric()==True:
256
- c +=1
257
- print (f'c:: {c}')
258
- json_out = compress_data(c,inp,out,seed)
259
- history = [(inp,"Generating Report...")]
260
- yield "", history,error_box,json_out
261
 
262
- out = str(json_out)
263
  print (out)
264
  rl = len(out)
265
  print(f'rl:: {rl}')
@@ -268,7 +268,7 @@ def summarize(inp,history,mem_check,seed=None,data=None):
268
  if i == " " or i=="," or i=="\n" or i.isalpha()==True or i.isnumeric()==True:
269
  c +=1
270
  print (f'c2:: {c}')
271
- rawp = compress_data_og(c,inp,out,seed)
272
  history.clear()
273
  history.append((inp,rawp))
274
 
@@ -302,7 +302,8 @@ with gr.Blocks() as app:
302
  with gr.Column(scale=1):
303
  mem_check=gr.Checkbox(label="Memory", value=False)
304
  button=gr.Button()
305
-
 
306
  #models_dd=gr.Dropdown(choices=[m for m in return_list],interactive=True)
307
  with gr.Row():
308
  stop_button=gr.Button("Stop")
@@ -319,6 +320,6 @@ with gr.Blocks() as app:
319
  #trans_btn.click(transcribe,[vid,lang,sz],trans)
320
  clear_btn.click(clear_fn,None,[prompt,chatbot])
321
  #go=button.click(transcribe,[vid,lang,sz],[trans,chatbot]).then(summarize,[prompt,chatbot,mem_check,trans],[prompt,chatbot,e_box,json_out])
322
- go=button.click(summarize,[prompt,chatbot,mem_check,blank_text,trans],[prompt,chatbot,e_box,json_out])
323
  stop_button.click(None,None,None,cancels=[go])
324
  app.queue(default_concurrency_limit=20).launch(show_api=True, show_error=True)
 
184
  return out
185
 
186
 
187
+ def compress_data_og(c, instruct, history, seed, MAX_DATA=MAX_DATA):
188
  #seed=random.randint(1,1000000000)
189
 
190
  print (c)
 
210
 
211
  resp = run_gpt(
212
  COMPRESS_DATA_PROMPT,
213
+ stop_tokens=[],
214
  max_tokens=16000,
215
  seed=seed,
216
  direction=instruct,
 
220
 
221
  new_history = resp
222
  print (resp)
223
+ #out+=resp
224
  e=e+chunk
225
+ s=s+chunk-1000
226
 
227
  print ("final" + resp)
228
  #history = "observation: {}\n".format(resp)
 
230
 
231
 
232
 
233
+ def summarize(inp,history,mem_check,seed=None,data=None,MAX_DATA=MAX_DATA):
234
  if seed==None or seed=="":
235
  seed=random.randint(1,1000000000)
236
  seed=int(seed)
 
246
 
247
  if data != "Error" and data != "" and data != None:
248
  print(inp)
249
+ #out = str(data)
250
+ #rl = len(out)
251
+ #print(f'rl:: {rl}')
252
+ #c=1
253
+ #for i in str(out):
254
+ # #print(f'i:: {i}')
255
+ # if i == " " or i=="," or i=="\n" or i.isalpha()==True or i.isnumeric()==True:
256
+ # c +=1
257
+ #print (f'c:: {c}')
258
+ #json_out = compress_data(c,inp,out,seed)
259
+ #history = [(inp,"Generating Report...")]
260
+ #yield "", history,error_box,json_out
261
 
262
+ out = str(data)
263
  print (out)
264
  rl = len(out)
265
  print(f'rl:: {rl}')
 
268
  if i == " " or i=="," or i=="\n" or i.isalpha()==True or i.isnumeric()==True:
269
  c +=1
270
  print (f'c2:: {c}')
271
+ rawp = compress_data_og(c,inp,out,seed,MAX_DATA)
272
  history.clear()
273
  history.append((inp,rawp))
274
 
 
302
  with gr.Column(scale=1):
303
  mem_check=gr.Checkbox(label="Memory", value=False)
304
  button=gr.Button()
305
+ with gr.Row():
306
+ out_slider=gr.Slider(minimum=1000, maximum=100000, step=1, value=MAX_DATA)
307
  #models_dd=gr.Dropdown(choices=[m for m in return_list],interactive=True)
308
  with gr.Row():
309
  stop_button=gr.Button("Stop")
 
320
  #trans_btn.click(transcribe,[vid,lang,sz],trans)
321
  clear_btn.click(clear_fn,None,[prompt,chatbot])
322
  #go=button.click(transcribe,[vid,lang,sz],[trans,chatbot]).then(summarize,[prompt,chatbot,mem_check,trans],[prompt,chatbot,e_box,json_out])
323
+ go=button.click(summarize,[prompt,chatbot,mem_check,blank_text,trans,out_slider],[prompt,chatbot,e_box,json_out])
324
  stop_button.click(None,None,None,cancels=[go])
325
  app.queue(default_concurrency_limit=20).launch(show_api=True, show_error=True)