mphi committed on
Commit
9869f73
·
verified ·
1 Parent(s): c069d29

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -87,7 +87,7 @@ def run_inference(text, from_lang, to_lang, mode):
87
  ds = LazyTokenizingInferenceDataset([entry], tokenizer, prompt_format)
88
  tok = ds[0]
89
  output = llm_generate(model, tokenizer, tok, debug=False, max_len=512)
90
- return str(entry) + "///" + output[0]
91
 
92
  with gr.Blocks() as demo:
93
  text_input = gr.Textbox(label="Text", lines=6, placeholder="Enter text...")
 
87
  ds = LazyTokenizingInferenceDataset([entry], tokenizer, prompt_format)
88
  tok = ds[0]
89
  output = llm_generate(model, tokenizer, tok, debug=False, max_len=512)
90
+ return output[0]
91
 
92
  with gr.Blocks() as demo:
93
  text_input = gr.Textbox(label="Text", lines=6, placeholder="Enter text...")