neovalle committed on
Commit
db17601
·
verified ·
1 Parent(s): 66771c3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -7
app.py CHANGED
@@ -9,7 +9,6 @@ import pandas as pd
9
 
10
  # Small, free chat models that run on CPU in a basic Space (pick one if you like)
11
  DEFAULT_MODELS = [
12
- "google/gemma-2-2b-it",
13
  "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
14
  "Qwen/Qwen2.5-1.5B-Instruct",
15
  ]
@@ -176,6 +175,7 @@ with gr.Blocks(title="Multi-Prompt Chat (System Prompt Control)") as demo:
176
  wrap=True,
177
  interactive=False,
178
  row_count=(0, "dynamic"),
 
179
  )
180
  out_file = gr.File(label="CSV file", visible=False)
181
 
@@ -193,14 +193,9 @@ with gr.Blocks(title="Multi-Prompt Chat (System Prompt Control)") as demo:
193
  return df
194
 
195
  def _download(df):
196
- # Gradio passes a dict-like table; normalise to DataFrame
197
- if isinstance(df, list):
198
- df = pd.DataFrame(df, columns=["user_prompt", "response", "tokens_out"])
199
- else:
200
- df = pd.DataFrame(df)
201
  path = to_csv(df)
202
  return gr.File.update(value=path, visible=True)
203
-
204
  run_btn.click(
205
  _generate,
206
  inputs=[model_id, system_prompt, prompts_multiline, max_new_tokens, temperature, top_p, top_k, repetition_penalty],
 
9
 
10
  # Small, free chat models that run on CPU in a basic Space (pick one if you like)
11
  DEFAULT_MODELS = [
 
12
  "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
13
  "Qwen/Qwen2.5-1.5B-Instruct",
14
  ]
 
175
  wrap=True,
176
  interactive=False,
177
  row_count=(0, "dynamic"),
178
+ type="pandas",
179
  )
180
  out_file = gr.File(label="CSV file", visible=False)
181
 
 
193
  return df
194
 
195
  def _download(df):
 
 
 
 
 
196
  path = to_csv(df)
197
  return gr.File.update(value=path, visible=True)
198
+
199
  run_btn.click(
200
  _generate,
201
  inputs=[model_id, system_prompt, prompts_multiline, max_new_tokens, temperature, top_p, top_k, repetition_penalty],