oiisa committed on
Commit
ddc18bd
·
verified ·
1 Parent(s): fbd0369

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +23 -11
app.py CHANGED
@@ -124,17 +124,29 @@ if submitted:
124
  input_text = make_query(description, question)
125
  try:
126
  input_ids = tokenizer.encode(input_text, return_tensors="pt")
127
- with st.spinner("Генерация запроса..."):
128
- outputs = model.generate(
129
- input_ids,
130
- max_length=200,
131
- num_beams=5,
132
- early_stopping=True,
133
- pad_token_id=tokenizer.eos_token_id,
134
- )
135
- generated_sql = tokenizer.decode(outputs[0], skip_special_tokens=True)
136
- st.subheader("Результат:")
137
- st.code(generated_sql, language="sql")
 
 
 
 
 
 
 
 
 
 
 
 
138
  except Exception as e:
139
  st.error(f"Ошибка при генерации: {str(e)}")
140
  else:
 
124
  input_text = make_query(description, question)
125
  try:
126
  input_ids = tokenizer.encode(input_text, return_tensors="pt")
127
+
128
+ animation_placeholder = st.empty()
129
+
130
+ for frame in ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]:
131
+ animation_placeholder.markdown(f"`{frame}` Подготовка к генерации...")
132
+ time.sleep(0.1)
133
+
134
+ animation_placeholder.markdown("`⏳` Генерация SQL-запроса...")
135
+ outputs = model.generate(
136
+ input_ids,
137
+ max_length=200,
138
+ num_beams=5,
139
+ top_p=0.95,
140
+ early_stopping=True,
141
+ pad_token_id=tokenizer.eos_token_id,
142
+ )
143
+
144
+ animation_placeholder.empty()
145
+
146
+ generated_sql = tokenizer.decode(outputs[0], skip_special_tokens=True)
147
+ st.subheader("Результат:")
148
+ st.code(generated_sql, language="sql")
149
+
150
  except Exception as e:
151
  st.error(f"Ошибка при генерации: {str(e)}")
152
  else: