vithacocf committed on
Commit
6649bd1
·
verified ·
1 Parent(s): ea3ea23

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +27 -4
app.py CHANGED
@@ -198,16 +198,39 @@ def extract_pdf_note(file_path: str) -> str:
198
  return ""
199
 
200
def call_gemini_with_prompt(content_text: str, note_text: str, question: str, model_choice: str, temperature: float, top_p: float):
    """Send the extracted table plus notes to Gemini and return its text reply."""
    # Resolve the API key from the environment, falling back to the module default.
    api_key = os.environ.get("GOOGLE_API_KEY", DEFAULT_API_KEY)
    genai.configure(api_key=api_key)

    # Map the UI model choice onto a concrete Gemini model name.
    gemini = genai.GenerativeModel(
        model_name=INTERNAL_MODEL_MAP.get(model_choice, "gemini-2.5-flash"),
        generation_config={"temperature": temperature, "top_p": top_p},
    )

    # Assemble the prompt: schema instructions, CSV payload, notes, then the
    # optional user question (blank when no question was given).
    sections = [
        PROMPT_FREIGHT_JSON,
        f"Below is the extracted CSV data:\n{content_text}",
        f"Below are the notes:\n{note_text}",
        question or "",
    ]
    prompt = "\n\n".join(sections)

    reply = gemini.generate_content(prompt)
    # Some response objects may not expose .text; fall back to str().
    return getattr(reply, "text", str(reply))
 
 
 
211
 
212
  # ================== MAIN ROUTER ==================
213
  def run_process(file, question, model_choice, temperature, top_p, external_api_url):
 
198
  return ""
199
 
200
def call_gemini_with_prompt(content_text: str, note_text: str, question: str, model_choice: str, temperature: float, top_p: float):
    """Send the extracted table + notes to Gemini (a custom user question, if given, takes priority over the standard prompt).

    Args:
        content_text: CSV data extracted from the uploaded file.
        note_text: Notes extracted from the PDF (e.g. Valid From, Origin, Remark,
            Package Type rules).
        question: Optional user question; when empty/blank, the standard
            PROMPT_FREIGHT_JSON prompt is used instead.
        model_choice: UI model selection, mapped through INTERNAL_MODEL_MAP
            (defaults to "gemini-2.5-flash" for unknown choices).
        temperature: Sampling temperature forwarded to Gemini.
        top_p: Nucleus-sampling parameter forwarded to Gemini.

    Returns:
        The model's text response, or the stringified response object when it
        lacks a ``.text`` attribute.
    """
    api_key = os.environ.get("GOOGLE_API_KEY", DEFAULT_API_KEY)
    genai.configure(api_key=api_key)

    model = genai.GenerativeModel(
        model_name=INTERNAL_MODEL_MAP.get(model_choice, "gemini-2.5-flash"),
        generation_config={
            # Coerce defensively — assumes the UI layer may pass these as
            # strings; TODO confirm caller types.
            "temperature": float(temperature),
            "top_p": float(top_p),
        },
    )

    # If the user entered no question of their own, fall back to the standard
    # FREIGHT_JSON prompt. `(question or "")` also guards against None, and the
    # single strip() replaces the original double evaluation.
    base_prompt = (question or "").strip() or PROMPT_FREIGHT_JSON

    prompt = f"""
{base_prompt}

Below is the extracted CSV data:
{content_text}

Below are the notes extracted from the PDF (e.g. Valid From, Origin, Remark, Package Type rules):
{note_text}

Please analyze all data and generate the JSON output following the schema above.
"""

    print("🧠 Sending prompt to Gemini...")
    response = model.generate_content(prompt)
    # Some response objects may not expose .text; fall back to str().
    return getattr(response, "text", str(response))
233
+
234
 
235
  # ================== MAIN ROUTER ==================
236
  def run_process(file, question, model_choice, temperature, top_p, external_api_url):