Guiyom committed on
Commit
820e2d4
·
verified ·
1 Parent(s): cc476de

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -6
app.py CHANGED
@@ -81,7 +81,7 @@ def replace_graph_placeholders(report_html: str, context: str, initial_query: st
81
  logging.info(f"Generating graph {placeholder_num}")
82
 
83
  try:
84
- visual_html = generate_visual_snippet(instructions, context, initial_query, crumbs)
85
  # Add error boundary and logging
86
  return f'<!-- Graph {placeholder_num} Start -->\n{visual_html}\n<!-- Graph {placeholder_num} End -->'
87
  except Exception as e:
@@ -597,18 +597,25 @@ Proceed."""
597
  try:
598
  response = openai_call(prompt=prompt, model="gpt-4o-mini", max_tokens_param=max_tokens, temperature=temperature)
599
  res_text = response.strip()
 
 
 
600
  # Remove Markdown code fences if present
601
  if res_text.startswith("```"):
602
  res_text = re.sub(r"^```(json)?", "", res_text)
603
  res_text = re.sub(r"```$", "", res_text).strip()
604
- if not res_text:
605
- logging.error("analyze_with_gpt4o: Empty response received from API.")
606
- return {"relevant": "no", "summary": "", "followups": []}
 
607
  try:
608
  result = json.loads(res_text)
609
  except json.JSONDecodeError as je:
610
  logging.error(f"analyze_with_gpt4o: JSON decode error: {je}. Raw response: '{res_text}'")
611
  return {"relevant": "no", "summary": "", "followups": []}
 
 
 
612
  logging.info(f"analyze_with_gpt4o: snippet analysis result: {result}")
613
  return result
614
  except Exception as e:
@@ -1513,8 +1520,8 @@ def iterative_deep_research_gen(initial_query: str, reportstyle: str, breadth: i
1513
  process_log += f"Error retrieving content from {url}: {e}\n"
1514
  continue
1515
  # Skip processing if raw_content is empty or too short (< 1000 characters)
1516
- if not raw_content or len(raw_content) < 1000:
1517
- process_log += f"Content from {url} is too short (<1000 characters), skipping.\n"
1518
  continue
1519
 
1520
  # 1) Clean and do minimal parse
 
81
  logging.info(f"Generating graph {placeholder_num}")
82
 
83
  try:
84
+ visual_html = generate_graph_snippet(instructions, context, initial_query, crumbs)
85
  # Add error boundary and logging
86
  return f'<!-- Graph {placeholder_num} Start -->\n{visual_html}\n<!-- Graph {placeholder_num} End -->'
87
  except Exception as e:
 
597
  try:
598
  response = openai_call(prompt=prompt, model="gpt-4o-mini", max_tokens_param=max_tokens, temperature=temperature)
599
  res_text = response.strip()
600
+ if not res_text:
601
+ logging.error("analyze_with_gpt4o: Empty response received from API.")
602
+ return {"relevant": "no", "summary": "", "followups": []}
603
  # Remove Markdown code fences if present
604
  if res_text.startswith("```"):
605
  res_text = re.sub(r"^```(json)?", "", res_text)
606
  res_text = re.sub(r"```$", "", res_text).strip()
607
+ res_text = res_text.strip().strip("```").strip()
608
+ # Optionally remove any start/end markers like "json" if present:
609
+ if res_text.lower().startswith("json"):
610
+ res_text = res_text[4:].strip()
611
  try:
612
  result = json.loads(res_text)
613
  except json.JSONDecodeError as je:
614
  logging.error(f"analyze_with_gpt4o: JSON decode error: {je}. Raw response: '{res_text}'")
615
  return {"relevant": "no", "summary": "", "followups": []}
616
+ except json.JSONDecodeError as je:
617
+ logging.error(f"analyze_with_gpt4o: JSON decode error: {je}. Raw response: '{res_text}'")
618
+ return {"relevant": "no", "summary": "", "followups": []}
619
  logging.info(f"analyze_with_gpt4o: snippet analysis result: {result}")
620
  return result
621
  except Exception as e:
 
1520
  process_log += f"Error retrieving content from {url}: {e}\n"
1521
  continue
1522
  # Skip processing if raw_content is empty or too short (< 1000 characters)
1523
+ if not raw_content or len(raw_content) < 1000 or "could not be extracted" in raw_content.lower() or "error" in raw_content.lower():
1524
+ process_log += f"Content from {url} is either an error or too short (<1000 characters), skipping.\n"
1525
  continue
1526
 
1527
  # 1) Clean and do minimal parse