Claude committed on
Commit
a2ded15
·
1 Parent(s): 1c21844

fix: Add graceful fallback synthesis when LLM API fails (401/403)

Browse files
Files changed (1) hide show
  1. app.py +36 -20
app.py CHANGED
@@ -138,33 +138,49 @@ def call_perspective(perspective_name: str, query: str) -> str:
138
 
139
def generate_synthesis(perspectives_responses: Dict[str, str], query: str) -> str:
    """Generate a unified synthesis of all perspective responses.

    Prefers an LLM-generated synthesis when a client is configured; if the
    LLM is unavailable or the API call fails (e.g. 401/403 auth errors),
    falls back to a built-in extractive summary so the caller always gets
    a usable result instead of an error string.

    Args:
        perspectives_responses: Mapping of perspective name -> response text.
        query: The user's original query (truncated to 100 chars for the prompt).

    Returns:
        A markdown-formatted synthesis string.
    """
    # Try LLM synthesis first if available.
    if HAS_LLM:
        perspective_text = "\n\n".join(
            f"**{name.upper()}**: {response[:200]}"
            for name, response in perspectives_responses.items()
        )

        synthesis_prompt = f"""Synthesize these perspective responses on: {query[:100]}

{perspective_text}

Brief unified insight:"""

        try:
            messages = [
                {"role": "system", "content": "Synthesize multi-perspective responses concisely."},
                {"role": "user", "content": synthesis_prompt},
            ]
            response = client.chat_completion(
                messages,
                max_tokens=256,
                temperature=0.7,
            )
            return response.choices[0].message.content
        except Exception as e:
            # Fall through to the built-in synthesis below instead of
            # surfacing an error string to the user.
            print(f"LLM synthesis failed ({e.__class__.__name__}), using built-in synthesis")

    # Built-in synthesis from perspective responses (no LLM required):
    # one bullet per perspective, using the first sentence of each
    # response capped at 120 chars.
    summaries = []
    for name, response in perspectives_responses.items():
        summary = response.split('.')[0][:120].strip()
        if summary:
            summaries.append(f"• **{name.title()}**: {summary}")

    synthesis = "🔀 **Multi-Perspective Synthesis**\n\n" + "\n".join(summaries)

    # Pad very short output so the result never looks empty.
    if len(synthesis) < 100:
        synthesis += f"\n\n_Query unified across {len(perspectives_responses)} perspectives._"

    return synthesis
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
168
 
169
 
170
  # ================================================================
 
138
 
139
def generate_synthesis(perspectives_responses: Dict[str, str], query: str) -> str:
    """Generate synthesis from all perspective responses.

    Attempts an LLM synthesis when one is configured; on failure (or when
    no LLM is available) produces a built-in bullet summary from the
    perspective responses themselves.
    """
    # Try LLM synthesis first if available.
    if HAS_LLM:
        joined = "\n\n".join(
            f"**{label.upper()}**: {text[:200]}"
            for label, text in perspectives_responses.items()
        )
        prompt = (
            f"Synthesize these perspective responses on: {query[:100]}"
            f"\n\n{joined}\n\nBrief unified insight:"
        )
        try:
            reply = client.chat_completion(
                [
                    {"role": "system", "content": "Synthesize multi-perspective responses concisely."},
                    {"role": "user", "content": prompt},
                ],
                max_tokens=256,
                temperature=0.7,
            )
            return reply.choices[0].message.content
        except Exception as err:
            # Fall through to built-in synthesis below
            print(f"LLM synthesis failed ({err.__class__.__name__}), using built-in synthesis")

    # Built-in synthesis from perspective responses (no LLM required):
    # first sentence of each response, capped at 120 chars, one bullet each.
    bullet_lines = [
        f"• **{label.title()}**: {lead}"
        for label, text in perspectives_responses.items()
        if (lead := text.split('.')[0][:120].strip())
    ]

    result = "🔀 **Multi-Perspective Synthesis**\n\n" + "\n".join(bullet_lines)

    # Append a short footer when the summary came out nearly empty.
    if len(result) < 100:
        result += f"\n\n_Query unified across {len(perspectives_responses)} perspectives._"

    return result
184
 
185
 
186
  # ================================================================