Spaces:
Sleeping
Sleeping
UI improvements: metric refs, company details, remove status labels
Browse files
- Add M01/M02 metric reference format in data table
- Add Company Details section (sector, industry, HQ, employees)
- Capitalize acronyms (GDP, VIX, P/B, P/S, EPS, etc.)
- Remove status labels (REJECTED, Acceptable, Board-ready, PASSED)
- Use human-readable metric names in data quality notes
Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
- frontend/src/App.tsx +3 -5
- frontend/src/components/MCPDataPanel.tsx +38 -6
- src/graph_cyclic.py +1 -4
- src/nodes/analyzer.py +96 -23
- src/nodes/critic.py +3 -3
frontend/src/App.tsx
CHANGED
|
@@ -478,11 +478,9 @@ Generated by Instant SWOT Agent`
|
|
| 478 |
}
|
| 479 |
|
| 480 |
const getScoreBadge = (score: number) => {
|
| 481 |
-
if (score >=
|
| 482 |
-
return { label: "
|
| 483 |
-
|
| 484 |
-
return { label: "Acceptable", variant: "secondary" as const, icon: AlertCircle }
|
| 485 |
-
return { label: "Needs Review", variant: "destructive" as const, icon: XCircle }
|
| 486 |
}
|
| 487 |
|
| 488 |
const handleStockClear = () => {
|
|
|
|
| 478 |
}
|
| 479 |
|
| 480 |
const getScoreBadge = (score: number) => {
|
| 481 |
+
if (score >= 6)
|
| 482 |
+
return { label: "", variant: "default" as const, icon: CheckCircle }
|
| 483 |
+
return { label: "", variant: "destructive" as const, icon: XCircle }
|
|
|
|
|
|
|
| 484 |
}
|
| 485 |
|
| 486 |
const handleStockClear = () => {
|
frontend/src/components/MCPDataPanel.tsx
CHANGED
|
@@ -58,26 +58,58 @@ const METRIC_LABELS: Record<string, string> = {
|
|
| 58 |
vxn: 'VXN',
|
| 59 |
beta: 'Beta',
|
| 60 |
historical_volatility: 'Historical Volatility',
|
|
|
|
| 61 |
implied_volatility: 'Implied Volatility',
|
| 62 |
|
| 63 |
// Macro
|
| 64 |
gdp_growth: 'GDP Growth',
|
|
|
|
| 65 |
interest_rate: 'Interest Rate',
|
| 66 |
cpi_inflation: 'CPI Inflation',
|
|
|
|
| 67 |
unemployment: 'Unemployment',
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 68 |
}
|
| 69 |
|
|
|
|
|
|
|
|
|
|
| 70 |
// Convert snake_case metric name to human-readable label
|
| 71 |
function formatMetricName(metric: string): string {
|
| 72 |
-
// Check
|
|
|
|
|
|
|
|
|
|
|
|
|
| 73 |
if (METRIC_LABELS[metric]) {
|
| 74 |
return METRIC_LABELS[metric]
|
| 75 |
}
|
| 76 |
|
| 77 |
-
// Fallback: convert snake_case to Title Case
|
| 78 |
return metric
|
| 79 |
-
.split(
|
| 80 |
-
.map(word =>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 81 |
.join(' ')
|
| 82 |
}
|
| 83 |
|
|
@@ -552,7 +584,7 @@ export function MCPDataPanel({ metrics, rawData, companyName, ticker, exchange,
|
|
| 552 |
<table className="text-xs">
|
| 553 |
<thead className="bg-muted/30">
|
| 554 |
<tr>
|
| 555 |
-
<th className="px-3 py-1.5 text-left font-medium text-muted-foreground">
|
| 556 |
<th className="px-3 py-1.5 text-left font-medium text-muted-foreground">Metric</th>
|
| 557 |
<th className="px-3 py-1.5 text-right font-medium text-muted-foreground">Value</th>
|
| 558 |
<th className="px-3 py-1.5 text-left font-medium text-muted-foreground">Data Type</th>
|
|
@@ -564,7 +596,7 @@ export function MCPDataPanel({ metrics, rawData, companyName, ticker, exchange,
|
|
| 564 |
<tbody className="divide-y divide-border">
|
| 565 |
{quantitativeRows.map((row, idx) => (
|
| 566 |
<tr key={idx} className="hover:bg-muted/20">
|
| 567 |
-
<td className="px-3 py-1.5 text-muted-foreground">{idx + 1}</td>
|
| 568 |
<td className="px-3 py-1.5">{formatMetricName(row.metric)}</td>
|
| 569 |
<td className="px-3 py-1.5 text-right font-medium">{row.value}</td>
|
| 570 |
<td className="px-3 py-1.5 text-muted-foreground">{row.dataType}</td>
|
|
|
|
| 58 |
vxn: 'VXN',
|
| 59 |
beta: 'Beta',
|
| 60 |
historical_volatility: 'Historical Volatility',
|
| 61 |
+
hist_vol: 'Historical Volatility',
|
| 62 |
implied_volatility: 'Implied Volatility',
|
| 63 |
|
| 64 |
// Macro
|
| 65 |
gdp_growth: 'GDP Growth',
|
| 66 |
+
gdp: 'GDP',
|
| 67 |
interest_rate: 'Interest Rate',
|
| 68 |
cpi_inflation: 'CPI Inflation',
|
| 69 |
+
inflation: 'Inflation',
|
| 70 |
unemployment: 'Unemployment',
|
| 71 |
+
|
| 72 |
+
// Common variations with / or shorthand
|
| 73 |
+
'p/e': 'P/E',
|
| 74 |
+
'p/b': 'P/B',
|
| 75 |
+
'p/s': 'P/S',
|
| 76 |
+
'ev/ebitda': 'EV/EBITDA',
|
| 77 |
+
'ev/revenue': 'EV/Revenue',
|
| 78 |
+
pe: 'P/E',
|
| 79 |
+
pb: 'P/B',
|
| 80 |
+
ps: 'P/S',
|
| 81 |
+
net_margin: 'Net Margin',
|
| 82 |
}
|
| 83 |
|
| 84 |
+
// Acronyms that should stay uppercase
|
| 85 |
+
const ACRONYMS = new Set(['gdp', 'cpi', 'vix', 'vxn', 'pe', 'pb', 'ps', 'ev', 'eps', 'fcf', 'rd', 'ebitda', 'cik', 'ttm', 'fy'])
|
| 86 |
+
|
| 87 |
// Convert snake_case metric name to human-readable label
|
| 88 |
function formatMetricName(metric: string): string {
|
| 89 |
+
// Check lowercase version for case-insensitive matching
|
| 90 |
+
const lowerMetric = metric.toLowerCase()
|
| 91 |
+
if (METRIC_LABELS[lowerMetric]) {
|
| 92 |
+
return METRIC_LABELS[lowerMetric]
|
| 93 |
+
}
|
| 94 |
if (METRIC_LABELS[metric]) {
|
| 95 |
return METRIC_LABELS[metric]
|
| 96 |
}
|
| 97 |
|
| 98 |
+
// Fallback: convert snake_case to Title Case with acronym handling
|
| 99 |
return metric
|
| 100 |
+
.split(/[_\s]+/)
|
| 101 |
+
.map(word => {
|
| 102 |
+
const lower = word.toLowerCase()
|
| 103 |
+
// Keep acronyms uppercase
|
| 104 |
+
if (ACRONYMS.has(lower)) {
|
| 105 |
+
return lower.toUpperCase()
|
| 106 |
+
}
|
| 107 |
+
// Handle P/B, P/E style (already has /)
|
| 108 |
+
if (word.includes('/')) {
|
| 109 |
+
return word.toUpperCase()
|
| 110 |
+
}
|
| 111 |
+
return word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()
|
| 112 |
+
})
|
| 113 |
.join(' ')
|
| 114 |
}
|
| 115 |
|
|
|
|
| 584 |
<table className="text-xs">
|
| 585 |
<thead className="bg-muted/30">
|
| 586 |
<tr>
|
| 587 |
+
<th className="px-3 py-1.5 text-left font-medium text-muted-foreground">Ref</th>
|
| 588 |
<th className="px-3 py-1.5 text-left font-medium text-muted-foreground">Metric</th>
|
| 589 |
<th className="px-3 py-1.5 text-right font-medium text-muted-foreground">Value</th>
|
| 590 |
<th className="px-3 py-1.5 text-left font-medium text-muted-foreground">Data Type</th>
|
|
|
|
| 596 |
<tbody className="divide-y divide-border">
|
| 597 |
{quantitativeRows.map((row, idx) => (
|
| 598 |
<tr key={idx} className="hover:bg-muted/20">
|
| 599 |
+
<td className="px-3 py-1.5 text-muted-foreground">M{String(idx + 1).padStart(2, '0')}</td>
|
| 600 |
<td className="px-3 py-1.5">{formatMetricName(row.metric)}</td>
|
| 601 |
<td className="px-3 py-1.5 text-right font-medium">{row.value}</td>
|
| 602 |
<td className="px-3 py-1.5 text-muted-foreground">{row.dataType}</td>
|
src/graph_cyclic.py
CHANGED
|
@@ -115,7 +115,4 @@ if __name__ == "__main__":
|
|
| 115 |
print(f" - Tracing: Enhanced LangSmith traces available")
|
| 116 |
|
| 117 |
# Quality assessment
|
| 118 |
-
|
| 119 |
-
print(f" - Quality Assessment: ✅ PASSED ({final_score}/10)")
|
| 120 |
-
else:
|
| 121 |
-
print(f" - Quality Assessment: ⚠️ ACCEPTABLE ({final_score} - max revisions reached)")
|
|
|
|
| 115 |
print(f" - Tracing: Enhanced LangSmith traces available")
|
| 116 |
|
| 117 |
# Quality assessment
|
| 118 |
+
print(f" - Quality Score: {final_score}/10")
|
|
|
|
|
|
|
|
|
src/nodes/analyzer.py
CHANGED
|
@@ -149,29 +149,56 @@ def _is_financial_institution(sector: str, industry: str, ticker: str) -> bool:
|
|
| 149 |
|
| 150 |
|
| 151 |
def _extract_company_profile(raw_data: str) -> dict:
|
| 152 |
-
"""Extract
|
| 153 |
try:
|
| 154 |
data = json.loads(raw_data)
|
| 155 |
except json.JSONDecodeError:
|
| 156 |
return {}
|
| 157 |
|
| 158 |
multi_source = data.get("multi_source", {})
|
| 159 |
-
|
| 160 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 161 |
yf_val = multi_source.get("valuation_all", {}).get("yahoo_finance", {}).get("data", {})
|
| 162 |
-
|
| 163 |
-
|
| 164 |
-
if
|
| 165 |
-
|
| 166 |
-
|
| 167 |
-
|
| 168 |
-
|
| 169 |
-
|
| 170 |
-
|
| 171 |
-
|
| 172 |
-
"
|
| 173 |
-
|
| 174 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 175 |
|
| 176 |
|
| 177 |
def _add_activity_log(workflow_id, progress_store, step, message):
|
|
@@ -308,6 +335,32 @@ def _generate_data_report(raw_data: str, is_financial: bool = False) -> str:
|
|
| 308 |
lines.append(f"# Data Report: {company} ({ticker})")
|
| 309 |
lines.append("")
|
| 310 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 311 |
# ========== FINANCIALS ==========
|
| 312 |
fin_all = multi_source.get("fundamentals_all", {})
|
| 313 |
sec_data = fin_all.get("sec_edgar", {}).get("data", {})
|
|
@@ -1099,14 +1152,33 @@ def _format_reference_log(metric_lookup: dict) -> str:
|
|
| 1099 |
return ", ".join(parts)
|
| 1100 |
|
| 1101 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1102 |
def _generate_data_quality_notes(metric_reference: dict) -> dict:
|
| 1103 |
"""
|
| 1104 |
Generate deterministic data quality assessment from metric reference.
|
| 1105 |
|
| 1106 |
Returns:
|
| 1107 |
{
|
| 1108 |
-
"high_confidence": ["
|
| 1109 |
-
"gaps_or_stale": ["
|
| 1110 |
}
|
| 1111 |
"""
|
| 1112 |
from datetime import datetime, timedelta
|
|
@@ -1118,22 +1190,23 @@ def _generate_data_quality_notes(metric_reference: dict) -> dict:
|
|
| 1118 |
|
| 1119 |
for ref_id, entry in metric_reference.items():
|
| 1120 |
key = entry.get("key", "unknown")
|
|
|
|
| 1121 |
raw_value = entry.get("raw_value")
|
| 1122 |
as_of_date = entry.get("as_of_date")
|
| 1123 |
|
| 1124 |
if raw_value is None:
|
| 1125 |
-
gaps_or_stale.append(f"{
|
| 1126 |
elif as_of_date:
|
| 1127 |
try:
|
| 1128 |
date = datetime.strptime(as_of_date, "%Y-%m-%d")
|
| 1129 |
if today - date > threshold:
|
| 1130 |
-
gaps_or_stale.append(f"{
|
| 1131 |
else:
|
| 1132 |
-
high_confidence.append(
|
| 1133 |
except ValueError:
|
| 1134 |
-
high_confidence.append(
|
| 1135 |
else:
|
| 1136 |
-
high_confidence.append(
|
| 1137 |
|
| 1138 |
return {
|
| 1139 |
"high_confidence": high_confidence,
|
|
|
|
| 149 |
|
| 150 |
|
| 151 |
def _extract_company_profile(raw_data: str) -> dict:
|
| 152 |
+
"""Extract company profile details from SEC EDGAR and Yahoo Finance data."""
|
| 153 |
try:
|
| 154 |
data = json.loads(raw_data)
|
| 155 |
except json.JSONDecodeError:
|
| 156 |
return {}
|
| 157 |
|
| 158 |
multi_source = data.get("multi_source", {})
|
| 159 |
+
profile = {}
|
| 160 |
+
|
| 161 |
+
# Try SEC EDGAR for business address (most authoritative)
|
| 162 |
+
sec_data = multi_source.get("fundamentals_all", {}).get("sec_edgar", {}).get("data", {})
|
| 163 |
+
sec_profile = sec_data.get("company_info", {}) or sec_data.get("profile", {})
|
| 164 |
+
|
| 165 |
+
if sec_profile:
|
| 166 |
+
# SEC EDGAR company info
|
| 167 |
+
city = sec_profile.get("city", "")
|
| 168 |
+
state = sec_profile.get("state", sec_profile.get("stateOrCountry", ""))
|
| 169 |
+
if city and state:
|
| 170 |
+
profile["business_address"] = f"{city}, {state}"
|
| 171 |
+
profile["cik"] = sec_profile.get("cik", "")
|
| 172 |
+
profile["sic"] = sec_profile.get("sic", "")
|
| 173 |
+
profile["sic_description"] = sec_profile.get("sicDescription", "")
|
| 174 |
+
|
| 175 |
+
# Try Yahoo Finance for sector/industry and other details
|
| 176 |
yf_val = multi_source.get("valuation_all", {}).get("yahoo_finance", {}).get("data", {})
|
| 177 |
+
yf_profile = yf_val.get("profile", {})
|
| 178 |
+
|
| 179 |
+
if not yf_profile:
|
| 180 |
+
yf_fund = multi_source.get("fundamentals_all", {}).get("yahoo_finance", {}).get("data", {})
|
| 181 |
+
yf_profile = yf_fund.get("profile", {})
|
| 182 |
+
|
| 183 |
+
if yf_profile:
|
| 184 |
+
profile["sector"] = yf_profile.get("sector", "")
|
| 185 |
+
profile["industry"] = yf_profile.get("industry", "")
|
| 186 |
+
profile["employees"] = yf_profile.get("fullTimeEmployees", "")
|
| 187 |
+
profile["website"] = yf_profile.get("website", "")
|
| 188 |
+
# Yahoo Finance may also have address
|
| 189 |
+
if not profile.get("business_address"):
|
| 190 |
+
city = yf_profile.get("city", "")
|
| 191 |
+
state = yf_profile.get("state", "")
|
| 192 |
+
country = yf_profile.get("country", "")
|
| 193 |
+
if city:
|
| 194 |
+
addr_parts = [city]
|
| 195 |
+
if state:
|
| 196 |
+
addr_parts.append(state)
|
| 197 |
+
if country and country != "United States":
|
| 198 |
+
addr_parts.append(country)
|
| 199 |
+
profile["business_address"] = ", ".join(addr_parts)
|
| 200 |
+
|
| 201 |
+
return profile
|
| 202 |
|
| 203 |
|
| 204 |
def _add_activity_log(workflow_id, progress_store, step, message):
|
|
|
|
| 335 |
lines.append(f"# Data Report: {company} ({ticker})")
|
| 336 |
lines.append("")
|
| 337 |
|
| 338 |
+
# ========== COMPANY DETAILS ==========
|
| 339 |
+
company_profile = _extract_company_profile(raw_data)
|
| 340 |
+
if company_profile:
|
| 341 |
+
lines.append("## Company Details")
|
| 342 |
+
lines.append("")
|
| 343 |
+
lines.append("| Field | Value |")
|
| 344 |
+
lines.append("|-------|-------|")
|
| 345 |
+
if company_profile.get("sector"):
|
| 346 |
+
lines.append(f"| Sector | {company_profile['sector']} |")
|
| 347 |
+
if company_profile.get("industry"):
|
| 348 |
+
lines.append(f"| Industry | {company_profile['industry']} |")
|
| 349 |
+
if company_profile.get("business_address"):
|
| 350 |
+
lines.append(f"| Headquarters | {company_profile['business_address']} |")
|
| 351 |
+
if company_profile.get("employees"):
|
| 352 |
+
employees = company_profile['employees']
|
| 353 |
+
if isinstance(employees, int):
|
| 354 |
+
employees = f"{employees:,}"
|
| 355 |
+
lines.append(f"| Employees | {employees} |")
|
| 356 |
+
if company_profile.get("website"):
|
| 357 |
+
lines.append(f"| Website | {company_profile['website']} |")
|
| 358 |
+
if company_profile.get("cik"):
|
| 359 |
+
lines.append(f"| CIK | {company_profile['cik']} |")
|
| 360 |
+
if company_profile.get("sic_description"):
|
| 361 |
+
lines.append(f"| SIC | {company_profile['sic_description']} |")
|
| 362 |
+
lines.append("")
|
| 363 |
+
|
| 364 |
# ========== FINANCIALS ==========
|
| 365 |
fin_all = multi_source.get("fundamentals_all", {})
|
| 366 |
sec_data = fin_all.get("sec_edgar", {}).get("data", {})
|
|
|
|
| 1152 |
return ", ".join(parts)
|
| 1153 |
|
| 1154 |
|
| 1155 |
+
def _format_metric_key(key: str) -> str:
|
| 1156 |
+
"""Format metric key to human-readable name (e.g., pb_ratio -> P/B Ratio)."""
|
| 1157 |
+
METRIC_NAMES = {
|
| 1158 |
+
"revenue": "Revenue", "net_income": "Net Income", "net_margin": "Net Margin",
|
| 1159 |
+
"net_margin_pct": "Net Margin", "gross_margin": "Gross Margin", "operating_margin": "Operating Margin",
|
| 1160 |
+
"free_cash_flow": "Free Cash Flow", "operating_cash_flow": "Operating Cash Flow",
|
| 1161 |
+
"total_assets": "Total Assets", "total_liabilities": "Total Liabilities",
|
| 1162 |
+
"stockholders_equity": "Stockholders' Equity", "debt_to_equity": "Debt/Equity",
|
| 1163 |
+
"eps": "EPS", "market_cap": "Market Cap", "enterprise_value": "Enterprise Value",
|
| 1164 |
+
"trailing_pe": "P/E (Trailing)", "forward_pe": "P/E (Forward)",
|
| 1165 |
+
"pb_ratio": "P/B Ratio", "ps_ratio": "P/S Ratio", "trailing_peg": "PEG Ratio",
|
| 1166 |
+
"price_to_fcf": "Price/FCF", "ev_ebitda": "EV/EBITDA", "ev_revenue": "EV/Revenue",
|
| 1167 |
+
"vix": "VIX", "beta": "Beta", "historical_volatility": "Historical Volatility",
|
| 1168 |
+
"gdp_growth": "GDP Growth", "interest_rate": "Interest Rate",
|
| 1169 |
+
"cpi_inflation": "Inflation", "unemployment": "Unemployment",
|
| 1170 |
+
}
|
| 1171 |
+
return METRIC_NAMES.get(key, key.replace("_", " ").title())
|
| 1172 |
+
|
| 1173 |
+
|
| 1174 |
def _generate_data_quality_notes(metric_reference: dict) -> dict:
|
| 1175 |
"""
|
| 1176 |
Generate deterministic data quality assessment from metric reference.
|
| 1177 |
|
| 1178 |
Returns:
|
| 1179 |
{
|
| 1180 |
+
"high_confidence": ["Revenue", "Net Margin", ...],
|
| 1181 |
+
"gaps_or_stale": ["EPS (stale: 2024-06-30)", "Debt/Equity (missing)"],
|
| 1182 |
}
|
| 1183 |
"""
|
| 1184 |
from datetime import datetime, timedelta
|
|
|
|
| 1190 |
|
| 1191 |
for ref_id, entry in metric_reference.items():
|
| 1192 |
key = entry.get("key", "unknown")
|
| 1193 |
+
display_name = _format_metric_key(key)
|
| 1194 |
raw_value = entry.get("raw_value")
|
| 1195 |
as_of_date = entry.get("as_of_date")
|
| 1196 |
|
| 1197 |
if raw_value is None:
|
| 1198 |
+
gaps_or_stale.append(f"{display_name} (missing)")
|
| 1199 |
elif as_of_date:
|
| 1200 |
try:
|
| 1201 |
date = datetime.strptime(as_of_date, "%Y-%m-%d")
|
| 1202 |
if today - date > threshold:
|
| 1203 |
+
gaps_or_stale.append(f"{display_name} (stale: {as_of_date})")
|
| 1204 |
else:
|
| 1205 |
+
high_confidence.append(display_name)
|
| 1206 |
except ValueError:
|
| 1207 |
+
high_confidence.append(display_name)
|
| 1208 |
else:
|
| 1209 |
+
high_confidence.append(display_name)
|
| 1210 |
|
| 1211 |
return {
|
| 1212 |
"high_confidence": high_confidence,
|
src/nodes/critic.py
CHANGED
|
@@ -495,11 +495,11 @@ def critic_node(state, workflow_id=None, progress_store=None):
|
|
| 495 |
|
| 496 |
# Log status and score
|
| 497 |
if status == "APPROVED":
|
| 498 |
-
score_msg = f"
|
| 499 |
elif status == "ESCALATE":
|
| 500 |
-
score_msg = f"
|
| 501 |
else:
|
| 502 |
-
score_msg = f"
|
| 503 |
_add_activity_log(workflow_id, progress_store, "critic", score_msg)
|
| 504 |
|
| 505 |
# Build critique message
|
|
|
|
| 495 |
|
| 496 |
# Log status and score
|
| 497 |
if status == "APPROVED":
|
| 498 |
+
score_msg = f"Score: {weighted_score:.1f}/10"
|
| 499 |
elif status == "ESCALATE":
|
| 500 |
+
score_msg = f"Score: {weighted_score:.1f}/10"
|
| 501 |
else:
|
| 502 |
+
score_msg = f"Score: {weighted_score:.1f}/10"
|
| 503 |
_add_activity_log(workflow_id, progress_store, "critic", score_msg)
|
| 504 |
|
| 505 |
# Build critique message
|