vn6295337 Claude Opus 4.5 committed on
Commit
50c0d03
·
1 Parent(s): 332e21c

Refactor: Source-keyed output format across all MCP servers

Browse files

- MCPs now emit {source: {data: ...}} format directly
- Simplify mcp_client.py normalizers to pass-through
- Update news-basket to source-keyed structure (tavily, nyt, newsapi)
- Update data schema docs to reflect new output format
- Streamline fundamentals orchestrator

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

docs/finnhub_data_schema.md CHANGED
@@ -1,37 +1,20 @@
1
- Finnhub Data Schema
2
- ===================
3
-
4
- Endpoint: https://finnhub.io/api/v1/company-news
5
- Method: GET
6
-
7
-
8
- Request Parameters
9
-
10
- | field | type | description |
11
- |--------|--------|-------------------------|
12
- | symbol | string | Stock ticker |
13
- | from | string | Start date (YYYY-MM-DD) |
14
- | to | string | End date (YYYY-MM-DD) |
15
- | token | string | API key |
16
-
17
-
18
- Response (array of articles)
19
-
20
- | field | type | description |
21
- |----------|--------|------------------|
22
- | headline | string | Article headline |
23
- | summary | string | Article summary |
24
- | url | string | Article URL |
25
- | source | string | Publisher name |
26
- | datetime | int | Unix timestamp |
27
-
28
-
29
- Example Result
30
-
31
- | field | value |
32
- |----------|--------------------------------------|
33
- | headline | "Apple Reports Strong Q4 Earnings" |
34
- | summary | "Apple Inc reported quarterly..." |
35
- | url | "https://bloomberg.com/apple-q4..." |
36
- | source | "Bloomberg" |
37
- | datetime | 1736416200 |
 
1
+ ## Company News Endpoint
2
+ `GET https://finnhub.io/api/v1/company-news`
3
+
4
+ ## Request Parameters
5
+ ```
6
+ symbol
7
+ from (YYYY-MM-DD)
8
+ to (YYYY-MM-DD)
9
+ token
10
+ ```
11
+
12
+ ## Response Structure
13
+ ```
14
+ []
15
+ headline
16
+ summary
17
+ url
18
+ source
19
+ datetime (Unix timestamp)
20
+ ```
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
docs/fred_data_schema.md CHANGED
@@ -1,110 +1,55 @@
1
- FRED Data Schema
2
- ================
3
-
4
- Endpoint: https://api.stlouisfed.org/fred/series/observations
5
-
6
-
7
- Raw API Response Structure
8
- --------------------------
9
-
10
- Series Info (seriess[0])
11
-
12
- | field | description |
13
- |---------------------|----------------------------------------|
14
- | id | Series identifier |
15
- | title | Series title |
16
- | units | Data units |
17
- | frequency | Update frequency (Daily, Monthly, etc) |
18
- | seasonal_adjustment | Adjustment type (SA, NSA) |
19
- | last_updated | Last update timestamp |
20
-
21
- Observation (observations[])
22
-
23
- | field | description |
24
- |----------------|-------------------------|
25
- | realtime_start | Real-time period start |
26
- | realtime_end | Real-time period end |
27
- | date | Observation date |
28
- | value | Data value (string) |
29
-
30
-
31
- Series Data
32
- -----------
33
-
34
- GDP Growth (A191RL1Q225SBEA)
35
-
36
- | field | value |
37
- |--------------|--------------------------------------|
38
- | series_id | A191RL1Q225SBEA |
39
- | title | Real Gross Domestic Product |
40
- | units | Percent Change from Preceding Period |
41
- | frequency | Quarterly |
42
- | date | 2025-07-01 |
43
- | value | 4.3 |
44
- | last_updated | 2025-12-23 07:54:34 |
45
-
46
- Interest Rate (FEDFUNDS)
47
-
48
- | field | value |
49
- |--------------|------------------------------|
50
- | series_id | FEDFUNDS |
51
- | title | Federal Funds Effective Rate |
52
- | units | Percent |
53
- | frequency | Monthly |
54
- | date | 2025-12-01 |
55
- | value | 3.72 |
56
- | last_updated | 2026-01-02 15:18:33 |
57
-
58
- CPI (CPIAUCSL)
59
-
60
- | field | value |
61
- |--------------|-----------------------------------------------|
62
- | series_id | CPIAUCSL |
63
- | title | Consumer Price Index for All Urban Consumers |
64
- | units | Index 1982-1984=100 |
65
- | frequency | Monthly |
66
- | date | 2025-11-01 |
67
- | value | 325.031 |
68
- | last_updated | 2025-12-18 08:03:48 |
69
-
70
- Unemployment (UNRATE)
71
-
72
- | field | value |
73
- |--------------|---------------------|
74
- | series_id | UNRATE |
75
- | title | Unemployment Rate |
76
- | units | Percent |
77
- | frequency | Monthly |
78
- | date | 2025-12-01 |
79
- | value | 4.4 |
80
- | last_updated | 2026-01-09 08:10:37 |
81
-
82
- VIX (VIXCLS)
83
-
84
- | field | value |
85
- |--------------|----------------------------|
86
- | series_id | VIXCLS |
87
- | title | CBOE Volatility Index: VIX |
88
- | units | Index |
89
- | frequency | Daily, Close |
90
- | date | 2026-01-08 |
91
- | value | 15.45 |
92
- | last_updated | 2026-01-09 08:37:39 |
93
-
94
- VXN (VXNCLS)
95
-
96
- | field | value |
97
- |--------------|----------------------------------|
98
- | series_id | VXNCLS |
99
- | title | CBOE NASDAQ 100 Volatility Index |
100
- | units | Index |
101
- | frequency | Daily, Close |
102
- | date | 2026-01-08 |
103
- | value | 20.15 |
104
- | last_updated | 2026-01-09 08:37:34 |
105
-
106
-
107
- Time Categories
108
- ---------------
109
- - Macro indicators (GDP, Interest Rate, CPI, Unemployment): date field is observation date
110
- - Volatility indices (VIX, VXN): date field is market close date
 
1
+ ## Base URL
2
+ `https://api.stlouisfed.org/fred`
3
+
4
+ ## Series Info Endpoint
5
+ `GET /series`
6
+
7
+ ### Query Parameters
8
+ ```
9
+ series_id
10
+ api_key
11
+ file_type (json)
12
+ ```
13
+
14
+ ### Response Structure
15
+ ```
16
+ seriess[]
17
+ title
18
+ units
19
+ frequency
20
+ ```
21
+
22
+ ## Series Observations Endpoint
23
+ `GET /series/observations`
24
+
25
+ ### Query Parameters
26
+ ```
27
+ series_id
28
+ api_key
29
+ file_type (json)
30
+ sort_order (desc)
31
+ limit
32
+ ```
33
+
34
+ ### Response Structure
35
+ ```
36
+ observations[]
37
+ date
38
+ value
39
+ ```
40
+
41
+ ## Series IDs Used
42
+
43
+ ### Macro Basket
44
+ ```
45
+ A191RL1Q225SBEA (GDP growth rate)
46
+ FEDFUNDS (Federal Funds Rate)
47
+ CPIAUCSL (Consumer Price Index)
48
+ FPCPITOTLZGUSA (Inflation rate)
49
+ UNRATE (Unemployment Rate)
50
+ ```
51
+
52
+ ### Volatility Basket
53
+ ```
54
+ VIXCLS (VIX Index)
55
+ ```
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
docs/mcp_data_structure.md ADDED
@@ -0,0 +1,144 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # MCP Data Structures
2
+
3
+ Output schemas for all MCP basket servers.
4
+
5
+ ---
6
+
7
+ ## fundamentals-basket
8
+
9
+ ```python
10
+ "sec_edgar": {
11
+ "revenue": {"value": 123456000, "end_date": "2024-09-30", "data_type": "USD", "fiscal_year": 2024, "form": "10-K"},
12
+ "net_income": {"value": ..., "end_date": ..., ...},
13
+ "gross_profit": {...},
14
+ "operating_income": {...},
15
+ "gross_margin_pct": {...},
16
+ "operating_margin_pct": {...},
17
+ "net_margin_pct": {...},
18
+ "eps_basic": {...},
19
+ "eps_diluted": {...},
20
+ "total_assets": {...},
21
+ "total_liabilities": {...},
22
+ "stockholders_equity": {...},
23
+ "long_term_debt": {...},
24
+ "short_term_debt": {...},
25
+ "total_debt": {...},
26
+ "cash": {...},
27
+ "net_debt": {...},
28
+ "debt_to_equity": {...},
29
+ "operating_cash_flow": {...},
30
+ "capital_expenditure": {...},
31
+ "free_cash_flow": {...},
32
+ "company_info": {
33
+ "name": "Apple Inc.",
34
+ "cik": "0000320193",
35
+ "sic": "3571",
36
+ "sic_description": "Electronic Computers",
37
+ "sector": "Technology",
38
+ "industry": "Consumer Electronics"
39
+ }
40
+ },
41
+ "yahoo_finance": {
42
+ "market_cap": {"value": 3000000000000, "as_of": "2024-10-31"},
43
+ "enterprise_value": {...},
44
+ "shares_outstanding": {...},
45
+ "float_shares": {...},
46
+ "held_by_insiders_pct": {...},
47
+ "held_by_institutions_pct": {...}
48
+ }
49
+ ```
50
+
51
+ **Notes:**
52
+ - SEC Edgar metrics vary by sector (banks have different fields than tech companies)
53
+ - Only non-null values are emitted (sparse representation)
54
+
55
+ ---
56
+
57
+ ## valuation-basket
58
+
59
+ ```python
60
+ "yahoo_finance": {
61
+ "current_price": {"value": 175.50, "as_of": "2024-10-31"},
62
+ "trailing_pe": {"value": 28.5, "as_of": "2024-10-31"},
63
+ "forward_pe": {...},
64
+ "peg_ratio": {...},
65
+ "price_to_book": {...},
66
+ "price_to_sales": {...},
67
+ "dividend_yield": {...},
68
+ "52_week_high": {...},
69
+ "52_week_low": {...}
70
+ },
71
+ "alpha_vantage": {
72
+ "ev_ebitda": {"value": 22.3, "as_of": "2024-10-31"}
73
+ }
74
+ ```
75
+
76
+ ---
77
+
78
+ ## volatility-basket
79
+
80
+ ```python
81
+ "fred": {
82
+ "vix": {"value": 18.5, "data_type": "Daily", "as_of": "2024-10-31"},
83
+ "vxn": {"value": 22.1, "data_type": "Daily", "as_of": "2024-10-31"}
84
+ },
85
+ "yahoo_finance": {
86
+ "beta": {"value": 1.25, "data_type": "1Y", "as_of": "2024-10-31"},
87
+ "historical_volatility": {"value": 0.32, "data_type": "1Y", "as_of": "2024-10-31"},
88
+ "implied_volatility": {"value": 0.28, "as_of": "2024-10-31"}
89
+ }
90
+ ```
91
+
92
+ ---
93
+
94
+ ## macro-basket
95
+
96
+ ```python
97
+ "bea": {
98
+ "gdp_growth": {"value": 2.8, "period": "Q3 2024", "as_of": "2024-10-31"}
99
+ },
100
+ "bls": {
101
+ "unemployment_rate": {"value": 3.8, "period": "Oct 2024", "as_of": "2024-10-31"},
102
+ "cpi_yoy": {"value": 3.2, "period": "Oct 2024", "as_of": "2024-10-31"},
103
+ "nonfarm_payrolls": {...}
104
+ },
105
+ "fred": {
106
+ "fed_funds_rate": {"value": 5.33, "as_of": "2024-10-31"},
107
+ "treasury_10y": {"value": 4.25, "as_of": "2024-10-31"},
108
+ "treasury_2y": {...},
109
+ "yield_curve_spread": {...}
110
+ }
111
+ ```
112
+
113
+ ---
114
+
115
+ ## news-basket
116
+
117
+ ```python
118
+ "tavily": [
119
+ {"title": "...", "url": "...", "content": "...", "published_date": "2024-10-31"}
120
+ ],
121
+ "nyt": [
122
+ {"title": "...", "url": "...", "content": "...", "published_date": "2024-10-31"}
123
+ ],
124
+ "newsapi": [
125
+ {"title": "...", "url": "...", "content": "...", "published_date": "2024-10-30"}
126
+ ]
127
+ ```
128
+
129
+ **Date field:** `published_date` = actual article publication date (YYYY-MM-DD)
130
+
131
+ ---
132
+
133
+ ## sentiment-basket
134
+
135
+ ```python
136
+ "finnhub": [
137
+ {"title": "...", "url": "...", "content": "...", "published_date": "2024-10-31"}
138
+ ],
139
+ "reddit": [
140
+ {"title": "...", "url": "...", "content": "...", "published_date": "2024-10-30"}
141
+ ]
142
+ ```
143
+
144
+ **Date field:** `published_date` = article/post creation date (YYYY-MM-DD)
docs/newsapi_data_schema.md CHANGED
@@ -1,41 +1,26 @@
1
- NewsAPI Data Schema
2
- ===================
3
-
4
- Endpoint: https://newsapi.org/v2/everything
5
- Method: GET
6
-
7
- Note: Free tier has 24-hour delay on articles
8
-
9
-
10
- Request Parameters
11
-
12
- | field | type | description |
13
- |----------|--------|------------------------------------------|
14
- | apiKey | string | API key |
15
- | q | string | Search query |
16
- | sortBy | string | "publishedAt", "relevancy", "popularity" |
17
- | language | string | Language code (e.g., "en") |
18
- | pageSize | int | Results per page (max 100) |
19
-
20
-
21
- Response (articles[])
22
-
23
- | field | type | description |
24
- |-------------|--------|-----------------------------|
25
- | title | string | Article title |
26
- | url | string | Article URL |
27
- | description | string | Article description |
28
- | content | string | Article content (truncated) |
29
- | publishedAt | string | ISO date |
30
- | source.name | string | Publisher name |
31
-
32
-
33
- Example Result
34
-
35
- | field | value |
36
- |-------------|----------------------------------------|
37
- | title | "Apple Announces New Product Line" |
38
- | url | "https://techcrunch.com/apple-new..." |
39
- | description | "Apple unveiled its latest products..."|
40
- | publishedAt | "2025-01-08T10:15:00Z" |
41
- | source.name | "TechCrunch" |
 
1
+ ## Endpoint
2
+ `GET https://newsapi.org/v2/everything`
3
+
4
+ ## Request Parameters
5
+ ```
6
+ apiKey
7
+ q
8
+ sortBy (publishedAt, relevancy, popularity)
9
+ language
10
+ pageSize (1-100)
11
+ domains
12
+ ```
13
+
14
+ ## Response Structure
15
+ ```
16
+ status
17
+ totalResults
18
+ articles[]
19
+ title
20
+ url
21
+ description
22
+ content
23
+ publishedAt
24
+ source
25
+ name
26
+ ```
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
docs/nyt_data_schema.md CHANGED
@@ -1,40 +1,28 @@
1
- NYT Article Search Data Schema
2
- ==============================
3
-
4
- Endpoint: https://api.nytimes.com/svc/search/v2/articlesearch.json
5
- Method: GET
6
-
7
-
8
- Request Parameters
9
-
10
- | field | type | description |
11
- |------------|--------|---------------------------------|
12
- | api-key | string | API key |
13
- | q | string | Search query |
14
- | sort | string | "newest", "oldest", "relevance" |
15
- | begin_date | string | YYYYMMDD format |
16
- | end_date | string | YYYYMMDD format |
17
- | page | int | Pagination (0-indexed) |
18
-
19
-
20
- Response (response.docs[])
21
-
22
- | field | type | description |
23
- |----------------|--------|------------------|
24
- | headline.main | string | Article headline |
25
- | web_url | string | Article URL |
26
- | snippet | string | Article snippet |
27
- | lead_paragraph | string | First paragraph |
28
- | pub_date | string | ISO date |
29
- | section_name | string | NYT section |
30
-
31
-
32
- Example Result
33
-
34
- | field | value |
35
- |----------------|----------------------------------------|
36
- | headline.main | "Apple Stock Surges on Earnings" |
37
- | web_url | "https://nytimes.com/2025/01/apple..." |
38
- | snippet | "Apple shares climbed on strong..." |
39
- | pub_date | "2025-01-09T15:30:00Z" |
40
- | section_name | "Business" |
 
1
+ ## Endpoint
2
+ `GET https://api.nytimes.com/svc/search/v2/articlesearch.json`
3
+
4
+ ## Request Parameters
5
+ ```
6
+ api-key
7
+ q
8
+ sort (newest, oldest, relevance)
9
+ page
10
+ begin_date (YYYYMMDD)
11
+ end_date (YYYYMMDD)
12
+ fq (filter query, e.g., news_desk filter)
13
+ ```
14
+
15
+ ## Response Structure
16
+ ```
17
+ response
18
+ meta
19
+ hits
20
+ docs[]
21
+ headline
22
+ main
23
+ web_url
24
+ snippet
25
+ lead_paragraph
26
+ pub_date
27
+ section_name
28
+ ```
 
 
 
 
 
 
 
 
 
 
 
 
docs/reddit_data_schema.md CHANGED
@@ -1,39 +1,34 @@
1
- Reddit Data Schema
2
- ==================
3
-
4
- Endpoint: https://www.reddit.com/r/{subreddit}/search.json
5
- Method: GET
6
- Subreddits: wallstreetbets, stocks
7
-
8
-
9
- Request Parameters
10
-
11
- | field | type | description |
12
- | ----------- | ------ | ---------------------------- |
13
- | q | string | Search query (ticker) |
14
- | sort | string | "relevance", "new", etc. |
15
- | t | string | Time filter ("week") |
16
- | limit | int | Max results |
17
- | restrict_sr | string | "true" to limit to subreddit |
18
-
19
-
20
- Response (data.children[].data)
21
-
22
- | field | type | description |
23
- |-------------|--------|------------------|
24
- | title | string | Post title |
25
- | selftext | string | Post body text |
26
- | ups | int | Upvote count |
27
- | permalink | string | Reddit permalink |
28
- | created_utc | int | Unix timestamp |
29
-
30
-
31
- Example Result
32
-
33
- | field | value |
34
- |-------------|-------------------------------------------|
35
- | title | "AAPL earnings crush - bullish long term" |
36
- | selftext | "Just saw the Q4 numbers and..." |
37
- | ups | 2450 |
38
- | permalink | "/r/stocks/comments/abc123/..." |
39
- | created_utc | 1736351400 |
 
1
+ ## Search Endpoint
2
+ `GET https://www.reddit.com/r/{subreddit}/search.json`
3
+
4
+ ## Request Parameters
5
+ ```
6
+ q
7
+ sort (relevance, top, new, comments)
8
+ t (week, month, all)
9
+ limit (1-100)
10
+ restrict_sr (true/false)
11
+ ```
12
+
13
+ ## Request Headers
14
+ ```
15
+ User-Agent (required)
16
+ ```
17
+
18
+ ## Response Structure
19
+ ```
20
+ data
21
+ children[]
22
+ data
23
+ title
24
+ selftext
25
+ permalink
26
+ ups
27
+ created_utc
28
+ ```
29
+
30
+ ## Subreddits Searched
31
+ ```
32
+ wallstreetbets
33
+ stocks
34
+ ```
 
 
 
 
 
docs/tavily_data_schema.md CHANGED
@@ -1,40 +1,26 @@
1
- Tavily Data Schema
2
- ==================
3
-
4
- Endpoint: https://api.tavily.com/search
5
- Method: POST
6
-
7
-
8
- Request Parameters
9
-
10
- | field | type | description |
11
- |-----------------|--------|-----------------------------|
12
- | api_key | string | API key |
13
- | query | string | Search query |
14
- | search_depth | string | "basic" or "advanced" |
15
- | max_results | int | 1-10 results |
16
- | include_answer | bool | Include AI-generated answer |
17
- | include_domains | array | Limit to specific domains |
18
- | exclude_domains | array | Exclude specific domains |
19
-
20
-
21
- Response (results[])
22
-
23
- | field | type | description |
24
- |----------------|--------|-------------------------|
25
- | title | string | Article title |
26
- | url | string | Article URL |
27
- | content | string | Article snippet/content |
28
- | score | float | Relevance score (0-1) |
29
- | published_date | string | Publication date |
30
-
31
-
32
- Example Result
33
-
34
- | field | value |
35
- |----------------|------------------------------------------|
36
- | title | "Apple Q4 Earnings Beat Expectations" |
37
- | url | "https://example.com/apple-earnings" |
38
- | content | "Apple reported revenue of $119.6B..." |
39
- | score | 0.89 |
40
- | published_date | "2025-01-09" |
 
1
+ ## Endpoint
2
+ `POST https://api.tavily.com/search`
3
+
4
+ ## Request Fields
5
+ ```
6
+ api_key
7
+ query
8
+ search_depth (basic, advanced)
9
+ max_results (1-10)
10
+ include_answer
11
+ include_raw_content
12
+ include_domains[]
13
+ exclude_domains[]
14
+ days
15
+ ```
16
+
17
+ ## Response Structure
18
+ ```
19
+ answer
20
+ results[]
21
+ title
22
+ url
23
+ content
24
+ score
25
+ published_date
26
+ ```
 
 
 
 
 
 
 
 
 
 
 
 
 
 
docs/yahoo_data_schema.md CHANGED
@@ -1,87 +1,68 @@
1
- Yahoo Finance Data Schema
2
- =========================
3
-
4
- Example: AAPL (Apple Inc)
5
-
6
- Time Categories:
7
- - Market Time: Real-time price data (regularMarketTime)
8
- - Fiscal Time: Periodic accounting data (mostRecentQuarter, lastFiscalYearEnd)
9
-
10
-
11
- Company Info
12
-
13
- | field | value |
14
- | -------- | ------------------ |
15
- | longName | Apple Inc. |
16
- | address1 | One Apple Park Way |
17
- | city | Cupertino |
18
- | state | CA |
19
- | country | United States |
20
-
21
-
22
- Valuation (Market Time: regularMarketTime)
23
-
24
- | field | value |
25
- |---------------------|---------------|
26
- | regularMarketTime | 1767992401 |
27
- | marketCap | 3832542658560 |
28
- | enterpriseValue | 3889336156160 |
29
- | trailingPE | 34.721554 |
30
- | forwardPE | 28.341707 |
31
- | enterpriseToEbitda | 26.87 |
32
- | enterpriseToRevenue | 9.346 |
33
- | priceToBook | 51.967537 |
34
-
35
-
36
- Margins, Returns & Growth (Fiscal Time: mostRecentQuarter)
37
-
38
- | field | value |
39
- |-------------------------|------------|
40
- | mostRecentQuarter | 1758931200 |
41
- | grossMargins | 0.46905 |
42
- | ebitdaMargins | 0.34782 |
43
- | operatingMargins | 0.31647 |
44
- | returnOnEquity | 1.71422 |
45
- | returnOnAssets | 0.22964 |
46
- | revenueGrowth | 0.079 |
47
- | earningsQuarterlyGrowth | 0.864 |
48
-
49
-
50
- Earnings (Fiscal Time: lastFiscalYearEnd / earningsTimestamp)
51
-
52
- | field | value |
53
- |-------------------|------------|
54
- | lastFiscalYearEnd | 1758931200 |
55
- | trailingEps | 7.47 |
56
- | earningsTimestamp | 1769720400 |
57
- | forwardEps | 9.15153 |
58
-
59
-
60
- Cash Flow & Liquidity/Debt (Fiscal Time: mostRecentQuarter)
61
-
62
- | field | value |
63
- |-------------------|--------------|
64
- | mostRecentQuarter | 1758931200 |
65
- | freeCashflow | 78862254080 |
66
- | operatingCashflow | 111482003456 |
67
- | totalCash | 54697000960 |
68
- | currentRatio | 0.893 |
69
- | quickRatio | 0.771 |
70
- | debtToEquity | 152.411 |
71
- | totalDebt | 112377004032 |
72
-
73
-
74
- Risk (Market Time: regularMarketTime)
75
-
76
- | field | value |
77
- |-------------------|------------|
78
- | regularMarketTime | 1767992401 |
79
- | beta | 1.093 |
80
-
81
-
82
- Dividends (exDividendDate)
83
-
84
- | field | value |
85
- |----------------|------------|
86
- | exDividendDate | 1762732800 |
87
- | payoutRatio | 0.1367 |
 
1
+ ## yfinance Library (Ticker.info)
2
+
3
+ ### Valuation Fields
4
+ ```
5
+ currentPrice
6
+ regularMarketPrice
7
+ marketCap
8
+ enterpriseValue
9
+ trailingPE
10
+ forwardPE
11
+ priceToSalesTrailing12Months
12
+ priceToBook
13
+ enterpriseToEbitda
14
+ trailingPegRatio
15
+ earningsGrowth
16
+ revenueGrowth
17
+ ```
18
+ ### Fundamentals Fallback Fields
19
+ ```
20
+ totalRevenue
21
+ netIncomeToCommon
22
+ grossProfits
23
+ operatingIncome
24
+ ebitda
25
+ totalCash
26
+ totalDebt
27
+ freeCashflow
28
+ operatingCashflow
29
+ operatingMargins
30
+ profitMargins
31
+ debtToEquity
32
+ longName
33
+ shortName
34
+ sector
35
+ industry
36
+ ```
37
+ ## Chart API
38
+
39
+ ### Endpoint
40
+ `GET https://query1.finance.yahoo.com/v8/finance/chart/{ticker}`
41
+ ### Query Parameters
42
+ ```
43
+ interval (1d)
44
+ range (1y, 3mo, 5d, 1d)
45
+ ```
46
+ ### Response Structure
47
+ ```
48
+ chart
49
+ result[]
50
+ meta
51
+ regularMarketPrice
52
+ previousClose
53
+ indicators
54
+ quote[]
55
+ close[]
56
+ ```
57
+ ## Options API
58
+ ### Endpoint
59
+ `GET https://query1.finance.yahoo.com/v7/finance/options/{ticker}`
60
+ ### Response Structure
61
+ ```
62
+ optionChain
63
+ result[]
64
+ options[]
65
+ calls[]
66
+ strike
67
+ impliedVolatility
68
+ ```
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
mcp-servers/fundamentals-basket/models/schemas.py CHANGED
@@ -181,16 +181,12 @@ class ParsedFinancials:
181
  royalty_expense: Optional[TemporalMetric] = None
182
 
183
  def to_dict(self) -> dict:
184
- """Convert to dictionary for JSON serialization."""
185
- result = {
186
- "ticker": self.ticker,
187
- "source": self.source,
188
- "as_of": self.as_of,
189
- "sector": self.sector,
190
- }
191
 
192
- if self.sic_code:
193
- result["sic_code"] = self.sic_code
 
 
194
 
195
  # Add temporal metrics - universal fields
196
  for field_name in [
@@ -265,12 +261,8 @@ class DebtMetrics:
265
  as_of: str = field(default_factory=lambda: datetime.now().strftime("%Y-%m-%d"))
266
 
267
  def to_dict(self) -> dict:
268
- """Convert to dictionary for JSON serialization."""
269
- result = {
270
- "ticker": self.ticker,
271
- "source": self.source,
272
- "as_of": self.as_of,
273
- }
274
 
275
  for field_name in [
276
  "long_term_debt", "short_term_debt", "total_debt",
@@ -295,12 +287,8 @@ class CashFlowMetrics:
295
  as_of: str = field(default_factory=lambda: datetime.now().strftime("%Y-%m-%d"))
296
 
297
  def to_dict(self) -> dict:
298
- """Convert to dictionary for JSON serialization."""
299
- result = {
300
- "ticker": self.ticker,
301
- "source": self.source,
302
- "as_of": self.as_of,
303
- }
304
 
305
  for field_name in [
306
  "operating_cash_flow", "capital_expenditure",
 
181
  royalty_expense: Optional[TemporalMetric] = None
182
 
183
  def to_dict(self) -> dict:
184
+ """Convert to dictionary for JSON serialization.
 
 
 
 
 
 
185
 
186
+ Only emits metrics (no redundant metadata like ticker, source, sector).
187
+ Metadata is provided via company_info in the orchestrator.
188
+ """
189
+ result = {}
190
 
191
  # Add temporal metrics - universal fields
192
  for field_name in [
 
261
  as_of: str = field(default_factory=lambda: datetime.now().strftime("%Y-%m-%d"))
262
 
263
  def to_dict(self) -> dict:
264
+ """Convert to dictionary for JSON serialization. Only emits metrics."""
265
+ result = {}
 
 
 
 
266
 
267
  for field_name in [
268
  "long_term_debt", "short_term_debt", "total_debt",
 
287
  as_of: str = field(default_factory=lambda: datetime.now().strftime("%Y-%m-%d"))
288
 
289
  def to_dict(self) -> dict:
290
+ """Convert to dictionary for JSON serialization. Only emits metrics."""
291
+ result = {}
 
 
 
 
292
 
293
  for field_name in [
294
  "operating_cash_flow", "capital_expenditure",
mcp-servers/fundamentals-basket/services/orchestrator.py CHANGED
@@ -381,42 +381,27 @@ class OrchestratorService:
381
 
382
  sec_result, yahoo_result = await asyncio.gather(sec_task, yahoo_task)
383
 
384
- # Build normalized source_comparison schema
385
  sources = {}
386
- sec_failed = "error" in sec_result or not sec_result.get("data")
387
 
388
  # Add SEC EDGAR data if available
389
  if not sec_failed:
390
- sources["sec_edgar"] = {
391
- "source": sec_result.get("source"),
392
- "data": sec_result.get("data"),
393
- }
394
 
395
  # Add Yahoo Finance data
396
  if "error" not in yahoo_result:
397
  if sec_failed:
398
  # FALLBACK: Yahoo provides core + supplementary when SEC fails
399
  yahoo_data = await self._get_yahoo_fallback_data(ticker)
400
- if yahoo_data.get("data"):
401
  sources["yahoo_finance"] = yahoo_data
402
- elif yahoo_result.get("data"):
403
  # SUPPLEMENTARY: Only additional metrics
404
- sources["yahoo_finance"] = {
405
- "source": yahoo_result.get("source"),
406
- "data": yahoo_result.get("data"),
407
- }
408
-
409
- # Get company info for response (includes business_address)
410
- company_info = await self.get_company_info(ticker)
411
 
412
- return {
413
- "group": "source_comparison",
414
- "ticker": ticker,
415
- "company": company_info,
416
- "sources": sources,
417
- "source": "fundamentals-basket",
418
- "as_of": datetime.now().strftime("%Y-%m-%d"),
419
- }
420
 
421
  async def _get_sec_data_safe(self, ticker: str) -> Dict[str, Any]:
422
  """Get SEC data with error handling. Returns universal + industry-specific metrics."""
@@ -435,126 +420,30 @@ class OrchestratorService:
435
  sector = get_sector_from_sic(sic_code)
436
 
437
  financials = self.parser.parse_financials(facts, ticker, sector=sector, sic_code=sic_code)
 
 
438
 
439
- # Helper to convert TemporalMetric to dict (include all temporal fields)
440
- def to_metric_dict(tm):
441
- if tm is None:
442
- return None
443
- return {
444
- "value": tm.value,
445
- "end_date": tm.end_date,
446
- "data_type": tm.data_type,
447
- "fiscal_year": tm.fiscal_year,
448
- "form": tm.form,
449
- }
450
 
451
- # Universal metrics (works across all industries)
452
- data = {
453
- "revenue": to_metric_dict(financials.revenue),
454
- "net_income": to_metric_dict(financials.net_income),
455
- "net_margin_pct": to_metric_dict(financials.net_margin_pct),
456
- "total_assets": to_metric_dict(financials.total_assets),
457
- "total_liabilities": to_metric_dict(financials.total_liabilities),
458
- "stockholders_equity": to_metric_dict(financials.stockholders_equity),
459
- }
460
 
461
- # Add industry-specific metrics if available
462
- if sector == "INSURANCE":
463
- data.update({
464
- "premiums_earned": to_metric_dict(financials.premiums_earned),
465
- "claims_incurred": to_metric_dict(financials.claims_incurred),
466
- "underwriting_income": to_metric_dict(financials.underwriting_income),
467
- "investment_income": to_metric_dict(financials.investment_income),
468
- })
469
- elif sector == "BANKS":
470
- data.update({
471
- "net_interest_income": to_metric_dict(financials.net_interest_income),
472
- "provision_credit_losses": to_metric_dict(financials.provision_credit_losses),
473
- "noninterest_income": to_metric_dict(financials.noninterest_income),
474
- "deposits": to_metric_dict(financials.deposits),
475
- })
476
- elif sector == "REAL_ESTATE":
477
- data.update({
478
- "rental_revenue": to_metric_dict(financials.rental_revenue),
479
- "noi": to_metric_dict(financials.noi),
480
- "ffo": to_metric_dict(financials.ffo),
481
- })
482
- elif sector == "OIL_GAS":
483
- data.update({
484
- "oil_gas_revenue": to_metric_dict(financials.oil_gas_revenue),
485
- "production_expense": to_metric_dict(financials.production_expense),
486
- "depletion": to_metric_dict(financials.depletion),
487
- })
488
- elif sector == "UTILITIES":
489
- data.update({
490
- "electric_revenue": to_metric_dict(financials.electric_revenue),
491
- "gas_revenue": to_metric_dict(financials.gas_revenue),
492
- "fuel_cost": to_metric_dict(financials.fuel_cost),
493
- })
494
- elif sector == "TECHNOLOGY":
495
- data.update({
496
- "rd_expense": to_metric_dict(financials.rd_expense),
497
- "deferred_revenue": to_metric_dict(financials.deferred_revenue),
498
- "cost_of_revenue": to_metric_dict(financials.cost_of_revenue),
499
- "goodwill": to_metric_dict(financials.goodwill),
500
- })
501
- elif sector == "HEALTHCARE":
502
- data.update({
503
- "rd_expense": to_metric_dict(financials.rd_expense),
504
- "cost_of_revenue": to_metric_dict(financials.cost_of_revenue),
505
- "inventory": to_metric_dict(financials.inventory),
506
- "selling_general_admin": to_metric_dict(financials.selling_general_admin),
507
- })
508
- elif sector == "RETAIL":
509
- data.update({
510
- "cost_of_goods_sold": to_metric_dict(financials.cost_of_goods_sold),
511
- "inventory": to_metric_dict(financials.inventory),
512
- "selling_general_admin": to_metric_dict(financials.selling_general_admin),
513
- "depreciation": to_metric_dict(financials.depreciation),
514
- })
515
- elif sector == "FINANCIALS":
516
- data.update({
517
- "advisory_fees": to_metric_dict(financials.advisory_fees),
518
- "trading_revenue": to_metric_dict(financials.trading_revenue),
519
- "compensation_expense": to_metric_dict(financials.compensation_expense),
520
- "investment_income": to_metric_dict(financials.investment_income),
521
- })
522
- elif sector == "INDUSTRIALS":
523
- data.update({
524
- "cost_of_goods_sold": to_metric_dict(financials.cost_of_goods_sold),
525
- "inventory": to_metric_dict(financials.inventory),
526
- "backlog": to_metric_dict(financials.backlog),
527
- "capital_expenditure": to_metric_dict(financials.capital_expenditure),
528
- })
529
- elif sector == "TRANSPORTATION":
530
- data.update({
531
- "operating_revenue": to_metric_dict(financials.operating_revenue),
532
- "fuel_expense": to_metric_dict(financials.fuel_expense),
533
- "labor_expense": to_metric_dict(financials.labor_expense),
534
- "depreciation": to_metric_dict(financials.depreciation),
535
- })
536
- elif sector == "MATERIALS":
537
- data.update({
538
- "cost_of_goods_sold": to_metric_dict(financials.cost_of_goods_sold),
539
- "inventory": to_metric_dict(financials.inventory),
540
- "depreciation": to_metric_dict(financials.depreciation),
541
- "capital_expenditure": to_metric_dict(financials.capital_expenditure),
542
- })
543
- elif sector == "MINING":
544
- data.update({
545
- "mining_revenue": to_metric_dict(financials.mining_revenue),
546
- "cost_of_production": to_metric_dict(financials.cost_of_production),
547
- "depletion": to_metric_dict(financials.depletion),
548
- "exploration_expense": to_metric_dict(financials.exploration_expense),
549
- })
550
 
551
- return {
552
- "source": "SEC EDGAR XBRL",
553
- "as_of": datetime.now().strftime("%Y-%m-%d"),
554
- "sector": sector,
555
- "sic_code": sic_code,
556
- "data": data,
557
- }
558
 
559
  except Exception as e:
560
  logger.error(f"SEC data fetch failed for {ticker}: {e}")
@@ -563,84 +452,56 @@ class OrchestratorService:
563
  async def _get_yahoo_data_safe(self, ticker: str) -> Dict[str, Any]:
564
  """Get Yahoo data with error handling. Returns supplementary metrics only."""
565
  try:
566
- data = await self.fetcher.fetch_yfinance(ticker)
567
 
568
- if "error" in data:
569
- return {"error": data["error"], "source": "Yahoo Finance"}
570
 
571
- financials, debt, cash_flow = self.parser.parse_yfinance_data(data, ticker)
572
 
573
- # Helper to convert TemporalMetric to dict (include all temporal fields)
574
- def to_metric_dict(tm):
575
- if tm is None:
576
- return None
577
- return {
578
- "value": tm.value,
579
- "end_date": tm.end_date,
580
- "data_type": tm.data_type,
581
- "fiscal_year": tm.fiscal_year,
582
- "form": tm.form,
583
- }
584
 
585
- # Only supplementary metrics not in SEC EDGAR (avoid duplicates)
586
- return {
587
- "source": "Yahoo Finance",
588
- "as_of": datetime.now().strftime("%Y-%m-%d"),
589
- "data": {
590
- "operating_margin_pct": to_metric_dict(financials.operating_margin_pct),
591
- "total_debt": to_metric_dict(debt.total_debt) if hasattr(debt, 'total_debt') else None,
592
- "operating_cash_flow": to_metric_dict(cash_flow.operating_cash_flow) if hasattr(cash_flow, 'operating_cash_flow') else None,
593
- "free_cash_flow": to_metric_dict(cash_flow.free_cash_flow) if hasattr(cash_flow, 'free_cash_flow') else None,
594
- },
595
- }
596
 
597
  except Exception as e:
598
  logger.error(f"Yahoo data fetch failed for {ticker}: {e}")
599
- return {"error": str(e), "source": "Yahoo Finance"}
600
 
601
  async def _get_yahoo_fallback_data(self, ticker: str) -> Dict[str, Any]:
602
  """Get Yahoo data as fallback when SEC fails. Returns core + supplementary metrics."""
603
  try:
604
- data = await self.fetcher.fetch_yfinance(ticker)
605
 
606
- if "error" in data:
607
- return {"error": data["error"], "source": "Yahoo Finance"}
608
 
609
- financials, debt, cash_flow = self.parser.parse_yfinance_data(data, ticker)
610
 
611
- # Helper to convert TemporalMetric to dict (include all temporal fields)
612
- def to_metric_dict(tm):
613
- if tm is None:
614
- return None
615
- return {
616
- "value": tm.value,
617
- "end_date": tm.end_date,
618
- "data_type": tm.data_type,
619
- "fiscal_year": tm.fiscal_year,
620
- "form": tm.form,
621
- }
622
 
623
- # FALLBACK: Core metrics + supplementary metrics
624
- return {
625
- "source": "Yahoo Finance",
626
- "as_of": datetime.now().strftime("%Y-%m-%d"),
627
- "data": {
628
- # Core metrics (normally from SEC)
629
- "revenue": to_metric_dict(financials.revenue),
630
- "net_income": to_metric_dict(financials.net_income),
631
- "net_margin_pct": to_metric_dict(financials.net_margin_pct),
632
- "total_assets": to_metric_dict(debt.total_assets) if hasattr(debt, 'total_assets') else None,
633
- # Supplementary metrics
634
- "operating_margin_pct": to_metric_dict(financials.operating_margin_pct),
635
- "total_debt": to_metric_dict(debt.total_debt) if hasattr(debt, 'total_debt') else None,
636
- "operating_cash_flow": to_metric_dict(cash_flow.operating_cash_flow) if hasattr(cash_flow, 'operating_cash_flow') else None,
637
- "free_cash_flow": to_metric_dict(cash_flow.free_cash_flow) if hasattr(cash_flow, 'free_cash_flow') else None,
638
- },
639
- }
640
 
641
  except Exception as e:
642
  logger.error(f"Yahoo fallback fetch failed for {ticker}: {e}")
643
- return {"error": str(e), "source": "Yahoo Finance"}
644
 
645
  # =========================================================================
646
  # HELPER METHODS
 
381
 
382
  sec_result, yahoo_result = await asyncio.gather(sec_task, yahoo_task)
383
 
384
+ # Build flat source structure (no "data" wrapper)
385
  sources = {}
386
+ sec_failed = "error" in sec_result or not sec_result
387
 
388
  # Add SEC EDGAR data if available
389
  if not sec_failed:
390
+ sources["sec_edgar"] = sec_result
 
 
 
391
 
392
  # Add Yahoo Finance data
393
  if "error" not in yahoo_result:
394
  if sec_failed:
395
  # FALLBACK: Yahoo provides core + supplementary when SEC fails
396
  yahoo_data = await self._get_yahoo_fallback_data(ticker)
397
+ if yahoo_data and "error" not in yahoo_data:
398
  sources["yahoo_finance"] = yahoo_data
399
+ elif yahoo_result:
400
  # SUPPLEMENTARY: Only additional metrics
401
+ sources["yahoo_finance"] = yahoo_result
 
 
 
 
 
 
402
 
403
+ # Return flat {source: metrics} structure
404
+ return sources
 
 
 
 
 
 
405
 
406
  async def _get_sec_data_safe(self, ticker: str) -> Dict[str, Any]:
407
  """Get SEC data with error handling. Returns universal + industry-specific metrics."""
 
420
  sector = get_sector_from_sic(sic_code)
421
 
422
  financials = self.parser.parse_financials(facts, ticker, sector=sector, sic_code=sic_code)
423
+ debt = self.parser.parse_debt_metrics(facts, ticker)
424
+ cash_flow = self.parser.parse_cash_flow(facts, ticker)
425
 
426
+ # Use to_dict() to emit ALL metrics (universal + sector-specific)
427
+ # Only non-null values are included (sparse representation)
428
+ data = financials.to_dict()
 
 
 
 
 
 
 
 
429
 
430
+ # Merge debt metrics (debt_to_equity, total_debt, etc.)
431
+ debt_dict = debt.to_dict()
432
+ for key in ["long_term_debt", "short_term_debt", "total_debt", "cash", "net_debt", "debt_to_equity"]:
433
+ if key in debt_dict:
434
+ data[key] = debt_dict[key]
 
 
 
 
435
 
436
+ # Merge cash flow metrics (free_cash_flow, operating_cash_flow, etc.)
437
+ cf_dict = cash_flow.to_dict()
438
+ for key in ["operating_cash_flow", "capital_expenditure", "free_cash_flow"]:
439
+ if key in cf_dict:
440
+ data[key] = cf_dict[key]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
441
 
442
+ # Add company_info for sector detection by downstream analyzer
443
+ data["company_info"] = company_info
444
+
445
+ # Return metrics directly (no wrapper)
446
+ return data
 
 
447
 
448
  except Exception as e:
449
  logger.error(f"SEC data fetch failed for {ticker}: {e}")
 
452
  async def _get_yahoo_data_safe(self, ticker: str) -> Dict[str, Any]:
453
  """Get Yahoo data with error handling. Returns supplementary metrics only."""
454
  try:
455
+ raw_data = await self.fetcher.fetch_yfinance(ticker)
456
 
457
+ if "error" in raw_data:
458
+ return {"error": raw_data["error"]}
459
 
460
+ financials, debt, cash_flow = self.parser.parse_yfinance_data(raw_data, ticker)
461
 
462
+ # Use to_dict() and filter to supplementary metrics only
463
+ result = {}
464
+ fin_dict = financials.to_dict()
465
+ for key in ["operating_margin_pct", "gross_margin_pct"]:
466
+ if key in fin_dict:
467
+ result[key] = fin_dict[key]
 
 
 
 
 
468
 
469
+ debt_dict = debt.to_dict()
470
+ for key in ["total_debt"]:
471
+ if key in debt_dict:
472
+ result[key] = debt_dict[key]
473
+
474
+ cf_dict = cash_flow.to_dict()
475
+ for key in ["operating_cash_flow", "free_cash_flow"]:
476
+ if key in cf_dict:
477
+ result[key] = cf_dict[key]
478
+
479
+ return result
480
 
481
  except Exception as e:
482
  logger.error(f"Yahoo data fetch failed for {ticker}: {e}")
483
+ return {"error": str(e)}
484
 
485
  async def _get_yahoo_fallback_data(self, ticker: str) -> Dict[str, Any]:
486
  """Get Yahoo data as fallback when SEC fails. Returns core + supplementary metrics."""
487
  try:
488
+ raw_data = await self.fetcher.fetch_yfinance(ticker)
489
 
490
+ if "error" in raw_data:
491
+ return {"error": raw_data["error"]}
492
 
493
+ financials, debt, cash_flow = self.parser.parse_yfinance_data(raw_data, ticker)
494
 
495
+ # FALLBACK: Use to_dict() for all metrics
496
+ result = financials.to_dict()
497
+ result.update(debt.to_dict())
498
+ result.update(cash_flow.to_dict())
 
 
 
 
 
 
 
499
 
500
+ return result
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
501
 
502
  except Exception as e:
503
  logger.error(f"Yahoo fallback fetch failed for {ticker}: {e}")
504
+ return {"error": str(e)}
505
 
506
  # =========================================================================
507
  # HELPER METHODS
mcp-servers/fundamentals-basket/services/parser.py CHANGED
@@ -354,7 +354,9 @@ class ParserService:
354
  ParsedFinancials with all metrics (universal + industry-specific)
355
  """
356
  # Extract core metrics (universal)
357
- revenue = self.get_latest_value(facts, REVENUE_CONCEPTS)
 
 
358
  net_income = self.get_latest_value(facts, NET_INCOME_CONCEPTS)
359
  gross_profit = self.get_latest_value(facts, GROSS_PROFIT_CONCEPTS)
360
  operating_income = self.get_latest_value(facts, OPERATING_INCOME_CONCEPTS)
 
354
  ParsedFinancials with all metrics (universal + industry-specific)
355
  """
356
  # Extract core metrics (universal)
357
+ # Use get_most_recent_across_concepts for revenue to ensure freshest data
358
+ # (some companies have ASC 606 concept stale while legacy "Revenues" is current)
359
+ revenue = self.get_most_recent_across_concepts(facts, REVENUE_CONCEPTS)
360
  net_income = self.get_latest_value(facts, NET_INCOME_CONCEPTS)
361
  gross_profit = self.get_latest_value(facts, GROSS_PROFIT_CONCEPTS)
362
  operating_income = self.get_latest_value(facts, OPERATING_INCOME_CONCEPTS)
mcp-servers/macro-basket/server.py CHANGED
@@ -42,6 +42,46 @@ import httpx
42
  logging.basicConfig(level=logging.INFO)
43
  logger = logging.getLogger("macro-basket")
44
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
45
  # Initialize MCP server
46
  server = Server("macro-basket")
47
 
@@ -805,49 +845,73 @@ async def get_all_sources_macro() -> dict:
805
  fred_gdp_task, fred_rates_task, fred_cpi_task, fred_unemp_task
806
  )
807
 
808
- # Use primary source, fallback to secondary if primary failed
809
- gdp = bea_gdp if "error" not in bea_gdp else fred_gdp
810
- cpi = bls_cpi if "error" not in bls_cpi else fred_cpi
811
- unemp = bls_unemp if "error" not in bls_unemp else fred_unemp
812
- rates = fred_rates # FRED is primary for interest rates
813
 
814
- # Build normalized raw_metrics schema with temporal data
815
- return {
816
- "group": "raw_metrics",
817
- "ticker": "MACRO",
818
- "metrics": {
819
  "gdp_growth": {
820
- "value": gdp.get("value") if gdp else None,
821
  "data_type": "Quarterly",
822
- "as_of": gdp.get("date") if gdp else None, # e.g., "2025Q3"
823
- "source": gdp.get("source") if gdp else None,
824
- "fallback": gdp.get("fallback", False) if gdp else True
825
  },
826
- "interest_rate": {
827
- "value": rates.get("value") if rates else None,
828
- "data_type": "Monthly",
829
- "as_of": rates.get("date") if rates else None,
830
- "source": rates.get("source") if rates else None,
831
- "fallback": rates.get("fallback", False) if rates else True
832
- },
833
- "cpi_inflation": {
834
- "value": cpi.get("value") if cpi else None,
835
- "data_type": "Monthly",
836
- "as_of": cpi.get("date") if cpi else None,
837
- "source": cpi.get("source") if cpi else None,
838
- "fallback": cpi.get("fallback", False) if cpi else True
839
- },
840
- "unemployment": {
841
- "value": unemp.get("value") if unemp else None,
842
- "data_type": "Monthly",
843
- "as_of": unemp.get("date") if unemp else None,
844
- "source": unemp.get("source") if unemp else None,
845
- "fallback": unemp.get("fallback", False) if unemp else True
846
- }
847
- },
848
- "source": "macro-basket",
849
- "as_of": datetime.now().strftime("%Y-%m-%d")
850
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
851
 
852
 
853
  async def get_full_macro_basket() -> dict:
 
42
  logging.basicConfig(level=logging.INFO)
43
  logger = logging.getLogger("macro-basket")
44
 
45
+
46
+ def normalize_date_to_iso(date_str: str) -> str:
47
+ """
48
+ Normalize various date formats to YYYY-MM-DD.
49
+
50
+ Examples:
51
+ - "2025Q3" → "2025-09-30" (end of Q3)
52
+ - "2025-November" → "2025-11-30" (end of month)
53
+ - "2025-December" → "2025-12-31"
54
+ - "2025-01" → "2025-01-31"
55
+ """
56
+ if not date_str:
57
+ return None
58
+
59
+ # Already in YYYY-MM-DD format
60
+ if len(date_str) == 10 and date_str[4] == '-' and date_str[7] == '-':
61
+ return date_str
62
+
63
+ # Quarter format: 2025Q3 → 2025-09-30
64
+ if 'Q' in date_str:
65
+ year = date_str[:4]
66
+ quarter = date_str[-1]
67
+ quarter_end = {'1': '03-31', '2': '06-30', '3': '09-30', '4': '12-31'}
68
+ return f"{year}-{quarter_end.get(quarter, '12-31')}"
69
+
70
+ # Month name format: 2025-November → 2025-11-30
71
+ month_map = {
72
+ 'January': ('01', '31'), 'February': ('02', '28'), 'March': ('03', '31'),
73
+ 'April': ('04', '30'), 'May': ('05', '31'), 'June': ('06', '30'),
74
+ 'July': ('07', '31'), 'August': ('08', '31'), 'September': ('09', '30'),
75
+ 'October': ('10', '31'), 'November': ('11', '30'), 'December': ('12', '31')
76
+ }
77
+ for month_name, (month_num, day) in month_map.items():
78
+ if month_name in date_str:
79
+ year = date_str.split('-')[0]
80
+ return f"{year}-{month_num}-{day}"
81
+
82
+ return date_str # Return as-is if no pattern matches
83
+
84
+
85
  # Initialize MCP server
86
  server = Server("macro-basket")
87
 
 
845
  fred_gdp_task, fred_rates_task, fred_cpi_task, fred_unemp_task
846
  )
847
 
848
+ # Build flat {source: metrics} structure (no "data" wrapper)
849
+ sources = {}
850
+ bea_failed = "error" in bea_gdp
851
+ bls_cpi_failed = "error" in bls_cpi
852
+ bls_unemp_failed = "error" in bls_unemp
853
 
854
+ # BEA: GDP (if succeeded)
855
+ if not bea_failed:
856
+ sources["bea"] = {
 
 
857
  "gdp_growth": {
858
+ "value": bea_gdp.get("value"),
859
  "data_type": "Quarterly",
860
+ "as_of": normalize_date_to_iso(bea_gdp.get("date")),
 
 
861
  },
862
+ }
863
+
864
+ # BLS: CPI and Unemployment (if succeeded)
865
+ bls_data = {}
866
+ if not bls_cpi_failed:
867
+ bls_data["cpi_inflation"] = {
868
+ "value": bls_cpi.get("value"),
869
+ "data_type": "Monthly",
870
+ "as_of": normalize_date_to_iso(bls_cpi.get("date")),
871
+ }
872
+ if not bls_unemp_failed:
873
+ bls_data["unemployment"] = {
874
+ "value": bls_unemp.get("value"),
875
+ "data_type": "Monthly",
876
+ "as_of": normalize_date_to_iso(bls_unemp.get("date")),
877
+ }
878
+ if bls_data:
879
+ sources["bls"] = bls_data
880
+
881
+ # FRED: interest_rate (always) + fallbacks for failed BEA/BLS
882
+ fred_data = {}
883
+ # Interest rate - FRED is primary
884
+ if "error" not in fred_rates:
885
+ fred_data["interest_rate"] = {
886
+ "value": fred_rates.get("value"),
887
+ "data_type": "Monthly",
888
+ "as_of": normalize_date_to_iso(fred_rates.get("date")),
889
+ }
890
+ # Fallback: GDP from FRED if BEA failed
891
+ if bea_failed and "error" not in fred_gdp:
892
+ fred_data["gdp_growth"] = {
893
+ "value": fred_gdp.get("value"),
894
+ "data_type": "Quarterly",
895
+ "as_of": normalize_date_to_iso(fred_gdp.get("date")),
896
+ }
897
+ # Fallback: CPI from FRED if BLS failed
898
+ if bls_cpi_failed and "error" not in fred_cpi:
899
+ fred_data["cpi_inflation"] = {
900
+ "value": fred_cpi.get("value"),
901
+ "data_type": "Monthly",
902
+ "as_of": normalize_date_to_iso(fred_cpi.get("date")),
903
+ }
904
+ # Fallback: Unemployment from FRED if BLS failed
905
+ if bls_unemp_failed and "error" not in fred_unemp:
906
+ fred_data["unemployment"] = {
907
+ "value": fred_unemp.get("value"),
908
+ "data_type": "Monthly",
909
+ "as_of": normalize_date_to_iso(fred_unemp.get("date")),
910
+ }
911
+ if fred_data:
912
+ sources["fred"] = fred_data
913
+
914
+ return sources
915
 
916
 
917
  async def get_full_macro_basket() -> dict:
mcp-servers/news-basket/server.py CHANGED
@@ -401,53 +401,46 @@ async def get_all_sources_news(ticker: str, company_name: str = None) -> dict:
401
  tavily_task, nyt_task, newsapi_task
402
  )
403
 
404
- # Combine results
405
- all_results = []
406
- sources_used = []
407
 
408
- # Add Tavily results (inject source name into each article)
409
  if "results" in tavily_result and tavily_result["results"]:
410
- for article in tavily_result["results"]:
411
- article["source"] = article.get("source") or "Tavily"
412
- all_results.extend(tavily_result["results"])
413
- sources_used.append("Tavily")
 
 
 
 
 
414
 
415
- # Add NYT results (inject source name into each article)
416
  if "results" in nyt_result and nyt_result["results"]:
417
- for article in nyt_result["results"]:
418
- article["source"] = article.get("source") or "NYT"
419
- all_results.extend(nyt_result["results"])
420
- sources_used.append("NYT")
 
 
 
 
 
421
 
422
- # Add NewsAPI results (inject source name into each article)
423
  if "results" in newsapi_result and newsapi_result["results"]:
424
- for article in newsapi_result["results"]:
425
- article["source"] = article.get("source") or "NewsAPI"
426
- all_results.extend(newsapi_result["results"])
427
- sources_used.append("NewsAPI")
428
-
429
- # Sort by date (most recent first) - deduplication applied downstream
430
- all_results.sort(key=lambda x: x.get("published_date", "") or "", reverse=True)
431
-
432
- # Build normalized content_analysis schema
433
- items = []
434
- for article in all_results:
435
- items.append({
436
- "title": article.get("title"),
437
- "content": article.get("content") or article.get("snippet"),
438
- "url": article.get("url"),
439
- "datetime": normalize_date(article.get("published_date")),
440
- "source": article.get("source"),
441
- })
442
-
443
- # Return {source: {data: ...}} structure
444
- return {
445
- "news_aggregator": {
446
- "data": {
447
- "items": items,
448
  }
449
- }
450
- }
 
 
451
 
452
 
453
  async def search_going_concern_news(ticker: str, company_name: str = None) -> dict:
 
401
  tavily_task, nyt_task, newsapi_task
402
  )
403
 
404
+ # Build source-keyed structure
405
+ result = {}
 
406
 
407
+ # Add Tavily results
408
  if "results" in tavily_result and tavily_result["results"]:
409
+ result["tavily"] = [
410
+ {
411
+ "title": a.get("title"),
412
+ "url": a.get("url"),
413
+ "content": a.get("content"),
414
+ "published_date": normalize_date(a.get("published_date")),
415
+ }
416
+ for a in tavily_result["results"]
417
+ ]
418
 
419
+ # Add NYT results
420
  if "results" in nyt_result and nyt_result["results"]:
421
+ result["nyt"] = [
422
+ {
423
+ "title": a.get("title"),
424
+ "url": a.get("url"),
425
+ "content": a.get("content") or a.get("snippet"),
426
+ "published_date": normalize_date(a.get("published_date")),
427
+ }
428
+ for a in nyt_result["results"]
429
+ ]
430
 
431
+ # Add NewsAPI results
432
  if "results" in newsapi_result and newsapi_result["results"]:
433
+ result["newsapi"] = [
434
+ {
435
+ "title": a.get("title"),
436
+ "url": a.get("url"),
437
+ "content": a.get("content"),
438
+ "published_date": normalize_date(a.get("published_date")),
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
439
  }
440
+ for a in newsapi_result["results"]
441
+ ]
442
+
443
+ return result
444
 
445
 
446
  async def search_going_concern_news(ticker: str, company_name: str = None) -> dict:
mcp-servers/sentiment-basket/server.py CHANGED
@@ -220,48 +220,34 @@ async def get_all_sources_sentiment(ticker: str, company_name: str = "") -> dict
220
 
221
  finnhub, reddit = await asyncio.gather(finnhub_task, reddit_task)
222
 
223
- # Build normalized content_analysis schema
224
- items = []
225
- sources_used = []
226
 
227
  # Add Finnhub articles
228
  if "error" not in finnhub and finnhub.get("articles"):
229
- sources_used.append("Finnhub")
230
- for article in finnhub.get("articles", []):
231
- items.append({
232
- "title": article.get("headline"),
233
- "content": article.get("summary"),
234
- "url": article.get("url"),
235
- "datetime": article.get("datetime"),
236
- "source": "Finnhub",
237
- "subreddit": None, # Not applicable for Finnhub
238
- })
239
 
240
  # Add Reddit posts
241
  if "error" not in reddit and reddit.get("posts"):
242
- sources_used.append("Reddit")
243
- for post in reddit.get("posts", []):
244
- items.append({
245
- "title": post.get("title"),
246
- "content": post.get("selftext"),
247
- "url": post.get("url"),
248
- "datetime": post.get("created_utc"),
249
- "source": "Reddit",
250
- "subreddit": post.get("subreddit"), # Separate subreddit field
251
- })
252
-
253
- # Sort by datetime (most recent first)
254
- items.sort(key=lambda x: x.get("datetime") or "", reverse=True)
255
-
256
- return {
257
- "group": "content_analysis",
258
- "ticker": ticker.upper(),
259
- "items": items,
260
- "item_count": len(items),
261
- "sources_used": sources_used,
262
- "source": "sentiment-basket",
263
- "as_of": datetime.now().strftime("%Y-%m-%d")
264
- }
265
 
266
 
267
  # ============================================================
 
220
 
221
  finnhub, reddit = await asyncio.gather(finnhub_task, reddit_task)
222
 
223
+ # Build source-keyed structure
224
+ result = {}
 
225
 
226
  # Add Finnhub articles
227
  if "error" not in finnhub and finnhub.get("articles"):
228
+ result["finnhub"] = [
229
+ {
230
+ "title": a.get("headline"),
231
+ "url": a.get("url"),
232
+ "content": a.get("summary"),
233
+ "published_date": a.get("datetime"),
234
+ }
235
+ for a in finnhub.get("articles", [])
236
+ ]
 
237
 
238
  # Add Reddit posts
239
  if "error" not in reddit and reddit.get("posts"):
240
+ result["reddit"] = [
241
+ {
242
+ "title": p.get("title"),
243
+ "url": p.get("url"),
244
+ "content": p.get("selftext"),
245
+ "published_date": p.get("created_utc"),
246
+ }
247
+ for p in reddit.get("posts", [])
248
+ ]
249
+
250
+ return result
 
 
 
 
 
 
 
 
 
 
 
 
251
 
252
 
253
  # ============================================================
mcp-servers/valuation-basket/server.py CHANGED
@@ -643,60 +643,59 @@ async def get_full_valuation_basket(ticker: str) -> dict:
643
 
644
  async def get_all_sources_valuation(ticker: str) -> dict:
645
  """
646
- Fetch valuation metrics from Yahoo Finance (primary) with Alpha Vantage fallback.
647
- Returns NORMALIZED schema with 11 universal metrics.
 
 
648
  """
649
- yahoo_result = await fetch_yahoo_quote(ticker)
 
 
 
650
 
651
- # Build normalized schema
652
  sources = {}
 
653
 
654
- # Yahoo Finance as primary source (11 universal metrics, excludes ev_ebitda)
655
- if "error" not in yahoo_result:
 
656
  sources["yahoo_finance"] = {
657
- "source": "Yahoo Finance",
658
- "regular_market_time": yahoo_result.get("regular_market_time"),
659
- "data": {
660
- "current_price": safe_get(yahoo_result, "current_price"),
661
- "market_cap": safe_get(yahoo_result, "market_cap"),
662
- "enterprise_value": safe_get(yahoo_result, "enterprise_value"),
663
- "trailing_pe": safe_get(yahoo_result, "trailing_pe"),
664
- "forward_pe": safe_get(yahoo_result, "forward_pe"),
665
- "ps_ratio": safe_get(yahoo_result, "ps_ratio"),
666
- "pb_ratio": safe_get(yahoo_result, "pb_ratio"),
667
- "trailing_peg": safe_get(yahoo_result, "trailing_peg"),
668
- "forward_peg": safe_get(yahoo_result, "forward_peg"),
669
- "earnings_growth": safe_get(yahoo_result, "earnings_growth"),
670
- "revenue_growth": safe_get(yahoo_result, "revenue_growth"),
671
- }
672
  }
 
 
 
 
 
 
673
  else:
674
- # Fallback to Alpha Vantage if Yahoo Finance fails
675
- alpha_result = await fetch_alpha_vantage_quote(ticker)
676
  if alpha_result and "error" not in alpha_result:
 
677
  sources["alpha_vantage"] = {
678
- "source": "Alpha Vantage",
679
- "latest_quarter": alpha_result.get("latest_quarter"),
680
- "data": {
681
- "current_price": safe_get(alpha_result, "current_price"),
682
- "market_cap": safe_get(alpha_result, "market_cap"),
683
- "trailing_pe": safe_get(alpha_result, "trailing_pe"),
684
- "forward_pe": safe_get(alpha_result, "forward_pe"),
685
- "ps_ratio": safe_get(alpha_result, "ps_ratio"),
686
- "pb_ratio": safe_get(alpha_result, "pb_ratio"),
687
- "trailing_peg": safe_get(alpha_result, "trailing_peg"),
688
- "earnings_growth": safe_get(alpha_result, "earnings_growth"),
689
- "revenue_growth": safe_get(alpha_result, "revenue_growth"),
690
- }
691
  }
692
 
693
- return {
694
- "group": "source_comparison",
695
- "ticker": ticker.upper(),
696
- "sources": sources,
697
- "source": "valuation-basket",
698
- "as_of": datetime.now().strftime("%Y-%m-%d")
699
- }
700
 
701
 
702
  # ============================================================
 
643
 
644
  async def get_all_sources_valuation(ticker: str) -> dict:
645
  """
646
+ Fetch valuation metrics from Yahoo Finance (primary) + Alpha Vantage (supplementary).
647
+
648
+ - Yahoo succeeds: yahoo_finance (core) + alpha_vantage (supplementary: ev_ebitda)
649
+ - Yahoo fails: alpha_vantage (core + supplementary)
650
  """
651
+ # Fetch both sources in parallel
652
+ yahoo_task = fetch_yahoo_quote(ticker)
653
+ alpha_task = fetch_alpha_vantage_quote(ticker)
654
+ yahoo_result, alpha_result = await asyncio.gather(yahoo_task, alpha_task)
655
 
 
656
  sources = {}
657
+ yahoo_failed = "error" in yahoo_result
658
 
659
+ if not yahoo_failed:
660
+ # Yahoo core metrics with temporal data (flat, no "data" wrapper)
661
+ yahoo_as_of = yahoo_result.get("regular_market_time")
662
  sources["yahoo_finance"] = {
663
+ "current_price": {"value": safe_get(yahoo_result, "current_price"), "as_of": yahoo_as_of},
664
+ "market_cap": {"value": safe_get(yahoo_result, "market_cap"), "as_of": yahoo_as_of},
665
+ "enterprise_value": {"value": safe_get(yahoo_result, "enterprise_value"), "as_of": yahoo_as_of},
666
+ "trailing_pe": {"value": safe_get(yahoo_result, "trailing_pe"), "as_of": yahoo_as_of},
667
+ "forward_pe": {"value": safe_get(yahoo_result, "forward_pe"), "as_of": yahoo_as_of},
668
+ "ps_ratio": {"value": safe_get(yahoo_result, "ps_ratio"), "as_of": yahoo_as_of},
669
+ "pb_ratio": {"value": safe_get(yahoo_result, "pb_ratio"), "as_of": yahoo_as_of},
670
+ "trailing_peg": {"value": safe_get(yahoo_result, "trailing_peg"), "as_of": yahoo_as_of},
671
+ "forward_peg": {"value": safe_get(yahoo_result, "forward_peg"), "as_of": yahoo_as_of},
672
+ "earnings_growth": {"value": safe_get(yahoo_result, "earnings_growth"), "as_of": yahoo_as_of},
673
+ "revenue_growth": {"value": safe_get(yahoo_result, "revenue_growth"), "as_of": yahoo_as_of},
 
 
 
 
674
  }
675
+ # Alpha supplementary metrics (ev_ebitda not in Yahoo)
676
+ if alpha_result and "error" not in alpha_result:
677
+ alpha_as_of = alpha_result.get("latest_quarter")
678
+ sources["alpha_vantage"] = {
679
+ "ev_ebitda": {"value": safe_get(alpha_result, "ev_ebitda"), "as_of": alpha_as_of},
680
+ }
681
  else:
682
+ # Fallback: Alpha provides core + supplementary
 
683
  if alpha_result and "error" not in alpha_result:
684
+ alpha_as_of = alpha_result.get("latest_quarter")
685
  sources["alpha_vantage"] = {
686
+ "current_price": {"value": safe_get(alpha_result, "current_price"), "as_of": alpha_as_of},
687
+ "market_cap": {"value": safe_get(alpha_result, "market_cap"), "as_of": alpha_as_of},
688
+ "trailing_pe": {"value": safe_get(alpha_result, "trailing_pe"), "as_of": alpha_as_of},
689
+ "forward_pe": {"value": safe_get(alpha_result, "forward_pe"), "as_of": alpha_as_of},
690
+ "ps_ratio": {"value": safe_get(alpha_result, "ps_ratio"), "as_of": alpha_as_of},
691
+ "pb_ratio": {"value": safe_get(alpha_result, "pb_ratio"), "as_of": alpha_as_of},
692
+ "trailing_peg": {"value": safe_get(alpha_result, "trailing_peg"), "as_of": alpha_as_of},
693
+ "earnings_growth": {"value": safe_get(alpha_result, "earnings_growth"), "as_of": alpha_as_of},
694
+ "revenue_growth": {"value": safe_get(alpha_result, "revenue_growth"), "as_of": alpha_as_of},
695
+ "ev_ebitda": {"value": safe_get(alpha_result, "ev_ebitda"), "as_of": alpha_as_of},
 
 
 
696
  }
697
 
698
+ return sources
 
 
 
 
 
 
699
 
700
 
701
  # ============================================================
mcp-servers/volatility-basket/server.py CHANGED
@@ -898,51 +898,45 @@ async def get_all_sources_volatility(ticker: str) -> dict:
898
  hv = yahoo_hv if "error" not in yahoo_hv else (av_hv or yahoo_hv)
899
  iv = yahoo_iv
900
 
901
- # Build normalized raw_metrics schema with temporal data
902
- return {
903
- "group": "raw_metrics",
904
- "ticker": ticker.upper(),
905
- "metrics": {
906
- "vix": {
907
- "value": vix.get("value"),
908
- "data_type": "Daily",
909
- "as_of": vix.get("as_of"), # FRED observation date
910
- "source": vix.get("source"),
911
- "fallback": vix.get("fallback", False)
912
- },
913
- "vxn": {
914
- "value": vxn.get("value"),
915
- "data_type": "Daily",
916
- "as_of": vxn.get("as_of"), # FRED observation date
917
- "source": vxn.get("source"),
918
- "fallback": vxn.get("fallback", False)
919
- },
920
- "beta": {
921
- "value": beta.get("value") if beta else None,
922
- "data_type": "1Y", # 1 year lookback
923
- "as_of": beta.get("as_of") if beta else None,
924
- "source": beta.get("source") if beta else None,
925
- "fallback": beta.get("fallback", False) if beta else True
926
- },
927
- "historical_volatility": {
928
- "value": hv.get("value") if hv else None,
929
- "data_type": "30D", # 30 day lookback
930
- "as_of": hv.get("as_of") if hv else None,
931
- "source": hv.get("source") if hv else None,
932
- "fallback": hv.get("fallback", False) if hv else True
933
- },
934
- "implied_volatility": {
935
- "value": iv.get("value") if iv else None,
936
- "data_type": "Forward", # Forward-looking from options
937
- "as_of": iv.get("as_of") if iv else None,
938
- "source": iv.get("source") if iv else None,
939
- "fallback": iv.get("fallback", False) if iv else True
940
- }
941
  },
942
- "source": "volatility-basket",
943
- "as_of": datetime.now().strftime("%Y-%m-%d")
944
  }
945
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
946
 
947
  async def get_full_volatility_basket(ticker: str) -> dict:
948
  """
 
898
  hv = yahoo_hv if "error" not in yahoo_hv else (av_hv or yahoo_hv)
899
  iv = yahoo_iv
900
 
901
+ # Build flat {source: metrics} structure (no "data" wrapper)
902
+ sources = {}
903
+
904
+ # FRED: VIX and VXN (market volatility context)
905
+ sources["fred"] = {
906
+ "vix": {
907
+ "value": vix.get("value"),
908
+ "data_type": "Daily",
909
+ "as_of": vix.get("as_of"),
910
+ },
911
+ "vxn": {
912
+ "value": vxn.get("value"),
913
+ "data_type": "Daily",
914
+ "as_of": vxn.get("as_of"),
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
915
  },
 
 
916
  }
917
 
918
+ # Yahoo Finance: beta, historical_volatility, implied_volatility
919
+ sources["yahoo_finance"] = {
920
+ "beta": {
921
+ "value": beta.get("value") if beta else None,
922
+ "data_type": "1Y",
923
+ "as_of": beta.get("as_of") if beta else None,
924
+ },
925
+ "historical_volatility": {
926
+ "value": hv.get("value") if hv else None,
927
+ "data_type": "30D",
928
+ "as_of": hv.get("as_of") if hv else None,
929
+ },
930
+ "implied_volatility": {
931
+ "value": iv.get("value") if iv else None,
932
+ "data_type": "Forward",
933
+ "as_of": iv.get("as_of") if iv else None,
934
+ "is_estimated": iv.get("estimated", False) if iv else True,
935
+ },
936
+ }
937
+
938
+ return sources
939
+
940
 
941
  async def get_full_volatility_basket(ticker: str) -> dict:
942
  """
mcp_client.py CHANGED
@@ -427,134 +427,23 @@ async def call_sentiment_mcp(ticker: str, company_name: str = "") -> dict:
427
  # =============================================================================
428
 
429
  def _normalize_volatility(raw: dict) -> dict:
430
- """
431
- Normalize volatility schema.
432
- Input: {"metrics": {"vix": {...}, "beta": {...}, ...}}
433
- Output: {"yahoo_finance": {"data": {...}}, "market_volatility_context": {...}}
434
- """
435
- if not raw or "error" in raw:
436
- return raw
437
-
438
- metrics = raw.get("metrics", {})
439
-
440
- # Extract VIX/VXN for market context
441
- vix = metrics.get("vix", {})
442
- vxn = metrics.get("vxn", {})
443
-
444
- # Extract stock-specific metrics for yahoo_finance
445
- beta = metrics.get("beta", {})
446
- hist_vol = metrics.get("historical_volatility", {})
447
- impl_vol = metrics.get("implied_volatility", {})
448
-
449
- return {
450
- "yahoo_finance": {
451
- "source": "Yahoo Finance",
452
- "data": {
453
- "beta": beta,
454
- "historical_volatility": hist_vol,
455
- "implied_volatility": impl_vol,
456
- }
457
- },
458
- "market_volatility_context": {
459
- "vix": {"value": vix.get("value"), "date": vix.get("as_of")},
460
- "vxn": {"value": vxn.get("value"), "date": vxn.get("as_of")},
461
- },
462
- "source": raw.get("source"),
463
- "as_of": raw.get("as_of"),
464
- }
465
 
466
 
467
  def _normalize_macro(raw: dict) -> dict:
468
- """
469
- Normalize macro schema.
470
- Input: {"metrics": {"gdp_growth": {...}, "interest_rate": {...}, ...}}
471
- Output: {"bea_bls": {"data": {...}}, "fred": {"data": {...}}}
472
- """
473
- if not raw or "error" in raw:
474
- return raw
475
-
476
- metrics = raw.get("metrics", {})
477
-
478
- gdp = metrics.get("gdp_growth", {})
479
- cpi = metrics.get("cpi_inflation", {})
480
- unemp = metrics.get("unemployment", {})
481
- interest = metrics.get("interest_rate", {})
482
-
483
- # BEA/BLS: GDP, CPI, unemployment (primary sources)
484
- # FRED: interest_rate (and fallback for others)
485
- # Note: In get_all_sources_macro, "as_of" field contains the actual data date (e.g., "2025Q3")
486
- return {
487
- "bea_bls": {
488
- "source": "BEA/BLS",
489
- "data": {
490
- "gdp_growth": {"value": gdp.get("value"), "date": gdp.get("as_of")},
491
- "cpi_inflation": {"value": cpi.get("value"), "date": cpi.get("as_of")},
492
- "unemployment": {"value": unemp.get("value"), "date": unemp.get("as_of")},
493
- }
494
- },
495
- "fred": {
496
- "source": "FRED",
497
- "data": {
498
- "interest_rate": {"value": interest.get("value"), "date": interest.get("as_of")},
499
- "gdp_growth": {"value": gdp.get("value"), "date": gdp.get("as_of")} if gdp.get("fallback") else None,
500
- "cpi_inflation": {"value": cpi.get("value"), "date": cpi.get("as_of")} if cpi.get("fallback") else None,
501
- "unemployment": {"value": unemp.get("value"), "date": unemp.get("as_of")} if unemp.get("fallback") else None,
502
- }
503
- },
504
- "source": raw.get("source"),
505
- "as_of": raw.get("as_of"),
506
- }
507
 
508
 
509
  def _normalize_valuation(raw: dict) -> dict:
510
- """
511
- Normalize valuation schema.
512
- Input: {"sources": {"yahoo_finance": {...}, "alpha_vantage": {...}}}
513
- Output: {"yahoo_finance": {"data": {...}}, "alpha_vantage": {"data": {...}}}
514
- """
515
- if not raw or "error" in raw:
516
- return raw
517
-
518
- sources = raw.get("sources", {})
519
-
520
- result = {
521
- "source": raw.get("source"),
522
- "as_of": raw.get("as_of"),
523
- }
524
-
525
- # Flatten sources to top level
526
- if "yahoo_finance" in sources:
527
- result["yahoo_finance"] = sources["yahoo_finance"]
528
- if "alpha_vantage" in sources:
529
- result["alpha_vantage"] = sources["alpha_vantage"]
530
-
531
- return result
532
 
533
 
534
  def _normalize_fundamentals(raw: dict) -> dict:
535
- """
536
- Normalize fundamentals schema.
537
- Input: {"sources": {"sec_edgar": {...}, "yahoo_finance": {...}}}
538
- Output: {"sec_edgar": {"data": {...}}, "yahoo_finance": {"data": {...}}}
539
- """
540
- if not raw or "error" in raw:
541
- return raw
542
-
543
- sources = raw.get("sources", {})
544
-
545
- result = {
546
- "source": raw.get("source"),
547
- "as_of": raw.get("as_of"),
548
- "ticker": raw.get("ticker"),
549
- }
550
-
551
- # Flatten sources to top level
552
- if "sec_edgar" in sources:
553
- result["sec_edgar"] = sources["sec_edgar"]
554
- if "yahoo_finance" in sources:
555
- result["yahoo_finance"] = sources["yahoo_finance"]
556
-
557
- return result
558
 
559
 
560
  def _get_nested_value(data: dict, *keys):
 
427
  # =============================================================================
428
 
429
  def _normalize_volatility(raw: dict) -> dict:
430
+ """Pass-through: MCPs now emit {source: {data: ...}} directly."""
431
+ return raw
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
432
 
433
 
434
  def _normalize_macro(raw: dict) -> dict:
435
+ """Pass-through: MCPs now emit {source: {data: ...}} directly."""
436
+ return raw
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
437
 
438
 
439
  def _normalize_valuation(raw: dict) -> dict:
440
+ """Pass-through: MCPs now emit {source: {data: ...}} directly."""
441
+ return raw
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
442
 
443
 
444
  def _normalize_fundamentals(raw: dict) -> dict:
445
+ """Pass-through: MCPs now emit {source: {data: ...}} directly."""
446
+ return raw
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
447
 
448
 
449
  def _get_nested_value(data: dict, *keys):