# app.py — India-focused financial news aggregator and chatbot (Flask).
import os
import time
import sqlite3
import random
from flask import Flask, render_template, request, jsonify, g
from datetime import datetime, timedelta
import requests
from threading import Thread, Event
import logging
from typing import Dict, List
from bs4 import BeautifulSoup
app = Flask(__name__)

# Configuration
# SECURITY NOTE(review): a real-looking API key is hard-coded as the env-var
# fallback; it should be revoked and NEWS_API_KEY made mandatory.
NEWS_API_KEY = os.environ.get('NEWS_API_KEY', '352f67b35a544f408c58c74c654cfd7e')
MAX_NEWS_ARTICLES = 5  # Reduced to lower CPU usage during build
API_CALL_INTERVAL = 10  # seconds — client-side throttle between NewsAPI calls
REFRESH_INTERVAL = 7200  # 2 hours (increased to reduce CPU load)
CACHE_EXPIRY_DURATION = 3600  # 60 minutes (increased to reduce API calls)
app.config['DATABASE'] = '/data/news.db'  # persistent volume; init_db() falls back to :memory: if unwritable

# Module-level mutable state shared by fetch_news() and the refresh thread.
last_fetch_time = None   # datetime of the last successful fetch
last_api_call = 0        # time.time() of the last NewsAPI request
cached_articles = []     # in-process article cache
cache_expiry = None      # time.time() after which cached_articles is stale
IS_BUILDING = os.environ.get('IS_BUILDING', 'false').lower() == 'true'  # Flag to skip heavy tasks during build
# List of Indian finance news websites (reduced to avoid HTTP errors).
# Scraped top-down by fetch_news_from_websites() as a fallback source when
# NewsAPI yields fewer than 5 usable articles; scraping stops once 5 are found.
WEBSITES = [
    "https://economictimes.indiatimes.com/markets/stocks/news",
    "https://economictimes.indiatimes.com/markets",
    "https://economictimes.indiatimes.com/news/economy",
    "https://www.moneycontrol.com/news/business/markets/",
    "https://www.moneycontrol.com/news/business/",
    "https://www.hindustantimes.com/business",
    "https://indianexpress.com/section/business/",
    "https://www.ndtvprofit.com/markets",
    "https://www.ndtvprofit.com/business",
    "https://www.zeebiz.com/markets",
    "https://www.livemint.com/market/market-news",
    "https://www.financialexpress.com/market/",
    "https://www.cnbctv18.com/market/",
    "https://www.businesstoday.in/markets",
    "https://www.indiainfoline.com/news/market"
]
# India-Focused Financial Keywords (expanded for broader coverage).
# Used by categorize_article() as a coarse "is this finance-related at all?"
# signal, matched case-insensitively against article snippets.
INDIA_FINANCE_KEYWORDS = set([
    "nifty", "sensex", "bse", "nse", "stock", "stocks", "equity", "ipo", "mutual fund", "mutual funds",
    "rbi", "repo rate", "banking", "gold", "silver", "bitcoin", "ethereum", "cryptocurrency", "crypto",
    "gdp", "inflation", "economy", "budget", "fiscal", "government policy", "indian finance", "indian economy",
    "tariffs", "trade", "energy hub", "infrastructure", "investment", "development", "central government",
    "law ministry", "airports authority", "shillong airport", "trincomalee", "sri lanka", "uae", "modi",
    "litigation", "court cases", "legal directives", "competition", "energy", "expansion", "connectivity",
    "seafood", "exporters", "startups", "gem", "health insurance", "premiums", "gst", "markets", "volatility",
    "fpi", "equities", "transactions", "innovation", "ecosystem", "entrepreneurs"
])
# Restricted Financial Categories (expanded for broader coverage).
# Maps category name -> keyword set; drives both article categorization
# (categorize_article) and chatbot topic detection (detect_financial_topic).
CATEGORY_KEYWORDS = {
    "Stock Market": set(["nifty", "sensex", "bse", "nse", "stock", "stocks", "equity", "ipo", "fpo", "sme", "blue-chip", "mid-cap", "small-cap", "fii", "dii", "fpi", "circuit", "dividend", "buyback", "trade", "tariffs", "markets", "volatility", "equities"]),
    "Mutual Funds": set(["mutual fund", "mutual funds", "sip", "systematic investment plan", "nav", "amc", "asset management company", "equity fund", "debt fund", "hybrid fund"]),
    "Cryptocurrency": set(["crypto", "bitcoin", "ethereum", "ripple", "blockchain", "digital rupee", "cbdc", "wazirx", "coindcx", "zebpay", "cryptocurrency"]),
    "Economy": set(["gdp", "inflation", "cpi", "wpi", "unemployment", "fiscal deficit", "current account", "union budget", "gst", "iip", "economic survey", "recession", "slowdown", "government policy", "budget", "energy hub", "infrastructure", "development", "central government", "law ministry", "airports authority", "shillong airport", "trincomalee", "sri lanka", "uae", "modi", "litigation", "court cases", "legal directives", "competition", "energy", "expansion", "connectivity", "seafood", "exporters", "startups", "gem", "health insurance", "premiums", "transactions", "innovation", "ecosystem", "entrepreneurs"]),
    "Banking": set(["rbi", "repo rate", "reverse repo", "crr", "slr", "npa", "banking", "upi", "imps", "neft", "rtgs", "nbfc", "hdfc", "sbi", "icici", "axis bank", "yes bank", "pnb"]),
    "Metals": set(["gold", "silver", "metal", "bullion", "mcx", "gold etf", "jewellery", "import duty", "precious metal"])
}
# Static Financial Knowledge Base (for fallback, updated with more details).
# NOTE(review): all facts below are hard-coded snapshots from early/mid 2025
# and will go stale; the chatbot serves them verbatim when a query keyword
# matches, before falling back to fetched news.
FINANCIAL_KNOWLEDGE_BASE = {
    "Banking": {
        "repo rate": "As of early 2025, the RBI maintained the repo rate at 6.5% to balance inflation and growth, last updated in February 2025.",
        "recent policy": "The RBI's February 2025 policy focused on controlling inflation at 4.5% while supporting economic recovery with a GDP growth target of 7%.",
        "interest rate": "The RBI's reverse repo rate is 3.35% as of February 2025, encouraging banks to lend more to boost economic activity."
    },
    "Stock Market": {
        "nifty trend": "The Nifty 50 index has been volatile in 2025, with a slight upward trend in March, gaining 2% due to positive global cues.",
        "sensex trend": "The BSE Sensex rose by 1.8% in March 2025, driven by strong FII inflows and optimism in the IT sector.",
        "ipos": "IPOs in India have been active in 2025, with several tech startups going public, though US tariffs may impact investor sentiment."
    },
    "Cryptocurrency": {
        "regulation": "India's crypto regulations remain uncertain in 2025, with a proposed bill to regulate digital assets still under discussion in Parliament.",
        "bitcoin price": "Bitcoin prices in India hovered around ₹50,00,000 in March 2025, reflecting global market volatility and regulatory uncertainty."
    },
    "Metals": {
        "gold price": "Gold prices in India were around ₹72,000 per 10 grams in March 2025, driven by global uncertainty and festive demand.",
        "silver price": "Silver prices in India reached ₹85,000 per kg in March 2025, up 3% due to industrial demand and safe-haven buying."
    },
    "Economy": {
        "gdp growth": "India's GDP growth for FY25 is projected at 7.2% by the IMF, supported by domestic consumption and government infrastructure spending.",
        "inflation": "India's inflation rate was 4.5% in March 2025, within the RBI's target range, driven by stable food prices.",
        "infrastructure": "In April 2025, the Airports Authority of India issued a ₹119.44 crore tender for Shillong Airport expansion to boost connectivity.",
        "energy hub": "India and UAE agreed to develop an energy hub in Trincomalee, Sri Lanka, in April 2025, enhancing regional energy cooperation.",
        "tariffs": "Trump's reciprocal tariffs announced in April 2025 aim to match tariffs other countries charge the US, impacting global trade dynamics.",
        "litigation": "In April 2025, India's law ministry issued directives to reduce litigation involving the central government, addressing nearly seven lakh pending cases."
    },
    "Mutual Funds": {
        "trend": "Mutual funds in India saw increased SIP inflows of ₹15,000 crore in 2025, with equity funds gaining popularity.",
        "equity funds": "Equity mutual funds returned an average of 12% in 2025, driven by strong performance in mid-cap and small-cap sectors."
    }
}
# Nifty 50 Constituents (for tailored recommendations).
# NOTE(review): these URLs look like hand-written placeholders (http, guessed
# paths) — verify they actually resolve before shipping them to users.
NIFTY_50_STOCKS = {
    "Reliance Industries": "http://economictimes.com/markets/stocks/news/reliance-industries-q3-results-2025/",
    "TCS": "http://economictimes.com/markets/stocks/news/tcs-q3-results-2025/",
    "HDFC Bank": "http://moneycontrol.com/news/hdfc-bank-q3-results-2025/",
    "Infosys": "http://livemint.com/news/infosys-q3-results-2025/",
    "ICICI Bank": "http://livemint.com/news/icici-bank-q3-results-2025/",
    "SBI": "http://moneycontrol.com/news/sbi-q3-results-2025/",
    "Adani Ports": "http://livemint.com/news/adani-ports-q3-results-2025/",
    "Tata Steel": "http://economictimes.com/markets/stocks/news/tata-steel-q3-results-2025/"
}
# Logging setup — root logger; all modules share this format.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
# Database Configuration
def get_db():
    """Return the request-scoped SQLite connection, opening one on first use.

    The connection is stored on flask.g so each request reuses a single
    handle; close_db() tears it down at request end.
    """
    if 'db' in g:
        return g.db
    connection = sqlite3.connect(app.config['DATABASE'], check_same_thread=False)
    connection.row_factory = sqlite3.Row  # rows behave like dicts
    g.db = connection
    return connection
def init_db():
    """Create or migrate the news table at startup.

    Three outcomes:
      1. Table missing -> create it plus a published-date index.
      2. Table present -> best-effort ALTER to add the 'description' column
         (ignored if it already exists).
      3. The database file is unwritable (e.g. read-only volume) -> switch
         app.config['DATABASE'] to ':memory:' and rebuild the schema there.
    """
    with app.app_context():
        db = get_db()
        try:
            cursor = db.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='news'")
            if not cursor.fetchone():
                db.execute('''CREATE TABLE news (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    title TEXT NOT NULL,
                    source TEXT,
                    published TEXT,
                    url TEXT UNIQUE NOT NULL,
                    summary TEXT,
                    content TEXT,
                    category TEXT,
                    description TEXT,
                    timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
                )''')
                db.execute('CREATE INDEX idx_published ON news (published DESC)')
                db.commit()
                logging.info("Database table created successfully")
            else:
                # Add description column if it doesn't exist (poor man's migration).
                try:
                    db.execute('ALTER TABLE news ADD COLUMN description TEXT')
                    logging.info("Added description column to news table")
                except sqlite3.OperationalError:
                    pass  # Column already exists
                logging.info("Database table already exists")
        except sqlite3.OperationalError as e:
            # Disk DB unusable: fall back to an in-memory database so the app
            # still serves (cache is lost on restart, and the connection is
            # rebound onto g for the remainder of this context).
            logging.error(f"Database initialization error: {str(e)}")
            app.config['DATABASE'] = ':memory:'
            db = sqlite3.connect(':memory:', check_same_thread=False)
            db.row_factory = sqlite3.Row
            g.db = db
            db.execute('''CREATE TABLE news (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                title TEXT NOT NULL,
                source TEXT,
                published TEXT,
                url TEXT UNIQUE NOT NULL,
                summary TEXT,
                content TEXT,
                category TEXT,
                description TEXT,
                timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
            )''')
            db.execute('CREATE INDEX idx_published ON news (published DESC)')
            db.commit()
            logging.info("Switched to in-memory database due to write failure")
def close_db(e=None):
    """Teardown hook: close and discard the request's SQLite connection, if any."""
    connection = g.pop('db', None)
    if connection is not None:
        connection.close()
def categorize_article(content: str) -> str:
    """Assign one CATEGORY_KEYWORDS category to an article snippet.

    Only the first 300 characters are scanned (case-insensitively) to bound
    CPU cost.

    Args:
        content: Raw article text.

    Returns:
        The best-matching category name, "Economy" when the text mentions
        finance/India but matches no category, or "General" otherwise.
    """
    content_lower = content[:300].lower()
    # FIX: count every keyword hit per category. The previous loop broke out
    # after the first match, so every score was 0 or 1 and max() effectively
    # returned the first category with any hit rather than the best one.
    category_scores = {
        category: sum(1 for keyword in keywords if keyword in content_lower)
        for category, keywords in CATEGORY_KEYWORDS.items()
    }
    best_category = max(category_scores.items(), key=lambda x: x[1], default=("General", 0))
    finance_matches = sum(1 for keyword in INDIA_FINANCE_KEYWORDS if keyword in content_lower)
    logging.info(f"Categorization: best_category={best_category}, finance_matches={finance_matches}, content_snippet={content[:100]}...")
    # Deliberately loose inclusion: any finance keyword, any category hit, or "india".
    if finance_matches >= 1 or best_category[1] >= 1 or "india" in content_lower:
        return best_category[0] if best_category[1] >= 1 else "Economy"  # Default to "Economy" for broader relevance
    return "General"
def cache_news(articles: List[Dict]):
    """Persist articles into the news table; duplicate URLs are silently skipped.

    Runs inside `with db` so the batch commits atomically (or rolls back on
    an unexpected error).
    """
    columns = ('title', 'source', 'published', 'url', 'summary',
               'content', 'category', 'description')
    db = get_db()
    with db:
        for entry in articles:
            row = tuple(entry.get(col, '') for col in columns)
            try:
                db.execute('''INSERT OR IGNORE INTO news
(title, source, published, url, summary, content, category, description)
VALUES (?,?,?,?,?,?,?,?)''', row)
            except sqlite3.IntegrityError:
                # e.g. NOT NULL violation on a malformed article — skip it.
                continue
def get_cached_news(category: str = None) -> List[Dict]:
    """Read up to MAX_NEWS_ARTICLES newest rows, optionally filtered by category.

    Each returned dict gains an 'age' key (human-readable relative time).
    """
    db = get_db()
    if not category:
        cursor = db.execute('SELECT * FROM news ORDER BY published DESC LIMIT ?', (MAX_NEWS_ARTICLES,))
    else:
        cursor = db.execute('SELECT * FROM news WHERE category = ? ORDER BY published DESC LIMIT ?', (category, MAX_NEWS_ARTICLES))
    rows = [dict(record) for record in cursor.fetchall()]
    for row in rows:
        row['age'] = calculate_age(row['published'])
    return rows
def calculate_age(published):
    """Render an ISO-8601 UTC timestamp as a human-friendly relative age.

    Args:
        published: String like "2025-03-01T12:00:00Z" (NewsAPI format).

    Returns:
        "N day(s)/hour(s)/minute(s) ago", or "Unknown time" when the value
        is missing or not in the expected format.
    """
    try:
        published_time = datetime.strptime(published, '%Y-%m-%dT%H:%M:%SZ')
    except (ValueError, TypeError):
        # FIX: also catch TypeError so None / non-string values degrade
        # gracefully instead of raising out of the request handler.
        return "Unknown time"
    delta = datetime.utcnow() - published_time
    # FIX: a timestamp in the future used to wrap to days=-1 and report
    # nonsense like "23 hours ago"; clamp it to the smallest bucket.
    if delta.total_seconds() < 0:
        return "0 minutes ago"
    if delta.days > 0:
        return f"{delta.days} day{'s' if delta.days > 1 else ''} ago"
    if delta.seconds >= 3600:
        hours = delta.seconds // 3600
        return f"{hours} hour{'s' if hours > 1 else ''} ago"
    minutes = delta.seconds // 60
    return f"{minutes} minute{'s' if minutes > 1 else ''} ago"
# Function to fetch news from websites using BeautifulSoup and requests
def fetch_news_from_websites() -> List[Dict]:
    """Scrape headline candidates from WEBSITES as a NewsAPI fallback.

    Walks each site in order, grabbing heading-like elements until 5
    finance-relevant articles are collected. Per-site failures are logged
    and skipped. Returns a list of article dicts matching fetch_news()'s
    shape. Returns [] during the build phase.
    """
    if IS_BUILDING:
        logging.info("Skipping web scraping during build phase")
        return []
    from urllib.parse import urljoin  # local import: only needed here
    articles = []
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'}
    used_headlines = set()
    current_time = datetime.utcnow()
    for url in WEBSITES:
        try:
            response = requests.get(url, headers=headers, timeout=10)
            response.raise_for_status()
            soup = BeautifulSoup(response.text, 'html.parser')
            # Generic selectors (adjust per site)
            news_items = soup.select('h1, h2, h3, .story, .article, .headline, .title')
            for item in news_items:
                if len(articles) >= 5:  # Further reduced limit to 5 articles
                    break
                title = item.get_text(strip=True)[:100]
                if title and title not in used_headlines:
                    used_headlines.add(title)
                    # NOTE: publication time is fabricated (random point within
                    # the last 24h) because the scraped markup exposes no timestamp.
                    time_diff = random.randint(0, 24 * 60 * 60)
                    pub_date = (current_time - timedelta(seconds=time_diff)).strftime("%Y-%m-%dT%H:%M:%SZ")
                    desc_elem = item.find_next('p') or item.find_next('div', class_=['summary', 'content', 'desc'])
                    raw_content = desc_elem.get_text(strip=True) if desc_elem else f"Latest update from {url.split('/')[2]}."
                    link = item.find_parent('a')
                    article_url = link['href'] if link and 'href' in link.attrs else url
                    # FIX: resolve relative hrefs against the page URL. The old
                    # string concatenation produced malformed links such as
                    # "https://site/sectionpath" for root-relative hrefs;
                    # urljoin handles absolute, root-relative and relative forms.
                    article_url = urljoin(url, article_url)
                    category = categorize_article(raw_content)
                    if category != "General":
                        articles.append({
                            'title': title,
                            'source': url.split('/')[2],
                            'published': pub_date,
                            'url': article_url,
                            'summary': raw_content[:200] + "...",
                            'content': raw_content,
                            'category': category,
                            'description': None,
                            'age': calculate_age(pub_date)
                        })
        except Exception as e:
            logging.error(f"Failed to fetch from {url}: {str(e)}")
        if len(articles) >= 5:
            break
    return articles
# News Fetching with Enhanced Rate Limit Handling (NewsAPI)
def fetch_news(query: str = None) -> List[Dict]:
    """Fetch up to MAX_NEWS_ARTICLES India-finance articles from NewsAPI.

    Flow: serve the in-process cache if fresh -> throttle to one API call per
    API_CALL_INTERVAL seconds -> query NewsAPI (last 24h, English) with up to
    5 retries and exponential backoff on HTTP 429 -> if the default query
    returns nothing, retry with a broader "india AND business" query -> filter
    out removed/empty and non-finance articles -> top up from web scraping if
    fewer than 5 remain -> refresh both the in-process and SQLite caches.

    Args:
        query: Optional NewsAPI query string; defaults to a broad
            India-finance query.

    Returns:
        The freshly processed list, or the previously cached list on any
        failure (possibly empty).
    """
    global last_api_call, last_fetch_time, cached_articles, cache_expiry
    current_time = time.time()
    if cache_expiry and current_time < cache_expiry and cached_articles:
        logging.info("Returning cached articles")
        return cached_articles
    # Client-side throttle so bursts of callers can't hammer the API.
    if current_time - last_api_call < API_CALL_INTERVAL:
        time.sleep(API_CALL_INTERVAL - (current_time - last_api_call))
    # NOTE(review): stamped with the pre-sleep time, so the throttle window is
    # slightly shorter than API_CALL_INTERVAL after a sleep — confirm intended.
    last_api_call = current_time
    retry_attempts = 5
    backoff_factor = 3
    initial_delay = 5
    # NOTE(review): unused variable, kept for byte-compatibility.
    current_date_str = datetime.now().strftime("%Y-%m-%d")
    for attempt in range(retry_attempts):
        try:
            # Restrict results to roughly the last 24 hours.
            to_date = datetime.now().strftime('%Y-%m-%d')
            from_date = (datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d')
            if query:
                url = (f"https://newsapi.org/v2/everything?"
                       f"q={query}&"
                       f"from={from_date}&to={to_date}&"
                       f"language=en&"
                       f"sortBy=publishedAt&"
                       f"pageSize={MAX_NEWS_ARTICLES}&"
                       f"apiKey={NEWS_API_KEY}")
            else:
                url = (f"https://newsapi.org/v2/everything?"
                       f"q=india AND (finance OR market OR economy OR investment OR stocks OR banking OR budget OR gold OR silver OR cryptocurrency OR mutual funds OR ipo)&"
                       f"from={from_date}&to={to_date}&"
                       f"language=en&"
                       f"sortBy=publishedAt&"
                       f"pageSize={MAX_NEWS_ARTICLES}&"
                       f"apiKey={NEWS_API_KEY}")
            response = requests.get(url, timeout=10)
            response.raise_for_status()
            data = response.json()
            logging.info(f"NewsAPI response: {data}")
            if data.get('status') != 'ok':
                logging.error(f"NewsAPI error: {data.get('message', 'Unknown error')}")
                return cached_articles
            articles = data.get('articles', [])
            # Fallback: broaden the query if the default one found nothing.
            if not articles and not query:
                logging.info("No articles found with initial query, trying broader query")
                url = (f"https://newsapi.org/v2/everything?"
                       f"q=india AND business&"
                       f"from={from_date}&to={to_date}&"
                       f"language=en&"
                       f"sortBy=publishedAt&"
                       f"pageSize={MAX_NEWS_ARTICLES}&"
                       f"apiKey={NEWS_API_KEY}")
                response = requests.get(url, timeout=10)
                response.raise_for_status()
                data = response.json()
                logging.info(f"NewsAPI fallback response: {data}")
                articles = data.get('articles', [])
            processed = []
            for article in articles[:MAX_NEWS_ARTICLES]:
                # NewsAPI marks withdrawn items with the literal title '[Removed]'.
                if article['title'] != '[Removed]' and article['content']:
                    raw_content = article['content'][:1024]
                    title = article['title']
                    category = categorize_article(raw_content)
                    if category != "General":  # Include articles that match any finance-related keyword
                        processed.append({
                            'title': title,
                            'source': article['source']['name'],
                            'published': article['publishedAt'],
                            'url': article['url'],
                            'summary': raw_content[:200] + "...",
                            'content': raw_content,
                            'category': category,
                            'description': None,
                            'age': calculate_age(article['publishedAt'])
                        })
            # Fetch additional articles from websites if NewsAPI yields fewer than 5 articles
            if len(processed) < 5 and not IS_BUILDING:
                web_articles = fetch_news_from_websites()
                processed.extend(web_articles[:5 - len(processed)])
            # Refresh both cache layers (process memory + SQLite).
            cached_articles = processed
            cache_expiry = current_time + CACHE_EXPIRY_DURATION
            with app.app_context():
                cache_news(processed)
            last_fetch_time = datetime.now()
            logging.info(f"Fetched and cached {len(processed)} new financial articles")
            return processed
        except requests.HTTPError as e:
            if e.response.status_code == 429:
                # Rate-limited: exponential backoff, then retry the loop.
                wait_time = initial_delay + (backoff_factor ** attempt)
                logging.warning(f"Rate limit exceeded, retrying in {wait_time} seconds (attempt {attempt + 1}/{retry_attempts})")
                time.sleep(wait_time)
                continue
            else:
                logging.error(f"Error fetching news: {str(e)}")
                return cached_articles
        except requests.RequestException as e:
            logging.error(f"Error fetching news: {str(e)}")
            return cached_articles
    logging.error("Max retry attempts reached for NewsAPI, returning cached articles")
    return cached_articles
# Background Refresh Thread (disabled during build)
stop_refresh = Event()

def refresh_news_periodically():
    """Refresh the news cache every REFRESH_INTERVAL seconds until stop_refresh is set.

    Runs as a daemon thread; exits immediately during the build phase.
    """
    if IS_BUILDING:
        logging.info("Skipping background news refresh during build phase")
        return
    while not stop_refresh.is_set():
        with app.app_context():
            fetch_news()
        logging.info(f"News refreshed at {datetime.now().isoformat()}")
        # FIX: Event.wait() instead of time.sleep() so stop_refresh.set()
        # wakes the thread immediately rather than after up to 2 hours.
        stop_refresh.wait(REFRESH_INTERVAL)

refresh_thread = Thread(target=refresh_news_periodically, daemon=True)
if not IS_BUILDING:
    refresh_thread.start()
# Startup Logic
with app.app_context():
    # Create/migrate the schema at import time so the first request and the
    # background refresher both find a usable table.
    init_db()
logging.info("Application started")
# Chatbot Utilities
def detect_financial_topic(query: str) -> Dict:
    """Classify a user query against CATEGORY_KEYWORDS.

    Returns a dict with 'is_financial'; on a hit it also carries 'categories'
    (every matching category, in declaration order) and 'primary_category'
    (the first match). On a miss it carries canned 'suggestions' instead.
    """
    text = query.lower()
    matched = [category for category, keywords in CATEGORY_KEYWORDS.items()
               if any(keyword in text for keyword in keywords)]
    if matched:
        return {'is_financial': True, 'categories': matched, 'primary_category': matched[0]}
    return {'is_financial': False, 'categories': [], 'suggestions': generate_topic_suggestions()}
def generate_topic_suggestions() -> List[str]:
    """Return canned prompts offered when the user's question is off-topic."""
    suggestions = (
        "What's today's Nifty 50 trend?",
        "Explain RBI's latest repo rate decision",
        "What's the latest on cryptocurrency in India?",
        "How is the Indian economy doing?",
        "What are the recent government policies affecting the market?",
        "What are the current gold prices in India?",
    )
    return list(suggestions)
# Routes
@app.route('/')
def home():
    """Render the landing page with the freshest batch of articles."""
    latest_articles = fetch_news()
    timestamp = last_fetch_time.isoformat() if last_fetch_time else None
    return render_template(
        'index.html',
        articles=latest_articles,
        last_updated=timestamp,
        categories=list(CATEGORY_KEYWORDS.keys()),
    )
@app.route('/category/<category_name>')
def category_news(category_name):
    """Show cached (or freshly fetched) articles for a single category."""
    all_categories = list(CATEGORY_KEYWORDS.keys())
    if category_name not in CATEGORY_KEYWORDS:
        # Unknown category: render the page with an error banner instead.
        return render_template('index.html',
                               articles=[],
                               error=f"Invalid category: {category_name}",
                               categories=all_categories)
    with app.app_context():
        articles = get_cached_news(category_name)
        if not articles:
            # Cache miss: pull fresh news and keep only this category.
            fetched = fetch_news()
            articles = [a for a in fetched if a.get('category') == category_name][:MAX_NEWS_ARTICLES]
    return render_template('index.html',
                           articles=articles,
                           current_category=category_name,
                           last_updated=last_fetch_time.isoformat() if last_fetch_time else None,
                           categories=all_categories)
@app.route('/chat', methods=['POST'])
def chat():
    """Chatbot endpoint.

    Expects JSON {"message": str}. Pipeline: validate input -> greeting
    shortcut -> detect financial topic -> fetch topical news for context ->
    build a summary (static knowledge base first, then news descriptions) ->
    append canned, category-specific investment recommendations.

    Returns:
        JSON {"response": [lines], "status": ...} plus 'category' on success
        or 'suggestions' for off-topic queries; 400 on bad input, 500 on
        unexpected errors.
    """
    logging.info("Received chat request")
    try:
        data = request.get_json()
        # --- Input validation ------------------------------------------------
        if not data or 'message' not in data:
            logging.error("Invalid chat request: No message provided")
            return jsonify({
                'response': ['Invalid request: Please provide a message.'],
                'status': 'invalid_input'
            }), 400
        user_input = data['message'].strip().lower()
        logging.info(f"Processing user input: {user_input}")
        if not user_input or len(user_input) < 3:
            logging.info("Input too short")
            return jsonify({
                'response': ['Please enter a valid question (minimum 3 characters).'],
                'status': 'invalid_input'
            }), 400
        # --- Greeting shortcut ----------------------------------------------
        if user_input in ['hi', 'hii', 'hello', 'hey']:
            logging.info("Greeting received")
            return jsonify({
                'response': ['Hello! I’m here to help with financial questions on market trends, IPOs, mutual funds, crypto, economy, banking, policies, and metals. Ask away!'],
                'status': 'success'
            })
        # --- Topic detection -------------------------------------------------
        topic_info = detect_financial_topic(user_input)
        logging.info(f"Detected topic: {topic_info}")
        if not topic_info['is_financial']:
            suggestions = topic_info['suggestions']
            logging.info("Non-financial query, providing suggestions")
            response_lines = [
                "I focus on market trends, IPOs, mutual funds, crypto, economy, banking, policies, and metals.",
                f"Try: {', '.join(suggestions[:3])}. What interests you?"
            ]
            return jsonify({
                'response': response_lines,
                'suggestions': suggestions,
                'status': 'off_topic'
            })
        with app.app_context():
            # --- News context: try a more specific query based on the user input
            specific_query = None
            if "repo rate" in user_input or "rbi" in user_input:
                specific_query = "india AND rbi AND repo rate"
            elif "nifty" in user_input:
                specific_query = "india AND nifty"
            elif "gold" in user_input or "silver" in user_input:
                specific_query = "india AND (gold OR silver)"
            elif "cryptocurrency" in user_input or "bitcoin" in user_input:
                specific_query = "india AND cryptocurrency"
            elif "gdp" in user_input or "economy" in user_input:
                specific_query = "india AND gdp"
            elif "mutual fund" in user_input:
                specific_query = "india AND mutual fund"
            elif "government policies" in user_input or "policy" in user_input:
                specific_query = "india AND government policy"
            elif "ipo" in user_input or "ipos" in user_input:
                specific_query = "india AND ipo"
            if specific_query:
                context_articles = fetch_news(specific_query)
            else:
                context_articles = fetch_news()
            # Filter articles by category and relevance to the query
            relevant_articles = []
            for article in context_articles:
                if article['category'] == topic_info['primary_category']:
                    # Check if the article title or content contains query keywords
                    content_lower = (article['title'] + " " + article['content']).lower()
                    if any(keyword in content_lower for keyword in user_input.split()):
                        relevant_articles.append(article)
            # If no relevant articles are found, fall back to category match
            if not relevant_articles:
                relevant_articles = [a for a in context_articles if a['category'] == topic_info['primary_category']]
            context_articles = relevant_articles[:2]  # Limit to 2 articles for faster processing
            # Skip description generation to reduce CPU usage
            for article in context_articles:
                article['description'] = article['summary']
            # --- Summary: static knowledge base first, then news descriptions
            summary = "No recent news available."
            qa_answer = None  # NOTE(review): assigned but never returned; kept as-is
            if topic_info['primary_category'] in FINANCIAL_KNOWLEDGE_BASE:
                knowledge = FINANCIAL_KNOWLEDGE_BASE[topic_info['primary_category']]
                for key, value in knowledge.items():
                    if key in user_input:
                        qa_answer = value
                        summary = value
                        break
            if context_articles and summary == "No recent news available.":
                # Deduplicate descriptions and limit to unique content
                descriptions = list(dict.fromkeys([article['description'] for article in context_articles]))
                summary = " ".join(descriptions[:2])  # Limit to 2 descriptions to avoid repetition
                logging.info(f"Using news summary: {summary}")
            else:
                # NOTE(review): this branch also runs when the knowledge base
                # DID match (summary != default) — confirm the appended
                # "General insights" suffix is intended in that case.
                summary += f" General insights for {topic_info['primary_category']}."
                logging.info("No news context available, using fallback summary")
            # If the query is about Nifty trends, prioritize the knowledge base
            if "nifty" in user_input and topic_info['primary_category'] == "Stock Market":
                knowledge = FINANCIAL_KNOWLEDGE_BASE['Stock Market'].get('nifty trend', '')
                summary = knowledge + " " + summary
            # --- Construct the response as a list of lines
            response_lines = ["**Summary**"]
            summary_lines = summary.split('. ')
            summary_lines = [line.strip() for line in summary_lines if line.strip()]
            # Truncate any sentence longer than 30 words to keep replies short.
            summary_lines = [line if len(line.split()) <= 30 else ' '.join(line.split()[:30]) + '.' for line in summary_lines]
            response_lines.extend(summary_lines)
            response_lines.append("")
            response_lines.append("**Investment Recommendations for Indian Investors**")
            # --- Canned recommendations per category (Nifty queries get the
            # NIFTY_50_STOCKS table; everything else a hand-written list).
            if "nifty" in user_input and topic_info['primary_category'] == "Stock Market":
                recommendations = [
                    f"1. {stock} is a key Nifty 50 stock with strong performance. [Read more: {url}]"
                    for stock, url in list(NIFTY_50_STOCKS.items())[:5]
                ]
            elif topic_info['primary_category'] == "Stock Market":
                recommendations = [
                    "1. Reliance Industries offers growth in diverse sectors. [Read more: http://economictimes.com/markets/stocks/news/reliance-industries-q3-results-2025/]",
                    "2. TCS provides stable returns in IT sector. [Read more: http://economictimes.com/markets/stocks/news/tcs-q3-results-2025/]",
                    "3. HDFC Bank ensures consistent performance for investors. [Read more: http://moneycontrol.com/news/hdfc-bank-q3-results-2025/]",
                    "4. Infosys grows with IT outsourcing trends. [Read more: http://livemint.com/news/infosys-q3-results-2025/]",
                    "5. Adani Ports benefits from infrastructure growth. [Read more: http://livemint.com/news/adani-ports-q3-results-2025/]"
                ]
            elif topic_info['primary_category'] == "Banking":
                recommendations = [
                    "1. Axis Bank shows strong retail banking growth. [Read more: http://moneycontrol.com/news/axis-bank-q3-results-2025/]",
                    "2. HDFC Bank offers reliable returns in banking. [Read more: http://moneycontrol.com/news/hdfc-bank-q3-results-2025/]",
                    "3. ICICI Bank ensures consistent retail performance. [Read more: http://livemint.com/news/icici-bank-q3-results-2025/]",
                    "4. SBI provides stability with government backing. [Read more: http://moneycontrol.com/news/sbi-q3-results-2025/]",
                    "5. Kotak Mahindra focuses on digital banking growth. [Read more: http://livemint.com/news/kotak-mahindra-q3-results-2025/]"
                ]
            elif topic_info['primary_category'] == "Economy":
                recommendations = [
                    "1. Reliance Industries offers growth in diverse sectors. [Read more: http://economictimes.com/markets/stocks/news/reliance-industries-q3-results-2025/]",
                    "2. HDFC Bank provides stable returns in uncertainty. [Read more: http://moneycontrol.com/news/hdfc-bank-q3-results-2025/]",
                    "3. ICICI Bank ensures consistent economic performance. [Read more: http://livemint.com/news/icici-bank-q3-results-2025/]",
                    "4. Tata Steel benefits from industrial demand. [Read more: http://economictimes.com/markets/stocks/news/tata-steel-q3-results-2025/]",
                    "5. Adani Ports leverages infrastructure growth. [Read more: http://livemint.com/news/adani-ports-q3-results-2025/]"
                ]
            elif topic_info['primary_category'] == "Cryptocurrency":
                recommendations = [
                    "1. WazirX offers cautious crypto trading platform. [Read more: http://wazirx.com/news-updates/]",
                    "2. CoinDCX provides diverse digital currency options. [Read more: http://coindcx.com/news-updates/]",
                    "3. Bitcoin leads for long-term crypto investors. [Read more: http://coindesk.com/bitcoin-news/]",
                    "4. Ethereum grows with smart contract potential. [Read more: http://coindesk.com/ethereum-news/]",
                    "5. Zebpay ensures secure crypto trading platform. [Read more: http://zebpay.com/news-updates/]"
                ]
            elif topic_info['primary_category'] == "Mutual Funds":
                recommendations = [
                    "1. SBI Equity Hybrid Fund offers balanced growth. [Read more: http://sbimf.com/fund-updates/]",
                    "2. HDFC Mid-Cap Fund targets growth potential. [Read more: http://hdfcfund.com/fund-updates/]",
                    "3. ICICI Bluechip Fund focuses on large-cap stability. [Read more: http://icicipruamc.com/fund-updates/]",
                    "4. Axis Long Term Fund offers tax benefits. [Read more: http://axismf.com/fund-updates/]",
                    "5. Mirae Emerging Bluechip targets growth sectors. [Read more: http://miraeassetmf.co.in/fund-updates/]"
                ]
            elif topic_info['primary_category'] == "Metals":
                recommendations = [
                    "1. Sovereign Gold Bonds offer interest benefits. [Read more: http://rbi.org.in/sovereign-gold-bonds/]",
                    "2. Gold ETFs provide easy price exposure. [Read more: http://nseindia.com/gold-etf/]",
                    "3. Silver ETFs offer precious metal investment. [Read more: http://nseindia.com/silver-etf/]",
                    "4. MCX Gold Futures enable speculative trading. [Read more: http://mcxindia.com/gold-futures/]",
                    "5. Physical Gold Jewellery suits traditional investors. [Read more: http://titan.co.in/gold-jewellery/]"
                ]
            else:
                recommendations = [
                    "1. Reliance Industries offers diversified growth. [Read more: http://economictimes.com/markets/stocks/news/reliance-industries-q3-results-2025/]",
                    "2. HDFC Bank ensures stable returns. [Read more: http://moneycontrol.com/news/hdfc-bank-q3-results-2025/]",
                    "3. ICICI Bank provides consistent performance. [Read more: http://livemint.com/news/icici-bank-q3-results-2025/]",
                    "4. Tata Steel benefits from industrial demand. [Read more: http://economictimes.com/markets/stocks/news/tata-steel-q3-results-2025/]",
                    "5. Adani Ports leverages infrastructure growth. [Read more: http://livemint.com/news/adani-ports-q3-results-2025/]"
                ]
            response_lines.extend(recommendations)
            return jsonify({
                'response': response_lines,
                'category': topic_info['primary_category'],
                'status': 'success'
            })
    except Exception as e:
        # Catch-all at the route boundary so the client always gets JSON.
        logging.error(f"Chat error: {str(e)}")
        return jsonify({
            'response': ['Sorry, I encountered an error. Try again or check a financial news source.'],
            'status': 'error'
        }), 500
@app.route('/health')
def health():
    """Liveness endpoint reporting DB availability and refresher thread status."""
    with app.app_context():
        connected = get_db() is not None
    return jsonify({
        "status": "healthy",
        "refresh_running": refresh_thread.is_alive(),
        "database": "connected" if connected else "disconnected",
    })
if __name__ == '__main__':
    # Bind on all interfaces; port 7860 is the Hugging Face Spaces default.
    app.run(host='0.0.0.0', port=7860)