Spaces:
Sleeping
Sleeping
Commit ·
44a7a32
1
Parent(s): fe4fff1
Ollama Support Added
Browse files- .gitignore +1 -0
- requirements.txt +9 -2
- src/customAPI.py +968 -0
- src/streamlit_app.py +815 -38
.gitignore
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
/python
|
requirements.txt
CHANGED
|
@@ -1,3 +1,10 @@
|
|
| 1 |
-
|
|
|
|
|
|
|
|
|
|
| 2 |
pandas
|
| 3 |
-
streamlit
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
matplotlib
|
| 2 |
+
requests
|
| 3 |
+
pytz
|
| 4 |
+
lxml
|
| 5 |
pandas
|
| 6 |
+
streamlit
|
| 7 |
+
numpy
|
| 8 |
+
plotly
|
| 9 |
+
langchain
|
| 10 |
+
langchain-ollama
|
src/customAPI.py
ADDED
|
@@ -0,0 +1,968 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import requests
|
| 2 |
+
import logging
|
| 3 |
+
import urllib.parse
|
| 4 |
+
import pytz
|
| 5 |
+
import pandas as pd
|
| 6 |
+
from datetime import datetime
|
| 7 |
+
from urllib.parse import quote , urlencode
|
| 8 |
+
from lxml import etree
|
| 9 |
+
from io import BytesIO
|
| 10 |
+
tz = pytz.timezone('Asia/Kolkata')  # IST — NSE market timestamps are exchange-local

# Configure logging
logger = logging.getLogger(__name__)  # module-level logger, configured by the host app
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class NSEAPIError(Exception):
    """Raised when a request to the NSE API fails or returns invalid data."""
|
| 19 |
+
|
| 20 |
+
class NSEAPI:
|
| 21 |
+
"""API client for fetching data from www.nseindia.com."""
|
| 22 |
+
|
| 23 |
+
def __init__(self):
    """Create an API client and bootstrap the NSE session cookies."""
    self.base_url_charting = "https://charting.nseindia.com"
    self.base_url_market = "https://www.nseindia.com/api/"
    self.session = requests.Session()
    # Browser-like headers; NSE endpoints reject requests without them.
    self.headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36",
        "Referer": "https://www.nseindia.com/",
        "Origin": "https://www.nseindia.com",
        "Sec-Fetch-Site": "same-origin",
    }
    # Prime the session with the cookies required by the data endpoints.
    self.initialize_session()
|
| 34 |
+
|
| 35 |
+
def initialize_session(self):
    """Fetch an NSE page to initialize the session and obtain cookies.

    Raises:
        NSEAPIError: If the bootstrap request fails.
    """
    bootstrap_url = 'https://www.nseindia.com/report-detail/eq_security'
    try:
        resp = self.session.get(bootstrap_url, headers=self.headers, timeout=10)
        resp.raise_for_status()
        logger.info("Session initialized successfully.")
    except requests.exceptions.RequestException as exc:
        raise NSEAPIError(f"Error initializing session: {exc}")
|
| 44 |
+
|
| 45 |
+
def get_all_equities(self):
    """
    Retrieves the list of all equities available on NSE.

    Returns:
        pandas.DataFrame: One row per equity with columns 'ScripCode' (int),
        'TradingSymbol', 'Description' and 'InstrumentType'.

    Raises:
        NSEAPIError: If there is an error fetching the data from the server,
            or the response contains no parseable equity records.

    Example:
        >>> api = NSEAPI()
        >>> equities = api.get_all_equities()
        >>> equities.iloc[0]['TradingSymbol']
        'INFY'
    """
    endpoint = "/Charts/GetEQMasters"
    # Per-request header copy: the original mutated self.headers, so the
    # text/plain content type leaked into every subsequent request.
    headers = {**self.headers, 'Content-Type': "text/plain", 'Accept': "text/plain"}

    # Fetch data
    try:
        response = self.session.get(
            urllib.parse.urljoin(self.base_url_charting, endpoint),
            headers=headers,
        )
        response.raise_for_status()  # Raises for HTTP errors (e.g., 404, 500)
    except requests.exceptions.RequestException as e:
        raise NSEAPIError(f"Error fetching data: {e}")

    # Response is pipe-delimited plain text, one instrument per line.
    equities = []
    for line in response.text.split('\n'):
        if not line.strip():  # skip empty lines
            continue
        fields = [field.strip() for field in line.split('|')]
        if len(fields) == 4:  # expect exactly 4 columns
            equities.append({
                'ScripCode': fields[0],
                'TradingSymbol': fields[1],
                'Description': fields[2],
                'InstrumentType': fields[3],
            })
        else:
            logger.warning(f"Malformed line skipped: {line}")

    # The first parsed row is the column-header line; everything after it is
    # data. Fail explicitly instead of crashing with AttributeError on an
    # empty frame (the original built a column-less DataFrame here).
    if len(equities) < 2:
        logger.warning("No valid equity data found in the response")
        raise NSEAPIError("No valid equity data found in the response")

    eqdf = pd.DataFrame(equities[1:])
    eqdf['ScripCode'] = eqdf['ScripCode'].astype(int)  # vectorized; replaces .apply(int)
    eqdf = eqdf.reset_index(drop=True)
    return eqdf
|
| 106 |
+
|
| 107 |
+
def get_all_currencies(self):
    """
    Retrieves the list of all currency pairs available on NSE.

    Returns:
        list of dict: Each dictionary contains 'ScripCode', 'TradingSymbol',
        'Description', and 'InstrumentType'.

    Raises:
        NSEAPIError: If there is an error fetching the data from the server.

    Example:
        >>> api = NSEAPI()
        >>> currencies = api.get_all_currencies()
        >>> print(currencies[0])
        {
            'ScripCode': '1',
            'TradingSymbol': 'USDINR',
            'Description': 'USDINR',
            'InstrumentType': '8'
        }
    """
    endpoint = "/Charts/GetCDMasters"
    # Per-request header copy so repeated calls don't mutate self.headers.
    headers = {**self.headers, 'Content-Type': "text/plain", 'Accept': "text/plain"}

    # Fetch data
    try:
        response = self.session.get(
            urllib.parse.urljoin(self.base_url_charting, endpoint),
            headers=headers,
        )
        response.raise_for_status()  # Raises for HTTP errors (e.g., 404, 500)
    except requests.exceptions.RequestException as e:
        raise NSEAPIError(f"Error fetching data: {e}")

    # Response is pipe-delimited plain text, one instrument per line.
    currencies = []
    for line in response.text.split('\n'):
        if not line.strip():  # skip empty lines
            continue
        fields = [field.strip() for field in line.split('|')]
        if len(fields) == 4:  # expect exactly 4 columns
            currencies.append({
                'ScripCode': fields[0],
                'TradingSymbol': fields[1],
                'Description': fields[2],
                'InstrumentType': fields[3],
            })
        else:
            logger.warning(f"Malformed line skipped: {line}")

    # Warn if no valid data is found.
    # Fix: message said "equity" — copy/paste from get_all_equities.
    if not currencies:
        logger.warning("No valid currency data found in the response")

    return currencies
|
| 162 |
+
|
| 163 |
+
def get_all_fnos(self):
    """
    Retrieves the list of all FnO (futures & options) contracts available on NSE.

    Returns:
        list of dict: Each dictionary contains 'ScripCode', 'TradingSymbol',
        'Description', and 'InstrumentType'.

    Raises:
        NSEAPIError: If there is an error fetching the data from the server.

    Example:
        >>> api = NSEAPI()
        >>> fnos = api.get_all_fnos()
        >>> print(fnos[0])
        {
            'ScripCode': '35000',
            'TradingSymbol': 'MIDCPNIFTY25MARFUT',
            'Description': 'MIDCPNIFTY 27 Mar 2025',
            'InstrumentType': '1'
        }
    """
    endpoint = "/Charts/GetFOMasters"
    # Per-request header copy so repeated calls don't mutate self.headers.
    headers = {**self.headers, 'Content-Type': "text/plain", 'Accept': "text/plain"}

    # Fetch data
    try:
        response = self.session.get(
            urllib.parse.urljoin(self.base_url_charting, endpoint),
            headers=headers,
        )
        response.raise_for_status()  # Raises for HTTP errors (e.g., 404, 500)
    except requests.exceptions.RequestException as e:
        raise NSEAPIError(f"Error fetching data: {e}")

    # Response is pipe-delimited plain text, one instrument per line.
    fnos = []
    for line in response.text.split('\n'):
        if not line.strip():  # skip empty lines
            continue
        fields = [field.strip() for field in line.split('|')]
        if len(fields) == 4:  # expect exactly 4 columns
            fnos.append({
                'ScripCode': fields[0],
                'TradingSymbol': fields[1],
                'Description': fields[2],
                'InstrumentType': fields[3],
            })
        else:
            logger.warning(f"Malformed line skipped: {line}")

    # Warn if no valid data is found.
    # Fix: message said "equity" — copy/paste from get_all_equities.
    if not fnos:
        logger.warning("No valid FnO data found in the response")

    return fnos
|
| 218 |
+
|
| 219 |
+
def get_all_indices(self):
    """
    Retrieves the list of all indices available on NSE.

    Returns:
        pandas.DataFrame: One row per index with details such as 'index',
        'last', 'variation', etc.

    Raises:
        NSEAPIError: If there is an error fetching the data from the server
            or if the response is invalid.

    Example:
        >>> api = NSEAPI()
        >>> indices = api.get_all_indices()
        >>> indices.iloc[0]['index']
        'NIFTY 50'
    """
    endpoint = "allIndices"
    self.headers['Content-Type'] = "application/json; charset=utf-8"
    self.headers['Accept'] = "application/json; charset=utf-8"

    url = urllib.parse.urljoin(self.base_url_market, endpoint)
    try:
        response = self.session.get(url, headers=self.headers)
        response.raise_for_status()
        payload = response.json()
    except requests.RequestException as e:
        raise NSEAPIError(f"Failed to fetch data: {str(e)}")
    except ValueError:
        raise NSEAPIError("Received invalid JSON response")

    records = payload.get("data")
    if not isinstance(records, list):
        raise NSEAPIError("Response does not contain a valid 'data' list")

    return pd.DataFrame(records)
|
| 255 |
+
|
| 256 |
+
def get_index_data(self, index_name: str):
    """
    Retrieves detailed data for a selected index, including its constituents.

    Args:
        index_name (str): The name of the index (e.g., "NIFTY 50").

    Returns:
        dict: A dictionary with the following keys:
            - 'index': dict with index details (e.g., lastPrice, change, pChange)
            - 'constituents': list of dicts with stock details (e.g., symbol, lastPrice)
            - 'timestamp': str, the data timestamp
            - 'advance': dict with advances, declines, and unchanged counts

    Raises:
        NSEAPIError: If the API request fails or the response is invalid.

    Example:
        >>> api = NSEAPI()
        >>> data = api.get_index_data("NIFTY 50")
        >>> print(data['index']['lastPrice'])
        22552.5
        >>> print(data['constituents'][0]['symbol'])
        RELIANCE
    """
    # URL-encode the index name to handle spaces and special characters
    encoded_index = quote(index_name)
    url = f"https://www.nseindia.com/api/equity-stockIndices?index={encoded_index}"

    # Fix: use the cookie-initialized session and the class's browser-like
    # headers instead of a bare requests.get() — consistent with every other
    # endpoint in this class (a fresh request has no NSE session cookies).
    headers = {
        **self.headers,
        "accept": "*/*",
        "accept-language": "en-US,en;q=0.9",
    }

    # Fetch data
    try:
        response = self.session.get(url, headers=headers)
        response.raise_for_status()
    except requests.RequestException as e:
        raise NSEAPIError(f"Failed to fetch data for index '{index_name}': {str(e)}")

    # Parse JSON response
    try:
        data = response.json()
    except ValueError:
        raise NSEAPIError(f"Invalid JSON response for index '{index_name}'")

    # Validate required fields
    if "data" not in data or not isinstance(data["data"], list) or not data["data"]:
        raise NSEAPIError(f"No valid data found for index '{index_name}'")

    # The index row itself carries priority == 1; every other row is a
    # constituent stock.
    index_data = None
    constituents = []
    for item in data["data"]:
        if item.get("priority") == 1:
            index_data = item
        else:
            constituents.append(item)

    if not index_data:
        raise NSEAPIError(f"Index data not found in response for '{index_name}'")

    # Shared price fields for both the index row and each constituent.
    price_fields = (
        "lastPrice", "change", "pChange", "open", "dayHigh", "dayLow",
        "previousClose", "yearHigh", "yearLow",
        "totalTradedVolume", "totalTradedValue",
    )

    def _price_row(record):
        # Missing fields raise KeyError, converted to NSEAPIError below.
        return {key: record[key] for key in price_fields}

    # Structure the response; surface missing fields as NSEAPIError instead
    # of a bare KeyError (fix: the original let KeyError escape).
    try:
        return {
            "index": {"name": index_data["symbol"], **_price_row(index_data)},
            "constituents": [
                {"symbol": stock["symbol"], **_price_row(stock)}
                for stock in constituents
            ],
            "timestamp": data.get("timestamp", ""),
            "advance": data.get("advance", {}),
        }
    except KeyError as e:
        raise NSEAPIError(f"Missing expected field in response: {str(e)}")
|
| 358 |
+
|
| 359 |
+
def get_stock_metadata(self, symbol: str):
    """
    Retrieves all metadata for a specified stock symbol from the NSE India API.

    Args:
        symbol (str): The stock symbol (e.g., "RELIANCE"). A series suffix
            (e.g., "RELIANCE-EQ") is stripped before the lookup.

    Returns:
        dict: A dictionary containing all metadata for the stock, including:
            - info: Basic stock information (symbol, company name, industry, etc.).
            - metadata: Additional details (series, status, listing date, etc.).
            - securityInfo: Trading status and segment details.
            - priceInfo: Price data (last price, change, VWAP, etc.).
            - industryInfo: Industry classification.
            - preOpenMarket: Pre-open market data.

    Raises:
        NSEAPIError: If the API request fails, the response is invalid, or
            required data is missing.

    Example:
        >>> api = NSEAPI()
        >>> metadata = api.get_stock_metadata("RELIANCE")
        >>> print(metadata["info"]["companyName"])
        Reliance Industries Limited
        >>> print(metadata["priceInfo"]["lastPrice"])
        1246.4
    """
    # Strip any "-SERIES" suffix, then URL-encode special characters.
    encoded_symbol = quote(symbol.split('-')[0])
    # Fix: was a stray debug print(); use the module logger instead.
    logger.debug("Fetching metadata for symbol %s", encoded_symbol)

    endpoint = f"quote-equity?symbol={encoded_symbol}"
    # Per-request header copy so repeated calls don't mutate self.headers.
    headers = {
        **self.headers,
        'Content-Type': "application/json; charset=utf-8",
        'Accept': "application/json; charset=utf-8",
    }

    # Fetch data
    try:
        response = self.session.get(
            urllib.parse.urljoin(self.base_url_market, endpoint),
            headers=headers,
        )
        response.raise_for_status()  # Raises an HTTPError for bad responses
    except requests.RequestException as e:
        raise NSEAPIError(f"Failed to fetch metadata for symbol '{symbol}': {str(e)}")

    # Parse JSON response
    try:
        data = response.json()
    except ValueError:
        raise NSEAPIError(f"Invalid JSON response for symbol '{symbol}'")

    # Verify essential keys are present
    expected_keys = ["info", "metadata", "securityInfo", "priceInfo"]
    if not all(key in data for key in expected_keys):
        raise NSEAPIError(f"Incomplete metadata for symbol '{symbol}'")

    return data
|
| 413 |
+
|
| 414 |
+
def get_stock_live_trade_info(self, symbol: str):
    """
    Retrieves all traded information for a specified stock symbol from the NSE India API.

    Args:
        symbol (str): The stock symbol (e.g., "RELIANCE").

    Returns:
        dict: A dictionary containing all traded info for the stock, including:
            - marketDeptOrderBook: Top sellers/buyers positions, traded volume
              details, risk margins, etc.
            - securityWiseDP: Intraday and delivery trade volume.

    Raises:
        NSEAPIError: If the API request fails, the response is invalid, or
            required data is missing.

    Example:
        >>> api = NSEAPI()
        >>> info = api.get_stock_live_trade_info("RELIANCE")
        >>> print(info["securityWiseDP"]["deliveryToTradedQuantity"])
        70.19
    """
    # URL-encode the symbol to handle special characters
    encoded_symbol = quote(symbol)

    # Fix: the query string contained a mojibake '§ion=' (corrupted
    # '&section='), which broke the trade_info query parameter.
    endpoint = f"quote-equity?symbol={encoded_symbol}&section=trade_info"
    # Per-request header copy so repeated calls don't mutate self.headers.
    headers = {
        **self.headers,
        'Content-Type': "application/json; charset=utf-8",
        'Accept': "application/json; charset=utf-8",
    }

    # Fetch data
    try:
        response = self.session.get(
            urllib.parse.urljoin(self.base_url_market, endpoint),
            headers=headers,
        )
        response.raise_for_status()  # Raises an HTTPError for bad responses
    except requests.RequestException as e:
        raise NSEAPIError(f"Failed to fetch metadata for symbol '{symbol}': {str(e)}")

    # Parse JSON response
    try:
        data = response.json()
    except ValueError:
        raise NSEAPIError(f"Invalid JSON response for symbol '{symbol}'")

    # (removed stray debug print(data.keys()))

    # Verify essential keys are present
    expected_keys = ["marketDeptOrderBook", "securityWiseDP"]
    if not all(key in data for key in expected_keys):
        raise NSEAPIError(f"Incomplete metadata for symbol '{symbol}'")

    return data
|
| 465 |
+
|
| 466 |
+
def get_historical_data(self, code: str, from_date: datetime, to_date: datetime, time_interval: int = 1, period: str = 'I'):
    """
    Retrieves historical candlestick data for a given scrip code within the
    specified date range.

    Args:
        code (str): The scrip code of the stock or index (e.g., "26000").
        from_date (datetime): The start date and time in UTC.
        to_date (datetime): The end date and time in UTC.
        time_interval (int, optional): The interval (default is 1).
        period (str, optional): Chart period — minute: 'I', day: 'D'.

    Returns:
        list of dict: One dict per candlestick with keys 'timestamp' (Unix
        seconds), 'open', 'high', 'low', 'close', and 'volume'.

    Raises:
        NSEAPIError: If the API request fails or the response is invalid.

    Example:
        >>> from datetime import datetime, timezone
        >>> api = NSEAPI()
        >>> candles = api.get_historical_data(
        ...     "26000",
        ...     datetime(2024, 10, 28, tzinfo=timezone.utc),
        ...     datetime(2024, 10, 29, tzinfo=timezone.utc),
        ... )
        >>> candles[0]['open']
        22124.7
    """
    endpoint = "/Charts/symbolhistoricaldata/"
    self.headers['Content-Type'] = "application/json; charset=utf-8"
    self.headers['Accept'] = "application/json; charset=utf-8"

    # Request body expected by the charting endpoint.
    payload = {
        "exch": "N",
        "instrType": "C",
        "scripCode": code,
        "ulToken": code,
        "fromDate": int(from_date.timestamp()),
        "toDate": int(to_date.timestamp()),
        "timeInterval": time_interval,
        "chartPeriod": period,
        "chartStart": 0,
    }

    url = urllib.parse.urljoin(self.base_url_charting, endpoint)
    try:
        response = self.session.post(url, json=payload, headers=self.headers)
        response.raise_for_status()
        data = response.json()
    except requests.RequestException as e:
        raise NSEAPIError(f"Failed to fetch data: {str(e)}")
    except ValueError:
        raise NSEAPIError("Received invalid JSON response")

    # The endpoint reports success via a status field "s".
    if data.get("s") != "Ok":
        raise NSEAPIError(f"API returned an error: {data}")

    # Columnar OHLCV arrays: t/o/h/l/c/v must all be lists of equal length.
    try:
        columns = [data[key] for key in ("t", "o", "h", "l", "c", "v")]
    except KeyError as e:
        raise NSEAPIError(f"Missing expected field in response: {str(e)}")

    if not all(isinstance(col, list) for col in columns):
        raise NSEAPIError("Response contains invalid data types")
    if any(len(col) != len(columns[0]) for col in columns):
        raise NSEAPIError("Response arrays have mismatched lengths")

    # Pivot the columnar arrays into per-candle dictionaries.
    return [
        {"timestamp": ts, "open": op, "high": hi, "low": lo, "close": cl, "volume": vol}
        for ts, op, hi, lo, cl, vol in zip(*columns)
    ]
|
| 572 |
+
|
| 573 |
+
def get_historical_data_v2(self, symbol: str, from_date: datetime, to_date: datetime, time_interval: int = 1, period: str = 'I'):
    """
    Retrieves historical candlestick data by trading symbol via the newer
    ChartData endpoint.

    Args:
        symbol (str): The trading symbol of the stock (e.g., "YESBANK-EQ").
        from_date (datetime): Start date in UTC.
        to_date (datetime): End date in UTC.
        time_interval (int, optional): Interval in minutes. Default is 1.
        period (str, optional): 'I' for intraday, 'D' for daily. Default 'I'.

    Returns:
        list of dict: One dict per candlestick with keys 'timestamp', 'open',
        'high', 'low', 'close', and 'volume'.

    Raises:
        NSEAPIError: If the API request fails or the response is invalid.
    """
    endpoint = "/Charts/ChartData/"
    self.headers['Content-Type'] = "application/json; charset=utf-8"
    self.headers['Accept'] = "*/*"

    request_body = {
        "exch": "N",
        "tradingSymbol": symbol,
        "fromDate": int(from_date.timestamp()),
        "toDate": int(to_date.timestamp()),
        "timeInterval": time_interval,
        "chartPeriod": period,
        "chartStart": 0,
    }

    try:
        resp = self.session.post(
            urllib.parse.urljoin(self.base_url_charting, endpoint),
            json=request_body,
            headers=self.headers,
        )
        resp.raise_for_status()
        data = resp.json()
    except requests.RequestException as e:
        raise NSEAPIError(f"Failed to fetch data: {str(e)}")
    except ValueError:
        raise NSEAPIError("Received invalid JSON response")

    # The endpoint reports success via a status field "s".
    if data.get("s") != "Ok":
        raise NSEAPIError(f"API returned an error: {data}")

    # Columnar OHLCV arrays keyed t/o/h/l/c/v; validate shape, then pivot.
    try:
        ohlcv = {key: data[key] for key in ("t", "o", "h", "l", "c", "v")}
    except KeyError as e:
        raise NSEAPIError(f"Missing expected field in response: {str(e)}")

    if not all(isinstance(col, list) for col in ohlcv.values()):
        raise NSEAPIError("Response contains invalid data types")
    candle_count = len(ohlcv["t"])
    if not all(len(col) == candle_count for col in ohlcv.values()):
        raise NSEAPIError("Response arrays have mismatched lengths")

    return [
        {
            "timestamp": ohlcv["t"][i],
            "open": ohlcv["o"][i],
            "high": ohlcv["h"][i],
            "low": ohlcv["l"][i],
            "close": ohlcv["c"][i],
            "volume": ohlcv["v"][i],
        }
        for i in range(candle_count)
    ]
|
| 649 |
+
|
| 650 |
+
def get_historical_trade_info(self, symbol: str, from_date: datetime, to_date: datetime, data_type: str = "priceVolumeDeliverable", series: str = "ALL"):
|
| 651 |
+
"""
|
| 652 |
+
Retrieves historical trade data for a given symbol using the updated NSE API.
|
| 653 |
+
|
| 654 |
+
Args:
|
| 655 |
+
symbol (str): The stock symbol (e.g., "RELIANCE").
|
| 656 |
+
from_date (datetime): The start date.
|
| 657 |
+
to_date (datetime): The end date.
|
| 658 |
+
data_type (str, optional): The type of data to retrieve (default is "priceVolumeDeliverable").
|
| 659 |
+
series (str, optional): The series to retrieve (default is "ALL").
|
| 660 |
+
|
| 661 |
+
Returns:
|
| 662 |
+
list of dict: A list of daily records with historical trade data.
|
| 663 |
+
|
| 664 |
+
Raises:
|
| 665 |
+
NSEAPIError: If the API request fails or the response is invalid.
|
| 666 |
+
|
| 667 |
+
Example:
|
| 668 |
+
>>> from datetime import datetime
|
| 669 |
+
>>> api = NSEAPI()
|
| 670 |
+
>>> data = api.get_historical_trade_info("RELIANCE", datetime(2025, 5, 7), datetime(2025, 5, 8))
|
| 671 |
+
>>> print(data[0]["COP_DELIV_PERC"])
|
| 672 |
+
73.92
|
| 673 |
+
"""
|
| 674 |
+
params = {
|
| 675 |
+
"from": from_date.strftime("%d-%m-%Y"),
|
| 676 |
+
"to": to_date.strftime("%d-%m-%Y"),
|
| 677 |
+
"symbol": symbol.split('-')[0],
|
| 678 |
+
"type": data_type,
|
| 679 |
+
"series": series
|
| 680 |
+
}
|
| 681 |
+
|
| 682 |
+
endpoint = f"historicalOR/generateSecurityWiseHistoricalData?{urlencode(params)}"
|
| 683 |
+
self.headers['Content-Type'] = "application/json; charset=utf-8"
|
| 684 |
+
self.headers['Accept'] = "application/json; charset=utf-8"
|
| 685 |
+
|
| 686 |
+
try:
|
| 687 |
+
response = self.session.get(
|
| 688 |
+
urllib.parse.urljoin(self.base_url_market, endpoint),
|
| 689 |
+
headers=self.headers
|
| 690 |
+
)
|
| 691 |
+
response.raise_for_status()
|
| 692 |
+
data = response.json()
|
| 693 |
+
except requests.RequestException as e:
|
| 694 |
+
raise NSEAPIError(f"Failed to fetch data: {str(e)}")
|
| 695 |
+
except ValueError:
|
| 696 |
+
raise NSEAPIError("Invalid JSON response")
|
| 697 |
+
|
| 698 |
+
if "data" not in data or not isinstance(data["data"], list):
|
| 699 |
+
raise NSEAPIError("Response does not contain a valid 'data' list")
|
| 700 |
+
print(data)
|
| 701 |
+
return data["data"]
|
| 702 |
+
|
| 703 |
+
def get_all_large_deals(self):
|
| 704 |
+
"""
|
| 705 |
+
Retrieves the list of all fnos available on NSE.
|
| 706 |
+
|
| 707 |
+
Returns:
|
| 708 |
+
dict: dictionary contains 'BLOCK_DEALS(count)', 'BLOCK_DEALS_DATA', 'BULK_DEALS(count)', 'BULK_DEALS_DATA', 'SHORT_DEALS(count)', 'SHORT_DEALS_DATA'.
|
| 709 |
+
|
| 710 |
+
Raises:
|
| 711 |
+
NSEAPIError: If there is an error fetching the data from the server.
|
| 712 |
+
|
| 713 |
+
Example:
|
| 714 |
+
>>> api = NSEAPI()
|
| 715 |
+
>>> deals = api.get_all_large_deals()
|
| 716 |
+
>>> print(deals[0])
|
| 717 |
+
{
|
| 718 |
+
"as_on_date": "28-Feb-2025",
|
| 719 |
+
"BULK_DEALS": "1",
|
| 720 |
+
"BULK_DEALS_DATA": [
|
| 721 |
+
{
|
| 722 |
+
"date": "28-Feb-2025",
|
| 723 |
+
"symbol": "BIRDYS",
|
| 724 |
+
"name": "Grill Splendour Ser Ltd",
|
| 725 |
+
"clientName": "AJAY KRISHNAKANT PARIKH",
|
| 726 |
+
"buySell": "BUY",
|
| 727 |
+
"qty": "36000",
|
| 728 |
+
"watp": "84.57",
|
| 729 |
+
"remarks": "-"
|
| 730 |
+
}
|
| 731 |
+
],
|
| 732 |
+
"SHORT_DEALS": "0",
|
| 733 |
+
"SHORT_DEALS_DATA": [],
|
| 734 |
+
"BLOCK_DEALS": "0",
|
| 735 |
+
"BLOCK_DEALS_DATA": []
|
| 736 |
+
}
|
| 737 |
+
"""
|
| 738 |
+
endpoint = "snapshot-capital-market-largedeal"
|
| 739 |
+
self.headers['Content-Type'] = "application/json; charset=utf-8"
|
| 740 |
+
self.headers['Accept'] = "application/json; charset=utf-8"
|
| 741 |
+
|
| 742 |
+
# Fetch data
|
| 743 |
+
try:
|
| 744 |
+
print(urllib.parse.urljoin(self.base_url_market,endpoint))
|
| 745 |
+
response = self.session.get(urllib.parse.urljoin(self.base_url_market,endpoint), headers=self.headers)
|
| 746 |
+
response.raise_for_status() # Raises exception for HTTP errors (e.g., 404, 500)
|
| 747 |
+
except requests.exceptions.RequestException as e:
|
| 748 |
+
raise NSEAPIError(f"Error fetching data: {e}")
|
| 749 |
+
|
| 750 |
+
# Parse plain text response
|
| 751 |
+
deals = response.json()
|
| 752 |
+
|
| 753 |
+
# Warn if no valid data is found
|
| 754 |
+
if not deals:
|
| 755 |
+
logger.warning("No valid equity data found in the response")
|
| 756 |
+
|
| 757 |
+
return deals
|
| 758 |
+
|
| 759 |
+
def get_corporate_actions(self, symbol: str, index: str = "equities"):
|
| 760 |
+
"""
|
| 761 |
+
Retrieves corporate actions (e.g., dividends, bonuses) for a given symbol and issuer.
|
| 762 |
+
|
| 763 |
+
Args:
|
| 764 |
+
symbol (str): Stock symbol (e.g., "RELIANCE").
|
| 765 |
+
issuer (str): Issuer name (e.g., "Reliance Industries Limited").
|
| 766 |
+
index (str, optional): Index type (default is "equities").
|
| 767 |
+
|
| 768 |
+
Returns:
|
| 769 |
+
list of dict: Corporate actions with details like exDate, subject, etc.
|
| 770 |
+
|
| 771 |
+
Raises:
|
| 772 |
+
NSEAPIError: If the API request fails or the response is invalid.
|
| 773 |
+
|
| 774 |
+
Example:
|
| 775 |
+
>>> api = NSEAPI()
|
| 776 |
+
>>> data = api.get_corporate_actions("RELIANCE", "Reliance Industries Limited")
|
| 777 |
+
>>> print(data[0]["subject"])
|
| 778 |
+
Dividend - Rs 10 Per Share
|
| 779 |
+
"""
|
| 780 |
+
params = {
|
| 781 |
+
"index": index,
|
| 782 |
+
"symbol": symbol.split('-')[0]
|
| 783 |
+
}
|
| 784 |
+
|
| 785 |
+
endpoint = f"corporates-corporateActions?{urlencode(params)}"
|
| 786 |
+
self.headers['Content-Type'] = "application/json; charset=utf-8"
|
| 787 |
+
self.headers['Accept'] = "application/json; charset=utf-8"
|
| 788 |
+
|
| 789 |
+
self.headers['referer'] = "https://www.nseindia.com/report-detail/eq_security"
|
| 790 |
+
|
| 791 |
+
try:
|
| 792 |
+
print(urllib.parse.urljoin(self.base_url_market,endpoint))
|
| 793 |
+
response = self.session.get(urllib.parse.urljoin(self.base_url_market,endpoint), headers=self.headers)
|
| 794 |
+
response.raise_for_status()
|
| 795 |
+
data = response.json()
|
| 796 |
+
except requests.RequestException as e:
|
| 797 |
+
raise NSEAPIError(f"Failed to fetch corporate actions: {str(e)}")
|
| 798 |
+
except ValueError:
|
| 799 |
+
raise NSEAPIError("Invalid JSON response")
|
| 800 |
+
|
| 801 |
+
return data
|
| 802 |
+
|
| 803 |
+
def get_financial_results(self, symbol: str, period: str = "Quarterly", index: str = "equities"):
|
| 804 |
+
"""
|
| 805 |
+
Retrieves corporate financial results for a given symbol and issuer over a date range.
|
| 806 |
+
|
| 807 |
+
Args:
|
| 808 |
+
symbol (str): Stock symbol (e.g., "RELIANCE").
|
| 809 |
+
period (str, optional): Financial period (e.g., "Quarterly" or "Annual"). Defaults to "Quarterly".
|
| 810 |
+
index (str, optional): Index type (e.g., "equities"). Defaults to "equities".
|
| 811 |
+
|
| 812 |
+
Returns:
|
| 813 |
+
list of dict: A list of financial result records, each containing details like
|
| 814 |
+
symbol, companyName, period, filingDate, xbrl, consolidated, etc.
|
| 815 |
+
|
| 816 |
+
Raises:
|
| 817 |
+
NSEAPIError: If the API request fails, the response is invalid, or required data is missing.
|
| 818 |
+
|
| 819 |
+
Example:
|
| 820 |
+
>>> api = NSEAPI()
|
| 821 |
+
>>> data = api.get_financial_results(
|
| 822 |
+
... symbol="RELIANCE",
|
| 823 |
+
... period="Quarterly"
|
| 824 |
+
... )
|
| 825 |
+
>>> print(data[0]["symbol"])
|
| 826 |
+
RELIANCE
|
| 827 |
+
>>> print(data[0]["filingDate"])
|
| 828 |
+
16-Jan-2025 20:20
|
| 829 |
+
"""
|
| 830 |
+
# Prepare query parameters
|
| 831 |
+
params = {
|
| 832 |
+
"index": index,
|
| 833 |
+
"symbol": symbol.split('-')[0],
|
| 834 |
+
"period": period
|
| 835 |
+
}
|
| 836 |
+
endpoint = f"corporates-financial-results?{urlencode(params)}"
|
| 837 |
+
|
| 838 |
+
# Set headers
|
| 839 |
+
self.headers['Content-Type'] = "application/json; charset=utf-8"
|
| 840 |
+
self.headers['Accept'] = "application/json; charset=utf-8"
|
| 841 |
+
self.headers['referer'] = "https://www.nseindia.com/companies-listing/corporate-filings-financial-results"
|
| 842 |
+
|
| 843 |
+
# Fetch data
|
| 844 |
+
try:
|
| 845 |
+
response = self.session.get(
|
| 846 |
+
urllib.parse.urljoin(self.base_url_market, endpoint),
|
| 847 |
+
headers=self.headers
|
| 848 |
+
)
|
| 849 |
+
response.raise_for_status()
|
| 850 |
+
data = response.json()
|
| 851 |
+
except requests.RequestException as e:
|
| 852 |
+
raise NSEAPIError(f"Failed to fetch financial results for symbol '{symbol}': {str(e)}")
|
| 853 |
+
except ValueError:
|
| 854 |
+
raise NSEAPIError(f"Invalid JSON response for symbol '{symbol}'")
|
| 855 |
+
|
| 856 |
+
# Validate response
|
| 857 |
+
if not isinstance(data, list):
|
| 858 |
+
raise NSEAPIError(f"Response does not contain a valid list for symbol '{symbol}'")
|
| 859 |
+
if not data:
|
| 860 |
+
logger.warning(f"No financial results found for symbol '{symbol}'")
|
| 861 |
+
|
| 862 |
+
return data
|
| 863 |
+
|
| 864 |
+
def get_bulk_block_short_deals(self, from_date: str, to_date: str, option_type: str = "bulk_deals", symbol: str = None):
|
| 865 |
+
"""
|
| 866 |
+
Retrieves bulk deals data for a specified date range and option type from NSE.
|
| 867 |
+
|
| 868 |
+
Args:
|
| 869 |
+
symbol (str): Stock symbol (e.g., "RELIANCE").
|
| 870 |
+
from_date (str): Start date in DD-MM-YYYY format (e.g., "07-05-2025").
|
| 871 |
+
to_date (str): End date in DD-MM-YYYY format (e.g., "08-05-2025").
|
| 872 |
+
option_type (str, optional): Type of deals to retrieve (e.g., "bulk_deals", "block_deals", "short_selling"). Defaults to "bulk_deals".
|
| 873 |
+
|
| 874 |
+
Returns:
|
| 875 |
+
list of dict: A list of deal records, each containing details like date, symbol, client name, buy/sell, quantity, weighted average trade price, and remarks.
|
| 876 |
+
|
| 877 |
+
Raises:
|
| 878 |
+
NSEAPIError: If the API request fails, the response is invalid, or required data is missing.
|
| 879 |
+
|
| 880 |
+
Example:
|
| 881 |
+
>>> api = NSEAPI()
|
| 882 |
+
>>> deals = api.get_bulk_deals(from_date="07-05-2025", to_date="08-05-2025", option_type="bulk_deals")
|
| 883 |
+
>>> print(deals[0])
|
| 884 |
+
{
|
| 885 |
+
'BD_DT_DATE': '07-MAY-2025',
|
| 886 |
+
'BD_DT_ORDER': '2025-05-06T18:30:00.000+00:00',
|
| 887 |
+
'BD_SYMBOL': 'AARTIDRUGS',
|
| 888 |
+
'BD_SCRIP_NAME': 'Aarti Drugs Ltd.',
|
| 889 |
+
'BD_CLIENT_NAME': 'GRAVITON RESEARCH CAPITAL LLP',
|
| 890 |
+
'BD_BUY_SELL': 'BUY',
|
| 891 |
+
'BD_QTY_TRD': 803477,
|
| 892 |
+
'BD_TP_WATP': 393.1,
|
| 893 |
+
'BD_REMARKS': '-'
|
| 894 |
+
}
|
| 895 |
+
"""
|
| 896 |
+
# Prepare query parameters
|
| 897 |
+
params = {
|
| 898 |
+
"symbol": symbol.split('-')[0] if symbol else "",
|
| 899 |
+
"optionType": option_type,
|
| 900 |
+
"from": from_date,
|
| 901 |
+
"to": to_date
|
| 902 |
+
}
|
| 903 |
+
endpoint = f"historicalOR/bulk-block-short-deals?{urlencode(params)}"
|
| 904 |
+
|
| 905 |
+
# Set headers
|
| 906 |
+
self.headers['Content-Type'] = "application/json; charset=utf-8"
|
| 907 |
+
self.headers['Accept'] = "application/json; charset=utf-8"
|
| 908 |
+
self.headers['referer'] = "https://www.nseindia.com/report-detail/display-bulk-and-block-deals"
|
| 909 |
+
|
| 910 |
+
# Fetch data
|
| 911 |
+
try:
|
| 912 |
+
response = self.session.get(
|
| 913 |
+
urllib.parse.urljoin(self.base_url_market, endpoint),
|
| 914 |
+
headers=self.headers
|
| 915 |
+
)
|
| 916 |
+
response.raise_for_status()
|
| 917 |
+
data = response.json()
|
| 918 |
+
except requests.RequestException as e:
|
| 919 |
+
raise NSEAPIError(f"Error fetching bulk deals data: {str(e)}")
|
| 920 |
+
except ValueError:
|
| 921 |
+
raise NSEAPIError("Invalid JSON response")
|
| 922 |
+
|
| 923 |
+
# Validate response
|
| 924 |
+
if "data" not in data or not isinstance(data["data"], list):
|
| 925 |
+
raise NSEAPIError("Response does not contain a valid 'data' list")
|
| 926 |
+
if not data["data"]:
|
| 927 |
+
logger.warning("No bulk deals data found in the response")
|
| 928 |
+
|
| 929 |
+
return data["data"]
|
| 930 |
+
|
| 931 |
+
|
| 932 |
+
def xbrl_to_dict(self, xml_url):
|
| 933 |
+
try:
|
| 934 |
+
response = self.session.get(xml_url, headers=self.headers)
|
| 935 |
+
response.raise_for_status()
|
| 936 |
+
xml_content = response.content
|
| 937 |
+
|
| 938 |
+
tree = etree.parse(BytesIO(xml_content))
|
| 939 |
+
root = tree.getroot()
|
| 940 |
+
|
| 941 |
+
ns = root.nsmap
|
| 942 |
+
if None in ns:
|
| 943 |
+
ns['default'] = ns.pop(None)
|
| 944 |
+
|
| 945 |
+
data_dict = {}
|
| 946 |
+
for elem in root.iter():
|
| 947 |
+
tag = etree.QName(elem).localname
|
| 948 |
+
value = elem.text.strip() if elem.text else None
|
| 949 |
+
|
| 950 |
+
if value:
|
| 951 |
+
key = f"{tag} ({elem.get('contextRef')})" if elem.get('contextRef') else tag
|
| 952 |
+
data_dict[key] = value
|
| 953 |
+
|
| 954 |
+
return data_dict
|
| 955 |
+
|
| 956 |
+
except requests.RequestException as e:
|
| 957 |
+
print(f"Error fetching XML: {e}")
|
| 958 |
+
except etree.XMLSyntaxError as e:
|
| 959 |
+
print(f"Error parsing XML: {e}")
|
| 960 |
+
return {}
|
| 961 |
+
|
| 962 |
+
# Manual smoke-test entry point; only runs when the module is executed directly.
if __name__ == "__main__":
    api = NSEAPI()
    try:
        # Test Code
        pass
    except NSEAPIError as e:
        print(f"Failed to fetch deals: {e}")
|
src/streamlit_app.py
CHANGED
|
@@ -1,40 +1,817 @@
|
|
| 1 |
-
import altair as alt
|
| 2 |
-
import numpy as np
|
| 3 |
-
import pandas as pd
|
| 4 |
import streamlit as st
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 5 |
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
}
|
| 32 |
-
|
| 33 |
-
st.
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import streamlit as st
|
| 2 |
+
from datetime import datetime
|
| 3 |
+
import pytz
|
| 4 |
+
import pandas as pd
|
| 5 |
+
import numpy as np
|
| 6 |
+
import plotly.graph_objs as go
|
| 7 |
+
from plotly.subplots import make_subplots
|
| 8 |
+
from customAPI import NSEAPI
|
| 9 |
+
import plotly.express as px
|
| 10 |
+
|
| 11 |
+
# Set page configuration
# Wide layout so the multi-column Plotly dashboard has room to render.
st.set_page_config(page_title="Stock Market Application", layout="wide")

# Initialize timezone and NSEAPI
# All market timestamps in this app are interpreted in Indian Standard Time.
tz = pytz.timezone('Asia/Kolkata')
api = NSEAPI()  # shared NSE client used by the cached fetchers below
|
| 17 |
+
|
| 18 |
+
# Fetch all equities
|
| 19 |
+
@st.cache_data
def load_equities():
    """Return the complete NSE equity list as a DataFrame (cached across reruns)."""
    return api.get_all_equities()
|
| 23 |
+
|
| 24 |
+
# Fetch all indices
|
| 25 |
+
@st.cache_data
def load_indices():
    """Return the complete NSE index list as a DataFrame (cached across reruns)."""
    return api.get_all_indices()
|
| 29 |
+
|
| 30 |
+
# Load the cached symbol/index lists once per session.
eq_list = load_equities()
inx_list = load_indices()

# Sidebar for equity selection and date range
st.sidebar.header("Stock Selection")
# Only "-EQ" series symbols are offered for selection.
selected_stock = st.sidebar.selectbox("Select a Stock", eq_list.query('TradingSymbol.str.endswith(\'-EQ\')')['TradingSymbol'].tolist())

st.sidebar.header("Date Range")
default_end = datetime.now(tz=tz)
# NOTE: pytz zones must be attached with localize(); passing tzinfo= to the
# datetime constructor yields the historical LMT offset, not IST.
default_start = tz.localize(datetime(2019, 12, 30, 9, 15, 0))
start_date = st.sidebar.date_input("Start Date", value=default_start, min_value=default_start, max_value=default_end)
end_date = st.sidebar.date_input("End Date", value=default_end, min_value=default_start, max_value=default_end)

# Convert date inputs to timezone-aware datetimes spanning the full days.
start_date = tz.localize(datetime.combine(start_date, datetime.min.time()))
end_date = tz.localize(datetime.combine(end_date, datetime.max.time()))
|
| 46 |
+
|
| 47 |
+
# Cache historical data based on stock and date range
|
| 48 |
+
@st.cache_data
def fetch_historical_data(symbol, start, end):
    """Fetch daily candles for `symbol` between `start` and `end` (cached)."""
    return api.get_historical_data_v2(symbol, start, end, 1, "D")
|
| 51 |
+
|
| 52 |
+
# Cache index data for comparative analysis
|
| 53 |
+
@st.cache_data
def fetch_index_data(index_name, start, end):
    """Fetch daily index OHLCV and return a tz-aware OHLCV DataFrame (cached)."""
    raw = api.get_historical_data(index_name, start, end, 1, "D")
    frame = pd.DataFrame(raw)
    # Epoch seconds -> IST-aware DatetimeIndex.
    frame['date'] = pd.to_datetime(frame['timestamp'], unit='s').dt.tz_localize('UTC').dt.tz_convert(tz)
    frame.set_index('date', inplace=True)
    frame = frame[['open', 'high', 'low', 'close', 'volume']]
    frame.columns = ['Open', 'High', 'Low', 'Close', 'Volume']
    return frame
|
| 62 |
+
|
| 63 |
+
def predict_next_return(df, window_size=3, bin_size=0.5):
    """
    Predicts the most probable next return based on historical return patterns.

    Daily returns are discretized into fixed-width bins; the last `window_size`
    binned returns form the "current pattern", which is matched against every
    historical window of the same size to build a frequency table of the
    return that followed each match.

    Args:
        df (pd.DataFrame): DataFrame containing a 'Returns' column with daily
            returns (in percent).
        window_size (int): Size of the sliding window for patterns (default=3).
        bin_size (float): Size of return bins in percentage (default=0.5).

    Returns:
        dict: Contains the current pattern, historical occurrences, frequency
            of next returns (as percentages), and the most probable next return.
    """
    # Discretize returns into bins
    returns = df['Returns'].dropna()
    min_return = returns.min()
    max_return = returns.max()
    bins = np.arange(np.floor(min_return / bin_size) * bin_size,
                     np.ceil(max_return / bin_size) * bin_size + bin_size,
                     bin_size)
    # Work on a plain NumPy array so all indexing below is positional.
    # (Scalar integer access on a pandas Series is label-based, which breaks
    # on datetime-indexed or gapped-index DataFrames in modern pandas.)
    binned_returns = pd.cut(returns, bins=bins, include_lowest=True, labels=False).to_numpy()
    binned_returns = binned_returns * bin_size  # Convert bin indices back to return values

    # Create sliding window (pattern, next_return) pairs.
    patterns = []
    for i in range(len(binned_returns) - window_size):
        pattern = tuple(binned_returns[i:i + window_size])
        next_return = binned_returns[i + window_size]
        patterns.append((pattern, next_return))

    # Current pattern = the last window_size binned returns.
    current_pattern = tuple(binned_returns[-window_size:]) if len(binned_returns) >= window_size else None

    if current_pattern is None:
        return {
            'current_pattern': None,
            'occurrences': 0,
            'next_return_frequencies': {},
            'most_probable_return': None
        }

    # Find historical occurrences of the current pattern
    matching_patterns = [(p, r) for p, r in patterns if p == current_pattern and not np.isnan(r)]

    # Count frequency of next returns
    next_returns = [r for _, r in matching_patterns]
    if not next_returns:
        return {
            'current_pattern': current_pattern,
            'occurrences': 0,
            'next_return_frequencies': {},
            'most_probable_return': None
        }

    # Calculate frequency of each next return
    freq = pd.Series(next_returns).value_counts().to_dict()
    total_occurrences = len(next_returns)
    freq_percentage = {k: (v / total_occurrences * 100) for k, v in freq.items()}

    # Find the most probable next return
    most_probable = max(freq.items(), key=lambda x: x[1])[0] if freq else None

    return {
        'current_pattern': current_pattern,
        'occurrences': total_occurrences,
        'next_return_frequencies': freq_percentage,
        'most_probable_return': most_probable
    }
|
| 131 |
+
|
| 132 |
+
# Main content
|
| 133 |
+
st.title("Stock Market Application")
|
| 134 |
+
|
| 135 |
+
if selected_stock:
|
| 136 |
+
# Fetch historical data
|
| 137 |
+
data = fetch_historical_data(selected_stock, start_date, end_date)
|
| 138 |
+
df = pd.DataFrame(data)
|
| 139 |
+
df['date'] = pd.to_datetime(df['timestamp'], unit='s').dt.tz_localize('Asia/Kolkata')
|
| 140 |
+
df.set_index('date', inplace=True)
|
| 141 |
+
df = df[['open', 'high', 'low', 'close', 'volume']]
|
| 142 |
+
df.columns = ['Open', 'High', 'Low', 'Close', 'Volume']
|
| 143 |
+
|
| 144 |
+
# Calculate additional metrics
|
| 145 |
+
df['SMA5'] = df['Close'].ewm(span=5, adjust=False).mean()
|
| 146 |
+
df['SMA20'] = df['Close'].ewm(span=20, adjust=False).mean()
|
| 147 |
+
df['SMA50'] = df['Close'].rolling(window=50).mean()
|
| 148 |
+
df['SMA100'] = df['Close'].rolling(window=100).mean()
|
| 149 |
+
df['SMA200'] = df['Close'].rolling(window=200).mean()
|
| 150 |
+
df['Returns'] = ((df['Close'] - df['Open']) / df['Open']) * 100
|
| 151 |
+
df['LogReturns'] = np.log(df['Close'] / df['Close'].shift(1))
|
| 152 |
+
df['Volatility'] = df['Returns'].rolling(window=20).std()
|
| 153 |
+
df['Volume_Rank'] = df['Volume'].rank(pct=True) # For volume-conditional analysis
|
| 154 |
+
df['DayOfWeek'] = df.index.dayofweek
|
| 155 |
+
df['Month'] = df.index.month
|
| 156 |
+
df['Year'] = df.index.year
|
| 157 |
+
df['Avg'] = ((df['High']-df['Low'])/2) + df['Low']
|
| 158 |
+
|
| 159 |
+
# Calculate RSI for signal-based conditioning
|
| 160 |
+
delta = df['Close'].diff()
|
| 161 |
+
gain = (delta.where(delta > 0, 0)).rolling(window=14).mean()
|
| 162 |
+
loss = (-delta.where(delta < 0, 0)).rolling(window=14).mean()
|
| 163 |
+
rs = gain / loss
|
| 164 |
+
df['RSI'] = 100 - (100 / (1 + rs))
|
| 165 |
+
|
| 166 |
+
# Price bins for volume per price
|
| 167 |
+
price_bin_size = int((df['High'].max()-df['Low'].min())/20)
|
| 168 |
+
min_price = df['Close'].min()
|
| 169 |
+
max_price = df['Close'].max()
|
| 170 |
+
bins = pd.interval_range(start=int(min_price), end=int(max_price) + 1, freq=price_bin_size)
|
| 171 |
+
df['price_bin'] = pd.cut(df['Avg'], bins=bins)
|
| 172 |
+
volume_per_price = df.groupby('price_bin')['Volume'].sum()
|
| 173 |
+
volume_per_price = volume_per_price.reset_index()
|
| 174 |
+
volume_per_price['price_mid'] = volume_per_price['price_bin'].apply(lambda x: x.mid)
|
| 175 |
+
|
| 176 |
+
# Prepare return distributions
|
| 177 |
+
returns = df['Returns'].dropna()
|
| 178 |
+
log_returns = df['LogReturns'].dropna()
|
| 179 |
+
total_returns = len(returns)
|
| 180 |
+
|
| 181 |
+
# Create main figure
|
| 182 |
+
fig = make_subplots(
|
| 183 |
+
rows=8, cols=2,
|
| 184 |
+
shared_xaxes=False,
|
| 185 |
+
shared_yaxes=False,
|
| 186 |
+
vertical_spacing=0.05,
|
| 187 |
+
horizontal_spacing=0.1,
|
| 188 |
+
subplot_titles=(
|
| 189 |
+
f"{selected_stock} Daily Chart", "Volume per Price Bracket",
|
| 190 |
+
"Volume", "Volume Deviation per Price Bracket",
|
| 191 |
+
"Overall Return Distribution", "Returns in Fixed Ranges",
|
| 192 |
+
"Daily Return Probability by Day of Week", "",
|
| 193 |
+
"5-SMA vs 20-SMA", "5-20 SMA Deviation Distribution",
|
| 194 |
+
"20-SMA vs 50-SMA", "20-50 SMA Deviation Distribution",
|
| 195 |
+
"50-SMA vs 100-SMA", "50-100 SMA Deviation Distribution",
|
| 196 |
+
"100-SMA vs 200-SMA", "100-200 SMA Deviation Distribution",
|
| 197 |
+
),
|
| 198 |
+
row_heights=[0.2, 0.1, 0.1, 0.1, 0.1, 0.12, 0.12, 0.12],
|
| 199 |
+
column_widths=[0.6, 0.4],
|
| 200 |
+
specs=[
|
| 201 |
+
[{"rowspan": 1}, {"rowspan": 1}],
|
| 202 |
+
[{"rowspan": 1}, {"rowspan": 1}],
|
| 203 |
+
[{"rowspan": 1}, {"rowspan": 1}],
|
| 204 |
+
[{"colspan": 2}, None],
|
| 205 |
+
[{"rowspan": 1}, {"rowspan": 1}],
|
| 206 |
+
[{"rowspan": 1}, {"rowspan": 1}],
|
| 207 |
+
[{"rowspan": 1}, {"rowspan": 1}],
|
| 208 |
+
[{"rowspan": 1}, {"rowspan": 1}]
|
| 209 |
+
]
|
| 210 |
+
)
|
| 211 |
+
|
| 212 |
+
# Candlestick chart
|
| 213 |
+
fig.add_trace(
|
| 214 |
+
go.Candlestick(
|
| 215 |
+
x=df.index,
|
| 216 |
+
open=df['Open'],
|
| 217 |
+
high=df['High'],
|
| 218 |
+
low=df['Low'],
|
| 219 |
+
close=df['Close'],
|
| 220 |
+
name=selected_stock,
|
| 221 |
+
increasing_line_color='green',
|
| 222 |
+
decreasing_line_color='red'
|
| 223 |
+
),
|
| 224 |
+
row=1, col=1
|
| 225 |
+
)
|
| 226 |
+
|
| 227 |
+
# Volume per price
|
| 228 |
+
fig.add_trace(
|
| 229 |
+
go.Bar(
|
| 230 |
+
y=volume_per_price['price_mid'],
|
| 231 |
+
x=volume_per_price['Volume'],
|
| 232 |
+
orientation='h',
|
| 233 |
+
name='Volume per Price',
|
| 234 |
+
marker_color='rgba(255, 165, 0, 0.6)'
|
| 235 |
+
),
|
| 236 |
+
row=1, col=2
|
| 237 |
+
)
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
volume_per_price['price_volume_mean'] = volume_per_price['Volume'].mean()
|
| 241 |
+
volume_per_price['price_volume_above_mean'] = volume_per_price['Volume'].where(volume_per_price['Volume']>(0.5*volume_per_price['price_volume_mean']),0)
|
| 242 |
+
|
| 243 |
+
volume_per_price['ma_price_volume_above_mean'] = volume_per_price['price_volume_above_mean'].rolling(window=2).mean()
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
fig.add_trace(
|
| 247 |
+
go.Bar(
|
| 248 |
+
y=volume_per_price['price_mid'],
|
| 249 |
+
x=volume_per_price['ma_price_volume_above_mean'],
|
| 250 |
+
orientation='h',
|
| 251 |
+
name='Volume MA per Price',
|
| 252 |
+
marker_color='rgba(255, 165, 0, 0.6)'
|
| 253 |
+
),
|
| 254 |
+
row=2, col=2
|
| 255 |
+
)
|
| 256 |
+
|
| 257 |
+
# Volume subplot
|
| 258 |
+
fig.add_trace(
|
| 259 |
+
go.Bar(x=df.index, y=df['Volume'], name='Volume', marker_color='rgba(0, 0, 255, 0.5)'),
|
| 260 |
+
row=2, col=1
|
| 261 |
+
)
|
| 262 |
+
|
| 263 |
+
def get_return_color(x):
|
| 264 |
+
if x < 0:
|
| 265 |
+
intensity = min(255, int(abs(x) * 100))
|
| 266 |
+
return f'rgb({255},{255-intensity},{255-intensity})'
|
| 267 |
+
else:
|
| 268 |
+
intensity = min(255, int(x * 100))
|
| 269 |
+
return f'rgb({255-intensity},{255},{255-intensity})'
|
| 270 |
+
|
| 271 |
+
# Overall return distribution
|
| 272 |
+
combined_bin_edges = np.histogram_bin_edges(returns, bins=50)
|
| 273 |
+
hist_data_combined, _ = np.histogram(returns, bins=combined_bin_edges)
|
| 274 |
+
bin_centers_combined = 0.5 * (combined_bin_edges[:-1] + combined_bin_edges[1:])
|
| 275 |
+
customdata_combined = [
|
| 276 |
+
(f"{combined_bin_edges[i]:.2f} to {combined_bin_edges[i+1]:.2f}",
|
| 277 |
+
(hist_data_combined[i] / total_returns) * 100,
|
| 278 |
+
hist_data_combined[i],
|
| 279 |
+
total_returns)
|
| 280 |
+
for i in range(len(hist_data_combined))
|
| 281 |
+
]
|
| 282 |
+
bar_colors = [get_return_color(x) for x in bin_centers_combined]
|
| 283 |
+
|
| 284 |
+
fig.add_trace(
|
| 285 |
+
go.Bar(
|
| 286 |
+
x=bin_centers_combined,
|
| 287 |
+
y=hist_data_combined,
|
| 288 |
+
name='Daily Returns Distribution',
|
| 289 |
+
marker_color=bar_colors,
|
| 290 |
+
marker_line=dict(color='black', width=1),
|
| 291 |
+
opacity=0.8,
|
| 292 |
+
customdata=customdata_combined,
|
| 293 |
+
hovertemplate='Return: %{customdata[0]}<br>Probability: %{customdata[1]:.2f}% %{customdata[2]}/%{customdata[3]}<extra></extra>'
|
| 294 |
+
),
|
| 295 |
+
row=3, col=1
|
| 296 |
+
)
|
| 297 |
+
|
| 298 |
+
# Return distribution (fixed bins)
|
| 299 |
+
bin_edges = list(range(-15, 18, 3))
|
| 300 |
+
bin_labels = [f"{bin_edges[i]}% to {bin_edges[i+1]}%" for i in range(len(bin_edges)-1)]
|
| 301 |
+
bin_centers = [(bin_edges[i] + bin_edges[i+1]) / 2 for i in range(len(bin_edges)-1)]
|
| 302 |
+
bin_counts = [((returns >= bin_edges[i]) & (returns < bin_edges[i+1])).sum() for i in range(len(bin_edges)-1)]
|
| 303 |
+
bin_probs = [count / total_returns * 100 for count in bin_counts]
|
| 304 |
+
|
| 305 |
+
bar_colors = [get_return_color(x) for x in bin_centers]
|
| 306 |
+
|
| 307 |
+
fig.add_trace(
|
| 308 |
+
go.Bar(
|
| 309 |
+
x=bin_probs,
|
| 310 |
+
y=bin_labels,
|
| 311 |
+
orientation='h',
|
| 312 |
+
name='Fixed Return Ranges',
|
| 313 |
+
marker_color=bar_colors,
|
| 314 |
+
hovertemplate='Range: %{y}<br>Probability: %{x:.2f}%<extra></extra>'
|
| 315 |
+
),
|
| 316 |
+
row=3, col=2
|
| 317 |
+
)
|
| 318 |
+
|
| 319 |
+
# Day of Week Returns - Probability Distributions
|
| 320 |
+
dow_returns = df.groupby('DayOfWeek')['Returns'].apply(list)
|
| 321 |
+
dow_labels = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri']
|
| 322 |
+
dow_data = [dow_returns.get(i, []) for i in range(5)]
|
| 323 |
+
colors = ['rgba(0, 128, 128, 0.6)', 'rgba(255, 165, 0, 0.6)', 'rgba(0, 0, 255, 0.6)',
|
| 324 |
+
'rgba(255, 0, 0, 0.6)', 'rgba(128, 0, 128, 0.6)']
|
| 325 |
+
|
| 326 |
+
for i, (day_data, label, color) in enumerate(zip(dow_data, dow_labels, colors)):
|
| 327 |
+
if day_data: # Only plot if there is data for the day
|
| 328 |
+
hist_data, _ = np.histogram(day_data, bins=combined_bin_edges)
|
| 329 |
+
hist_probs = hist_data / np.sum(hist_data) * 100 if np.sum(hist_data) > 0 else hist_data
|
| 330 |
+
fig.add_trace(
|
| 331 |
+
go.Bar(
|
| 332 |
+
x=bin_centers_combined,
|
| 333 |
+
y=hist_probs,
|
| 334 |
+
name=f'{label} Returns',
|
| 335 |
+
marker_color=color,
|
| 336 |
+
opacity=0.6,
|
| 337 |
+
hovertemplate=f'{label} Return: %{{x:.2f}}%<br>Probability: %{{y:.2f}}%<extra></extra>'
|
| 338 |
+
),
|
| 339 |
+
row=4, col=1
|
| 340 |
+
)
|
| 341 |
+
|
| 342 |
+
df_recent1 = df.iloc[-1000:]
|
| 343 |
+
df_recent2 = df.iloc[-1000:]
|
| 344 |
+
df_recent3 = df.iloc[-1000:]
|
| 345 |
+
df_recent4 = df.iloc[-1000:]
|
| 346 |
+
|
| 347 |
+
# Calculate normalized deviations for SMA subplots
|
| 348 |
+
# 5-SMA vs 20-SMA
|
| 349 |
+
deviation_5_20 = (df_recent1['SMA5'] - df_recent1['SMA20']) / df_recent1['SMA20']
|
| 350 |
+
max_abs_deviation_5_20 = np.abs(deviation_5_20).max()
|
| 351 |
+
normalized_deviation_5_20 = deviation_5_20 / max_abs_deviation_5_20 if max_abs_deviation_5_20 != 0 else deviation_5_20
|
| 352 |
+
|
| 353 |
+
# 20-SMA vs 50-SMA
|
| 354 |
+
deviation_20_50 = (df_recent2['SMA20'] - df_recent2['SMA50']) / df_recent2['SMA50']
|
| 355 |
+
max_abs_deviation_20_50 = np.abs(deviation_20_50).max()
|
| 356 |
+
normalized_deviation_20_50 = deviation_20_50 / max_abs_deviation_20_50 if max_abs_deviation_20_50 != 0 else deviation_20_50
|
| 357 |
+
|
| 358 |
+
# 50-SMA vs 100-SMA
|
| 359 |
+
deviation_50_100 = (df_recent3['SMA50'] - df_recent3['SMA100']) / df_recent3['SMA100']
|
| 360 |
+
max_abs_deviation_50_100 = np.abs(deviation_50_100).max()
|
| 361 |
+
normalized_deviation_50_100 = deviation_50_100 / max_abs_deviation_50_100 if max_abs_deviation_50_100 != 0 else deviation_50_100
|
| 362 |
+
|
| 363 |
+
# 100-SMA vs 200-SMA
|
| 364 |
+
deviation_100_200 = (df_recent4['SMA100'] - df_recent4['SMA200']) / df_recent4['SMA200']
|
| 365 |
+
max_abs_deviation_100_200 = np.abs(deviation_100_200).max()
|
| 366 |
+
normalized_deviation_100_200 = deviation_100_200 / max_abs_deviation_100_200 if max_abs_deviation_100_200 != 0 else deviation_100_200
|
| 367 |
+
|
| 368 |
+
# Define histogram bins for normalized deviations
|
| 369 |
+
hist_bins = np.linspace(-1, 1, 21) # 20 bins from -1 to 1
|
| 370 |
+
bin_centers = 0.5 * (hist_bins[:-1] + hist_bins[1:])
|
| 371 |
+
|
| 372 |
+
# 5-SMA vs 20-SMA
|
| 373 |
+
if not normalized_deviation_5_20.empty:
|
| 374 |
+
mean_5_20 = normalized_deviation_5_20.mean()
|
| 375 |
+
std_5_20 = normalized_deviation_5_20.std() if normalized_deviation_5_20.std() != 0 else 1e-10
|
| 376 |
+
# Add mean and ±1σ, ±2σ, ±3σ lines
|
| 377 |
+
for level, label, color, dash in [
|
| 378 |
+
(mean_5_20, 'Mean', 'rgba(128, 0, 128, 1)', 'solid'),
|
| 379 |
+
(mean_5_20 + std_5_20, '+1σ', 'rgba(128, 0, 128, 0.8)', 'dash'),
|
| 380 |
+
(mean_5_20 - std_5_20, '-1σ', 'rgba(128, 0, 128, 0.8)', 'dash'),
|
| 381 |
+
(mean_5_20 + 2 * std_5_20, '+2σ', 'rgba(128, 0, 128, 0.6)', 'dot'),
|
| 382 |
+
(mean_5_20 - 2 * std_5_20, '-2σ', 'rgba(128, 0, 128, 0.6)', 'dot'),
|
| 383 |
+
(mean_5_20 + 3 * std_5_20, '+3σ', 'rgba(128, 0, 128, 0.4)', 'dashdot'),
|
| 384 |
+
(mean_5_20 - 3 * std_5_20, '-3σ', 'rgba(128, 0, 128, 0.4)', 'dashdot')
|
| 385 |
+
]:
|
| 386 |
+
if -1 <= level <= 1: # Only plot within y-axis range
|
| 387 |
+
fig.add_trace(
|
| 388 |
+
go.Scatter(
|
| 389 |
+
x=df_recent1.index,
|
| 390 |
+
y=[level] * len(df_recent1),
|
| 391 |
+
mode='lines',
|
| 392 |
+
name=f'5-20 {label}',
|
| 393 |
+
line=dict(color=color, width=1, dash=dash),
|
| 394 |
+
showlegend=False
|
| 395 |
+
),
|
| 396 |
+
row=5, col=1
|
| 397 |
+
)
|
| 398 |
+
# Add histogram in next column
|
| 399 |
+
hist_data_5_20, _ = np.histogram(normalized_deviation_5_20.dropna(), bins=hist_bins)
|
| 400 |
+
hist_probs_5_20 = hist_data_5_20 / np.sum(hist_data_5_20) * 100 if np.sum(hist_data_5_20) > 0 else hist_data_5_20
|
| 401 |
+
fig.add_trace(
|
| 402 |
+
go.Bar(
|
| 403 |
+
y=bin_centers,
|
| 404 |
+
x=hist_probs_5_20,
|
| 405 |
+
orientation='h',
|
| 406 |
+
name='5-20 Distribution',
|
| 407 |
+
marker_color='rgba(128, 0, 128, 0.6)',
|
| 408 |
+
width=(hist_bins[1] - hist_bins[0]),
|
| 409 |
+
showlegend=False
|
| 410 |
+
),
|
| 411 |
+
row=5, col=2
|
| 412 |
+
)
|
| 413 |
+
fig.add_trace(
|
| 414 |
+
go.Scatter(
|
| 415 |
+
y=[-1, 1],
|
| 416 |
+
x=[mean_5_20, mean_5_20],
|
| 417 |
+
mode='lines',
|
| 418 |
+
name='5-20 Mean',
|
| 419 |
+
line=dict(color='rgba(128, 0, 128, 1)', width=2, dash='solid'),
|
| 420 |
+
showlegend=False
|
| 421 |
+
),
|
| 422 |
+
row=5, col=2
|
| 423 |
+
)
|
| 424 |
+
fig.add_trace(
|
| 425 |
+
go.Scatter(
|
| 426 |
+
x=df_recent1.index,
|
| 427 |
+
y=normalized_deviation_5_20,
|
| 428 |
+
line=dict(color='purple', width=1),
|
| 429 |
+
name='5-SMA Deviation'
|
| 430 |
+
),
|
| 431 |
+
row=5, col=1
|
| 432 |
+
)
|
| 433 |
+
fig.add_trace(
|
| 434 |
+
go.Scatter(
|
| 435 |
+
x=df_recent1.index,
|
| 436 |
+
y=[0] * len(df_recent1),
|
| 437 |
+
line=dict(color='blue', width=1, dash='dash'),
|
| 438 |
+
name='20-SMA Baseline'
|
| 439 |
+
),
|
| 440 |
+
row=5, col=1
|
| 441 |
+
)
|
| 442 |
+
|
| 443 |
+
# 20-SMA vs 50-SMA
|
| 444 |
+
if not normalized_deviation_20_50.empty:
|
| 445 |
+
mean_20_50 = normalized_deviation_20_50.mean()
|
| 446 |
+
std_20_50 = normalized_deviation_20_50.std() if normalized_deviation_20_50.std() != 0 else 1e-10
|
| 447 |
+
for level, label, color, dash in [
|
| 448 |
+
(mean_20_50, 'Mean', 'rgba(0, 0, 255, 1)', 'solid'),
|
| 449 |
+
(mean_20_50 + std_20_50, '+1σ', 'rgba(0, 0, 255, 0.8)', 'dash'),
|
| 450 |
+
(mean_20_50 - std_20_50, '-1σ', 'rgba(0, 0, 255, 0.8)', 'dash'),
|
| 451 |
+
(mean_20_50 + 2 * std_20_50, '+2σ', 'rgba(0, 0, 255, 0.6)', 'dot'),
|
| 452 |
+
(mean_20_50 - 2 * std_20_50, '-2σ', 'rgba(0, 0, 255, 0.6)', 'dot'),
|
| 453 |
+
(mean_20_50 + 3 * std_20_50, '+3σ', 'rgba(0, 0, 255, 0.4)', 'dashdot'),
|
| 454 |
+
(mean_20_50 - 3 * std_20_50, '-3σ', 'rgba(0, 0, 255, 0.4)', 'dashdot')
|
| 455 |
+
]:
|
| 456 |
+
if -1 <= level <= 1:
|
| 457 |
+
fig.add_trace(
|
| 458 |
+
go.Scatter(
|
| 459 |
+
x=df_recent2.index,
|
| 460 |
+
y=[level] * len(df_recent2),
|
| 461 |
+
mode='lines',
|
| 462 |
+
name=f'20-50 {label}',
|
| 463 |
+
line=dict(color=color, width=1, dash=dash),
|
| 464 |
+
showlegend=False
|
| 465 |
+
),
|
| 466 |
+
row=6, col=1
|
| 467 |
+
)
|
| 468 |
+
hist_data_20_50, _ = np.histogram(normalized_deviation_20_50.dropna(), bins=hist_bins)
|
| 469 |
+
hist_probs_20_50 = hist_data_20_50 / np.sum(hist_data_20_50) * 100 if np.sum(hist_data_20_50) > 0 else hist_data_20_50
|
| 470 |
+
fig.add_trace(
|
| 471 |
+
go.Bar(
|
| 472 |
+
y=bin_centers,
|
| 473 |
+
x=hist_probs_20_50,
|
| 474 |
+
orientation='h',
|
| 475 |
+
name='20-50 Distribution',
|
| 476 |
+
marker_color='rgba(0, 0, 255, 0.6)',
|
| 477 |
+
width=(hist_bins[1] - hist_bins[0]),
|
| 478 |
+
showlegend=False
|
| 479 |
+
),
|
| 480 |
+
row=6, col=2
|
| 481 |
+
)
|
| 482 |
+
fig.add_trace(
|
| 483 |
+
go.Scatter(
|
| 484 |
+
y=[-1, 1],
|
| 485 |
+
x=[mean_20_50, mean_20_50],
|
| 486 |
+
mode='lines',
|
| 487 |
+
name='20-50 Mean',
|
| 488 |
+
line=dict(color='rgba(0, 0, 255, 1)', width=2, dash='solid'),
|
| 489 |
+
showlegend=False
|
| 490 |
+
),
|
| 491 |
+
row=6, col=2
|
| 492 |
+
)
|
| 493 |
+
fig.add_trace(
|
| 494 |
+
go.Scatter(
|
| 495 |
+
x=df_recent2.index,
|
| 496 |
+
y=normalized_deviation_20_50,
|
| 497 |
+
line=dict(color='blue', width=1),
|
| 498 |
+
name='20-SMA Deviation'
|
| 499 |
+
),
|
| 500 |
+
row=6, col=1
|
| 501 |
+
)
|
| 502 |
+
fig.add_trace(
|
| 503 |
+
go.Scatter(
|
| 504 |
+
x=df_recent2.index,
|
| 505 |
+
y=[0] * len(df_recent2),
|
| 506 |
+
line=dict(color='green', width=1, dash='dash'),
|
| 507 |
+
name='50-SMA Baseline'
|
| 508 |
+
),
|
| 509 |
+
row=6, col=1
|
| 510 |
+
)
|
| 511 |
+
|
| 512 |
+
# 50-SMA vs 100-SMA
|
| 513 |
+
if not normalized_deviation_50_100.empty:
|
| 514 |
+
mean_50_100 = normalized_deviation_50_100.mean()
|
| 515 |
+
std_50_100 = normalized_deviation_50_100.std() if normalized_deviation_50_100.std() != 0 else 1e-10
|
| 516 |
+
for level, label, color, dash in [
|
| 517 |
+
(mean_50_100, 'Mean', 'rgba(0, 128, 0, 1)', 'solid'),
|
| 518 |
+
(mean_50_100 + std_50_100, '+1σ', 'rgba(0, 128, 0, 0.8)', 'dash'),
|
| 519 |
+
(mean_50_100 - std_50_100, '-1σ', 'rgba(0, 128, 0, 0.8)', 'dash'),
|
| 520 |
+
(mean_50_100 + 2 * std_50_100, '+2σ', 'rgba(0, 128, 0, 0.6)', 'dot'),
|
| 521 |
+
(mean_50_100 - 2 * std_50_100, '-2σ', 'rgba(0, 128, 0, 0.6)', 'dot'),
|
| 522 |
+
(mean_50_100 + 3 * std_50_100, '+3σ', 'rgba(0, 128, 0, 0.4)', 'dashdot'),
|
| 523 |
+
(mean_50_100 - 3 * std_50_100, '-3σ', 'rgba(0, 128, 0, 0.4)', 'dashdot')
|
| 524 |
+
]:
|
| 525 |
+
if -1 <= level <= 1:
|
| 526 |
+
fig.add_trace(
|
| 527 |
+
go.Scatter(
|
| 528 |
+
x=df_recent3.index,
|
| 529 |
+
y=[level] * len(df_recent3),
|
| 530 |
+
mode='lines',
|
| 531 |
+
name=f'50-100 {label}',
|
| 532 |
+
line=dict(color=color, width=1, dash=dash),
|
| 533 |
+
showlegend=False
|
| 534 |
+
),
|
| 535 |
+
row=7, col=1
|
| 536 |
+
)
|
| 537 |
+
hist_data_50_100, _ = np.histogram(normalized_deviation_50_100.dropna(), bins=hist_bins)
|
| 538 |
+
hist_probs_50_100 = hist_data_50_100 / np.sum(hist_data_50_100) * 100 if np.sum(hist_data_50_100) > 0 else hist_data_50_100
|
| 539 |
+
fig.add_trace(
|
| 540 |
+
go.Bar(
|
| 541 |
+
y=bin_centers,
|
| 542 |
+
x=hist_probs_50_100,
|
| 543 |
+
orientation='h',
|
| 544 |
+
name='50-100 Distribution',
|
| 545 |
+
marker_color='rgba(0, 128, 0, 0.6)',
|
| 546 |
+
width=(hist_bins[1] - hist_bins[0]),
|
| 547 |
+
showlegend=False
|
| 548 |
+
),
|
| 549 |
+
row=7, col=2
|
| 550 |
+
)
|
| 551 |
+
fig.add_trace(
|
| 552 |
+
go.Scatter(
|
| 553 |
+
y=[-1, 1],
|
| 554 |
+
x=[mean_50_100, mean_50_100],
|
| 555 |
+
mode='lines',
|
| 556 |
+
name='50-100 Mean',
|
| 557 |
+
line=dict(color='rgba(0, 128, 0, 1)', width=2, dash='solid'),
|
| 558 |
+
showlegend=False
|
| 559 |
+
),
|
| 560 |
+
row=7, col=2
|
| 561 |
+
)
|
| 562 |
+
fig.add_trace(
|
| 563 |
+
go.Scatter(
|
| 564 |
+
x=df_recent3.index,
|
| 565 |
+
y=normalized_deviation_50_100,
|
| 566 |
+
line=dict(color='green', width=1),
|
| 567 |
+
name='50-SMA Deviation'
|
| 568 |
+
),
|
| 569 |
+
row=7, col=1
|
| 570 |
+
)
|
| 571 |
+
fig.add_trace(
|
| 572 |
+
go.Scatter(
|
| 573 |
+
x=df_recent3.index,
|
| 574 |
+
y=[0] * len(df_recent3),
|
| 575 |
+
line=dict(color='orange', width=1, dash='dash'),
|
| 576 |
+
name='100-SMA Baseline'
|
| 577 |
+
),
|
| 578 |
+
row=7, col=1
|
| 579 |
+
)
|
| 580 |
+
|
| 581 |
+
# 100-SMA vs 200-SMA
|
| 582 |
+
if not normalized_deviation_100_200.empty:
|
| 583 |
+
mean_100_200 = normalized_deviation_100_200.mean()
|
| 584 |
+
std_100_200 = normalized_deviation_100_200.std() if normalized_deviation_100_200.std() != 0 else 1e-10
|
| 585 |
+
for level, label, color, dash in [
|
| 586 |
+
(mean_100_200, 'Mean', 'rgba(255, 165, 0, 1)', 'solid'),
|
| 587 |
+
(mean_100_200 + std_100_200, '+1σ', 'rgba(255, 165, 0, 0.8)', 'dash'),
|
| 588 |
+
(mean_100_200 - std_100_200, '-1σ', 'rgba(255, 165, 0, 0.8)', 'dash'),
|
| 589 |
+
(mean_100_200 + 2 * std_100_200, '+2σ', 'rgba(255, 165, 0, 0.6)', 'dot'),
|
| 590 |
+
(mean_100_200 - 2 * std_100_200, '-2σ', 'rgba(255, 165, 0, 0.6)', 'dot'),
|
| 591 |
+
(mean_100_200 + 3 * std_100_200, '+3σ', 'rgba(255, 165, 0, 0.4)', 'dashdot'),
|
| 592 |
+
(mean_100_200 - 3 * std_100_200, '-3σ', 'rgba(255, 165, 0, 0.4)', 'dashdot')
|
| 593 |
+
]:
|
| 594 |
+
if -1 <= level <= 1:
|
| 595 |
+
fig.add_trace(
|
| 596 |
+
go.Scatter(
|
| 597 |
+
x=df_recent4.index,
|
| 598 |
+
y=[level] * len(df_recent4),
|
| 599 |
+
mode='lines',
|
| 600 |
+
name=f'100-200 {label}',
|
| 601 |
+
line=dict(color=color, width=1, dash=dash),
|
| 602 |
+
showlegend=False
|
| 603 |
+
),
|
| 604 |
+
row=8, col=1
|
| 605 |
+
)
|
| 606 |
+
hist_data_100_200, _ = np.histogram(normalized_deviation_100_200.dropna(), bins=hist_bins)
|
| 607 |
+
hist_probs_100_200 = hist_data_100_200 / np.sum(hist_data_100_200) * 100 if np.sum(hist_data_100_200) > 0 else hist_data_100_200
|
| 608 |
+
fig.add_trace(
|
| 609 |
+
go.Bar(
|
| 610 |
+
y=bin_centers,
|
| 611 |
+
x=hist_probs_100_200,
|
| 612 |
+
orientation='h',
|
| 613 |
+
name='100-200 Distribution',
|
| 614 |
+
marker_color='rgba(255, 165, 0, 0.6)',
|
| 615 |
+
width=(hist_bins[1] - hist_bins[0]),
|
| 616 |
+
showlegend=False
|
| 617 |
+
),
|
| 618 |
+
row=8, col=2
|
| 619 |
+
)
|
| 620 |
+
fig.add_trace(
|
| 621 |
+
go.Scatter(
|
| 622 |
+
y=[-1, 1],
|
| 623 |
+
x=[mean_100_200, mean_100_200],
|
| 624 |
+
mode='lines',
|
| 625 |
+
name='100-200 Mean',
|
| 626 |
+
line=dict(color='rgba(255, 165, 0, 1)', width=2, dash='solid'),
|
| 627 |
+
showlegend=False
|
| 628 |
+
),
|
| 629 |
+
row=8, col=2
|
| 630 |
+
)
|
| 631 |
+
fig.add_trace(
|
| 632 |
+
go.Scatter(
|
| 633 |
+
x=df_recent4.index,
|
| 634 |
+
y=normalized_deviation_100_200,
|
| 635 |
+
line=dict(color='orange', width=1),
|
| 636 |
+
name='100-SMA Deviation'
|
| 637 |
+
),
|
| 638 |
+
row=8, col=1
|
| 639 |
+
)
|
| 640 |
+
fig.add_trace(
|
| 641 |
+
go.Scatter(
|
| 642 |
+
x=df_recent4.index,
|
| 643 |
+
y=[0] * len(df_recent4),
|
| 644 |
+
line=dict(color='red', width=1, dash='dash'),
|
| 645 |
+
name='200-SMA Baseline'
|
| 646 |
+
),
|
| 647 |
+
row=8, col=1
|
| 648 |
+
)
|
| 649 |
+
|
| 650 |
+
# Update layout
|
| 651 |
+
fig.update_layout(
|
| 652 |
+
yaxis_title="Price (INR)", xaxis_title="Date",
|
| 653 |
+
yaxis2_title="Price (INR)", xaxis2_title="Volume",
|
| 654 |
+
yaxis3_title="Volume", xaxis3_title="Date",
|
| 655 |
+
yaxis4_title="Frequency", xaxis4_title="Returns (%)",
|
| 656 |
+
yaxis5_title="Probability (%)", xaxis5_title="Returns (%)",
|
| 657 |
+
yaxis6_title="Returns (%)", xaxis6_title="Probability (%)",
|
| 658 |
+
yaxis7_title="Returns (%)", xaxis7_title="Day of the week",
|
| 659 |
+
yaxis8_title="Normalized Deviation", xaxis8_title="Date",
|
| 660 |
+
yaxis8=dict(range=[-1, 1]),
|
| 661 |
+
yaxis9_title="Probability (%)", xaxis9_title="",
|
| 662 |
+
yaxis9=dict(range=[-1, 1]),
|
| 663 |
+
yaxis10_title="Normalized Deviation", xaxis10_title="Date",
|
| 664 |
+
yaxis10=dict(range=[-1, 1]),
|
| 665 |
+
yaxis11_title="Probability (%)", xaxis11_title="",
|
| 666 |
+
yaxis11=dict(range=[-1, 1]),
|
| 667 |
+
yaxis12_title="Normalized Deviation", xaxis12_title="Date",
|
| 668 |
+
yaxis12=dict(range=[-1, 1]),
|
| 669 |
+
yaxis13_title="Probability (%)", xaxis13_title="",
|
| 670 |
+
yaxis13=dict(range=[-1, 1]),
|
| 671 |
+
yaxis14_title="Normalized Deviation", xaxis14_title="Date",
|
| 672 |
+
yaxis14=dict(range=[-1, 1]),
|
| 673 |
+
yaxis15_title="Probability (%)", xaxis15_title="",
|
| 674 |
+
yaxis15=dict(range=[-1, 1]),
|
| 675 |
+
template="plotly_white", xaxis_rangeslider_visible=False,
|
| 676 |
+
height=2000, showlegend=True,
|
| 677 |
+
dragmode='zoom', hovermode='x unified', uirevision='dataset'
|
| 678 |
+
)
|
| 679 |
+
|
| 680 |
+
st.plotly_chart(fig, use_container_width=True)
|
| 681 |
+
|
| 682 |
+
# Additional Visualizations
|
| 683 |
+
st.subheader("Advanced Analysis")
|
| 684 |
+
|
| 685 |
+
# Transition Matrix
|
| 686 |
+
st.subheader("Return Transition Matrix")
|
| 687 |
+
bins = [-np.inf, -2, 0, 2, np.inf]
|
| 688 |
+
labels = ['<-2%', '-2% to 0%', '0% to 2%', '>2%']
|
| 689 |
+
df['Return_Bin'] = pd.cut(df['Returns'], bins=bins, labels=labels)
|
| 690 |
+
df['Next_Return_Bin'] = df['Return_Bin'].shift(-1)
|
| 691 |
+
transition_matrix = pd.crosstab(df['Return_Bin'], df['Next_Return_Bin'], normalize='index') * 100
|
| 692 |
+
fig_tm = px.imshow(
|
| 693 |
+
transition_matrix,
|
| 694 |
+
labels=dict(x="Next Day Return Bin", y="Current Day Return Bin", color="Probability (%)"),
|
| 695 |
+
x=labels, y=labels, color_continuous_scale='Blues'
|
| 696 |
+
)
|
| 697 |
+
fig_tm.update_layout(title="Transition Matrix of Returns")
|
| 698 |
+
st.plotly_chart(fig_tm, use_container_width=True)
|
| 699 |
+
|
| 700 |
+
# Comparative Analysis with NIFTY 50
|
| 701 |
+
st.subheader("Comparative Analysis with NIFTY 50")
|
| 702 |
+
nifty_data = fetch_index_data("26003", start_date, end_date)
|
| 703 |
+
nifty_data['Returns'] = ((nifty_data['Close'] - nifty_data['Open']) / nifty_data['Open']) * 100
|
| 704 |
+
merged_df = pd.merge(df[['Returns']], nifty_data[['Returns']], left_index=True, right_index=True, suffixes=('_Stock', '_Nifty'))
|
| 705 |
+
merged_df['Return_Diff'] = merged_df['Returns_Stock'] - merged_df['Returns_Nifty']
|
| 706 |
+
fig_diff = go.Figure()
|
| 707 |
+
fig_diff.add_trace(
|
| 708 |
+
go.Histogram(
|
| 709 |
+
x=merged_df['Return_Diff'],
|
| 710 |
+
nbinsx=50,
|
| 711 |
+
name='Return Difference (Stock - Nifty)',
|
| 712 |
+
marker_color='rgba(255, 69, 0, 0.6)',
|
| 713 |
+
opacity=0.8
|
| 714 |
+
)
|
| 715 |
+
)
|
| 716 |
+
fig_diff.update_layout(
|
| 717 |
+
title="Histogram of Return Differences (Stock - NIFTY 50)",
|
| 718 |
+
xaxis_title="Return Difference (%)",
|
| 719 |
+
yaxis_title="Frequency",
|
| 720 |
+
template="plotly_white"
|
| 721 |
+
)
|
| 722 |
+
st.plotly_chart(fig_diff, use_container_width=True)
|
| 723 |
+
|
| 724 |
+
# Heatmap of Returns by Day of Week and Month
|
| 725 |
+
st.subheader("Returns by Day of Week and Month")
|
| 726 |
+
heatmap_data = df.pivot_table(values='Returns', index='DayOfWeek', columns='Month', aggfunc='mean')
|
| 727 |
+
heatmap_data.index = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
|
| 728 |
+
heatmap_data.columns = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
|
| 729 |
+
fig_heatmap = px.imshow(
|
| 730 |
+
heatmap_data,
|
| 731 |
+
labels=dict(x="Month", y="Day of Week", color="Average Return (%)"),
|
| 732 |
+
color_continuous_scale='RdYlGn'
|
| 733 |
+
)
|
| 734 |
+
fig_heatmap.update_layout(title="Heatmap of Average Returns by Day of Week and Month")
|
| 735 |
+
st.plotly_chart(fig_heatmap, use_container_width=True)
|
| 736 |
+
|
| 737 |
+
# Pattern-based Return Prediction
|
| 738 |
+
st.subheader("Pattern-based Return Prediction")
|
| 739 |
+
prediction = predict_next_return(df, window_size=3, bin_size=0.5)
|
| 740 |
+
if prediction['current_pattern']:
|
| 741 |
+
st.write(f"**Current Pattern (Last 3 Days' Returns)**: {list(prediction['current_pattern'])}%")
|
| 742 |
+
st.write(f"**Number of Historical Occurrences**: {prediction['occurrences']}")
|
| 743 |
+
if prediction['most_probable_return'] is not None:
|
| 744 |
+
st.write(f"**Most Probable Next Return**: {prediction['most_probable_return']}%")
|
| 745 |
+
st.write("**Frequency of Next Returns**:")
|
| 746 |
+
for return_val, freq in prediction['next_return_frequencies'].items():
|
| 747 |
+
st.write(f"Return {return_val}%: {freq:.2f}%")
|
| 748 |
+
|
| 749 |
+
# Visualize frequency of next returns
|
| 750 |
+
fig_pred = go.Figure()
|
| 751 |
+
fig_pred.add_trace(
|
| 752 |
+
go.Bar(
|
| 753 |
+
x=list(prediction['next_return_frequencies'].keys()),
|
| 754 |
+
y=list(prediction['next_return_frequencies'].values()),
|
| 755 |
+
name='Next Return Probability',
|
| 756 |
+
marker_color='rgba(0, 128, 255, 0.6)'
|
| 757 |
+
)
|
| 758 |
+
)
|
| 759 |
+
fig_pred.update_layout(
|
| 760 |
+
title="Probability Distribution of Next Return",
|
| 761 |
+
xaxis_title="Next Return (%)",
|
| 762 |
+
yaxis_title="Probability (%)",
|
| 763 |
+
template="plotly_white"
|
| 764 |
+
)
|
| 765 |
+
st.plotly_chart(fig_pred, use_container_width=True)
|
| 766 |
+
else:
|
| 767 |
+
st.write("No historical matches found for the current pattern.")
|
| 768 |
+
else:
|
| 769 |
+
st.write("Insufficient data to form a pattern for prediction.")
|
| 770 |
+
|
| 771 |
+
# Stock Information
|
| 772 |
+
selected_stock_meta = eq_list.loc[eq_list['TradingSymbol']==selected_stock]
|
| 773 |
|
| 774 |
+
if(selected_stock_meta.iloc[0]['TradingSymbol'].endswith('EQ')):
|
| 775 |
+
metadata = api.get_stock_metadata(selected_stock)
|
| 776 |
+
enriched = {
|
| 777 |
+
'ScripCode': selected_stock_meta['ScripCode'],
|
| 778 |
+
'TradingSymbol': selected_stock,
|
| 779 |
+
'Description': selected_stock_meta['Description'],
|
| 780 |
+
'InstrumentType': selected_stock_meta['InstrumentType'],
|
| 781 |
+
'CompanyName': metadata['info'].get('companyName'),
|
| 782 |
+
'ISIN': metadata['info'].get('isin'),
|
| 783 |
+
'MarketCapital': int((float(metadata['priceInfo'].get('lastPrice'))*float(metadata['securityInfo'].get('issuedSize')))/pow(10,7)),
|
| 784 |
+
'ListingDate': metadata['info'].get('listingDate'),
|
| 785 |
+
'LastPrice': metadata['priceInfo'].get('lastPrice'),
|
| 786 |
+
'PreviousClose': metadata['priceInfo'].get('previousClose'),
|
| 787 |
+
'Open': metadata['priceInfo'].get('open'),
|
| 788 |
+
'High': metadata['priceInfo']['intraDayHighLow'].get('max'),
|
| 789 |
+
'Low': metadata['priceInfo']['intraDayHighLow'].get('min'),
|
| 790 |
+
'VWAP': metadata['priceInfo'].get('vwap'),
|
| 791 |
+
'52WeekHigh': metadata['priceInfo']['weekHighLow'].get('max'),
|
| 792 |
+
'52WeekLow': metadata['priceInfo']['weekHighLow'].get('min'),
|
| 793 |
+
'SectorPE': metadata['metadata'].get('pdSectorPe'),
|
| 794 |
+
'PE': metadata['metadata'].get('pdSymbolPe'),
|
| 795 |
+
'Sector': metadata['industryInfo'].get('sector'),
|
| 796 |
+
'Macro': metadata['industryInfo'].get('macro'),
|
| 797 |
+
'Industry': metadata['industryInfo'].get('industry'),
|
| 798 |
+
'BasicIndustry': metadata['industryInfo'].get('basicIndustry'),
|
| 799 |
+
}
|
| 800 |
+
|
| 801 |
+
st.subheader("Stock Information")
|
| 802 |
+
st.write(f"**Company Name**: {enriched['CompanyName']}")
|
| 803 |
+
st.write(f"**Sector**: {enriched['Sector']}")
|
| 804 |
+
st.write(f"**Industry**: {enriched['BasicIndustry']}")
|
| 805 |
+
st.write(f"**Market Capital(CR)**: ₹{enriched['MarketCapital']}")
|
| 806 |
+
st.write(f"**PE**: {enriched['PE']}")
|
| 807 |
+
st.write(f"**Sector PE**: {enriched['SectorPE']}")
|
| 808 |
+
st.write(f"**52WeekHigh**: ₹{enriched['52WeekHigh']}")
|
| 809 |
+
st.write(f"**52WeekLow**: ₹{enriched['52WeekLow']}")
|
| 810 |
+
st.write(f"**VWAP**: ₹{enriched['VWAP']}")
|
| 811 |
+
else:
|
| 812 |
+
selected_index_meta = inx_list.loc[inx_list['index']==selected_stock.upper()]
|
| 813 |
+
if len(selected_index_meta)>0:
|
| 814 |
+
st.subheader("Index Information")
|
| 815 |
+
st.write(selected_index_meta.iloc[0])
|
| 816 |
+
else:
|
| 817 |
+
st.write("Select a stock from the sidebar to view the chart.")
|