ketannnn commited on
Commit
5117aff
·
1 Parent(s): 0fb2300

Project completed with deployment and dependency setup

Browse files
Files changed (8) hide show
  1. .env +0 -1
  2. .gitignore +120 -0
  3. Dockerfile +26 -0
  4. app.py +26 -21
  5. app_news.py +3 -4
  6. config.json +24 -0
  7. requirements.txt +3 -2
  8. server.ts +0 -0
.env DELETED
@@ -1 +0,0 @@
1
- FEATHERLESS_API_KEY=rc_4d392b96964431c635a93d2e6fef4b7857c22b81edc93b107ce85bead2614fc1
 
 
.gitignore ADDED
@@ -0,0 +1,120 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ *.egg-info/
24
+ .installed.cfg
25
+ *.egg
26
+
27
+ # Virtual environments
28
+ .env
29
+ .venv
30
+ env/
31
+ venv/
32
+ ENV/
33
+ env.bak/
34
+ venv.bak/
35
+
36
+ # Environment variables
37
+ .env.*
38
+ *.env
39
+
40
+ # Django stuff
41
+ *.log
42
+ local_settings.py
43
+ db.sqlite3
44
+ db.sqlite3-journal
45
+
46
+ # Flask stuff
47
+ instance/
48
+ .webassets-cache
49
+
50
+ # FastAPI / general app logs
51
+ *.log
52
+
53
+ # Unit test / coverage reports
54
+ htmlcov/
55
+ .tox/
56
+ .nox/
57
+ .coverage
58
+ .coverage.*
59
+ .cache
60
+ pytest_cache/
61
+ nosetests.xml
62
+ coverage.xml
63
+ *.cover
64
+ *.py,cover
65
+ .hypothesis/
66
+
67
+ # Type checking
68
+ .mypy_cache/
69
+ .dmypy.json
70
+ dmypy.json
71
+
72
+ # Pyre type checker
73
+ .pyre/
74
+
75
+ # Ruff / linting
76
+ .ruff_cache/
77
+
78
+ # PyCharm
79
+ .idea/
80
+
81
+ # VS Code
82
+ .vscode/
83
+
84
+ # Jupyter Notebook
85
+ .ipynb_checkpoints/
86
+
87
+ # pyenv
88
+ .python-version
89
+
90
+ # pipenv
91
+ Pipfile.lock
92
+
93
+ # poetry
94
+ poetry.lock
95
+
96
+ # system files
97
+ .DS_Store
98
+ Thumbs.db
99
+
100
+ # Docker
101
+ *.pid
102
+ *.seed
103
+ *.pid.lock
104
+
105
+ # logs
106
+ logs/
107
+ *.log
108
+
109
+ # temporary files
110
+ tmp/
111
+ temp/
112
+
113
+ # build artifacts
114
+ *.manifest
115
+ *.spec
116
+
117
+ # secrets
118
+ secrets.json
119
+ *.pem
120
+ *.key
Dockerfile ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM python:3.11-slim
2
+
3
+ WORKDIR /app
4
+
5
+ # Install system dependencies
6
+ RUN apt-get update && apt-get install -y \
7
+ build-essential \
8
+ curl \
9
+ && rm -rf /var/lib/apt/lists/*
10
+
11
+ # Copy requirements and install
12
+ COPY requirements.txt .
13
+ RUN pip install --no-cache-dir -r requirements.txt
14
+
15
+ # Copy application code (including config.json and routers/)
16
+ COPY . .
17
+
18
+ # Set environment variables for production
19
+ ENV PYTHONUNBUFFERED=1
20
+ ENV PORT=7860
21
+
22
+ # Expose the standard Hugging Face port
23
+ EXPOSE 7860
24
+
25
+ # Run the application using uvicorn
26
+ CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
app.py CHANGED
@@ -14,7 +14,7 @@ from typing import List, Dict, Any, Optional
14
  from huggingface_hub import hf_hub_download
15
  from datetime import datetime, timedelta
16
  from dotenv import load_dotenv
17
- from openai import OpenAI
18
 
19
  # Load environment variables
20
  load_dotenv()
@@ -24,9 +24,8 @@ logging.basicConfig(level=logging.INFO)
24
  logger = logging.getLogger(__name__)
25
 
26
  # --- LLM Client Setup ---
27
- llm_client = OpenAI(
28
- base_url="https://api.featherless.ai/v1",
29
- api_key=os.getenv("FEATHERLESS_API_KEY")
30
  )
31
 
32
  def analyze_cascade_with_llm(
@@ -93,7 +92,7 @@ IMPORTANT: Only include nodes from the NETWORK NODES list. The shocked node must
93
 
94
  try:
95
  response = llm_client.chat.completions.create(
96
- model="moonshotai/Kimi-K2-Instruct",
97
  messages=[
98
  {"role": "system", "content": "You are a financial risk analyst. Always respond with valid JSON only."},
99
  {"role": "user", "content": prompt}
@@ -315,16 +314,23 @@ def _run_ccp_funds_self_checks():
315
  # --- Helper Functions ---
316
 
317
  def load_config():
318
- """Load configuration from frontend/config.json."""
319
  global config
320
  try:
321
- if os.path.exists(CONFIG_PATH):
322
- with open(CONFIG_PATH, "r") as f:
 
 
 
 
 
 
 
323
  config = json.load(f)
324
- logger.info("Config loaded successfully.")
325
  else:
326
  # Fallback default config if file is missing (though unlikely in this setup)
327
- logger.warning(f"Config file not found at {CONFIG_PATH}. Using defaults.")
328
  config = {
329
  "tickers": ["HDFCBANK.NS", "KOTAKBANK.NS", "ICICIBANK.NS", "BAJFINANCE.NS", "BSE.NS",
330
  "TCS.NS", "INFY.NS", "RELIANCE.NS", "SBIN.NS", "ADANIENT.NS",
@@ -762,21 +768,20 @@ async def simulate(request: SimulationRequest):
762
  if not model or not scaler:
763
  raise HTTPException(status_code=503, detail="Model not loaded.")
764
 
765
- all_tickers = config["tickers"]
 
766
  valid_tickers = set(all_tickers)
 
767
 
768
- # Validate tickers
769
- tickers_subset = request.tickers
770
  if not tickers_subset:
771
- raise HTTPException(status_code=400, detail="Tickers list cannot be empty.")
772
-
773
- invalid = [t for t in tickers_subset if t not in valid_tickers]
774
- if invalid:
775
- raise HTTPException(status_code=400, detail=f"Invalid tickers: {invalid}. Must be in config.")
776
 
777
  # Validate shocked_node
778
- if request.shocked_node not in valid_tickers:
779
- raise HTTPException(status_code=400, detail=f"shocked_node '{request.shocked_node}' not in allowed universe.")
 
 
780
 
781
  # Fetch data
782
  start_date = request.start if request.start else config["start"]
@@ -1074,7 +1079,7 @@ Respond in this exact JSON array format:
1074
 
1075
  try:
1076
  response = llm_client.chat.completions.create(
1077
- model="Qwen/Qwen2.5-Coder-32B-Instruct",
1078
  messages=[
1079
  {"role": "system", "content": "You are a financial news generator for Indian markets. Generate realistic, professional news in JSON format only."},
1080
  {"role": "user", "content": prompt}
 
14
  from huggingface_hub import hf_hub_download
15
  from datetime import datetime, timedelta
16
  from dotenv import load_dotenv
17
+ from groq import Groq
18
 
19
  # Load environment variables
20
  load_dotenv()
 
24
  logger = logging.getLogger(__name__)
25
 
26
  # --- LLM Client Setup ---
27
+ llm_client = Groq(
28
+ api_key=os.getenv("GROQ_API_KEY")
 
29
  )
30
 
31
  def analyze_cascade_with_llm(
 
92
 
93
  try:
94
  response = llm_client.chat.completions.create(
95
+ model="llama-3.3-70b-versatile",
96
  messages=[
97
  {"role": "system", "content": "You are a financial risk analyst. Always respond with valid JSON only."},
98
  {"role": "user", "content": prompt}
 
314
  # --- Helper Functions ---
315
 
316
  def load_config():
317
+ """Load configuration from config.json."""
318
  global config
319
  try:
320
+ # Try local config.json first (deployment)
321
+ if os.path.exists("config.json"):
322
+ config_file = "config.json"
323
+ else:
324
+ # Fallback for local development
325
+ config_file = "../frontend/config.json"
326
+
327
+ if os.path.exists(config_file):
328
+ with open(config_file, "r") as f:
329
  config = json.load(f)
330
+ logger.info(f"Config loaded from {config_file}")
331
  else:
332
  # Fallback default config if file is missing (though unlikely in this setup)
333
+ logger.warning(f"Config file not found. Using hardcoded defaults.")
334
  config = {
335
  "tickers": ["HDFCBANK.NS", "KOTAKBANK.NS", "ICICIBANK.NS", "BAJFINANCE.NS", "BSE.NS",
336
  "TCS.NS", "INFY.NS", "RELIANCE.NS", "SBIN.NS", "ADANIENT.NS",
 
768
  if not model or not scaler:
769
  raise HTTPException(status_code=503, detail="Model not loaded.")
770
 
771
+ # NEW: Allow dynamic tickers beyond just config.json
772
+ all_tickers = list(set(config["tickers"] + request.tickers))
773
  valid_tickers = set(all_tickers)
774
+ tickers_subset = [t for t in request.tickers if t] # Take all requested
775
 
 
 
776
  if not tickers_subset:
777
+ tickers_subset = all_tickers
778
+ logger.info("No tickers provided. Falling back to default universe.")
 
 
 
779
 
780
  # Validate shocked_node
781
+ shocked_node = request.shocked_node
782
+ if shocked_node not in valid_tickers:
783
+ logger.warning(f"shocked_node '{shocked_node}' not in allowed universe. Using '{tickers_subset[0]}' as quantitative proxy.")
784
+ shocked_node = tickers_subset[0]
785
 
786
  # Fetch data
787
  start_date = request.start if request.start else config["start"]
 
1079
 
1080
  try:
1081
  response = llm_client.chat.completions.create(
1082
+ model="llama-3.3-70b-versatile",
1083
  messages=[
1084
  {"role": "system", "content": "You are a financial news generator for Indian markets. Generate realistic, professional news in JSON format only."},
1085
  {"role": "user", "content": prompt}
app_news.py CHANGED
@@ -14,7 +14,7 @@ from typing import List, Dict, Any, Optional
14
  from huggingface_hub import hf_hub_download
15
  from datetime import datetime, timedelta
16
  from dotenv import load_dotenv
17
- from openai import OpenAI
18
 
19
  # Load environment variables
20
  load_dotenv()
@@ -24,9 +24,8 @@ logging.basicConfig(level=logging.INFO)
24
  logger = logging.getLogger(__name__)
25
 
26
  # --- LLM Client Setup ---
27
- llm_client = OpenAI(
28
- base_url="https://api.featherless.ai/v1",
29
- api_key=os.getenv("FEATHERLESS_API_KEY")
30
  )
31
 
32
  def analyze_cascade_with_llm(
 
14
  from huggingface_hub import hf_hub_download
15
  from datetime import datetime, timedelta
16
  from dotenv import load_dotenv
17
+ from groq import Groq
18
 
19
  # Load environment variables
20
  load_dotenv()
 
24
  logger = logging.getLogger(__name__)
25
 
26
  # --- LLM Client Setup ---
27
+ llm_client = Groq(
28
+ api_key=os.getenv("GROQ_API_KEY")
 
29
  )
30
 
31
  def analyze_cascade_with_llm(
config.json ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "tickers": [
3
+ "HDFCBANK.NS",
4
+ "KOTAKBANK.NS",
5
+ "ICICIBANK.NS",
6
+ "BAJFINANCE.NS",
7
+ "BSE.NS",
8
+ "TCS.NS",
9
+ "INFY.NS",
10
+ "RELIANCE.NS",
11
+ "SBIN.NS",
12
+ "ADANIENT.NS",
13
+ "MRF.NS",
14
+ "HINDUNILVR.NS",
15
+ "TATASTEEL.NS",
16
+ "AXISBANK.NS",
17
+ "BHARTIARTL.NS"
18
+ ],
19
+ "ccp_name": "CCP",
20
+ "start": "2024-01-01",
21
+ "ret_window": 20,
22
+ "lookback": 20,
23
+ "delta_ccp": 0.1
24
+ }
requirements.txt CHANGED
@@ -7,6 +7,7 @@ scikit-learn
7
  tensorflow-cpu
8
  huggingface_hub
9
  joblib
10
- pyzmq==26.2.0
11
- openai
12
  python-dotenv
 
 
 
 
7
  tensorflow-cpu
8
  huggingface_hub
9
  joblib
 
 
10
  python-dotenv
11
+ groq
12
+ openai
13
+ httpx
server.ts DELETED
File without changes