rairo committed on
Commit
c15c780
·
verified ·
1 Parent(s): 4fa12e4

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +206 -251
main.py CHANGED
@@ -1,3 +1,4 @@
 
1
  from langchain_google_genai import ChatGoogleGenerativeAI
2
  import pandas as pd
3
  import os
@@ -6,346 +7,300 @@ from flask import Flask, request, jsonify
6
  from flask_cors import CORS, cross_origin
7
  import logging
8
  from dotenv import load_dotenv
9
- from pandasai import SmartDatalake
10
  from pandasai import SmartDataframe
11
  from pandasai.responses.response_parser import ResponseParser
12
- from langchain.prompts import PromptTemplate
13
- from langchain.chains import LLMChain
14
- from datetime import datetime
15
  import matplotlib.pyplot as plt
16
  import google.generativeai as genai
17
  import uuid
18
  import base64
19
- from io import BytesIO
20
  import requests
21
- import urllib.parse # Added for URL encoding
 
 
22
 
23
  load_dotenv()
24
 
25
  app = Flask(__name__)
26
- cors = CORS(app)
27
 
28
- # Set up logging configuration
29
- logging.basicConfig(
30
- level=logging.DEBUG,
31
- format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
32
- )
33
  logger = logging.getLogger(__name__)
34
 
 
 
35
  class FlaskResponse(ResponseParser):
36
  def __init__(self, context):
37
  super().__init__(context)
38
-
39
  def format_dataframe(self, result):
40
- logger.debug("Formatting dataframe result")
41
  return result["value"].to_html()
42
-
43
  def format_plot(self, result):
44
- logger.debug("Formatting plot result")
45
  val = result["value"]
46
- # If val is a matplotlib figure, handle it accordingly.
47
  if hasattr(val, "savefig"):
48
- try:
49
- buf = io.BytesIO()
50
- val.savefig(buf, format="png")
51
- buf.seek(0)
52
- image_base64 = base64.b64encode(buf.read()).decode("utf-8")
53
- logger.debug("Successfully converted matplotlib figure to base64")
54
- return f"data:image/png;base64,{image_base64}"
55
- except Exception as e:
56
- logger.error(f"Error processing figure: {e}")
57
- return str(val)
58
- # If val is a string and is a valid file path, read and encode it.
59
  if isinstance(val, str) and os.path.isfile(os.path.join(val)):
60
- image_path = os.path.join(val)
61
- logger.debug(f"Processing image path: {image_path}")
62
- with open(image_path, "rb") as file:
63
- data = file.read()
64
- base64_data = base64.b64encode(data).decode("utf-8")
65
- logger.debug("Successfully converted image file to base64")
66
- return f"data:image/png;base64,{base64_data}"
67
- # Fallback: return as a string.
68
- logger.debug("Returning plot result as string")
69
  return str(val)
70
-
71
  def format_other(self, result):
72
- logger.debug("Formatting other result type")
73
- # For non-image responses, simply return the value as a string.
74
  return str(result["value"])
75
 
76
-
77
  logger.info("Initializing models...")
78
-
79
  gemini_api_key = os.getenv('Gemini')
80
- if not gemini_api_key:
81
- logger.error("Gemini API key not found in environment variables")
82
- raise ValueError("Gemini API key is required")
83
-
84
- logger.info("Setting up ChatGoogleGenerativeAI...")
85
- # --- Model name reverted to your original specification ---
86
  llm = ChatGoogleGenerativeAI(api_key=gemini_api_key, model='gemini-2.0-flash', temperature=0.1)
87
-
88
- logger.info("Configuring genai...")
89
  genai.configure(api_key=gemini_api_key)
 
 
 
90
 
91
- generation_config = {
92
- "temperature": 0.2,
93
- "top_p": 0.95,
94
- "max_output_tokens": 5000,
95
- }
96
 
97
- # --- Model name reverted to your original specification ---
98
- model = genai.GenerativeModel(
99
- model_name="gemini-2.0-flash-lite-001",
100
- generation_config=generation_config,
101
- )
102
 
103
- guid = uuid.uuid4()
104
- new_filename = f"{guid}"
105
- user_defined_path = os.path.join("/exports/charts", new_filename)
106
- logger.info(f"Chart export path set to: {user_defined_path}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
107
 
108
- # --- REFACTORED Endpoint for chat ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
109
  @app.route("/chat", methods=["POST"])
110
  @cross_origin()
111
  def bot():
112
  logger.info("=== Starting /chat endpoint ===")
113
-
114
  try:
115
- # Log the incoming request
116
- logger.debug(f"Request headers: {dict(request.headers)}")
117
- logger.debug(f"Request data: {request.get_data()}")
118
-
119
- # Retrieve parameters from the request
120
  request_json = request.get_json()
121
- logger.debug(f"Parsed JSON: {request_json}")
122
-
123
- if not request_json:
124
- logger.error("No JSON data in request")
125
- return jsonify({"error": "No JSON data provided in request."}), 400
126
-
127
  profile_id = request_json.get("profile_id")
128
  user_question = request_json.get("user_question")
129
-
130
- logger.info(f"Extracted profile_id: {profile_id}")
131
- logger.info(f"Extracted user_question: {user_question}")
132
-
133
- if not profile_id or not user_question:
134
- logger.error("Missing required parameters")
135
- return jsonify({"error": "Missing 'profile_id' or 'user_question' in request."}), 400
136
 
137
- logger.info(f"Processing request for profile_id: {profile_id}")
138
- logger.info(f"User question: {user_question}")
139
-
140
- # URL encode the profile_id
141
- encoded_profile_id = urllib.parse.quote_plus(str(profile_id))
142
- logger.info(f"URL encoded profile_id: {encoded_profile_id}")
143
-
144
- # Fetch data from the external API
145
  API_URL = "https://irisplustech.com/public/api/business/profile/user/get-recent-transactions-v2"
146
- payload = {'profile_id': encoded_profile_id}
147
-
148
- logger.info(f"Making API call to: {API_URL}")
149
- logger.debug(f"API payload: {payload}")
150
 
 
151
  try:
152
- logger.info("Sending POST request to transaction API...")
153
- response = requests.post(API_URL, data=payload, timeout=30)
154
- logger.info(f"API response status code: {response.status_code}")
155
- logger.debug(f"API response headers: {dict(response.headers)}")
156
-
157
- # Check if the request was successful
158
- if response.status_code != 200:
159
- logger.error(f"API request failed with status {response.status_code}")
160
- logger.error(f"API response text: {response.text}")
161
- return jsonify({
162
- "error": "Failed to fetch data from the transaction API.",
163
- "status_code": response.status_code,
164
- "details": response.text
165
- }), 502 # Bad Gateway
166
-
167
- logger.info("API request successful, parsing JSON response...")
168
- api_data = response.json()
169
- logger.debug(f"API response data keys: {list(api_data.keys()) if isinstance(api_data, dict) else 'Not a dict'}")
170
-
171
- # Check for API-level errors
172
- if api_data.get("error"):
173
- logger.error(f"API returned error: {api_data.get('message', 'No message')}")
174
- return jsonify({
175
- "error": "Transaction API returned an error.",
176
- "message": api_data.get("message", "No message provided.")
177
- }), 400
178
-
179
- transactions = api_data.get("transactions")
180
- logger.info(f"Transactions data type: {type(transactions)}")
181
- logger.info(f"Number of transactions: {len(transactions) if isinstance(transactions, list) else 'N/A'}")
182
-
183
- if transactions is None or not isinstance(transactions, list):
184
- logger.error("Invalid transactions data format")
185
- return jsonify({"error": "Invalid data format from transaction API. 'transactions' key is missing or not a list."}), 500
186
-
187
- if not transactions:
188
- logger.warning("No transactions found for profile")
189
- return jsonify({"answer": "No transaction data was found for this profile. I can't answer any questions."})
190
-
191
- # Convert the transaction data into a dataframe
192
- logger.info("Converting transactions to DataFrame...")
193
  df = pd.DataFrame(transactions)
194
- logger.info(f"DataFrame shape: {df.shape}")
195
- logger.info(f"DataFrame columns: {list(df.columns)}")
196
- logger.debug(f"DataFrame head:\n{df.head()}")
197
-
198
- # Create a SmartDataframe instance using your configuration.
199
- logger.info("Creating SmartDataframe instance...")
200
- pandas_agent = SmartDataframe(
201
- df,
202
- config={
203
- "llm": llm,
204
- "response_parser": FlaskResponse,
205
- "custom_whitelisted_dependencies": [
206
- "os", "io", "sys", "chr", "glob",
207
- "b64decoder", "collections", "geopy",
208
- "geopandas", "wordcloud", "builtins"
209
- ],
210
- "security": "none", "save_charts_path": user_defined_path,
211
- "save_charts": False, "enable_cache": False, "conversational":True
212
- }
213
- )
214
- logger.info("SmartDataframe created successfully")
215
-
216
- # Get the answer from the agent
217
- logger.info(f"Sending question to pandas agent: {user_question}")
218
  answer = pandas_agent.chat(user_question)
219
- logger.info(f"Received answer from agent, type: {type(answer)}")
220
- logger.debug(f"Raw answer: {str(answer)[:500]}...") # Log first 500 chars
221
-
222
- # Process the answer based on its type
223
- logger.info("Processing answer for response format...")
224
- formatted_answer = None
225
- if isinstance(answer, pd.DataFrame):
226
- logger.info("Answer is DataFrame, converting to HTML")
227
- formatted_answer = answer.to_html()
228
- elif isinstance(answer, plt.Figure):
229
- logger.info("Answer is matplotlib Figure, converting to base64")
230
- buf = io.BytesIO()
231
- answer.savefig(buf, format="png")
232
- buf.seek(0)
233
- image_base64 = base64.b64encode(buf.read()).decode("utf-8")
234
- formatted_answer = f"data:image/png;base64,{image_base64}"
235
- else:
236
- logger.info("Answer is other type, converting to string")
237
- formatted_answer = str(answer)
238
 
239
- logger.info(f"Formatted answer length: {len(str(formatted_answer))}")
 
 
 
 
 
 
 
 
 
240
 
241
- # Return the formatted answer as JSON.
242
- logger.info("Returning successful response")
243
- return jsonify({"answer": formatted_answer})
244
-
245
- except requests.exceptions.Timeout as e:
246
- logger.error(f"API request timeout: {e}")
247
- return jsonify({"error": "Transaction API request timed out.", "details": str(e)}), 504
248
- except requests.exceptions.ConnectionError as e:
249
- logger.error(f"API connection error: {e}")
250
- return jsonify({"error": "Could not connect to the transaction API.", "details": str(e)}), 503
251
- except requests.exceptions.RequestException as e:
252
- logger.error(f"API request exception: {e}")
253
- return jsonify({"error": "Could not connect to the transaction API.", "details": str(e)}), 500
254
- except ValueError as e:
255
- logger.error(f"JSON parsing error: {e}")
256
- return jsonify({"error": "Invalid JSON response from transaction API.", "details": str(e)}), 502
257
-
 
 
 
 
 
 
258
  except Exception as e:
259
- logger.exception("An unexpected error occurred in /chat endpoint")
 
260
  return jsonify({"error": "An unexpected server error occurred.", "details": str(e)}), 500
261
 
262
-
263
- # Reports endpoint
264
  @app.route("/report", methods=["POST"])
265
  @cross_origin()
266
  def busines_report():
267
  logger.info("=== Starting /report endpoint ===")
268
-
269
  try:
270
  request_json = request.get_json()
271
  json_data = request_json.get("json_data") if request_json else None
272
-
273
- logger.info(f"Processing report request with data length: {len(str(json_data)) if json_data else 0}")
274
-
275
- prompt = """
276
- You are Quantilytix business analyst. Analyze the following data and generate a comprehensive and insightful business report, including appropriate key perfomance indicators and recommendations Use markdown formatting and tables where necessary. only return the report and nothing else.
277
- data:
278
- """ + str(json_data)
279
-
280
- logger.info("Sending request to generative model for report...")
281
  response = model.generate_content(prompt)
282
- report = response.text
283
- logger.info(f"Generated report length: {len(report)}")
284
-
285
- return jsonify(str(report))
286
-
287
  except Exception as e:
288
  logger.exception("Error in /report endpoint")
289
  return jsonify({"error": "Failed to generate report.", "details": str(e)}), 500
290
 
291
-
292
- # Marketing endpoint
293
  @app.route("/marketing", methods=["POST"])
294
  @cross_origin()
295
  def marketing():
296
  logger.info("=== Starting /marketing endpoint ===")
297
-
298
  try:
299
  request_json = request.get_json()
300
  json_data = request_json.get("json_data") if request_json else None
301
-
302
- logger.info(f"Processing marketing request with data length: {len(str(json_data)) if json_data else 0}")
303
-
304
- prompt = """
305
- You are an Quantilytix Marketing Specialist. Analyze the following data and generate a comprehensive marketing strategy, Only return the marketing strategy. be very creative:
306
- """ + str(json_data)
307
-
308
- logger.info("Sending request to generative model for marketing strategy...")
309
  response = model.generate_content(prompt)
310
- report = response.text
311
- logger.info(f"Generated marketing strategy length: {len(report)}")
312
-
313
- return jsonify(str(report))
314
-
315
  except Exception as e:
316
  logger.exception("Error in /marketing endpoint")
317
  return jsonify({"error": "Failed to generate marketing strategy.", "details": str(e)}), 500
318
 
319
-
320
- # Notifications endpoint
321
  @app.route("/notify", methods=["POST"])
322
  @cross_origin()
323
  def notifications():
324
  logger.info("=== Starting /notify endpoint ===")
325
-
326
  try:
327
  request_json = request.get_json()
328
  json_data = request_json.get("json_data") if request_json else None
329
-
330
- logger.info(f"Processing notification request with data length: {len(str(json_data)) if json_data else 0}")
331
-
332
- prompt = """
333
- You are Quantilytix business analyst. Write a very brief analysis and marketing tips using this business data. your output should be suitable for a notification dashboard so no quips.
334
- """ + str(json_data)
335
-
336
- logger.info("Sending request to generative model for notifications...")
337
  response = model.generate_content(prompt)
338
- report = response.text
339
- logger.info(f"Generated notification content length: {len(report)}")
340
-
341
- return jsonify(str(report))
342
-
343
  except Exception as e:
344
  logger.exception("Error in /notify endpoint")
345
  return jsonify({"error": "Failed to generate notification content.", "details": str(e)}), 500
346
 
347
-
348
  if __name__ == "__main__":
349
- logger.info("Starting Flask application...")
350
- logger.info("Application will run on host=0.0.0.0, port=7860, debug=True")
351
  app.run(debug=True, host="0.0.0.0", port=7860)
 
1
+ # app.py
2
  from langchain_google_genai import ChatGoogleGenerativeAI
3
  import pandas as pd
4
  import os
 
7
  from flask_cors import CORS, cross_origin
8
  import logging
9
  from dotenv import load_dotenv
 
10
  from pandasai import SmartDataframe
11
  from pandasai.responses.response_parser import ResponseParser
12
+ from datetime import datetime, timedelta, timezone
 
 
13
  import matplotlib.pyplot as plt
14
  import google.generativeai as genai
15
  import uuid
16
  import base64
 
17
  import requests
18
+ import urllib.parse
19
+ import json
20
+ import re
21
 
22
load_dotenv()

app = Flask(__name__)
CORS(app)

# --- Logging configuration ---
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
)
logger = logging.getLogger(__name__)
30
 
31
# --- RESPONSE PARSER ---
class FlaskResponse(ResponseParser):
    """PandasAI response parser that renders results for a JSON/HTML API.

    DataFrames become HTML tables, plots become base64 PNG data URIs, and
    anything else is stringified.
    """

    def __init__(self, context):
        super().__init__(context)

    def format_dataframe(self, result):
        """Render a DataFrame result as an HTML table string."""
        return result["value"].to_html()

    def format_plot(self, result):
        """Render a plot result as a base64 PNG data URI.

        Handles three shapes of ``result["value"]``: a matplotlib figure
        (anything exposing ``savefig``), a path to an image file on disk,
        or an arbitrary value (returned as ``str``).
        """
        # NOTE(review): requires a module-level `import io` — the old
        # `from io import BytesIO` was removed in this commit; confirm
        # `import io` is present in the import block.
        val = result["value"]
        if hasattr(val, "savefig"):
            # Bug fix (regression): the previous revision guarded this encode;
            # without the guard a savefig failure aborts the whole request.
            try:
                buf = io.BytesIO()
                val.savefig(buf, format="png")
                buf.seek(0)
                return f"data:image/png;base64,{base64.b64encode(buf.read()).decode('utf-8')}"
            except Exception as e:
                logging.getLogger(__name__).error(f"Error processing figure: {e}")
                return str(val)
        # os.path.join with a single argument is a no-op; use val directly.
        if isinstance(val, str) and os.path.isfile(val):
            with open(val, "rb") as file:
                return f"data:image/png;base64,{base64.b64encode(file.read()).decode('utf-8')}"
        return str(val)

    def format_other(self, result):
        """Stringify any non-dataframe, non-plot result."""
        return str(result["value"])
51
 
52
# --- AI model initialization ---
logger.info("Initializing models...")

gemini_api_key = os.getenv('Gemini')
if not gemini_api_key:
    raise ValueError("Gemini API key is required.")

# Chat model used by PandasAI for dataframe Q&A.
llm = ChatGoogleGenerativeAI(api_key=gemini_api_key, model='gemini-2.0-flash', temperature=0.1)

# Direct Gemini client used by the /report, /marketing and /notify endpoints.
genai.configure(api_key=gemini_api_key)
generation_config = {
    "temperature": 0.2,
    "top_p": 0.95,
    "max_output_tokens": 5000,
}
model = genai.GenerativeModel(
    model_name="gemini-2.0-flash-lite-001",
    generation_config=generation_config,
)
logger.info("AI Models initialized.")

# Unique per-process directory for any charts PandasAI decides to save.
user_defined_path = os.path.join("/exports/charts", str(uuid.uuid4()))
logger.info(f"Chart export path set to: {user_defined_path}")
 
 
 
64
 
 
 
 
 
 
65
 
66
# --- TIER 2: COMPREHENSIVE KPI ENGINE (For Intelligent Fallback) ---
class IrisReportEngine:
    """Computes week-over-week sales KPIs from raw transaction rows and,
    via an LLM, narrates them as a graceful fallback answer when PandasAI
    cannot handle a query.
    """

    # Columns the engine reads while preparing data; all must be present.
    _REQUIRED_COLUMNS = ('Transaction_Type', 'Date', 'Time',
                         'Amount', 'Unit_Cost_Price', 'Units_Sold')

    def __init__(self, transactions_data: list, llm_instance):
        self.llm = llm_instance
        self.df = self._load_and_prepare_data(transactions_data)
        self.currency = self._get_primary_currency()

    def _load_and_prepare_data(self, transactions: list) -> pd.DataFrame:
        """Return a cleaned, sales-only DataFrame with derived profit columns.

        Robustness fix: a missing expected column previously raised KeyError
        out of __init__; now we log and degrade to an empty frame, which the
        briefing reports as "no data".
        """
        if not transactions:
            return pd.DataFrame()
        df = pd.DataFrame(transactions)
        missing = [c for c in self._REQUIRED_COLUMNS if c not in df.columns]
        if missing:
            logging.getLogger(__name__).error(f"Transaction data missing required columns: {missing}")
            return pd.DataFrame()
        for col in ('Units_Sold', 'Unit_Cost_Price', 'Amount'):
            df[col] = pd.to_numeric(df[col], errors='coerce').fillna(0)
        # NOTE(review): naive Date/Time strings are interpreted as UTC here —
        # confirm the upstream API reports timestamps in UTC.
        df['datetime'] = pd.to_datetime(df['Date'] + ' ' + df['Time'], errors='coerce', utc=True)
        df.dropna(subset=['datetime'], inplace=True)
        df['DayOfWeek'] = df['datetime'].dt.day_name()
        df['HourOfDay'] = df['datetime'].dt.hour
        sales_df = df[df['Transaction_Type'].str.lower() == 'sale'].copy()
        sales_df['Revenue'] = sales_df['Amount']
        sales_df['CostOfGoods'] = sales_df['Unit_Cost_Price'] * sales_df['Units_Sold']
        sales_df['GrossProfit'] = sales_df['Revenue'] - sales_df['CostOfGoods']
        return sales_df

    def _get_primary_currency(self) -> str:
        """Most common currency code in the sales data, defaulting to USD."""
        if self.df.empty or 'Currency' not in self.df.columns:
            return "USD"
        modes = self.df['Currency'].mode()
        return modes[0] if not modes.empty else "USD"

    def _get_comparison_timeframes(self) -> tuple[pd.DataFrame, pd.DataFrame, str]:
        """Return (current week-to-date frame, full previous-week frame, label).

        The current window runs Monday 00:00 through the end of today; the
        previous window is the full Monday-Sunday week before it.
        Bug fix: microseconds are now pinned at the window edges so boundary
        timestamps cannot fall outside (or between) the two windows.
        """
        now = datetime.now(timezone.utc)
        end_of_current_week = now.replace(hour=23, minute=59, second=59, microsecond=999999)
        start_of_current_week = (end_of_current_week - timedelta(days=now.weekday())).replace(hour=0, minute=0, second=0, microsecond=0)
        end_of_previous_week = start_of_current_week - timedelta(microseconds=1)
        start_of_previous_week = (end_of_previous_week - timedelta(days=6)).replace(hour=0, minute=0, second=0, microsecond=0)

        current_period_df = self.df[(self.df['datetime'] >= start_of_current_week) & (self.df['datetime'] <= end_of_current_week)]
        previous_period_df = self.df[(self.df['datetime'] >= start_of_previous_week) & (self.df['datetime'] <= end_of_previous_week)]

        return current_period_df, previous_period_df, "This Week vs. Last Week"

    def _calculate_headline_kpis(self, current_df, previous_df):
        """Revenue / profit / transaction-count KPIs with period-over-period % change."""
        current_revenue = current_df['Revenue'].sum()
        previous_revenue = previous_df['Revenue'].sum()
        current_profit = current_df['GrossProfit'].sum()
        previous_profit = previous_df['GrossProfit'].sum()
        # Hoisted: nunique() was previously computed twice per period.
        current_tx = current_df['Invoice_Number'].nunique()
        previous_tx = previous_df['Invoice_Number'].nunique()

        def calc_change(current, previous):
            # No baseline: report +100% growth if anything happened, else flat.
            if previous == 0:
                return "+100%" if current > 0 else "0.0%"
            return f"{((current - previous) / previous) * 100:+.1f}%"

        return {
            "Total Revenue": f"{self.currency} {current_revenue:,.2f} ({calc_change(current_revenue, previous_revenue)})",
            "Gross Profit": f"{self.currency} {current_profit:,.2f} ({calc_change(current_profit, previous_profit)})",
            "Transactions": f"{current_tx} ({calc_change(current_tx, previous_tx)})"
        }

    def get_business_intelligence_briefing(self) -> dict:
        """Assemble the full KPI briefing dict used by the fallback prompt."""
        if self.df.empty:
            return {"Status": "No sales data available to generate a briefing."}

        current_df, previous_df, summary_period = self._get_comparison_timeframes()
        if current_df.empty:
            return {"Status": f"No sales data was found for the current period ({summary_period})."}

        headline_kpis = self._calculate_headline_kpis(current_df, previous_df)
        baskets = current_df.groupby('Invoice_Number').agg(BasketProfit=('GrossProfit', 'sum'), ItemsPerBasket=('Units_Sold', 'sum'))
        products_by_profit = current_df.groupby('Product')['GrossProfit'].sum()
        products_by_units = current_df.groupby('Product')['Units_Sold'].sum()
        tellers_by_profit = current_df.groupby('Teller_Username')['GrossProfit'].sum()
        profit_by_hour = current_df.groupby('HourOfDay')['GrossProfit'].sum()

        # Single-entity datasets have no meaningful best/worst ranking.
        product_intelligence = {}
        if len(products_by_profit) > 1:
            profitable = products_by_profit[products_by_profit > 0]
            product_intelligence = {
                "Best in Class (Most Profitable)": products_by_profit.idxmax(),
                "Workhorse (Most Units Sold)": products_by_units.idxmax(),
                "Underperformer (Least Profitable)": profitable.idxmin() if not profitable.empty else "N/A"
            }
        elif not products_by_profit.empty:
            product_intelligence = {"Only Product Sold": products_by_profit.index[0]}

        staff_intelligence = {}
        if len(tellers_by_profit) > 1:
            staff_intelligence = {"Top Performing Teller (by Profit)": tellers_by_profit.idxmax()}
        elif not tellers_by_profit.empty:
            staff_intelligence = {"Only Teller": tellers_by_profit.index[0]}

        return {
            "Summary Period": summary_period,
            "Performance Snapshot (vs. Prior Period)": headline_kpis,
            "Basket Analysis": {
                "Average Profit per Basket": f"{self.currency} {baskets['BasketProfit'].mean():,.2f}",
                "Average Items per Basket": f"{baskets['ItemsPerBasket'].mean():,.1f}"
            },
            "Product Intelligence": product_intelligence,
            "Staff & Operations": {
                **staff_intelligence,
                "Most Profitable Hour": f"{profit_by_hour.idxmax()}:00" if not profit_by_hour.empty else "N/A"
            }
        }

    def synthesize_fallback_response(self, briefing: dict, user_question: str) -> str:
        """Ask the LLM to narrate the briefing as a graceful answer to the user."""
        fallback_prompt = f"""
        You are Iris, an expert business data analyst. Your primary role is to provide intelligent insights and help the user with their business.
        You were unable to process a complex user query. Do not mention the error. Instead, gracefully pivot by presenting a "Business Intelligence Briefing".
        Structure your response with clear markdown headings for each section of the briefing data.
        Crucially, interpret the data in the "Performance Snapshot" - highlight the percentage changes as indicators of trends (e.g., "Revenue is up by 15.2%...").

        using the business data also provide insight and suggest improvements and ideas where necessary.

        User's Original Question: "{user_question}"
        Business Intelligence Briefing Data: {json.dumps(briefing, indent=2, ensure_ascii=False)}
        """
        response = self.llm.invoke(fallback_prompt)
        return response.content if hasattr(response, 'content') else str(response)
186
+
187
# --- REFACTORED /chat Endpoint with Correct Tiered Logic ---
@app.route("/chat", methods=["POST"])
@cross_origin()
def bot():
    """Answer a business question about a profile's transactions.

    Tier 1 asks PandasAI over the raw transaction DataFrame; on any failure
    (exception, None, or a canned apology string) Tier 2 builds a KPI
    briefing with IrisReportEngine and has the LLM narrate it; Tier 3 is
    the catch-all error response.
    """
    logger.info("=== Starting /chat endpoint ===")
    try:
        # 1. Request validation and data fetching.
        # Bug fix: get_json() returns None for a missing/invalid JSON body,
        # which previously raised AttributeError and surfaced as a 500
        # instead of the intended 400.
        request_json = request.get_json(silent=True) or {}
        profile_id = request_json.get("profile_id")
        user_question = request_json.get("user_question")
        if not profile_id or not user_question:
            return jsonify({"error": "Missing 'profile_id' or 'user_question'."}), 400

        API_URL = "https://irisplustech.com/public/api/business/profile/user/get-recent-transactions-v2"
        # NOTE(review): requests form-encodes this value again, so quote_plus
        # double-encodes IDs with special characters — confirm the API expects that.
        response = requests.post(API_URL, data={'profile_id': urllib.parse.quote_plus(str(profile_id))}, timeout=30)
        response.raise_for_status()
        transactions = response.json().get("transactions")
        # Robustness: also reject a non-list payload before it reaches pandas.
        if not transactions or not isinstance(transactions, list):
            return jsonify({"answer": "No transaction data was found for this profile."})

        # --- TIER 1 (DEFAULT): PANDASAI FIRST ---
        try:
            logger.info("Attempting to answer with Tier 1 (PandasAI)...")
            df = pd.DataFrame(transactions)

            pandas_agent = SmartDataframe(df, config={
                "llm": llm, "response_parser": FlaskResponse,
                "custom_whitelisted_dependencies": [
                    "os", "io", "sys", "chr", "glob",
                    "b64decoder", "collections", "geopy",
                    "geopandas", "wordcloud", "builtins"
                ],
                "security": "none", "save_charts_path": user_defined_path,
                "save_charts": False, "enable_cache": False, "conversational": True
            })
            answer = pandas_agent.chat(user_question)

            # Robustness check: treat None or a canned apology as a soft failure.
            is_failure = answer is None
            if is_failure:
                logger.warning("PandasAI returned None. Triggering fallback.")
            elif isinstance(answer, str):
                fail_strings = ["i am sorry", "i cannot answer", "an error occurred", "unable to answer"]
                if any(s in answer.lower() for s in fail_strings):
                    is_failure = True
                    logger.warning(f"PandasAI returned a failure string: '{answer}'. Triggering fallback.")

            if not is_failure:
                logger.info("Successfully answered with Tier 1 (PandasAI).")
                if isinstance(answer, pd.DataFrame):
                    formatted_answer = answer.to_html()
                elif isinstance(answer, plt.Figure):
                    buf = io.BytesIO()
                    answer.savefig(buf, format="png")
                    formatted_answer = f"data:image/png;base64,{base64.b64encode(buf.getvalue()).decode('utf-8')}"
                else:
                    formatted_answer = str(answer)
                return jsonify({"answer": formatted_answer})

        except Exception as e:
            logger.warning(f"Tier 1 (PandasAI) failed with exception: '{e}'. Proceeding to Tier 2 Fallback.")

        # --- TIER 2 (GRACEFUL FALLBACK): COMPREHENSIVE KPI ANALYST ---
        logger.info("Executing Tier 2 Fallback: IrisReportEngine.")
        engine = IrisReportEngine(transactions_data=transactions, llm_instance=llm)
        briefing = engine.get_business_intelligence_briefing()
        fallback_answer = engine.synthesize_fallback_response(briefing, user_question)
        return jsonify({"answer": fallback_answer})

    except requests.exceptions.RequestException as e:
        logger.error(f"API connection error: {e}")
        return jsonify({"error": "Could not connect to the transaction API.", "details": str(e)}), 503
    except Exception as e:
        # TIER 3 (FINAL SAFETY NET)
        logger.exception("A critical unexpected error occurred in /chat endpoint")
        return jsonify({"error": "An unexpected server error occurred.", "details": str(e)}), 500
261
 
262
+ # --- UNCHANGED ENDPOINTS ---
 
263
  @app.route("/report", methods=["POST"])
264
  @cross_origin()
265
  def busines_report():
266
  logger.info("=== Starting /report endpoint ===")
 
267
  try:
268
  request_json = request.get_json()
269
  json_data = request_json.get("json_data") if request_json else None
270
+ prompt = "You are Quantilytix business analyst. Analyze the following data and generate a comprehensive and insightful business report, including appropriate key perfomance indicators and recommendations Use markdown formatting and tables where necessary. only return the report and nothing else.\ndata:\n" + str(json_data)
 
 
 
 
 
 
 
 
271
  response = model.generate_content(prompt)
272
+ return jsonify(str(response.text))
 
 
 
 
273
  except Exception as e:
274
  logger.exception("Error in /report endpoint")
275
  return jsonify({"error": "Failed to generate report.", "details": str(e)}), 500
276
 
 
 
277
  @app.route("/marketing", methods=["POST"])
278
  @cross_origin()
279
  def marketing():
280
  logger.info("=== Starting /marketing endpoint ===")
 
281
  try:
282
  request_json = request.get_json()
283
  json_data = request_json.get("json_data") if request_json else None
284
+ prompt = "You are an Quantilytix Marketing Specialist. Analyze the following data and generate a comprehensive marketing strategy, Only return the marketing strategy. be very creative:\n" + str(json_data)
 
 
 
 
 
 
 
285
  response = model.generate_content(prompt)
286
+ return jsonify(str(response.text))
 
 
 
 
287
  except Exception as e:
288
  logger.exception("Error in /marketing endpoint")
289
  return jsonify({"error": "Failed to generate marketing strategy.", "details": str(e)}), 500
290
 
 
 
291
  @app.route("/notify", methods=["POST"])
292
  @cross_origin()
293
  def notifications():
294
  logger.info("=== Starting /notify endpoint ===")
 
295
  try:
296
  request_json = request.get_json()
297
  json_data = request_json.get("json_data") if request_json else None
298
+ prompt = "You are Quantilytix business analyst. Write a very brief analysis and marketing tips using this business data. your output should be suitable for a notification dashboard so no quips.\n" + str(json_data)
 
 
 
 
 
 
 
299
  response = model.generate_content(prompt)
300
+ return jsonify(str(response.text))
 
 
 
 
301
  except Exception as e:
302
  logger.exception("Error in /notify endpoint")
303
  return jsonify({"error": "Failed to generate notification content.", "details": str(e)}), 500
304
 
 
305
  if __name__ == "__main__":
 
 
306
  app.run(debug=True, host="0.0.0.0", port=7860)