rairo commited on
Commit
cd3af11
·
verified ·
1 Parent(s): dd02dc2

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +88 -68
main.py CHANGED
@@ -17,6 +17,7 @@ import google.generativeai as genai
17
  import uuid
18
  import base64
19
  from io import BytesIO
 
20
 
21
  load_dotenv()
22
 
@@ -62,6 +63,7 @@ class FlaskResponse(ResponseParser):
62
 
63
 
64
  gemini_api_key = os.getenv('Gemini')
 
65
  llm = ChatGoogleGenerativeAI(api_key=gemini_api_key, model='gemini-2.0-flash-thinking-exp', temperature=0.1)
66
 
67
  gemini_api_key = os.environ['Gemini']
@@ -74,6 +76,7 @@ generation_config = {
74
  "max_output_tokens": 5000,
75
  }
76
 
 
77
  model = genai.GenerativeModel(
78
  model_name="gemini-2.0-flash-lite-001",
79
  generation_config=generation_config,
@@ -85,63 +88,95 @@ new_filename = f"{guid}"
85
  user_defined_path = os.path.join("/exports/charts", new_filename)
86
 
87
 
88
# Endpoint for chat
@app.route("/chat", methods=["POST"])
@cross_origin()
def bot():
    """Answer a natural-language question about a caller-supplied JSON table.

    Expects a JSON body with:
        json_table    -- tabular data (list of records / dict of columns)
        user_question -- the question to ask about that data

    Returns JSON {"answer": <str>} where the answer is HTML for a
    DataFrame result, a base64 data-URI for a matplotlib figure, and
    plain text otherwise.
    """
    # Retrieve parameters from the request
    json_table = request.json.get("json_table")
    user_question = request.json.get("user_question")
    print("User question:", user_question)

    # SECURITY FIX: request.json is already parsed Python data, so the
    # original `eval(str(json_table))` was redundant AND let a caller
    # execute arbitrary code via the request body. Build the frame directly.
    df = pd.DataFrame(json_table)
    print("Columns in dataframe:", list(df.columns))

    # Create a SmartDataframe instance using the shared LLM configuration.
    # NOTE(review): "security": "none" plus a broad dependency whitelist is
    # risky for untrusted input — confirm this service is internal-only.
    pandas_agent = SmartDataframe(
        df,
        config={
            "llm": llm,
            "response_parser": FlaskResponse,
            "custom_whitelisted_dependencies": [
                "os",
                "io",
                "sys",
                "chr",
                "glob",
                "b64decoder",
                "collections",
                "geopy",
                "geopandas",
                "wordcloud",
                "builtins",
            ],
            "security": "none",
            "save_charts_path": user_defined_path,
            "save_charts": False,
            "enable_cache": False,
            "conversational": True,
        },
    )

    # Get the answer from the agent
    answer = pandas_agent.chat(user_question)

    # Process the answer based on its type
    if isinstance(answer, pd.DataFrame):
        formatted_answer = answer.to_html()
    elif isinstance(answer, plt.Figure):
        # Render the figure to PNG and inline it as a data URI.
        buf = io.BytesIO()
        answer.savefig(buf, format="png")
        buf.seek(0)
        image_base64 = base64.b64encode(buf.read()).decode("utf-8")
        formatted_answer = f"data:image/png;base64,{image_base64}"
    else:
        # Numbers and any other result type are stringified.
        formatted_answer = str(answer)

    # Return the formatted answer as JSON.
    return jsonify({"answer": formatted_answer})
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
145
 
146
  # Reports endpoint
147
  @app.route("/report", methods=["POST"])
@@ -178,22 +213,7 @@ You are a Quantilytix Marketing Specialist. Analyze the following data and gene
178
 
179
  return jsonify(str(report))
180
 
181
# Business Plan
@app.route("/bplan", methods=["POST"])
@cross_origin()
def business_plan():
    """Generate a markdown business plan from caller-supplied JSON data.

    Expects a JSON body with key "json_data"; sends it to the Gemini
    model and returns the generated report as a JSON string.
    """
    json_data = request.json.get("json_data")

    # FIX: corrected "repor" -> "report" so the model receives the
    # intended instruction.
    prompt = """
    You are a Quantilytix business analyst. Analyze the following data and generate a comprehensive business plan to help the business look for funding and support. Use markdown formatting and tables where necessary. Only return the report and nothing else. The data:
    """ + str(json_data)

    response = model.generate_content(prompt)
    report = response.text

    return jsonify(str(report))
197
 
198
198
  # Notifications
199
  @app.route("/notify", methods=["POST"])
 
17
  import uuid
18
  import base64
19
  from io import BytesIO
20
+ import requests # Added for making API calls
21
 
22
  load_dotenv()
23
 
 
63
 
64
 
65
  gemini_api_key = os.getenv('Gemini')
66
+ # --- Model name reverted to your original specification ---
67
  llm = ChatGoogleGenerativeAI(api_key=gemini_api_key, model='gemini-2.0-flash-thinking-exp', temperature=0.1)
68
 
69
  gemini_api_key = os.environ['Gemini']
 
76
  "max_output_tokens": 5000,
77
  }
78
 
79
+ # --- Model name reverted to your original specification ---
80
  model = genai.GenerativeModel(
81
  model_name="gemini-2.0-flash-lite-001",
82
  generation_config=generation_config,
 
88
  user_defined_path = os.path.join("/exports/charts", new_filename)
89
 
90
 
91
# --- REFACTORED Endpoint for chat ---
@app.route("/chat", methods=["POST"])
@cross_origin()
def bot():
    """Answer a natural-language question about a profile's recent transactions.

    Expects a JSON body with:
        profile_id    -- identifier passed to the external transaction API
        user_question -- the question to ask about the fetched transactions

    Fetches transaction data from the Iris Plus API, loads it into a
    PandasAI SmartDataframe, and returns JSON {"answer": <str>} — HTML
    for a DataFrame result, a base64 data-URI for a matplotlib figure,
    plain text otherwise. Error responses carry an "error" key with an
    appropriate HTTP status (400 client error, 502 upstream failure,
    500 unexpected failure).
    """
    # Retrieve and validate parameters from the request
    profile_id = request.json.get("profile_id")
    user_question = request.json.get("user_question")

    if not profile_id or not user_question:
        return jsonify({"error": "Missing 'profile_id' or 'user_question' in request."}), 400

    print(f"Received request for profile_id: {profile_id}")
    print(f"User question: {user_question}")

    # Fetch data from the external API
    API_URL = "https://irisplustech.com/public/api4/business/profile/user/get-recent-transactions-v2"
    payload = {'profile_id': profile_id}

    try:
        # FIX: always pass a timeout — without one a stalled upstream API
        # blocks this worker indefinitely.
        response = requests.post(API_URL, data=payload, timeout=30)
        # Check if the request was successful
        if response.status_code != 200:
            return jsonify({
                "error": "Failed to fetch data from the transaction API.",
                "status_code": response.status_code,
                "details": response.text
            }), 502  # Bad Gateway

        api_data = response.json()

        # Check for API-level errors reported in the payload itself
        if api_data.get("error"):
            return jsonify({
                "error": "Transaction API returned an error.",
                "message": api_data.get("message", "No message provided.")
            }), 400

        transactions = api_data.get("transactions")
        if transactions is None or not isinstance(transactions, list):
            return jsonify({"error": "Invalid data format from transaction API. 'transactions' key is missing or not a list."}), 500

        if not transactions:
            return jsonify({"answer": "No transaction data was found for this profile. I can't answer any questions."})

        # Convert the transaction data into a dataframe
        df = pd.DataFrame(transactions)
        print("Columns in dataframe:", list(df.columns))

        # Create a SmartDataframe instance using the shared configuration.
        # NOTE(review): "security": "none" plus this whitelist is risky for
        # untrusted input — confirm this service is internal-only.
        pandas_agent = SmartDataframe(
            df,
            config={
                "llm": llm,
                "response_parser": FlaskResponse,
                "custom_whitelisted_dependencies": [
                    "os", "io", "sys", "chr", "glob",
                    "b64decoder", "collections", "geopy",
                    "geopandas", "wordcloud", "builtins"
                ],
                "security": "none", "save_charts_path": user_defined_path,
                "save_charts": False, "enable_cache": False, "conversational": True
            }
        )

        # Get the answer from the agent
        answer = pandas_agent.chat(user_question)

        # Process the answer based on its type
        if isinstance(answer, pd.DataFrame):
            formatted_answer = answer.to_html()
        elif isinstance(answer, plt.Figure):
            # Render the figure to PNG and inline it as a data URI.
            buf = io.BytesIO()
            answer.savefig(buf, format="png")
            buf.seek(0)
            image_base64 = base64.b64encode(buf.read()).decode("utf-8")
            formatted_answer = f"data:image/png;base64,{image_base64}"
        else:
            formatted_answer = str(answer)

        # Return the formatted answer as JSON.
        return jsonify({"answer": formatted_answer})

    except requests.exceptions.RequestException as e:
        # Connection errors, DNS failures, and the new timeout land here.
        return jsonify({"error": "Could not connect to the transaction API.", "details": str(e)}), 500
    except Exception as e:
        # Top-level boundary: log the full traceback, return a generic 500.
        logging.exception("An unexpected error occurred in /chat endpoint")
        return jsonify({"error": "An unexpected server error occurred.", "details": str(e)}), 500
180
 
181
  # Reports endpoint
182
  @app.route("/report", methods=["POST"])
 
213
 
214
  return jsonify(str(report))
215
 
216
+ # Business Plan endpoint REMOVED
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
217
 
218
218
  # Notifications
219
  @app.route("/notify", methods=["POST"])