yoursdvniel committed on
Commit
1ce5fa1
·
verified ·
1 Parent(s): 44d7acc

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +35 -6
main.py CHANGED
@@ -7,13 +7,36 @@ from openai_client import ask_gpt
7
  from prompt_instructions import build_system_message
8
  from role_access import get_allowed_collections
9
  from data_fetcher import fetch_data_from_firestore
10
- from data_planner import determine_data_requirements # 🧠 Replaces inline planner
11
 
12
  app = Flask(__name__)
13
  CORS(app)
14
 
15
  db = get_firestore_client()
16
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
  @app.route('/chat', methods=['POST'])
18
  def chat():
19
  data = request.json
@@ -24,20 +47,25 @@ def chat():
24
  if not role or not user_input or not company_code:
25
  return jsonify({"error": "Missing role, message, or companyCode"}), 400
26
 
27
- # 🧠 Step 1: Use Gemini to plan the data to fetch
 
 
28
  planning_result = determine_data_requirements(user_input, company_code)
29
- print("📋 Data Plan:", planning_result)
30
 
31
  if "error" in planning_result:
32
  return jsonify({"reply": f"⚠️ Planning error: {planning_result['error']}"})
33
 
34
  try:
35
- firestore_data = fetch_data_from_firestore(planning_result)
36
- print("📦 Firestore Fetched Data:", firestore_data)
 
 
 
37
  except Exception as e:
38
  return jsonify({"reply": f"⚠️ Firestore fetch error: {str(e)}"})
39
 
40
- # 💬 Step 2: Ask Gemini to answer using that data
41
  system_msg = build_system_message(company_code)
42
  data_msg = {
43
  "role": "system",
@@ -48,5 +76,6 @@ def chat():
48
  final_response = ask_gpt([system_msg, data_msg, user_msg])
49
  return jsonify({ "reply": final_response })
50
 
 
51
  if __name__ == "__main__":
52
  app.run(host="0.0.0.0", port=7860)
 
7
  from prompt_instructions import build_system_message
8
  from role_access import get_allowed_collections
9
  from data_fetcher import fetch_data_from_firestore
10
+ from data_planner import determine_data_requirements # 🧠 Gemini planner
11
 
12
  app = Flask(__name__)
13
  CORS(app)
14
 
15
  db = get_firestore_client()
16
 
17
def normalize_plan(plan: dict) -> dict:
    """Convert a raw Gemini planning dict into the Firestore fetch format.

    The planner returns ``{"collections": [...], "filters": {field: value}}``;
    the fetcher expects each collection as a dict with an explicit filter list
    and a result limit.

    Args:
        plan: Raw planner output. Missing keys are treated as empty.

    Returns:
        ``{"collections": [{"name": ..., "filters": [...], "limit": 50}, ...]}``
        where every collection entry shares the same normalized filter list.
    """
    raw_filters = plan.get("filters", {})
    normalized_filters = [
        {
            "field": field,
            "op": "==",
            # The planner sometimes emits status "running"; the value actually
            # stored in Firestore is "active", so remap it here.
            "value": "active" if field == "status" and value == "running" else value,
        }
        for field, value in raw_filters.items()
    ]
    return {
        "collections": [
            {
                "name": collection_name,
                "filters": normalized_filters,
                # Cap each collection query at 50 documents.
                "limit": 50,
            }
            for collection_name in plan.get("collections", [])
        ]
    }
38
+
39
+
40
  @app.route('/chat', methods=['POST'])
41
  def chat():
42
  data = request.json
 
47
  if not role or not user_input or not company_code:
48
  return jsonify({"error": "Missing role, message, or companyCode"}), 400
49
 
50
+ print("🧠 Incoming user message:", user_input)
51
+
52
+ # 🧠 Step 1: Gemini plans what data to fetch
53
  planning_result = determine_data_requirements(user_input, company_code)
54
+ print("📋 Raw planning result:", planning_result)
55
 
56
  if "error" in planning_result:
57
  return jsonify({"reply": f"⚠️ Planning error: {planning_result['error']}"})
58
 
59
  try:
60
+ normalized_plan = normalize_plan(planning_result)
61
+ print("🛠️ Normalized Plan:", normalized_plan)
62
+
63
+ firestore_data = fetch_data_from_firestore(normalized_plan)
64
+ print("📦 Fetched Firestore Data:", firestore_data)
65
  except Exception as e:
66
  return jsonify({"reply": f"⚠️ Firestore fetch error: {str(e)}"})
67
 
68
+ # 🤖 Step 2: Use Gemini to interpret and answer
69
  system_msg = build_system_message(company_code)
70
  data_msg = {
71
  "role": "system",
 
76
  final_response = ask_gpt([system_msg, data_msg, user_msg])
77
  return jsonify({ "reply": final_response })
78
 
79
+
80
# Script entry point: run the Flask dev server on all interfaces at
# port 7860 (the port Hugging Face Spaces expects a Space to bind).
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860)