classes/response_df_overview.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from io import StringIO
2
+ from urllib.parse import urlparse
3
+ import streamlit as st
4
+ import requests
5
+ from dotenv import load_dotenv
6
+ import os
7
+ from helper.upload_response import upload_response
8
+ from helper.upload_File import uploadFile
9
+ import json
10
+ from pymongo import MongoClient
11
+
12
class dfOverview:
    """"DF Overview" analyst page.

    Builds a text payload from the latest stored client data in MongoDB,
    POSTs it to the configured model endpoint, and uploads the model's
    response for later display.
    """

    def __init__(self, model_url):
        """
        Args:
            model_url: URL of the model endpoint that receives the payload.
        """
        self.uploaded_files = []
        self.file_dict = {}
        self.model_url = model_url
        self.initialize()
        self.row1()

    def initialize(self):
        # Load environment variables (MONGODB_URI, x-api-key, model URLs).
        load_dotenv()

    def request_model(self, payload_txt, headers):
        """POST *payload_txt* to the model endpoint and return the text result.

        Raises:
            requests.HTTPError: if the endpoint responds with an error status.
            KeyError / IndexError: if the response JSON lacks the expected
                nested envelope.
        """
        response = requests.post(self.model_url, json=payload_txt, headers=headers)
        response.raise_for_status()
        output = response.json()
        # Langflow-style envelope: the text lives deep inside the response.
        text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
        st.write(text)  # echo the model output on the page
        return text

    def fetch_data(self, data_field):
        """Return the most recent stored ``result`` for *data_field*.

        Returns an empty string when no matching document exists (the
        previous code raised ``TypeError`` subscripting a ``None`` lookup
        result), and closes the Mongo client instead of leaking the
        connection pool.
        """
        mongodb_uri = os.getenv("MONGODB_URI")
        with MongoClient(mongodb_uri) as myclient:
            mydb = myclient.get_database()
            mycol = mydb["df_data"]
            # Latest document wins: sort by timestamp descending.
            doc = mycol.find_one(
                {"data_field": data_field},
                sort=[("timestamp", -1)]
            )
        return "" if doc is None else doc["result"]

    def process(self):
        """Send the accumulated payload to the model and store the response."""
        with st.spinner('DF Overview Analyst...', show_time=True):
            st.write('')
            headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
            try:
                payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
                payload_txt_model = self.request_model(payload_txt, headers)
                debug_info = {'data_field': 'DF Overview Analyst', 'result': payload_txt_model}
                upload_response(debug_info)

                st.session_state['target_market'] = ''
            except Exception as e:
                # Surface the failure on the page instead of silently
                # swallowing it (was: except Exception: pass).
                st.error(f"DF Overview Analyst failed: {e}")
        st.session_state['analyzing'] = False

    def row1(self):
        """Assemble the payload from stored client data and run the analysis."""
        st.session_state['analyzing'] = False
        self.payload = ""
        count = 0
        # Only run once the client summary has been uploaded; .get avoids a
        # KeyError when the session key has not been set yet.
        if st.session_state.get('client_summary') == 'uploaded':
            try:
                self.payload += self.fetch_data("Client Summary")
                self.payload += self.fetch_data("Client Name")
                count += 1
            except Exception as e:
                # Best-effort: warn but do not crash the page.
                st.warning(f"Could not load client data: {e}")

        if count >= 1:
            self.process()
87
+
88
+
89
# Allow running this page standalone: configure the layout and show the
# file uploader so the analyst has data to work with.
if __name__ == "__main__":
    st.set_page_config(layout="wide")

    upload = uploadFile()
classes/response_executive_summary.py ADDED
@@ -0,0 +1,115 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import requests
3
+ from dotenv import load_dotenv
4
+ import os
5
+ from helper.upload_response import upload_response
6
+ from helper.upload_File import uploadFile
7
+ from pymongo import MongoClient
8
+ from helper.data_field import get_analyst_response
9
+
10
+
11
class ExecutiveSummary:
    """Executive Summary analyst.

    Aggregates every upstream analyst's latest stored response into one
    labeled text payload, sends it to the summary model endpoint, and
    uploads the model's response.
    """

    def __init__(self, model_url):
        """
        Args:
            model_url: URL of the model endpoint that receives the payload.
        """
        self.uploaded_files = []
        self.file_dict = {}
        self.model_url = model_url
        self.initialize()
        self.row1()

    def initialize(self):
        # Load environment variables (MONGODB_URI, x-api-key, model URLs).
        load_dotenv()

    def request_model(self, payload_txt, headers):
        """POST *payload_txt* to the model endpoint and return the text result.

        Raises:
            requests.HTTPError: if the endpoint responds with an error status.
            KeyError / IndexError: if the response JSON lacks the expected
                nested envelope.
        """
        response = requests.post(self.model_url, json=payload_txt, headers=headers)
        response.raise_for_status()
        output = response.json()
        # Langflow-style envelope: the text lives deep inside the response.
        text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
        return text

    def fetch_data(self, data_field):
        """Return the most recent stored ``result`` for *data_field*.

        Returns an empty string when no matching document exists (the
        previous code raised ``TypeError`` subscripting a ``None`` lookup
        result), and closes the Mongo client instead of leaking the
        connection pool.
        """
        mongodb_uri = os.getenv("MONGODB_URI")
        with MongoClient(mongodb_uri) as myclient:
            mydb = myclient.get_database()
            mycol = mydb["df_data"]
            # Latest document wins: sort by timestamp descending.
            doc = mycol.find_one(
                {"data_field": data_field},
                sort=[("timestamp", -1)]
            )
        return "" if doc is None else doc["result"]

    @staticmethod
    def _build_payload(analyst_data_dict):
        """Concatenate analyst results into one labeled text payload.

        Each entry becomes a ``--- <name> Analysis ---`` section; list
        values are joined line by line, everything else is stringified.
        """
        sections = []
        for analyst_name, data in analyst_data_dict.items():
            body = "\n".join(map(str, data)) if isinstance(data, list) else str(data)
            sections.append(f"\n\n--- {analyst_name} Analysis ---\n{body}")
        return "".join(sections)

    def process(self):
        """Send the accumulated payload to the model and store the response."""
        with st.spinner('Executive Summary...', show_time=True):
            st.write('')
            headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
            try:
                payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
                payload_txt_model = self.request_model(payload_txt, headers)
                debug_info = {'data_field': 'Executive Summary', 'result': payload_txt_model}
                upload_response(debug_info)

            except Exception as e:
                # Surface the failure on the page instead of silently
                # swallowing it (was: except Exception: pass).
                st.error(f"Executive Summary Analyst failed: {e}")
        st.session_state['analyzing'] = False

    def row1(self):
        """Collect all analyst responses, build the payload, run the model."""
        st.session_state['analyzing'] = False

        # Latest stored response from each upstream analyst.
        # NOTE: the original swapped the *variable names* for the
        # pull-through and website-audience sources (the dict mapping was
        # still correct); names now match their sources. The "marketpalce"
        # typo is also fixed.
        self.website_and_tools_data = get_analyst_response("Website and Tools Analyst")
        self.sem_data = get_analyst_response("SEM/PPC Analyst")
        self.seo_data = get_analyst_response("SEO Analyst")
        self.on_page_data = get_analyst_response("On Page Analyst")
        self.off_page_data = get_analyst_response("Off Page Analyst")
        self.social_media_data = get_analyst_response("Social Media Analyst")
        self.content_data = get_analyst_response("Content Analyst")
        self.marketplace_data = get_analyst_response("Marketplace Analyst")
        self.target_market_data = get_analyst_response("Target Market Analyst")
        self.pull_through_data = get_analyst_response("Pull through offers Analyst")
        self.website_audience_data = get_analyst_response("Website Audience Acquisition Analyst")
        self.lld_data = get_analyst_response("LLD/PM/LN Analyst")
        self.pna_data = get_analyst_response("Content - Process and Assets Analyst")

        analyst_data_dict = {
            "Website and Tools": self.website_and_tools_data,
            "SEM/PPC": self.sem_data,
            "SEO": self.seo_data,
            "On Page": self.on_page_data,
            "Off Page": self.off_page_data,
            "Social Media": self.social_media_data,
            "Content": self.content_data,
            "Marketplace": self.marketplace_data,
            "Target Market": self.target_market_data,
            "Pull through offers": self.pull_through_data,
            "Website Audience Acquisition": self.website_audience_data,
            "LLD/PM/LN": self.lld_data,
            "Content - Process and Assets": self.pna_data,
        }

        self.payload = self._build_payload(analyst_data_dict)
        self.process()
111
+
112
# Allow running this page standalone: configure the layout and show the
# file uploader so the analyst has data to work with.
if __name__ == "__main__":
    st.set_page_config(layout="wide")

    upload = uploadFile()
classes/response_snapshot.py ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import requests
3
+ from dotenv import load_dotenv
4
+ import os
5
+ from helper.upload_response import upload_response
6
+ from helper.upload_File import uploadFile
7
+ from pymongo import MongoClient
8
+ from helper.data_field import get_analyst_response
9
+ import json
10
+
11
+
12
class Snapshot:
    """Snapshot-by-channel analyst.

    Aggregates a subset of analyst responses into one labeled text payload,
    sends it to the snapshot model endpoint, and uploads the model's
    (JSON-decoded) response.
    """

    def __init__(self, model_url):
        """
        Args:
            model_url: URL of the model endpoint that receives the payload.
        """
        self.uploaded_files = []
        self.file_dict = {}
        self.model_url = model_url
        self.initialize()
        self.row1()

    def initialize(self):
        # Load environment variables (MONGODB_URI, x-api-key, model URLs).
        load_dotenv()

    def request_model(self, payload_txt, headers):
        """POST *payload_txt* to the model endpoint and return the parsed result.

        The model returns a JSON document encoded as text, so the extracted
        text is decoded with ``json.loads`` before being returned.

        Raises:
            requests.HTTPError: if the endpoint responds with an error status.
            KeyError / IndexError: if the response JSON lacks the expected
                nested envelope.
            json.JSONDecodeError: if the extracted text is not valid JSON.
        """
        response = requests.post(self.model_url, json=payload_txt, headers=headers)
        response.raise_for_status()
        output = response.json()
        # Langflow-style envelope: the text lives deep inside the response.
        text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
        text = json.loads(text)
        return text

    def fetch_data(self, data_field):
        """Return the most recent stored ``result`` for *data_field*.

        Returns an empty string when no matching document exists (the
        previous code raised ``TypeError`` subscripting a ``None`` lookup
        result), and closes the Mongo client instead of leaking the
        connection pool.
        """
        mongodb_uri = os.getenv("MONGODB_URI")
        with MongoClient(mongodb_uri) as myclient:
            mydb = myclient.get_database()
            mycol = mydb["df_data"]
            # Latest document wins: sort by timestamp descending.
            doc = mycol.find_one(
                {"data_field": data_field},
                sort=[("timestamp", -1)]
            )
        return "" if doc is None else doc["result"]

    @staticmethod
    def _build_payload(analyst_data_dict):
        """Concatenate analyst results into one labeled text payload.

        Each entry becomes a ``--- <name> Analysis ---`` section; list
        values are joined line by line, everything else is stringified.
        """
        sections = []
        for analyst_name, data in analyst_data_dict.items():
            body = "\n".join(map(str, data)) if isinstance(data, list) else str(data)
            sections.append(f"\n\n--- {analyst_name} Analysis ---\n{body}")
        return "".join(sections)

    def process(self):
        """Send the accumulated payload to the model and store the response."""
        with st.spinner('Snapshot...', show_time=True):
            st.write('')
            headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
            try:
                payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
                payload_txt_model = self.request_model(payload_txt, headers)
                debug_info = {'data_field': 'Snapshot Analyst', 'result': payload_txt_model}
                upload_response(debug_info)

            except Exception as e:
                # Surface the failure on the page instead of silently
                # swallowing it (was: except Exception: pass).
                st.error(f"Snapshot Analyst failed: {e}")
        st.session_state['analyzing'] = False

    def row1(self):
        """Collect channel analyst responses, build the payload, run the model."""
        st.session_state['analyzing'] = False

        # Latest stored response from each channel analyst.
        # "marketpalce" typo in the original attribute name is fixed.
        self.website_and_tools_data = get_analyst_response("Website and Tools Analyst")
        self.sem_data = get_analyst_response("SEM/PPC Analyst")
        self.seo_data = get_analyst_response("SEO Analyst")
        self.social_media_data = get_analyst_response("Social Media Analyst")
        self.content_data = get_analyst_response("Content Analyst")
        self.marketplace_data = get_analyst_response("Marketplace Analyst")

        analyst_data_dict = {
            "Website and Tools": self.website_and_tools_data,
            "SEM/PPC": self.sem_data,
            "SEO": self.seo_data,
            "Social Media": self.social_media_data,
            "Content": self.content_data,
            "Marketplace": self.marketplace_data,
        }

        self.payload = self._build_payload(analyst_data_dict)
        self.process()
97
+
98
# Allow running this page standalone: configure the layout and show the
# file uploader so the analyst has data to work with.
if __name__ == "__main__":
    st.set_page_config(layout="wide")

    upload = uploadFile()
helper/data_field.py CHANGED
@@ -5,9 +5,9 @@ def data_field(data_src):
5
  myclient = MongoClient(mongodb_uri)
6
  mydb = myclient.get_database()
7
  mycol = mydb["df_data"]
8
- x = mycol.find_one({"data_field": data_src})
9
- x = x["result"]["question"]
10
- #st.write(x)
11
  return x
12
 
13
  def get_analyst_response(data_src):
 
5
  myclient = MongoClient(mongodb_uri)
6
  mydb = myclient.get_database()
7
  mycol = mydb["df_data"]
8
+ x = mycol.find_one({"data_field": data_src},
9
+ sort=[('timestamp', -1)])
10
+ x = x["result"]
11
  return x
12
 
13
  def get_analyst_response(data_src):
pages/analyzing_page.py CHANGED
@@ -13,6 +13,9 @@ from classes.response_content import Content
13
  from classes.response_sem_ppc import Sem_PPC
14
  from classes.response_marketplace import Marketplace
15
  from classes.response_target_market import TargetMarket
 
 
 
16
 
17
  def run_analysis():
18
  # Placeholders for status updates
@@ -27,6 +30,9 @@ def run_analysis():
27
  sem_ppc = st.empty()
28
  marketplace = st.empty()
29
  target_market = st.empty()
 
 
 
30
 
31
  def run_off_page_analysis():
32
  try:
@@ -138,6 +144,16 @@ def run_analysis():
138
  target_market.error(f"Target Market Analysis failed: {e}")
139
  return None
140
 
 
 
 
 
 
 
 
 
 
 
141
  # Create threads for concurrent execution
142
  off_page_thread = threading.Thread(target=run_off_page_analysis)
143
  on_page_thread = threading.Thread(target=run_on_page_analysis)
@@ -150,6 +166,7 @@ def run_analysis():
150
  content_sem_ppc_thread = threading.Thread(target=run_sem_ppc_analysis)
151
  marketplace_thread = threading.Thread(target=run_marketplace_analysis)
152
  target_market_thread = threading.Thread(target=run_target_market_analysis)
 
153
 
154
  # Attach Streamlit context to threads
155
  add_script_run_ctx(off_page_thread)
@@ -163,6 +180,7 @@ def run_analysis():
163
  add_script_run_ctx(content_sem_ppc_thread)
164
  add_script_run_ctx(marketplace_thread)
165
  add_script_run_ctx(target_market_thread)
 
166
 
167
  # Start threads
168
  off_page_thread.start()
@@ -176,6 +194,7 @@ def run_analysis():
176
  content_sem_ppc_thread.start()
177
  marketplace_thread.start()
178
  target_market_thread.start()
 
179
 
180
  # Wait for threads to complete
181
  off_page_thread.join()
@@ -189,9 +208,27 @@ def run_analysis():
189
  content_sem_ppc_thread.join()
190
  marketplace_thread.join()
191
  target_market_thread.join()
 
192
 
193
  st.markdown("---")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
194
  st.success("🎉 All analyses completed!") # Final success message
 
195
  # --- Display Button After Completion ---
196
  if st.button("View Results", icon="📃"):
197
  st.switch_page("pages/output.py")
 
13
  from classes.response_sem_ppc import Sem_PPC
14
  from classes.response_marketplace import Marketplace
15
  from classes.response_target_market import TargetMarket
16
+ from classes.response_df_overview import dfOverview
17
+ from classes.response_executive_summary import ExecutiveSummary
18
+ from classes.response_snapshot import Snapshot
19
 
20
  def run_analysis():
21
  # Placeholders for status updates
 
30
  sem_ppc = st.empty()
31
  marketplace = st.empty()
32
  target_market = st.empty()
33
+ df_overview_status = st.empty()
34
+ executive_summary_status = st.empty()
35
+ snapshot_status = st.empty()
36
 
37
  def run_off_page_analysis():
38
  try:
 
144
  target_market.error(f"Target Market Analysis failed: {e}")
145
  return None
146
 
147
+ def df_overview_analysis():
148
+ try:
149
+ df_overview_status.info("DF Overview Analysis...")
150
+ result = dfOverview(os.getenv('Model_DF_Overview_Analyst'))
151
+ df_overview_status.success("DF Overview Analysis completed successfully.")
152
+ return result
153
+ except Exception as e:
154
+ df_overview_status.error(f"DF Overview Analysis failed: {e}")
155
+ return None
156
+
157
  # Create threads for concurrent execution
158
  off_page_thread = threading.Thread(target=run_off_page_analysis)
159
  on_page_thread = threading.Thread(target=run_on_page_analysis)
 
166
  content_sem_ppc_thread = threading.Thread(target=run_sem_ppc_analysis)
167
  marketplace_thread = threading.Thread(target=run_marketplace_analysis)
168
  target_market_thread = threading.Thread(target=run_target_market_analysis)
169
+ df_overview_thread = threading.Thread(target=df_overview_analysis)
170
 
171
  # Attach Streamlit context to threads
172
  add_script_run_ctx(off_page_thread)
 
180
  add_script_run_ctx(content_sem_ppc_thread)
181
  add_script_run_ctx(marketplace_thread)
182
  add_script_run_ctx(target_market_thread)
183
+ add_script_run_ctx(df_overview_thread)
184
 
185
  # Start threads
186
  off_page_thread.start()
 
194
  content_sem_ppc_thread.start()
195
  marketplace_thread.start()
196
  target_market_thread.start()
197
+ df_overview_thread.start()
198
 
199
  # Wait for threads to complete
200
  off_page_thread.join()
 
208
  content_sem_ppc_thread.join()
209
  marketplace_thread.join()
210
  target_market_thread.join()
211
+ df_overview_thread.join()
212
 
213
  st.markdown("---")
214
+ snapshot_status.info("Starting Snapshot by Channel Analysis...")
215
+ try:
216
+ snapshot = Snapshot(os.getenv('Model_Snapshot_by_Channel_Analyst'))
217
+ snapshot_status.success("Snapshot by Channel Analysis completed successfully.")
218
+ except Exception as e:
219
+ snapshot_status.error(f"Snapshot by Channel Analysis failed: {e}")
220
+
221
+ executive_summary_status.info("Starting Executive Summary Analysis...")
222
+ try:
223
+ executive_summary = ExecutiveSummary(os.getenv('Model_Executive_Summary_Analyst'))
224
+ executive_summary_status.success("Executive Summary Analysis completed successfully.")
225
+ except Exception as e:
226
+ executive_summary_status.error(f"Executive Summary Analysis failed: {e}")
227
+ st.success("🎉 All analyses completed!") # Final success message
228
+
229
+
230
  st.success("🎉 All analyses completed!") # Final success message
231
+
232
  # --- Display Button After Completion ---
233
  if st.button("View Results", icon="📃"):
234
  st.switch_page("pages/output.py")
pages/home.py CHANGED
@@ -54,7 +54,6 @@ class DigitalFootprintDashboard:
54
  else:
55
  st.session_state["analyze"] = ''
56
 
57
-
58
  self.analyze_button = st.button("Analyze", icon="✨", use_container_width=True)
59
  if self.analyze_button == True:
60
  st.switch_page("pages/analyzing_page.py")
 
54
  else:
55
  st.session_state["analyze"] = ''
56
 
 
57
  self.analyze_button = st.button("Analyze", icon="✨", use_container_width=True)
58
  if self.analyze_button == True:
59
  st.switch_page("pages/analyzing_page.py")
pages/output.py CHANGED
@@ -1,7 +1,7 @@
1
  import json
2
  import os
3
  import streamlit as st
4
- from helper.data_field import get_analyst_response
5
  import time
6
 
7
  st.set_page_config(layout="centered")
@@ -21,21 +21,21 @@ def write_client_footprint():
21
  markdown_table = "| Source/Channel | Current KPI |\n"
22
  markdown_table += "|---|---|\n"
23
  markdown_table += f"| Website Health Score | {result_web['website_overall_health_score']} |\n"
24
- markdown_table += f"| Organic Traffic to the Website | {seo['organic_traffic']} |\n"
25
- markdown_table += f"| Paid Traffic to the Website | {seo['paid_traffic']} |\n"
26
- markdown_table += f"| Referral Traffic to the Website | {seo['referral_traffic']} |\n"
27
  markdown_table += f"| Email Traffic to the Website | N/A |\n"
28
- markdown_table += f"| Direct Traffic to the Website | {seo['direct_traffic']} |\n"
29
  markdown_table += f"| Social Traffic to the Website | N/A |\n"
30
  markdown_table += f"| Display Traffic to the Website | N/A |\n"
31
  markdown_table += f"| Email Database | N/A |\n"
32
- markdown_table += f"| Facebook Followers | {socmed['facebook_followers']} |\n"
33
- markdown_table += f"| Twitter Followers | {socmed['twitter_followers']} |\n"
34
- markdown_table += f"| Instagram Followers | {socmed['instagram_followers']} |\n"
35
- markdown_table += f"| Linkedin Followers | {socmed['linkedin_followers']} |\n"
36
  markdown_table += f"| Google My Business | N/A |\n"
37
- markdown_table += f"| # of Keywords Ranking in Top 10 | {seo['keyword_ranking_in_top_10']} |\n"
38
- markdown_table += f"| # of Keywords Ranking in Top 100 | {seo['keyword_ranking_in_top_100']} |\n"
39
 
40
  return markdown_table
41
 
@@ -145,14 +145,11 @@ def seo_on_page_table(df_data):
145
  else:
146
  st.warning("No data retrieved for analysis.")
147
  # --- End: Loop and display data ---
148
-
149
-
150
 
151
  def display_outputs():
152
- st.markdown("<div id='top'></div>", unsafe_allow_html=True);
153
- client_name = "RMX Creatives"
154
- client_website = "https://rmxcreatives.com/"
155
- overview = f"""**{client_name}** is a financial services company based in Auckland, New Zealand, specializing in providing quick and flexible loan solutions for businesses and individuals. Represented by Paul Stone, LoansOne has enlisted ShoreMarketing to perform a deep dive into their digital footprint to have a view of the holistic status of their digital properties and determine how each property can play part in implementing a stronger digital marketing plan.\n
156
  The Digital Marketing Footprint consists of deep-dive research by ShoreMarketing specialists to help the business leaders of LoansOne understand the effectiveness of their existing digital initiatives with the view of giving them an insight to developing a strategy and effectively allocating business resources to digital properties that will give them the best results.\n
157
  This document represents the results of our audit of LoansOne’s digital marketing and management practices. Our audit covered reviews of key digital areas: Website and Tools, PPC/SEM, SEO, Social Media, and Market Places."""
158
 
@@ -166,7 +163,7 @@ This document represents the results of our audit of LoansOne’s digital market
166
  st.markdown(f"{overview}")
167
  st.markdown("---")
168
  st.markdown("### Executive Summary")
169
- st.markdown(f"Simtech LED's digital footprint reveals significant strengths and areas for improvement that can enhance its competitive positioning in the casino, gaming, and entertainment LED market. The analysis highlights the following key findings and recommendations")
170
  st.markdown("---")
171
 
172
  st.markdown("### CLIENT FOOTPRINT")
@@ -285,11 +282,13 @@ Regardless, it is still a great channel worth investing to improve a business’
285
  st.write("TBD")
286
 
287
  st.markdown("##### WHAT IS THE PULL-THROUGH OFFER?")
288
- st.write(get_analyst_response("Pull through offers Analyst"))
 
289
 
290
 
291
  st.markdown("##### WEBSITE AUDIENCE ACQUISITION")
292
- st.write(get_analyst_response("Website Audience Acquisition Analyst"))
 
293
 
294
  #LLD/PM/LN
295
  lld_data = get_analyst_response("LLD/PM/LN Analyst")
@@ -328,25 +327,46 @@ We have evaluated the process of content development strategy and existing conte
328
  st.markdown("##### DECISION STAGE")
329
  st.write(None)
330
 
331
- st.markdown("---")
332
  st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
 
 
333
 
 
334
  st.markdown("#### CONVERSION – ACTIVATION OF VISITORS")
335
- st.markdown("##### AWARENESS TO TRAFFIC")
336
- st.write("TBD")
337
- st.markdown("##### TRAFFIC TO LEAD CONVERSION")
338
- st.write("TBD")
339
- st.markdown("##### LEAD TO SALES CONVERSION")
340
- st.write("TBD")
341
- st.markdown("##### CONVERSION TO BRAND LOYALTY")
342
- st.write("TBD")
343
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
344
  st.markdown("##### CONNECTION OF ALL ONLINE AND OFFLINE TOUCH POINTS")
345
- st.write("TBD")
346
 
347
  st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
348
 
349
-
 
 
350
  if st.button("Back to Dashboard", icon="🏠"):
351
  st.switch_page("pages/home.py")
352
  display_outputs()
 
1
  import json
2
  import os
3
  import streamlit as st
4
+ from helper.data_field import get_analyst_response, data_field
5
  import time
6
 
7
  st.set_page_config(layout="centered")
 
21
  markdown_table = "| Source/Channel | Current KPI |\n"
22
  markdown_table += "|---|---|\n"
23
  markdown_table += f"| Website Health Score | {result_web['website_overall_health_score']} |\n"
24
+ markdown_table += f"| Organic Traffic to the Website | {seo.get('organic_traffic', 'N/A')} |\n"
25
+ markdown_table += f"| Paid Traffic to the Website | {seo.get('paid_traffic', 'N/A')} |\n"
26
+ markdown_table += f"| Referral Traffic to the Website | {seo.get('referral_traffic', 'N/A')} |\n"
27
  markdown_table += f"| Email Traffic to the Website | N/A |\n"
28
+ markdown_table += f"| Direct Traffic to the Website | {seo.get('direct_traffic', 'N/A')} |\n"
29
  markdown_table += f"| Social Traffic to the Website | N/A |\n"
30
  markdown_table += f"| Display Traffic to the Website | N/A |\n"
31
  markdown_table += f"| Email Database | N/A |\n"
32
+ markdown_table += f"| Facebook Followers | {socmed.get('facebook_followers', 'N/A')} |\n"
33
+ markdown_table += f"| Twitter Followers | {socmed.get('twitter_followers', 'N/A')} |\n"
34
+ markdown_table += f"| Instagram Followers | {socmed.get('instagram_followers', 'N/A')} |\n"
35
+ markdown_table += f"| Linkedin Followers | {socmed.get('linkedin_followers', 'N/A')} |\n"
36
  markdown_table += f"| Google My Business | N/A |\n"
37
+ markdown_table += f"| # of Keywords Ranking in Top 10 | {seo.get('keyword_ranking_in_top_10', 'N/A')} |\n"
38
+ markdown_table += f"| # of Keywords Ranking in Top 100 | {seo.get('keyword_ranking_in_top_100', 'N/A')} |\n"
39
 
40
  return markdown_table
41
 
 
145
  else:
146
  st.warning("No data retrieved for analysis.")
147
  # --- End: Loop and display data ---
 
 
148
 
149
  def display_outputs():
150
+ client_name = data_field("Client Name")
151
+ client_website = data_field("Client Website")
152
+ overview = f"""{client_name} is a financial services company based in Auckland, New Zealand, specializing in providing quick and flexible loan solutions for businesses and individuals. Represented by Paul Stone, LoansOne has enlisted ShoreMarketing to perform a deep dive into their digital footprint to have a view of the holistic status of their digital properties and determine how each property can play part in implementing a stronger digital marketing plan.\n
 
153
  The Digital Marketing Footprint consists of deep-dive research by ShoreMarketing specialists to help the business leaders of LoansOne understand the effectiveness of their existing digital initiatives with the view of giving them an insight to developing a strategy and effectively allocating business resources to digital properties that will give them the best results.\n
154
  This document represents the results of our audit of LoansOne’s digital marketing and management practices. Our audit covered reviews of key digital areas: Website and Tools, PPC/SEM, SEO, Social Media, and Market Places."""
155
 
 
163
  st.markdown(f"{overview}")
164
  st.markdown("---")
165
  st.markdown("### Executive Summary")
166
+ st.markdown(get_analyst_response("Executive Summary Analyst"))
167
  st.markdown("---")
168
 
169
  st.markdown("### CLIENT FOOTPRINT")
 
282
  st.write("TBD")
283
 
284
  st.markdown("##### WHAT IS THE PULL-THROUGH OFFER?")
285
+ pull_through_data = get_analyst_response("Pull through offers Analyst")
286
+ st.write(pull_through_data)
287
 
288
 
289
  st.markdown("##### WEBSITE AUDIENCE ACQUISITION")
290
+ website_audience_data = get_analyst_response("Website Audience Acquisition Analyst")
291
+ st.write(website_audience_data)
292
 
293
  #LLD/PM/LN
294
  lld_data = get_analyst_response("LLD/PM/LN Analyst")
 
327
  st.markdown("##### DECISION STAGE")
328
  st.write(None)
329
 
330
+
331
  st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
332
+ st.markdown("---")
333
+
334
 
335
+ conversion = get_analyst_response("Conversion Analyst")
336
  st.markdown("#### CONVERSION – ACTIVATION OF VISITORS")
 
 
 
 
 
 
 
 
337
 
338
+ if conversion:
339
+ st.markdown("##### AWARENESS TO TRAFFIC")
340
+ st.write(conversion.get('awareness_to_traffic', 'N/A'))
341
+
342
+ st.markdown("##### TRAFFIC TO LEAD CONVERSION")
343
+ st.write(conversion.get('traffic_to_lead', 'N/A'))
344
+
345
+ st.markdown("##### LEAD TO SALES CONVERSION")
346
+ st.write(conversion.get('lead_to_sales', 'N/A'))
347
+
348
+ st.markdown("##### CONVERSION TO BRAND LOYALTY")
349
+ st.write(conversion.get('conversion_to_brand', 'N/A'))
350
+ else:
351
+ st.markdown("##### AWARENESS TO TRAFFIC")
352
+ st.write(None)
353
+ st.markdown("##### TRAFFIC TO LEAD CONVERSION")
354
+ st.write(None)
355
+ st.markdown("##### LEAD TO SALES CONVERSION")
356
+ st.write(None)
357
+ st.markdown("##### CONVERSION TO BRAND LOYALTY")
358
+ st.write(None)
359
+
360
+
361
+ conversion = get_analyst_response("Connection Analyst")
362
  st.markdown("##### CONNECTION OF ALL ONLINE AND OFFLINE TOUCH POINTS")
363
+ st.write(conversion)
364
 
365
  st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
366
 
367
+
368
+
369
+ st.markdown("<div id='top'></div>", unsafe_allow_html=True);
370
  if st.button("Back to Dashboard", icon="🏠"):
371
  st.switch_page("pages/home.py")
372
  display_outputs()