result consolidation

#1
by notjulietxd - opened
classes/Linkedin.py CHANGED
@@ -38,6 +38,8 @@ class Linkedin:
38
  except Exception:
39
  pass
40
  '''
 
 
41
 
42
  def request_model(self, payload_txt):
43
  response = requests.post(self.model_url, json=payload_txt)
@@ -187,6 +189,7 @@ class Linkedin:
187
  }
188
  '''
189
  collect_telemetry(debug_info)
 
190
 
191
  #with st.expander("Debug information", icon="⚙"):
192
  # st.write(debug_info)
 
38
  except Exception:
39
  pass
40
  '''
41
+ if 'linkedin_upload' not in st.session_state:
42
+ st.session_state['linkedin_upload'] = ''
43
 
44
  def request_model(self, payload_txt):
45
  response = requests.post(self.model_url, json=payload_txt)
 
189
  }
190
  '''
191
  collect_telemetry(debug_info)
192
+ st.session_state['linkedin_upload'] = 'uploaded'
193
 
194
  #with st.expander("Debug information", icon="⚙"):
195
  # st.write(debug_info)
classes/Seo.py CHANGED
@@ -162,7 +162,6 @@ class Seo:
162
 
163
  session = st.session_state.analyze
164
  if ((self.uploaded_file or self.others or self.uploaded_file_seo) or (self.page_index or self.bounce_rate)) and session == 'clicked':
165
- combined_text = ""
166
  seo_keywords = ""
167
  traffic_channels = ""
168
  traffic_aqcuisition = ""
@@ -196,8 +195,10 @@ class Seo:
196
  traffic_channels += f"\nReferral Traffic: {referral_traffic}"
197
  traffic_channels += df_traffic.to_csv(index=True)
198
 
199
- except KeyError:
200
  pass
 
 
201
 
202
  try:
203
  df_seo = st.session_state['df_seo']
@@ -207,6 +208,8 @@ class Seo:
207
  seo_keywords += f"\nKeyword Ranking Top 100: {keyword_ranking['Keyword_top_100']}\n\n"
208
 
209
  seo_keywords += df_seo.to_csv(index=True)
 
 
210
  except KeyError:
211
  pass
212
 
@@ -224,6 +227,8 @@ class Seo:
224
  traffic_aqcuisition += f"\nPaid Traffic: {ga4_paid_social}\nOrganic Traffic: {ga4_organic_traffic}\nDirect Traffic: {ga4_direct_traffic}\nReferral Traffic: {ga4_referral_traffic}"
225
  except KeyError:
226
  pass
 
 
227
 
228
  # OUTPUT FOR SEO ANALYST
229
  payload_txt_seo_keywords = {"question": seo_keywords}
 
162
 
163
  session = st.session_state.analyze
164
  if ((self.uploaded_file or self.others or self.uploaded_file_seo) or (self.page_index or self.bounce_rate)) and session == 'clicked':
 
165
  seo_keywords = ""
166
  traffic_channels = ""
167
  traffic_aqcuisition = ""
 
195
  traffic_channels += f"\nReferral Traffic: {referral_traffic}"
196
  traffic_channels += df_traffic.to_csv(index=True)
197
 
198
+ except AttributeError:
199
  pass
200
+ except KeyError:
201
+ st.info("Incorrect SEMRush format. Please upload a valid SEMRush file.")
202
 
203
  try:
204
  df_seo = st.session_state['df_seo']
 
208
  seo_keywords += f"\nKeyword Ranking Top 100: {keyword_ranking['Keyword_top_100']}\n\n"
209
 
210
  seo_keywords += df_seo.to_csv(index=True)
211
+ except AttributeError:
212
+ pass
213
  except KeyError:
214
  pass
215
 
 
227
  traffic_aqcuisition += f"\nPaid Traffic: {ga4_paid_social}\nOrganic Traffic: {ga4_organic_traffic}\nDirect Traffic: {ga4_direct_traffic}\nReferral Traffic: {ga4_referral_traffic}"
228
  except KeyError:
229
  pass
230
+ except TypeError:
231
+ st.info("Incorrect GA4 format. Please upload a valid GA4 file.")
232
 
233
  # OUTPUT FOR SEO ANALYST
234
  payload_txt_seo_keywords = {"question": seo_keywords}
classes/Social_Media_FB.py CHANGED
@@ -39,6 +39,8 @@ class Facebook:
39
  except Exception:
40
  pass
41
  '''
 
 
42
 
43
  def request_model(self, payload_txt):
44
  response = requests.post(self.model_url, json=payload_txt)
@@ -142,7 +144,7 @@ class Facebook:
142
  def process(self):
143
  start_time = time.time()
144
  session = st.session_state.analyze
145
- if ((self.fb_organic_post and self.fb_organic_post.name) or (self.fb_ads_campaign and self.fb_ads_campaign.name) or self.facebooks or self.facebook_rr) and session == 'clicked':
146
  try:
147
  combined_text = ""
148
  with st.spinner('Social Media Analyst...', show_time=True):
@@ -201,7 +203,10 @@ class Facebook:
201
  'result': result,
202
  }
203
  '''
 
 
204
  collect_telemetry(debug_info)
 
205
 
206
  #with st.expander("Debug information", icon="⚙"):
207
  # st.write(debug_info)
 
39
  except Exception:
40
  pass
41
  '''
42
+ if 'fb_upload' not in st.session_state:
43
+ st.session_state['fb_upload'] = ''
44
 
45
  def request_model(self, payload_txt):
46
  response = requests.post(self.model_url, json=payload_txt)
 
144
  def process(self):
145
  start_time = time.time()
146
  session = st.session_state.analyze
147
+ if ((self.fb_organic_post and self.fb_organic_post.name) or (self.fb_ads_campaign and self.fb_ads_campaign.name) or (self.facebooks) or (self.facebook_rr)) and session == 'clicked':
148
  try:
149
  combined_text = ""
150
  with st.spinner('Social Media Analyst...', show_time=True):
 
203
  'result': result,
204
  }
205
  '''
206
+
207
+ st.session_state['fb_upload'] = 'uploaded'
208
  collect_telemetry(debug_info)
209
+ combined_text = ""
210
 
211
  #with st.expander("Debug information", icon="⚙"):
212
  # st.write(debug_info)
classes/Social_Media_IG.py CHANGED
@@ -38,6 +38,8 @@ class Instagram:
38
  except Exception:
39
  pass
40
  '''
 
 
41
 
42
  def request_model(self, payload_txt):
43
  response = requests.post(self.model_url, json=payload_txt)
@@ -135,6 +137,8 @@ class Instagram:
135
  }
136
  '''
137
  collect_telemetry(debug_info)
 
 
138
 
139
 
140
  except AttributeError:
 
38
  except Exception:
39
  pass
40
  '''
41
+ if 'ig_upload' not in st.session_state:
42
+ st.session_state['ig_upload'] = ''
43
 
44
  def request_model(self, payload_txt):
45
  response = requests.post(self.model_url, json=payload_txt)
 
137
  }
138
  '''
139
  collect_telemetry(debug_info)
140
+ st.session_state['ig_upload'] = 'uploaded'
141
+
142
 
143
 
144
  except AttributeError:
classes/Tiktok.py CHANGED
@@ -38,6 +38,8 @@ class Tiktok:
38
  except Exception:
39
  pass
40
  '''
 
 
41
 
42
  def request_model(self, payload_txt):
43
  response = requests.post(self.model_url, json=payload_txt)
@@ -101,6 +103,7 @@ class Tiktok:
101
  except Exception:
102
  pass
103
  return file_name
 
104
  def process(self):
105
  session = st.session_state.analyze
106
  if (self.tiktok_f or self.tiktok_er or self.tiktok_pf) and session == 'clicked':
@@ -134,6 +137,7 @@ class Tiktok:
134
  }
135
  '''
136
  collect_telemetry(debug_info)
 
137
 
138
  #with st.expander("Debug information", icon="⚙"):
139
  # st.write(debug_info)
 
38
  except Exception:
39
  pass
40
  '''
41
+ if 'tiktok_upload' not in st.session_state:
42
+ st.session_state['tiktok_upload'] = ''
43
 
44
  def request_model(self, payload_txt):
45
  response = requests.post(self.model_url, json=payload_txt)
 
103
  except Exception:
104
  pass
105
  return file_name
106
+
107
  def process(self):
108
  session = st.session_state.analyze
109
  if (self.tiktok_f or self.tiktok_er or self.tiktok_pf) and session == 'clicked':
 
137
  }
138
  '''
139
  collect_telemetry(debug_info)
140
+ st.session_state['tiktok_upload'] = 'uploaded'
141
 
142
  #with st.expander("Debug information", icon="⚙"):
143
  # st.write(debug_info)
classes/Twitter.py CHANGED
@@ -38,6 +38,8 @@ class Twitter:
38
  except Exception:
39
  pass
40
  '''
 
 
41
 
42
  def request_model(self, payload_txt):
43
  response = requests.post(self.model_url, json=payload_txt)
@@ -149,7 +151,7 @@ class Twitter:
149
  }
150
  '''
151
  collect_telemetry(debug_info)
152
-
153
  st.session_state['analyzing'] = False
154
  except AttributeError:
155
  st.info("Please upload CSV or PDF files first.")
 
38
  except Exception:
39
  pass
40
  '''
41
+ if 'twitter_upload' not in st.session_state:
42
+ st.session_state['twitter_upload'] = ''
43
 
44
  def request_model(self, payload_txt):
45
  response = requests.post(self.model_url, json=payload_txt)
 
151
  }
152
  '''
153
  collect_telemetry(debug_info)
154
+ st.session_state['twitter_upload'] = 'uploaded'
155
  st.session_state['analyzing'] = False
156
  except AttributeError:
157
  st.info("Please upload CSV or PDF files first.")
classes/Youtube.py CHANGED
@@ -38,6 +38,8 @@ class YouTube:
38
  except Exception:
39
  pass
40
  '''
 
 
41
 
42
  def request_model(self, payload_txt):
43
  response = requests.post(self.model_url, json=payload_txt)
@@ -104,7 +106,7 @@ class YouTube:
104
 
105
  def process(self):
106
  session = st.session_state.analyze
107
- if (self.youtube or self.youtube_er or self.youtube_pf) and session == 'clicked':
108
  try:
109
  combined_text = ""
110
  with st.spinner('Youtube...', show_time=True):
@@ -138,6 +140,7 @@ class YouTube:
138
  }
139
  '''
140
  collect_telemetry(debug_info)
 
141
 
142
  #with st.expander("Debug information", icon="⚙"):
143
  # st.write(debug_info)
 
38
  except Exception:
39
  pass
40
  '''
41
+ if 'youtube_upload' not in st.session_state:
42
+ st.session_state['youtube_upload'] = ''
43
 
44
  def request_model(self, payload_txt):
45
  response = requests.post(self.model_url, json=payload_txt)
 
106
 
107
  def process(self):
108
  session = st.session_state.analyze
109
+ if ((self.youtube or self.youtube_er or self.youtube_pf) and session) == 'clicked':
110
  try:
111
  combined_text = ""
112
  with st.spinner('Youtube...', show_time=True):
 
140
  }
141
  '''
142
  collect_telemetry(debug_info)
143
+ st.session_state['youtube_upload'] = 'uploaded'
144
 
145
  #with st.expander("Debug information", icon="⚙"):
146
  # st.write(debug_info)
classes/response_off.py CHANGED
@@ -32,6 +32,7 @@ class SeoOffPageAnalyst:
32
 
33
  # AGENT NAME
34
  #st.header(self.analyst_name)
 
35
  def request_model(self, payload_txt, headers):
36
  response = requests.post(self.model_url, json=payload_txt, headers=headers)
37
  response.raise_for_status()
 
32
 
33
  # AGENT NAME
34
  #st.header(self.analyst_name)
35
+
36
  def request_model(self, payload_txt, headers):
37
  response = requests.post(self.model_url, json=payload_txt, headers=headers)
38
  response.raise_for_status()
classes/response_on_page.py CHANGED
@@ -60,7 +60,13 @@ class SeoOn:
60
  myclient = MongoClient(mongodb_uri)
61
  mydb = myclient.get_database()
62
  mycol = mydb["df_data"]
63
- x = mycol.find_one({"data_field": data_field})
 
 
 
 
 
 
64
  x = x["result"]
65
  return x
66
 
@@ -68,55 +74,19 @@ class SeoOn:
68
  with st.spinner('SEO On Page...', show_time=True):
69
  st.write('')
70
  # OUTPUT FOR SEO ANALYST
71
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
72
- payload = ""
73
- try:
74
- payload += self.fetch_data("First Meaningful Paint")
75
- except Exception as e:
76
- pass
77
- try:
78
- payload += self.fetch_data("Crawl File")
79
- except Exception as e:
80
- pass
81
-
82
- try:
83
- session_first_meaningful_paint = st.session_state['first_meaningful_paint']
84
- session_crawl_file = st.session_state['crawl_file']
85
- if session_first_meaningful_paint or session_crawl_file == 'uploaded':
86
- payload_txt = {"input_value": payload, "output_type": "text", "input_type": "chat"}
87
  payload_txt_model = self.request_model(payload_txt, headers)
88
  debug_info = {'data_field' : 'On Page Analyst', 'result': payload_txt_model}
89
  upload_response(debug_info)
90
 
91
  st.session_state['first_meaningful_paint'] = ''
92
  st.session_state['crawl_file'] = ''
 
93
  except Exception as e:
94
  pass
95
 
96
- #end_time = time.time()
97
- #time_lapsed = end_time - start_time
98
-
99
- #debug_info = {'data_field' : 'GT Metrix', 'result': result}
100
-
101
-
102
- '''
103
- debug_info = {#'analyst': self.analyst_name,
104
- 'url_uuid': self.model_url.split("-")[-1],
105
- 'time_lapsed' : time_lapsed,
106
- 'crawl_file': [file.name for file in self.uploaded_files] if self.uploaded_files else ['Not available'],
107
- 'gt_metrix': [file.name for file in self.gtmetrix] if self.gtmetrix else ['Not available'],
108
- 'payload': payload_txt,
109
- 'result': result}
110
-
111
- if self.gtmetrix:
112
- collect_telemetry(debug_info)
113
- '''
114
-
115
-
116
- #with st.expander("Debug information", icon="⚙"):
117
- # st.write(debug_info)
118
-
119
-
120
  st.session_state['analyzing'] = False
121
  try:
122
  self.file_dict.popitem()
@@ -124,13 +94,25 @@ class SeoOn:
124
  pass
125
 
126
  def row1(self):
127
-
128
- #st.write("") # FOR THE HIDE BUTTON
129
- #st.write("") # FOR THE HIDE BUTTON
130
- #st.write("AI Analyst Output: ")
131
  st.session_state['analyzing'] = False
132
- #st.write("") # FOR THE HIDE BUTTON
133
- self.process()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
134
 
135
 
136
  if __name__ == "__main__":
 
60
  myclient = MongoClient(mongodb_uri)
61
  mydb = myclient.get_database()
62
  mycol = mydb["df_data"]
63
+
64
+ # Sort by timestamp field in descending order
65
+ x = mycol.find_one(
66
+ {"data_field": data_field},
67
+ sort=[("timestamp", -1)]
68
+ )
69
+
70
  x = x["result"]
71
  return x
72
 
 
74
  with st.spinner('SEO On Page...', show_time=True):
75
  st.write('')
76
  # OUTPUT FOR SEO ANALYST
77
+ headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
78
+ try:
79
+ payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
 
 
 
 
 
 
 
 
 
 
 
 
 
80
  payload_txt_model = self.request_model(payload_txt, headers)
81
  debug_info = {'data_field' : 'On Page Analyst', 'result': payload_txt_model}
82
  upload_response(debug_info)
83
 
84
  st.session_state['first_meaningful_paint'] = ''
85
  st.session_state['crawl_file'] = ''
86
+ count = 0
87
  except Exception as e:
88
  pass
89
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
90
  st.session_state['analyzing'] = False
91
  try:
92
  self.file_dict.popitem()
 
94
  pass
95
 
96
  def row1(self):
 
 
 
 
97
  st.session_state['analyzing'] = False
98
+ self.payload = ""
99
+ count = 0
100
+ try:
101
+ session_first_meaningful_paint = st.session_state['first_meaningful_paint']
102
+ if session_first_meaningful_paint == 'uploaded':
103
+ count += 1
104
+ self.payload += self.fetch_data("First Meaningful Paint")
105
+ except Exception as e:
106
+ pass
107
+ try:
108
+ session_crawl_file = st.session_state['crawl_file']
109
+ if session_crawl_file == 'uploaded':
110
+ count += 1
111
+ self.payload += self.fetch_data("Crawl File")
112
+ except Exception as e:
113
+ pass
114
+ if count >= 1:
115
+ self.process()
116
 
117
 
118
  if __name__ == "__main__":
classes/response_seo.py CHANGED
@@ -48,6 +48,7 @@ class Seo:
48
  response = requests.post(self.model_url, json=payload_txt, headers=headers)
49
  response.raise_for_status()
50
  output = response.json()
 
51
  text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
52
  text = json.loads(text)
53
  #st.write(text)
@@ -67,7 +68,13 @@ class Seo:
67
  myclient = MongoClient(mongodb_uri)
68
  mydb = myclient.get_database()
69
  mycol = mydb["df_data"]
70
- x = mycol.find_one({"data_field": data_field})
 
 
 
 
 
 
71
  x = x["result"]
72
  return x
73
 
@@ -143,41 +150,9 @@ class Seo:
143
  def process (self):
144
  with st.spinner('Seo Analyst...', show_time=True):
145
  st.write('')
146
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
147
- payload = ""
148
- try:
149
- payload += self.fetch_data("SEO Keywords")
150
- except Exception as e:
151
- pass
152
- try:
153
- payload += self.fetch_data("Traffic Channels")
154
- except Exception as e:
155
- pass
156
- try:
157
- payload += self.fetch_data("Traffic Acquisition")
158
- except Exception as e:
159
- pass
160
- try:
161
- payload += self.fetch_data("Pages Indexed")
162
- except Exception as e:
163
- pass
164
- try:
165
- payload += self.fetch_data("Bounce Rate")
166
- except Exception as e:
167
- pass
168
- try:
169
- payload += self.fetch_backlinks("Backlinks")
170
- except Exception as e:
171
- pass
172
-
173
  try:
174
- session_bounce_rate = st.session_state['bounce_rate']
175
- session_page_index = st.session_state['pages_index']
176
- session_others = st.session_state['others']
177
- session_traffic_channels = st.session_state['df_traffic']
178
- session_traffic_aqcuisition = st.session_state['df_seo']
179
- if session_bounce_rate or session_page_index or session_others or session_traffic_aqcuisition or session_traffic_channels == 'uploaded':
180
- payload_txt = {"input_value": payload, "output_type": "text", "input_type": "chat"}
181
  payload_txt_model = self.request_model(payload_txt, headers)
182
  debug_info = {'data_field' : 'SEO Analyst', 'result': payload_txt_model}
183
  upload_response(debug_info)
@@ -187,6 +162,7 @@ class Seo:
187
  st.session_state['others'] = ''
188
  st.session_state['df_traffic'] = ''
189
  st.session_state['df_seo'] = ''
 
190
  except Exception as e:
191
  pass
192
  st.session_state['analyzing'] = False
@@ -195,8 +171,57 @@ class Seo:
195
  st.session_state['analyzing'] = False
196
  #st.write("") # FOR THE HIDE BUTTON
197
  #analyze_button = st.button("Analyze", disabled=initialize_analyze_session())
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
198
 
199
- self.process()
200
 
201
  if __name__ == "__main__":
202
  st.set_page_config(layout="wide")
 
48
  response = requests.post(self.model_url, json=payload_txt, headers=headers)
49
  response.raise_for_status()
50
  output = response.json()
51
+ #st.write(output)
52
  text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
53
  text = json.loads(text)
54
  #st.write(text)
 
68
  myclient = MongoClient(mongodb_uri)
69
  mydb = myclient.get_database()
70
  mycol = mydb["df_data"]
71
+
72
+ # Sort by timestamp field in descending order
73
+ x = mycol.find_one(
74
+ {"data_field": data_field},
75
+ sort=[("timestamp", -1)]
76
+ )
77
+
78
  x = x["result"]
79
  return x
80
 
 
150
  def process (self):
151
  with st.spinner('Seo Analyst...', show_time=True):
152
  st.write('')
153
+ headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
154
  try:
155
+ payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
 
 
 
 
 
 
156
  payload_txt_model = self.request_model(payload_txt, headers)
157
  debug_info = {'data_field' : 'SEO Analyst', 'result': payload_txt_model}
158
  upload_response(debug_info)
 
162
  st.session_state['others'] = ''
163
  st.session_state['df_traffic'] = ''
164
  st.session_state['df_seo'] = ''
165
+ count = 0
166
  except Exception as e:
167
  pass
168
  st.session_state['analyzing'] = False
 
171
  st.session_state['analyzing'] = False
172
  #st.write("") # FOR THE HIDE BUTTON
173
  #analyze_button = st.button("Analyze", disabled=initialize_analyze_session())
174
+ self.payload = ""
175
+ count = 0
176
+ try:
177
+ session_traffic_aqcuisition = st.session_state['df_seo']
178
+ if session_traffic_aqcuisition == 'uploaded':
179
+ count += 1
180
+ self.payload += self.fetch_data("SEO Keywords")
181
+
182
+ except Exception as e:
183
+ pass
184
+ try:
185
+ session_traffic_channels = st.session_state['df_traffic']
186
+ if session_traffic_channels == 'uploaded':
187
+ count += 1
188
+ self.payload += self.fetch_data("Traffic Channels")
189
+ except Exception as e:
190
+ pass
191
+ try:
192
+ session_others = st.session_state['others']
193
+ if session_others == 'uploaded':
194
+ count += 1
195
+ self.payload += self.fetch_data("Traffic Acquisition")
196
+
197
+ except Exception as e:
198
+ pass
199
+ try:
200
+ session_page_index = st.session_state['pages_index']
201
+ if session_page_index == 'uploaded':
202
+ count += 1
203
+ self.payload += self.fetch_data("Pages Indexed")
204
+ except Exception as e:
205
+ pass
206
+ try:
207
+ session_bounce_rate = st.session_state['bounce_rate']
208
+ if session_bounce_rate == 'uploaded':
209
+ print("run")
210
+ count += 1
211
+ self.payload += self.fetch_data("Bounce Rate")
212
+ except Exception as e:
213
+ pass
214
+ try:
215
+ session_backlinks = st.session_state["off_page_file_uploaded"]
216
+ if session_backlinks == 'uploaded':
217
+ count += 1
218
+ self.payload += self.fetch_backlinks("Backlinks")
219
+ except Exception as e:
220
+ pass
221
+
222
+ if count >= 1:
223
+ self.process()
224
 
 
225
 
226
  if __name__ == "__main__":
227
  st.set_page_config(layout="wide")
classes/response_social_media.py ADDED
@@ -0,0 +1,154 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import requests
3
+ from dotenv import load_dotenv
4
+ import os
5
+ import pandas as pd
6
+ import time
7
+ import chardet
8
+ from helper.telemetry import collect_telemetry
9
+ from helper.upload_File import uploadFile
10
+ from helper.button_behaviour import hide_button
11
+ from helper.initialize_analyze_session import initialize_analyze_session
12
+ from pymongo import MongoClient
13
+ import json
14
+ from helper.data_field import data_field
15
+ from helper.upload_response import upload_response
16
+
17
class SocialMedia:
    """Consolidated social-media analyst.

    Aggregates the per-platform payloads (Facebook, Instagram, Twitter,
    YouTube, LinkedIn, TikTok) that the individual upload pages stored in
    MongoDB, and sends the combined text to the analyst model in a single
    request once at least one platform has been uploaded.
    """

    # Maps each st.session_state upload flag to the MongoDB ``data_field``
    # that holds the corresponding platform's prepared payload.
    _PLATFORMS = (
        ('fb_upload', 'Facebook'),
        ('ig_upload', 'Instagram'),
        ('twitter_upload', 'Twitter'),
        ('youtube_upload', 'YouTube'),
        ('linkedin_upload', 'Linkedin'),
        ('tiktok_upload', 'Tiktok'),
    )

    def __init__(self, model_url):
        # model_url: endpoint of the analyst model (Langflow-style API).
        self.file_dict = {}
        self.model_url = model_url
        self.initialize()
        self.row1()

    def initialize(self):
        """Load environment variables and seed the per-platform upload flags."""
        # FOR ENV
        load_dotenv()

        # Seed every platform flag (not just Facebook) so later reads never
        # KeyError; '' means "nothing uploaded yet" and fails the
        # == 'uploaded' check in row1().
        for session_key, _ in self._PLATFORMS:
            st.session_state.setdefault(session_key, '')

    def request_model(self, payload_txt, headers):
        """POST ``payload_txt`` to the model and return the parsed text result.

        Raises requests.HTTPError (via raise_for_status) on a non-2xx response.
        """
        response = requests.post(self.model_url, json=payload_txt, headers=headers)
        response.raise_for_status()
        output = response.json()
        # The Langflow response nests the generated text deep in the outputs tree.
        text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
        return json.loads(text)

    def terminate_session(self, session):
        """Remove ``session`` from st.session_state, ignoring missing keys."""
        st.session_state.pop(session, None)

    def fetch_data(self, data_field):
        """Return the most recent stored ``result`` for ``data_field``.

        Reads the newest document (sorted by ``timestamp`` descending) from
        the ``df_data`` collection. Raises TypeError when no document matches
        (find_one returns None).
        """
        mongodb_uri = os.getenv("MONGODB_URI")
        # Context manager closes the client even if the lookup raises
        # (the original leaked one connection per call).
        with MongoClient(mongodb_uri) as myclient:
            mydb = myclient.get_database()
            mycol = mydb["df_data"]
            doc = mycol.find_one(
                {"data_field": data_field},
                sort=[("timestamp", -1)],  # newest upload wins
            )
        return doc["result"]

    def process(self):
        """Send the accumulated payload to the model and persist the response."""
        with st.spinner('Social Media Analyst...', show_time=True):
            st.write('')

            headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
            try:
                payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
                payload_txt_model = self.request_model(payload_txt, headers)
                debug_info = {'data_field': 'Social Media Analyst', 'result': payload_txt_model}
                upload_response(debug_info)

                # Clear every upload flag so a Streamlit rerun does not resend
                # the same payload.
                for session_key, _ in self._PLATFORMS:
                    st.session_state[session_key] = ''
            except Exception:
                # Best-effort: a failed model call must not crash the page.
                # NOTE(review): consider at least logging the exception.
                pass

    def row1(self):
        """Collect payloads for every uploaded platform, then run the model."""
        self.payload = ""
        uploaded = 0
        for session_key, data_field in self._PLATFORMS:
            try:
                if st.session_state[session_key] == 'uploaded':
                    uploaded += 1
                    self.payload += self.fetch_data(data_field)
            except Exception:
                # Missing flag or missing DB document: skip this platform.
                pass
        if uploaded >= 1:
            self.process()
151
+ if __name__ == "__main__":
152
+ st.set_page_config(layout="wide")
153
+
154
+ upload = uploadFile()
classes/response_website_and_tools.py CHANGED
@@ -59,118 +59,23 @@ class WebsiteAndTools:
59
  myclient = MongoClient(mongodb_uri)
60
  mydb = myclient.get_database()
61
  mycol = mydb["df_data"]
62
- x = mycol.find_one({"data_field": data_field})
 
 
 
 
 
 
63
  x = x["result"]
64
  return x
65
 
66
  def process(self):
67
  with st.spinner('Website and Tools...', show_time=True):
68
- st.write('')
69
- '''
70
- # OUTPUT FOR WEBSITE RESPONSIVENESS
71
- payload_txt_website_responsiveness = {"result": website_responsiveness}
72
- result_website_responsiveness = self.request_model(payload_txt_website_responsiveness)
73
-
74
- # OUTPUT FOR CONTENT MANAGEMENT SYSTEM
75
- payload_txt_content_management_system = {"question": content_management_system}
76
- #result_content_management_system = self.request_model(content_management_system)
77
-
78
- # OUTPUT FOR SSL CERTIFICATE
79
- payload_txt_SSL_certificate = {"question": SSL_certificate}
80
- #result_SSL_certificate = self.request_model(SSL_certificate)
81
-
82
- # OUTPUT FOR WEB ANALYTICS
83
- payload_txt_web_analytics = {"question": web_analytics}
84
- #result_web_analytics = self.request_model(web_analytics)
85
-
86
- # OUTPUT FOR CLIENT RELATIONS MANAGEMENT SYSTEM
87
- payload_txt_client_relations_management_system = {"question": client_relations_management_system}
88
- #result_client_relations_management_system = self.request_model(client_relations_management_system)
89
-
90
- # OUTPUT FOR LEAD GENERATION MECHANISM
91
- payload_txt_lead_generation_mechanism = {"question": lead_generation_mechanism}
92
- #result_lead_generation_mechanism = self.request_model(lead_generation_mechanism)
93
- '''
94
- # OUTPUT FOR SEO ANALYST
95
-
96
- #print(x)
97
  headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
 
98
  try:
99
- payload = ""
100
- session_website_responsiveness = st.session_state['website_responsiveness']
101
- if session_website_responsiveness == 'uploaded':
102
- payload += self.fetch_data("Website Responsiveness")
103
- except Exception as e:
104
- pass
105
- try:
106
- session_content_management_system = st.session_state['content_management_system']
107
- if session_content_management_system == 'uploaded':
108
- payload += self.fetch_data("Content Management System")
109
- except Exception as e:
110
- pass
111
- try:
112
- session_SSL_certificate = st.session_state['SSL_certificate']
113
- if session_SSL_certificate == 'uploaded':
114
- payload += self.fetch_data("SSL Certificate")
115
-
116
- except Exception as e:
117
- pass
118
- try:
119
- session_mobile_responsiveness = st.session_state['mobile_responsiveness']
120
- if session_mobile_responsiveness == 'uploaded':
121
- payload += self.fetch_data("Mobile Responsiveness")
122
- except Exception as e:
123
- pass
124
- try:
125
- session_desktop_loading_speed = st.session_state['desktop_loading_speed']
126
- if session_desktop_loading_speed == 'uploaded':
127
- payload += self.fetch_data("Desktop Loading Speed")
128
- except Exception as e:
129
- pass
130
- try:
131
- session_mobile_loading_speed = st.session_state['mobile_loading_speed']
132
- if session_mobile_loading_speed == 'uploaded':
133
- payload += self.fetch_data("Mobile Loading Speed")
134
- except Exception as e:
135
- pass
136
- try:
137
- session_first_meaningful_paint = st.session_state['first_meaningful_paint']
138
- if session_first_meaningful_paint == 'uploaded':
139
- payload += self.fetch_data("First Meaningful Paint")
140
- except Exception as e:
141
- pass
142
- try:
143
- session_web_analytics = st.session_state['web_analytics']
144
- if session_web_analytics == 'uploaded':
145
- payload += self.fetch_data("Web Analytics")
146
- except Exception as e:
147
- pass
148
- try:
149
- session_client_relations_management_system = st.session_state['client_relations_management_system']
150
- if session_client_relations_management_system == 'uploaded':
151
- payload += self.fetch_data("Client Relations Management System")
152
- except Exception as e:
153
- pass
154
- try:
155
- session_lead_generation_mechanism = st.session_state['lead_generation_mechanism']
156
- if session_lead_generation_mechanism == 'uploaded':
157
- payload += self.fetch_data("Lead Generation Mechanism")
158
- except Exception as e:
159
- pass
160
-
161
- try:
162
- session_website_responsiveness = st.session_state['website_responsiveness']
163
- session_client_relations_management_system = st.session_state['client_relations_management_system']
164
- session_lead_generation_mechanism = st.session_state['lead_generation_mechanism']
165
- session_web_analytics = st.session_state['web_analytics']
166
- session_mobile_responsiveness = st.session_state['mobile_responsiveness']
167
- session_desktop_loading_speed = st.session_state['desktop_loading_speed']
168
- session_mobile_loading_speed = st.session_state['mobile_loading_speed']
169
- session_SSL_certificate = st.session_state['SSL_certificate']
170
- session_content_management_system = st.session_state['content_management_system']
171
-
172
- if session_website_responsiveness or session_client_relations_management_system or session_lead_generation_mechanism or session_web_analytics or session_mobile_responsiveness or session_desktop_loading_speed or session_mobile_loading_speed or session_content_management_system or session_SSL_certificate == 'uploaded':
173
- payload_txt = {"input_value": payload, "output_type": "text", "input_type": "chat"}
174
  payload_txt_model = self.request_model(payload_txt, headers)
175
  debug_info = {'data_field' : 'Website and Tools Analyst', 'result': payload_txt_model}
176
  upload_response(debug_info)
@@ -184,6 +89,7 @@ class WebsiteAndTools:
184
  st.session_state['web_analytics'] = ''
185
  st.session_state['client_relations_management_system'] = ''
186
  st.session_state['lead_generation_mechanism'] = ''
 
187
  except Exception as e:
188
  pass
189
  #end_time = time.time()
@@ -217,15 +123,84 @@ class WebsiteAndTools:
217
  pass
218
 
219
  def row1(self):
220
-
221
- #st.write("") # FOR THE HIDE BUTTON
222
- #st.write("") # FOR THE HIDE BUTTON
223
- #st.write("AI Analyst Output: ")
224
  st.session_state['analyzing'] = False
225
- #st.write("") # FOR THE HIDE BUTTON
226
- self.process()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
227
 
228
-
229
  if __name__ == "__main__":
230
  st.set_page_config(layout="wide")
231
 
 
59
  myclient = MongoClient(mongodb_uri)
60
  mydb = myclient.get_database()
61
  mycol = mydb["df_data"]
62
+
63
+ # Sort by timestamp field in descending order
64
+ x = mycol.find_one(
65
+ {"data_field": data_field},
66
+ sort=[("timestamp", -1)]
67
+ )
68
+
69
  x = x["result"]
70
  return x
71
 
72
  def process(self):
73
  with st.spinner('Website and Tools...', show_time=True):
74
+ st.write('')
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
75
  headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
76
+
77
  try:
78
+ payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
79
  payload_txt_model = self.request_model(payload_txt, headers)
80
  debug_info = {'data_field' : 'Website and Tools Analyst', 'result': payload_txt_model}
81
  upload_response(debug_info)
 
89
  st.session_state['web_analytics'] = ''
90
  st.session_state['client_relations_management_system'] = ''
91
  st.session_state['lead_generation_mechanism'] = ''
92
+ count = 0
93
  except Exception as e:
94
  pass
95
  #end_time = time.time()
 
123
  pass
124
 
125
  def row1(self):
 
 
 
 
126
  st.session_state['analyzing'] = False
127
+ self.payload = ""
128
+ count = 0
129
+ try:
130
+ payload = ""
131
+ session_website_responsiveness = st.session_state['website_responsiveness']
132
+ if session_website_responsiveness == 'uploaded':
133
+ count += 1
134
+ self.payload += self.fetch_data("Website Responsiveness")
135
+ except Exception as e:
136
+ pass
137
+ try:
138
+ session_content_management_system = st.session_state['content_management_system']
139
+ if session_content_management_system == 'uploaded':
140
+ count += 1
141
+ self.payload += self.fetch_data("Content Management System")
142
+ except Exception as e:
143
+ pass
144
+ try:
145
+ session_SSL_certificate = st.session_state['SSL_certificate']
146
+ if session_SSL_certificate == 'uploaded':
147
+ count += 1
148
+ self.payload += self.fetch_data("SSL Certificate")
149
+
150
+ except Exception as e:
151
+ pass
152
+ try:
153
+ session_mobile_responsiveness = st.session_state['mobile_responsiveness']
154
+ if session_mobile_responsiveness == 'uploaded':
155
+ count += 1
156
+ self.payload += self.fetch_data("Mobile Responsiveness")
157
+ except Exception as e:
158
+ pass
159
+ try:
160
+ session_desktop_loading_speed = st.session_state['desktop_loading_speed']
161
+ if session_desktop_loading_speed == 'uploaded':
162
+ count += 1
163
+ self.payload += self.fetch_data("Desktop Loading Speed")
164
+ except Exception as e:
165
+ pass
166
+ try:
167
+ session_mobile_loading_speed = st.session_state['mobile_loading_speed']
168
+ if session_mobile_loading_speed == 'uploaded':
169
+ count += 1
170
+ self.payload += self.fetch_data("Mobile Loading Speed")
171
+ except Exception as e:
172
+ pass
173
+ try:
174
+ session_first_meaningful_paint = st.session_state['first_meaningful_paint']
175
+ if session_first_meaningful_paint == 'uploaded':
176
+ count += 1
177
+ self.payload += self.fetch_data("First Meaningful Paint")
178
+ except Exception as e:
179
+ pass
180
+ try:
181
+ session_web_analytics = st.session_state['web_analytics']
182
+ if session_web_analytics == 'uploaded':
183
+ count += 1
184
+ self.payload += self.fetch_data("Web Analytics")
185
+ except Exception as e:
186
+ pass
187
+ try:
188
+ session_client_relations_management_system = st.session_state['client_relations_management_system']
189
+ if session_client_relations_management_system == 'uploaded':
190
+ count += 1
191
+ self.payload += self.fetch_data("Client Relations Management System")
192
+ except Exception as e:
193
+ pass
194
+ try:
195
+ session_lead_generation_mechanism = st.session_state['lead_generation_mechanism']
196
+ if session_lead_generation_mechanism == 'uploaded':
197
+ count += 1
198
+ self.payload += self.fetch_data("Lead Generation Mechanism")
199
+ except Exception as e:
200
+ pass
201
+ if count >= 1:
202
+ self.process()
203
 
 
204
  if __name__ == "__main__":
205
  st.set_page_config(layout="wide")
206
 
classes/website_and_tools.py CHANGED
@@ -277,7 +277,7 @@ class WebsiteAndTools:
277
  self.mobile_responsiveness = st.text_input("Mobile Responsiveness - GTMetrix:", placeholder='Enter Mobile Responsiveness')
278
  self.desktop_loading_speed = st.text_input("Desktop Loading Speed - GTMetrix:", placeholder='Enter Desktop Loading Speed')
279
  self.mobile_loading_speed = st.text_input("Mobile Loading Speed - GTMetrix:", placeholder='Enter Mobile Loading Speed')
280
- self.web_analytics = st.text_input("Web Analytics - BuiltWith:", placeholder='Enter Web Analytics')
281
  self.client_relations_management_system = st.text_input("Client Relations Management System - BuiltWith:", placeholder='Enter Client Relations Management System')
282
  self.lead_generation_mechanism = st.text_input("Lead Generation Mechanism - Business Context (Lead Generation & Lead Nurturing):", placeholder='Enter Lead Generation Mechanism')
283
 
 
277
  self.mobile_responsiveness = st.text_input("Mobile Responsiveness - GTMetrix:", placeholder='Enter Mobile Responsiveness')
278
  self.desktop_loading_speed = st.text_input("Desktop Loading Speed - GTMetrix:", placeholder='Enter Desktop Loading Speed')
279
  self.mobile_loading_speed = st.text_input("Mobile Loading Speed - GTMetrix:", placeholder='Enter Mobile Loading Speed')
280
+ self.web_analytics = st.text_input("Web Analytics - BuiltWith (GA4):", placeholder='Enter Web Analytics')
281
  self.client_relations_management_system = st.text_input("Client Relations Management System - BuiltWith:", placeholder='Enter Client Relations Management System')
282
  self.lead_generation_mechanism = st.text_input("Lead Generation Mechanism - Business Context (Lead Generation & Lead Nurturing):", placeholder='Enter Lead Generation Mechanism')
283
 
helper/telemetry.py CHANGED
@@ -19,7 +19,7 @@ def collect_telemetry(data):
19
  return
20
 
21
  try:
22
- '''
23
  # Get the current UTC time
24
  utc_now = datetime.now(timezone.utc)
25
 
@@ -29,7 +29,7 @@ def collect_telemetry(data):
29
  # Convert the UTC time to GMT+8
30
  timestamp = utc_now.astimezone(gmt_plus_8).isoformat()
31
  data['timestamp'] = timestamp
32
- '''
33
  client = MongoClient(mongodb_uri)
34
  db = client.get_database() # Use the default database specified in the URI
35
  collection = db["df_data"] # Replace "telemetry" with your desired collection name
 
19
  return
20
 
21
  try:
22
+
23
  # Get the current UTC time
24
  utc_now = datetime.now(timezone.utc)
25
 
 
29
  # Convert the UTC time to GMT+8
30
  timestamp = utc_now.astimezone(gmt_plus_8).isoformat()
31
  data['timestamp'] = timestamp
32
+
33
  client = MongoClient(mongodb_uri)
34
  db = client.get_database() # Use the default database specified in the URI
35
  collection = db["df_data"] # Replace "telemetry" with your desired collection name
pages/analyzing_page.py CHANGED
@@ -6,18 +6,15 @@ from classes.response_off import SeoOffPageAnalyst
6
  from classes.response_on_page import SeoOn
7
  from classes.response_website_and_tools import WebsiteAndTools
8
  from classes.response_seo import Seo
 
9
 
10
  def run_analysis():
11
- # Retrieve uploaded files from session state
12
- off_page_file = st.session_state.get('off_page_file_uploaded')
13
- gt_file = st.session_state.get('GT_file_uploaded')
14
- website_and_tools = st.session_state.get('website_and_tools')
15
- seo = st.session_state.get('seo')
16
  # Placeholders for status updates
17
  off_page_status = st.empty()
18
  on_page_status = st.empty()
19
  website_and_tools_status = st.empty()
20
  seo_status = st.empty()
 
21
  # Function to run SEO Off Page Analysis
22
  def run_off_page_analysis():
23
  try:
@@ -59,29 +56,52 @@ def run_analysis():
59
  seo_status.error(f"SEO Analysis failed: {e}")
60
  return None
61
 
 
 
 
 
 
 
 
 
 
 
62
  # Create threads for concurrent execution
63
  off_page_thread = threading.Thread(target=run_off_page_analysis)
64
  on_page_thread = threading.Thread(target=run_on_page_analysis)
65
  website_and_tools_thread = threading.Thread(target=run_website_and_tools_analysis)
66
  seo_thread = threading.Thread(target=run_seo_analysis)
 
67
 
68
  # Attach Streamlit context to threads
69
  add_script_run_ctx(off_page_thread)
70
  add_script_run_ctx(on_page_thread)
71
  add_script_run_ctx(website_and_tools_thread)
72
  add_script_run_ctx(seo_thread)
 
73
 
74
  # Start threads
75
  off_page_thread.start()
76
  on_page_thread.start()
77
  website_and_tools_thread.start()
78
  seo_thread.start()
 
79
 
80
  # Wait for threads to complete
81
  off_page_thread.join()
82
  on_page_thread.join()
83
  website_and_tools_thread.join()
84
  seo_thread.join()
 
 
 
 
 
 
 
 
85
 
86
  # Execute the analysis
 
 
87
  run_analysis()
 
6
  from classes.response_on_page import SeoOn
7
  from classes.response_website_and_tools import WebsiteAndTools
8
  from classes.response_seo import Seo
9
+ from classes.response_social_media import SocialMedia
10
 
11
  def run_analysis():
 
 
 
 
 
12
  # Placeholders for status updates
13
  off_page_status = st.empty()
14
  on_page_status = st.empty()
15
  website_and_tools_status = st.empty()
16
  seo_status = st.empty()
17
+ social_media_status = st.empty()
18
  # Function to run SEO Off Page Analysis
19
  def run_off_page_analysis():
20
  try:
 
56
  seo_status.error(f"SEO Analysis failed: {e}")
57
  return None
58
 
59
+ def run_social_media_analysis():
60
+ try:
61
+ social_media_status.info("Starting Social Media Analysis...")
62
+ result = SocialMedia(os.getenv('MODEL_Social_Media_Analyst'))
63
+ social_media_status.success("Social Media Analysis completed successfully.")
64
+ return result
65
+ except Exception as e:
66
+ social_media_status.error(f"Social Media Analysis failed: {e}")
67
+ return None
68
+
69
  # Create threads for concurrent execution
70
  off_page_thread = threading.Thread(target=run_off_page_analysis)
71
  on_page_thread = threading.Thread(target=run_on_page_analysis)
72
  website_and_tools_thread = threading.Thread(target=run_website_and_tools_analysis)
73
  seo_thread = threading.Thread(target=run_seo_analysis)
74
+ social_media_thread = threading.Thread(target=run_social_media_analysis)
75
 
76
  # Attach Streamlit context to threads
77
  add_script_run_ctx(off_page_thread)
78
  add_script_run_ctx(on_page_thread)
79
  add_script_run_ctx(website_and_tools_thread)
80
  add_script_run_ctx(seo_thread)
81
+ add_script_run_ctx(social_media_thread)
82
 
83
  # Start threads
84
  off_page_thread.start()
85
  on_page_thread.start()
86
  website_and_tools_thread.start()
87
  seo_thread.start()
88
+ social_media_thread.start()
89
 
90
  # Wait for threads to complete
91
  off_page_thread.join()
92
  on_page_thread.join()
93
  website_and_tools_thread.join()
94
  seo_thread.join()
95
+ st.success("🎉 All analyses completed!") # Final success message
96
+ # --- Display Button After Completion ---
97
+ if st.button("View Results"):
98
+ st.switch_page("pages/output.py")
99
+
100
+
101
+
102
+
103
 
104
  # Execute the analysis
105
+ if st.button("Back"):
106
+ st.switch_page("pages/home.py")
107
  run_analysis()
pages/output.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import streamlit as st
3
+
4
+ import time
5
+
6
+ def display_outputs():
7
+ client_name = "RMX Creatives"
8
+ overview = f"""**{client_name}** is a financial services company based in Auckland, New Zealand, specializing in providing quick and flexible loan solutions for businesses and individuals. Represented by Paul Stone, LoansOne has enlisted ShoreMarketing to perform a deep dive into their digital footprint to have a view of the holistic status of their digital properties and determine how each property can play part in implementing a stronger digital marketing plan.\n
9
+ The Digital Marketing Footprint consists of deep-dive research by ShoreMarketing specialists to help the business leaders of LoansOne understand the effectiveness of their existing digital initiatives with the view of giving them an insight to developing a strategy and effectively allocating business resources to digital properties that will give them the best results.\n
10
+ This document represents the results of our audit of LoansOne’s digital marketing and management practices. Our audit covered reviews of key digital areas: Website and Tools, PPC/SEM, SEO, Social Media, and Market Places."""
11
+
12
+ # (off_page_thread)
13
+ # (on_page_thread)
14
+ # (website_and_tools_thread)
15
+ # (seo_thread)
16
+ # (social_media_thread)
17
+ st.markdown("## Digital Marketing Audit")
18
+ st.markdown(f"### {client_name}")
19
+
20
+ st.markdown("## DIGITAL FOOTPRINT OVERVIEW")
21
+ st.markdown(f"{overview}")
22
+
23
+ st.markdown("## WEBSITE AND TOOLS")
24
+ st.markdown(f"""In today’s digital age, scaling a business is simply impossible without a website. Websites primarily serve as the center for all online conversions, which makes it equally important to guarantee that all pages are optimised to educate all traffic about the brand and ultimately to usher them into conversion. \n
25
+ In line with this, we have looked into the technology used by **{client_name}** as well as the different digital channels currently in place to see how they are structured and how they are performing.""")
26
+
27
+
28
+ if st.button("Back to Dashboard"):
29
+ st.switch_page("pages/home.py")
30
+ display_outputs()