Files changed (44) hide show
  1. classes/Linkedin.py +1 -1
  2. classes/Off_Page.py +30 -72
  3. classes/On_Page.py +1 -3
  4. classes/On_Page_Crawl.py +1 -1
  5. classes/Seo.py +52 -16
  6. classes/Seo_Backlinks.py +1 -1
  7. classes/Social_Media_FB.py +1 -1
  8. classes/Social_Media_IG.py +1 -1
  9. classes/Tiktok.py +1 -1
  10. classes/Twitter.py +1 -1
  11. classes/Youtube.py +1 -1
  12. classes/amazon.py +15 -59
  13. classes/client_summary.py +7 -7
  14. classes/content.py +9 -98
  15. classes/ebay.py +17 -60
  16. classes/lld_pm_ln.py +1 -1
  17. classes/pull_through_offers.py +1 -1
  18. classes/response_connection_analyst.py +0 -87
  19. classes/response_content.py +1 -1
  20. classes/response_content_process_and_assets_analyst.py +0 -100
  21. classes/response_conversion_analyst.py +0 -118
  22. classes/response_desired_outcome.py +0 -88
  23. classes/response_df_overview.py +0 -95
  24. classes/response_executive_summary.py +3 -1
  25. classes/response_lld_pm_ln.py +1 -1
  26. classes/response_marketplace.py +6 -29
  27. classes/response_off.py +8 -6
  28. classes/response_on_page.py +1 -1
  29. classes/response_pull_through_offers.py +1 -1
  30. classes/response_sem_ppc.py +1 -1
  31. classes/response_seo.py +120 -17
  32. classes/response_snapshot.py +0 -101
  33. classes/response_social_media.py +49 -23
  34. classes/response_target_market.py +1 -1
  35. classes/response_website_and_tools.py +1 -1
  36. classes/response_website_audience_acquisition.py +0 -91
  37. classes/sem_ppc.py +1 -1
  38. classes/website_and_tools.py +1 -1
  39. helper/data_field.py +4 -36
  40. helper/telemetry.py +0 -20
  41. helper/upload_File.py +0 -42
  42. pages/analyzing_page.py +117 -279
  43. pages/home.py +36 -43
  44. pages/output.py +66 -162
classes/Linkedin.py CHANGED
@@ -138,7 +138,7 @@ class Linkedin:
138
  if (self.linkedin_f or (self.linkedin_metrics and self.linkedin_metrics.name) or (self.linkedin_post and self.linkedin_post.name)) and session == "clicked":
139
  try:
140
  combined_text = ""
141
- with st.spinner('Uploading Linkedin Files...', show_time=True):
142
  st.write('')
143
  # INITIALIZING SESSIONS
144
  #combined_text += f"Client Summary: {st.session_state.nature}\n"
 
138
  if (self.linkedin_f or (self.linkedin_metrics and self.linkedin_metrics.name) or (self.linkedin_post and self.linkedin_post.name)) and session == "clicked":
139
  try:
140
  combined_text = ""
141
+ with st.spinner('Linkedin...', show_time=True):
142
  st.write('')
143
  # INITIALIZING SESSIONS
144
  #combined_text += f"Client Summary: {st.session_state.nature}\n"
classes/Off_Page.py CHANGED
@@ -3,12 +3,14 @@ from urllib.parse import urlparse
3
  import streamlit as st
4
  import requests
5
  from dotenv import load_dotenv
 
6
  import time
7
  from helper.telemetry import collect_telemetry
8
  from helper.upload_File import uploadFile
9
  from helper.button_behaviour import hide_button, unhide_button
10
  from helper.initialize_analyze_session import initialize_analyze_session
11
  import pandas as pd
 
12
  import json
13
 
14
  class SeoOffPageAnalyst:
@@ -30,10 +32,6 @@ class SeoOffPageAnalyst:
30
  #st.header(self.analyst_name)
31
  if 'off_page_file_uploaded' not in st.session_state:
32
  st.session_state['off_page_file_uploaded'] = ''
33
- if 'website_audience' not in st.session_state:
34
- st.session_state['website_audience'] = ''
35
- if 'uploaded_files' not in st.session_state:
36
- st.session_state['uploaded_files'] = ''
37
 
38
  def request_model(self, payload_txt, headers):
39
  response = requests.post(self.model_url, json=payload_txt, headers=headers)
@@ -52,95 +50,55 @@ class SeoOffPageAnalyst:
52
  session = st.session_state['analyze']
53
  if self.uploaded_files and session == 'clicked':
54
  combined_text = ""
55
- website_audience = ""
56
- with st.spinner('Uploading Off Page...', show_time=True):
57
  st.write('')
 
 
 
 
 
58
  try:
59
- for file_info in st.session_state['uploaded_files'].values():
60
- '''
61
- if file_info['type'] == 'pdf':
62
- combined_text += file_info['content'] + "\n"
63
- '''
64
- try:
65
- if file_info['type'] == 'csv':
66
- # Load CSV
67
- df = pd.read_csv(StringIO(file_info['content'].to_csv(index=True)))
 
68
 
69
- # Count total rows
70
- num_rows = len(df)
71
-
72
- # Extract unique domains from 'Source url'
73
- df['Source Domain'] = df['Source url'].apply(lambda x: urlparse(x).netloc)
74
- unique_domains = df['Source Domain'].nunique()
75
-
76
- combined_text += f"Total Backlinks Count: {num_rows}\n"
77
- combined_text += f"Referring Domain: {unique_domains}"
78
- #st.info("Backlinks - SEMRush Uploaded Successfuly", icon="ℹ️")
79
- except KeyError:
80
- st.info("Incorrect CSV format. Please upload a valid CSV file.")
81
- pass
82
- except UnboundLocalError:
83
- pass
84
- except AttributeError:
85
- pass
86
  except KeyError:
87
- pass
88
- '''
89
- try:
90
- # Check if upload_website_audience exists in session state and is a dictionary
91
- if 'upload_website_audience' in st.session_state and isinstance(st.session_state['upload_website_audience'], dict):
92
- for file_name, file_info in st.session_state['upload_website_audience'].items():
93
- try:
94
- if file_info['type'] == 'csv':
95
- # Since file_info['content'] is already a DataFrame (from your earlier code)
96
- # No need to convert back from string to DataFrame
97
- df = file_info['content']
98
-
99
- # Process your DataFrame here
100
- # Instead of reading from StringIO, just use the DataFrame directly
101
- website_audience += f"Website Audience Acquisition {df}\n"
102
-
103
- #st.info("Website Audience Acquisition Uploaded Successfully", icon="ℹ️")
104
- except KeyError:
105
- pass
106
- #st.info(f"Incorrect format for {file_name}. Please upload a valid CSV file.")
107
- except Exception as e:
108
- st.error(f"Error processing data: {str(e)}")
109
-
110
- '''
111
  # OUTPUT FOR SEO ANALYST
112
-
 
 
 
 
113
  #result = self.request_model(payload_txt, headers)
114
 
115
  #end_time = time.time()
116
  #time_lapsed = end_time - start_time
117
 
118
- debug_info = {'data_field' : 'Backlinks', 'result': combined_text}
119
- debug_info_website_audience = {'data_field' : 'Website Audience Acquisition', 'result': website_audience}
120
  #debug_info = {'url_uuid': self.model_url.split("-")[-1],'time_lapsed' : time_lapsed, 'files': [*st.session_state['uploaded_files']],'payload': payload_txt, 'result': result}
 
121
 
122
- if self.uploaded_files:
123
- st.session_state['off_page_file_uploaded'] = 'uploaded'
124
- collect_telemetry(debug_info)
125
- #if self.website_audience:
126
- # st.session_state['website_audience'] = 'uploaded'
127
- # collect_telemetry(debug_info_website_audience)
128
-
129
 
130
  #with st.expander("Debug information", icon="⚙"):
131
  # st.write(debug_info)
132
  st.session_state['analyzing'] = False
133
-
134
  def row1(self):
135
  #st.write(self.data_src)
136
  self.uploaded_files = st.file_uploader('Backlinks - SEMRush', type='csv', accept_multiple_files=True, key="seo_off")
137
- #self.website_audience = st.file_uploader('Website Audience Acquisition - GA4', type='csv', accept_multiple_files=True, key="website_audiences")
138
- #self.website_audience = st.text_input("Website Audience Acquisition:", placeholder='Enter Website Audience Acquisition')
139
-
140
  if self.uploaded_files:
141
  upload.multiple_upload_file(self.uploaded_files)
142
- #if self.website_audience:
143
- # upload.upload_website_audience(self.website_audience)
144
 
145
  #st.write("") # FOR THE HIDE BUTTON
146
  #st.write("") # FOR THE HIDE BUTTON
 
3
  import streamlit as st
4
  import requests
5
  from dotenv import load_dotenv
6
+ import os
7
  import time
8
  from helper.telemetry import collect_telemetry
9
  from helper.upload_File import uploadFile
10
  from helper.button_behaviour import hide_button, unhide_button
11
  from helper.initialize_analyze_session import initialize_analyze_session
12
  import pandas as pd
13
+ import asyncio
14
  import json
15
 
16
  class SeoOffPageAnalyst:
 
32
  #st.header(self.analyst_name)
33
  if 'off_page_file_uploaded' not in st.session_state:
34
  st.session_state['off_page_file_uploaded'] = ''
 
 
 
 
35
 
36
  def request_model(self, payload_txt, headers):
37
  response = requests.post(self.model_url, json=payload_txt, headers=headers)
 
50
  session = st.session_state['analyze']
51
  if self.uploaded_files and session == 'clicked':
52
  combined_text = ""
53
+ with st.spinner('SEO Off Page Analyst...', show_time=True):
 
54
  st.write('')
55
+ for file_info in st.session_state['uploaded_files'].values():
56
+ '''
57
+ if file_info['type'] == 'pdf':
58
+ combined_text += file_info['content'] + "\n"
59
+ '''
60
  try:
61
+ if file_info['type'] == 'csv':
62
+ # Load CSV
63
+ df = pd.read_csv(StringIO(file_info['content'].to_csv(index=True)))
64
+
65
+ # Count total rows
66
+ num_rows = len(df)
67
+
68
+ # Extract unique domains from 'Source url'
69
+ df['Source Domain'] = df['Source url'].apply(lambda x: urlparse(x).netloc)
70
+ unique_domains = df['Source Domain'].nunique()
71
 
72
+ combined_text += f"Total Backlinks Count: {num_rows}\n"
73
+ combined_text += f"Referring Domain: {unique_domains}"
74
+ st.info("Backlinks - SEMRush Uploaded Successfuly", icon="ℹ️")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
75
  except KeyError:
76
+ st.info("Incorrect CSV format. Please upload a valid CSV file.")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
77
  # OUTPUT FOR SEO ANALYST
78
+ payload_txt = {"question": combined_text}
79
+ headers = {
80
+ "Content-Type": "application/json",
81
+ "x-api-key": f"{os.getenv('x-api-key')}"
82
+ }
83
  #result = self.request_model(payload_txt, headers)
84
 
85
  #end_time = time.time()
86
  #time_lapsed = end_time - start_time
87
 
88
+ debug_info = {'data_field' : 'Backlinks', 'result': payload_txt}
 
89
  #debug_info = {'url_uuid': self.model_url.split("-")[-1],'time_lapsed' : time_lapsed, 'files': [*st.session_state['uploaded_files']],'payload': payload_txt, 'result': result}
90
+ collect_telemetry(debug_info)
91
 
92
+ st.session_state["off_page_file_uploaded"] = 'uploaded'
 
 
 
 
 
 
93
 
94
  #with st.expander("Debug information", icon="⚙"):
95
  # st.write(debug_info)
96
  st.session_state['analyzing'] = False
 
97
  def row1(self):
98
  #st.write(self.data_src)
99
  self.uploaded_files = st.file_uploader('Backlinks - SEMRush', type='csv', accept_multiple_files=True, key="seo_off")
 
 
 
100
  if self.uploaded_files:
101
  upload.multiple_upload_file(self.uploaded_files)
 
 
102
 
103
  #st.write("") # FOR THE HIDE BUTTON
104
  #st.write("") # FOR THE HIDE BUTTON
classes/On_Page.py CHANGED
@@ -84,7 +84,7 @@ class SeoOn:
84
  if (self.first_meaningful_paint or self.uploaded_files) and session == 'clicked':
85
  first_meaningful_paint = ""
86
  crawl_file = ""
87
- with st.spinner('Uploading On Page...', show_time=True):
88
  st.write('')
89
  '''
90
  try:
@@ -119,8 +119,6 @@ class SeoOn:
119
  pass
120
  except KeyError:
121
  pass
122
- except AttributeError:
123
- pass
124
  try:
125
  first_meaningful_paint += f"\nFirst Meaningful Paint: {self.first_meaningful_paint}"
126
  except KeyError:
 
84
  if (self.first_meaningful_paint or self.uploaded_files) and session == 'clicked':
85
  first_meaningful_paint = ""
86
  crawl_file = ""
87
+ with st.spinner('SEO On Page Analyst...', show_time=True):
88
  st.write('')
89
  '''
90
  try:
 
119
  pass
120
  except KeyError:
121
  pass
 
 
122
  try:
123
  first_meaningful_paint += f"\nFirst Meaningful Paint: {self.first_meaningful_paint}"
124
  except KeyError:
classes/On_Page_Crawl.py CHANGED
@@ -78,7 +78,7 @@ class SeoOnCrawl:
78
  session = st.session_state.analyze
79
  if self.uploaded_files and session == 'clicked':
80
  combined_text = ""
81
- with st.spinner('Uploading Crawl File...', show_time=True):
82
  st.write('')
83
  try:
84
  for file_info in st.session_state['uploaded_files'].values():
 
78
  session = st.session_state.analyze
79
  if self.uploaded_files and session == 'clicked':
80
  combined_text = ""
81
+ with st.spinner('SEO On Page Analyst...', show_time=True):
82
  st.write('')
83
  try:
84
  for file_info in st.session_state['uploaded_files'].values():
classes/Seo.py CHANGED
@@ -50,6 +50,43 @@ class Seo:
50
  st.session_state['df_traffic'] = ''
51
  if 'df_seo' not in st.session_state:
52
  st.session_state['df_seo'] = ''
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
53
 
54
  def detect_encoding(self, uploaded_file):
55
  result = chardet.detect(uploaded_file.read(100000))
@@ -121,6 +158,8 @@ class Seo:
121
  pass
122
 
123
  def process (self):
 
 
124
  session = st.session_state.analyze
125
  if ((self.uploaded_file or self.others or self.uploaded_file_seo) or (self.page_index or self.bounce_rate)) and session == 'clicked':
126
  seo_keywords = ""
@@ -128,7 +167,7 @@ class Seo:
128
  traffic_aqcuisition = ""
129
  pages_index = ""
130
  bounce_rate = ""
131
- with st.spinner('Uploading Seo Files...', show_time=True):
132
  st.write('')
133
 
134
  # INITIALIZING SESSIONS
@@ -158,13 +197,8 @@ class Seo:
158
 
159
  except AttributeError:
160
  pass
161
- except KeyError as e:
162
- # Check if 'df_traffic' is the missing key (no file uploaded)
163
- if self.uploaded_file_seo:
164
- pass
165
- else:
166
- # This would be triggered if df_traffic exists but the other keys are missing
167
- st.info("Incorrect Traffic Channels SEMRush format. Please upload a valid SEMRush file.")
168
 
169
  try:
170
  df_seo = st.session_state['df_seo']
@@ -192,15 +226,17 @@ class Seo:
192
  traffic_aqcuisition += f"Traffics: {traffics}"
193
  traffic_aqcuisition += f"\nPaid Traffic: {ga4_paid_social}\nOrganic Traffic: {ga4_organic_traffic}\nDirect Traffic: {ga4_direct_traffic}\nReferral Traffic: {ga4_referral_traffic}"
194
  except KeyError:
195
-
196
- if self.others:
197
- pass
198
- else:
199
- # This would be triggered if df_traffic exists but the other keys are missing
200
- #st.info("Incorrect Traffic Acquisition GA4 format. Please upload a valid GA4 file.")
201
- pass
202
  except TypeError:
203
- st.info("Incorrect Traffic Acquisition GA4 format. Please upload a valid GA4 file.")
 
 
 
 
 
 
 
 
204
 
205
  #result = self.request_model(payload_txt_seo_keywords)
206
  #end_time = time.time()
 
50
  st.session_state['df_traffic'] = ''
51
  if 'df_seo' not in st.session_state:
52
  st.session_state['df_seo'] = ''
53
+
54
+ def request_model(self, payload_txt):
55
+ response = requests.post(self.model_url, json=payload_txt)
56
+ response.raise_for_status()
57
+ output = response.json()
58
+
59
+ categories = []
60
+ current_footprint = []
61
+ number_of_backlinks = []
62
+
63
+ for key, value in output.items():
64
+ if key == 'json':
65
+ for item in value:
66
+ categories.append(item.get('category', 'N/A').replace('_', ' ').title())
67
+ current_footprint.append(item.get('current_footprint', 'N/A'))
68
+ number_of_backlinks.append(item.get('best_of_breed_solution', 'N/A'))
69
+
70
+ output = ""
71
+ for i in range(len(categories)):
72
+ output += f"\n\n---\n **Category:** {categories[i]}"
73
+ output += f"\n\n **Current Footprint:** {current_footprint[i]}\n\n"
74
+ output += f"**Number of Backlinks:** {number_of_backlinks[i]}"
75
+
76
+ data = {
77
+ "": [str(category) for category in categories],
78
+ "Current Footprint": [str(footprint) for footprint in current_footprint],
79
+ "Best of Breed Solutions": [str(backlink) for backlink in number_of_backlinks]
80
+ }
81
+ df_output = pd.DataFrame(data)
82
+ with st.expander("AI Analysis", expanded=True, icon="🤖"):
83
+ st.table(df_output.style.set_table_styles(
84
+ [{'selector': 'th:first-child, td:first-child', 'props': [('width', '20px')]},
85
+ {'selector': 'th, td', 'props': [('width', '150px'), ('text-align', 'center')]}]
86
+ ).set_properties(**{'text-align': 'center'}))
87
+
88
+
89
+ return output
90
 
91
  def detect_encoding(self, uploaded_file):
92
  result = chardet.detect(uploaded_file.read(100000))
 
158
  pass
159
 
160
  def process (self):
161
+ start_time = time.time()
162
+
163
  session = st.session_state.analyze
164
  if ((self.uploaded_file or self.others or self.uploaded_file_seo) or (self.page_index or self.bounce_rate)) and session == 'clicked':
165
  seo_keywords = ""
 
167
  traffic_aqcuisition = ""
168
  pages_index = ""
169
  bounce_rate = ""
170
+ with st.spinner('Seo Analyst...', show_time=True):
171
  st.write('')
172
 
173
  # INITIALIZING SESSIONS
 
197
 
198
  except AttributeError:
199
  pass
200
+ except KeyError:
201
+ st.info("Incorrect SEMRush format. Please upload a valid SEMRush file.")
 
 
 
 
 
202
 
203
  try:
204
  df_seo = st.session_state['df_seo']
 
226
  traffic_aqcuisition += f"Traffics: {traffics}"
227
  traffic_aqcuisition += f"\nPaid Traffic: {ga4_paid_social}\nOrganic Traffic: {ga4_organic_traffic}\nDirect Traffic: {ga4_direct_traffic}\nReferral Traffic: {ga4_referral_traffic}"
228
  except KeyError:
229
+ pass
 
 
 
 
 
 
230
  except TypeError:
231
+ st.info("Incorrect GA4 format. Please upload a valid GA4 file.")
232
+
233
+ # OUTPUT FOR SEO ANALYST
234
+ payload_txt_seo_keywords = {"question": seo_keywords}
235
+ payload_txt_traffic_channels = {"question": traffic_channels}
236
+ payload_txt_traffic_aqcuisition = {"question": traffic_aqcuisition}
237
+ payload_txt_pages_index = {"question": pages_index}
238
+ payload_txt_bounce_rate = {"question": bounce_rate}
239
+
240
 
241
  #result = self.request_model(payload_txt_seo_keywords)
242
  #end_time = time.time()
classes/Seo_Backlinks.py CHANGED
@@ -214,7 +214,7 @@ class SeoBacklinks:
214
  hide_button()
215
  if self.uploaded_files:
216
  combined_text = ""
217
- with st.spinner('Uploading Backlinks...', show_time=True):
218
  st.write('')
219
  '''
220
  # INITIALIZING SESSIONS
 
214
  hide_button()
215
  if self.uploaded_files:
216
  combined_text = ""
217
+ with st.spinner('Analyzing...', show_time=True):
218
  st.write('')
219
  '''
220
  # INITIALIZING SESSIONS
classes/Social_Media_FB.py CHANGED
@@ -147,7 +147,7 @@ class Facebook:
147
  if ((self.fb_organic_post and self.fb_organic_post.name) or (self.fb_ads_campaign and self.fb_ads_campaign.name) or (self.facebooks) or (self.facebook_rr)) and session == 'clicked':
148
  try:
149
  combined_text = ""
150
- with st.spinner('Uploading FB Files...', show_time=True):
151
  st.write('')
152
  # INITIALIZING SESSIONS
153
  #combined_text += f"Client Summary: {st.session_state.nature}\n"
 
147
  if ((self.fb_organic_post and self.fb_organic_post.name) or (self.fb_ads_campaign and self.fb_ads_campaign.name) or (self.facebooks) or (self.facebook_rr)) and session == 'clicked':
148
  try:
149
  combined_text = ""
150
+ with st.spinner('Social Media Analyst...', show_time=True):
151
  st.write('')
152
  # INITIALIZING SESSIONS
153
  #combined_text += f"Client Summary: {st.session_state.nature}\n"
classes/Social_Media_IG.py CHANGED
@@ -110,7 +110,7 @@ class Instagram:
110
  if (self.instagram or self.instagram_er or self.instagram_pf) and session == 'clicked':
111
  try:
112
  combined_text = ""
113
- with st.spinner('Uploading IG Files...', show_time=True):
114
  st.write('')
115
  try:
116
  combined_text += f"\nInstagram Followers: {self.instagram}"
 
110
  if (self.instagram or self.instagram_er or self.instagram_pf) and session == 'clicked':
111
  try:
112
  combined_text = ""
113
+ with st.spinner('Instagram...', show_time=True):
114
  st.write('')
115
  try:
116
  combined_text += f"\nInstagram Followers: {self.instagram}"
classes/Tiktok.py CHANGED
@@ -109,7 +109,7 @@ class Tiktok:
109
  if (self.tiktok_f or self.tiktok_er or self.tiktok_pf) and session == 'clicked':
110
  try:
111
  combined_text = ""
112
- with st.spinner('Uploading Tiktok Files...', show_time=True):
113
  st.write('')
114
  # INITIALIZING SESSIONS
115
  #combined_text += f"Client Summary: {st.session_state.nature}\n"
 
109
  if (self.tiktok_f or self.tiktok_er or self.tiktok_pf) and session == 'clicked':
110
  try:
111
  combined_text = ""
112
+ with st.spinner('Tiktok...', show_time=True):
113
  st.write('')
114
  # INITIALIZING SESSIONS
115
  #combined_text += f"Client Summary: {st.session_state.nature}\n"
classes/Twitter.py CHANGED
@@ -121,7 +121,7 @@ class Twitter:
121
  if (self.twitter or self.twitter_er or self.twitter_pf) and session == 'clicked':
122
  try:
123
  combined_text = ""
124
- with st.spinner('Uploading Twitter Files...', show_time=True):
125
  st.write('')
126
  # INITIALIZING SESSIONS
127
  combined_text += f"Client Summary: {st.session_state.nature}\n"
 
121
  if (self.twitter or self.twitter_er or self.twitter_pf) and session == 'clicked':
122
  try:
123
  combined_text = ""
124
+ with st.spinner('Twitter...', show_time=True):
125
  st.write('')
126
  # INITIALIZING SESSIONS
127
  combined_text += f"Client Summary: {st.session_state.nature}\n"
classes/Youtube.py CHANGED
@@ -109,7 +109,7 @@ class YouTube:
109
  if ((self.youtube or self.youtube_er or self.youtube_pf) and session) == 'clicked':
110
  try:
111
  combined_text = ""
112
- with st.spinner('Uploading YT Files...', show_time=True):
113
  st.write('')
114
  # INITIALIZING SESSIONS
115
  #combined_text += f"Client Summary: {st.session_state.nature}\n"
 
109
  if ((self.youtube or self.youtube_er or self.youtube_pf) and session) == 'clicked':
110
  try:
111
  combined_text = ""
112
+ with st.spinner('Youtube...', show_time=True):
113
  st.write('')
114
  # INITIALIZING SESSIONS
115
  #combined_text += f"Client Summary: {st.session_state.nature}\n"
classes/amazon.py CHANGED
@@ -31,7 +31,7 @@ class Amazon:
31
  st.switch_page("./pages/home.py")
32
  except Exception:
33
  pass
34
-
35
  if 'product_title_amazon' not in st.session_state:
36
  st.session_state['product_title_amazon'] = ''
37
  if 'images_amazon' not in st.session_state:
@@ -40,25 +40,20 @@ class Amazon:
40
  st.session_state['bullet_points_amazon'] = ''
41
  if 'product_description_amazon' not in st.session_state:
42
  st.session_state['product_description_amazon'] = ''
43
- '''
44
- if 'amazon_marketplace_questionnaires' not in st.session_state:
45
- st.session_state['amazon_marketplace_questionnaires'] = ''
46
 
47
  def process(self):
48
  session = st.session_state.analyze
49
- if (self.amazon_marketplace_questionnaires) and session == 'clicked':
50
  try:
51
- #product_title_amazon = ""
52
- #images_amazon = ""
53
- #bullet_points_amazon = ""
54
- #product_description_amazon = ""
55
- amazon_marketplace_questionnaires = ""
56
 
57
- with st.spinner('Uploading Amazon Files...', show_time=True):
58
  st.write('')
59
  # INITIALIZING SESSIONS
60
  #combined_text += f"Client Summary: {st.session_state.nature}\n"
61
- '''
62
  try:
63
  product_title_amazon += f"\nProduct Title: {self.product_title_amazon}"
64
  except KeyError:
@@ -75,11 +70,6 @@ class Amazon:
75
  product_description_amazon += f"\nProduct Description: {self.product_description_amazon}"
76
  except KeyError:
77
  pass
78
- '''
79
- try:
80
- amazon_marketplace_questionnaires += f"Marketplace Questionnaires - Amazon: {self.amazon_marketplace_questionnaires}"
81
- except KeyError:
82
- pass
83
 
84
  # OUTPUT FOR SEO ANALYST
85
  #payload_txt = {"question": combined_text}
@@ -87,13 +77,11 @@ class Amazon:
87
 
88
  #end_time = time.time()
89
  #time_lapsed = end_time - start_time
90
- '''
91
  debug_info_product_title_amazon = {'data_field' : 'Product Title - Amazon', 'result': self.product_title_amazon}
92
  debug_info_images_amazon = {'data_field' : 'Images - Amazon', 'result': self.images_amazon}
93
  debug_info_bullet_points_amazon = {'data_field' : 'Bullet Points - Amazon', 'result': self.bullet_points_amazon}
94
  debug_product_description_amazon = {'data_field' : 'Product Description - Amazon', 'result': self.product_description_amazon}
95
- '''
96
- debug_amazon_marketplace_questionnaires = {'data_field' : 'Marketplace Questionnaires - Amazon', 'result': self.amazon_marketplace_questionnaires}
97
 
98
  '''
99
  debug_info = {
@@ -103,6 +91,7 @@ class Amazon:
103
  'payload': payload_txt,
104
  'result': result,
105
  }
 
106
  if self.product_title_amazon:
107
  st.session_state['product_title_amazon'] = 'uploaded'
108
  collect_telemetry(debug_info_product_title_amazon)
@@ -115,52 +104,19 @@ class Amazon:
115
  if self.product_description_amazon:
116
  st.session_state['product_description_amazon'] = 'uploaded'
117
  collect_telemetry(debug_product_description_amazon)
118
- '''
119
- if self.amazon_marketplace_questionnaires:
120
- if self.amazon_marketplace_questionnaires != self.template:
121
- st.session_state['amazon_marketplace_questionnaires'] = 'uploaded'
122
- collect_telemetry(debug_amazon_marketplace_questionnaires)
123
- else:
124
- pass
125
 
 
126
  st.session_state['analyzing'] = False
127
  except AttributeError:
128
  st.info("Please upload CSV or PDF files first.")
129
  hide_button()
130
 
131
  def row1(self):
132
- #self.product_title_amazon = st.text_input("Product Title - Amazon:", placeholder='Enter Product Title')
133
- #self.images_amazon = st.text_input("Images - Amazon:", placeholder='Enter Images')
134
- #self.bullet_points_amazon = st.text_input("Bullet Points - Amazon:", placeholder='Enter Bullet Points')
135
- #self.product_description_amazon = st.text_input("Product Description - Amazon:", placeholder='Enter Product Description')
136
- self.template = ("Product Title:\n"
137
- "a. Does the product title include relevant keywords (e.g., Product Brand/Description + Product Line + Material or Key Ingredient + Color + Size + Quantity)?\n"
138
- "b. Is the title within Amazon’s recommended character limit (≤200 characters)?\n"
139
- "c. Other Remarks:\n\n"
140
- "Images:\n"
141
- "a. Is the main image on a pure white background?\n"
142
- "b. Are there any logos, watermarks, or text on any images?\n"
143
- "c. Do the images showcase the product from multiple angles?\n"
144
- "d. Are the images high-resolution and zoomable?\n"
145
- "e. Other Remarks:\n\n"
146
- "Bullet Points:\n"
147
- "a. Do the bullets exceed 250 characters?\n"
148
- "b. Are the bullet points clear and concise?\n"
149
- "c. Do they highlight key features, benefits, and unique selling points?\n"
150
- "d. Are keywords naturally included in the bullet points?\n"
151
- "e. Other Remarks:\n\n"
152
- "Product Description:\n"
153
- "a. Is the product description complete and well-formatted?\n"
154
- "b. Is it within the 2000-character limit?\n"
155
- "c. Does it include important product specifications (size, material, compatibility)?\n"
156
- "d. Are there any customer reviews or ratings?\n"
157
- "e. If yes, is the average rating above 4 stars?\n"
158
- "f. Other Remarks:")
159
- self.amazon_marketplace_questionnaires = st.text_area(
160
- "Marketplace Questionnaires - Amazon:",
161
- value=self.template,
162
- height=600
163
- )
164
  self.process()
165
 
166
  if __name__ == "__main__":
 
31
  st.switch_page("./pages/home.py")
32
  except Exception:
33
  pass
34
+ '''
35
  if 'product_title_amazon' not in st.session_state:
36
  st.session_state['product_title_amazon'] = ''
37
  if 'images_amazon' not in st.session_state:
 
40
  st.session_state['bullet_points_amazon'] = ''
41
  if 'product_description_amazon' not in st.session_state:
42
  st.session_state['product_description_amazon'] = ''
 
 
 
43
 
44
  def process(self):
45
  session = st.session_state.analyze
46
+ if (self.product_title_amazon or self.images_amazon or self.bullet_points_amazon or self.product_description_amazon) and session == 'clicked':
47
  try:
48
+ product_title_amazon = ""
49
+ images_amazon = ""
50
+ bullet_points_amazon = ""
51
+ product_description_amazon = ""
 
52
 
53
+ with st.spinner('Aamzon...', show_time=True):
54
  st.write('')
55
  # INITIALIZING SESSIONS
56
  #combined_text += f"Client Summary: {st.session_state.nature}\n"
 
57
  try:
58
  product_title_amazon += f"\nProduct Title: {self.product_title_amazon}"
59
  except KeyError:
 
70
  product_description_amazon += f"\nProduct Description: {self.product_description_amazon}"
71
  except KeyError:
72
  pass
 
 
 
 
 
73
 
74
  # OUTPUT FOR SEO ANALYST
75
  #payload_txt = {"question": combined_text}
 
77
 
78
  #end_time = time.time()
79
  #time_lapsed = end_time - start_time
80
+
81
  debug_info_product_title_amazon = {'data_field' : 'Product Title - Amazon', 'result': self.product_title_amazon}
82
  debug_info_images_amazon = {'data_field' : 'Images - Amazon', 'result': self.images_amazon}
83
  debug_info_bullet_points_amazon = {'data_field' : 'Bullet Points - Amazon', 'result': self.bullet_points_amazon}
84
  debug_product_description_amazon = {'data_field' : 'Product Description - Amazon', 'result': self.product_description_amazon}
 
 
85
 
86
  '''
87
  debug_info = {
 
91
  'payload': payload_txt,
92
  'result': result,
93
  }
94
+ '''
95
  if self.product_title_amazon:
96
  st.session_state['product_title_amazon'] = 'uploaded'
97
  collect_telemetry(debug_info_product_title_amazon)
 
104
  if self.product_description_amazon:
105
  st.session_state['product_description_amazon'] = 'uploaded'
106
  collect_telemetry(debug_product_description_amazon)
 
 
 
 
 
 
 
107
 
108
+
109
  st.session_state['analyzing'] = False
110
  except AttributeError:
111
  st.info("Please upload CSV or PDF files first.")
112
  hide_button()
113
 
114
  def row1(self):
115
+ self.product_title_amazon = st.text_input("Product Title - Amazon:", placeholder='Enter Product Title')
116
+ self.images_amazon = st.text_input("Images - Amazon:", placeholder='Enter Images')
117
+ self.bullet_points_amazon = st.text_input("Bullet Points - Amazon:", placeholder='Enter Bullet Points')
118
+ self.product_description_amazon = st.text_input("Product Description - Amazon:", placeholder='Enter Product Description')
119
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
120
  self.process()
121
 
122
  if __name__ == "__main__":
classes/client_summary.py CHANGED
@@ -6,7 +6,7 @@ from helper.button_behaviour import hide_button
6
  from helper.initialize_analyze_session import initialize_analyze_session
7
 
8
 
9
- class ClientSummary:
10
  def __init__(self):
11
  self.initialize()
12
  self.row1()
@@ -18,13 +18,13 @@ class ClientSummary:
18
  st.session_state['client_summary'] = ''
19
  if 'client_name' not in st.session_state:
20
  st.session_state['client_name'] = ''
21
- if 'client_website' not in st.session_state:
22
- st.session_state['client_website'] = ''
23
  if 'target_market' not in st.session_state:
24
  st.session_state['target_market'] = ''
25
 
26
  def process (self):
27
- with st.spinner('Uploading Client Details...', show_time=True):
28
  st.write('')
29
  client_summary = ""
30
  client_name = ""
@@ -44,10 +44,10 @@ class ClientSummary:
44
  collect_telemetry(debug_client_summary)
45
  if self.name:
46
  st.session_state['client_name'] = 'uploaded'
47
- collect_telemetry(debug_client_name)
48
- if self.website:
49
- st.session_state['client_website'] = 'uploaded'
50
  collect_telemetry(debug_client_website)
 
 
 
51
 
52
  def row1(self):
53
  self.client_summary = st.text_area("Client Summary:", help="Name of business, nature of business, location, products/services")
 
6
  from helper.initialize_analyze_session import initialize_analyze_session
7
 
8
 
9
+ class CientSummary:
10
  def __init__(self):
11
  self.initialize()
12
  self.row1()
 
18
  st.session_state['client_summary'] = ''
19
  if 'client_name' not in st.session_state:
20
  st.session_state['client_name'] = ''
21
+ if 'client_webiste' not in st.session_state:
22
+ st.session_state['client_webiste'] = ''
23
  if 'target_market' not in st.session_state:
24
  st.session_state['target_market'] = ''
25
 
26
  def process (self):
27
+ with st.spinner('Seo Analyst...', show_time=True):
28
  st.write('')
29
  client_summary = ""
30
  client_name = ""
 
44
  collect_telemetry(debug_client_summary)
45
  if self.name:
46
  st.session_state['client_name'] = 'uploaded'
 
 
 
47
  collect_telemetry(debug_client_website)
48
+ if self.website:
49
+ st.session_state['client_webiste'] = 'uploaded'
50
+ collect_telemetry(debug_client_name)
51
 
52
  def row1(self):
53
  self.client_summary = st.text_area("Client Summary:", help="Name of business, nature of business, location, products/services")
classes/content.py CHANGED
@@ -35,7 +35,7 @@ class Content:
35
  session = st.session_state.analyze
36
 
37
  if (self.content_in_the_website or self.content_outside_the_website) and session == 'clicked':
38
- with st.spinner('Uploading Contents...', show_time=True):
39
  st.write('')
40
  content_in_the_website = ""
41
  content_outside_the_website = ""
@@ -52,18 +52,13 @@ class Content:
52
  debug_info_content_outside_the_website = {'data_field' : 'Content outside the Website', 'result': content_outside_the_website}
53
 
54
  if self.content_in_the_website:
55
- if self.content_in_the_website != self.template_content_in_the_website:
56
- st.session_state['content_in_the_website'] = 'uploaded'
57
- collect_telemetry(debug_info_content_in_the_website)
58
- else:
59
- pass
60
  if self.content_outside_the_website:
61
- if self.content_outside_the_website != self.template_content_outside_the_website:
62
- st.session_state['content_outside_the_website'] = 'uploaded'
63
- collect_telemetry(debug_info_content_outside_the_website)
64
- else:
65
- pass
66
-
67
  #with st.expander("Debug information", icon="⚙"):
68
  # st.write(debug_info)
69
 
@@ -75,92 +70,8 @@ class Content:
75
  pass
76
 
77
  def row1(self):
78
- self.template_content_in_the_website = ("Content and Messaging:\n"
79
- "a. Is the text easy to read and understand?\n"
80
- "b. Does it clearly explain what the business offers?\n"
81
- "c. Are the brand’s Unique Selling Propositions (USPs) clearly emphasized?\n"
82
- "d. Other Remarks:\n\n"
83
-
84
- "Call-to-Action (CTAs):\n"
85
- "a. Are CTAs prominently placed and easy to find?\n"
86
- "b. Are the CTAs strong and action-focused?\n"
87
- "c. Do they appear in the right places?\n"
88
- "d. Other Remarks:\n\n"
89
-
90
- "Images and Videos:\n"
91
- "a. Are the images and videos high quality?\n"
92
- "b. Do they clearly show the product or service?\n"
93
- "c. Are the videos easy to watch (no unnecessary clicks or extra steps)?\n"
94
- "d. Are the visuals engaging upon first glance?\n"
95
- "e. Other Remarks:\n\n"
96
-
97
- "Blog and SEO:\n"
98
- "a. Does the site have a blog section?\n"
99
- "b. Is the blog updated regularly?\n"
100
- "c. Are the articles helpful, relevant, and informative?\n"
101
- "d. Are internal links used to guide users to related or deeper content?\n"
102
- "e. Other Remarks:\n\n"
103
-
104
- "User Experience (UX) and Navigation:\n"
105
- "a. Is the site easy to navigate with clear menus and categories?\n"
106
- "b. Can users quickly find important information?\n"
107
- "c. Are any interactions creating unnecessary friction (e.g., signups to view content)?\n"
108
- "d. Other Remarks:\n\n"
109
-
110
- "Product or Services (For E-Commerce):\n"
111
- "a. Does the site clearly explain the company’s products or services?\n"
112
- "b. Are product or service details clear?\n"
113
- "c. Are there enough pictures or videos?\n"
114
- "d. Is there a sense of urgency or promotions to encourage action?\n"
115
- "e. Other Remarks:")
116
- self.template_content_outside_the_website = ("Backlinks and Referring Domains:\n"
117
- "a. Are there backlinks from relevant and authoritative websites?\n"
118
- "b. Do the referring sites belong to the same industry or niche?\n"
119
- "c. Are there spammy or low-quality backlinks (e.g., thin directories)?\n"
120
- "d. Are the backlinks helpful and align with Google’s Helpful Content guidelines?\n"
121
- "e. Are there any guest posts or articles on other websites?\n"
122
- "f. Other Remarks:\n\n"
123
-
124
- "Community Engagement (Forums and Platforms):\n"
125
- "a. Is the brand/product/company mentioned in online forums (e.g., Reddit, or Quora)?\n"
126
- "b. Are the mentions from forums and platforms generally positive?\n"
127
- "c. Are the mentions from forums and platforms generally negative?\n"
128
- "d. Is it actively participating or present in these communities?\n"
129
- "e. Is there a strategy for using personas or ambassadors to represent the client’s company/brand/product?\n"
130
- "f. Other Remarks:\n\n"
131
-
132
- "Online Reviews and Reputation Management:\n"
133
- "a. Are there recent reviews on platforms like Google, Trustpilot, or Yelp?\n"
134
- "b. Are the reviews mostly positive?\n"
135
- "c. Are the reviews mostly negative?\n"
136
- "d. Is the client responding to reviews, especially complaints or fake ones?\n"
137
- "e. Do the reviews mention recurring issues (e.g., poor support, unsolicited emails)?\n"
138
- "f. Other Remarks:\n\n"
139
-
140
- "Public Relations and Media Coverage:\n"
141
- "a. Has the client’s company been featured in news sites or magazines?\n"
142
- "b. Are the articles helpful and recent?\n"
143
- "c. Are PR opportunities being used to boost awareness?\n"
144
- "d. Other Remarks:\n\n"
145
-
146
- "Branded Blog Networks:\n"
147
- "a. Are there any off-page blog sites created by the client’s company?\n"
148
- "b. Is the content unique, helpful, and adding SEO value?\n"
149
- "c. Can the content be moved or consolidated into the main site?\n"
150
- "d. Other Remarks:\n\n"
151
-
152
- "Email Marketing & CRM Engagement:\n"
153
- "a. Is email being used to follow up with customers or leads (e.g., newsletters, cart recovery)?\n"
154
- "b. Are they sending follow-up emails?\n"
155
- "c. Are emails building relationships and promoting content or reviews?\n"
156
- "d. Other Remarks:")
157
-
158
- self.content_in_the_website = st.text_area("Content in the Website:",
159
- value=self.template_content_in_the_website,
160
- height=600)
161
- self.content_outside_the_website = st.text_area("Content outside the Website:",
162
- value=self.template_content_outside_the_website,
163
- height=600)
164
 
165
  self.process()
166
 
 
35
  session = st.session_state.analyze
36
 
37
  if (self.content_in_the_website or self.content_outside_the_website) and session == 'clicked':
38
+ with st.spinner('SEO On Page Analyst...', show_time=True):
39
  st.write('')
40
  content_in_the_website = ""
41
  content_outside_the_website = ""
 
52
  debug_info_content_outside_the_website = {'data_field' : 'Content outside the Website', 'result': content_outside_the_website}
53
 
54
  if self.content_in_the_website:
55
+ st.session_state['content_in_the_website'] = 'uploaded'
56
+ collect_telemetry(debug_info_content_in_the_website)
 
 
 
57
  if self.content_outside_the_website:
58
+ st.session_state['content_outside_the_website'] = 'uploaded'
59
+ collect_telemetry(debug_info_content_outside_the_website)
60
+
61
+
 
 
62
  #with st.expander("Debug information", icon="⚙"):
63
  # st.write(debug_info)
64
 
 
70
  pass
71
 
72
  def row1(self):
73
+ self.content_in_the_website = st.text_input("Content in the Website (Website Content)", placeholder='Enter Content in the Website')
74
+ self.content_outside_the_website = st.text_input("Content outside the Website (Website Content)", placeholder='Enter Content outside the Website')
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
75
 
76
  self.process()
77
 
classes/ebay.py CHANGED
@@ -31,7 +31,7 @@ class eBay:
31
  st.switch_page("./pages/home.py")
32
  except Exception:
33
  pass
34
-
35
  if 'product_title_ebay' not in st.session_state:
36
  st.session_state['product_title_ebay'] = ''
37
  if 'category_ebay' not in st.session_state:
@@ -40,25 +40,20 @@ class eBay:
40
  st.session_state['images_ebay'] = ''
41
  if 'product_description_ebay' not in st.session_state:
42
  st.session_state['product_description_ebay'] = ''
43
- '''
44
- if 'ebay_marketplace_questionnaires' not in st.session_state:
45
- st.session_state['ebay_marketplace_questionnaires'] = ''
46
 
47
  def process(self):
48
  session = st.session_state.analyze
49
- if (self.ebay_marketplace_questionnaires) and session == 'clicked':
50
  try:
51
- #product_title_ebay = ""
52
- #category_ebay = ""
53
- #images_ebay = ""
54
- #product_description_ebay = ""
55
- ebay_marketplace_questionnaires = ""
56
 
57
- with st.spinner('Uploading Ebay Files...', show_time=True):
58
  st.write('')
59
  # INITIALIZING SESSIONS
60
  #combined_text += f"Client Summary: {st.session_state.nature}\n"
61
- '''
62
  try:
63
  product_title_ebay += f"\nProduct Title: {self.product_title_ebay}"
64
  except KeyError:
@@ -75,11 +70,6 @@ class eBay:
75
  product_description_ebay += f"\nProduct Description: {self.product_description_ebay}"
76
  except KeyError:
77
  pass
78
- '''
79
- try:
80
- ebay_marketplace_questionnaires += f"Marketplace Questionnaires - eBay: {self.ebay_marketplace_questionnaires}"
81
- except KeyError:
82
- pass
83
 
84
  # OUTPUT FOR SEO ANALYST
85
  #payload_txt = {"question": combined_text}
@@ -88,12 +78,10 @@ class eBay:
88
  #end_time = time.time()
89
  #time_lapsed = end_time - start_time
90
 
91
- #debug_info_product_title_ebay = {'data_field' : 'Product Title - eBay', 'result': self.product_title_ebay}
92
- #debug_category_ebay = {'data_field' : 'Category - eBay', 'result': self.category_ebay}
93
- #debug_images_ebay = {'data_field' : 'Images - eBay', 'result': self.images_ebay}
94
- #debug_product_description_ebay = {'data_field' : 'Product Description - eBay', 'result': self.product_description_ebay}
95
-
96
- debug_ebay_marketplace_questionnaires = {'data_field' : 'Marketplace Questionnaires - eBay', 'result': self.ebay_marketplace_questionnaires}
97
 
98
  '''
99
  debug_info = {
@@ -103,7 +91,7 @@ class eBay:
103
  'payload': payload_txt,
104
  'result': result,
105
  }
106
-
107
  if self.product_title_ebay:
108
  st.session_state['product_title_ebay'] = 'uploaded'
109
  collect_telemetry(debug_info_product_title_ebay)
@@ -116,13 +104,7 @@ class eBay:
116
  if self.product_description_ebay:
117
  st.session_state['product_description_ebay'] = 'uploaded'
118
  collect_telemetry(debug_product_description_ebay)
119
- '''
120
- if self.ebay_marketplace_questionnaires:
121
- if self.ebay_marketplace_questionnaires != self.template:
122
- st.session_state['ebay_marketplace_questionnaires'] = 'uploaded'
123
- collect_telemetry(debug_ebay_marketplace_questionnaires)
124
- else:
125
- pass
126
 
127
  st.session_state['analyzing'] = False
128
  except AttributeError:
@@ -130,36 +112,11 @@ class eBay:
130
  hide_button()
131
 
132
  def row1(self):
133
- #self.product_title_ebay = st.text_input("Product Title - eBay:", placeholder='Enter Product Title')
134
- #self.category_ebay = st.text_input("Images - eBay:", placeholder='Enter Images')
135
- #self.images_ebay = st.text_input("Bullet Points - eBay:", placeholder='Enter Bullet Points')
136
- #self.product_description_ebay = st.text_input("Product Description - eBay:", placeholder='Enter Product Description')
137
 
138
- self.template = ("Product Title:\n"
139
- "a. Is the title within the 80-character limit?\n"
140
- "b. Does the title include important details (e.g., brand, color, quantity, compatibility)?\n"
141
- "c. Are search keywords used effectively in the title?\n"
142
- "d. Other Remarks:\n\n"
143
- "Category:\n"
144
- "a. Is the product listed under the correct eBay category?\n"
145
- "b. Other Remarks:\n\n"
146
- "Images:\n"
147
- "a. Are the product images on a white background?\n"
148
- "b. Are there any text, logos, or watermarks on the images?\n"
149
- "c. Are the images high-resolution and zoomable?\n"
150
- "d. Other Remarks:\n\n"
151
- "Product Description:\n"
152
- "a. Is the product description complete and detailed?\n"
153
- "b. Are bullet points used to highlight features and benefits?\n"
154
- "c. Are there images embedded in the description?\n"
155
- "d. Are there any technical issues (e.g., broken images, loading errors)?\n"
156
- "e. Is there consistent keyword usage in the description?\n"
157
- "f. Other Remarks:")
158
- self.ebay_marketplace_questionnaires = st.text_area(
159
- "Marketplace Questionnaires - eBay:",
160
- value=self.template,
161
- height=600
162
- )
163
  self.process()
164
 
165
  if __name__ == "__main__":
 
31
  st.switch_page("./pages/home.py")
32
  except Exception:
33
  pass
34
+ '''
35
  if 'product_title_ebay' not in st.session_state:
36
  st.session_state['product_title_ebay'] = ''
37
  if 'category_ebay' not in st.session_state:
 
40
  st.session_state['images_ebay'] = ''
41
  if 'product_description_ebay' not in st.session_state:
42
  st.session_state['product_description_ebay'] = ''
 
 
 
43
 
44
  def process(self):
45
  session = st.session_state.analyze
46
+ if (self.product_title_ebay or self.category_ebay or self.images_ebay or self.product_description_ebay) and session == 'clicked':
47
  try:
48
+ product_title_ebay = ""
49
+ category_ebay = ""
50
+ images_ebay = ""
51
+ product_description_ebay = ""
 
52
 
53
+ with st.spinner('eBay...', show_time=True):
54
  st.write('')
55
  # INITIALIZING SESSIONS
56
  #combined_text += f"Client Summary: {st.session_state.nature}\n"
 
57
  try:
58
  product_title_ebay += f"\nProduct Title: {self.product_title_ebay}"
59
  except KeyError:
 
70
  product_description_ebay += f"\nProduct Description: {self.product_description_ebay}"
71
  except KeyError:
72
  pass
 
 
 
 
 
73
 
74
  # OUTPUT FOR SEO ANALYST
75
  #payload_txt = {"question": combined_text}
 
78
  #end_time = time.time()
79
  #time_lapsed = end_time - start_time
80
 
81
+ debug_info_product_title_ebay = {'data_field' : 'Product Title - eBay', 'result': self.product_title_ebay}
82
+ debug_category_ebay = {'data_field' : 'Category - eBay', 'result': self.category_ebay}
83
+ debug_images_ebay = {'data_field' : 'Images - eBay', 'result': self.images_ebay}
84
+ debug_product_description_ebay = {'data_field' : 'Product Description - eBay', 'result': self.product_description_ebay}
 
 
85
 
86
  '''
87
  debug_info = {
 
91
  'payload': payload_txt,
92
  'result': result,
93
  }
94
+ '''
95
  if self.product_title_ebay:
96
  st.session_state['product_title_ebay'] = 'uploaded'
97
  collect_telemetry(debug_info_product_title_ebay)
 
104
  if self.product_description_ebay:
105
  st.session_state['product_description_ebay'] = 'uploaded'
106
  collect_telemetry(debug_product_description_ebay)
107
+
 
 
 
 
 
 
108
 
109
  st.session_state['analyzing'] = False
110
  except AttributeError:
 
112
  hide_button()
113
 
114
  def row1(self):
115
+ self.product_title_ebay = st.text_input("Product Title - eBay:", placeholder='Enter Product Title')
116
+ self.category_ebay = st.text_input("Images - eBay:", placeholder='Enter Images')
117
+ self.images_ebay = st.text_input("Bullet Points - eBay:", placeholder='Enter Bullet Points')
118
+ self.product_description_ebay = st.text_input("Product Description - eBay:", placeholder='Enter Product Description')
119
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
120
  self.process()
121
 
122
  if __name__ == "__main__":
classes/lld_pm_ln.py CHANGED
@@ -34,7 +34,7 @@ class LLD_PM_LN:
34
  session = st.session_state.analyze
35
  if (self.lead_generation_mechanism) and session == 'clicked':
36
  lead_generation_mechanism = ""
37
- with st.spinner('Uploading Lead Generation...', show_time=True):
38
  st.write('')
39
 
40
  try:
 
34
  session = st.session_state.analyze
35
  if (self.lead_generation_mechanism) and session == 'clicked':
36
  lead_generation_mechanism = ""
37
+ with st.spinner('SEO On Page Analyst...', show_time=True):
38
  st.write('')
39
 
40
  try:
classes/pull_through_offers.py CHANGED
@@ -30,7 +30,7 @@ class PullThroughOffers:
30
  session = st.session_state.analyze
31
  if (self.pull_through_offers) and session == 'clicked':
32
  pull_through_offers = ""
33
- with st.spinner('Uploading Pull Through Offers...', show_time=True):
34
  st.write('')
35
 
36
  try:
 
30
  session = st.session_state.analyze
31
  if (self.pull_through_offers) and session == 'clicked':
32
  pull_through_offers = ""
33
+ with st.spinner('SEO On Page Analyst...', show_time=True):
34
  st.write('')
35
 
36
  try:
classes/response_connection_analyst.py DELETED
@@ -1,87 +0,0 @@
1
- import streamlit as st
2
- import requests
3
- from dotenv import load_dotenv
4
- import os
5
- from helper.upload_response import upload_response
6
- from helper.upload_File import uploadFile
7
- from pymongo import MongoClient
8
- import json
9
-
10
- class ConnectionAnalyst:
11
- def __init__(self, model_url):
12
- self.uploaded_files = []
13
- self.file_dict = {}
14
- self.model_url = model_url
15
- #self.analyst_name = analyst_name
16
- #self.data_src = data_src
17
- #self.analyst_description = analyst_description
18
- self.initialize()
19
- self.row1()
20
-
21
- def initialize(self):
22
- # FOR ENV
23
- load_dotenv()
24
-
25
- # AGENT NAME
26
- #st.header(self.analyst_name)
27
-
28
- def request_model(self, payload_txt, headers):
29
- response = requests.post(self.model_url, json=payload_txt, headers=headers)
30
- response.raise_for_status()
31
- output = response.json()
32
- #st.write(output)
33
- text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
34
- #text = json.loads(text)
35
- #st.write(text)
36
- return text
37
-
38
- def fetch_data(self, data_field):
39
- mongodb_uri = os.getenv("MONGODB_URI")
40
- myclient = MongoClient(mongodb_uri)
41
- mydb = myclient.get_database()
42
- mycol = mydb["df_data"]
43
-
44
- # Sort by timestamp field in descending order
45
- x = mycol.find_one(
46
- {"data_field": data_field},
47
- sort=[("timestamp", -1)]
48
- )
49
-
50
- x = x["result"]
51
- return x
52
-
53
- def process(self):
54
- with st.spinner('Connection Analyst...', show_time=True):
55
- st.write('')
56
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
57
- try:
58
- payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
59
- payload_txt_model = self.request_model(payload_txt, headers)
60
- debug_info = {'data_field' : 'Connection Analyst', 'result': payload_txt_model}
61
- upload_response(debug_info)
62
-
63
- count = 0
64
- except Exception as e:
65
- pass
66
- st.session_state['analyzing'] = False
67
-
68
- def row1(self):
69
- st.session_state['analyzing'] = False
70
- self.payload = ""
71
- count = 0
72
- try:
73
- session_client_summary = st.session_state['client_summary']
74
- if session_client_summary == 'uploaded':
75
- count += 1
76
- self.payload += self.fetch_data("Client Summary")
77
- except Exception as e:
78
- pass
79
-
80
- if count >= 1:
81
- self.process()
82
-
83
-
84
- if __name__ == "__main__":
85
- st.set_page_config(layout="wide")
86
-
87
- upload = uploadFile()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
classes/response_content.py CHANGED
@@ -72,7 +72,7 @@ class Content:
72
  def process(self):
73
  with st.spinner('Content Analyst...', show_time=True):
74
  st.write('')
75
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
76
  try:
77
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
78
  payload_txt_model = self.request_model(payload_txt, headers)
 
72
  def process(self):
73
  with st.spinner('Content Analyst...', show_time=True):
74
  st.write('')
75
+ headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
76
  try:
77
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
78
  payload_txt_model = self.request_model(payload_txt, headers)
classes/response_content_process_and_assets_analyst.py DELETED
@@ -1,100 +0,0 @@
1
- import streamlit as st
2
- import requests
3
- from dotenv import load_dotenv
4
- import os
5
- from helper.upload_response import upload_response
6
- from helper.upload_File import uploadFile
7
- from pymongo import MongoClient
8
- import json
9
- class Content_Process_and_Assets_Analyst:
10
- def __init__(self, model_url):
11
- self.uploaded_files = []
12
- self.file_dict = {}
13
- self.model_url = model_url
14
- #self.analyst_name = analyst_name
15
- #self.data_src = data_src
16
- #self.analyst_description = analyst_description
17
- self.initialize()
18
- self.row1()
19
-
20
- def initialize(self):
21
- # FOR ENV
22
- load_dotenv()
23
-
24
- # AGENT NAME
25
- #st.header(self.analyst_name)
26
-
27
- def request_model(self, payload_txt, headers):
28
- response = requests.post(self.model_url, json=payload_txt, headers=headers)
29
- response.raise_for_status()
30
- output = response.json()
31
- #st.write(output)
32
- text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
33
- text = json.loads(text)
34
- #st.write(text)
35
- return text
36
-
37
- def fetch_data(self, data_field):
38
- mongodb_uri = os.getenv("MONGODB_URI")
39
- myclient = MongoClient(mongodb_uri)
40
- mydb = myclient.get_database()
41
- mycol = mydb["df_data"]
42
-
43
- # Sort by timestamp field in descending order
44
- x = mycol.find_one(
45
- {"data_field": data_field},
46
- sort=[("timestamp", -1)]
47
- )
48
-
49
- x = x["result"]
50
- return x
51
-
52
- def process(self):
53
- with st.spinner('Content - Process and Assets Analyst...', show_time=True):
54
- st.write('')
55
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
56
- try:
57
- payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
58
- payload_txt_model = self.request_model(payload_txt, headers)
59
- debug_info = {'data_field' : 'Content - Process and Assets Analyst', 'result': payload_txt_model}
60
- upload_response(debug_info)
61
-
62
- count = 0
63
- except Exception as e:
64
- pass
65
- st.session_state['analyzing'] = False
66
-
67
- def row1(self):
68
- st.session_state['analyzing'] = False
69
- self.payload = ""
70
- count = 0
71
- try:
72
- session_client_summary = st.session_state['client_summary']
73
- if session_client_summary == 'uploaded':
74
- count += 1
75
- self.payload += self.fetch_data("Client Summary")
76
- except Exception as e:
77
- pass
78
- try:
79
- session_content_in_the_website = st.session_state['content_in_the_website']
80
- if session_content_in_the_website == 'uploaded':
81
- count += 1
82
- self.payload += self.fetch_data("Content in the Website")
83
- except Exception as e:
84
- pass
85
- try:
86
- session_content_outside_the_website = st.session_state['content_outside_the_website']
87
- if session_content_outside_the_website == 'uploaded':
88
- count += 1
89
- self.payload += self.fetch_data("Content outside the Website")
90
- except Exception as e:
91
- pass
92
-
93
- if count >= 3:
94
- self.process()
95
-
96
-
97
- if __name__ == "__main__":
98
- st.set_page_config(layout="wide")
99
-
100
- upload = uploadFile()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
classes/response_conversion_analyst.py DELETED
@@ -1,118 +0,0 @@
1
- import streamlit as st
2
- import requests
3
- from dotenv import load_dotenv
4
- import os
5
- from helper.upload_response import upload_response
6
- from helper.upload_File import uploadFile
7
- from pymongo import MongoClient
8
- import json
9
-
10
- class ConversionAnalyst:
11
- def __init__(self, model_url):
12
- self.uploaded_files = []
13
- self.file_dict = {}
14
- self.model_url = model_url
15
- #self.analyst_name = analyst_name
16
- #self.data_src = data_src
17
- #self.analyst_description = analyst_description
18
- self.initialize()
19
- self.row1()
20
-
21
- def initialize(self):
22
- # FOR ENV
23
- load_dotenv()
24
-
25
- # AGENT NAME
26
- #st.header(self.analyst_name)
27
-
28
- def request_model(self, payload_txt, headers):
29
- response = requests.post(self.model_url, json=payload_txt, headers=headers)
30
- response.raise_for_status()
31
- output = response.json()
32
- #st.write(output)
33
- text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
34
- text = json.loads(text)
35
- #st.write(text)
36
- return text
37
-
38
- def fetch_data(self, data_field):
39
- mongodb_uri = os.getenv("MONGODB_URI")
40
- myclient = MongoClient(mongodb_uri)
41
- mydb = myclient.get_database()
42
- mycol = mydb["df_data"]
43
-
44
- # Sort by timestamp field in descending order
45
- x = mycol.find_one(
46
- {"data_field": data_field},
47
- sort=[("timestamp", -1)]
48
- )
49
-
50
- x = x["result"]
51
- return x
52
-
53
- def process(self):
54
- with st.spinner('Conversion Analyst...', show_time=True):
55
- st.write('')
56
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
57
- try:
58
- payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
59
- payload_txt_model = self.request_model(payload_txt, headers)
60
- debug_info = {'data_field' : 'Conversion Analyst', 'result': payload_txt_model}
61
- upload_response(debug_info)
62
-
63
- count = 0
64
- except Exception as e:
65
- pass
66
- st.session_state['analyzing'] = False
67
-
68
- def row1(self):
69
- st.session_state['analyzing'] = False
70
- self.payload = ""
71
- count = 0
72
- try:
73
- session_lead_generation_mechanism = st.session_state['lead_generation_mechanism']
74
- if session_lead_generation_mechanism == 'uploaded':
75
- count += 1
76
- self.payload += self.fetch_data("Lead Generation Mechanism")
77
- except Exception as e:
78
- pass
79
- try:
80
- session_client_relations_management_system = st.session_state['client_relations_management_system']
81
- if session_client_relations_management_system == 'uploaded':
82
- count += 1
83
- self.payload += self.fetch_data("Client Relations Management System")
84
- except Exception as e:
85
- pass
86
- try:
87
- session_pull_through_offers = st.session_state['pull_through_offers']
88
- if session_pull_through_offers == 'uploaded':
89
- count += 1
90
- self.payload += self.fetch_data("Pull through offers")
91
- except Exception as e:
92
- pass
93
- try:
94
- session_content_in_the_website = st.session_state['content_in_the_website']
95
- if session_content_in_the_website == 'uploaded':
96
- count += 1
97
- self.payload += self.fetch_data("Content in the Website")
98
- except Exception as e:
99
- pass
100
- try:
101
- session_content_outside_the_website = st.session_state['content_outside_the_website']
102
- if session_content_outside_the_website == 'uploaded':
103
- count += 1
104
- self.payload += self.fetch_data("Content outside the Website")
105
- except Exception as e:
106
- pass
107
-
108
- if count >= 1:
109
- name = self.fetch_data("Client Name")
110
- website = self.fetch_data("Client Website")
111
- self.payload = name + website + self.payload
112
- self.process()
113
-
114
-
115
- if __name__ == "__main__":
116
- st.set_page_config(layout="wide")
117
-
118
- upload = uploadFile()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
classes/response_desired_outcome.py DELETED
@@ -1,88 +0,0 @@
1
- import streamlit as st
2
- import requests
3
- from dotenv import load_dotenv
4
- import os
5
- from helper.upload_response import upload_response
6
- from helper.upload_File import uploadFile
7
- from pymongo import MongoClient
8
- import json
9
-
10
- class DesiredOutcome:
11
- def __init__(self, model_url):
12
- self.uploaded_files = []
13
- self.file_dict = {}
14
- self.model_url = model_url
15
- #self.analyst_name = analyst_name
16
- #self.data_src = data_src
17
- #self.analyst_description = analyst_description
18
- self.initialize()
19
- self.row1()
20
-
21
- def initialize(self):
22
- # FOR ENV
23
- load_dotenv()
24
-
25
- # AGENT NAME
26
- #st.header(self.analyst_name)
27
-
28
- def request_model(self, payload_txt, headers):
29
- response = requests.post(self.model_url, json=payload_txt, headers=headers)
30
- response.raise_for_status()
31
- output = response.json()
32
- #st.write(output)
33
- text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
34
- #text = json.loads(text)
35
- #st.write(text)
36
- return text
37
-
38
- def fetch_data(self, data_field):
39
- mongodb_uri = os.getenv("MONGODB_URI")
40
- myclient = MongoClient(mongodb_uri)
41
- mydb = myclient.get_database()
42
- mycol = mydb["df_data"]
43
-
44
- # Sort by timestamp field in descending order
45
- x = mycol.find_one(
46
- {"data_field": data_field},
47
- sort=[("timestamp", -1)]
48
- )
49
-
50
- x = x["result"]
51
- return x
52
-
53
- def process(self):
54
- with st.spinner('Desired Outcome Analyst...', show_time=True):
55
- st.write('')
56
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
57
- try:
58
- payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
59
- payload_txt_model = self.request_model(payload_txt, headers)
60
- debug_info = {'data_field' : 'Desired Outcomes Analyst', 'result': payload_txt_model}
61
- upload_response(debug_info)
62
-
63
- st.session_state['client_summary'] = ''
64
- count = 0
65
- except Exception as e:
66
- pass
67
- st.session_state['analyzing'] = False
68
-
69
- def row1(self):
70
- st.session_state['analyzing'] = False
71
- self.payload = ""
72
- count = 0
73
- try:
74
- session_client_summary = st.session_state['client_summary']
75
- if session_client_summary == 'uploaded':
76
- count += 1
77
- self.payload += self.fetch_data("Client Summary")
78
- except Exception as e:
79
- pass
80
-
81
- if count >= 1:
82
- self.process()
83
-
84
-
85
- if __name__ == "__main__":
86
- st.set_page_config(layout="wide")
87
-
88
- upload = uploadFile()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
classes/response_df_overview.py DELETED
@@ -1,95 +0,0 @@
1
- from io import StringIO
2
- from urllib.parse import urlparse
3
- import streamlit as st
4
- import requests
5
- from dotenv import load_dotenv
6
- import os
7
- from helper.upload_response import upload_response
8
- from helper.upload_File import uploadFile
9
- import json
10
- from pymongo import MongoClient
11
-
12
- class dfOverview:
13
- def __init__(self, model_url):
14
- self.uploaded_files = []
15
- self.file_dict = {}
16
- self.model_url = model_url
17
- #self.analyst_name = analyst_name
18
- #self.data_src = data_src
19
- #self.analyst_description = analyst_description
20
- self.initialize()
21
- self.row1()
22
-
23
- def initialize(self):
24
- # FOR ENV
25
- load_dotenv()
26
-
27
- # AGENT NAME
28
- #st.header(self.analyst_name)
29
-
30
- def request_model(self, payload_txt, headers):
31
- response = requests.post(self.model_url, json=payload_txt, headers=headers)
32
- response.raise_for_status()
33
- output = response.json()
34
- #st.write(output)
35
- text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
36
- #text = json.loads(text)
37
- #st.write(text)
38
- return text
39
-
40
- def fetch_data(self, data_field):
41
- mongodb_uri = os.getenv("MONGODB_URI")
42
- myclient = MongoClient(mongodb_uri)
43
- mydb = myclient.get_database()
44
- mycol = mydb["df_data"]
45
-
46
- # Sort by timestamp field in descending order
47
- x = mycol.find_one(
48
- {"data_field": data_field},
49
- sort=[("timestamp", -1)]
50
- )
51
-
52
- x = x["result"]
53
- return x
54
-
55
- def process(self):
56
- with st.spinner('DF Overview Analyst...', show_time=True):
57
- st.write('')
58
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
59
- try:
60
- payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
61
- payload_txt_model = self.request_model(payload_txt, headers)
62
- debug_info = {'data_field' : 'DF Overview Analyst', 'result': payload_txt_model}
63
- upload_response(debug_info)
64
-
65
- st.session_state['client_summary'] = ''
66
- st.session_state['client_name'] = ''
67
- count = 0
68
- except Exception as e:
69
- pass
70
- st.session_state['analyzing'] = False
71
-
72
- def row1(self):
73
- st.session_state['analyzing'] = False
74
- self.payload = ""
75
- count = 0
76
- try:
77
- session_client_summary = st.session_state['client_summary']
78
- if session_client_summary == 'uploaded':
79
- count += 1
80
- self.payload += self.fetch_data("Client Summary")
81
- self.payload += self.fetch_data("Client Name")
82
-
83
- except Exception as e:
84
- pass
85
-
86
- if count >= 1:
87
- summary = self.fetch_data("Client Summary")
88
- self.payload = summary + self.payload
89
- self.process()
90
-
91
-
92
- if __name__ == "__main__":
93
- st.set_page_config(layout="wide")
94
-
95
- upload = uploadFile()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
classes/response_executive_summary.py CHANGED
@@ -1,9 +1,11 @@
 
1
  import streamlit as st
2
  import requests
3
  from dotenv import load_dotenv
4
  import os
5
  from helper.upload_response import upload_response
6
  from helper.upload_File import uploadFile
 
7
  from pymongo import MongoClient
8
  from helper.data_field import get_analyst_response
9
 
@@ -54,7 +56,7 @@ class ExecutiveSummary:
54
  def process(self):
55
  with st.spinner('Executive Summary...', show_time=True):
56
  st.write('')
57
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
58
  try:
59
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
60
  payload_txt_model = self.request_model(payload_txt, headers)
 
1
+ from urllib.parse import urlparse
2
  import streamlit as st
3
  import requests
4
  from dotenv import load_dotenv
5
  import os
6
  from helper.upload_response import upload_response
7
  from helper.upload_File import uploadFile
8
+ import json
9
  from pymongo import MongoClient
10
  from helper.data_field import get_analyst_response
11
 
 
56
  def process(self):
57
  with st.spinner('Executive Summary...', show_time=True):
58
  st.write('')
59
+ headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
60
  try:
61
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
62
  payload_txt_model = self.request_model(payload_txt, headers)
classes/response_lld_pm_ln.py CHANGED
@@ -73,7 +73,7 @@ class LLD_PM_LN:
73
  def process(self):
74
  with st.spinner('LLD/PM/LN Analyst...', show_time=True):
75
  st.write('')
76
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
77
  try:
78
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
79
  payload_txt_model = self.request_model(payload_txt, headers)
 
73
  def process(self):
74
  with st.spinner('LLD/PM/LN Analyst...', show_time=True):
75
  st.write('')
76
+ headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
77
  try:
78
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
79
  payload_txt_model = self.request_model(payload_txt, headers)
classes/response_marketplace.py CHANGED
@@ -39,13 +39,10 @@ class Marketplace:
39
  response.raise_for_status()
40
  output = response.json()
41
  #st.write(output)
42
- text_amazon = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
43
- #text_ebay = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]["ebay"]
44
- text_amazon = json.loads(text_amazon)
45
- #text_ebay = json.loads(text_ebay)
46
- #text = text_amazon + text_ebay
47
- #st.write(text_amazon)
48
- return text_amazon
49
 
50
  def fetch_backlinks(self, data_field):
51
  mongodb_uri = os.getenv("MONGODB_URI")
@@ -74,13 +71,13 @@ class Marketplace:
74
  def process (self):
75
  with st.spinner('Marketplace Analyst...', show_time=True):
76
  st.write('')
77
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
78
  try:
79
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
80
  payload_txt_model = self.request_model(payload_txt, headers)
81
  debug_info = {'data_field' : 'Marketplace Analyst', 'result': payload_txt_model}
82
  upload_response(debug_info)
83
- '''
84
  st.session_state['product_title_amazon'] = ''
85
  st.session_state['images_amazon'] = ''
86
  st.session_state['bullet_points_amazon'] = ''
@@ -89,9 +86,6 @@ class Marketplace:
89
  st.session_state['category_ebay'] = ''
90
  st.session_state['images_ebay'] = ''
91
  st.session_state['product_description_ebay'] = ''
92
- '''
93
- st.session_state['amazon_marketplace_questionnaires'] = ''
94
- st.session_state['ebay_marketplace_questionnaires'] = ''
95
  count = 0
96
  except Exception as e:
97
  pass
@@ -103,7 +97,6 @@ class Marketplace:
103
  #analyze_button = st.button("Analyze", disabled=initialize_analyze_session())
104
  self.payload = ""
105
  count = 0
106
- '''
107
  try:
108
  session_product_title_amazon = st.session_state['product_title_amazon']
109
  if session_product_title_amazon == 'uploaded':
@@ -162,22 +155,6 @@ class Marketplace:
162
  self.payload += self.fetch_data("Product Description - eBay")
163
  except Exception as e:
164
  pass
165
- '''
166
- try:
167
- session_amazon_marketplace_questionnaires = st.session_state['amazon_marketplace_questionnaires']
168
- if session_amazon_marketplace_questionnaires == 'uploaded':
169
- count += 1
170
- self.payload += self.fetch_data("Marketplace Questionnaires - Amazon")
171
- except Exception as e:
172
- pass
173
-
174
- try:
175
- session_ebay_marketplace_questionnaires = st.session_state['ebay_marketplace_questionnaires']
176
- if session_ebay_marketplace_questionnaires == 'uploaded':
177
- count += 1
178
- self.payload += self.fetch_data("Marketplace Questionnaires - eBay")
179
- except Exception as e:
180
- pass
181
 
182
  if count >= 1:
183
  summary = self.fetch_data("Client Summary")
 
39
  response.raise_for_status()
40
  output = response.json()
41
  #st.write(output)
42
+ text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
43
+ text = json.loads(text)
44
+ #st.write(text)
45
+ return text
 
 
 
46
 
47
  def fetch_backlinks(self, data_field):
48
  mongodb_uri = os.getenv("MONGODB_URI")
 
71
  def process (self):
72
  with st.spinner('Marketplace Analyst...', show_time=True):
73
  st.write('')
74
+ headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
75
  try:
76
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
77
  payload_txt_model = self.request_model(payload_txt, headers)
78
  debug_info = {'data_field' : 'Marketplace Analyst', 'result': payload_txt_model}
79
  upload_response(debug_info)
80
+
81
  st.session_state['product_title_amazon'] = ''
82
  st.session_state['images_amazon'] = ''
83
  st.session_state['bullet_points_amazon'] = ''
 
86
  st.session_state['category_ebay'] = ''
87
  st.session_state['images_ebay'] = ''
88
  st.session_state['product_description_ebay'] = ''
 
 
 
89
  count = 0
90
  except Exception as e:
91
  pass
 
97
  #analyze_button = st.button("Analyze", disabled=initialize_analyze_session())
98
  self.payload = ""
99
  count = 0
 
100
  try:
101
  session_product_title_amazon = st.session_state['product_title_amazon']
102
  if session_product_title_amazon == 'uploaded':
 
155
  self.payload += self.fetch_data("Product Description - eBay")
156
  except Exception as e:
157
  pass
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
158
 
159
  if count >= 1:
160
  summary = self.fetch_data("Client Summary")
classes/response_off.py CHANGED
@@ -9,6 +9,8 @@ from helper.upload_response import upload_response
9
  from helper.upload_File import uploadFile
10
  from helper.button_behaviour import hide_button, unhide_button
11
  from helper.initialize_analyze_session import initialize_analyze_session
 
 
12
  import json
13
  from pymongo import MongoClient
14
  from helper.data_field import data_field
@@ -35,12 +37,11 @@ class SeoOffPageAnalyst:
35
  response = requests.post(self.model_url, json=payload_txt, headers=headers)
36
  response.raise_for_status()
37
  output = response.json()
38
- #st.write(output)
39
  text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
40
  text = json.loads(text)
41
- #st.write(text)
42
- #backlinks = text[0]
43
- #referring_domains = text[1]
44
 
45
  return text
46
 
@@ -62,7 +63,7 @@ class SeoOffPageAnalyst:
62
  def process(self):
63
  with st.spinner('SEO Off Page Analyst...', show_time=True):
64
  st.write('')
65
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
66
  try:
67
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
68
  payload_txt_model = self.request_model(payload_txt, headers)
@@ -70,7 +71,7 @@ class SeoOffPageAnalyst:
70
  upload_response(debug_info)
71
 
72
  st.session_state['off_page_file_uploaded'] = ''
73
-
74
  except Exception as e:
75
  pass
76
  st.session_state['analyzing'] = False
@@ -87,6 +88,7 @@ class SeoOffPageAnalyst:
87
 
88
  except Exception as e:
89
  pass
 
90
  if count >= 1:
91
  summary = self.fetch_data("Client Summary")
92
  self.payload = summary + self.payload
 
9
  from helper.upload_File import uploadFile
10
  from helper.button_behaviour import hide_button, unhide_button
11
  from helper.initialize_analyze_session import initialize_analyze_session
12
+ import pandas as pd
13
+ import asyncio
14
  import json
15
  from pymongo import MongoClient
16
  from helper.data_field import data_field
 
37
  response = requests.post(self.model_url, json=payload_txt, headers=headers)
38
  response.raise_for_status()
39
  output = response.json()
 
40
  text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
41
  text = json.loads(text)
42
+
43
+ backlinks = text[0]
44
+ referring_domains = text[1]
45
 
46
  return text
47
 
 
63
  def process(self):
64
  with st.spinner('SEO Off Page Analyst...', show_time=True):
65
  st.write('')
66
+ headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
67
  try:
68
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
69
  payload_txt_model = self.request_model(payload_txt, headers)
 
71
  upload_response(debug_info)
72
 
73
  st.session_state['off_page_file_uploaded'] = ''
74
+ count = 0
75
  except Exception as e:
76
  pass
77
  st.session_state['analyzing'] = False
 
88
 
89
  except Exception as e:
90
  pass
91
+
92
  if count >= 1:
93
  summary = self.fetch_data("Client Summary")
94
  self.payload = summary + self.payload
classes/response_on_page.py CHANGED
@@ -74,7 +74,7 @@ class SeoOn:
74
  with st.spinner('SEO On Page...', show_time=True):
75
  st.write('')
76
  # OUTPUT FOR SEO ANALYST
77
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
78
  try:
79
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
80
  payload_txt_model = self.request_model(payload_txt, headers)
 
74
  with st.spinner('SEO On Page...', show_time=True):
75
  st.write('')
76
  # OUTPUT FOR SEO ANALYST
77
+ headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
78
  try:
79
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
80
  payload_txt_model = self.request_model(payload_txt, headers)
classes/response_pull_through_offers.py CHANGED
@@ -72,7 +72,7 @@ class PullThroughOffers:
72
  def process(self):
73
  with st.spinner('Pull through offers...', show_time=True):
74
  st.write('')
75
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
76
  try:
77
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
78
  payload_txt_model = self.request_model(payload_txt, headers)
 
72
  def process(self):
73
  with st.spinner('Pull through offers...', show_time=True):
74
  st.write('')
75
+ headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
76
  try:
77
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
78
  payload_txt_model = self.request_model(payload_txt, headers)
classes/response_sem_ppc.py CHANGED
@@ -79,7 +79,7 @@ class Sem_PPC:
79
  def process (self):
80
  with st.spinner('SEM/PPC Analyst...', show_time=True):
81
  st.write('')
82
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
83
  try:
84
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
85
  payload_txt_model = self.request_model(payload_txt, headers)
 
79
  def process (self):
80
  with st.spinner('SEM/PPC Analyst...', show_time=True):
81
  st.write('')
82
+ headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
83
  try:
84
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
85
  payload_txt_model = self.request_model(payload_txt, headers)
classes/response_seo.py CHANGED
@@ -60,7 +60,7 @@ class Seo:
60
  mydb = myclient.get_database()
61
  mycol = mydb["df_data"]
62
  x = mycol.find_one({"data_field": data_field})
63
- x = x["result"]
64
  return x
65
 
66
  def fetch_data(self, data_field):
@@ -78,10 +78,79 @@ class Seo:
78
  x = x["result"]
79
  return x
80
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
81
  def process (self):
82
  with st.spinner('Seo Analyst...', show_time=True):
83
  st.write('')
84
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
85
  try:
86
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
87
  payload_txt_model = self.request_model(payload_txt, headers)
@@ -93,7 +162,7 @@ class Seo:
93
  st.session_state['others'] = ''
94
  st.session_state['df_traffic'] = ''
95
  st.session_state['df_seo'] = ''
96
-
97
  except Exception as e:
98
  pass
99
  st.session_state['analyzing'] = False
@@ -103,21 +172,55 @@ class Seo:
103
  #st.write("") # FOR THE HIDE BUTTON
104
  #analyze_button = st.button("Analyze", disabled=initialize_analyze_session())
105
  self.payload = ""
106
- self.payload += self.fetch_data("SEO Keywords")
107
-
108
- self.payload += self.fetch_data("Traffic Channels")
109
-
110
- self.payload += self.fetch_data("Traffic Acquisition")
111
-
112
- self.payload += self.fetch_data("Pages Indexed")
113
-
114
- self.payload += self.fetch_data("Bounce Rate")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
115
 
116
- self.payload += self.fetch_backlinks("Backlinks")
117
-
118
- summary = self.fetch_data("Client Summary")
119
- self.payload = summary + self.payload
120
- self.process()
121
 
122
 
123
  if __name__ == "__main__":
 
60
  mydb = myclient.get_database()
61
  mycol = mydb["df_data"]
62
  x = mycol.find_one({"data_field": data_field})
63
+ x = x["result"]['question']
64
  return x
65
 
66
  def fetch_data(self, data_field):
 
78
  x = x["result"]
79
  return x
80
 
81
+ def detect_encoding(self, uploaded_file):
82
+ result = chardet.detect(uploaded_file.read(100000))
83
+ uploaded_file.seek(0) # Reset file pointer to the beginning
84
+ return result['encoding']
85
+
86
+ def keyword_ranking(self, df_seo):
87
+ keyword_ranking = df_seo
88
+ st.session_state['keyword_ranking'] = keyword_ranking
89
+
90
+ keywords_ranking_sorted = keyword_ranking.sort_values("Position", ascending=True)
91
+
92
+ keywords_ranking_top_10 = keywords_ranking_sorted[keywords_ranking_sorted["Position"] <= 10].shape[0]
93
+ keywords_ranking_top_100 = keywords_ranking_sorted[keywords_ranking_sorted["Position"] <= 100].shape[0]
94
+
95
+ keyword_ranking = {
96
+ 'Keyword_top_10': keywords_ranking_top_10,
97
+ 'Keyword_top_100': keywords_ranking_top_100
98
+ }
99
+ st.session_state['keyword_ranking'] = keyword_ranking
100
+
101
+ def traffic_files(self, df_traffic):
102
+ traffic_channels = df_traffic
103
+ try:
104
+ traffic_channels.rename(columns={traffic_channels.columns[0]: 'date'}, inplace=True)
105
+ traffic_channels['date'] = pd.to_datetime(traffic_channels['date'], format='mixed')
106
+ except pandas._libs.tslibs.parsing.DateParseError:
107
+ pass
108
+ traffic_channels_sort = traffic_channels.sort_values("date", ascending=False)
109
+
110
+ organic_traffic = traffic_channels_sort['Organic Search'].values[0]
111
+ paid_traffic = traffic_channels_sort['Paid Search'].values[0]
112
+ direct_traffic = traffic_channels_sort['Direct'].values[0]
113
+ referral_traffic = traffic_channels_sort['Referral'].values[0]
114
+
115
+ st.session_state['organic_traffic'] = organic_traffic
116
+ st.session_state['paid_traffic'] = paid_traffic
117
+ st.session_state['direct_traffic'] = direct_traffic
118
+ st.session_state['referral_traffic'] = referral_traffic
119
+
120
+ def ga4_traffic(self, others):
121
+ st.session_state['others'] = others
122
+
123
+ ga4_paid_social = others['Sessions'].values[0]
124
+ ga4_organic_traffic = others['Sessions'].values[4]
125
+ ga4_direct_traffic = others['Sessions'].values[2]
126
+ ga4_referral_traffic = others['Sessions'].values[3]
127
+
128
+ st.session_state['ga4_paid_social'] = ga4_paid_social
129
+ st.session_state['ga4_organic_traffic'] = ga4_organic_traffic
130
+ st.session_state['ga4_direct_traffic'] = ga4_direct_traffic
131
+ st.session_state['ga4_referral_traffic'] = ga4_referral_traffic
132
+
133
+ def delete_sessions(self):
134
+ try:
135
+ del st.session_state['df_traffic']
136
+ del st.session_state['others']
137
+ del st.session_state['df_seo']
138
+ del st.session_state['keyword_ranking']
139
+ del st.session_state['ga4_paid_social']
140
+ del st.session_state['ga4_organic_traffic']
141
+ del st.session_state['ga4_direct_traffic']
142
+ del st.session_state['ga4_referral_traffic']
143
+ del st.session_state['organic_traffic']
144
+ del st.session_state['paid_traffic']
145
+ del st.session_state['direct_traffic']
146
+ del st.session_state['referral_traffic']
147
+ except KeyError:
148
+ pass
149
+
150
  def process (self):
151
  with st.spinner('Seo Analyst...', show_time=True):
152
  st.write('')
153
+ headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
154
  try:
155
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
156
  payload_txt_model = self.request_model(payload_txt, headers)
 
162
  st.session_state['others'] = ''
163
  st.session_state['df_traffic'] = ''
164
  st.session_state['df_seo'] = ''
165
+ count = 0
166
  except Exception as e:
167
  pass
168
  st.session_state['analyzing'] = False
 
172
  #st.write("") # FOR THE HIDE BUTTON
173
  #analyze_button = st.button("Analyze", disabled=initialize_analyze_session())
174
  self.payload = ""
175
+ count = 0
176
+ try:
177
+ session_traffic_aqcuisition = st.session_state['df_seo']
178
+ if session_traffic_aqcuisition == 'uploaded':
179
+ count += 1
180
+ self.payload += self.fetch_data("SEO Keywords")
181
+
182
+ except Exception as e:
183
+ pass
184
+ try:
185
+ session_traffic_channels = st.session_state['df_traffic']
186
+ if session_traffic_channels == 'uploaded':
187
+ count += 1
188
+ self.payload += self.fetch_data("Traffic Channels")
189
+ except Exception as e:
190
+ pass
191
+ try:
192
+ session_others = st.session_state['others']
193
+ if session_others == 'uploaded':
194
+ count += 1
195
+ self.payload += self.fetch_data("Traffic Acquisition")
196
+
197
+ except Exception as e:
198
+ pass
199
+ try:
200
+ session_page_index = st.session_state['pages_index']
201
+ if session_page_index == 'uploaded':
202
+ count += 1
203
+ self.payload += self.fetch_data("Pages Indexed")
204
+ except Exception as e:
205
+ pass
206
+ try:
207
+ session_bounce_rate = st.session_state['bounce_rate']
208
+ if session_bounce_rate == 'uploaded':
209
+ count += 1
210
+ self.payload += self.fetch_data("Bounce Rate")
211
+ except Exception as e:
212
+ pass
213
+ try:
214
+ session_backlinks = st.session_state["off_page_file_uploaded"]
215
+ if session_backlinks == 'uploaded':
216
+ self.payload += self.fetch_backlinks("Backlinks")
217
+ except Exception as e:
218
+ pass
219
 
220
+ if count >= 1:
221
+ summary = self.fetch_data("Client Summary")
222
+ self.payload = summary + self.payload
223
+ self.process()
 
224
 
225
 
226
  if __name__ == "__main__":
classes/response_snapshot.py DELETED
@@ -1,101 +0,0 @@
1
- import streamlit as st
2
- import requests
3
- from dotenv import load_dotenv
4
- import os
5
- from helper.upload_response import upload_response
6
- from helper.upload_File import uploadFile
7
- from pymongo import MongoClient
8
- from helper.data_field import get_analyst_response
9
- import json
10
-
11
-
12
- class Snapshot:
13
- def __init__(self, model_url):
14
- self.uploaded_files = []
15
- self.file_dict = {}
16
- self.model_url = model_url
17
- #self.analyst_name = analyst_name
18
- #self.data_src = data_src
19
- #self.analyst_description = analyst_description
20
- self.initialize()
21
- self.row1()
22
-
23
- def initialize(self):
24
- # FOR ENV
25
- load_dotenv()
26
-
27
- # AGENT NAME
28
- #st.header(self.analyst_name)
29
-
30
- def request_model(self, payload_txt, headers):
31
- response = requests.post(self.model_url, json=payload_txt, headers=headers)
32
- response.raise_for_status()
33
- output = response.json()
34
- #st.write(output)
35
- text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
36
- text = json.loads(text)
37
- #st.write(text)
38
- return text
39
-
40
- def fetch_data(self, data_field):
41
- mongodb_uri = os.getenv("MONGODB_URI")
42
- myclient = MongoClient(mongodb_uri)
43
- mydb = myclient.get_database()
44
- mycol = mydb["df_data"]
45
-
46
- # Sort by timestamp field in descending order
47
- x = mycol.find_one(
48
- {"data_field": data_field},
49
- sort=[("timestamp", -1)]
50
- )
51
-
52
- x = x["result"]
53
- return x
54
-
55
- def process(self):
56
- with st.spinner('Snapshot...', show_time=True):
57
- st.write('')
58
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
59
- try:
60
- payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
61
- payload_txt_model = self.request_model(payload_txt, headers)
62
- debug_info = {'data_field' : 'Snapshot Analyst', 'result': payload_txt_model}
63
- upload_response(debug_info)
64
-
65
- except Exception as e:
66
- pass
67
- st.session_state['analyzing'] = False
68
-
69
- def row1(self):
70
- st.session_state['analyzing'] = False
71
- self.payload = ""
72
-
73
- self.website_and_tools_data = get_analyst_response("Website and Tools Analyst")
74
- self.sem_data = get_analyst_response("SEM/PPC Analyst")
75
- self.seo_data = get_analyst_response("SEO Analyst")
76
- self.social_media_data = get_analyst_response("Social Media Analyst")
77
- self.content_data = get_analyst_response("Content Analyst")
78
- self.marketpalce_data = get_analyst_response("Marketplace Analyst")
79
-
80
- analyst_data_dict = {
81
- "Website and Tools": self.website_and_tools_data,
82
- "SEM/PPC": self.sem_data,
83
- "SEO": self.seo_data,
84
- "Social Media": self.social_media_data,
85
- "Content": self.content_data,
86
- "Marketplace": self.marketpalce_data,
87
- }
88
-
89
- for analyst_name, data in analyst_data_dict.items():
90
- self.payload += f"\n\n--- {analyst_name} Analysis ---\n"
91
- if isinstance(data, list):
92
- self.payload += "\n".join(map(str, data))
93
- else:
94
- self.payload += str(data)
95
-
96
- self.process()
97
-
98
- if __name__ == "__main__":
99
- st.set_page_config(layout="wide")
100
-
101
- upload = uploadFile()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
classes/response_social_media.py CHANGED
@@ -80,7 +80,7 @@ class SocialMedia:
80
  def process(self):
81
  with st.spinner('Social Media Analyst...', show_time=True):
82
  st.write('')
83
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
84
  try:
85
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
86
  payload_txt_model = self.request_model(payload_txt, headers)
@@ -101,28 +101,54 @@ class SocialMedia:
101
 
102
  def row1(self):
103
  st.session_state['analyzing'] = False
104
- self.payload = ""
105
-
106
- self.payload += self.fetch_data("Facebook")
107
-
108
-
109
- self.payload += self.fetch_data("Instagram")
110
-
111
-
112
- self.payload += self.fetch_data("Twitter")
113
-
114
-
115
- self.payload += self.fetch_data("YouTube")
116
-
117
-
118
- self.payload += self.fetch_data("Linkedin")
119
-
120
-
121
- self.payload += self.fetch_data("Tiktok")
122
-
123
- summary = self.fetch_data("Client Summary")
124
- self.payload = summary + self.payload
125
- self.process()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
126
 
127
 
128
  if __name__ == "__main__":
 
80
  def process(self):
81
  with st.spinner('Social Media Analyst...', show_time=True):
82
  st.write('')
83
+ headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
84
  try:
85
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
86
  payload_txt_model = self.request_model(payload_txt, headers)
 
101
 
102
  def row1(self):
103
  st.session_state['analyzing'] = False
104
+ self.payload = ""
105
+ count = 0
106
+ try:
107
+ session_fb = st.session_state['fb_upload']
108
+ if session_fb == 'uploaded':
109
+ count += 1
110
+ self.payload += self.fetch_data("Facebook")
111
+ except Exception as e:
112
+ pass
113
+ try:
114
+ session_ig = st.session_state['ig_upload']
115
+ if session_ig == 'uploaded':
116
+ count += 1
117
+ self.payload += self.fetch_data("Instagram")
118
+ except Exception as e:
119
+ pass
120
+ try:
121
+ session_twitter = st.session_state['twitter_upload']
122
+ if session_twitter == 'uploaded':
123
+ count += 1
124
+ self.payload += self.fetch_data("Twitter")
125
+ except Exception as e:
126
+ pass
127
+ try:
128
+ session_yt = st.session_state['youtube_upload']
129
+ if session_yt == 'uploaded':
130
+ count += 1
131
+ self.payload += self.fetch_data("YouTube")
132
+ except Exception as e:
133
+ pass
134
+ try:
135
+ session_linkedin = st.session_state['linkedin_upload']
136
+ if session_linkedin == 'uploaded':
137
+ count += 1
138
+ self.payload += self.fetch_data("Linkedin")
139
+ except Exception as e:
140
+ pass
141
+ try:
142
+ session_tiktok = st.session_state['tiktok_upload']
143
+ if session_tiktok == 'uploaded':
144
+ count += 1
145
+ self.payload += self.fetch_data("Tiktok")
146
+ except Exception as e:
147
+ pass
148
+ if count >= 1:
149
+ summary = self.fetch_data("Client Summary")
150
+ self.payload = summary + self.payload
151
+ self.process()
152
 
153
 
154
  if __name__ == "__main__":
classes/response_target_market.py CHANGED
@@ -55,7 +55,7 @@ class TargetMarket:
55
  def process(self):
56
  with st.spinner('Target Market Analyst...', show_time=True):
57
  st.write('')
58
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
59
  try:
60
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
61
  payload_txt_model = self.request_model(payload_txt, headers)
 
55
  def process(self):
56
  with st.spinner('Target Market Analyst...', show_time=True):
57
  st.write('')
58
+ headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
59
  try:
60
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
61
  payload_txt_model = self.request_model(payload_txt, headers)
classes/response_website_and_tools.py CHANGED
@@ -72,7 +72,7 @@ class WebsiteAndTools:
72
  def process(self):
73
  with st.spinner('Website and Tools...', show_time=True):
74
  st.write('')
75
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
76
  try:
77
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
78
  payload_txt_model = self.request_model(payload_txt, headers)
 
72
  def process(self):
73
  with st.spinner('Website and Tools...', show_time=True):
74
  st.write('')
75
+ headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
76
  try:
77
  payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
78
  payload_txt_model = self.request_model(payload_txt, headers)
classes/response_website_audience_acquisition.py DELETED
@@ -1,91 +0,0 @@
1
- import streamlit as st
2
- import requests
3
- from dotenv import load_dotenv
4
- import os
5
- from helper.upload_response import upload_response
6
- from helper.upload_File import uploadFile
7
- from pymongo import MongoClient
8
- import json
9
-
10
- class WebsiteAudienceAcquisition:
11
- def __init__(self, model_url):
12
- self.uploaded_files = []
13
- self.file_dict = {}
14
- self.model_url = model_url
15
- #self.analyst_name = analyst_name
16
- #self.data_src = data_src
17
- #self.analyst_description = analyst_description
18
- self.initialize()
19
- self.row1()
20
-
21
- def initialize(self):
22
- # FOR ENV
23
- load_dotenv()
24
-
25
- # AGENT NAME
26
- #st.header(self.analyst_name)
27
-
28
- def request_model(self, payload_txt, headers):
29
- response = requests.post(self.model_url, json=payload_txt, headers=headers)
30
- response.raise_for_status()
31
- output = response.json()
32
- #st.write(output)
33
- text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
34
- #text = json.loads(text)
35
- #st.write(text)
36
- return text
37
-
38
- def fetch_data(self, data_field):
39
- mongodb_uri = os.getenv("MONGODB_URI")
40
- myclient = MongoClient(mongodb_uri)
41
- mydb = myclient.get_database()
42
- mycol = mydb["df_data"]
43
-
44
- # Sort by timestamp field in descending order
45
- x = mycol.find_one(
46
- {"data_field": data_field},
47
- sort=[("timestamp", -1)]
48
- )
49
-
50
- x = x["result"]
51
- return x
52
-
53
- def process(self):
54
- with st.spinner('Website Audience Acquisition...', show_time=True):
55
- st.write('')
56
- headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x_api_key')}"}
57
- try:
58
- payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
59
- payload_txt_model = self.request_model(payload_txt, headers)
60
- debug_info = {'data_field' : 'Website Audience Acquisition', 'result': payload_txt_model}
61
- upload_response(debug_info)
62
- st.session_state['website_audience'] = ''
63
-
64
- count = 0
65
- except Exception as e:
66
- pass
67
- st.session_state['analyzing'] = False
68
-
69
- def row1(self):
70
- st.session_state['analyzing'] = False
71
- self.payload = ""
72
- count = 0
73
- try:
74
- session_content_outside_the_website = st.session_state['others']
75
- if session_content_outside_the_website == 'uploaded':
76
- count += 1
77
- self.payload += self.fetch_data("Traffic Acquisition")
78
- except Exception as e:
79
- pass
80
-
81
- if count >= 1:
82
- name = self.fetch_data("Client Name")
83
- website = self.fetch_data("Client Website")
84
- self.payload = name + website + self.payload
85
- self.process()
86
-
87
-
88
- if __name__ == "__main__":
89
- st.set_page_config(layout="wide")
90
-
91
- upload = uploadFile()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
classes/sem_ppc.py CHANGED
@@ -56,7 +56,7 @@ class Sem_PPC:
56
  mobile_ads = ""
57
  video_ads = ""
58
  shopping_ads = ""
59
- with st.spinner('Uploading SEM/PPC...', show_time=True):
60
  st.write('')
61
  # INITIALIZING SESSIONS
62
  #combined_text += f"Client Summary: {st.session_state.nature}\n"
 
56
  mobile_ads = ""
57
  video_ads = ""
58
  shopping_ads = ""
59
+ with st.spinner('Ads...', show_time=True):
60
  st.write('')
61
  # INITIALIZING SESSIONS
62
  #combined_text += f"Client Summary: {st.session_state.nature}\n"
classes/website_and_tools.py CHANGED
@@ -70,7 +70,7 @@ class WebsiteAndTools:
70
  client_relations_management_system = ""
71
  mobile_loading_speed = ""
72
 
73
- with st.spinner('Uploading Website and Tools...', show_time=True):
74
  st.write('')
75
  '''
76
  try:
 
70
  client_relations_management_system = ""
71
  mobile_loading_speed = ""
72
 
73
+ with st.spinner('SEO On Page Analyst...', show_time=True):
74
  st.write('')
75
  '''
76
  try:
helper/data_field.py CHANGED
@@ -5,9 +5,9 @@ def data_field(data_src):
5
  myclient = MongoClient(mongodb_uri)
6
  mydb = myclient.get_database()
7
  mycol = mydb["df_data"]
8
- x = mycol.find_one({"data_field": data_src},
9
- sort=[('timestamp', -1)])
10
- x = x["result"]
11
  return x
12
 
13
  def get_analyst_response(data_src):
@@ -27,37 +27,5 @@ def get_analyst_response(data_src):
27
  if myclient:
28
  myclient.close()
29
 
30
-
31
- def get_marketplace_response(data_src):
32
- try:
33
- mongodb_uri = os.getenv("MONGODB_URI")
34
- myclient = MongoClient(mongodb_uri)
35
- mydb = myclient.get_database()
36
- mycol = mydb["df_response"]
37
-
38
- # Find the most recent document matching the data_src
39
- document = mycol.find_one(
40
- {"data_field": data_src},
41
- sort=[('timestamp', -1)]
42
- )
43
-
44
- # Check if document exists and has the result field
45
- if document and "result" in document:
46
- # Extract amazon and ebay data separately
47
- amazon_data = document["result"].get("amazon", [])
48
- ebay_data = document["result"].get("ebay", [])
49
-
50
- # Combine both datasets into a single list
51
- combined_data = amazon_data + ebay_data
52
-
53
- return combined_data
54
- else:
55
- print(f"No matching document or 'result' field found for data_src: {data_src} in df_response. 404")
56
- return None
57
- except Exception as e:
58
- print(f"Error retrieving data: {str(e)}")
59
- return None
60
- finally:
61
- if myclient:
62
- myclient.close()
63
 
 
5
  myclient = MongoClient(mongodb_uri)
6
  mydb = myclient.get_database()
7
  mycol = mydb["df_data"]
8
+ x = mycol.find_one({"data_field": data_src})
9
+ x = x["result"]["question"]
10
+ #st.write(x)
11
  return x
12
 
13
  def get_analyst_response(data_src):
 
27
  if myclient:
28
  myclient.close()
29
 
30
+ #
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
 
helper/telemetry.py CHANGED
@@ -39,23 +39,3 @@ def collect_telemetry(data):
39
  print(f"Error sending data to MongoDB: {e}")
40
  finally:
41
  client.close()
42
-
43
- def clear_collection(data):
44
- """
45
- Deletes all documents from a specified MongoDB collection.
46
- """
47
- mongodb_uri = os.getenv("MONGODB_URI")
48
- if not mongodb_uri:
49
- print("Deletion skipped: No database configured.")
50
- return
51
-
52
- try:
53
- client = MongoClient(mongodb_uri)
54
- db = client.get_default_database()
55
- collection = db[f"{data}"] # Replace with your collection name
56
- result = collection.delete_many({})
57
- print(f"Deleted {result.deleted_count} documents from the collection.")
58
- except Exception as e:
59
- print(f"Error deleting documents: {e}")
60
- finally:
61
- client.close()
 
39
  print(f"Error sending data to MongoDB: {e}")
40
  finally:
41
  client.close()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
helper/upload_File.py CHANGED
@@ -28,48 +28,6 @@ class uploadFile:
28
  pass
29
 
30
  st.session_state['uploaded_files'] = self.file_dict
31
-
32
- def upload_website_audience(self, uploaded_files):
33
- for _ in range(len(self.file_dict)):
34
- self.file_dict.popitem()
35
-
36
- for uploaded_file in uploaded_files:
37
- if uploaded_file.type == "application/pdf":
38
- try:
39
- with pymupdf.open(stream=uploaded_file.read(), filetype="pdf") as doc:
40
- text = chr(12).join([page.get_text() for page in doc])
41
- self.file_dict[uploaded_file.name] = {'type': 'pdf', 'content': text}
42
- except Exception:
43
- pass
44
- elif uploaded_file.type == "text/csv":
45
- try:
46
- # Skip comment lines that start with #
47
- df = pd.read_csv(
48
- uploaded_file,
49
- comment='#', # Treat lines starting with # as comments
50
- engine='python' # Use more flexible engine
51
- )
52
- self.file_dict[uploaded_file.name] = {'type': 'csv', 'content': df}
53
- except Exception as e:
54
- print(f"Error processing CSV: {str(e)}")
55
- # If that fails, you could try a more manual approach
56
- try:
57
- uploaded_file.seek(0)
58
- raw_text = uploaded_file.read().decode('utf-8')
59
- # Get only non-comment lines
60
- data_lines = [line for line in raw_text.split('\n') if not line.strip().startswith('#')]
61
-
62
- # Use StringIO to create a file-like object from the filtered lines
63
- from io import StringIO
64
- csv_data = StringIO('\n'.join(data_lines))
65
-
66
- # Read from the filtered data
67
- df = pd.read_csv(csv_data)
68
- self.file_dict[uploaded_file.name] = {'type': 'csv', 'content': df}
69
- except Exception as e:
70
- print(f"Second attempt failed: {str(e)}")
71
-
72
- st.session_state['upload_website_audience'] = self.file_dict
73
 
74
  def upload_file_seo(self, uploaded_files):
75
  for _ in range(len(self.file_dict)):
 
28
  pass
29
 
30
  st.session_state['uploaded_files'] = self.file_dict
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
 
32
  def upload_file_seo(self, uploaded_files):
33
  for _ in range(len(self.file_dict)):
pages/analyzing_page.py CHANGED
@@ -1,7 +1,6 @@
1
  import os
2
  import streamlit as st
3
  import threading
4
- import time
5
  from streamlit.runtime.scriptrunner import add_script_run_ctx
6
  from classes.response_off import SeoOffPageAnalyst
7
  from classes.response_on_page import SeoOn
@@ -14,371 +13,210 @@ from classes.response_content import Content
14
  from classes.response_sem_ppc import Sem_PPC
15
  from classes.response_marketplace import Marketplace
16
  from classes.response_target_market import TargetMarket
17
- from classes.response_df_overview import dfOverview
18
- from classes.response_desired_outcome import DesiredOutcome
19
- from classes.response_conversion_analyst import ConversionAnalyst
20
- from classes.response_website_audience_acquisition import WebsiteAudienceAcquisition
21
- from classes.response_content_process_and_assets_analyst import Content_Process_and_Assets_Analyst
22
- from classes.response_connection_analyst import ConnectionAnalyst
23
  from classes.response_executive_summary import ExecutiveSummary
24
- from classes.response_snapshot import Snapshot
25
-
26
- # Initialize session state to track if analysis has been run
27
- if 'analysis_completed' not in st.session_state:
28
- st.session_state.analysis_completed = False
29
-
30
- # Create a thread-safe way to update the UI
31
- class ThreadSafeAnalysis:
32
- def __init__(self, placeholder, name):
33
- self.placeholder = placeholder
34
- self.name = name
35
- self.lock = threading.Lock()
36
-
37
- def update_info(self, message):
38
- with self.lock:
39
- try:
40
- self.placeholder.info(message)
41
- except Exception:
42
- # Silently ignore errors here - this prevents "bad set index" errors
43
- pass
44
-
45
- def update_success(self, message):
46
- with self.lock:
47
- try:
48
- self.placeholder.success(message)
49
- except Exception:
50
- # Silently ignore errors here
51
- pass
52
-
53
- def update_error(self, message):
54
- with self.lock:
55
- try:
56
- self.placeholder.error(message)
57
- except Exception:
58
- # Silently ignore errors here
59
- pass
60
 
61
  def run_analysis():
62
- # Create placeholders for status updates
63
- placeholders = {
64
- "off_page": st.empty(),
65
- "on_page": st.empty(),
66
- "website_tools": st.empty(),
67
- "seo": st.empty(),
68
- "social_media": st.empty(),
69
- "lld_pm_ln": st.empty(),
70
- "pull_through": st.empty(),
71
- "content": st.empty(),
72
- "sem_ppc": st.empty(),
73
- "marketplace": st.empty(),
74
- "target_market": st.empty(),
75
- "df_overview": st.empty(),
76
- "desired_outcome": st.empty(),
77
- "conversion": st.empty(),
78
- "website_audience": st.empty(),
79
- "content_process_and_assets": st.empty(),
80
- "connection": st.empty(),
81
- "snapshot": st.empty(),
82
- "executive_summary": st.empty(),
83
-
84
- }
85
-
86
- # Create thread-safe handlers for each analysis type
87
- handlers = {name: ThreadSafeAnalysis(placeholder, name)
88
- for name, placeholder in placeholders.items()}
89
-
90
- # Define all analysis functions
91
  def run_off_page_analysis():
92
- handler = handlers["off_page"]
93
  try:
94
- handler.update_info("Running SEO Off Page Analysis...")
95
  result = SeoOffPageAnalyst(os.getenv('MODEL_Off_Page_Analyst'))
96
- handler.update_success("SEO Off Page Analysis completed successfully.")
97
  return result
98
  except Exception as e:
99
- handler.update_error(f"SEO Off Page Analysis failed: {str(e)}")
100
  return None
101
 
102
  def run_on_page_analysis():
103
- handler = handlers["on_page"]
104
  try:
105
- handler.update_info("Running On Page Analysis...")
106
  result = SeoOn(os.getenv('MODEL_On_Page_Analyst'))
107
- handler.update_success("On Page Analysis completed successfully.")
108
  return result
109
  except Exception as e:
110
- handler.update_error(f"On Page Analysis failed: {str(e)}")
111
  return None
112
 
113
  def run_website_and_tools_analysis():
114
- handler = handlers["website_tools"]
115
  try:
116
- handler.update_info("Running Website and Tools Analysis...")
117
  result = WebsiteAndTools(os.getenv('Model_Website_and_Tools_Analyst'))
118
- handler.update_success("Website and Tools completed successfully.")
119
  return result
120
  except Exception as e:
121
- handler.update_error(f"Website and Tools Analysis failed: {str(e)}")
122
  return None
123
 
124
  def run_seo_analysis():
125
- handler = handlers["seo"]
126
  try:
127
- handler.update_info("Running SEO Analysis...")
128
  result = Seo(os.getenv('MODEL_SEO_Analyst'))
129
- handler.update_success("SEO Analysis completed successfully.")
130
  return result
131
  except Exception as e:
132
- handler.update_error(f"SEO Analysis failed: {str(e)}")
133
  return None
134
 
135
  def run_social_media_analysis():
136
- handler = handlers["social_media"]
137
  try:
138
- handler.update_info("Running Social Media Analysis...")
139
  result = SocialMedia(os.getenv('MODEL_Social_Media_Analyst'))
140
- handler.update_success("Social Media Analysis completed successfully.")
141
  return result
142
  except Exception as e:
143
- handler.update_error(f"Social Media Analysis failed: {str(e)}")
144
  return None
145
 
146
  def run_lld_pm_ln():
147
- handler = handlers["lld_pm_ln"]
148
  try:
149
- handler.update_info("Running LLD/PM/LN Analysis...")
150
  result = LLD_PM_LN(os.getenv('Model_LLD_PM_LN_ANALYST'))
151
- handler.update_success("LLD/PM/LN completed successfully.")
152
  return result
153
  except Exception as e:
154
- handler.update_error(f"LLD/PM/LN Analysis failed: {str(e)}")
155
  return None
156
 
157
  def run_pull_through_offers():
158
- handler = handlers["pull_through"]
159
  try:
160
- handler.update_info("Running Pull through offer Analysis...")
161
  result = PullThroughOffers(os.getenv('Model_Pull_Through_Offers_Analyst'))
162
- handler.update_success("Pull through offer completed successfully.")
163
  return result
164
  except Exception as e:
165
- handler.update_error(f"Pull through offer Analysis failed: {str(e)}")
166
  return None
167
 
168
  def run_content():
169
- handler = handlers["content"]
170
  try:
171
- handler.update_info("Running Content Analysis...")
172
  result = Content(os.getenv('Model_Content'))
173
- handler.update_success("Content Analysis completed successfully.")
174
  return result
175
  except Exception as e:
176
- handler.update_error(f"Content Analysis failed: {str(e)}")
177
  return None
178
 
179
  def run_sem_ppc_analysis():
180
- handler = handlers["sem_ppc"]
181
  try:
182
- handler.update_info("Running SEM/PPC Analysis...")
183
  result = Sem_PPC(os.getenv('Model_SEM_PPC_Analyst'))
184
- handler.update_success("SEM/PPC Analysis completed successfully.")
185
  return result
186
  except Exception as e:
187
- handler.update_error(f"SEM/PPC Analysis failed: {str(e)}")
188
  return None
189
 
190
  def run_marketplace_analysis():
191
- handler = handlers["marketplace"]
192
  try:
193
- handler.update_info("Running Marketplace Analysis...")
194
- result = Marketplace(os.getenv('Model_Marketplace_Analyst'))
195
- handler.update_success("Marketplace Analysis completed successfully.")
196
  return result
197
  except Exception as e:
198
- handler.update_error(f"Marketplace Analysis failed: {str(e)}")
199
  return None
200
 
201
  def run_target_market_analysis():
202
- handler = handlers["target_market"]
203
  try:
204
- handler.update_info("Running Target Market Analysis...")
205
  result = TargetMarket(os.getenv('Model_Target_Market_Analyst'))
206
- handler.update_success("Target Market Analysis completed successfully.")
207
- return result
208
- except Exception as e:
209
- handler.update_error(f"Target Market Analysis failed: {str(e)}")
210
- return None
211
-
212
- def run_df_overview_analysis():
213
- handler = handlers["df_overview"]
214
- try:
215
- handler.update_info("Running DF Overview Analysis...")
216
- result = dfOverview(os.getenv('Model_DF_Overview_Analyst'))
217
- handler.update_success("DF Overview Analysis completed successfully.")
218
- return result
219
- except Exception as e:
220
- handler.update_error(f"DF Overview Analysis failed: {str(e)}")
221
- return None
222
-
223
- def run_desired_outcomes_analysis():
224
- handler = handlers["desired_outcome"]
225
- try:
226
- handler.update_info("Running Desired Outcomes Analysis...")
227
- result = DesiredOutcome(os.getenv('Model_Desired_Outcomes_DM_Analyst'))
228
- handler.update_success("Desired Outcomes Analysis completed successfully.")
229
  return result
230
  except Exception as e:
231
- handler.update_error(f"Desired Outcomes Analysis failed: {str(e)}")
232
- return None
233
-
234
- def run_conversion_analysis():
235
- handler = handlers["conversion"]
236
- try:
237
- handler.update_info("Running Conversion Analysis...")
238
- result = ConversionAnalyst(os.getenv('Model_Conversion_Analyst'))
239
- handler.update_success("Conversion Analysis completed successfully.")
240
- return result
241
- except Exception as e:
242
- handler.update_error(f"Conversion Analysis failed: {str(e)}")
243
- return None
244
-
245
- def run_website_audience():
246
- handler = handlers["website_audience"]
247
- try:
248
- handler.update_info("Running Website Audience Acquisition Analysis...")
249
- result = WebsiteAudienceAcquisition(os.getenv('Model_Website_Audience_Acquisition_Analyst'))
250
- handler.update_success("Website Audience Acquisition Analysis completed successfully.")
251
- return result
252
- except Exception as e:
253
- handler.update_error(f"Website Audience Acquisition Analysis failed: {str(e)}")
254
- return None
255
-
256
- def run_content_process_and_assets_analysis():
257
- handler = handlers["content_process_and_assets"]
258
- try:
259
- handler.update_info("Running Content - Process and Assets Analysis...")
260
- result = Content_Process_and_Assets_Analyst(os.getenv('Model_Content_Process_and_Assets_Analyst'))
261
- handler.update_success("Content - Process and Assets Analysis completed successfully.")
262
- return result
263
- except Exception as e:
264
- handler.update_error(f"Content - Process and Assets Analysis failed: {str(e)}")
265
- return None
266
-
267
- def run_connection_analysis():
268
- handler = handlers["connection"]
269
- try:
270
- handler.update_info("Connection Analysis...")
271
- result = ConnectionAnalyst(os.getenv('Model_Connection_Analyst'))
272
- handler.update_success("Connection Analysis completed successfully.")
273
- return result
274
- except Exception as e:
275
- handler.update_error(f"Connection Analysis failed: {str(e)}")
276
- return None
277
-
278
- def run_snapshot_analysis():
279
- handler = handlers["snapshot"]
280
- try:
281
- handler.update_info("Running Snapshot by Channel Analysis...")
282
- result = Snapshot(os.getenv('Model_Snapshot_by_Channel_Analyst'))
283
- handler.update_success("Snapshot by Channel Analysis completed successfully.")
284
- return result
285
- except Exception as e:
286
- handler.update_error(f"Snapshot by Channel Analysis failed: {str(e)}")
287
  return None
288
 
289
  def run_executive_summary_analysis():
290
- handler = handlers["executive_summary"]
291
  try:
292
- handler.update_info("Running Executive Summary Analysis...")
293
  result = ExecutiveSummary(os.getenv('Model_Executive_Summary_Analyst'))
294
- handler.update_success("Executive Summary Analysis completed successfully.")
295
  return result
296
  except Exception as e:
297
- handler.update_error(f"Executive Summary Analysis failed: {str(e)}")
298
  return None
299
 
300
- # Define first batch of analyses
301
- threads_first_batch = [
302
- (run_off_page_analysis, "off_page"),
303
- (run_on_page_analysis, "on_page"),
304
- (run_website_and_tools_analysis, "website_tools"),
305
- (run_seo_analysis, "seo"),
306
- (run_social_media_analysis, "social_media"),
307
- (run_lld_pm_ln, "lld_pm_ln"),
308
- (run_pull_through_offers, "pull_through"),
309
- (run_content, "content"),
310
- (run_sem_ppc_analysis, "sem_ppc"),
311
- #(run_marketplace_analysis, "marketplace"),
312
- (run_target_market_analysis, "target_market"),
313
- (run_df_overview_analysis, "df_overview"),
314
- (run_desired_outcomes_analysis, "desired_outcome"),
315
- (run_content_process_and_assets_analysis, "content_process_and_assets"),
316
- (run_conversion_analysis, "conversion"),
317
- (run_website_audience, "website_audience"),
318
- (run_connection_analysis, "connection")
319
- ]
320
-
321
- # Create and start first batch threads with small delays to prevent UI conflicts
322
- thread_objects_first_batch = []
323
- for i, (func, name) in enumerate(threads_first_batch):
324
- # Add a small stagger to thread start times to reduce conflicts
325
- time.sleep(0.1)
326
- thread = threading.Thread(target=func, name=name)
327
- add_script_run_ctx(thread) # Attach Streamlit context
328
- thread_objects_first_batch.append(thread)
329
- thread.start()
330
-
331
- # Wait for all first batch threads to complete
332
- for thread in thread_objects_first_batch:
333
- thread.join()
334
-
335
- # Add a separator
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
336
  try:
337
- st.markdown("---")
338
- except Exception:
339
- pass
340
-
341
- # Wait a bit to let UI stabilize before starting second batch
342
- time.sleep(0.5)
343
 
344
- # Create threads for second batch (snapshot and executive summary)
345
- threads_second_batch = [
346
- (run_snapshot_analysis, "snapshot"),
347
- (run_executive_summary_analysis, "executive_summary")
348
- ]
349
-
350
- # Create and start second batch threads
351
- thread_objects_second_batch = []
352
- for i, (func, name) in enumerate(threads_second_batch):
353
- # Add a small stagger between threads
354
- time.sleep(0.2)
355
- thread = threading.Thread(target=func, name=name)
356
- add_script_run_ctx(thread) # Attach Streamlit context
357
- thread_objects_second_batch.append(thread)
358
- thread.start()
359
-
360
- # Wait for second batch threads to complete
361
- for thread in thread_objects_second_batch:
362
- thread.join()
363
-
364
- # Set analysis_completed to True when all analyses are done
365
- st.session_state.analysis_completed = True
366
- try:
367
- st.success("🎉 All analyses completed!")
368
- except Exception:
369
- pass
370
 
371
- # Navigation button
372
  if st.button("Back"):
373
- st.switch_page("pages/home.py")
374
-
375
- # Main logic
376
- if not st.session_state.analysis_completed:
377
- run_analysis()
378
- else:
379
- st.info("Analysis has already been completed.")
380
-
381
- # View Results button (only displayed after analysis is completed)
382
- if st.session_state.analysis_completed and st.button("View Results", icon="📃"):
383
- st.switch_page("pages/output.py")
384
- st.session_state.analysis_completed = False
 
1
  import os
2
  import streamlit as st
3
  import threading
 
4
  from streamlit.runtime.scriptrunner import add_script_run_ctx
5
  from classes.response_off import SeoOffPageAnalyst
6
  from classes.response_on_page import SeoOn
 
13
  from classes.response_sem_ppc import Sem_PPC
14
  from classes.response_marketplace import Marketplace
15
  from classes.response_target_market import TargetMarket
 
 
 
 
 
 
16
  from classes.response_executive_summary import ExecutiveSummary
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
 
18
  def run_analysis():
19
+ # Placeholders for status updates
20
+ off_page_status = st.empty()
21
+ on_page_status = st.empty()
22
+ website_and_tools_status = st.empty()
23
+ seo_status = st.empty()
24
+ social_media_status = st.empty()
25
+ lld_pm_ln_status = st.empty()
26
+ pull_through_offers_status = st.empty()
27
+ content_status = st.empty()
28
+ sem_ppc = st.empty()
29
+ marketplace = st.empty()
30
+ target_market = st.empty()
31
+ executive_summary_status = st.empty()
32
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
  def run_off_page_analysis():
 
34
  try:
35
+ off_page_status.info("Starting SEO Off Page Analysis...")
36
  result = SeoOffPageAnalyst(os.getenv('MODEL_Off_Page_Analyst'))
37
+ off_page_status.success("SEO Off Page Analysis completed successfully.")
38
  return result
39
  except Exception as e:
40
+ off_page_status.error(f"SEO Off Page Analysis failed: {e}")
41
  return None
42
 
43
  def run_on_page_analysis():
 
44
  try:
45
+ on_page_status.info("Starting On Page Analysis...")
46
  result = SeoOn(os.getenv('MODEL_On_Page_Analyst'))
47
+ on_page_status.success("On Page Analysis completed successfully.")
48
  return result
49
  except Exception as e:
50
+ on_page_status.error(f"On Page Analysis failed: {e}")
51
  return None
52
 
53
  def run_website_and_tools_analysis():
 
54
  try:
55
+ website_and_tools_status.info("Starting Website and Tools Analysis...")
56
  result = WebsiteAndTools(os.getenv('Model_Website_and_Tools_Analyst'))
57
+ website_and_tools_status.success("Website and Tools completed successfully.")
58
  return result
59
  except Exception as e:
60
+ on_page_status.error(f"Website and Tools Analysis failed: {e}")
61
  return None
62
 
63
  def run_seo_analysis():
 
64
  try:
65
+ seo_status.info("Starting SEO Analysis...")
66
  result = Seo(os.getenv('MODEL_SEO_Analyst'))
67
+ seo_status.success("SEO Analysis completed successfully.")
68
  return result
69
  except Exception as e:
70
+ seo_status.error(f"SEO Analysis failed: {e}")
71
  return None
72
 
73
  def run_social_media_analysis():
 
74
  try:
75
+ social_media_status.info("Starting Social Media Analysis...")
76
  result = SocialMedia(os.getenv('MODEL_Social_Media_Analyst'))
77
+ social_media_status.success("Social Media Analysis completed successfully.")
78
  return result
79
  except Exception as e:
80
+ social_media_status.error(f"Social Media Analysis failed: {e}")
81
  return None
82
 
83
  def run_lld_pm_ln():
 
84
  try:
85
+ lld_pm_ln_status.info("Starting LLD/PM/LN Analysis...")
86
  result = LLD_PM_LN(os.getenv('Model_LLD_PM_LN_ANALYST'))
87
+ lld_pm_ln_status.success("LLD/PM/LN completed successfully.")
88
  return result
89
  except Exception as e:
90
+ lld_pm_ln_status.error(f"LLD/PM/LN Analysis failed: {e}")
91
  return None
92
 
93
  def run_pull_through_offers():
 
94
  try:
95
+ pull_through_offers_status.info("Starting Pull through offer Analysis...")
96
  result = PullThroughOffers(os.getenv('Model_Pull_Through_Offers_Analyst'))
97
+ pull_through_offers_status.success("Pull through offer completed successfully.")
98
  return result
99
  except Exception as e:
100
+ pull_through_offers_status.error(f"Pull through offer Analysis failed: {e}")
101
  return None
102
 
103
  def run_content():
 
104
  try:
105
+ content_status.info("Starting Content Analysis...")
106
  result = Content(os.getenv('Model_Content'))
107
+ content_status.success("Content Analysis completed successfully.")
108
  return result
109
  except Exception as e:
110
+ content_status.error(f"Content Analysis failed: {e}")
111
  return None
112
 
113
  def run_sem_ppc_analysis():
 
114
  try:
115
+ sem_ppc.info("Starting SEM/PPC Analysis...")
116
  result = Sem_PPC(os.getenv('Model_SEM_PPC_Analyst'))
117
+ sem_ppc.success("SEM/PPC Analysis completed successfully.")
118
  return result
119
  except Exception as e:
120
+ sem_ppc.error(f"SEM/PPC Analysis failed: {e}")
121
  return None
122
 
123
  def run_marketplace_analysis():
 
124
  try:
125
+ marketplace.info("Starting Marketplace Analysis...")
126
+ result = Marketplace(os.getenv('Model_SEM_PPC_Analyst'))
127
+ marketplace.success("Marketplace Analysis completed successfully.")
128
  return result
129
  except Exception as e:
130
+ marketplace.error(f"Marketplace Analysis failed: {e}")
131
  return None
132
 
133
  def run_target_market_analysis():
 
134
  try:
135
+ target_market.info("Starting Target Market Analysis...")
136
  result = TargetMarket(os.getenv('Model_Target_Market_Analyst'))
137
+ target_market.success("Target Market Analysis completed successfully.")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
138
  return result
139
  except Exception as e:
140
+ target_market.error(f"Target Market Analysis failed: {e}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
141
  return None
142
 
143
  def run_executive_summary_analysis():
 
144
  try:
145
+ executive_summary = st.empty().info("Starting Executive Summary Analysis...")
146
  result = ExecutiveSummary(os.getenv('Model_Executive_Summary_Analyst'))
147
+ executive_summary = st.empty().success("Executive Summary Analysis completed successfully.")
148
  return result
149
  except Exception as e:
150
+ executive_summary = st.empty().error(f"Executive Summary Analysis failed: {e}")
151
  return None
152
 
153
+ # Create threads for concurrent execution
154
+ off_page_thread = threading.Thread(target=run_off_page_analysis)
155
+ on_page_thread = threading.Thread(target=run_on_page_analysis)
156
+ website_and_tools_thread = threading.Thread(target=run_website_and_tools_analysis)
157
+ seo_thread = threading.Thread(target=run_seo_analysis)
158
+ social_media_thread = threading.Thread(target=run_social_media_analysis)
159
+ llm_pm_ln_thread = threading.Thread(target=run_lld_pm_ln)
160
+ pull_through_offers_thread = threading.Thread(target=run_pull_through_offers)
161
+ content_thread = threading.Thread(target=run_content)
162
+ content_sem_ppc_thread = threading.Thread(target=run_sem_ppc_analysis)
163
+ marketplace_thread = threading.Thread(target=run_marketplace_analysis)
164
+ target_market_thread = threading.Thread(target=run_target_market_analysis)
165
+
166
+ # Attach Streamlit context to threads
167
+ add_script_run_ctx(off_page_thread)
168
+ add_script_run_ctx(on_page_thread)
169
+ add_script_run_ctx(website_and_tools_thread)
170
+ add_script_run_ctx(seo_thread)
171
+ add_script_run_ctx(social_media_thread)
172
+ add_script_run_ctx(llm_pm_ln_thread)
173
+ add_script_run_ctx(pull_through_offers_thread)
174
+ add_script_run_ctx(content_thread)
175
+ add_script_run_ctx(content_sem_ppc_thread)
176
+ add_script_run_ctx(marketplace_thread)
177
+ add_script_run_ctx(target_market_thread)
178
+
179
+ # Start threads
180
+ off_page_thread.start()
181
+ on_page_thread.start()
182
+ website_and_tools_thread.start()
183
+ seo_thread.start()
184
+ social_media_thread.start()
185
+ llm_pm_ln_thread.start()
186
+ pull_through_offers_thread.start()
187
+ content_thread.start()
188
+ content_sem_ppc_thread.start()
189
+ marketplace_thread.start()
190
+ target_market_thread.start()
191
+
192
+ # Wait for threads to complete
193
+ off_page_thread.join()
194
+ on_page_thread.join()
195
+ website_and_tools_thread.join()
196
+ seo_thread.join()
197
+ social_media_thread.join()
198
+ llm_pm_ln_thread.join()
199
+ pull_through_offers_thread.join()
200
+ content_thread.join()
201
+ content_sem_ppc_thread.join()
202
+ marketplace_thread.join()
203
+ target_market_thread.join()
204
+
205
+ st.markdown("---")
206
+ executive_summary_status.info("Starting Executive Summary Analysis...")
207
  try:
208
+ executive_summary = ExecutiveSummary(os.getenv('Model_Executive_Summary_Analyst'))
209
+ executive_summary_status.success("Executive Summary Analysis completed successfully.")
210
+ except Exception as e:
211
+ executive_summary_status.error(f"Executive Summary Analysis failed: {e}")
212
+ st.success("🎉 All analyses completed!") # Final success message
 
213
 
214
+ st.success("🎉 All analyses completed!") # Final success message
215
+ # --- Display Button After Completion ---
216
+ if st.button("View Results", icon="📃"):
217
+ st.switch_page("pages/output.py")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
218
 
219
+ # Execute the analysis
220
  if st.button("Back"):
221
+ st.switch_page("pages/home.py")
222
+ run_analysis()
 
 
 
 
 
 
 
 
 
 
pages/home.py CHANGED
@@ -2,6 +2,8 @@ import os
2
  import streamlit as st
3
  from classes.Off_Page import SeoOffPageAnalyst
4
  from classes.On_Page import SeoOn
 
 
5
  from classes.Seo import Seo
6
  from classes.Social_Media_FB import Facebook
7
  from classes.Social_Media_IG import Instagram
@@ -10,7 +12,7 @@ from classes.Youtube import YouTube
10
  from classes.Linkedin import Linkedin
11
  from classes.Tiktok import Tiktok
12
  from classes.website_and_tools import WebsiteAndTools
13
- from classes.client_summary import ClientSummary
14
  from classes.pull_through_offers import PullThroughOffers
15
  from classes.lld_pm_ln import LLD_PM_LN
16
  from classes.content import Content
@@ -19,7 +21,6 @@ from classes.amazon import Amazon
19
  from classes.ebay import eBay
20
  import asyncio
21
  from helper.upload_button import hide_button, unhide_button
22
- from helper.telemetry import clear_collection
23
  import time
24
 
25
  class DigitalFootprintDashboard:
@@ -39,40 +40,29 @@ class DigitalFootprintDashboard:
39
  st.session_state['nature'] = ''
40
  if 'analyze' not in st.session_state:
41
  st.session_state['analyze'] = ''
42
- if 'analysis_completed' not in st.session_state:
43
- st.session_state.analysis_completed = False
44
- if 'uploading' not in st.session_state:
45
- st.session_state['uploading'] = False
46
 
47
  async def create_row1(self):
48
  """Create the first row with four columns"""
49
  col1, col2, col3, col4, col5 = st.columns(5, border=True, gap="medium", vertical_alignment="top")
50
 
51
  with col1:
52
- button_label = "Uploading..." if st.session_state['uploading'] else "Sync Data"
53
- if st.button(button_label, key="sync_button", icon="🔄", use_container_width=True):
54
- st.session_state['uploading'] = True
55
- st.session_state['analyze'] = 'clicked'
56
-
57
- st.session_state['uploading'] = False
58
  else:
59
  st.session_state["analyze"] = ''
60
 
61
- #self.upload_file_button = st.button("Sync Data", st.session_state['analyze'], icon="🔄", use_container_width=True)
62
-
63
- #if self.upload_file_button == True:
64
- # st.session_state["analyze"] = 'clicked'
65
- #unhide_button()
66
- #else:
67
- # st.session_state["analyze"] = ''
68
-
69
- analyze_disabled = st.session_state.get('analyze') != 'clicked'
70
- if st.button("Analyze", key="analyze_button", icon="✨", use_container_width=True, disabled=analyze_disabled):
71
- st.session_state.analysis_completed = False
72
  st.switch_page("pages/analyzing_page.py")
 
 
 
 
73
 
74
 
75
- self.client_summary = ClientSummary()
76
 
77
  with col2:
78
  st.write("## Website Traffic")
@@ -109,40 +99,43 @@ class DigitalFootprintDashboard:
109
  self.website_and_tools = WebsiteAndTools(os.getenv('MODEL_On_Page_Analyst'))
110
  self.lld_pm_ln = LLD_PM_LN(os.getenv('Model_LLD_PM_LN_ANALYST'))
111
  self.pull_through_offers = PullThroughOffers(os.getenv('Model_Pull_Through_Offers_Analyst'))
 
112
 
113
  return col1, col2, col3, col4, col5
114
 
115
  async def create_row2(self):
116
  """Create the first row with four columns"""
117
- col1, col4 = st.columns(2, border=True, gap="medium", vertical_alignment="top")
118
- # col1, col2, col3, col4 = st.columns(4, border=True, gap="medium", vertical_alignment="top")
119
 
120
  with col1:
121
  st.write("## Ads")
122
  self.sem_ppc = Sem_PPC(os.getenv('Model_SEM_PPC_Analyst'))
123
- # with col2:
124
- # st.write("## Amazon")
125
- # self.amazon = Amazon(os.getenv('Model_SEM_PPC_Analyst'))
126
- # with col3:
127
- # st.write("## eBay")
128
- # self.ebay = eBay(os.getenv('Model_SEM_PPC_Analyst'))
129
- with col4:
130
- st.write("## Website Content")
131
- self.content = Content(os.getenv('Model_Content'))
132
- return col1, col4
133
 
134
- async def delete_button(self):
135
- reset_button = st.button("RESET ALL",icon="🗑️", use_container_width=True)
136
-
137
- if reset_button:
138
- clear_collection("df_data")
139
- clear_collection("df_response")
 
 
 
 
 
 
 
140
 
141
  async def main(self):
142
  """Main method to run the dashboard"""
143
  await self.create_row1()
144
  await self.create_row2()
145
- await self.delete_button()
146
  #self.run_analysis()
147
 
148
  # Main execution
 
2
  import streamlit as st
3
  from classes.Off_Page import SeoOffPageAnalyst
4
  from classes.On_Page import SeoOn
5
+ from classes.On_Page_Crawl import SeoOnCrawl
6
+ from classes.Seo_Backlinks import SeoBacklinks
7
  from classes.Seo import Seo
8
  from classes.Social_Media_FB import Facebook
9
  from classes.Social_Media_IG import Instagram
 
12
  from classes.Linkedin import Linkedin
13
  from classes.Tiktok import Tiktok
14
  from classes.website_and_tools import WebsiteAndTools
15
+ from classes.client_summary import CientSummary
16
  from classes.pull_through_offers import PullThroughOffers
17
  from classes.lld_pm_ln import LLD_PM_LN
18
  from classes.content import Content
 
21
  from classes.ebay import eBay
22
  import asyncio
23
  from helper.upload_button import hide_button, unhide_button
 
24
  import time
25
 
26
  class DigitalFootprintDashboard:
 
40
  st.session_state['nature'] = ''
41
  if 'analyze' not in st.session_state:
42
  st.session_state['analyze'] = ''
 
 
 
 
43
 
44
  async def create_row1(self):
45
  """Create the first row with four columns"""
46
  col1, col2, col3, col4, col5 = st.columns(5, border=True, gap="medium", vertical_alignment="top")
47
 
48
  with col1:
49
+
50
+ self.upload_file_button = st.button("Sync Data", st.session_state['analyze'], icon="🔄", use_container_width=True)
51
+ if self.upload_file_button == True:
52
+ st.session_state["analyze"] = 'clicked'
53
+ unhide_button()
 
54
  else:
55
  st.session_state["analyze"] = ''
56
 
57
+ self.analyze_button = st.button("Analyze", icon="", use_container_width=True)
58
+ if self.analyze_button == True:
 
 
 
 
 
 
 
 
 
59
  st.switch_page("pages/analyzing_page.py")
60
+ else:
61
+ hide_button()
62
+
63
+ self.client_summary = CientSummary()
64
 
65
 
 
66
 
67
  with col2:
68
  st.write("## Website Traffic")
 
99
  self.website_and_tools = WebsiteAndTools(os.getenv('MODEL_On_Page_Analyst'))
100
  self.lld_pm_ln = LLD_PM_LN(os.getenv('Model_LLD_PM_LN_ANALYST'))
101
  self.pull_through_offers = PullThroughOffers(os.getenv('Model_Pull_Through_Offers_Analyst'))
102
+ self.content = Content(os.getenv('Model_Content'))
103
 
104
  return col1, col2, col3, col4, col5
105
 
106
  async def create_row2(self):
107
  """Create the first row with four columns"""
108
+ col1, col2, col3, col4 = st.columns(4, border=True, gap="medium", vertical_alignment="top")
 
109
 
110
  with col1:
111
  st.write("## Ads")
112
  self.sem_ppc = Sem_PPC(os.getenv('Model_SEM_PPC_Analyst'))
113
+ with col2:
114
+ st.write("## Amazon")
115
+ self.amazon = Amazon(os.getenv('Model_SEM_PPC_Analyst'))
116
+ with col3:
117
+ st.write("## eBay")
118
+ self.ebay = eBay(os.getenv('Model_SEM_PPC_Analyst'))
119
+ return col1, col2, col3
 
 
 
120
 
121
+ async def run_analysis(self):
122
+ result = await asyncio.gather(
123
+ self.gtmetrix.process(),
124
+ self.backlinks.process(),
125
+ self.keywords.process(),
126
+ self.facebook.process(),
127
+ self.instagram.process(),
128
+ self.twitter.process(),
129
+ self.youtube.process(),
130
+ self.linkedin.process(),
131
+ self.tiktok.process(),
132
+ )
133
+ st.session_state.analyze = False
134
 
135
  async def main(self):
136
  """Main method to run the dashboard"""
137
  await self.create_row1()
138
  await self.create_row2()
 
139
  #self.run_analysis()
140
 
141
  # Main execution
pages/output.py CHANGED
@@ -1,116 +1,45 @@
1
  import json
2
  import os
3
  import streamlit as st
4
- from helper.data_field import get_analyst_response, data_field, get_marketplace_response
5
- import time
6
 
7
  st.set_page_config(layout="centered")
8
 
9
def write_client_footprint():
    """Build a Markdown table of the client's current KPIs per source/channel.

    Pulls the "Website and Tools", "SEO" and "Social Media" analyst responses,
    flattens each list of {category, current_footprint} records into a lookup,
    and renders one Markdown row per KPI.  Channels with no data render "N/A".

    Returns:
        str: the complete Markdown table (header plus one row per KPI).
    """
    # Each get_analyst_response(...) may return None / a non-iterable on
    # failure; the dict comprehension then raises TypeError and we fall
    # back to None for that whole channel.
    try:
        web = get_analyst_response("Website and Tools Analyst")
        result_web = {item["category"]: item["current_footprint"] for item in web}
    except TypeError:
        result_web = None

    try:
        seo = get_analyst_response("SEO Analyst")
        seo = {item["category"]: item["current_footprint"] for item in seo}
    except TypeError:
        seo = None

    try:
        socmed = get_analyst_response("Social Media Analyst")
        socmed = {item["category"]: item["current_footprint"] for item in socmed}
    except TypeError:
        socmed = None

    def safe_get(data, key):
        # BUG FIX: the previous version called data.get(key) unconditionally,
        # which raised AttributeError whenever the whole response above had
        # fallen back to None.  Treat a missing lookup the same as a missing
        # key: render "N/A".
        if not data:
            return "N/A"
        value = data.get(key)
        return value if value else "N/A"

    markdown_table = "| Source/Channel | Current KPI |\n"
    markdown_table += "|---|---|\n"
    markdown_table += f"| Website Health Score | {safe_get(result_web, 'website_overall_health_score')} |\n"
    markdown_table += f"| Organic Traffic to the Website | {safe_get(seo, 'organic_traffic')} |\n"
    markdown_table += f"| Paid Traffic to the Website | {safe_get(seo, 'paid_traffic')} |\n"
    markdown_table += f"| Referral Traffic to the Website | {safe_get(seo, 'referral_traffic')} |\n"
    markdown_table += f"| Email Traffic to the Website | N/A |\n"
    markdown_table += f"| Direct Traffic to the Website | {safe_get(seo, 'direct_traffic')} |\n"
    markdown_table += f"| Social Traffic to the Website | N/A |\n"
    markdown_table += f"| Display Traffic to the Website | N/A |\n"
    markdown_table += f"| Email Database | N/A |\n"
    markdown_table += f"| Facebook Followers | {safe_get(socmed, 'facebook_followers')} |\n"
    markdown_table += f"| Twitter Followers | {safe_get(socmed, 'twitter_followers')} |\n"
    markdown_table += f"| Instagram Followers | {safe_get(socmed, 'instagram_followers')} |\n"
    markdown_table += f"| Linkedin Followers | {safe_get(socmed, 'linkedin_followers')} |\n"
    markdown_table += f"| Google My Business | N/A |\n"
    markdown_table += f"| # of Keywords Ranking in Top 10 | {safe_get(seo, 'keyword_ranking_in_top_10')} |\n"
    markdown_table += f"| # of Keywords Ranking in Top 100 | {safe_get(seo, 'keyword_ranking_in_top_100')} |\n"

    return markdown_table
53
 
54
def write_snapshot(data):
    """Render the "Snapshot by Channel" table in Streamlit.

    ``data`` is expected to be a list of dicts carrying ``channel``,
    ``status``, ``requirements`` and ``deliver`` keys.  A dict is dumped as
    JSON, any other shape is shown raw, and parse/key failures are reported
    inline instead of propagating to the caller.
    """
    if not data:
        st.warning("No data retrieved for analysis.")
        return

    try:
        parsed_data = data

        if isinstance(parsed_data, list):
            table = "| Channel | Status | Requirements | What's Needed to Deliver |\n"
            table += "|---|---|---|---|\n"

            for row in parsed_data:
                # .get() keeps a partially-filled record from raising KeyError.
                channel = row.get('channel', 'N/A')
                status = row.get('status', 'N/A')
                requirements = row.get('requirements', 'N/A')
                deliver = row.get('deliver', 'N/A')

                # Humanise the channel name: multi-word names get title case,
                # single tokens (e.g. acronyms) are shown fully upper-cased.
                label = channel.replace('_', ' ')
                label = label.title() if ' ' in label else label.upper()

                table += f"| {label} | {status} | {requirements} | {deliver} |\n"

            st.markdown(table)
        elif isinstance(parsed_data, dict):
            # A single summary object rather than per-channel rows.
            st.write("Analysis Result (Summary):")
            st.json(parsed_data)
        else:
            st.warning("data is not in the expected list format.")
            st.write(parsed_data)

    except json.JSONDecodeError:
        st.error("Error: Could not parse the data as JSON.")
        st.text(data)
    except AttributeError:
        st.error("Error: Could not find expected keys ('channel', 'status', 'requirements', 'deliver') in the data.")
        st.write(parsed_data)
    except Exception as e:
        st.error(f"An unexpected error occurred while processing data: {e}")
        st.write(data)
113
-
114
  def write_table(website_and_tools_data):
115
 
116
  if website_and_tools_data:
@@ -133,7 +62,6 @@ def write_table(website_and_tools_data):
133
  # Add a row to the Markdown table string
134
  # Replace underscores with spaces and apply title case to category
135
  category_formatted = category.replace('_', ' ').title()
136
-
137
  current_footprint_formatted = current_footprint.replace('_', ' ')
138
  best_of_breed_formatted = best_of_breed.replace('_', ' ')
139
 
@@ -219,22 +147,25 @@ def seo_on_page_table(df_data):
219
  # --- End: Loop and display data ---
220
 
221
  def display_outputs():
222
- client_name = data_field("Client Name")
223
- client_website = data_field("Client Website")
224
-
225
-
 
 
226
 
227
- overview = get_analyst_response("DF Overview Analyst")
228
 
229
  st.markdown("# Digital Marketing Audit")
230
  st.markdown(f"for: **{client_name} ({client_website})**")
231
  st.write("")
232
-
 
233
  st.markdown("### DIGITAL FOOTPRINT OVERVIEW")
234
  st.markdown(f"{overview}")
235
  st.markdown("---")
236
  st.markdown("### Executive Summary")
237
- st.markdown(get_analyst_response("Executive Summary"))
 
238
  st.markdown("---")
239
 
240
  st.markdown("### CLIENT FOOTPRINT")
@@ -252,7 +183,7 @@ def display_outputs():
252
  st.markdown("---")
253
 
254
  st.markdown("### SNAPSHOT BY CHANNEL")
255
- write_snapshot(get_analyst_response("Snapshot Analyst")) #write_snapshot
256
  st.markdown("---")
257
 
258
  st.markdown("## AUDITS PER CHANNEL")
@@ -264,7 +195,7 @@ In line with this, we have looked into the technology used by **{client_name}**
264
  website_and_tools_data = get_analyst_response("Website and Tools Analyst")
265
  write_table(website_and_tools_data)
266
 
267
- #st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
268
  st.markdown("---")
269
 
270
  st.markdown("### SEARCH ENGINE MARKETING/PPC")
@@ -276,7 +207,7 @@ Currently, {client_name} has already explored numerous online advertising. Its c
276
  sem_data = get_analyst_response("SEM/PPC Analyst")
277
  write_table(sem_data)
278
 
279
- #st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
280
  st.markdown("---")
281
 
282
  st.markdown("### SEARCH ENGINE OPTIMIZATION")
@@ -287,7 +218,7 @@ There are two types of SEO based on where the optimization is implemented: On-pa
287
  seo_data = get_analyst_response("SEO Analyst")
288
  write_table(seo_data)
289
 
290
- #st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
291
  st.markdown("---")
292
 
293
  # Write On Page Table
@@ -295,15 +226,15 @@ There are two types of SEO based on where the optimization is implemented: On-pa
295
  on_page_data = get_analyst_response("On Page Analyst")
296
  seo_on_page_table(on_page_data)
297
 
298
- #st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
299
  st.markdown("---")
300
 
301
  # Write Off Page Table
302
  st.markdown("### OFF PAGE OPTIMIZATION")
303
- on_page_data = get_analyst_response("SEO Off Page Analyst")
304
  seo_on_page_table(on_page_data)
305
 
306
- #st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
307
  st.markdown("---")
308
 
309
  # Write SocMed Table
@@ -314,7 +245,7 @@ Regardless, it is still a great channel worth investing to improve a business’
314
  social_media_data = get_analyst_response("Social Media Analyst")
315
  write_table(social_media_data)
316
 
317
- #st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
318
  st.markdown("---")
319
 
320
  # Write SocMed Table
@@ -322,14 +253,15 @@ Regardless, it is still a great channel worth investing to improve a business’
322
  st.markdown(f"""Content is king in digital marketing. People log into the internet to look for and consume information in different formats: text-based, video, audio, or image. Content is what help businesses establish their expertise in the industry, convert leads into customers, guide their customers through their sales funnel, and build relationships with their customers. """)
323
  content_data = get_analyst_response("Content Analyst")
324
  write_table(content_data)
325
- #st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
326
  st.markdown("---")
327
 
328
- # if (get_analyst_response("Marketplace Analyst")):
329
- # st.markdown("### MARKET PLACE")
330
- # st.table(get_analyst_response("Marketplace Analyst"))
331
- # st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
332
- # st.markdown("---")
 
333
 
334
 
335
  st.markdown("## OTHER INFORMATION")
@@ -349,7 +281,7 @@ Regardless, it is still a great channel worth investing to improve a business’
349
  st.write(target_market_data['summary'])
350
 
351
  st.markdown("##### WHAT IS THE DESIRED OUTCOMES OF DIGITAL MARKETING?")
352
- st.markdown(get_analyst_response("Desired Outcomes Analyst"))
353
 
354
  st.markdown("##### WHAT IS THE PULL-THROUGH OFFER?")
355
  pull_through_data = get_analyst_response("Pull through offers Analyst")
@@ -357,7 +289,7 @@ Regardless, it is still a great channel worth investing to improve a business’
357
 
358
 
359
  st.markdown("##### WEBSITE AUDIENCE ACQUISITION")
360
- website_audience_data = get_analyst_response("Website Audience Acquisition")
361
  st.write(website_audience_data)
362
 
363
  #LLD/PM/LN
@@ -371,7 +303,7 @@ Regardless, it is still a great channel worth investing to improve a business’
371
  st.markdown("##### LEAD NURTURING")
372
  st.write(lld_data.get('lead_nurturing', None))
373
 
374
- #st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
375
  st.markdown("---")
376
 
377
 
@@ -379,21 +311,15 @@ Regardless, it is still a great channel worth investing to improve a business’
379
  st.write(f"""Content is king in digital marketing. People log into the internet to look for and consume information in different formats: text-based, video, audio, or image. Content is what help businesses establish their expertise in the industry, convert leads into customers, guide their customers through their sales funnel, and build relationships with their customers. \n
380
  We have evaluated the process of content development strategy and existing content assets of {client_name} based on how they serve clients throughout the customer journey. """)
381
 
382
- def safe_value(data: dict, key: str) -> str:
383
- try:
384
- value = data.get(key)
385
- return value if value else "N/A"
386
- except Exception:
387
- return "N/A"
388
-
389
  pna_data = get_analyst_response("Content - Process and Assets Analyst")
390
  if pna_data:
391
  st.markdown("##### AWARENESS STAGE")
392
- st.write(safe_value(pna_data, 'awareness_stage'))
393
  st.markdown("##### CONSIDERATION STAGE")
394
- st.write(safe_value(pna_data, 'consideration_stage'))
395
  st.markdown("##### DECISION STAGE")
396
- st.write(safe_value(pna_data, 'decision_stage'))
397
 
398
  else:
399
  st.markdown("##### AWARENESS STAGE")
@@ -403,46 +329,24 @@ We have evaluated the process of content development strategy and existing conte
403
  st.markdown("##### DECISION STAGE")
404
  st.write(None)
405
 
406
-
407
- #st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
408
  st.markdown("---")
 
409
 
410
-
411
- conversion = get_analyst_response("Conversion Analyst")
412
  st.markdown("#### CONVERSION – ACTIVATION OF VISITORS")
 
 
 
 
 
 
 
 
413
 
414
- if conversion:
415
- st.markdown("##### AWARENESS TO TRAFFIC")
416
- st.write(safe_value(conversion, 'awareness_to_traffic'))
417
-
418
- st.markdown("##### TRAFFIC TO LEAD CONVERSION")
419
- st.write(safe_value(conversion, 'traffic_to_lead'))
420
-
421
- st.markdown("##### LEAD TO SALES CONVERSION")
422
- st.write(safe_value(conversion, 'lead_to_sales'))
423
-
424
- st.markdown("##### CONVERSION TO BRAND LOYALTY")
425
- st.write(safe_value(conversion, 'conversion_to_brand'))
426
- else:
427
- st.markdown("##### AWARENESS TO TRAFFIC")
428
- st.write(None)
429
- st.markdown("##### TRAFFIC TO LEAD CONVERSION")
430
- st.write(None)
431
- st.markdown("##### LEAD TO SALES CONVERSION")
432
- st.write(None)
433
- st.markdown("##### CONVERSION TO BRAND LOYALTY")
434
- st.write(None)
435
-
436
-
437
- conversion = get_analyst_response("Connection Analyst")
438
  st.markdown("##### CONNECTION OF ALL ONLINE AND OFFLINE TOUCH POINTS")
439
- st.write(conversion)
440
 
441
- #st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
442
-
443
-
444
 
445
- st.markdown("<div id='top'></div>", unsafe_allow_html=True);
446
  if st.button("Back to Dashboard", icon="🏠"):
447
  st.switch_page("pages/home.py")
448
  display_outputs()
 
1
  import json
2
  import os
3
  import streamlit as st
4
+ from helper.data_field import get_analyst_response
5
+ from helper.telemetry import collect_telemetry
6
 
7
  st.set_page_config(layout="centered")
8
 
9
  def write_client_footprint():
10
 
11
+ web = get_analyst_response("Website and Tools Analyst")
12
+ result_web = {item["category"]: item["current_footprint"] for item in web}
13
+
14
+ seo = get_analyst_response("SEO Analyst")
15
+ seo = {item["category"]: item["current_footprint"] for item in seo}
16
+
17
+ socmed = get_analyst_response("Social Media Analyst")
18
+ socmed = {item["category"]: item["current_footprint"] for item in socmed}
19
+
 
 
 
 
 
 
 
 
 
 
 
 
20
 
21
  markdown_table = "| Source/Channel | Current KPI |\n"
22
  markdown_table += "|---|---|\n"
23
+ markdown_table += f"| Website Health Score | {result_web['website_overall_health_score']} |\n"
24
+ markdown_table += f"| Organic Traffic to the Website | {seo['organic_traffic']} |\n"
25
+ markdown_table += f"| Paid Traffic to the Website | {seo['paid_traffic']} |\n"
26
+ markdown_table += f"| Referral Traffic to the Website | {seo['referral_traffic']} |\n"
27
  markdown_table += f"| Email Traffic to the Website | N/A |\n"
28
+ markdown_table += f"| Direct Traffic to the Website | {seo['direct_traffic']} |\n"
29
  markdown_table += f"| Social Traffic to the Website | N/A |\n"
30
  markdown_table += f"| Display Traffic to the Website | N/A |\n"
31
  markdown_table += f"| Email Database | N/A |\n"
32
+ markdown_table += f"| Facebook Followers | {socmed['facebook_followers']} |\n"
33
+ markdown_table += f"| Twitter Followers | {socmed['twitter_followers']} |\n"
34
+ markdown_table += f"| Instagram Followers | {socmed['instagram_followers']} |\n"
35
+ markdown_table += f"| Linkedin Followers | {socmed['linkedin_followers']} |\n"
36
  markdown_table += f"| Google My Business | N/A |\n"
37
+ markdown_table += f"| # of Keywords Ranking in Top 10 | {seo['keyword_ranking_in_top_10']} |\n"
38
+ markdown_table += f"| # of Keywords Ranking in Top 100 | {seo['keyword_ranking_in_top_100']} |\n"
39
 
40
  return markdown_table
41
 
42
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
43
  def write_table(website_and_tools_data):
44
 
45
  if website_and_tools_data:
 
62
  # Add a row to the Markdown table string
63
  # Replace underscores with spaces and apply title case to category
64
  category_formatted = category.replace('_', ' ').title()
 
65
  current_footprint_formatted = current_footprint.replace('_', ' ')
66
  best_of_breed_formatted = best_of_breed.replace('_', ' ')
67
 
 
147
  # --- End: Loop and display data ---
148
 
149
  def display_outputs():
150
+ st.markdown("<div id='top'></div>", unsafe_allow_html=True);
151
+ client_name = "RMX Creatives"
152
+ client_website = "https://rmxcreatives.com/"
153
+ overview = f"""**{client_name}** is a financial services company based in Auckland, New Zealand, specializing in providing quick and flexible loan solutions for businesses and individuals. Represented by Paul Stone, LoansOne has enlisted ShoreMarketing to perform a deep dive into their digital footprint to have a view of the holistic status of their digital properties and determine how each property can play part in implementing a stronger digital marketing plan.\n
154
+ The Digital Marketing Footprint consists of deep-dive research by ShoreMarketing specialists to help the business leaders of LoansOne understand the effectiveness of their existing digital initiatives with the view of giving them an insight to developing a strategy and effectively allocating business resources to digital properties that will give them the best results.\n
155
+ This document represents the results of our audit of LoansOne’s digital marketing and management practices. Our audit covered reviews of key digital areas: Website and Tools, PPC/SEM, SEO, Social Media, and Market Places."""
156
 
 
157
 
158
  st.markdown("# Digital Marketing Audit")
159
  st.markdown(f"for: **{client_name} ({client_website})**")
160
  st.write("")
161
+ st.write("")
162
+ st.write("")
163
  st.markdown("### DIGITAL FOOTPRINT OVERVIEW")
164
  st.markdown(f"{overview}")
165
  st.markdown("---")
166
  st.markdown("### Executive Summary")
167
+
168
+ st.markdown(f"Simtech LED's digital footprint reveals significant strengths and areas for improvement that can enhance its competitive positioning in the casino, gaming, and entertainment LED market. The analysis highlights the following key findings and recommendations")
169
  st.markdown("---")
170
 
171
  st.markdown("### CLIENT FOOTPRINT")
 
183
  st.markdown("---")
184
 
185
  st.markdown("### SNAPSHOT BY CHANNEL")
186
+ st.write("TBD")
187
  st.markdown("---")
188
 
189
  st.markdown("## AUDITS PER CHANNEL")
 
195
  website_and_tools_data = get_analyst_response("Website and Tools Analyst")
196
  write_table(website_and_tools_data)
197
 
198
+ st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
199
  st.markdown("---")
200
 
201
  st.markdown("### SEARCH ENGINE MARKETING/PPC")
 
207
  sem_data = get_analyst_response("SEM/PPC Analyst")
208
  write_table(sem_data)
209
 
210
+ st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
211
  st.markdown("---")
212
 
213
  st.markdown("### SEARCH ENGINE OPTIMIZATION")
 
218
  seo_data = get_analyst_response("SEO Analyst")
219
  write_table(seo_data)
220
 
221
+ st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
222
  st.markdown("---")
223
 
224
  # Write On Page Table
 
226
  on_page_data = get_analyst_response("On Page Analyst")
227
  seo_on_page_table(on_page_data)
228
 
229
+ st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
230
  st.markdown("---")
231
 
232
  # Write Off Page Table
233
  st.markdown("### OFF PAGE OPTIMIZATION")
234
+ on_page_data = get_analyst_response("Off Page Analyst")
235
  seo_on_page_table(on_page_data)
236
 
237
+ st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
238
  st.markdown("---")
239
 
240
  # Write SocMed Table
 
245
  social_media_data = get_analyst_response("Social Media Analyst")
246
  write_table(social_media_data)
247
 
248
+ st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
249
  st.markdown("---")
250
 
251
  # Write SocMed Table
 
253
  st.markdown(f"""Content is king in digital marketing. People log into the internet to look for and consume information in different formats: text-based, video, audio, or image. Content is what help businesses establish their expertise in the industry, convert leads into customers, guide their customers through their sales funnel, and build relationships with their customers. """)
254
  content_data = get_analyst_response("Content Analyst")
255
  write_table(content_data)
256
+ st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
257
  st.markdown("---")
258
 
259
+ if (get_analyst_response("Marketplace Analyst")):
260
+ st.markdown("### MARKET PLACE")
261
+ marketpalce_data = get_analyst_response("Marketplace Analyst")
262
+ write_table(marketpalce_data)
263
+ st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
264
+ st.markdown("---")
265
 
266
 
267
  st.markdown("## OTHER INFORMATION")
 
281
  st.write(target_market_data['summary'])
282
 
283
  st.markdown("##### WHAT IS THE DESIRED OUTCOMES OF DIGITAL MARKETING?")
284
+ st.write("TBD")
285
 
286
  st.markdown("##### WHAT IS THE PULL-THROUGH OFFER?")
287
  pull_through_data = get_analyst_response("Pull through offers Analyst")
 
289
 
290
 
291
  st.markdown("##### WEBSITE AUDIENCE ACQUISITION")
292
+ website_audience_data = get_analyst_response("Website Audience Acquisition Analyst")
293
  st.write(website_audience_data)
294
 
295
  #LLD/PM/LN
 
303
  st.markdown("##### LEAD NURTURING")
304
  st.write(lld_data.get('lead_nurturing', None))
305
 
306
+ st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
307
  st.markdown("---")
308
 
309
 
 
311
  st.write(f"""Content is king in digital marketing. People log into the internet to look for and consume information in different formats: text-based, video, audio, or image. Content is what help businesses establish their expertise in the industry, convert leads into customers, guide their customers through their sales funnel, and build relationships with their customers. \n
312
  We have evaluated the process of content development strategy and existing content assets of {client_name} based on how they serve clients throughout the customer journey. """)
313
 
314
+
 
 
 
 
 
 
315
  pna_data = get_analyst_response("Content - Process and Assets Analyst")
316
  if pna_data:
317
  st.markdown("##### AWARENESS STAGE")
318
+ st.write(pna_data.get('awareness_stage', 'N/A'))
319
  st.markdown("##### CONSIDERATION STAGE")
320
+ st.write(pna_data.get('consideration_stage', 'N/A'))
321
  st.markdown("##### DECISION STAGE")
322
+ st.write(pna_data.get('decision_stage', 'N/A'))
323
 
324
  else:
325
  st.markdown("##### AWARENESS STAGE")
 
329
  st.markdown("##### DECISION STAGE")
330
  st.write(None)
331
 
 
 
332
  st.markdown("---")
333
+ st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
334
 
 
 
335
  st.markdown("#### CONVERSION – ACTIVATION OF VISITORS")
336
+ st.markdown("##### AWARENESS TO TRAFFIC")
337
+ st.write("TBD")
338
+ st.markdown("##### TRAFFIC TO LEAD CONVERSION")
339
+ st.write("TBD")
340
+ st.markdown("##### LEAD TO SALES CONVERSION")
341
+ st.write("TBD")
342
+ st.markdown("##### CONVERSION TO BRAND LOYALTY")
343
+ st.write("TBD")
344
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
345
  st.markdown("##### CONNECTION OF ALL ONLINE AND OFFLINE TOUCH POINTS")
346
+ st.write("TBD")
347
 
348
+ st.markdown("<a href='#top'>Go to top</a>", unsafe_allow_html=True)
 
 
349
 
 
350
  if st.button("Back to Dashboard", icon="🏠"):
351
  st.switch_page("pages/home.py")
352
  display_outputs()