Ronio Jerico Roque committed on
Commit
b438494
·
1 Parent(s): bbb39ae

Add SEO analysis modules and refactor existing code

Browse files

- Implemented `SeoOn`, `Seo`, and `WebsiteAndTools` classes for handling on-page SEO, general SEO analysis, and website/tool evaluations.
- Created helper functions for data fetching and response uploading to MongoDB.
- Introduced new page `analyzing_page.py` to manage concurrent analysis of different SEO aspects.
- Refactored `home.py` to integrate new analysis classes and streamline the user interface.
- Added session state management for various SEO metrics and analysis results.
- Enhanced error handling and user feedback during analysis processes.

classes/Off_Page.py CHANGED
@@ -11,6 +11,7 @@ from helper.button_behaviour import hide_button, unhide_button
11
  from helper.initialize_analyze_session import initialize_analyze_session
12
  import pandas as pd
13
  import asyncio
 
14
 
15
  class SeoOffPageAnalyst:
16
  def __init__(self, model_url):
@@ -29,45 +30,20 @@ class SeoOffPageAnalyst:
29
 
30
  # AGENT NAME
31
  #st.header(self.analyst_name)
 
 
32
 
33
- def request_model(self, payload_txt):
34
- response = requests.post(self.model_url, json=payload_txt)
35
  response.raise_for_status()
36
  output = response.json()
 
 
37
 
38
- categories = []
39
- remarks = []
40
- count = []
41
 
42
- for key, value in output.items():
43
- if key == 'json':
44
- for item in value:
45
- categories.append(item.get('elements', 'N/A').replace('_', ' ').title())
46
- remarks.append(item.get('remarks', 'N/A'))
47
- count.append(item.get('count', 'N/A'))
48
-
49
- output = ""
50
- for i in range(len(categories)):
51
- output += f"\n\n---\n **Category:** {categories[i]}"
52
- output += f"\n\n **Remarks:** {remarks[i]}\n\n"
53
- output += f"**Count:** {count[i]}"
54
-
55
- data = {
56
- "": [str(category) for category in categories],
57
- "Count": [str(count) for count in count],
58
- "Remarks": [str(remark) for remark in remarks]
59
-
60
- }
61
-
62
- df_output = pd.DataFrame(data)
63
- '''
64
- with st.expander("AI Analysis", expanded=True, icon="🤖"):
65
- st.table(df_output.style.set_table_styles(
66
- [{'selector': 'th:first-child, td:first-child', 'props': [('width', '20px')]},
67
- {'selector': 'th, td', 'props': [('width', '150px'), ('text-align', 'center')]}]
68
- ).set_properties(**{'text-align': 'center'}))
69
- '''
70
- return output
71
 
72
  def process(self):
73
  start_time = time.time()
@@ -81,6 +57,7 @@ class SeoOffPageAnalyst:
81
  if file_info['type'] == 'pdf':
82
  combined_text += file_info['content'] + "\n"
83
  '''
 
84
  if file_info['type'] == 'csv':
85
  # Load CSV
86
  df = pd.read_csv(StringIO(file_info['content'].to_csv(index=True)))
@@ -94,22 +71,29 @@ class SeoOffPageAnalyst:
94
 
95
  combined_text += f"Total Backlinks Count: {num_rows}\n"
96
  combined_text += f"Referring Domain: {unique_domains}"
97
-
 
 
98
  # OUTPUT FOR SEO ANALYST
99
  payload_txt = {"question": combined_text}
100
- #result = self.request_model(payload_txt)
 
 
 
 
101
 
102
  #end_time = time.time()
103
  #time_lapsed = end_time - start_time
104
 
105
- debug_info = {'data_field' : 'Backlinks', 'result': combined_text}
106
  #debug_info = {'url_uuid': self.model_url.split("-")[-1],'time_lapsed' : time_lapsed, 'files': [*st.session_state['uploaded_files']],'payload': payload_txt, 'result': result}
107
  collect_telemetry(debug_info)
108
 
 
 
109
  #with st.expander("Debug information", icon="⚙"):
110
  # st.write(debug_info)
111
  st.session_state['analyzing'] = False
112
-
113
  def row1(self):
114
  #st.write(self.data_src)
115
  self.uploaded_files = st.file_uploader('Backlinks - SEMRush', type='csv', accept_multiple_files=True, key="seo_off")
 
11
  from helper.initialize_analyze_session import initialize_analyze_session
12
  import pandas as pd
13
  import asyncio
14
+ import json
15
 
16
  class SeoOffPageAnalyst:
17
  def __init__(self, model_url):
 
30
 
31
  # AGENT NAME
32
  #st.header(self.analyst_name)
33
+ if 'off_page_file_uploaded' not in st.session_state:
34
+ st.session_state['off_page_file_uploaded'] = ''
35
 
36
+ def request_model(self, payload_txt, headers):
37
+ response = requests.post(self.model_url, json=payload_txt, headers=headers)
38
  response.raise_for_status()
39
  output = response.json()
40
+ text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
41
+ text = json.loads(text)
42
 
43
+ backlinks = text[0]
44
+ referring_domains = text[1]
 
45
 
46
+ return text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
 
48
  def process(self):
49
  start_time = time.time()
 
57
  if file_info['type'] == 'pdf':
58
  combined_text += file_info['content'] + "\n"
59
  '''
60
+ try:
61
  if file_info['type'] == 'csv':
62
  # Load CSV
63
  df = pd.read_csv(StringIO(file_info['content'].to_csv(index=True)))
 
71
 
72
  combined_text += f"Total Backlinks Count: {num_rows}\n"
73
  combined_text += f"Referring Domain: {unique_domains}"
74
+ st.info("Backlinks - SEMRush Uploaded Successfuly", icon="ℹ️")
75
+ except KeyError:
76
+ st.info("Incorrect CSV format. Please upload a valid CSV file.")
77
  # OUTPUT FOR SEO ANALYST
78
  payload_txt = {"question": combined_text}
79
+ headers = {
80
+ "Content-Type": "application/json",
81
+ "x-api-key": f"{os.getenv('x-api-key')}"
82
+ }
83
+ #result = self.request_model(payload_txt, headers)
84
 
85
  #end_time = time.time()
86
  #time_lapsed = end_time - start_time
87
 
88
+ debug_info = {'data_field' : 'Backlinks', 'result': payload_txt}
89
  #debug_info = {'url_uuid': self.model_url.split("-")[-1],'time_lapsed' : time_lapsed, 'files': [*st.session_state['uploaded_files']],'payload': payload_txt, 'result': result}
90
  collect_telemetry(debug_info)
91
 
92
+ st.session_state["off_page_file_uploaded"] = 'uploaded'
93
+
94
  #with st.expander("Debug information", icon="⚙"):
95
  # st.write(debug_info)
96
  st.session_state['analyzing'] = False
 
97
  def row1(self):
98
  #st.write(self.data_src)
99
  self.uploaded_files = st.file_uploader('Backlinks - SEMRush', type='csv', accept_multiple_files=True, key="seo_off")
classes/On_Page.py ADDED
@@ -0,0 +1,180 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import requests
3
+ from dotenv import load_dotenv
4
+ import os
5
+ import time
6
+ from helper.telemetry import collect_telemetry
7
+ from helper.upload_File import uploadFile
8
+ from helper.button_behaviour import hide_button, unhide_button
9
+ from helper.initialize_analyze_session import initialize_analyze_session
10
+ import pandas as pd
11
+
12
class SeoOn:
    """Streamlit input page for the SEO On-Page analysis step.

    Renders the ScreamingFrog crawl-file uploader and the GTMetrix
    "First Meaningful Paint" text input, then records whatever the user
    supplied via ``collect_telemetry`` and flags it in session state so the
    response step (see ``response_on_page``) can run the model on it later.
    """

    def __init__(self, model_url):
        self.uploaded_files = []
        self.file_dict = {}
        self.file_gt = {}
        # Endpoint used by request_model(); kept even though process() does
        # not currently call it.
        self.model_url = model_url
        self.initialize()
        self.row1()

    def initialize(self):
        """Load .env configuration and seed the session-state flags this page writes."""
        load_dotenv()
        # Flags consumed by the response_on_page step; default to "not uploaded".
        if 'crawl_file' not in st.session_state:
            st.session_state['crawl_file'] = ''
        if 'first_meaningful_paint' not in st.session_state:
            st.session_state['first_meaningful_paint'] = ''

    def request_model(self, payload_txt):
        """POST ``payload_txt`` to the model endpoint and return a markdown summary.

        Expects the response JSON to carry a ``'json'`` key holding a list of
        ``{'elements', 'remarks'}`` items.  Raises ``requests.HTTPError`` on a
        bad HTTP status.
        """
        response = requests.post(self.model_url, json=payload_txt)
        response.raise_for_status()
        output = response.json()

        categories = []
        remarks = []
        for key, value in output.items():
            if key == 'json':
                for item in value:
                    categories.append(item.get('elements', 'N/A').replace('_', ' ').title())
                    remarks.append(item.get('remarks', 'N/A'))

        # Render as markdown.  The unused DataFrame construction that fed a
        # commented-out st.table() expander was dead code and has been removed.
        output = ""
        for category, remark in zip(categories, remarks):
            output += f"\n\n---\n **Category:** {category}"
            output += f"\n\n **Remarks:** {remark}\n\n"

        return output

    def process(self):
        """Collect the uploaded crawl file and FMP value and send them to telemetry."""
        session = st.session_state.analyze
        start_time = time.time()  # kept for parity with the other analysts; not reported yet
        if (self.first_meaningful_paint or self.uploaded_files) and session == 'clicked':
            first_meaningful_paint = ""
            crawl_file = ""
            with st.spinner('SEO On Page Analyst...', show_time=True):
                st.write('')
                try:
                    for file_info in st.session_state['uploaded_files'].values():
                        if file_info['type'] == 'pdf':
                            crawl_file += file_info['content'] + "\n"
                        elif file_info['type'] == 'csv':
                            try:
                                crawl_file += "CrawlFile CSV: {" + file_info['content'].to_csv(index=True) + "\n"
                            except AttributeError:
                                # Content was not a DataFrame; skip it.
                                pass
                except KeyError:
                    # Nothing has been uploaded into session state yet.
                    pass

                # NOTE: the original wrapped this in try/except KeyError, but
                # f-string formatting of an attribute cannot raise KeyError —
                # the dead handler has been removed.
                first_meaningful_paint += f"\nFirst Meaningful Paint: {self.first_meaningful_paint}"

                debug_info_first_meaningful_paint = {'data_field': 'First Meaningful Paint', 'result': first_meaningful_paint}
                debug_info_crawl_file = {'data_field': 'Crawl File', 'result': crawl_file}

                # Only record (and flag) the pieces the user actually supplied.
                if self.first_meaningful_paint:
                    st.session_state['first_meaningful_paint'] = 'uploaded'
                    collect_telemetry(debug_info_first_meaningful_paint)
                if self.uploaded_files:
                    st.session_state['crawl_file'] = 'uploaded'
                    collect_telemetry(debug_info_crawl_file)

                st.session_state['analyzing'] = False
                try:
                    self.file_dict.popitem()
                except KeyError:
                    pass

    def row1(self):
        """Render this analyst's inputs and kick off process()."""
        self.uploaded_files = st.file_uploader("Crawl File - ScreamingFrog:", type=['pdf', 'csv'], accept_multiple_files=True)
        if self.uploaded_files:
            # `upload` is the module-level uploadFile() helper — presumably
            # populated at import time; TODO confirm it is not only created
            # under the __main__ guard.
            upload.multiple_upload_file(self.uploaded_files)
            self.file_dict = upload.file_dict

        self.first_meaningful_paint = st.text_input("First Meaningful Paint - GTMetrix:", placeholder='Enter First Meaningful Paint')

        st.session_state['analyzing'] = False
        self.process()
173
+
174
+
175
+
176
+
177
# Page entry point: configure the Streamlit layout when run directly.
if __name__ == "__main__":
    st.set_page_config(layout="wide")

# Shared upload helper consumed by SeoOn.row1().
# NOTE(review): indentation in the source capture is ambiguous — confirm
# whether this belongs inside the __main__ guard.
upload = uploadFile()
classes/On_Page_Crawl.py CHANGED
@@ -76,9 +76,7 @@ class SeoOnCrawl:
76
 
77
  def process(self):
78
  session = st.session_state.analyze
79
- if session == 'clicked':
80
- start_time = time.time()
81
- if self.uploaded_files:
82
  combined_text = ""
83
  with st.spinner('SEO On Page Analyst...', show_time=True):
84
  st.write('')
 
76
 
77
  def process(self):
78
  session = st.session_state.analyze
79
+ if self.uploaded_files and session == 'clicked':
 
 
80
  combined_text = ""
81
  with st.spinner('SEO On Page Analyst...', show_time=True):
82
  st.write('')
classes/Seo.py CHANGED
@@ -40,7 +40,17 @@ class Seo:
40
  st.switch_page("./pages/home.py")
41
  except Exception:
42
  pass'''
43
-
 
 
 
 
 
 
 
 
 
 
44
  def request_model(self, payload_txt):
45
  response = requests.post(self.model_url, json=payload_txt)
46
  response.raise_for_status()
@@ -149,9 +159,9 @@ class Seo:
149
 
150
  def process (self):
151
  start_time = time.time()
152
-
153
  session = st.session_state.analyze
154
- if (self.uploaded_file or self.others or self.uploaded_file_seo) and session == 'clicked':
155
  combined_text = ""
156
  seo_keywords = ""
157
  traffic_channels = ""
@@ -180,8 +190,6 @@ class Seo:
180
  direct_traffic = st.session_state['direct_traffic']
181
  referral_traffic = st.session_state['referral_traffic']
182
 
183
-
184
-
185
  traffic_channels += f"\nOrganic Traffic: {organic_traffic}"
186
  traffic_channels += f"\nPaid Traffic: {paid_traffic}"
187
  traffic_channels += f"\nDirect Traffic: {direct_traffic}"
@@ -225,7 +233,7 @@ class Seo:
225
  payload_txt_bounce_rate = {"question": bounce_rate}
226
 
227
 
228
- #result = self.request_model(payload_txt)
229
  #end_time = time.time()
230
  #time_lapsed = end_time - start_time
231
  debug_info_seo_keywords = {'data_field' : 'SEO Keywords', 'result': seo_keywords}
@@ -246,14 +254,19 @@ class Seo:
246
  }
247
  '''
248
  if self.bounce_rate:
 
249
  collect_telemetry(debug_info_bounce_rate)
250
  if self.page_index:
 
251
  collect_telemetry(debug_info_pages_index)
252
  if self.others:
 
253
  collect_telemetry(debug_info_traffic_aqcuisition)
254
  if self.uploaded_file:
 
255
  collect_telemetry(debug_info_traffic_channels)
256
  if self.uploaded_file_seo:
 
257
  collect_telemetry(debug_info_seo_keywords)
258
 
259
  #with st.expander("Debug information", icon="⚙"):
 
40
  st.switch_page("./pages/home.py")
41
  except Exception:
42
  pass'''
43
+ if 'bounce_rate' not in st.session_state:
44
+ st.session_state['bounce_rate'] = ''
45
+ if 'page_index' not in st.session_state:
46
+ st.session_state['page_index'] = ''
47
+ if 'others' not in st.session_state:
48
+ st.session_state['others'] = ''
49
+ if 'df_traffic' not in st.session_state:
50
+ st.session_state['df_traffic'] = ''
51
+ if 'df_seo' not in st.session_state:
52
+ st.session_state['df_seo'] = ''
53
+
54
  def request_model(self, payload_txt):
55
  response = requests.post(self.model_url, json=payload_txt)
56
  response.raise_for_status()
 
159
 
160
  def process (self):
161
  start_time = time.time()
162
+
163
  session = st.session_state.analyze
164
+ if ((self.uploaded_file or self.others or self.uploaded_file_seo) or (self.page_index or self.bounce_rate)) and session == 'clicked':
165
  combined_text = ""
166
  seo_keywords = ""
167
  traffic_channels = ""
 
190
  direct_traffic = st.session_state['direct_traffic']
191
  referral_traffic = st.session_state['referral_traffic']
192
 
 
 
193
  traffic_channels += f"\nOrganic Traffic: {organic_traffic}"
194
  traffic_channels += f"\nPaid Traffic: {paid_traffic}"
195
  traffic_channels += f"\nDirect Traffic: {direct_traffic}"
 
233
  payload_txt_bounce_rate = {"question": bounce_rate}
234
 
235
 
236
+ #result = self.request_model(payload_txt_seo_keywords)
237
  #end_time = time.time()
238
  #time_lapsed = end_time - start_time
239
  debug_info_seo_keywords = {'data_field' : 'SEO Keywords', 'result': seo_keywords}
 
254
  }
255
  '''
256
  if self.bounce_rate:
257
+ st.session_state['bounce_rate'] = 'uploaded'
258
  collect_telemetry(debug_info_bounce_rate)
259
  if self.page_index:
260
+ st.session_state['pages_index'] = 'uploaded'
261
  collect_telemetry(debug_info_pages_index)
262
  if self.others:
263
+ st.session_state['others'] = 'uploaded'
264
  collect_telemetry(debug_info_traffic_aqcuisition)
265
  if self.uploaded_file:
266
+ st.session_state['df_traffic'] = 'uploaded'
267
  collect_telemetry(debug_info_traffic_channels)
268
  if self.uploaded_file_seo:
269
+ st.session_state['df_seo'] = 'uploaded'
270
  collect_telemetry(debug_info_seo_keywords)
271
 
272
  #with st.expander("Debug information", icon="⚙"):
classes/Seo_Backlinks.py CHANGED
@@ -291,12 +291,7 @@ class SeoBacklinks:
291
  #with st.expander("Debug information", icon="⚙"):
292
  # st.write(debug_info)
293
 
294
- for df_traffic in st.session_state.keys():
295
- del st.session_state[df_traffic]
296
- for df_seo in st.session_state.keys():
297
- del st.session_state[df_seo]
298
- for others in st.session_state.keys():
299
- del st.session_state[others]
300
 
301
  st.session_state['analyzing'] = False
302
 
 
291
  #with st.expander("Debug information", icon="⚙"):
292
  # st.write(debug_info)
293
 
294
+
 
 
 
 
 
295
 
296
  st.session_state['analyzing'] = False
297
 
classes/Social_Media.py CHANGED
@@ -330,10 +330,6 @@ class SocialMediaAnalyst:
330
  with st.expander("Debug information", icon="⚙"):
331
  st.write(debug_info)
332
 
333
- for df in st.session_state.keys():
334
- del st.session_state[df]
335
- for facebook_ad_campaign in st.session_state.keys():
336
- del st.session_state[facebook_ad_campaign]
337
 
338
  st.session_state['analyzing'] = False
339
  else:
 
330
  with st.expander("Debug information", icon="⚙"):
331
  st.write(debug_info)
332
 
 
 
 
 
333
 
334
  st.session_state['analyzing'] = False
335
  else:
classes/Social_Media_FB.py CHANGED
@@ -142,10 +142,8 @@ class Facebook:
142
  def process(self):
143
  start_time = time.time()
144
  session = st.session_state.analyze
145
- if session == 'clicked':
146
- hide_button()
147
  try:
148
- if (self.fb_organic_post and self.fb_organic_post.name) or (self.fb_ads_campaign and self.fb_ads_campaign.name):
149
  combined_text = ""
150
  with st.spinner('Social Media Analyst...', show_time=True):
151
  st.write('')
 
142
  def process(self):
143
  start_time = time.time()
144
  session = st.session_state.analyze
145
+ if ((self.fb_organic_post and self.fb_organic_post.name) or (self.fb_ads_campaign and self.fb_ads_campaign.name) or self.facebooks or self.facebook_rr) and session == 'clicked':
 
146
  try:
 
147
  combined_text = ""
148
  with st.spinner('Social Media Analyst...', show_time=True):
149
  st.write('')
classes/response_off.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from io import StringIO
2
+ from urllib.parse import urlparse
3
+ import streamlit as st
4
+ import requests
5
+ from dotenv import load_dotenv
6
+ import os
7
+ import time
8
+ from helper.upload_response import upload_response
9
+ from helper.upload_File import uploadFile
10
+ from helper.button_behaviour import hide_button, unhide_button
11
+ from helper.initialize_analyze_session import initialize_analyze_session
12
+ import pandas as pd
13
+ import asyncio
14
+ import json
15
+ from pymongo import MongoClient
16
+ from helper.data_field import data_field
17
+
18
class SeoOffPageAnalyst:
    """Response side of the Off-Page (backlinks) analysis.

    Once the Off_Page input page has flagged ``off_page_file_uploaded`` in
    session state, this page fetches the stored Backlinks data, queries the
    model, and uploads the model response to MongoDB via ``upload_response``.
    """

    def __init__(self, model_url):
        self.uploaded_files = []
        self.file_dict = {}
        self.model_url = model_url
        self.initialize()
        self.row1()

    def initialize(self):
        """Load .env configuration and seed the session flag this page reads."""
        load_dotenv()
        # BUG FIX: row1()/process() read this key unconditionally; if this page
        # renders before the uploader page has set it, that raised KeyError.
        if 'off_page_file_uploaded' not in st.session_state:
            st.session_state['off_page_file_uploaded'] = ''

    def request_model(self, payload_txt, headers):
        """POST to the model endpoint and return the decoded text payload.

        The endpoint answers in a Langflow-style envelope; the generated text
        is itself JSON and is decoded before returning.  Raises
        ``requests.HTTPError`` on a bad HTTP status.
        """
        response = requests.post(self.model_url, json=payload_txt, headers=headers)
        response.raise_for_status()
        output = response.json()
        text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
        # Unused unpacking of text[0]/text[1] (backlinks / referring domains)
        # removed — callers consume the whole decoded payload.
        return json.loads(text)

    def process(self):
        """Fetch the stored Backlinks data, query the model, upload the response."""
        session = st.session_state['off_page_file_uploaded']
        if session == 'uploaded':
            with st.spinner('SEO Off Page Analyst...', show_time=True):
                st.write('')

                # OUTPUT FOR SEO ANALYST
                payload_txt = {"input_value": data_field("Backlinks"),
                               "output_type": "text",
                               "input_type": "chat"}
                headers = {
                    "Content-Type": "application/json",
                    "x-api-key": f"{os.getenv('x-api-key')}"
                }
                result = self.request_model(payload_txt, headers)

                debug_info = {'data_field': 'Off Page Analyst', 'result': result}
                upload_response(debug_info)

                # Reset the flag so a Streamlit rerun does not re-analyze.
                st.session_state["off_page_file_uploaded"] = ''
                st.session_state['analyzing'] = False

    def row1(self):
        """Trigger the analysis when the uploader page has flagged new data."""
        st.session_state['analyzing'] = False
        if st.session_state['off_page_file_uploaded'] == 'uploaded':
            self.process()
80
+
81
+
82
# Page entry point: configure the Streamlit layout when run directly.
if __name__ == "__main__":
    st.set_page_config(layout="wide")

# Shared upload helper kept for parity with the other analyst pages.
# NOTE(review): indentation in the source capture is ambiguous — confirm
# whether this belongs inside the __main__ guard.
upload = uploadFile()
classes/response_on_page.py ADDED
@@ -0,0 +1,139 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import requests
3
+ from dotenv import load_dotenv
4
+ import os
5
+ import time
6
+ from helper.telemetry import collect_telemetry
7
+ from helper.upload_File import uploadFile
8
+ from helper.button_behaviour import hide_button, unhide_button
9
+ from helper.initialize_analyze_session import initialize_analyze_session
10
+ import pandas as pd
11
+ from pymongo import MongoClient
12
+ import json
13
+ from helper.data_field import data_field
14
+ from helper.upload_response import upload_response
15
+
16
class SeoOn:
    """Response side of the On-Page analysis.

    Pulls the stored "First Meaningful Paint" and "Crawl File" data from
    MongoDB, queries the model, and uploads the result via
    ``upload_response`` once the input page has flagged the data as uploaded.
    """

    def __init__(self, model_url):
        self.uploaded_files = []
        self.file_dict = {}
        self.file_gt = {}
        self.model_url = model_url
        self.initialize()
        self.row1()

    def initialize(self):
        """Load .env configuration (MONGODB_URI, x-api-key)."""
        load_dotenv()

    def request_model(self, payload_txt, headers):
        """POST to the model endpoint and return the decoded text payload.

        The endpoint answers in a Langflow-style envelope; the generated text
        is itself JSON and is decoded before returning.  Raises
        ``requests.HTTPError`` on a bad HTTP status.
        """
        response = requests.post(self.model_url, json=payload_txt, headers=headers)
        response.raise_for_status()
        output = response.json()
        text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
        return json.loads(text)

    def fetch_data(self, data_field):
        """Return the stored 'result' for ``data_field`` from the df_data collection."""
        myclient = MongoClient(os.getenv("MONGODB_URI"))
        mycol = myclient.get_database()["df_data"]
        doc = mycol.find_one({"data_field": data_field})
        return doc["result"]

    def process(self):
        """Assemble the stored data, run the model, and persist the response."""
        with st.spinner('SEO On Page...', show_time=True):
            st.write('')
            # OUTPUT FOR SEO ANALYST
            headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
            # Best-effort: include whichever of the two data fields were stored.
            payload = ""
            for field in ("First Meaningful Paint", "Crawl File"):
                try:
                    payload += self.fetch_data(field)
                except Exception:
                    # Field missing in Mongo (or Mongo unreachable): skip it.
                    pass

            try:
                fmp_state = st.session_state['first_meaningful_paint']
                crawl_state = st.session_state['crawl_file']
                # BUG FIX: the original tested
                #   `fmp_state or crawl_state == 'uploaded'`
                # which passes for ANY truthy fmp_state due to precedence.
                # Compare both flags explicitly.
                if fmp_state == 'uploaded' or crawl_state == 'uploaded':
                    payload_txt = {"input_value": payload, "output_type": "text", "input_type": "chat"}
                    payload_txt_model = self.request_model(payload_txt, headers)
                    debug_info = {'data_field': 'On Page Analyst', 'result': payload_txt_model}
                    upload_response(debug_info)

                    # Reset the flags so reruns do not re-analyze.
                    st.session_state['first_meaningful_paint'] = ''
                    st.session_state['crawl_file'] = ''
            except Exception:
                # Missing session keys or a failed request: leave state untouched.
                pass

            st.session_state['analyzing'] = False
            try:
                self.file_dict.popitem()
            except KeyError:
                pass

    def row1(self):
        """Run the analysis on render; this page has no inputs of its own."""
        st.session_state['analyzing'] = False
        self.process()
134
+
135
+
136
# Page entry point: configure the Streamlit layout when run directly.
if __name__ == "__main__":
    st.set_page_config(layout="wide")

# Shared upload helper kept for parity with the other analyst pages.
# NOTE(review): indentation in the source capture is ambiguous — confirm
# whether this belongs inside the __main__ guard.
upload = uploadFile()
classes/response_seo.py ADDED
@@ -0,0 +1,204 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import requests
3
+ from dotenv import load_dotenv
4
+ import os
5
+ import pandas as pd
6
+ import pandas._libs.tslibs.parsing
7
+ import time
8
+ import chardet
9
+ from helper.telemetry import collect_telemetry
10
+ from helper.upload_File import uploadFile
11
+ from helper.button_behaviour import hide_button
12
+ from helper.initialize_analyze_session import initialize_analyze_session
13
+ from pymongo import MongoClient
14
+ from helper.data_field import data_field
15
+ from helper.upload_response import upload_response
16
+ import json
17
+
18
class Seo:
    """Response side of the general SEO analysis.

    Aggregates every stored SEO data field from MongoDB (keywords, traffic,
    pages indexed, bounce rate, backlinks), queries the model once, and
    uploads the response.  Also hosts the DataFrame-summarizing helpers used
    by the input pages (keyword ranking, traffic channels, GA4 acquisition).
    """

    def __init__(self, model_url):
        self.uploaded_files = []
        self.file_dict = {}
        self.model_url = model_url
        self.initialize()
        self.row1()

    def initialize(self):
        """Load .env configuration (MONGODB_URI, x-api-key)."""
        load_dotenv()

    def request_model(self, payload_txt, headers):
        """POST to the model endpoint and return the decoded text payload.

        The endpoint answers in a Langflow-style envelope; the generated text
        is itself JSON and is decoded before returning.  Raises
        ``requests.HTTPError`` on a bad HTTP status.
        """
        response = requests.post(self.model_url, json=payload_txt, headers=headers)
        response.raise_for_status()
        output = response.json()
        text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
        return json.loads(text)

    def fetch_backlinks(self, data_field):
        """Return the stored Backlinks text.

        The Backlinks record stores its payload as a dict; this unwraps the
        'question' text.  Delegates to fetch_data() instead of duplicating
        the Mongo lookup.
        """
        return self.fetch_data(data_field)['question']

    def fetch_data(self, data_field):
        """Return the stored 'result' for ``data_field`` from the df_data collection."""
        myclient = MongoClient(os.getenv("MONGODB_URI"))
        mycol = myclient.get_database()["df_data"]
        doc = mycol.find_one({"data_field": data_field})
        return doc["result"]

    def detect_encoding(self, uploaded_file):
        """Sniff the text encoding of an uploaded file from its first 100 KB."""
        result = chardet.detect(uploaded_file.read(100000))
        uploaded_file.seek(0)  # rewind so callers can read from the start
        return result['encoding']

    def keyword_ranking(self, df_seo):
        """Summarize keyword positions into top-10 / top-100 counts in session state.

        Assumes ``df_seo`` has a numeric 'Position' column.  The original also
        stored the raw DataFrame under the same session key first, which was
        immediately overwritten — that redundant write has been removed.
        """
        ranked = df_seo.sort_values("Position", ascending=True)
        top_10 = ranked[ranked["Position"] <= 10].shape[0]
        top_100 = ranked[ranked["Position"] <= 100].shape[0]
        st.session_state['keyword_ranking'] = {
            'Keyword_top_10': top_10,
            'Keyword_top_100': top_100
        }

    def traffic_files(self, df_traffic):
        """Store the most recent traffic-channel figures in session state.

        Assumes columns 'Organic Search', 'Paid Search', 'Direct', 'Referral'
        and a leading date column — TODO confirm against the export format.
        """
        traffic_channels = df_traffic
        try:
            traffic_channels.rename(columns={traffic_channels.columns[0]: 'date'}, inplace=True)
            traffic_channels['date'] = pd.to_datetime(traffic_channels['date'], format='mixed')
        except pandas._libs.tslibs.parsing.DateParseError:
            # First column was not a parseable date; sort on it as-is.
            pass
        latest = traffic_channels.sort_values("date", ascending=False)

        st.session_state['organic_traffic'] = latest['Organic Search'].values[0]
        st.session_state['paid_traffic'] = latest['Paid Search'].values[0]
        st.session_state['direct_traffic'] = latest['Direct'].values[0]
        st.session_state['referral_traffic'] = latest['Referral'].values[0]

    def ga4_traffic(self, others):
        """Store GA4 traffic-acquisition session counts in session state.

        Row positions 0 / 4 / 2 / 3 of the 'Sessions' column are read as
        Paid Social / Organic / Direct / Referral — TODO confirm this matches
        the GA4 export row ordering.
        """
        st.session_state['others'] = others

        st.session_state['ga4_paid_social'] = others['Sessions'].values[0]
        st.session_state['ga4_organic_traffic'] = others['Sessions'].values[4]
        st.session_state['ga4_direct_traffic'] = others['Sessions'].values[2]
        st.session_state['ga4_referral_traffic'] = others['Sessions'].values[3]

    def delete_sessions(self):
        """Drop every analysis-related session key.

        BUG FIX: the original wrapped twelve ``del`` statements in a single
        try/except KeyError, so the first missing key silently aborted all
        remaining deletions.  pop(key, None) removes each key independently.
        """
        for key in ('df_traffic', 'others', 'df_seo', 'keyword_ranking',
                    'ga4_paid_social', 'ga4_organic_traffic',
                    'ga4_direct_traffic', 'ga4_referral_traffic',
                    'organic_traffic', 'paid_traffic',
                    'direct_traffic', 'referral_traffic'):
            st.session_state.pop(key, None)

    def process(self):
        """Assemble every stored data field, run the model, persist the response."""
        with st.spinner('Seo Analyst...', show_time=True):
            st.write('')
            headers = {"Content-Type": "application/json", "x-api-key": f"{os.getenv('x-api-key')}"}
            # Best-effort: include whichever data fields were stored in Mongo.
            payload = ""
            for field in ("SEO Keywords", "Traffic Channels", "Traffic Acquisition",
                          "Pages Indexed", "Bounce Rate"):
                try:
                    payload += self.fetch_data(field)
                except Exception:
                    pass
            try:
                payload += self.fetch_backlinks("Backlinks")
            except Exception:
                pass

            try:
                flag_keys = ('bounce_rate', 'pages_index', 'others', 'df_traffic', 'df_seo')
                # BUG FIX: the original tested
                #   `a or b or c or d or e == 'uploaded'`
                # which only compared the LAST flag; any truthy earlier value
                # passed.  Compare every flag explicitly.
                if any(st.session_state[key] == 'uploaded' for key in flag_keys):
                    payload_txt = {"input_value": payload, "output_type": "text", "input_type": "chat"}
                    payload_txt_model = self.request_model(payload_txt, headers)
                    debug_info = {'data_field': 'SEO Analyst', 'result': payload_txt_model}
                    upload_response(debug_info)

                    # Reset the flags so reruns do not re-analyze.
                    for key in flag_keys:
                        st.session_state[key] = ''
            except Exception:
                # Missing session keys or a failed request: leave state untouched.
                pass
            st.session_state['analyzing'] = False

    def row1(self):
        """Run the analysis on render; this page has no inputs of its own."""
        st.session_state['analyzing'] = False
        self.process()
200
+
201
# Page entry point: configure the Streamlit layout when run directly.
if __name__ == "__main__":
    st.set_page_config(layout="wide")

# Shared upload helper kept for parity with the other analyst pages.
# NOTE(review): indentation in the source capture is ambiguous — confirm
# whether this belongs inside the __main__ guard.
upload = uploadFile()
classes/response_website_and_tools.py ADDED
@@ -0,0 +1,232 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import requests
3
+ from dotenv import load_dotenv
4
+ import os
5
+ import time
6
+ from helper.telemetry import collect_telemetry
7
+ from helper.upload_File import uploadFile
8
+ from helper.button_behaviour import hide_button, unhide_button
9
+ from helper.initialize_analyze_session import initialize_analyze_session
10
+ import pandas as pd
11
+ from pymongo import MongoClient
12
+ import json
13
+ from helper.data_field import data_field
14
+ from helper.upload_response import upload_response
15
+
16
class WebsiteAndTools:
    """Website & Tools analyst.

    Gathers every data field the user previously uploaded (flagged in
    ``st.session_state``), pulls the stored results from MongoDB, sends the
    combined payload to the model endpoint once, and persists the model's
    response via ``upload_response``.
    """

    # (session-state key, MongoDB ``data_field`` label) pairs this analyst
    # consumes.  Keeping them in one table replaces ten copy-pasted
    # try/fetch stanzas and guarantees the fetch, the "anything uploaded?"
    # check, and the flag reset all agree on the same field list.
    # NOTE: the original omitted 'first_meaningful_paint' from the
    # uploaded-check (while still fetching and resetting it) — fixed here.
    _FIELDS = [
        ("website_responsiveness", "Website Responsiveness"),
        ("content_management_system", "Content Management System"),
        ("SSL_certificate", "SSL Certificate"),
        ("mobile_responsiveness", "Mobile Responsiveness"),
        ("desktop_loading_speed", "Desktop Loading Speed"),
        ("mobile_loading_speed", "Mobile Loading Speed"),
        ("first_meaningful_paint", "First Meaningful Paint"),
        ("web_analytics", "Web Analytics"),
        ("client_relations_management_system", "Client Relations Management System"),
        ("lead_generation_mechanism", "Lead Generation Mechanism"),
    ]

    def __init__(self, model_url):
        """Store the model endpoint URL and run the analysis immediately."""
        self.uploaded_files = []
        self.file_dict = {}
        self.file_gt = {}
        self.model_url = model_url
        self.initialize()
        self.row1()

    def initialize(self):
        """Load environment variables (MONGODB_URI, x-api-key, ...)."""
        load_dotenv()

    def request_model(self, payload_txt, headers):
        """POST *payload_txt* to the model endpoint and return the parsed result.

        Args:
            payload_txt: JSON-serialisable request body.
            headers: HTTP headers (content type + API key).

        Returns:
            The decoded JSON object embedded in the Langflow-style response
            at ``outputs[0].outputs[0].results.text.data.text``.

        Raises:
            requests.HTTPError: on a non-2xx response.
        """
        response = requests.post(self.model_url, json=payload_txt, headers=headers)
        response.raise_for_status()
        output = response.json()
        text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
        return json.loads(text)

    def fetch_data(self, data_field):
        """Return the stored ``result`` string for *data_field* from MongoDB.

        Note: the parameter intentionally keeps its original name for
        backward compatibility, even though it shadows the imported
        ``helper.data_field.data_field`` helper inside this method.
        """
        mongodb_uri = os.getenv("MONGODB_URI")
        myclient = MongoClient(mongodb_uri)
        try:
            mydb = myclient.get_database()
            doc = mydb["df_data"].find_one({"data_field": data_field})
            return doc["result"]
        finally:
            # Close the connection instead of leaking one per call.
            myclient.close()

    def process(self):
        """Build the payload from all uploaded fields, query the model once,
        store the response, then clear the upload flags."""
        with st.spinner('Website and Tools...', show_time=True):
            st.write('')

            headers = {
                "Content-Type": "application/json",
                "x-api-key": f"{os.getenv('x-api-key')}",
            }

            # Collect every field flagged as uploaded.  Each fetch is
            # best-effort: one missing/broken field must not stop the rest.
            payload = ""
            for state_key, field_label in self._FIELDS:
                try:
                    if st.session_state.get(state_key) == 'uploaded':
                        payload += self.fetch_data(field_label)
                except Exception:
                    pass

            try:
                # BUG FIX: the original wrote
                #   if a or b or ... or x == 'uploaded':
                # which compared only the LAST variable to 'uploaded'; any
                # other non-empty value made the condition true.
                if any(st.session_state.get(k) == 'uploaded' for k, _ in self._FIELDS):
                    payload_txt = {
                        "input_value": payload,
                        "output_type": "text",
                        "input_type": "chat",
                    }
                    result = self.request_model(payload_txt, headers)
                    upload_response({
                        'data_field': 'Website and Tools Analyst',
                        'result': result,
                    })
                    # Reset every flag so a rerun does not resend stale data.
                    for state_key, _ in self._FIELDS:
                        st.session_state[state_key] = ''
            except Exception:
                # Best-effort: analysis failures are silently skipped, as in
                # the original flow.
                pass

            st.session_state['analyzing'] = False
            try:
                self.file_dict.popitem()
            except KeyError:
                pass

    def row1(self):
        """Reset the 'analyzing' flag and run the analysis."""
        st.session_state['analyzing'] = False
        self.process()
227
+
228
+
229
# Standalone entry point: configure a wide Streamlit page and show the
# file-upload widget.
if __name__ == "__main__":
    st.set_page_config(layout="wide")

    upload = uploadFile()
classes/{On_Page_GT.py → website_and_tools.py} RENAMED
@@ -9,7 +9,7 @@ from helper.button_behaviour import hide_button, unhide_button
9
  from helper.initialize_analyze_session import initialize_analyze_session
10
  import pandas as pd
11
 
12
- class SeoOnGT:
13
  def __init__(self, model_url):
14
  self.uploaded_files = []
15
  self.file_dict = {}
@@ -39,7 +39,26 @@ class SeoOnGT:
39
  st.switch_page("./pages/home.py")
40
  except Exception:
41
  pass'''
42
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
43
  def request_model(self, payload_txt):
44
  response = requests.post(self.model_url, json=payload_txt)
45
  response.raise_for_status()
@@ -77,8 +96,7 @@ class SeoOnGT:
77
  def process(self):
78
  session = st.session_state.analyze
79
  start_time = time.time()
80
- if (self.website_responsiveness or self.content_management_system or self.SSL_certificate or self.web_analytics or self.client_relations_management_system or self.lead_generation_mechanism or self.first_meaningful_paint or self.mobile_responsiveness or self.mobile_loading_speed or self.desktop_loading_speed) and session == 'clicked':
81
- combined_text = ""
82
  website_responsiveness = ""
83
  content_management_system = ""
84
  SSL_certificate = ""
@@ -88,7 +106,6 @@ class SeoOnGT:
88
  web_analytics = ""
89
  client_relations_management_system = ""
90
  mobile_loading_speed = ""
91
- first_meaningful_paint = ""
92
  lead_generation_mechanism = ""
93
  with st.spinner('SEO On Page Analyst...', show_time=True):
94
  st.write('')
@@ -104,28 +121,55 @@ class SeoOnGT:
104
  pass
105
  except KeyError:
106
  pass
107
- '''
108
  try:
109
  for f in st.session_state['uploaded_gt'].values():
110
  if f['type'] == 'pdf':
111
- combined_text += "GTmetrix: {"+ f['content'] + "}\n"
112
  elif f['type'] == 'csv':
113
- combined_text += f['content'].to_csv(index=True) + "\n"
114
  except KeyError:
115
  pass
 
116
  try:
117
  website_responsiveness += f"\nWebsite Responsiveness: {self.website_responsiveness}"
118
- content_management_system += f"\nContent Management System: {self.content_management_system}%"
 
 
 
 
 
 
119
  SSL_certificate += f"\nSSL Certificate: {self.SSL_certificate}"
 
 
 
120
  mobile_responsiveness += f"\nMobile Responsiveness: {self.mobile_responsiveness}"
 
 
 
121
  desktop_loading_speed += f"\nDesktop Loading Speed: {self.desktop_loading_speed}"
 
 
 
122
  mobile_loading_speed += f"\nMobile Loading Speed: {self.mobile_loading_speed}"
123
- first_meaningful_paint += f"\nFirst Meaningful Paint: {self.first_meaningful_paint}"
 
 
124
  web_analytics += f"\nWeb Analytics: {self.web_analytics}"
 
 
 
 
125
  client_relations_management_system += f"\nClient Relations Management System: {self.client_relations_management_system}"
 
 
 
126
  lead_generation_mechanism += f"\nLead Generation Mechanism: {self.lead_generation_mechanism}"
127
  except KeyError:
128
  pass
 
 
129
  # OUTPUT FOR WEBSITE RESPONSIVENESS
130
  payload_txt_website_responsiveness = {"question": website_responsiveness}
131
  #result_website_responsiveness = self.request_model(website_responsiveness)
@@ -151,19 +195,17 @@ class SeoOnGT:
151
  #result_lead_generation_mechanism = self.request_model(lead_generation_mechanism)
152
 
153
  # OUTPUT FOR SEO ANALYST
154
- payload_txt = {"question": combined_text}
155
  #result = self.request_model(payload_txt)
156
  #end_time = time.time()
157
  #time_lapsed = end_time - start_time
158
 
159
- debug_info = {'data_field' : 'GT Metrix', 'result': combined_text}
160
  debug_info_website_responsiveness = {'data_field' : 'Website Responsiveness', 'result': website_responsiveness}
161
  debug_info_content_management_system = {'data_field' : 'Content Management System', 'result': content_management_system}
162
  debug_info_SSL_certificate = {'data_field' : 'SSL Certificate', 'result': SSL_certificate}
163
  debug_info_mobile_responsiveness = {'data_field' : 'Mobile Responsiveness', 'result': mobile_responsiveness}
164
  debug_info_desktop_loading_speed = {'data_field' : 'Desktop Loading Speed', 'result': desktop_loading_speed}
165
  debug_info_mobile_loading_speed = {'data_field' : 'Mobile Loading Speed', 'result': mobile_loading_speed}
166
- debug_info_first_meaningful_paint = {'data_field' : 'First Meaningful Paint', 'result': first_meaningful_paint}
167
  debug_info_web_analytics = {'data_field' : 'Web Analytics', 'result': web_analytics}
168
  debug_info_client_relations_management_system = {'data_field' : 'Client Relations Management System', 'result': client_relations_management_system}
169
  debug_info_lead_generation_mechanism = {'data_field' : 'Lead Generation Mechanism', 'result': lead_generation_mechanism}
@@ -180,24 +222,31 @@ class SeoOnGT:
180
  collect_telemetry(debug_info)
181
  '''
182
  if self.website_responsiveness:
 
183
  collect_telemetry(debug_info_website_responsiveness)
184
  if self.content_management_system:
 
185
  collect_telemetry(debug_info_content_management_system)
186
  if self.SSL_certificate:
 
187
  collect_telemetry(debug_info_SSL_certificate)
188
  if self.mobile_responsiveness:
 
189
  collect_telemetry(debug_info_mobile_responsiveness)
190
  if self.desktop_loading_speed:
 
191
  collect_telemetry(debug_info_desktop_loading_speed)
192
  if self.mobile_loading_speed:
 
193
  collect_telemetry(debug_info_mobile_loading_speed)
194
- if self.first_meaningful_paint:
195
- collect_telemetry(debug_info_first_meaningful_paint)
196
  if self.web_analytics:
 
197
  collect_telemetry(debug_info_web_analytics)
198
  if self.client_relations_management_system:
 
199
  collect_telemetry(debug_info_client_relations_management_system)
200
  if self.lead_generation_mechanism:
 
201
  collect_telemetry(debug_info_lead_generation_mechanism)
202
 
203
  #with st.expander("Debug information", icon="⚙"):
@@ -205,9 +254,6 @@ class SeoOnGT:
205
 
206
 
207
  st.session_state['analyzing'] = False
208
-
209
- for df_seo in st.session_state.keys():
210
- del st.session_state[df_seo]
211
  try:
212
  self.file_dict.popitem()
213
  except KeyError:
@@ -233,7 +279,6 @@ class SeoOnGT:
233
  self.mobile_loading_speed = st.text_input("Mobile Loading Speed - GTMetrix:", placeholder='Enter Mobile Loading Speed')
234
  self.web_analytics = st.text_input("Web Analytics - BuiltWith:", placeholder='Enter Web Analytics')
235
  self.client_relations_management_system = st.text_input("Client Relations Management System - BuiltWith:", placeholder='Enter Client Relations Management System')
236
- self.first_meaningful_paint = st.text_input("First Meaningful Paint - GTMetrix:", placeholder='Enter First Meaningful Paint')
237
  self.lead_generation_mechanism = st.text_input("Lead Generation Mechanism - Business Context (Lead Generation & Lead Nurturing):", placeholder='Enter Lead Generation Mechanism')
238
 
239
  #st.write("") # FOR THE HIDE BUTTON
@@ -242,9 +287,6 @@ class SeoOnGT:
242
  st.session_state['analyzing'] = False
243
  #st.write("") # FOR THE HIDE BUTTON
244
  self.process()
245
-
246
-
247
-
248
 
249
  if __name__ == "__main__":
250
  st.set_page_config(layout="wide")
 
9
  from helper.initialize_analyze_session import initialize_analyze_session
10
  import pandas as pd
11
 
12
+ class WebsiteAndTools:
13
  def __init__(self, model_url):
14
  self.uploaded_files = []
15
  self.file_dict = {}
 
39
  st.switch_page("./pages/home.py")
40
  except Exception:
41
  pass'''
42
+ if 'website_responsiveness' not in st.session_state:
43
+ st.session_state['website_responsiveness'] = ''
44
+ if 'content_management_system' not in st.session_state:
45
+ st.session_state['content_management_system'] = ''
46
+ if 'SSL_certificate' not in st.session_state:
47
+ st.session_state['SSL_certificate'] = ''
48
+ if 'mobile_responsiveness' not in st.session_state:
49
+ st.session_state['mobile_responsiveness'] = ''
50
+ if 'desktop_loading_speed' not in st.session_state:
51
+ st.session_state['desktop_loading_speed'] = ''
52
+ if 'mobile_loading_speed' not in st.session_state:
53
+ st.session_state['mobile_loading_speed'] = ''
54
+ if 'web_analytics' not in st.session_state:
55
+ st.session_state['web_analytics'] = ''
56
+ if 'client_relations_management_system' not in st.session_state:
57
+ st.session_state['client_relations_management_system'] = ''
58
+ if 'lead_generation_mechanism' not in st.session_state:
59
+ st.session_state['lead_generation_mechanism'] = ''
60
+
61
+
62
  def request_model(self, payload_txt):
63
  response = requests.post(self.model_url, json=payload_txt)
64
  response.raise_for_status()
 
96
  def process(self):
97
  session = st.session_state.analyze
98
  start_time = time.time()
99
+ if (self.website_responsiveness or self.content_management_system or self.SSL_certificate or self.web_analytics or self.client_relations_management_system or self.lead_generation_mechanism or self.mobile_responsiveness or self.mobile_loading_speed or self.desktop_loading_speed) and session == 'clicked':
 
100
  website_responsiveness = ""
101
  content_management_system = ""
102
  SSL_certificate = ""
 
106
  web_analytics = ""
107
  client_relations_management_system = ""
108
  mobile_loading_speed = ""
 
109
  lead_generation_mechanism = ""
110
  with st.spinner('SEO On Page Analyst...', show_time=True):
111
  st.write('')
 
121
  pass
122
  except KeyError:
123
  pass
124
+
125
  try:
126
  for f in st.session_state['uploaded_gt'].values():
127
  if f['type'] == 'pdf':
128
+ crawl_file += "GTmetrix: {"+ f['content'] + "}\n"
129
  elif f['type'] == 'csv':
130
+ crawl_file += f['content'].to_csv(index=True) + "\n"
131
  except KeyError:
132
  pass
133
+ '''
134
  try:
135
  website_responsiveness += f"\nWebsite Responsiveness: {self.website_responsiveness}"
136
+ except KeyError:
137
+ pass
138
+ try:
139
+ content_management_system += f"\nContent Management System: {self.content_management_system}"
140
+ except KeyError:
141
+ pass
142
+ try:
143
  SSL_certificate += f"\nSSL Certificate: {self.SSL_certificate}"
144
+ except KeyError:
145
+ pass
146
+ try:
147
  mobile_responsiveness += f"\nMobile Responsiveness: {self.mobile_responsiveness}"
148
+ except KeyError:
149
+ pass
150
+ try:
151
  desktop_loading_speed += f"\nDesktop Loading Speed: {self.desktop_loading_speed}"
152
+ except KeyError:
153
+ pass
154
+ try:
155
  mobile_loading_speed += f"\nMobile Loading Speed: {self.mobile_loading_speed}"
156
+ except KeyError:
157
+ pass
158
+ try:
159
  web_analytics += f"\nWeb Analytics: {self.web_analytics}"
160
+
161
+ except KeyError:
162
+ pass
163
+ try:
164
  client_relations_management_system += f"\nClient Relations Management System: {self.client_relations_management_system}"
165
+ except KeyError:
166
+ pass
167
+ try:
168
  lead_generation_mechanism += f"\nLead Generation Mechanism: {self.lead_generation_mechanism}"
169
  except KeyError:
170
  pass
171
+
172
+
173
  # OUTPUT FOR WEBSITE RESPONSIVENESS
174
  payload_txt_website_responsiveness = {"question": website_responsiveness}
175
  #result_website_responsiveness = self.request_model(website_responsiveness)
 
195
  #result_lead_generation_mechanism = self.request_model(lead_generation_mechanism)
196
 
197
  # OUTPUT FOR SEO ANALYST
198
+ #payload_txt = {"question": combined_text}
199
  #result = self.request_model(payload_txt)
200
  #end_time = time.time()
201
  #time_lapsed = end_time - start_time
202
 
 
203
  debug_info_website_responsiveness = {'data_field' : 'Website Responsiveness', 'result': website_responsiveness}
204
  debug_info_content_management_system = {'data_field' : 'Content Management System', 'result': content_management_system}
205
  debug_info_SSL_certificate = {'data_field' : 'SSL Certificate', 'result': SSL_certificate}
206
  debug_info_mobile_responsiveness = {'data_field' : 'Mobile Responsiveness', 'result': mobile_responsiveness}
207
  debug_info_desktop_loading_speed = {'data_field' : 'Desktop Loading Speed', 'result': desktop_loading_speed}
208
  debug_info_mobile_loading_speed = {'data_field' : 'Mobile Loading Speed', 'result': mobile_loading_speed}
 
209
  debug_info_web_analytics = {'data_field' : 'Web Analytics', 'result': web_analytics}
210
  debug_info_client_relations_management_system = {'data_field' : 'Client Relations Management System', 'result': client_relations_management_system}
211
  debug_info_lead_generation_mechanism = {'data_field' : 'Lead Generation Mechanism', 'result': lead_generation_mechanism}
 
222
  collect_telemetry(debug_info)
223
  '''
224
  if self.website_responsiveness:
225
+ st.session_state['website_responsiveness'] = 'uploaded'
226
  collect_telemetry(debug_info_website_responsiveness)
227
  if self.content_management_system:
228
+ st.session_state['content_management_system'] = 'uploaded'
229
  collect_telemetry(debug_info_content_management_system)
230
  if self.SSL_certificate:
231
+ st.session_state['SSL_certificate'] = 'uploaded'
232
  collect_telemetry(debug_info_SSL_certificate)
233
  if self.mobile_responsiveness:
234
+ st.session_state['mobile_responsiveness'] = 'uploaded'
235
  collect_telemetry(debug_info_mobile_responsiveness)
236
  if self.desktop_loading_speed:
237
+ st.session_state['desktop_loading_speed'] = 'uploaded'
238
  collect_telemetry(debug_info_desktop_loading_speed)
239
  if self.mobile_loading_speed:
240
+ st.session_state['mobile_loading_speed'] = 'uploaded'
241
  collect_telemetry(debug_info_mobile_loading_speed)
 
 
242
  if self.web_analytics:
243
+ st.session_state['web_analytics'] = 'uploaded'
244
  collect_telemetry(debug_info_web_analytics)
245
  if self.client_relations_management_system:
246
+ st.session_state['client_relations_management_system'] = 'uploaded'
247
  collect_telemetry(debug_info_client_relations_management_system)
248
  if self.lead_generation_mechanism:
249
+ st.session_state['lead_generation_mechanism'] = 'uploaded'
250
  collect_telemetry(debug_info_lead_generation_mechanism)
251
 
252
  #with st.expander("Debug information", icon="⚙"):
 
254
 
255
 
256
  st.session_state['analyzing'] = False
 
 
 
257
  try:
258
  self.file_dict.popitem()
259
  except KeyError:
 
279
  self.mobile_loading_speed = st.text_input("Mobile Loading Speed - GTMetrix:", placeholder='Enter Mobile Loading Speed')
280
  self.web_analytics = st.text_input("Web Analytics - BuiltWith:", placeholder='Enter Web Analytics')
281
  self.client_relations_management_system = st.text_input("Client Relations Management System - BuiltWith:", placeholder='Enter Client Relations Management System')
 
282
  self.lead_generation_mechanism = st.text_input("Lead Generation Mechanism - Business Context (Lead Generation & Lead Nurturing):", placeholder='Enter Lead Generation Mechanism')
283
 
284
  #st.write("") # FOR THE HIDE BUTTON
 
287
  st.session_state['analyzing'] = False
288
  #st.write("") # FOR THE HIDE BUTTON
289
  self.process()
 
 
 
290
 
291
  if __name__ == "__main__":
292
  st.set_page_config(layout="wide")
helper/data_field.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pymongo import MongoClient
2
+ import os
3
def data_field(data_src):
    """Look up the stored question text for *data_src* in ``df_data``.

    Args:
        data_src: Value of the ``data_field`` key to match.

    Returns:
        The ``result.question`` string of the matching document.

    Raises:
        TypeError: if no matching document exists (``find_one`` returns
            None) — unchanged from the original; callers catch broadly.
    """
    mongodb_uri = os.getenv("MONGODB_URI")
    myclient = MongoClient(mongodb_uri)
    try:
        mydb = myclient.get_database()
        doc = mydb["df_data"].find_one({"data_field": data_src})
        return doc["result"]["question"]
    finally:
        # BUG FIX: the original never closed the client, leaking one
        # MongoDB connection per call.
        myclient.close()
helper/load.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+
2
def load(num):
    """Identity helper: return *num* unchanged.

    The original body assigned *num* to an unused local (``upload = num``)
    before returning; that dead store has been removed.
    """
    return num


# NOTE(review): the original file ends with this assignment; the diff does
# not show whether it was module-level or dead code inside load() — kept at
# module level to be safe. Confirm whether anything reads it.
num = 2
helper/upload_button.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+
3
def hide_button():
    """Hide the page's third button by injecting CSS into the app."""
    css = """
    <style>
    .element-container:nth-of-type(3) button {
    display: none;
    }
    </style>
    """
    st.markdown(css, unsafe_allow_html=True)
14
+
15
def unhide_button():
    """Re-show the third button previously hidden by hide_button().

    BUG FIX: the original selector was ``element-container:nth-of-type(3)``
    (no leading dot), which is an unknown element selector and never
    matches, so the button was never re-shown.
    """
    st.markdown(
        """
        <style>
        .element-container:nth-of-type(3) button {
        display: inline;
        }
        </style>
        """,
        unsafe_allow_html=True,
    )
helper/upload_response.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import json
3
+ from datetime import datetime, timedelta, timezone
4
+ from pymongo import MongoClient
5
+ from dotenv import load_dotenv
6
+
7
+ load_dotenv()
8
+
9
def upload_response(data):
    """
    Sends JSON data to a remote MongoDB instance (``df_response`` collection).

    Args:
        data (dict): The JSON data to send. A ``timestamp`` key (current
            time in GMT+8, ISO format) is added to it in place before
            insertion.
    """
    mongodb_uri = os.getenv("MONGODB_URI")
    if not mongodb_uri:
        print("Telemetry skipped, no database configured.")
        return

    # BUG FIX: pre-bind `client` so the `finally` block cannot raise
    # NameError (masking the real error) when MongoClient() itself fails.
    client = None
    try:
        # Current UTC time converted to GMT+8, ISO formatted.
        gmt_plus_8 = timezone(timedelta(hours=8))
        data['timestamp'] = datetime.now(timezone.utc).astimezone(gmt_plus_8).isoformat()

        client = MongoClient(mongodb_uri)
        db = client.get_database()  # default database from the URI
        db["df_response"].insert_one(data)
        print("Data successfully sent to MongoDB.")
    except Exception as e:
        # Best-effort telemetry: log and continue, never crash the caller.
        print(f"Error sending data to MongoDB: {e}")
    finally:
        if client is not None:
            client.close()
pages/analyzing_page.py ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import streamlit as st
3
+ import threading
4
+ from streamlit.runtime.scriptrunner import add_script_run_ctx
5
+ from classes.response_off import SeoOffPageAnalyst
6
+ from classes.response_on_page import SeoOn
7
+ from classes.response_website_and_tools import WebsiteAndTools
8
+ from classes.response_seo import Seo
9
+
10
def run_analysis():
    """Run the four SEO analyses concurrently, one status placeholder each.

    Each analysis reports progress/failure through its own ``st.empty()``
    placeholder so messages do not overwrite one another. Threads receive
    the Streamlit script-run context (``add_script_run_ctx``) so the
    analyst classes can use st.session_state and other Streamlit APIs.

    Note: the original also read four session-state keys
    ('off_page_file_uploaded', 'GT_file_uploaded', 'website_and_tools',
    'seo') into locals that were never used; those reads were removed.
    """
    # Placeholders for per-analysis status updates.
    off_page_status = st.empty()
    on_page_status = st.empty()
    website_and_tools_status = st.empty()
    seo_status = st.empty()

    def run_off_page_analysis():
        """SEO Off Page analysis; reports through off_page_status."""
        try:
            off_page_status.info("Starting SEO Off Page Analysis...")
            result = SeoOffPageAnalyst(os.getenv('MODEL_Off_Page_Analyst'))
            off_page_status.success("SEO Off Page Analysis completed successfully.")
            return result
        except Exception as e:
            off_page_status.error(f"SEO Off Page Analysis failed: {e}")
            return None

    def run_on_page_analysis():
        """On Page analysis; reports through on_page_status."""
        try:
            on_page_status.info("Starting On Page Analysis...")
            result = SeoOn(os.getenv('MODEL_On_Page_Analyst'))
            on_page_status.success("On Page Analysis completed successfully.")
            return result
        except Exception as e:
            on_page_status.error(f"On Page Analysis failed: {e}")
            return None

    def run_website_and_tools_analysis():
        """Website and Tools analysis; reports through website_and_tools_status."""
        try:
            website_and_tools_status.info("Starting Website and Tools Analysis...")
            result = WebsiteAndTools(os.getenv('Model_Website_and_Tools_Analyst'))
            website_and_tools_status.success("Website and Tools completed successfully.")
            return result
        except Exception as e:
            # BUG FIX: the original reported this failure through
            # on_page_status, clobbering the On Page message.
            website_and_tools_status.error(f"Website and Tools Analysis failed: {e}")
            return None

    def run_seo_analysis():
        """SEO analysis; reports through seo_status."""
        try:
            seo_status.info("Starting SEO Analysis...")
            result = Seo(os.getenv('MODEL_SEO_Analyst'))
            seo_status.success("SEO Analysis completed successfully.")
            return result
        except Exception as e:
            seo_status.error(f"SEO Analysis failed: {e}")
            return None

    # Create one thread per analysis for concurrent execution.
    threads = [
        threading.Thread(target=run_off_page_analysis),
        threading.Thread(target=run_on_page_analysis),
        threading.Thread(target=run_website_and_tools_analysis),
        threading.Thread(target=run_seo_analysis),
    ]

    # Attach the Streamlit context, then start and wait for all threads.
    for t in threads:
        add_script_run_ctx(t)
    for t in threads:
        t.start()
    for t in threads:
        t.join()
85
+
86
+ # Execute the analysis
87
+ run_analysis()
pages/home.py CHANGED
@@ -1,7 +1,7 @@
1
  import os
2
  import streamlit as st
3
  from classes.Off_Page import SeoOffPageAnalyst
4
- from classes.On_Page_GT import SeoOnGT
5
  from classes.On_Page_Crawl import SeoOnCrawl
6
  from classes.Seo_Backlinks import SeoBacklinks
7
  from classes.Seo import Seo
@@ -11,8 +11,10 @@ from classes.Twitter import Twitter
11
  from classes.Youtube import YouTube
12
  from classes.Linkedin import Linkedin
13
  from classes.Tiktok import Tiktok
 
14
  import asyncio
15
  import time
 
16
 
17
  class DigitalFootprintDashboard:
18
  def __init__(self):
@@ -31,10 +33,6 @@ class DigitalFootprintDashboard:
31
  st.session_state['nature'] = ''
32
  if 'analyze' not in st.session_state:
33
  st.session_state['analyze'] = ''
34
- if 'seo' not in st.session_state:
35
- st.session_state['seo'] = ''
36
- if 'twitter' not in st.session_state:
37
- st.session_state['twitter'] = ''
38
 
39
  async def create_row1(self):
40
  """Create the first row with four columns"""
@@ -48,16 +46,19 @@ class DigitalFootprintDashboard:
48
  )
49
  st.session_state.nature = txt
50
 
51
- analyze_button = st.button("Analyze", st.session_state['analyze'])
52
- if analyze_button == True:
53
  st.session_state["analyze"] = 'clicked'
54
- st.session_state['seo'] = 'clicked'
55
- st.session_state['twitter'] = 'clicked'
56
  else:
57
  st.session_state["analyze"] = ''
58
- st.session_state['seo'] = ''
59
- st.session_state['twitter'] = ''
60
-
 
 
 
 
61
  with col2:
62
  st.write("## Website Traffic")
63
  self.backlinks = SeoOffPageAnalyst(os.getenv('MODEL_Off_Page_Analyst'))
@@ -85,8 +86,9 @@ class DigitalFootprintDashboard:
85
 
86
  with col4:
87
  st.write("## Website Structure")
88
- self.crawl = SeoOnCrawl(os.getenv('MODEL_On_Page_Analyst'))
89
- self.gtmetrix = SeoOnGT(os.getenv('MODEL_On_Page_Analyst'))
 
90
 
91
  return col1, col2, col3, col4
92
 
@@ -101,14 +103,13 @@ class DigitalFootprintDashboard:
101
  self.youtube.process(),
102
  self.linkedin.process(),
103
  self.tiktok.process(),
104
- self.crawl.process()
105
  )
106
  st.session_state.analyze = False
107
 
108
  async def main(self):
109
  """Main method to run the dashboard"""
110
  await self.create_row1()
111
- await self.run_analysis()
112
 
113
  # Main execution
114
  if __name__ == "__main__":
 
1
  import os
2
  import streamlit as st
3
  from classes.Off_Page import SeoOffPageAnalyst
4
+ from classes.On_Page import SeoOn
5
  from classes.On_Page_Crawl import SeoOnCrawl
6
  from classes.Seo_Backlinks import SeoBacklinks
7
  from classes.Seo import Seo
 
11
  from classes.Youtube import YouTube
12
  from classes.Linkedin import Linkedin
13
  from classes.Tiktok import Tiktok
14
+ from classes.website_and_tools import WebsiteAndTools
15
  import asyncio
16
  import time
17
+ from helper.upload_button import hide_button, unhide_button
18
 
19
  class DigitalFootprintDashboard:
20
  def __init__(self):
 
33
  st.session_state['nature'] = ''
34
  if 'analyze' not in st.session_state:
35
  st.session_state['analyze'] = ''
 
 
 
 
36
 
37
  async def create_row1(self):
38
  """Create the first row with four columns"""
 
46
  )
47
  st.session_state.nature = txt
48
 
49
+ upload_file_button = st.button("Upload File", st.session_state['analyze'])
50
+ if upload_file_button == True:
51
  st.session_state["analyze"] = 'clicked'
52
+ unhide_button()
 
53
  else:
54
  st.session_state["analyze"] = ''
55
+
56
+ analyze_button = st.button("Analyze")
57
+ if analyze_button == True:
58
+ st.switch_page("pages/analyzing_page.py")
59
+ else:
60
+ hide_button()
61
+
62
  with col2:
63
  st.write("## Website Traffic")
64
  self.backlinks = SeoOffPageAnalyst(os.getenv('MODEL_Off_Page_Analyst'))
 
86
 
87
  with col4:
88
  st.write("## Website Structure")
89
+ #self.crawl = SeoOnCrawl(os.getenv('MODEL_On_Page_Analyst'))
90
+ self.on_page = SeoOn(os.getenv('MODEL_On_Page_Analyst'))
91
+ self.website_and_tools = WebsiteAndTools(os.getenv('MODEL_On_Page_Analyst'))
92
 
93
  return col1, col2, col3, col4
94
 
 
103
  self.youtube.process(),
104
  self.linkedin.process(),
105
  self.tiktok.process(),
 
106
  )
107
  st.session_state.analyze = False
108
 
109
  async def main(self):
110
  """Main method to run the dashboard"""
111
  await self.create_row1()
112
+ #self.run_analysis()
113
 
114
  # Main execution
115
  if __name__ == "__main__":