Update app.py
Browse files
app.py
CHANGED
|
@@ -220,211 +220,211 @@ class GEOSEOApp:
|
|
| 220 |
st.error(f"An error occurred: {str(e)}")
|
| 221 |
|
| 222 |
def render_content_enhancement_tab(self):
|
| 223 |
-
|
| 224 |
-
|
| 225 |
-
|
| 226 |
|
| 227 |
-
|
| 228 |
-
|
| 229 |
-
|
| 230 |
-
|
| 231 |
-
|
| 232 |
-
|
| 233 |
|
| 234 |
-
|
| 235 |
-
|
| 236 |
-
|
| 237 |
-
|
| 238 |
-
|
| 239 |
-
|
| 240 |
|
| 241 |
-
|
| 242 |
-
|
| 243 |
-
|
| 244 |
-
|
| 245 |
-
|
| 246 |
-
|
| 247 |
-
|
| 248 |
-
|
| 249 |
# Run content analysis and optimization
|
| 250 |
-
|
| 251 |
-
|
| 252 |
-
|
| 253 |
-
|
| 254 |
-
|
| 255 |
|
| 256 |
-
|
| 257 |
-
|
| 258 |
-
|
| 259 |
|
| 260 |
-
|
| 261 |
-
|
| 262 |
-
|
| 263 |
-
|
| 264 |
|
| 265 |
-
|
| 266 |
-
|
| 267 |
-
|
| 268 |
-
|
| 269 |
-
|
| 270 |
-
|
| 271 |
-
|
| 272 |
-
|
| 273 |
-
|
| 274 |
-
|
| 275 |
-
|
| 276 |
-
|
| 277 |
-
|
| 278 |
-
|
| 279 |
-
|
| 280 |
-
|
| 281 |
-
|
| 282 |
-
|
| 283 |
-
|
| 284 |
-
|
| 285 |
-
|
| 286 |
-
|
| 287 |
|
| 288 |
-
|
| 289 |
-
|
| 290 |
-
|
| 291 |
-
|
| 292 |
-
|
| 293 |
-
|
| 294 |
-
|
| 295 |
-
|
| 296 |
-
|
| 297 |
-
|
| 298 |
-
|
| 299 |
-
|
| 300 |
-
|
| 301 |
-
|
| 302 |
-
|
| 303 |
-
|
| 304 |
-
|
| 305 |
-
|
| 306 |
-
|
| 307 |
-
|
| 308 |
-
|
| 309 |
-
result = st.session_state.rag_chain.invoke({"question": user_query})
|
| 310 |
-
st.success("Answer:")
|
| 311 |
-
st.write(result["result"])
|
| 312 |
|
| 313 |
-
|
| 314 |
-
|
| 315 |
-
|
| 316 |
-
|
| 317 |
-
|
| 318 |
-
|
| 319 |
-
|
| 320 |
-
|
| 321 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 322 |
|
| 323 |
-
|
| 324 |
-
|
| 325 |
|
| 326 |
|
| 327 |
def render_website_analysis_tab(self):
|
| 328 |
-
|
| 329 |
-
|
| 330 |
-
|
| 331 |
|
| 332 |
-
|
| 333 |
-
|
| 334 |
-
|
| 335 |
-
|
| 336 |
-
|
| 337 |
-
|
| 338 |
|
| 339 |
-
|
| 340 |
-
|
| 341 |
-
|
| 342 |
-
|
| 343 |
-
|
| 344 |
-
|
| 345 |
|
| 346 |
-
|
| 347 |
-
|
| 348 |
-
|
| 349 |
-
|
| 350 |
-
|
| 351 |
-
|
| 352 |
-
|
| 353 |
-
|
| 354 |
-
|
| 355 |
-
|
| 356 |
|
| 357 |
-
|
| 358 |
-
|
| 359 |
-
|
| 360 |
-
|
| 361 |
-
|
| 362 |
-
|
| 363 |
-
|
| 364 |
|
| 365 |
-
|
| 366 |
-
|
| 367 |
-
|
| 368 |
|
| 369 |
-
|
| 370 |
|
| 371 |
-
|
| 372 |
-
|
| 373 |
-
|
| 374 |
-
|
| 375 |
-
|
| 376 |
-
|
| 377 |
-
|
| 378 |
-
|
| 379 |
-
|
| 380 |
-
|
| 381 |
|
| 382 |
-
|
| 383 |
-
|
| 384 |
-
|
| 385 |
-
|
| 386 |
-
|
| 387 |
|
| 388 |
-
|
| 389 |
-
|
| 390 |
-
|
| 391 |
|
| 392 |
-
|
| 393 |
-
|
| 394 |
-
|
| 395 |
-
|
| 396 |
|
| 397 |
-
|
| 398 |
-
|
| 399 |
|
| 400 |
-
|
| 401 |
-
|
| 402 |
-
|
| 403 |
|
| 404 |
-
|
| 405 |
-
|
| 406 |
-
|
| 407 |
-
|
| 408 |
|
| 409 |
-
|
| 410 |
-
|
| 411 |
|
| 412 |
-
|
| 413 |
-
|
| 414 |
-
|
| 415 |
-
|
| 416 |
-
|
| 417 |
-
|
| 418 |
-
|
| 419 |
-
|
| 420 |
-
|
| 421 |
-
|
| 422 |
-
|
| 423 |
-
|
| 424 |
-
|
| 425 |
-
|
| 426 |
-
|
| 427 |
-
|
| 428 |
|
| 429 |
def render_multilingual_tab(self):
|
| 430 |
st.markdown("### π Multilingual Translator")
|
|
|
|
| 220 |
st.error(f"An error occurred: {str(e)}")
|
| 221 |
|
| 222 |
def render_content_enhancement_tab(self):
    """Render the Content Enhancement tab.

    Collects text from the user, runs it through ``self.content_optimizer``,
    displays scores / keywords / optimized text, optionally builds a RAG
    chain over the result for ad-hoc Q&A, and offers a JSON export of the
    analysis. All failures are surfaced via ``st.error``; nothing is raised
    to the caller.
    """
    st.header("π§ Content Enhancement")
    st.markdown("Analyze and optimize your content for better AI/LLM performance.")

    # Content input
    input_text = st.text_area(
        "Enter content to analyze and enhance:",
        height=200,
        key="enhancement_input"
    )

    # Analysis options
    col1, col2 = st.columns(2)
    with col1:
        analyze_only = st.checkbox("Analysis only (no rewriting)", value=False)
    with col2:
        include_keywords = st.checkbox("Include keyword suggestions", value=True)

    # Submit button
    if st.button("π§ Analyze & Enhance", key="enhancement_submit"):
        if not input_text.strip():
            st.warning("Please enter some content to analyze.")
            return

        try:
            with st.spinner("Analyzing content..."):
                # Run content analysis and optimization
                result = self.content_optimizer.optimize_content(
                    input_text,
                    analyze_only=analyze_only,
                    include_keywords=include_keywords
                )

            if result.get("error"):
                st.error(f"Analysis failed: {result['error']}")
                return

            # Display results
            if analyze_only:
                st.success("Content analysis completed successfully!")
                st.markdown("### π Analysis Results")

                # Show scores
                scores = result.get("scores", {})
                if scores:
                    col1, col2, col3 = st.columns(3)

                    with col1:
                        clarity = scores.get("clarity", 0)
                        st.metric("Clarity", f"{clarity}/10")

                    with col2:
                        structure = scores.get("structuredness", 0)
                        st.metric("Structure", f"{structure}/10")

                    with col3:
                        answerability = scores.get("answerability", 0)
                        st.metric("Answerability", f"{answerability}/10")

                # Show keywords
                keywords = result.get("keywords", [])
                if keywords:
                    st.markdown("#### π Key Terms")
                    st.write(", ".join(keywords))

            # Show optimized content
            optimized_text = result.get("optimized_text", "")
            if optimized_text:
                st.markdown("#### β¨ Optimized Content")
                st.text_area(
                    "Enhanced version:",
                    value=optimized_text,
                    height=200,
                    key="optimized_output"
                )

            # Optional RAG-based Q&A on the analyzed content.
            # NOTE(review): these widgets live inside the submit-button branch;
            # on a Streamlit rerun the button reads False, so the Q&A/export UI
            # disappears after interaction — consider session_state gating.
            st.markdown("### π¬ Ask a question about the analyzed content:")
            user_query = st.text_input("Enter your question:", key="enhancement_q")

            if user_query:
                from langchain.docstore.document import Document
                new_doc = Document(page_content=optimized_text or input_text)
                vectorstore = create_vectorstore_from_text([new_doc], self.embeddings)
                st.session_state.rag_chain = create_rag_chain(self.llm, vectorstore)

                # Use a distinct name: previously this reassigned `result`,
                # clobbering the optimizer output exported below.
                qa_result = st.session_state.rag_chain.invoke({"question": user_query})
                st.success("Answer:")
                st.write(qa_result["result"])

            # Export option — exports the *enhancement* analysis, not the Q&A answer
            if st.button("π₯ Export Results"):
                export_data = self.result_exporter.export_enhancement_results(result)
                st.download_button(
                    label="Download Analysis Report",
                    data=json.dumps(export_data, indent=2),
                    file_name=f"content_analysis_{int(time.time())}.json",
                    mime="application/json"
                )

        except Exception as e:
            st.error(f"An error occurred: {str(e)}")
| 327 |
def render_website_analysis_tab(self):
    """Render the Website GEO Analysis tab.

    Crawls the given URL via ``self.webpage_parser``, scores each page with
    ``self.geo_scorer``, builds a RAG chain over the combined page text for
    Q&A, renders results through ``self.display_geo_results``, and offers a
    JSON report download. All failures are surfaced via ``st.error``.
    """
    st.header("π Website GEO Analysis")
    st.markdown("Analyze websites for Generative Engine Optimization (GEO) performance.")

    # URL input
    col1, col2 = st.columns([3, 1])
    with col1:
        website_url = st.text_input("Enter website URL:", placeholder="https://example.com")
    with col2:
        max_pages = st.selectbox("Pages to analyze:", [1, 3, 5], index=0)

    # Analysis options
    col1, col2 = st.columns(2)
    with col1:
        include_subpages = st.checkbox("Include subpages", value=False)
    with col2:
        detailed_analysis = st.checkbox("Detailed analysis", value=True)

    # Submit button
    if st.button("π Analyze Website", key="website_analyze"):
        if not website_url.strip():
            st.warning("Please enter a website URL.")
            return

        try:
            # Normalize URL: default to https when no scheme was given
            if not website_url.startswith(('http://', 'https://')):
                website_url = 'https://' + website_url

            with st.spinner(f"Analyzing website: {website_url}"):
                # Parse website content
                pages_data = self.webpage_parser.parse_website(
                    website_url,
                    max_pages=max_pages,
                    include_subpages=include_subpages
                )

            if not pages_data:
                st.error("Could not extract content from the website.")
                return

            st.success(f"Successfully extracted content from {len(pages_data)} page(s)")

            # Analyze GEO scores page by page; pages that fail are skipped
            # with a warning instead of aborting the whole run.
            with st.spinner("Calculating GEO scores..."):
                geo_results = []
                for i, page_data in enumerate(pages_data):
                    with st.spinner(f"Analyzing page {i+1}/{len(pages_data)}..."):
                        analysis = self.geo_scorer.analyze_page_geo(
                            page_data['content'],
                            page_data['title'],
                            detailed=detailed_analysis
                        )

                    if not analysis.get('error'):
                        analysis['page_data'] = page_data
                        geo_results.append(analysis)
                    else:
                        st.warning(f"Could not analyze page {i+1}: {analysis['error']}")

            if not geo_results:
                st.error("Could not analyze any pages from the website.")
                return

            # Combine all page content for RAG
            combined_content = "\n\n".join([page['content'] for page in pages_data])
            from langchain.docstore.document import Document
            doc = Document(page_content=combined_content)

            vectorstore = create_vectorstore_from_text([doc], self.embeddings)
            st.session_state.rag_chain = create_rag_chain(self.llm, vectorstore)

            # RAG-based Q&A.
            # NOTE(review): widgets nested in this button branch vanish on the
            # rerun triggered by typing a question — consider session_state gating.
            st.markdown("### π¬ Ask a question about the website:")
            user_query = st.text_input("Ask here:", key="website_q")

            if user_query:
                result = st.session_state.rag_chain.invoke({"question": user_query})
                st.success("Answer:")
                st.write(result["result"])

            # Display results
            self.display_geo_results(geo_results, website_url)

            # Export functionality
            st.markdown("### π₯ Export Results")
            if st.button("π Generate Full Report"):
                report_data = self.result_exporter.export_geo_results(
                    geo_results,
                    website_url
                )
                # Strip BOTH schemes: previously only 'https://' was removed,
                # so an explicit http:// URL produced a filename containing ':'.
                safe_name = (website_url
                             .replace('https://', '')
                             .replace('http://', '')
                             .replace('/', '_'))
                st.download_button(
                    label="Download GEO Report",
                    data=json.dumps(report_data, indent=2),
                    file_name=f"geo_analysis_{safe_name}.json",
                    mime="application/json"
                )

        except Exception as e:
            st.error(f"An error occurred during website analysis: {str(e)}")
| 429 |
def render_multilingual_tab(self):
|
| 430 |
st.markdown("### π Multilingual Translator")
|