# Streamlit app: AI Benefits Analysis for any company website.
import streamlit as st
import tempfile
import os
import shutil
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import FAISS
from langchain_community.document_loaders import WebBaseLoader
from langchain.chains.question_answering import load_qa_chain
from langchain_openai import ChatOpenAI
from reportlab.lib.pagesizes import letter
from reportlab.pdfgen import canvas
from reportlab.pdfbase.pdfmetrics import stringWidth

# The OpenAI API key comes from the environment. Assigning os.getenv(...)
# straight into os.environ raises TypeError when the variable is unset
# (environ values must be str), so check first and surface a clear error.
_api_key = os.getenv("OPENAI_API_KEY")
if _api_key:
    os.environ["OPENAI_API_KEY"] = _api_key
else:
    st.error("OPENAI_API_KEY environment variable is not set.")

# Streamlit UI
st.title("🔍 AI Benefits Analysis for Any Company")

# User input: only the website URL (with placeholder)
website_url = st.text_input(
    "Enter Website URL",
    placeholder="e.g., https://www.companywebsite.com",
)

# Fixed question driving the AI analysis
fixed_question = (
    "Analyze how Artificial Intelligence (AI) can benefit this company based on its industry, "
    "key operations, and challenges. Provide insights on AI-driven improvements in customer experience, "
    "automation, sales, risk management, decision-making, and innovation. Include an AI implementation roadmap, "
    "challenges, solutions, and future opportunities with real-world examples."
)

# Temporary directory used to persist the FAISS index between steps
temp_dir = tempfile.gettempdir()
faiss_db_path = os.path.join(temp_dir, "faiss_index_dir")
# Fetch and process website data into a persisted FAISS vector store.
def build_embeddings(url, index_path=None):
    """Load *url*, chunk its text, embed the chunks, and save a FAISS index.

    Args:
        url: Website URL to fetch.
        index_path: Directory in which to persist the FAISS index.
            Defaults to the module-level ``faiss_db_path``.

    Returns:
        The in-memory FAISS vector store (also saved to ``index_path``).
    """
    if index_path is None:
        index_path = faiss_db_path

    st.info("Fetching and processing website data...")

    # Load the website content as LangChain documents
    raw_docs = WebBaseLoader(url).load()

    # Chunk the fetched text so each piece is small enough to embed
    splitter = CharacterTextSplitter(separator='\n', chunk_size=500, chunk_overlap=50)
    docs = splitter.split_documents(raw_docs)

    # Embed the chunks and build the vector store
    docsearch = FAISS.from_documents(docs, OpenAIEmbeddings())

    # Replace any stale on-disk index before saving the fresh one
    if os.path.exists(index_path):
        shutil.rmtree(index_path)
    os.makedirs(index_path)
    docsearch.save_local(index_path)

    return docsearch
# Render analysis text into a simple letter-sized PDF report.
def save_text_to_pdf(text, file_path):
    """Write *text* to a word-wrapped, paginated PDF at *file_path*.

    Lines are wrapped to the printable width; a new page is started
    automatically when the cursor reaches the bottom margin.
    """
    c = canvas.Canvas(file_path, pagesize=letter)
    width, height = letter

    # Page margins and the usable text width between them
    margin_x = 50
    margin_y = 50
    max_width = width - 2 * margin_x

    # Title
    c.setFont("Helvetica-Bold", 16)
    c.drawString(margin_x, height - margin_y, "AI Benefits Analysis Report")

    # Move the cursor below the title
    y_position = height - margin_y - 30
    c.setFont("Helvetica", 12)

    def wrap_text(line, font_name, font_size, max_width):
        """Word-wrap *line* so each returned piece fits within max_width.

        A blank input line is preserved as a single empty string so blank
        lines keep their vertical spacing in the PDF. A single word wider
        than max_width is emitted on its own line (it may overflow, but no
        spurious empty lines are produced).
        """
        words = line.split()
        if not words:
            return [""]
        lines = []
        current = ""
        for word in words:
            candidate = f"{current} {word}" if current else word
            if stringWidth(candidate, font_name, font_size) <= max_width:
                current = candidate
            else:
                if current:  # avoid emitting empty lines for overlong words
                    lines.append(current)
                current = word
        if current:
            lines.append(current)
        return lines

    # Wrap every source line to the printable width
    wrapped_lines = []
    for line in text.split("\n"):
        wrapped_lines.extend(wrap_text(line, "Helvetica", 12, max_width))

    # Emit the wrapped lines, paginating at the bottom margin
    for line in wrapped_lines:
        if y_position < margin_y:  # bottom of page: start a new one
            c.showPage()
            c.setFont("Helvetica", 12)
            y_position = height - margin_y
        c.drawString(margin_x, y_position, line)
        y_position -= 16  # line spacing

    c.save()
# Run the whole pipeline in one click: embed the site, run the QA chain,
# display the answer, and offer it as a downloadable PDF.
if st.button("Get AI Insights") and website_url:
    docsearch = build_embeddings(website_url)

    # AI Benefits Analysis
    st.subheader("💬 AI Benefits Analysis")
    chain = load_qa_chain(ChatOpenAI(model="gpt-4o"), chain_type="stuff")
    docs = docsearch.similarity_search(fixed_question)
    response = chain.run(input_documents=docs, question=fixed_question)
    st.write("**AI Insights:**", response)

    # Save the AI insights as a PDF. Close the temp-file handle before
    # reportlab writes to the same path: on Windows a still-open
    # NamedTemporaryFile cannot be reopened by another writer.
    pdf_file = tempfile.NamedTemporaryFile(delete=False, suffix=".pdf")
    pdf_file.close()
    save_text_to_pdf(response, pdf_file.name)

    # Provide a download link for the generated PDF file
    with open(pdf_file.name, "rb") as f:
        st.download_button(
            label="Download AI Insights as PDF File",
            data=f,
            file_name="ai_benefits_analysis_report.pdf",
            mime="application/pdf",
        )