File size: 3,260 Bytes
e1d1e41
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
import streamlit as st
import tempfile
import os
import shutil
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import FAISS
from langchain_community.document_loaders import WebBaseLoader
from langchain.chains.question_answering import load_qa_chain
from langchain_openai import ChatOpenAI
import os
from reportlab.lib.pagesizes import letter
from reportlab.pdfgen import canvas

# OpenAI API key: read from the environment, never hardcoded in source.
# The original `os.environ[...] = os.getenv(...)` crashed with a TypeError
# when the variable was unset (os.environ values must be str, not None);
# fail fast with a visible Streamlit error instead.
_api_key = os.getenv('OPENAI_API_KEY')
if not _api_key:
    st.error("OPENAI_API_KEY environment variable is not set.")
    st.stop()
os.environ['OPENAI_API_KEY'] = _api_key

# Streamlit UI
st.title("🔍 AI Benefits Analysis for Any Company")

# User input: Only Website URL (with placeholder)
website_url = st.text_input("Enter Website URL", placeholder="e.g., https://www.companywebsite.com")

# Fixed question for AI analysis — every run asks the same analysis prompt
# against whatever site the user supplied.
fixed_question = (
    "Analyze how Artificial Intelligence (AI) can benefit this company based on its industry, "
    "key operations, and challenges. Provide insights on AI-driven improvements in customer experience, "
    "automation, sales, risk management, decision-making, and innovation. Include an AI implementation roadmap, "
    "challenges, solutions, and future opportunities with real-world examples."
)

# Temporary directory to store the FAISS index between steps of one run.
temp_dir = tempfile.gettempdir()
faiss_db_path = os.path.join(temp_dir, "faiss_index_dir")

# Function to fetch and process website data
def build_embeddings(url):
    """Fetch the page at *url*, chunk it, embed it, and persist a FAISS index.

    The index is saved under the module-level ``faiss_db_path`` (any previous
    copy is replaced) and the in-memory vector store is returned.
    """
    st.info("Fetching and processing website data...")

    # Pull the raw documents from the website.
    documents = WebBaseLoader(url).load()

    # Split into overlapping chunks small enough to embed individually.
    splitter = CharacterTextSplitter(separator='\n', chunk_size=500, chunk_overlap=50)
    chunks = splitter.split_documents(documents)

    # Embed the chunks into a FAISS vector store.
    store = FAISS.from_documents(chunks, OpenAIEmbeddings())

    # Persist the index, wiping any stale copy from an earlier run first.
    if os.path.exists(faiss_db_path):
        shutil.rmtree(faiss_db_path)
    os.makedirs(faiss_db_path)
    store.save_local(faiss_db_path)

    return store

# Function to generate PDF
def generate_pdf(text, file_path):
    c = canvas.Canvas(file_path, pagesize=letter)
    c.setFont("Helvetica", 12)
    width, height = letter
    text_object = c.beginText(40, height - 40)
    text_object.textLines(text)
    c.drawText(text_object)
    c.showPage()
    c.save()

# Run everything in one click
if st.button("Get AI Insights") and website_url:
    docsearch = build_embeddings(website_url)

    # AI Benefits Analysis
    st.subheader("💬 AI Benefits Analysis")

    chain = load_qa_chain(ChatOpenAI(model="gpt-4o"), chain_type="stuff")
    docs = docsearch.similarity_search(fixed_question)
    response = chain.run(input_documents=docs, question=fixed_question)

    st.write("**AI Insights:**", response)

    # Generate PDF from the AI insights
    pdf_file = tempfile.NamedTemporaryFile(delete=False, suffix=".pdf")
    generate_pdf(response, pdf_file.name)

    # Provide download link for the generated PDF
    with open(pdf_file.name, "rb") as f:
        st.download_button(
            label="Download AI Insights as PDF",
            data=f,
            file_name="ai_benefits_analysis.pdf",
            mime="application/pdf"
        )