GiantAnalytics committed on
Commit
e1d1e41
·
verified ·
1 Parent(s): d783238

Creating app.py

Browse files
Files changed (1) hide show
  1. app.py +95 -0
app.py ADDED
@@ -0,0 +1,95 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import tempfile
3
+ import os
4
+ import shutil
5
+ from langchain.embeddings.openai import OpenAIEmbeddings
6
+ from langchain.text_splitter import CharacterTextSplitter
7
+ from langchain.vectorstores import FAISS
8
+ from langchain_community.document_loaders import WebBaseLoader
9
+ from langchain.chains.question_answering import load_qa_chain
10
+ from langchain_openai import ChatOpenAI
11
+ import os
12
+ from reportlab.lib.pagesizes import letter
13
+ from reportlab.pdfgen import canvas
14
+
15
# OpenAI API key is read from the environment. Fail fast with a clear
# message when it is missing: the original code assigned
# os.getenv('OPENAI_API_KEY') -- i.e. None -- back into os.environ,
# which raises an opaque TypeError at startup.
_api_key = os.getenv('OPENAI_API_KEY')
if _api_key:
    os.environ['OPENAI_API_KEY'] = _api_key
else:
    st.error("OPENAI_API_KEY environment variable is not set.")
    st.stop()

# Streamlit UI
st.title("🔍 AI Benefits Analysis for Any Company")

# User input: Only Website URL (with placeholder)
website_url = st.text_input("Enter Website URL", placeholder="e.g., https://www.companywebsite.com")

# Fixed question posed to the model for every analyzed site
fixed_question = (
    "Analyze how Artificial Intelligence (AI) can benefit this company based on its industry, "
    "key operations, and challenges. Provide insights on AI-driven improvements in customer experience, "
    "automation, sales, risk management, decision-making, and innovation. Include an AI implementation roadmap, "
    "challenges, solutions, and future opportunities with real-world examples."
)

# Temporary directory to store the FAISS index between runs
temp_dir = tempfile.gettempdir()
faiss_db_path = os.path.join(temp_dir, "faiss_index_dir")
35
+
36
# Fetch a web page, chunk it, embed it, and persist a FAISS index.
def build_embeddings(url):
    """Return a FAISS vector store built from the page at *url*.

    The store is also saved to ``faiss_db_path`` on disk; any previous
    index at that location is replaced.
    """
    st.info("Fetching and processing website data...")

    # Download the page content as LangChain documents.
    documents = WebBaseLoader(url).load()

    # Split the fetched text into overlapping ~500-char chunks.
    splitter = CharacterTextSplitter(separator='\n', chunk_size=500, chunk_overlap=50)
    chunks = splitter.split_documents(documents)

    # Embed the chunks into an in-memory FAISS store.
    store = FAISS.from_documents(chunks, OpenAIEmbeddings())

    # Replace any stale on-disk index with the fresh one.
    if os.path.exists(faiss_db_path):
        shutil.rmtree(faiss_db_path)
    os.makedirs(faiss_db_path)
    store.save_local(faiss_db_path)

    return store
59
+
60
# Function to generate a PDF from a block of text
def generate_pdf(text, file_path):
    """Render *text* into a single-column PDF at *file_path*.

    Long lines are wrapped and a new page is started whenever the current
    one fills up. The original implementation used a single
    ``textLines`` call, which neither wraps nor paginates, so long AI
    responses silently ran off the bottom of the first page.
    """
    import textwrap  # stdlib; local import keeps the block self-contained

    c = canvas.Canvas(file_path, pagesize=letter)
    width, height = letter
    margin = 40
    line_height = 16
    c.setFont("Helvetica", 12)
    y = height - margin
    for paragraph in text.splitlines() or [""]:
        # ~95 chars fits the printable width at 12pt Helvetica; empty
        # paragraphs still advance one line to preserve blank lines.
        for line in textwrap.wrap(paragraph, width=95) or [""]:
            if y < margin:  # page full -> start a new one
                c.showPage()
                c.setFont("Helvetica", 12)  # font state resets per page
                y = height - margin
            c.drawString(margin, y, line)
            y -= line_height
    c.showPage()
    c.save()
70
+
71
# Run everything in one click
if st.button("Get AI Insights"):
    if not website_url:
        # The original silently did nothing when the URL was empty.
        st.warning("Please enter a website URL first.")
    else:
        docsearch = build_embeddings(website_url)

        # AI Benefits Analysis
        st.subheader("💬 AI Benefits Analysis")

        chain = load_qa_chain(ChatOpenAI(model="gpt-4o"), chain_type="stuff")
        docs = docsearch.similarity_search(fixed_question)
        response = chain.run(input_documents=docs, question=fixed_question)

        st.write("**AI Insights:**", response)

        # Generate the PDF, offer it for download, then delete the temp
        # file. The original leaked a NamedTemporaryFile on every click
        # and wrote to it while the handle was still open (which fails
        # on Windows).
        pdf_file = tempfile.NamedTemporaryFile(delete=False, suffix=".pdf")
        try:
            pdf_file.close()  # close before reportlab reopens the path
            generate_pdf(response, pdf_file.name)
            with open(pdf_file.name, "rb") as f:
                st.download_button(
                    label="Download AI Insights as PDF",
                    data=f.read(),  # pass bytes so the file can be removed
                    file_name="ai_benefits_analysis.pdf",
                    mime="application/pdf"
                )
        finally:
            os.remove(pdf_file.name)