GiantAnalytics committed on
Commit
bef21a8
·
verified ·
1 Parent(s): cb20588

Creating app.py

Browse files
Files changed (1) hide show
  1. app.py +119 -0
app.py ADDED
@@ -0,0 +1,119 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import os
import shutil
import tempfile

import streamlit as st
from langchain.chains.question_answering import load_qa_chain
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import FAISS
from langchain_community.document_loaders import WebBaseLoader
from langchain_openai import ChatOpenAI
from reportlab.lib.pagesizes import letter
from reportlab.lib.utils import simpleSplit
from reportlab.pdfgen import canvas
13
# --- Configuration ----------------------------------------------------------
# Propagate the OpenAI API key from the environment.
# Bug fix: the original assigned os.getenv('OPENAI_API_KEY') straight into
# os.environ, which raises TypeError when the variable is unset (environ
# values must be str). Guard the assignment and surface a clear warning in
# the UI instead of crashing at import time.
_api_key = os.getenv('OPENAI_API_KEY')
if _api_key:
    os.environ['OPENAI_API_KEY'] = _api_key
else:
    st.warning("OPENAI_API_KEY is not set; OpenAI calls will fail until it is configured.")

# Streamlit UI
st.title("🔍 AI Benefits Analysis for Any Company")

# User input: only a website URL (with placeholder)
website_url = st.text_input("Enter Website URL", placeholder="e.g., https://www.companywebsite.com")

# Fixed question driving the AI analysis (same prompt for every company).
fixed_question = (
    "Analyze how Artificial Intelligence (AI) can benefit this company based on its industry, "
    "key operations, and challenges. Provide insights on AI-driven improvements in customer experience, "
    "automation, sales, risk management, decision-making, and innovation. Include an AI implementation roadmap, "
    "challenges, solutions, and future opportunities with real-world examples."
)

# Temporary directory used to persist the FAISS index between steps.
temp_dir = tempfile.gettempdir()
faiss_db_path = os.path.join(temp_dir, "faiss_index_dir")
35
# Fetch a web page, chunk its text, embed it, and persist a FAISS index.
def build_embeddings(url):
    """Load *url*, split its text into chunks, and return a FAISS vector store.

    The resulting index is also saved under ``faiss_db_path``; any previous
    index stored there is removed first.
    """
    st.info("Fetching and processing website data...")

    # Download the page content as LangChain documents.
    documents = WebBaseLoader(url).load()

    # Break the fetched text into overlapping ~500-character chunks.
    splitter = CharacterTextSplitter(separator='\n', chunk_size=500, chunk_overlap=50)
    chunks = splitter.split_documents(documents)

    # Embed the chunks into an in-memory FAISS vector store.
    store = FAISS.from_documents(chunks, OpenAIEmbeddings())

    # Replace any stale on-disk index with the fresh one.
    if os.path.exists(faiss_db_path):
        shutil.rmtree(faiss_db_path)
    os.makedirs(faiss_db_path)
    store.save_local(faiss_db_path)

    return store
58
+
59
# Render plain text into a simple paginated PDF report.
def save_text_to_pdf(text, file_path):
    """Write *text* to a PDF at *file_path*.

    Draws a bold title, then wraps each input line to the printable width and
    emits it in 12pt Helvetica, starting a new page whenever the cursor
    reaches the bottom margin.
    """
    c = canvas.Canvas(file_path, pagesize=letter)
    width, height = letter

    # Page margins and the usable text width between them.
    margin_x = 50
    margin_y = 50
    max_width = width - 2 * margin_x  # usable text width

    # Title
    c.setFont("Helvetica-Bold", 16)
    c.drawString(margin_x, height - margin_y, "AI Benefits Analysis Report")

    # Move the cursor below the title and switch to the body font.
    y_position = height - margin_y - 30
    c.setFont("Helvetica", 12)

    # Wrap every input line so it fits within max_width.
    # Bug fix: the original called simpleSplit() without importing it,
    # which raised NameError on first use; it is now imported from
    # reportlab.lib.utils at the top of the file.
    wrapped_lines = []
    for line in text.split("\n"):
        wrapped_lines.extend(simpleSplit(line, "Helvetica", 12, max_width))

    # Write the wrapped lines, paginating at the bottom margin.
    for line in wrapped_lines:
        if y_position < margin_y:  # at the bottom of the page: start a new one
            c.showPage()
            c.setFont("Helvetica", 12)  # fonts reset on a new page
            y_position = height - margin_y
        c.drawString(margin_x, y_position, line)
        y_position -= 16  # line spacing

    c.save()
94
+
95
# Run everything in one click.
if st.button("Get AI Insights") and website_url:
    docsearch = build_embeddings(website_url)

    # AI Benefits Analysis
    st.subheader("💬 AI Benefits Analysis")

    # Retrieve the chunks most relevant to the fixed question and have the
    # LLM answer over them ("stuff" = concatenate all docs into one prompt).
    chain = load_qa_chain(ChatOpenAI(model="gpt-4o"), chain_type="stuff")
    docs = docsearch.similarity_search(fixed_question)
    response = chain.run(input_documents=docs, question=fixed_question)

    st.write("**AI Insights:**", response)

    # Save the AI insights as a PDF.
    # Bug fix: close the NamedTemporaryFile handle before reportlab writes to
    # the same path — keeping it open leaks a file descriptor and fails on
    # Windows, where an open temp file cannot be reopened by name.
    pdf_file = tempfile.NamedTemporaryFile(delete=False, suffix=".pdf")
    pdf_file.close()
    save_text_to_pdf(response, pdf_file.name)

    # Provide a download link for the generated PDF file.
    with open(pdf_file.name, "rb") as f:
        st.download_button(
            label="Download AI Insights as PDF File",
            data=f,
            file_name="ai_benefits_analysis_report.pdf",
            mime="application/pdf"
        )