GiantAnalytics committed on
Commit
880a2ba
·
verified ·
1 Parent(s): 1cc26e4

Creating app.py

Browse files
Files changed (1) hide show
  1. app.py +86 -0
app.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import tempfile
3
+ import os
4
+ import shutil
5
+ from langchain.embeddings.openai import OpenAIEmbeddings
6
+ from langchain.text_splitter import CharacterTextSplitter
7
+ from langchain.vectorstores import FAISS
8
+ from langchain_community.document_loaders import WebBaseLoader
9
+ from langchain.chains.question_answering import load_qa_chain
10
+ from langchain_openai import ChatOpenAI
11
+
# --- App configuration: API key, page UI, fixed prompt, index location ---

# Propagate the OpenAI API key from the environment, if present.
# NOTE: assigning os.getenv(...) directly into os.environ raises TypeError
# when the variable is unset, because os.environ values must be strings.
_api_key = os.getenv('OPENAI_API_KEY')
if _api_key is not None:
    os.environ['OPENAI_API_KEY'] = _api_key

# Streamlit UI
st.title("🔍 AI Benefits Analysis for Any Company")

# User input: only the website URL (with placeholder)
website_url = st.text_input("Enter Website URL", placeholder="e.g., https://www.companywebsite.com")

# Fixed question sent to the model for every analysis run
fixed_question = (
    "Analyze how Artificial Intelligence (AI) can benefit this company based on its industry, "
    "key operations, and challenges. Provide insights on AI-driven improvements in customer experience, "
    "automation, sales, risk management, decision-making, and innovation. Include an AI implementation roadmap, "
    "challenges, solutions, and future opportunities with real-world examples."
)

# Temporary directory used to persist the FAISS index between steps
temp_dir = tempfile.gettempdir()
faiss_db_path = os.path.join(temp_dir, "faiss_index_dir")
32
+
# Fetch website content and build a searchable vector index from it
def build_embeddings(url):
    """Fetch a web page, chunk its text, embed the chunks, and persist a FAISS index.

    Returns the in-memory FAISS vector store built from the page content;
    the index is also saved to ``faiss_db_path`` on disk.
    """
    st.info("Fetching and processing website data...")

    # Download the page and split it into overlapping chunks.
    documents = WebBaseLoader(url).load()
    splitter = CharacterTextSplitter(separator='\n', chunk_size=500, chunk_overlap=50)
    chunks = splitter.split_documents(documents)

    # Embed the chunks into a FAISS vector store.
    store = FAISS.from_documents(chunks, OpenAIEmbeddings())

    # Replace any previously saved index, then persist the fresh one.
    if os.path.exists(faiss_db_path):
        shutil.rmtree(faiss_db_path)
    os.makedirs(faiss_db_path)
    store.save_local(faiss_db_path)

    return store
56
+
# Function to save text to a .txt file
def save_text_to_file(text, file_path):
    """Write ``text`` to ``file_path`` as UTF-8, overwriting any existing file.

    Explicit UTF-8 avoids platform-dependent default encodings (e.g. cp1252
    on Windows), which would crash on non-ASCII characters in the response.
    """
    with open(file_path, "w", encoding="utf-8") as f:
        f.write(text)
61
+
# Run the full pipeline in one click: fetch → embed → ask → offer download
if st.button("Get AI Insights") and website_url:
    docsearch = build_embeddings(website_url)

    # AI Benefits Analysis
    st.subheader("💬 AI Benefits Analysis")

    # Retrieve the most relevant chunks and run the QA chain over them.
    chain = load_qa_chain(ChatOpenAI(model="gpt-4o"), chain_type="stuff")
    docs = docsearch.similarity_search(fixed_question)
    response = chain.run(input_documents=docs, question=fixed_question)

    st.write("**AI Insights:**", response)

    # Save the AI insights as a .txt file.
    # Close the handle immediately: the original left it open, leaking a
    # file descriptor and making the re-open below fail on Windows.
    txt_file = tempfile.NamedTemporaryFile(delete=False, suffix=".txt")
    txt_file.close()
    save_text_to_file(response, txt_file.name)

    # Provide a download link for the generated text file.
    with open(txt_file.name, "rb") as f:
        st.download_button(
            label="Download AI Insights as Text File",
            data=f,
            file_name="ai_benefits_analysis_report.txt",
            mime="text/plain"
        )