GiantAnalytics committed on
Commit
f5fe1ad
·
verified ·
1 Parent(s): 2e60db7

Creating app.py

Browse files
Files changed (1) hide show
  1. app.py +69 -0
app.py ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import tempfile
3
+ import os
4
+ import shutil
5
+ from langchain.embeddings.openai import OpenAIEmbeddings
6
+ from langchain.text_splitter import CharacterTextSplitter
7
+ from langchain.vectorstores import FAISS
8
+ from langchain_community.document_loaders import WebBaseLoader
9
+ from langchain.chains.question_answering import load_qa_chain
10
+ from langchain_openai import ChatOpenAI
11
+
12
+ # Hardcoded OpenAI API Key
13
+ OPENAI_API_KEY = "your-openai-api-key" # Replace with your actual API key
14
+ os.environ['OPENAI_API_KEY'] = OPENAI_API_KEY
15
+
16
+ # Streamlit UI
17
+ st.title("🔍 AI Benefits Analysis for Any Company")
18
+
19
+ # User input: Only Website URL (with placeholder)
20
+ website_url = st.text_input("Enter Website URL", placeholder="e.g., https://www.companywebsite.com")
21
+
22
+ # Fixed question for AI analysis
23
+ fixed_question = (
24
+ "Analyze how Artificial Intelligence (AI) can benefit this company based on its industry, "
25
+ "key operations, and challenges. Provide insights on AI-driven improvements in customer experience, "
26
+ "automation, sales, risk management, decision-making, and innovation. Include an AI implementation roadmap, "
27
+ "challenges, solutions, and future opportunities with real-world examples."
28
+ )
29
+
30
+ # Temporary directory to store FAISS index
31
+ temp_dir = tempfile.gettempdir()
32
+ faiss_db_path = os.path.join(temp_dir, "faiss_index_dir")
33
+
34
+ # Function to fetch and process website data
35
+ def build_embeddings(url):
36
+ st.info("Fetching and processing website data...")
37
+
38
+ # Load website data
39
+ loader = WebBaseLoader(url)
40
+ raw_text = loader.load()
41
+
42
+ # Chunking the fetched text
43
+ text_splitter = CharacterTextSplitter(separator='\n', chunk_size=500, chunk_overlap=50)
44
+ docs = text_splitter.split_documents(raw_text)
45
+
46
+ # Creating embeddings
47
+ embeddings = OpenAIEmbeddings()
48
+ docsearch = FAISS.from_documents(docs, embeddings)
49
+
50
+ # Save FAISS index
51
+ if os.path.exists(faiss_db_path):
52
+ shutil.rmtree(faiss_db_path)
53
+ os.makedirs(faiss_db_path)
54
+ docsearch.save_local(faiss_db_path)
55
+
56
+ return docsearch
57
+
58
+ # Run everything in one click
59
+ if st.button("Get AI Insights") and website_url:
60
+ docsearch = build_embeddings(website_url)
61
+
62
+ # AI Benefits Analysis
63
+ st.subheader("💬 AI Benefits Analysis")
64
+
65
+ chain = load_qa_chain(ChatOpenAI(model="gpt-4o"), chain_type="stuff")
66
+ docs = docsearch.similarity_search(fixed_question)
67
+ response = chain.run(input_documents=docs, question=fixed_question)
68
+
69
+ st.write("**AI Insights:**", response)