Vlad Bastina committed on
Commit
9e71d08
·
0 Parent(s):

first commit

Browse files
.gitattributes ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ *.pdf filter=lfs diff=lfs merge=lfs -text
2
+ *.PNG filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1 @@
 
 
1
+ .streamlit/secrets.toml
.streamlit/config.toml ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ [theme]
2
+ base="light"
__pycache__/gemini_call.cpython-312.pyc ADDED
Binary file (4.27 kB). View file
 
app.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ from pypdf import PdfReader
3
+ import os
4
+ from gemini_call import GeminiQanA
5
+
6
if __name__ == "__main__":
    # Expose the Streamlit-managed secret as an env var so GeminiQanA
    # (which reads GOOGLE_API_KEY via os.getenv) can pick it up.
    os.environ['GOOGLE_API_KEY'] = st.secrets['GOOGLE_API_KEY']
    st.set_page_config(page_title=None, page_icon=None, layout="centered", initial_sidebar_state="expanded", menu_items=None)
    # Streamlit UI
    st.title("📄 AI PDF Chatbot")
    st.sidebar.image("zega_logo.PNG", width=300)

    text_from_pdf: str = ""
    # Only created once a PDF has been uploaded; guarded below so that
    # clicking "Ask AI" without a document no longer raises NameError.
    chatbot = None

    # File uploader
    uploaded_file = st.file_uploader("Upload a PDF (Max 50 pages)", type=["pdf"])
    if uploaded_file:
        pdf_document = PdfReader(uploaded_file)
        # Collect page texts and join once instead of += in a loop.
        page_texts = []
        for page in pdf_document.pages:
            # extract_text() can yield an empty result on image-only pages;
            # guard so .strip() never runs on None.
            text = page.extract_text() or ""
            page_texts.append(text.strip().replace("\n", " "))
        text_from_pdf = "".join(page_texts)

        chatbot = GeminiQanA(text_from_pdf)
        st.success("PDF uploaded and processed!")
    else:
        text_from_pdf = ""
        # Clear any previous conversation when the document is removed.
        if "messages" in st.session_state:
            st.session_state.messages = []

    # Chat interface
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay the conversation so far.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    question = st.text_input("Ask a question about the document:")
    if st.button("Ask AI") and question:
        if chatbot is None:
            # No document uploaded yet — tell the user instead of crashing.
            st.warning("Please upload a PDF before asking a question.")
        else:
            st.session_state.messages.append({"role": "user", "content": question})
            with st.spinner("Fetching response from assistant..."):
                answer = chatbot.answer_question(question)
            st.session_state.messages.append({"role": "assistant", "content": answer})

            with st.chat_message("user"):
                st.markdown(question)
            with st.chat_message("assistant"):
                st.markdown(answer)
gemini_call.py ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import google.generativeai as genai
2
+ import os
3
+
4
class GeminiQanA:
    """Question answering over a provided document using Google's Gemini API.

    The document text is baked into the model's system instruction at
    construction time; each call to ``answer_question`` then sends only
    the user's question.
    """

    def __init__(self, text: str = ''):
        """Initializes the Gemini question answerer by loading the model.

        Args:
            text: The document text the model should answer questions about.
                May be empty, in which case the model has no reference text.

        Raises:
            ValueError: If the GOOGLE_API_KEY environment variable is unset.
        """
        self.api_key = os.getenv("GOOGLE_API_KEY")
        # Fail fast with a clear message rather than an opaque error from
        # the API client on the first request.
        if not self.api_key:
            raise ValueError("GOOGLE_API_KEY environment variable is not set.")
        genai.configure(api_key=self.api_key)
        self.model = self._load_model(text)

    def _load_model(self, text: str):
        """Loads the generative AI model with a system instruction.

        The system instruction embeds *text* as the reference document so
        every subsequent question is answered against it.
        """
        final_prompt = f'''Task:
You are an advanced AI system designed to answer questions based on a provided document. Your goal is to analyze the given text thoroughly and provide the most accurate, well-structured, and insightful response.

Instructions:

1. Carefully Read and Analyze the Document:

-Take your time to process the entire document before answering.
-Identify key details, themes, facts, and context relevant to the question.

2. Understand the Question Fully:

-Determine what specific information is being asked.
-If multiple interpretations are possible, consider the most relevant one based on the document.

3. Formulate a Comprehensive Answer:

-Provide a clear, concise, and informative response.
-Support your answer with direct references or logical inferences from the document.
-Maintain a structured response format for clarity.

4. Ensure Accuracy and Depth:

-Avoid assumptions beyond the given document.
-If the document does not contain the necessary information, state that explicitly.
-Provide additional context if it enhances the response.

5. Maintain a Professional and Neutral Tone:

-Present information objectively without personal bias.
-Keep the response clear and reader-friendly.

Example Format:

Document Provided:
"The solar system consists of the Sun and the celestial objects bound to it by gravity, including eight planets, moons, asteroids, and comets. The Earth is the third planet from the Sun and supports life due to its atmosphere, water, and suitable climate."

User Question:
"Why is Earth suitable for life?"

AI Response:
"Earth is suitable for life primarily due to three key factors: its atmosphere, presence of water, and favorable climate. The atmosphere contains oxygen and protects against harmful radiation, water is essential for biological processes, and the climate remains within a range that supports diverse ecosystems. These conditions make Earth uniquely habitable compared to other planets in the solar system."

Provided text for reference:
{text}

Question:
'''

        return genai.GenerativeModel("gemini-1.5-pro", system_instruction=final_prompt)

    def answer_question(self, question: str) -> str:
        """Sends *question* to Gemini and returns the model's answer text.

        Args:
            question: The user's question about the reference document.

        Returns:
            The text of the model's response.
        """
        response = self.model.generate_content(question)
        return response.text
68
+
69
if __name__ == "__main__":
    # Manual smoke test: query the model without any reference document.
    qa = GeminiQanA()
    reply = qa.answer_question("Hello,how are you?")
    print(reply)
requirements.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ protobuf==5.29.3
2
+ pypdf==5.3.0
3
+ streamlit==1.42.0
4
+ google-generativeai==0.8.4
zega_logo.PNG ADDED

Git LFS Details

  • SHA256: ab929904c4eadf8cc1aadc9a797a469f20d31a5636770f1db2789f2096033558
  • Pointer size: 131 Bytes
  • Size of remote file: 116 kB