gmustafa413 committed on
Commit
58bc589
·
verified ·
1 Parent(s): adc7987

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +41 -56
app.py CHANGED
@@ -1,29 +1,29 @@
1
- import os
2
  import gradio as gr
3
  import numpy as np
4
  import google.generativeai as genai
5
  import faiss
6
  from sentence_transformers import SentenceTransformer
7
  from datasets import load_dataset
8
- from dotenv import load_dotenv
9
  import warnings
10
 
11
  # Suppress warnings
12
  warnings.filterwarnings("ignore")
13
 
14
- # Load environment variables
15
- load_dotenv()
16
-
17
- # Configuration
18
  MODEL_NAME = "all-MiniLM-L6-v2"
19
  GENAI_MODEL = "gemini-pro"
20
  DATASET_NAME = "midrees2806/7K_Dataset"
21
  CHUNK_SIZE = 500
22
  TOP_K = 3
23
 
24
- # Workaround for huggingface_hub compatibility
25
- import huggingface_hub
26
- huggingface_hub.__version__ = "0.13.4"
 
 
 
 
27
 
28
  class GeminiRAGSystem:
29
  def __init__(self):
@@ -31,7 +31,6 @@ class GeminiRAGSystem:
31
  self.chunks = []
32
  self.dataset_loaded = False
33
  self.loading_error = None
34
- self.gemini_api_key = "AIzaSyASrFvE3gFPigihza0JTuALzZmBx0Kc3d0"
35
 
36
  # Initialize embedding model
37
  try:
@@ -39,26 +38,20 @@ class GeminiRAGSystem:
39
  except Exception as e:
40
  raise RuntimeError(f"Failed to initialize embedding model: {str(e)}")
41
 
42
- # Configure Gemini
43
- if self.gemini_api_key:
44
- genai.configure(api_key=self.gemini_api_key)
45
-
46
  # Load dataset
47
  self.load_dataset()
48
 
49
  def load_dataset(self):
50
- """Load dataset synchronously with error handling"""
51
  try:
52
- # Load dataset directly
53
  dataset = load_dataset(
54
  DATASET_NAME,
55
  split='train',
56
  download_mode="force_redownload"
57
  )
58
 
59
- # Process dataset
60
  if 'text' in dataset.features:
61
- self.chunks = dataset['text'][:1000] # Limit to first 1000 entries
62
  elif 'context' in dataset.features:
63
  self.chunks = dataset['context'][:1000]
64
  else:
@@ -96,13 +89,11 @@ class GeminiRAGSystem:
96
  return ""
97
 
98
  def generate_response(self, query: str) -> str:
99
- """Generate response with robust error handling"""
100
  if not self.dataset_loaded:
101
  if self.loading_error:
102
  return f"⚠️ Dataset loading failed: {self.loading_error}"
103
- return "⚠️ System initialization in progress..."
104
- if not self.gemini_api_key:
105
- return "🔑 API key not configured"
106
 
107
  context = self.get_relevant_context(query)
108
  if not context:
@@ -116,61 +107,55 @@ class GeminiRAGSystem:
116
 
117
  try:
118
  model = genai.GenerativeModel(GENAI_MODEL)
119
- response = model.generate_content(prompt)
120
- return response.text
 
 
 
 
 
 
 
 
121
  except Exception as e:
122
  return f"⚠️ API Error: {str(e)}"
123
 
124
  # Initialize system
125
  try:
126
  rag_system = GeminiRAGSystem()
 
127
  except Exception as e:
128
- raise RuntimeError(f"System initialization failed: {str(e)}")
 
129
 
130
  # Create interface
131
- with gr.Blocks(title="UE Chatbot") as app:
132
- gr.Markdown("# UE 24 Hour Service")
133
 
134
  with gr.Row():
135
- chatbot = gr.Chatbot(
136
- height=500,
137
- bubble_full_width=False
138
- )
139
 
140
  with gr.Row():
141
- query = gr.Textbox(
142
- label="Your question",
143
- placeholder="Ask your question...",
144
- scale=4
145
- )
146
- submit_btn = gr.Button("Submit", variant="primary", scale=1)
147
 
148
  with gr.Row():
149
- clear_btn = gr.Button("Clear Chat", variant="secondary")
150
-
151
- # Status indicator
152
- status = gr.Textbox(
153
- label="System Status",
154
- value="Ready" if rag_system.dataset_loaded else f"Initializing... {rag_system.loading_error or ''}",
155
- interactive=False
156
- )
157
 
158
- # Event handlers
159
  def respond(message, chat_history):
160
- try:
161
- response = rag_system.generate_response(message)
162
- chat_history.append((message, response))
163
- return "", chat_history
164
- except Exception as e:
165
- chat_history.append((message, f"Error: {str(e)}"))
166
- return "", chat_history
167
 
168
  def clear_chat():
169
  return []
170
 
171
- submit_btn.click(respond, [query, chatbot], [query, chatbot])
172
- query.submit(respond, [query, chatbot], [query, chatbot])
173
  clear_btn.click(clear_chat, outputs=chatbot)
174
 
175
  if __name__ == "__main__":
176
- app.launch(share=True)
 
 
1
  import gradio as gr
2
  import numpy as np
3
  import google.generativeai as genai
4
  import faiss
5
  from sentence_transformers import SentenceTransformer
6
  from datasets import load_dataset
 
7
  import warnings
8
 
9
  # Suppress warnings
10
  warnings.filterwarnings("ignore")
11
 
12
+ # Configuration - PUT YOUR API KEY HERE
13
+ GEMINI_API_KEY = "AIzaSyASrFvE3gFPigihza0JTuALzZmBx0Kc3d0" # ⚠️ REPLACE WITH YOUR KEY
 
 
14
  MODEL_NAME = "all-MiniLM-L6-v2"
15
  GENAI_MODEL = "gemini-pro"
16
  DATASET_NAME = "midrees2806/7K_Dataset"
17
  CHUNK_SIZE = 500
18
  TOP_K = 3
19
 
20
+ # Initialize Gemini
21
+ genai.configure(
22
+ api_key=GEMINI_API_KEY,
23
+ client_options={
24
+ 'api_endpoint': "https://generativelanguage.googleapis.com/v1beta"
25
+ }
26
+ )
27
 
28
  class GeminiRAGSystem:
29
  def __init__(self):
 
31
  self.chunks = []
32
  self.dataset_loaded = False
33
  self.loading_error = None
 
34
 
35
  # Initialize embedding model
36
  try:
 
38
  except Exception as e:
39
  raise RuntimeError(f"Failed to initialize embedding model: {str(e)}")
40
 
 
 
 
 
41
  # Load dataset
42
  self.load_dataset()
43
 
44
  def load_dataset(self):
45
+ """Load dataset synchronously"""
46
  try:
 
47
  dataset = load_dataset(
48
  DATASET_NAME,
49
  split='train',
50
  download_mode="force_redownload"
51
  )
52
 
 
53
  if 'text' in dataset.features:
54
+ self.chunks = dataset['text'][:1000] # Use first 1000 entries
55
  elif 'context' in dataset.features:
56
  self.chunks = dataset['context'][:1000]
57
  else:
 
89
  return ""
90
 
91
  def generate_response(self, query: str) -> str:
92
+ """Generate response with error handling"""
93
  if not self.dataset_loaded:
94
  if self.loading_error:
95
  return f"⚠️ Dataset loading failed: {self.loading_error}"
96
+ return "⚠️ System initializing..."
 
 
97
 
98
  context = self.get_relevant_context(query)
99
  if not context:
 
107
 
108
  try:
109
  model = genai.GenerativeModel(GENAI_MODEL)
110
+ response = model.generate_content(
111
+ prompt,
112
+ generation_config=genai.types.GenerationConfig(
113
+ temperature=0.3
114
+ )
115
+ )
116
+
117
+ if response.candidates and response.candidates[0].content.parts:
118
+ return response.candidates[0].content.parts[0].text
119
+ return "⚠️ No response from API"
120
  except Exception as e:
121
  return f"⚠️ API Error: {str(e)}"
122
 
123
  # Initialize system
124
  try:
125
  rag_system = GeminiRAGSystem()
126
+ init_status = "✅ System ready" if rag_system.dataset_loaded else f"⚠️ Initializing... {rag_system.loading_error or ''}"
127
  except Exception as e:
128
+ init_status = f" Initialization failed: {str(e)}"
129
+ rag_system = None
130
 
131
  # Create interface
132
+ with gr.Blocks(title="Document Chatbot") as app:
133
+ gr.Markdown("# Document Chatbot with Gemini")
134
 
135
  with gr.Row():
136
+ chatbot = gr.Chatbot(height=500)
 
 
 
137
 
138
  with gr.Row():
139
+ query = gr.Textbox(label="Your question", placeholder="Ask about the documents...")
 
 
 
 
 
140
 
141
  with gr.Row():
142
+ submit_btn = gr.Button("Submit", variant="primary")
143
+ clear_btn = gr.Button("Clear", variant="secondary")
144
+
145
+ status = gr.Textbox(label="Status", value=init_status)
 
 
 
 
146
 
 
147
  def respond(message, chat_history):
148
+ if not rag_system:
149
+ return chat_history + [(message, "System initialization failed")]
150
+ response = rag_system.generate_response(message)
151
+ return chat_history + [(message, response)]
 
 
 
152
 
153
  def clear_chat():
154
  return []
155
 
156
+ submit_btn.click(respond, [query, chatbot], [chatbot])
157
+ query.submit(respond, [query, chatbot], [chatbot])
158
  clear_btn.click(clear_chat, outputs=chatbot)
159
 
160
  if __name__ == "__main__":
161
+ app.launch()