Bryceeee committed on
Commit
8fbc2e2
Β·
verified Β·
1 Parent(s): 0cfa3a6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +104 -254
app.py CHANGED
@@ -1,276 +1,126 @@
1
  """
2
- Hugging Face Spaces Entry Point for CSRC Car Manual RAG System
3
- This is the entry point for Hugging Face Spaces deployment
4
  """
5
  import os
6
- import sys
7
- from pathlib import Path
8
-
9
- # Add the current directory to Python path for Spaces environment
10
- sys.path.insert(0, str(Path(__file__).parent))
11
-
12
  from openai import OpenAI
13
- from src.config import Config
14
- from src.vector_store import VectorStoreManager
15
- from src.rag_query import RAGQueryEngine
16
- from src.question_generator import QuestionGenerator
17
- from src.knowledge_graph import KnowledgeGraphGenerator
18
- from src.gradio_interface import GradioInterfaceBuilder
19
-
20
# Optional feature imports.
# Each feature is guarded by try/except ImportError so the app still boots
# (with reduced functionality) when a module is missing from the deployment.

# Import personalized learning if available
try:
    from modules.personalized_learning import UserProfilingSystem, LearningPathGenerator, AdaptiveLearningEngine
    PERSONALIZED_LEARNING_AVAILABLE = True
except ImportError:
    PERSONALIZED_LEARNING_AVAILABLE = False
    print("⚠️ Personalized learning modules not available")

# Import proactive learning if available
try:
    from modules.proactive_learning import ProactiveLearningEngine
    PROACTIVE_LEARNING_AVAILABLE = True
except ImportError:
    PROACTIVE_LEARNING_AVAILABLE = False
    print("⚠️ Proactive learning modules not available")

# Import scenario contextualization if available
try:
    from modules.scenario_contextualization.database.scenario_database import ScenarioDatabase
    from modules.scenario_contextualization.integration.feature_extractor import ADASFeatureExtractor
    from modules.scenario_contextualization.retrieval.scenario_retriever import ScenarioRetriever
    from modules.scenario_contextualization.formatting.constructive_formatter import ConstructiveFormatter
    from modules.scenario_contextualization.integration.enhanced_rag_engine import EnhancedRAGEngine
    SCENARIO_CONTEXTUALIZATION_AVAILABLE = True
except ImportError as e:
    SCENARIO_CONTEXTUALIZATION_AVAILABLE = False
    # Unlike the guards above, the exception detail is printed here because
    # this feature spans several submodules and the failing one matters.
    print(f"⚠️ Scenario contextualization modules not available: {e}")
47
 
48
-
49
def initialize_system(config: Config) -> dict:
    """Initialize the RAG system components.

    Builds (in dependency order): the OpenAI client, the vector store
    (reused if an id is saved, otherwise created and populated from the
    configured PDFs), the core RAG/question/knowledge-graph engines, and
    the three optional subsystems gated by the module-level availability
    flags. Optional subsystems fail soft: errors are printed and the
    corresponding entry stays None.

    Args:
        config: Project configuration (API key, paths, model, topics).

    Returns:
        dict mapping component names to instances; optional components
        may map to None.

    Raises:
        ValueError: no API key configured, or no PDF files found.
        RuntimeError: vector store creation or every file upload failed.
    """
    # Initialize OpenAI client — fail fast with setup instructions if the
    # secret is missing, since nothing below works without it.
    if not config.openai_api_key:
        raise ValueError(
            "OPENAI_API_KEY not found! Please set it in Hugging Face Spaces Secrets. "
            "Go to Settings > Secrets and add OPENAI_API_KEY"
        )

    client = OpenAI(api_key=config.openai_api_key)

    # Initialize vector store manager
    vector_store_manager = VectorStoreManager(client)

    # Get or create vector store. A saved id means a previous run already
    # uploaded the manuals; only build a new store when none is recorded.
    vector_store_id = config.get_vector_store_id()

    if not vector_store_id:
        print("📦 Creating new vector store...")
        pdf_files = config.get_pdf_files()

        if not pdf_files:
            raise ValueError(f"No PDF files found in {config.car_manual_dir}")

        vector_store_details = vector_store_manager.create_vector_store(config.vector_store_name)
        if not vector_store_details:
            raise RuntimeError("Failed to create vector store")

        vector_store_id = vector_store_details["id"]
        # Persist the id so subsequent startups reuse this store.
        config.save_vector_store_id(vector_store_id, config.vector_store_name)

        # Upload files — tolerate partial failures, but not a total one.
        upload_stats = vector_store_manager.upload_pdf_files(pdf_files, vector_store_id)
        if upload_stats["successful_uploads"] == 0:
            raise RuntimeError("Failed to upload any files")
    else:
        print(f"✅ Using existing vector store: {vector_store_id}")

    # Initialize RAG query engine
    rag_engine = RAGQueryEngine(client, vector_store_id, config.model)

    # Initialize question generator
    question_generator = QuestionGenerator(client, rag_engine)

    # Initialize knowledge graph generator
    knowledge_graph = KnowledgeGraphGenerator(client, vector_store_id, str(config.output_dir))

    # Initialize personalized learning (if available). Defaults stay None
    # so the returned dict always has every key.
    user_profiling = None
    learning_path_generator = None
    adaptive_engine = None

    if PERSONALIZED_LEARNING_AVAILABLE:
        try:
            user_profiling = UserProfilingSystem()
            learning_path_generator = LearningPathGenerator(user_profiling, config.available_topics)
            adaptive_engine = AdaptiveLearningEngine(user_profiling, learning_path_generator)
            print("✅ Personalized Learning System initialized!")
        except Exception as e:
            print(f"⚠️ Error initializing Personalized Learning System: {e}")

    # Initialize proactive learning (if available) — also requires that
    # personalized learning came up, since it builds on user_profiling.
    proactive_engine = None
    if PROACTIVE_LEARNING_AVAILABLE and user_profiling:
        try:
            proactive_engine = ProactiveLearningEngine(
                client, rag_engine, user_profiling, adaptive_engine, config.available_topics
            )
            print("✅ Proactive Learning Assistance initialized!")
        except Exception as e:
            print(f"⚠️ Error initializing Proactive Learning Assistance: {e}")

    # Initialize scenario contextualization (if available) — wraps the base
    # RAG engine with scenario retrieval and constructive formatting.
    enhanced_rag_engine = None
    if SCENARIO_CONTEXTUALIZATION_AVAILABLE:
        try:
            scenario_database = ScenarioDatabase()
            feature_extractor = ADASFeatureExtractor(use_llm=False, client=client)
            scenario_retriever = ScenarioRetriever(
                scenario_database=scenario_database,
                scenario_vector_store_id=None,
                client=client
            )
            formatter = ConstructiveFormatter()
            enhanced_rag_engine = EnhancedRAGEngine(
                base_rag_engine=rag_engine,
                scenario_retriever=scenario_retriever,
                feature_extractor=feature_extractor,
                formatter=formatter
            )
            print("✅ Scenario Contextualization initialized!")
        except Exception as e:
            print(f"⚠️ Error initializing Scenario Contextualization: {e}")
            import traceback
            traceback.print_exc()

    return {
        "client": client,
        "vector_store_manager": vector_store_manager,
        "rag_engine": rag_engine,
        "question_generator": question_generator,
        "knowledge_graph": knowledge_graph,
        "user_profiling": user_profiling,
        "learning_path_generator": learning_path_generator,
        "adaptive_engine": adaptive_engine,
        "proactive_engine": proactive_engine,
        "enhanced_rag_engine": enhanced_rag_engine,
        "config": config
    }
158
 
159
 
160
def _build_error_interface(error_msg: str, description: str):
    """Build a minimal fallback Gradio interface that displays *error_msg*.

    Extracted because both failure paths in create_app previously built
    the identical interface inline. gradio is imported lazily so it is
    only required when a fallback is actually rendered.
    """
    import gradio as gr

    def error_display():
        return error_msg

    return gr.Interface(
        fn=error_display,
        inputs=None,
        outputs=gr.Markdown(),
        title="CSRC Car Manual RAG System",
        description=description
    )


def create_app():
    """Create and return the Gradio app for Hugging Face Spaces.

    Never raises: if system initialization or interface construction
    fails, a fallback error interface is returned instead so the Space
    still starts and shows diagnostics to the operator.
    """
    print("=" * 60)
    print("🚗 CSRC Car Manual RAG System - Hugging Face Spaces")
    print("=" * 60)

    # Load configuration
    config = Config()

    # Initialize system; on failure, surface the error in the UI.
    try:
        components = initialize_system(config)
    except Exception as e:
        print(f"❌ Error initializing system: {e}")

        error_msg = f"""
        # ❌ Initialization Error

        **Error:** {str(e)}

        **Possible solutions:**
        1. Check if OPENAI_API_KEY is set in Spaces Secrets (Settings > Secrets)
        2. Ensure PDF files are in the `car_manual/` directory
        3. Check the logs for more details
        """
        return _build_error_interface(
            error_msg,
            "An error occurred during initialization. Please check the logs."
        )

    # Build Gradio interface
    print("\n🌐 Building Gradio interface...")
    try:
        interface_builder = GradioInterfaceBuilder(
            rag_engine=components["rag_engine"],
            question_generator=components["question_generator"],
            knowledge_graph=components["knowledge_graph"],
            config=components["config"],
            user_profiling=components["user_profiling"],
            adaptive_engine=components["adaptive_engine"],
            proactive_engine=components["proactive_engine"]
        )

        print("📦 Creating interface components...")
        demo = interface_builder.create_interface()
        print("✅ Gradio interface created successfully!")
        return demo
    except Exception as e:
        print(f"❌ Error building Gradio interface: {e}")
        import traceback
        traceback.print_exc()

        # Include the full traceback in the UI for this path, since
        # interface-construction failures are usually version issues.
        error_msg = f"""
        # ❌ Interface Building Error

        **Error:** {str(e)}

        **Possible solutions:**
        1. Check Gradio version compatibility
        2. Check the logs for detailed error information
        3. Verify all dependencies are installed correctly

        **Traceback:**
        ```
        {traceback.format_exc()}
        ```
        """
        return _build_error_interface(
            error_msg,
            "An error occurred while building the interface. Please check the logs."
        )
 
 
 
 
 
 
 
 
 
 
 
251
 
252
 
253
# Create the app for Hugging Face Spaces
# Spaces will automatically detect Gradio and run this
# Use module-level variable to prevent multiple initializations
if 'demo' not in globals():
    try:
        print("🔄 Initializing app...")
        demo = create_app()
        print("✅ App initialized successfully!")
    except Exception as e:
        print(f"❌ Error creating app: {e}")
        import traceback
        traceback.print_exc()
        # Create a minimal error interface.
        # BUG FIX: the previous code used `lambda: f"Error: {str(e)}..."`,
        # but Python deletes the except-clause variable `e` when the block
        # ends, so the lambda raised NameError when Gradio later called it.
        # Build the message eagerly and bind it as a default argument.
        import gradio as gr
        error_text = f"Error: {str(e)}\n\nPlease check the logs for details."
        demo = gr.Interface(
            fn=lambda msg=error_text: msg,
            inputs=None,
            outputs=gr.Textbox(),
            title="CSRC Car Manual RAG System - Error",
            description="An error occurred during initialization. Please check the logs."
        )
else:
    print("ℹ️ App already initialized, reusing existing instance")
276
 
 
 
 
 
1
  """
2
+ Minimal version of app.py for Hugging Face Spaces
3
+ Use this for testing if the full version doesn't work
4
  """
5
  import os
6
+ import gradio as gr
 
 
 
 
 
7
  from openai import OpenAI
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
 
9
# Simple configuration
# OPENAI_API_KEY is required at query time; set it in Spaces Secrets.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
# Falls back to a baked-in vector store id when the env var is absent.
VECTOR_STORE_ID = os.getenv("VECTOR_STORE_ID", "vs_69022a9edd74819199bf9a34a83e877b")
# Chat model used to answer manual questions.
MODEL = "gpt-4o-mini"
 
 
 
13
 
 
 
 
 
 
 
 
 
 
 
 
14
 
15
def query_rag(question: str) -> str:
    """Answer *question* against the car-manual vector store.

    Creates a throwaway OpenAI Assistant wired to the configured vector
    store, runs the question through a fresh thread, and returns the
    answer text. Any failure is returned as a human-readable "❌ ..."
    string rather than raised, so the Gradio UI always has something
    to display.

    Args:
        question: Free-form user question about the car manual.

    Returns:
        The assistant's answer, or an error message string.
    """
    if not OPENAI_API_KEY:
        return "❌ Error: OPENAI_API_KEY not set in Spaces Secrets"

    if not VECTOR_STORE_ID:
        return "❌ Error: VECTOR_STORE_ID not set"

    try:
        client = OpenAI(api_key=OPENAI_API_KEY)

        # Create assistant with file search over the manual's vector store.
        assistant = client.beta.assistants.create(
            name="Car Manual Assistant",
            instructions="You are a helpful assistant that answers questions about car manuals.",
            model=MODEL,
            tools=[{"type": "file_search"}],
            tool_resources={"file_search": {"vector_store_ids": [VECTOR_STORE_ID]}}
        )

        try:
            # Create thread, post the question, and poll the run to completion.
            thread = client.beta.threads.create()

            client.beta.threads.messages.create(
                thread_id=thread.id,
                role="user",
                content=question
            )

            run = client.beta.threads.runs.create_and_poll(
                thread_id=thread.id,
                assistant_id=assistant.id
            )

            if run.status == 'completed':
                # Messages are listed newest-first; index 0 is the reply.
                messages = client.beta.threads.messages.list(thread_id=thread.id)
                return messages.data[0].content[0].text.value
            return f"❌ Query failed with status: {run.status}"
        finally:
            # BUG FIX: cleanup previously ran only on the 'completed' path,
            # leaking one assistant per failed or errored query. Always
            # delete the throwaway assistant, whatever the run outcome.
            client.beta.assistants.delete(assistant.id)

    except Exception as e:
        return f"❌ Error: {str(e)}"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
62
 
63
 
64
# Create Gradio interface
def create_interface():
    """Assemble and return the minimal Blocks UI (not launched here)."""

    with gr.Blocks(title="CSRC Car Manual RAG") as demo:
        # Page header.
        gr.Markdown("""
        # 🚗 CSRC Car Manual RAG System
        ## Minimal Version for Testing
        
        This is a simplified version for testing Hugging Face Spaces deployment.
        """)

        # Two-column layout: question entry on the left, answer on the right.
        with gr.Row():
            with gr.Column():
                question_box = gr.Textbox(
                    label="Your Question",
                    placeholder="Ask anything about the car manual...",
                    lines=3
                )
                ask_button = gr.Button("Submit", variant="primary")

            with gr.Column():
                answer_box = gr.Textbox(
                    label="Answer",
                    lines=10
                )

        # Clickable sample questions that pre-fill the question box.
        gr.Examples(
            examples=[
                ["What are the main features of the adaptive cruise control?"],
                ["How do I use the parking assist system?"],
                ["What should I check during regular maintenance?"]
            ],
            inputs=question_box
        )

        # Wire the button to the RAG backend.
        ask_button.click(
            fn=query_rag,
            inputs=question_box,
            outputs=answer_box
        )

        # Footer showing the effective configuration at build time.
        gr.Markdown(f"""
        ---
        ### Status
        - ✅ Using vector store: `{VECTOR_STORE_ID}`
        - 🔑 API Key: {'✅ Set' if OPENAI_API_KEY else '❌ Not Set'}
        """)

    return demo
117
 
118
 
119
# Create and launch
# The demo is built at import time because Hugging Face Spaces imports this
# module and serves the module-level `demo` object directly.
print("🔄 Creating minimal app...")
demo = create_interface()
print("✅ App created!")


if __name__ == "__main__":
    # Local/direct execution: start the Gradio server ourselves.
    print("🚀 Launching app...")
    demo.launch()