Chia Woon Yap committed on
Commit
db126ec
ยท
verified ยท
1 Parent(s): 0463f3f

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +523 -0
app.py ADDED
@@ -0,0 +1,523 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
import pandas as pd
import numpy as np
import joblib
import plotly.graph_objects as go
import plotly.express as px
from huggingface_hub import hf_hub_download
import os
from pathlib import Path
import warnings
warnings.filterwarnings('ignore')
import re
from groq import Groq

# Initialize Groq client.
# SECURITY FIX: the key is read from the environment ONLY. The previous code
# shipped a hard-coded fallback API key in a public repo; that key is exposed
# and must be revoked. Never commit secrets -- configure GROQ_API_KEY as a
# Space/host secret instead.
groq_api_key = os.getenv("GROQ_API_KEY")
if groq_api_key:
    client = Groq(api_key=groq_api_key)
else:
    print("โš ๏ธ GROQ_API_KEY not found. Chat functionality will be limited.")
    client = None

# Try to import xgboost, but fall back to scikit-learn if it is missing.
try:
    import xgboost as xgb
    XGB_AVAILABLE = True
    print("โœ… XGBoost is available")
except ImportError:
    XGB_AVAILABLE = False
    print("โš ๏ธ XGBoost not available, using scikit-learn models")
    from sklearn.ensemble import RandomForestRegressor
32
+
33
def create_dummy_model(model_type):
    """Build a stand-in price model exposing a scikit-learn-like interface.

    Used as a fallback when the real trained model cannot be downloaded or
    deserialized, so the app still returns plausible (heuristic, not learned)
    prices instead of crashing.

    Args:
        model_type: Name of the model flavour being substituted (e.g. "xgboost").

    Returns:
        An object with ``predict``, ``get_params`` and ``set_params`` methods
        plus the ``n_features_in_``/``feature_names_in_`` attributes that
        sklearn-style tooling may inspect.
    """
    class RealisticDummyModel:
        def __init__(self, model_type):
            self.model_type = model_type
            self.n_features_in_ = 9
            self.feature_names_in_ = [
                'floor_area_sqm', 'storey_level', 'flat_age', 'remaining_lease',
                'transaction_year', 'flat_type_encoded', 'town_encoded',
                'flat_model_encoded', 'dummy_feature'
            ]
            # Stub out the sklearn param API that joblib or other code may call.
            self.get_params = lambda deep=True: {}
            self.set_params = lambda **params: self

        def predict(self, X):
            """Heuristic price from a 1-D feature vector or a (1, n) 2-D array."""
            if isinstance(X, np.ndarray) and len(X.shape) == 2:
                X = X[0]  # Take first row if it's a 2D array

            floor_area = X[0]
            storey_level = X[1]
            flat_age = X[2]
            town_encoded = X[6]

            # Linear baseline: higher town codes get a higher per-sqm rate.
            base_price = floor_area * (4800 + town_encoded * 200)
            storey_bonus = storey_level * 2500
            age_discount = flat_age * 1800

            price = base_price + storey_bonus - age_discount + 35000
            if storey_level > 20: price += 15000   # high-floor premium
            if flat_age < 10: price += 20000       # near-new premium

            # Floor the output at S$300k, mirroring the sample-data generator.
            return np.array([max(300000, price)])

    # BUG FIX: the original returned RealisticDummyModel(model_type)() -- the
    # trailing call invoked the instance, raising TypeError (the class defines
    # no __call__). Return the instance itself.
    return RealisticDummyModel(model_type)
70
+
71
def safe_joblib_load(filepath):
    """Deserialize a joblib artifact, returning None instead of raising.

    The loaded object is validated to expose ``predict`` and is patched with
    no-op ``get_params``/``set_params`` when they are missing, so downstream
    code can treat it like a scikit-learn estimator.
    """
    try:
        estimator = joblib.load(filepath)
        print(f"โœ… Successfully loaded model from {filepath}")

        # A model without predict() is unusable for this app.
        if not hasattr(estimator, 'predict'):
            print("โŒ Loaded object doesn't have predict method")
            return None

        # Patch in the sklearn-style param accessors some callers expect.
        if not hasattr(estimator, 'get_params'):
            estimator.get_params = lambda deep=True: {}
        if not hasattr(estimator, 'set_params'):
            estimator.set_params = lambda **params: estimator

        return estimator
    except Exception as e:
        print(f"โŒ Error loading model from {filepath}: {e}")
        return None
93
+
94
def load_models():
    """Download and deserialize the trained models, falling back to dummies.

    Returns:
        Dict mapping model name -> estimator-like object. The 'xgboost' entry
        is always populated -- with a dummy model if download/load fails.
    """
    loaded = {}

    try:
        # Pull the serialized model straight from the Space's file tree.
        checkpoint = hf_hub_download(
            repo_id="Lesterchia174/HDB_Price_Predictor",
            filename="best_model_xgboost.joblib",
            repo_type="space"
        )
        loaded['xgboost'] = safe_joblib_load(checkpoint)
        if loaded['xgboost'] is None:
            # Deserialization failed -> substitute the heuristic dummy.
            print("โš ๏ธ Creating dummy model for XGBoost")
            loaded['xgboost'] = create_dummy_model("xgboost")
        else:
            print("โœ… XGBoost model loaded and validated")

    except Exception as e:
        # Download itself failed (offline, missing file, ...): use the dummy.
        print(f"โŒ Error downloading XGBoost model: {e}")
        print("โš ๏ธ Creating dummy model for XGBoost")
        loaded['xgboost'] = create_dummy_model("xgboost")

    return loaded
118
+
119
def load_data():
    """Fetch the processed resale-price CSV from the Hub.

    Returns:
        DataFrame of historical transactions; a small synthetic frame from
        create_sample_data() when the download or parse fails.
    """
    try:
        csv_path = hf_hub_download(
            repo_id="Lesterchia174/HDB_Price_Predictor",
            filename="base_hdb_resale_prices_2015Jan-2025Jun_processed.csv",
            repo_type="space"
        )
        frame = pd.read_csv(csv_path)
        print("โœ… Data loaded successfully via Hugging Face Hub")
        return frame
    except Exception as e:
        print(f"โŒ Error loading data: {e}")
        return create_sample_data()
133
+
134
def create_sample_data():
    """Generate a small synthetic HDB transaction set.

    Fallback for when the real CSV cannot be downloaded. A fixed seed makes
    repeated calls reproduce the exact same frame.

    Returns:
        DataFrame with 100 rows and the columns the charting code expects.
    """
    np.random.seed(42)
    towns = ['ANG MO KIO', 'BEDOK', 'TAMPINES', 'WOODLANDS', 'JURONG WEST']
    flat_types = ['4 ROOM', '5 ROOM', 'EXECUTIVE']
    flat_models = ['Improved', 'Model A', 'New Generation']

    rows = []
    for _ in range(100):
        # Draw attributes in a fixed order so the seeded output is stable.
        town = np.random.choice(towns)
        ftype = np.random.choice(flat_types)
        fmodel = np.random.choice(flat_models)
        area = np.random.randint(85, 150)
        level = np.random.randint(1, 25)
        years_old = np.random.randint(0, 40)

        # Simple linear pricing heuristic: per-sqm base plus town/storey/type
        # premiums minus an age discount, then noise, floored at S$300k.
        price = (
            area * 5000
            + towns.index(town) * 20000
            + level * 2000
            - years_old * 1500
            + flat_types.index(ftype) * 30000
        )
        price = max(300000, price + np.random.randint(-20000, 20000))

        rows.append({
            'town': town, 'flat_type': ftype, 'flat_model': fmodel,
            'floor_area_sqm': area, 'storey_level': level,
            'flat_age': years_old, 'resale_price': price
        })

    return pd.DataFrame(rows)
166
+
167
def preprocess_input(user_input, model_type='xgboost'):
    """Turn a raw user-input dict into the (1, 9) feature matrix the model expects.

    Unknown categories fall back to defaults: flat_type -> 4 ('4 ROOM'),
    town -> 0 ('SENGKANG'), flat_model -> 0 ('Model A').

    Args:
        user_input: Dict with keys 'floor_area_sqm', 'storey_level',
            'flat_age', 'flat_type', 'town', 'flat_model'.
        model_type: Kept for interface compatibility; not used here.

    Returns:
        numpy array of shape (1, 9) in the model's feature order.
    """
    # Ordinal codes for flat types (1 ROOM .. MULTI-GENERATION).
    flat_type_codes = {'1 ROOM': 1, '2 ROOM': 2, '3 ROOM': 3, '4 ROOM': 4,
                       '5 ROOM': 5, 'EXECUTIVE': 6, 'MULTI-GENERATION': 7}

    # Label codes for the ten supported towns.
    town_codes = {
        'SENGKANG': 0, 'WOODLANDS': 1, 'TAMPINES': 2, 'PUNGGOL': 3,
        'JURONG WEST': 4, 'YISHUN': 5, 'BEDOK': 6, 'HOUGANG': 7,
        'CHOA CHU KANG': 8, 'ANG MO KIO': 9
    }

    # Label codes for flat models.
    model_codes = {
        'Model A': 0, 'Improved': 1, 'New Generation': 2,
        'Standard': 3, 'Premium': 4
    }

    age = user_input['flat_age']
    features = [
        user_input['floor_area_sqm'],                       # 1: floor_area_sqm
        user_input['storey_level'],                         # 2: storey_level
        age,                                                # 3: flat_age
        99 - age,                                           # 4: remaining_lease (99-year HDB lease)
        2025,                                               # 5: transaction_year (fixed)
        flat_type_codes.get(user_input['flat_type'], 4),    # 6: flat type ordinal
        town_codes.get(user_input['town'], 0),              # 7: town code
        model_codes.get(user_input['flat_model'], 0),       # 8: flat model code
        1,                                                  # 9: placeholder feature
    ]

    return np.array([features])
200
+
201
def create_market_insights_chart(data, user_input, predicted_price):
    """Build a Plotly scatter of comparable transactions with the prediction overlaid.

    Args:
        data: DataFrame of transactions with 'flat_type', 'town', 'flat_model',
            'floor_area_sqm' and 'resale_price' columns (or None).
        user_input: Dict with at least 'flat_type', 'town', 'floor_area_sqm'.
        predicted_price: Predicted resale price (SGD) to mark on the chart.

    Returns:
        A plotly Figure, or None when no comparable data is available.
    """
    if data is None or len(data) == 0:
        return None

    # Prefer exact matches on both flat type and town...
    similar_properties = data[
        (data['flat_type'] == user_input['flat_type']) &
        (data['town'] == user_input['town'])
    ]

    # ...but widen to flat-type-only matches when the sample is tiny (<5 rows).
    if len(similar_properties) < 5:
        similar_properties = data[data['flat_type'] == user_input['flat_type']]

    if len(similar_properties) > 0:
        fig = px.scatter(similar_properties, x='floor_area_sqm', y='resale_price',
                         color='flat_model',
                         title=f"Market Position: {user_input['flat_type']} in {user_input['town']}",
                         labels={'floor_area_sqm': 'Floor Area (sqm)', 'resale_price': 'Resale Price (SGD)'})

        # Overlay the model prediction as a single red star marker.
        fig.add_trace(go.Scatter(x=[user_input['floor_area_sqm']], y=[predicted_price],
                                 mode='markers',
                                 marker=dict(symbol='star', size=20, color='red',
                                             line=dict(width=2, color='darkred')),
                                 name='XGBoost Prediction'))

        fig.update_layout(template="plotly_white", height=400, showlegend=True)
        return fig
    return None
230
+
231
def predict_hdb_price(town, flat_type, flat_model, floor_area_sqm, storey_level, flat_age):
    """Predict a resale price and build the UI outputs for the Gradio form.

    Reads the module-level globals ``models`` and ``data``.

    Args:
        town: Town name (one of the dropdown choices).
        flat_type: e.g. '4 ROOM'.
        flat_model: e.g. 'Improved'.
        floor_area_sqm: Floor area in square metres.
        storey_level: Floor level of the unit.
        flat_age: Age of the flat in years.

    Returns:
        Tuple of (formatted price string, plotly Figure or None, markdown insights).
    """
    user_input = {
        'town': town,
        'flat_type': flat_type,
        'flat_model': flat_model,
        'floor_area_sqm': floor_area_sqm,
        'storey_level': storey_level,
        'flat_age': flat_age
    }

    try:
        processed_input = preprocess_input(user_input)

        # Get prediction with error handling; clamp negatives to zero.
        try:
            predicted_price = max(0, float(models['xgboost'].predict(processed_input)[0]))
        except Exception as e:
            print(f"โŒ XGBoost prediction error: {e}")
            predicted_price = 400000  # Fallback value

        # Derive summary figures (99-year HDB lease assumption).
        remaining_lease = 99 - flat_age
        price_per_sqm = predicted_price / floor_area_sqm

        insights = f"""
**Property Summary:**
- Location: {town}
- Type: {flat_type}
- Model: {flat_model}
- Area: {floor_area_sqm} sqm
- Floor: Level {storey_level}
- Age: {flat_age} years
- Remaining Lease: {remaining_lease} years
- Price per sqm: ${price_per_sqm:,.0f}

**Predicted Price: ${predicted_price:,.0f}**

**Financing Eligibility:**
"""

        # Financing note keyed off remaining lease thresholds (60 / 20 years).
        if remaining_lease >= 60:
            insights += "โœ… Bank loan eligible"
        elif remaining_lease >= 20:
            insights += "โš ๏ธ HDB loan eligible only"
        else:
            insights += "โŒ Limited financing options"

        # Create the comparable-transactions chart (may be None).
        chart = create_market_insights_chart(data, user_input, predicted_price)

        return f"${predicted_price:,.0f}", chart, insights

    except Exception as e:
        error_msg = f"Prediction failed. Error: {str(e)}"
        print(error_msg)
        return "Error: Prediction failed", None, error_msg
288
+
289
def extract_parameters_from_query(query):
    """Extract HDB prediction parameters from a natural-language query via the LLM.

    Args:
        query: The user's free-text message.

    Returns:
        Dict of extracted parameters (unmentioned ones come back as null/None
        from the model), or a dict with an "error" key on any failure.
    """
    if not groq_api_key or client is None:
        return {"error": "Please set GROQ_API_KEY environment variable to use chat functionality."}

    try:
        # System prompt to guide the LLM
        system_prompt = """You are an expert at extracting parameters for HDB price prediction from natural language queries.
Extract the following parameters if mentioned in the query:
- town (e.g., Ang Mo Kio, Bedok, Tampines)
- flat_type (e.g., 3 ROOM, 4 ROOM, 5 ROOM, EXECUTIVE)
- flat_model (e.g., Improved, Model A, New Generation, Standard, Premium)
- floor_area_sqm (floor area in square meters)
- storey_level (floor level)
- flat_age (age of flat in years)

Return only a JSON object with the extracted parameters. If a parameter is not mentioned, set it to null.
Example: {"town": "ANG MO KIO", "flat_type": "4 ROOM", "flat_model": "Improved", "floor_area_sqm": 95, "storey_level": 8, "flat_age": 15}"""

        # Query the LLM; low temperature keeps the extraction deterministic.
        completion = client.chat.completions.create(
            model="llama-3.3-70b-versatile",
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": query}
            ],
            temperature=0.1,
            max_tokens=200
        )

        # Extract and parse the JSON response
        response = completion.choices[0].message.content
        # Grab the first {...} span in case the model wraps the JSON in prose.
        json_match = re.search(r'\{.*\}', response, re.DOTALL)
        if json_match:
            import json
            params = json.loads(json_match.group())
            return params
        else:
            return {"error": "Could not extract parameters from query"}

    except Exception as e:
        return {"error": f"Error processing query: {str(e)}"}
332
+
333
def is_small_talk(query):
    """Return True when *query* looks like casual chit-chat rather than an HDB question.

    Matching is a case-insensitive substring test against a fixed phrase list.
    """
    casual_phrases = (
        'hello', 'hi', 'hey', 'good morning', 'good afternoon', 'good evening',
        'how are you', 'how are things', "what's up", 'how do you do',
        'thank you', 'thanks', 'bye', 'goodbye', 'see you', 'nice to meet you',
        'who are you', 'what can you do', 'help', 'tell me about yourself',
    )

    lowered = query.lower()
    for phrase in casual_phrases:
        if phrase in lowered:
            return True
    return False
344
+
345
def handle_small_talk(query):
    """Return a canned reply for a casual message.

    Checks keyword groups in a fixed priority order (greetings first) and
    falls back to a generic offer of help when nothing matches.
    """
    lowered = query.lower()

    # (keywords, reply) pairs, evaluated in the same order as before.
    canned_replies = [
        (('hello', 'hi', 'hey', 'good morning', 'good afternoon', 'good evening'),
         "Hello! ๐Ÿ‘‹ I'm your HDB price assistant. How can I help you today?"),
        (('how are you', 'how are things', "what's up", 'how do you do'),
         "I'm doing great, thanks for asking! I'm here to help you with HDB price predictions and information. What can I assist you with today?"),
        (('thank you', 'thanks'),
         "You're welcome! ๐Ÿ˜Š Is there anything else you'd like to know about HDB prices?"),
        (('bye', 'goodbye', 'see you'),
         "Goodbye! ๐Ÿ‘‹ Feel free to come back if you have more questions about HDB prices!"),
        (('who are you',),
         "I'm an AI assistant specialized in helping with HDB resale price predictions and information. I can estimate property values based on various factors like location, flat type, size, and age."),
        (('what can you do', 'help'),
         "I can help you with:\n- Predicting HDB resale prices\n- Answering questions about HDB properties\n- Providing market insights\n\nJust tell me about the property you're interested in (location, type, size, etc.) and I'll give you an estimate!"),
        (('tell me about yourself',),
         "I'm an AI assistant powered by machine learning models trained on HDB resale data. I can provide price estimates and insights about public housing in Singapore. My goal is to help you make informed decisions about HDB properties!"),
    ]

    for keywords, reply in canned_replies:
        if any(k in lowered for k in keywords):
            return reply

    return "I'm here to help with HDB price predictions and information. How can I assist you today?"
372
+
373
def chat_with_llm(query, chat_history):
    """Handle one chat turn: small talk, general HDB Q&A, or a price prediction.

    Reads the module-level globals ``groq_api_key`` and ``client``.

    Args:
        query: The user's message.
        chat_history: List of (user, assistant) tuples; appended to in place.

    Returns:
        Tuple of (assistant response text, updated chat history).
    """
    if not groq_api_key or client is None:
        return "Please set GROQ_API_KEY environment variable to use chat functionality.", chat_history

    # Check if it's small talk
    if is_small_talk(query):
        response = handle_small_talk(query)
        chat_history.append((query, response))
        return response, chat_history

    # Extract parameters from the query
    params = extract_parameters_from_query(query)

    if "error" in params:
        # If we couldn't extract parameters, check if it's a general HDB question
        if any(keyword in query.lower() for keyword in ['hdb', 'price', 'cost', 'value', 'property', 'flat', 'house']):
            # Use LLM to answer general HDB questions
            try:
                completion = client.chat.completions.create(
                    model="llama-3.3-70b-versatile",
                    messages=[
                        {"role": "system", "content": "You are a helpful assistant specialized in HDB (Housing & Development Board) properties in Singapore. Provide accurate, helpful information about HDB prices, policies, and market trends."},
                        {"role": "user", "content": query}
                    ],
                    temperature=0.3,
                    max_tokens=300
                )
                response = completion.choices[0].message.content
                chat_history.append((query, response))
                return response, chat_history
            except Exception as e:
                error_msg = f"I specialize in HDB price predictions. For accurate estimates, please provide details like location, flat type, size, and age. Error: {str(e)}"
                chat_history.append((query, error_msg))
                return error_msg, chat_history
        else:
            # Not a small talk and not HDB-related
            response = "I specialize in HDB price predictions and information. If you have questions about Singapore public housing, I'd be happy to help! Otherwise, you might want to try a more general AI assistant."
            chat_history.append((query, response))
            return response, chat_history

    # Check if we have enough parameters to make a prediction
    required_params = ['town', 'flat_type', 'flat_model', 'floor_area_sqm', 'storey_level', 'flat_age']
    missing_params = [param for param in required_params if params.get(param) is None]

    if missing_params:
        # If missing parameters, ask for them
        missing_list = ", ".join(missing_params)
        response = f"I need more information to make a prediction. Please provide: {missing_list}."
        chat_history.append((query, response))
        return response, chat_history

    # If we have all parameters, make a prediction
    try:
        # Convert string numbers to appropriate types (the LLM may quote them)
        if isinstance(params['floor_area_sqm'], str):
            params['floor_area_sqm'] = float(params['floor_area_sqm'])
        if isinstance(params['storey_level'], str):
            params['storey_level'] = int(params['storey_level'])
        if isinstance(params['flat_age'], str):
            params['flat_age'] = int(params['flat_age'])

        # Make prediction
        price, chart, insights = predict_hdb_price(
            params['town'], params['flat_type'], params['flat_model'],
            params['floor_area_sqm'], params['storey_level'], params['flat_age']
        )

        # Format response (the chart is discarded -- only text reaches the chatbot)
        response = f"Based on your query:\n\n"
        response += f"๐Ÿ“ Town: {params['town']}\n"
        response += f"๐Ÿ  Flat Type: {params['flat_type']}\n"
        response += f"๐Ÿ“ Floor Area: {params['floor_area_sqm']} sqm\n"
        response += f"๐Ÿข Storey Level: {params['storey_level']}\n"
        response += f"๐Ÿ“… Flat Age: {params['flat_age']} years\n\n"
        response += f"๐Ÿ’ฐ Predicted Price: {price}\n\n"
        response += insights

        chat_history.append((query, response))
        return response, chat_history

    except Exception as e:
        error_msg = f"Error making prediction: {str(e)}"
        chat_history.append((query, error_msg))
        return error_msg, chat_history
458
+
459
# Preload models and data once at import time; the Gradio callbacks read
# these as module-level globals (see predict_hdb_price).
print("Loading models and data...")
models = load_models()
data = load_data()

# Dropdown choices for the traditional interface.
# NOTE(review): towns_list must stay in sync with town_mapping inside
# preprocess_input -- towns absent from that mapping fall back to code 0
# (the 'SENGKANG' encoding).
towns_list = [
    'SENGKANG', 'WOODLANDS', 'TAMPINES', 'PUNGGOL', 'JURONG WEST',
    'YISHUN', 'BEDOK', 'HOUGANG', 'CHOA CHU KANG', 'ANG MO KIO'
]

flat_types = ['3 ROOM', '4 ROOM', '5 ROOM', 'EXECUTIVE', '2 ROOM', '1 ROOM']
flat_models = ['Model A', 'Improved', 'New Generation', 'Standard', 'Premium']
472
+
473
# Create Gradio interface with chatbot
with gr.Blocks(title="๐Ÿ  HDB Price Predictor + Chat", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# ๐Ÿ  HDB Price Predictor + AI Chat")
    gr.Markdown("Predict HDB resale prices using XGBoost model or chat with our AI assistant")

    # Tab 1: classic form-based prediction UI.
    with gr.Tab("Traditional Interface"):
        with gr.Row():
            with gr.Column():
                town = gr.Dropdown(label="Town", choices=sorted(towns_list), value="ANG MO KIO")
                flat_type = gr.Dropdown(label="Flat Type", choices=sorted(flat_types), value="4 ROOM")
                flat_model = gr.Dropdown(label="Flat Model", choices=sorted(flat_models), value="Improved")
                floor_area_sqm = gr.Slider(label="Floor Area (sqm)", minimum=30, maximum=200, value=95, step=5)
                storey_level = gr.Slider(label="Storey Level", minimum=1, maximum=50, value=8, step=1)
                flat_age = gr.Slider(label="Flat Age (years)", minimum=0, maximum=99, value=15, step=1)

                predict_btn = gr.Button("๐Ÿ”ฎ Predict Price", variant="primary")

            with gr.Column():
                predicted_price = gr.Label(label="๐Ÿ’ฐ Predicted Price")
                insights = gr.Markdown(label="๐Ÿ“‹ Property Summary")

        with gr.Row():
            chart_output = gr.Plot(label="๐Ÿ“ˆ Market Insights")

        # Connect button to function
        predict_btn.click(
            fn=predict_hdb_price,
            inputs=[town, flat_type, flat_model, floor_area_sqm, storey_level, flat_age],
            outputs=[predicted_price, chart_output, insights]
        )

    # Tab 2: natural-language chat assistant.
    with gr.Tab("AI Chat Assistant"):
        gr.Markdown("๐Ÿ’ฌ Chat with our AI assistant to get HDB price predictions using natural language!")
        gr.Markdown("Example: 'What would be the price of a 4-room model A flat in Ang Mo Kio with 95 sqm, on the 8th floor, that's 15 years old?'")
        gr.Markdown("You can also say hello, ask how I am, or ask general questions about HDB!")

        chatbot = gr.Chatbot(label="HDB Price Chatbot", height=500)
        msg = gr.Textbox(label="Your question", placeholder="Type your message here...")
        clear = gr.Button("Clear Chat")

        def respond(message, chat_history):
            # chat_with_llm appends to the history in place; the returned text
            # is discarded because the Chatbot component renders the history.
            response, updated_history = chat_with_llm(message, chat_history)
            return updated_history

        # NOTE(review): the textbox is not cleared after submit -- confirm
        # whether msg should also be an output returning "".
        msg.submit(respond, [msg, chatbot], [chatbot])
        clear.click(lambda: None, None, [chatbot], queue=False)
519
+
520
# To run in Colab
if __name__ == "__main__":
    # Let Gradio automatically find an available port; binding to 0.0.0.0
    # exposes the server on all interfaces, and share=True requests a public
    # tunnel link (useful when running inside Colab).
    demo.launch(server_name="0.0.0.0", share=True)