| """ |
| Investor-facing Interactive Analytics Dashboard |
| Built with Gradio for deployment to Hugging Face Spaces |
| Updated for Gradio 6.0 compatibility |
| """ |
|
|
| import os |
| import logging |
| from datetime import datetime, timedelta |
| from typing import Optional, Tuple |
|
|
| import gradio as gr |
| import pandas as pd |
| import numpy as np |
|
|
| from db import db_connector |
| from queries import query_builder |
| from queries_demo import demo_generator |
| from components import ( |
| create_date_range_inputs, |
| create_filter_options, |
| create_kpi_grid, |
| create_line_chart, |
| create_bar_chart, |
| create_pie_chart, |
| create_geo_heatmap, |
| create_density_heatmap, |
| create_data_table, |
| df_to_csv |
| ) |
| from config import DEMO_MODE_ENABLED |
|
|
| |
# Configure root logging once at import time; every module sharing the
# process inherits this level and message format.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
# Module-scoped logger named after this module.
logger = logging.getLogger(__name__)
|
|
| |
| |
| |
|
|
class AppState:
    """Runtime mode flags: demo (synthetic data) vs live (database)."""

    def __init__(self):
        # Demo mode is forced either by configuration or by an
        # unreachable database.
        self.demo_mode = DEMO_MODE_ENABLED or not db_connector.is_available()
        self.db_available = db_connector.is_available()

        if self.demo_mode:
            logger.warning("Running in DEMO MODE - using synthetic data")
        else:
            logger.info("Connected to database - using live data")

    def toggle_demo_mode(self):
        """Switch between demo and live data sources.

        Returns a human-readable status message; refuses to leave demo
        mode when no database connection is available.
        """
        if not self.db_available:
            return "Database not available - cannot switch to live mode"

        self.demo_mode = not self.demo_mode
        label = "DEMO" if self.demo_mode else "LIVE"
        logger.info(f"Switched to {label} mode")
        return f"Now in {label} mode"
|
|
|
|
# Module-level singleton; all render functions below consult this for the
# demo/live mode decision.
app_state = AppState()
|
|
|
|
| |
| |
| |
|
|
def parse_date_string(date_str: str, is_end: bool = False) -> datetime:
    """Convert a 'YYYY-MM-DD' string into a datetime.

    End dates are pushed to 23:59:59 so the whole day is included in
    range comparisons. Unparseable or missing input falls back to a
    90-days-ago start or an end-of-today end.
    """
    try:
        parsed = datetime.strptime(date_str, "%Y-%m-%d")
    except (ValueError, TypeError):
        # Bad input: default to the edges of a "last 90 days" window.
        now = datetime.now()
        if is_end:
            return now.replace(hour=23, minute=59, second=59)
        return now - timedelta(days=90)

    if is_end:
        parsed = parsed.replace(hour=23, minute=59, second=59)
    return parsed
|
|
|
|
| |
| |
| |
|
|
def fetch_data(query_func, *args, **kwargs) -> Optional[pd.DataFrame]:
    """
    Run a query against the live database, or route it to the demo-data
    generator when the app is in demo mode.

    Args:
        query_func: Query-builder function returning (sql, params)
        *args, **kwargs: Forwarded to the query/demo function

    Returns:
        DataFrame with the results, or None on error / missing demo method
    """
    # Query-builder function name -> demo-generator method name.
    demo_methods = {
        'get_new_users_query': 'get_new_users',
        'get_verified_users_query': 'get_verified_users',
        'get_activated_by_first_trip_query': 'get_activated_by_first_trip',
        'get_trips_over_time_query': 'get_trips_over_time',
        'get_trip_metrics_query': 'get_trip_metrics',
        'get_driver_type_distribution_query': 'get_driver_type_distribution',
        'get_solo_shared_split_query': 'get_solo_shared_split',
        'get_user_locations_query': 'get_user_locations',
        'get_transactions_over_time_query': 'get_transactions_over_time'
    }

    try:
        if not app_state.demo_mode:
            # Live mode: build the SQL and execute it directly.
            sql, params = query_func(*args, **kwargs)
            return db_connector.execute_query(sql, params)

        # Demo mode: dispatch to the synthetic-data generator by name.
        target = demo_methods.get(query_func.__name__)
        handler = getattr(demo_generator, target, None) if target else None
        if handler is None:
            logger.warning(f"Demo method not found for: {query_func.__name__}")
            return None
        return handler(*args, **kwargs)

    except Exception as e:
        logger.error(f"Error fetching data: {str(e)}")
        return None
|
|
|
|
| |
| |
| |
|
|
def render_overview_tab(
    start_date_str: str,
    end_date_str: str,
    granularity: str
) -> Tuple:
    """Render Overview tab with KPIs and trends.

    Returns:
        (kpi_html, user_trend_figure, trip_trend_figure) matching the
        Overview tab's HTML + two Plot outputs.
    """
    # Parse the UI date strings; end date is extended to 23:59:59.
    start_date = parse_date_string(start_date_str)
    end_date = parse_date_string(end_date_str, is_end=True)

    # Comparison window of equal length immediately before the selected
    # one, used for the percentage-delta badges on the KPI cards.
    # NOTE(review): prev_end == start_date, so the two windows share a
    # boundary instant - confirm the queries use half-open ranges.
    period_days = (end_date - start_date).days
    prev_start = start_date - timedelta(days=period_days)
    prev_end = start_date

    # Current-period data.
    new_users_df = fetch_data(
        query_builder.get_new_users_query,
        start_date, end_date, granularity
    )

    trip_metrics_df = fetch_data(
        query_builder.get_trip_metrics_query,
        start_date, end_date
    )

    # Previous-period data for the deltas.
    prev_users_df = fetch_data(
        query_builder.get_new_users_query,
        prev_start, prev_end, granularity
    )

    prev_trip_metrics_df = fetch_data(
        query_builder.get_trip_metrics_query,
        prev_start, prev_end
    )

    # New-user KPI. The previous total defaults to 1 (not 0) so the
    # delta division below cannot divide by zero.
    total_new_users = new_users_df['new_users'].sum() if new_users_df is not None and not new_users_df.empty else 0
    prev_total_users = prev_users_df['new_users'].sum() if prev_users_df is not None and not prev_users_df.empty else 1
    user_delta = ((total_new_users - prev_total_users) / max(prev_total_users, 1) * 100)

    # 30-day rolling active users (MAU): demo mode computes it directly,
    # live mode runs the rolling-active-users query at each window end.
    if app_state.demo_mode:
        mau = demo_generator.get_rolling_active_users(end_date, 30)
        prev_mau = demo_generator.get_rolling_active_users(prev_end, 30)
    else:
        query, params = query_builder.get_rolling_active_users_query(end_date, 30)
        mau_df = db_connector.execute_query(query, params)
        mau = mau_df.iloc[0]['active_users'] if mau_df is not None and not mau_df.empty else 0

        prev_query, prev_params = query_builder.get_rolling_active_users_query(prev_end, 30)
        prev_mau_df = db_connector.execute_query(prev_query, prev_params)
        prev_mau = prev_mau_df.iloc[0]['active_users'] if prev_mau_df is not None and not prev_mau_df.empty else 1

    mau_delta = ((mau - prev_mau) / max(prev_mau, 1) * 100)

    # Trip KPIs; `or 0` guards against NULL aggregates from the database.
    if trip_metrics_df is not None and not trip_metrics_df.empty:
        total_trips = trip_metrics_df.iloc[0]['total_trips']
        avg_distance = trip_metrics_df.iloc[0]['avg_distance_miles'] or 0
        total_co2 = trip_metrics_df.iloc[0]['total_co2_reduced'] or 0
    else:
        total_trips = avg_distance = total_co2 = 0

    if prev_trip_metrics_df is not None and not prev_trip_metrics_df.empty:
        prev_trips = prev_trip_metrics_df.iloc[0]['total_trips'] or 1
    else:
        prev_trips = 1

    trip_delta = ((total_trips - prev_trips) / max(prev_trips, 1) * 100)

    # Completion rate: fixed showcase value in demo mode, queried when live.
    if app_state.demo_mode:
        completion_rate = 90.0
    else:
        query, params = query_builder.get_trip_completion_rate_query(start_date, end_date)
        comp_df = db_connector.execute_query(query, params)
        completion_rate = comp_df.iloc[0]['completion_rate'] if comp_df is not None and not comp_df.empty else 0

    # KPI card definitions consumed by create_kpi_grid; cards without a
    # 'delta' key presumably render without a change badge - see components.
    kpis = [
        {
            "title": "New Users",
            "value": total_new_users,
            "format_type": "users",
            "delta": user_delta
        },
        {
            "title": "Monthly Active Users",
            "value": mau,
            "format_type": "users",
            "delta": mau_delta
        },
        {
            "title": "Total Trips",
            "value": total_trips,
            "format_type": "trips",
            "delta": trip_delta
        },
        {
            "title": "Avg Distance",
            "value": avg_distance,
            "format_type": "distance"
        },
        {
            "title": "Completion Rate",
            "value": completion_rate,
            "format_type": "percentage"
        },
        {
            "title": "COβ Reduced",
            "value": total_co2,
            "format_type": "co2"
        }
    ]

    kpi_html = create_kpi_grid(kpis)

    # Trend charts; empty DataFrames keep the charts rendering even when
    # a fetch returned None.
    user_trend = create_line_chart(
        new_users_df if new_users_df is not None else pd.DataFrame(),
        'period', 'new_users',
        'New User Registrations Over Time',
        'Date', 'New Users'
    )

    trips_df = fetch_data(
        query_builder.get_trips_over_time_query,
        start_date, end_date, granularity
    )

    trip_trend = create_line_chart(
        trips_df if trips_df is not None else pd.DataFrame(),
        'period', 'trip_count',
        'Trip Volume Over Time',
        'Date', 'Trips'
    )

    return kpi_html, user_trend, trip_trend
|
|
|
|
| |
| |
| |
|
|
def render_users_tab(
    start_date_str: str,
    end_date_str: str,
    granularity: str
) -> Tuple:
    """Render Users tab with growth and retention metrics.

    Returns:
        (new_users_chart, verified_chart, activated_chart,
         retention_table, csv_data) matching the Users tab outputs.
    """
    start_date = parse_date_string(start_date_str)
    end_date = parse_date_string(end_date_str, is_end=True)

    def _line(df, y_col, title, y_label):
        # None from fetch_data degrades to an empty chart, not an error.
        return create_line_chart(
            df if df is not None else pd.DataFrame(),
            'period', y_col, title, 'Date', y_label
        )

    new_users_df = fetch_data(
        query_builder.get_new_users_query,
        start_date, end_date, granularity
    )
    new_users_chart = _line(new_users_df, 'new_users',
                            'New User Registrations', 'New Users')

    verified_df = fetch_data(
        query_builder.get_verified_users_query,
        start_date, end_date, granularity
    )
    verified_chart = _line(verified_df, 'verified_users',
                           'Verified Users Over Time', 'Verified Users')

    activated_df = fetch_data(
        query_builder.get_activated_by_first_trip_query,
        start_date, end_date, granularity
    )
    activated_chart = _line(activated_df, 'activated_users',
                            'Users Activated by First Trip', 'Activated Users')

    # Weekly cohort retention: synthesized in demo mode, queried otherwise.
    if app_state.demo_mode:
        rows = []
        for week in range(12):
            cohort_start = start_date + timedelta(days=week * 7)
            if cohort_start > end_date:
                continue
            size = np.random.randint(100, 500)
            rows.append({
                'cohort_date': cohort_start.strftime('%Y-%m-%d'),
                'cohort_size': size,
                'retained_users': int(size * np.random.uniform(0.2, 0.6))
            })
        retention_df = pd.DataFrame(rows)
        if not retention_df.empty:
            retention_df['retention_rate'] = (
                retention_df['retained_users'] / retention_df['cohort_size'] * 100
            ).round(1)
    else:
        query, params = query_builder.get_cohort_retention_query(start_date, end_date, 7)
        retention_df = db_connector.execute_query(query, params)
        if retention_df is not None and not retention_df.empty:
            retention_df['retention_rate'] = (
                retention_df['retained_users'] / retention_df['cohort_size'] * 100
            ).round(1)

    retention_table = create_data_table(
        retention_df if retention_df is not None else pd.DataFrame(),
        "7-Day Cohort Retention"
    )

    # CSV export mirrors the new-users series.
    csv_data = df_to_csv(
        new_users_df if new_users_df is not None else pd.DataFrame(),
        "users_export.csv"
    )

    return (
        new_users_chart,
        verified_chart,
        activated_chart,
        retention_table,
        csv_data
    )
|
|
|
|
| |
| |
| |
|
|
def render_trips_tab(
    start_date_str: str,
    end_date_str: str,
    granularity: str,
    driver_type: str
) -> Tuple:
    """Render Trips tab with volume and impact metrics.

    Returns:
        (trips_chart, driver_pie, solo_shared_pie, impact_table, csv_data)
        matching the Trips tab outputs.
    """
    start_date = parse_date_string(start_date_str)
    end_date = parse_date_string(end_date_str, is_end=True)

    # "All" in the dropdown means no driver-type filter.
    driver_type_filter = driver_type if driver_type != "All" else None

    trips_df = fetch_data(
        query_builder.get_trips_over_time_query,
        start_date, end_date, granularity, driver_type_filter
    )
    trips_chart = create_line_chart(
        trips_df if trips_df is not None else pd.DataFrame(),
        'period', 'trip_count',
        'Trip Volume Over Time',
        'Date', 'Trips'
    )

    driver_dist_df = fetch_data(
        query_builder.get_driver_type_distribution_query,
        start_date, end_date
    )
    driver_pie = create_pie_chart(
        driver_dist_df if driver_dist_df is not None else pd.DataFrame(),
        'driver_type', 'trip_count',
        'Trips by Driver Type'
    )

    solo_shared_df = fetch_data(
        query_builder.get_solo_shared_split_query,
        start_date, end_date
    )
    solo_shared_pie = create_pie_chart(
        solo_shared_df if solo_shared_df is not None else pd.DataFrame(),
        'trip_type', 'trip_count',
        'Solo vs Shared Trips'
    )

    # Environmental-impact summary built from the first metrics row.
    metrics_df = fetch_data(
        query_builder.get_trip_metrics_query,
        start_date, end_date, driver_type_filter
    )

    if metrics_df is None or metrics_df.empty:
        impact_data = pd.DataFrame()
    else:
        row = metrics_df.iloc[0]

        def _fmt(col, spec):
            # Missing/None metrics are treated as zero before formatting.
            return format(row.get(col, 0) or 0, spec)

        impact_data = pd.DataFrame([
            {'Metric': 'Total COβ Reduced (g)', 'Value': _fmt('total_co2_reduced', ',.1f')},
            {'Metric': 'Avg COβ per Trip (g)', 'Value': _fmt('avg_co2_per_trip', ',.2f')},
            {'Metric': 'Total NOx Reduced (g)', 'Value': _fmt('total_nox_reduced', ',.1f')},
            {'Metric': 'Total PM2.5 Reduced (g)', 'Value': _fmt('total_pm25_reduced', ',.2f')},
            {'Metric': 'Total Distance (miles)', 'Value': _fmt('total_distance_miles', ',.0f')},
            {'Metric': 'Shared Miles', 'Value': _fmt('total_shared_miles', ',.0f')},
            {'Metric': 'Trees Saved', 'Value': _fmt('total_trees_saved', ',.2f')},
            {'Metric': 'Total Points Earned', 'Value': _fmt('total_points', ',.0f')}
        ])

    impact_table = create_data_table(impact_data, "Environmental Impact Summary")

    csv_data = df_to_csv(trips_df if trips_df is not None else pd.DataFrame(), "trips_export.csv")

    return (
        trips_chart,
        driver_pie,
        solo_shared_pie,
        impact_table,
        csv_data
    )
|
|
|
|
| |
| |
| |
|
|
def render_geography_tab() -> Tuple:
    """Render Geography tab with heat maps.

    Returns:
        (heat_map, markets_table, csv_data) matching the Geography tab
        outputs. Takes no filters - always shows all known locations.
    """
    if app_state.demo_mode:
        locations_df = demo_generator.get_user_locations()
        # Synthetic rows represent one user each; add an explicit count
        # column so sizing/aggregation below works in both modes.
        locations_df['user_count'] = 1
    else:
        query, params = query_builder.get_user_locations_query()
        locations_df = db_connector.execute_query(query, params)

    # Only surface city/state on hover when those columns actually exist.
    hover_cols = None
    if locations_df is not None and not locations_df.empty:
        hover_cols = [c for c in ('city', 'state') if c in locations_df.columns] or None

    has_counts = locations_df is not None and 'user_count' in locations_df.columns
    heat_map = create_geo_heatmap(
        locations_df if locations_df is not None else pd.DataFrame(),
        'latitude', 'longitude',
        size_col='user_count' if has_counts else None,
        hover_data=hover_cols,
        title='User Geographic Distribution'
    )

    # Top-10 markets by user count, grouped at the state level.
    if locations_df is not None and not locations_df.empty and 'state' in locations_df.columns:
        top_markets = (
            locations_df.groupby('state')['user_count'].sum()
            .reset_index()
            .sort_values('user_count', ascending=False)
            .head(10)
        )
        top_markets.columns = ['State', 'Users']
    else:
        top_markets = pd.DataFrame()

    markets_table = create_data_table(top_markets, "Top 10 Markets by Users")

    csv_data = df_to_csv(locations_df if locations_df is not None else pd.DataFrame(), "geography_export.csv")

    return (
        heat_map,
        markets_table,
        csv_data
    )
|
|
|
|
| |
| |
| |
|
|
def render_rewards_tab(
    start_date_str: str,
    end_date_str: str,
    granularity: str
) -> Tuple:
    """Render Rewards tab with transaction metrics.

    Returns:
        (points_table, trans_chart, csv_data) matching the Rewards tab
        outputs.
    """
    start_date = parse_date_string(start_date_str)
    end_date = parse_date_string(end_date_str, is_end=True)

    # Points/gas-savings summary from the first trip-metrics row.
    metrics_df = fetch_data(
        query_builder.get_trip_metrics_query,
        start_date, end_date
    )

    if metrics_df is None or metrics_df.empty:
        points_summary = pd.DataFrame()
    else:
        first = metrics_df.iloc[0]
        total_points = first.get('total_points', 0) or 0
        total_gas_savings = first.get('total_gas_savings', 0) or 0
        points_summary = pd.DataFrame([
            {'Metric': 'Total Points Earned', 'Value': f"{total_points:,.0f}"},
            {'Metric': 'Total Gas Savings', 'Value': f"${total_gas_savings:,.2f}"}
        ])

    points_table = create_data_table(points_summary, "Rewards Summary")

    if app_state.demo_mode:
        # Synthesize an evenly spaced series (at least 2 points so the
        # date_range call is always valid).
        num_periods = min(20, (end_date - start_date).days)
        periods = pd.date_range(start=start_date, end=end_date, periods=max(num_periods, 2))
        trans_df = pd.DataFrame({
            'period': periods,
            'transaction_count': np.random.randint(50, 200, len(periods)),
            'total_amount': np.random.uniform(500, 2000, len(periods))
        })
    else:
        query, params = query_builder.get_transactions_over_time_query(start_date, end_date, granularity)
        trans_df = db_connector.execute_query(query, params)

    trans_chart = create_line_chart(
        trans_df if trans_df is not None else pd.DataFrame(),
        'period', 'transaction_count',
        'Transactions Over Time',
        'Date', 'Transactions'
    )

    csv_data = df_to_csv(trans_df if trans_df is not None else pd.DataFrame(), "rewards_export.csv")

    return (
        points_table,
        trans_chart,
        csv_data
    )
|
|
|
|
| |
| |
| |
|
|
def build_gradio_app():
    """Build and configure the Gradio interface.

    Returns:
        (demo, app_theme, app_css): the Blocks app plus the theme/CSS
        objects. The theme and CSS are applied to the Blocks constructor
        here; they are still returned for backward compatibility with
        existing callers that unpack three values.
    """
    app_theme = gr.themes.Soft()
    app_css = """
    .gradio-container {
        font-family: 'Arial', sans-serif;
    }
    .tab-nav button {
        font-size: 16px;
        font-weight: 500;
    }
    """

    # BUG FIX: theme/css are gr.Blocks constructor arguments. Previously
    # they were only returned and passed to launch(), which does not
    # accept them, so they were never applied (and launch() would fail).
    with gr.Blocks(theme=app_theme, css=app_css) as demo:

        # Dashboard header.
        gr.Markdown(
            """
            # π Hytch Trip Analytics Dashboard
            ### Real-time insights for investor conversations
            """
        )

        # Mode banner reflecting demo vs live at build time; updated by
        # the toggle handler below when a database is available.
        mode_text = "π‘ DEMO MODE - Using synthetic data" if app_state.demo_mode else "π’ LIVE MODE - Connected to database"
        status_display = gr.Markdown(mode_text)

        # Global filter controls shared by every tab.
        with gr.Row():
            with gr.Column(scale=2):
                start_default, end_default = create_date_range_inputs()
                start_date_input = gr.Textbox(
                    label="Start Date (YYYY-MM-DD)",
                    value=start_default.strftime("%Y-%m-%d"),
                    placeholder="2024-01-01"
                )
                end_date_input = gr.Textbox(
                    label="End Date (YYYY-MM-DD)",
                    value=end_default.strftime("%Y-%m-%d"),
                    placeholder="2024-12-31"
                )

            with gr.Column(scale=1):
                filter_opts = create_filter_options()
                granularity_input = gr.Dropdown(
                    choices=filter_opts["granularity"],
                    value="day",
                    label="Granularity"
                )
                driver_type_input = gr.Dropdown(
                    choices=filter_opts["driver_types"],
                    value="All",
                    label="Driver Type"
                )

            with gr.Column(scale=1):
                refresh_btn = gr.Button("π Refresh Data", variant="primary")
                # Mode-toggle button only exists when a live DB is reachable.
                if app_state.db_available:
                    toggle_mode_btn = gr.Button("Toggle Demo/Live Mode")

        with gr.Tabs():

            # --- Overview tab -------------------------------------------
            with gr.Tab("π Overview"):
                overview_kpis = gr.HTML()
                with gr.Row():
                    overview_user_chart = gr.Plot()
                    overview_trip_chart = gr.Plot()

                refresh_btn.click(
                    render_overview_tab,
                    inputs=[start_date_input, end_date_input, granularity_input],
                    outputs=[overview_kpis, overview_user_chart, overview_trip_chart]
                )

                # Populate the overview on first page load.
                demo.load(
                    render_overview_tab,
                    inputs=[start_date_input, end_date_input, granularity_input],
                    outputs=[overview_kpis, overview_user_chart, overview_trip_chart]
                )

            # --- Users tab ----------------------------------------------
            with gr.Tab("π₯ Users"):
                with gr.Row():
                    users_new_chart = gr.Plot()
                    users_verified_chart = gr.Plot()

                users_activated_chart = gr.Plot()
                users_retention_table = gr.HTML()
                users_export_data = gr.Textbox(label="Export Data (CSV)", lines=5, visible=False)
                users_export_btn = gr.Button("π₯ Export Users Data")

                refresh_btn.click(
                    render_users_tab,
                    inputs=[start_date_input, end_date_input, granularity_input],
                    outputs=[
                        users_new_chart,
                        users_verified_chart,
                        users_activated_chart,
                        users_retention_table,
                        users_export_data
                    ]
                )

            # --- Trips tab ----------------------------------------------
            with gr.Tab("π Trips"):
                trips_volume_chart = gr.Plot()
                with gr.Row():
                    trips_driver_pie = gr.Plot()
                    trips_solo_shared_pie = gr.Plot()

                trips_impact_table = gr.HTML()
                trips_export_data = gr.Textbox(label="Export Data (CSV)", lines=5, visible=False)
                trips_export_btn = gr.Button("π₯ Export Trips Data")

                refresh_btn.click(
                    render_trips_tab,
                    inputs=[start_date_input, end_date_input, granularity_input, driver_type_input],
                    outputs=[
                        trips_volume_chart,
                        trips_driver_pie,
                        trips_solo_shared_pie,
                        trips_impact_table,
                        trips_export_data
                    ]
                )

            # --- Geography tab ------------------------------------------
            with gr.Tab("πΊοΈ Geography"):
                geo_heat_map = gr.Plot()
                geo_markets_table = gr.HTML()
                geo_export_data = gr.Textbox(label="Export Data (CSV)", lines=5, visible=False)
                geo_export_btn = gr.Button("π₯ Export Geography Data")

                refresh_btn.click(
                    render_geography_tab,
                    outputs=[geo_heat_map, geo_markets_table, geo_export_data]
                )

                # Geography takes no filters, so it can also load eagerly.
                demo.load(
                    render_geography_tab,
                    outputs=[geo_heat_map, geo_markets_table, geo_export_data]
                )

            # --- Rewards tab --------------------------------------------
            with gr.Tab("π Rewards"):
                rewards_points_table = gr.HTML()
                rewards_trans_chart = gr.Plot()
                rewards_export_data = gr.Textbox(label="Export Data (CSV)", lines=5, visible=False)
                rewards_export_btn = gr.Button("π₯ Export Rewards Data")

                refresh_btn.click(
                    render_rewards_tab,
                    inputs=[start_date_input, end_date_input, granularity_input],
                    outputs=[rewards_points_table, rewards_trans_chart, rewards_export_data]
                )

        # Demo/live toggle handler (button only exists when DB is available).
        if app_state.db_available:
            def toggle_mode():
                # Called for its side effect of flipping app_state.demo_mode;
                # the returned text refreshes the status banner.
                app_state.toggle_demo_mode()
                if app_state.demo_mode:
                    return "π‘ DEMO MODE - Using synthetic data"
                return "π’ LIVE MODE - Connected to database"

            toggle_mode_btn.click(
                toggle_mode,
                outputs=[status_display]
            )

        # Footer.
        gr.Markdown(
            """
            ---
            **Data Security Notice**: All database credentials are stored as encrypted environment variables.
            No sensitive information is logged or displayed.

            **Hytch** - Rideshare and carpooling with environmental impact tracking π±
            """
        )

    return demo, app_theme, app_css
|
|
|
|
| |
| |
| |
|
|
if __name__ == "__main__":
    app, _theme, _css = build_gradio_app()
    # BUG FIX: gr.Blocks.launch() does not accept `theme`/`css` keyword
    # arguments (they belong to the gr.Blocks constructor), so passing
    # them here raised a TypeError at startup.
    app.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        show_error=True
    )