Spaces:
Sleeping
Sleeping
Fix circuit graph visuals: remove node outlines, softer edges, theme-aware layer labels, reduce grid, black text for all labels/legend/ticks
ab1ff5b
import os
import random
import warnings

import numpy as np
import streamlit as st
import torch
def get_theme_colors():
    """Return a dict of CSS color values for the current theme (dark / light).

    The active theme is read from ``st.session_state["theme_mode"]``
    (defaulting to ``"dark"`` when unset); the returned mapping covers
    card/surface backgrounds, text tiers, borders, banner, verification,
    code and graph/circuit colors.
    """
    light_palette = {
        # Card / box backgrounds
        "card_bg": "#f0f1f5",
        "card_bg_alt": "#e9ebf0",
        "card_bg_deep": "#e2e4ea",
        "surface": "#ffffff",
        # Text
        "text": "#1b1b2f",
        "text_secondary": "#4a5568",
        "text_muted": "#6b7280",
        # Borders
        "border": "#d1d5db",
        "border_light": "#e5e7eb",
        # Accent banner
        "banner_bg": "#e8ecf6",
        "banner_text": "#1b1b2f",
        # Verification
        "verify_bg": "#f0f2f6",
        # Code / monospace
        "code_bg": "#ecedf2",
        "code_text": "#1b1b2f",
        # Graph / circuit
        "graph_edge": "#b0b8c4",
        "node_outline": "rgba(0,0,0,0)",
        "layer_label": "#2f3f70",
        "grid_color": "rgba(200,200,210,0.15)",
    }
    dark_palette = {
        # Card / box backgrounds
        "card_bg": "#2b2b2b",
        "card_bg_alt": "#1a1a1a",
        "card_bg_deep": "#0E1117",
        "surface": "#0E1117",
        # Text
        "text": "#ffffff",
        "text_secondary": "#DCDCDC",
        "text_muted": "#888888",
        # Borders
        "border": "#444",
        "border_light": "#262730",
        # Accent banner
        "banner_bg": "#2f3f70",
        "banner_text": "#f5f7fb",
        # Verification
        "verify_bg": "#1a1a1a",
        # Code / monospace
        "code_bg": "#1a1a1a",
        "code_text": "#DCDCDC",
        # Graph / circuit
        "graph_edge": "gray",
        "node_outline": "black",
        "layer_label": "#dcae36",
        "grid_color": "rgba(200,200,200,0.3)",
    }
    mode = st.session_state.get("theme_mode", "dark")
    return light_palette if mode == "light" else dark_palette
def set_seed(seed_value=42):
    """Seed every RNG in use (stdlib, NumPy, PyTorch) for reproducibility.

    Also seeds all CUDA devices when a GPU is available, so results are
    repeatable across CPU and GPU runs.
    """
    # Apply the same seed to each framework's generator in turn.
    for seeder in (random.seed, np.random.seed, torch.manual_seed):
        seeder(seed_value)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed_value)
def init_qwen_api():
    """Build the API configuration for the Qwen chat endpoint.

    Reads the key from the ``QWEN_API_KEY`` environment variable and
    returns a dict with ``api_key``, ``api_endpoint`` and ``model``.

    Falls back to a baked-in key for local testing when the variable is
    unset, emitting a warning so the fallback is never used silently.
    """
    api_key = os.environ.get("QWEN_API_KEY")
    if not api_key:
        # SECURITY: this fallback credential is committed to source and must
        # be considered compromised — rotate the key and remove this branch.
        # Kept (with a loud warning) only so local testing keeps working.
        warnings.warn(
            "QWEN_API_KEY is not set; using the hard-coded fallback key. "
            "Do not use this in production.",
            stacklevel=2,
        )
        api_key = "fd89601072073b34725b18a36333c805"
    return {
        "api_key": api_key,
        "api_endpoint": "https://chat-ai.academiccloud.de/v1",
        "model": "qwen3-vl-30b-a3b-instruct",
    }