#!/usr/bin/env python3
"""
Example: Using WandB Logging with ShinkaEvolve

This example demonstrates how to enable WandB logging to track
evolution progress.

Prerequisites:
    pip install wandb
    wandb login  # Login to your WandB account

Usage:
    python examples/wandb_logging_example.py
"""
import sys
from pathlib import Path

# Add parent directory to path so `shinka` resolves when this script is
# run directly from a repository checkout (before the import below).
sys.path.insert(0, str(Path(__file__).parent.parent))

from shinka.core import EvolutionRunner, EvolutionConfig
from shinka.database import DatabaseConfig
from shinka.launch import LocalJobConfig


def main():
    """Run circle packing with WandB logging enabled."""
    # Configure job
    job_config = LocalJobConfig(
        eval_program_path="examples/circle_packing/evaluate_ori.py"
    )

    # Database configuration
    db_config = DatabaseConfig(
        db_path="examples/circle_packing/results/wandb_test/evolution_db.sqlite",
        num_islands=2,  # Using 2 islands to demonstrate per-island tracking
        archive_size=40,
        elite_selection_ratio=0.3,
        num_archive_inspirations=4,
        num_top_k_inspirations=2,
        migration_interval=10,
        migration_rate=0.1,
        island_elitism=True,
        parent_selection_strategy="weighted",
        parent_selection_lambda=10.0,
    )

    # Task description
    search_task_sys_msg = """You are an expert mathematician specializing in circle packing problems. Your task is to maximize the sum of radii when packing 26 circles in a unit square [0,1] x [0,1]. 
"""

    # Evolution configuration with WandB enabled
    evo_config = EvolutionConfig(
        task_sys_msg=search_task_sys_msg,
        patch_types=["diff", "full", "cross"],
        patch_type_probs=[0.6, 0.3, 0.1],
        num_generations=20,  # Small test run
        max_parallel_jobs=4,
        max_patch_resamples=3,
        max_patch_attempts=3,
        job_type="local",
        language="python",
        llm_models=[
            "native-gemini-2.5-flash",
            "native-gemini-2.5-pro",
        ],
        llm_kwargs=dict(
            temperatures=[0.5, 0.7, 1.0],
            max_tokens=32768,
        ),
        meta_rec_interval=10,
        meta_llm_models=["native-gemini-2.5-flash"],
        meta_llm_kwargs=dict(temperatures=[0.7], max_tokens=16384),
        meta_max_recommendations=5,
        embedding_model="text-embedding-3-small",
        code_embed_sim_threshold=0.995,
        novelty_llm_models=["native-gemini-2.5-flash"],
        novelty_llm_kwargs=dict(temperatures=[0.7], max_tokens=16384),
        llm_dynamic_selection="ucb1",
        llm_dynamic_selection_kwargs=dict(exploration_coef=1.0),
        init_program_path="examples/circle_packing/initial.py",
        results_dir="examples/circle_packing/results/wandb_test",
        use_text_feedback=False,
        # ===== WandB Configuration =====
        enable_wandb=True,
        wandb_project="shinka-evolve-demo",  # Your WandB project name
        wandb_entity=None,  # Optional: your WandB username/team
        wandb_run_name="circle-packing-test",  # Optional: custom run name
        wandb_tags=["circle-packing", "test", "2-islands"],  # Optional: tags
    )

    print("=" * 80)
    print("🚀 Starting Evolution with WandB Logging")
    print("=" * 80)
    print(f"📊 WandB Project: {evo_config.wandb_project}")
    print(f"🏃 Run Name: {evo_config.wandb_run_name}")
    print(f"🏝️ Islands: {db_config.num_islands}")
    print(f"📈 Generations: {evo_config.num_generations}")
    print("=" * 80)
    print()
    print("Metrics being tracked:")
    print(" • generation/score - Best score in each generation")
    print(" • generation/best_score_so_far - Global best score")
    print(" • generation/avg_score - Average score per generation")
    print(" • generation/num_correct - Number of correct programs")
    print()
    print("Per-island metrics:")
    # NOTE(review): the original used a mangled implicit string concatenation
    # here; reconstructed as one literal to match the island_1 line below.
    print(" • island_0/score, island_0/best_score")
    print(" • island_1/score, island_1/best_score")
    print()
    print("View results at: https://wandb.ai/")
    print("=" * 80)
    print()

    # Create and run evolution
    evo_runner = EvolutionRunner(
        evo_config=evo_config,
        job_config=job_config,
        db_config=db_config,
        verbose=True,
    )

    try:
        evo_runner.run()
        print()
        print("=" * 80)
        print("✅ Evolution completed successfully!")
        print("=" * 80)
        print()
        # Plain string (no placeholders) — was an unnecessary f-string.
        print("📊 View results on WandB:")
        if evo_runner.wandb_run:
            print(f" {evo_runner.wandb_run.url}")
        print()
    except KeyboardInterrupt:
        print()
        print("=" * 80)
        print("⚠️ Evolution interrupted by user")
        print("=" * 80)
    except Exception as e:
        print()
        print("=" * 80)
        print(f"❌ Evolution failed with error: {e}")
        print("=" * 80)
        raise


if __name__ == "__main__":
    main()