{
  "algorithm": "PPO",
  "environment": "LunarLander-v2",
  "framework": "stable-baselines3",
  "upload_date": "2025-10-01T18:47:11.883129",
  "success_rate": "90%+",
  "average_reward": 219.9
}