{
"Huggy": {
"checkpoints": [
{
"steps": 199759,
"file_path": "results/Huggy/Huggy/Huggy-199759.onnx",
"reward": 3.350345881168659,
"creation_time": 1704641227.2462058,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199759.pt"
]
},
{
"steps": 399874,
"file_path": "results/Huggy/Huggy/Huggy-399874.onnx",
"reward": 3.7032485558436465,
"creation_time": 1704641476.4657404,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399874.pt"
]
},
{
"steps": 599989,
"file_path": "results/Huggy/Huggy/Huggy-599989.onnx",
"reward": 3.576109935839971,
"creation_time": 1704641727.3079703,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599989.pt"
]
},
{
"steps": 799912,
"file_path": "results/Huggy/Huggy/Huggy-799912.onnx",
"reward": 3.5677145110761246,
"creation_time": 1704641972.7965388,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799912.pt"
]
},
{
"steps": 999953,
"file_path": "results/Huggy/Huggy/Huggy-999953.onnx",
"reward": 4.0176340124259395,
"creation_time": 1704642224.7006278,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999953.pt"
]
},
{
"steps": 1199960,
"file_path": "results/Huggy/Huggy/Huggy-1199960.onnx",
"reward": 3.259038219326421,
"creation_time": 1704642479.501692,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199960.pt"
]
},
{
"steps": 1399835,
"file_path": "results/Huggy/Huggy/Huggy-1399835.onnx",
"reward": 3.714304764672081,
"creation_time": 1704642730.6170914,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399835.pt"
]
},
{
"steps": 1599794,
"file_path": "results/Huggy/Huggy/Huggy-1599794.onnx",
"reward": 3.9094237511727346,
"creation_time": 1704642981.4910452,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599794.pt"
]
},
{
"steps": 1799500,
"file_path": "results/Huggy/Huggy/Huggy-1799500.onnx",
"reward": 4.077949463504634,
"creation_time": 1704643235.0381804,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799500.pt"
]
},
{
"steps": 1999977,
"file_path": "results/Huggy/Huggy/Huggy-1999977.onnx",
"reward": 5.060395081837972,
"creation_time": 1704643483.5451283,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999977.pt"
]
},
{
"steps": 2000092,
"file_path": "results/Huggy/Huggy/Huggy-2000092.onnx",
"reward": 5.054141879081726,
"creation_time": 1704643483.6754365,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000092.pt"
]
}
],
"final_checkpoint": {
"steps": 2000092,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 5.054141879081726,
"creation_time": 1704643483.6754365,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000092.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.2+cu121"
}
}