{
"Huggy": {
"checkpoints": [
{
"steps": 199701,
"file_path": "results/Huggy2/Huggy/Huggy-199701.onnx",
"reward": 3.2605453729629517,
"creation_time": 1770564914.6707227,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199701.pt"
]
},
{
"steps": 399962,
"file_path": "results/Huggy2/Huggy/Huggy-399962.onnx",
"reward": 3.9731755124198065,
"creation_time": 1770565174.7191348,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399962.pt"
]
},
{
"steps": 599951,
"file_path": "results/Huggy2/Huggy/Huggy-599951.onnx",
"reward": 3.718696775890532,
"creation_time": 1770565438.5947728,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599951.pt"
]
},
{
"steps": 799976,
"file_path": "results/Huggy2/Huggy/Huggy-799976.onnx",
"reward": 3.727033668479254,
"creation_time": 1770565703.162327,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799976.pt"
]
},
{
"steps": 999361,
"file_path": "results/Huggy2/Huggy/Huggy-999361.onnx",
"reward": 3.602728424128145,
"creation_time": 1770565967.4545724,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999361.pt"
]
},
{
"steps": 1199989,
"file_path": "results/Huggy2/Huggy/Huggy-1199989.onnx",
"reward": 3.7365232731614793,
"creation_time": 1770566237.1129532,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199989.pt"
]
},
{
"steps": 1399973,
"file_path": "results/Huggy2/Huggy/Huggy-1399973.onnx",
"reward": 2.6284845590591432,
"creation_time": 1770566496.343397,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399973.pt"
]
},
{
"steps": 1599561,
"file_path": "results/Huggy2/Huggy/Huggy-1599561.onnx",
"reward": 3.7449142902754664,
"creation_time": 1770566750.5419216,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599561.pt"
]
},
{
"steps": 1799916,
"file_path": "results/Huggy2/Huggy/Huggy-1799916.onnx",
"reward": 3.615269157561389,
"creation_time": 1770567011.075386,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799916.pt"
]
},
{
"steps": 1999981,
"file_path": "results/Huggy2/Huggy/Huggy-1999981.onnx",
"reward": 4.09378256542342,
"creation_time": 1770567272.1833022,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999981.pt"
]
},
{
"steps": 2000050,
"file_path": "results/Huggy2/Huggy/Huggy-2000050.onnx",
"reward": 4.080278079177058,
"creation_time": 1770567272.2862375,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000050.pt"
]
}
],
"final_checkpoint": {
"steps": 2000050,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.080278079177058,
"creation_time": 1770567272.2862375,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000050.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}