{
"Huggy": {
"checkpoints": [
{
"steps": 199869,
"file_path": "results/Huggy2/Huggy/Huggy-199869.onnx",
"reward": 3.9310570347542857,
"creation_time": 1742998366.6648962,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199869.pt"
]
},
{
"steps": 399830,
"file_path": "results/Huggy2/Huggy/Huggy-399830.onnx",
"reward": 3.7706618110338845,
"creation_time": 1742998779.8506048,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399830.pt"
]
},
{
"steps": 599960,
"file_path": "results/Huggy2/Huggy/Huggy-599960.onnx",
"reward": 4.1419618129730225,
"creation_time": 1742999197.652521,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599960.pt"
]
},
{
"steps": 799905,
"file_path": "results/Huggy2/Huggy/Huggy-799905.onnx",
"reward": 3.8789012869199118,
"creation_time": 1742999604.4510307,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799905.pt"
]
},
{
"steps": 999862,
"file_path": "results/Huggy2/Huggy/Huggy-999862.onnx",
"reward": 4.0542660020291805,
"creation_time": 1743000026.3446934,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999862.pt"
]
},
{
"steps": 1199985,
"file_path": "results/Huggy2/Huggy/Huggy-1199985.onnx",
"reward": 3.7424153720631317,
"creation_time": 1743000443.7513483,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199985.pt"
]
},
{
"steps": 1399933,
"file_path": "results/Huggy2/Huggy/Huggy-1399933.onnx",
"reward": 1.387825886408488,
"creation_time": 1743000837.9982781,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399933.pt"
]
},
{
"steps": 1599982,
"file_path": "results/Huggy2/Huggy/Huggy-1599982.onnx",
"reward": 3.717770909406862,
"creation_time": 1743001218.3218472,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599982.pt"
]
},
{
"steps": 1799354,
"file_path": "results/Huggy2/Huggy/Huggy-1799354.onnx",
"reward": 3.7786192757742745,
"creation_time": 1743001603.40823,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799354.pt"
]
},
{
"steps": 1999997,
"file_path": "results/Huggy2/Huggy/Huggy-1999997.onnx",
"reward": 3.787991131606855,
"creation_time": 1743001984.5333993,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999997.pt"
]
},
{
"steps": 2000086,
"file_path": "results/Huggy2/Huggy/Huggy-2000086.onnx",
"reward": 3.8080041928169055,
"creation_time": 1743001984.6370673,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000086.pt"
]
}
],
"final_checkpoint": {
"steps": 2000086,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8080041928169055,
"creation_time": 1743001984.6370673,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000086.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}