{
"Huggy": {
"checkpoints": [
{
"steps": 199991,
"file_path": "results/Huggy2/Huggy/Huggy-199991.onnx",
"reward": 3.4632522804396495,
"creation_time": 1773162135.9058068,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199991.pt"
]
},
{
"steps": 399960,
"file_path": "results/Huggy2/Huggy/Huggy-399960.onnx",
"reward": 4.098400668094032,
"creation_time": 1773162394.089552,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399960.pt"
]
},
{
"steps": 599962,
"file_path": "results/Huggy2/Huggy/Huggy-599962.onnx",
"reward": 4.243434274897856,
"creation_time": 1773162657.6066413,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599962.pt"
]
},
{
"steps": 799994,
"file_path": "results/Huggy2/Huggy/Huggy-799994.onnx",
"reward": 3.717765840403552,
"creation_time": 1773162921.6012406,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799994.pt"
]
},
{
"steps": 999973,
"file_path": "results/Huggy2/Huggy/Huggy-999973.onnx",
"reward": 3.5846185789108276,
"creation_time": 1773163187.0580113,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999973.pt"
]
},
{
"steps": 1199932,
"file_path": "results/Huggy2/Huggy/Huggy-1199932.onnx",
"reward": 3.452671104355862,
"creation_time": 1773163453.39212,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199932.pt"
]
},
{
"steps": 1399910,
"file_path": "results/Huggy2/Huggy/Huggy-1399910.onnx",
"reward": 3.56216427418574,
"creation_time": 1773163715.9221354,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399910.pt"
]
},
{
"steps": 1599934,
"file_path": "results/Huggy2/Huggy/Huggy-1599934.onnx",
"reward": 3.5780154905577963,
"creation_time": 1773163982.0835497,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599934.pt"
]
},
{
"steps": 1799429,
"file_path": "results/Huggy2/Huggy/Huggy-1799429.onnx",
"reward": 3.88084121234715,
"creation_time": 1773164246.2242885,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799429.pt"
]
},
{
"steps": 1999991,
"file_path": "results/Huggy2/Huggy/Huggy-1999991.onnx",
"reward": 3.3162522315979004,
"creation_time": 1773164513.6644468,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999991.pt"
]
},
{
"steps": 2000102,
"file_path": "results/Huggy2/Huggy/Huggy-2000102.onnx",
"reward": 3.5845241149266562,
"creation_time": 1773164513.7762005,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000102.pt"
]
}
],
"final_checkpoint": {
"steps": 2000102,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.5845241149266562,
"creation_time": 1773164513.7762005,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000102.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}