{
"Huggy": {
"checkpoints": [
{
"steps": 199935,
"file_path": "results/Huggy2/Huggy/Huggy-199935.onnx",
"reward": 3.1233518037243164,
"creation_time": 1753204834.8137565,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199935.pt"
]
},
{
"steps": 399968,
"file_path": "results/Huggy2/Huggy/Huggy-399968.onnx",
"reward": 3.8062544093615767,
"creation_time": 1753205085.0386803,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399968.pt"
]
},
{
"steps": 599954,
"file_path": "results/Huggy2/Huggy/Huggy-599954.onnx",
"reward": 4.206334585235233,
"creation_time": 1753205334.765005,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599954.pt"
]
},
{
"steps": 799982,
"file_path": "results/Huggy2/Huggy/Huggy-799982.onnx",
"reward": 3.722755788495301,
"creation_time": 1753205582.9367814,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799982.pt"
]
},
{
"steps": 999921,
"file_path": "results/Huggy2/Huggy/Huggy-999921.onnx",
"reward": 3.4499849299589793,
"creation_time": 1753205833.5425766,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999921.pt"
]
},
{
"steps": 1199925,
"file_path": "results/Huggy2/Huggy/Huggy-1199925.onnx",
"reward": 3.8014868270783198,
"creation_time": 1753206081.4332507,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199925.pt"
]
},
{
"steps": 1399969,
"file_path": "results/Huggy2/Huggy/Huggy-1399969.onnx",
"reward": 3.6429585885371405,
"creation_time": 1753206326.6312425,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399969.pt"
]
},
{
"steps": 1599948,
"file_path": "results/Huggy2/Huggy/Huggy-1599948.onnx",
"reward": 3.4172163092173062,
"creation_time": 1753206572.6052222,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599948.pt"
]
},
{
"steps": 1799969,
"file_path": "results/Huggy2/Huggy/Huggy-1799969.onnx",
"reward": 3.5386639772317348,
"creation_time": 1753206820.0156407,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799969.pt"
]
},
{
"steps": 1999604,
"file_path": "results/Huggy2/Huggy/Huggy-1999604.onnx",
"reward": 3.2544048547744753,
"creation_time": 1753207068.5625432,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999604.pt"
]
},
{
"steps": 2000354,
"file_path": "results/Huggy2/Huggy/Huggy-2000354.onnx",
"reward": 2.6505732969804243,
"creation_time": 1753207068.7026856,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000354.pt"
]
}
],
"final_checkpoint": {
"steps": 2000354,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 2.6505732969804243,
"creation_time": 1753207068.7026856,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000354.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.1+cu126"
}
}