{
"Huggy": {
"checkpoints": [
{
"steps": 199809,
"file_path": "results/Huggy/Huggy/Huggy-199809.onnx",
"reward": 3.2781962293844957,
"creation_time": 1705386970.3640604,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199809.pt"
]
},
{
"steps": 399952,
"file_path": "results/Huggy/Huggy/Huggy-399952.onnx",
"reward": 3.256965451974135,
"creation_time": 1705387209.7832837,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399952.pt"
]
},
{
"steps": 599762,
"file_path": "results/Huggy/Huggy/Huggy-599762.onnx",
"reward": 4.190524910177503,
"creation_time": 1705387450.4201872,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599762.pt"
]
},
{
"steps": 799994,
"file_path": "results/Huggy/Huggy/Huggy-799994.onnx",
"reward": 3.9599131621471066,
"creation_time": 1705387686.3415945,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799994.pt"
]
},
{
"steps": 999969,
"file_path": "results/Huggy/Huggy/Huggy-999969.onnx",
"reward": 3.825522133644591,
"creation_time": 1705387925.901796,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999969.pt"
]
},
{
"steps": 1199857,
"file_path": "results/Huggy/Huggy/Huggy-1199857.onnx",
"reward": 3.493637527227402,
"creation_time": 1705388166.0105116,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199857.pt"
]
},
{
"steps": 1399956,
"file_path": "results/Huggy/Huggy/Huggy-1399956.onnx",
"reward": 3.5334275143869807,
"creation_time": 1705388401.4918058,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399956.pt"
]
},
{
"steps": 1599392,
"file_path": "results/Huggy/Huggy/Huggy-1599392.onnx",
"reward": 3.842052171707153,
"creation_time": 1705388642.6015437,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599392.pt"
]
},
{
"steps": 1799968,
"file_path": "results/Huggy/Huggy/Huggy-1799968.onnx",
"reward": 3.545014360057774,
"creation_time": 1705388883.1641817,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799968.pt"
]
},
{
"steps": 1999335,
"file_path": "results/Huggy/Huggy/Huggy-1999335.onnx",
"reward": 3.7656857443578318,
"creation_time": 1705389121.4628313,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999335.pt"
]
},
{
"steps": 2000085,
"file_path": "results/Huggy/Huggy/Huggy-2000085.onnx",
"reward": 3.7334842544105187,
"creation_time": 1705389121.601453,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000085.pt"
]
}
],
"final_checkpoint": {
"steps": 2000085,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7334842544105187,
"creation_time": 1705389121.601453,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000085.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.2+cu121"
}
}