{
"Huggy": {
"checkpoints": [
{
"steps": 199678,
"file_path": "results/Huggy/Huggy/Huggy-199678.onnx",
"reward": 3.491595530719088,
"creation_time": 1702177741.3142295,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199678.pt"
]
},
{
"steps": 399985,
"file_path": "results/Huggy/Huggy/Huggy-399985.onnx",
"reward": 3.829529643058777,
"creation_time": 1702177992.2308664,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399985.pt"
]
},
{
"steps": 599998,
"file_path": "results/Huggy/Huggy/Huggy-599998.onnx",
"reward": 2.5906599521636964,
"creation_time": 1702178245.3571653,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599998.pt"
]
},
{
"steps": 799964,
"file_path": "results/Huggy/Huggy/Huggy-799964.onnx",
"reward": 3.3942061483470445,
"creation_time": 1702178496.6901484,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799964.pt"
]
},
{
"steps": 999882,
"file_path": "results/Huggy/Huggy/Huggy-999882.onnx",
"reward": 3.7531710542658323,
"creation_time": 1702178754.5488563,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999882.pt"
]
},
{
"steps": 1199961,
"file_path": "results/Huggy/Huggy/Huggy-1199961.onnx",
"reward": 3.732652371459537,
"creation_time": 1702179012.3293362,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199961.pt"
]
},
{
"steps": 1399980,
"file_path": "results/Huggy/Huggy/Huggy-1399980.onnx",
"reward": 3.7232633627009117,
"creation_time": 1702179265.0752974,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399980.pt"
]
},
{
"steps": 1599939,
"file_path": "results/Huggy/Huggy/Huggy-1599939.onnx",
"reward": 3.633005137591399,
"creation_time": 1702179524.5820057,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599939.pt"
]
},
{
"steps": 1799862,
"file_path": "results/Huggy/Huggy/Huggy-1799862.onnx",
"reward": 3.6636668590531833,
"creation_time": 1702179784.5766828,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799862.pt"
]
},
{
"steps": 1999995,
"file_path": "results/Huggy/Huggy/Huggy-1999995.onnx",
"reward": 3.9332037766774497,
"creation_time": 1702180043.5431097,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999995.pt"
]
},
{
"steps": 2000047,
"file_path": "results/Huggy/Huggy/Huggy-2000047.onnx",
"reward": 3.2103559970855713,
"creation_time": 1702180043.6584086,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000047.pt"
]
}
],
"final_checkpoint": {
"steps": 2000047,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.2103559970855713,
"creation_time": 1702180043.6584086,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000047.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.1+cu121"
}
}