{
"Huggy": {
"checkpoints": [
{
"steps": 199868,
"file_path": "results/Huggy/Huggy/Huggy-199868.onnx",
"reward": 3.584040815547361,
"creation_time": 1698024038.326213,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199868.pt"
]
},
{
"steps": 399979,
"file_path": "results/Huggy/Huggy/Huggy-399979.onnx",
"reward": 3.4504592676054346,
"creation_time": 1698024293.7307656,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399979.pt"
]
},
{
"steps": 599987,
"file_path": "results/Huggy/Huggy/Huggy-599987.onnx",
"reward": 4.430177330970764,
"creation_time": 1698024551.8027718,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599987.pt"
]
},
{
"steps": 799964,
"file_path": "results/Huggy/Huggy/Huggy-799964.onnx",
"reward": 3.6356045820125162,
"creation_time": 1698024811.3465786,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799964.pt"
]
},
{
"steps": 999803,
"file_path": "results/Huggy/Huggy/Huggy-999803.onnx",
"reward": 3.581784037332381,
"creation_time": 1698025077.216644,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999803.pt"
]
},
{
"steps": 1199999,
"file_path": "results/Huggy/Huggy/Huggy-1199999.onnx",
"reward": 3.870035224182661,
"creation_time": 1698025337.4742882,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199999.pt"
]
},
{
"steps": 1399967,
"file_path": "results/Huggy/Huggy/Huggy-1399967.onnx",
"reward": 3.671557087709408,
"creation_time": 1698025593.3925312,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399967.pt"
]
},
{
"steps": 1599549,
"file_path": "results/Huggy/Huggy/Huggy-1599549.onnx",
"reward": 3.6941406977555107,
"creation_time": 1698025853.9928517,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599549.pt"
]
},
{
"steps": 1799962,
"file_path": "results/Huggy/Huggy/Huggy-1799962.onnx",
"reward": 3.845548536870387,
"creation_time": 1698026116.1884625,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799962.pt"
]
},
{
"steps": 1999953,
"file_path": "results/Huggy/Huggy/Huggy-1999953.onnx",
"reward": 3.8759397149085997,
"creation_time": 1698026388.5477939,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999953.pt"
]
},
{
"steps": 2000014,
"file_path": "results/Huggy/Huggy/Huggy-2000014.onnx",
"reward": 3.8538536700335415,
"creation_time": 1698026388.6592422,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000014.pt"
]
}
],
"final_checkpoint": {
"steps": 2000014,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.8538536700335415,
"creation_time": 1698026388.6592422,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000014.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}