ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199886,
                "file_path": "results/Huggy/Huggy/Huggy-199886.onnx",
                "reward": 3.233296436922891,
                "creation_time": 1690298503.0920918,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199886.pt"
                ]
            },
            {
                "steps": 399901,
                "file_path": "results/Huggy/Huggy/Huggy-399901.onnx",
                "reward": 3.76595734591995,
                "creation_time": 1690298744.2579286,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399901.pt"
                ]
            },
            {
                "steps": 599789,
                "file_path": "results/Huggy/Huggy/Huggy-599789.onnx",
                "reward": 3.6810237073898318,
                "creation_time": 1690298988.1089058,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599789.pt"
                ]
            },
            {
                "steps": 799990,
                "file_path": "results/Huggy/Huggy/Huggy-799990.onnx",
                "reward": 3.818647909097831,
                "creation_time": 1690299234.357346,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799990.pt"
                ]
            },
            {
                "steps": 999923,
                "file_path": "results/Huggy/Huggy/Huggy-999923.onnx",
                "reward": 3.9483550096813,
                "creation_time": 1690299485.854431,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999923.pt"
                ]
            },
            {
                "steps": 1199885,
                "file_path": "results/Huggy/Huggy/Huggy-1199885.onnx",
                "reward": 3.8949550835483047,
                "creation_time": 1690299740.052826,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199885.pt"
                ]
            },
            {
                "steps": 1399970,
                "file_path": "results/Huggy/Huggy/Huggy-1399970.onnx",
                "reward": 3.8955499362527277,
                "creation_time": 1690299999.3565803,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399970.pt"
                ]
            },
            {
                "steps": 1599911,
                "file_path": "results/Huggy/Huggy/Huggy-1599911.onnx",
                "reward": 3.9088350880745404,
                "creation_time": 1690300256.2972302,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599911.pt"
                ]
            },
            {
                "steps": 1799969,
                "file_path": "results/Huggy/Huggy/Huggy-1799969.onnx",
                "reward": 3.8797617698537894,
                "creation_time": 1690300514.151814,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799969.pt"
                ]
            },
            {
                "steps": 1999907,
                "file_path": "results/Huggy/Huggy/Huggy-1999907.onnx",
                "reward": 3.370636321604252,
                "creation_time": 1690300779.9701304,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999907.pt"
                ]
            },
            {
                "steps": 2000016,
                "file_path": "results/Huggy/Huggy/Huggy-2000016.onnx",
                "reward": 3.41414407527808,
                "creation_time": 1690300780.0966208,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000016.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000016,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.41414407527808,
            "creation_time": 1690300780.0966208,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000016.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}