{
"Huggy": {
"checkpoints": [
{
"steps": 199893,
"file_path": "results/Huggy/Huggy/Huggy-199893.onnx",
"reward": 3.56051266770209,
"creation_time": 1679509467.2711146,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199893.pt"
]
},
{
"steps": 399933,
"file_path": "results/Huggy/Huggy/Huggy-399933.onnx",
"reward": 3.996859548553344,
"creation_time": 1679509699.4646158,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399933.pt"
]
},
{
"steps": 599780,
"file_path": "results/Huggy/Huggy/Huggy-599780.onnx",
"reward": 4.039203202724456,
"creation_time": 1679509933.9171326,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599780.pt"
]
},
{
"steps": 799953,
"file_path": "results/Huggy/Huggy/Huggy-799953.onnx",
"reward": 3.921503622664346,
"creation_time": 1679510163.6534188,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799953.pt"
]
},
{
"steps": 999931,
"file_path": "results/Huggy/Huggy/Huggy-999931.onnx",
"reward": 4.028045606022038,
"creation_time": 1679510399.241932,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999931.pt"
]
},
{
"steps": 1199997,
"file_path": "results/Huggy/Huggy/Huggy-1199997.onnx",
"reward": 3.5647477801416962,
"creation_time": 1679510683.080957,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199997.pt"
]
},
{
"steps": 1399964,
"file_path": "results/Huggy/Huggy/Huggy-1399964.onnx",
"reward": 3.7837401833736672,
"creation_time": 1679510952.0855079,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399964.pt"
]
},
{
"steps": 1599979,
"file_path": "results/Huggy/Huggy/Huggy-1599979.onnx",
"reward": 3.616572165644014,
"creation_time": 1679511218.559148,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599979.pt"
]
},
{
"steps": 1799913,
"file_path": "results/Huggy/Huggy/Huggy-1799913.onnx",
"reward": 3.614098554565793,
"creation_time": 1679511485.9608955,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799913.pt"
]
},
{
"steps": 1999937,
"file_path": "results/Huggy/Huggy/Huggy-1999937.onnx",
"reward": 3.298995651304722,
"creation_time": 1679511727.138803,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999937.pt"
]
},
{
"steps": 2000018,
"file_path": "results/Huggy/Huggy/Huggy-2000018.onnx",
"reward": 3.3370324553865376,
"creation_time": 1679511727.2575102,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000018.pt"
]
}
],
"final_checkpoint": {
"steps": 2000018,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.3370324553865376,
"creation_time": 1679511727.2575102,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000018.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}