{
"Huggy": {
"checkpoints": [
{
"steps": 199970,
"file_path": "results/Huggy/Huggy/Huggy-199970.onnx",
"reward": 3.6497249968590273,
"creation_time": 1687452706.206084,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199970.pt"
]
},
{
"steps": 399902,
"file_path": "results/Huggy/Huggy/Huggy-399902.onnx",
"reward": 3.6954685220351586,
"creation_time": 1687452950.0558646,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399902.pt"
]
},
{
"steps": 599924,
"file_path": "results/Huggy/Huggy/Huggy-599924.onnx",
"reward": 3.9866218185424804,
"creation_time": 1687453194.965177,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599924.pt"
]
},
{
"steps": 799892,
"file_path": "results/Huggy/Huggy/Huggy-799892.onnx",
"reward": 3.6192790958143415,
"creation_time": 1687453437.2529004,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799892.pt"
]
},
{
"steps": 999896,
"file_path": "results/Huggy/Huggy/Huggy-999896.onnx",
"reward": 3.92352103185253,
"creation_time": 1687453683.1904168,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999896.pt"
]
},
{
"steps": 1199906,
"file_path": "results/Huggy/Huggy/Huggy-1199906.onnx",
"reward": 3.5534893880287806,
"creation_time": 1687453929.3804054,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199906.pt"
]
},
{
"steps": 1399770,
"file_path": "results/Huggy/Huggy/Huggy-1399770.onnx",
"reward": 3.8893091678619385,
"creation_time": 1687454174.014185,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399770.pt"
]
},
{
"steps": 1599960,
"file_path": "results/Huggy/Huggy/Huggy-1599960.onnx",
"reward": 3.421684212928271,
"creation_time": 1687454413.1818793,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599960.pt"
]
},
{
"steps": 1799896,
"file_path": "results/Huggy/Huggy/Huggy-1799896.onnx",
"reward": 3.4766246378421783,
"creation_time": 1687454657.7108314,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799896.pt"
]
},
{
"steps": 1999813,
"file_path": "results/Huggy/Huggy/Huggy-1999813.onnx",
"reward": 2.8525150775909425,
"creation_time": 1687454902.6542892,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999813.pt"
]
},
{
"steps": 2000563,
"file_path": "results/Huggy/Huggy/Huggy-2000563.onnx",
"reward": 1.850028117497762,
"creation_time": 1687454902.8009462,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000563.pt"
]
}
],
"final_checkpoint": {
"steps": 2000563,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 1.850028117497762,
"creation_time": 1687454902.8009462,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000563.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}