{
"Huggy": {
"checkpoints": [
{
"steps": 199744,
"file_path": "results/Huggy/Huggy/Huggy-199744.onnx",
"reward": 3.1445085446039838,
"creation_time": 1689683838.4455435,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199744.pt"
]
},
{
"steps": 399967,
"file_path": "results/Huggy/Huggy/Huggy-399967.onnx",
"reward": 3.764233831848417,
"creation_time": 1689684121.1853437,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399967.pt"
]
},
{
"steps": 599853,
"file_path": "results/Huggy/Huggy/Huggy-599853.onnx",
"reward": 4.138824885541743,
"creation_time": 1689684406.6459935,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599853.pt"
]
},
{
"steps": 799936,
"file_path": "results/Huggy/Huggy/Huggy-799936.onnx",
"reward": 3.7733950419794944,
"creation_time": 1689684687.1543565,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799936.pt"
]
},
{
"steps": 999926,
"file_path": "results/Huggy/Huggy/Huggy-999926.onnx",
"reward": 3.8644425283130417,
"creation_time": 1689684974.8264644,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999926.pt"
]
},
{
"steps": 1199909,
"file_path": "results/Huggy/Huggy/Huggy-1199909.onnx",
"reward": 3.9487316558758416,
"creation_time": 1689685260.5246148,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199909.pt"
]
},
{
"steps": 1399994,
"file_path": "results/Huggy/Huggy/Huggy-1399994.onnx",
"reward": 3.8440502775731935,
"creation_time": 1689685534.9843645,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399994.pt"
]
},
{
"steps": 1599919,
"file_path": "results/Huggy/Huggy/Huggy-1599919.onnx",
"reward": 3.9067077037615654,
"creation_time": 1689685816.996233,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599919.pt"
]
},
{
"steps": 1799963,
"file_path": "results/Huggy/Huggy/Huggy-1799963.onnx",
"reward": 3.767394603252411,
"creation_time": 1689686103.0654178,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799963.pt"
]
},
{
"steps": 1999724,
"file_path": "results/Huggy/Huggy/Huggy-1999724.onnx",
"reward": 4.048731521684296,
"creation_time": 1689686395.0270617,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999724.pt"
]
},
{
"steps": 2000474,
"file_path": "results/Huggy/Huggy/Huggy-2000474.onnx",
"reward": 3.892103223800659,
"creation_time": 1689686395.197127,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000474.pt"
]
}
],
"final_checkpoint": {
"steps": 2000474,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.892103223800659,
"creation_time": 1689686395.197127,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000474.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}