{
"Huggy": {
"checkpoints": [
{
"steps": 199870,
"file_path": "results/Huggy/Huggy/Huggy-199870.onnx",
"reward": 3.1015315312605636,
"creation_time": 1676560237.0311213,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199870.pt"
]
},
{
"steps": 399994,
"file_path": "results/Huggy/Huggy/Huggy-399994.onnx",
"reward": 4.064119425686923,
"creation_time": 1676560469.3268383,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399994.pt"
]
},
{
"steps": 599969,
"file_path": "results/Huggy/Huggy/Huggy-599969.onnx",
"reward": 3.834944183176214,
"creation_time": 1676560706.474293,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599969.pt"
]
},
{
"steps": 799968,
"file_path": "results/Huggy/Huggy/Huggy-799968.onnx",
"reward": 4.1026964552664875,
"creation_time": 1676560941.1490257,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799968.pt"
]
},
{
"steps": 999911,
"file_path": "results/Huggy/Huggy/Huggy-999911.onnx",
"reward": 4.159994662976732,
"creation_time": 1676561181.322819,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999911.pt"
]
},
{
"steps": 1199997,
"file_path": "results/Huggy/Huggy/Huggy-1199997.onnx",
"reward": 3.771703551116499,
"creation_time": 1676561421.988563,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199997.pt"
]
},
{
"steps": 1399945,
"file_path": "results/Huggy/Huggy/Huggy-1399945.onnx",
"reward": 3.8846617298466817,
"creation_time": 1676561662.7334049,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399945.pt"
]
},
{
"steps": 1599919,
"file_path": "results/Huggy/Huggy/Huggy-1599919.onnx",
"reward": 3.805255610731584,
"creation_time": 1676561898.5747435,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599919.pt"
]
},
{
"steps": 1799977,
"file_path": "results/Huggy/Huggy/Huggy-1799977.onnx",
"reward": 3.836373483912545,
"creation_time": 1676562138.287129,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799977.pt"
]
},
{
"steps": 1999846,
"file_path": "results/Huggy/Huggy/Huggy-1999846.onnx",
"reward": 4.151238728213954,
"creation_time": 1676562376.88345,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999846.pt"
]
},
{
"steps": 2000020,
"file_path": "results/Huggy/Huggy/Huggy-2000020.onnx",
"reward": 4.175666545232137,
"creation_time": 1676562377.0100806,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000020.pt"
]
}
],
"final_checkpoint": {
"steps": 2000020,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.175666545232137,
"creation_time": 1676562377.0100806,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000020.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}