{
"Huggy": {
"checkpoints": [
{
"steps": 199984,
"file_path": "results/Huggy/Huggy/Huggy-199984.onnx",
"reward": 3.58822490611384,
"creation_time": 1676332798.2028658,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199984.pt"
]
},
{
"steps": 399971,
"file_path": "results/Huggy/Huggy/Huggy-399971.onnx",
"reward": 3.711019727721143,
"creation_time": 1676333027.4156046,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399971.pt"
]
},
{
"steps": 599966,
"file_path": "results/Huggy/Huggy/Huggy-599966.onnx",
"reward": 3.058650666475296,
"creation_time": 1676333255.4265769,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599966.pt"
]
},
{
"steps": 799959,
"file_path": "results/Huggy/Huggy/Huggy-799959.onnx",
"reward": 4.188776282582964,
"creation_time": 1676333484.4624214,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799959.pt"
]
},
{
"steps": 999920,
"file_path": "results/Huggy/Huggy/Huggy-999920.onnx",
"reward": 3.5961453399658203,
"creation_time": 1676333717.8010254,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999920.pt"
]
},
{
"steps": 1199980,
"file_path": "results/Huggy/Huggy/Huggy-1199980.onnx",
"reward": 4.858606203838631,
"creation_time": 1676333949.9812891,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199980.pt"
]
},
{
"steps": 1399991,
"file_path": "results/Huggy/Huggy/Huggy-1399991.onnx",
"reward": 3.8100363575735465,
"creation_time": 1676334178.6423962,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399991.pt"
]
},
{
"steps": 1599991,
"file_path": "results/Huggy/Huggy/Huggy-1599991.onnx",
"reward": 4.014995852302264,
"creation_time": 1676334413.0130758,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599991.pt"
]
},
{
"steps": 1799513,
"file_path": "results/Huggy/Huggy/Huggy-1799513.onnx",
"reward": 4.098654067143798,
"creation_time": 1676334646.2556176,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799513.pt"
]
},
{
"steps": 1999650,
"file_path": "results/Huggy/Huggy/Huggy-1999650.onnx",
"reward": 4.339677048766094,
"creation_time": 1676334879.4929357,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999650.pt"
]
},
{
"steps": 2000400,
"file_path": "results/Huggy/Huggy/Huggy-2000400.onnx",
"reward": 4.035115465521812,
"creation_time": 1676334879.6331363,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000400.pt"
]
}
],
"final_checkpoint": {
"steps": 2000400,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.035115465521812,
"creation_time": 1676334879.6331363,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000400.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}