{
"Huggy": {
"checkpoints": [
{
"steps": 199850,
"file_path": "results/Huggy/Huggy/Huggy-199850.onnx",
"reward": 3.360575611629183,
"creation_time": 1674198378.2811687,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199850.pt"
]
},
{
"steps": 399981,
"file_path": "results/Huggy/Huggy/Huggy-399981.onnx",
"reward": 3.305669616711767,
"creation_time": 1674198468.1685781,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399981.pt"
]
},
{
"steps": 599955,
"file_path": "results/Huggy/Huggy/Huggy-599955.onnx",
"reward": 3.924845902543319,
"creation_time": 1674198561.5125284,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599955.pt"
]
},
{
"steps": 799997,
"file_path": "results/Huggy/Huggy/Huggy-799997.onnx",
"reward": 3.8582855585643223,
"creation_time": 1674198652.5328631,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799997.pt"
]
},
{
"steps": 999954,
"file_path": "results/Huggy/Huggy/Huggy-999954.onnx",
"reward": 4.259268446999081,
"creation_time": 1674198745.6114724,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999954.pt"
]
},
{
"steps": 1199964,
"file_path": "results/Huggy/Huggy/Huggy-1199964.onnx",
"reward": 4.231335049090178,
"creation_time": 1674198838.6477885,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199964.pt"
]
},
{
"steps": 1399956,
"file_path": "results/Huggy/Huggy/Huggy-1399956.onnx",
"reward": 4.217164802551269,
"creation_time": 1674198931.9118323,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399956.pt"
]
},
{
"steps": 1599938,
"file_path": "results/Huggy/Huggy/Huggy-1599938.onnx",
"reward": 3.6899434136132063,
"creation_time": 1674199023.7747047,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599938.pt"
]
},
{
"steps": 1799984,
"file_path": "results/Huggy/Huggy/Huggy-1799984.onnx",
"reward": 3.707051181916109,
"creation_time": 1674199117.0911868,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799984.pt"
]
},
{
"steps": 1999973,
"file_path": "results/Huggy/Huggy/Huggy-1999973.onnx",
"reward": 3.4578677025334588,
"creation_time": 1674199210.8728452,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999973.pt"
]
},
{
"steps": 2000056,
"file_path": "results/Huggy/Huggy/Huggy-2000056.onnx",
"reward": 3.488679931561152,
"creation_time": 1674199210.9388735,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000056.pt"
]
}
],
"final_checkpoint": {
"steps": 2000056,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.488679931561152,
"creation_time": 1674199210.9388735,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000056.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}