{
"Huggy": {
"checkpoints": [
{
"steps": 199899,
"file_path": "results/Huggy/Huggy/Huggy-199899.onnx",
"reward": 4.056700628454035,
"creation_time": 1684218735.6357465,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199899.pt"
]
},
{
"steps": 399881,
"file_path": "results/Huggy/Huggy/Huggy-399881.onnx",
"reward": 3.542244958380858,
"creation_time": 1684218989.2083433,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399881.pt"
]
},
{
"steps": 599902,
"file_path": "results/Huggy/Huggy/Huggy-599902.onnx",
"reward": 3.124928891658783,
"creation_time": 1684219243.1542647,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599902.pt"
]
},
{
"steps": 799922,
"file_path": "results/Huggy/Huggy/Huggy-799922.onnx",
"reward": 3.6595023368181807,
"creation_time": 1684219499.0046482,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799922.pt"
]
},
{
"steps": 999964,
"file_path": "results/Huggy/Huggy/Huggy-999964.onnx",
"reward": 3.9807418529634124,
"creation_time": 1684219757.7955658,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999964.pt"
]
},
{
"steps": 1199998,
"file_path": "results/Huggy/Huggy/Huggy-1199998.onnx",
"reward": 3.74494939492299,
"creation_time": 1684220016.7788725,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199998.pt"
]
},
{
"steps": 1399997,
"file_path": "results/Huggy/Huggy/Huggy-1399997.onnx",
"reward": 4.4208059840732155,
"creation_time": 1684220279.0472212,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399997.pt"
]
},
{
"steps": 1599953,
"file_path": "results/Huggy/Huggy/Huggy-1599953.onnx",
"reward": 3.5696793566636047,
"creation_time": 1684220539.8933442,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599953.pt"
]
},
{
"steps": 1799949,
"file_path": "results/Huggy/Huggy/Huggy-1799949.onnx",
"reward": 3.4926841686356744,
"creation_time": 1684220800.8634973,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799949.pt"
]
},
{
"steps": 1999998,
"file_path": "results/Huggy/Huggy/Huggy-1999998.onnx",
"reward": 3.5337851228136006,
"creation_time": 1684221063.162038,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999998.pt"
]
},
{
"steps": 2000119,
"file_path": "results/Huggy/Huggy/Huggy-2000119.onnx",
"reward": 3.6283921599388123,
"creation_time": 1684221063.2912507,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000119.pt"
]
}
],
"final_checkpoint": {
"steps": 2000119,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.6283921599388123,
"creation_time": 1684221063.2912507,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000119.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}