{
"Huggy": {
"checkpoints": [
{
"steps": 199841,
"file_path": "results/Huggy2/Huggy/Huggy-199841.onnx",
"reward": 3.705980376473495,
"creation_time": 1722511350.0945785,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199841.pt"
]
},
{
"steps": 399864,
"file_path": "results/Huggy2/Huggy/Huggy-399864.onnx",
"reward": 3.7404073073583493,
"creation_time": 1722511580.7063284,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399864.pt"
]
},
{
"steps": 599884,
"file_path": "results/Huggy2/Huggy/Huggy-599884.onnx",
"reward": 3.117369299843198,
"creation_time": 1722511815.8560889,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599884.pt"
]
},
{
"steps": 799960,
"file_path": "results/Huggy2/Huggy/Huggy-799960.onnx",
"reward": 3.793547558122211,
"creation_time": 1722512047.6637642,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799960.pt"
]
},
{
"steps": 999294,
"file_path": "results/Huggy2/Huggy/Huggy-999294.onnx",
"reward": 3.5910532438025182,
"creation_time": 1722512282.4304438,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999294.pt"
]
},
{
"steps": 1199954,
"file_path": "results/Huggy2/Huggy/Huggy-1199954.onnx",
"reward": 4.140930661968157,
"creation_time": 1722512518.9686956,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199954.pt"
]
},
{
"steps": 1399995,
"file_path": "results/Huggy2/Huggy/Huggy-1399995.onnx",
"reward": 3.773957911697594,
"creation_time": 1722512749.360848,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399995.pt"
]
},
{
"steps": 1401988,
"file_path": "results/Huggy2/Huggy/Huggy-1401988.onnx",
"reward": 3.904681622982025,
"creation_time": 1722512755.2275996,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1401988.pt"
]
},
{
"steps": 1599936,
"file_path": "results/Huggy2/Huggy/Huggy-1599936.onnx",
"reward": 3.612091050043211,
"creation_time": 1722513246.8450406,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599936.pt"
]
},
{
"steps": 1799950,
"file_path": "results/Huggy2/Huggy/Huggy-1799950.onnx",
"reward": 4.151727098427462,
"creation_time": 1722513483.4980226,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799950.pt"
]
},
{
"steps": 1999919,
"file_path": "results/Huggy2/Huggy/Huggy-1999919.onnx",
"reward": 3.2546598660318473,
"creation_time": 1722513719.0368397,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999919.pt"
]
},
{
"steps": 2000015,
"file_path": "results/Huggy2/Huggy/Huggy-2000015.onnx",
"reward": 3.3931681752204894,
"creation_time": 1722513719.1548002,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000015.pt"
]
}
],
"final_checkpoint": {
"steps": 2000015,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.3931681752204894,
"creation_time": 1722513719.1548002,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000015.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.1+cu121"
}
}