{
"Huggy": {
"checkpoints": [
{
"steps": 199950,
"file_path": "results/Huggy/Huggy/Huggy-199950.onnx",
"reward": 3.4062623959868703,
"creation_time": 1702886187.1272273,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199950.pt"
]
},
{
"steps": 399808,
"file_path": "results/Huggy/Huggy/Huggy-399808.onnx",
"reward": 3.611920095625378,
"creation_time": 1702886435.6585555,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399808.pt"
]
},
{
"steps": 599986,
"file_path": "results/Huggy/Huggy/Huggy-599986.onnx",
"reward": 3.692910841533116,
"creation_time": 1702886677.354494,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599986.pt"
]
},
{
"steps": 799656,
"file_path": "results/Huggy/Huggy/Huggy-799656.onnx",
"reward": 3.6354684107833437,
"creation_time": 1702886921.9433012,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799656.pt"
]
},
{
"steps": 999727,
"file_path": "results/Huggy/Huggy/Huggy-999727.onnx",
"reward": 3.7728435299920697,
"creation_time": 1702887178.942856,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999727.pt"
]
},
{
"steps": 1199936,
"file_path": "results/Huggy/Huggy/Huggy-1199936.onnx",
"reward": 3.7144682962786066,
"creation_time": 1702887439.1126673,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199936.pt"
]
},
{
"steps": 1399933,
"file_path": "results/Huggy/Huggy/Huggy-1399933.onnx",
"reward": 4.439088561318138,
"creation_time": 1702887693.1730077,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399933.pt"
]
},
{
"steps": 1599256,
"file_path": "results/Huggy/Huggy/Huggy-1599256.onnx",
"reward": 3.7733156386908,
"creation_time": 1702887950.2699902,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599256.pt"
]
},
{
"steps": 1799904,
"file_path": "results/Huggy/Huggy/Huggy-1799904.onnx",
"reward": 3.8234311088367745,
"creation_time": 1702888210.9063537,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799904.pt"
]
},
{
"steps": 1999929,
"file_path": "results/Huggy/Huggy/Huggy-1999929.onnx",
"reward": 3.2045689699601154,
"creation_time": 1702888468.704659,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999929.pt"
]
},
{
"steps": 2000043,
"file_path": "results/Huggy/Huggy/Huggy-2000043.onnx",
"reward": 3.232327241897583,
"creation_time": 1702888468.8293793,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000043.pt"
]
}
],
"final_checkpoint": {
"steps": 2000043,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.232327241897583,
"creation_time": 1702888468.8293793,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000043.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.2+cu121"
}
}