{
"Huggy": {
"checkpoints": [
{
"steps": 199918,
"file_path": "results/Huggy2/Huggy/Huggy-199918.onnx",
"reward": 3.303909224177164,
"creation_time": 1721671813.4544485,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199918.pt"
]
},
{
"steps": 399971,
"file_path": "results/Huggy2/Huggy/Huggy-399971.onnx",
"reward": 3.619380308409869,
"creation_time": 1721672045.9198825,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399971.pt"
]
},
{
"steps": 599986,
"file_path": "results/Huggy2/Huggy/Huggy-599986.onnx",
"reward": 3.547675359707612,
"creation_time": 1721672282.211582,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599986.pt"
]
},
{
"steps": 799975,
"file_path": "results/Huggy2/Huggy/Huggy-799975.onnx",
"reward": 4.037158878012137,
"creation_time": 1721672517.6105616,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799975.pt"
]
},
{
"steps": 999916,
"file_path": "results/Huggy2/Huggy/Huggy-999916.onnx",
"reward": 3.9065602137928916,
"creation_time": 1721672759.9834232,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999916.pt"
]
},
{
"steps": 1199950,
"file_path": "results/Huggy2/Huggy/Huggy-1199950.onnx",
"reward": 4.207437417086433,
"creation_time": 1721673004.9655879,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199950.pt"
]
},
{
"steps": 1399980,
"file_path": "results/Huggy2/Huggy/Huggy-1399980.onnx",
"reward": 4.333074365343366,
"creation_time": 1721673247.292907,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399980.pt"
]
},
{
"steps": 1599909,
"file_path": "results/Huggy2/Huggy/Huggy-1599909.onnx",
"reward": 3.630615335522276,
"creation_time": 1721673483.6188102,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599909.pt"
]
},
{
"steps": 1799974,
"file_path": "results/Huggy2/Huggy/Huggy-1799974.onnx",
"reward": 3.7245816177792017,
"creation_time": 1721673726.5551496,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799974.pt"
]
},
{
"steps": 1999985,
"file_path": "results/Huggy2/Huggy/Huggy-1999985.onnx",
"reward": 3.1323545277118683,
"creation_time": 1721673969.405692,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999985.pt"
]
},
{
"steps": 2000041,
"file_path": "results/Huggy2/Huggy/Huggy-2000041.onnx",
"reward": 3.129666168114235,
"creation_time": 1721673969.5378497,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000041.pt"
]
}
],
"final_checkpoint": {
"steps": 2000041,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.129666168114235,
"creation_time": 1721673969.5378497,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000041.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.1+cu121"
}
}