{
"Huggy": {
"checkpoints": [
{
"steps": 199949,
"file_path": "results/Huggy2/Huggy/Huggy-199949.onnx",
"reward": 3.5743355703732322,
"creation_time": 1726005135.2897997,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199949.pt"
]
},
{
"steps": 399955,
"file_path": "results/Huggy2/Huggy/Huggy-399955.onnx",
"reward": 3.2759685336597384,
"creation_time": 1726005390.62055,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399955.pt"
]
},
{
"steps": 599959,
"file_path": "results/Huggy2/Huggy/Huggy-599959.onnx",
"reward": 3.3838655253251395,
"creation_time": 1726005649.7343037,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599959.pt"
]
},
{
"steps": 799953,
"file_path": "results/Huggy2/Huggy/Huggy-799953.onnx",
"reward": 3.6283652115535068,
"creation_time": 1726005902.9084454,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799953.pt"
]
},
{
"steps": 999418,
"file_path": "results/Huggy2/Huggy/Huggy-999418.onnx",
"reward": 3.7103443171951795,
"creation_time": 1726006158.7877414,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999418.pt"
]
},
{
"steps": 1199351,
"file_path": "results/Huggy2/Huggy/Huggy-1199351.onnx",
"reward": 3.4335891723632814,
"creation_time": 1726006417.7357094,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199351.pt"
]
},
{
"steps": 1399895,
"file_path": "results/Huggy2/Huggy/Huggy-1399895.onnx",
"reward": 3.529466582938682,
"creation_time": 1726006671.3237877,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399895.pt"
]
},
{
"steps": 1599942,
"file_path": "results/Huggy2/Huggy/Huggy-1599942.onnx",
"reward": 3.606279244992585,
"creation_time": 1726006927.9068854,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599942.pt"
]
},
{
"steps": 1799923,
"file_path": "results/Huggy2/Huggy/Huggy-1799923.onnx",
"reward": 4.048771358620037,
"creation_time": 1726007186.3704703,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799923.pt"
]
},
{
"steps": 1999751,
"file_path": "results/Huggy2/Huggy/Huggy-1999751.onnx",
"reward": 3.6512423851822,
"creation_time": 1726007439.3822546,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999751.pt"
]
},
{
"steps": 2000501,
"file_path": "results/Huggy2/Huggy/Huggy-2000501.onnx",
"reward": 3.6081498411466493,
"creation_time": 1726007439.5648756,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000501.pt"
]
}
],
"final_checkpoint": {
"steps": 2000501,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.6081498411466493,
"creation_time": 1726007439.5648756,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000501.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.4.0+cu121"
}
}