{
"Huggy": {
"checkpoints": [
{
"steps": 199782,
"file_path": "results/Huggy/Huggy/Huggy-199782.onnx",
"reward": 3.627623024633375,
"creation_time": 1701615344.7092767,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199782.pt"
]
},
{
"steps": 399980,
"file_path": "results/Huggy/Huggy/Huggy-399980.onnx",
"reward": 3.9619176434352994,
"creation_time": 1701615603.5364857,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399980.pt"
]
},
{
"steps": 599921,
"file_path": "results/Huggy/Huggy/Huggy-599921.onnx",
"reward": 3.6646862183847735,
"creation_time": 1701615871.5902584,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599921.pt"
]
},
{
"steps": 799974,
"file_path": "results/Huggy/Huggy/Huggy-799974.onnx",
"reward": 3.9728948006758817,
"creation_time": 1701616134.7045114,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799974.pt"
]
},
{
"steps": 999952,
"file_path": "results/Huggy/Huggy/Huggy-999952.onnx",
"reward": 3.832118730087065,
"creation_time": 1701616407.1820014,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999952.pt"
]
},
{
"steps": 1199514,
"file_path": "results/Huggy/Huggy/Huggy-1199514.onnx",
"reward": 3.8153154132317524,
"creation_time": 1701616679.3392615,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199514.pt"
]
},
{
"steps": 1399997,
"file_path": "results/Huggy/Huggy/Huggy-1399997.onnx",
"reward": 3.9667369804599066,
"creation_time": 1701616952.5377295,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399997.pt"
]
},
{
"steps": 1599957,
"file_path": "results/Huggy/Huggy/Huggy-1599957.onnx",
"reward": 3.8279292667555116,
"creation_time": 1701617220.333116,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599957.pt"
]
},
{
"steps": 1799982,
"file_path": "results/Huggy/Huggy/Huggy-1799982.onnx",
"reward": 3.773540531527506,
"creation_time": 1701617486.4187324,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799982.pt"
]
},
{
"steps": 1999972,
"file_path": "results/Huggy/Huggy/Huggy-1999972.onnx",
"reward": 3.542524108584498,
"creation_time": 1701617753.1592283,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999972.pt"
]
},
{
"steps": 2000126,
"file_path": "results/Huggy/Huggy/Huggy-2000126.onnx",
"reward": 3.59170204318232,
"creation_time": 1701617753.2715938,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000126.pt"
]
}
],
"final_checkpoint": {
"steps": 2000126,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.59170204318232,
"creation_time": 1701617753.2715938,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000126.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.1+cu121"
}
}