{
"Huggy": {
"checkpoints": [
{
"steps": 199895,
"file_path": "results/Huggy/Huggy/Huggy-199895.onnx",
"reward": 3.203258582523891,
"creation_time": 1706082988.928789,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199895.pt"
]
},
{
"steps": 399999,
"file_path": "results/Huggy/Huggy/Huggy-399999.onnx",
"reward": 3.495071865618229,
"creation_time": 1706083216.939465,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399999.pt"
]
},
{
"steps": 599285,
"file_path": "results/Huggy/Huggy/Huggy-599285.onnx",
"reward": 4.110542631149292,
"creation_time": 1706083447.4459288,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599285.pt"
]
},
{
"steps": 799959,
"file_path": "results/Huggy/Huggy/Huggy-799959.onnx",
"reward": 3.7573453472720253,
"creation_time": 1706083677.804114,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799959.pt"
]
},
{
"steps": 999889,
"file_path": "results/Huggy/Huggy/Huggy-999889.onnx",
"reward": 3.6820022859610617,
"creation_time": 1706083914.9692025,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999889.pt"
]
},
{
"steps": 1199957,
"file_path": "results/Huggy/Huggy/Huggy-1199957.onnx",
"reward": 3.4375372250233927,
"creation_time": 1706084157.0016418,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199957.pt"
]
},
{
"steps": 1399975,
"file_path": "results/Huggy/Huggy/Huggy-1399975.onnx",
"reward": 3.8689557086853754,
"creation_time": 1706084399.1592026,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399975.pt"
]
},
{
"steps": 1599379,
"file_path": "results/Huggy/Huggy/Huggy-1599379.onnx",
"reward": 3.7089657023167,
"creation_time": 1706084635.917107,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599379.pt"
]
},
{
"steps": 1799942,
"file_path": "results/Huggy/Huggy/Huggy-1799942.onnx",
"reward": 3.7801069021224976,
"creation_time": 1706084872.1169808,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799942.pt"
]
},
{
"steps": 1999937,
"file_path": "results/Huggy/Huggy/Huggy-1999937.onnx",
"reward": 4.131566408068635,
"creation_time": 1706085104.7272694,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999937.pt"
]
},
{
"steps": 2000036,
"file_path": "results/Huggy/Huggy/Huggy-2000036.onnx",
"reward": 4.20254400101575,
"creation_time": 1706085104.8401027,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000036.pt"
]
}
],
"final_checkpoint": {
"steps": 2000036,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.20254400101575,
"creation_time": 1706085104.8401027,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000036.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.2+cu121"
}
}