{
"Huggy": {
"checkpoints": [
{
"steps": 199992,
"file_path": "results/Huggy2/Huggy/Huggy-199992.onnx",
"reward": 3.498836932012013,
"creation_time": 1762323654.1614058,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199992.pt"
]
},
{
"steps": 399955,
"file_path": "results/Huggy2/Huggy/Huggy-399955.onnx",
"reward": 3.908743223836345,
"creation_time": 1762323905.3250773,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399955.pt"
]
},
{
"steps": 599962,
"file_path": "results/Huggy2/Huggy/Huggy-599962.onnx",
"reward": 3.744540496876365,
"creation_time": 1762324157.7042563,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599962.pt"
]
},
{
"steps": 799971,
"file_path": "results/Huggy2/Huggy/Huggy-799971.onnx",
"reward": 3.877978397120396,
"creation_time": 1762324409.1697278,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799971.pt"
]
},
{
"steps": 999990,
"file_path": "results/Huggy2/Huggy/Huggy-999990.onnx",
"reward": 3.8587789896093767,
"creation_time": 1762324661.7944806,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999990.pt"
]
},
{
"steps": 1199932,
"file_path": "results/Huggy2/Huggy/Huggy-1199932.onnx",
"reward": 3.628594122503115,
"creation_time": 1762324916.481404,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199932.pt"
]
},
{
"steps": 1399627,
"file_path": "results/Huggy2/Huggy/Huggy-1399627.onnx",
"reward": 4.219074181147984,
"creation_time": 1762325173.2625635,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399627.pt"
]
},
{
"steps": 1599961,
"file_path": "results/Huggy2/Huggy/Huggy-1599961.onnx",
"reward": 4.137397013374211,
"creation_time": 1762325425.7080233,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599961.pt"
]
},
{
"steps": 1799984,
"file_path": "results/Huggy2/Huggy/Huggy-1799984.onnx",
"reward": 3.8961841759474383,
"creation_time": 1762325681.8022277,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799984.pt"
]
},
{
"steps": 1999972,
"file_path": "results/Huggy2/Huggy/Huggy-1999972.onnx",
"reward": 3.7498748706919804,
"creation_time": 1762325935.6484177,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999972.pt"
]
},
{
"steps": 2000039,
"file_path": "results/Huggy2/Huggy/Huggy-2000039.onnx",
"reward": 3.7514582035834327,
"creation_time": 1762325935.8087273,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000039.pt"
]
}
],
"final_checkpoint": {
"steps": 2000039,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.7514582035834327,
"creation_time": 1762325935.8087273,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000039.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}