{
"Huggy": {
"checkpoints": [
{
"steps": 199886,
"file_path": "results/Huggy2/Huggy/Huggy-199886.onnx",
"reward": 3.164125293493271,
"creation_time": 1719389372.9138255,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199886.pt"
]
},
{
"steps": 399946,
"file_path": "results/Huggy2/Huggy/Huggy-399946.onnx",
"reward": 3.9328543646261096,
"creation_time": 1719389624.7935815,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399946.pt"
]
},
{
"steps": 599895,
"file_path": "results/Huggy2/Huggy/Huggy-599895.onnx",
"reward": 3.61534566928943,
"creation_time": 1719389883.6335235,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599895.pt"
]
},
{
"steps": 799967,
"file_path": "results/Huggy2/Huggy/Huggy-799967.onnx",
"reward": 3.9767384130687,
"creation_time": 1719390151.3908653,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799967.pt"
]
},
{
"steps": 999988,
"file_path": "results/Huggy2/Huggy/Huggy-999988.onnx",
"reward": 3.5853201687335967,
"creation_time": 1719390417.2261448,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999988.pt"
]
},
{
"steps": 1199950,
"file_path": "results/Huggy2/Huggy/Huggy-1199950.onnx",
"reward": 3.8637238493332497,
"creation_time": 1719390677.3303397,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199950.pt"
]
},
{
"steps": 1399946,
"file_path": "results/Huggy2/Huggy/Huggy-1399946.onnx",
"reward": 1.7177467346191406,
"creation_time": 1719390936.418606,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399946.pt"
]
},
{
"steps": 1599887,
"file_path": "results/Huggy2/Huggy/Huggy-1599887.onnx",
"reward": 3.840523594810117,
"creation_time": 1719391194.8844774,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599887.pt"
]
},
{
"steps": 1799956,
"file_path": "results/Huggy2/Huggy/Huggy-1799956.onnx",
"reward": 3.7763367904411567,
"creation_time": 1719391459.0216665,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799956.pt"
]
},
{
"steps": 1999980,
"file_path": "results/Huggy2/Huggy/Huggy-1999980.onnx",
"reward": 3.643176257610321,
"creation_time": 1719391718.7146478,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999980.pt"
]
},
{
"steps": 2000062,
"file_path": "results/Huggy2/Huggy/Huggy-2000062.onnx",
"reward": 3.740333342552185,
"creation_time": 1719391718.9049618,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000062.pt"
]
}
],
"final_checkpoint": {
"steps": 2000062,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.740333342552185,
"creation_time": 1719391718.9049618,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000062.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}