{
"Huggy": {
"checkpoints": [
{
"steps": 199921,
"file_path": "results/Huggy2/Huggy/Huggy-199921.onnx",
"reward": 3.2915911352312244,
"creation_time": 1732501495.1106932,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199921.pt"
]
},
{
"steps": 399960,
"file_path": "results/Huggy2/Huggy/Huggy-399960.onnx",
"reward": 3.7131972081959246,
"creation_time": 1732501768.538885,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399960.pt"
]
},
{
"steps": 599856,
"file_path": "results/Huggy2/Huggy/Huggy-599856.onnx",
"reward": 3.9887210987508297,
"creation_time": 1732502043.6386287,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599856.pt"
]
},
{
"steps": 799940,
"file_path": "results/Huggy2/Huggy/Huggy-799940.onnx",
"reward": 3.827136724886268,
"creation_time": 1732502313.6075234,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799940.pt"
]
},
{
"steps": 999957,
"file_path": "results/Huggy2/Huggy/Huggy-999957.onnx",
"reward": 4.122941030875633,
"creation_time": 1732502594.8730736,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999957.pt"
]
},
{
"steps": 1199920,
"file_path": "results/Huggy2/Huggy/Huggy-1199920.onnx",
"reward": 4.324775080147543,
"creation_time": 1732502872.3054748,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199920.pt"
]
},
{
"steps": 1399919,
"file_path": "results/Huggy2/Huggy/Huggy-1399919.onnx",
"reward": 3.5938045263290403,
"creation_time": 1732503143.896332,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399919.pt"
]
},
{
"steps": 1599896,
"file_path": "results/Huggy2/Huggy/Huggy-1599896.onnx",
"reward": 3.7714439015932033,
"creation_time": 1732503423.6124282,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599896.pt"
]
},
{
"steps": 1799983,
"file_path": "results/Huggy2/Huggy/Huggy-1799983.onnx",
"reward": 3.9049496381802666,
"creation_time": 1732503703.5166838,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799983.pt"
]
},
{
"steps": 1999965,
"file_path": "results/Huggy2/Huggy/Huggy-1999965.onnx",
"reward": 3.8605953227906,
"creation_time": 1732503976.1614137,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999965.pt"
]
},
{
"steps": 2000057,
"file_path": "results/Huggy2/Huggy/Huggy-2000057.onnx",
"reward": 3.884088410064578,
"creation_time": 1732503976.2918687,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000057.pt"
]
}
],
"final_checkpoint": {
"steps": 2000057,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.884088410064578,
"creation_time": 1732503976.2918687,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000057.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu121"
}
}