{
"Huggy": {
"checkpoints": [
{
"steps": 199784,
"file_path": "results/Huggy2/Huggy/Huggy-199784.onnx",
"reward": 3.6435323470625383,
"creation_time": 1746634873.1536891,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199784.pt"
]
},
{
"steps": 399926,
"file_path": "results/Huggy2/Huggy/Huggy-399926.onnx",
"reward": 4.061027772593916,
"creation_time": 1746635119.5486748,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399926.pt"
]
},
{
"steps": 599938,
"file_path": "results/Huggy2/Huggy/Huggy-599938.onnx",
"reward": 4.368405183156331,
"creation_time": 1746635363.9008,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599938.pt"
]
},
{
"steps": 799928,
"file_path": "results/Huggy2/Huggy/Huggy-799928.onnx",
"reward": 3.780398346023795,
"creation_time": 1746635608.883399,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799928.pt"
]
},
{
"steps": 999969,
"file_path": "results/Huggy2/Huggy/Huggy-999969.onnx",
"reward": 3.68961349881698,
"creation_time": 1746635858.4374533,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999969.pt"
]
},
{
"steps": 1199950,
"file_path": "results/Huggy2/Huggy/Huggy-1199950.onnx",
"reward": 3.1563179859748254,
"creation_time": 1746636103.2377048,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199950.pt"
]
},
{
"steps": 1399924,
"file_path": "results/Huggy2/Huggy/Huggy-1399924.onnx",
"reward": 3.603928401975921,
"creation_time": 1746636353.8577995,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399924.pt"
]
},
{
"steps": 1599916,
"file_path": "results/Huggy2/Huggy/Huggy-1599916.onnx",
"reward": 3.547220748026394,
"creation_time": 1746636604.0194721,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599916.pt"
]
},
{
"steps": 1799934,
"file_path": "results/Huggy2/Huggy/Huggy-1799934.onnx",
"reward": 3.6047013824636287,
"creation_time": 1746636851.784012,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799934.pt"
]
},
{
"steps": 1999330,
"file_path": "results/Huggy2/Huggy/Huggy-1999330.onnx",
"reward": 3.46688095793698,
"creation_time": 1746637095.6971333,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999330.pt"
]
},
{
"steps": 2000080,
"file_path": "results/Huggy2/Huggy/Huggy-2000080.onnx",
"reward": 3.4297293573617935,
"creation_time": 1746637095.8484302,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000080.pt"
]
}
],
"final_checkpoint": {
"steps": 2000080,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.4297293573617935,
"creation_time": 1746637095.8484302,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000080.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.0+cu126"
}
}