{
"Huggy": {
"checkpoints": [
{
"steps": 199548,
"file_path": "results/Huggy2/Huggy/Huggy-199548.onnx",
"reward": 3.1826938097937063,
"creation_time": 1741868201.9525352,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199548.pt"
]
},
{
"steps": 399932,
"file_path": "results/Huggy2/Huggy/Huggy-399932.onnx",
"reward": 3.353353696768401,
"creation_time": 1741868342.3641374,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399932.pt"
]
},
{
"steps": 599906,
"file_path": "results/Huggy2/Huggy/Huggy-599906.onnx",
"reward": 2.9317873318990073,
"creation_time": 1741868485.9544742,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599906.pt"
]
},
{
"steps": 799959,
"file_path": "results/Huggy2/Huggy/Huggy-799959.onnx",
"reward": 3.9597037204729966,
"creation_time": 1741868629.3178804,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799959.pt"
]
},
{
"steps": 999997,
"file_path": "results/Huggy2/Huggy/Huggy-999997.onnx",
"reward": 3.764352852957589,
"creation_time": 1741868776.3362806,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999997.pt"
]
},
{
"steps": 1199958,
"file_path": "results/Huggy2/Huggy/Huggy-1199958.onnx",
"reward": 4.318657267384413,
"creation_time": 1741868923.5323472,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199958.pt"
]
},
{
"steps": 1399938,
"file_path": "results/Huggy2/Huggy/Huggy-1399938.onnx",
"reward": 3.7926481915758803,
"creation_time": 1741869068.3807623,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399938.pt"
]
},
{
"steps": 1599775,
"file_path": "results/Huggy2/Huggy/Huggy-1599775.onnx",
"reward": 3.8855330933289354,
"creation_time": 1741869216.9373164,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599775.pt"
]
},
{
"steps": 1799958,
"file_path": "results/Huggy2/Huggy/Huggy-1799958.onnx",
"reward": 3.7864504701051955,
"creation_time": 1741869365.6062033,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799958.pt"
]
},
{
"steps": 1999943,
"file_path": "results/Huggy2/Huggy/Huggy-1999943.onnx",
"reward": 3.883663914420388,
"creation_time": 1741869514.717848,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999943.pt"
]
},
{
"steps": 2000048,
"file_path": "results/Huggy2/Huggy/Huggy-2000048.onnx",
"reward": 3.846529165903727,
"creation_time": 1741869514.8162832,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000048.pt"
]
}
],
"final_checkpoint": {
"steps": 2000048,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.846529165903727,
"creation_time": 1741869514.8162832,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000048.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}