{
"Huggy": {
"checkpoints": [
{
"steps": 199817,
"file_path": "results/Huggy2/Huggy/Huggy-199817.onnx",
"reward": 3.7517750285289906,
"creation_time": 1741393074.7401745,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199817.pt"
]
},
{
"steps": 399939,
"file_path": "results/Huggy2/Huggy/Huggy-399939.onnx",
"reward": 3.7148561477661133,
"creation_time": 1741393314.859963,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399939.pt"
]
},
{
"steps": 599994,
"file_path": "results/Huggy2/Huggy/Huggy-599994.onnx",
"reward": 3.934512116407093,
"creation_time": 1741393555.9554026,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599994.pt"
]
},
{
"steps": 799956,
"file_path": "results/Huggy2/Huggy/Huggy-799956.onnx",
"reward": 3.7264468756467934,
"creation_time": 1741393799.073518,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799956.pt"
]
},
{
"steps": 999970,
"file_path": "results/Huggy2/Huggy/Huggy-999970.onnx",
"reward": 3.631711447966918,
"creation_time": 1741394053.930857,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999970.pt"
]
},
{
"steps": 1199978,
"file_path": "results/Huggy2/Huggy/Huggy-1199978.onnx",
"reward": 4.1851261002676825,
"creation_time": 1741394308.2477446,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199978.pt"
]
},
{
"steps": 1399976,
"file_path": "results/Huggy2/Huggy/Huggy-1399976.onnx",
"reward": 4.023298978805542,
"creation_time": 1741394557.696586,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399976.pt"
]
},
{
"steps": 1599944,
"file_path": "results/Huggy2/Huggy/Huggy-1599944.onnx",
"reward": 3.6402086581413946,
"creation_time": 1741394802.9416733,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599944.pt"
]
},
{
"steps": 1799998,
"file_path": "results/Huggy2/Huggy/Huggy-1799998.onnx",
"reward": 3.5638161412740157,
"creation_time": 1741395050.8512807,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799998.pt"
]
},
{
"steps": 1999952,
"file_path": "results/Huggy2/Huggy/Huggy-1999952.onnx",
"reward": 3.24203388105359,
"creation_time": 1741395313.7978532,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999952.pt"
]
},
{
"steps": 2000009,
"file_path": "results/Huggy2/Huggy/Huggy-2000009.onnx",
"reward": 3.22086206181296,
"creation_time": 1741395313.9112096,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000009.pt"
]
}
],
"final_checkpoint": {
"steps": 2000009,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.22086206181296,
"creation_time": 1741395313.9112096,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000009.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}