{
"Huggy": {
"checkpoints": [
{
"steps": 199817,
"file_path": "results/Huggy2/Huggy/Huggy-199817.onnx",
"reward": 3.2990418589777417,
"creation_time": 1741571519.1108122,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199817.pt"
]
},
{
"steps": 399934,
"file_path": "results/Huggy2/Huggy/Huggy-399934.onnx",
"reward": 3.787602406829151,
"creation_time": 1741571748.5157278,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399934.pt"
]
},
{
"steps": 599944,
"file_path": "results/Huggy2/Huggy/Huggy-599944.onnx",
"reward": 3.8667411327362062,
"creation_time": 1741571979.6590152,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599944.pt"
]
},
{
"steps": 799841,
"file_path": "results/Huggy2/Huggy/Huggy-799841.onnx",
"reward": 4.000321441611578,
"creation_time": 1741572213.0315852,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799841.pt"
]
},
{
"steps": 999966,
"file_path": "results/Huggy2/Huggy/Huggy-999966.onnx",
"reward": 3.72682295535125,
"creation_time": 1741572452.7215323,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999966.pt"
]
},
{
"steps": 1199973,
"file_path": "results/Huggy2/Huggy/Huggy-1199973.onnx",
"reward": 3.9725667093739365,
"creation_time": 1741572687.7524283,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199973.pt"
]
},
{
"steps": 1399994,
"file_path": "results/Huggy2/Huggy/Huggy-1399994.onnx",
"reward": 3.8202268884827695,
"creation_time": 1741572920.3529062,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399994.pt"
]
},
{
"steps": 1599747,
"file_path": "results/Huggy2/Huggy/Huggy-1599747.onnx",
"reward": 3.8954239212202304,
"creation_time": 1741573160.1738946,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599747.pt"
]
},
{
"steps": 1799994,
"file_path": "results/Huggy2/Huggy/Huggy-1799994.onnx",
"reward": 3.713467988229933,
"creation_time": 1741573397.963846,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799994.pt"
]
},
{
"steps": 1999999,
"file_path": "results/Huggy2/Huggy/Huggy-1999999.onnx",
"reward": 4.575440777672662,
"creation_time": 1741573636.2226293,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999999.pt"
]
},
{
"steps": 2000115,
"file_path": "results/Huggy2/Huggy/Huggy-2000115.onnx",
"reward": 4.640576696395874,
"creation_time": 1741573636.3420064,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000115.pt"
]
}
],
"final_checkpoint": {
"steps": 2000115,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.640576696395874,
"creation_time": 1741573636.3420064,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000115.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}