{
"Huggy": {
"checkpoints": [
{
"steps": 199851,
"file_path": "results/Huggy2/Huggy/Huggy-199851.onnx",
"reward": 3.3133340748873623,
"creation_time": 1743332483.8879097,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199851.pt"
]
},
{
"steps": 399969,
"file_path": "results/Huggy2/Huggy/Huggy-399969.onnx",
"reward": 3.553787069595777,
"creation_time": 1743332719.9323237,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399969.pt"
]
},
{
"steps": 599950,
"file_path": "results/Huggy2/Huggy/Huggy-599950.onnx",
"reward": 3.5225955694913864,
"creation_time": 1743332963.0038474,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599950.pt"
]
},
{
"steps": 799978,
"file_path": "results/Huggy2/Huggy/Huggy-799978.onnx",
"reward": 3.417569469743305,
"creation_time": 1743333199.838102,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799978.pt"
]
},
{
"steps": 999472,
"file_path": "results/Huggy2/Huggy/Huggy-999472.onnx",
"reward": 3.775792263832289,
"creation_time": 1743333441.5363863,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999472.pt"
]
},
{
"steps": 1199910,
"file_path": "results/Huggy2/Huggy/Huggy-1199910.onnx",
"reward": 3.884026861190796,
"creation_time": 1743333686.5596733,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199910.pt"
]
},
{
"steps": 1399946,
"file_path": "results/Huggy2/Huggy/Huggy-1399946.onnx",
"reward": 3.7661133555687623,
"creation_time": 1743333928.5492992,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399946.pt"
]
},
{
"steps": 1599958,
"file_path": "results/Huggy2/Huggy/Huggy-1599958.onnx",
"reward": 3.710076411028166,
"creation_time": 1743334174.3947096,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599958.pt"
]
},
{
"steps": 1799988,
"file_path": "results/Huggy2/Huggy/Huggy-1799988.onnx",
"reward": 3.7270342746627665,
"creation_time": 1743334420.063246,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799988.pt"
]
},
{
"steps": 1999944,
"file_path": "results/Huggy2/Huggy/Huggy-1999944.onnx",
"reward": 4.484341039802089,
"creation_time": 1743334663.4146013,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999944.pt"
]
},
{
"steps": 2000060,
"file_path": "results/Huggy2/Huggy/Huggy-2000060.onnx",
"reward": 4.520500432042515,
"creation_time": 1743334663.5363622,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000060.pt"
]
}
],
"final_checkpoint": {
"steps": 2000060,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.520500432042515,
"creation_time": 1743334663.5363622,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000060.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}