{
"Huggy": {
"checkpoints": [
{
"steps": 199964,
"file_path": "results/Huggy2/Huggy/Huggy-199964.onnx",
"reward": 3.032767789704459,
"creation_time": 1750347416.3456514,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199964.pt"
]
},
{
"steps": 399904,
"file_path": "results/Huggy2/Huggy/Huggy-399904.onnx",
"reward": 3.532982940971851,
"creation_time": 1750347654.3695858,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399904.pt"
]
},
{
"steps": 599922,
"file_path": "results/Huggy2/Huggy/Huggy-599922.onnx",
"reward": 3.172830641269684,
"creation_time": 1750347904.5018253,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599922.pt"
]
},
{
"steps": 799959,
"file_path": "results/Huggy2/Huggy/Huggy-799959.onnx",
"reward": 3.727103035413582,
"creation_time": 1750348154.8178513,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799959.pt"
]
},
{
"steps": 999979,
"file_path": "results/Huggy2/Huggy/Huggy-999979.onnx",
"reward": 3.8849629350872927,
"creation_time": 1750348400.6416998,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999979.pt"
]
},
{
"steps": 1199954,
"file_path": "results/Huggy2/Huggy/Huggy-1199954.onnx",
"reward": 3.2983216175011227,
"creation_time": 1750348646.6429687,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199954.pt"
]
},
{
"steps": 1399572,
"file_path": "results/Huggy2/Huggy/Huggy-1399572.onnx",
"reward": 3.8612060150550924,
"creation_time": 1750348891.4666991,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399572.pt"
]
},
{
"steps": 1599936,
"file_path": "results/Huggy2/Huggy/Huggy-1599936.onnx",
"reward": 3.804264554800081,
"creation_time": 1750349138.5063937,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599936.pt"
]
},
{
"steps": 1799932,
"file_path": "results/Huggy2/Huggy/Huggy-1799932.onnx",
"reward": 3.7042795243086637,
"creation_time": 1750349392.6623867,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799932.pt"
]
},
{
"steps": 1999992,
"file_path": "results/Huggy2/Huggy/Huggy-1999992.onnx",
"reward": 4.114450494448344,
"creation_time": 1750349640.961941,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999992.pt"
]
},
{
"steps": 2000075,
"file_path": "results/Huggy2/Huggy/Huggy-2000075.onnx",
"reward": 4.163933910429478,
"creation_time": 1750349641.0754673,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000075.pt"
]
}
],
"final_checkpoint": {
"steps": 2000075,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.163933910429478,
"creation_time": 1750349641.0754673,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000075.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.1+cu126"
}
}