{
"Huggy": {
"checkpoints": [
{
"steps": 199903,
"file_path": "results/Huggy/Huggy/Huggy-199903.onnx",
"reward": 3.6174305109750655,
"creation_time": 1702883969.2473252,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199903.pt"
]
},
{
"steps": 399874,
"file_path": "results/Huggy/Huggy/Huggy-399874.onnx",
"reward": 3.957682558174791,
"creation_time": 1702884216.521817,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399874.pt"
]
},
{
"steps": 599992,
"file_path": "results/Huggy/Huggy/Huggy-599992.onnx",
"reward": 4.322481362258687,
"creation_time": 1702884472.4252894,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599992.pt"
]
},
{
"steps": 799932,
"file_path": "results/Huggy/Huggy/Huggy-799932.onnx",
"reward": 3.837205373998826,
"creation_time": 1702884720.030896,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799932.pt"
]
},
{
"steps": 999967,
"file_path": "results/Huggy/Huggy/Huggy-999967.onnx",
"reward": 3.854091562697145,
"creation_time": 1702884976.9255478,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999967.pt"
]
},
{
"steps": 1199944,
"file_path": "results/Huggy/Huggy/Huggy-1199944.onnx",
"reward": 3.6203983006653964,
"creation_time": 1702885232.7576659,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199944.pt"
]
},
{
"steps": 1399828,
"file_path": "results/Huggy/Huggy/Huggy-1399828.onnx",
"reward": 3.88075967937568,
"creation_time": 1702885489.3349454,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399828.pt"
]
},
{
"steps": 1599920,
"file_path": "results/Huggy/Huggy/Huggy-1599920.onnx",
"reward": 3.680290058143157,
"creation_time": 1702885753.4832149,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599920.pt"
]
},
{
"steps": 1799916,
"file_path": "results/Huggy/Huggy/Huggy-1799916.onnx",
"reward": 3.527952533089713,
"creation_time": 1702886010.9436252,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799916.pt"
]
},
{
"steps": 1999978,
"file_path": "results/Huggy/Huggy/Huggy-1999978.onnx",
"reward": 4.35149982770284,
"creation_time": 1702886266.8997421,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999978.pt"
]
},
{
"steps": 2000093,
"file_path": "results/Huggy/Huggy/Huggy-2000093.onnx",
"reward": 4.486942812800407,
"creation_time": 1702886267.0194616,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000093.pt"
]
}
],
"final_checkpoint": {
"steps": 2000093,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.486942812800407,
"creation_time": 1702886267.0194616,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000093.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.2+cu121"
}
}