{
"Huggy": {
"checkpoints": [
{
"steps": 199817,
"file_path": "results/Huggy2/Huggy/Huggy-199817.onnx",
"reward": 3.19423674812188,
"creation_time": 1710249439.577181,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199817.pt"
]
},
{
"steps": 399709,
"file_path": "results/Huggy2/Huggy/Huggy-399709.onnx",
"reward": 4.268288218368919,
"creation_time": 1710249698.845837,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399709.pt"
]
},
{
"steps": 599974,
"file_path": "results/Huggy2/Huggy/Huggy-599974.onnx",
"reward": 3.814871216813723,
"creation_time": 1710249965.398285,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599974.pt"
]
},
{
"steps": 799978,
"file_path": "results/Huggy2/Huggy/Huggy-799978.onnx",
"reward": 3.8861017000962454,
"creation_time": 1710250221.2664132,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799978.pt"
]
},
{
"steps": 999920,
"file_path": "results/Huggy2/Huggy/Huggy-999920.onnx",
"reward": 3.7547354932845107,
"creation_time": 1710250479.9388747,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999920.pt"
]
},
{
"steps": 1199999,
"file_path": "results/Huggy2/Huggy/Huggy-1199999.onnx",
"reward": 4.011817926168442,
"creation_time": 1710250740.703719,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199999.pt"
]
},
{
"steps": 1399989,
"file_path": "results/Huggy2/Huggy/Huggy-1399989.onnx",
"reward": 3.946830700218426,
"creation_time": 1710251000.7395666,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399989.pt"
]
},
{
"steps": 1599457,
"file_path": "results/Huggy2/Huggy/Huggy-1599457.onnx",
"reward": 3.8135288125740554,
"creation_time": 1710251264.7689369,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599457.pt"
]
},
{
"steps": 1799395,
"file_path": "results/Huggy2/Huggy/Huggy-1799395.onnx",
"reward": 4.226514048874378,
"creation_time": 1710251526.4810948,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799395.pt"
]
},
{
"steps": 1999983,
"file_path": "results/Huggy2/Huggy/Huggy-1999983.onnx",
"reward": 3.995642066001892,
"creation_time": 1710251791.2947812,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999983.pt"
]
},
{
"steps": 2000041,
"file_path": "results/Huggy2/Huggy/Huggy-2000041.onnx",
"reward": 3.979092068142361,
"creation_time": 1710251791.4351344,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000041.pt"
]
}
],
"final_checkpoint": {
"steps": 2000041,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.979092068142361,
"creation_time": 1710251791.4351344,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000041.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}