{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199790,
        "file_path": "results/Huggy/Huggy/Huggy-199790.onnx",
        "reward": 3.067930056290193,
        "creation_time": 1705090272.2086349,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199790.pt"
        ]
      },
      {
        "steps": 399894,
        "file_path": "results/Huggy/Huggy/Huggy-399894.onnx",
        "reward": 3.7044727364007164,
        "creation_time": 1705090507.676923,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399894.pt"
        ]
      },
      {
        "steps": 599963,
        "file_path": "results/Huggy/Huggy/Huggy-599963.onnx",
        "reward": 3.7526417845173885,
        "creation_time": 1705090745.1518192,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599963.pt"
        ]
      },
      {
        "steps": 799881,
        "file_path": "results/Huggy/Huggy/Huggy-799881.onnx",
        "reward": 3.8093206540534372,
        "creation_time": 1705090982.0157363,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799881.pt"
        ]
      },
      {
        "steps": 999958,
        "file_path": "results/Huggy/Huggy/Huggy-999958.onnx",
        "reward": 4.0134356507888205,
        "creation_time": 1705091226.1618216,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999958.pt"
        ]
      },
      {
        "steps": 1199900,
        "file_path": "results/Huggy/Huggy/Huggy-1199900.onnx",
        "reward": 3.593204798797766,
        "creation_time": 1705091468.6942644,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199900.pt"
        ]
      },
      {
        "steps": 1399919,
        "file_path": "results/Huggy/Huggy/Huggy-1399919.onnx",
        "reward": 5.244382262229919,
        "creation_time": 1705091710.591582,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399919.pt"
        ]
      },
      {
        "steps": 1599975,
        "file_path": "results/Huggy/Huggy/Huggy-1599975.onnx",
        "reward": 3.863116238309049,
        "creation_time": 1705091951.215262,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599975.pt"
        ]
      },
      {
        "steps": 1799999,
        "file_path": "results/Huggy/Huggy/Huggy-1799999.onnx",
        "reward": 3.902797180130368,
        "creation_time": 1705092192.4570508,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799999.pt"
        ]
      },
      {
        "steps": 1999990,
        "file_path": "results/Huggy/Huggy/Huggy-1999990.onnx",
        "reward": 3.5941206216812134,
        "creation_time": 1705092435.0509217,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999990.pt"
        ]
      },
      {
        "steps": 2000740,
        "file_path": "results/Huggy/Huggy/Huggy-2000740.onnx",
        "reward": 3.262958184532497,
        "creation_time": 1705092435.2047608,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000740.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000740,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.262958184532497,
      "creation_time": 1705092435.2047608,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000740.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.1.0.dev0",
    "torch_version": "2.1.2+cu121"
  }
}