{
"Huggy": {
"checkpoints": [
{
"steps": 199900,
"file_path": "results/Huggy/Huggy/Huggy-199900.onnx",
"reward": 3.2451953206743513,
"creation_time": 1699538879.6935356,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199900.pt"
]
},
{
"steps": 399975,
"file_path": "results/Huggy/Huggy/Huggy-399975.onnx",
"reward": 3.5982605028152466,
"creation_time": 1699539105.1839025,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399975.pt"
]
},
{
"steps": 599881,
"file_path": "results/Huggy/Huggy/Huggy-599881.onnx",
"reward": 4.041099429130554,
"creation_time": 1699539331.0174558,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599881.pt"
]
},
{
"steps": 799923,
"file_path": "results/Huggy/Huggy/Huggy-799923.onnx",
"reward": 3.8778386551375483,
"creation_time": 1699539558.73437,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799923.pt"
]
},
{
"steps": 999975,
"file_path": "results/Huggy/Huggy/Huggy-999975.onnx",
"reward": 4.062879339341195,
"creation_time": 1699539787.4094188,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999975.pt"
]
},
{
"steps": 1199891,
"file_path": "results/Huggy/Huggy/Huggy-1199891.onnx",
"reward": 3.6577411389005356,
"creation_time": 1699540016.6502535,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199891.pt"
]
},
{
"steps": 1399950,
"file_path": "results/Huggy/Huggy/Huggy-1399950.onnx",
"reward": 3.3204028367996217,
"creation_time": 1699540242.8943996,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399950.pt"
]
},
{
"steps": 1599749,
"file_path": "results/Huggy/Huggy/Huggy-1599749.onnx",
"reward": 3.8973520923686285,
"creation_time": 1699540465.4408796,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599749.pt"
]
},
{
"steps": 1799915,
"file_path": "results/Huggy/Huggy/Huggy-1799915.onnx",
"reward": 3.912986560459555,
"creation_time": 1699540692.735106,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799915.pt"
]
},
{
"steps": 1999933,
"file_path": "results/Huggy/Huggy/Huggy-1999933.onnx",
"reward": 4.199988320469856,
"creation_time": 1699540921.7558813,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999933.pt"
]
},
{
"steps": 2000003,
"file_path": "results/Huggy/Huggy/Huggy-2000003.onnx",
"reward": 4.201001550021925,
"creation_time": 1699540921.853931,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000003.pt"
]
}
],
"final_checkpoint": {
"steps": 2000003,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.201001550021925,
"creation_time": 1699540921.853931,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000003.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}