ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199964,
                "file_path": "results/Huggy/Huggy/Huggy-199964.onnx",
                "reward": 3.426654611184047,
                "creation_time": 1685079349.8119488,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199964.pt"
                ]
            },
            {
                "steps": 399973,
                "file_path": "results/Huggy/Huggy/Huggy-399973.onnx",
                "reward": 3.8038307864901046,
                "creation_time": 1685079583.6454756,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399973.pt"
                ]
            },
            {
                "steps": 599348,
                "file_path": "results/Huggy/Huggy/Huggy-599348.onnx",
                "reward": 4.680380609300402,
                "creation_time": 1685079818.9820092,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599348.pt"
                ]
            },
            {
                "steps": 799948,
                "file_path": "results/Huggy/Huggy/Huggy-799948.onnx",
                "reward": 3.89914449129967,
                "creation_time": 1685080054.4299304,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799948.pt"
                ]
            },
            {
                "steps": 999915,
                "file_path": "results/Huggy/Huggy/Huggy-999915.onnx",
                "reward": 3.9158123957388327,
                "creation_time": 1685080295.8533735,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999915.pt"
                ]
            },
            {
                "steps": 1199910,
                "file_path": "results/Huggy/Huggy/Huggy-1199910.onnx",
                "reward": 4.480115046395975,
                "creation_time": 1685080538.3217578,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199910.pt"
                ]
            },
            {
                "steps": 1399999,
                "file_path": "results/Huggy/Huggy/Huggy-1399999.onnx",
                "reward": 5.221705453736441,
                "creation_time": 1685080780.937763,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399999.pt"
                ]
            },
            {
                "steps": 1599995,
                "file_path": "results/Huggy/Huggy/Huggy-1599995.onnx",
                "reward": 3.8227402429649797,
                "creation_time": 1685081017.2422907,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599995.pt"
                ]
            },
            {
                "steps": 1799995,
                "file_path": "results/Huggy/Huggy/Huggy-1799995.onnx",
                "reward": 3.9821163335240874,
                "creation_time": 1685081258.6645887,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799995.pt"
                ]
            },
            {
                "steps": 1999634,
                "file_path": "results/Huggy/Huggy/Huggy-1999634.onnx",
                "reward": 4.080747081076398,
                "creation_time": 1685081499.4208684,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999634.pt"
                ]
            },
            {
                "steps": 2000384,
                "file_path": "results/Huggy/Huggy/Huggy-2000384.onnx",
                "reward": 3.967282802298449,
                "creation_time": 1685081499.6168416,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000384.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000384,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.967282802298449,
            "creation_time": 1685081499.6168416,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000384.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
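
For reference, a minimal Python sketch for inspecting this log; it assumes the file sits at the hypothetical local path run_logs/training_status.json and that the behavior key is "Huggy", as in the JSON above:

import json

# Load the ML-Agents training status log.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

checkpoints = status["Huggy"]["checkpoints"]

# Each checkpoint records the step count, the exported .onnx path, the mean
# reward at save time, a creation timestamp, and auxiliary .pt paths (the key
# is spelled "auxillary_file_paths" in the ML-Agents output).
best = max(checkpoints, key=lambda c: c["reward"])
print(f"Best checkpoint: {best['file_path']} "
      f"(reward {best['reward']:.3f} at step {best['steps']})")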