{
"Huggy": {
"checkpoints": [
{
"steps": 199944,
"file_path": "results/Huggy/Huggy/Huggy-199944.onnx",
"reward": 3.3292161217757634,
"creation_time": 1693806972.5419567,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199944.pt"
]
},
{
"steps": 399862,
"file_path": "results/Huggy/Huggy/Huggy-399862.onnx",
"reward": 3.258472844532558,
"creation_time": 1693807208.2260158,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399862.pt"
]
},
{
"steps": 599953,
"file_path": "results/Huggy/Huggy/Huggy-599953.onnx",
"reward": 4.240046461423238,
"creation_time": 1693807451.3681772,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599953.pt"
]
},
{
"steps": 799958,
"file_path": "results/Huggy/Huggy/Huggy-799958.onnx",
"reward": 2.9439691498614193,
"creation_time": 1693807689.4230928,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799958.pt"
]
},
{
"steps": 999991,
"file_path": "results/Huggy/Huggy/Huggy-999991.onnx",
"reward": 3.3093774914741516,
"creation_time": 1693807932.4634435,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999991.pt"
]
},
{
"steps": 1199990,
"file_path": "results/Huggy/Huggy/Huggy-1199990.onnx",
"reward": 3.2868186994032427,
"creation_time": 1693808180.3155313,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199990.pt"
]
},
{
"steps": 1399957,
"file_path": "results/Huggy/Huggy/Huggy-1399957.onnx",
"reward": 3.6747263215291195,
"creation_time": 1693808427.0825276,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399957.pt"
]
},
{
"steps": 1599995,
"file_path": "results/Huggy/Huggy/Huggy-1599995.onnx",
"reward": 3.559696609826432,
"creation_time": 1693808681.4459012,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599995.pt"
]
},
{
"steps": 1799960,
"file_path": "results/Huggy/Huggy/Huggy-1799960.onnx",
"reward": 3.4373239487409593,
"creation_time": 1693808929.712599,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799960.pt"
]
},
{
"steps": 1999283,
"file_path": "results/Huggy/Huggy/Huggy-1999283.onnx",
"reward": 3.5134500516285687,
"creation_time": 1693809165.5571227,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999283.pt"
]
},
{
"steps": 2000033,
"file_path": "results/Huggy/Huggy/Huggy-2000033.onnx",
"reward": 3.476740527525544,
"creation_time": 1693809165.783854,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000033.pt"
]
}
],
"final_checkpoint": {
"steps": 2000033,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.476740527525544,
"creation_time": 1693809165.783854,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000033.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}