{
"Huggy": {
"checkpoints": [
{
"steps": 199912,
"file_path": "results/Huggy/Huggy/Huggy-199912.onnx",
"reward": 3.2716751984187535,
"creation_time": 1683550714.799502,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199912.pt"
]
},
{
"steps": 399963,
"file_path": "results/Huggy/Huggy/Huggy-399963.onnx",
"reward": 3.471695656959827,
"creation_time": 1683550959.937126,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399963.pt"
]
},
{
"steps": 599967,
"file_path": "results/Huggy/Huggy/Huggy-599967.onnx",
"reward": 3.0822011470794677,
"creation_time": 1683551214.18858,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599967.pt"
]
},
{
"steps": 799935,
"file_path": "results/Huggy/Huggy/Huggy-799935.onnx",
"reward": 3.9259892258702256,
"creation_time": 1683551463.8945687,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799935.pt"
]
},
{
"steps": 999978,
"file_path": "results/Huggy/Huggy/Huggy-999978.onnx",
"reward": 3.7236185098949233,
"creation_time": 1683551715.5205722,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999978.pt"
]
},
{
"steps": 1199978,
"file_path": "results/Huggy/Huggy/Huggy-1199978.onnx",
"reward": 3.6853801012039185,
"creation_time": 1683551965.3535445,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199978.pt"
]
},
{
"steps": 1399990,
"file_path": "results/Huggy/Huggy/Huggy-1399990.onnx",
"reward": 3.5479433273485568,
"creation_time": 1683552212.130923,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399990.pt"
]
},
{
"steps": 1599992,
"file_path": "results/Huggy/Huggy/Huggy-1599992.onnx",
"reward": 3.8388001170471635,
"creation_time": 1683552460.415747,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599992.pt"
]
},
{
"steps": 1799988,
"file_path": "results/Huggy/Huggy/Huggy-1799988.onnx",
"reward": 3.266467278980347,
"creation_time": 1683552708.006286,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799988.pt"
]
},
{
"steps": 1999510,
"file_path": "results/Huggy/Huggy/Huggy-1999510.onnx",
"reward": 3.820780538309079,
"creation_time": 1683552952.3927305,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999510.pt"
]
},
{
"steps": 2000260,
"file_path": "results/Huggy/Huggy/Huggy-2000260.onnx",
"reward": 3.788431058945268,
"creation_time": 1683552952.623626,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000260.pt"
]
}
],
"final_checkpoint": {
"steps": 2000260,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.788431058945268,
"creation_time": 1683552952.623626,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000260.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}