{
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.31.0.dev0",
    "torch_version": "1.11.0+cu102"
  },
  "Huggy": {
    "checkpoints": [
      {
        "steps": 0,
        "file_path": "results/Huggy/Huggy/Huggy-0.onnx",
        "reward": null,
        "creation_time": 1686252026.0558405,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-0.pt"
        ]
      },
      {
        "steps": 199843,
        "file_path": "results/Huggy/Huggy/Huggy-199843.onnx",
        "reward": 3.6255324446238006,
        "creation_time": 1686252387.2589586,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199843.pt"
        ]
      },
      {
        "steps": 399946,
        "file_path": "results/Huggy/Huggy/Huggy-399946.onnx",
        "reward": 3.4500055549895925,
        "creation_time": 1686252654.5045674,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399946.pt"
        ]
      },
      {
        "steps": 599975,
        "file_path": "results/Huggy/Huggy/Huggy-599975.onnx",
        "reward": 2.6945110162099204,
        "creation_time": 1686252919.0646813,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599975.pt"
        ]
      },
      {
        "steps": 799931,
        "file_path": "results/Huggy/Huggy/Huggy-799931.onnx",
        "reward": 4.069296962576486,
        "creation_time": 1686253186.142335,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799931.pt"
        ]
      },
      {
        "steps": 999998,
        "file_path": "results/Huggy/Huggy/Huggy-999998.onnx",
        "reward": 3.831956578625573,
        "creation_time": 1686253455.7867126,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999998.pt"
        ]
      },
      {
        "steps": 1199933,
        "file_path": "results/Huggy/Huggy/Huggy-1199933.onnx",
        "reward": 3.661555941357757,
        "creation_time": 1686253725.129971,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199933.pt"
        ]
      },
      {
        "steps": 1399975,
        "file_path": "results/Huggy/Huggy/Huggy-1399975.onnx",
        "reward": 3.7206270032458835,
        "creation_time": 1686253989.4489653,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399975.pt"
        ]
      },
      {
        "steps": 1599881,
        "file_path": "results/Huggy/Huggy/Huggy-1599881.onnx",
        "reward": 3.778032935327954,
        "creation_time": 1686254259.1058104,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599881.pt"
        ]
      },
      {
        "steps": 1799981,
        "file_path": "results/Huggy/Huggy/Huggy-1799981.onnx",
        "reward": 3.6260766496783807,
        "creation_time": 1686254530.6086464,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799981.pt"
        ]
      },
      {
        "steps": 1999913,
        "file_path": "results/Huggy/Huggy/Huggy-1999913.onnx",
        "reward": 3.5581157088279722,
        "creation_time": 1686254792.2824025,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999913.pt"
        ]
      },
      {
        "steps": 2000663,
        "file_path": "results/Huggy/Huggy/Huggy-2000663.onnx",
        "reward": 3.228021576291039,
        "creation_time": 1686254792.426582,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000663.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000663,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.228021576291039,
      "creation_time": 1686254792.426582,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000663.pt"
      ]
    }
  }
}