{
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
},
"Huggy": {
"checkpoints": [
{
"steps": 0,
"file_path": "results/Huggy/Huggy/Huggy-0.onnx",
"reward": null,
"creation_time": 1690799502.516632,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-0.pt"
]
},
{
"steps": 199865,
"file_path": "results/Huggy/Huggy/Huggy-199865.onnx",
"reward": 3.5589173754056294,
"creation_time": 1690799762.6140656,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199865.pt"
]
},
{
"steps": 399802,
"file_path": "results/Huggy/Huggy/Huggy-399802.onnx",
"reward": 3.4880662290379405,
"creation_time": 1690800010.0298672,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399802.pt"
]
},
{
"steps": 599963,
"file_path": "results/Huggy/Huggy/Huggy-599963.onnx",
"reward": 4.216823683065527,
"creation_time": 1690800252.8932443,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599963.pt"
]
},
{
"steps": 799934,
"file_path": "results/Huggy/Huggy/Huggy-799934.onnx",
"reward": 3.738711552487479,
"creation_time": 1690800490.7954288,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799934.pt"
]
},
{
"steps": 999955,
"file_path": "results/Huggy/Huggy/Huggy-999955.onnx",
"reward": 4.102765288673529,
"creation_time": 1690800738.0701203,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999955.pt"
]
},
{
"steps": 1199312,
"file_path": "results/Huggy/Huggy/Huggy-1199312.onnx",
"reward": 3.7051892925481327,
"creation_time": 1690800978.5415463,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199312.pt"
]
},
{
"steps": 1399998,
"file_path": "results/Huggy/Huggy/Huggy-1399998.onnx",
"reward": 3.857414568095228,
"creation_time": 1690801210.6216786,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399998.pt"
]
},
{
"steps": 1599948,
"file_path": "results/Huggy/Huggy/Huggy-1599948.onnx",
"reward": 3.953227109089494,
"creation_time": 1690801446.1829636,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599948.pt"
]
},
{
"steps": 1799961,
"file_path": "results/Huggy/Huggy/Huggy-1799961.onnx",
"reward": 3.9178244495391845,
"creation_time": 1690801682.7267933,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799961.pt"
]
},
{
"steps": 1999923,
"file_path": "results/Huggy/Huggy/Huggy-1999923.onnx",
"reward": 3.90739736745232,
"creation_time": 1690801922.748231,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999923.pt"
]
},
{
"steps": 2000080,
"file_path": "results/Huggy/Huggy/Huggy-2000080.onnx",
"reward": 3.9308210733609323,
"creation_time": 1690801922.9518437,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000080.pt"
]
}
],
"final_checkpoint": {
"steps": 2000080,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.9308210733609323,
"creation_time": 1690801922.9518437,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000080.pt"
]
}
}
}