{
"Huggy": {
"checkpoints": [
{
"steps": 199862,
"file_path": "results/Huggy/Huggy/Huggy-199862.onnx",
"reward": 3.3265118147096326,
"creation_time": 1675007767.9743953,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199862.pt"
]
},
{
"steps": 399878,
"file_path": "results/Huggy/Huggy/Huggy-399878.onnx",
"reward": 3.453739340488727,
"creation_time": 1675007989.5295348,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399878.pt"
]
},
{
"steps": 599916,
"file_path": "results/Huggy/Huggy/Huggy-599916.onnx",
"reward": 3.944403968359295,
"creation_time": 1675008213.2657406,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599916.pt"
]
},
{
"steps": 799948,
"file_path": "results/Huggy/Huggy/Huggy-799948.onnx",
"reward": 3.9303776239384116,
"creation_time": 1675008437.3224752,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799948.pt"
]
},
{
"steps": 999908,
"file_path": "results/Huggy/Huggy/Huggy-999908.onnx",
"reward": 3.924884378437012,
"creation_time": 1675008662.8947632,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999908.pt"
]
},
{
"steps": 1199936,
"file_path": "results/Huggy/Huggy/Huggy-1199936.onnx",
"reward": 3.5427611686181333,
"creation_time": 1675008890.8096015,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199936.pt"
]
},
{
"steps": 1399991,
"file_path": "results/Huggy/Huggy/Huggy-1399991.onnx",
"reward": 3.834401845932007,
"creation_time": 1675009116.7830787,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399991.pt"
]
},
{
"steps": 1599559,
"file_path": "results/Huggy/Huggy/Huggy-1599559.onnx",
"reward": 3.691717808092794,
"creation_time": 1675009340.7660565,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599559.pt"
]
},
{
"steps": 1799996,
"file_path": "results/Huggy/Huggy/Huggy-1799996.onnx",
"reward": 3.661238310042392,
"creation_time": 1675009565.6786664,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799996.pt"
]
},
{
"steps": 1999983,
"file_path": "results/Huggy/Huggy/Huggy-1999983.onnx",
"reward": 3.7725293739982275,
"creation_time": 1675009787.711068,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999983.pt"
]
},
{
"steps": 2000042,
"file_path": "results/Huggy/Huggy/Huggy-2000042.onnx",
"reward": 3.7016177674134574,
"creation_time": 1675009787.8189952,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000042.pt"
]
}
],
"final_checkpoint": {
"steps": 2000042,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7016177674134574,
"creation_time": 1675009787.8189952,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000042.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}