{
"Huggy": {
"checkpoints": [
{
"steps": 199948,
"file_path": "results/Huggy/Huggy/Huggy-199948.onnx",
"reward": 3.1734457314014435,
"creation_time": 1689387742.8579233,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199948.pt"
]
},
{
"steps": 399947,
"file_path": "results/Huggy/Huggy/Huggy-399947.onnx",
"reward": 3.411338895559311,
"creation_time": 1689387985.2961528,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399947.pt"
]
},
{
"steps": 599925,
"file_path": "results/Huggy/Huggy/Huggy-599925.onnx",
"reward": 3.9213137443249044,
"creation_time": 1689388232.7346053,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599925.pt"
]
},
{
"steps": 799984,
"file_path": "results/Huggy/Huggy/Huggy-799984.onnx",
"reward": 3.7712435957363675,
"creation_time": 1689388481.3018954,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799984.pt"
]
},
{
"steps": 999972,
"file_path": "results/Huggy/Huggy/Huggy-999972.onnx",
"reward": 3.8549390554428102,
"creation_time": 1689388731.6558034,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999972.pt"
]
},
{
"steps": 1199997,
"file_path": "results/Huggy/Huggy/Huggy-1199997.onnx",
"reward": 3.7334249542470563,
"creation_time": 1689388981.6127832,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199997.pt"
]
},
{
"steps": 1399973,
"file_path": "results/Huggy/Huggy/Huggy-1399973.onnx",
"reward": 3.8802470252503474,
"creation_time": 1689389227.9858913,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399973.pt"
]
},
{
"steps": 1599930,
"file_path": "results/Huggy/Huggy/Huggy-1599930.onnx",
"reward": 3.6776683857807746,
"creation_time": 1689389480.35077,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599930.pt"
]
},
{
"steps": 1799751,
"file_path": "results/Huggy/Huggy/Huggy-1799751.onnx",
"reward": 3.6129267944777306,
"creation_time": 1689389728.582474,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799751.pt"
]
},
{
"steps": 1999979,
"file_path": "results/Huggy/Huggy/Huggy-1999979.onnx",
"reward": 3.518454725925739,
"creation_time": 1689389975.683237,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999979.pt"
]
},
{
"steps": 2000048,
"file_path": "results/Huggy/Huggy/Huggy-2000048.onnx",
"reward": 3.532416060566902,
"creation_time": 1689389975.8176835,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000048.pt"
]
}
],
"final_checkpoint": {
"steps": 2000048,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.532416060566902,
"creation_time": 1689389975.8176835,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000048.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}