{
"Huggy": {
"checkpoints": [
{
"steps": 199910,
"file_path": "results/Huggy/Huggy/Huggy-199910.onnx",
"reward": 3.2676649180011474,
"creation_time": 1685497398.395678,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199910.pt"
]
},
{
"steps": 399948,
"file_path": "results/Huggy/Huggy/Huggy-399948.onnx",
"reward": 3.724433808259561,
"creation_time": 1685497827.799121,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399948.pt"
]
},
{
"steps": 599708,
"file_path": "results/Huggy/Huggy/Huggy-599708.onnx",
"reward": 3.7902503858009973,
"creation_time": 1685498264.8189783,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599708.pt"
]
},
{
"steps": 799953,
"file_path": "results/Huggy/Huggy/Huggy-799953.onnx",
"reward": 3.643055557222157,
"creation_time": 1685498702.3937628,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799953.pt"
]
},
{
"steps": 999953,
"file_path": "results/Huggy/Huggy/Huggy-999953.onnx",
"reward": 3.8069274106828295,
"creation_time": 1685499168.419007,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999953.pt"
]
},
{
"steps": 1199961,
"file_path": "results/Huggy/Huggy/Huggy-1199961.onnx",
"reward": 3.73731866197766,
"creation_time": 1685499642.4957783,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199961.pt"
]
},
{
"steps": 1399948,
"file_path": "results/Huggy/Huggy/Huggy-1399948.onnx",
"reward": 3.6599918583692133,
"creation_time": 1685500099.6460783,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399948.pt"
]
},
{
"steps": 1599818,
"file_path": "results/Huggy/Huggy/Huggy-1599818.onnx",
"reward": 4.0516011569810955,
"creation_time": 1685500566.68711,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599818.pt"
]
},
{
"steps": 1799299,
"file_path": "results/Huggy/Huggy/Huggy-1799299.onnx",
"reward": 3.7588414464678084,
"creation_time": 1685501026.7484486,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799299.pt"
]
},
{
"steps": 1999895,
"file_path": "results/Huggy/Huggy/Huggy-1999895.onnx",
"reward": 4.618322720130284,
"creation_time": 1685501487.738118,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999895.pt"
]
},
{
"steps": 2000015,
"file_path": "results/Huggy/Huggy/Huggy-2000015.onnx",
"reward": 4.874019320194538,
"creation_time": 1685501487.8951497,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000015.pt"
]
}
],
"final_checkpoint": {
"steps": 2000015,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.874019320194538,
"creation_time": 1685501487.8951497,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000015.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}