{
"Huggy": {
"checkpoints": [
{
"steps": 199905,
"file_path": "results/Huggy/Huggy/Huggy-199905.onnx",
"reward": 3.3092469342549644,
"creation_time": 1692207518.5104814,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199905.pt"
]
},
{
"steps": 399884,
"file_path": "results/Huggy/Huggy/Huggy-399884.onnx",
"reward": 3.4380345126477683,
"creation_time": 1692207758.27776,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399884.pt"
]
},
{
"steps": 599967,
"file_path": "results/Huggy/Huggy/Huggy-599967.onnx",
"reward": 4.384771470365854,
"creation_time": 1692208001.3732157,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599967.pt"
]
},
{
"steps": 799631,
"file_path": "results/Huggy/Huggy/Huggy-799631.onnx",
"reward": 3.98404302180938,
"creation_time": 1692208242.5147896,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799631.pt"
]
},
{
"steps": 999965,
"file_path": "results/Huggy/Huggy/Huggy-999965.onnx",
"reward": 3.650358837662321,
"creation_time": 1692208482.7399585,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999965.pt"
]
},
{
"steps": 1199921,
"file_path": "results/Huggy/Huggy/Huggy-1199921.onnx",
"reward": 3.2955497635735407,
"creation_time": 1692208727.2067075,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199921.pt"
]
},
{
"steps": 1399981,
"file_path": "results/Huggy/Huggy/Huggy-1399981.onnx",
"reward": 3.8136069530790504,
"creation_time": 1692208966.6962907,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399981.pt"
]
},
{
"steps": 1599934,
"file_path": "results/Huggy/Huggy/Huggy-1599934.onnx",
"reward": 3.855297839641571,
"creation_time": 1692209211.977099,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599934.pt"
]
},
{
"steps": 1799913,
"file_path": "results/Huggy/Huggy/Huggy-1799913.onnx",
"reward": 4.011012318208046,
"creation_time": 1692209454.1279912,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799913.pt"
]
},
{
"steps": 1999961,
"file_path": "results/Huggy/Huggy/Huggy-1999961.onnx",
"reward": 3.9588215053081512,
"creation_time": 1692209699.452074,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999961.pt"
]
},
{
"steps": 2000035,
"file_path": "results/Huggy/Huggy/Huggy-2000035.onnx",
"reward": 3.947611131668091,
"creation_time": 1692209699.5754623,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000035.pt"
]
}
],
"final_checkpoint": {
"steps": 2000035,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.947611131668091,
"creation_time": 1692209699.5754623,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000035.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}