{
"Huggy": {
"checkpoints": [
{
"steps": 199990,
"file_path": "results/Huggy/Huggy/Huggy-199990.onnx",
"reward": 3.2398770570755007,
"creation_time": 1687879647.0259352,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199990.pt"
]
},
{
"steps": 399808,
"file_path": "results/Huggy/Huggy/Huggy-399808.onnx",
"reward": 3.7379476557607236,
"creation_time": 1687879889.7843857,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399808.pt"
]
},
{
"steps": 599970,
"file_path": "results/Huggy/Huggy/Huggy-599970.onnx",
"reward": 3.7556804548949003,
"creation_time": 1687880132.8062375,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599970.pt"
]
},
{
"steps": 799993,
"file_path": "results/Huggy/Huggy/Huggy-799993.onnx",
"reward": 4.110807997081916,
"creation_time": 1687880380.1166587,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799993.pt"
]
},
{
"steps": 999972,
"file_path": "results/Huggy/Huggy/Huggy-999972.onnx",
"reward": 3.6285449887614654,
"creation_time": 1687880627.1524205,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999972.pt"
]
},
{
"steps": 1199981,
"file_path": "results/Huggy/Huggy/Huggy-1199981.onnx",
"reward": 3.8198154859426543,
"creation_time": 1687880875.771046,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199981.pt"
]
},
{
"steps": 1399924,
"file_path": "results/Huggy/Huggy/Huggy-1399924.onnx",
"reward": 4.020685529708862,
"creation_time": 1687881132.8120205,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399924.pt"
]
},
{
"steps": 1599966,
"file_path": "results/Huggy/Huggy/Huggy-1599966.onnx",
"reward": 3.8707698696288304,
"creation_time": 1687881382.6870747,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599966.pt"
]
},
{
"steps": 1799999,
"file_path": "results/Huggy/Huggy/Huggy-1799999.onnx",
"reward": 3.8898335576057432,
"creation_time": 1687881635.8406553,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799999.pt"
]
},
{
"steps": 1999912,
"file_path": "results/Huggy/Huggy/Huggy-1999912.onnx",
"reward": 3.62631313941058,
"creation_time": 1687881889.1859872,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999912.pt"
]
},
{
"steps": 2000017,
"file_path": "results/Huggy/Huggy/Huggy-2000017.onnx",
"reward": 3.67759626186811,
"creation_time": 1687881889.307285,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000017.pt"
]
}
],
"final_checkpoint": {
"steps": 2000017,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.67759626186811,
"creation_time": 1687881889.307285,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000017.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}