{
"Huggy": {
"checkpoints": [
{
"steps": 199979,
"file_path": "results/Huggy/Huggy/Huggy-199979.onnx",
"reward": 3.717273044766802,
"creation_time": 1698741501.5245621,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199979.pt"
]
},
{
"steps": 399945,
"file_path": "results/Huggy/Huggy/Huggy-399945.onnx",
"reward": 3.610892675817013,
"creation_time": 1698741741.2307177,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399945.pt"
]
},
{
"steps": 599871,
"file_path": "results/Huggy/Huggy/Huggy-599871.onnx",
"reward": 3.3671116462120643,
"creation_time": 1698741983.792542,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599871.pt"
]
},
{
"steps": 799960,
"file_path": "results/Huggy/Huggy/Huggy-799960.onnx",
"reward": 3.6943607035889685,
"creation_time": 1698742222.832424,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799960.pt"
]
},
{
"steps": 999995,
"file_path": "results/Huggy/Huggy/Huggy-999995.onnx",
"reward": 3.6842676837419726,
"creation_time": 1698742461.2298222,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999995.pt"
]
},
{
"steps": 1199913,
"file_path": "results/Huggy/Huggy/Huggy-1199913.onnx",
"reward": 3.3022128450564847,
"creation_time": 1698742696.1808004,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199913.pt"
]
},
{
"steps": 1399903,
"file_path": "results/Huggy/Huggy/Huggy-1399903.onnx",
"reward": 3.4630135865772473,
"creation_time": 1698742923.6456678,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399903.pt"
]
},
{
"steps": 1599910,
"file_path": "results/Huggy/Huggy/Huggy-1599910.onnx",
"reward": 3.576322006602441,
"creation_time": 1698743156.3996642,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599910.pt"
]
},
{
"steps": 1799905,
"file_path": "results/Huggy/Huggy/Huggy-1799905.onnx",
"reward": 3.923102285311772,
"creation_time": 1698743390.6487355,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799905.pt"
]
},
{
"steps": 1999940,
"file_path": "results/Huggy/Huggy/Huggy-1999940.onnx",
"reward": 1.8854584097862244,
"creation_time": 1698743623.770402,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999940.pt"
]
},
{
"steps": 2000019,
"file_path": "results/Huggy/Huggy/Huggy-2000019.onnx",
"reward": 3.092347423235575,
"creation_time": 1698743623.8753734,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000019.pt"
]
}
],
"final_checkpoint": {
"steps": 2000019,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.092347423235575,
"creation_time": 1698743623.8753734,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000019.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}