{
"Huggy": {
"checkpoints": [
{
"steps": 199901,
"file_path": "results/Huggy-by-sky/Huggy/Huggy-199901.onnx",
"reward": 3.6621491210801262,
"creation_time": 1700818751.8675728,
"auxillary_file_paths": [
"results/Huggy-by-sky/Huggy/Huggy-199901.pt"
]
},
{
"steps": 399945,
"file_path": "results/Huggy-by-sky/Huggy/Huggy-399945.onnx",
"reward": 3.709345332911757,
"creation_time": 1700818996.9752333,
"auxillary_file_paths": [
"results/Huggy-by-sky/Huggy/Huggy-399945.pt"
]
},
{
"steps": 599952,
"file_path": "results/Huggy-by-sky/Huggy/Huggy-599952.onnx",
"reward": 3.7031884094079337,
"creation_time": 1700819242.648889,
"auxillary_file_paths": [
"results/Huggy-by-sky/Huggy/Huggy-599952.pt"
]
},
{
"steps": 799951,
"file_path": "results/Huggy-by-sky/Huggy/Huggy-799951.onnx",
"reward": 3.771892686405092,
"creation_time": 1700819485.3821585,
"auxillary_file_paths": [
"results/Huggy-by-sky/Huggy/Huggy-799951.pt"
]
},
{
"steps": 999983,
"file_path": "results/Huggy-by-sky/Huggy/Huggy-999983.onnx",
"reward": 3.5984812896698712,
"creation_time": 1700819735.2169144,
"auxillary_file_paths": [
"results/Huggy-by-sky/Huggy/Huggy-999983.pt"
]
},
{
"steps": 1199935,
"file_path": "results/Huggy-by-sky/Huggy/Huggy-1199935.onnx",
"reward": 3.589407667517662,
"creation_time": 1700819983.1083817,
"auxillary_file_paths": [
"results/Huggy-by-sky/Huggy/Huggy-1199935.pt"
]
},
{
"steps": 1399884,
"file_path": "results/Huggy-by-sky/Huggy/Huggy-1399884.onnx",
"reward": 3.7205348631635826,
"creation_time": 1700820225.7399418,
"auxillary_file_paths": [
"results/Huggy-by-sky/Huggy/Huggy-1399884.pt"
]
},
{
"steps": 1599826,
"file_path": "results/Huggy-by-sky/Huggy/Huggy-1599826.onnx",
"reward": 3.736203762411161,
"creation_time": 1700820471.6573794,
"auxillary_file_paths": [
"results/Huggy-by-sky/Huggy/Huggy-1599826.pt"
]
},
{
"steps": 1799996,
"file_path": "results/Huggy-by-sky/Huggy/Huggy-1799996.onnx",
"reward": 3.2418309756451182,
"creation_time": 1700820719.6175127,
"auxillary_file_paths": [
"results/Huggy-by-sky/Huggy/Huggy-1799996.pt"
]
},
{
"steps": 1999450,
"file_path": "results/Huggy-by-sky/Huggy/Huggy-1999450.onnx",
"reward": 2.831151383263724,
"creation_time": 1700820966.1599321,
"auxillary_file_paths": [
"results/Huggy-by-sky/Huggy/Huggy-1999450.pt"
]
},
{
"steps": 2000200,
"file_path": "results/Huggy-by-sky/Huggy/Huggy-2000200.onnx",
"reward": 2.0301255583763123,
"creation_time": 1700820966.2856276,
"auxillary_file_paths": [
"results/Huggy-by-sky/Huggy/Huggy-2000200.pt"
]
}
],
"final_checkpoint": {
"steps": 2000200,
"file_path": "results/Huggy-by-sky/Huggy.onnx",
"reward": 2.0301255583763123,
"creation_time": 1700820966.2856276,
"auxillary_file_paths": [
"results/Huggy-by-sky/Huggy/Huggy-2000200.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}