{
"Huggy": {
"checkpoints": [
{
"steps": 199943,
"file_path": "results/Huggy/Huggy/Huggy-199943.onnx",
"reward": 3.257116871220725,
"creation_time": 1673736726.5999303,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199943.pt"
]
},
{
"steps": 399956,
"file_path": "results/Huggy/Huggy/Huggy-399956.onnx",
"reward": 3.761014635280027,
"creation_time": 1673736970.5598707,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399956.pt"
]
},
{
"steps": 599918,
"file_path": "results/Huggy/Huggy/Huggy-599918.onnx",
"reward": 3.7753614112734795,
"creation_time": 1673737218.5512218,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599918.pt"
]
},
{
"steps": 799885,
"file_path": "results/Huggy/Huggy/Huggy-799885.onnx",
"reward": 3.831595074213468,
"creation_time": 1673737464.4043872,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799885.pt"
]
},
{
"steps": 999925,
"file_path": "results/Huggy/Huggy/Huggy-999925.onnx",
"reward": 3.9383834545848933,
"creation_time": 1673737714.9010298,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999925.pt"
]
},
{
"steps": 1199960,
"file_path": "results/Huggy/Huggy/Huggy-1199960.onnx",
"reward": 3.70155416018721,
"creation_time": 1673737963.2653654,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199960.pt"
]
},
{
"steps": 1399985,
"file_path": "results/Huggy/Huggy/Huggy-1399985.onnx",
"reward": 3.9921726647308606,
"creation_time": 1673738210.65857,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399985.pt"
]
},
{
"steps": 1599975,
"file_path": "results/Huggy/Huggy/Huggy-1599975.onnx",
"reward": 3.7029664789498185,
"creation_time": 1673738461.2183626,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599975.pt"
]
},
{
"steps": 1799546,
"file_path": "results/Huggy/Huggy/Huggy-1799546.onnx",
"reward": 3.8519391489920216,
"creation_time": 1673738712.0026655,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799546.pt"
]
},
{
"steps": 1999979,
"file_path": "results/Huggy/Huggy/Huggy-1999979.onnx",
"reward": 3.455308129390081,
"creation_time": 1673738959.8633447,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999979.pt"
]
},
{
"steps": 2000050,
"file_path": "results/Huggy/Huggy/Huggy-2000050.onnx",
"reward": 3.455520388242361,
"creation_time": 1673738959.993769,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000050.pt"
]
}
],
"final_checkpoint": {
"steps": 2000050,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.455520388242361,
"creation_time": 1673738959.993769,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000050.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}