{
"Huggy": {
"checkpoints": [
{
"steps": 199976,
"file_path": "results/Huggy/Huggy/Huggy-199976.onnx",
"reward": 3.1844317392564156,
"creation_time": 1671130566.1990402,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199976.pt"
]
},
{
"steps": 399919,
"file_path": "results/Huggy/Huggy/Huggy-399919.onnx",
"reward": 4.124792956915058,
"creation_time": 1671130783.1572888,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399919.pt"
]
},
{
"steps": 599960,
"file_path": "results/Huggy/Huggy/Huggy-599960.onnx",
"reward": 3.821101505180885,
"creation_time": 1671131003.2031808,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599960.pt"
]
},
{
"steps": 799964,
"file_path": "results/Huggy/Huggy/Huggy-799964.onnx",
"reward": 3.8173812147898554,
"creation_time": 1671131220.4055114,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799964.pt"
]
},
{
"steps": 999991,
"file_path": "results/Huggy/Huggy/Huggy-999991.onnx",
"reward": 3.8201296779417224,
"creation_time": 1671131439.6347263,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999991.pt"
]
},
{
"steps": 1199968,
"file_path": "results/Huggy/Huggy/Huggy-1199968.onnx",
"reward": 3.833928134067949,
"creation_time": 1671131661.0547493,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199968.pt"
]
},
{
"steps": 1399922,
"file_path": "results/Huggy/Huggy/Huggy-1399922.onnx",
"reward": 3.0553558349609373,
"creation_time": 1671131881.893191,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399922.pt"
]
},
{
"steps": 1599922,
"file_path": "results/Huggy/Huggy/Huggy-1599922.onnx",
"reward": 3.9675717463540794,
"creation_time": 1671132100.1904178,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599922.pt"
]
},
{
"steps": 1799974,
"file_path": "results/Huggy/Huggy/Huggy-1799974.onnx",
"reward": 4.130667641514638,
"creation_time": 1671132320.1892743,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799974.pt"
]
},
{
"steps": 1999917,
"file_path": "results/Huggy/Huggy/Huggy-1999917.onnx",
"reward": 3.875538887346492,
"creation_time": 1671132543.9520254,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999917.pt"
]
},
{
"steps": 2000042,
"file_path": "results/Huggy/Huggy/Huggy-2000042.onnx",
"reward": 3.8922635282295337,
"creation_time": 1671132544.067533,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000042.pt"
]
}
],
"final_checkpoint": {
"steps": 2000042,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.8922635282295337,
"creation_time": 1671132544.067533,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000042.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}