{
"Huggy": {
"checkpoints": [
{
"steps": 199826,
"file_path": "results/Huggy/Huggy/Huggy-199826.onnx",
"reward": 3.424987089633942,
"creation_time": 1679709275.8204043,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199826.pt"
]
},
{
"steps": 399989,
"file_path": "results/Huggy/Huggy/Huggy-399989.onnx",
"reward": 3.620717401466062,
"creation_time": 1679709513.2526538,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399989.pt"
]
},
{
"steps": 599927,
"file_path": "results/Huggy/Huggy/Huggy-599927.onnx",
"reward": 3.8435658356722664,
"creation_time": 1679709755.5673134,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599927.pt"
]
},
{
"steps": 799915,
"file_path": "results/Huggy/Huggy/Huggy-799915.onnx",
"reward": 3.905392430294519,
"creation_time": 1679709994.7836645,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799915.pt"
]
},
{
"steps": 999982,
"file_path": "results/Huggy/Huggy/Huggy-999982.onnx",
"reward": 3.6553765963916938,
"creation_time": 1679710238.3411062,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999982.pt"
]
},
{
"steps": 1199995,
"file_path": "results/Huggy/Huggy/Huggy-1199995.onnx",
"reward": 3.6893972939736135,
"creation_time": 1679710484.046169,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199995.pt"
]
},
{
"steps": 1399994,
"file_path": "results/Huggy/Huggy/Huggy-1399994.onnx",
"reward": 5.568816979726155,
"creation_time": 1679710724.8025863,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399994.pt"
]
},
{
"steps": 1599983,
"file_path": "results/Huggy/Huggy/Huggy-1599983.onnx",
"reward": 4.001543043171548,
"creation_time": 1679710964.416071,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599983.pt"
]
},
{
"steps": 1799932,
"file_path": "results/Huggy/Huggy/Huggy-1799932.onnx",
"reward": 4.1141169132330475,
"creation_time": 1679711205.9490452,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799932.pt"
]
},
{
"steps": 1999961,
"file_path": "results/Huggy/Huggy/Huggy-1999961.onnx",
"reward": 3.520652088251981,
"creation_time": 1679711445.8292356,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999961.pt"
]
},
{
"steps": 2000021,
"file_path": "results/Huggy/Huggy/Huggy-2000021.onnx",
"reward": 3.511775044458253,
"creation_time": 1679711445.9484286,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000021.pt"
]
}
],
"final_checkpoint": {
"steps": 2000021,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.511775044458253,
"creation_time": 1679711445.9484286,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000021.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}