ppo-Huggy / run_logs /training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199662,
                "file_path": "results/Huggy/Huggy/Huggy-199662.onnx",
                "reward": 3.655677793731152,
                "creation_time": 1671307440.0414708,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199662.pt"
                ]
            },
            {
                "steps": 399923,
                "file_path": "results/Huggy/Huggy/Huggy-399923.onnx",
                "reward": 4.144107504167419,
                "creation_time": 1671307656.8523517,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399923.pt"
                ]
            },
            {
                "steps": 599981,
                "file_path": "results/Huggy/Huggy/Huggy-599981.onnx",
                "reward": 4.015199021056846,
                "creation_time": 1671307879.8749607,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599981.pt"
                ]
            },
            {
                "steps": 799921,
                "file_path": "results/Huggy/Huggy/Huggy-799921.onnx",
                "reward": 4.014777593138094,
                "creation_time": 1671308098.3851142,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799921.pt"
                ]
            },
            {
                "steps": 999950,
                "file_path": "results/Huggy/Huggy/Huggy-999950.onnx",
                "reward": 4.089368429760005,
                "creation_time": 1671308321.3361773,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999950.pt"
                ]
            },
            {
                "steps": 1199886,
                "file_path": "results/Huggy/Huggy/Huggy-1199886.onnx",
                "reward": 3.6728130247175077,
                "creation_time": 1671308542.4460905,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199886.pt"
                ]
            },
            {
                "steps": 1399990,
                "file_path": "results/Huggy/Huggy/Huggy-1399990.onnx",
                "reward": 4.061800101224114,
                "creation_time": 1671308765.0469708,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399990.pt"
                ]
            },
            {
                "steps": 1599960,
                "file_path": "results/Huggy/Huggy/Huggy-1599960.onnx",
                "reward": 3.9129366634184852,
                "creation_time": 1671308983.4431932,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599960.pt"
                ]
            },
            {
                "steps": 1799922,
                "file_path": "results/Huggy/Huggy/Huggy-1799922.onnx",
                "reward": 3.918753323176049,
                "creation_time": 1671309208.1993306,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799922.pt"
                ]
            },
            {
                "steps": 1999994,
                "file_path": "results/Huggy/Huggy/Huggy-1999994.onnx",
                "reward": 3.9729022117037522,
                "creation_time": 1671309430.4106145,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999994.pt"
                ]
            },
            {
                "steps": 2000104,
                "file_path": "results/Huggy/Huggy/Huggy-2000104.onnx",
                "reward": 4.004029777142908,
                "creation_time": 1671309430.5285738,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000104.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000104,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.004029777142908,
            "creation_time": 1671309430.5285738,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000104.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}