{}
{
"Huggy": {
"checkpoints": [
{
"steps": 199703,
"file_path": "results/Huggy/Huggy/Huggy-199703.onnx",
"reward": 3.1260443338576485,
"creation_time": 1676645584.3311868,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199703.pt"
]
},
{
"steps": 399960,
"file_path": "results/Huggy/Huggy/Huggy-399960.onnx",
"reward": 3.958091486806739,
"creation_time": 1676645871.0474346,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399960.pt"
]
},
{
"steps": 599998,
"file_path": "results/Huggy/Huggy/Huggy-599998.onnx",
"reward": 3.9131109416484833,
"creation_time": 1676646126.4709218,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599998.pt"
]
},
{
"steps": 799972,
"file_path": "results/Huggy/Huggy/Huggy-799972.onnx",
"reward": 3.6283251222811246,
"creation_time": 1676646371.0656378,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799972.pt"
]
},
{
"steps": 999886,
"file_path": "results/Huggy/Huggy/Huggy-999886.onnx",
"reward": 3.8056729422963183,
"creation_time": 1676646620.078845,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999886.pt"
]
},
{
"steps": 1199966,
"file_path": "results/Huggy/Huggy/Huggy-1199966.onnx",
"reward": 3.8284423287709552,
"creation_time": 1676646861.6264496,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199966.pt"
]
},
{
"steps": 1399941,
"file_path": "results/Huggy/Huggy/Huggy-1399941.onnx",
"reward": 4.525734118052891,
"creation_time": 1676647101.052584,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399941.pt"
]
},
{
"steps": 1599989,
"file_path": "results/Huggy/Huggy/Huggy-1599989.onnx",
"reward": 3.812506914138794,
"creation_time": 1676647335.6110265,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599989.pt"
]
},
{
"steps": 1799970,
"file_path": "results/Huggy/Huggy/Huggy-1799970.onnx",
"reward": 4.1016740644251115,
"creation_time": 1676647570.7567887,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799970.pt"
]
},
{
"steps": 1999354,
"file_path": "results/Huggy/Huggy/Huggy-1999354.onnx",
"reward": 3.986873305979229,
"creation_time": 1676647822.6192887,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999354.pt"
]
},
{
"steps": 2000104,
"file_path": "results/Huggy/Huggy/Huggy-2000104.onnx",
"reward": 3.865196709521115,
"creation_time": 1676647822.7721155,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000104.pt"
]
}
],
"final_checkpoint": {
"steps": 2000104,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.865196709521115,
"creation_time": 1676647822.7721155,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000104.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}