{
"Huggy": {
"checkpoints": [
{
"steps": 199862,
"file_path": "results/Huggy/Huggy/Huggy-199862.onnx",
"reward": 3.7213807047390546,
"creation_time": 1695532281.6887274,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199862.pt"
]
},
{
"steps": 399968,
"file_path": "results/Huggy/Huggy/Huggy-399968.onnx",
"reward": 3.703768805545919,
"creation_time": 1695532536.849055,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399968.pt"
]
},
{
"steps": 599910,
"file_path": "results/Huggy/Huggy/Huggy-599910.onnx",
"reward": 4.105148624490808,
"creation_time": 1695532795.4879217,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599910.pt"
]
},
{
"steps": 799875,
"file_path": "results/Huggy/Huggy/Huggy-799875.onnx",
"reward": 3.7870995589665006,
"creation_time": 1695533047.082458,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799875.pt"
]
},
{
"steps": 999974,
"file_path": "results/Huggy/Huggy/Huggy-999974.onnx",
"reward": 3.798361283869831,
"creation_time": 1695533304.9202757,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999974.pt"
]
},
{
"steps": 1199993,
"file_path": "results/Huggy/Huggy/Huggy-1199993.onnx",
"reward": 3.974850088023068,
"creation_time": 1695533559.3355353,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199993.pt"
]
},
{
"steps": 1399967,
"file_path": "results/Huggy/Huggy/Huggy-1399967.onnx",
"reward": 3.713268595933914,
"creation_time": 1695533817.614329,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399967.pt"
]
},
{
"steps": 1599977,
"file_path": "results/Huggy/Huggy/Huggy-1599977.onnx",
"reward": 3.9535972590142108,
"creation_time": 1695534068.9025807,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599977.pt"
]
},
{
"steps": 1799977,
"file_path": "results/Huggy/Huggy/Huggy-1799977.onnx",
"reward": 3.735279014863466,
"creation_time": 1695534320.0167632,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799977.pt"
]
},
{
"steps": 1999996,
"file_path": "results/Huggy/Huggy/Huggy-1999996.onnx",
"reward": 3.3200302093456955,
"creation_time": 1695534571.1587918,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999996.pt"
]
},
{
"steps": 2000078,
"file_path": "results/Huggy/Huggy/Huggy-2000078.onnx",
"reward": 3.3734220713377,
"creation_time": 1695534571.2832103,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000078.pt"
]
}
],
"final_checkpoint": {
"steps": 2000078,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.3734220713377,
"creation_time": 1695534571.2832103,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000078.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}