{
"Huggy": {
"checkpoints": [
{
"steps": 199896,
"file_path": "results/Huggy/Huggy/Huggy-199896.onnx",
"reward": 3.612084915509095,
"creation_time": 1687539230.8452544,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199896.pt"
]
},
{
"steps": 399817,
"file_path": "results/Huggy/Huggy/Huggy-399817.onnx",
"reward": 3.8889575314848392,
"creation_time": 1687539465.5972667,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399817.pt"
]
},
{
"steps": 599901,
"file_path": "results/Huggy/Huggy/Huggy-599901.onnx",
"reward": 4.300596370841518,
"creation_time": 1687539707.6736856,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599901.pt"
]
},
{
"steps": 799860,
"file_path": "results/Huggy/Huggy/Huggy-799860.onnx",
"reward": 3.9045793049371063,
"creation_time": 1687539945.3576224,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799860.pt"
]
},
{
"steps": 999894,
"file_path": "results/Huggy/Huggy/Huggy-999894.onnx",
"reward": 3.654422153524086,
"creation_time": 1687540186.6062126,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999894.pt"
]
},
{
"steps": 1199945,
"file_path": "results/Huggy/Huggy/Huggy-1199945.onnx",
"reward": 3.7019429956929066,
"creation_time": 1687540432.2408895,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199945.pt"
]
},
{
"steps": 1399977,
"file_path": "results/Huggy/Huggy/Huggy-1399977.onnx",
"reward": 3.974743670650891,
"creation_time": 1687540690.5023232,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399977.pt"
]
},
{
"steps": 1599316,
"file_path": "results/Huggy/Huggy/Huggy-1599316.onnx",
"reward": 3.8921450099279715,
"creation_time": 1687540950.4267564,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599316.pt"
]
},
{
"steps": 1799422,
"file_path": "results/Huggy/Huggy/Huggy-1799422.onnx",
"reward": 4.088703505791627,
"creation_time": 1687541213.9225786,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799422.pt"
]
},
{
"steps": 1999997,
"file_path": "results/Huggy/Huggy/Huggy-1999997.onnx",
"reward": 3.938833038237962,
"creation_time": 1687541491.0016062,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999997.pt"
]
},
{
"steps": 2000083,
"file_path": "results/Huggy/Huggy/Huggy-2000083.onnx",
"reward": 3.9648538191666765,
"creation_time": 1687541491.1366098,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000083.pt"
]
}
],
"final_checkpoint": {
"steps": 2000083,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.9648538191666765,
"creation_time": 1687541491.1366098,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000083.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}