{
"Huggy": {
"checkpoints": [
{
"steps": 199727,
"file_path": "results/Huggy/Huggy/Huggy-199727.onnx",
"reward": 3.870077588728496,
"creation_time": 1698941392.4255881,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199727.pt"
]
},
{
"steps": 399890,
"file_path": "results/Huggy/Huggy/Huggy-399890.onnx",
"reward": 4.1437079182692935,
"creation_time": 1698941633.5718145,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399890.pt"
]
},
{
"steps": 599771,
"file_path": "results/Huggy/Huggy/Huggy-599771.onnx",
"reward": 3.7608245611190796,
"creation_time": 1698941877.6498055,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599771.pt"
]
},
{
"steps": 799968,
"file_path": "results/Huggy/Huggy/Huggy-799968.onnx",
"reward": 3.9700387773059664,
"creation_time": 1698942117.4246058,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799968.pt"
]
},
{
"steps": 999976,
"file_path": "results/Huggy/Huggy/Huggy-999976.onnx",
"reward": 3.7302528203350223,
"creation_time": 1698942363.0167975,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999976.pt"
]
},
{
"steps": 1199965,
"file_path": "results/Huggy/Huggy/Huggy-1199965.onnx",
"reward": 3.835403141975403,
"creation_time": 1698942608.1225584,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199965.pt"
]
},
{
"steps": 1399965,
"file_path": "results/Huggy/Huggy/Huggy-1399965.onnx",
"reward": 3.832685154781007,
"creation_time": 1698942847.321269,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399965.pt"
]
},
{
"steps": 1599970,
"file_path": "results/Huggy/Huggy/Huggy-1599970.onnx",
"reward": 3.5581488031513837,
"creation_time": 1698943093.2804108,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599970.pt"
]
},
{
"steps": 1799980,
"file_path": "results/Huggy/Huggy/Huggy-1799980.onnx",
"reward": 3.710685677420009,
"creation_time": 1698943338.1741087,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799980.pt"
]
},
{
"steps": 1999995,
"file_path": "results/Huggy/Huggy/Huggy-1999995.onnx",
"reward": 3.2675003664834157,
"creation_time": 1698943583.80725,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999995.pt"
]
},
{
"steps": 2000041,
"file_path": "results/Huggy/Huggy/Huggy-2000041.onnx",
"reward": 3.283654887100746,
"creation_time": 1698943583.9151244,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000041.pt"
]
}
],
"final_checkpoint": {
"steps": 2000041,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.283654887100746,
"creation_time": 1698943583.9151244,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000041.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}