{
"Huggy": {
"checkpoints": [
{
"steps": 199966,
"file_path": "results/Huggy/Huggy/Huggy-199966.onnx",
"reward": 3.3193543385714293,
"creation_time": 1677040746.4647925,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199966.pt"
]
},
{
"steps": 399992,
"file_path": "results/Huggy/Huggy/Huggy-399992.onnx",
"reward": 3.7021435361642103,
"creation_time": 1677040988.744778,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399992.pt"
]
},
{
"steps": 599975,
"file_path": "results/Huggy/Huggy/Huggy-599975.onnx",
"reward": 3.9600702297119867,
"creation_time": 1677041235.7360325,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599975.pt"
]
},
{
"steps": 799929,
"file_path": "results/Huggy/Huggy/Huggy-799929.onnx",
"reward": 3.6391965920726457,
"creation_time": 1677041480.51077,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799929.pt"
]
},
{
"steps": 999943,
"file_path": "results/Huggy/Huggy/Huggy-999943.onnx",
"reward": 3.7400763424757484,
"creation_time": 1677041729.346259,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999943.pt"
]
},
{
"steps": 1199990,
"file_path": "results/Huggy/Huggy/Huggy-1199990.onnx",
"reward": 3.661453866257387,
"creation_time": 1677041975.7665586,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199990.pt"
]
},
{
"steps": 1399969,
"file_path": "results/Huggy/Huggy/Huggy-1399969.onnx",
"reward": 3.8098485484474995,
"creation_time": 1677042219.9536133,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399969.pt"
]
},
{
"steps": 1599970,
"file_path": "results/Huggy/Huggy/Huggy-1599970.onnx",
"reward": 3.714769761246371,
"creation_time": 1677042468.4368196,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599970.pt"
]
},
{
"steps": 1799997,
"file_path": "results/Huggy/Huggy/Huggy-1799997.onnx",
"reward": 3.919185585097263,
"creation_time": 1677042716.7145681,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799997.pt"
]
},
{
"steps": 1999727,
"file_path": "results/Huggy/Huggy/Huggy-1999727.onnx",
"reward": 3.885337793827057,
"creation_time": 1677042966.6255744,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999727.pt"
]
},
{
"steps": 2000149,
"file_path": "results/Huggy/Huggy/Huggy-2000149.onnx",
"reward": 3.9279750847234958,
"creation_time": 1677042966.7647905,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000149.pt"
]
}
],
"final_checkpoint": {
"steps": 2000149,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.9279750847234958,
"creation_time": 1677042966.7647905,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000149.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}