ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199791,
                "file_path": "results/Huggy/Huggy/Huggy-199791.onnx",
                "reward": 3.4568364024162292,
                "creation_time": 1671432717.6919873,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199791.pt"
                ]
            },
            {
                "steps": 399604,
                "file_path": "results/Huggy/Huggy/Huggy-399604.onnx",
                "reward": 3.6252012404741025,
                "creation_time": 1671432933.7230594,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399604.pt"
                ]
            },
            {
                "steps": 599885,
                "file_path": "results/Huggy/Huggy/Huggy-599885.onnx",
                "reward": 4.008761882781982,
                "creation_time": 1671433151.5953593,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599885.pt"
                ]
            },
            {
                "steps": 799945,
                "file_path": "results/Huggy/Huggy/Huggy-799945.onnx",
                "reward": 3.7177812482999717,
                "creation_time": 1671433366.4454346,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799945.pt"
                ]
            },
            {
                "steps": 999920,
                "file_path": "results/Huggy/Huggy/Huggy-999920.onnx",
                "reward": 3.7471163395954217,
                "creation_time": 1671433586.8332605,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999920.pt"
                ]
            },
            {
                "steps": 1199982,
                "file_path": "results/Huggy/Huggy/Huggy-1199982.onnx",
                "reward": 3.109307156668769,
                "creation_time": 1671433804.6533952,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199982.pt"
                ]
            },
            {
                "steps": 1399995,
                "file_path": "results/Huggy/Huggy/Huggy-1399995.onnx",
                "reward": 3.7925699782925983,
                "creation_time": 1671434021.135567,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399995.pt"
                ]
            },
            {
                "steps": 1599962,
                "file_path": "results/Huggy/Huggy/Huggy-1599962.onnx",
                "reward": 3.7574203978885303,
                "creation_time": 1671434237.485929,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599962.pt"
                ]
            },
            {
                "steps": 1799964,
                "file_path": "results/Huggy/Huggy/Huggy-1799964.onnx",
                "reward": 3.675552126523611,
                "creation_time": 1671434454.9933429,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799964.pt"
                ]
            },
            {
                "steps": 1999989,
                "file_path": "results/Huggy/Huggy/Huggy-1999989.onnx",
                "reward": 3.499608057632781,
                "creation_time": 1671434667.3004525,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999989.pt"
                ]
            },
            {
                "steps": 2000046,
                "file_path": "results/Huggy/Huggy/Huggy-2000046.onnx",
                "reward": 3.5065751585156417,
                "creation_time": 1671434667.4095287,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000046.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000046,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.5065751585156417,
            "creation_time": 1671434667.4095287,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000046.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}