{
"Huggy": {
"checkpoints": [
{
"steps": 199771,
"file_path": "results/Huggy/Huggy/Huggy-199771.onnx",
"reward": 3.2315756210914026,
"creation_time": 1696427009.6985219,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199771.pt"
]
},
{
"steps": 399901,
"file_path": "results/Huggy/Huggy/Huggy-399901.onnx",
"reward": 3.865251488604788,
"creation_time": 1696427243.427855,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399901.pt"
]
},
{
"steps": 599922,
"file_path": "results/Huggy/Huggy/Huggy-599922.onnx",
"reward": 3.361267301169309,
"creation_time": 1696427482.3369634,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599922.pt"
]
},
{
"steps": 799810,
"file_path": "results/Huggy/Huggy/Huggy-799810.onnx",
"reward": 3.9600403505292805,
"creation_time": 1696427717.9829037,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799810.pt"
]
},
{
"steps": 999383,
"file_path": "results/Huggy/Huggy/Huggy-999383.onnx",
"reward": 4.07512805389084,
"creation_time": 1696427958.164763,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999383.pt"
]
},
{
"steps": 1199991,
"file_path": "results/Huggy/Huggy/Huggy-1199991.onnx",
"reward": 3.5848920933711224,
"creation_time": 1696428201.563898,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199991.pt"
]
},
{
"steps": 1399971,
"file_path": "results/Huggy/Huggy/Huggy-1399971.onnx",
"reward": 3.7509961485862733,
"creation_time": 1696428443.8684785,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399971.pt"
]
},
{
"steps": 1599970,
"file_path": "results/Huggy/Huggy/Huggy-1599970.onnx",
"reward": 3.8904009980348806,
"creation_time": 1696428680.110024,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599970.pt"
]
},
{
"steps": 1799941,
"file_path": "results/Huggy/Huggy/Huggy-1799941.onnx",
"reward": 3.984359612414887,
"creation_time": 1696428924.0571036,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799941.pt"
]
},
{
"steps": 1999927,
"file_path": "results/Huggy/Huggy/Huggy-1999927.onnx",
"reward": 3.9611506378146966,
"creation_time": 1696429166.1662176,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999927.pt"
]
},
{
"steps": 2000025,
"file_path": "results/Huggy/Huggy/Huggy-2000025.onnx",
"reward": 3.9756240099668503,
"creation_time": 1696429166.3127604,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000025.pt"
]
}
],
"final_checkpoint": {
"steps": 2000025,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.9756240099668503,
"creation_time": 1696429166.3127604,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000025.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}