{
"Huggy": {
"checkpoints": [
{
"steps": 199935,
"file_path": "results/FlashHuggy/Huggy/Huggy-199935.onnx",
"reward": 3.304571220979971,
"creation_time": 1673661272.169846,
"auxillary_file_paths": [
"results/FlashHuggy/Huggy/Huggy-199935.pt"
]
},
{
"steps": 399897,
"file_path": "results/FlashHuggy/Huggy/Huggy-399897.onnx",
"reward": 3.829487290978432,
"creation_time": 1673661487.647215,
"auxillary_file_paths": [
"results/FlashHuggy/Huggy/Huggy-399897.pt"
]
},
{
"steps": 599968,
"file_path": "results/FlashHuggy/Huggy/Huggy-599968.onnx",
"reward": 3.340796402909539,
"creation_time": 1673661708.270494,
"auxillary_file_paths": [
"results/FlashHuggy/Huggy/Huggy-599968.pt"
]
},
{
"steps": 799971,
"file_path": "results/FlashHuggy/Huggy/Huggy-799971.onnx",
"reward": 4.1956852388625245,
"creation_time": 1673661926.8203893,
"auxillary_file_paths": [
"results/FlashHuggy/Huggy/Huggy-799971.pt"
]
},
{
"steps": 999884,
"file_path": "results/FlashHuggy/Huggy/Huggy-999884.onnx",
"reward": 3.929773710764848,
"creation_time": 1673662146.4660113,
"auxillary_file_paths": [
"results/FlashHuggy/Huggy/Huggy-999884.pt"
]
},
{
"steps": 1199969,
"file_path": "results/FlashHuggy/Huggy/Huggy-1199969.onnx",
"reward": 3.865914594482731,
"creation_time": 1673662366.452228,
"auxillary_file_paths": [
"results/FlashHuggy/Huggy/Huggy-1199969.pt"
]
},
{
"steps": 1399973,
"file_path": "results/FlashHuggy/Huggy/Huggy-1399973.onnx",
"reward": 3.8500112186778677,
"creation_time": 1673662589.2231812,
"auxillary_file_paths": [
"results/FlashHuggy/Huggy/Huggy-1399973.pt"
]
},
{
"steps": 1599980,
"file_path": "results/FlashHuggy/Huggy/Huggy-1599980.onnx",
"reward": 3.945067383163604,
"creation_time": 1673662807.2532465,
"auxillary_file_paths": [
"results/FlashHuggy/Huggy/Huggy-1599980.pt"
]
},
{
"steps": 1799900,
"file_path": "results/FlashHuggy/Huggy/Huggy-1799900.onnx",
"reward": 3.803860918919843,
"creation_time": 1673663031.0737464,
"auxillary_file_paths": [
"results/FlashHuggy/Huggy/Huggy-1799900.pt"
]
},
{
"steps": 1999967,
"file_path": "results/FlashHuggy/Huggy/Huggy-1999967.onnx",
"reward": 3.5672380570322275,
"creation_time": 1673663253.0621486,
"auxillary_file_paths": [
"results/FlashHuggy/Huggy/Huggy-1999967.pt"
]
},
{
"steps": 2000118,
"file_path": "results/FlashHuggy/Huggy/Huggy-2000118.onnx",
"reward": 3.638067181293781,
"creation_time": 1673663253.1812181,
"auxillary_file_paths": [
"results/FlashHuggy/Huggy/Huggy-2000118.pt"
]
}
],
"final_checkpoint": {
"steps": 2000118,
"file_path": "results/FlashHuggy/Huggy.onnx",
"reward": 3.638067181293781,
"creation_time": 1673663253.1812181,
"auxillary_file_paths": [
"results/FlashHuggy/Huggy/Huggy-2000118.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}