{
"Huggy": {
"checkpoints": [
{
"steps": 199839,
"file_path": "results/Huggy2/Huggy/Huggy-199839.onnx",
"reward": 3.5311864460668256,
"creation_time": 1717099480.76562,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199839.pt"
]
},
{
"steps": 399878,
"file_path": "results/Huggy2/Huggy/Huggy-399878.onnx",
"reward": 3.7420224855023045,
"creation_time": 1717099717.3040924,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399878.pt"
]
},
{
"steps": 599908,
"file_path": "results/Huggy2/Huggy/Huggy-599908.onnx",
"reward": 4.867665033591421,
"creation_time": 1717099957.9978063,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599908.pt"
]
},
{
"steps": 799947,
"file_path": "results/Huggy2/Huggy/Huggy-799947.onnx",
"reward": 3.748485175018408,
"creation_time": 1717100196.8072662,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799947.pt"
]
},
{
"steps": 999950,
"file_path": "results/Huggy2/Huggy/Huggy-999950.onnx",
"reward": 3.7428840758308533,
"creation_time": 1717100439.8733716,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999950.pt"
]
},
{
"steps": 1199764,
"file_path": "results/Huggy2/Huggy/Huggy-1199764.onnx",
"reward": 3.5237439265756896,
"creation_time": 1717100685.6948397,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199764.pt"
]
},
{
"steps": 1399643,
"file_path": "results/Huggy2/Huggy/Huggy-1399643.onnx",
"reward": 4.3622515598932905,
"creation_time": 1717100930.968463,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399643.pt"
]
},
{
"steps": 1599976,
"file_path": "results/Huggy2/Huggy/Huggy-1599976.onnx",
"reward": 3.6921708428790803,
"creation_time": 1717101171.7678487,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599976.pt"
]
},
{
"steps": 1799977,
"file_path": "results/Huggy2/Huggy/Huggy-1799977.onnx",
"reward": 3.724430932188934,
"creation_time": 1717101426.0483813,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799977.pt"
]
},
{
"steps": 1999992,
"file_path": "results/Huggy2/Huggy/Huggy-1999992.onnx",
"reward": 3.5701128746333874,
"creation_time": 1717101666.3276532,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999992.pt"
]
},
{
"steps": 2000038,
"file_path": "results/Huggy2/Huggy/Huggy-2000038.onnx",
"reward": 3.578634641109369,
"creation_time": 1717101666.4981315,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000038.pt"
]
}
],
"final_checkpoint": {
"steps": 2000038,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.578634641109369,
"creation_time": 1717101666.4981315,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000038.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}