{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199993,
        "file_path": "results/Huggy/Huggy/Huggy-199993.onnx",
        "reward": 3.2231403564063594,
        "creation_time": 1702189852.559583,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199993.pt"
        ]
      },
      {
        "steps": 399955,
        "file_path": "results/Huggy/Huggy/Huggy-399955.onnx",
        "reward": 3.31037422143496,
        "creation_time": 1702190111.516668,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399955.pt"
        ]
      },
      {
        "steps": 599944,
        "file_path": "results/Huggy/Huggy/Huggy-599944.onnx",
        "reward": 4.351004174777439,
        "creation_time": 1702190371.7101717,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599944.pt"
        ]
      },
      {
        "steps": 799290,
        "file_path": "results/Huggy/Huggy/Huggy-799290.onnx",
        "reward": 3.573318763803213,
        "creation_time": 1702190627.1727443,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799290.pt"
        ]
      },
      {
        "steps": 999979,
        "file_path": "results/Huggy/Huggy/Huggy-999979.onnx",
        "reward": 3.364635509566257,
        "creation_time": 1702190890.5418005,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999979.pt"
        ]
      },
      {
        "steps": 1199679,
        "file_path": "results/Huggy/Huggy/Huggy-1199679.onnx",
        "reward": 3.721624312301477,
        "creation_time": 1702191149.6372888,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199679.pt"
        ]
      },
      {
        "steps": 1399871,
        "file_path": "results/Huggy/Huggy/Huggy-1399871.onnx",
        "reward": 3.586604287160323,
        "creation_time": 1702191408.1608357,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399871.pt"
        ]
      },
      {
        "steps": 1599567,
        "file_path": "results/Huggy/Huggy/Huggy-1599567.onnx",
        "reward": 3.3383848210361515,
        "creation_time": 1702191670.1185737,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599567.pt"
        ]
      },
      {
        "steps": 1799987,
        "file_path": "results/Huggy/Huggy/Huggy-1799987.onnx",
        "reward": 4.070258033041861,
        "creation_time": 1702191932.7720013,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799987.pt"
        ]
      },
      {
        "steps": 1999941,
        "file_path": "results/Huggy/Huggy/Huggy-1999941.onnx",
        "reward": 3.480802477859869,
        "creation_time": 1702192190.2270904,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999941.pt"
        ]
      },
      {
        "steps": 2000024,
        "file_path": "results/Huggy/Huggy/Huggy-2000024.onnx",
        "reward": 3.483766389615608,
        "creation_time": 1702192190.391701,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000024.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000024,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.483766389615608,
      "creation_time": 1702192190.391701,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000024.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.1.0.dev0",
    "torch_version": "2.1.1+cu121"
  }
}