{
"Huggy": {
"checkpoints": [
{
"steps": 199925,
"file_path": "results/Huggy/Huggy/Huggy-199925.onnx",
"reward": 3.2058789682766746,
"creation_time": 1697433613.3949654,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199925.pt"
]
},
{
"steps": 399840,
"file_path": "results/Huggy/Huggy/Huggy-399840.onnx",
"reward": 3.7530351768840444,
"creation_time": 1697433869.5474691,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399840.pt"
]
},
{
"steps": 599971,
"file_path": "results/Huggy/Huggy/Huggy-599971.onnx",
"reward": 3.499417503674825,
"creation_time": 1697434128.3373022,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599971.pt"
]
},
{
"steps": 799713,
"file_path": "results/Huggy/Huggy/Huggy-799713.onnx",
"reward": 4.083117828024237,
"creation_time": 1697434386.6321754,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799713.pt"
]
},
{
"steps": 999962,
"file_path": "results/Huggy/Huggy/Huggy-999962.onnx",
"reward": 3.9698327001950418,
"creation_time": 1697434645.826872,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999962.pt"
]
},
{
"steps": 1199990,
"file_path": "results/Huggy/Huggy/Huggy-1199990.onnx",
"reward": 3.8937958919838684,
"creation_time": 1697434906.0192618,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199990.pt"
]
},
{
"steps": 1399975,
"file_path": "results/Huggy/Huggy/Huggy-1399975.onnx",
"reward": 3.4606650273005166,
"creation_time": 1697435165.545737,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399975.pt"
]
},
{
"steps": 1599879,
"file_path": "results/Huggy/Huggy/Huggy-1599879.onnx",
"reward": 3.7633613358714624,
"creation_time": 1697435422.311666,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599879.pt"
]
},
{
"steps": 1799945,
"file_path": "results/Huggy/Huggy/Huggy-1799945.onnx",
"reward": 3.9802071661562533,
"creation_time": 1697435678.856324,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799945.pt"
]
},
{
"steps": 1999965,
"file_path": "results/Huggy/Huggy/Huggy-1999965.onnx",
"reward": 3.7520276445608873,
"creation_time": 1697435937.2812765,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999965.pt"
]
},
{
"steps": 2000083,
"file_path": "results/Huggy/Huggy/Huggy-2000083.onnx",
"reward": 3.7962506987013906,
"creation_time": 1697435937.394238,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000083.pt"
]
}
],
"final_checkpoint": {
"steps": 2000083,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7962506987013906,
"creation_time": 1697435937.394238,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000083.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.0.1+cu118"
}
}