{
"Huggy": {
"checkpoints": [
{
"steps": 199928,
"file_path": "results/Huggy2/Huggy/Huggy-199928.onnx",
"reward": 3.3398073413155296,
"creation_time": 1721329489.598796,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199928.pt"
]
},
{
"steps": 399946,
"file_path": "results/Huggy2/Huggy/Huggy-399946.onnx",
"reward": 3.375769746108133,
"creation_time": 1721329723.52185,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399946.pt"
]
},
{
"steps": 599922,
"file_path": "results/Huggy2/Huggy/Huggy-599922.onnx",
"reward": 3.439303125205793,
"creation_time": 1721329963.0985126,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599922.pt"
]
},
{
"steps": 799961,
"file_path": "results/Huggy2/Huggy/Huggy-799961.onnx",
"reward": 3.7467383615127425,
"creation_time": 1721330198.2255802,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799961.pt"
]
},
{
"steps": 999931,
"file_path": "results/Huggy2/Huggy/Huggy-999931.onnx",
"reward": 3.817496872276341,
"creation_time": 1721330435.2703485,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999931.pt"
]
},
{
"steps": 1199964,
"file_path": "results/Huggy2/Huggy/Huggy-1199964.onnx",
"reward": 3.6617385262534734,
"creation_time": 1721330673.2916229,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199964.pt"
]
},
{
"steps": 1399985,
"file_path": "results/Huggy2/Huggy/Huggy-1399985.onnx",
"reward": 3.601507070329454,
"creation_time": 1721330908.9555495,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399985.pt"
]
},
{
"steps": 1599984,
"file_path": "results/Huggy2/Huggy/Huggy-1599984.onnx",
"reward": 3.478937815646736,
"creation_time": 1721331146.6207397,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599984.pt"
]
},
{
"steps": 1799959,
"file_path": "results/Huggy2/Huggy/Huggy-1799959.onnx",
"reward": 3.711737678534743,
"creation_time": 1721331387.550481,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799959.pt"
]
},
{
"steps": 1999978,
"file_path": "results/Huggy2/Huggy/Huggy-1999978.onnx",
"reward": 4.631653785705566,
"creation_time": 1721331627.8594358,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999978.pt"
]
},
{
"steps": 2000072,
"file_path": "results/Huggy2/Huggy/Huggy-2000072.onnx",
"reward": 4.736527019076878,
"creation_time": 1721331627.9746675,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000072.pt"
]
}
],
"final_checkpoint": {
"steps": 2000072,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.736527019076878,
"creation_time": 1721331627.9746675,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000072.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.1+cu121"
}
}