{
"Huggy": {
"checkpoints": [
{
"steps": 199714,
"file_path": "results/Huggy2/Huggy/Huggy-199714.onnx",
"reward": 3.6676728641777707,
"creation_time": 1709584446.802781,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199714.pt"
]
},
{
"steps": 399992,
"file_path": "results/Huggy2/Huggy/Huggy-399992.onnx",
"reward": 3.6443264646189553,
"creation_time": 1709584673.8806112,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399992.pt"
]
},
{
"steps": 599981,
"file_path": "results/Huggy2/Huggy/Huggy-599981.onnx",
"reward": 3.570964066982269,
"creation_time": 1709584904.6507087,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599981.pt"
]
},
{
"steps": 799478,
"file_path": "results/Huggy2/Huggy/Huggy-799478.onnx",
"reward": 3.766676047673592,
"creation_time": 1709585130.6552663,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799478.pt"
]
},
{
"steps": 999923,
"file_path": "results/Huggy2/Huggy/Huggy-999923.onnx",
"reward": 3.9795389704082322,
"creation_time": 1709585363.4145126,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999923.pt"
]
},
{
"steps": 1199816,
"file_path": "results/Huggy2/Huggy/Huggy-1199816.onnx",
"reward": 3.8786371384348186,
"creation_time": 1709585600.7678304,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199816.pt"
]
},
{
"steps": 1399924,
"file_path": "results/Huggy2/Huggy/Huggy-1399924.onnx",
"reward": null,
"creation_time": 1709585833.175637,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399924.pt"
]
},
{
"steps": 1599319,
"file_path": "results/Huggy2/Huggy/Huggy-1599319.onnx",
"reward": 3.957650661468506,
"creation_time": 1709586060.2935889,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599319.pt"
]
},
{
"steps": 1799889,
"file_path": "results/Huggy2/Huggy/Huggy-1799889.onnx",
"reward": 3.8471216152353986,
"creation_time": 1709586293.9527276,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799889.pt"
]
},
{
"steps": 1999940,
"file_path": "results/Huggy2/Huggy/Huggy-1999940.onnx",
"reward": 4.577943665640695,
"creation_time": 1709586525.4159176,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999940.pt"
]
},
{
"steps": 2000021,
"file_path": "results/Huggy2/Huggy/Huggy-2000021.onnx",
"reward": 4.438754320144653,
"creation_time": 1709586525.5366879,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000021.pt"
]
}
],
"final_checkpoint": {
"steps": 2000021,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.438754320144653,
"creation_time": 1709586525.5366879,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000021.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}