{
"Huggy": {
"checkpoints": [
{
"steps": 199836,
"file_path": "results/Huggy/Huggy/Huggy-199836.onnx",
"reward": 3.2086491561403463,
"creation_time": 1695387518.2520094,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199836.pt"
]
},
{
"steps": 399967,
"file_path": "results/Huggy/Huggy/Huggy-399967.onnx",
"reward": 3.856270899959639,
"creation_time": 1695387795.4011412,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399967.pt"
]
},
{
"steps": 599851,
"file_path": "results/Huggy/Huggy/Huggy-599851.onnx",
"reward": 4.251424312591553,
"creation_time": 1695388078.6762586,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599851.pt"
]
},
{
"steps": 799982,
"file_path": "results/Huggy/Huggy/Huggy-799982.onnx",
"reward": 3.686887707652115,
"creation_time": 1695388359.774267,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799982.pt"
]
},
{
"steps": 999977,
"file_path": "results/Huggy/Huggy/Huggy-999977.onnx",
"reward": 3.7279611091161597,
"creation_time": 1695388645.003019,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999977.pt"
]
},
{
"steps": 1199953,
"file_path": "results/Huggy/Huggy/Huggy-1199953.onnx",
"reward": 4.020260489716822,
"creation_time": 1695388931.314441,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199953.pt"
]
},
{
"steps": 1399975,
"file_path": "results/Huggy/Huggy/Huggy-1399975.onnx",
"reward": 3.975845017122186,
"creation_time": 1695389211.0389469,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399975.pt"
]
},
{
"steps": 1599934,
"file_path": "results/Huggy/Huggy/Huggy-1599934.onnx",
"reward": 3.552722630962249,
"creation_time": 1695389498.6741178,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599934.pt"
]
},
{
"steps": 1799978,
"file_path": "results/Huggy/Huggy/Huggy-1799978.onnx",
"reward": 3.9611952494061184,
"creation_time": 1695389784.0158973,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799978.pt"
]
},
{
"steps": 1999632,
"file_path": "results/Huggy/Huggy/Huggy-1999632.onnx",
"reward": 3.490899452796349,
"creation_time": 1695390067.9840913,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999632.pt"
]
},
{
"steps": 2000382,
"file_path": "results/Huggy/Huggy/Huggy-2000382.onnx",
"reward": 2.998300790786743,
"creation_time": 1695390068.1423914,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000382.pt"
]
}
],
"final_checkpoint": {
"steps": 2000382,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 2.998300790786743,
"creation_time": 1695390068.1423914,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000382.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}