{
"Huggy": {
"checkpoints": [
{
"steps": 199949,
"file_path": "results/Huggy/Huggy/Huggy-199949.onnx",
"reward": 3.6713402703696607,
"creation_time": 1700919179.2461498,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199949.pt"
]
},
{
"steps": 399944,
"file_path": "results/Huggy/Huggy/Huggy-399944.onnx",
"reward": 3.8429261088371276,
"creation_time": 1700919430.9642546,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399944.pt"
]
},
{
"steps": 599867,
"file_path": "results/Huggy/Huggy/Huggy-599867.onnx",
"reward": 4.6983922123909,
"creation_time": 1700919689.9986145,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599867.pt"
]
},
{
"steps": 799988,
"file_path": "results/Huggy/Huggy/Huggy-799988.onnx",
"reward": 3.592869768309039,
"creation_time": 1700919943.8493369,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799988.pt"
]
},
{
"steps": 999929,
"file_path": "results/Huggy/Huggy/Huggy-999929.onnx",
"reward": 3.780677246243766,
"creation_time": 1700920205.2556415,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999929.pt"
]
},
{
"steps": 1199581,
"file_path": "results/Huggy/Huggy/Huggy-1199581.onnx",
"reward": 3.8490450026897283,
"creation_time": 1700920468.950303,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199581.pt"
]
},
{
"steps": 1399951,
"file_path": "results/Huggy/Huggy/Huggy-1399951.onnx",
"reward": 3.881956226669007,
"creation_time": 1700920731.4480836,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399951.pt"
]
},
{
"steps": 1599964,
"file_path": "results/Huggy/Huggy/Huggy-1599964.onnx",
"reward": 3.556954741029811,
"creation_time": 1700920992.6100821,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599964.pt"
]
},
{
"steps": 1799982,
"file_path": "results/Huggy/Huggy/Huggy-1799982.onnx",
"reward": 3.3010575811741716,
"creation_time": 1700921247.628115,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799982.pt"
]
},
{
"steps": 1999699,
"file_path": "results/Huggy/Huggy/Huggy-1999699.onnx",
"reward": 3.000727210726057,
"creation_time": 1700921508.3149974,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999699.pt"
]
},
{
"steps": 2000449,
"file_path": "results/Huggy/Huggy/Huggy-2000449.onnx",
"reward": 2.2512875497341156,
"creation_time": 1700921508.510106,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000449.pt"
]
}
],
"final_checkpoint": {
"steps": 2000449,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 2.2512875497341156,
"creation_time": 1700921508.510106,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000449.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}