{
"Huggy": {
"checkpoints": [
{
"steps": 199923,
"file_path": "results/Huggy2/Huggy/Huggy-199923.onnx",
"reward": 3.010779789515904,
"creation_time": 1708594812.4073117,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199923.pt"
]
},
{
"steps": 399991,
"file_path": "results/Huggy2/Huggy/Huggy-399991.onnx",
"reward": 3.594485632636968,
"creation_time": 1708595038.195535,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399991.pt"
]
},
{
"steps": 599969,
"file_path": "results/Huggy2/Huggy/Huggy-599969.onnx",
"reward": 3.886573081215223,
"creation_time": 1708595267.1048977,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599969.pt"
]
},
{
"steps": 799996,
"file_path": "results/Huggy2/Huggy/Huggy-799996.onnx",
"reward": 3.708375915424111,
"creation_time": 1708595497.945142,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799996.pt"
]
},
{
"steps": 999998,
"file_path": "results/Huggy2/Huggy/Huggy-999998.onnx",
"reward": 4.01442478734906,
"creation_time": 1708595728.9174092,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999998.pt"
]
},
{
"steps": 1199316,
"file_path": "results/Huggy2/Huggy/Huggy-1199316.onnx",
"reward": 3.474983574176321,
"creation_time": 1708595960.8275325,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199316.pt"
]
},
{
"steps": 1399928,
"file_path": "results/Huggy2/Huggy/Huggy-1399928.onnx",
"reward": 4.003123954815023,
"creation_time": 1708596188.3584433,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399928.pt"
]
},
{
"steps": 1599975,
"file_path": "results/Huggy2/Huggy/Huggy-1599975.onnx",
"reward": 3.5718506108159604,
"creation_time": 1708596419.9705513,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599975.pt"
]
},
{
"steps": 1799696,
"file_path": "results/Huggy2/Huggy/Huggy-1799696.onnx",
"reward": 3.9323033777255456,
"creation_time": 1708596651.9300272,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799696.pt"
]
},
{
"steps": 1999935,
"file_path": "results/Huggy2/Huggy/Huggy-1999935.onnx",
"reward": 3.674261450767517,
"creation_time": 1708596882.520589,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999935.pt"
]
},
{
"steps": 2000067,
"file_path": "results/Huggy2/Huggy/Huggy-2000067.onnx",
"reward": 3.7020465339102397,
"creation_time": 1708596882.6395905,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000067.pt"
]
}
],
"final_checkpoint": {
"steps": 2000067,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.7020465339102397,
"creation_time": 1708596882.6395905,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000067.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.0+cu121"
}
}