{
"Huggy": {
"checkpoints": [
{
"steps": 199526,
"file_path": "results/Huggy/Huggy/Huggy-199526.onnx",
"reward": 3.327817824151781,
"creation_time": 1698273731.057298,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199526.pt"
]
},
{
"steps": 399866,
"file_path": "results/Huggy/Huggy/Huggy-399866.onnx",
"reward": 3.52766728584583,
"creation_time": 1698273983.5290837,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399866.pt"
]
},
{
"steps": 599993,
"file_path": "results/Huggy/Huggy/Huggy-599993.onnx",
"reward": 4.209592887333462,
"creation_time": 1698274236.6345313,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599993.pt"
]
},
{
"steps": 799861,
"file_path": "results/Huggy/Huggy/Huggy-799861.onnx",
"reward": 3.8152971251234824,
"creation_time": 1698274489.7397587,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799861.pt"
]
},
{
"steps": 999958,
"file_path": "results/Huggy/Huggy/Huggy-999958.onnx",
"reward": 3.6858594645153393,
"creation_time": 1698274743.1874905,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999958.pt"
]
},
{
"steps": 1199925,
"file_path": "results/Huggy/Huggy/Huggy-1199925.onnx",
"reward": 3.683245999472482,
"creation_time": 1698275001.318569,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199925.pt"
]
},
{
"steps": 1399963,
"file_path": "results/Huggy/Huggy/Huggy-1399963.onnx",
"reward": 4.0246346174781005,
"creation_time": 1698275253.587483,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399963.pt"
]
},
{
"steps": 1599998,
"file_path": "results/Huggy/Huggy/Huggy-1599998.onnx",
"reward": 4.090720156695219,
"creation_time": 1698275508.7374837,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599998.pt"
]
},
{
"steps": 1799952,
"file_path": "results/Huggy/Huggy/Huggy-1799952.onnx",
"reward": 3.780651983250393,
"creation_time": 1698275771.923579,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799952.pt"
]
},
{
"steps": 1999982,
"file_path": "results/Huggy/Huggy/Huggy-1999982.onnx",
"reward": 3.437151964973001,
"creation_time": 1698276040.9974256,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999982.pt"
]
},
{
"steps": 2000091,
"file_path": "results/Huggy/Huggy/Huggy-2000091.onnx",
"reward": 3.5355012212480816,
"creation_time": 1698276041.1146905,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000091.pt"
]
}
],
"final_checkpoint": {
"steps": 2000091,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.5355012212480816,
"creation_time": 1698276041.1146905,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000091.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}