{
"Huggy": {
"checkpoints": [
{
"steps": 199825,
"file_path": "results/Huggy2/Huggy/Huggy-199825.onnx",
"reward": 3.6249835984460237,
"creation_time": 1713528657.2044537,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199825.pt"
]
},
{
"steps": 399930,
"file_path": "results/Huggy2/Huggy/Huggy-399930.onnx",
"reward": 3.856712209550958,
"creation_time": 1713528906.1191604,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399930.pt"
]
},
{
"steps": 599942,
"file_path": "results/Huggy2/Huggy/Huggy-599942.onnx",
"reward": 3.828015446662903,
"creation_time": 1713529164.2023172,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599942.pt"
]
},
{
"steps": 799964,
"file_path": "results/Huggy2/Huggy/Huggy-799964.onnx",
"reward": 4.056712900354563,
"creation_time": 1713529410.8465471,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799964.pt"
]
},
{
"steps": 999967,
"file_path": "results/Huggy2/Huggy/Huggy-999967.onnx",
"reward": 3.883179734541675,
"creation_time": 1713529663.6873984,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999967.pt"
]
},
{
"steps": 1199938,
"file_path": "results/Huggy2/Huggy/Huggy-1199938.onnx",
"reward": 3.549451815585295,
"creation_time": 1713529912.6181216,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199938.pt"
]
},
{
"steps": 1399977,
"file_path": "results/Huggy2/Huggy/Huggy-1399977.onnx",
"reward": 3.981098348444158,
"creation_time": 1713530161.3225987,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399977.pt"
]
},
{
"steps": 1599953,
"file_path": "results/Huggy2/Huggy/Huggy-1599953.onnx",
"reward": 3.531011972955601,
"creation_time": 1713530408.4737034,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599953.pt"
]
},
{
"steps": 1799952,
"file_path": "results/Huggy2/Huggy/Huggy-1799952.onnx",
"reward": 3.670169608733233,
"creation_time": 1713530654.9205148,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799952.pt"
]
},
{
"steps": 1999900,
"file_path": "results/Huggy2/Huggy/Huggy-1999900.onnx",
"reward": 3.548304762159075,
"creation_time": 1713530903.3175042,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999900.pt"
]
},
{
"steps": 2000013,
"file_path": "results/Huggy2/Huggy/Huggy-2000013.onnx",
"reward": 3.5415411428971724,
"creation_time": 1713530903.435252,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000013.pt"
]
}
],
"final_checkpoint": {
"steps": 2000013,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.5415411428971724,
"creation_time": 1713530903.435252,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000013.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}