{
"Huggy": {
"checkpoints": [
{
"steps": 199727,
"file_path": "results/Huggy/Huggy/Huggy-199727.onnx",
"reward": 3.267557317477006,
"creation_time": 1716535590.2773945,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199727.pt"
]
},
{
"steps": 399996,
"file_path": "results/Huggy/Huggy/Huggy-399996.onnx",
"reward": 3.9484308135341593,
"creation_time": 1716535861.6619978,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399996.pt"
]
},
{
"steps": 599957,
"file_path": "results/Huggy/Huggy/Huggy-599957.onnx",
"reward": 3.430152203887701,
"creation_time": 1716536136.8376865,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599957.pt"
]
},
{
"steps": 799925,
"file_path": "results/Huggy/Huggy/Huggy-799925.onnx",
"reward": 3.9383336086530942,
"creation_time": 1716536402.4914868,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799925.pt"
]
},
{
"steps": 999948,
"file_path": "results/Huggy/Huggy/Huggy-999948.onnx",
"reward": 3.7672657829394445,
"creation_time": 1716536677.1117167,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999948.pt"
]
},
{
"steps": 1199925,
"file_path": "results/Huggy/Huggy/Huggy-1199925.onnx",
"reward": 3.8866901330409513,
"creation_time": 1716536950.626649,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199925.pt"
]
},
{
"steps": 1399879,
"file_path": "results/Huggy/Huggy/Huggy-1399879.onnx",
"reward": 4.673703670501709,
"creation_time": 1716537220.3702395,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399879.pt"
]
},
{
"steps": 1599980,
"file_path": "results/Huggy/Huggy/Huggy-1599980.onnx",
"reward": 3.822165444817232,
"creation_time": 1716537487.0274704,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599980.pt"
]
},
{
"steps": 1799498,
"file_path": "results/Huggy/Huggy/Huggy-1799498.onnx",
"reward": 4.185623844277184,
"creation_time": 1716537747.7287698,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799498.pt"
]
},
{
"steps": 1999982,
"file_path": "results/Huggy/Huggy/Huggy-1999982.onnx",
"reward": 3.2853155592654613,
"creation_time": 1716538013.4639938,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999982.pt"
]
},
{
"steps": 2000082,
"file_path": "results/Huggy/Huggy/Huggy-2000082.onnx",
"reward": 3.324601178367933,
"creation_time": 1716538013.5853064,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000082.pt"
]
}
],
"final_checkpoint": {
"steps": 2000082,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.324601178367933,
"creation_time": 1716538013.5853064,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000082.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}