{
"Huggy": {
"checkpoints": [
{
"steps": 199811,
"file_path": "results/Huggy2/Huggy/Huggy-199811.onnx",
"reward": 3.1577204579398748,
"creation_time": 1709011181.9722836,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199811.pt"
]
},
{
"steps": 399991,
"file_path": "results/Huggy2/Huggy/Huggy-399991.onnx",
"reward": 4.04076335347932,
"creation_time": 1709011454.960431,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399991.pt"
]
},
{
"steps": 599927,
"file_path": "results/Huggy2/Huggy/Huggy-599927.onnx",
"reward": 3.8407411575317383,
"creation_time": 1709011733.4501503,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599927.pt"
]
},
{
"steps": 799926,
"file_path": "results/Huggy2/Huggy/Huggy-799926.onnx",
"reward": 3.7653383912200136,
"creation_time": 1709012009.56713,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799926.pt"
]
},
{
"steps": 999976,
"file_path": "results/Huggy2/Huggy/Huggy-999976.onnx",
"reward": 3.958354109439297,
"creation_time": 1709012281.6810107,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999976.pt"
]
},
{
"steps": 1199966,
"file_path": "results/Huggy2/Huggy/Huggy-1199966.onnx",
"reward": 3.9427081651630855,
"creation_time": 1709012545.6358445,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199966.pt"
]
},
{
"steps": 1399291,
"file_path": "results/Huggy2/Huggy/Huggy-1399291.onnx",
"reward": 3.767989499906523,
"creation_time": 1709012807.979791,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399291.pt"
]
},
{
"steps": 1599978,
"file_path": "results/Huggy2/Huggy/Huggy-1599978.onnx",
"reward": 3.564654503396885,
"creation_time": 1709013072.7602017,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599978.pt"
]
},
{
"steps": 1799999,
"file_path": "results/Huggy2/Huggy/Huggy-1799999.onnx",
"reward": 3.4947403207253873,
"creation_time": 1709013340.2925375,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799999.pt"
]
},
{
"steps": 1999905,
"file_path": "results/Huggy2/Huggy/Huggy-1999905.onnx",
"reward": 3.0314113795757294,
"creation_time": 1709013607.0845046,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999905.pt"
]
},
{
"steps": 2000018,
"file_path": "results/Huggy2/Huggy/Huggy-2000018.onnx",
"reward": 3.259492223079388,
"creation_time": 1709013607.2391646,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000018.pt"
]
}
],
"final_checkpoint": {
"steps": 2000018,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.259492223079388,
"creation_time": 1709013607.2391646,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000018.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}