{
"Huggy": {
"checkpoints": [
{
"steps": 2000005,
"file_path": "results/Huggy2/Huggy/Huggy-2000005.onnx",
"reward": null,
"creation_time": 1730971659.624453,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000005.pt"
]
},
{
"steps": 2000005,
"file_path": "results/Huggy2/Huggy/Huggy-2000005.onnx",
"reward": null,
"creation_time": 1730971782.3741643,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000005.pt"
]
},
{
"steps": 3999998,
"file_path": "results/Huggy2/Huggy/Huggy-3999998.onnx",
"reward": 2.4132518768310547,
"creation_time": 1730994114.5808702,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-3999998.pt"
]
},
{
"steps": 4199907,
"file_path": "results/Huggy2/Huggy/Huggy-4199907.onnx",
"reward": 4.058940881227394,
"creation_time": 1730994232.9291582,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-4199907.pt"
]
},
{
"steps": 4399979,
"file_path": "results/Huggy2/Huggy/Huggy-4399979.onnx",
"reward": 3.8276446838836886,
"creation_time": 1730994363.0733788,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-4399979.pt"
]
},
{
"steps": 4599952,
"file_path": "results/Huggy2/Huggy/Huggy-4599952.onnx",
"reward": 4.115278379619122,
"creation_time": 1730994491.3636684,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-4599952.pt"
]
},
{
"steps": 4799970,
"file_path": "results/Huggy2/Huggy/Huggy-4799970.onnx",
"reward": 3.8198996998889503,
"creation_time": 1730994627.9772441,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-4799970.pt"
]
},
{
"steps": 4999992,
"file_path": "results/Huggy2/Huggy/Huggy-4999992.onnx",
"reward": 3.9485985781284088,
"creation_time": 1730994751.6365228,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-4999992.pt"
]
},
{
"steps": 5199997,
"file_path": "results/Huggy2/Huggy/Huggy-5199997.onnx",
"reward": 3.9786050888086786,
"creation_time": 1730994892.7962255,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-5199997.pt"
]
},
{
"steps": 5399985,
"file_path": "results/Huggy2/Huggy/Huggy-5399985.onnx",
"reward": 3.775513978327735,
"creation_time": 1730995032.5354316,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-5399985.pt"
]
},
{
"steps": 5599953,
"file_path": "results/Huggy2/Huggy/Huggy-5599953.onnx",
"reward": 3.994883793015634,
"creation_time": 1730995158.5294242,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-5599953.pt"
]
},
{
"steps": 5799923,
"file_path": "results/Huggy2/Huggy/Huggy-5799923.onnx",
"reward": 4.160376542880211,
"creation_time": 1730995281.4174619,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-5799923.pt"
]
},
{
"steps": 5999981,
"file_path": "results/Huggy2/Huggy/Huggy-5999981.onnx",
"reward": 3.7408325822553903,
"creation_time": 1730995411.4886782,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-5999981.pt"
]
},
{
"steps": 6000002,
"file_path": "results/Huggy2/Huggy/Huggy-6000002.onnx",
"reward": 3.7149273405472436,
"creation_time": 1730995411.5590863,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-6000002.pt"
]
},
{
"steps": 6000002,
"file_path": "results/Huggy2/Huggy/Huggy-6000002.onnx",
"reward": null,
"creation_time": 1731145338.8742433,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-6000002.pt"
]
}
],
"final_checkpoint": {
"steps": 6000002,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": null,
"creation_time": 1731145338.8742433,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-6000002.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu118"
}
}