{
"Huggy": {
"checkpoints": [
{
"steps": 199709,
"file_path": "results/Huggy/Huggy/Huggy-199709.onnx",
"reward": 3.3725423514842987,
"creation_time": 1677347275.9403732,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199709.pt"
]
},
{
"steps": 399852,
"file_path": "results/Huggy/Huggy/Huggy-399852.onnx",
"reward": 3.7150314697679483,
"creation_time": 1677347535.1199374,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399852.pt"
]
},
{
"steps": 599809,
"file_path": "results/Huggy/Huggy/Huggy-599809.onnx",
"reward": 3.9986566172705755,
"creation_time": 1677347794.1464276,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599809.pt"
]
},
{
"steps": 799968,
"file_path": "results/Huggy/Huggy/Huggy-799968.onnx",
"reward": 3.808503575299097,
"creation_time": 1677348049.1185973,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799968.pt"
]
},
{
"steps": 999963,
"file_path": "results/Huggy/Huggy/Huggy-999963.onnx",
"reward": 3.921331531368196,
"creation_time": 1677348305.618175,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999963.pt"
]
},
{
"steps": 1199325,
"file_path": "results/Huggy/Huggy/Huggy-1199325.onnx",
"reward": 3.755876414359562,
"creation_time": 1677348567.8980813,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199325.pt"
]
},
{
"steps": 1399992,
"file_path": "results/Huggy/Huggy/Huggy-1399992.onnx",
"reward": null,
"creation_time": 1677348833.0348434,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399992.pt"
]
},
{
"steps": 1599886,
"file_path": "results/Huggy/Huggy/Huggy-1599886.onnx",
"reward": 3.8764238839911433,
"creation_time": 1677349096.2295873,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599886.pt"
]
},
{
"steps": 1799998,
"file_path": "results/Huggy/Huggy/Huggy-1799998.onnx",
"reward": 3.719649252185115,
"creation_time": 1677349360.1350176,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799998.pt"
]
},
{
"steps": 1999999,
"file_path": "results/Huggy/Huggy/Huggy-1999999.onnx",
"reward": 4.078105940657147,
"creation_time": 1677349627.98955,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999999.pt"
]
},
{
"steps": 2000071,
"file_path": "results/Huggy/Huggy/Huggy-2000071.onnx",
"reward": 4.043685748179754,
"creation_time": 1677349628.1101716,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000071.pt"
]
}
],
"final_checkpoint": {
"steps": 2000071,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.043685748179754,
"creation_time": 1677349628.1101716,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000071.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}