{
"Huggy": {
"checkpoints": [
{
"steps": 199866,
"file_path": "results/Huggy/Huggy/Huggy-199866.onnx",
"reward": 3.389312726966405,
"creation_time": 1679411371.7801688,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199866.pt"
]
},
{
"steps": 399976,
"file_path": "results/Huggy/Huggy/Huggy-399976.onnx",
"reward": 3.3847440391216637,
"creation_time": 1679411601.7676964,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399976.pt"
]
},
{
"steps": 599968,
"file_path": "results/Huggy/Huggy/Huggy-599968.onnx",
"reward": 4.757373319731818,
"creation_time": 1679411832.5797167,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599968.pt"
]
},
{
"steps": 799383,
"file_path": "results/Huggy/Huggy/Huggy-799383.onnx",
"reward": 3.5191941897977483,
"creation_time": 1679412063.8638332,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799383.pt"
]
},
{
"steps": 999953,
"file_path": "results/Huggy/Huggy/Huggy-999953.onnx",
"reward": 3.8496181054429695,
"creation_time": 1679412296.1556606,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999953.pt"
]
},
{
"steps": 1199933,
"file_path": "results/Huggy/Huggy/Huggy-1199933.onnx",
"reward": 3.15467232465744,
"creation_time": 1679412528.1790328,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199933.pt"
]
},
{
"steps": 1399426,
"file_path": "results/Huggy/Huggy/Huggy-1399426.onnx",
"reward": 3.5698776614498087,
"creation_time": 1679412755.460634,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399426.pt"
]
},
{
"steps": 1599251,
"file_path": "results/Huggy/Huggy/Huggy-1599251.onnx",
"reward": 3.615391506167019,
"creation_time": 1679412984.596274,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599251.pt"
]
},
{
"steps": 1799415,
"file_path": "results/Huggy/Huggy/Huggy-1799415.onnx",
"reward": 3.3281793815118297,
"creation_time": 1679413215.406816,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799415.pt"
]
},
{
"steps": 1999952,
"file_path": "results/Huggy/Huggy/Huggy-1999952.onnx",
"reward": 3.5443757451199867,
"creation_time": 1679413442.9348953,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999952.pt"
]
},
{
"steps": 2000014,
"file_path": "results/Huggy/Huggy/Huggy-2000014.onnx",
"reward": 3.5440585047967974,
"creation_time": 1679413443.0560203,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000014.pt"
]
}
],
"final_checkpoint": {
"steps": 2000014,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.5440585047967974,
"creation_time": 1679413443.0560203,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000014.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}