{
"Huggy": {
"checkpoints": [
{
"steps": 199730,
"file_path": "results/Huggy/Huggy/Huggy-199730.onnx",
"reward": 3.7394220884029683,
"creation_time": 1671297630.4542983,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199730.pt"
]
},
{
"steps": 399967,
"file_path": "results/Huggy/Huggy/Huggy-399967.onnx",
"reward": 3.855240312360582,
"creation_time": 1671297838.1046531,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399967.pt"
]
},
{
"steps": 599611,
"file_path": "results/Huggy/Huggy/Huggy-599611.onnx",
"reward": 4.0293217174934615,
"creation_time": 1671298049.6080408,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599611.pt"
]
},
{
"steps": 799964,
"file_path": "results/Huggy/Huggy/Huggy-799964.onnx",
"reward": 3.9756351925840425,
"creation_time": 1671298259.725362,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799964.pt"
]
},
{
"steps": 999908,
"file_path": "results/Huggy/Huggy/Huggy-999908.onnx",
"reward": 3.6901726198483664,
"creation_time": 1671298474.012572,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999908.pt"
]
},
{
"steps": 1199971,
"file_path": "results/Huggy/Huggy/Huggy-1199971.onnx",
"reward": 3.720043533378177,
"creation_time": 1671298685.9869683,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199971.pt"
]
},
{
"steps": 1399936,
"file_path": "results/Huggy/Huggy/Huggy-1399936.onnx",
"reward": 3.2534813506262643,
"creation_time": 1671298899.1451514,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399936.pt"
]
},
{
"steps": 1599262,
"file_path": "results/Huggy/Huggy/Huggy-1599262.onnx",
"reward": 3.8307744946617346,
"creation_time": 1671299110.437332,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599262.pt"
]
},
{
"steps": 1799999,
"file_path": "results/Huggy/Huggy/Huggy-1799999.onnx",
"reward": 3.609573281986613,
"creation_time": 1671299323.4573584,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799999.pt"
]
},
{
"steps": 1999254,
"file_path": "results/Huggy/Huggy/Huggy-1999254.onnx",
"reward": 3.6181099111750976,
"creation_time": 1671299539.8483822,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999254.pt"
]
},
{
"steps": 2000004,
"file_path": "results/Huggy/Huggy/Huggy-2000004.onnx",
"reward": 3.4895776410897574,
"creation_time": 1671299540.006816,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000004.pt"
]
}
],
"final_checkpoint": {
"steps": 2000004,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.4895776410897574,
"creation_time": 1671299540.006816,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000004.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}