ppo-Huggy/run_logs/training_status.json
{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199971,
        "file_path": "results/Huggy/Huggy/Huggy-199971.onnx",
        "reward": 3.4206849116545457,
        "creation_time": 1678645629.5217116,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199971.pt"
        ]
      },
      {
        "steps": 399907,
        "file_path": "results/Huggy/Huggy/Huggy-399907.onnx",
        "reward": 3.771656357190188,
        "creation_time": 1678646082.6809175,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399907.pt"
        ]
      },
      {
        "steps": 599974,
        "file_path": "results/Huggy/Huggy/Huggy-599974.onnx",
        "reward": 3.868033132769845,
        "creation_time": 1678646537.1936383,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599974.pt"
        ]
      },
      {
        "steps": 799996,
        "file_path": "results/Huggy/Huggy/Huggy-799996.onnx",
        "reward": 3.8262615453597553,
        "creation_time": 1678646977.3643146,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799996.pt"
        ]
      },
      {
        "steps": 999964,
        "file_path": "results/Huggy/Huggy/Huggy-999964.onnx",
        "reward": 3.758117717323881,
        "creation_time": 1678647434.8519118,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999964.pt"
        ]
      },
      {
        "steps": 1199968,
        "file_path": "results/Huggy/Huggy/Huggy-1199968.onnx",
        "reward": 3.425200112439968,
        "creation_time": 1678647888.3476746,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199968.pt"
        ]
      },
      {
        "steps": 1399948,
        "file_path": "results/Huggy/Huggy/Huggy-1399948.onnx",
        "reward": 3.9462812564759977,
        "creation_time": 1678648322.763745,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399948.pt"
        ]
      },
      {
        "steps": 1599961,
        "file_path": "results/Huggy/Huggy/Huggy-1599961.onnx",
        "reward": 3.7933624973447304,
        "creation_time": 1678648774.9372487,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599961.pt"
        ]
      },
      {
        "steps": 1799877,
        "file_path": "results/Huggy/Huggy/Huggy-1799877.onnx",
        "reward": 3.339998567269908,
        "creation_time": 1678649233.2222145,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799877.pt"
        ]
      },
      {
        "steps": 1999352,
        "file_path": "results/Huggy/Huggy/Huggy-1999352.onnx",
        "reward": 3.4843166307969526,
        "creation_time": 1678649688.224004,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999352.pt"
        ]
      },
      {
        "steps": 2000102,
        "file_path": "results/Huggy/Huggy/Huggy-2000102.onnx",
        "reward": 2.895453155040741,
        "creation_time": 1678649688.4095037,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000102.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000102,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 2.895453155040741,
      "creation_time": 1678649688.4095037,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000102.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.29.0.dev0",
    "torch_version": "1.8.1+cu102"
  }
}
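
For reference, a minimal Python sketch (not part of this repository) of how the file above can be inspected with the standard library. The relative path run_logs/training_status.json is an assumption and should be adjusted to wherever the file actually lives; the key name auxillary_file_paths is copied verbatim from the file itself.

import json

# Assumed location of this file; adjust the path as needed.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Each checkpoint entry pairs the training step count with the exported
# ONNX policy, the mean cumulative reward at save time, a Unix creation
# timestamp, and the companion .pt file under "auxillary_file_paths".
for ckpt in status["Huggy"]["checkpoints"]:
    print(f"{ckpt['steps']:>8} steps  reward={ckpt['reward']:.3f}  {ckpt['file_path']}")

final = status["Huggy"]["final_checkpoint"]
print(f"final model: {final['steps']} steps  reward={final['reward']:.3f}  {final['file_path']}")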