{
"Huggy": {
"checkpoints": [
{
"steps": 199871,
"file_path": "results/Huggy/Huggy/Huggy-199871.onnx",
"reward": 3.228386906286081,
"creation_time": 1698668488.673658,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199871.pt"
]
},
{
"steps": 399955,
"file_path": "results/Huggy/Huggy/Huggy-399955.onnx",
"reward": 3.805081605911255,
"creation_time": 1698668709.9611118,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399955.pt"
]
},
{
"steps": 599791,
"file_path": "results/Huggy/Huggy/Huggy-599791.onnx",
"reward": 3.5623866792987373,
"creation_time": 1698668956.906278,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599791.pt"
]
},
{
"steps": 799889,
"file_path": "results/Huggy/Huggy/Huggy-799889.onnx",
"reward": 4.011840008199215,
"creation_time": 1698669186.8369021,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799889.pt"
]
},
{
"steps": 999990,
"file_path": "results/Huggy/Huggy/Huggy-999990.onnx",
"reward": 3.886854353751845,
"creation_time": 1698669425.7802446,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999990.pt"
]
},
{
"steps": 1199368,
"file_path": "results/Huggy/Huggy/Huggy-1199368.onnx",
"reward": 3.907998294742019,
"creation_time": 1698669661.7718546,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199368.pt"
]
},
{
"steps": 1399962,
"file_path": "results/Huggy/Huggy/Huggy-1399962.onnx",
"reward": 2.943259080251058,
"creation_time": 1698669899.237849,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399962.pt"
]
},
{
"steps": 1599508,
"file_path": "results/Huggy/Huggy/Huggy-1599508.onnx",
"reward": 3.7204236514542415,
"creation_time": 1698670128.7468452,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599508.pt"
]
},
{
"steps": 1799954,
"file_path": "results/Huggy/Huggy/Huggy-1799954.onnx",
"reward": 4.142077611610953,
"creation_time": 1698670358.5541947,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799954.pt"
]
},
{
"steps": 1999991,
"file_path": "results/Huggy/Huggy/Huggy-1999991.onnx",
"reward": 3.86302490234375,
"creation_time": 1698670587.593143,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999991.pt"
]
},
{
"steps": 2000068,
"file_path": "results/Huggy/Huggy/Huggy-2000068.onnx",
"reward": 3.8766193049294606,
"creation_time": 1698670587.6943164,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000068.pt"
]
}
],
"final_checkpoint": {
"steps": 2000068,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.8766193049294606,
"creation_time": 1698670587.6943164,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000068.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}