{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199627,
        "file_path": "results/Huggy2/Huggy/Huggy-199627.onnx",
        "reward": 3.487236640670083,
        "creation_time": 1751882859.1210475,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-199627.pt"
        ]
      },
      {
        "steps": 399971,
        "file_path": "results/Huggy2/Huggy/Huggy-399971.onnx",
        "reward": 3.623989084545447,
        "creation_time": 1751883112.3964903,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-399971.pt"
        ]
      },
      {
        "steps": 599796,
        "file_path": "results/Huggy2/Huggy/Huggy-599796.onnx",
        "reward": 4.245588064193726,
        "creation_time": 1751883367.5974045,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-599796.pt"
        ]
      },
      {
        "steps": 799997,
        "file_path": "results/Huggy2/Huggy/Huggy-799997.onnx",
        "reward": 3.7336768091541446,
        "creation_time": 1751883623.8437662,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-799997.pt"
        ]
      },
      {
        "steps": 999975,
        "file_path": "results/Huggy2/Huggy/Huggy-999975.onnx",
        "reward": 3.7932500042537654,
        "creation_time": 1751883880.9672172,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-999975.pt"
        ]
      },
      {
        "steps": 1199936,
        "file_path": "results/Huggy2/Huggy/Huggy-1199936.onnx",
        "reward": 3.533001492763388,
        "creation_time": 1751884138.08096,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1199936.pt"
        ]
      },
      {
        "steps": 1399954,
        "file_path": "results/Huggy2/Huggy/Huggy-1399954.onnx",
        "reward": 3.7939584255218506,
        "creation_time": 1751884394.3602488,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1399954.pt"
        ]
      },
      {
        "steps": 1599934,
        "file_path": "results/Huggy2/Huggy/Huggy-1599934.onnx",
        "reward": 3.559893709162007,
        "creation_time": 1751884654.75414,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1599934.pt"
        ]
      },
      {
        "steps": 1799577,
        "file_path": "results/Huggy2/Huggy/Huggy-1799577.onnx",
        "reward": 3.5513242502366342,
        "creation_time": 1751884914.3179996,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1799577.pt"
        ]
      },
      {
        "steps": 1999847,
        "file_path": "results/Huggy2/Huggy/Huggy-1999847.onnx",
        "reward": 3.337948759396871,
        "creation_time": 1751885174.7724211,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1999847.pt"
        ]
      },
      {
        "steps": 2000597,
        "file_path": "results/Huggy2/Huggy/Huggy-2000597.onnx",
        "reward": 2.38155129977635,
        "creation_time": 1751885174.9002569,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-2000597.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000597,
      "file_path": "results/Huggy2/Huggy.onnx",
      "reward": 2.38155129977635,
      "creation_time": 1751885174.9002569,
      "auxillary_file_paths": [
        "results/Huggy2/Huggy/Huggy-2000597.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.2.0.dev0",
    "torch_version": "2.7.1+cu126"
  }
}