{
"Huggy": {
"checkpoints": [
{
"steps": 199927,
"file_path": "results/Huggy/Huggy/Huggy-199927.onnx",
"reward": 3.4567552032605025,
"creation_time": 1766825046.6777072,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199927.pt"
]
},
{
"steps": 399886,
"file_path": "results/Huggy/Huggy/Huggy-399886.onnx",
"reward": 3.825348467066668,
"creation_time": 1766825302.5439258,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399886.pt"
]
},
{
"steps": 599964,
"file_path": "results/Huggy/Huggy/Huggy-599964.onnx",
"reward": 4.492297998181096,
"creation_time": 1766825578.3155653,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599964.pt"
]
},
{
"steps": 799990,
"file_path": "results/Huggy/Huggy/Huggy-799990.onnx",
"reward": 3.9404149951018725,
"creation_time": 1766825850.5171914,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799990.pt"
]
},
{
"steps": 999994,
"file_path": "results/Huggy/Huggy/Huggy-999994.onnx",
"reward": 3.651615955241739,
"creation_time": 1766826126.6417174,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999994.pt"
]
},
{
"steps": 1199964,
"file_path": "results/Huggy/Huggy/Huggy-1199964.onnx",
"reward": 4.013338340334146,
"creation_time": 1766826414.9896743,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199964.pt"
]
},
{
"steps": 1399978,
"file_path": "results/Huggy/Huggy/Huggy-1399978.onnx",
"reward": 4.190168869495392,
"creation_time": 1766826694.4315867,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399978.pt"
]
},
{
"steps": 1599948,
"file_path": "results/Huggy/Huggy/Huggy-1599948.onnx",
"reward": 3.8851666094697253,
"creation_time": 1766826961.167826,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599948.pt"
]
},
{
"steps": 1799945,
"file_path": "results/Huggy/Huggy/Huggy-1799945.onnx",
"reward": 3.9014162350827313,
"creation_time": 1766827228.8822672,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799945.pt"
]
},
{
"steps": 1999973,
"file_path": "results/Huggy/Huggy/Huggy-1999973.onnx",
"reward": 3.778796369990995,
"creation_time": 1766827494.0094929,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999973.pt"
]
},
{
"steps": 2000064,
"file_path": "results/Huggy/Huggy/Huggy-2000064.onnx",
"reward": 3.7853421606714766,
"creation_time": 1766827494.1149662,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000064.pt"
]
}
],
"final_checkpoint": {
"steps": 2000064,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7853421606714766,
"creation_time": 1766827494.1149662,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000064.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}