{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199742,
        "file_path": "results/Huggy2/Huggy/Huggy-199742.onnx",
        "reward": 3.774859515018761,
        "creation_time": 1717680401.3598123,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-199742.pt"
        ]
      },
      {
        "steps": 399813,
        "file_path": "results/Huggy2/Huggy/Huggy-399813.onnx",
        "reward": 4.0364426341321735,
        "creation_time": 1717680657.659959,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-399813.pt"
        ]
      },
      {
        "steps": 599964,
        "file_path": "results/Huggy2/Huggy/Huggy-599964.onnx",
        "reward": 3.9790708948584164,
        "creation_time": 1717680925.2980316,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-599964.pt"
        ]
      },
      {
        "steps": 799957,
        "file_path": "results/Huggy2/Huggy/Huggy-799957.onnx",
        "reward": 3.9135451526709004,
        "creation_time": 1717681191.4225729,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-799957.pt"
        ]
      },
      {
        "steps": 999995,
        "file_path": "results/Huggy2/Huggy/Huggy-999995.onnx",
        "reward": 3.8647703644792,
        "creation_time": 1717681462.0416322,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-999995.pt"
        ]
      },
      {
        "steps": 1199950,
        "file_path": "results/Huggy2/Huggy/Huggy-1199950.onnx",
        "reward": 3.75774330967351,
        "creation_time": 1717681730.389013,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1199950.pt"
        ]
      },
      {
        "steps": 1399857,
        "file_path": "results/Huggy2/Huggy/Huggy-1399857.onnx",
        "reward": 4.0934242975144155,
        "creation_time": 1717681999.6366966,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1399857.pt"
        ]
      },
      {
        "steps": 1599991,
        "file_path": "results/Huggy2/Huggy/Huggy-1599991.onnx",
        "reward": 3.861476890607314,
        "creation_time": 1717682261.6098466,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1599991.pt"
        ]
      },
      {
        "steps": 1799927,
        "file_path": "results/Huggy2/Huggy/Huggy-1799927.onnx",
        "reward": 3.795108373959859,
        "creation_time": 1717682529.8541746,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1799927.pt"
        ]
      },
      {
        "steps": 1999972,
        "file_path": "results/Huggy2/Huggy/Huggy-1999972.onnx",
        "reward": 3.9131751334530183,
        "creation_time": 1717682796.8937342,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1999972.pt"
        ]
      },
      {
        "steps": 2000101,
        "file_path": "results/Huggy2/Huggy/Huggy-2000101.onnx",
        "reward": 3.906965946883298,
        "creation_time": 1717682797.0729413,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-2000101.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000101,
      "file_path": "results/Huggy2/Huggy.onnx",
      "reward": 3.906965946883298,
      "creation_time": 1717682797.0729413,
      "auxillary_file_paths": [
        "results/Huggy2/Huggy/Huggy-2000101.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.1.0.dev0",
    "torch_version": "2.3.0+cu121"
  }
}