{
"Huggy": {
"checkpoints": [
{
"steps": 199706,
"file_path": "results/Huggy2/Huggy/Huggy-199706.onnx",
"reward": 3.6708125724936975,
"creation_time": 1718627879.1545203,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199706.pt"
]
},
{
"steps": 399821,
"file_path": "results/Huggy2/Huggy/Huggy-399821.onnx",
"reward": 3.390478904310026,
"creation_time": 1718628111.0392523,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399821.pt"
]
},
{
"steps": 599995,
"file_path": "results/Huggy2/Huggy/Huggy-599995.onnx",
"reward": 4.070602995784659,
"creation_time": 1718628341.9957957,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599995.pt"
]
},
{
"steps": 799870,
"file_path": "results/Huggy2/Huggy/Huggy-799870.onnx",
"reward": 3.909766625899535,
"creation_time": 1718628567.7966812,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799870.pt"
]
},
{
"steps": 999967,
"file_path": "results/Huggy2/Huggy/Huggy-999967.onnx",
"reward": 3.7311449041872313,
"creation_time": 1718628801.3779979,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999967.pt"
]
},
{
"steps": 1199986,
"file_path": "results/Huggy2/Huggy/Huggy-1199986.onnx",
"reward": 3.8307492747364273,
"creation_time": 1718629033.8487434,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199986.pt"
]
},
{
"steps": 1399914,
"file_path": "results/Huggy2/Huggy/Huggy-1399914.onnx",
"reward": 3.6222688115161397,
"creation_time": 1718629269.304678,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399914.pt"
]
},
{
"steps": 1599961,
"file_path": "results/Huggy2/Huggy/Huggy-1599961.onnx",
"reward": 3.990064581344416,
"creation_time": 1718629497.9402518,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599961.pt"
]
},
{
"steps": 1799968,
"file_path": "results/Huggy2/Huggy/Huggy-1799968.onnx",
"reward": 3.7953160681015206,
"creation_time": 1718629734.8401031,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799968.pt"
]
},
{
"steps": 1999980,
"file_path": "results/Huggy2/Huggy/Huggy-1999980.onnx",
"reward": 3.6895004319293156,
"creation_time": 1718629970.584662,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999980.pt"
]
},
{
"steps": 2000730,
"file_path": "results/Huggy2/Huggy/Huggy-2000730.onnx",
"reward": 3.5617286176012275,
"creation_time": 1718629970.733158,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000730.pt"
]
}
],
"final_checkpoint": {
"steps": 2000730,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.5617286176012275,
"creation_time": 1718629970.733158,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000730.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}