ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199957,
                "file_path": "results/Huggy2/Huggy/Huggy-199957.onnx",
                "reward": 3.2709279374072424,
                "creation_time": 1720088110.107061,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-199957.pt"
                ]
            },
            {
                "steps": 399810,
                "file_path": "results/Huggy2/Huggy/Huggy-399810.onnx",
                "reward": 3.827178885687643,
                "creation_time": 1720088364.3007808,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-399810.pt"
                ]
            },
            {
                "steps": 599930,
                "file_path": "results/Huggy2/Huggy/Huggy-599930.onnx",
                "reward": 4.950990200042725,
                "creation_time": 1720088617.4781437,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-599930.pt"
                ]
            },
            {
                "steps": 799936,
                "file_path": "results/Huggy2/Huggy/Huggy-799936.onnx",
                "reward": 3.8962056463864183,
                "creation_time": 1720088866.936427,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-799936.pt"
                ]
            },
            {
                "steps": 999966,
                "file_path": "results/Huggy2/Huggy/Huggy-999966.onnx",
                "reward": 3.8571443000285743,
                "creation_time": 1720089113.9239264,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-999966.pt"
                ]
            },
            {
                "steps": 1199947,
                "file_path": "results/Huggy2/Huggy/Huggy-1199947.onnx",
                "reward": 3.5642308217507823,
                "creation_time": 1720089365.6414294,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1199947.pt"
                ]
            },
            {
                "steps": 1399962,
                "file_path": "results/Huggy2/Huggy/Huggy-1399962.onnx",
                "reward": 3.9266388416290283,
                "creation_time": 1720089614.4484885,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1399962.pt"
                ]
            },
            {
                "steps": 1599965,
                "file_path": "results/Huggy2/Huggy/Huggy-1599965.onnx",
                "reward": 3.6004283437963394,
                "creation_time": 1720089857.8779643,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1599965.pt"
                ]
            },
            {
                "steps": 1799601,
                "file_path": "results/Huggy2/Huggy/Huggy-1799601.onnx",
                "reward": 4.056206464767456,
                "creation_time": 1720090105.437227,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1799601.pt"
                ]
            },
            {
                "steps": 1999946,
                "file_path": "results/Huggy2/Huggy/Huggy-1999946.onnx",
                "reward": 3.3460950083651784,
                "creation_time": 1720090357.269121,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1999946.pt"
                ]
            },
            {
                "steps": 2000008,
                "file_path": "results/Huggy2/Huggy/Huggy-2000008.onnx",
                "reward": 3.3806482593218488,
                "creation_time": 1720090357.3866599,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000008.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000008,
            "file_path": "results/Huggy2/Huggy.onnx",
            "reward": 3.3806482593218488,
            "creation_time": 1720090357.3866599,
            "auxillary_file_paths": [
                "results/Huggy2/Huggy/Huggy-2000008.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.3.0+cu121"
    }
}
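
For reference, below is a minimal Python sketch that parses a training_status.json like the one above and summarizes the recorded checkpoints. The behavior name "Huggy", the key names, and the relative path run_logs/training_status.json are taken from this log; the summarize helper itself is an assumption about how one might inspect the file, not part of the ML-Agents tooling.

```python
import json
from pathlib import Path

# Path assumed to match this repository's layout; adjust if the log lives elsewhere.
STATUS_FILE = Path("run_logs/training_status.json")


def summarize(status_path: Path, behavior: str = "Huggy") -> None:
    """Print a short summary of the checkpoints recorded for one behavior."""
    with status_path.open() as f:
        status = json.load(f)

    behavior_log = status[behavior]
    checkpoints = behavior_log["checkpoints"]

    # Each checkpoint entry carries the step count, mean reward, the exported
    # .onnx path, and (under the ML-Agents key "auxillary_file_paths") the
    # matching .pt file.
    for ckpt in checkpoints:
        print(f"step {ckpt['steps']:>8}  reward {ckpt['reward']:.3f}  {ckpt['file_path']}")

    best = max(checkpoints, key=lambda c: c["reward"])
    final = behavior_log["final_checkpoint"]
    print(f"\nbest reward  : {best['reward']:.3f} at step {best['steps']}")
    print(f"final export : {final['file_path']} (step {final['steps']}, reward {final['reward']:.3f})")


if __name__ == "__main__":
    summarize(STATUS_FILE)
```

Run against this log, the summary would show the step-599930 checkpoint as the highest-reward export (reward ≈ 4.95) and results/Huggy2/Huggy.onnx at step 2000008 as the final checkpoint.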