{
"Huggy": {
"checkpoints": [
{
"steps": 199808,
"file_path": "results/Huggy/Huggy/Huggy-199808.onnx",
"reward": 3.251420315275801,
"creation_time": 1688044263.5681138,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199808.pt"
]
},
{
"steps": 399936,
"file_path": "results/Huggy/Huggy/Huggy-399936.onnx",
"reward": 3.9530328905328793,
"creation_time": 1688044488.2637491,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399936.pt"
]
},
{
"steps": 599944,
"file_path": "results/Huggy/Huggy/Huggy-599944.onnx",
"reward": 4.024938638393696,
"creation_time": 1688044719.4068463,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599944.pt"
]
},
{
"steps": 799979,
"file_path": "results/Huggy/Huggy/Huggy-799979.onnx",
"reward": 3.8504011961253912,
"creation_time": 1688044949.847796,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799979.pt"
]
},
{
"steps": 999997,
"file_path": "results/Huggy/Huggy/Huggy-999997.onnx",
"reward": 3.6565454808792266,
"creation_time": 1688045181.0345163,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999997.pt"
]
},
{
"steps": 1199920,
"file_path": "results/Huggy/Huggy/Huggy-1199920.onnx",
"reward": 3.7417772815508,
"creation_time": 1688045412.1220458,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199920.pt"
]
},
{
"steps": 1399883,
"file_path": "results/Huggy/Huggy/Huggy-1399883.onnx",
"reward": 3.8251371634312163,
"creation_time": 1688045641.458995,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399883.pt"
]
},
{
"steps": 1599917,
"file_path": "results/Huggy/Huggy/Huggy-1599917.onnx",
"reward": 4.030896011873972,
"creation_time": 1688045877.2203014,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599917.pt"
]
},
{
"steps": 1799958,
"file_path": "results/Huggy/Huggy/Huggy-1799958.onnx",
"reward": 3.882163771752561,
"creation_time": 1688046111.590538,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799958.pt"
]
},
{
"steps": 1999952,
"file_path": "results/Huggy/Huggy/Huggy-1999952.onnx",
"reward": 3.9574905850670556,
"creation_time": 1688046349.1190069,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999952.pt"
]
},
{
"steps": 2000093,
"file_path": "results/Huggy/Huggy/Huggy-2000093.onnx",
"reward": 4.026885001555733,
"creation_time": 1688046349.2081778,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000093.pt"
]
}
],
"final_checkpoint": {
"steps": 2000093,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.026885001555733,
"creation_time": 1688046349.2081778,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000093.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}