ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199898,
                "file_path": "results/Huggy/Huggy/Huggy-199898.onnx",
                "reward": 3.309586272664266,
                "creation_time": 1692094664.1695714,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199898.pt"
                ]
            },
            {
                "steps": 399982,
                "file_path": "results/Huggy/Huggy/Huggy-399982.onnx",
                "reward": 3.4831342736879987,
                "creation_time": 1692094896.5980365,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399982.pt"
                ]
            },
            {
                "steps": 599979,
                "file_path": "results/Huggy/Huggy/Huggy-599979.onnx",
                "reward": 3.4851311995432925,
                "creation_time": 1692095137.1138897,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599979.pt"
                ]
            },
            {
                "steps": 799925,
                "file_path": "results/Huggy/Huggy/Huggy-799925.onnx",
                "reward": 3.9158858420308102,
                "creation_time": 1692095374.078658,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799925.pt"
                ]
            },
            {
                "steps": 999866,
                "file_path": "results/Huggy/Huggy/Huggy-999866.onnx",
                "reward": 3.9868480636362444,
                "creation_time": 1692095611.6596138,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999866.pt"
                ]
            },
            {
                "steps": 1199977,
                "file_path": "results/Huggy/Huggy/Huggy-1199977.onnx",
                "reward": 3.5235213276111716,
                "creation_time": 1692095850.5032027,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199977.pt"
                ]
            },
            {
                "steps": 1399886,
                "file_path": "results/Huggy/Huggy/Huggy-1399886.onnx",
                "reward": 4.268586802482605,
                "creation_time": 1692096088.3802967,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399886.pt"
                ]
            },
            {
                "steps": 1599840,
                "file_path": "results/Huggy/Huggy/Huggy-1599840.onnx",
                "reward": 3.9653872561320074,
                "creation_time": 1692096325.8467634,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599840.pt"
                ]
            },
            {
                "steps": 1799938,
                "file_path": "results/Huggy/Huggy/Huggy-1799938.onnx",
                "reward": 3.6831249157223134,
                "creation_time": 1692096570.6399484,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799938.pt"
                ]
            },
            {
                "steps": 1999987,
                "file_path": "results/Huggy/Huggy/Huggy-1999987.onnx",
                "reward": 3.6348602490292654,
                "creation_time": 1692096813.423705,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999987.pt"
                ]
            },
            {
                "steps": 2000110,
                "file_path": "results/Huggy/Huggy/Huggy-2000110.onnx",
                "reward": 3.719199127442128,
                "creation_time": 1692096813.6090257,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000110.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000110,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.719199127442128,
            "creation_time": 1692096813.6090257,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000110.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
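For reference, a minimal Python sketch (not part of this repository) showing one way to load this training_status.json and list the recorded checkpoints with the standard library; the relative path and the printed summary format are assumptions, not anything defined by the file itself:

import json
from datetime import datetime, timezone

# Assumed path relative to the repo root; adjust as needed.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# "Huggy" is the behavior name used as the top-level key in this file.
for ckpt in status["Huggy"]["checkpoints"]:
    created = datetime.fromtimestamp(ckpt["creation_time"], tz=timezone.utc)
    print(f"{ckpt['steps']:>8} steps  reward={ckpt['reward']:.3f}  "
          f"{ckpt['file_path']}  ({created:%Y-%m-%d %H:%M} UTC)")

final = status["Huggy"]["final_checkpoint"]
print("final checkpoint:", final["file_path"], "reward", round(final["reward"], 3))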