{
"Huggy": {
"checkpoints": [
{
"steps": 199838,
"file_path": "results/Huggy2/Huggy/Huggy-199838.onnx",
"reward": 3.5830016378629006,
"creation_time": 1736855607.5762873,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199838.pt"
]
},
{
"steps": 399999,
"file_path": "results/Huggy2/Huggy/Huggy-399999.onnx",
"reward": 3.6541025234481035,
"creation_time": 1736855847.7160537,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399999.pt"
]
},
{
"steps": 599922,
"file_path": "results/Huggy2/Huggy/Huggy-599922.onnx",
"reward": 3.76259446144104,
"creation_time": 1736856092.4938457,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599922.pt"
]
},
{
"steps": 799980,
"file_path": "results/Huggy2/Huggy/Huggy-799980.onnx",
"reward": 3.9345181097344653,
"creation_time": 1736856336.946186,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799980.pt"
]
},
{
"steps": 999981,
"file_path": "results/Huggy2/Huggy/Huggy-999981.onnx",
"reward": 3.619158430437071,
"creation_time": 1736856582.094566,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999981.pt"
]
},
{
"steps": 1199976,
"file_path": "results/Huggy2/Huggy/Huggy-1199976.onnx",
"reward": 3.7137353397332706,
"creation_time": 1736856831.8315644,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199976.pt"
]
},
{
"steps": 1399980,
"file_path": "results/Huggy2/Huggy/Huggy-1399980.onnx",
"reward": 3.903633639290558,
"creation_time": 1736857077.6177542,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399980.pt"
]
},
{
"steps": 1599936,
"file_path": "results/Huggy2/Huggy/Huggy-1599936.onnx",
"reward": 3.684596856173716,
"creation_time": 1736857330.7010765,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599936.pt"
]
},
{
"steps": 1799593,
"file_path": "results/Huggy2/Huggy/Huggy-1799593.onnx",
"reward": 3.6463548947783075,
"creation_time": 1736857588.9057882,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799593.pt"
]
},
{
"steps": 1999969,
"file_path": "results/Huggy2/Huggy/Huggy-1999969.onnx",
"reward": 2.3739715417226157,
"creation_time": 1736857844.3942223,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999969.pt"
]
},
{
"steps": 2000040,
"file_path": "results/Huggy2/Huggy/Huggy-2000040.onnx",
"reward": 2.347602163042341,
"creation_time": 1736857844.5165937,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000040.pt"
]
}
],
"final_checkpoint": {
"steps": 2000040,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 2.347602163042341,
"creation_time": 1736857844.5165937,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000040.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu121"
}
}