{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 1999922,
                "file_path": "results/Huggy/Huggy/Huggy-1999922.onnx",
                "reward": 3.8654306222652566,
                "creation_time": 1688069838.4345078,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999922.pt"
                ]
            },
            {
                "steps": 2000006,
                "file_path": "results/Huggy/Huggy/Huggy-2000006.onnx",
                "reward": 3.866448467060671,
                "creation_time": 1688069838.5553,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000006.pt"
                ]
            },
            {
                "steps": 2000006,
                "file_path": "results/Huggy/Huggy/Huggy-2000006.onnx",
                "reward": null,
                "creation_time": 1688070497.2221453,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000006.pt"
                ]
            },
            {
                "steps": 2000006,
                "file_path": "results/Huggy/Huggy/Huggy-2000006.onnx",
                "reward": null,
                "creation_time": 1688070628.3207586,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000006.pt"
                ]
            },
            {
                "steps": 2000006,
                "file_path": "results/Huggy/Huggy/Huggy-2000006.onnx",
                "reward": null,
                "creation_time": 1688070658.377307,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000006.pt"
                ]
            },
            {
                "steps": 2000006,
                "file_path": "results/Huggy/Huggy/Huggy-2000006.onnx",
                "reward": null,
                "creation_time": 1688070686.7492425,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000006.pt"
                ]
            },
            {
                "steps": 2000006,
                "file_path": "results/Huggy/Huggy/Huggy-2000006.onnx",
                "reward": null,
                "creation_time": 1688070711.7361941,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000006.pt"
                ]
            },
            {
                "steps": 2000006,
                "file_path": "results/Huggy/Huggy/Huggy-2000006.onnx",
                "reward": null,
                "creation_time": 1688070737.78152,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000006.pt"
                ]
            },
            {
                "steps": 2000006,
                "file_path": "results/Huggy/Huggy/Huggy-2000006.onnx",
                "reward": null,
                "creation_time": 1688070769.7891285,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000006.pt"
                ]
            },
            {
                "steps": 2199787,
                "file_path": "results/Huggy/Huggy/Huggy-2199787.onnx",
                "reward": 3.345773091081713,
                "creation_time": 1688073473.460917,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2199787.pt"
                ]
            },
            {
                "steps": 2399928,
                "file_path": "results/Huggy/Huggy/Huggy-2399928.onnx",
                "reward": 3.478961085356199,
                "creation_time": 1688073742.9096844,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2399928.pt"
                ]
            },
            {
                "steps": 2599910,
                "file_path": "results/Huggy/Huggy/Huggy-2599910.onnx",
                "reward": 3.68402361869812,
                "creation_time": 1688074013.6015458,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2599910.pt"
                ]
            },
            {
                "steps": 2799974,
                "file_path": "results/Huggy/Huggy/Huggy-2799974.onnx",
                "reward": 3.8231999143880073,
                "creation_time": 1688074275.5085018,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2799974.pt"
                ]
            },
            {
                "steps": 2999906,
                "file_path": "results/Huggy/Huggy/Huggy-2999906.onnx",
                "reward": 3.703413576704182,
                "creation_time": 1688074550.6508315,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2999906.pt"
                ]
            },
            {
                "steps": 3000023,
                "file_path": "results/Huggy/Huggy/Huggy-3000023.onnx",
                "reward": 3.7201410008936513,
                "creation_time": 1688074550.815447,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-3000023.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 3000023,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.7201410008936513,
            "creation_time": 1688074550.815447,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-3000023.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}