Skip to content
Snippets Groups Projects
Commit 9168b38e authored by Yoann Schneider's avatar Yoann Schneider :tennis: Committed by Solene Tarride
Browse files

Remove unused hardware, software and date

parent 0f97e244
No related branches found
No related tags found
1 merge request: !119 "Remove unused hardware, software and date"
...@@ -4,8 +4,6 @@ import json ...@@ -4,8 +4,6 @@ import json
import os import os
import pickle import pickle
import random import random
import sys
from datetime import date
from time import time from time import time
import numpy as np import numpy as np
...@@ -523,7 +521,6 @@ class GenericTrainingManager: ...@@ -523,7 +521,6 @@ class GenericTrainingManager:
return return
params = copy.deepcopy(self.params) params = copy.deepcopy(self.params)
params = class_to_str_dict(params) params = class_to_str_dict(params)
params["date"] = date.today().strftime("%d/%m/%Y")
total_params = 0 total_params = 0
for model_name in self.models.keys(): for model_name in self.models.keys():
current_params = compute_nb_params(self.models[model_name]) current_params = compute_nb_params(self.models[model_name])
...@@ -533,21 +530,6 @@ class GenericTrainingManager: ...@@ -533,21 +530,6 @@ class GenericTrainingManager:
] ]
total_params += current_params total_params += current_params
params["model_params"]["total_params"] = "{:,}".format(total_params) params["model_params"]["total_params"] = "{:,}".format(total_params)
params["hardware"] = dict()
if self.device != "cpu":
for i in range(self.params["training_params"]["nb_gpu"]):
params["hardware"][str(i)] = "{} {}".format(
torch.cuda.get_device_name(i), torch.cuda.get_device_properties(i)
)
else:
params["hardware"]["0"] = "CPU"
params["software"] = {
"python_version": sys.version,
"pytorch_version": torch.__version__,
"cuda_version": torch.version.cuda,
"cudnn_version": torch.backends.cudnn.version(),
}
with open(path, "w") as f: with open(path, "w") as f:
json.dump(params, f, indent=4) json.dump(params, f, indent=4)
......
0% — Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment