diff --git a/segment/train.py b/segment/train.py
index 26f0d0c13c78..5a5f15f10d84 100644
--- a/segment/train.py
+++ b/segment/train.py
@@ -91,17 +91,6 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
     data_dict = None
     if RANK in {-1, 0}:
         logger = GenericLogger(opt=opt, console_logger=LOGGER)
-        # loggers = Loggers(save_dir, weights, opt, hyp, LOGGER)  # loggers instance
-        # if loggers.clearml:
-        #     data_dict = loggers.clearml.data_dict  # None if no ClearML dataset or filled in by ClearML
-        # if loggers.wandb:
-        #     data_dict = loggers.wandb.data_dict
-        #     if resume:
-        #         weights, epochs, hyp, batch_size = opt.weights, opt.epochs, opt.hyp, opt.batch_size
-        #
-        # # Register actions
-        # for k in methods(loggers):
-        #     callbacks.register_action(k, callback=getattr(loggers, k))

     # Config
     plots = not evolve and not opt.noplots  # create plots
@@ -400,7 +389,6 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
                     'ema': deepcopy(ema.ema).half(),
                     'updates': ema.updates,
                     'optimizer': optimizer.state_dict(),
-                    # 'wandb_id': loggers.wandb.wandb_run.id if loggers.wandb else None,
                     'opt': vars(opt),
                     'date': datetime.now().isoformat()}

diff --git a/train.py b/train.py
index 177e081c8c37..c24a8e81531d 100644
--- a/train.py
+++ b/train.py
@@ -53,7 +53,6 @@
                            one_cycle, print_args, print_mutation, strip_optimizer, yaml_save)
 from utils.loggers import Loggers
 from utils.loggers.comet.comet_utils import check_comet_resume
-from utils.loggers.wandb.wandb_utils import check_wandb_resume
 from utils.loss import ComputeLoss
 from utils.metrics import fitness
 from utils.plots import plot_evolve
@@ -375,7 +374,6 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
                     'ema': deepcopy(ema.ema).half(),
                     'updates': ema.updates,
                     'optimizer': optimizer.state_dict(),
-                    'wandb_id': loggers.wandb.wandb_run.id if loggers.wandb else None,
                     'opt': vars(opt),
                     'date': datetime.now().isoformat()}

@@ -483,7 +481,7 @@ def main(opt, callbacks=Callbacks()):
         check_requirements()

     # Resume (from specified or most recent last.pt)
-    if opt.resume and not check_wandb_resume(opt) and not check_comet_resume(opt) and not opt.evolve:
+    if opt.resume and not check_comet_resume(opt) and not opt.evolve:
         last = Path(check_file(opt.resume) if isinstance(opt.resume, str) else get_latest_run())
         opt_yaml = last.parent.parent / 'opt.yaml'  # train options yaml
         opt_data = opt.data  # original dataset
diff --git a/utils/general.py b/utils/general.py
index 76bc0b1d7a79..8ea0ad07ed13 100644
--- a/utils/general.py
+++ b/utils/general.py
@@ -956,7 +956,7 @@ def strip_optimizer(f='best.pt', s=''):  # from utils.general import *; strip_op
     x = torch.load(f, map_location=torch.device('cpu'))
     if x.get('ema'):
         x['model'] = x['ema']  # replace model with ema
-    for k in 'optimizer', 'best_fitness', 'wandb_id', 'ema', 'updates':  # keys
+    for k in 'optimizer', 'best_fitness', 'ema', 'updates':  # keys
         x[k] = None
     x['epoch'] = -1
     x['model'].half()  # to FP16
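
For reference, a minimal usage sketch of strip_optimizer after this change. It is not part of the diff; the only assumption taken from the hunks above is the strip_optimizer(f='best.pt', s='') signature, and the checkpoint path shown is purely illustrative.

# Hypothetical usage sketch, not part of this diff.
# With 'wandb_id' no longer written into checkpoints, strip_optimizer only has to
# clear the remaining training-state keys ('optimizer', 'best_fitness', 'ema',
# 'updates') before converting the model to FP16 for inference-only use.
from utils.general import strip_optimizer

strip_optimizer('runs/train/exp/weights/best.pt')  # path is illustrative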