|
28 | 28 | from utils.datasets import create_dataloader
|
29 | 29 | from utils.general import labels_to_class_weights, increment_path, labels_to_image_weights, init_seeds, \
|
30 | 30 | fitness, strip_optimizer, get_latest_run, check_dataset, check_file, check_git_status, check_img_size, \
|
31 |    | -    print_mutation, set_logging
   | 31 | +    print_mutation, set_logging, one_cycle
32 | 32 | from utils.google_utils import attempt_download
|
33 | 33 | from utils.loss import compute_loss
|
34 | 34 | from utils.plots import plot_images, plot_labels, plot_results, plot_evolution
|
@@ -126,12 +126,12 @@ def train(hyp, opt, device, tb_writer=None, wandb=None):
|
126 | 126 |
|
127 | 127 | # Scheduler https://arxiv.org/pdf/1812.01187.pdf
|
128 | 128 | # https://pytorch.org/docs/stable/_modules/torch/optim/lr_scheduler.html#OneCycleLR
|
129 |     | -    lf = lambda x: ((1 + math.cos(x * math.pi / epochs)) / 2) * (1 - hyp['lrf']) + hyp['lrf']  # cosine
    | 129 | +    lf = one_cycle(1, hyp['lrf'], epochs)  # cosine 1->hyp['lrf']
130 | 130 | scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)
|
131 | 131 | # plot_lr_scheduler(optimizer, scheduler, epochs)
|
132 | 132 |
|
133 | 133 | # Logging
|
134 |     | -    if wandb and wandb.run is None:
    | 134 | +    if rank in [-1, 0] and wandb and wandb.run is None:
135 | 135 | opt.hyp = hyp # add hyperparameters
|
136 | 136 | wandb_run = wandb.init(config=opt, resume="allow",
|
137 | 137 | project='YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem,
|
|
0 commit comments