From d98b75863a925c1fc52f6269f2ffed99ea0eadde Mon Sep 17 00:00:00 2001
From: faiga91
Date: Fri, 8 Nov 2024 20:51:03 +0100
Subject: [PATCH 1/7] Fix future warnings

---
 train.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/train.py b/train.py
index 59c372afe..21875fcc2 100644
--- a/train.py
+++ b/train.py
@@ -105,7 +105,7 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
     if pretrained:
         with torch_distributed_zero_first(LOCAL_RANK):
             weights = attempt_download(weights)  # download if not found locally
-        ckpt = torch.load(weights, map_location='cpu')  # load checkpoint to CPU to avoid CUDA memory leak
+        ckpt = torch.load(weights, map_location='cpu', weights_only=False)  # load checkpoint to CPU to avoid CUDA memory leak
         model = Model(cfg or ckpt['model'].yaml, ch=3, nc=nc, anchors=hyp.get('anchors')).to(device)  # create
         exclude = ['anchor'] if (cfg or hyp.get('anchors')) and not resume else []  # exclude keys
         csd = ckpt['model'].float().state_dict()  # checkpoint state_dict as FP32
@@ -192,6 +192,9 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
                                               shuffle=True,
                                               min_items=opt.min_items)
     labels = np.concatenate(dataset.labels, 0)
+    print(f'labels {labels}')
+    unique_class_ids = np.unique(labels[:, 0])
+    print("Unique class IDs:", unique_class_ids)
     mlc = int(labels[:, 0].max())  # max label class
     assert mlc < nc, f'Label class {mlc} exceeds nc={nc} in {data}. Possible class labels are 0-{nc - 1}'

@@ -241,7 +244,7 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
     maps = np.zeros(nc)  # mAP per class
     results = (0, 0, 0, 0, 0, 0, 0)  # P, R, mAP@.5, mAP@.5-.95, val_loss(box, obj, cls)
     scheduler.last_epoch = start_epoch - 1  # do not move
-    scaler = torch.cuda.amp.GradScaler(enabled=amp)
+    scaler = torch.amp.GradScaler(enabled=amp)
     stopper, stop = EarlyStopping(patience=opt.patience), False
     compute_loss = ComputeLoss(model)  # init loss class
     callbacks.run('on_train_start')
@@ -299,7 +302,7 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
                     imgs = nn.functional.interpolate(imgs, size=ns, mode='bilinear', align_corners=False)

             # Forward
-            with torch.cuda.amp.autocast(amp):
+            with torch.amp.autocast(device_type='cuda', dtype=torch.float16 if amp else torch.float32):
                 pred = model(imgs)  # forward
                 loss, loss_items = compute_loss(pred, targets.to(device))  # loss scaled by batch_size
                 if RANK != -1:
@@ -493,7 +496,7 @@ def main(opt, callbacks=Callbacks()):
             with open(opt_yaml, errors='ignore') as f:
                 d = yaml.safe_load(f)
         else:
-            d = torch.load(last, map_location='cpu')['opt']
+            d = torch.load(last, map_location='cpu', weights_only=False)['opt']
         opt = argparse.Namespace(**d)  # replace
         opt.cfg, opt.weights, opt.resume = '', str(last), True  # reinstate
         if is_url(opt_data):

From 573a2cd9235a8cd149b5efa7b94110c464b69338 Mon Sep 17 00:00:00 2001
From: faiga91
Date: Mon, 11 Nov 2024 22:13:39 +0100
Subject: [PATCH 2/7] disable augmentation

---
 train.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/train.py b/train.py
index 21875fcc2..a5e5d68d6 100644
--- a/train.py
+++ b/train.py
@@ -180,7 +180,7 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
                                               gs,
                                               single_cls,
                                               hyp=hyp,
-                                              augment=True,
+                                              augment=False,
                                               cache=None if opt.cache == 'val' else opt.cache,
                                               rect=opt.rect,
                                               rank=LOCAL_RANK,
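Note (illustrative, not part of the patch series): a minimal sketch of the two PyTorch APIs that PATCH 1/7 migrates to, assuming a recent PyTorch release (2.4 or later, where the torch.amp namespace and the weights_only argument exist). The helper names and the checkpoint path are placeholders, not code from the repository.

import torch


def load_full_checkpoint(path):
    """Load a pickled YOLOv5-style checkpoint on CPU.

    Full checkpoints contain whole Model objects, so the safer
    weights_only=True default would reject them; opt out explicitly.
    """
    return torch.load(path, map_location='cpu', weights_only=False)


def amp_forward(model, imgs, amp=True):
    """Forward pass under the device-agnostic torch.amp autocast,
    which replaces the deprecated torch.cuda.amp.autocast."""
    with torch.amp.autocast(device_type='cuda', dtype=torch.float16 if amp else torch.float32):
        return model(imgs)


def make_scaler(amp=True):
    """GradScaler now lives in torch.amp; the target device can be passed explicitly."""
    return torch.amp.GradScaler('cuda', enabled=amp)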
From ab050b87c23275d3a0827a9682a3eca61dbc3cd7 Mon Sep 17 00:00:00 2001
From: faiga91
Date: Mon, 11 Nov 2024 22:13:55 +0100
Subject: [PATCH 3/7] add weights_only=False

---
 models/experimental.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/models/experimental.py b/models/experimental.py
index b1a466a6c..c2b573ede 100644
--- a/models/experimental.py
+++ b/models/experimental.py
@@ -240,7 +240,7 @@ def attempt_load(weights, device=None, inplace=True, fuse=True):

     model = Ensemble()
     for w in weights if isinstance(weights, list) else [weights]:
-        ckpt = torch.load(attempt_download(w), map_location='cpu')  # load
+        ckpt = torch.load(attempt_download(w), map_location='cpu', weights_only=False)  # load
         ckpt = (ckpt.get('ema') or ckpt['model']).to(device).float()  # FP32 model

         # Model compatibility updates

From 82974f81bf10535ba10e6f39c9458aa03f2bbf1a Mon Sep 17 00:00:00 2001
From: faiga91
Date: Mon, 11 Nov 2024 22:14:11 +0100
Subject: [PATCH 4/7] weights_only=False

---
 utils/general.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/utils/general.py b/utils/general.py
index efe78b29a..5ff8fa164 100644
--- a/utils/general.py
+++ b/utils/general.py
@@ -996,7 +996,7 @@ def non_max_suppression(

 def strip_optimizer(f='best.pt', s=''):  # from utils.general import *; strip_optimizer()
     # Strip optimizer from 'f' to finalize training, optionally save as 's'
-    x = torch.load(f, map_location=torch.device('cpu'))
+    x = torch.load(f, map_location=torch.device('cpu'), weights_only=False)
     if x.get('ema'):
         x['model'] = x['ema']  # replace model with ema
     for k in 'optimizer', 'best_fitness', 'ema', 'updates':  # keys

From c96a87c52fe62c44b6984bf4cd14abc052427beb Mon Sep 17 00:00:00 2001
From: faiga91
Date: Mon, 11 Nov 2024 22:14:47 +0100
Subject: [PATCH 5/7] update fontsize to getbbox

---
 utils/plots.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/utils/plots.py b/utils/plots.py
index fa49dc19d..31d01eae3 100644
--- a/utils/plots.py
+++ b/utils/plots.py
@@ -83,7 +83,9 @@ def box_label(self, box, label='', color=(128, 128, 128), txt_color=(255, 255, 2
         if self.pil or not is_ascii(label):
             self.draw.rectangle(box, width=self.lw, outline=color)  # box
             if label:
-                w, h = self.font.getsize(label)  # text width, height
+                bbox = self.font.getbbox(label)
+                w, h = bbox[2] - bbox[0], bbox[3] - bbox[1]  # width and height
+
                 outside = box[1] - h >= 0  # label fits outside box
                 self.draw.rectangle(
                     (box[0], box[1] - h if outside else box[1], box[0] + w + 1,
@@ -162,7 +164,7 @@ def rectangle(self, xy, fill=None, outline=None, width=1):

     def text(self, xy, text, txt_color=(255, 255, 255), anchor='top'):
         # Add text to image (PIL-only)
         if anchor == 'bottom':  # start y from font bottom
-            w, h = self.font.getsize(text)  # text width, height
+            w, h = self.font.getbbox(text)  # text width, height
             xy[1] += 1 - h
         self.draw.text(xy, text, fill=txt_color, font=self.font)

From 76886b32a08c09fe6c3b357973ab012da5578260 Mon Sep 17 00:00:00 2001
From: faiga91
Date: Wed, 20 Nov 2024 11:17:20 +0100
Subject: [PATCH 6/7] Set augment in the data loader to True

---
 train.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/train.py b/train.py
index a5e5d68d6..21875fcc2 100644
--- a/train.py
+++ b/train.py
@@ -180,7 +180,7 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictio
                                               gs,
                                               single_cls,
                                               hyp=hyp,
-                                              augment=False,
+                                              augment=True,
                                               cache=None if opt.cache == 'val' else opt.cache,
                                               rect=opt.rect,
                                               rank=LOCAL_RANK,
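Note (illustrative, not part of the patch series): a minimal sketch of the Pillow API behind the getsize-to-getbbox change in PATCH 5/7. ImageFont.getsize() was removed in Pillow 10; getbbox() returns a 4-tuple (left, top, right, bottom), so width and height are obtained by subtraction, as in the box_label hunk above. The helper name is a placeholder; it assumes Pillow 9.2 or newer, where getbbox() is available on both bitmap and FreeType fonts.

from PIL import ImageFont


def text_wh(font, text):
    """Return (width, height) of `text`, mirroring the removed font.getsize()."""
    left, top, right, bottom = font.getbbox(text)  # bounding box of the rendered text
    return right - left, bottom - top


if __name__ == '__main__':
    font = ImageFont.load_default()  # Pillow's built-in bitmap font
    print(text_wh(font, 'person 0.87'))  # prints the label's pixel width and height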
From c46a854a8d464fa2188566ce280227dbc71bcf2f Mon Sep 17 00:00:00 2001
From: faiga91
Date: Fri, 22 Nov 2024 13:36:10 +0100
Subject: [PATCH 7/7] make img writable before plotting

---
 utils/plots.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/utils/plots.py b/utils/plots.py
index 31d01eae3..e1bcd3f6b 100644
--- a/utils/plots.py
+++ b/utils/plots.py
@@ -95,6 +95,8 @@ def box_label(self, box, label='', color=(128, 128, 128), txt_color=(255, 255, 2
             # self.draw.text((box[0], box[1]), label, fill=txt_color, font=self.font, anchor='ls')  # for PIL>8.0
             self.draw.text((box[0], box[1] - h if outside else box[1]), label, fill=txt_color, font=self.font)
         else:  # cv2
+            if not self.im.flags.writeable:
+                self.im = self.im.copy()
             p1, p2 = (int(box[0]), int(box[1])), (int(box[2]), int(box[3]))
             cv2.rectangle(self.im, p1, p2, color, thickness=self.lw, lineType=cv2.LINE_AA)
             if label:
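Note (illustrative, not part of the patch series): a minimal sketch of why the writeable guard in PATCH 7/7 is needed. OpenCV drawing functions modify the NumPy array in place and raise an error when the array's WRITEABLE flag is False, which happens for frames backed by read-only buffers (for example, the view that np.asarray() returns for a PIL image). The function name and the demo values are placeholders.

import cv2
import numpy as np


def safe_rectangle(im, p1, p2, color=(0, 255, 0), thickness=2):
    """Draw a rectangle, copying the image first if it is read-only."""
    if not im.flags.writeable:
        im = im.copy()  # a fresh copy is always writable
    cv2.rectangle(im, p1, p2, color, thickness=thickness, lineType=cv2.LINE_AA)
    return im


if __name__ == '__main__':
    img = np.zeros((64, 64, 3), dtype=np.uint8)
    img.flags.writeable = False  # simulate a read-only frame
    out = safe_rectangle(img, (8, 8), (56, 56))  # succeeds; drawing happens on the copy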