From f89a8a1a75d8b85f7170002f5e4de5f4f9f29f0f Mon Sep 17 00:00:00 2001
From: MasterBin-IIAU
Date: Fri, 26 Mar 2021 13:50:05 +0800
Subject: [PATCH] remove unused code

---
 README.md                   |  8 ++--
 lib/utils/box_ops_detr.py   | 88 -------------------------------------
 lib/utils/gen_fname_list.py | 33 --------------
 lib/utils/loading.py        | 24 ----------
 lib/utils/write_to_lmdb.py  | 39 ----------------
 5 files changed, 4 insertions(+), 188 deletions(-)
 delete mode 100644 lib/utils/box_ops_detr.py
 delete mode 100644 lib/utils/gen_fname_list.py
 delete mode 100644 lib/utils/loading.py
 delete mode 100644 lib/utils/write_to_lmdb.py

diff --git a/README.md b/README.md
index 588c92e..4b4c016 100644
--- a/README.md
+++ b/README.md
@@ -81,13 +81,13 @@ python tracking/analysis_results.py # need to modify tracker configs and names
 ```
 - GOT10K-test
 ```
-python tracking/test.py stark_s baseline_got10k_only --dataset got10k_test --threads 32
-python lib/test/utils/transform_got10k.py --tracker_name stark_s --cfg_name baseline_got10k_only
+python tracking/test.py stark_st baseline_got10k_only --dataset got10k_test --threads 32
+python lib/test/utils/transform_got10k.py --tracker_name stark_st --cfg_name baseline_got10k_only
 ```
 - TrackingNet
 ```
-python tracking/test.py stark_s baseline --dataset trackingnet --threads 32
-python lib/test/utils/transform_trackingnet.py --tracker_name stark_s --cfg_name baseline
+python tracking/test.py stark_st baseline --dataset trackingnet --threads 32
+python lib/test/utils/transform_trackingnet.py --tracker_name stark_st --cfg_name baseline
 ```
 - VOT2020
 Before evaluating "STARK+AR" on VOT2020, please install some extra packages following [external/AR/README.md](external/AR/README.md)
diff --git a/lib/utils/box_ops_detr.py b/lib/utils/box_ops_detr.py
deleted file mode 100644
index 9c088e5..0000000
--- a/lib/utils/box_ops_detr.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
-"""
-Utilities for bounding box manipulation and GIoU.
-"""
-import torch
-from torchvision.ops.boxes import box_area
-
-
-def box_cxcywh_to_xyxy(x):
-    x_c, y_c, w, h = x.unbind(-1)
-    b = [(x_c - 0.5 * w), (y_c - 0.5 * h),
-         (x_c + 0.5 * w), (y_c + 0.5 * h)]
-    return torch.stack(b, dim=-1)
-
-
-def box_xyxy_to_cxcywh(x):
-    x0, y0, x1, y1 = x.unbind(-1)
-    b = [(x0 + x1) / 2, (y0 + y1) / 2,
-         (x1 - x0), (y1 - y0)]
-    return torch.stack(b, dim=-1)
-
-
-# modified from torchvision to also return the union
-def box_iou(boxes1, boxes2):
-    area1 = box_area(boxes1)
-    area2 = box_area(boxes2)
-
-    lt = torch.max(boxes1[:, None, :2], boxes2[:, :2])  # [N,M,2]
-    rb = torch.min(boxes1[:, None, 2:], boxes2[:, 2:])  # [N,M,2]
-
-    wh = (rb - lt).clamp(min=0)  # [N,M,2]
-    inter = wh[:, :, 0] * wh[:, :, 1]  # [N,M]
-
-    union = area1[:, None] + area2 - inter
-
-    iou = inter / union
-    return iou, union
-
-
-def generalized_box_iou(boxes1, boxes2):
-    """
-    Generalized IoU from https://giou.stanford.edu/
-
-    The boxes should be in [x0, y0, x1, y1] format
-
-    Returns a [N, M] pairwise matrix, where N = len(boxes1)
-    and M = len(boxes2)
-    """
-    # degenerate boxes gives inf / nan results
-    # so do an early check
-    assert (boxes1[:, 2:] >= boxes1[:, :2]).all()
-    assert (boxes2[:, 2:] >= boxes2[:, :2]).all()
-    iou, union = box_iou(boxes1, boxes2)
-
-    lt = torch.min(boxes1[:, None, :2], boxes2[:, :2])
-    rb = torch.max(boxes1[:, None, 2:], boxes2[:, 2:])
-
-    wh = (rb - lt).clamp(min=0)  # [N,M,2]
-    area = wh[:, :, 0] * wh[:, :, 1]
-
-    return iou - (area - union) / area
-
-
-def masks_to_boxes(masks):
-    """Compute the bounding boxes around the provided masks
-
-    The masks should be in format [N, H, W] where N is the number of masks, (H, W) are the spatial dimensions.
-
-    Returns a [N, 4] tensors, with the boxes in xyxy format
-    """
-    if masks.numel() == 0:
-        return torch.zeros((0, 4), device=masks.device)
-
-    h, w = masks.shape[-2:]
-
-    y = torch.arange(0, h, dtype=torch.float)
-    x = torch.arange(0, w, dtype=torch.float)
-    y, x = torch.meshgrid(y, x)
-
-    x_mask = (masks * x.unsqueeze(0))
-    x_max = x_mask.flatten(1).max(-1)[0]
-    x_min = x_mask.masked_fill(~(masks.bool()), 1e8).flatten(1).min(-1)[0]
-
-    y_mask = (masks * y.unsqueeze(0))
-    y_max = y_mask.flatten(1).max(-1)[0]
-    y_min = y_mask.masked_fill(~(masks.bool()), 1e8).flatten(1).min(-1)[0]
-
-    return torch.stack([x_min, y_min, x_max, y_max], 1)
diff --git a/lib/utils/gen_fname_list.py b/lib/utils/gen_fname_list.py
deleted file mode 100644
index bdbbd99..0000000
--- a/lib/utils/gen_fname_list.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import os
-import argparse
-
-parser = argparse.ArgumentParser(description='generate namelist')
-parser.add_argument('--base_dir', type=str, required=True, help='data directory.')
-parser.add_argument('--result_file', type=str, required=True, help="result file.")
-args = parser.parse_args()
-base_dir = args.base_dir
-result_file = args.result_file
-
-# base_dir = "/data/sda/v-yanbi/iccv21/LittleBoy/data/got10k/" # replace it with your own path
-# result_file = "/data/sda/v-yanbi/iccv21/LittleBoy/data/got10k.namelist"
-# base_dir = "/data/sda/v-yanbi/iccv21/LittleBoy/data/lasot/" # replace it with your own path
-# result_file = "/data/sda/v-yanbi/iccv21/LittleBoy/data/lasot.namelist"
-# base_dir = "/data/sda/v-yanbi/iccv21/LittleBoy/data/vid/" # replace it with your own path
-# result_file = "/data/sda/v-yanbi/iccv21/LittleBoy/data/vid.namelist"
-# base_dir = "/data/sda/v-yanbi/iccv21/LittleBoy/data/coco/" # replace it with your own path
-# result_file = "/data/sda/v-yanbi/iccv21/LittleBoy/data/coco.namelist"
-
-assert (base_dir.endswith('/'))
-fnames = []
-for root, dirs, files in os.walk(base_dir):
-    for file in files:
-        path = os.path.join(root, file)
-        if not os.path.exists(path):
-            print("%s doesn't exist.")
-        rela_path = path.replace(base_dir, "")
-        fnames.append(rela_path)
-
-
-with open(result_file, 'w') as fout:
-    for name in fnames:
-        fout.write(name + '\n')
diff --git a/lib/utils/loading.py b/lib/utils/loading.py
deleted file mode 100644
index c10a502..0000000
--- a/lib/utils/loading.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import sys
-import importlib
-
-
-# for loading previously trained model
-
-
-def setup_legacy_env():
-    importlib.import_module('lib.train')
-    sys.modules['ltr'] = sys.modules['lib.train']
-    importlib.import_module('lib.models')
-    sys.modules['ltr.models'] = sys.modules['lib.models']
-    for m in ('littleboy',):
-        importlib.import_module('lib.models.' + m)
-        sys.modules['ltr.models.' + m] = sys.modules['lib.models.' + m]
-
-
-def cleanup_legacy_env():
-    del_modules = []
-    for m in sys.modules.keys():
-        if m.startswith('ltr'):
-            del_modules.append(m)
-    for m in del_modules:
-        del sys.modules[m]
\ No newline at end of file
diff --git a/lib/utils/write_to_lmdb.py b/lib/utils/write_to_lmdb.py
deleted file mode 100644
index b1d1ee1..0000000
--- a/lib/utils/write_to_lmdb.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import lmdb
-import os
-from tqdm import tqdm
-import argparse
-
-parser = argparse.ArgumentParser(description='generate lmdb')
-parser.add_argument('--base_dir', type=str, required=True, help='data directory.')
-parser.add_argument('--result_file', type=str, required=True, help="result file.")
-args = parser.parse_args()
-base_dir = args.base_dir
-result_file = args.result_file
-# result_file = "/data/sda/v-yanbi/iccv21/LittleBoy_clean/data/got10k.namelist"
-# base_dir = "/data/sda/v-yanbi/iccv21/LittleBoy_clean/data/got10k"
-# result_file = "/data/sda/v-yanbi/iccv21/LittleBoy_clean/data/lasot.namelist"
-# base_dir = "/data/sda/v-yanbi/iccv21/LittleBoy_clean/data/lasot"
-# result_file = "/data/sda/v-yanbi/iccv21/LittleBoy/data/vid.namelist"
-# base_dir = "/data/sda/v-yanbi/iccv21/LittleBoy/data/vid"
-# result_file = "/data/sda/v-yanbi/iccv21/LittleBoy/data/coco.namelist"
-# base_dir = "/data/sda/v-yanbi/iccv21/LittleBoy/data/coco"
-
-namelist = [x.strip() for x in open(result_file).readlines()]
-print('number:', len(namelist))
-
-if base_dir.endswith("/"):
-    lmdb_fname = base_dir[:-1] + '_lmdb'
-else:
-    lmdb_fname = base_dir + '_lmdb'
-env = lmdb.open(lmdb_fname, map_size=1024 ** 4)
-txn = env.begin(write=True)
-
-for i, t in enumerate(tqdm(namelist)):
-    if i % 100000 == 0:
-        txn.commit()
-        txn = env.begin(write=True)
-    with open(os.path.join(base_dir, t), 'rb') as fin:
-        txn.put(key=t.encode(), value=fin.read())
-
-txn.commit()
-env.close()