Commit
update docs
L1aoXingyu committed Jan 23, 2021
1 parent b5c3c0a commit a53fd17
Showing 16 changed files with 33 additions and 75 deletions.
4 changes: 2 additions & 2 deletions docs/index.rst
@@ -1,9 +1,9 @@
-.. detectron2 documentation master file, created by
+.. fastreid documentation master file, created by
sphinx-quickstart on Sat Sep 21 13:46:45 2019.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
-Welcome to detectron2's documentation!
+Welcome to fastreid's documentation!
======================================

.. toctree::
9 changes: 0 additions & 9 deletions docs/modules/export.rst

This file was deleted.

32 changes: 16 additions & 16 deletions docs/requirements.txt
@@ -1,20 +1,20 @@
termcolor
numpy
tqdm
docutils==0.16
# https://github.com/sphinx-doc/sphinx/commit/7acd3ada3f38076af7b2b5c9f3b60bb9c2587a3d
git+git://github.com/sphinx-doc/sphinx.git@7acd3ada3f38076af7b2b5c9f3b60bb9c2587a3d
recommonmark==0.6.0
sphinx_rtd_theme
matplotlib
scipy
Pillow
numpy
prettytable
easydict
scikit-learn
pyyaml
yacs
termcolor
tabulate
tensorboard
opencv-python
pyyaml
yacs
termcolor
scikit-learn
tabulate
cloudpickle
Pillow
future
requests
six
git+git://github.com/facebookresearch/fvcore.git
https://download.pytorch.org/whl/cpu/torch-1.5.0%2Bcpu-cp37-cp37m-linux_x86_64.whl
https://download.pytorch.org/whl/cpu/torchvision-0.6.0%2Bcpu-cp37-cp37m-linux_x86_64.whl
gdown
faiss-gpu
6 changes: 1 addition & 5 deletions fastreid/data/datasets/caviara.py
@@ -5,16 +5,12 @@
"""

import os
-from scipy.io import loadmat
from glob import glob

from fastreid.data.datasets import DATASET_REGISTRY
from fastreid.data.datasets.bases import ImageDataset
-import pdb
-import random
-import numpy as np

-__all__ = ['CAVIARa',]
+__all__ = ['CAVIARa', ]


@DATASET_REGISTRY.register()
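The hunk above keeps the two pieces every fastreid dataset file shares after the cleanup: the `DATASET_REGISTRY` import and the `@DATASET_REGISTRY.register()` decorator on the dataset class (an `ImageDataset` subclass, per the imports). Below is a minimal sketch of that registration pattern; the class, paths, and the exact `ImageDataset(train, query, gallery, **kwargs)` call are illustrative assumptions based on what the diff shows, not the actual CAVIARa implementation.

```python
# Hypothetical toy dataset registered the same way as the classes in this commit.
from fastreid.data.datasets import DATASET_REGISTRY
from fastreid.data.datasets.bases import ImageDataset


@DATASET_REGISTRY.register()
class TinyDemoDataset(ImageDataset):
    dataset_name = "tinydemo"  # prefix keeps pids/camids unique across datasets

    def __init__(self, root="datasets", **kwargs):
        # Each record is [img_path, pid, camid], matching data.append([...]) in the diffs.
        train = [["tinydemo/0001/img_0001.jpg",
                  self.dataset_name + "_0001",
                  self.dataset_name + "_cam0"]]
        # Assumed base-class signature: (train, query, gallery, **kwargs).
        super().__init__(train, [], [], **kwargs)
```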
6 changes: 3 additions & 3 deletions fastreid/data/datasets/cuhk03.py
@@ -100,7 +100,7 @@ def preprocess_split(self):

import h5py
from imageio import imwrite
-from scipy.io import loadmat
+from scipy import io

PathManager.mkdirs(self.imgs_detected_dir)
PathManager.mkdirs(self.imgs_labeled_dir)
@@ -236,7 +236,7 @@ def _extract_new_split(split_dict, img_dir):

print('Creating new split for detected images (767/700) ...')
train_info, query_info, gallery_info = _extract_new_split(
-loadmat(self.split_new_det_mat_path),
+io.loadmat(self.split_new_det_mat_path),
self.imgs_detected_dir
)
split = [{
@@ -256,7 +256,7 @@ def _extract_new_split(split_dict, img_dir):

print('Creating new split for labeled images (767/700) ...')
train_info, query_info, gallery_info = _extract_new_split(
-loadmat(self.split_new_lab_mat_path),
+io.loadmat(self.split_new_lab_mat_path),
self.imgs_labeled_dir
)
split = [{
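The cuhk03.py change only swaps the import style: `io.loadmat` and the previously imported `loadmat` resolve to the same SciPy function, so behavior is unchanged. A small sketch of that equivalence follows; the .mat filename and key in the commented lines are placeholders, not guaranteed to match the real CUHK03 split files.

```python
# Both import styles point at the same function object.
from scipy import io
from scipy.io import loadmat

assert io.loadmat is loadmat

# Placeholder usage; the actual split files and keys live in the CUHK03 dataset dir.
# split_dict = io.loadmat("some_split_file.mat")
# train_idx = split_dict["train_idx"].flatten()
```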
5 changes: 1 addition & 4 deletions fastreid/data/datasets/sysu_mm.py
@@ -5,12 +5,10 @@
"""

import os
-from scipy.io import loadmat
from glob import glob

from fastreid.data.datasets import DATASET_REGISTRY
from fastreid.data.datasets.bases import ImageDataset
-import pdb

__all__ = ['SYSU_mm', ]

@@ -37,7 +35,7 @@ def process_train(self, train_path):
data = []

file_path_list = ['cam1', 'cam2', 'cam4', 'cam5']

for file_path in file_path_list:
camid = self.dataset_name + "_" + file_path
pid_list = os.listdir(os.path.join(train_path, file_path))
@@ -47,4 +45,3 @@ def process_train(self, train_path):
for img_path in img_list:
data.append([img_path, pid, camid])
return data

8 changes: 2 additions & 6 deletions fastreid/data/datasets/thermalworld.py
@@ -5,16 +5,12 @@
"""

import os
-from scipy.io import loadmat
from glob import glob

from fastreid.data.datasets import DATASET_REGISTRY
from fastreid.data.datasets.bases import ImageDataset
-import pdb
-import random
-import numpy as np

-__all__ = ['Thermalworld',]
+__all__ = ['Thermalworld', ]


@DATASET_REGISTRY.register()
@@ -40,7 +36,7 @@ def process_train(self, train_path):
pid_list = os.listdir(train_path)
for pid_dir in pid_list:
pid = self.dataset_name + "_" + pid_dir
-img_list = glob(os.path.join(train_path, pid_dir, "*.jpg"))
+img_list = glob(os.path.join(train_path, pid_dir, "*.jpg"))
for img_path in img_list:
camid = self.dataset_name + "_cam0"
data.append([img_path, pid, camid])
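caviara.py, sysu_mm.py, and thermalworld.py all keep the same directory-scan logic visible in the hunks above: one folder per identity, and every *.jpg inside it becomes an [img_path, pid, camid] record. A self-contained sketch of that pattern follows; the function name and example path are made up for illustration.

```python
import os
from glob import glob


def scan_identity_folders(train_path, dataset_name="thermalworld"):
    """Collect [img_path, pid, camid] records from <train_path>/<pid_dir>/*.jpg."""
    data = []
    for pid_dir in os.listdir(train_path):
        pid = dataset_name + "_" + pid_dir        # dataset prefix keeps pids globally unique
        for img_path in glob(os.path.join(train_path, pid_dir, "*.jpg")):
            camid = dataset_name + "_cam0"        # single fixed camera id, as in the diff
            data.append([img_path, pid, camid])
    return data


# e.g. records = scan_identity_folders("/path/to/thermalworld/train")
```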
3 changes: 1 addition & 2 deletions fastreid/engine/defaults.py
@@ -11,7 +11,6 @@
import argparse
import logging
import os
-import math
import sys
from collections import OrderedDict

@@ -247,7 +246,6 @@ def __init__(self, cfg):
**self.scheduler,
)


self.start_epoch = 0
self.max_epoch = cfg.SOLVER.MAX_EPOCH
self.max_iter = self.max_epoch * self.iters_per_epoch
@@ -323,6 +321,7 @@ def build_hooks(self):
cfg.SOLVER.FREEZE_ITERS,
cfg.SOLVER.FREEZE_FC_ITERS,
))

# Do PreciseBN before checkpointer, because it updates the model and need to
# be saved by checkpointer.
# This is not always the best: if checkpointing has a different frequency,
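The comment kept in the hunk above explains the ordering constraint in build_hooks: PreciseBN mutates the model's BN statistics, so it must run before the checkpointer hook or the saved weights miss the update. The toy below illustrates why registration order matters; the two classes are hypothetical stand-ins, not fastreid's hook API.

```python
# Hypothetical stand-ins for a PreciseBN-style hook and a checkpointer hook.
class RecomputeBNStats:
    def after_epoch(self, state):
        state["bn_stats"] = "recomputed"      # mutates the model state


class SaveCheckpoint:
    def after_epoch(self, state):
        state["checkpoint"] = dict(state)     # snapshots whatever exists *now*


state = {"bn_stats": "stale"}
for hook in [RecomputeBNStats(), SaveCheckpoint()]:   # order matters
    hook.after_epoch(state)

assert state["checkpoint"]["bn_stats"] == "recomputed"  # swap the order and this fails
```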
2 changes: 1 addition & 1 deletion fastreid/engine/hooks.py
@@ -10,8 +10,8 @@
from collections import Counter

import torch
-from torch import nn
from apex.parallel import DistributedDataParallel
+from torch import nn

from fastreid.evaluation.testing import flatten_results_dict
from fastreid.solver import optim
1 change: 0 additions & 1 deletion fastreid/engine/train_loop.py
@@ -12,7 +12,6 @@
import numpy as np
import torch
from apex import amp
-from apex.parallel import DistributedDataParallel

import fastreid.utils.comm as comm
from fastreid.utils.events import EventStorage, get_event_storage
3 changes: 1 addition & 2 deletions fastreid/evaluation/__init__.py
@@ -1,8 +1,7 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from .evaluator import DatasetEvaluator, inference_context, inference_on_dataset
from .rank import evaluate_rank
-from .roc import evaluate_roc
from .reid_evaluation import ReidEvaluator
+from .roc import evaluate_roc
from .testing import print_csv_format, verify_results

__all__ = [k for k in globals().keys() if not k.startswith("_")]
1 change: 0 additions & 1 deletion fastreid/evaluation/rank_cylib/rank_cy.pyx
@@ -5,7 +5,6 @@ import cython
import numpy as np
cimport numpy as np
from collections import defaultdict
-import faiss


"""
2 changes: 1 addition & 1 deletion fastreid/modeling/__init__.py
@@ -20,4 +20,4 @@
META_ARCH_REGISTRY,
)

-__all__ = [k for k in globals().keys() if k not in k.startswith("_")]
+__all__ = [k for k in globals().keys() if not k.startswith("_")]
2 changes: 1 addition & 1 deletion fastreid/modeling/losses/__init__.py
@@ -9,4 +9,4 @@
from .focal_loss import focal_loss
from .triplet_loss import triplet_loss

-__all__ = [k for k in globals().keys() if k not in k.startswith("_")]
+__all__ = [k for k in globals().keys() if not k.startswith("_")]
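Both `__init__.py` hunks above fix the same bug: `k not in k.startswith("_")` tests whether the string k is a member of a bool, which raises TypeError as soon as the comprehension runs, while `not k.startswith("_")` is the intended "keep public names" filter. A quick sketch of the difference (the name used here is arbitrary):

```python
k = "META_ARCH_REGISTRY"          # any public name from the module's globals()

print(not k.startswith("_"))      # True  -> the corrected filter keeps public names

try:
    k not in k.startswith("_")    # the old expression: membership test against a bool
except TypeError as err:
    print(err)                    # "argument of type 'bool' is not iterable"
```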
4 changes: 3 additions & 1 deletion fastreid/utils/logger.py
@@ -5,9 +5,11 @@
import sys
import time
from collections import Counter
-from .file_io import PathManager

from termcolor import colored

+from .file_io import PathManager


class _ColorfulFormatter(logging.Formatter):
def __init__(self, *args, **kwargs):
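The logger.py and hooks.py shuffles above just normalize import order: standard library first, then third-party (alphabetical, which puts apex before torch), then package-local imports, with a blank line between groups. A minimal sketch of that layout follows; the module choices are illustrative and the imports are there only to show the grouping.

```python
import logging                     # 1) standard library
import sys

from termcolor import colored      # 2) third-party, alphabetical within the group

from fastreid.utils.file_io import PathManager  # 3) first-party / local
```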
20 changes: 0 additions & 20 deletions requirements.txt

This file was deleted.
