Commit

update paper links in docs
Summary: Pull Request resolved: fairinternal/detectron2#405

Reviewed By: rbgirshick

Differential Revision: D21408066

Pulled By: ppwwyyxx

fbshipit-source-id: 5120f5e0e8e0bca9842ca263cb706ca3fbf0ecb8
ppwwyyxx authored and facebook-github-bot committed May 5, 2020
1 parent 09bbf08 commit 1b09e42
Showing 19 changed files with 113 additions and 73 deletions.
2 changes: 1 addition & 1 deletion detectron2/data/samplers/distributed_sampler.py
@@ -63,7 +63,7 @@ class RepeatFactorTrainingSampler(Sampler):
as the fraction of images in the training set (without repeats) in which category c
appears.
- See https://arxiv.org/abs/1908.03195 (>= v2) Appendix B.2.
+ See :paper:`lvis` (>= v2) Appendix B.2.
"""

def __init__(self, dataset_dicts, repeat_thresh, shuffle=True, seed=None):
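For context on the rule this docstring points to (LVIS, Appendix B.2): each category gets a repeat factor r(c) = max(1, sqrt(t / f(c))), where f(c) is the category's image frequency and t is repeat_thresh, and an image's repeat factor is the maximum over the categories it contains. A minimal sketch, with illustrative helper names that are not part of this commit:

```python
import math

def category_repeat_factors(category_freq, repeat_thresh):
    # r(c) = max(1, sqrt(t / f(c))): only rare categories (f(c) < t) get repeated
    return {c: max(1.0, math.sqrt(repeat_thresh / f)) for c, f in category_freq.items()}

def image_repeat_factor(image_categories, rc):
    # an image is repeated as often as its rarest category demands
    return max(rc[c] for c in image_categories)

rc = category_repeat_factors({"common": 0.5, "rare": 0.0005}, repeat_thresh=0.001)
print(rc["rare"])                                   # ~1.414
print(image_repeat_factor({"common", "rare"}, rc))  # ~1.414
```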
4 changes: 2 additions & 2 deletions detectron2/layers/deform_conv.py
@@ -321,7 +321,7 @@ def __init__(
activation=None,
):
"""
- Deformable convolution.
+ Deformable convolution from :paper:`deformconv`.
Arguments are similar to :class:`Conv2D`. Extra arguments:
@@ -418,7 +418,7 @@ def __init__(
activation=None,
):
"""
- Modulated deformable convolution.
+ Modulated deformable convolution from :paper:`deformconv2`.
Arguments are similar to :class:`Conv2D`. Extra arguments:
1 change: 1 addition & 0 deletions detectron2/modeling/backbone/__init__.py
@@ -5,4 +5,5 @@
from .fpn import FPN
from .resnet import ResNet, ResNetBlockBase, build_resnet_backbone, make_stage

__all__ = [k for k in globals().keys() if not k.startswith("_")]
+ # TODO can expose more resnet blocks after careful consideration
2 changes: 1 addition & 1 deletion detectron2/modeling/backbone/fpn.py
@@ -15,7 +15,7 @@

class FPN(Backbone):
"""
- This module implements Feature Pyramid Network.
+ This module implements :paper:`FPN`.
It creates pyramid features built on top of some input feature maps.
"""

24 changes: 16 additions & 8 deletions detectron2/modeling/backbone/resnet.py
@@ -37,8 +37,8 @@

class BasicBlock(CNNBlockBase):
"""
- The basic residual block for ResNet-18 and ResNet-34, with two 3x3 conv layers
- and a projection shortcut if needed.
+ The basic residual block for ResNet-18 and ResNet-34 defined in :paper:`ResNet`,
+ with two 3x3 conv layers and a projection shortcut if needed.
"""

def __init__(self, in_channels, out_channels, *, stride=1, norm="BN"):
@@ -105,9 +105,9 @@ def forward(self, x):

class BottleneckBlock(CNNBlockBase):
"""
- The standard bottleneck residual block used by ResNet-50, 101 and 152.
- It contains 3 conv layers with kernels 1x1, 3x3, 1x1, and a projection
- shortcut if needed.
+ The standard bottleneck residual block used by ResNet-50, 101 and 152
+ defined in :paper:`ResNet`. It contains 3 conv layers with kernels
+ 1x1, 3x3, 1x1, and a projection shortcut if needed.
"""

def __init__(
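As a rough illustration of the structure this docstring describes, here is a plain-PyTorch sketch of a 1x1 / 3x3 / 1x1 bottleneck with an optional projection shortcut; it is not detectron2's actual BottleneckBlock, and norm layers are omitted for brevity:

```python
import torch.nn as nn
import torch.nn.functional as F

class BottleneckSketch(nn.Module):
    def __init__(self, in_channels, bottleneck_channels, out_channels, stride=1):
        super().__init__()
        self.conv1 = nn.Conv2d(in_channels, bottleneck_channels, 1, bias=False)
        self.conv2 = nn.Conv2d(bottleneck_channels, bottleneck_channels, 3,
                               stride=stride, padding=1, bias=False)
        self.conv3 = nn.Conv2d(bottleneck_channels, out_channels, 1, bias=False)
        # projection shortcut only when the residual cannot be added directly
        self.shortcut = (
            nn.Conv2d(in_channels, out_channels, 1, stride=stride, bias=False)
            if stride != 1 or in_channels != out_channels else None
        )

    def forward(self, x):
        out = F.relu(self.conv1(x))
        out = F.relu(self.conv2(out))
        out = self.conv3(out)
        identity = self.shortcut(x) if self.shortcut is not None else x
        return F.relu(out + identity)
```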
@@ -218,7 +218,8 @@ def forward(self, x):

class DeformBottleneckBlock(ResNetBlockBase):
"""
- Similar to :class:`BottleneckBlock`, but with deformable conv in the 3x3 convolution.
+ Similar to :class:`BottleneckBlock`, but with :paper:`deformable conv <deformconv>`
+ in the 3x3 convolution.
"""

def __init__(
@@ -394,6 +395,10 @@ def forward(self, x):


class ResNet(Backbone):
"""
Implement :paper:`ResNet`.
"""

def __init__(self, stem, stages, num_classes=None, out_features=None):
"""
Args:
@@ -479,10 +484,13 @@ def freeze(self, freeze_at=0):
Freeze the first several stages of the ResNet. Commonly used in
fine-tuning.
+ Layers that produce the same feature map spatial size are defined as one
+ "stage" by :paper:`FPN`.
Args:
- freeze_at (int): number of stem and stages to freeze.
+ freeze_at (int): number of stages to freeze.
`1` means freezing the stem. `2` means freezing the stem and
- the first stage, etc.
+ one residual stage, etc.
Returns:
nn.Module: this ResNet itself
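A minimal usage sketch of the freeze_at semantics spelled out above, assuming a default config and that freezing disables gradients on the affected parameters (the config values below are not part of this commit):

```python
from detectron2.config import get_cfg
from detectron2.layers import ShapeSpec
from detectron2.modeling.backbone import build_resnet_backbone

cfg = get_cfg()
backbone = build_resnet_backbone(cfg, ShapeSpec(channels=3))
# 0 freezes nothing, 1 freezes only the stem, 2 freezes the stem + first residual stage, ...
backbone.freeze(freeze_at=2)
frozen = sum(1 for p in backbone.parameters() if not p.requires_grad)
print(f"{frozen} parameter tensors frozen")
```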
4 changes: 2 additions & 2 deletions detectron2/modeling/matcher.py
@@ -106,8 +106,8 @@ def set_low_quality_matches_(self, match_labels, match_quality_matrix):
maximum overlap with it (including ties); for each prediction in that set, if
it is unmatched, then match it to the ground-truth G.
- This function implements the RPN assignment case (i) in Sec. 3.1.2 of the
- Faster R-CNN paper: https://arxiv.org/pdf/1506.01497v3.pdf.
+ This function implements the RPN assignment case (i) in Sec. 3.1.2 of
+ :paper:`Faster R-CNN`.
"""
# For each gt, find the prediction with which it has highest quality
highest_quality_foreach_gt, _ = match_quality_matrix.max(dim=1)
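The assignment rule described above can be sketched as follows (illustrative, not the library's exact implementation): every ground-truth box keeps at least one positive prediction, namely the one(s) with which it has the highest IoU, ties included.

```python
import torch

def set_low_quality_matches(match_labels, match_quality_matrix):
    # match_quality_matrix: (num_gt, num_pred) pairwise IoU
    highest_quality_foreach_gt, _ = match_quality_matrix.max(dim=1)
    # (gt, pred) pairs that realize each ground-truth's best IoU, including ties
    _, pred_idx = torch.nonzero(
        match_quality_matrix == highest_quality_foreach_gt[:, None], as_tuple=True
    )
    match_labels[pred_idx] = 1  # force these predictions to be positive matches
    return match_labels
```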
2 changes: 1 addition & 1 deletion detectron2/modeling/meta_arch/panoptic_fpn.py
@@ -19,7 +19,7 @@
@META_ARCH_REGISTRY.register()
class PanopticFPN(nn.Module):
"""
- Main class for Panoptic FPN architectures (see https://arxiv.org/abs/1901.02446).
+ Implement the paper :paper:`PanopticFPN`.
"""

def __init__(self, cfg):
4 changes: 4 additions & 0 deletions detectron2/modeling/meta_arch/rcnn.py
@@ -205,6 +205,10 @@ def _postprocess(instances, batched_inputs, image_sizes):

@META_ARCH_REGISTRY.register()
class ProposalNetwork(nn.Module):
"""
A meta architecture that only predicts object proposals.
"""

def __init__(self, cfg):
super().__init__()
self.backbone = build_backbone(cfg)
7 changes: 3 additions & 4 deletions detectron2/modeling/meta_arch/retinanet.py
@@ -58,7 +58,7 @@ def permute_all_cls_and_box_to_N_HWA_K_and_concat(box_cls, box_delta, num_classe
@META_ARCH_REGISTRY.register()
class RetinaNet(nn.Module):
"""
- Implement RetinaNet (https://arxiv.org/abs/1708.02002).
+ Implement RetinaNet in :paper:`RetinaNet`.
"""

def __init__(self, cfg):
@@ -218,7 +218,7 @@ def losses(self, gt_classes, gt_anchors_deltas, pred_class_logits, pred_anchor_d
:meth:`RetinaNetHead.forward`.
Returns:
- dict[str: Tensor]:
+ dict[str, Tensor]:
mapping from a named loss to a scalar tensor
storing the loss. Used during training only. The dict keys are:
"loss_cls" and "loss_box_reg"
@@ -273,8 +273,7 @@ def get_ground_truth(self, anchors, targets):
Returns:
gt_classes (Tensor):
- An integer tensor of shape (N, R) storing ground-truth
- labels for each anchor.
+ An integer tensor of shape (N, R) storing ground-truth labels for each anchor.
R is the total number of anchors, i.e. the sum of Hi x Wi x A for all levels.
Anchors with an IoU with some target higher than the foreground threshold
are assigned their corresponding label in the [0, K-1] range.
8 changes: 4 additions & 4 deletions detectron2/modeling/meta_arch/semantic_seg.py
@@ -44,7 +44,7 @@ def device(self):
def forward(self, batched_inputs):
"""
Args:
- batched_inputs: a list, batched outputs of :class:`DatasetMapper` .
+ batched_inputs: a list, batched outputs of :class:`DatasetMapper`.
Each item in the list contains the inputs for one image.
For now, each item in the list is a dict that contains:
@@ -100,9 +100,9 @@ def build_sem_seg_head(cfg, input_shape):
@SEM_SEG_HEADS_REGISTRY.register()
class SemSegFPNHead(nn.Module):
"""
- A semantic segmentation head described in detail in the Panoptic Feature Pyramid Networks paper
- (https://arxiv.org/abs/1901.02446). It takes FPN features as input and merges information from
- all levels of the FPN into single output.
+ A semantic segmentation head described in :paper:`PanopticFPN`.
+ It takes FPN features as input and merges information from all
+ levels of the FPN into single output.
"""

def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):
10 changes: 5 additions & 5 deletions detectron2/modeling/proposal_generator/rpn.py
@@ -37,10 +37,10 @@ def build_rpn_head(cfg, input_shape):
@RPN_HEAD_REGISTRY.register()
class StandardRPNHead(nn.Module):
"""
- RPN classification and regression heads. Uses a 3x3 conv to produce a shared
- hidden state from which one 1x1 conv predicts objectness logits for each anchor
- and a second 1x1 conv predicts bounding-box deltas specifying how to deform
- each anchor into an object proposal.
+ Standard RPN classification and regression heads described in :paper:`Faster R-CNN`.
+ Uses a 3x3 conv to produce a shared hidden state from which one 1x1 conv predicts
+ objectness logits for each anchor and a second 1x1 conv predicts bounding-box deltas
+ specifying how to deform each anchor into an object proposal.
"""

def __init__(self, cfg, input_shape: List[ShapeSpec]):
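A plain-PyTorch sketch of the head described in the docstring above, to make the wiring concrete (illustrative only, not the registered StandardRPNHead):

```python
import torch.nn as nn
import torch.nn.functional as F

class RPNHeadSketch(nn.Module):
    def __init__(self, in_channels, num_anchors, box_dim=4):
        super().__init__()
        self.conv = nn.Conv2d(in_channels, in_channels, 3, padding=1)           # shared hidden state
        self.objectness_logits = nn.Conv2d(in_channels, num_anchors, 1)         # one score per anchor
        self.anchor_deltas = nn.Conv2d(in_channels, num_anchors * box_dim, 1)   # box_dim deltas per anchor

    def forward(self, features):
        # features: list of feature maps, one per level
        logits, deltas = [], []
        for x in features:
            t = F.relu(self.conv(x))
            logits.append(self.objectness_logits(t))
            deltas.append(self.anchor_deltas(t))
        return logits, deltas
```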
@@ -91,7 +91,7 @@ def forward(self, features):
@PROPOSAL_GENERATOR_REGISTRY.register()
class RPN(nn.Module):
"""
- Region Proposal Network, introduced by the Faster R-CNN paper.
+ Region Proposal Network, introduced by :paper:`Faster R-CNN`.
"""

def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):
6 changes: 1 addition & 5 deletions detectron2/modeling/proposal_generator/rrpn.py
@@ -125,11 +125,7 @@ def find_top_rrpn_proposals(
@PROPOSAL_GENERATOR_REGISTRY.register()
class RRPN(RPN):
"""
- Rotated RPN subnetwork.
- Please refer to https://arxiv.org/pdf/1703.01086.pdf for the original RRPN paper:
- Ma, J., Shao, W., Ye, H., Wang, L., Wang, H., Zheng, Y., & Xue, X. (2018).
- Arbitrary-oriented scene text detection via rotation proposals.
- IEEE Transactions on Multimedia, 20(11), 3111-3122.
+ Rotated Region Proposal Network described in :paper:`RRPN`.
"""

def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):
4 changes: 4 additions & 0 deletions detectron2/modeling/roi_heads/cascade_rcnn.py
@@ -28,6 +28,10 @@ def backward(ctx, grad_output):

@ROI_HEADS_REGISTRY.register()
class CascadeROIHeads(StandardROIHeads):
"""
Implement :paper:`Cascade R-CNN`.
"""

def _init_box_head(self, cfg, input_shape):
# fmt: off
pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION
2 changes: 1 addition & 1 deletion detectron2/modeling/roi_heads/keypoint_head.py
@@ -118,7 +118,7 @@ def keypoint_rcnn_inference(pred_keypoint_logits, pred_instances):

class BaseKeypointRCNNHead(nn.Module):
"""
- Implement the basic Keypoint R-CNN losses and inference logic.
+ Implement the basic Keypoint R-CNN losses and inference logic described in :paper:`Mask R-CNN`.
"""

@configurable
2 changes: 1 addition & 1 deletion detectron2/modeling/roi_heads/mask_head.py
@@ -145,7 +145,7 @@ def mask_rcnn_inference(pred_mask_logits, pred_instances):

class BaseMaskRCNNHead(nn.Module):
"""
- Implement the basic Mask R-CNN losses and inference logic.
+ Implement the basic Mask R-CNN losses and inference logic described in :paper:`Mask R-CNN`
"""

@configurable
2 changes: 1 addition & 1 deletion detectron2/solver/lr_scheduler.py
@@ -92,7 +92,7 @@ def _get_warmup_factor_at_iter(
) -> float:
"""
Return the learning rate warmup factor at a specific iteration.
- See https://arxiv.org/abs/1706.02677 for more details.
+ See :paper:`in1k1h` for more details.
Args:
method (str): warmup method; either "constant" or "linear".
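For reference, the warmup behavior described in this docstring can be sketched as below; parameter names are illustrative and the exact detectron2 defaults are not shown.

```python
def warmup_factor_at_iter(method: str, it: int, warmup_iters: int, warmup_factor: float) -> float:
    """Multiplier applied to the base learning rate during warmup."""
    if it >= warmup_iters:
        return 1.0
    if method == "constant":
        return warmup_factor
    if method == "linear":
        # ramp linearly from warmup_factor at iteration 0 up to 1.0 at warmup_iters
        alpha = it / warmup_iters
        return warmup_factor * (1 - alpha) + alpha
    raise ValueError(f"Unknown warmup method: {method}")
```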
6 changes: 3 additions & 3 deletions detectron2/utils/events.py
@@ -182,7 +182,7 @@ def write(self):
# or when SimpleTrainer is not used
data_time = None

eta_string = "N/A"
eta_string = None
try:
iter_time = storage.history("time").global_avg()
eta_seconds = storage.history("time").median(1000) * (self._max_iter - iteration)
@@ -211,8 +211,8 @@ def write(self):

# NOTE: max_mem is parsed by grep in "dev/parse_results.sh"
self.logger.info(
" eta: {eta} iter: {iter} {losses} {time}{data_time}lr: {lr} {memory}".format(
eta=eta_string,
" {eta}iter: {iter} {losses} {time}{data_time}lr: {lr} {memory}".format(
eta=f"eta: {eta_string} " if eta_string else "",
iter=iteration,
losses=" ".join(
[
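The effect of the events.py change, sketched on its own: the eta segment is included only when an estimate exists, instead of printing a literal "eta: N/A" (other log fields omitted here).

```python
def format_line(eta_string, iteration):
    # mirrors the conditional formatting in the diff above
    return " {eta}iter: {iter}".format(
        eta=f"eta: {eta_string} " if eta_string else "",
        iter=iteration,
    )

print(format_line("0:05:30", 100))  # " eta: 0:05:30 iter: 100"
print(format_line(None, 100))       # " iter: 100"
```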