Unverified Commit 5ba57eae authored by eellison, committed by GitHub

Clean up no longer needed workarounds (#2261)


Co-authored-by: eellison <eellison@fb.com>
parent 3974cfeb
@@ -6,13 +6,6 @@ from torch import Tensor
 import torchvision
 
-# TODO: https://github.com/pytorch/pytorch/issues/26727
-def zeros_like(tensor, dtype):
-    # type: (Tensor, int) -> Tensor
-    return torch.zeros_like(tensor, dtype=dtype, layout=tensor.layout,
-                            device=tensor.device, pin_memory=tensor.is_pinned())
-
 class BalancedPositiveNegativeSampler(object):
     """
     This class samples batches, ensuring that they contain a fixed proportion of positives
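The local `zeros_like` helper removed above existed only as a TorchScript workaround for the linked PyTorch issue (pytorch/pytorch#26727); with that resolved upstream, the call sites below use `torch.zeros_like` with an explicit `dtype` directly. Below is a minimal sketch of why the wrapper is no longer needed, assuming a PyTorch build that includes the upstream fix; the function name `make_uint8_mask` is illustrative and not part of this commit:

```python
import torch

# Sketch only: with the upstream TorchScript fix in place, torch.zeros_like
# can be scripted directly with a dtype keyword, so no local wrapper is needed.
@torch.jit.script
def make_uint8_mask(matched_idxs: torch.Tensor) -> torch.Tensor:
    return torch.zeros_like(matched_idxs, dtype=torch.uint8)

print(make_uint8_mask(torch.randn(4)).dtype)  # torch.uint8
```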
@@ -66,15 +59,15 @@ class BalancedPositiveNegativeSampler(object):
             neg_idx_per_image = negative[perm2]
 
             # create binary mask from indices
-            pos_idx_per_image_mask = zeros_like(
+            pos_idx_per_image_mask = torch.zeros_like(
                 matched_idxs_per_image, dtype=torch.uint8
             )
-            neg_idx_per_image_mask = zeros_like(
+            neg_idx_per_image_mask = torch.zeros_like(
                 matched_idxs_per_image, dtype=torch.uint8
             )
-            pos_idx_per_image_mask[pos_idx_per_image] = torch.tensor(1, dtype=torch.uint8)
-            neg_idx_per_image_mask[neg_idx_per_image] = torch.tensor(1, dtype=torch.uint8)
+            pos_idx_per_image_mask[pos_idx_per_image] = 1
+            neg_idx_per_image_mask[neg_idx_per_image] = 1
 
             pos_idx.append(pos_idx_per_image_mask)
             neg_idx.append(neg_idx_per_image_mask)
@@ -304,8 +297,8 @@ class Matcher(object):
         between_thresholds = (matched_vals >= self.low_threshold) & (
             matched_vals < self.high_threshold
         )
-        matches[below_low_threshold] = torch.tensor(self.BELOW_LOW_THRESHOLD)
-        matches[between_thresholds] = torch.tensor(self.BETWEEN_THRESHOLDS)
+        matches[below_low_threshold] = self.BELOW_LOW_THRESHOLD
+        matches[between_thresholds] = self.BETWEEN_THRESHOLDS
 
         if self.allow_low_quality_matches:
             assert all_matches is not None
...
@@ -155,8 +155,8 @@ def keypoints_to_heatmap(keypoints, rois, heatmap_size):
     y = (y - offset_y) * scale_y
     y = y.floor().long()
 
-    x[x_boundary_inds] = torch.tensor(heatmap_size - 1)
-    y[y_boundary_inds] = torch.tensor(heatmap_size - 1)
+    x[x_boundary_inds] = heatmap_size - 1
+    y[y_boundary_inds] = heatmap_size - 1
 
     valid_loc = (x >= 0) & (y >= 0) & (x < heatmap_size) & (y < heatmap_size)
     vis = keypoints[..., 2] > 0
@@ -584,11 +584,11 @@ class RoIHeads(torch.nn.Module):
                 # Label background (below the low threshold)
                 bg_inds = matched_idxs_in_image == self.proposal_matcher.BELOW_LOW_THRESHOLD
-                labels_in_image[bg_inds] = torch.tensor(0)
+                labels_in_image[bg_inds] = 0
 
                 # Label ignore proposals (between low and high thresholds)
                 ignore_inds = matched_idxs_in_image == self.proposal_matcher.BETWEEN_THRESHOLDS
-                labels_in_image[ignore_inds] = torch.tensor(-1)  # -1 is ignored by sampler
+                labels_in_image[ignore_inds] = -1  # -1 is ignored by sampler
 
             matched_idxs.append(clamped_matched_idxs_in_image)
             labels.append(labels_in_image)
...
@@ -350,11 +350,11 @@ class RegionProposalNetwork(torch.nn.Module):
                 # Background (negative examples)
                 bg_indices = matched_idxs == self.proposal_matcher.BELOW_LOW_THRESHOLD
-                labels_per_image[bg_indices] = torch.tensor(0.0)
+                labels_per_image[bg_indices] = 0.0
 
                 # discard indices that are between thresholds
                 inds_to_discard = matched_idxs == self.proposal_matcher.BETWEEN_THRESHOLDS
-                labels_per_image[inds_to_discard] = torch.tensor(-1.0)
+                labels_per_image[inds_to_discard] = -1.0
 
             labels.append(labels_per_image)
             matched_gt_boxes.append(matched_gt_boxes_per_image)
...
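The remaining hunks all drop the same pattern: wrapping a scalar in `torch.tensor(...)` before an indexed assignment, which was only needed so the code would script. Below is a minimal sketch of the simplified form, assuming a PyTorch version where scripted indexed assignment accepts plain Python scalars; `label_background` is an illustrative name, not a function from this repository:

```python
import torch

# Sketch only: under a sufficiently recent TorchScript, assigning a plain
# Python scalar through a boolean mask works without torch.tensor(...).
@torch.jit.script
def label_background(labels: torch.Tensor, bg_mask: torch.Tensor) -> torch.Tensor:
    labels[bg_mask] = 0.0  # previously written as labels[bg_mask] = torch.tensor(0.0)
    return labels

labels = torch.ones(5)
bg_mask = torch.tensor([True, False, True, False, False])
print(label_background(labels, bg_mask))  # tensor([0., 1., 0., 1., 1.])
```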