Commit e7ff1bec authored by Vishnu Banna

pre proc ops test

parent 8a5c229d
@@ -19,7 +19,7 @@ class Parser(parser.Parser):
                output_size,
                anchors,
                expanded_strides,
-               anchor_free_limits=None,
+               level_limit=None,
                max_num_instances=200,
                area_thresh=0.1,
                aug_rand_hue=1.0,
@@ -49,7 +49,7 @@ class Parser(parser.Parser):
       anchors: `Dict[List[Union[int, float]]]` values for each anchor box.
       expanded_strides: `Dict[int]` for how much the model scales down the
         images at the largest level.
-      anchor_free_limits: `List` the box sizes that will be allowed at each FPN
+      level_limit: `List` the box sizes that will be allowed at each FPN
         level as is done in the FCOS and YOLOX paper for anchor free box
         assignment. Anchor free will perform worse than Anchor based, but only
         slightly.
@@ -117,7 +117,7 @@ class Parser(parser.Parser):
     # Set the anchor boxes for each scale
     self._anchors = anchors
-    self._anchor_free_limits = anchor_free_limits
+    self._level_limit = level_limit

     # anchor labeling paramters
     self._use_tie_breaker = use_tie_breaker
@@ -150,7 +150,7 @@ class Parser(parser.Parser):
     keys = list(self._anchors.keys())
-    if self._anchor_free_limits is not None:
+    if self._level_limit is not None:
       maxim = 2000
       self._scale_up = {key: maxim // self._max_num_instances for key in keys}
       self._anchor_t = -0.01
@@ -330,13 +330,13 @@ class Parser(parser.Parser):
     updates = {}
     true_grids = {}
-    if self._anchor_free_limits is not None:
-      self._anchor_free_limits = [0.0] + self._anchor_free_limits + [np.inf]
+    if self._level_limit is not None:
+      self._level_limit = [0.0] + self._level_limit + [np.inf]

     # for each prediction path generate a properly scaled output prediction map
     for i, key in enumerate(self._anchors.keys()):
-      if self._anchor_free_limits is not None:
-        fpn_limits = self._anchor_free_limits[i:i + 2]
+      if self._level_limit is not None:
+        fpn_limits = self._level_limit[i:i + 2]
       else:
         fpn_limits = None
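For context on the renamed argument: in the last hunk, `level_limit` (formerly `anchor_free_limits`) is padded with `0.0` and `np.inf` and then sliced pairwise, so FPN level `i` receives `level_limit[i:i + 2]` as its allowed box-size range. The sketch below is not part of the commit; the helper name `per_level_limits` is made up purely to illustrate that padding-and-slicing step in isolation.

# Minimal sketch, not repository code: a hypothetical helper that mirrors how
# the diff pads and slices `level_limit` into per-FPN-level (low, high) bounds.
import numpy as np

def per_level_limits(level_limit, num_levels):
  """Expand box-size cutoffs into one (low, high) pair per FPN level."""
  if level_limit is None:
    # Anchor-based path: no per-level size restriction.
    return [None] * num_levels
  limits = [0.0] + list(level_limit) + [np.inf]
  # Level i keeps boxes whose size falls between limits[i] and limits[i + 1].
  return [limits[i:i + 2] for i in range(num_levels)]

# Example with 3 FPN levels and cutoffs at 64 and 128 pixels, in the style of
# the FCOS/YOLOX anchor-free assignment referenced in the docstring above.
print(per_level_limits([64, 128], 3))
# -> [[0.0, 64], [64, 128], [128, inf]]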