ModelZoo / ResNet50_tensorflow · Commits · 9c0d7874

Commit 9c0d7874, authored Nov 17, 2021 by Fan Yang; committed by A. Unique TensorFlower, Nov 17, 2021.

Internal change

PiperOrigin-RevId: 410666850
Parent: c32ce7cf
Showing 6 changed files with 232 additions and 19 deletions:
  official/vision/beta/evaluation/coco_utils_test.py                  +49  -0
  official/vision/beta/evaluation/iou_test.py                         +16  -0
  official/vision/beta/evaluation/panoptic_quality_evaluator_test.py  +12  -6
  official/vision/beta/evaluation/segmentation_metrics.py             +8   -13
  official/vision/beta/evaluation/segmentation_metrics_test.py        +77  -0
  official/vision/beta/evaluation/wod_detection_evaluator_test.py     +70  -0
official/vision/beta/evaluation/coco_utils_test.py (new file, 0 → 100644)
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for coco_utils."""
import
os
import
tensorflow
as
tf
from
official.vision.beta.dataloaders
import
tfexample_utils
from
official.vision.beta.evaluation
import
coco_utils
class
CocoUtilsTest
(
tf
.
test
.
TestCase
):
def
test_scan_and_generator_annotation_file
(
self
):
num_samples
=
10
example
=
tfexample_utils
.
create_detection_test_example
(
image_height
=
512
,
image_width
=
512
,
image_channel
=
3
,
num_instances
=
10
)
tf_examples
=
[
example
]
*
num_samples
data_file
=
os
.
path
.
join
(
self
.
create_tempdir
(),
'test.tfrecord'
)
tfexample_utils
.
dump_to_tfrecord
(
record_file
=
data_file
,
tf_examples
=
tf_examples
)
annotation_file
=
os
.
path
.
join
(
self
.
create_tempdir
(),
'annotation.json'
)
coco_utils
.
scan_and_generator_annotation_file
(
file_pattern
=
data_file
,
file_type
=
'tfrecord'
,
num_samples
=
num_samples
,
include_mask
=
True
,
annotation_file
=
annotation_file
)
self
.
assertTrue
(
tf
.
io
.
gfile
.
exists
(
annotation_file
),
msg
=
'Annotation file {annotation_file} does not exists.'
)
if
__name__
==
'__main__'
:
tf
.
test
.
main
()
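The test above only checks that the annotation file exists; a minimal sketch of how the generated JSON could be inspected further, assuming it follows the standard COCO layout ('images', 'annotations', 'categories') — the path below is hypothetical and not part of the commit:

import json

import tensorflow as tf

# Hypothetical path; in the test above the file lives in a temp directory.
annotation_file = '/tmp/annotation.json'
with tf.io.gfile.GFile(annotation_file, 'r') as f:
  annotation = json.load(f)
# Assumed COCO-style top-level keys; verify against coco_utils before relying on this.
print(sorted(annotation.keys()))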
official/vision/beta/evaluation/iou_test.py
@@ -95,5 +95,21 @@ class MeanIoUTest(tf.test.TestCase):
     expected_result = [0, 1 / (1 + 1 - 1)]
     self.assertAllClose(expected_result, result, atol=1e-3)
 
+  def test_update_state_annd_result(self):
+    y_pred = [0, 1, 0, 1]
+    y_true = [0, 0, 1, 1]
+
+    m_obj = iou.PerClassIoU(num_classes=2)
+    m_obj.update_state(y_true, y_pred)
+    result = m_obj.result()
+
+    # cm = [[1, 1],
+    #       [1, 1]]
+    # sum_row = [2, 2], sum_col = [2, 2], true_positives = [1, 1]
+    # iou = true_positives / (sum_row + sum_col - true_positives))
+    expected_result = [1 / (2 + 2 - 1), 1 / (2 + 2 - 1)]
+    self.assertAllClose(expected_result, result, atol=1e-3)
+
 
 if __name__ == '__main__':
   tf.test.main()
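The in-code comment above gives the per-class IoU formula; the following standalone NumPy sketch (not part of the commit) reproduces the expected [1/3, 1/3] result from the same labels:

import numpy as np

y_true = np.array([0, 0, 1, 1])
y_pred = np.array([0, 1, 0, 1])
num_classes = 2

# Confusion matrix: cm[i, j] counts samples with true class i predicted as j.
cm = np.zeros((num_classes, num_classes), dtype=np.int64)
np.add.at(cm, (y_true, y_pred), 1)   # cm == [[1, 1], [1, 1]]

sum_row = cm.sum(axis=0)             # predicted count per class: [2, 2]
sum_col = cm.sum(axis=1)             # groundtruth count per class: [2, 2]
true_positives = np.diag(cm)         # [1, 1]
iou = true_positives / (sum_row + sum_col - true_positives)
print(iou)                           # [0.33333333 0.33333333]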
official/vision/beta/evaluation/panoptic_quality_evaluator_test.py
@@ -45,19 +45,25 @@ class PanopticQualityEvaluatorTest(tf.test.TestCase):
         dtype=np.uint16)
     groundtruths = {
-        'category_mask': tf.convert_to_tensor(category_mask),
-        'instance_mask': tf.convert_to_tensor(groundtruth_instance_mask)
+        'category_mask': tf.convert_to_tensor([category_mask]),
+        'instance_mask': tf.convert_to_tensor([groundtruth_instance_mask]),
+        'image_info': tf.convert_to_tensor(
+            [[[6, 6], [6, 6], [1.0, 1.0], [0, 0]]], dtype=tf.float32)
     }
     predictions = {
-        'category_mask': tf.convert_to_tensor(category_mask),
-        'instance_mask': tf.convert_to_tensor(good_det_instance_mask)
+        'category_mask': tf.convert_to_tensor([category_mask]),
+        'instance_mask': tf.convert_to_tensor([good_det_instance_mask])
     }
     pq_evaluator = panoptic_quality_evaluator.PanopticQualityEvaluator(
         num_categories=1, ignored_label=2, max_instances_per_category=16,
-        offset=16)
+        offset=16, rescale_predictions=True)
     for _ in range(2):
       pq_evaluator.update_state(groundtruths, predictions)
@@ -70,7 +76,7 @@ class PanopticQualityEvaluatorTest(tf.test.TestCase):
         [1, 1, 1, 1, 1, 1],
     ],
                                      dtype=np.uint16)
-    predictions['instance_mask'] = tf.convert_to_tensor(bad_det_instance_mask)
+    predictions['instance_mask'] = tf.convert_to_tensor([bad_det_instance_mask])
     for _ in range(2):
       pq_evaluator.update_state(groundtruths, predictions)
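The new 'image_info' entry follows the [4, 2] per-image convention used elsewhere in official/vision; the layout annotated below is an assumption based on that convention rather than something stated in this diff:

import tensorflow as tf

# Assumed row layout (per the official/vision convention, not stated here):
#   [[original_height, original_width],
#    [desired_height,  desired_width],
#    [y_scale,         x_scale],
#    [y_offset,        x_offset]]
image_info = tf.constant([[[6, 6], [6, 6], [1.0, 1.0], [0, 0]]], dtype=tf.float32)
original_size, desired_size, scale, offset = tf.unstack(image_info[0], axis=0)
print(original_size.numpy(), desired_size.numpy(), scale.numpy(), offset.numpy())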
official/vision/beta/evaluation/segmentation_metrics.py
@@ -41,8 +41,7 @@ class MeanIoU(tf.keras.metrics.MeanIoU):
       dtype: data type of the metric result.
     """
     self._rescale_predictions = rescale_predictions
-    super(MeanIoU, self).__init__(
-        num_classes=num_classes, name=name, dtype=dtype)
+    super().__init__(num_classes=num_classes, name=name, dtype=dtype)
 
   def update_state(self, y_true, y_pred):
     """Updates metric state.
@@ -120,8 +119,7 @@ class MeanIoU(tf.keras.metrics.MeanIoU):
       flatten_masks = tf.reshape(masks, shape=[-1])
       flatten_valid_masks = tf.reshape(valid_masks, shape=[-1])
-      super(MeanIoU, self).update_state(flatten_masks, flatten_predictions,
+      super().update_state(flatten_masks, flatten_predictions,
                            tf.cast(flatten_valid_masks, tf.float32))
@@ -148,8 +146,7 @@ class PerClassIoU(iou.PerClassIoU):
       dtype: data type of the metric result.
     """
     self._rescale_predictions = rescale_predictions
-    super(PerClassIoU, self).__init__(
-        num_classes=num_classes, name=name, dtype=dtype)
+    super().__init__(num_classes=num_classes, name=name, dtype=dtype)
 
   def update_state(self, y_true, y_pred):
     """Updates metric state.
@@ -213,8 +210,7 @@ class PerClassIoU(iou.PerClassIoU):
         flatten_predictions = tf.reshape(predicted_mask, shape=[1, -1])
         flatten_masks = tf.reshape(mask, shape=[1, -1])
         flatten_valid_masks = tf.reshape(valid_mask, shape=[1, -1])
-        super(PerClassIoU, self).update_state(flatten_masks, flatten_predictions,
+        super().update_state(flatten_masks, flatten_predictions,
                              tf.cast(flatten_valid_masks, tf.float32))
     else:
@@ -227,6 +223,5 @@ class PerClassIoU(iou.PerClassIoU):
       flatten_masks = tf.reshape(masks, shape=[-1])
       flatten_valid_masks = tf.reshape(valid_masks, shape=[-1])
-      super(PerClassIoU, self).update_state(flatten_masks, flatten_predictions,
+      super().update_state(flatten_masks, flatten_predictions,
                            tf.cast(flatten_valid_masks, tf.float32))
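The substantive change in this file is switching from the explicit two-argument super(Class, self) form to the zero-argument Python 3 super(); the two are equivalent inside a method body, as the small standalone example below (not from the repository) illustrates:

class Base:

  def update_state(self, value):
    return value * 2


class Child(Base):

  def update_state(self, value):
    # Inside a Python 3 method body these two calls resolve identically.
    assert super(Child, self).update_state(value) == super().update_state(value)
    return super().update_state(value)


print(Child().update_state(3))  # 6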
official/vision/beta/evaluation/segmentation_metrics_test.py (new file, 0 → 100644)
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for segmentation_metrics."""
from
absl.testing
import
parameterized
import
numpy
as
np
import
tensorflow
as
tf
from
official.vision.beta.evaluation
import
segmentation_metrics
class
SegmentationMetricsTest
(
parameterized
.
TestCase
,
tf
.
test
.
TestCase
):
def
_create_test_data
(
self
):
y_pred_cls0
=
np
.
expand_dims
(
np
.
array
([[
1
,
1
,
0
],
[
1
,
1
,
0
],
[
0
,
0
,
0
]],
dtype
=
np
.
uint16
),
axis
=
(
0
,
-
1
))
y_pred_cls1
=
np
.
expand_dims
(
np
.
array
([[
0
,
0
,
0
],
[
0
,
0
,
1
],
[
0
,
0
,
1
]],
dtype
=
np
.
uint16
),
axis
=
(
0
,
-
1
))
y_pred
=
np
.
concatenate
((
y_pred_cls0
,
y_pred_cls1
),
axis
=-
1
)
y_true
=
{
'masks'
:
np
.
expand_dims
(
np
.
array
([[
0
,
0
,
0
,
0
,
0
,
0
],
[
0
,
0
,
0
,
0
,
0
,
0
],
[
0
,
0
,
0
,
0
,
0
,
0
],
[
0
,
0
,
0
,
1
,
1
,
1
],
[
0
,
0
,
0
,
1
,
1
,
1
],
[
0
,
0
,
0
,
1
,
1
,
1
]],
dtype
=
np
.
uint16
),
axis
=
(
0
,
-
1
)),
'valid_masks'
:
np
.
ones
([
1
,
6
,
6
,
1
],
dtype
=
np
.
uint16
),
'image_info'
:
np
.
array
([[[
6
,
6
],
[
3
,
3
],
[
0.5
,
0.5
],
[
0
,
0
]]],
dtype
=
np
.
float32
)
}
return
y_pred
,
y_true
@
parameterized
.
parameters
(
True
,
False
)
def
test_mean_iou_metric
(
self
,
rescale_predictions
):
tf
.
config
.
experimental_run_functions_eagerly
(
True
)
mean_iou_metric
=
segmentation_metrics
.
MeanIoU
(
num_classes
=
2
,
rescale_predictions
=
rescale_predictions
)
y_pred
,
y_true
=
self
.
_create_test_data
()
# Disable autograph for correct coverage statistics.
update_fn
=
tf
.
autograph
.
experimental
.
do_not_convert
(
mean_iou_metric
.
update_state
)
update_fn
(
y_true
=
y_true
,
y_pred
=
y_pred
)
miou
=
mean_iou_metric
.
result
()
self
.
assertAlmostEqual
(
miou
.
numpy
(),
0.762
,
places
=
3
)
@
parameterized
.
parameters
(
True
,
False
)
def
test_per_class_mean_iou_metric
(
self
,
rescale_predictions
):
per_class_iou_metric
=
segmentation_metrics
.
PerClassIoU
(
num_classes
=
2
,
rescale_predictions
=
rescale_predictions
)
y_pred
,
y_true
=
self
.
_create_test_data
()
# Disable autograph for correct coverage statistics.
update_fn
=
tf
.
autograph
.
experimental
.
do_not_convert
(
per_class_iou_metric
.
update_state
)
update_fn
(
y_true
=
y_true
,
y_pred
=
y_pred
)
per_class_miou
=
per_class_iou_metric
.
result
()
self
.
assertAllClose
(
per_class_miou
.
numpy
(),
[
0.857
,
0.667
],
atol
=
1e-3
)
if
__name__
==
'__main__'
:
tf
.
test
.
main
()
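Both tests rely on absl's parameterized decorator to run each case with rescale_predictions set to True and then False; a minimal standalone sketch of that pattern (hypothetical test class, not part of the commit):

from absl.testing import parameterized
import tensorflow as tf


class RescaleFlagTest(parameterized.TestCase, tf.test.TestCase):

  @parameterized.parameters(True, False)
  def test_flag_is_boolean(self, rescale_predictions):
    # Runs twice: once with True, once with False.
    self.assertIsInstance(rescale_predictions, bool)


if __name__ == '__main__':
  tf.test.main()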
official/vision/beta/evaluation/wod_detection_evaluator_test.py (new file, 0 → 100644)
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for wod_detection_evaluator."""
import
tensorflow
as
tf
from
official.vision.beta.evaluation
import
wod_detection_evaluator
class
WodDetectionEvaluatorTest
(
tf
.
test
.
TestCase
):
def
_create_test_data
(
self
):
y_pred
=
{
'source_id'
:
tf
.
convert_to_tensor
([
1
],
dtype
=
tf
.
int64
),
'image_info'
:
tf
.
convert_to_tensor
([[[
100
,
100
],
[
50
,
50
],
[
0.5
,
0.5
],
[
0
,
0
]]],
dtype
=
tf
.
float32
),
'num_detections'
:
tf
.
convert_to_tensor
([
4
],
dtype
=
tf
.
int64
),
'detection_boxes'
:
tf
.
convert_to_tensor
(
[[[
0.1
,
0.15
,
0.2
,
0.25
],
[
0.35
,
0.18
,
0.43
,
0.4
],
[
0.2
,
0.1
,
0.3
,
0.2
],
[
0.65
,
0.55
,
0.75
,
0.85
]]],
dtype
=
tf
.
float32
),
'detection_classes'
:
tf
.
convert_to_tensor
([[
1
,
1
,
2
,
2
]],
dtype
=
tf
.
int64
),
'detection_scores'
:
tf
.
convert_to_tensor
([[
0.95
,
0.5
,
0.1
,
0.7
]],
dtype
=
tf
.
float32
)
}
y_true
=
{
'source_id'
:
tf
.
convert_to_tensor
([
1
],
dtype
=
tf
.
int64
),
'num_detections'
:
tf
.
convert_to_tensor
([
4
],
dtype
=
tf
.
int64
),
'boxes'
:
tf
.
convert_to_tensor
([[[
0.1
,
0.15
,
0.2
,
0.25
],
[
0.3
,
0.2
,
0.4
,
0.3
],
[
0.4
,
0.3
,
0.5
,
0.6
],
[
0.6
,
0.5
,
0.7
,
0.8
]]],
dtype
=
tf
.
float32
),
'classes'
:
tf
.
convert_to_tensor
([[
1
,
1
,
1
,
2
]],
dtype
=
tf
.
int64
),
'difficulties'
:
tf
.
zeros
([
1
,
4
],
dtype
=
tf
.
int64
)
}
return
y_pred
,
y_true
def
test_wod_detection_evaluator
(
self
):
wod_detection_metric
=
wod_detection_evaluator
.
WOD2dDetectionEvaluator
()
y_pred
,
y_true
=
self
.
_create_test_data
()
wod_detection_metric
.
update_state
(
groundtruths
=
y_true
,
predictions
=
y_pred
)
metrics
=
wod_detection_metric
.
evaluate
()
for
_
,
metric_value
in
metrics
.
items
():
self
.
assertAlmostEqual
(
metric_value
.
numpy
(),
0.0
,
places
=
3
)
if
__name__
==
'__main__'
:
tf
.
test
.
main
()
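In the test data above, the first predicted box coincides exactly with the first groundtruth box; a small NumPy sketch of box IoU, assuming the usual normalized [ymin, xmin, ymax, xmax] layout (an assumption, not stated by this diff):

import numpy as np


def box_iou(a, b):
  # a, b: [ymin, xmin, ymax, xmax] in normalized coordinates (assumed layout).
  inter_h = max(0.0, min(a[2], b[2]) - max(a[0], b[0]))
  inter_w = max(0.0, min(a[3], b[3]) - max(a[1], b[1]))
  inter = inter_h * inter_w
  area_a = (a[2] - a[0]) * (a[3] - a[1])
  area_b = (b[2] - b[0]) * (b[3] - b[1])
  return inter / (area_a + area_b - inter)


pred = np.array([0.1, 0.15, 0.2, 0.25])
gt = np.array([0.1, 0.15, 0.2, 0.25])
print(box_iou(pred, gt))  # 1.0 -- the boxes coincide exactly.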