"git@developer.sourcefind.cn:OpenDAS/autoawq.git" did not exist on "e09dc751012f4c1036ae4045edeb92a74ba6e043"
Commit b0a6416e authored by anivegesana's avatar anivegesana
Browse files

"BackBone"

parent b05dfa31
...@@ -64,6 +64,7 @@ class RevNet(hyperparams.Config): ...@@ -64,6 +64,7 @@ class RevNet(hyperparams.Config):
# Specifies the depth of RevNet. # Specifies the depth of RevNet.
model_id: int = 56 model_id: int = 56
from official.vision.beta.projects.yolo.configs.backbones import DarkNet
@dataclasses.dataclass @dataclasses.dataclass
class Backbone(hyperparams.OneOfConfig): class Backbone(hyperparams.OneOfConfig):
...@@ -85,3 +86,4 @@ class Backbone(hyperparams.OneOfConfig): ...@@ -85,3 +86,4 @@ class Backbone(hyperparams.OneOfConfig):
efficientnet: EfficientNet = EfficientNet() efficientnet: EfficientNet = EfficientNet()
spinenet: SpineNet = SpineNet() spinenet: SpineNet = SpineNet()
mobilenet: MobileNet = MobileNet() mobilenet: MobileNet = MobileNet()
darknet: DarkNet = DarkNet()
...@@ -33,11 +33,13 @@ class Decoder(decoder.Decoder): ...@@ -33,11 +33,13 @@ class Decoder(decoder.Decoder):
'image/class/label': ( 'image/class/label': (
tf.io.FixedLenFeature((), tf.int64, default_value=-1)) tf.io.FixedLenFeature((), tf.int64, default_value=-1))
} }
'''
def decode(self, serialized_example): def decode(self, serialized_example):
return tf.io.parse_single_example( return tf.io.parse_single_example(
serialized_example, self._keys_to_features) serialized_example, self._keys_to_features)
'''
def decode(self, data):
  """Remap a TFDS example dict to the key names expected by the parser.

  Args:
    data: dict with 'image' and 'label' entries (TFDS image-classification
      example format).

  Returns:
    Dict with 'image/encoded' and 'image/class/label' keys.
    NOTE(review): despite the key name, 'image/encoded' appears to carry the
    already-decoded TFDS image tensor here (the parser casts it directly to
    float32 instead of JPEG-decoding it) — confirm against the parser.
  """
  return {'image/encoded': data['image'], 'image/class/label': data['label']}
class Parser(parser.Parser): class Parser(parser.Parser):
"""Parser to parse an image and its annotations into a dictionary of tensors.""" """Parser to parse an image and its annotations into a dictionary of tensors."""
...@@ -72,11 +74,11 @@ class Parser(parser.Parser): ...@@ -72,11 +74,11 @@ class Parser(parser.Parser):
def _parse_train_data(self, decoded_tensors): def _parse_train_data(self, decoded_tensors):
"""Parses data for training.""" """Parses data for training."""
label = tf.cast(decoded_tensors['image/class/label'], dtype=tf.int32) label = tf.cast(decoded_tensors['image/class/label'], dtype=tf.int32)
'''
image_bytes = decoded_tensors['image/encoded'] image_bytes = decoded_tensors['image/encoded']
image_shape = tf.image.extract_jpeg_shape(image_bytes) image_shape = tf.image.extract_jpeg_shape(image_bytes)
# Crops image. # Crops image.
# TODO(pengchong): support image format other than JPEG. # TODO(pengchong): support image format other than JPEG.
cropped_image = preprocess_ops.random_crop_image_v2( cropped_image = preprocess_ops.random_crop_image_v2(
...@@ -85,7 +87,8 @@ class Parser(parser.Parser): ...@@ -85,7 +87,8 @@ class Parser(parser.Parser):
tf.reduce_all(tf.equal(tf.shape(cropped_image), image_shape)), tf.reduce_all(tf.equal(tf.shape(cropped_image), image_shape)),
lambda: preprocess_ops.center_crop_image_v2(image_bytes, image_shape), lambda: preprocess_ops.center_crop_image_v2(image_bytes, image_shape),
lambda: cropped_image) lambda: cropped_image)
'''
image = tf.cast(decoded_tensors['image/encoded'], tf.float32)
if self._aug_rand_hflip: if self._aug_rand_hflip:
image = tf.image.random_flip_left_right(image) image = tf.image.random_flip_left_right(image)
...@@ -106,12 +109,14 @@ class Parser(parser.Parser): ...@@ -106,12 +109,14 @@ class Parser(parser.Parser):
def _parse_eval_data(self, decoded_tensors): def _parse_eval_data(self, decoded_tensors):
"""Parses data for evaluation.""" """Parses data for evaluation."""
label = tf.cast(decoded_tensors['image/class/label'], dtype=tf.int32) label = tf.cast(decoded_tensors['image/class/label'], dtype=tf.int32)
'''
image_bytes = decoded_tensors['image/encoded'] image_bytes = decoded_tensors['image/encoded']
image_shape = tf.image.extract_jpeg_shape(image_bytes) image_shape = tf.image.extract_jpeg_shape(image_bytes)
# Center crops and resizes image. # Center crops and resizes image.
image = preprocess_ops.center_crop_image_v2(image_bytes, image_shape) image = preprocess_ops.center_crop_image_v2(image_bytes, image_shape)
'''
image = tf.cast(decoded_tensors['image/encoded'], tf.float32)
image = tf.image.resize( image = tf.image.resize(
image, self._output_size, method=tf.image.ResizeMethod.BILINEAR) image, self._output_size, method=tf.image.ResizeMethod.BILINEAR)
......
...@@ -20,3 +20,4 @@ from official.utils.testing import mock_task ...@@ -20,3 +20,4 @@ from official.utils.testing import mock_task
from official.vision import beta from official.vision import beta
from official.vision.beta.projects import yolo from official.vision.beta.projects import yolo
from official.vision.beta.projects.yolo.modeling.backbones import Darknet
...@@ -4,7 +4,7 @@ import dataclasses ...@@ -4,7 +4,7 @@ import dataclasses
from typing import Optional from typing import Optional
from official.modeling import hyperparams from official.modeling import hyperparams
from official.vision.beta.configs import backbones # from official.vision.beta.configs import backbones
@dataclasses.dataclass @dataclasses.dataclass
...@@ -13,6 +13,6 @@ class DarkNet(hyperparams.Config): ...@@ -13,6 +13,6 @@ class DarkNet(hyperparams.Config):
model_id: str = "darknet53" model_id: str = "darknet53"
@dataclasses.dataclass # @dataclasses.dataclass
class Backbone(backbones.Backbone): # class Backbone(backbones.Backbone):
darknet: DarkNet = DarkNet() # darknet: DarkNet = DarkNet()
...@@ -19,23 +19,23 @@ task: ...@@ -19,23 +19,23 @@ task:
tfds_split: 'test' tfds_split: 'test'
tfds_download: True tfds_download: True
is_training: True is_training: True
global_batch_size: 128 global_batch_size: 2
dtype: 'float16' dtype: 'float16'
validation_data: validation_data:
tfds_name: 'imagenet_a' tfds_name: 'imagenet_a'
tfds_split: 'test' tfds_split: 'test'
tfds_download: True tfds_download: True
is_training: False is_training: False
global_batch_size: 128 global_batch_size: 2
dtype: 'float16' dtype: 'float16'
drop_remainder: False drop_remainder: False
trainer: trainer:
train_steps: 800000 # in the paper train_steps: 51200000 # in the paper
validation_steps: 400 # size of validation data validation_steps: 25600 # size of validation data
validation_interval: 10000 validation_interval: 150
steps_per_loop: 10000 steps_per_loop: 150
summary_interval: 10000 summary_interval: 150
checkpoint_interval: 10000 checkpoint_interval: 150
optimizer_config: optimizer_config:
optimizer: optimizer:
type: 'sgd' type: 'sgd'
...@@ -47,8 +47,8 @@ trainer: ...@@ -47,8 +47,8 @@ trainer:
initial_learning_rate: 0.1 initial_learning_rate: 0.1
end_learning_rate: 0.0001 end_learning_rate: 0.0001
power: 4.0 power: 4.0
decay_steps: 799000 decay_steps: 51136000
warmup: warmup:
type: 'linear' type: 'linear'
linear: linear:
warmup_steps: 1000 #lr rise from 0 to 0.1 over 1000 steps warmup_steps: 64000 #lr rise from 0 to 0.1 over 1000 steps
...@@ -157,6 +157,10 @@ BACKBONES = { ...@@ -157,6 +157,10 @@ BACKBONES = {
"cspdarknettiny": CSPDARKNETTINY "cspdarknettiny": CSPDARKNETTINY
} }
CSPBlockConfig = collections.namedtuple('CSPBlockConfig', ['layer', 'stack', 'repetitions', 'bottleneck',
'filters', 'kernel_size', 'strides', 'padding', 'activation', 'route',
'output_name', 'is_output'])
@ks.utils.register_keras_serializable(package='yolo') @ks.utils.register_keras_serializable(package='yolo')
class Darknet(ks.Model): class Darknet(ks.Model):
...@@ -177,7 +181,11 @@ class Darknet(ks.Model): ...@@ -177,7 +181,11 @@ class Darknet(ks.Model):
config=None, config=None,
**kwargs): **kwargs):
layer_specs, splits = Darknet.get_config(model_id) # layer_specs, splits = Darknet.get_config(model_id)
print(len(BACKBONES[model_id]['backbone'][-1]))
layer_specs = [CSPBlockConfig(*config) for config in BACKBONES[model_id]['backbone']]
splits = BACKBONES[model_id]['splits']
self._model_name = model_id self._model_name = model_id
self._splits = splits self._splits = splits
self._input_shape = input_shape self._input_shape = input_shape
......
...@@ -6,7 +6,7 @@ import tensorflow.keras as ks ...@@ -6,7 +6,7 @@ import tensorflow.keras as ks
import tensorflow.keras.backend as K import tensorflow.keras.backend as K
from ._Identity import Identity from ._Identity import Identity
from yolo.modeling.functions.mish_activation import mish from official.vision.beta.projects.yolo.modeling.functions.mish_activation import mish
@ks.utils.register_keras_serializable(package='yolo') @ks.utils.register_keras_serializable(package='yolo')
......
"""Contains common building blocks for yolo neural networks."""
import tensorflow as tf
import tensorflow.keras as ks
@ks.utils.register_keras_serializable(package='yolo')
class Identity(ks.layers.Layer):
  """Pass-through Keras layer that returns its input unchanged.

  Useful as a placeholder wherever a layer object is required but no
  transformation should be applied (e.g. an optional branch of a model).
  Registered as a serializable Keras object under the 'yolo' package so
  models containing it can be saved and reloaded.
  """

  def __init__(self, **kwargs):
    """Forward all keyword arguments (e.g. `name`, `dtype`) to the base Layer."""
    # No state of our own; the redundant bare `return` from the original
    # implementation has been dropped.
    super().__init__(**kwargs)

  def call(self, input):  # noqa: A002 — keeps the original public argument name
    """Return `input` unchanged."""
    return input
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment